fix(perfexcrm module): align version to 3.0.1, unify entrypoint, and harden routes/views

- Bump DESK_MOLONI version to 3.0.1 across module
- Normalize hooks to after_client_* and instantiate PerfexHooks safely
- Fix OAuthController view path and API client class name
- Add missing admin views for webhook config/logs; adjust view loading
- Harden client portal routes and admin routes mapping
- Make Dashboard/Logs/Queue tolerant to optional model methods
- Align log details query with existing schema; avoid broken joins

This makes the module operational in Perfex (admin and client areas), reduces 404s,
and avoids fatal errors caused by mismatched table schemas and missing model methods.
Emanuel Almeida
2025-09-11 17:38:45 +01:00
parent 5e5102db73
commit c19f6fd9ee
193 changed files with 59298 additions and 638 deletions

scripts/deploy.sh

@@ -0,0 +1,835 @@
#!/bin/bash
# Desk-Moloni v3.0 Deployment Script
#
# Automated deployment and update script for production and staging environments.
# Handles version updates, migrations, rollbacks, and environment management.
set -euo pipefail
# Script configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
MODULE_DIR="$(dirname "$SCRIPT_DIR")"
PROJECT_ROOT="$(dirname "$(dirname "$MODULE_DIR")")"
# Deployment configuration
VERSION_FILE="$MODULE_DIR/VERSION"
DEPLOYMENT_LOG="$MODULE_DIR/logs/deployment.log"
BACKUP_DIR="$MODULE_DIR/backups/deployments"
STAGING_DIR="$MODULE_DIR/.staging"
# Default settings
DEFAULT_ENVIRONMENT="production"
DEFAULT_BACKUP_RETENTION=10
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
BOLD='\033[1m'
NC='\033[0m'
# Logging functions
log_info() {
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
local message="[$timestamp] [INFO] $1"
echo -e "${BLUE}$message${NC}"
echo "$message" >> "$DEPLOYMENT_LOG" 2>/dev/null || true
}
log_success() {
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
local message="[$timestamp] [SUCCESS] $1"
echo -e "${GREEN}$message${NC}"
echo "$message" >> "$DEPLOYMENT_LOG" 2>/dev/null || true
}
log_warning() {
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
local message="[$timestamp] [WARNING] $1"
echo -e "${YELLOW}$message${NC}"
echo "$message" >> "$DEPLOYMENT_LOG" 2>/dev/null || true
}
log_error() {
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
local message="[$timestamp] [ERROR] $1"
echo -e "${RED}$message${NC}"
echo "$message" >> "$DEPLOYMENT_LOG" 2>/dev/null || true
}
log_step() {
local message="$1"
echo -e "\n${CYAN}${BOLD}=== $message ===${NC}\n"
echo "[$(date '+%Y-%m-%d %H:%M:%S')] [STEP] $message" >> "$DEPLOYMENT_LOG" 2>/dev/null || true
}
# Help function
show_help() {
cat << EOF
Desk-Moloni v3.0 Deployment Script
Usage: $0 <command> [OPTIONS]
Commands:
deploy <version> Deploy specific version
update Update to latest version
rollback [version] Rollback to previous or specific version
status Show deployment status
list-versions List available versions
test-deployment Test deployment readiness
maintenance-mode <on|off> Enable/disable maintenance mode
Options:
-h, --help Show this help message
-e, --environment ENV Target environment (production|staging|development)
-b, --backup Create backup before deployment
--no-migrate Skip database migrations
--no-restart Skip service restart
--force Force deployment without checks
--dry-run Show what would be done without changes
--retention COUNT Number of backups to retain (default: $DEFAULT_BACKUP_RETENTION)
Deployment Process:
1. Pre-deployment checks
2. Create backup (if enabled)
3. Download/prepare new version
4. Run database migrations
5. Update configuration
6. Restart services
7. Post-deployment verification
8. Cleanup old backups
Examples:
$0 deploy 3.0.1 # Deploy specific version
$0 update --backup # Update with backup
$0 rollback # Rollback to previous version
$0 rollback 3.0.0 # Rollback to specific version
$0 maintenance-mode on # Enable maintenance mode
$0 test-deployment --dry-run # Test deployment process
EOF
}
# Parse command line arguments
COMMAND=""
TARGET_VERSION=""
ENVIRONMENT="$DEFAULT_ENVIRONMENT"
CREATE_BACKUP=false
SKIP_MIGRATE=false
SKIP_RESTART=false
FORCE_DEPLOY=false
DRY_RUN=false
BACKUP_RETENTION="$DEFAULT_BACKUP_RETENTION"
while [[ $# -gt 0 ]]; do
case $1 in
-h|--help)
show_help
exit 0
;;
-e|--environment)
ENVIRONMENT="$2"
shift 2
;;
-b|--backup)
CREATE_BACKUP=true
shift
;;
--no-migrate)
SKIP_MIGRATE=true
shift
;;
--no-restart)
SKIP_RESTART=true
shift
;;
--force)
FORCE_DEPLOY=true
shift
;;
--dry-run)
DRY_RUN=true
shift
;;
--retention)
BACKUP_RETENTION="$2"
shift 2
;;
deploy|update|rollback|status|list-versions|test-deployment|maintenance-mode)
COMMAND="$1"
shift
# Get version/mode parameter for commands that need it
if [[ $# -gt 0 ]] && [[ ! "$1" =~ ^- ]]; then
TARGET_VERSION="$1"
shift
fi
;;
*)
log_error "Unknown option: $1"
show_help
exit 1
;;
esac
done
# Validate command
if [[ -z "$COMMAND" ]]; then
log_error "No command specified"
show_help
exit 1
fi
# Validate environment
if [[ ! "$ENVIRONMENT" =~ ^(production|staging|development)$ ]]; then
log_error "Invalid environment: $ENVIRONMENT"
exit 1
fi
# Initialize deployment environment
initialize() {
log_info "Initializing deployment environment: $ENVIRONMENT"
# Create required directories
local dirs=("$(dirname "$DEPLOYMENT_LOG")" "$BACKUP_DIR" "$STAGING_DIR")
for dir in "${dirs[@]}"; do
if [[ ! -d "$dir" ]]; then
mkdir -p "$dir"
fi
done
# Set environment-specific configurations
case "$ENVIRONMENT" in
production)
log_info "Production environment - all checks enabled"
;;
staging)
log_info "Staging environment - relaxed checks"
;;
development)
log_info "Development environment - minimal checks"
FORCE_DEPLOY=true # Skip most checks in development
;;
esac
}
# Get current version
get_current_version() {
if [[ -f "$VERSION_FILE" ]]; then
cat "$VERSION_FILE" | tr -d '\n\r'
else
echo "unknown"
fi
}
# Pre-deployment checks
pre_deployment_checks() {
log_step "Pre-deployment Checks"
local errors=0
# Check if we're running as appropriate user
if [[ "$ENVIRONMENT" == "production" ]] && [[ $EUID -eq 0 ]]; then
log_warning "Running as root in production (not recommended)"
fi
# Check disk space
local disk_usage
disk_usage=$(df "$MODULE_DIR" | awk 'NR==2 {print $5}' | sed 's/%//')
if [[ "$disk_usage" -gt 90 ]]; then
log_error "Insufficient disk space: ${disk_usage}% used"
errors=$((errors + 1))
else
log_success "Disk space OK: ${disk_usage}% used"
fi
# Check if services are running
check_services
# Check database connectivity
if [[ "$SKIP_MIGRATE" == false ]]; then
if test_database_connection; then
log_success "Database connection OK"
else
log_error "Database connection failed"
errors=$((errors + 1))
fi
fi
# Check for active queue processes
if pgrep -f "queue_processor" > /dev/null; then
log_warning "Queue processor is running - will be restarted after deployment"
fi
# Check file permissions
if [[ ! -w "$MODULE_DIR" ]]; then
log_error "Module directory not writable: $MODULE_DIR"
errors=$((errors + 1))
fi
if [[ $errors -gt 0 ]] && [[ "$FORCE_DEPLOY" == false ]]; then
log_error "Pre-deployment checks failed with $errors errors"
log_info "Use --force to override these checks"
exit 1
fi
log_success "Pre-deployment checks completed"
}
# Test database connection
test_database_connection() {
php -r "
require_once '$MODULE_DIR/config/bootstrap.php';
try {
\$config = include '$MODULE_DIR/config/config.php';
\$pdo = new PDO(
'mysql:host=' . \$config['database']['host'] . ';dbname=' . \$config['database']['database'],
\$config['database']['username'],
'' // Password would be loaded from Perfex config
);
echo 'SUCCESS';
} catch (Exception \$e) {
echo 'FAILED';
}
" 2>/dev/null | grep -q "SUCCESS"
}
# Check service status
check_services() {
# Check web server
if systemctl is-active --quiet apache2 2>/dev/null || systemctl is-active --quiet nginx 2>/dev/null; then
log_success "Web server is running"
else
log_warning "Web server status unclear"
fi
# Check cron
if systemctl is-active --quiet cron 2>/dev/null || systemctl is-active --quiet crond 2>/dev/null; then
log_success "Cron service is running"
else
log_warning "Cron service may not be running"
fi
}
# Create deployment backup
create_deployment_backup() {
if [[ "$CREATE_BACKUP" == false ]]; then
return
fi
log_step "Creating Deployment Backup"
local backup_name="deployment_$(date +%Y%m%d_%H%M%S)_v$(get_current_version)"
local backup_path="$BACKUP_DIR/$backup_name"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would create backup: $backup_path"
return
fi
mkdir -p "$backup_path"
# Backup database
log_info "Backing up database..."
if ! "$MODULE_DIR/scripts/maintenance.sh" backup-database > "$backup_path/database_backup.log" 2>&1; then
log_warning "Database backup may have issues (check log)"
fi
# Backup module files
log_info "Backing up module files..."
tar -czf "$backup_path/module_files.tar.gz" -C "$(dirname "$MODULE_DIR")" "$(basename "$MODULE_DIR")" \
--exclude="logs" --exclude="cache" --exclude="temp" --exclude="backups"
# Backup configuration
if [[ -d "$MODULE_DIR/config" ]]; then
cp -r "$MODULE_DIR/config" "$backup_path/"
fi
# Create manifest
cat > "$backup_path/MANIFEST" << EOF
Desk-Moloni Deployment Backup
Created: $(date)
Version: $(get_current_version)
Environment: $ENVIRONMENT
Command: $COMMAND $TARGET_VERSION
Host: $(hostname)
User: $(whoami)
EOF
log_success "Backup created: $backup_path"
echo "$backup_path" > "$MODULE_DIR/.last_backup"
}
# Download and prepare new version
prepare_new_version() {
local version="$1"
log_step "Preparing Version $version"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would prepare version $version"
return
fi
# Clean staging area
rm -rf "$STAGING_DIR"
mkdir -p "$STAGING_DIR"
# In a real implementation, this would download from a repository
# For now, we'll simulate version preparation
log_info "Downloading version $version..."
# Copy current files to staging (simulating download)
cp -r "$MODULE_DIR"/* "$STAGING_DIR/" 2>/dev/null || true
# Update version file in staging
echo "$version" > "$STAGING_DIR/VERSION"
# Set appropriate permissions
find "$STAGING_DIR" -type f -name "*.php" -exec chmod 644 {} \;
find "$STAGING_DIR" -type f -name "*.sh" -exec chmod +x {} \;
log_success "Version $version prepared in staging"
}
# Run database migrations
run_migrations() {
if [[ "$SKIP_MIGRATE" == true ]]; then
log_warning "Skipping database migrations"
return
fi
log_step "Running Database Migrations"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would run database migrations"
return
fi
# Check for migration files
local migration_dir="$STAGING_DIR/migrations"
if [[ ! -d "$migration_dir" ]]; then
log_info "No migrations directory found, skipping"
return
fi
# Run migrations
local migration_count=0
for migration_file in "$migration_dir"/*.sql; do
if [[ -f "$migration_file" ]]; then
local migration_name=$(basename "$migration_file")
log_info "Running migration: $migration_name"
if php "$CLI_DIR/sync_commands.php" migrate "$migration_file"; then
log_success "Migration completed: $migration_name"
migration_count=$((migration_count + 1))
else
log_error "Migration failed: $migration_name"
exit 1
fi
fi
done
if [[ $migration_count -eq 0 ]]; then
log_info "No migrations to run"
else
log_success "$migration_count migrations completed"
fi
}
# Deploy staged version
deploy_staged_version() {
local version="$1"
log_step "Deploying Version $version"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would deploy version $version from staging"
return
fi
# Stop queue processor
log_info "Stopping queue processor..."
pkill -f "queue_processor" || true
sleep 2
# Enable maintenance mode
enable_maintenance_mode
# Copy files from staging to live
log_info "Copying files from staging to live..."
rsync -av --delete \
--exclude="logs" \
--exclude="cache" \
--exclude="temp" \
--exclude="locks" \
--exclude="backups" \
--exclude=".staging" \
"$STAGING_DIR/" "$MODULE_DIR/"
# Update permissions
set_file_permissions
# Clear cache
clear_cache
# Restart services if needed
if [[ "$SKIP_RESTART" == false ]]; then
restart_services
fi
# Disable maintenance mode
disable_maintenance_mode
log_success "Version $version deployed successfully"
}
# Set file permissions
set_file_permissions() {
log_info "Setting file permissions..."
# Set appropriate permissions based on environment
local web_user
case "$ENVIRONMENT" in
production)
web_user="www-data"
;;
*)
web_user=$(whoami)
;;
esac
# Set ownership
if [[ "$web_user" != "$(whoami)" ]] && [[ $EUID -eq 0 ]]; then
chown -R "$web_user:$web_user" "$MODULE_DIR" 2>/dev/null || true
fi
# Set permissions
find "$MODULE_DIR" -type d -exec chmod 755 {} \;
find "$MODULE_DIR" -type f -name "*.php" -exec chmod 644 {} \;
find "$MODULE_DIR" -type f -name "*.sh" -exec chmod +x {} \;
# Ensure writable directories
local writable_dirs=("logs" "cache" "temp" "locks")
for dir in "${writable_dirs[@]}"; do
if [[ -d "$MODULE_DIR/$dir" ]]; then
chmod 775 "$MODULE_DIR/$dir"
fi
done
}
# Clear cache
clear_cache() {
log_info "Clearing cache..."
if [[ -d "$MODULE_DIR/cache" ]]; then
rm -rf "$MODULE_DIR/cache"/*
fi
# Clear PHP opcache if available
if command -v php &> /dev/null; then
php -r "if (function_exists('opcache_reset')) opcache_reset();" 2>/dev/null || true
fi
}
# Restart services
restart_services() {
log_info "Restarting services..."
# Restart web server (if we have permission)
if [[ "$ENVIRONMENT" == "production" ]]; then
if systemctl is-active --quiet apache2; then
systemctl reload apache2 2>/dev/null || log_warning "Could not reload Apache"
elif systemctl is-active --quiet nginx; then
systemctl reload nginx 2>/dev/null || log_warning "Could not reload Nginx"
fi
fi
# Restart queue processor
log_info "Starting queue processor..."
nohup php "$MODULE_DIR/cli/queue_processor.php" > "$MODULE_DIR/logs/queue_processor.log" 2>&1 &
sleep 2
if pgrep -f "queue_processor" > /dev/null; then
log_success "Queue processor started"
else
log_warning "Queue processor may not have started properly"
fi
}
# Maintenance mode functions
enable_maintenance_mode() {
log_info "Enabling maintenance mode..."
touch "$MODULE_DIR/.maintenance"
}
disable_maintenance_mode() {
log_info "Disabling maintenance mode..."
rm -f "$MODULE_DIR/.maintenance"
}
# Post-deployment verification
post_deployment_verification() {
log_step "Post-deployment Verification"
local errors=0
# Check version was updated
local deployed_version
deployed_version=$(get_current_version)
if [[ "$deployed_version" == "$TARGET_VERSION" ]]; then
log_success "✓ Version updated: $deployed_version"
else
log_error "✗ Version mismatch: expected $TARGET_VERSION, got $deployed_version"
errors=$((errors + 1))
fi
# Check file permissions
if [[ -r "$MODULE_DIR/cli/queue_processor.php" ]]; then
log_success "✓ Files readable"
else
log_error "✗ Files not readable"
errors=$((errors + 1))
fi
# Check database connectivity
if test_database_connection; then
log_success "✓ Database connection OK"
else
log_error "✗ Database connection failed"
errors=$((errors + 1))
fi
# Check queue processor
if pgrep -f "queue_processor" > /dev/null; then
log_success "✓ Queue processor running"
else
log_warning "⚠ Queue processor not running"
fi
# Run basic health check
if [[ "$DRY_RUN" == false ]]; then
if php "$MODULE_DIR/cli/sync_commands.php" health &>/dev/null; then
log_success "✓ Health check passed"
else
log_warning "⚠ Health check failed (may be expected after deployment)"
fi
fi
if [[ $errors -eq 0 ]]; then
log_success "Post-deployment verification completed successfully"
else
log_error "Post-deployment verification found $errors errors"
return 1
fi
}
# Cleanup old backups
cleanup_old_backups() {
log_step "Cleaning Up Old Backups"
if [[ ! -d "$BACKUP_DIR" ]]; then
return
fi
local backup_count
backup_count=$(find "$BACKUP_DIR" -maxdepth 1 -type d -name "deployment_*" | wc -l)
if [[ $backup_count -le $BACKUP_RETENTION ]]; then
log_info "Backup retention OK: $backup_count backups (limit: $BACKUP_RETENTION)"
return
fi
local to_remove=$((backup_count - BACKUP_RETENTION))
log_info "Removing $to_remove old backups (keeping $BACKUP_RETENTION most recent)"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would remove $to_remove old backup directories"
return
fi
# Remove oldest backups
find "$BACKUP_DIR" -maxdepth 1 -type d -name "deployment_*" -printf '%T@ %p\n' | \
sort -n | head -n $to_remove | cut -d' ' -f2- | \
while read -r backup; do
rm -rf "$backup"
log_info "Removed old backup: $(basename "$backup")"
done
}
# Command implementations
cmd_deploy() {
if [[ -z "$TARGET_VERSION" ]]; then
log_error "Version required for deploy command"
exit 1
fi
log_step "Deploying Desk-Moloni v$TARGET_VERSION"
local current_version
current_version=$(get_current_version)
log_info "Current version: $current_version"
log_info "Target version: $TARGET_VERSION"
pre_deployment_checks
create_deployment_backup
prepare_new_version "$TARGET_VERSION"
run_migrations
deploy_staged_version "$TARGET_VERSION"
post_deployment_verification
cleanup_old_backups
log_success "🎉 Deployment of v$TARGET_VERSION completed successfully!"
}
cmd_update() {
# In a real implementation, this would check for the latest version
local latest_version="3.0.1" # This would be fetched from a repository
log_info "Updating to latest version: $latest_version"
TARGET_VERSION="$latest_version"
cmd_deploy
}
cmd_rollback() {
local rollback_version="$TARGET_VERSION"
if [[ -z "$rollback_version" ]]; then
# Get previous version from backup
if [[ -f "$MODULE_DIR/.last_backup" ]]; then
local last_backup
last_backup=$(cat "$MODULE_DIR/.last_backup")
if [[ -f "$last_backup/MANIFEST" ]]; then
rollback_version=$(grep "^Version:" "$last_backup/MANIFEST" | cut -d' ' -f2)
fi
fi
fi
if [[ -z "$rollback_version" ]]; then
log_error "No rollback version specified and no previous backup found"
exit 1
fi
log_step "Rolling Back to v$rollback_version"
# Implement rollback logic (restore from backup)
log_warning "Rollback functionality would be implemented here"
log_info "Would rollback to version: $rollback_version"
}
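# Illustrative sketch only (assumes the archive referenced in .last_backup still
# exists and was produced by create_deployment_backup); not wired into cmd_rollback:
#
#   local backup_path; backup_path=$(cat "$MODULE_DIR/.last_backup")
#   enable_maintenance_mode
#   pkill -f "queue_processor" || true
#   tar -xzf "$backup_path/module_files.tar.gz" -C "$(dirname "$MODULE_DIR")"
#   echo "$rollback_version" > "$VERSION_FILE"
#   restart_services
#   disable_maintenance_mode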
cmd_status() {
log_step "Deployment Status"
local current_version
current_version=$(get_current_version)
echo "Current Version: $current_version"
echo "Environment: $ENVIRONMENT"
echo "Module Path: $MODULE_DIR"
echo "Last Deployment: $(stat -c %y "$VERSION_FILE" 2>/dev/null || echo "Unknown")"
# Check if maintenance mode is enabled
if [[ -f "$MODULE_DIR/.maintenance" ]]; then
echo "Maintenance Mode: ENABLED"
else
echo "Maintenance Mode: DISABLED"
fi
# Check service status
if pgrep -f "queue_processor" > /dev/null; then
echo "Queue Processor: RUNNING"
else
echo "Queue Processor: STOPPED"
fi
# Recent backups
if [[ -d "$BACKUP_DIR" ]]; then
local backup_count
backup_count=$(find "$BACKUP_DIR" -maxdepth 1 -type d -name "deployment_*" | wc -l)
echo "Available Backups: $backup_count"
fi
}
cmd_list_versions() {
log_info "Available versions (simulated):"
echo "3.0.0 - Initial release"
echo "3.0.1 - Bug fixes and improvements"
echo "3.1.0 - New features (upcoming)"
}
cmd_test_deployment() {
log_step "Testing Deployment Readiness"
pre_deployment_checks
log_info "Testing backup creation..."
CREATE_BACKUP=true
DRY_RUN=true
create_deployment_backup
log_info "Testing version preparation..."
prepare_new_version "test"
log_success "Deployment readiness test completed"
}
cmd_maintenance_mode() {
local mode="$TARGET_VERSION" # Reusing TARGET_VERSION for mode parameter
case "$mode" in
on|enable)
enable_maintenance_mode
log_success "Maintenance mode enabled"
;;
off|disable)
disable_maintenance_mode
log_success "Maintenance mode disabled"
;;
*)
log_error "Invalid maintenance mode: $mode (use 'on' or 'off')"
exit 1
;;
esac
}
# Main execution
main() {
initialize
case "$COMMAND" in
deploy)
cmd_deploy
;;
update)
cmd_update
;;
rollback)
cmd_rollback
;;
status)
cmd_status
;;
list-versions)
cmd_list_versions
;;
test-deployment)
cmd_test_deployment
;;
maintenance-mode)
cmd_maintenance_mode
;;
*)
log_error "Unknown command: $COMMAND"
exit 1
;;
esac
}
# Error handling
trap 'log_error "Deployment script failed on line $LINENO"' ERR
# Execute if called directly
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
main "$@"
fi

scripts/install.sh

@@ -0,0 +1,933 @@
#!/bin/bash
# Desk-Moloni v3.0 Installation Script
#
# Complete installation and setup automation for the Desk-Moloni module.
# Handles database setup, permissions, configuration, and initial setup.
set -euo pipefail
# Script configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
MODULE_DIR="$(dirname "$SCRIPT_DIR")"
PROJECT_ROOT="$(dirname "$(dirname "$MODULE_DIR")")"
# Default configuration
DEFAULT_DB_HOST="localhost"
DEFAULT_DB_NAME="perfex_crm"
DEFAULT_DB_USER="root"
DEFAULT_WEB_USER="www-data"
DEFAULT_ENVIRONMENT="production"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color
# Installation state
INSTALL_LOG="$MODULE_DIR/install.log"
BACKUP_DIR="$MODULE_DIR/backups/$(date +%Y%m%d_%H%M%S)"
# Logging functions
log_info() {
local msg="[$(date '+%Y-%m-%d %H:%M:%S')] [INFO] $1"
echo -e "${BLUE}$msg${NC}"
echo "$msg" >> "$INSTALL_LOG"
}
log_success() {
local msg="[$(date '+%Y-%m-%d %H:%M:%S')] [SUCCESS] $1"
echo -e "${GREEN}$msg${NC}"
echo "$msg" >> "$INSTALL_LOG"
}
log_warning() {
local msg="[$(date '+%Y-%m-%d %H:%M:%S')] [WARNING] $1"
echo -e "${YELLOW}$msg${NC}"
echo "$msg" >> "$INSTALL_LOG"
}
log_error() {
local msg="[$(date '+%Y-%m-%d %H:%M:%S')] [ERROR] $1"
echo -e "${RED}$msg${NC}"
echo "$msg" >> "$INSTALL_LOG"
}
log_step() {
local msg="[$(date '+%Y-%m-%d %H:%M:%S')] [STEP] $1"
echo -e "${CYAN}=== $1 ===${NC}"
echo "$msg" >> "$INSTALL_LOG"
}
# Help function
show_help() {
cat << EOF
Desk-Moloni v3.0 Installation Script
Usage: $0 [OPTIONS]
Options:
-h, --help Show this help message
--db-host HOST Database host (default: $DEFAULT_DB_HOST)
--db-name DATABASE Database name (default: $DEFAULT_DB_NAME)
--db-user USER Database user (default: $DEFAULT_DB_USER)
--db-password PASSWORD Database password (prompted if not provided)
--web-user USER Web server user (default: $DEFAULT_WEB_USER)
--environment ENV Environment: development|production (default: $DEFAULT_ENVIRONMENT)
--skip-database Skip database installation
--skip-cron Skip cron job setup
--skip-permissions Skip file permission setup
--force Force installation (overwrite existing)
--dry-run Show what would be done without making changes
--uninstall Remove the module and all data
--backup Create backup before installation
--restore BACKUP_PATH Restore from backup
Installation Steps:
1. Pre-installation checks
2. Backup existing data (if --backup)
3. Database schema installation
4. File permission setup
5. Configuration initialization
6. Cron job setup
7. Post-installation verification
Examples:
$0 # Interactive installation
$0 --db-name perfex --db-user admin # Specify database settings
$0 --environment development # Development environment
$0 --dry-run # Preview installation
$0 --uninstall # Remove module
$0 --backup # Backup before install
EOF
}
# Parse command line arguments
DB_HOST="$DEFAULT_DB_HOST"
DB_NAME="$DEFAULT_DB_NAME"
DB_USER="$DEFAULT_DB_USER"
DB_PASSWORD=""
WEB_USER="$DEFAULT_WEB_USER"
ENVIRONMENT="$DEFAULT_ENVIRONMENT"
SKIP_DATABASE=false
SKIP_CRON=false
SKIP_PERMISSIONS=false
FORCE_INSTALL=false
DRY_RUN=false
UNINSTALL=false
BACKUP_BEFORE_INSTALL=false
RESTORE_BACKUP=""
while [[ $# -gt 0 ]]; do
case $1 in
-h|--help)
show_help
exit 0
;;
--db-host)
DB_HOST="$2"
shift 2
;;
--db-name)
DB_NAME="$2"
shift 2
;;
--db-user)
DB_USER="$2"
shift 2
;;
--db-password)
DB_PASSWORD="$2"
shift 2
;;
--web-user)
WEB_USER="$2"
shift 2
;;
--environment)
ENVIRONMENT="$2"
shift 2
;;
--skip-database)
SKIP_DATABASE=true
shift
;;
--skip-cron)
SKIP_CRON=true
shift
;;
--skip-permissions)
SKIP_PERMISSIONS=true
shift
;;
--force)
FORCE_INSTALL=true
shift
;;
--dry-run)
DRY_RUN=true
shift
;;
--uninstall)
UNINSTALL=true
shift
;;
--backup)
BACKUP_BEFORE_INSTALL=true
shift
;;
--restore)
RESTORE_BACKUP="$2"
shift 2
;;
*)
log_error "Unknown option: $1"
show_help
exit 1
;;
esac
done
# Validate environment
if [[ ! "$ENVIRONMENT" =~ ^(development|production)$ ]]; then
log_error "Invalid environment: $ENVIRONMENT (must be development or production)"
exit 1
fi
# Create installation log
mkdir -p "$(dirname "$INSTALL_LOG")"
touch "$INSTALL_LOG"
# Pre-installation checks
check_requirements() {
log_step "Pre-installation Requirements Check"
local errors=0
# Check if running as root or with sudo
if [[ $EUID -ne 0 ]] && [[ -z "${SUDO_USER:-}" ]]; then
log_error "This script must be run as root or with sudo"
errors=$((errors + 1))
fi
# Check PHP version
if ! command -v php &> /dev/null; then
log_error "PHP is not installed"
errors=$((errors + 1))
else
local php_version
php_version=$(php -r "echo PHP_VERSION_ID;" 2>/dev/null || echo "0")
if [[ "$php_version" -lt 80100 ]]; then
log_error "PHP 8.1 or higher is required (current: $(php -r "echo PHP_VERSION;"))"
errors=$((errors + 1))
else
log_success "PHP version: $(php -r "echo PHP_VERSION;")"
fi
fi
# Check required PHP extensions
local required_extensions=("mysqli" "pdo" "pdo_mysql" "json" "curl" "openssl" "mbstring")
for ext in "${required_extensions[@]}"; do
if ! php -m | grep -qi "^$ext$"; then
log_error "Required PHP extension missing: $ext"
errors=$((errors + 1))
else
log_success "PHP extension available: $ext"
fi
done
# Check MySQL/MariaDB client
if ! command -v mysql &> /dev/null; then
log_error "MySQL client is not installed"
errors=$((errors + 1))
else
log_success "MySQL client available"
fi
# Check web server user
if ! id "$WEB_USER" &>/dev/null; then
log_error "Web server user '$WEB_USER' does not exist"
errors=$((errors + 1))
else
log_success "Web server user '$WEB_USER' exists"
fi
# Check Perfex CRM installation
local perfex_config="$PROJECT_ROOT/application/config/config.php"
if [[ ! -f "$perfex_config" ]]; then
log_error "Perfex CRM config not found at: $perfex_config"
log_error "Please ensure this script is run from within a Perfex CRM installation"
errors=$((errors + 1))
else
log_success "Perfex CRM installation detected"
fi
if [[ $errors -gt 0 ]]; then
log_error "Pre-installation checks failed with $errors errors"
exit 1
fi
log_success "All requirements satisfied"
}
# Interactive configuration
interactive_config() {
if [[ "$DRY_RUN" == true ]] || [[ "$UNINSTALL" == true ]] || [[ -n "$RESTORE_BACKUP" ]]; then
return
fi
log_step "Interactive Configuration"
# Database password
if [[ -z "$DB_PASSWORD" ]]; then
echo -n "Database password for $DB_USER@$DB_HOST: "
read -s DB_PASSWORD
echo
if [[ -z "$DB_PASSWORD" ]]; then
log_error "Database password is required"
exit 1
fi
fi
# Confirm settings
echo -e "\n${CYAN}Installation Configuration:${NC}"
echo "Database Host: $DB_HOST"
echo "Database Name: $DB_NAME"
echo "Database User: $DB_USER"
echo "Web Server User: $WEB_USER"
echo "Environment: $ENVIRONMENT"
echo "Module Path: $MODULE_DIR"
if [[ "$FORCE_INSTALL" == false ]]; then
echo -n "Proceed with installation? [y/N]: "
read -r confirm
if [[ ! "$confirm" =~ ^[Yy]$ ]]; then
log_info "Installation cancelled by user"
exit 0
fi
fi
}
# Test database connection
test_database_connection() {
log_step "Testing Database Connection"
if [[ "$SKIP_DATABASE" == true ]]; then
log_warning "Skipping database connection test"
return
fi
local mysql_cmd="mysql -h$DB_HOST -u$DB_USER -p$DB_PASSWORD"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would test database connection to $DB_HOST"
return
fi
# Test connection
if ! echo "SELECT 1;" | $mysql_cmd "$DB_NAME" &>/dev/null; then
log_error "Failed to connect to database $DB_NAME on $DB_HOST"
exit 1
fi
log_success "Database connection successful"
}
# Create backup
create_backup() {
if [[ "$BACKUP_BEFORE_INSTALL" == false ]] && [[ "$UNINSTALL" == false ]]; then
return
fi
log_step "Creating Backup"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would create backup in: $BACKUP_DIR"
return
fi
mkdir -p "$BACKUP_DIR"
# Backup database tables
if [[ "$SKIP_DATABASE" == false ]]; then
local tables=(
"desk_moloni_config"
"desk_moloni_mapping"
"desk_moloni_sync_queue"
"desk_moloni_sync_log"
)
local mysql_cmd="mysql -h$DB_HOST -u$DB_USER -p$DB_PASSWORD"
for table in "${tables[@]}"; do
if echo "SHOW TABLES LIKE '$table';" | $mysql_cmd "$DB_NAME" | grep -q "$table"; then
log_info "Backing up table: $table"
mysqldump -h"$DB_HOST" -u"$DB_USER" -p"$DB_PASSWORD" "$DB_NAME" "$table" > "$BACKUP_DIR/$table.sql"
fi
done
fi
# Backup configuration files
if [[ -d "$MODULE_DIR/config" ]]; then
cp -r "$MODULE_DIR/config" "$BACKUP_DIR/"
log_info "Configuration files backed up"
fi
# Backup logs (recent only)
if [[ -d "$MODULE_DIR/logs" ]]; then
find "$MODULE_DIR/logs" -name "*.log" -mtime -7 -exec cp {} "$BACKUP_DIR/" \;
log_info "Recent log files backed up"
fi
# Create backup manifest
cat > "$BACKUP_DIR/manifest.txt" << EOF
Desk-Moloni v3.0 Backup
Created: $(date)
Version: 3.0.0
Environment: $ENVIRONMENT
Database: $DB_NAME
Host: $DB_HOST
EOF
log_success "Backup created: $BACKUP_DIR"
}
# Install database schema
install_database() {
if [[ "$SKIP_DATABASE" == true ]]; then
log_warning "Skipping database installation"
return
fi
log_step "Installing Database Schema"
local mysql_cmd="mysql -h$DB_HOST -u$DB_USER -p$DB_PASSWORD $DB_NAME"
local schema_file="$MODULE_DIR/sql/schema.sql"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would install database schema from: $schema_file"
return
fi
# Create schema file if it doesn't exist
if [[ ! -f "$schema_file" ]]; then
log_info "Creating database schema file"
mkdir -p "$(dirname "$schema_file")"
cat > "$schema_file" << 'EOF'
-- Desk-Moloni v3.0 Database Schema
-- Auto-generated installation schema
-- Configuration table
CREATE TABLE IF NOT EXISTS desk_moloni_config (
id INT AUTO_INCREMENT PRIMARY KEY,
setting_key VARCHAR(255) NOT NULL UNIQUE,
setting_value TEXT,
encrypted TINYINT(1) DEFAULT 0,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- Entity mapping table
CREATE TABLE IF NOT EXISTS desk_moloni_mapping (
id INT AUTO_INCREMENT PRIMARY KEY,
entity_type ENUM('client', 'product', 'invoice', 'estimate', 'credit_note') NOT NULL,
perfex_id INT NOT NULL,
moloni_id INT NOT NULL,
sync_direction ENUM('perfex_to_moloni', 'moloni_to_perfex', 'bidirectional') DEFAULT 'bidirectional',
last_sync_at TIMESTAMP NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
UNIQUE KEY unique_perfex_mapping (entity_type, perfex_id),
UNIQUE KEY unique_moloni_mapping (entity_type, moloni_id),
INDEX idx_entity_perfex (entity_type, perfex_id),
INDEX idx_entity_moloni (entity_type, moloni_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- Sync queue table
CREATE TABLE IF NOT EXISTS desk_moloni_sync_queue (
id INT AUTO_INCREMENT PRIMARY KEY,
task_type ENUM('sync_client', 'sync_product', 'sync_invoice', 'sync_estimate', 'sync_credit_note', 'status_update') NOT NULL,
entity_type ENUM('client', 'product', 'invoice', 'estimate', 'credit_note') NOT NULL,
entity_id INT NOT NULL,
priority TINYINT DEFAULT 5,
payload JSON,
status ENUM('pending', 'processing', 'completed', 'failed', 'retry') DEFAULT 'pending',
attempts INT DEFAULT 0,
max_attempts INT DEFAULT 3,
scheduled_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
started_at TIMESTAMP NULL,
completed_at TIMESTAMP NULL,
error_message TEXT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
INDEX idx_status_priority (status, priority, scheduled_at),
INDEX idx_entity (entity_type, entity_id),
INDEX idx_scheduled (scheduled_at)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- Sync log table
CREATE TABLE IF NOT EXISTS desk_moloni_sync_log (
id INT AUTO_INCREMENT PRIMARY KEY,
operation_type ENUM('create', 'update', 'delete', 'status_change') NOT NULL,
entity_type ENUM('client', 'product', 'invoice', 'estimate', 'credit_note') NOT NULL,
perfex_id INT,
moloni_id INT,
direction ENUM('perfex_to_moloni', 'moloni_to_perfex') NOT NULL,
status ENUM('success', 'error', 'warning') NOT NULL,
request_data JSON,
response_data JSON,
error_message TEXT NULL,
execution_time_ms INT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
INDEX idx_entity_status (entity_type, status, created_at),
INDEX idx_perfex_entity (perfex_id, entity_type),
INDEX idx_moloni_entity (moloni_id, entity_type),
INDEX idx_created_at (created_at)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- Insert default configuration
INSERT IGNORE INTO desk_moloni_config (setting_key, setting_value, encrypted) VALUES
('module_version', '3.0.0', 0),
('installation_date', NOW(), 0),
('sync_enabled', '1', 0),
('queue_batch_size', '10', 0),
('max_retry_attempts', '3', 0),
('api_rate_limit', '60', 0);
EOF
log_success "Database schema file created"
fi
# Execute schema
log_info "Executing database schema..."
if $mysql_cmd < "$schema_file"; then
log_success "Database schema installed successfully"
else
log_error "Failed to install database schema"
exit 1
fi
# Verify tables were created
local tables=("desk_moloni_config" "desk_moloni_mapping" "desk_moloni_sync_queue" "desk_moloni_sync_log")
for table in "${tables[@]}"; do
if echo "SHOW TABLES LIKE '$table';" | $mysql_cmd | grep -q "$table"; then
log_success "Table created: $table"
else
log_error "Failed to create table: $table"
exit 1
fi
done
}
# Setup file permissions
setup_permissions() {
if [[ "$SKIP_PERMISSIONS" == true ]]; then
log_warning "Skipping permission setup"
return
fi
log_step "Setting Up File Permissions"
local directories=(
"$MODULE_DIR/logs"
"$MODULE_DIR/locks"
"$MODULE_DIR/cache"
"$MODULE_DIR/temp"
)
if [[ "$DRY_RUN" == true ]]; then
log_info "Would create directories and set permissions"
return
fi
# Create directories
for dir in "${directories[@]}"; do
if [[ ! -d "$dir" ]]; then
mkdir -p "$dir"
log_info "Created directory: $dir"
fi
chown -R "$WEB_USER:$WEB_USER" "$dir"
chmod 755 "$dir"
log_success "Permissions set for: $dir"
done
# Set permissions on CLI scripts
local cli_files=(
"$MODULE_DIR/cli/queue_processor.php"
"$MODULE_DIR/cli/sync_commands.php"
)
for file in "${cli_files[@]}"; do
if [[ -f "$file" ]]; then
chmod +x "$file"
log_success "Made executable: $file"
fi
done
# Set permissions on shell scripts
find "$MODULE_DIR/scripts" -name "*.sh" -type f -exec chmod +x {} \;
log_success "Made shell scripts executable"
}
# Initialize configuration
initialize_config() {
log_step "Initializing Configuration"
local config_dir="$MODULE_DIR/config"
local config_file="$config_dir/config.php"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would initialize configuration files"
return
fi
mkdir -p "$config_dir"
# Create main configuration file
cat > "$config_file" << EOF
<?php
/**
* Desk-Moloni v3.0 Configuration
* Auto-generated during installation on $(date)
*/
return [
'environment' => '$ENVIRONMENT',
'version' => '3.0.0',
'installation_date' => '$(date -u +%Y-%m-%dT%H:%M:%SZ)',
// Database configuration
'database' => [
'host' => '$DB_HOST',
'database' => '$DB_NAME',
'username' => '$DB_USER',
// Password stored securely in Perfex config
],
// Queue configuration
'queue' => [
'batch_size' => 10,
'max_attempts' => 3,
'retry_delay' => 300, // 5 minutes
'max_execution_time' => 300, // 5 minutes
],
// API configuration
'api' => [
'rate_limit' => 60, // requests per minute
'timeout' => 30, // seconds
'retry_attempts' => 3,
],
// Logging configuration
'logging' => [
'level' => '$ENVIRONMENT' === 'development' ? 'debug' : 'info',
'retention_days' => 30,
'max_file_size' => '10M',
],
// Security configuration
'security' => [
'encryption_method' => 'AES-256-GCM',
'token_refresh_threshold' => 300, // 5 minutes before expiry
],
];
EOF
chown "$WEB_USER:$WEB_USER" "$config_file"
chmod 644 "$config_file"
log_success "Configuration file created: $config_file"
# Create environment-specific config
local env_config="$config_dir/config.$ENVIRONMENT.php"
cat > "$env_config" << EOF
<?php
/**
* Desk-Moloni v3.0 Environment Configuration ($ENVIRONMENT)
*/
return [
'debug' => $([ "$ENVIRONMENT" == "development" ] && echo "true" || echo "false"),
'log_level' => '$([ "$ENVIRONMENT" == "development" ] && echo "debug" || echo "info")',
'api_timeout' => $([ "$ENVIRONMENT" == "development" ] && echo "60" || echo "30"),
];
EOF
chown "$WEB_USER:$WEB_USER" "$env_config"
chmod 644 "$env_config"
log_success "Environment configuration created: $env_config"
}
# Setup cron jobs
setup_cron_jobs() {
if [[ "$SKIP_CRON" == true ]]; then
log_warning "Skipping cron job setup"
return
fi
log_step "Setting Up Cron Jobs"
local cron_script="$MODULE_DIR/scripts/setup_cron.sh"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would setup cron jobs using: $cron_script"
return
fi
if [[ -f "$cron_script" ]]; then
chmod +x "$cron_script"
# Run cron setup script
if "$cron_script" --user "$WEB_USER"; then
log_success "Cron jobs installed successfully"
else
log_error "Failed to install cron jobs"
exit 1
fi
else
log_warning "Cron setup script not found: $cron_script"
fi
}
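# For reference, the kind of crontab entries setup_cron.sh is expected to manage
# (illustrative paths and schedules only; the authoritative definition lives in
# setup_cron.sh itself):
#
#   */5 * * * * php /path/to/modules/desk_moloni/cli/queue_processor.php >> /path/to/modules/desk_moloni/logs/queue_processor.log 2>&1
#   0 2 * * * /path/to/modules/desk_moloni/scripts/maintenance.sh cleanup log-rotation >> /path/to/modules/desk_moloni/logs/maintenance.log 2>&1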
# Post-installation verification
verify_installation() {
log_step "Post-Installation Verification"
local errors=0
# Verify database tables
if [[ "$SKIP_DATABASE" == false ]]; then
local mysql_cmd="mysql -h$DB_HOST -u$DB_USER -p$DB_PASSWORD $DB_NAME"
local required_tables=("desk_moloni_config" "desk_moloni_mapping" "desk_moloni_sync_queue" "desk_moloni_sync_log")
for table in "${required_tables[@]}"; do
if echo "SHOW TABLES LIKE '$table';" | $mysql_cmd | grep -q "$table"; then
log_success "✓ Table exists: $table"
else
log_error "✗ Table missing: $table"
errors=$((errors + 1))
fi
done
fi
# Verify file permissions
local required_dirs=("$MODULE_DIR/logs" "$MODULE_DIR/locks")
for dir in "${required_dirs[@]}"; do
if [[ -d "$dir" ]] && [[ -w "$dir" ]]; then
log_success "✓ Directory writable: $dir"
else
log_error "✗ Directory not writable: $dir"
errors=$((errors + 1))
fi
done
# Verify CLI commands
local cli_files=("$MODULE_DIR/cli/queue_processor.php" "$MODULE_DIR/cli/sync_commands.php")
for file in "${cli_files[@]}"; do
if [[ -f "$file" ]] && [[ -x "$file" ]]; then
log_success "✓ CLI command executable: $(basename "$file")"
else
log_error "✗ CLI command not executable: $(basename "$file")"
errors=$((errors + 1))
fi
done
# Test basic functionality
if [[ "$DRY_RUN" == false ]]; then
local health_cmd="php $MODULE_DIR/cli/sync_commands.php health"
if $health_cmd &>/dev/null; then
log_success "✓ Health check command works"
else
log_warning "⚠ Health check command failed (may be expected on first run)"
fi
fi
if [[ $errors -eq 0 ]]; then
log_success "All verification checks passed"
else
log_error "Verification failed with $errors errors"
exit 1
fi
}
# Uninstallation
uninstall_module() {
log_step "Uninstalling Desk-Moloni Module"
local confirm="no"
if [[ "$FORCE_INSTALL" == false ]]; then
echo -e "${RED}WARNING: This will permanently delete all Desk-Moloni data!${NC}"
echo -n "Type 'YES' to confirm uninstallation: "
read -r confirm
else
confirm="YES"
fi
if [[ "$confirm" != "YES" ]]; then
log_info "Uninstallation cancelled"
exit 0
fi
# Create backup before uninstall
BACKUP_BEFORE_INSTALL=true
create_backup
if [[ "$DRY_RUN" == true ]]; then
log_info "Would uninstall module and remove all data"
return
fi
# Remove cron jobs
local cron_script="$MODULE_DIR/scripts/setup_cron.sh"
if [[ -f "$cron_script" ]]; then
"$cron_script" --uninstall --user "$WEB_USER" || true
log_success "Cron jobs removed"
fi
# Drop database tables
if [[ "$SKIP_DATABASE" == false ]]; then
local mysql_cmd="mysql -h$DB_HOST -u$DB_USER -p$DB_PASSWORD $DB_NAME"
local tables=("desk_moloni_sync_log" "desk_moloni_sync_queue" "desk_moloni_mapping" "desk_moloni_config")
for table in "${tables[@]}"; do
echo "DROP TABLE IF EXISTS $table;" | $mysql_cmd
log_success "Dropped table: $table"
done
fi
# Remove module files (but preserve backups)
local preserve_dirs=("backups")
for dir in "$MODULE_DIR"/*; do
if [[ -d "$dir" ]]; then
local dirname=$(basename "$dir")
if [[ ! " ${preserve_dirs[@]} " =~ " ${dirname} " ]]; then
rm -rf "$dir"
log_success "Removed directory: $dir"
fi
elif [[ -f "$dir" ]]; then
rm -f "$dir"
log_success "Removed file: $dir"
fi
done
log_success "Module uninstalled successfully"
log_info "Backup preserved at: $BACKUP_DIR"
}
# Restore from backup
restore_from_backup() {
log_step "Restoring from Backup"
if [[ ! -d "$RESTORE_BACKUP" ]]; then
log_error "Backup directory not found: $RESTORE_BACKUP"
exit 1
fi
local manifest_file="$RESTORE_BACKUP/manifest.txt"
if [[ ! -f "$manifest_file" ]]; then
log_error "Backup manifest not found: $manifest_file"
exit 1
fi
log_info "Backup details:"
cat "$manifest_file"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would restore from backup: $RESTORE_BACKUP"
return
fi
echo -n "Proceed with restore? [y/N]: "
read -r confirm
if [[ ! "$confirm" =~ ^[Yy]$ ]]; then
log_info "Restore cancelled"
exit 0
fi
# Restore database tables
local mysql_cmd="mysql -h$DB_HOST -u$DB_USER -p$DB_PASSWORD $DB_NAME"
for sql_file in "$RESTORE_BACKUP"/*.sql; do
if [[ -f "$sql_file" ]]; then
local table_name=$(basename "$sql_file" .sql)
log_info "Restoring table: $table_name"
$mysql_cmd < "$sql_file"
log_success "Table restored: $table_name"
fi
done
# Restore configuration files
if [[ -d "$RESTORE_BACKUP/config" ]]; then
cp -r "$RESTORE_BACKUP/config/"* "$MODULE_DIR/config/" 2>/dev/null || true
log_success "Configuration files restored"
fi
log_success "Restore completed successfully"
}
# Main installation function
main_install() {
log_step "Starting Desk-Moloni v3.0 Installation"
# Installation steps
check_requirements
interactive_config
test_database_connection
create_backup
install_database
setup_permissions
initialize_config
setup_cron_jobs
verify_installation
log_step "Installation Completed Successfully"
echo -e "\n${GREEN}🎉 Desk-Moloni v3.0 has been installed successfully!${NC}\n"
echo "Next steps:"
echo "1. Configure OAuth credentials in the Perfex CRM admin panel"
echo "2. Test the API connection: php $MODULE_DIR/cli/sync_commands.php test-connection"
echo "3. Monitor the queue processor: tail -f $MODULE_DIR/logs/queue_processor.log"
echo "4. Check system health: php $MODULE_DIR/cli/sync_commands.php health"
echo ""
echo "Documentation: See $MODULE_DIR/README.md"
echo "Log files: $MODULE_DIR/logs/"
echo "Configuration: $MODULE_DIR/config/"
echo ""
if [[ "$ENVIRONMENT" == "development" ]]; then
echo -e "${YELLOW}Development environment detected${NC}"
echo "- Debug mode enabled"
echo "- Extended API timeouts"
echo "- Detailed logging"
echo ""
fi
}
# Main execution
main() {
# Handle special operations
if [[ "$UNINSTALL" == true ]]; then
uninstall_module
exit 0
fi
if [[ -n "$RESTORE_BACKUP" ]]; then
restore_from_backup
exit 0
fi
# Normal installation
main_install
}
# Error handling
trap 'log_error "Installation failed on line $LINENO. Check $INSTALL_LOG for details."' ERR
# Run main function
main "$@"

scripts/maintenance.sh

@@ -0,0 +1,609 @@
#!/bin/bash
# Desk-Moloni v3.0 Maintenance Script
#
# Automated maintenance tasks including log cleanup, optimization,
# health checks, and system maintenance operations.
set -euo pipefail
# Script configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
MODULE_DIR="$(dirname "$SCRIPT_DIR")"
CLI_DIR="$MODULE_DIR/cli"
LOG_DIR="$MODULE_DIR/logs"
LOCK_DIR="$MODULE_DIR/locks"
CACHE_DIR="$MODULE_DIR/cache"
TEMP_DIR="$MODULE_DIR/temp"
# Maintenance configuration
DEFAULT_LOG_RETENTION_DAYS=30
DEFAULT_QUEUE_CLEANUP_DAYS=7
DEFAULT_TEMP_CLEANUP_HOURS=24
DEFAULT_OPTIMIZATION_INTERVAL=7 # days
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
# Logging functions
log_info() {
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
local message="[$timestamp] [INFO] $1"
echo -e "${BLUE}$message${NC}"
echo "$message" >> "$LOG_DIR/maintenance.log" 2>/dev/null || true
}
log_success() {
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
local message="[$timestamp] [SUCCESS] $1"
echo -e "${GREEN}$message${NC}"
echo "$message" >> "$LOG_DIR/maintenance.log" 2>/dev/null || true
}
log_warning() {
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
local message="[$timestamp] [WARNING] $1"
echo -e "${YELLOW}$message${NC}"
echo "$message" >> "$LOG_DIR/maintenance.log" 2>/dev/null || true
}
log_error() {
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
local message="[$timestamp] [ERROR] $1"
echo -e "${RED}$message${NC}"
echo "$message" >> "$LOG_DIR/maintenance.log" 2>/dev/null || true
}
# Help function
show_help() {
cat << EOF
Desk-Moloni v3.0 Maintenance Script
Usage: $0 [OPTIONS] [TASKS]
Tasks:
all Run all maintenance tasks (default)
cleanup Clean up old logs and temporary files
optimize Optimize database tables and indexes
health-check Perform comprehensive health check
queue-maintenance Clean up and optimize queue
cache-cleanup Clear expired cache files
log-rotation Rotate and compress log files
backup-cleanup Clean up old backup files
token-refresh Refresh OAuth tokens if needed
stats-update Update performance statistics
Options:
-h, --help Show this help message
--log-retention DAYS Log retention period (default: $DEFAULT_LOG_RETENTION_DAYS)
--queue-cleanup DAYS Queue cleanup period (default: $DEFAULT_QUEUE_CLEANUP_DAYS)
--temp-cleanup HOURS Temp file cleanup period (default: $DEFAULT_TEMP_CLEANUP_HOURS)
--dry-run Show what would be done without changes
--verbose Verbose output
--force Force operations without prompts
Examples:
$0 # Run all maintenance tasks
$0 cleanup optimize # Run specific tasks
$0 --dry-run # Preview maintenance actions
$0 health-check --verbose # Detailed health check
EOF
}
# Parse command line arguments
TASKS=()
LOG_RETENTION_DAYS=$DEFAULT_LOG_RETENTION_DAYS
QUEUE_CLEANUP_DAYS=$DEFAULT_QUEUE_CLEANUP_DAYS
TEMP_CLEANUP_HOURS=$DEFAULT_TEMP_CLEANUP_HOURS
DRY_RUN=false
VERBOSE=false
FORCE=false
while [[ $# -gt 0 ]]; do
case $1 in
-h|--help)
show_help
exit 0
;;
--log-retention)
LOG_RETENTION_DAYS="$2"
shift 2
;;
--queue-cleanup)
QUEUE_CLEANUP_DAYS="$2"
shift 2
;;
--temp-cleanup)
TEMP_CLEANUP_HOURS="$2"
shift 2
;;
--dry-run)
DRY_RUN=true
shift
;;
--verbose)
VERBOSE=true
shift
;;
--force)
FORCE=true
shift
;;
all|cleanup|optimize|health-check|queue-maintenance|cache-cleanup|log-rotation|backup-cleanup|token-refresh|stats-update)
TASKS+=("$1")
shift
;;
*)
log_error "Unknown option: $1"
show_help
exit 1
;;
esac
done
# Default to all tasks if none specified
if [[ ${#TASKS[@]} -eq 0 ]]; then
TASKS=("all")
fi
# Create required directories
ensure_directories() {
local dirs=("$LOG_DIR" "$LOCK_DIR" "$CACHE_DIR" "$TEMP_DIR")
for dir in "${dirs[@]}"; do
if [[ ! -d "$dir" ]]; then
if [[ "$DRY_RUN" == true ]]; then
log_info "Would create directory: $dir"
else
mkdir -p "$dir"
log_info "Created directory: $dir"
fi
fi
done
}
# Log cleanup task
task_cleanup() {
log_info "Starting cleanup task"
local files_removed=0
local space_freed=0
# Clean up old log files
if [[ -d "$LOG_DIR" ]]; then
log_info "Cleaning up log files older than $LOG_RETENTION_DAYS days"
if [[ "$DRY_RUN" == true ]]; then
local old_logs
old_logs=$(find "$LOG_DIR" -name "*.log" -type f -mtime +$LOG_RETENTION_DAYS 2>/dev/null | wc -l)
log_info "Would remove $old_logs old log files"
else
while IFS= read -r -d '' file; do
local size=$(stat -f%z "$file" 2>/dev/null || stat -c%s "$file" 2>/dev/null || echo 0)
rm "$file"
files_removed=$((files_removed + 1))
space_freed=$((space_freed + size))
[[ "$VERBOSE" == true ]] && log_info "Removed: $(basename "$file")"
done < <(find "$LOG_DIR" -name "*.log" -type f -mtime +$LOG_RETENTION_DAYS -print0 2>/dev/null)
fi
fi
# Clean up temporary files
if [[ -d "$TEMP_DIR" ]]; then
log_info "Cleaning up temporary files older than $TEMP_CLEANUP_HOURS hours"
if [[ "$DRY_RUN" == true ]]; then
local old_temps
old_temps=$(find "$TEMP_DIR" -type f -mmin +$((TEMP_CLEANUP_HOURS * 60)) 2>/dev/null | wc -l)
log_info "Would remove $old_temps temporary files"
else
while IFS= read -r -d '' file; do
local size=$(stat -f%z "$file" 2>/dev/null || stat -c%s "$file" 2>/dev/null || echo 0)
rm "$file"
files_removed=$((files_removed + 1))
space_freed=$((space_freed + size))
[[ "$VERBOSE" == true ]] && log_info "Removed: $(basename "$file")"
done < <(find "$TEMP_DIR" -type f -mmin +$((TEMP_CLEANUP_HOURS * 60)) -print0 2>/dev/null)
fi
fi
# Clean up orphaned lock files
if [[ -d "$LOCK_DIR" ]]; then
log_info "Cleaning up orphaned lock files"
for lock_file in "$LOCK_DIR"/*.lock; do
if [[ -f "$lock_file" ]]; then
# Check if process is still running (basic check)
local lock_name=$(basename "$lock_file" .lock)
if ! pgrep -f "$lock_name" > /dev/null; then
if [[ "$DRY_RUN" == true ]]; then
log_info "Would remove orphaned lock: $(basename "$lock_file")"
else
rm "$lock_file"
log_info "Removed orphaned lock: $(basename "$lock_file")"
fi
fi
fi
done
fi
if [[ "$DRY_RUN" == false ]]; then
local space_mb=$((space_freed / 1024 / 1024))
log_success "Cleanup completed: $files_removed files removed, ${space_mb}MB freed"
fi
}
# Database optimization task
task_optimize() {
log_info "Starting database optimization task"
local tables=("desk_moloni_config" "desk_moloni_mapping" "desk_moloni_sync_queue" "desk_moloni_sync_log")
if [[ "$DRY_RUN" == true ]]; then
log_info "Would optimize ${#tables[@]} database tables"
return
fi
# Get database connection details from PHP config
local db_config
if ! db_config=$(php -r "
require_once '$MODULE_DIR/config/bootstrap.php';
\$config = include '$MODULE_DIR/config/config.php';
echo \$config['database']['host'] . '|' . \$config['database']['database'] . '|' . \$config['database']['username'];
" 2>/dev/null); then
log_error "Failed to get database configuration"
return 1
fi
IFS='|' read -r db_host db_name db_user <<< "$db_config"
# Read password securely (this is a simplified approach)
log_info "Enter database password for optimization:"
read -s db_password
local mysql_cmd="mysql -h$db_host -u$db_user -p$db_password $db_name"
for table in "${tables[@]}"; do
log_info "Optimizing table: $table"
# Check if table exists
if ! echo "SHOW TABLES LIKE '$table';" | $mysql_cmd 2>/dev/null | grep -q "$table"; then
log_warning "Table not found: $table"
continue
fi
# Optimize table
if echo "OPTIMIZE TABLE $table;" | $mysql_cmd &>/dev/null; then
log_success "Optimized: $table"
else
log_error "Failed to optimize: $table"
fi
# Analyze table for better query planning
if echo "ANALYZE TABLE $table;" | $mysql_cmd &>/dev/null; then
[[ "$VERBOSE" == true ]] && log_info "Analyzed: $table"
fi
done
log_success "Database optimization completed"
}
# Health check task
task_health_check() {
log_info "Starting comprehensive health check"
local issues=0
# Check CLI commands
local cli_commands=("queue_processor.php" "sync_commands.php")
for cmd in "${cli_commands[@]}"; do
local cmd_path="$CLI_DIR/$cmd"
if [[ -f "$cmd_path" && -x "$cmd_path" ]]; then
log_success "✓ CLI command available: $cmd"
else
log_error "✗ CLI command missing or not executable: $cmd"
issues=$((issues + 1))
fi
done
# Check directory permissions
local required_dirs=("$LOG_DIR" "$LOCK_DIR" "$CACHE_DIR" "$TEMP_DIR")
for dir in "${required_dirs[@]}"; do
if [[ -d "$dir" && -w "$dir" ]]; then
log_success "✓ Directory writable: $(basename "$dir")"
else
log_error "✗ Directory not writable: $(basename "$dir")"
issues=$((issues + 1))
fi
done
# Check disk space
local disk_usage
disk_usage=$(df "$MODULE_DIR" | awk 'NR==2 {print $5}' | sed 's/%//')
if [[ "$disk_usage" -lt 90 ]]; then
log_success "✓ Disk usage OK: ${disk_usage}%"
else
log_warning "⚠ Disk usage high: ${disk_usage}%"
fi
# Check memory usage (if possible)
if command -v free &> /dev/null; then
local mem_usage
mem_usage=$(free | awk 'NR==2{printf "%.0f", $3*100/$2}')
if [[ "$mem_usage" -lt 80 ]]; then
log_success "✓ Memory usage OK: ${mem_usage}%"
else
log_warning "⚠ Memory usage high: ${mem_usage}%"
fi
fi
# Check recent errors in logs
if [[ -f "$LOG_DIR/queue_processor.log" ]]; then
local recent_errors
recent_errors=$(tail -n 100 "$LOG_DIR/queue_processor.log" | grep -c "ERROR" || true)
if [[ "$recent_errors" -eq 0 ]]; then
log_success "✓ No recent errors in queue processor"
else
log_warning "$recent_errors recent errors in queue processor"
fi
fi
# Test basic CLI functionality
if [[ "$DRY_RUN" == false ]]; then
if php "$CLI_DIR/sync_commands.php" test-connection &>/dev/null; then
log_success "✓ API connection test passed"
else
log_warning "⚠ API connection test failed (may be expected)"
fi
fi
if [[ "$issues" -eq 0 ]]; then
log_success "Health check completed - no critical issues found"
else
log_error "Health check found $issues critical issues"
return 1
fi
}
# Queue maintenance task
task_queue_maintenance() {
log_info "Starting queue maintenance task"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would clean up completed queue entries older than $QUEUE_CLEANUP_DAYS days"
log_info "Would reset stuck processing tasks"
return
fi
# Clean up old completed tasks
php "$CLI_DIR/sync_commands.php" queue-cleanup --days="$QUEUE_CLEANUP_DAYS" || true
# Reset stuck processing tasks (running for more than 1 hour)
php "$CLI_DIR/sync_commands.php" queue-reset-stuck || true
log_success "Queue maintenance completed"
}
# Cache cleanup task
task_cache_cleanup() {
log_info "Starting cache cleanup task"
if [[ ! -d "$CACHE_DIR" ]]; then
log_info "Cache directory not found, skipping"
return
fi
local cache_files=0
local cache_size=0
if [[ "$DRY_RUN" == true ]]; then
cache_files=$(find "$CACHE_DIR" -type f 2>/dev/null | wc -l)
log_info "Would clear $cache_files cache files"
else
# Clear all cache files
for cache_file in "$CACHE_DIR"/*; do
if [[ -f "$cache_file" ]]; then
local size=$(stat -f%z "$cache_file" 2>/dev/null || stat -c%s "$cache_file" 2>/dev/null || echo 0)
rm "$cache_file"
cache_files=$((cache_files + 1))
cache_size=$((cache_size + size))
fi
done
local size_mb=$((cache_size / 1024 / 1024))
log_success "Cache cleanup completed: $cache_files files, ${size_mb}MB cleared"
fi
}
# Log rotation task
task_log_rotation() {
log_info "Starting log rotation task"
if [[ ! -d "$LOG_DIR" ]]; then
return
fi
local rotated=0
# Rotate large log files
for log_file in "$LOG_DIR"/*.log; do
if [[ -f "$log_file" ]]; then
local size=$(stat -f%z "$log_file" 2>/dev/null || stat -c%s "$log_file" 2>/dev/null || echo 0)
local size_mb=$((size / 1024 / 1024))
# Rotate files larger than 10MB
if [[ "$size_mb" -gt 10 ]]; then
local base_name=$(basename "$log_file" .log)
local timestamp=$(date +%Y%m%d_%H%M%S)
local rotated_name="$LOG_DIR/${base_name}_${timestamp}.log"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would rotate large log file: $(basename "$log_file") (${size_mb}MB)"
else
cp "$log_file" "$rotated_name"
> "$log_file" # Truncate original file
# Compress rotated file
if command -v gzip &> /dev/null; then
gzip "$rotated_name"
log_info "Rotated and compressed: $(basename "$log_file")"
else
log_info "Rotated: $(basename "$log_file")"
fi
rotated=$((rotated + 1))
fi
fi
fi
done
if [[ "$DRY_RUN" == false ]]; then
log_success "Log rotation completed: $rotated files rotated"
fi
}
# Backup cleanup task
task_backup_cleanup() {
log_info "Starting backup cleanup task"
local backup_dir="$MODULE_DIR/backups"
if [[ ! -d "$backup_dir" ]]; then
log_info "No backup directory found, skipping"
return
fi
local retention_days=90 # Keep backups for 90 days
local removed=0
if [[ "$DRY_RUN" == true ]]; then
local old_backups
old_backups=$(find "$backup_dir" -type d -mtime +$retention_days 2>/dev/null | wc -l)
log_info "Would remove $old_backups old backup directories"
else
while IFS= read -r -d '' backup; do
rm -rf "$backup"
removed=$((removed + 1))
[[ "$VERBOSE" == true ]] && log_info "Removed backup: $(basename "$backup")"
done < <(find "$backup_dir" -type d -mtime +$retention_days -print0 2>/dev/null)
log_success "Backup cleanup completed: $removed old backups removed"
fi
}
# Token refresh task
task_token_refresh() {
log_info "Starting token refresh task"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would check and refresh OAuth tokens if needed"
return
fi
# Run token refresh command
if php "$CLI_DIR/sync_commands.php" token-refresh &>/dev/null; then
log_success "Token refresh completed"
else
log_warning "Token refresh failed or not needed"
fi
}
# Statistics update task
task_stats_update() {
log_info "Starting statistics update task"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would update performance and usage statistics"
return
fi
# Update statistics
if php "$CLI_DIR/sync_commands.php" stats-update &>/dev/null; then
log_success "Statistics update completed"
else
log_warning "Statistics update failed"
fi
}
# Execute maintenance tasks
run_tasks() {
local start_time=$(date +%s)
log_info "Starting maintenance run with tasks: ${TASKS[*]}"
if [[ "$DRY_RUN" == true ]]; then
log_warning "DRY RUN MODE - No changes will be made"
fi
for task in "${TASKS[@]}"; do
case "$task" in
all)
task_cleanup
task_optimize
task_health_check
task_queue_maintenance
task_cache_cleanup
task_log_rotation
task_backup_cleanup
task_token_refresh
task_stats_update
;;
cleanup)
task_cleanup
;;
optimize)
task_optimize
;;
health-check)
task_health_check
;;
queue-maintenance)
task_queue_maintenance
;;
cache-cleanup)
task_cache_cleanup
;;
log-rotation)
task_log_rotation
;;
backup-cleanup)
task_backup_cleanup
;;
token-refresh)
task_token_refresh
;;
stats-update)
task_stats_update
;;
*)
log_error "Unknown task: $task"
;;
esac
done
local end_time=$(date +%s)
local duration=$((end_time - start_time))
log_success "Maintenance completed in ${duration} seconds"
}
# Main execution
main() {
# Ensure required directories exist
ensure_directories
# Run maintenance tasks
run_tasks
}
# Error handling
trap 'log_error "Maintenance script failed on line $LINENO"' ERR
# Execute if called directly
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
main "$@"
fi

627
scripts/performance_report.sh Normal file
View File

@@ -0,0 +1,627 @@
#!/bin/bash
# Desk-Moloni v3.0 Performance Analysis and Report Generator
# Author: Descomplicar.pt
# Version: 3.0.0
# License: Commercial
set -e
# Color codes for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
PURPLE='\033[0;35m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color
# Configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
MODULE_DIR="$(dirname "$SCRIPT_DIR")"
REPORT_FILE="/tmp/desk-moloni-performance-report-$(date +%Y%m%d-%H%M%S).html"
JSON_REPORT="/tmp/desk-moloni-performance-data-$(date +%Y%m%d-%H%M%S).json"
PERFEX_ROOT=""
# Performance thresholds
SYNC_TIME_THRESHOLD=30 # seconds
SUCCESS_RATE_THRESHOLD=99.5 # percentage
API_RESPONSE_THRESHOLD=5 # seconds
QUEUE_RATE_THRESHOLD=1000 # tasks per hour
MEMORY_THRESHOLD=80 # percentage
CPU_THRESHOLD=80 # percentage
# Functions
log() {
echo -e "${GREEN}[PERF]${NC} $1"
}
warning() {
echo -e "${YELLOW}[WARN]${NC} $1"
}
error() {
echo -e "${RED}[ERROR]${NC} $1"
}
info() {
echo -e "${BLUE}[INFO]${NC} $1"
}
# Find Perfex root directory
find_perfex_root() {
local current_dir="$MODULE_DIR"
while [[ "$current_dir" != "/" ]]; do
if [[ -f "$current_dir/application/config/app.php" ]]; then
PERFEX_ROOT="$current_dir"
return 0
fi
current_dir="$(dirname "$current_dir")"
done
return 1
}
# Performance banner
echo "========================================================================"
echo " DESK-MOLONI v3.0 PERFORMANCE REPORT"
echo "========================================================================"
echo "Report File: $REPORT_FILE"
echo "JSON Data: $JSON_REPORT"
echo "Analysis Date: $(date)"
echo ""
log "Starting comprehensive performance analysis..."
# Find Perfex installation
if ! find_perfex_root; then
error "Could not find Perfex CRM installation directory"
exit 1
fi
log "Perfex CRM root found: $PERFEX_ROOT"
# Initialize HTML report
cat > "$REPORT_FILE" << 'EOF'
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Desk-Moloni Performance Report</title>
<style>
body { font-family: Arial, sans-serif; margin: 20px; background-color: #f5f5f5; }
.container { max-width: 1200px; margin: 0 auto; background: white; padding: 20px; border-radius: 8px; box-shadow: 0 2px 10px rgba(0,0,0,0.1); }
.header { text-align: center; border-bottom: 3px solid #007cba; padding-bottom: 20px; margin-bottom: 30px; }
.metric-grid { display: grid; grid-template-columns: repeat(auto-fit, minmax(250px, 1fr)); gap: 20px; margin: 20px 0; }
.metric-card { background: #f8f9fa; padding: 20px; border-radius: 8px; border-left: 4px solid #007cba; }
.metric-value { font-size: 2em; font-weight: bold; color: #007cba; }
.metric-label { color: #666; font-size: 0.9em; margin-top: 5px; }
.status-excellent { color: #28a745; }
.status-good { color: #17a2b8; }
.status-warning { color: #ffc107; }
.status-critical { color: #dc3545; }
.chart-container { margin: 20px 0; padding: 20px; background: #f8f9fa; border-radius: 8px; }
.table { width: 100%; border-collapse: collapse; margin: 20px 0; }
.table th, .table td { padding: 12px; text-align: left; border-bottom: 1px solid #ddd; }
.table th { background-color: #007cba; color: white; }
.recommendation { background: #e7f3ff; padding: 15px; margin: 10px 0; border-radius: 5px; border-left: 4px solid #007cba; }
.section { margin: 30px 0; }
.section h2 { color: #007cba; border-bottom: 2px solid #007cba; padding-bottom: 10px; }
</style>
</head>
<body>
<div class="container">
<div class="header">
<h1>🚀 Desk-Moloni v3.0 Performance Report</h1>
<p>Generated on <strong>__REPORT_DATE__</strong></p>
</div>
EOF
# Initialize JSON report
cat > "$JSON_REPORT" << EOF
{
"report_meta": {
"version": "3.0.0",
"generated_at": "$(date -Iseconds)",
"module_path": "$MODULE_DIR",
"perfex_root": "$PERFEX_ROOT"
},
"metrics": {},
"recommendations": [],
"status": "unknown"
}
EOF
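# This skeleton only guarantees a valid JSON file exists early on; it is rewritten
# completely with FINAL_JSON in step 8 once all metrics have been collected.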
# 1. System Information
echo ""
log "=== COLLECTING SYSTEM INFORMATION ==="
SYSTEM_INFO=$(cat << EOF
{
"php_version": "$(php -r 'echo PHP_VERSION;')",
"memory_limit": "$(php -r 'echo ini_get("memory_limit");')",
"max_execution_time": "$(php -r 'echo ini_get("max_execution_time");')",
"server_software": "${SERVER_SOFTWARE:-Unknown}",
"operating_system": "$(uname -s -r)",
"cpu_cores": "$(nproc 2>/dev/null || echo 'Unknown')",
"total_memory": "$(free -h | awk '/^Mem:/ {print $2}' 2>/dev/null || echo 'Unknown')"
}
EOF
)
info "System Information collected"
# 2. Database Performance Analysis
echo ""
log "=== ANALYZING DATABASE PERFORMANCE ==="
DB_METRICS=""
if command -v mysql > /dev/null 2>&1; then
# Try to connect to database and get metrics
DB_METRICS=$(cat << 'EOF'
{
"connection_test": "attempting",
"table_sizes": {},
"query_performance": {},
"index_usage": {}
}
EOF
)
# Check if we can determine database connection details
if [[ -f "$PERFEX_ROOT/application/config/database.php" ]]; then
info "Database configuration found"
# Get table sizes (basic estimation)
TABLE_COUNT=$(find "$MODULE_DIR/database" -name "*.sql" | wc -l)
# jq may be missing on minimal hosts; skip the merge instead of aborting under set -e
if command -v jq > /dev/null 2>&1; then
DB_METRICS=$(echo "$DB_METRICS" | jq ".table_count = $TABLE_COUNT")
fi
else
warning "Database configuration not accessible"
fi
else
warning "MySQL client not available for database analysis"
DB_METRICS='{"status": "unavailable", "reason": "mysql client not found"}'
fi
info "Database metrics collected"
# 3. File System Performance
echo ""
log "=== ANALYZING FILE SYSTEM PERFORMANCE ==="
# Calculate directory sizes
MODULE_SIZE=$(du -sb "$MODULE_DIR" 2>/dev/null | cut -f1 || echo "0")
UPLOADS_SIZE=0
if [[ -d "$PERFEX_ROOT/uploads/desk_moloni" ]]; then
UPLOADS_SIZE=$(du -sb "$PERFEX_ROOT/uploads/desk_moloni" 2>/dev/null | cut -f1 || echo "0")
fi
# Count files
TOTAL_FILES=$(find "$MODULE_DIR" -type f | wc -l)
PHP_FILES=$(find "$MODULE_DIR" -name "*.php" | wc -l)
JS_FILES=$(find "$MODULE_DIR" -name "*.js" | wc -l)
CSS_FILES=$(find "$MODULE_DIR" -name "*.css" | wc -l)
FILESYSTEM_METRICS=$(cat << EOF
{
"module_size_bytes": $MODULE_SIZE,
"uploads_size_bytes": $UPLOADS_SIZE,
"total_files": $TOTAL_FILES,
"php_files": $PHP_FILES,
"js_files": $JS_FILES,
"css_files": $CSS_FILES,
"module_size_mb": $(echo "scale=2; $MODULE_SIZE/1024/1024" | bc -l 2>/dev/null || echo "0")
}
EOF
)
info "File system metrics collected"
# 4. Code Quality Metrics
echo ""
log "=== ANALYZING CODE QUALITY ==="
# Lines of code analysis
TOTAL_LOC=0
PHP_LOC=0
JS_LOC=0
if [[ $PHP_FILES -gt 0 ]]; then
PHP_LOC=$(find "$MODULE_DIR" -name "*.php" -exec wc -l {} + 2>/dev/null | tail -1 | awk '{print $1}' || echo "0")
fi
if [[ $JS_FILES -gt 0 ]]; then
JS_LOC=$(find "$MODULE_DIR" -name "*.js" -exec wc -l {} + 2>/dev/null | tail -1 | awk '{print $1}' || echo "0")
fi
TOTAL_LOC=$((PHP_LOC + JS_LOC))
# Test coverage estimation
TEST_FILES=$(find "$MODULE_DIR" -name "*Test.php" | wc -l)
TEST_LOC=0
if [[ $TEST_FILES -gt 0 ]]; then
TEST_LOC=$(find "$MODULE_DIR" -name "*Test.php" -exec wc -l {} + 2>/dev/null | tail -1 | awk '{print $1}' || echo "0")
fi
CODE_METRICS=$(cat << EOF
{
"total_lines_of_code": $TOTAL_LOC,
"php_lines_of_code": $PHP_LOC,
"js_lines_of_code": $JS_LOC,
"test_files": $TEST_FILES,
"test_lines_of_code": $TEST_LOC,
"test_coverage_estimate": $(echo "scale=2; $TEST_LOC*100/$PHP_LOC" | bc -l 2>/dev/null || echo "0")
}
EOF
)
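# Caveat: test_coverage_estimate is a rough lines-of-code ratio (test LOC / PHP LOC),
# not a measured coverage figure from PHPUnit.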
info "Code quality metrics collected"
# 5. Performance Simulation
echo ""
log "=== RUNNING PERFORMANCE SIMULATION ==="
# Simulate basic performance tests
START_TIME=$(date +%s.%N)
# Test file loading performance
if [[ -f "$MODULE_DIR/desk_moloni.php" ]]; then
php -l "$MODULE_DIR/desk_moloni.php" > /dev/null 2>&1 || true  # do not let a lint failure silently abort the report under set -e
fi
# Test autoloader performance
if [[ -f "$MODULE_DIR/vendor/autoload.php" ]]; then
php -r "require_once '$MODULE_DIR/vendor/autoload.php';" > /dev/null 2>&1 || true  # keep the analysis running even if the autoloader fails to load
fi
END_TIME=$(date +%s.%N)
LOAD_TIME=$(echo "$END_TIME - $START_TIME" | bc -l 2>/dev/null || echo "0")
# Memory usage estimation
MEMORY_USAGE=$(php -r "
\$start = memory_get_usage();
if (file_exists('$MODULE_DIR/vendor/autoload.php')) {
require_once '$MODULE_DIR/vendor/autoload.php';
}
echo memory_get_usage() - \$start;
" 2>/dev/null || echo "0")
PERFORMANCE_METRICS=$(cat << EOF
{
"module_load_time": "$LOAD_TIME",
"estimated_memory_usage": $MEMORY_USAGE,
"memory_usage_mb": $(echo "scale=2; $MEMORY_USAGE/1024/1024" | bc -l 2>/dev/null || echo "0")
}
EOF
)
info "Performance simulation completed"
# 6. Security Performance
echo ""
log "=== ANALYZING SECURITY PERFORMANCE ==="
# Check for security-related files
SECURITY_FILES=0
# arithmetic assignment instead of ((var++)): the latter returns status 1 when var is 0 and would exit under set -e
[[ -f "$MODULE_DIR/libraries/Encryption.php" ]] && SECURITY_FILES=$((SECURITY_FILES + 1))
[[ -f "$MODULE_DIR/config/security.php" ]] && SECURITY_FILES=$((SECURITY_FILES + 1))
# Check for security patterns in code
ENCRYPTION_USAGE=$(grep -r "encrypt\|decrypt" "$MODULE_DIR" --include="*.php" 2>/dev/null | wc -l)
OAUTH_USAGE=$(grep -r "oauth\|OAuth" "$MODULE_DIR" --include="*.php" 2>/dev/null | wc -l)
SECURITY_METRICS=$(cat << EOF
{
"security_files": $SECURITY_FILES,
"encryption_usage_count": $ENCRYPTION_USAGE,
"oauth_implementation_count": $OAUTH_USAGE,
"security_score": $(echo "scale=2; ($SECURITY_FILES + $ENCRYPTION_USAGE + $OAUTH_USAGE) * 10" | bc -l 2>/dev/null || echo "0")
}
EOF
)
info "Security performance metrics collected"
# 7. Generate Performance Score
echo ""
log "=== CALCULATING PERFORMANCE SCORE ==="
# Calculate overall performance score
PERFORMANCE_SCORE=0
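# Scoring rubric: four equally weighted components of up to 25 points each
# (file organization, code quality/tests, security implementation, architecture), maximum 100.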
# File organization score (0-25 points)
if [[ $TOTAL_FILES -lt 100 && $MODULE_SIZE -lt 10000000 ]]; then
PERFORMANCE_SCORE=$((PERFORMANCE_SCORE + 25))
elif [[ $TOTAL_FILES -lt 200 && $MODULE_SIZE -lt 50000000 ]]; then
PERFORMANCE_SCORE=$((PERFORMANCE_SCORE + 20))
else
PERFORMANCE_SCORE=$((PERFORMANCE_SCORE + 10))
fi
# Code quality score (0-25 points)
if [[ $TEST_FILES -gt 10 && $PHP_FILES -gt 0 ]]; then
PERFORMANCE_SCORE=$((PERFORMANCE_SCORE + 25))
elif [[ $TEST_FILES -gt 5 ]]; then
PERFORMANCE_SCORE=$((PERFORMANCE_SCORE + 15))
else
PERFORMANCE_SCORE=$((PERFORMANCE_SCORE + 5))
fi
# Security implementation score (0-25 points)
if [[ $SECURITY_FILES -gt 1 && $ENCRYPTION_USAGE -gt 5 ]]; then
PERFORMANCE_SCORE=$((PERFORMANCE_SCORE + 25))
elif [[ $SECURITY_FILES -gt 0 || $ENCRYPTION_USAGE -gt 0 ]]; then
PERFORMANCE_SCORE=$((PERFORMANCE_SCORE + 15))
else
PERFORMANCE_SCORE=$((PERFORMANCE_SCORE + 5))
fi
# Architecture score (0-25 points)
if [[ -f "$MODULE_DIR/composer.json" && -d "$MODULE_DIR/vendor" ]]; then
PERFORMANCE_SCORE=$((PERFORMANCE_SCORE + 25))
elif [[ -f "$MODULE_DIR/composer.json" ]]; then
PERFORMANCE_SCORE=$((PERFORMANCE_SCORE + 15))
else
PERFORMANCE_SCORE=$((PERFORMANCE_SCORE + 10))
fi
# Determine performance grade
if [[ $PERFORMANCE_SCORE -ge 90 ]]; then
PERFORMANCE_GRADE="A+"
GRADE_COLOR="status-excellent"
elif [[ $PERFORMANCE_SCORE -ge 80 ]]; then
PERFORMANCE_GRADE="A"
GRADE_COLOR="status-excellent"
elif [[ $PERFORMANCE_SCORE -ge 70 ]]; then
PERFORMANCE_GRADE="B"
GRADE_COLOR="status-good"
elif [[ $PERFORMANCE_SCORE -ge 60 ]]; then
PERFORMANCE_GRADE="C"
GRADE_COLOR="status-warning"
else
PERFORMANCE_GRADE="D"
GRADE_COLOR="status-critical"
fi
info "Performance score calculated: $PERFORMANCE_SCORE/100 ($PERFORMANCE_GRADE)"
# 8. Compile final JSON report
echo ""
log "=== COMPILING FINAL REPORT ==="
FINAL_JSON=$(cat << EOF
{
"report_meta": {
"version": "3.0.0",
"generated_at": "$(date -Iseconds)",
"module_path": "$MODULE_DIR",
"perfex_root": "$PERFEX_ROOT",
"performance_score": $PERFORMANCE_SCORE,
"performance_grade": "$PERFORMANCE_GRADE"
},
"system_info": $SYSTEM_INFO,
"database_metrics": $DB_METRICS,
"filesystem_metrics": $FILESYSTEM_METRICS,
"code_metrics": $CODE_METRICS,
"performance_metrics": $PERFORMANCE_METRICS,
"security_metrics": $SECURITY_METRICS,
"recommendations": [
{
"category": "optimization",
"priority": "medium",
"description": "Consider implementing Redis caching for improved performance"
},
{
"category": "monitoring",
"priority": "high",
"description": "Set up performance monitoring for production environment"
},
{
"category": "testing",
"priority": "medium",
"description": "Increase test coverage for better code quality assurance"
}
]
}
EOF
)
echo "$FINAL_JSON" > "$JSON_REPORT"
# 9. Generate HTML report
log "=== GENERATING HTML REPORT ==="
# Update HTML report with actual data
sed -i "s|__REPORT_DATE__|$(date)|" "$REPORT_FILE"  # '|' delimiter avoids clashes with any '/' in a localized date string
cat >> "$REPORT_FILE" << EOF
<div class="section">
<h2>📊 Performance Overview</h2>
<div class="metric-grid">
<div class="metric-card">
<div class="metric-value $GRADE_COLOR">$PERFORMANCE_GRADE</div>
<div class="metric-label">Performance Grade</div>
</div>
<div class="metric-card">
<div class="metric-value">$PERFORMANCE_SCORE/100</div>
<div class="metric-label">Overall Score</div>
</div>
<div class="metric-card">
<div class="metric-value">$(echo "scale=1; $MODULE_SIZE/1024/1024" | bc -l 2>/dev/null || echo "0") MB</div>
<div class="metric-label">Module Size</div>
</div>
<div class="metric-card">
<div class="metric-value">$TOTAL_FILES</div>
<div class="metric-label">Total Files</div>
</div>
</div>
</div>
<div class="section">
<h2>💾 System Information</h2>
<table class="table">
<tr><th>Metric</th><th>Value</th><th>Status</th></tr>
<tr><td>PHP Version</td><td>$(php -r 'echo PHP_VERSION;')</td><td class="status-excellent">✓</td></tr>
<tr><td>Memory Limit</td><td>$(php -r 'echo ini_get("memory_limit");')</td><td class="status-good">Good</td></tr>
<tr><td>Max Execution Time</td><td>$(php -r 'echo ini_get("max_execution_time");')s</td><td class="status-good">Good</td></tr>
<tr><td>Operating System</td><td>$(uname -s -r)</td><td class="status-excellent">✓</td></tr>
</table>
</div>
<div class="section">
<h2>📁 File System Analysis</h2>
<div class="metric-grid">
<div class="metric-card">
<div class="metric-value">$PHP_FILES</div>
<div class="metric-label">PHP Files</div>
</div>
<div class="metric-card">
<div class="metric-value">$TEST_FILES</div>
<div class="metric-label">Test Files</div>
</div>
<div class="metric-card">
<div class="metric-value">$(echo "scale=0; $PHP_LOC" | bc -l 2>/dev/null || echo "0")</div>
<div class="metric-label">Lines of PHP Code</div>
</div>
<div class="metric-card">
<div class="metric-value">$(echo "scale=1; $TEST_LOC*100/$PHP_LOC" | bc -l 2>/dev/null || echo "0")%</div>
<div class="metric-label">Test Coverage Est.</div>
</div>
</div>
</div>
<div class="section">
<h2>🔒 Security Performance</h2>
<table class="table">
<tr><th>Security Feature</th><th>Implementation</th><th>Status</th></tr>
<tr><td>Encryption Library</td><td>$([[ -f "$MODULE_DIR/libraries/Encryption.php" ]] && echo "Implemented" || echo "Not Found")</td><td class="$([[ -f "$MODULE_DIR/libraries/Encryption.php" ]] && echo "status-excellent" || echo "status-warning")">$([[ -f "$MODULE_DIR/libraries/Encryption.php" ]] && echo "✓" || echo "⚠")</td></tr>
<tr><td>OAuth Implementation</td><td>$OAUTH_USAGE references found</td><td class="status-good">Good</td></tr>
<tr><td>Input Validation</td><td>$(grep -r "filter_var\|htmlspecialchars" "$MODULE_DIR" --include="*.php" 2>/dev/null | wc -l) patterns found</td><td class="status-good">Good</td></tr>
</table>
</div>
<div class="section">
<h2>🚀 Performance Recommendations</h2>
<div class="recommendation">
<h4>💡 Immediate Improvements</h4>
<ul>
<li><strong>Enable OpCache:</strong> Configure PHP OpCache for better performance</li>
<li><strong>Database Optimization:</strong> Add proper indexes for query optimization</li>
<li><strong>Caching Strategy:</strong> Implement Redis caching for API responses</li>
</ul>
</div>
<div class="recommendation">
<h4>📈 Long-term Optimizations</h4>
<ul>
<li><strong>Load Testing:</strong> Perform comprehensive load testing</li>
<li><strong>Monitoring:</strong> Set up performance monitoring and alerting</li>
<li><strong>Code Optimization:</strong> Profile and optimize critical code paths</li>
</ul>
</div>
<div class="recommendation">
<h4>🔧 Development Best Practices</h4>
<ul>
<li><strong>Test Coverage:</strong> Increase unit test coverage to >80%</li>
<li><strong>Code Quality:</strong> Implement static analysis tools</li>
<li><strong>Documentation:</strong> Maintain comprehensive API documentation</li>
</ul>
</div>
</div>
<div class="section">
<h2>📋 Performance Checklist</h2>
<table class="table">
<tr><th>Performance Factor</th><th>Current Status</th><th>Target</th><th>Action Required</th></tr>
<tr><td>Module Load Time</td><td>${LOAD_TIME}s</td><td>&lt;0.5s</td><td>$([[ $(echo "$LOAD_TIME < 0.5" | bc -l 2>/dev/null) == "1" ]] && echo "✓ Met" || echo "⚠ Optimize")</td></tr>
<tr><td>Memory Usage</td><td>$(echo "scale=1; $MEMORY_USAGE/1024/1024" | bc -l 2>/dev/null || echo "0") MB</td><td>&lt;64 MB</td><td>$([[ $(echo "$MEMORY_USAGE < 67108864" | bc -l 2>/dev/null) == "1" ]] && echo "✓ Met" || echo "⚠ Optimize")</td></tr>
<tr><td>File Count</td><td>$TOTAL_FILES files</td><td>&lt;200 files</td><td>$([[ $TOTAL_FILES -lt 200 ]] && echo "✓ Met" || echo "⚠ Reduce")</td></tr>
<tr><td>Test Coverage</td><td>$(echo "scale=1; $TEST_LOC*100/$PHP_LOC" | bc -l 2>/dev/null || echo "0")%</td><td>&gt;80%</td><td>$([[ $(echo "$TEST_LOC*100/$PHP_LOC > 80" | bc -l 2>/dev/null) == "1" ]] && echo "✓ Met" || echo "⚠ Increase")</td></tr>
</table>
</div>
<div class="section">
<h2>📊 Benchmark Results</h2>
<div class="chart-container">
<h4>Performance Metrics Summary</h4>
<table class="table">
<tr><th>Metric Category</th><th>Score</th><th>Weight</th><th>Contribution</th></tr>
<tr><td>File Organization</td><td>$([[ $TOTAL_FILES -lt 100 ]] && echo "25/25" || echo "20/25")</td><td>25%</td><td>$([[ $TOTAL_FILES -lt 100 ]] && echo "Excellent" || echo "Good")</td></tr>
<tr><td>Code Quality</td><td>$([[ $TEST_FILES -gt 10 ]] && echo "25/25" || echo "15/25")</td><td>25%</td><td>$([[ $TEST_FILES -gt 10 ]] && echo "Excellent" || echo "Good")</td></tr>
<tr><td>Security Implementation</td><td>$([[ $SECURITY_FILES -gt 1 ]] && echo "25/25" || echo "15/25")</td><td>25%</td><td>$([[ $SECURITY_FILES -gt 1 ]] && echo "Excellent" || echo "Good")</td></tr>
<tr><td>Architecture</td><td>$([[ -f "$MODULE_DIR/composer.json" ]] && echo "25/25" || echo "10/25")</td><td>25%</td><td>$([[ -f "$MODULE_DIR/composer.json" ]] && echo "Excellent" || echo "Fair")</td></tr>
</table>
</div>
</div>
<div class="section">
<h2>🎯 Next Steps</h2>
<ol>
<li><strong>Review Performance Score:</strong> Current score is $PERFORMANCE_SCORE/100 ($PERFORMANCE_GRADE)</li>
<li><strong>Implement Recommendations:</strong> Focus on high-priority optimizations</li>
<li><strong>Setup Monitoring:</strong> Implement performance monitoring in production</li>
<li><strong>Schedule Regular Audits:</strong> Run performance analysis monthly</li>
<li><strong>Load Testing:</strong> Perform comprehensive load testing before production</li>
</ol>
</div>
<div style="text-align: center; margin-top: 40px; padding-top: 20px; border-top: 1px solid #ddd; color: #666;">
<p>Generated by Desk-Moloni v3.0 Performance Analyzer</p>
<p>© 2025 Descomplicar®. All rights reserved.</p>
</div>
</div>
</body>
</html>
EOF
# 10. Display summary
echo ""
echo "========================================================================"
echo " PERFORMANCE ANALYSIS COMPLETE"
echo "========================================================================"
echo ""
printf "Performance Grade: %s\n" "$PERFORMANCE_GRADE"
printf "Overall Score: %d/100\n" "$PERFORMANCE_SCORE"
printf "Module Size: %.1f MB\n" "$(echo "scale=1; $MODULE_SIZE/1024/1024" | bc -l 2>/dev/null || echo "0")"
printf "Total Files: %d\n" "$TOTAL_FILES"
printf "Lines of Code: %d\n" "$TOTAL_LOC"
printf "Test Files: %d\n" "$TEST_FILES"
echo ""
echo "Reports Generated:"
echo " 📊 HTML Report: $REPORT_FILE"
echo " 📋 JSON Data: $JSON_REPORT"
echo ""
# Performance recommendations
echo "🚀 KEY RECOMMENDATIONS:"
if [[ $PERFORMANCE_SCORE -lt 70 ]]; then
echo " ⚠️ Performance needs significant improvement"
echo " 🔧 Focus on code optimization and testing"
elif [[ $PERFORMANCE_SCORE -lt 85 ]]; then
echo " ✅ Good performance with room for improvement"
echo " 📈 Implement caching and monitoring"
else
echo " 🎉 Excellent performance! Maintain current standards"
echo " 🔍 Focus on monitoring and continuous optimization"
fi
echo ""
echo "========================================================================"
# Open report in browser if available
if command -v xdg-open > /dev/null 2>&1; then
log "Opening performance report in browser..."
xdg-open "$REPORT_FILE" 2>/dev/null &
elif command -v open > /dev/null 2>&1; then
log "Opening performance report in browser..."
open "$REPORT_FILE" 2>/dev/null &
fi
# Exit with appropriate code
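# (exit 1 when the score is below 60 so CI/CD pipelines can treat a poor result as a failing check)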
if [[ $PERFORMANCE_SCORE -lt 60 ]]; then
exit 1
else
exit 0
fi

View File

@@ -0,0 +1,597 @@
#!/bin/bash
# Desk-Moloni v3.0 Production Readiness Validator
# Author: Descomplicar.pt
# Version: 3.0.0
# License: Commercial
set -e
# Color codes for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
PURPLE='\033[0;35m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color
# Configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
MODULE_DIR="$(dirname "$SCRIPT_DIR")"
REPORT_FILE="/tmp/desk-moloni-production-readiness-$(date +%Y%m%d-%H%M%S).txt"
CRITICAL_FAILURES=0
HIGH_FAILURES=0
MEDIUM_FAILURES=0
LOW_FAILURES=0
# Functions
log() {
echo -e "${GREEN}[VALIDATE]${NC} $1" | tee -a "$REPORT_FILE"
}
critical() {
echo -e "${RED}[CRITICAL]${NC} $1" | tee -a "$REPORT_FILE"
CRITICAL_FAILURES=$((CRITICAL_FAILURES + 1))   # ((var++)) returns status 1 on the first hit and would abort under set -e
}
high() {
echo -e "${RED}[HIGH]${NC} $1" | tee -a "$REPORT_FILE"
HIGH_FAILURES=$((HIGH_FAILURES + 1))
}
medium() {
echo -e "${YELLOW}[MEDIUM]${NC} $1" | tee -a "$REPORT_FILE"
MEDIUM_FAILURES=$((MEDIUM_FAILURES + 1))
}
low() {
echo -e "${BLUE}[LOW]${NC} $1" | tee -a "$REPORT_FILE"
LOW_FAILURES=$((LOW_FAILURES + 1))
}
pass() {
echo -e "${GREEN}[PASS]${NC} $1" | tee -a "$REPORT_FILE"
}
# Production readiness banner
echo "========================================================================"
echo " DESK-MOLONI v3.0 PRODUCTION READINESS VALIDATOR"
echo "========================================================================"
echo "Validation Report: $REPORT_FILE"
echo "Validation Date: $(date)"
echo ""
log "Starting comprehensive production readiness validation..."
# 1. Module Structure Validation
echo ""
log "=== MODULE STRUCTURE VALIDATION ==="
# Check core files exist
CORE_FILES=(
"desk_moloni.php"
"composer.json"
"phpunit.xml"
"VERSION"
"README.md"
)
for file in "${CORE_FILES[@]}"; do
if [[ -f "$MODULE_DIR/$file" ]]; then
pass "Core file exists: $file"
else
critical "Missing core file: $file"
fi
done
# Check directory structure
CORE_DIRECTORIES=(
"assets"
"cli"
"config"
"controllers"
"database"
"docs"
"helpers"
"language"
"libraries"
"models"
"scripts"
"src"
"tests"
"views"
)
for dir in "${CORE_DIRECTORIES[@]}"; do
if [[ -d "$MODULE_DIR/$dir" ]]; then
pass "Core directory exists: $dir"
else
high "Missing core directory: $dir"
fi
done
# Check specific implementation files
IMPLEMENTATION_FILES=(
"libraries/Encryption.php"
"database/migrations/001_create_desk_moloni_tables.sql"
"config/config.php"
"cli/queue_processor.php"
"scripts/install.sh"
"scripts/security_audit.sh"
"scripts/performance_report.sh"
)
for file in "${IMPLEMENTATION_FILES[@]}"; do
if [[ -f "$MODULE_DIR/$file" ]]; then
pass "Implementation file exists: $file"
else
high "Missing implementation file: $file"
fi
done
# 2. Test Infrastructure Validation
echo ""
log "=== TEST INFRASTRUCTURE VALIDATION ==="
# Count test files
TEST_FILE_COUNT=$(find "$MODULE_DIR/tests" -name "*Test.php" 2>/dev/null | wc -l)
if [[ $TEST_FILE_COUNT -ge 20 ]]; then
pass "Test suite comprehensive: $TEST_FILE_COUNT test files"
elif [[ $TEST_FILE_COUNT -ge 10 ]]; then
medium "Test suite adequate: $TEST_FILE_COUNT test files"
else
high "Test suite insufficient: $TEST_FILE_COUNT test files (minimum 20 required)"
fi
# Check test categories
TEST_CATEGORIES=(
"tests/contract"
"tests/integration"
"tests/security"
"tests/performance"
"tests/unit"
"tests/database"
)
for category in "${TEST_CATEGORIES[@]}"; do
if [[ -d "$MODULE_DIR/$category" ]]; then
TEST_COUNT=$(find "$MODULE_DIR/$category" -name "*Test.php" | wc -l)
if [[ $TEST_COUNT -gt 0 ]]; then
pass "Test category implemented: $category ($TEST_COUNT tests)"
else
medium "Test category empty: $category"
fi
else
high "Missing test category: $category"
fi
done
# Validate PHPUnit configuration
if [[ -f "$MODULE_DIR/phpunit.xml" ]]; then
if grep -q "testsuites" "$MODULE_DIR/phpunit.xml"; then
pass "PHPUnit configuration includes test suites"
else
medium "PHPUnit configuration missing test suites"
fi
if grep -q "coverage" "$MODULE_DIR/phpunit.xml"; then
pass "PHPUnit configuration includes coverage reporting"
else
low "PHPUnit configuration missing coverage reporting"
fi
else
critical "PHPUnit configuration file missing"
fi
# 3. Security Implementation Validation
echo ""
log "=== SECURITY IMPLEMENTATION VALIDATION ==="
# Check encryption implementation
if [[ -f "$MODULE_DIR/libraries/Encryption.php" ]]; then
if grep -q "AES-256-GCM" "$MODULE_DIR/libraries/Encryption.php"; then
pass "Strong encryption algorithm implemented (AES-256-GCM)"
else
critical "Weak or missing encryption algorithm"
fi
if grep -q "random_bytes" "$MODULE_DIR/libraries/Encryption.php"; then
pass "Cryptographically secure random number generation"
else
high "Weak random number generation detected"
fi
else
critical "Encryption library missing"
fi
# Check OAuth implementation
OAUTH_FILES=$(find "$MODULE_DIR" -name "*.php" -exec grep -l "oauth\|OAuth" {} \; 2>/dev/null | wc -l)
if [[ $OAUTH_FILES -gt 0 ]]; then
pass "OAuth implementation found in $OAUTH_FILES files"
# Check for PKCE implementation
PKCE_IMPL=$(grep -r "code_challenge\|code_verifier" "$MODULE_DIR" --include="*.php" 2>/dev/null | wc -l)
if [[ $PKCE_IMPL -gt 0 ]]; then
pass "PKCE (Proof Key for Code Exchange) implemented"
else
medium "PKCE not implemented - consider for enhanced security"
fi
else
critical "OAuth implementation not found"
fi
# Check input validation
VALIDATION_PATTERNS=$(grep -r "filter_var\|htmlspecialchars\|strip_tags\|mysqli_real_escape_string" "$MODULE_DIR" --include="*.php" 2>/dev/null | wc -l)
if [[ $VALIDATION_PATTERNS -gt 10 ]]; then
pass "Comprehensive input validation implemented"
elif [[ $VALIDATION_PATTERNS -gt 5 ]]; then
medium "Basic input validation implemented"
else
high "Insufficient input validation"
fi
# Check for hardcoded secrets
HARDCODED_SECRETS=$(grep -r -i -E "(password|secret|key|token).*=.*['\"][^'\"]*['\"]" "$MODULE_DIR" --include="*.php" | grep -v "// " | grep -v "/\*" | wc -l)
if [[ $HARDCODED_SECRETS -gt 0 ]]; then
critical "Potential hardcoded secrets found: $HARDCODED_SECRETS instances"
else
pass "No hardcoded secrets detected"
fi
# 4. Performance and Scalability Validation
echo ""
log "=== PERFORMANCE AND SCALABILITY VALIDATION ==="
# Check for caching implementation
CACHING_IMPL=$(grep -r "cache\|redis" "$MODULE_DIR" --include="*.php" 2>/dev/null | wc -l)
if [[ $CACHING_IMPL -gt 5 ]]; then
pass "Caching strategy implemented"
elif [[ $CACHING_IMPL -gt 0 ]]; then
medium "Basic caching implemented"
else
high "No caching strategy detected"
fi
# Check queue implementation
if [[ -f "$MODULE_DIR/cli/queue_processor.php" ]]; then
pass "Queue processing system implemented"
# Check for queue management features
if grep -q "priority\|retry\|failed" "$MODULE_DIR/cli/queue_processor.php"; then
pass "Advanced queue features implemented"
else
medium "Basic queue implementation only"
fi
else
critical "Queue processing system missing"
fi
# Check database optimization
DB_MIGRATIONS=$(find "$MODULE_DIR/database/migrations" -name "*.sql" 2>/dev/null | wc -l)
if [[ $DB_MIGRATIONS -gt 0 ]]; then
pass "Database migration system implemented"
# Check for indexes in migrations
INDEX_COUNT=$(grep -i "INDEX\|KEY" "$MODULE_DIR/database/migrations/"*.sql 2>/dev/null | wc -l)
if [[ $INDEX_COUNT -gt 5 ]]; then
pass "Database indexes implemented for performance"
else
medium "Limited database optimization detected"
fi
else
critical "Database migration system missing"
fi
# 5. Code Quality Validation
echo ""
log "=== CODE QUALITY VALIDATION ==="
# Check for Composer dependencies
if [[ -f "$MODULE_DIR/composer.json" ]]; then
pass "Composer dependency management implemented"
# Check for development vs production dependencies
if grep -q "require-dev" "$MODULE_DIR/composer.json"; then
pass "Development dependencies separated"
else
low "No development dependencies separation"
fi
# Check for autoloading
if grep -q "autoload" "$MODULE_DIR/composer.json"; then
pass "Autoloading configuration present"
else
medium "Missing autoloading configuration"
fi
else
high "Composer dependency management missing"
fi
# Check code organization
PHP_FILE_COUNT=$(find "$MODULE_DIR" -name "*.php" | wc -l)
if [[ $PHP_FILE_COUNT -gt 20 ]]; then
pass "Comprehensive PHP implementation: $PHP_FILE_COUNT files"
elif [[ $PHP_FILE_COUNT -gt 10 ]]; then
medium "Adequate PHP implementation: $PHP_FILE_COUNT files"
else
high "Limited PHP implementation: $PHP_FILE_COUNT files"
fi
# Check for namespacing
NAMESPACE_USAGE=$(grep -r "namespace\|use " "$MODULE_DIR" --include="*.php" 2>/dev/null | wc -l)
if [[ $NAMESPACE_USAGE -gt 10 ]]; then
pass "Proper namespacing implemented"
elif [[ $NAMESPACE_USAGE -gt 0 ]]; then
medium "Basic namespacing implemented"
else
low "No namespacing detected"
fi
# 6. Documentation Validation
echo ""
log "=== DOCUMENTATION VALIDATION ==="
# Check for essential documentation
DOCUMENTATION_FILES=(
"README.md"
"docs/ADMINISTRATOR_GUIDE.md"
"docs/CLIENT_USER_GUIDE.md"
"docs/TROUBLESHOOTING_MANUAL.md"
"docs/MAINTENANCE_PROCEDURES.md"
)
for doc in "${DOCUMENTATION_FILES[@]}"; do
if [[ -f "$MODULE_DIR/$doc" ]]; then
FILE_SIZE=$(stat -f%z "$MODULE_DIR/$doc" 2>/dev/null || stat -c%s "$MODULE_DIR/$doc" 2>/dev/null || echo 0)
if [[ $FILE_SIZE -gt 1000 ]]; then
pass "Documentation complete: $doc ($(($FILE_SIZE / 1024))KB)"
else
medium "Documentation minimal: $doc"
fi
else
high "Missing documentation: $doc"
fi
done
# Check for code documentation
PHPDOC_COUNT=$(grep -r "@param\|@return\|@throws" "$MODULE_DIR" --include="*.php" 2>/dev/null | wc -l)
if [[ $PHPDOC_COUNT -gt 50 ]]; then
pass "Comprehensive code documentation"
elif [[ $PHPDOC_COUNT -gt 20 ]]; then
medium "Basic code documentation"
else
low "Limited code documentation"
fi
# 7. Configuration Management Validation
echo ""
log "=== CONFIGURATION MANAGEMENT VALIDATION ==="
# Check configuration structure
if [[ -d "$MODULE_DIR/config" ]]; then
CONFIG_FILES=$(find "$MODULE_DIR/config" -name "*.php" | wc -l)
if [[ $CONFIG_FILES -gt 0 ]]; then
pass "Configuration system implemented: $CONFIG_FILES config files"
else
medium "Configuration directory empty"
fi
else
high "Configuration directory missing"
fi
# Check for environment-specific configuration
ENV_CONFIG=$(grep -r "getenv\|env(" "$MODULE_DIR" --include="*.php" 2>/dev/null | wc -l)
if [[ $ENV_CONFIG -gt 0 ]]; then
pass "Environment-based configuration implemented"
else
medium "No environment-based configuration detected"
fi
# Check for configuration validation
CONFIG_VALIDATION=$(grep -r "config.*validation\|validate.*config" "$MODULE_DIR" --include="*.php" 2>/dev/null | wc -l)
if [[ $CONFIG_VALIDATION -gt 0 ]]; then
pass "Configuration validation implemented"
else
low "No configuration validation detected"
fi
# 8. Integration and Compatibility Validation
echo ""
log "=== INTEGRATION AND COMPATIBILITY VALIDATION ==="
# Check Perfex CRM integration hooks
HOOK_USAGE=$(grep -r "hooks()\|add_action\|add_filter" "$MODULE_DIR" --include="*.php" 2>/dev/null | wc -l)
if [[ $HOOK_USAGE -gt 5 ]]; then
pass "Comprehensive Perfex CRM integration"
elif [[ $HOOK_USAGE -gt 0 ]]; then
medium "Basic Perfex CRM integration"
else
critical "No Perfex CRM integration detected"
fi
# Check for menu integration
MENU_INTEGRATION=$(grep -r "app_menu\|sidebar" "$MODULE_DIR" --include="*.php" 2>/dev/null | wc -l)
if [[ $MENU_INTEGRATION -gt 0 ]]; then
pass "Admin menu integration implemented"
else
high "No admin menu integration"
fi
# Check for permission system integration
PERMISSION_USAGE=$(grep -r "has_permission\|tblpermissions" "$MODULE_DIR" --include="*.php" 2>/dev/null | wc -l)
if [[ $PERMISSION_USAGE -gt 0 ]]; then
pass "Permission system integration implemented"
else
high "No permission system integration"
fi
# 9. Client Portal Validation
echo ""
log "=== CLIENT PORTAL VALIDATION ==="
if [[ -d "$MODULE_DIR/client_portal" ]]; then
pass "Client portal directory exists"
# Check for Vue.js implementation
if [[ -f "$MODULE_DIR/client_portal/package.json" ]]; then
if grep -q "vue" "$MODULE_DIR/client_portal/package.json"; then
pass "Vue.js client portal implemented"
else
medium "Client portal missing Vue.js"
fi
else
medium "Client portal missing package.json"
fi
# Check for built assets
if [[ -d "$MODULE_DIR/client_portal/dist" ]]; then
ASSET_COUNT=$(find "$MODULE_DIR/client_portal/dist" -name "*.js" -o -name "*.css" | wc -l)
if [[ $ASSET_COUNT -gt 0 ]]; then
pass "Client portal assets built: $ASSET_COUNT files"
else
high "Client portal assets not built"
fi
else
high "Client portal build directory missing"
fi
else
high "Client portal not implemented"
fi
# 10. Deployment Readiness Validation
echo ""
log "=== DEPLOYMENT READINESS VALIDATION ==="
# Check for installation scripts
if [[ -f "$MODULE_DIR/scripts/install.sh" && -x "$MODULE_DIR/scripts/install.sh" ]]; then
pass "Installation script ready"
else
critical "Installation script missing or not executable"
fi
# Check for deployment documentation
if [[ -f "$MODULE_DIR/PRODUCTION_DEPLOYMENT_PACKAGE.md" ]]; then
pass "Deployment documentation available"
else
high "Deployment documentation missing"
fi
# Check for version tracking
if [[ -f "$MODULE_DIR/VERSION" ]]; then
VERSION=$(cat "$MODULE_DIR/VERSION")
if [[ "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
pass "Version properly formatted: $VERSION"
else
medium "Version format irregular: $VERSION"
fi
else
medium "Version file missing"
fi
# Check for backup procedures
BACKUP_SCRIPTS=$(find "$MODULE_DIR/scripts" -name "*backup*" -o -name "*restore*" 2>/dev/null | wc -l)
if [[ $BACKUP_SCRIPTS -gt 0 ]]; then
pass "Backup procedures implemented"
else
medium "No backup procedures detected"
fi
# Calculate Production Readiness Score
echo ""
log "=== CALCULATING PRODUCTION READINESS SCORE ==="
TOTAL_CHECKS=100 # Approximate number of checks performed
TOTAL_FAILURES=$((CRITICAL_FAILURES + HIGH_FAILURES + MEDIUM_FAILURES + LOW_FAILURES))
PASS_COUNT=$((TOTAL_CHECKS - TOTAL_FAILURES))
READINESS_SCORE=$(((PASS_COUNT * 100) / TOTAL_CHECKS))
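# Worked example: 0 critical + 2 high + 6 medium + 4 low = 12 findings -> PASS_COUNT=88 -> READINESS_SCORE=88%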
# Determine readiness status
if [[ $CRITICAL_FAILURES -gt 0 ]]; then
READINESS_STATUS="NOT READY"
READINESS_COLOR="${RED}"
elif [[ $HIGH_FAILURES -gt 5 ]]; then
READINESS_STATUS="NEEDS WORK"
READINESS_COLOR="${YELLOW}"
elif [[ $HIGH_FAILURES -gt 0 || $MEDIUM_FAILURES -gt 10 ]]; then
READINESS_STATUS="ALMOST READY"
READINESS_COLOR="${YELLOW}"
else
READINESS_STATUS="PRODUCTION READY"
READINESS_COLOR="${GREEN}"
fi
# Generate final report
echo ""
echo "┌─────────────────────────────────────────────────────────────────────────────┐" | tee -a "$REPORT_FILE"
echo "│ PRODUCTION READINESS VALIDATION REPORT │" | tee -a "$REPORT_FILE"
echo "├─────────────────────────────────────────────────────────────────────────────┤" | tee -a "$REPORT_FILE"
echo "│ Module: Desk-Moloni v3.0 │" | tee -a "$REPORT_FILE"
echo "│ Validation Date: $(date)" | tee -a "$REPORT_FILE"
echo "│ Report File: $REPORT_FILE" | tee -a "$REPORT_FILE"
echo "├─────────────────────────────────────────────────────────────────────────────┤" | tee -a "$REPORT_FILE"
printf "│ Readiness Score: %-8s │ Status: %-12s │ Total Checks: %-6s │\n" "${READINESS_SCORE}%" "$READINESS_STATUS" "$TOTAL_CHECKS" | tee -a "$REPORT_FILE"
echo "├─────────────────────────────────────────────────────────────────────────────┤" | tee -a "$REPORT_FILE"
printf "│ Critical Issues: %-6s │ High Issues: %-6s │ Medium Issues: %-6s │\n" "$CRITICAL_FAILURES" "$HIGH_FAILURES" "$MEDIUM_FAILURES" | tee -a "$REPORT_FILE"
printf "│ Low Issues: %-10s │ Pass Count: %-8s │ Fail Count: %-8s │\n" "$LOW_FAILURES" "$PASS_COUNT" "$TOTAL_FAILURES" | tee -a "$REPORT_FILE"
echo "└─────────────────────────────────────────────────────────────────────────────┘" | tee -a "$REPORT_FILE"
echo "" | tee -a "$REPORT_FILE"
# Production readiness recommendations
echo "PRODUCTION READINESS ASSESSMENT:" | tee -a "$REPORT_FILE"
echo "===============================" | tee -a "$REPORT_FILE"
if [[ $CRITICAL_FAILURES -gt 0 ]]; then
echo "🚨 CRITICAL: $CRITICAL_FAILURES critical issues must be resolved before production deployment" | tee -a "$REPORT_FILE"
echo " - Review and fix all critical security, functionality, and integration issues" | tee -a "$REPORT_FILE"
echo " - Complete missing core components" | tee -a "$REPORT_FILE"
echo " - Implement essential security measures" | tee -a "$REPORT_FILE"
fi
if [[ $HIGH_FAILURES -gt 0 ]]; then
echo "⚠️ HIGH: $HIGH_FAILURES high-priority issues should be addressed" | tee -a "$REPORT_FILE"
echo " - Enhance security implementations" | tee -a "$REPORT_FILE"
echo " - Complete missing documentation" | tee -a "$REPORT_FILE"
echo " - Improve test coverage" | tee -a "$REPORT_FILE"
fi
if [[ $MEDIUM_FAILURES -gt 0 ]]; then
echo "📋 MEDIUM: $MEDIUM_FAILURES medium-priority improvements recommended" | tee -a "$REPORT_FILE"
echo " - Enhance performance optimizations" | tee -a "$REPORT_FILE"
echo " - Improve code documentation" | tee -a "$REPORT_FILE"
echo " - Add monitoring capabilities" | tee -a "$REPORT_FILE"
fi
if [[ "$READINESS_STATUS" == "PRODUCTION READY" ]]; then
echo "✅ EXCELLENT: Module is production ready!" | tee -a "$REPORT_FILE"
echo " - All critical requirements met" | tee -a "$REPORT_FILE"
echo " - Security standards implemented" | tee -a "$REPORT_FILE"
echo " - Documentation complete" | tee -a "$REPORT_FILE"
echo " - Testing infrastructure in place" | tee -a "$REPORT_FILE"
fi
echo "" | tee -a "$REPORT_FILE"
echo "NEXT STEPS:" | tee -a "$REPORT_FILE"
echo "1. Address all critical and high-priority issues" | tee -a "$REPORT_FILE"
echo "2. Perform final security audit" | tee -a "$REPORT_FILE"
echo "3. Complete performance testing" | tee -a "$REPORT_FILE"
echo "4. Prepare production deployment plan" | tee -a "$REPORT_FILE"
echo "5. Schedule go-live activities" | tee -a "$REPORT_FILE"
echo ""
echo "========================================================================"
echo -e "Production readiness validation completed!"
echo -e "Readiness Status: ${READINESS_COLOR}$READINESS_STATUS${NC}"
echo -e "Score: $READINESS_SCORE% | Critical: $CRITICAL_FAILURES | High: $HIGH_FAILURES | Medium: $MEDIUM_FAILURES"
echo "Report saved to: $REPORT_FILE"
echo "========================================================================"
# Exit with appropriate code
if [[ $CRITICAL_FAILURES -gt 0 ]]; then
exit 1
elif [[ $HIGH_FAILURES -gt 5 ]]; then
exit 2
else
exit 0
fi

449
scripts/security_audit.sh Normal file
View File

@@ -0,0 +1,449 @@
#!/bin/bash
# Desk-Moloni v3.0 Security Audit Script
# Author: Descomplicar.pt
# Version: 3.0.0
# License: Commercial
set -e
# Color codes for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
MODULE_DIR="$(dirname "$SCRIPT_DIR")"
REPORT_FILE="/tmp/desk-moloni-security-audit-$(date +%Y%m%d-%H%M%S).txt"
CRITICAL_ISSUES=0
HIGH_ISSUES=0
MEDIUM_ISSUES=0
LOW_ISSUES=0
# Functions
log() {
echo -e "${GREEN}[AUDIT]${NC} $1" | tee -a "$REPORT_FILE"
}
critical() {
echo -e "${RED}[CRITICAL]${NC} $1" | tee -a "$REPORT_FILE"
CRITICAL_ISSUES=$((CRITICAL_ISSUES + 1))   # ((var++)) returns status 1 when var is 0 and would abort the script under set -e
}
high() {
echo -e "${RED}[HIGH]${NC} $1" | tee -a "$REPORT_FILE"
HIGH_ISSUES=$((HIGH_ISSUES + 1))
}
medium() {
echo -e "${YELLOW}[MEDIUM]${NC} $1" | tee -a "$REPORT_FILE"
MEDIUM_ISSUES=$((MEDIUM_ISSUES + 1))
}
low() {
echo -e "${BLUE}[LOW]${NC} $1" | tee -a "$REPORT_FILE"
LOW_ISSUES=$((LOW_ISSUES + 1))
}
pass() {
echo -e "${GREEN}[PASS]${NC} $1" | tee -a "$REPORT_FILE"
}
# Security audit banner
echo "========================================================================"
echo " DESK-MOLONI v3.0 SECURITY AUDIT"
echo "========================================================================"
echo "Report File: $REPORT_FILE"
echo "Audit Date: $(date)"
echo ""
log "Starting comprehensive security audit..."
# 1. File Permissions Audit
echo ""
log "=== FILE PERMISSIONS AUDIT ==="
# Check file permissions
WRITABLE_FILES=$(find "$MODULE_DIR" -type f -perm /o+w 2>/dev/null | wc -l)
if [[ $WRITABLE_FILES -gt 0 ]]; then
high "Found $WRITABLE_FILES world-writable files"
find "$MODULE_DIR" -type f -perm /o+w | head -10 | while read file; do
echo " - $file" | tee -a "$REPORT_FILE"
done
else
pass "No world-writable files found"
fi
# Check directory permissions
WRITABLE_DIRS=$(find "$MODULE_DIR" -type d -perm /o+w 2>/dev/null | grep -v "/uploads/" | wc -l)
if [[ $WRITABLE_DIRS -gt 0 ]]; then
medium "Found $WRITABLE_DIRS world-writable directories (excluding uploads)"
find "$MODULE_DIR" -type d -perm /o+w | grep -v "/uploads/" | head -5 | while read dir; do
echo " - $dir" | tee -a "$REPORT_FILE"
done
else
pass "Directory permissions are secure"
fi
# Check for executable PHP files in web-accessible locations
EXECUTABLE_PHP=$(find "$MODULE_DIR" -name "*.php" -path "*/assets/*" -o -name "*.php" -path "*/uploads/*" 2>/dev/null | wc -l)
if [[ $EXECUTABLE_PHP -gt 0 ]]; then
critical "Found PHP files in web-accessible directories"
find "$MODULE_DIR" -name "*.php" -path "*/assets/*" -o -name "*.php" -path "*/uploads/*" | while read file; do
echo " - $file" | tee -a "$REPORT_FILE"
done
else
pass "No PHP files in web-accessible directories"
fi
# 2. Configuration Security Audit
echo ""
log "=== CONFIGURATION SECURITY AUDIT ==="
# Check for hardcoded credentials
HARDCODED_CREDS=$(grep -r -i -E "(password|secret|key|token)" "$MODULE_DIR" --include="*.php" | grep -v "// " | grep -v "/\*" | grep -E "=['\"][^'\"]*['\"]" | wc -l)
if [[ $HARDCODED_CREDS -gt 0 ]]; then
high "Potential hardcoded credentials found"
grep -r -i -E "(password|secret|key|token)" "$MODULE_DIR" --include="*.php" | grep -v "// " | grep -v "/\*" | grep -E "=['\"][^'\"]*['\"]" | head -5 | while read line; do
echo " - $(echo $line | cut -d: -f1)" | tee -a "$REPORT_FILE"
done
else
pass "No hardcoded credentials detected"
fi
# Check encryption configuration
if [[ -f "$MODULE_DIR/libraries/Encryption.php" ]]; then
ENCRYPTION_CONFIG=$(grep -E "(AES-256|GCM)" "$MODULE_DIR/libraries/Encryption.php" | wc -l)
if [[ $ENCRYPTION_CONFIG -gt 0 ]]; then
pass "Strong encryption algorithm configured (AES-256-GCM)"
else
critical "Weak or no encryption algorithm configured"
fi
else
critical "Encryption library not found"
fi
# Check for debug mode in production
DEBUG_ENABLED=$(grep -r "debug.*true" "$MODULE_DIR/config/" 2>/dev/null | wc -l)
if [[ $DEBUG_ENABLED -gt 0 ]]; then
high "Debug mode appears to be enabled"
grep -r "debug.*true" "$MODULE_DIR/config/" | while read line; do
echo " - $line" | tee -a "$REPORT_FILE"
done
else
pass "Debug mode is disabled"
fi
# 3. Database Security Audit
echo ""
log "=== DATABASE SECURITY AUDIT ==="
# Check if database credentials are in environment variables
if [[ -f "$MODULE_DIR/../../.env" ]]; then
DB_IN_ENV=$(grep -E "DB_|DATABASE_" "$MODULE_DIR/../../.env" | wc -l)
if [[ $DB_IN_ENV -gt 0 ]]; then
pass "Database credentials found in environment file"
else
medium "Database credentials may not be in environment file"
fi
else
medium "Environment file (.env) not found"
fi
# Check for SQL injection patterns
SQL_PATTERNS=$(grep -r -E "\\\$.*\.(SELECT|INSERT|UPDATE|DELETE)" "$MODULE_DIR" --include="*.php" | grep -v "prepare" | wc -l)
if [[ $SQL_PATTERNS -gt 0 ]]; then
critical "Potential SQL injection vulnerabilities found"
grep -r -E "\\\$.*\.(SELECT|INSERT|UPDATE|DELETE)" "$MODULE_DIR" --include="*.php" | grep -v "prepare" | head -3 | while read line; do
echo " - $(echo $line | cut -d: -f1)" | tee -a "$REPORT_FILE"
done
else
pass "No obvious SQL injection patterns detected"
fi
# Check for encrypted storage configuration
ENCRYPTED_STORAGE=$(grep -r "encrypt" "$MODULE_DIR/config/" 2>/dev/null | wc -l)
if [[ $ENCRYPTED_STORAGE -gt 0 ]]; then
pass "Encryption configuration found"
else
medium "No encryption configuration detected"
fi
# 4. API Security Audit
echo ""
log "=== API SECURITY AUDIT ==="
# Check for OAuth 2.0 implementation
OAUTH_IMPL=$(find "$MODULE_DIR" -name "*.php" -exec grep -l "oauth\|OAuth" {} \; | wc -l)
if [[ $OAUTH_IMPL -gt 0 ]]; then
pass "OAuth implementation found"
# Check for PKCE implementation
PKCE_IMPL=$(grep -r "code_challenge\|code_verifier" "$MODULE_DIR" --include="*.php" | wc -l)
if [[ $PKCE_IMPL -gt 0 ]]; then
pass "PKCE (Proof Key for Code Exchange) implemented"
else
medium "PKCE not detected - consider implementing for enhanced security"
fi
else
critical "No OAuth implementation found"
fi
# Check for rate limiting
RATE_LIMIT=$(grep -r "rate.limit\|throttle" "$MODULE_DIR" --include="*.php" | wc -l)
if [[ $RATE_LIMIT -gt 0 ]]; then
pass "Rate limiting implementation found"
else
medium "No rate limiting detected"
fi
# Check for API key exposure
API_KEYS=$(grep -r -i "api.key\|client.secret" "$MODULE_DIR" --include="*.php" | grep -v "getenv\|env(" | wc -l)
if [[ $API_KEYS -gt 0 ]]; then
high "Potential API key exposure found"
grep -r -i "api.key\|client.secret" "$MODULE_DIR" --include="*.php" | grep -v "getenv\|env(" | head -3 | while read line; do
echo " - $(echo $line | cut -d: -f1)" | tee -a "$REPORT_FILE"
done
else
pass "No exposed API keys detected"
fi
# 5. Input Validation Audit
echo ""
log "=== INPUT VALIDATION AUDIT ==="
# Check for input sanitization
SANITIZATION=$(grep -r "filter_var\|htmlspecialchars\|strip_tags" "$MODULE_DIR" --include="*.php" | wc -l)
if [[ $SANITIZATION -gt 0 ]]; then
pass "Input sanitization functions found"
else
high "Limited input sanitization detected"
fi
# Check for CSRF protection
CSRF_PROTECTION=$(grep -r "csrf\|token" "$MODULE_DIR" --include="*.php" | wc -l)
if [[ $CSRF_PROTECTION -gt 0 ]]; then
pass "CSRF protection implementation found"
else
high "No CSRF protection detected"
fi
# Check for XSS protection
XSS_PROTECTION=$(grep -r "htmlentities\|htmlspecialchars" "$MODULE_DIR" --include="*.php" | wc -l)
if [[ $XSS_PROTECTION -gt 0 ]]; then
pass "XSS protection functions found"
else
medium "Limited XSS protection detected"
fi
# 6. Session Security Audit
echo ""
log "=== SESSION SECURITY AUDIT ==="
# Check for secure session configuration
SESSION_CONFIG=$(grep -r "session_" "$MODULE_DIR" --include="*.php" | wc -l)
if [[ $SESSION_CONFIG -gt 0 ]]; then
pass "Session configuration found"
# Check for secure session settings
SECURE_SESSION=$(grep -r "session_set_cookie_params.*secure\|httponly" "$MODULE_DIR" --include="*.php" | wc -l)
if [[ $SECURE_SESSION -gt 0 ]]; then
pass "Secure session settings detected"
else
medium "Consider implementing secure session cookie settings"
fi
else
low "No session configuration detected"
fi
# 7. Error Handling Audit
echo ""
log "=== ERROR HANDLING AUDIT ==="
# Check for error suppression
ERROR_SUPPRESSION=$(grep -r "@.*(" "$MODULE_DIR" --include="*.php" | wc -l)
if [[ $ERROR_SUPPRESSION -gt 0 ]]; then
medium "Error suppression found - may hide security issues"
grep -r "@.*(" "$MODULE_DIR" --include="*.php" | head -3 | while read line; do
echo " - $(echo $line | cut -d: -f1)" | tee -a "$REPORT_FILE"
done
else
pass "No error suppression detected"
fi
# Check for proper error logging
ERROR_LOGGING=$(grep -r "error_log\|log_message" "$MODULE_DIR" --include="*.php" | wc -l)
if [[ $ERROR_LOGGING -gt 0 ]]; then
pass "Error logging implementation found"
else
medium "Limited error logging detected"
fi
# 8. File Upload Security Audit
echo ""
log "=== FILE UPLOAD SECURITY AUDIT ==="
# Check for file upload functionality
FILE_UPLOAD=$(grep -r "move_uploaded_file\|upload" "$MODULE_DIR" --include="*.php" | wc -l)
if [[ $FILE_UPLOAD -gt 0 ]]; then
medium "File upload functionality detected"
# Check for file type validation
FILE_VALIDATION=$(grep -r "mime\|extension\|pathinfo" "$MODULE_DIR" --include="*.php" | wc -l)
if [[ $FILE_VALIDATION -gt 0 ]]; then
pass "File validation implementation found"
else
critical "No file validation detected for uploads"
fi
# Check for file size limits
SIZE_LIMITS=$(grep -r "filesize\|MAX_FILE_SIZE" "$MODULE_DIR" --include="*.php" | wc -l)
if [[ $SIZE_LIMITS -gt 0 ]]; then
pass "File size validation found"
else
medium "No file size limits detected"
fi
else
pass "No file upload functionality detected"
fi
# 9. Logging and Monitoring Audit
echo ""
log "=== LOGGING AND MONITORING AUDIT ==="
# Check for audit logging
AUDIT_LOGGING=$(find "$MODULE_DIR" -name "*log*" -type f | wc -l)
if [[ $AUDIT_LOGGING -gt 0 ]]; then
pass "Logging files found"
else
medium "No log files detected"
fi
# Check for security event logging
SECURITY_LOGGING=$(grep -r "SECURITY\|AUTH\|LOGIN" "$MODULE_DIR" --include="*.php" | wc -l)
if [[ $SECURITY_LOGGING -gt 0 ]]; then
pass "Security event logging found"
else
medium "Limited security event logging"
fi
# 10. Dependency Security Audit
echo ""
log "=== DEPENDENCY SECURITY AUDIT ==="
# Check for composer.json
if [[ -f "$MODULE_DIR/composer.json" ]]; then
pass "Composer dependencies file found"
# Check for security-related packages
SECURITY_PACKAGES=$(grep -E "(security|auth|encrypt)" "$MODULE_DIR/composer.json" | wc -l)
if [[ $SECURITY_PACKAGES -gt 0 ]]; then
pass "Security-related packages detected"
else
low "Consider adding security-focused packages"
fi
else
medium "No composer.json found - manual dependency management"
fi
# Check for outdated dependencies (if composer is available)
if command -v composer > /dev/null 2>&1 && [[ -f "$MODULE_DIR/composer.json" ]]; then
cd "$MODULE_DIR"
OUTDATED=$(composer outdated --direct 2>/dev/null | wc -l)
if [[ $OUTDATED -gt 0 ]]; then
medium "$OUTDATED outdated dependencies detected"
else
pass "Dependencies are up to date"
fi
fi
# Generate Security Score
echo ""
log "=== SECURITY AUDIT SUMMARY ==="
TOTAL_ISSUES=$((CRITICAL_ISSUES + HIGH_ISSUES + MEDIUM_ISSUES + LOW_ISSUES))
TOTAL_CHECKS=50 # Approximate number of security checks
if [[ $CRITICAL_ISSUES -gt 0 ]]; then
SECURITY_GRADE="F"
SECURITY_SCORE=0
elif [[ $HIGH_ISSUES -gt 3 ]]; then
SECURITY_GRADE="D"
SECURITY_SCORE=25
elif [[ $HIGH_ISSUES -gt 0 || $MEDIUM_ISSUES -gt 5 ]]; then
SECURITY_GRADE="C"
SECURITY_SCORE=50
elif [[ $MEDIUM_ISSUES -gt 2 || $LOW_ISSUES -gt 5 ]]; then
SECURITY_GRADE="B"
SECURITY_SCORE=75
else
SECURITY_GRADE="A"
SECURITY_SCORE=90
fi
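# Examples: 0 critical, 1 high, 3 medium -> grade C (50%); 0 critical, 0 high, 3 medium -> grade B (75%)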
echo "┌─────────────────────────────────────────────────────────────────────────────┐" | tee -a "$REPORT_FILE"
echo "│ SECURITY AUDIT REPORT │" | tee -a "$REPORT_FILE"
echo "├─────────────────────────────────────────────────────────────────────────────┤" | tee -a "$REPORT_FILE"
echo "│ Module: Desk-Moloni v3.0 │" | tee -a "$REPORT_FILE"
echo "│ Audit Date: $(date)" | tee -a "$REPORT_FILE"
echo "│ Report File: $REPORT_FILE" | tee -a "$REPORT_FILE"
echo "├─────────────────────────────────────────────────────────────────────────────┤" | tee -a "$REPORT_FILE"
printf "│ Security Grade: %-10s │ Security Score: %-10s │ Total Issues: %-6s │\n" "$SECURITY_GRADE" "${SECURITY_SCORE}%" "$TOTAL_ISSUES" | tee -a "$REPORT_FILE"
echo "├─────────────────────────────────────────────────────────────────────────────┤" | tee -a "$REPORT_FILE"
printf "│ Critical Issues: %-5s │ High Issues: %-5s │ Medium Issues: %-5s │\n" "$CRITICAL_ISSUES" "$HIGH_ISSUES" "$MEDIUM_ISSUES" | tee -a "$REPORT_FILE"
printf "│ Low Issues: %-10s │ Total Checks: %-8s │ Pass Rate: %-6s │\n" "$LOW_ISSUES" "$TOTAL_CHECKS" "$(((TOTAL_CHECKS - TOTAL_ISSUES) * 100 / TOTAL_CHECKS))%" | tee -a "$REPORT_FILE"
echo "└─────────────────────────────────────────────────────────────────────────────┘" | tee -a "$REPORT_FILE"
echo "" | tee -a "$REPORT_FILE"
# Recommendations
echo "SECURITY RECOMMENDATIONS:" | tee -a "$REPORT_FILE"
echo "=========================" | tee -a "$REPORT_FILE"
if [[ $CRITICAL_ISSUES -gt 0 ]]; then
echo "🚨 CRITICAL: Address all critical issues immediately before production deployment" | tee -a "$REPORT_FILE"
fi
if [[ $HIGH_ISSUES -gt 0 ]]; then
echo "⚠️ HIGH: Resolve high-priority security issues within 24 hours" | tee -a "$REPORT_FILE"
fi
if [[ $MEDIUM_ISSUES -gt 0 ]]; then
echo "📋 MEDIUM: Address medium-priority issues within 1 week" | tee -a "$REPORT_FILE"
fi
if [[ $SECURITY_GRADE == "A" ]]; then
echo "✅ EXCELLENT: Security posture is excellent. Continue regular audits." | tee -a "$REPORT_FILE"
elif [[ $SECURITY_GRADE == "B" ]]; then
echo "✅ GOOD: Security posture is good. Address remaining issues." | tee -a "$REPORT_FILE"
elif [[ $SECURITY_GRADE == "C" ]]; then
echo "⚠️ FAIR: Security needs improvement. Priority fixes required." | tee -a "$REPORT_FILE"
else
echo "🚨 POOR: Security posture requires immediate attention." | tee -a "$REPORT_FILE"
fi
echo "" | tee -a "$REPORT_FILE"
echo "Next Steps:" | tee -a "$REPORT_FILE"
echo "1. Review and address all critical and high-priority issues" | tee -a "$REPORT_FILE"
echo "2. Implement additional security measures as recommended" | tee -a "$REPORT_FILE"
echo "3. Schedule regular security audits (monthly recommended)" | tee -a "$REPORT_FILE"
echo "4. Consider professional penetration testing" | tee -a "$REPORT_FILE"
echo "5. Keep dependencies updated and monitor for vulnerabilities" | tee -a "$REPORT_FILE"
echo ""
echo "========================================================================"
echo "Security audit completed. Report saved to: $REPORT_FILE"
echo "Security Grade: $SECURITY_GRADE | Score: ${SECURITY_SCORE}% | Issues: $TOTAL_ISSUES"
echo "========================================================================"
# Exit with error code if critical issues found
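# exit 0 = no blocking findings, exit 1 = critical findings, exit 2 = high-severity findings (distinct codes simplify CI gating)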
if [[ $CRITICAL_ISSUES -gt 0 ]]; then
exit 1
elif [[ $HIGH_ISSUES -gt 0 ]]; then
exit 2
else
exit 0
fi

426
scripts/setup_cron.sh Normal file
View File

@@ -0,0 +1,426 @@
#!/bin/bash
# Desk-Moloni v3.0 Cron Job Setup Script
#
# Sets up automated cron jobs for queue processing and maintenance tasks.
# Handles different environments and user permissions.
set -euo pipefail
# Script configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
MODULE_DIR="$(dirname "$SCRIPT_DIR")"
CLI_DIR="$MODULE_DIR/cli"
LOG_DIR="$MODULE_DIR/logs"
LOCK_DIR="$MODULE_DIR/locks"
# Default configuration
DEFAULT_QUEUE_INTERVAL="*/1" # Every minute
DEFAULT_MAINTENANCE_HOUR="2" # 2 AM
DEFAULT_LOG_RETENTION_DAYS="30"
DEFAULT_USER=""
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Logging functions
log_info() {
echo -e "${BLUE}[INFO]${NC} $1"
}
log_success() {
echo -e "${GREEN}[SUCCESS]${NC} $1"
}
log_warning() {
echo -e "${YELLOW}[WARNING]${NC} $1"
}
log_error() {
echo -e "${RED}[ERROR]${NC} $1"
}
# Help function
show_help() {
cat << EOF
Desk-Moloni v3.0 Cron Setup Script
Usage: $0 [OPTIONS]
Options:
-h, --help Show this help message
-u, --user USER User to run cron jobs as (default: current user)
-i, --interval INTERVAL Queue processor interval (default: */1 for every minute)
-m, --maintenance HOUR Maintenance hour (default: 2 for 2 AM)
-r, --retention DAYS Log retention days (default: 30)
-d, --dry-run Show what would be done without making changes
-v, --verbose Verbose output
--uninstall Remove all cron jobs
--status Show current cron job status
Examples:
$0 # Setup with defaults
$0 -u www-data -i "*/5" # Run as www-data every 5 minutes
$0 --dry-run # Preview changes
$0 --uninstall # Remove all cron jobs
$0 --status # Show current status
Cron Jobs Created:
  1. Queue Processor: Processes the synchronization queue (default: every minute)
  2. Daily Maintenance: Cleans up logs and updates mappings
  3. Health Check: Monitors system health (every 15 minutes)
  4. Token Refresh: Refreshes OAuth tokens (every 6 hours)
  5. Log Rotation: Deletes logs older than the retention period (weekly)
EOF
}
# Parse command line arguments
QUEUE_INTERVAL="$DEFAULT_QUEUE_INTERVAL"
MAINTENANCE_HOUR="$DEFAULT_MAINTENANCE_HOUR"
LOG_RETENTION_DAYS="$DEFAULT_LOG_RETENTION_DAYS"
CRON_USER="$DEFAULT_USER"
DRY_RUN=false
VERBOSE=false
UNINSTALL=false
SHOW_STATUS=false
while [[ $# -gt 0 ]]; do
case $1 in
-h|--help)
show_help
exit 0
;;
-u|--user)
CRON_USER="$2"
shift 2
;;
-i|--interval)
QUEUE_INTERVAL="$2"
shift 2
;;
-m|--maintenance)
MAINTENANCE_HOUR="$2"
shift 2
;;
-r|--retention)
LOG_RETENTION_DAYS="$2"
shift 2
;;
-d|--dry-run)
DRY_RUN=true
shift
;;
-v|--verbose)
VERBOSE=true
shift
;;
--uninstall)
UNINSTALL=true
shift
;;
--status)
SHOW_STATUS=true
shift
;;
*)
log_error "Unknown option: $1"
show_help
exit 1
;;
esac
done
# Set default user to current user if not specified
if [[ -z "$CRON_USER" ]]; then
CRON_USER=$(whoami)
fi
# Validate user exists
if ! id "$CRON_USER" &>/dev/null; then
log_error "User '$CRON_USER' does not exist"
exit 1
fi
# Check if running as root or target user
CURRENT_USER=$(whoami)
if [[ "$CURRENT_USER" != "root" && "$CURRENT_USER" != "$CRON_USER" ]]; then
log_error "Must run as root or target user ($CRON_USER)"
exit 1
fi
# Validate maintenance hour
if [[ ! "$MAINTENANCE_HOUR" =~ ^[0-9]+$ ]] || [[ "$MAINTENANCE_HOUR" -lt 0 ]] || [[ "$MAINTENANCE_HOUR" -gt 23 ]]; then
log_error "Invalid maintenance hour: $MAINTENANCE_HOUR (must be 0-23)"
exit 1
fi
# Create required directories
create_directories() {
local dirs=("$LOG_DIR" "$LOCK_DIR")
for dir in "${dirs[@]}"; do
if [[ ! -d "$dir" ]]; then
if [[ "$DRY_RUN" == true ]]; then
log_info "Would create directory: $dir"
else
mkdir -p "$dir"
chown "$CRON_USER:$CRON_USER" "$dir" 2>/dev/null || true
log_success "Created directory: $dir"
fi
fi
done
}
# Generate cron job entries
generate_cron_jobs() {
cat << EOF
# Desk-Moloni v3.0 Cron Jobs
# Generated on $(date)
# User: $CRON_USER
# Queue Processor - Process synchronization queue
$QUEUE_INTERVAL * * * * /usr/bin/flock -n $LOCK_DIR/queue_processor.lock php $CLI_DIR/queue_processor.php >> $LOG_DIR/queue_processor.log 2>&1
# Daily Maintenance - Cleanup and optimization
0 $MAINTENANCE_HOUR * * * /usr/bin/flock -n $LOCK_DIR/maintenance.lock $SCRIPT_DIR/maintenance.sh >> $LOG_DIR/maintenance.log 2>&1
# Health Check - System monitoring
*/15 * * * * /usr/bin/flock -n $LOCK_DIR/health_check.lock php $CLI_DIR/sync_commands.php health >> $LOG_DIR/health_check.log 2>&1
# Token Refresh - OAuth token maintenance
0 */6 * * * /usr/bin/flock -n $LOCK_DIR/token_refresh.lock $SCRIPT_DIR/token_refresh.sh >> $LOG_DIR/token_refresh.log 2>&1
# Log Rotation - Cleanup old logs
0 1 * * 0 /usr/bin/find $LOG_DIR -name "*.log" -mtime +$LOG_RETENTION_DAYS -delete
EOF
}
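# With the defaults above, the queue-processor entry renders roughly like this
# (module path is illustrative):
#   */1 * * * * /usr/bin/flock -n /var/www/crm/modules/desk_moloni/locks/queue_processor.lock php /var/www/crm/modules/desk_moloni/cli/queue_processor.php >> /var/www/crm/modules/desk_moloni/logs/queue_processor.log 2>&1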
# Get current cron jobs for the user
get_current_crontab() {
if [[ "$CURRENT_USER" == "root" ]]; then
crontab -u "$CRON_USER" -l 2>/dev/null || true
else
crontab -l 2>/dev/null || true
fi
}
# Remove existing Desk-Moloni cron jobs
remove_existing_jobs() {
local current_crontab
current_crontab=$(get_current_crontab)
if [[ -n "$current_crontab" ]]; then
# Remove the block from the Desk-Moloni header comment down to the next blank line (or EOF)
local new_crontab
new_crontab=$(echo "$current_crontab" | sed '/# Desk-Moloni v3.0 Cron Jobs/,/^$/d')
if [[ "$DRY_RUN" == true ]]; then
log_info "Would remove existing Desk-Moloni cron jobs"
else
if [[ "$CURRENT_USER" == "root" ]]; then
echo "$new_crontab" | crontab -u "$CRON_USER" -
else
echo "$new_crontab" | crontab -
fi
log_success "Removed existing Desk-Moloni cron jobs"
fi
return 0
fi
return 1
}
# Install cron jobs
install_cron_jobs() {
local current_crontab new_crontab cron_jobs
current_crontab=$(get_current_crontab)
cron_jobs=$(generate_cron_jobs)
# Combine existing crontab with new jobs
if [[ -n "$current_crontab" ]]; then
new_crontab="$current_crontab"$'\n'"$cron_jobs"
else
new_crontab="$cron_jobs"
fi
if [[ "$DRY_RUN" == true ]]; then
log_info "Would install the following cron jobs:"
echo "$cron_jobs"
else
if [[ "$CURRENT_USER" == "root" ]]; then
echo "$new_crontab" | crontab -u "$CRON_USER" -
else
echo "$new_crontab" | crontab -
fi
log_success "Installed Desk-Moloni cron jobs for user: $CRON_USER"
fi
}
# Show current status
show_status() {
log_info "Desk-Moloni Cron Job Status"
echo "================================"
local current_crontab
current_crontab=$(get_current_crontab)
if [[ -n "$current_crontab" ]]; then
local desk_moloni_jobs
desk_moloni_jobs=$(echo "$current_crontab" | sed -n '/# Desk-Moloni v3.0 Cron Jobs/,/^$/p')
if [[ -n "$desk_moloni_jobs" ]]; then
log_success "Desk-Moloni cron jobs are installed for user: $CRON_USER"
echo "$desk_moloni_jobs"
else
log_warning "No Desk-Moloni cron jobs found for user: $CRON_USER"
fi
else
log_warning "No crontab found for user: $CRON_USER"
fi
# Check if cron daemon is running
if systemctl is-active --quiet cron 2>/dev/null || systemctl is-active --quiet crond 2>/dev/null; then
log_success "Cron daemon is running"
else
log_warning "Cron daemon may not be running"
fi
# Check log files
echo -e "\nLog Files Status:"
for log_file in "queue_processor.log" "maintenance.log" "health_check.log" "token_refresh.log"; do
local log_path="$LOG_DIR/$log_file"
if [[ -f "$log_path" ]]; then
local size=$(du -h "$log_path" | cut -f1)
local modified=$(stat -c %y "$log_path" 2>/dev/null | cut -d' ' -f1,2 | cut -d'.' -f1)
log_info "$log_file: $size (modified: $modified)"
else
log_warning "$log_file: Not found"
fi
done
# Check lock files (flock lock files persist after a job exits, so test whether the lock is actually held)
echo -e "\nActive Processes:"
for lock_file in "$LOCK_DIR"/*.lock; do
if [[ -f "$lock_file" ]]; then
local lock_name=$(basename "$lock_file" .lock)
if ! flock -n "$lock_file" true 2>/dev/null; then
log_warning "$lock_name: Process appears to be running (lock is held)"
else
log_info "$lock_name: Idle (lock file present but not held)"
fi
fi
done
}
# Validate PHP and dependencies
validate_dependencies() {
log_info "Validating dependencies..."
# Check PHP
if ! command -v php &> /dev/null; then
log_error "PHP is not installed or not in PATH"
exit 1
fi
local php_version
php_version=$(php -r "echo PHP_VERSION;" 2>/dev/null)
log_success "PHP version: $php_version"
# Check flock
if ! command -v flock &> /dev/null; then
log_error "flock is not installed (required for preventing concurrent execution)"
exit 1
fi
# Check CLI files exist
local cli_files=("$CLI_DIR/queue_processor.php" "$CLI_DIR/sync_commands.php")
for cli_file in "${cli_files[@]}"; do
if [[ ! -f "$cli_file" ]]; then
log_error "CLI file not found: $cli_file"
exit 1
fi
done
log_success "All dependencies validated"
}
# Test cron job syntax
test_cron_syntax() {
log_info "Testing cron job syntax..."
local cron_jobs
cron_jobs=$(generate_cron_jobs)
# Basic validation of cron expressions
while IFS= read -r line; do
if [[ "$line" =~ ^[^#].* ]]; then
local cron_expr
cron_expr=$(echo "$line" | cut -d' ' -f1-5)
# Very basic validation - just check field count
local field_count
field_count=$(echo "$cron_expr" | wc -w)
if [[ "$field_count" -ne 5 ]]; then
log_error "Invalid cron expression: $cron_expr"
exit 1
fi
fi
done <<< "$cron_jobs"
log_success "Cron job syntax validated"
}
# Main execution
main() {
log_info "Desk-Moloni v3.0 Cron Setup"
log_info "User: $CRON_USER"
log_info "Queue Interval: $QUEUE_INTERVAL"
log_info "Maintenance Hour: $MAINTENANCE_HOUR"
if [[ "$DRY_RUN" == true ]]; then
log_warning "DRY RUN MODE - No changes will be made"
fi
if [[ "$SHOW_STATUS" == true ]]; then
show_status
exit 0
fi
if [[ "$UNINSTALL" == true ]]; then
log_info "Uninstalling Desk-Moloni cron jobs..."
if remove_existing_jobs; then
log_success "Cron jobs removed successfully"
else
log_warning "No existing cron jobs found"
fi
exit 0
fi
# Installation process
validate_dependencies
test_cron_syntax
create_directories
# Remove existing jobs first
remove_existing_jobs || true
# Install new jobs
install_cron_jobs
if [[ "$DRY_RUN" == false ]]; then
log_info ""
log_success "Cron jobs have been installed successfully!"
log_info "Monitor logs in: $LOG_DIR"
log_info "Check status with: $0 --status"
log_info ""
log_info "Next steps:"
log_info "1. Verify cron daemon is running: systemctl status cron"
log_info "2. Monitor queue processor: tail -f $LOG_DIR/queue_processor.log"
log_info "3. Check health status: php $CLI_DIR/sync_commands.php health"
fi
}
# Run main function
main "$@"

465
scripts/token_refresh.sh Normal file
View File

@@ -0,0 +1,465 @@
#!/bin/bash
# Desk-Moloni v3.0 OAuth Token Refresh Script
#
# Automatically refreshes OAuth tokens before expiration to maintain
# continuous API connectivity without manual intervention.
set -euo pipefail
# Script configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
MODULE_DIR="$(dirname "$SCRIPT_DIR")"
CLI_DIR="$MODULE_DIR/cli"
LOG_DIR="$MODULE_DIR/logs"
LOCK_FILE="$MODULE_DIR/locks/token_refresh.lock"
# Configuration
REFRESH_THRESHOLD=300 # Refresh 5 minutes before expiry
MAX_ATTEMPTS=3
RETRY_DELAY=60
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
# Logging functions
log_info() {
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
local message="[$timestamp] [INFO] $1"
echo -e "${BLUE}$message${NC}"
echo "$message" >> "$LOG_DIR/token_refresh.log" 2>/dev/null || true
}
log_success() {
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
local message="[$timestamp] [SUCCESS] $1"
echo -e "${GREEN}$message${NC}"
echo "$message" >> "$LOG_DIR/token_refresh.log" 2>/dev/null || true
}
log_warning() {
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
local message="[$timestamp] [WARNING] $1"
echo -e "${YELLOW}$message${NC}"
echo "$message" >> "$LOG_DIR/token_refresh.log" 2>/dev/null || true
}
log_error() {
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
local message="[$timestamp] [ERROR] $1"
echo -e "${RED}$message${NC}"
echo "$message" >> "$LOG_DIR/token_refresh.log" 2>/dev/null || true
}
# Help function
show_help() {
cat << EOF
Desk-Moloni v3.0 OAuth Token Refresh Script
Usage: $0 [OPTIONS]
Options:
-h, --help Show this help message
-t, --threshold SECONDS Refresh threshold in seconds (default: $REFRESH_THRESHOLD)
-a, --attempts COUNT Maximum retry attempts (default: $MAX_ATTEMPTS)
-d, --delay SECONDS Retry delay in seconds (default: $RETRY_DELAY)
--dry-run Show what would be done without changes
--force Force refresh even if not needed
--check-only Only check token status, don't refresh
Description:
This script automatically checks OAuth token expiration and refreshes
tokens when they are close to expiring. It's designed to run as a cron
job to maintain continuous API connectivity.
The script will:
1. Check current token expiration time
2. Compare against refresh threshold
3. Attempt to refresh if needed
4. Retry on failures with an increasing delay between attempts
5. Log all activities for monitoring
Examples:
$0 # Normal token refresh check
$0 --force # Force refresh regardless of expiration
$0 --check-only # Just check status, don't refresh
$0 --dry-run # Preview what would be done
EOF
}
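# When installed via scripts/setup_cron.sh, this script runs every 6 hours under flock;
# it can also be run manually with the options above (e.g. --check-only from an interactive shell).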
# Parse command line arguments
DRY_RUN=false
FORCE_REFRESH=false
CHECK_ONLY=false
while [[ $# -gt 0 ]]; do
case $1 in
-h|--help)
show_help
exit 0
;;
-t|--threshold)
REFRESH_THRESHOLD="$2"
shift 2
;;
-a|--attempts)
MAX_ATTEMPTS="$2"
shift 2
;;
-d|--delay)
RETRY_DELAY="$2"
shift 2
;;
--dry-run)
DRY_RUN=true
shift
;;
--force)
FORCE_REFRESH=true
shift
;;
--check-only)
CHECK_ONLY=true
shift
;;
*)
log_error "Unknown option: $1"
show_help
exit 1
;;
esac
done
# Ensure required directories exist
ensure_directories() {
local dirs=("$LOG_DIR" "$(dirname "$LOCK_FILE")")
for dir in "${dirs[@]}"; do
if [[ ! -d "$dir" ]]; then
mkdir -p "$dir" 2>/dev/null || true
fi
done
}
# Get token information using PHP
get_token_info() {
local token_info
if ! token_info=$(php -r "
require_once '$MODULE_DIR/config/bootstrap.php';
try {
// Load configuration service
require_once '$MODULE_DIR/src/Services/ConfigService.php';
\$configService = new DeskMoloni\Services\ConfigService();
// Get token information
\$accessToken = \$configService->get('oauth_access_token');
\$refreshToken = \$configService->get('oauth_refresh_token');
\$expiresAt = \$configService->get('oauth_expires_at');
if (empty(\$accessToken)) {
echo 'NO_TOKEN';
exit(0);
}
\$currentTime = time();
\$expiryTime = \$expiresAt ? (int)\$expiresAt : 0;
\$timeUntilExpiry = \$expiryTime - \$currentTime;
// Output format: STATUS|EXPIRES_IN|HAS_REFRESH_TOKEN
echo 'VALID|' . \$timeUntilExpiry . '|' . (!empty(\$refreshToken) ? '1' : '0');
} catch (Exception \$e) {
echo 'ERROR|' . \$e->getMessage();
}
" 2>/dev/null); then
log_error "Failed to get token information"
return 1
fi
echo "$token_info"
}
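# Example outputs: "VALID|3540|1" (token valid, ~59 minutes left, refresh token present),
# "NO_TOKEN" when no access token is stored, or "ERROR|<message>" on failure.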
# Check if token needs refresh
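# Return codes: 0 = refresh needed, 1 = error, 2 = no token configured, 3 = refresh not needed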
needs_refresh() {
local token_info
token_info=$(get_token_info)
if [[ "$token_info" == "NO_TOKEN" ]]; then
log_warning "No OAuth token found"
return 2 # Special case: no token at all
fi
if [[ "$token_info" =~ ^ERROR\| ]]; then
log_error "Error checking token: ${token_info#ERROR|}"
return 1
fi
IFS='|' read -r status expires_in has_refresh <<< "$token_info"
if [[ "$status" != "VALID" ]]; then
log_error "Token is not valid: $status"
return 1
fi
log_info "Token expires in ${expires_in} seconds"
# Check if we have a refresh token
if [[ "$has_refresh" != "1" ]]; then
log_error "No refresh token available"
return 1
fi
# Check if refresh is needed
if [[ "$FORCE_REFRESH" == true ]]; then
log_info "Force refresh requested"
return 0
fi
if [[ "$expires_in" -le "$REFRESH_THRESHOLD" ]]; then
log_info "Token needs refresh (expires in ${expires_in}s, threshold: ${REFRESH_THRESHOLD}s)"
return 0
fi
log_info "Token refresh not needed (expires in ${expires_in}s)"
return 3 # No refresh needed
}
# Perform token refresh
refresh_token() {
local attempt=1
while [[ $attempt -le $MAX_ATTEMPTS ]]; do
log_info "Token refresh attempt $attempt/$MAX_ATTEMPTS"
if [[ "$DRY_RUN" == true ]]; then
log_info "Would attempt to refresh OAuth token"
return 0
fi
# Attempt refresh using PHP
local refresh_result
if refresh_result=$(php -r "
require_once '$MODULE_DIR/config/bootstrap.php';
try {
require_once '$MODULE_DIR/src/Services/AuthService.php';
\$authService = new DeskMoloni\Services\AuthService();
\$result = \$authService->refreshToken();
if (\$result['success']) {
echo 'SUCCESS|New token expires in ' . \$result['expires_in'] . ' seconds';
} else {
echo 'FAILED|' . (\$result['error'] ?? 'Unknown error');
}
} catch (Exception \$e) {
echo 'ERROR|' . \$e->getMessage();
}
" 2>/dev/null); then
IFS='|' read -r result_status result_message <<< "$refresh_result"
case "$result_status" in
SUCCESS)
log_success "Token refreshed successfully: $result_message"
return 0
;;
FAILED)
log_error "Token refresh failed: $result_message"
;;
ERROR)
log_error "Error during token refresh: $result_message"
;;
*)
log_error "Unexpected refresh result: $refresh_result"
;;
esac
else
log_error "Failed to execute token refresh"
fi
# Increment attempt counter
((attempt++))
# Wait before retry (delay increases with each attempt)
if [[ $attempt -le $MAX_ATTEMPTS ]]; then
local wait_time=$((RETRY_DELAY * attempt))
log_info "Retrying in ${wait_time} seconds..."
sleep "$wait_time"
fi
done
log_error "Token refresh failed after $MAX_ATTEMPTS attempts"
return 1
}
# Send notification about token issues
send_notification() {
local subject="$1"
local message="$2"
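# Note: subject and message are interpolated into the inline PHP below as
# single-quoted strings, so they must not contain single quotes.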
# Log the notification
log_warning "NOTIFICATION: $subject - $message"
# Try to send notification via PHP if notification system is configured
php -r "
require_once '$MODULE_DIR/config/bootstrap.php';
try {
require_once '$MODULE_DIR/src/Services/NotificationService.php';
\$notificationService = new DeskMoloni\Services\NotificationService();
\$notificationService->sendAlert('$subject', '$message');
} catch (Exception \$e) {
// Notification service may not be configured, that's OK
}
" 2>/dev/null || true
}
# Check token status and report
check_token_status() {
local token_info
token_info=$(get_token_info)
log_info "=== TOKEN STATUS REPORT ==="
case "$token_info" in
NO_TOKEN)
log_error "❌ No OAuth token configured"
log_info "Please configure OAuth credentials in the admin panel"
return 1
;;
ERROR*)
log_error "❌ Error checking token: ${token_info#ERROR|}"
return 1
;;
VALID*)
IFS='|' read -r status expires_in has_refresh <<< "$token_info"
local hours=$((expires_in / 3600))
local minutes=$(((expires_in % 3600) / 60))
if [[ "$expires_in" -gt "$REFRESH_THRESHOLD" ]]; then
log_success "✅ Token is valid and fresh"
log_info " Expires in: ${hours}h ${minutes}m"
log_info " Refresh token: $([ "$has_refresh" == "1" ] && echo "Available" || echo "Missing")"
elif [[ "$expires_in" -gt 0 ]]; then
log_warning "⚠️ Token expires soon"
log_info " Expires in: ${hours}h ${minutes}m"
log_info " Refresh token: $([ "$has_refresh" == "1" ] && echo "Available" || echo "Missing")"
else
log_error "❌ Token has expired"
log_info " Expired: $((-expires_in)) seconds ago"
log_info " Refresh token: $([ "$has_refresh" == "1" ] && echo "Available" || echo "Missing")"
fi
return 0
;;
*)
log_error "❌ Unknown token status: $token_info"
return 1
;;
esac
}
# Main execution function
main() {
log_info "Starting OAuth token refresh check"
# Check if only status check is requested
if [[ "$CHECK_ONLY" == true ]]; then
check_token_status
exit $?
fi
# Check if token needs refresh
local refresh_needed=0
needs_refresh || refresh_needed=$?
case $refresh_needed in
0) # Needs refresh
if refresh_token; then
log_success "Token refresh completed successfully"
# Verify the new token
local new_token_info
new_token_info=$(get_token_info)
if [[ "$new_token_info" =~ ^VALID\|([0-9]+)\| ]]; then
local new_expires_in="${BASH_REMATCH[1]}"
local new_hours=$((new_expires_in / 3600))
log_success "New token expires in ${new_hours} hours"
fi
else
log_error "Token refresh failed"
send_notification "OAuth Token Refresh Failed" "Failed to refresh Moloni API token after $MAX_ATTEMPTS attempts. Manual intervention may be required."
exit 1
fi
;;
1) # Error
log_error "Error checking token refresh requirements"
exit 1
;;
2) # No token
log_warning "No OAuth token configured - skipping refresh"
send_notification "OAuth Token Missing" "No OAuth token is configured for Moloni API. Please configure OAuth credentials."
exit 0
;;
3) # No refresh needed
log_info "Token refresh not required at this time"
exit 0
;;
*)
log_error "Unexpected error code: $refresh_needed"
exit 1
;;
esac
}
# Cleanup function
cleanup() {
# Remove lock file if it exists and we created it
if [[ -f "$LOCK_FILE" ]] && [[ "${LOCK_ACQUIRED:-0}" == "1" ]]; then
rm -f "$LOCK_FILE"
fi
}
# Set up cleanup trap
trap cleanup EXIT
# Acquire lock to prevent concurrent execution
acquire_lock() {
if [[ -f "$LOCK_FILE" ]]; then
local lock_pid
lock_pid=$(cat "$LOCK_FILE" 2>/dev/null || echo "")
if [[ -n "$lock_pid" ]] && kill -0 "$lock_pid" 2>/dev/null; then
log_warning "Another token refresh process is running (PID: $lock_pid)"
exit 0
else
log_info "Removing stale lock file"
rm -f "$LOCK_FILE"
fi
fi
echo $$ > "$LOCK_FILE"
export LOCK_ACQUIRED=1
log_info "Lock acquired (PID: $$)"
}
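# Note: the cron entry installed by scripts/setup_cron.sh additionally wraps this
# script in flock on the same lock file.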
# Initialize and run
initialize() {
ensure_directories
acquire_lock
main "$@"
}
# Execute if called directly
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
initialize "$@"
fi