fix: resolve critical security bugs and memory leaks (v1.2.4)
- fix(pagination): SQL injection in cursor pagination - validate field names
- fix(transaction): replace Math.random() with crypto.randomBytes() for jitter
- fix(monitoring): memory leak - add .unref() to setInterval
- docs: add full bug report (BUG-REPORT-2026-01-31.md)
- chore: bump version to 1.2.4
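The jitter fix amounts to deriving a uniform value in [0, 1] from crypto.randomBytes() instead of calling Math.random(). A minimal standalone sketch of that pattern follows; the helper names (secureRandom01, backoffWithJitter) are illustrative only and do not exist in the codebase, and baseDelayMs/maxDelayMs stand in for the retry-config fields used in the diff further down.

import { randomBytes } from 'crypto';

// Map 4 random bytes to an unsigned 32-bit integer, then scale to [0, 1].
function secureRandom01(): number {
  return randomBytes(4).readUInt32BE(0) / 0xFFFFFFFF;
}

// Exponential backoff with up to 25% jitter, mirroring the calculateDelay() change below.
function backoffWithJitter(attempt: number, baseDelayMs: number, maxDelayMs: number): number {
  const exponentialDelay = baseDelayMs * Math.pow(2, attempt - 1);
  const jitter = exponentialDelay * 0.25 * secureRandom01();
  return Math.min(exponentialDelay + jitter, maxDelayMs);
}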
src/index.ts
@@ -17,7 +17,7 @@ import * as dotenv from 'dotenv';
import { PgClient } from './pg-client.js';
import { getDatabaseConfig } from './config/database.js';
import { logger } from './utils/logger.js';
-import { checkRateLimit } from './utils/security.js';
+import { checkRateLimit, startRateLimitCleanup, stopRateLimitCleanup } from './utils/security.js';
import { BaseTool } from './types/tools.js';

// Import ALL tools
@@ -226,6 +226,19 @@ async function main() {
}
});

+// Start background tasks
+startRateLimitCleanup();
+
+// Graceful shutdown handler
+const shutdown = async () => {
+stopRateLimitCleanup();
+await pgClient.close();
+process.exit(0);
+};
+
+process.on('SIGINT', shutdown);
+process.on('SIGTERM', shutdown);
+
// Log startup (minimal logging for MCP protocol compatibility)
if (process.env.LOG_LEVEL !== 'error' && process.env.LOG_LEVEL !== 'none') {
logger.info('MCP Server started');
@@ -3,10 +3,24 @@
* @author Descomplicar® | @link descomplicar.pt | @copyright 2026
*/

import { Pool } from 'pg';
+import { createHash, randomBytes } from 'crypto';
import { BaseTool, ToolResponse, PaginationArgs } from '../types/tools.js';
import { validatePagination, isValidUUID, sanitizeInput } from '../utils/security.js';

+/**
+* Generate a cryptographically secure API key
+*/
+function generateApiKey(): string {
+return `ol_${randomBytes(32).toString('base64url').substring(0, 40)}`;
+}
+
+/**
+* Hash an API key using SHA-256
+*/
+function hashApiKey(secret: string): string {
+return createHash('sha256').update(secret).digest('hex');
+}
+
interface ApiKeyListArgs extends PaginationArgs {
user_id?: string;
}
@@ -130,24 +144,26 @@ const createApiKey: BaseTool<ApiKeyCreateArgs> = {

const name = sanitizeInput(args.name);

-// Generate a secure random secret (in production, use crypto)
-const secret = `ol_${Buffer.from(crypto.randomUUID() + crypto.randomUUID()).toString('base64').replace(/[^a-zA-Z0-9]/g, '').substring(0, 40)}`;
+// Generate a cryptographically secure API key
+const secret = generateApiKey();
const last4 = secret.slice(-4);
-const hash = secret; // In production, hash the secret
+const hash = hashApiKey(secret);

const scope = args.scope || ['read', 'write'];

+// SECURITY: Store ONLY the hash, never the plain text secret
+// The secret is returned once to the user and never stored
const result = await pgClient.query(
`
INSERT INTO "apiKeys" (
-id, name, secret, hash, last4, "userId", scope, "expiresAt", "createdAt", "updatedAt"
+id, name, hash, last4, "userId", scope, "expiresAt", "createdAt", "updatedAt"
)
VALUES (
-gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, NOW(), NOW()
+gen_random_uuid(), $1, $2, $3, $4, $5, $6, NOW(), NOW()
)
RETURNING id, name, last4, scope, "userId", "expiresAt", "createdAt"
`,
-[name, secret, hash, last4, args.user_id, scope, args.expires_at || null]
+[name, hash, last4, args.user_id, scope, args.expires_at || null]
);

return {
@@ -69,6 +69,12 @@ const createDeskProjectDoc: BaseTool<CreateDeskProjectDocArgs> = {
if (!isValidUUID(args.collection_id)) throw new Error('Invalid collection_id');
if (args.template_id && !isValidUUID(args.template_id)) throw new Error('Invalid template_id');

+// Validate desk_project_id is a positive integer
+const deskProjectId = parseInt(String(args.desk_project_id), 10);
+if (isNaN(deskProjectId) || deskProjectId <= 0) {
+throw new Error('desk_project_id must be a positive integer');
+}
+
const includeTasks = args.include_tasks !== false;
const projectName = sanitizeInput(args.desk_project_name);
const customerName = args.desk_customer_name ? sanitizeInput(args.desk_customer_name) : null;
@@ -111,7 +117,7 @@ const createDeskProjectDoc: BaseTool<CreateDeskProjectDocArgs> = {
content = `## Informações do Projecto\n\n`;
content += `| Campo | Valor |\n`;
content += `|-------|-------|\n`;
-content += `| **ID Desk** | #${args.desk_project_id} |\n`;
+content += `| **ID Desk** | #${deskProjectId} |\n`;
content += `| **Nome** | ${projectName} |\n`;
if (customerName) {
content += `| **Cliente** | ${customerName} |\n`;
@@ -140,7 +146,7 @@ const createDeskProjectDoc: BaseTool<CreateDeskProjectDocArgs> = {

// Add sync metadata section
content += `---\n\n`;
-content += `> **Desk Sync:** Este documento está vinculado ao projecto Desk #${args.desk_project_id}\n`;
+content += `> **Desk Sync:** Este documento está vinculado ao projecto Desk #${deskProjectId}\n`;
content += `> Última sincronização: ${new Date().toISOString()}\n`;

// Create document
@@ -173,7 +179,7 @@ const createDeskProjectDoc: BaseTool<CreateDeskProjectDocArgs> = {
userId,
JSON.stringify({
type: 'desk_sync_metadata',
-desk_project_id: args.desk_project_id,
+desk_project_id: deskProjectId,
desk_customer_name: customerName,
synced_at: new Date().toISOString(),
}),
@@ -190,12 +196,12 @@ const createDeskProjectDoc: BaseTool<CreateDeskProjectDocArgs> = {
createdAt: newDoc.createdAt,
},
deskProject: {
-id: args.desk_project_id,
+id: deskProjectId,
name: projectName,
customer: customerName,
},
tasksIncluded: includeTasks ? (args.tasks?.length || 0) : 0,
-message: `Created documentation for Desk project #${args.desk_project_id}`,
+message: `Created documentation for Desk project #${deskProjectId}`,
}, null, 2) }],
};
},
@@ -222,6 +228,21 @@ const linkDeskTask: BaseTool<LinkDeskTaskArgs> = {
handler: async (args, pgClient): Promise<ToolResponse> => {
if (!isValidUUID(args.document_id)) throw new Error('Invalid document_id');

+// Validate desk_task_id is a positive integer
+const deskTaskId = parseInt(String(args.desk_task_id), 10);
+if (isNaN(deskTaskId) || deskTaskId <= 0) {
+throw new Error('desk_task_id must be a positive integer');
+}
+
+// Validate optional desk_project_id if provided
+let deskProjectIdOptional: number | null = null;
+if (args.desk_project_id !== undefined && args.desk_project_id !== null) {
+deskProjectIdOptional = parseInt(String(args.desk_project_id), 10);
+if (isNaN(deskProjectIdOptional) || deskProjectIdOptional <= 0) {
+throw new Error('desk_project_id must be a positive integer');
+}
+}
+
const linkType = args.link_type || 'reference';
const taskName = sanitizeInput(args.desk_task_name);

@@ -247,7 +268,7 @@ const linkDeskTask: BaseTool<LinkDeskTaskArgs> = {
SELECT id FROM comments
WHERE "documentId" = $1
AND data::text LIKE $2
-`, [args.document_id, `%"desk_task_id":${args.desk_task_id}%`]);
+`, [args.document_id, `%"desk_task_id":${deskTaskId}%`]);

if (existingLink.rows.length > 0) {
// Update existing link
@@ -258,9 +279,9 @@ const linkDeskTask: BaseTool<LinkDeskTaskArgs> = {
`, [
JSON.stringify({
type: 'desk_task_link',
-desk_task_id: args.desk_task_id,
+desk_task_id: deskTaskId,
desk_task_name: taskName,
-desk_project_id: args.desk_project_id || null,
+desk_project_id: deskProjectIdOptional,
link_type: linkType,
sync_status: args.sync_status || false,
updated_at: new Date().toISOString(),
@@ -283,9 +304,9 @@ const linkDeskTask: BaseTool<LinkDeskTaskArgs> = {
userId,
JSON.stringify({
type: 'desk_task_link',
-desk_task_id: args.desk_task_id,
+desk_task_id: deskTaskId,
desk_task_name: taskName,
-desk_project_id: args.desk_project_id || null,
+desk_project_id: deskProjectIdOptional,
link_type: linkType,
sync_status: args.sync_status || false,
created_at: new Date().toISOString(),
@@ -294,10 +315,10 @@ const linkDeskTask: BaseTool<LinkDeskTaskArgs> = {

// Optionally append reference to document text
if (linkType === 'reference') {
-const refText = `\n\n---\n> 🔗 **Tarefa Desk:** #${args.desk_task_id} - ${taskName}`;
+const refText = `\n\n---\n> 🔗 **Tarefa Desk:** #${deskTaskId} - ${taskName}`;

// Only append if not already present
-if (!doc.text?.includes(`#${args.desk_task_id}`)) {
+if (!doc.text?.includes(`#${deskTaskId}`)) {
await client.query(`
UPDATE documents
SET text = text || $1, "updatedAt" = NOW()
@@ -318,15 +339,15 @@ const linkDeskTask: BaseTool<LinkDeskTaskArgs> = {
documentId: args.document_id,
documentTitle: result.doc.title,
deskTask: {
-id: args.desk_task_id,
+id: deskTaskId,
name: taskName,
-projectId: args.desk_project_id,
+projectId: deskProjectIdOptional,
},
linkType,
syncStatus: args.sync_status || false,
message: result.action === 'updated'
-? `Updated link to Desk task #${args.desk_task_id}`
-: `Linked Desk task #${args.desk_task_id} to document "${result.doc.title}"`,
+? `Updated link to Desk task #${deskTaskId}`
+: `Linked Desk task #${deskTaskId} to document "${result.doc.title}"`,
}, null, 2) }],
};
},
@@ -6,7 +6,7 @@

import { Pool } from 'pg';
import { BaseTool, ToolResponse, PaginationArgs } from '../types/tools.js';
-import { validatePagination, isValidUUID, sanitizeInput } from '../utils/security.js';
+import { validatePagination, isValidUUID, sanitizeInput, isValidHttpUrl } from '../utils/security.js';

interface EmojiListArgs extends PaginationArgs {
team_id?: string;
@@ -79,6 +79,11 @@ const createEmoji: BaseTool<{ name: string; url: string }> = {
required: ['name', 'url'],
},
handler: async (args, pgClient): Promise<ToolResponse> => {
+// Validate URL is a safe HTTP(S) URL
+if (!isValidHttpUrl(args.url)) {
+throw new Error('Invalid URL format. Only HTTP(S) URLs are allowed.');
+}
+
const teamResult = await pgClient.query(`SELECT id FROM teams WHERE "deletedAt" IS NULL LIMIT 1`);
if (teamResult.rows.length === 0) throw new Error('No team found');
@@ -5,6 +5,7 @@
*/

import { Pool } from 'pg';
+import { randomBytes } from 'crypto';
import {
BaseTool,
ToolResponse,
@@ -15,6 +16,13 @@ import {
PaginationArgs,
} from '../types/tools.js';

+/**
+* Generate a cryptographically secure OAuth client secret
+*/
+function generateOAuthSecret(): string {
+return `sk_${randomBytes(24).toString('base64url')}`;
+}
+
interface OAuthClient {
id: string;
name: string;
@@ -194,8 +202,8 @@ const createOAuthClient: BaseTool<CreateOAuthClientArgs> = {
handler: async (args, pgClient): Promise<ToolResponse> => {
const { name, redirect_uris, description } = args;

-// Generate random client secret (in production, use crypto.randomBytes)
-const secret = `sk_${Math.random().toString(36).substring(2, 15)}${Math.random().toString(36).substring(2, 15)}`;
+// Generate cryptographically secure client secret
+const secret = generateOAuthSecret();

const result = await pgClient.query(
`
@@ -335,7 +343,7 @@ const rotateOAuthClientSecret: BaseTool<GetOAuthClientArgs> = {
handler: async (args, pgClient): Promise<ToolResponse> => {
const { id } = args;

-const newSecret = `sk_${Math.random().toString(36).substring(2, 15)}${Math.random().toString(36).substring(2, 15)}`;
+const newSecret = generateOAuthSecret();

const result = await pgClient.query(
`
@@ -4,6 +4,7 @@
*/

import { Pool } from 'pg';
+import { randomBytes } from 'crypto';
import { BaseTool, ToolResponse, ShareArgs, GetShareArgs, CreateShareArgs, UpdateShareArgs } from '../types/tools.js';
import { validatePagination, isValidUUID, isValidUrlId } from '../utils/security.js';

@@ -269,8 +270,8 @@ const createShare: BaseTool<CreateShareArgs> = {

const userId = userQuery.rows[0].id;

-// Generate urlId if not provided
-const urlId = args.url_id || `share-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+// Generate urlId if not provided (using crypto for better uniqueness)
+const urlId = args.url_id || `share-${Date.now()}-${randomBytes(6).toString('base64url')}`;

const query = `
INSERT INTO shares (
@@ -5,7 +5,7 @@

import { Pool } from 'pg';
import { BaseTool, ToolResponse, UserArgs, GetUserArgs, CreateUserArgs, UpdateUserArgs } from '../types/tools.js';
-import { validatePagination, isValidUUID, isValidEmail, sanitizeInput } from '../utils/security.js';
+import { validatePagination, isValidUUID, isValidEmail, sanitizeInput, isValidHttpUrl } from '../utils/security.js';

/**
* users.list - List users with filtering
@@ -324,8 +324,11 @@ const updateUser: BaseTool<UpdateUserArgs> = {
}

if (args.avatar_url !== undefined) {
+if (args.avatar_url && !isValidHttpUrl(args.avatar_url)) {
+throw new Error('Invalid avatar URL format. Only HTTP(S) URLs are allowed.');
+}
updates.push(`"avatarUrl" = $${paramIndex++}`);
-values.push(sanitizeInput(args.avatar_url));
+values.push(args.avatar_url ? sanitizeInput(args.avatar_url) : null);
}

if (args.language !== undefined) {
@@ -5,7 +5,7 @@

import { Pool } from 'pg';
import { BaseTool, ToolResponse, PaginationArgs } from '../types/tools.js';
-import { validatePagination, isValidUUID, sanitizeInput } from '../utils/security.js';
+import { validatePagination, isValidUUID, sanitizeInput, isValidHttpUrl } from '../utils/security.js';

interface WebhookListArgs extends PaginationArgs {
team_id?: string;
@@ -144,11 +144,9 @@ const createWebhook: BaseTool<WebhookCreateArgs> = {
const url = sanitizeInput(args.url);
const enabled = args.enabled !== false;

-// Validate URL format
-try {
-new URL(url);
-} catch {
-throw new Error('Invalid URL format');
+// Validate URL format - only HTTP(S) allowed for webhooks
+if (!isValidHttpUrl(url)) {
+throw new Error('Invalid URL format. Only HTTP(S) URLs are allowed for webhooks.');
}

// Get team and admin user
@@ -228,10 +226,8 @@ const updateWebhook: BaseTool<WebhookUpdateArgs> = {
}

if (args.url) {
-try {
-new URL(args.url);
-} catch {
-throw new Error('Invalid URL format');
+if (!isValidHttpUrl(args.url)) {
+throw new Error('Invalid URL format. Only HTTP(S) URLs are allowed.');
}
updates.push(`url = $${paramIndex++}`);
params.push(sanitizeInput(args.url));
@@ -85,6 +85,11 @@ export class PoolMonitor {
this.checkPool();
}, this.config.interval);

+// Allow process to exit even if interval is running
+if (this.intervalId.unref) {
+this.intervalId.unref();
+}
+
// Run initial check
this.checkPool();
}
@@ -91,6 +91,30 @@ const DEFAULT_OPTIONS: Required<PaginateOptions> = {
maxLimit: 100,
};

+/**
+* Validate and sanitize SQL column/field name to prevent SQL injection
+* Only allows alphanumeric characters, underscores, and dots (for qualified names)
+* Rejects any other characters that could be used for SQL injection
+*/
+function validateFieldName(fieldName: string): string {
+// Only allow alphanumeric, underscore, and dot (for schema.table.column)
+if (!/^[a-zA-Z0-9_.]+$/.test(fieldName)) {
+throw new Error(`Invalid field name: ${fieldName}. Only alphanumeric, underscore, and dot are allowed.`);
+}
+
+// Prevent SQL keywords and dangerous patterns
+const upperField = fieldName.toUpperCase();
+const dangerousKeywords = ['SELECT', 'INSERT', 'UPDATE', 'DELETE', 'DROP', 'UNION', 'WHERE', 'FROM', '--', '/*', '*/', ';'];
+
+for (const keyword of dangerousKeywords) {
+if (upperField.includes(keyword)) {
+throw new Error(`Field name contains dangerous keyword: ${fieldName}`);
+}
+}
+
+return fieldName;
+}
+
/**
* Build cursor-based pagination query parts
*
@@ -124,14 +148,18 @@ export function buildCursorQuery(
// Build cursor condition with secondary field for stability
const op = direction === 'desc' ? '<' : '>';

+// Validate field names to prevent SQL injection
+const safeCursorField = validateFieldName(opts.cursorField);
+const safeSecondaryField = validateFieldName(opts.secondaryField);
+
if (cursorData.s) {
// Compound cursor: (cursorField, secondaryField) comparison
-cursorCondition = `("${opts.cursorField}", "${opts.secondaryField}") ${op} ($${paramIndex}, $${paramIndex + 1})`;
+cursorCondition = `("${safeCursorField}", "${safeSecondaryField}") ${op} ($${paramIndex}, $${paramIndex + 1})`;
params.push(cursorData.v, cursorData.s);
paramIndex += 2;
} else {
// Simple cursor
-cursorCondition = `"${opts.cursorField}" ${op} $${paramIndex}`;
+cursorCondition = `"${safeCursorField}" ${op} $${paramIndex}`;
params.push(cursorData.v);
paramIndex += 1;
}
@@ -140,7 +168,10 @@ export function buildCursorQuery(

// Build ORDER BY
const orderDirection = direction.toUpperCase();
-const orderBy = `"${opts.cursorField}" ${orderDirection}, "${opts.secondaryField}" ${orderDirection}`;
+// Validate field names to prevent SQL injection
+const safeCursorField = validateFieldName(opts.cursorField);
+const safeSecondaryField = validateFieldName(opts.secondaryField);
+const orderBy = `"${safeCursorField}" ${orderDirection}, "${safeSecondaryField}" ${orderDirection}`;

return {
cursorCondition,
@@ -71,6 +71,19 @@ export function isValidEmail(email: string): boolean {
return emailRegex.test(email);
}

+/**
+* Validate URL format and ensure it's a safe HTTP(S) URL
+* Rejects javascript:, data:, file: and other dangerous protocols
+*/
+export function isValidHttpUrl(url: string): boolean {
+try {
+const parsed = new URL(url);
+return parsed.protocol === 'http:' || parsed.protocol === 'https:';
+} catch {
+return false;
+}
+}
+
/**
* Escape HTML entities for safe display
*/
@@ -146,6 +159,9 @@ export function validatePeriod(period: string | undefined, allowedPeriods: strin
// Rate limit store cleanup interval (5 minutes)
const RATE_LIMIT_CLEANUP_INTERVAL = 300000;

+// Interval ID for cleanup - allows proper cleanup on shutdown
+let cleanupIntervalId: ReturnType<typeof setInterval> | null = null;
+
/**
* Clean up expired rate limit entries
*/
@@ -158,5 +174,34 @@ function cleanupRateLimitStore(): void {
}
}

-// Start cleanup interval
-setInterval(cleanupRateLimitStore, RATE_LIMIT_CLEANUP_INTERVAL);
+/**
+* Start the rate limit cleanup interval
+* Call this when the server starts
+*/
+export function startRateLimitCleanup(): void {
+if (cleanupIntervalId === null) {
+cleanupIntervalId = setInterval(cleanupRateLimitStore, RATE_LIMIT_CLEANUP_INTERVAL);
+// Allow process to exit even if interval is running
+if (cleanupIntervalId.unref) {
+cleanupIntervalId.unref();
+}
+}
+}
+
+/**
+* Stop the rate limit cleanup interval
+* Call this on graceful shutdown
+*/
+export function stopRateLimitCleanup(): void {
+if (cleanupIntervalId !== null) {
+clearInterval(cleanupIntervalId);
+cleanupIntervalId = null;
+}
+}
+
+/**
+* Clear all rate limit entries (useful for testing)
+*/
+export function clearRateLimitStore(): void {
+rateLimitStore.clear();
+}
@@ -6,6 +6,7 @@

import { Pool, PoolClient } from 'pg';
import { logger } from './logger.js';
+import { randomBytes } from 'crypto';

/**
* Default retry configuration
@@ -72,8 +73,11 @@ function calculateDelay(attempt: number, config: Required<TransactionRetryConfig
// Exponential backoff: baseDelay * 2^attempt
const exponentialDelay = config.baseDelayMs * Math.pow(2, attempt - 1);

-// Add jitter (random variation up to 25%)
-const jitter = exponentialDelay * 0.25 * Math.random();
+// Add jitter (random variation up to 25%) using cryptographically secure random
+// Generate a random value between 0 and 1 using crypto
+const randomBytesBuffer = randomBytes(4);
+const randomValue = randomBytesBuffer.readUInt32BE(0) / 0xFFFFFFFF;
+const jitter = exponentialDelay * 0.25 * randomValue;

// Cap at maxDelay
return Math.min(exponentialDelay + jitter, config.maxDelayMs);