fix(security): Resolve 21 SQL injection vulnerabilities and add transactions

Security fixes (v1.2.2):
- Fix SQL injection in analytics.ts (16 occurrences)
- Fix SQL injection in advanced-search.ts (1 occurrence)
- Fix SQL injection in search-queries.ts (1 occurrence)
- Add validateDaysInterval(), isValidISODate(), validatePeriod() to security.ts
- Use make_interval(days => N) for safe PostgreSQL intervals (see the sketch after this list)
- Validate UUIDs BEFORE string construction
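
Illustrative sketch of the validate-then-bind pattern behind the bullets above. The helper body and the query are hypothetical (the real validateDaysInterval() in security.ts may have a different signature and bounds); the point is that the day count is checked first and then passed as a bind parameter to make_interval(), never interpolated into the SQL string:

    import { Pool } from 'pg';

    // Hypothetical: mirrors what validateDaysInterval() in security.ts is described as doing.
    function validateDaysInterval(days: number): number {
      if (!Number.isInteger(days) || days < 1 || days > 3650) {
        throw new Error('Invalid days interval');
      }
      return days;
    }

    async function countRecentDocuments(pool: Pool, days: number): Promise<number> {
      const safeDays = validateDaysInterval(days);
      // Safe: bind parameter + make_interval(), never `INTERVAL '${days} days'`
      const result = await pool.query(
        `SELECT COUNT(*)::int AS n FROM documents
         WHERE "createdAt" > NOW() - make_interval(days => $1::int)`,
        [safeDays]
      );
      return result.rows[0].n;
    }

The same idea applies to the UUID bullet: isValidUUID() rejects malformed IDs before any SQL string is constructed around them.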

Transaction support:
- bulk-operations.ts: 6 atomic operations with withTransaction()
- desk-sync.ts: 2 operations with transactions
- export-import.ts: 1 operation with transaction

Rate limiting:
- Add automatic cleanup of expired entries (every 5 minutes; sketch below)
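
A minimal sketch of that cleanup, assuming an in-memory Map-based limiter (the store, names, and entry shape are assumptions; the limiter itself is not part of this diff):

    interface RateLimitEntry {
      count: number;
      resetAt: number; // epoch ms when the current window expires
    }

    const rateLimits = new Map<string, RateLimitEntry>();

    // Every 5 minutes, drop entries whose window has expired so the map
    // cannot grow without bound under many distinct client keys.
    const CLEANUP_INTERVAL_MS = 5 * 60 * 1000;
    const timer = setInterval(() => {
      const now = Date.now();
      for (const [key, entry] of rateLimits) {
        if (entry.resetAt <= now) rateLimits.delete(key);
      }
    }, CLEANUP_INTERVAL_MS);
    timer.unref(); // Node-only: do not keep the process alive just for cleanup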

Audit:
- Archive previous audit docs to docs/audits/2026-01-31-v1.2.1/
- Create new AUDIT-REQUEST.md for v1.2.2 verification

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-31 14:47:41 +00:00
parent 7895f31394
commit 7c83a9e168
14 changed files with 3195 additions and 487 deletions


@@ -4,10 +4,28 @@
* @author Descomplicar® | @link descomplicar.pt | @copyright 2026
*/
import { Pool, PoolClient } from 'pg';
import { BaseTool, ToolResponse } from '../types/tools.js';
import { isValidUUID, sanitizeInput } from '../utils/security.js';
/**
 * Execute operations within a transaction
 */
async function withTransaction<T>(pool: Pool, callback: (client: PoolClient) => Promise<T>): Promise<T> {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    const result = await callback(client);
    await client.query('COMMIT');
    return result;
  } catch (error) {
    await client.query('ROLLBACK');
    throw error;
  } finally {
    client.release();
  }
}

interface ExportCollectionArgs {
  collection_id: string;
  include_children?: boolean;
@@ -188,105 +206,110 @@ const importMarkdownFolder: BaseTool<ImportMarkdownArgs> = {
    const createHierarchy = args.create_hierarchy !== false;

    // Use transaction for atomic import (all documents or none)
    const { imported, errors } = await withTransaction(pgClient, async (client) => {
      // Verify collection exists
      const collection = await client.query(
        `SELECT id, "teamId" FROM collections WHERE id = $1 AND "deletedAt" IS NULL`,
        [args.collection_id]
      );
      if (collection.rows.length === 0) throw new Error('Collection not found');

      const teamId = collection.rows[0].teamId;

      // Get admin user for createdById
      const userResult = await client.query(
        `SELECT id FROM users WHERE role = 'admin' AND "deletedAt" IS NULL LIMIT 1`
      );
      if (userResult.rows.length === 0) throw new Error('No admin user found');
      const userId = userResult.rows[0].id;

      const importedList: Array<{ id: string; title: string; path: string }> = [];
      const errorList: Array<{ title: string; error: string }> = [];
      const pathToId: Record<string, string> = {};

      // First pass: create all documents (sorted by path depth)
      const sortedDocs = [...args.documents].sort((a, b) => {
        const depthA = (a.parent_path || '').split('/').filter(Boolean).length;
        const depthB = (b.parent_path || '').split('/').filter(Boolean).length;
        return depthA - depthB;
      });

      for (const doc of sortedDocs) {
        try {
          let parentDocumentId: string | null = null;

          // Resolve parent if specified
          if (doc.parent_path && createHierarchy) {
            const parentPath = doc.parent_path.trim();

            if (pathToId[parentPath]) {
              parentDocumentId = pathToId[parentPath];
            } else {
              // Try to find existing parent by title
              const parentTitle = parentPath.split('/').pop();
              const existingParent = await client.query(
                `SELECT id FROM documents WHERE title = $1 AND "collectionId" = $2 AND "deletedAt" IS NULL LIMIT 1`,
                [parentTitle, args.collection_id]
              );

              if (existingParent.rows.length > 0) {
                parentDocumentId = existingParent.rows[0].id;
                if (parentDocumentId) {
                  pathToId[parentPath] = parentDocumentId;
                }
              }
            }
          }

          // Strip YAML frontmatter if present
          let content = doc.content;
          if (content.startsWith('---')) {
            const endOfFrontmatter = content.indexOf('---', 3);
            if (endOfFrontmatter !== -1) {
              content = content.substring(endOfFrontmatter + 3).trim();
            }
          }

          // Create document
          const result = await client.query(`
            INSERT INTO documents (
              id, title, text, emoji, "collectionId", "teamId", "parentDocumentId",
              "createdById", "lastModifiedById", template, "createdAt", "updatedAt"
            )
            VALUES (
              gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, $7, false, NOW(), NOW()
            )
            RETURNING id, title
          `, [
            sanitizeInput(doc.title),
            content,
            doc.emoji || null,
            args.collection_id,
            teamId,
            parentDocumentId,
            userId,
          ]);

          const newDoc = result.rows[0];
          const fullPath = doc.parent_path ? `${doc.parent_path}/${doc.title}` : doc.title;
          pathToId[fullPath] = newDoc.id;

          importedList.push({
            id: newDoc.id,
            title: newDoc.title,
            path: fullPath,
          });
        } catch (error) {
          errorList.push({
            title: doc.title,
            error: error instanceof Error ? error.message : String(error),
          });
        }
      }

      return { imported: importedList, errors: errorList };
    });

    return {
      content: [{ type: 'text', text: JSON.stringify({