fix(security): Resolve 21 SQL injection vulnerabilities and add transactions

Security fixes (v1.2.2):
- Fix SQL injection in analytics.ts (16 occurrences)
- Fix SQL injection in advanced-search.ts (1 occurrence)
- Fix SQL injection in search-queries.ts (1 occurrence)
- Add validateDaysInterval(), isValidISODate(), validatePeriod() to security.ts
- Use make_interval(days => N) for safe PostgreSQL intervals
- Validate UUIDs BEFORE string construction

Transaction support:
- bulk-operations.ts: 6 atomic operations with withTransaction()
- desk-sync.ts: 2 operations with transactions
- export-import.ts: 1 operation with transaction

Rate limiting:
- Add automatic cleanup of expired entries (every 5 minutes)

Audit:
- Archive previous audit docs to docs/audits/2026-01-31-v1.2.1/
- Create new AUDIT-REQUEST.md for v1.2.2 verification

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-01-31 14:47:41 +00:00
parent 7895f31394
commit 7c83a9e168
14 changed files with 3195 additions and 487 deletions

View File

@@ -6,7 +6,7 @@
import { Pool } from 'pg';
import { BaseTool, ToolResponse, PaginationArgs } from '../types/tools.js';
import { validatePagination, isValidUUID, sanitizeInput } from '../utils/security.js';
import { validatePagination, isValidUUID, sanitizeInput, validateDaysInterval } from '../utils/security.js';
interface AdvancedSearchArgs extends PaginationArgs {
query: string;
@@ -197,11 +197,14 @@ const searchRecent: BaseTool<PaginationArgs & { collection_id?: string; days?: n
},
handler: async (args, pgClient): Promise<ToolResponse> => {
const { limit, offset } = validatePagination(args.limit, args.offset);
const days = args.days || 7;
// Validate and sanitize days parameter
const safeDays = validateDaysInterval(args.days, 7, 365);
const conditions: string[] = [
'd."deletedAt" IS NULL',
'd."archivedAt" IS NULL',
`d."updatedAt" >= NOW() - INTERVAL '${days} days'`
`d."updatedAt" >= NOW() - make_interval(days => ${safeDays})`
];
const params: any[] = [];
let idx = 1;
@@ -229,7 +232,7 @@ const searchRecent: BaseTool<PaginationArgs & { collection_id?: string; days?: n
return {
content: [{ type: 'text', text: JSON.stringify({
data: result.rows,
days,
days: safeDays,
pagination: { limit, offset, total: result.rows.length }
}, null, 2) }],
};

View File

@@ -6,7 +6,7 @@
import { Pool } from 'pg';
import { BaseTool, ToolResponse } from '../types/tools.js';
import { isValidUUID } from '../utils/security.js';
import { isValidUUID, validateDaysInterval, isValidISODate, validatePeriod } from '../utils/security.js';
interface DateRangeArgs {
date_from?: string;
@@ -27,11 +27,23 @@ const getAnalyticsOverview: BaseTool<DateRangeArgs> = {
},
},
handler: async (args, pgClient): Promise<ToolResponse> => {
const dateCondition = args.date_from && args.date_to
? `AND "createdAt" BETWEEN '${args.date_from}' AND '${args.date_to}'`
: '';
// Validate date parameters if provided
if (args.date_from && !isValidISODate(args.date_from)) {
throw new Error('Invalid date_from format. Use ISO format (YYYY-MM-DD)');
}
if (args.date_to && !isValidISODate(args.date_to)) {
throw new Error('Invalid date_to format. Use ISO format (YYYY-MM-DD)');
}
// Document stats
// Build date condition with parameterized query
const dateParams: any[] = [];
let dateCondition = '';
if (args.date_from && args.date_to) {
dateCondition = `AND "createdAt" BETWEEN $1 AND $2`;
dateParams.push(args.date_from, args.date_to);
}
// Document stats (no date filter needed for totals)
const docStats = await pgClient.query(`
SELECT
COUNT(*) as "totalDocuments",
@@ -105,19 +117,32 @@ const getUserActivityAnalytics: BaseTool<{ user_id?: string; days?: number }> =
},
},
handler: async (args, pgClient): Promise<ToolResponse> => {
const days = args.days || 30;
const userCondition = args.user_id ? `AND u.id = '${args.user_id}'` : '';
// Validate user_id FIRST before using it
if (args.user_id && !isValidUUID(args.user_id)) {
throw new Error('Invalid user_id');
}
if (args.user_id && !isValidUUID(args.user_id)) throw new Error('Invalid user_id');
// Validate and sanitize days parameter
const safeDays = validateDaysInterval(args.days, 30, 365);
// Most active users
// Build query with safe interval (number is safe after validation)
const params: any[] = [];
let paramIdx = 1;
let userCondition = '';
if (args.user_id) {
userCondition = `AND u.id = $${paramIdx++}`;
params.push(args.user_id);
}
// Most active users - using make_interval for safety
const activeUsers = await pgClient.query(`
SELECT
u.id, u.name, u.email,
COUNT(DISTINCT d.id) FILTER (WHERE d."createdAt" >= NOW() - INTERVAL '${days} days') as "documentsCreated",
COUNT(DISTINCT d2.id) FILTER (WHERE d2."updatedAt" >= NOW() - INTERVAL '${days} days') as "documentsEdited",
COUNT(DISTINCT v."documentId") FILTER (WHERE v."createdAt" >= NOW() - INTERVAL '${days} days') as "documentsViewed",
COUNT(DISTINCT c.id) FILTER (WHERE c."createdAt" >= NOW() - INTERVAL '${days} days') as "commentsAdded"
COUNT(DISTINCT d.id) FILTER (WHERE d."createdAt" >= NOW() - make_interval(days => ${safeDays})) as "documentsCreated",
COUNT(DISTINCT d2.id) FILTER (WHERE d2."updatedAt" >= NOW() - make_interval(days => ${safeDays})) as "documentsEdited",
COUNT(DISTINCT v."documentId") FILTER (WHERE v."createdAt" >= NOW() - make_interval(days => ${safeDays})) as "documentsViewed",
COUNT(DISTINCT c.id) FILTER (WHERE c."createdAt" >= NOW() - make_interval(days => ${safeDays})) as "commentsAdded"
FROM users u
LEFT JOIN documents d ON d."createdById" = u.id
LEFT JOIN documents d2 ON d2."lastModifiedById" = u.id
@@ -127,7 +152,7 @@ const getUserActivityAnalytics: BaseTool<{ user_id?: string; days?: number }> =
GROUP BY u.id, u.name, u.email
ORDER BY "documentsCreated" DESC
LIMIT 20
`);
`, params);
// Activity by day of week
const activityByDay = await pgClient.query(`
@@ -135,7 +160,7 @@ const getUserActivityAnalytics: BaseTool<{ user_id?: string; days?: number }> =
EXTRACT(DOW FROM d."createdAt") as "dayOfWeek",
COUNT(*) as "documentsCreated"
FROM documents d
WHERE d."createdAt" >= NOW() - INTERVAL '${days} days'
WHERE d."createdAt" >= NOW() - make_interval(days => ${safeDays})
AND d."deletedAt" IS NULL
GROUP BY EXTRACT(DOW FROM d."createdAt")
ORDER BY "dayOfWeek"
@@ -147,7 +172,7 @@ const getUserActivityAnalytics: BaseTool<{ user_id?: string; days?: number }> =
EXTRACT(HOUR FROM d."createdAt") as "hour",
COUNT(*) as "documentsCreated"
FROM documents d
WHERE d."createdAt" >= NOW() - INTERVAL '${days} days'
WHERE d."createdAt" >= NOW() - make_interval(days => ${safeDays})
AND d."deletedAt" IS NULL
GROUP BY EXTRACT(HOUR FROM d."createdAt")
ORDER BY "hour"
@@ -158,7 +183,7 @@ const getUserActivityAnalytics: BaseTool<{ user_id?: string; days?: number }> =
activeUsers: activeUsers.rows,
activityByDayOfWeek: activityByDay.rows,
activityByHour: activityByHour.rows,
periodDays: days,
periodDays: safeDays,
}, null, 2) }],
};
},
@@ -177,11 +202,20 @@ const getContentInsights: BaseTool<{ collection_id?: string }> = {
},
},
handler: async (args, pgClient): Promise<ToolResponse> => {
const collectionCondition = args.collection_id
? `AND d."collectionId" = '${args.collection_id}'`
: '';
// Validate collection_id FIRST before using it
if (args.collection_id && !isValidUUID(args.collection_id)) {
throw new Error('Invalid collection_id');
}
if (args.collection_id && !isValidUUID(args.collection_id)) throw new Error('Invalid collection_id');
// Build parameterized query
const params: any[] = [];
let paramIdx = 1;
let collectionCondition = '';
if (args.collection_id) {
collectionCondition = `AND d."collectionId" = $${paramIdx++}`;
params.push(args.collection_id);
}
// Most viewed documents
const mostViewed = await pgClient.query(`
@@ -196,7 +230,7 @@ const getContentInsights: BaseTool<{ collection_id?: string }> = {
GROUP BY d.id, d.title, d.emoji, c.name
ORDER BY "viewCount" DESC
LIMIT 10
`);
`, params);
// Most starred documents
const mostStarred = await pgClient.query(`
@@ -211,7 +245,7 @@ const getContentInsights: BaseTool<{ collection_id?: string }> = {
HAVING COUNT(s.id) > 0
ORDER BY "starCount" DESC
LIMIT 10
`);
`, params);
// Stale documents (not updated in 90 days)
const staleDocuments = await pgClient.query(`
@@ -228,7 +262,7 @@ const getContentInsights: BaseTool<{ collection_id?: string }> = {
${collectionCondition}
ORDER BY d."updatedAt" ASC
LIMIT 20
`);
`, params);
// Documents without views
const neverViewed = await pgClient.query(`
@@ -244,7 +278,7 @@ const getContentInsights: BaseTool<{ collection_id?: string }> = {
${collectionCondition}
ORDER BY d."createdAt" DESC
LIMIT 20
`);
`, params);
return {
content: [{ type: 'text', text: JSON.stringify({
@@ -270,11 +304,19 @@ const getCollectionStats: BaseTool<{ collection_id?: string }> = {
},
},
handler: async (args, pgClient): Promise<ToolResponse> => {
const collectionCondition = args.collection_id
? `AND c.id = '${args.collection_id}'`
: '';
// Validate collection_id FIRST before using it
if (args.collection_id && !isValidUUID(args.collection_id)) {
throw new Error('Invalid collection_id');
}
if (args.collection_id && !isValidUUID(args.collection_id)) throw new Error('Invalid collection_id');
// Build parameterized query
const params: any[] = [];
let collectionCondition = '';
if (args.collection_id) {
collectionCondition = `AND c.id = $1`;
params.push(args.collection_id);
}
const stats = await pgClient.query(`
SELECT
@@ -293,7 +335,7 @@ const getCollectionStats: BaseTool<{ collection_id?: string }> = {
WHERE c."deletedAt" IS NULL ${collectionCondition}
GROUP BY c.id, c.name, c.icon, c.color
ORDER BY "documentCount" DESC
`);
`, params);
return {
content: [{ type: 'text', text: JSON.stringify({ data: stats.rows }, null, 2) }],
@@ -314,14 +356,18 @@ const getGrowthMetrics: BaseTool<{ period?: string }> = {
},
},
handler: async (args, pgClient): Promise<ToolResponse> => {
const period = args.period || 'month';
const intervals: Record<string, string> = {
week: '7 days',
month: '30 days',
quarter: '90 days',
year: '365 days',
// Validate period against allowed values
const allowedPeriods = ['week', 'month', 'quarter', 'year'];
const period = validatePeriod(args.period, allowedPeriods, 'month');
// Map periods to safe integer days (no string interpolation needed)
const periodDays: Record<string, number> = {
week: 7,
month: 30,
quarter: 90,
year: 365,
};
const interval = intervals[period] || '30 days';
const safeDays = periodDays[period];
// Document growth by day
const documentGrowth = await pgClient.query(`
@@ -330,7 +376,7 @@ const getGrowthMetrics: BaseTool<{ period?: string }> = {
COUNT(*) as "newDocuments",
SUM(COUNT(*)) OVER (ORDER BY DATE(d."createdAt")) as "cumulativeDocuments"
FROM documents d
WHERE d."createdAt" >= NOW() - INTERVAL '${interval}'
WHERE d."createdAt" >= NOW() - make_interval(days => ${safeDays})
AND d."deletedAt" IS NULL
GROUP BY DATE(d."createdAt")
ORDER BY date
@@ -343,7 +389,7 @@ const getGrowthMetrics: BaseTool<{ period?: string }> = {
COUNT(*) as "newUsers",
SUM(COUNT(*)) OVER (ORDER BY DATE(u."createdAt")) as "cumulativeUsers"
FROM users u
WHERE u."createdAt" >= NOW() - INTERVAL '${interval}'
WHERE u."createdAt" >= NOW() - make_interval(days => ${safeDays})
AND u."deletedAt" IS NULL
GROUP BY DATE(u."createdAt")
ORDER BY date
@@ -355,7 +401,7 @@ const getGrowthMetrics: BaseTool<{ period?: string }> = {
DATE(c."createdAt") as date,
COUNT(*) as "newCollections"
FROM collections c
WHERE c."createdAt" >= NOW() - INTERVAL '${interval}'
WHERE c."createdAt" >= NOW() - make_interval(days => ${safeDays})
AND c."deletedAt" IS NULL
GROUP BY DATE(c."createdAt")
ORDER BY date
@@ -364,10 +410,10 @@ const getGrowthMetrics: BaseTool<{ period?: string }> = {
// Period comparison
const comparison = await pgClient.query(`
SELECT
(SELECT COUNT(*) FROM documents WHERE "createdAt" >= NOW() - INTERVAL '${interval}' AND "deletedAt" IS NULL) as "currentPeriodDocs",
(SELECT COUNT(*) FROM documents WHERE "createdAt" >= NOW() - INTERVAL '${interval}' * 2 AND "createdAt" < NOW() - INTERVAL '${interval}' AND "deletedAt" IS NULL) as "previousPeriodDocs",
(SELECT COUNT(*) FROM users WHERE "createdAt" >= NOW() - INTERVAL '${interval}' AND "deletedAt" IS NULL) as "currentPeriodUsers",
(SELECT COUNT(*) FROM users WHERE "createdAt" >= NOW() - INTERVAL '${interval}' * 2 AND "createdAt" < NOW() - INTERVAL '${interval}' AND "deletedAt" IS NULL) as "previousPeriodUsers"
(SELECT COUNT(*) FROM documents WHERE "createdAt" >= NOW() - make_interval(days => ${safeDays}) AND "deletedAt" IS NULL) as "currentPeriodDocs",
(SELECT COUNT(*) FROM documents WHERE "createdAt" >= NOW() - make_interval(days => ${safeDays * 2}) AND "createdAt" < NOW() - make_interval(days => ${safeDays}) AND "deletedAt" IS NULL) as "previousPeriodDocs",
(SELECT COUNT(*) FROM users WHERE "createdAt" >= NOW() - make_interval(days => ${safeDays}) AND "deletedAt" IS NULL) as "currentPeriodUsers",
(SELECT COUNT(*) FROM users WHERE "createdAt" >= NOW() - make_interval(days => ${safeDays * 2}) AND "createdAt" < NOW() - make_interval(days => ${safeDays}) AND "deletedAt" IS NULL) as "previousPeriodUsers"
`);
return {
@@ -395,7 +441,8 @@ const getSearchAnalytics: BaseTool<{ days?: number }> = {
},
},
handler: async (args, pgClient): Promise<ToolResponse> => {
const days = args.days || 30;
// Validate and sanitize days parameter
const safeDays = validateDaysInterval(args.days, 30, 365);
// Popular search queries
const popularQueries = await pgClient.query(`
@@ -404,7 +451,7 @@ const getSearchAnalytics: BaseTool<{ days?: number }> = {
COUNT(*) as "searchCount",
COUNT(DISTINCT "userId") as "uniqueSearchers"
FROM search_queries
WHERE "createdAt" >= NOW() - INTERVAL '${days} days'
WHERE "createdAt" >= NOW() - make_interval(days => ${safeDays})
GROUP BY query
ORDER BY "searchCount" DESC
LIMIT 20
@@ -417,7 +464,7 @@ const getSearchAnalytics: BaseTool<{ days?: number }> = {
COUNT(*) as "searches",
COUNT(DISTINCT "userId") as "uniqueSearchers"
FROM search_queries
WHERE "createdAt" >= NOW() - INTERVAL '${days} days'
WHERE "createdAt" >= NOW() - make_interval(days => ${safeDays})
GROUP BY DATE("createdAt")
ORDER BY date
`);
@@ -428,7 +475,7 @@ const getSearchAnalytics: BaseTool<{ days?: number }> = {
query,
COUNT(*) as "searchCount"
FROM search_queries
WHERE "createdAt" >= NOW() - INTERVAL '${days} days'
WHERE "createdAt" >= NOW() - make_interval(days => ${safeDays})
AND results = 0
GROUP BY query
ORDER BY "searchCount" DESC
@@ -440,7 +487,7 @@ const getSearchAnalytics: BaseTool<{ days?: number }> = {
popularQueries: popularQueries.rows,
searchVolume: searchVolume.rows,
zeroResultQueries: zeroResults.rows,
periodDays: days,
periodDays: safeDays,
}, null, 2) }],
};
},

View File

@@ -4,10 +4,28 @@
* @author Descomplicar® | @link descomplicar.pt | @copyright 2026
*/
import { Pool } from 'pg';
import { Pool, PoolClient } from 'pg';
import { BaseTool, ToolResponse } from '../types/tools.js';
import { isValidUUID } from '../utils/security.js';
/**
* Execute operations within a transaction
*/
/**
 * Execute a callback within a single PostgreSQL transaction.
 *
 * Borrows a dedicated client from the pool, issues BEGIN, runs the
 * callback against that client, and COMMITs on success. On any error
 * the transaction is rolled back and the original error is rethrown.
 * The client is always released back to the pool.
 *
 * @param pool - pg connection pool to borrow a client from
 * @param callback - async work to run against the transactional client
 * @returns the callback's resolved value
 * @throws the callback's (or BEGIN/COMMIT's) error, after attempting rollback
 */
async function withTransaction<T>(pool: Pool, callback: (client: PoolClient) => Promise<T>): Promise<T> {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    const result = await callback(client);
    await client.query('COMMIT');
    return result;
  } catch (error) {
    // Guard the ROLLBACK: if the connection is already broken, a failing
    // rollback would otherwise replace and mask the original error.
    try {
      await client.query('ROLLBACK');
    } catch {
      // Ignore rollback failure; the original error is rethrown below.
    }
    throw error;
  } finally {
    client.release();
  }
}
/**
* bulk.archive_documents - Archive multiple documents
*/
@@ -30,12 +48,15 @@ const bulkArchiveDocuments: BaseTool<{ document_ids: string[] }> = {
if (!isValidUUID(id)) throw new Error(`Invalid document ID: ${id}`);
}
const result = await pgClient.query(`
UPDATE documents
SET "archivedAt" = NOW(), "updatedAt" = NOW()
WHERE id = ANY($1) AND "archivedAt" IS NULL AND "deletedAt" IS NULL
RETURNING id, title
`, [args.document_ids]);
// Use transaction for atomic operation
const result = await withTransaction(pgClient, async (client) => {
return await client.query(`
UPDATE documents
SET "archivedAt" = NOW(), "updatedAt" = NOW()
WHERE id = ANY($1) AND "archivedAt" IS NULL AND "deletedAt" IS NULL
RETURNING id, title
`, [args.document_ids]);
});
return {
content: [{ type: 'text', text: JSON.stringify({
@@ -69,15 +90,18 @@ const bulkDeleteDocuments: BaseTool<{ document_ids: string[] }> = {
if (!isValidUUID(id)) throw new Error(`Invalid document ID: ${id}`);
}
const deletedById = await pgClient.query(`SELECT id FROM users WHERE role = 'admin' AND "deletedAt" IS NULL LIMIT 1`);
const userId = deletedById.rows.length > 0 ? deletedById.rows[0].id : null;
// Use transaction for atomic operation
const result = await withTransaction(pgClient, async (client) => {
const deletedById = await client.query(`SELECT id FROM users WHERE role = 'admin' AND "deletedAt" IS NULL LIMIT 1`);
const userId = deletedById.rows.length > 0 ? deletedById.rows[0].id : null;
const result = await pgClient.query(`
UPDATE documents
SET "deletedAt" = NOW(), "deletedById" = $2, "updatedAt" = NOW()
WHERE id = ANY($1) AND "deletedAt" IS NULL
RETURNING id, title
`, [args.document_ids, userId]);
return await client.query(`
UPDATE documents
SET "deletedAt" = NOW(), "deletedById" = $2, "updatedAt" = NOW()
WHERE id = ANY($1) AND "deletedAt" IS NULL
RETURNING id, title
`, [args.document_ids, userId]);
});
return {
content: [{ type: 'text', text: JSON.stringify({
@@ -115,19 +139,22 @@ const bulkMoveDocuments: BaseTool<{ document_ids: string[]; collection_id: strin
if (!isValidUUID(id)) throw new Error(`Invalid document ID: ${id}`);
}
// Verify collection exists
const collectionCheck = await pgClient.query(
`SELECT id FROM collections WHERE id = $1 AND "deletedAt" IS NULL`,
[args.collection_id]
);
if (collectionCheck.rows.length === 0) throw new Error('Collection not found');
// Use transaction for atomic operation
const result = await withTransaction(pgClient, async (client) => {
// Verify collection exists
const collectionCheck = await client.query(
`SELECT id FROM collections WHERE id = $1 AND "deletedAt" IS NULL`,
[args.collection_id]
);
if (collectionCheck.rows.length === 0) throw new Error('Collection not found');
const result = await pgClient.query(`
UPDATE documents
SET "collectionId" = $2, "parentDocumentId" = $3, "updatedAt" = NOW()
WHERE id = ANY($1) AND "deletedAt" IS NULL
RETURNING id, title, "collectionId"
`, [args.document_ids, args.collection_id, args.parent_document_id || null]);
return await client.query(`
UPDATE documents
SET "collectionId" = $2, "parentDocumentId" = $3, "updatedAt" = NOW()
WHERE id = ANY($1) AND "deletedAt" IS NULL
RETURNING id, title, "collectionId"
`, [args.document_ids, args.collection_id, args.parent_document_id || null]);
});
return {
content: [{ type: 'text', text: JSON.stringify({
@@ -161,12 +188,15 @@ const bulkRestoreDocuments: BaseTool<{ document_ids: string[] }> = {
if (!isValidUUID(id)) throw new Error(`Invalid document ID: ${id}`);
}
const result = await pgClient.query(`
UPDATE documents
SET "deletedAt" = NULL, "deletedById" = NULL, "updatedAt" = NOW()
WHERE id = ANY($1) AND "deletedAt" IS NOT NULL
RETURNING id, title
`, [args.document_ids]);
// Use transaction for atomic operation
const result = await withTransaction(pgClient, async (client) => {
return await client.query(`
UPDATE documents
SET "deletedAt" = NULL, "deletedById" = NULL, "updatedAt" = NOW()
WHERE id = ANY($1) AND "deletedAt" IS NOT NULL
RETURNING id, title
`, [args.document_ids]);
});
return {
content: [{ type: 'text', text: JSON.stringify({
@@ -204,29 +234,35 @@ const bulkAddUsersToCollection: BaseTool<{ user_ids: string[]; collection_id: st
}
const permission = args.permission || 'read_write';
const creatorResult = await pgClient.query(`SELECT id FROM users WHERE role = 'admin' AND "deletedAt" IS NULL LIMIT 1`);
const createdById = creatorResult.rows.length > 0 ? creatorResult.rows[0].id : args.user_ids[0];
const added: string[] = [];
const skipped: string[] = [];
// Use transaction for atomic operation
const { added, skipped } = await withTransaction(pgClient, async (client) => {
const creatorResult = await client.query(`SELECT id FROM users WHERE role = 'admin' AND "deletedAt" IS NULL LIMIT 1`);
const createdById = creatorResult.rows.length > 0 ? creatorResult.rows[0].id : args.user_ids[0];
for (const userId of args.user_ids) {
// Check if already exists
const existing = await pgClient.query(
`SELECT "userId" FROM collection_users WHERE "userId" = $1 AND "collectionId" = $2`,
[userId, args.collection_id]
);
const addedList: string[] = [];
const skippedList: string[] = [];
if (existing.rows.length > 0) {
skipped.push(userId);
} else {
await pgClient.query(`
INSERT INTO collection_users ("userId", "collectionId", permission, "createdById", "createdAt", "updatedAt")
VALUES ($1, $2, $3, $4, NOW(), NOW())
`, [userId, args.collection_id, permission, createdById]);
added.push(userId);
for (const userId of args.user_ids) {
// Check if already exists
const existing = await client.query(
`SELECT "userId" FROM collection_users WHERE "userId" = $1 AND "collectionId" = $2`,
[userId, args.collection_id]
);
if (existing.rows.length > 0) {
skippedList.push(userId);
} else {
await client.query(`
INSERT INTO collection_users ("userId", "collectionId", permission, "createdById", "createdAt", "updatedAt")
VALUES ($1, $2, $3, $4, NOW(), NOW())
`, [userId, args.collection_id, permission, createdById]);
addedList.push(userId);
}
}
}
return { added: addedList, skipped: skippedList };
});
return {
content: [{ type: 'text', text: JSON.stringify({
@@ -264,11 +300,14 @@ const bulkRemoveUsersFromCollection: BaseTool<{ user_ids: string[]; collection_i
if (!isValidUUID(id)) throw new Error(`Invalid user ID: ${id}`);
}
const result = await pgClient.query(`
DELETE FROM collection_users
WHERE "userId" = ANY($1) AND "collectionId" = $2
RETURNING "userId"
`, [args.user_ids, args.collection_id]);
// Use transaction for atomic operation
const result = await withTransaction(pgClient, async (client) => {
return await client.query(`
DELETE FROM collection_users
WHERE "userId" = ANY($1) AND "collectionId" = $2
RETURNING "userId"
`, [args.user_ids, args.collection_id]);
});
return {
content: [{ type: 'text', text: JSON.stringify({

View File

@@ -4,10 +4,28 @@
* @author Descomplicar® | @link descomplicar.pt | @copyright 2026
*/
import { Pool } from 'pg';
import { Pool, PoolClient } from 'pg';
import { BaseTool, ToolResponse } from '../types/tools.js';
import { isValidUUID, sanitizeInput } from '../utils/security.js';
/**
* Execute operations within a transaction
*/
/**
 * Execute a callback within a single PostgreSQL transaction.
 *
 * Borrows a dedicated client from the pool, issues BEGIN, runs the
 * callback against that client, and COMMITs on success. On any error
 * the transaction is rolled back and the original error is rethrown.
 * The client is always released back to the pool.
 *
 * @param pool - pg connection pool to borrow a client from
 * @param callback - async work to run against the transactional client
 * @returns the callback's resolved value
 * @throws the callback's (or BEGIN/COMMIT's) error, after attempting rollback
 */
async function withTransaction<T>(pool: Pool, callback: (client: PoolClient) => Promise<T>): Promise<T> {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    const result = await callback(client);
    await client.query('COMMIT');
    return result;
  } catch (error) {
    // Guard the ROLLBACK: if the connection is already broken, a failing
    // rollback would otherwise replace and mask the original error.
    try {
      await client.query('ROLLBACK');
    } catch {
      // Ignore rollback failure; the original error is rethrown below.
    }
    throw error;
  } finally {
    client.release();
  }
}
interface CreateDeskProjectDocArgs {
collection_id: string;
desk_project_id: number;
@@ -69,113 +87,118 @@ const createDeskProjectDoc: BaseTool<CreateDeskProjectDocArgs> = {
if (!isValidUUID(args.collection_id)) throw new Error('Invalid collection_id');
if (args.template_id && !isValidUUID(args.template_id)) throw new Error('Invalid template_id');
// Verify collection exists
const collection = await pgClient.query(
`SELECT id, "teamId" FROM collections WHERE id = $1 AND "deletedAt" IS NULL`,
[args.collection_id]
);
if (collection.rows.length === 0) throw new Error('Collection not found');
const teamId = collection.rows[0].teamId;
// Get admin user
const userResult = await pgClient.query(
`SELECT id FROM users WHERE role = 'admin' AND "deletedAt" IS NULL LIMIT 1`
);
if (userResult.rows.length === 0) throw new Error('No admin user found');
const userId = userResult.rows[0].id;
// Get template content if specified
let baseContent = '';
if (args.template_id) {
const template = await pgClient.query(
`SELECT text FROM documents WHERE id = $1 AND template = true AND "deletedAt" IS NULL`,
[args.template_id]
);
if (template.rows.length > 0) {
baseContent = template.rows[0].text || '';
}
}
// Build document content
const includeTasks = args.include_tasks !== false;
const projectName = sanitizeInput(args.desk_project_name);
const customerName = args.desk_customer_name ? sanitizeInput(args.desk_customer_name) : null;
let content = baseContent || '';
// Use transaction for atomic operation (document + comment must be created together)
const newDoc = await withTransaction(pgClient, async (client) => {
// Verify collection exists
const collection = await client.query(
`SELECT id, "teamId" FROM collections WHERE id = $1 AND "deletedAt" IS NULL`,
[args.collection_id]
);
if (collection.rows.length === 0) throw new Error('Collection not found');
// Add project header if no template
if (!args.template_id) {
content = `## Informações do Projecto\n\n`;
content += `| Campo | Valor |\n`;
content += `|-------|-------|\n`;
content += `| **ID Desk** | #${args.desk_project_id} |\n`;
content += `| **Nome** | ${projectName} |\n`;
if (customerName) {
content += `| **Cliente** | ${customerName} |\n`;
const teamId = collection.rows[0].teamId;
// Get admin user
const userResult = await client.query(
`SELECT id FROM users WHERE role = 'admin' AND "deletedAt" IS NULL LIMIT 1`
);
if (userResult.rows.length === 0) throw new Error('No admin user found');
const userId = userResult.rows[0].id;
// Get template content if specified
let baseContent = '';
if (args.template_id) {
const template = await client.query(
`SELECT text FROM documents WHERE id = $1 AND template = true AND "deletedAt" IS NULL`,
[args.template_id]
);
if (template.rows.length > 0) {
baseContent = template.rows[0].text || '';
}
}
content += `| **Criado em** | ${new Date().toISOString().split('T')[0]} |\n`;
content += `\n`;
if (args.desk_project_description) {
content += `## Descrição\n\n${sanitizeInput(args.desk_project_description)}\n\n`;
// Build document content
let content = baseContent || '';
// Add project header if no template
if (!args.template_id) {
content = `## Informações do Projecto\n\n`;
content += `| Campo | Valor |\n`;
content += `|-------|-------|\n`;
content += `| **ID Desk** | #${args.desk_project_id} |\n`;
content += `| **Nome** | ${projectName} |\n`;
if (customerName) {
content += `| **Cliente** | ${customerName} |\n`;
}
content += `| **Criado em** | ${new Date().toISOString().split('T')[0]} |\n`;
content += `\n`;
if (args.desk_project_description) {
content += `## Descrição\n\n${sanitizeInput(args.desk_project_description)}\n\n`;
}
}
}
// Add tasks section
if (includeTasks && args.tasks && args.tasks.length > 0) {
content += `## Tarefas\n\n`;
content += `| ID | Tarefa | Estado | Responsável |\n`;
content += `|----|--------|--------|-------------|\n`;
// Add tasks section
if (includeTasks && args.tasks && args.tasks.length > 0) {
content += `## Tarefas\n\n`;
content += `| ID | Tarefa | Estado | Responsável |\n`;
content += `|----|--------|--------|-------------|\n`;
for (const task of args.tasks) {
const assignees = task.assignees?.join(', ') || '-';
const statusEmoji = task.status === 'complete' ? '✅' : task.status === 'in_progress' ? '🔄' : '⬜';
content += `| #${task.id} | ${sanitizeInput(task.name)} | ${statusEmoji} ${task.status} | ${assignees} |\n`;
for (const task of args.tasks) {
const assignees = task.assignees?.join(', ') || '-';
const statusEmoji = task.status === 'complete' ? '✅' : task.status === 'in_progress' ? '🔄' : '⬜';
content += `| #${task.id} | ${sanitizeInput(task.name)} | ${statusEmoji} ${task.status} | ${assignees} |\n`;
}
content += `\n`;
}
content += `\n`;
}
// Add sync metadata section
content += `---\n\n`;
content += `> **Desk Sync:** Este documento está vinculado ao projecto Desk #${args.desk_project_id}\n`;
content += `> Última sincronização: ${new Date().toISOString()}\n`;
// Add sync metadata section
content += `---\n\n`;
content += `> **Desk Sync:** Este documento está vinculado ao projecto Desk #${args.desk_project_id}\n`;
content += `> Última sincronização: ${new Date().toISOString()}\n`;
// Create document
const result = await pgClient.query(`
INSERT INTO documents (
id, title, text, emoji, "collectionId", "teamId",
"createdById", "lastModifiedById", template,
"createdAt", "updatedAt"
)
VALUES (
gen_random_uuid(), $1, $2, '📋', $3, $4, $5, $5, false, NOW(), NOW()
)
RETURNING id, title, "createdAt"
`, [
projectName,
content,
args.collection_id,
teamId,
userId,
]);
// Create document
const result = await client.query(`
INSERT INTO documents (
id, title, text, emoji, "collectionId", "teamId",
"createdById", "lastModifiedById", template,
"createdAt", "updatedAt"
)
VALUES (
gen_random_uuid(), $1, $2, '📋', $3, $4, $5, $5, false, NOW(), NOW()
)
RETURNING id, title, "createdAt"
`, [
projectName,
content,
args.collection_id,
teamId,
userId,
]);
const newDoc = result.rows[0];
const doc = result.rows[0];
// Store Desk reference in document metadata (using a comment as metadata storage)
await pgClient.query(`
INSERT INTO comments (id, "documentId", "createdById", data, "createdAt", "updatedAt")
VALUES (gen_random_uuid(), $1, $2, $3, NOW(), NOW())
`, [
newDoc.id,
userId,
JSON.stringify({
type: 'desk_sync_metadata',
desk_project_id: args.desk_project_id,
desk_customer_name: customerName,
synced_at: new Date().toISOString(),
}),
]);
// Store Desk reference in document metadata (using a comment as metadata storage)
await client.query(`
INSERT INTO comments (id, "documentId", "createdById", data, "createdAt", "updatedAt")
VALUES (gen_random_uuid(), $1, $2, $3, NOW(), NOW())
`, [
doc.id,
userId,
JSON.stringify({
type: 'desk_sync_metadata',
desk_project_id: args.desk_project_id,
desk_customer_name: customerName,
synced_at: new Date().toISOString(),
}),
]);
return doc;
});
return {
content: [{ type: 'text', text: JSON.stringify({
@@ -217,37 +240,65 @@ const linkDeskTask: BaseTool<LinkDeskTaskArgs> = {
handler: async (args, pgClient): Promise<ToolResponse> => {
if (!isValidUUID(args.document_id)) throw new Error('Invalid document_id');
// Verify document exists
const document = await pgClient.query(
`SELECT id, title, text FROM documents WHERE id = $1 AND "deletedAt" IS NULL`,
[args.document_id]
);
if (document.rows.length === 0) throw new Error('Document not found');
const doc = document.rows[0];
const linkType = args.link_type || 'reference';
const taskName = sanitizeInput(args.desk_task_name);
// Get admin user
const userResult = await pgClient.query(
`SELECT id FROM users WHERE role = 'admin' AND "deletedAt" IS NULL LIMIT 1`
);
const userId = userResult.rows.length > 0 ? userResult.rows[0].id : null;
// Use transaction for atomic operation
const result = await withTransaction(pgClient, async (client) => {
// Verify document exists
const document = await client.query(
`SELECT id, title, text FROM documents WHERE id = $1 AND "deletedAt" IS NULL`,
[args.document_id]
);
if (document.rows.length === 0) throw new Error('Document not found');
// Check if link already exists (search in comments)
const existingLink = await pgClient.query(`
SELECT id FROM comments
WHERE "documentId" = $1
AND data::text LIKE $2
`, [args.document_id, `%"desk_task_id":${args.desk_task_id}%`]);
const doc = document.rows[0];
if (existingLink.rows.length > 0) {
// Update existing link
await pgClient.query(`
UPDATE comments
SET data = $1, "updatedAt" = NOW()
WHERE id = $2
// Get admin user
const userResult = await client.query(
`SELECT id FROM users WHERE role = 'admin' AND "deletedAt" IS NULL LIMIT 1`
);
const userId = userResult.rows.length > 0 ? userResult.rows[0].id : null;
// Check if link already exists (search in comments)
const existingLink = await client.query(`
SELECT id FROM comments
WHERE "documentId" = $1
AND data::text LIKE $2
`, [args.document_id, `%"desk_task_id":${args.desk_task_id}%`]);
if (existingLink.rows.length > 0) {
// Update existing link
await client.query(`
UPDATE comments
SET data = $1, "updatedAt" = NOW()
WHERE id = $2
`, [
JSON.stringify({
type: 'desk_task_link',
desk_task_id: args.desk_task_id,
desk_task_name: taskName,
desk_project_id: args.desk_project_id || null,
link_type: linkType,
sync_status: args.sync_status || false,
updated_at: new Date().toISOString(),
}),
existingLink.rows[0].id,
]);
return {
action: 'updated',
doc,
};
}
// Create new link
await client.query(`
INSERT INTO comments (id, "documentId", "createdById", data, "createdAt", "updatedAt")
VALUES (gen_random_uuid(), $1, $2, $3, NOW(), NOW())
`, [
args.document_id,
userId,
JSON.stringify({
type: 'desk_task_link',
desk_task_id: args.desk_task_id,
@@ -255,65 +306,35 @@ const linkDeskTask: BaseTool<LinkDeskTaskArgs> = {
desk_project_id: args.desk_project_id || null,
link_type: linkType,
sync_status: args.sync_status || false,
updated_at: new Date().toISOString(),
created_at: new Date().toISOString(),
}),
existingLink.rows[0].id,
]);
return {
content: [{ type: 'text', text: JSON.stringify({
action: 'updated',
documentId: args.document_id,
documentTitle: doc.title,
deskTask: {
id: args.desk_task_id,
name: taskName,
projectId: args.desk_project_id,
},
linkType,
syncStatus: args.sync_status || false,
message: `Updated link to Desk task #${args.desk_task_id}`,
}, null, 2) }],
};
}
// Optionally append reference to document text
if (linkType === 'reference') {
const refText = `\n\n---\n> 🔗 **Tarefa Desk:** #${args.desk_task_id} - ${taskName}`;
// Create new link
await pgClient.query(`
INSERT INTO comments (id, "documentId", "createdById", data, "createdAt", "updatedAt")
VALUES (gen_random_uuid(), $1, $2, $3, NOW(), NOW())
`, [
args.document_id,
userId,
JSON.stringify({
type: 'desk_task_link',
desk_task_id: args.desk_task_id,
desk_task_name: taskName,
desk_project_id: args.desk_project_id || null,
link_type: linkType,
sync_status: args.sync_status || false,
created_at: new Date().toISOString(),
}),
]);
// Optionally append reference to document text
if (linkType === 'reference') {
const refText = `\n\n---\n> 🔗 **Tarefa Desk:** #${args.desk_task_id} - ${taskName}`;
// Only append if not already present
if (!doc.text?.includes(`#${args.desk_task_id}`)) {
await pgClient.query(`
UPDATE documents
SET text = text || $1, "updatedAt" = NOW()
WHERE id = $2
`, [refText, args.document_id]);
// Only append if not already present
if (!doc.text?.includes(`#${args.desk_task_id}`)) {
await client.query(`
UPDATE documents
SET text = text || $1, "updatedAt" = NOW()
WHERE id = $2
`, [refText, args.document_id]);
}
}
}
return {
action: 'created',
doc,
};
});
return {
content: [{ type: 'text', text: JSON.stringify({
action: 'created',
action: result.action,
documentId: args.document_id,
documentTitle: doc.title,
documentTitle: result.doc.title,
deskTask: {
id: args.desk_task_id,
name: taskName,
@@ -321,7 +342,9 @@ const linkDeskTask: BaseTool<LinkDeskTaskArgs> = {
},
linkType,
syncStatus: args.sync_status || false,
message: `Linked Desk task #${args.desk_task_id} to document "${doc.title}"`,
message: result.action === 'updated'
? `Updated link to Desk task #${args.desk_task_id}`
: `Linked Desk task #${args.desk_task_id} to document "${result.doc.title}"`,
}, null, 2) }],
};
},

View File

@@ -4,10 +4,28 @@
* @author Descomplicar® | @link descomplicar.pt | @copyright 2026
*/
import { Pool } from 'pg';
import { Pool, PoolClient } from 'pg';
import { BaseTool, ToolResponse } from '../types/tools.js';
import { isValidUUID, sanitizeInput } from '../utils/security.js';
/**
 * Execute operations within a transaction.
 *
 * Acquires a client from the pool, runs `callback` between BEGIN/COMMIT,
 * and rolls back on any error thrown by the callback. The client is always
 * released back to the pool, on both the success and failure paths.
 *
 * @param pool - pg connection pool to borrow a client from
 * @param callback - receives the transactional client; its resolved value is returned
 * @returns the value resolved by `callback`
 * @throws the original error from `callback`; a failed ROLLBACK is swallowed
 *         so it cannot mask the root cause
 */
async function withTransaction<T>(pool: Pool, callback: (client: PoolClient) => Promise<T>): Promise<T> {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    const result = await callback(client);
    await client.query('COMMIT');
    return result;
  } catch (error) {
    // If ROLLBACK itself fails (e.g. the connection dropped mid-transaction),
    // ignore that secondary failure so the caller sees the original error,
    // not the rollback error.
    try {
      await client.query('ROLLBACK');
    } catch {
      // intentionally ignored; original error is rethrown below
    }
    throw error;
  } finally {
    client.release();
  }
}
interface ExportCollectionArgs {
collection_id: string;
include_children?: boolean;
@@ -188,105 +206,110 @@ const importMarkdownFolder: BaseTool<ImportMarkdownArgs> = {
const createHierarchy = args.create_hierarchy !== false;
// Verify collection exists
const collection = await pgClient.query(
`SELECT id, "teamId" FROM collections WHERE id = $1 AND "deletedAt" IS NULL`,
[args.collection_id]
);
if (collection.rows.length === 0) throw new Error('Collection not found');
// Use transaction for atomic import (all documents or none)
const { imported, errors } = await withTransaction(pgClient, async (client) => {
// Verify collection exists
const collection = await client.query(
`SELECT id, "teamId" FROM collections WHERE id = $1 AND "deletedAt" IS NULL`,
[args.collection_id]
);
if (collection.rows.length === 0) throw new Error('Collection not found');
const teamId = collection.rows[0].teamId;
const teamId = collection.rows[0].teamId;
// Get admin user for createdById
const userResult = await pgClient.query(
`SELECT id FROM users WHERE role = 'admin' AND "deletedAt" IS NULL LIMIT 1`
);
if (userResult.rows.length === 0) throw new Error('No admin user found');
const userId = userResult.rows[0].id;
// Get admin user for createdById
const userResult = await client.query(
`SELECT id FROM users WHERE role = 'admin' AND "deletedAt" IS NULL LIMIT 1`
);
if (userResult.rows.length === 0) throw new Error('No admin user found');
const userId = userResult.rows[0].id;
const imported: Array<{ id: string; title: string; path: string }> = [];
const errors: Array<{ title: string; error: string }> = [];
const pathToId: Record<string, string> = {};
const importedList: Array<{ id: string; title: string; path: string }> = [];
const errorList: Array<{ title: string; error: string }> = [];
const pathToId: Record<string, string> = {};
// First pass: create all documents (sorted by path depth)
const sortedDocs = [...args.documents].sort((a, b) => {
const depthA = (a.parent_path || '').split('/').filter(Boolean).length;
const depthB = (b.parent_path || '').split('/').filter(Boolean).length;
return depthA - depthB;
});
// First pass: create all documents (sorted by path depth)
const sortedDocs = [...args.documents].sort((a, b) => {
const depthA = (a.parent_path || '').split('/').filter(Boolean).length;
const depthB = (b.parent_path || '').split('/').filter(Boolean).length;
return depthA - depthB;
});
for (const doc of sortedDocs) {
try {
let parentDocumentId: string | null = null;
for (const doc of sortedDocs) {
try {
let parentDocumentId: string | null = null;
// Resolve parent if specified
if (doc.parent_path && createHierarchy) {
const parentPath = doc.parent_path.trim();
// Resolve parent if specified
if (doc.parent_path && createHierarchy) {
const parentPath = doc.parent_path.trim();
if (pathToId[parentPath]) {
parentDocumentId = pathToId[parentPath];
} else {
// Try to find existing parent by title
const parentTitle = parentPath.split('/').pop();
const existingParent = await pgClient.query(
`SELECT id FROM documents WHERE title = $1 AND "collectionId" = $2 AND "deletedAt" IS NULL LIMIT 1`,
[parentTitle, args.collection_id]
);
if (pathToId[parentPath]) {
parentDocumentId = pathToId[parentPath];
} else {
// Try to find existing parent by title
const parentTitle = parentPath.split('/').pop();
const existingParent = await client.query(
`SELECT id FROM documents WHERE title = $1 AND "collectionId" = $2 AND "deletedAt" IS NULL LIMIT 1`,
[parentTitle, args.collection_id]
);
if (existingParent.rows.length > 0) {
parentDocumentId = existingParent.rows[0].id;
if (parentDocumentId) {
pathToId[parentPath] = parentDocumentId;
if (existingParent.rows.length > 0) {
parentDocumentId = existingParent.rows[0].id;
if (parentDocumentId) {
pathToId[parentPath] = parentDocumentId;
}
}
}
}
}
// Strip YAML frontmatter if present
let content = doc.content;
if (content.startsWith('---')) {
const endOfFrontmatter = content.indexOf('---', 3);
if (endOfFrontmatter !== -1) {
content = content.substring(endOfFrontmatter + 3).trim();
// Strip YAML frontmatter if present
let content = doc.content;
if (content.startsWith('---')) {
const endOfFrontmatter = content.indexOf('---', 3);
if (endOfFrontmatter !== -1) {
content = content.substring(endOfFrontmatter + 3).trim();
}
}
// Create document
const result = await client.query(`
INSERT INTO documents (
id, title, text, emoji, "collectionId", "teamId", "parentDocumentId",
"createdById", "lastModifiedById", template, "createdAt", "updatedAt"
)
VALUES (
gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, $7, false, NOW(), NOW()
)
RETURNING id, title
`, [
sanitizeInput(doc.title),
content,
doc.emoji || null,
args.collection_id,
teamId,
parentDocumentId,
userId,
]);
const newDoc = result.rows[0];
const fullPath = doc.parent_path ? `${doc.parent_path}/${doc.title}` : doc.title;
pathToId[fullPath] = newDoc.id;
importedList.push({
id: newDoc.id,
title: newDoc.title,
path: fullPath,
});
} catch (error) {
errorList.push({
title: doc.title,
error: error instanceof Error ? error.message : String(error),
});
}
// Create document
const result = await pgClient.query(`
INSERT INTO documents (
id, title, text, emoji, "collectionId", "teamId", "parentDocumentId",
"createdById", "lastModifiedById", template, "createdAt", "updatedAt"
)
VALUES (
gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, $7, false, NOW(), NOW()
)
RETURNING id, title
`, [
sanitizeInput(doc.title),
content,
doc.emoji || null,
args.collection_id,
teamId,
parentDocumentId,
userId,
]);
const newDoc = result.rows[0];
const fullPath = doc.parent_path ? `${doc.parent_path}/${doc.title}` : doc.title;
pathToId[fullPath] = newDoc.id;
imported.push({
id: newDoc.id,
title: newDoc.title,
path: fullPath,
});
} catch (error) {
errors.push({
title: doc.title,
error: error instanceof Error ? error.message : String(error),
});
}
}
return { imported: importedList, errors: errorList };
});
return {
content: [{ type: 'text', text: JSON.stringify({

View File

@@ -5,7 +5,7 @@
import { Pool } from 'pg';
import { BaseTool, ToolResponse, PaginationArgs } from '../types/tools.js';
import { validatePagination, isValidUUID, sanitizeInput } from '../utils/security.js';
import { validatePagination, isValidUUID, sanitizeInput, validateDaysInterval } from '../utils/security.js';
interface SearchQueryListArgs extends PaginationArgs {
user_id?: string;
@@ -137,8 +137,10 @@ const getSearchQueryStats: BaseTool<SearchQueryStatsArgs> = {
},
},
handler: async (args, pgClient): Promise<ToolResponse> => {
const days = args.days || 30;
const conditions: string[] = [`sq."createdAt" > NOW() - INTERVAL '${days} days'`];
// Validate and sanitize days parameter
const safeDays = validateDaysInterval(args.days, 30, 365);
const conditions: string[] = [`sq."createdAt" > NOW() - make_interval(days => ${safeDays})`];
const params: any[] = [];
let paramIndex = 1;
@@ -219,7 +221,7 @@ const getSearchQueryStats: BaseTool<SearchQueryStatsArgs> = {
${whereClause}
GROUP BY DATE(sq."createdAt")
ORDER BY date DESC
LIMIT ${days}
LIMIT ${safeDays}
`,
params
);
@@ -228,7 +230,7 @@ const getSearchQueryStats: BaseTool<SearchQueryStatsArgs> = {
content: [{
type: 'text',
text: JSON.stringify({
period: `Last ${days} days`,
period: `Last ${safeDays} days`,
overall: overallStats.rows[0],
popularSearches: popularSearches.rows,
zeroResultSearches: zeroResultSearches.rows,