feat: Add export/import and Desk CRM sync tools (164 total)
New modules:
- export-import.ts (2 tools): export_collection_to_markdown, import_markdown_folder
- desk-sync.ts (2 tools): create_desk_project_doc, link_desk_task

Updated:
- CHANGELOG.md: Version 1.2.1
- CLAUDE.md: Updated to 164 tools across 33 modules
- CONTINUE.md: Updated state documentation
- AUDIT-REQUEST.md: Updated metrics and file list

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
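For reference, a minimal sketch of invoking the new export tool from an MCP client, assuming the standard MCP TypeScript SDK over a stdio transport; the server command, client name, and collection UUID are placeholders, not values taken from this commit:

import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";

// Launch the MCP Outline PostgreSQL server over stdio (command/args are placeholders).
const transport = new StdioClientTransport({ command: "node", args: ["dist/index.js"] });
const client = new Client({ name: "export-example", version: "1.0.0" });
await client.connect(transport);

// Call the new export tool; collection_id must be an existing collection UUID.
const result = await client.callTool({
  name: "outline_export_collection_to_markdown",
  arguments: {
    collection_id: "00000000-0000-0000-0000-000000000000",
    include_metadata: true,   // emit YAML frontmatter per document
    format: "markdown",       // or "json" for a raw dump of the document tree
  },
});
console.log(result.content);

The arguments mirror the inputSchema of outline_export_collection_to_markdown in export-import.ts below; outline_import_markdown_folder is called the same way with a documents array.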
src/tools/export-import.ts (new file, 304 lines added)
@@ -0,0 +1,304 @@
/**
 * MCP Outline PostgreSQL - Export/Import Tools
 * Advanced export to Markdown and import from Markdown folders
 * @author Descomplicar® | @link descomplicar.pt | @copyright 2026
 */

import { Pool } from 'pg';
import { BaseTool, ToolResponse } from '../types/tools.js';
import { isValidUUID, sanitizeInput } from '../utils/security.js';

interface ExportCollectionArgs {
  collection_id: string;
  include_children?: boolean;
  include_metadata?: boolean;
  format?: 'markdown' | 'json';
}

interface ImportMarkdownArgs {
  collection_id: string;
  documents: Array<{
    title: string;
    content: string;
    parent_path?: string;
    emoji?: string;
  }>;
  create_hierarchy?: boolean;
}

/**
 * export.collection_to_markdown - Export entire collection to Markdown
 */
const exportCollectionToMarkdown: BaseTool<ExportCollectionArgs> = {
  name: 'outline_export_collection_to_markdown',
  description: 'Export an entire collection to Markdown format with document hierarchy.',
  inputSchema: {
    type: 'object',
    properties: {
      collection_id: { type: 'string', description: 'Collection ID (UUID)' },
      include_children: { type: 'boolean', description: 'Include nested documents (default: true)' },
      include_metadata: { type: 'boolean', description: 'Include YAML frontmatter with metadata (default: true)' },
      format: { type: 'string', enum: ['markdown', 'json'], description: 'Output format (default: markdown)' },
    },
    required: ['collection_id'],
  },
  handler: async (args, pgClient): Promise<ToolResponse> => {
    if (!isValidUUID(args.collection_id)) throw new Error('Invalid collection_id');

    const includeChildren = args.include_children !== false;
    const includeMetadata = args.include_metadata !== false;
    const format = args.format || 'markdown';

    // Get collection info
    const collection = await pgClient.query(`
      SELECT id, name, description, icon, color
      FROM collections
      WHERE id = $1 AND "deletedAt" IS NULL
    `, [args.collection_id]);

    if (collection.rows.length === 0) throw new Error('Collection not found');

    // Get all documents in collection (recursive walk from root documents down)
    const documents = await pgClient.query(`
      WITH RECURSIVE doc_tree AS (
        SELECT
          d.id, d.title, d.text, d.emoji, d."parentDocumentId",
          d."createdAt", d."updatedAt", d."publishedAt",
          u.name as "authorName",
          0 as depth,
          d.title as path
        FROM documents d
        LEFT JOIN users u ON d."createdById" = u.id
        WHERE d."collectionId" = $1
          AND d."parentDocumentId" IS NULL
          AND d."deletedAt" IS NULL
          AND d.template = false

        UNION ALL

        SELECT
          d.id, d.title, d.text, d.emoji, d."parentDocumentId",
          d."createdAt", d."updatedAt", d."publishedAt",
          u.name as "authorName",
          dt.depth + 1,
          dt.path || '/' || d.title
        FROM documents d
        LEFT JOIN users u ON d."createdById" = u.id
        JOIN doc_tree dt ON d."parentDocumentId" = dt.id
        WHERE d."deletedAt" IS NULL AND d.template = false
      )
      SELECT * FROM doc_tree
      ORDER BY path
    `, [args.collection_id]);

    // Honor include_children: when false, keep only top-level documents (depth 0)
    const docRows = includeChildren
      ? documents.rows
      : documents.rows.filter((d) => d.depth === 0);

    if (format === 'json') {
      return {
        content: [{ type: 'text', text: JSON.stringify({
          collection: collection.rows[0],
          documents: docRows,
          exportedAt: new Date().toISOString(),
          totalDocuments: docRows.length,
        }, null, 2) }],
      };
    }

    // Build Markdown output
    const markdownFiles: Array<{ path: string; content: string }> = [];

    for (const doc of docRows) {
      let content = '';

      if (includeMetadata) {
        content += '---\n';
        content += `title: "${doc.title.replace(/"/g, '\\"')}"\n`;
        if (doc.emoji) content += `emoji: "${doc.emoji}"\n`;
        content += `author: "${doc.authorName || 'Unknown'}"\n`;
        content += `created: ${doc.createdAt}\n`;
        content += `updated: ${doc.updatedAt}\n`;
        if (doc.publishedAt) content += `published: ${doc.publishedAt}\n`;
        content += `outline_id: ${doc.id}\n`;
        content += '---\n\n';
      }

      // Add title as H1 if not already in content
      if (!doc.text?.startsWith('# ')) {
        content += `# ${doc.emoji ? doc.emoji + ' ' : ''}${doc.title}\n\n`;
      }

      content += doc.text || '';

      const fileName = doc.path
        .replace(/[^a-zA-Z0-9\/\-_\s]/g, '')
        .replace(/\s+/g, '-')
        .toLowerCase();

      markdownFiles.push({
        path: `${fileName}.md`,
        content,
      });
    }

    return {
      content: [{ type: 'text', text: JSON.stringify({
        collection: {
          name: collection.rows[0].name,
          description: collection.rows[0].description,
        },
        files: markdownFiles,
        exportedAt: new Date().toISOString(),
        totalFiles: markdownFiles.length,
        message: `Exported ${markdownFiles.length} documents from collection "${collection.rows[0].name}"`,
      }, null, 2) }],
    };
  },
};

/**
 * import.markdown_folder - Import Markdown documents into collection
 */
const importMarkdownFolder: BaseTool<ImportMarkdownArgs> = {
  name: 'outline_import_markdown_folder',
  description: 'Import multiple Markdown documents into a collection, preserving hierarchy.',
  inputSchema: {
    type: 'object',
    properties: {
      collection_id: { type: 'string', description: 'Target collection ID (UUID)' },
      documents: {
        type: 'array',
        items: {
          type: 'object',
          properties: {
            title: { type: 'string', description: 'Document title' },
            content: { type: 'string', description: 'Markdown content' },
            parent_path: { type: 'string', description: 'Parent document path (e.g., "parent/child")' },
            emoji: { type: 'string', description: 'Document emoji' },
          },
          required: ['title', 'content'],
        },
        description: 'Array of documents to import',
      },
      create_hierarchy: { type: 'boolean', description: 'Create parent documents if they don\'t exist (default: true)' },
    },
    required: ['collection_id', 'documents'],
  },
  handler: async (args, pgClient): Promise<ToolResponse> => {
    if (!isValidUUID(args.collection_id)) throw new Error('Invalid collection_id');
    if (!args.documents || args.documents.length === 0) throw new Error('At least one document required');
    if (args.documents.length > 100) throw new Error('Maximum 100 documents per import');

    const createHierarchy = args.create_hierarchy !== false;

    // Verify collection exists
    const collection = await pgClient.query(
      `SELECT id, "teamId" FROM collections WHERE id = $1 AND "deletedAt" IS NULL`,
      [args.collection_id]
    );
    if (collection.rows.length === 0) throw new Error('Collection not found');

    const teamId = collection.rows[0].teamId;

    // Get admin user for createdById
    const userResult = await pgClient.query(
      `SELECT id FROM users WHERE role = 'admin' AND "deletedAt" IS NULL LIMIT 1`
    );
    if (userResult.rows.length === 0) throw new Error('No admin user found');
    const userId = userResult.rows[0].id;

    const imported: Array<{ id: string; title: string; path: string }> = [];
    const errors: Array<{ title: string; error: string }> = [];
    const pathToId: Record<string, string> = {};

    // First pass: create all documents (sorted by path depth so parents come first)
    const sortedDocs = [...args.documents].sort((a, b) => {
      const depthA = (a.parent_path || '').split('/').filter(Boolean).length;
      const depthB = (b.parent_path || '').split('/').filter(Boolean).length;
      return depthA - depthB;
    });

    for (const doc of sortedDocs) {
      try {
        let parentDocumentId: string | null = null;

        // Resolve parent if specified
        if (doc.parent_path && createHierarchy) {
          const parentPath = doc.parent_path.trim();

          if (pathToId[parentPath]) {
            parentDocumentId = pathToId[parentPath];
          } else {
            // Try to find existing parent by title
            const parentTitle = parentPath.split('/').pop();
            const existingParent = await pgClient.query(
              `SELECT id FROM documents WHERE title = $1 AND "collectionId" = $2 AND "deletedAt" IS NULL LIMIT 1`,
              [parentTitle, args.collection_id]
            );

            if (existingParent.rows.length > 0) {
              parentDocumentId = existingParent.rows[0].id;
              if (parentDocumentId) {
                pathToId[parentPath] = parentDocumentId;
              }
            }
          }
        }

        // Strip YAML frontmatter if present
        let content = doc.content;
        if (content.startsWith('---')) {
          const endOfFrontmatter = content.indexOf('---', 3);
          if (endOfFrontmatter !== -1) {
            content = content.substring(endOfFrontmatter + 3).trim();
          }
        }

        // Create document
        const result = await pgClient.query(`
          INSERT INTO documents (
            id, title, text, emoji, "collectionId", "teamId", "parentDocumentId",
            "createdById", "lastModifiedById", template, "createdAt", "updatedAt"
          )
          VALUES (
            gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, $7, false, NOW(), NOW()
          )
          RETURNING id, title
        `, [
          sanitizeInput(doc.title),
          content,
          doc.emoji || null,
          args.collection_id,
          teamId,
          parentDocumentId,
          userId,
        ]);

        const newDoc = result.rows[0];
        const fullPath = doc.parent_path ? `${doc.parent_path}/${doc.title}` : doc.title;
        pathToId[fullPath] = newDoc.id;

        imported.push({
          id: newDoc.id,
          title: newDoc.title,
          path: fullPath,
        });
      } catch (error) {
        errors.push({
          title: doc.title,
          error: error instanceof Error ? error.message : String(error),
        });
      }
    }

    return {
      content: [{ type: 'text', text: JSON.stringify({
        imported,
        errors,
        importedCount: imported.length,
        errorCount: errors.length,
        collectionId: args.collection_id,
        message: `Imported ${imported.length} documents${errors.length > 0 ? `, ${errors.length} failed` : ''}`,
      }, null, 2) }],
    };
  },
};

export const exportImportTools: BaseTool<any>[] = [exportCollectionToMarkdown, importMarkdownFolder];
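As a usage note, the exported array is meant to be merged into the server's overall tool list. A hedged sketch, assuming a central aggregator such as src/tools/index.ts (that file is not part of this diff, and the file name and allTools identifier are illustrative):

// src/tools/index.ts (hypothetical aggregator; the real file layout may differ)
import { BaseTool } from '../types/tools.js';
import { exportImportTools } from './export-import.js';
// The desk-sync.ts module added in the same commit would be merged the same way.

export const allTools: BaseTool<any>[] = [
  // ...existing per-module tool arrays...
  ...exportImportTools,
];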