diff --git a/api/scripts/sessions-indexer.ts b/api/scripts/sessions-indexer.ts new file mode 100644 index 0000000..83cfc70 --- /dev/null +++ b/api/scripts/sessions-indexer.ts @@ -0,0 +1,50 @@ +#!/usr/bin/env tsx +/** + * CLI do indexer de sessões Claude Code (Observabilidade/Espelho). + * + * Modos: + * --full Full scan de ~/.claude/projects -> SQLite em ~/.claude-work/sessions.db + * --watch Modo incremental (stub; implementação Task 8) + * + * Env: + * OBSERVABILIDADE_DB Override ao caminho da BD SQLite + */ +import { indexAll, DEFAULT_DB_PATH } from '../services/sessions/indexer.js' +import { startWatcher } from '../services/sessions/watcher.js' + +async function main(): Promise<void> { + const args = process.argv.slice(2) + const mode = args.find((a) => a === '--full' || a === '--watch') + if (!mode) { + console.error('Uso: sessions-indexer.ts [--full|--watch]') + process.exit(1) + } + + const dbPath = process.env.OBSERVABILIDADE_DB ?? DEFAULT_DB_PATH + console.log(`[indexer] modo=${mode} db=${dbPath}`) + + if (mode === '--watch') { + startWatcher() + return + } + + const start = Date.now() + let lastLogged = 0 + const { indexed, failed } = await indexAll({ + dbPath, + onProgress: (done, total) => { + if (done - lastLogged >= 50 || done === total) { + console.log(`[indexer] ${done}/${total}`) + lastLogged = done + } + }, + }) + const durationMs = Date.now() - start + const durationSec = (durationMs / 1000).toFixed(1) + console.log(`[indexer] concluído em ${durationSec}s · indexed=${indexed} failed=${failed}`) +} + +main().catch((err) => { + console.error('[indexer] falha fatal:', err) + process.exit(2) +}) diff --git a/api/services/sessions/indexer.ts b/api/services/sessions/indexer.ts new file mode 100644 index 0000000..a2735bf --- /dev/null +++ b/api/services/sessions/indexer.ts @@ -0,0 +1,98 @@ +import { readdirSync, statSync } from 'fs' +import { homedir } from 'os' +import { join } from 'path' +import { parseSessionFile } from './parser.js' +import { 
openSessionsDb, type SessionsDb } from './db.js' +import type { SessionMeta } from '../../types/session.js' + +export const PROJECTS_ROOT = join(homedir(), '.claude', 'projects') +export const DEFAULT_DB_PATH = join(homedir(), '.claude-work', 'sessions.db') + +/** + * Percorre a raiz de projectos Claude (profundidade 2) e devolve todos os .jsonl. + * Estrutura: ~/.claude/projects/<project>/<session>.jsonl + */ +export function findAllJsonl(root: string = PROJECTS_ROOT): string[] { + const result: string[] = [] + let entries: string[] + try { + entries = readdirSync(root) + } catch { + return result + } + for (const entry of entries) { + const projectDir = join(root, entry) + let st + try { + st = statSync(projectDir) + } catch { + continue + } + if (!st.isDirectory()) continue + let files: string[] + try { + files = readdirSync(projectDir) + } catch { + continue + } + for (const f of files) { + if (f.endsWith('.jsonl')) result.push(join(projectDir, f)) + } + } + return result +} + +/** + * Indexa um único ficheiro (parse + upsert). Uso individual — útil para o watcher (Task 8). + */ +export async function indexFile(db: SessionsDb, path: string): Promise<void> { + const { meta } = await parseSessionFile(path) + db.upsertSession(meta) +} + +export interface IndexAllOptions { + dbPath?: string + onProgress?: (done: number, total: number) => void +} + +/** + * Full scan: percorre todos os JSONL e faz upsert em lote (batch 50 via transacção). + */ +export async function indexAll( + options: IndexAllOptions = {}, +): Promise<{ indexed: number; failed: number }> { + const db = openSessionsDb(options.dbPath ?? 
DEFAULT_DB_PATH) + const files = findAllJsonl() + const BATCH = 50 + let indexed = 0 + let failed = 0 + let batch: SessionMeta[] = [] + + try { + for (let i = 0; i < files.length; i++) { + try { + const { meta } = await parseSessionFile(files[i]) + batch.push(meta) + if (batch.length >= BATCH) { + db.upsertMany(batch) + indexed += batch.length + batch = [] + } + } catch (err) { + failed++ + console.error(`[indexer] erro em ${files[i]}:`, err) + } + if (options.onProgress) { + options.onProgress(indexed + failed + batch.length, files.length) + } + } + if (batch.length > 0) { + db.upsertMany(batch) + indexed += batch.length + } + } finally { + db.close() + } + + return { indexed, failed } +} diff --git a/api/services/sessions/watcher.ts b/api/services/sessions/watcher.ts new file mode 100644 index 0000000..bdaf013 --- /dev/null +++ b/api/services/sessions/watcher.ts @@ -0,0 +1,7 @@ +/** + * Watcher incremental via chokidar — stub. + * Implementação completa na Task 8 (Pipeline: incremental watch + resiliência). + */ +export function startWatcher(): never { + throw new Error('watcher não implementado — ver Task 8') +}