2a523a505e
Adiciona tabela 'patterns' à BD sessions (UNIQUE por week_iso+pattern_key) e helpers upsertPattern/getPatternsByWeek/getConsecutiveWeeks no SessionsDb. Módulo patterns.ts implementa 6 detectores heurísticos para detecção semanal: 1. skills_with_high_error_rate (ratio > 0.2, severity warning|action) 2. tools_low_efficiency (tool_calls/event_count médio > 0.5) 3. skill_tool_pairs (top 5 co-ocorrências) 4. duration_outliers (sessões > p95 com outcome != completed) 5. abandoned_sessions (event_count<3 AND outcome=unknown, >=5) 6. growing_complexity (avg tool_calls actual > anterior*1.3) 5 testes cobrem detector de erro, abandonadas, consecutive_weeks, idempotência do upsert e toPatternRecord. Refs Fase 6A · Desk #2059 · Project #65 Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
304 lines
11 KiB
TypeScript
304 lines
11 KiB
TypeScript
import Database from 'better-sqlite3'
|
|
import { mkdirSync } from 'fs'
|
|
import { dirname } from 'path'
|
|
import type { SessionMeta } from '../../types/session.js'
|
|
|
|
/**
 * Optional filters for listing/counting sessions; all present filters are
 * AND-combined by buildWhere().
 */
export interface ListFilters {
  /** Only sessions whose started_at is within the last N days. */
  days?: number
  /** Exact match on project_slug. */
  project?: string
  /** Sessions whose tools_used JSON array contains this tool name. */
  tool?: string
  /** Sessions whose skills_invoked JSON array contains this skill name. */
  skill?: string
  /** Substring match against first_prompt (SQL LIKE). */
  q?: string
  /** Page size for listSessions (defaults to 50 there). */
  limit?: number
  /** Page offset for listSessions (defaults to 0 there). */
  offset?: number
}
|
|
|
|
/**
 * One detected weekly usage pattern, persisted in the `patterns` table.
 * Uniqueness is (week_iso, pattern_key), so re-running detection upserts.
 */
export interface PatternRecord {
  /** SQLite rowid; unset until the record has been persisted. */
  id?: number
  /** When the detection run recorded this pattern (stored as TEXT). */
  detected_at: string
  /** ISO week the pattern applies to, formatted YYYY-Www. */
  week_iso: string
  /** Stable detector identifier; unique within a week. */
  pattern_key: string
  title: string
  description: string
  /** Escalation level; 'action' is presumably the most severe — see ordering in getPatternsByWeek. */
  severity: 'info' | 'warning' | 'action'
  /** Detector-specific metric; null when not applicable. */
  metric_value: number | null
  /** Example session ids illustrating the pattern (stored as a JSON array). */
  sample_session_ids: string[]
  /** Number of sessions exhibiting the pattern. */
  affected_count: number
  /** Consecutive weeks (including this one) the pattern has appeared. */
  consecutive_weeks: number
}
|
|
|
|
/**
 * Facade over the SQLite sessions/patterns index. Obtain via openSessionsDb().
 */
export interface SessionsDb {
  /** Inserts or replaces one session row, keyed by session_id. */
  upsertSession(meta: SessionMeta): void
  /** Bulk upsert wrapped in a single transaction. */
  upsertMany(metas: SessionMeta[]): void
  /** Filtered, paginated listing ordered by started_at DESC. */
  listSessions(filters: ListFilters): SessionMeta[]
  /** Row count for the same filters as listSessions (limit/offset ignored). */
  countSessions(filters: ListFilters): number
  /** Lookup by session_id; null when not indexed. */
  getSession(id: string): SessionMeta | null
  /** Deletes the session indexed from the given JSONL file path. */
  deleteByJsonlPath(path: string): void
  /** Inserts or updates a pattern (unique per week_iso + pattern_key). */
  upsertPattern(p: PatternRecord): void
  /** All patterns detected for an ISO week (YYYY-Www). */
  getPatternsByWeek(week: string): PatternRecord[]
  /** Consecutive weeks ending at uptoWeek (inclusive) in which pattern_key appeared. */
  getConsecutiveWeeks(pattern_key: string, uptoWeek: string): number
  /** Escape hatch to the underlying better-sqlite3 handle. */
  rawDb(): Database.Database
  /** Closes the database handle. */
  close(): void
}
|
|
|
|
// Idempotent DDL applied on every open (IF NOT EXISTS throughout):
// - sessions: one row per indexed session JSONL file; tools_used/skills_invoked
//   store JSON arrays as TEXT (see rowToMeta / upsert serialisation).
// - patterns: weekly detections; UNIQUE(week_iso, pattern_key) is the target of
//   upsertPattern's ON CONFLICT clause.
const SCHEMA = `
CREATE TABLE IF NOT EXISTS sessions (
session_id TEXT PRIMARY KEY,
project_path TEXT NOT NULL,
project_slug TEXT NOT NULL,
jsonl_path TEXT NOT NULL UNIQUE,
started_at TEXT NOT NULL,
ended_at TEXT,
duration_sec INTEGER,
event_count INTEGER NOT NULL,
user_messages INTEGER NOT NULL,
assistant_msgs INTEGER NOT NULL,
tool_calls INTEGER NOT NULL,
first_prompt TEXT,
tools_used TEXT NOT NULL,
skills_invoked TEXT NOT NULL,
outcome TEXT NOT NULL,
permission_mode TEXT,
file_size INTEGER NOT NULL,
indexed_at TEXT NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_started ON sessions(started_at DESC);
CREATE INDEX IF NOT EXISTS idx_project ON sessions(project_slug, started_at DESC);

CREATE TABLE IF NOT EXISTS patterns (
id INTEGER PRIMARY KEY AUTOINCREMENT,
detected_at TEXT NOT NULL,
week_iso TEXT NOT NULL,
pattern_key TEXT NOT NULL,
title TEXT NOT NULL,
description TEXT NOT NULL,
severity TEXT NOT NULL,
metric_value REAL,
sample_session_ids TEXT NOT NULL,
affected_count INTEGER NOT NULL,
consecutive_weeks INTEGER NOT NULL DEFAULT 1,
UNIQUE(week_iso, pattern_key)
);
CREATE INDEX IF NOT EXISTS idx_patterns_week ON patterns(week_iso);
CREATE INDEX IF NOT EXISTS idx_patterns_key ON patterns(pattern_key);
`
|
|
|
|
function rowToMeta(row: Record<string, unknown>): SessionMeta {
|
|
return {
|
|
session_id: row.session_id as string,
|
|
project_path: row.project_path as string,
|
|
project_slug: row.project_slug as string,
|
|
jsonl_path: row.jsonl_path as string,
|
|
started_at: row.started_at as string,
|
|
ended_at: (row.ended_at as string | null) ?? null,
|
|
duration_sec: (row.duration_sec as number | null) ?? null,
|
|
event_count: row.event_count as number,
|
|
user_messages: row.user_messages as number,
|
|
assistant_msgs: row.assistant_msgs as number,
|
|
tool_calls: row.tool_calls as number,
|
|
first_prompt: (row.first_prompt as string | null) ?? null,
|
|
tools_used: JSON.parse(row.tools_used as string),
|
|
skills_invoked: JSON.parse(row.skills_invoked as string),
|
|
outcome: row.outcome as SessionMeta['outcome'],
|
|
permission_mode: (row.permission_mode as string | null) ?? null,
|
|
file_size: row.file_size as number,
|
|
indexed_at: row.indexed_at as string,
|
|
}
|
|
}
|
|
|
|
function buildWhere(f: ListFilters): { sql: string; params: Record<string, unknown> } {
|
|
const parts: string[] = []
|
|
const params: Record<string, unknown> = {}
|
|
if (f.days) {
|
|
const cutoff = new Date(Date.now() - f.days * 86400_000).toISOString()
|
|
parts.push('started_at >= @cutoff')
|
|
params.cutoff = cutoff
|
|
}
|
|
if (f.project) {
|
|
parts.push('project_slug = @project')
|
|
params.project = f.project
|
|
}
|
|
if (f.tool) {
|
|
parts.push("tools_used LIKE @toolLike")
|
|
params.toolLike = `%"${f.tool}"%`
|
|
}
|
|
if (f.skill) {
|
|
parts.push('skills_invoked LIKE @skillLike')
|
|
params.skillLike = `%"${f.skill}"%`
|
|
}
|
|
if (f.q) {
|
|
parts.push('first_prompt LIKE @q')
|
|
params.q = `%${f.q}%`
|
|
}
|
|
return {
|
|
sql: parts.length ? 'WHERE ' + parts.join(' AND ') : '',
|
|
params,
|
|
}
|
|
}
|
|
|
|
export function openSessionsDb(dbPath: string): SessionsDb {
|
|
mkdirSync(dirname(dbPath), { recursive: true })
|
|
const db = new Database(dbPath)
|
|
db.pragma('journal_mode = WAL')
|
|
db.pragma('synchronous = NORMAL')
|
|
db.exec(SCHEMA)
|
|
|
|
const upsertStmt = db.prepare(`
|
|
INSERT INTO sessions (session_id, project_path, project_slug, jsonl_path, started_at, ended_at,
|
|
duration_sec, event_count, user_messages, assistant_msgs, tool_calls, first_prompt,
|
|
tools_used, skills_invoked, outcome, permission_mode, file_size, indexed_at)
|
|
VALUES (@session_id, @project_path, @project_slug, @jsonl_path, @started_at, @ended_at,
|
|
@duration_sec, @event_count, @user_messages, @assistant_msgs, @tool_calls, @first_prompt,
|
|
@tools_used, @skills_invoked, @outcome, @permission_mode, @file_size, @indexed_at)
|
|
ON CONFLICT(session_id) DO UPDATE SET
|
|
project_path = excluded.project_path,
|
|
project_slug = excluded.project_slug,
|
|
jsonl_path = excluded.jsonl_path,
|
|
started_at = excluded.started_at,
|
|
ended_at = excluded.ended_at,
|
|
duration_sec = excluded.duration_sec,
|
|
event_count = excluded.event_count,
|
|
user_messages = excluded.user_messages,
|
|
assistant_msgs = excluded.assistant_msgs,
|
|
tool_calls = excluded.tool_calls,
|
|
first_prompt = excluded.first_prompt,
|
|
tools_used = excluded.tools_used,
|
|
skills_invoked = excluded.skills_invoked,
|
|
outcome = excluded.outcome,
|
|
permission_mode = excluded.permission_mode,
|
|
file_size = excluded.file_size,
|
|
indexed_at = excluded.indexed_at
|
|
`)
|
|
|
|
const upsertManyTxn = db.transaction((metas: SessionMeta[]) => {
|
|
for (const meta of metas) {
|
|
upsertStmt.run({
|
|
...meta,
|
|
tools_used: JSON.stringify(meta.tools_used),
|
|
skills_invoked: JSON.stringify(meta.skills_invoked),
|
|
})
|
|
}
|
|
})
|
|
|
|
return {
|
|
upsertSession(meta) {
|
|
upsertStmt.run({
|
|
...meta,
|
|
tools_used: JSON.stringify(meta.tools_used),
|
|
skills_invoked: JSON.stringify(meta.skills_invoked),
|
|
})
|
|
},
|
|
upsertMany(metas) {
|
|
upsertManyTxn(metas)
|
|
},
|
|
listSessions(filters) {
|
|
const { sql, params } = buildWhere(filters)
|
|
const limit = filters.limit ?? 50
|
|
const offset = filters.offset ?? 0
|
|
const rows = db
|
|
.prepare(`SELECT * FROM sessions ${sql} ORDER BY started_at DESC LIMIT @limit OFFSET @offset`)
|
|
.all({ ...params, limit, offset }) as Record<string, unknown>[]
|
|
return rows.map(rowToMeta)
|
|
},
|
|
countSessions(filters) {
|
|
const { sql, params } = buildWhere(filters)
|
|
const row = db.prepare(`SELECT COUNT(*) as c FROM sessions ${sql}`).get(params) as { c: number }
|
|
return row.c
|
|
},
|
|
getSession(id) {
|
|
const row = db.prepare('SELECT * FROM sessions WHERE session_id = ?').get(id) as Record<string, unknown> | undefined
|
|
return row ? rowToMeta(row) : null
|
|
},
|
|
deleteByJsonlPath(path) {
|
|
db.prepare('DELETE FROM sessions WHERE jsonl_path = ?').run(path)
|
|
},
|
|
upsertPattern(p: PatternRecord) {
|
|
db.prepare(`
|
|
INSERT INTO patterns (detected_at, week_iso, pattern_key, title, description,
|
|
severity, metric_value, sample_session_ids, affected_count, consecutive_weeks)
|
|
VALUES (@detected_at, @week_iso, @pattern_key, @title, @description,
|
|
@severity, @metric_value, @sample_session_ids, @affected_count, @consecutive_weeks)
|
|
ON CONFLICT(week_iso, pattern_key) DO UPDATE SET
|
|
detected_at = excluded.detected_at,
|
|
title = excluded.title,
|
|
description = excluded.description,
|
|
severity = excluded.severity,
|
|
metric_value = excluded.metric_value,
|
|
sample_session_ids = excluded.sample_session_ids,
|
|
affected_count = excluded.affected_count,
|
|
consecutive_weeks = excluded.consecutive_weeks
|
|
`).run({
|
|
detected_at: p.detected_at,
|
|
week_iso: p.week_iso,
|
|
pattern_key: p.pattern_key,
|
|
title: p.title,
|
|
description: p.description,
|
|
severity: p.severity,
|
|
metric_value: p.metric_value,
|
|
sample_session_ids: JSON.stringify(p.sample_session_ids),
|
|
affected_count: p.affected_count,
|
|
consecutive_weeks: p.consecutive_weeks,
|
|
})
|
|
},
|
|
getPatternsByWeek(week: string): PatternRecord[] {
|
|
const rows = db.prepare('SELECT * FROM patterns WHERE week_iso = ? ORDER BY severity DESC, affected_count DESC').all(week) as Record<string, unknown>[]
|
|
return rows.map((r) => ({
|
|
id: r.id as number,
|
|
detected_at: r.detected_at as string,
|
|
week_iso: r.week_iso as string,
|
|
pattern_key: r.pattern_key as string,
|
|
title: r.title as string,
|
|
description: r.description as string,
|
|
severity: r.severity as PatternRecord['severity'],
|
|
metric_value: (r.metric_value as number | null) ?? null,
|
|
sample_session_ids: JSON.parse(r.sample_session_ids as string),
|
|
affected_count: r.affected_count as number,
|
|
consecutive_weeks: r.consecutive_weeks as number,
|
|
}))
|
|
},
|
|
getConsecutiveWeeks(pattern_key: string, uptoWeek: string): number {
|
|
// Conta semanas consecutivas até uptoWeek (inclusive) em que pattern_key apareceu
|
|
const rows = db.prepare('SELECT DISTINCT week_iso FROM patterns WHERE pattern_key = ? AND week_iso <= ? ORDER BY week_iso DESC').all(pattern_key, uptoWeek) as { week_iso: string }[]
|
|
if (rows.length === 0) return 0
|
|
let count = 0
|
|
let cursor = uptoWeek
|
|
for (const row of rows) {
|
|
if (row.week_iso === cursor) {
|
|
count++
|
|
cursor = prevWeekIso(cursor)
|
|
} else {
|
|
break
|
|
}
|
|
}
|
|
return count
|
|
},
|
|
rawDb(): Database.Database {
|
|
return db
|
|
},
|
|
close() {
|
|
db.close()
|
|
},
|
|
}
|
|
}
|
|
|
|
/** Calcula semana ISO anterior (YYYY-Www). */
|
|
export function prevWeekIso(week: string): string {
|
|
const m = week.match(/^(\d{4})-W(\d{2})$/)
|
|
if (!m) return week
|
|
const year = parseInt(m[1], 10)
|
|
const w = parseInt(m[2], 10)
|
|
if (w > 1) return `${year}-W${String(w - 1).padStart(2, '0')}`
|
|
// Semana 1 → última semana do ano anterior (52 ou 53)
|
|
const prevYear = year - 1
|
|
const last = weeksInYear(prevYear)
|
|
return `${prevYear}-W${String(last).padStart(2, '0')}`
|
|
}
|
|
|
|
function weeksInYear(year: number): number {
|
|
// ISO: ano tem 53 semanas se 1 Jan é quinta ou (ano bissexto e 1 Jan é quarta)
|
|
const jan1 = new Date(Date.UTC(year, 0, 1)).getUTCDay()
|
|
const isLeap = (year % 4 === 0 && year % 100 !== 0) || year % 400 === 0
|
|
if (jan1 === 4 || (isLeap && jan1 === 3)) return 53
|
|
return 52
|
|
}
|