mirror of
https://github.com/garrytan/gstack.git
synced 2026-05-01 19:25:10 +02:00
feat: Session Intelligence Layer — /checkpoint + /health + context recovery (v0.15.0.0) (#733)
* feat: session timeline binaries (gstack-timeline-log + gstack-timeline-read) New binaries for the Session Intelligence Layer. gstack-timeline-log appends JSONL events to ~/.gstack/projects/$SLUG/timeline.jsonl. gstack-timeline-read reads, filters, and formats timeline data for /retro consumption. Timeline is local-only project intelligence, never sent anywhere. Always-on regardless of telemetry setting. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> * feat: preamble context recovery + timeline events + predictive suggestions Layers 1-3 of the Session Intelligence Layer: - Timeline start/complete events injected into every skill via preamble - Context recovery (tier 2+): lists recent CEO plans, checkpoints, reviews - Cross-session injection: LAST_SESSION and LATEST_CHECKPOINT for branch - Predictive skill suggestion from recent timeline patterns - Welcome back message synthesis - Routing rules for /checkpoint and /health Timeline writes are NOT gated by telemetry (local project intelligence). Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> * feat: /checkpoint + /health skills (Layers 4-5) /checkpoint: save/resume/list working state snapshots. Supports cross-branch listing for Conductor workspace handoff. Session duration tracking. /health: code quality scorekeeper. Wraps project tools (tsc, biome, knip, shellcheck, tests), computes composite 0-10 score, tracks trends over time. Auto-detects tools or reads from CLAUDE.md ## Health Stack. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> * chore: regenerate SKILL.md files + add timeline tests 9 timeline tests (all passing) mirroring learnings.test.ts pattern. All 34 SKILL.md files regenerated with new preamble (context recovery, timeline events, routing rules for /checkpoint and /health). 
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> * chore: bump version and changelog (v0.15.0.0) Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> * docs: update self-learning roadmap post-Session Intelligence R1-R3 marked shipped with actual versions. R4 becomes Adaptive Ceremony (trust as separate policy engine, scope-aware, gradual degradation). R5 becomes /autoship (resumable state machine, not linear chain). R6-R7 unbundled from old R5. Added State Systems reference, Risk Register (Codex-reviewed), and validation metrics for R4. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> * test: E2E tests for Session Intelligence (timeline, recovery, checkpoint) 3 gate-tier E2E tests: - timeline-event-flow: binary data flow round-trip (no LLM) - context-recovery-artifacts: seeded artifacts appear in preamble - checkpoint-save-resume: checkpoint file created with YAML frontmatter Also fixes package.json version sync (0.14.6.0 → 0.15.0.0). Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> --------- Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -0,0 +1,154 @@
|
||||
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
|
||||
import { execSync, ExecSyncOptionsWithStringEncoding } from 'child_process';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
|
||||
const ROOT = path.resolve(import.meta.dir, '..');
|
||||
const BIN = path.join(ROOT, 'bin');
|
||||
|
||||
// Scratch GSTACK_HOME for the current test (created in beforeEach, removed in afterEach).
let tmpDir: string;
// "<tmpDir>/projects" — parent directory of the per-project slug dirs the binaries write.
let slugDir: string;
|
||||
|
||||
function runLog(input: string, opts: { expectFail?: boolean } = {}): { stdout: string; exitCode: number } {
|
||||
const execOpts: ExecSyncOptionsWithStringEncoding = {
|
||||
cwd: ROOT,
|
||||
env: { ...process.env, GSTACK_HOME: tmpDir },
|
||||
encoding: 'utf-8',
|
||||
timeout: 15000,
|
||||
};
|
||||
try {
|
||||
const stdout = execSync(`${BIN}/gstack-timeline-log '${input.replace(/'/g, "'\\''")}'`, execOpts).trim();
|
||||
return { stdout, exitCode: 0 };
|
||||
} catch (e: any) {
|
||||
if (opts.expectFail) {
|
||||
return { stdout: e.stderr?.toString() || '', exitCode: e.status || 1 };
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
function runRead(args: string = ''): string {
|
||||
const execOpts: ExecSyncOptionsWithStringEncoding = {
|
||||
cwd: ROOT,
|
||||
env: { ...process.env, GSTACK_HOME: tmpDir },
|
||||
encoding: 'utf-8',
|
||||
timeout: 15000,
|
||||
};
|
||||
try {
|
||||
return execSync(`${BIN}/gstack-timeline-read ${args}`, execOpts).trim();
|
||||
} catch {
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
// Give every test a fresh, isolated GSTACK_HOME under the OS temp dir.
beforeEach(() => {
  tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'gstack-timeline-'));
  slugDir = path.join(tmpDir, 'projects');
  fs.mkdirSync(slugDir, { recursive: true });
});
|
||||
|
||||
// Remove the scratch home created in beforeEach.
afterEach(() => {
  fs.rmSync(tmpDir, { recursive: true, force: true });
});
|
||||
|
||||
function findTimelineFile(): string | null {
|
||||
const projectDirs = fs.readdirSync(slugDir);
|
||||
if (projectDirs.length === 0) return null;
|
||||
const f = path.join(slugDir, projectDirs[0], 'timeline.jsonl');
|
||||
return fs.existsSync(f) ? f : null;
|
||||
}
|
||||
|
||||
describe('gstack-timeline-log', () => {
|
||||
test('accepts valid JSON and appends to timeline.jsonl', () => {
|
||||
const input = '{"skill":"review","event":"started","branch":"main"}';
|
||||
const result = runLog(input);
|
||||
expect(result.exitCode).toBe(0);
|
||||
|
||||
const f = findTimelineFile();
|
||||
expect(f).not.toBeNull();
|
||||
const content = fs.readFileSync(f!, 'utf-8').trim();
|
||||
const parsed = JSON.parse(content);
|
||||
expect(parsed.skill).toBe('review');
|
||||
expect(parsed.event).toBe('started');
|
||||
expect(parsed.branch).toBe('main');
|
||||
});
|
||||
|
||||
test('rejects invalid JSON with exit 0 (non-blocking)', () => {
|
||||
const result = runLog('not json at all');
|
||||
expect(result.exitCode).toBe(0);
|
||||
|
||||
// No file should be created
|
||||
const f = findTimelineFile();
|
||||
expect(f).toBeNull();
|
||||
});
|
||||
|
||||
test('injects timestamp when ts field is missing', () => {
|
||||
const input = '{"skill":"review","event":"started","branch":"main"}';
|
||||
runLog(input);
|
||||
|
||||
const f = findTimelineFile();
|
||||
expect(f).not.toBeNull();
|
||||
const parsed = JSON.parse(fs.readFileSync(f!, 'utf-8').trim());
|
||||
expect(parsed.ts).toBeDefined();
|
||||
expect(new Date(parsed.ts).getTime()).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test('preserves timestamp when ts field is present', () => {
|
||||
const input = '{"skill":"review","event":"completed","branch":"main","ts":"2025-06-15T10:00:00Z"}';
|
||||
runLog(input);
|
||||
|
||||
const f = findTimelineFile();
|
||||
expect(f).not.toBeNull();
|
||||
const parsed = JSON.parse(fs.readFileSync(f!, 'utf-8').trim());
|
||||
expect(parsed.ts).toBe('2025-06-15T10:00:00Z');
|
||||
});
|
||||
|
||||
test('validates required fields (skill, event) - exits 0 if missing skill', () => {
|
||||
const result = runLog('{"event":"started","branch":"main"}');
|
||||
expect(result.exitCode).toBe(0);
|
||||
|
||||
const f = findTimelineFile();
|
||||
expect(f).toBeNull();
|
||||
});
|
||||
|
||||
test('validates required fields (skill, event) - exits 0 if missing event', () => {
|
||||
const result = runLog('{"skill":"review","branch":"main"}');
|
||||
expect(result.exitCode).toBe(0);
|
||||
|
||||
const f = findTimelineFile();
|
||||
expect(f).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('gstack-timeline-read', () => {
|
||||
test('returns empty output for missing file (exit 0)', () => {
|
||||
const output = runRead();
|
||||
expect(output).toBe('');
|
||||
});
|
||||
|
||||
test('filters by --branch', () => {
|
||||
runLog(JSON.stringify({ skill: 'review', event: 'completed', branch: 'feature-a', outcome: 'approved', ts: '2026-03-28T10:00:00Z' }));
|
||||
runLog(JSON.stringify({ skill: 'ship', event: 'completed', branch: 'feature-b', outcome: 'merged', ts: '2026-03-28T11:00:00Z' }));
|
||||
|
||||
const output = runRead('--branch feature-a');
|
||||
expect(output).toContain('review');
|
||||
expect(output).not.toContain('feature-b');
|
||||
});
|
||||
|
||||
test('limits output with --limit', () => {
|
||||
for (let i = 0; i < 5; i++) {
|
||||
runLog(JSON.stringify({ skill: 'review', event: 'completed', branch: 'main', outcome: 'approved', ts: `2026-03-2${i}T10:00:00Z` }));
|
||||
}
|
||||
|
||||
const unlimited = runRead('--limit 20');
|
||||
const limited = runRead('--limit 2');
|
||||
|
||||
// Count event lines (lines starting with "- ")
|
||||
const unlimitedEvents = unlimited.split('\n').filter(l => l.startsWith('- ')).length;
|
||||
const limitedEvents = limited.split('\n').filter(l => l.startsWith('- ')).length;
|
||||
|
||||
expect(unlimitedEvents).toBe(5);
|
||||
expect(limitedEvents).toBe(2);
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user