mirror of
https://github.com/garrytan/gstack.git
synced 2026-05-07 22:16:52 +02:00
fix: resolve merge conflict with main, bump to v0.15.2.0
Main advanced to v0.15.1.0 (Session Intelligence + Design Without Shotgun). Our voice triggers branch bumped to v0.15.2.0 on top. Conflicts resolved: - CHANGELOG.md: placed our v0.15.2.0 entry above main's v0.15.1.0 and v0.15.0.0 - VERSION/package.json: bumped to 0.15.2.0 - design-html/SKILL.md.tmpl: took main's updated description + kept our voice-triggers - design-html/SKILL.md: regenerated from resolved template Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -1969,13 +1969,43 @@ describe('setup script validation', () => {
|
||||
expect(fnBody).toContain('gstack*');
|
||||
});
|
||||
|
||||
test('link_claude_skill_dirs creates relative symlinks', () => {
|
||||
// Claude links should be relative: ln -snf "gstack/$dir_name"
|
||||
// Uses dir_name (not skill_name) because symlink target must point to the physical directory
|
||||
test('link_claude_skill_dirs creates real directories with absolute SKILL.md symlinks', () => {
|
||||
// Claude links should be real directories with absolute SKILL.md symlinks
|
||||
// to ensure Claude Code discovers them as top-level skills (not nested under gstack/)
|
||||
const fnStart = setupContent.indexOf('link_claude_skill_dirs()');
|
||||
const fnEnd = setupContent.indexOf('}', setupContent.indexOf('linked[@]}', fnStart));
|
||||
const fnBody = setupContent.slice(fnStart, fnEnd);
|
||||
expect(fnBody).toContain('ln -snf "gstack/$dir_name"');
|
||||
expect(fnBody).toContain('mkdir -p "$target"');
|
||||
expect(fnBody).toContain('ln -snf "$gstack_dir/$dir_name/SKILL.md" "$target/SKILL.md"');
|
||||
});
|
||||
|
||||
// REGRESSION: cleanup functions must handle both old symlinks AND new real-directory pattern
|
||||
test('cleanup functions handle real directories with symlinked SKILL.md', () => {
|
||||
// cleanup_old_claude_symlinks must detect and remove real dirs with SKILL.md symlinks
|
||||
const cleanupOldStart = setupContent.indexOf('cleanup_old_claude_symlinks()');
|
||||
const cleanupOldEnd = setupContent.indexOf('}', setupContent.indexOf('cleaned up old', cleanupOldStart));
|
||||
const cleanupOldBody = setupContent.slice(cleanupOldStart, cleanupOldEnd);
|
||||
expect(cleanupOldBody).toContain('-d "$old_target"');
|
||||
expect(cleanupOldBody).toContain('-L "$old_target/SKILL.md"');
|
||||
expect(cleanupOldBody).toContain('rm -rf "$old_target"');
|
||||
|
||||
// cleanup_prefixed_claude_symlinks must also handle the new pattern
|
||||
const cleanupPrefixedStart = setupContent.indexOf('cleanup_prefixed_claude_symlinks()');
|
||||
const cleanupPrefixedEnd = setupContent.indexOf('}', setupContent.indexOf('cleaned up prefixed', cleanupPrefixedStart));
|
||||
const cleanupPrefixedBody = setupContent.slice(cleanupPrefixedStart, cleanupPrefixedEnd);
|
||||
expect(cleanupPrefixedBody).toContain('-d "$prefixed_target"');
|
||||
expect(cleanupPrefixedBody).toContain('-L "$prefixed_target/SKILL.md"');
|
||||
expect(cleanupPrefixedBody).toContain('rm -rf "$prefixed_target"');
|
||||
});
|
||||
|
||||
// REGRESSION: link function must upgrade old directory symlinks
|
||||
test('link_claude_skill_dirs removes old directory symlinks before creating real dirs', () => {
|
||||
const fnStart = setupContent.indexOf('link_claude_skill_dirs()');
|
||||
const fnEnd = setupContent.indexOf('}', setupContent.indexOf('linked[@]}', fnStart));
|
||||
const fnBody = setupContent.slice(fnStart, fnEnd);
|
||||
// Must check for and remove old symlinks before mkdir
|
||||
expect(fnBody).toContain('if [ -L "$target" ]');
|
||||
expect(fnBody).toContain('rm -f "$target"');
|
||||
});
|
||||
|
||||
test('setup supports --host auto|claude|codex|kiro', () => {
|
||||
|
||||
@@ -107,6 +107,11 @@ export const E2E_TOUCHFILES: Record<string, string[]> = {
|
||||
// Learnings
|
||||
'learnings-show': ['learn/**', 'bin/gstack-learnings-search', 'bin/gstack-learnings-log', 'scripts/resolvers/learnings.ts'],
|
||||
|
||||
// Session Intelligence (timeline, context recovery, checkpoint)
|
||||
'timeline-event-flow': ['bin/gstack-timeline-log', 'bin/gstack-timeline-read'],
|
||||
'context-recovery-artifacts': ['scripts/resolvers/preamble.ts', 'bin/gstack-timeline-log', 'bin/gstack-slug', 'learn/**'],
|
||||
'checkpoint-save-resume': ['checkpoint/**', 'bin/gstack-slug'],
|
||||
|
||||
// Document-release
|
||||
'document-release': ['document-release/**'],
|
||||
|
||||
@@ -241,6 +246,11 @@ export const E2E_TIERS: Record<string, 'gate' | 'periodic'> = {
|
||||
'codex-offered-design-review': 'gate',
|
||||
'codex-offered-eng-review': 'gate',
|
||||
|
||||
// Session Intelligence — gate for data flow, periodic for agent integration
|
||||
'timeline-event-flow': 'gate', // Binary data flow (no LLM needed)
|
||||
'context-recovery-artifacts': 'gate', // Preamble reads seeded artifacts
|
||||
'checkpoint-save-resume': 'gate', // Checkpoint round-trip
|
||||
|
||||
// Ship — gate (end-to-end ship path)
|
||||
'ship-base-branch': 'gate',
|
||||
'ship-local-workflow': 'gate',
|
||||
|
||||
@@ -97,6 +97,173 @@ describe('gstack-relink (#578)', () => {
|
||||
expect(output).toContain('flat');
|
||||
});
|
||||
|
||||
// REGRESSION: unprefixed skills must be real directories, not symlinks (#761)
|
||||
// Claude Code auto-prefixes skills nested under a parent dir symlink.
|
||||
// e.g., `qa -> gstack/qa` gets discovered as "gstack-qa", not "qa".
|
||||
// The fix: create real directories with SKILL.md symlinks inside.
|
||||
test('unprefixed skills are real directories with SKILL.md symlinks, not dir symlinks', () => {
|
||||
setupMockInstall(['qa', 'ship', 'review', 'plan-ceo-review']);
|
||||
run(`${path.join(installDir, 'bin', 'gstack-config')} set skill_prefix false`);
|
||||
run(`${path.join(installDir, 'bin', 'gstack-relink')}`, {
|
||||
GSTACK_INSTALL_DIR: installDir,
|
||||
GSTACK_SKILLS_DIR: skillsDir,
|
||||
});
|
||||
for (const skill of ['qa', 'ship', 'review', 'plan-ceo-review']) {
|
||||
const skillPath = path.join(skillsDir, skill);
|
||||
const skillMdPath = path.join(skillPath, 'SKILL.md');
|
||||
// Must be a real directory, NOT a symlink
|
||||
expect(fs.lstatSync(skillPath).isDirectory()).toBe(true);
|
||||
expect(fs.lstatSync(skillPath).isSymbolicLink()).toBe(false);
|
||||
// Must contain a SKILL.md that IS a symlink
|
||||
expect(fs.existsSync(skillMdPath)).toBe(true);
|
||||
expect(fs.lstatSync(skillMdPath).isSymbolicLink()).toBe(true);
|
||||
// The SKILL.md symlink must point to the source skill's SKILL.md
|
||||
const target = fs.readlinkSync(skillMdPath);
|
||||
expect(target).toContain(skill);
|
||||
expect(target).toEndWith('/SKILL.md');
|
||||
}
|
||||
});
|
||||
|
||||
// Same invariant for prefixed mode
|
||||
test('prefixed skills are real directories with SKILL.md symlinks, not dir symlinks', () => {
|
||||
setupMockInstall(['qa', 'ship']);
|
||||
run(`${path.join(installDir, 'bin', 'gstack-config')} set skill_prefix true`);
|
||||
run(`${path.join(installDir, 'bin', 'gstack-relink')}`, {
|
||||
GSTACK_INSTALL_DIR: installDir,
|
||||
GSTACK_SKILLS_DIR: skillsDir,
|
||||
});
|
||||
for (const skill of ['gstack-qa', 'gstack-ship']) {
|
||||
const skillPath = path.join(skillsDir, skill);
|
||||
const skillMdPath = path.join(skillPath, 'SKILL.md');
|
||||
expect(fs.lstatSync(skillPath).isDirectory()).toBe(true);
|
||||
expect(fs.lstatSync(skillPath).isSymbolicLink()).toBe(false);
|
||||
expect(fs.lstatSync(skillMdPath).isSymbolicLink()).toBe(true);
|
||||
}
|
||||
});
|
||||
|
||||
// Upgrade: old directory symlinks get replaced with real directories
|
||||
test('upgrades old directory symlinks to real directories', () => {
|
||||
setupMockInstall(['qa', 'ship']);
|
||||
// Simulate old behavior: create directory symlinks (the old pattern)
|
||||
fs.symlinkSync(path.join(installDir, 'qa'), path.join(skillsDir, 'qa'));
|
||||
fs.symlinkSync(path.join(installDir, 'ship'), path.join(skillsDir, 'ship'));
|
||||
// Verify they start as symlinks
|
||||
expect(fs.lstatSync(path.join(skillsDir, 'qa')).isSymbolicLink()).toBe(true);
|
||||
|
||||
run(`${path.join(installDir, 'bin', 'gstack-config')} set skill_prefix false`);
|
||||
run(`${path.join(installDir, 'bin', 'gstack-relink')}`, {
|
||||
GSTACK_INSTALL_DIR: installDir,
|
||||
GSTACK_SKILLS_DIR: skillsDir,
|
||||
});
|
||||
|
||||
// After relink: must be real directories, not symlinks
|
||||
expect(fs.lstatSync(path.join(skillsDir, 'qa')).isSymbolicLink()).toBe(false);
|
||||
expect(fs.lstatSync(path.join(skillsDir, 'qa')).isDirectory()).toBe(true);
|
||||
expect(fs.lstatSync(path.join(skillsDir, 'qa', 'SKILL.md')).isSymbolicLink()).toBe(true);
|
||||
});
|
||||
|
||||
// FIRST INSTALL: --no-prefix must create ONLY flat names, zero gstack-* pollution
|
||||
test('first install --no-prefix: only flat names exist, zero gstack-* entries', () => {
|
||||
setupMockInstall(['qa', 'ship', 'review', 'plan-ceo-review', 'gstack-upgrade']);
|
||||
// Simulate first install: no saved config, pass --no-prefix equivalent
|
||||
run(`${path.join(installDir, 'bin', 'gstack-config')} set skill_prefix false`);
|
||||
run(`${path.join(installDir, 'bin', 'gstack-relink')}`, {
|
||||
GSTACK_INSTALL_DIR: installDir,
|
||||
GSTACK_SKILLS_DIR: skillsDir,
|
||||
});
|
||||
// Enumerate everything in skills dir
|
||||
const entries = fs.readdirSync(skillsDir);
|
||||
// Expected: qa, ship, review, plan-ceo-review, gstack-upgrade (its real name)
|
||||
expect(entries.sort()).toEqual(['gstack-upgrade', 'plan-ceo-review', 'qa', 'review', 'ship']);
|
||||
// No gstack-qa, gstack-ship, gstack-review, gstack-plan-ceo-review
|
||||
const leaked = entries.filter(e => e.startsWith('gstack-') && e !== 'gstack-upgrade');
|
||||
expect(leaked).toEqual([]);
|
||||
});
|
||||
|
||||
// FIRST INSTALL: --prefix must create ONLY gstack-* names, zero flat-name pollution
|
||||
test('first install --prefix: only gstack-* entries exist, zero flat names', () => {
|
||||
setupMockInstall(['qa', 'ship', 'review', 'plan-ceo-review', 'gstack-upgrade']);
|
||||
run(`${path.join(installDir, 'bin', 'gstack-config')} set skill_prefix true`);
|
||||
run(`${path.join(installDir, 'bin', 'gstack-relink')}`, {
|
||||
GSTACK_INSTALL_DIR: installDir,
|
||||
GSTACK_SKILLS_DIR: skillsDir,
|
||||
});
|
||||
const entries = fs.readdirSync(skillsDir);
|
||||
// Expected: gstack-qa, gstack-ship, gstack-review, gstack-plan-ceo-review, gstack-upgrade
|
||||
expect(entries.sort()).toEqual([
|
||||
'gstack-plan-ceo-review', 'gstack-qa', 'gstack-review', 'gstack-ship', 'gstack-upgrade',
|
||||
]);
|
||||
// No unprefixed qa, ship, review, plan-ceo-review
|
||||
const leaked = entries.filter(e => !e.startsWith('gstack-'));
|
||||
expect(leaked).toEqual([]);
|
||||
});
|
||||
|
||||
// FIRST INSTALL: non-TTY (no saved config, piped stdin) defaults to flat names
|
||||
test('non-TTY first install defaults to flat names via relink', () => {
|
||||
setupMockInstall(['qa', 'ship']);
|
||||
// Don't set any config — simulate fresh install
|
||||
// gstack-relink reads config; on fresh install config returns empty → defaults to false
|
||||
run(`${path.join(installDir, 'bin', 'gstack-relink')}`, {
|
||||
GSTACK_INSTALL_DIR: installDir,
|
||||
GSTACK_SKILLS_DIR: skillsDir,
|
||||
});
|
||||
const entries = fs.readdirSync(skillsDir);
|
||||
// Should be flat names (relink defaults to false when config returns empty)
|
||||
expect(entries.sort()).toEqual(['qa', 'ship']);
|
||||
});
|
||||
|
||||
// SWITCH: prefix → no-prefix must clean up ALL gstack-* entries
|
||||
test('switching prefix to no-prefix removes all gstack-* entries completely', () => {
|
||||
setupMockInstall(['qa', 'ship', 'review', 'plan-ceo-review', 'gstack-upgrade']);
|
||||
// Start in prefix mode
|
||||
run(`${path.join(installDir, 'bin', 'gstack-config')} set skill_prefix true`);
|
||||
run(`${path.join(installDir, 'bin', 'gstack-relink')}`, {
|
||||
GSTACK_INSTALL_DIR: installDir,
|
||||
GSTACK_SKILLS_DIR: skillsDir,
|
||||
});
|
||||
let entries = fs.readdirSync(skillsDir);
|
||||
expect(entries.filter(e => !e.startsWith('gstack-'))).toEqual([]);
|
||||
|
||||
// Switch to no-prefix
|
||||
run(`${path.join(installDir, 'bin', 'gstack-config')} set skill_prefix false`);
|
||||
run(`${path.join(installDir, 'bin', 'gstack-relink')}`, {
|
||||
GSTACK_INSTALL_DIR: installDir,
|
||||
GSTACK_SKILLS_DIR: skillsDir,
|
||||
});
|
||||
entries = fs.readdirSync(skillsDir);
|
||||
// Only flat names + gstack-upgrade (its real name)
|
||||
expect(entries.sort()).toEqual(['gstack-upgrade', 'plan-ceo-review', 'qa', 'review', 'ship']);
|
||||
const leaked = entries.filter(e => e.startsWith('gstack-') && e !== 'gstack-upgrade');
|
||||
expect(leaked).toEqual([]);
|
||||
});
|
||||
|
||||
// SWITCH: no-prefix → prefix must clean up ALL flat entries
|
||||
test('switching no-prefix to prefix removes all flat entries completely', () => {
|
||||
setupMockInstall(['qa', 'ship', 'review', 'gstack-upgrade']);
|
||||
// Start in no-prefix mode
|
||||
run(`${path.join(installDir, 'bin', 'gstack-config')} set skill_prefix false`);
|
||||
run(`${path.join(installDir, 'bin', 'gstack-relink')}`, {
|
||||
GSTACK_INSTALL_DIR: installDir,
|
||||
GSTACK_SKILLS_DIR: skillsDir,
|
||||
});
|
||||
let entries = fs.readdirSync(skillsDir);
|
||||
expect(entries.filter(e => e.startsWith('gstack-') && e !== 'gstack-upgrade')).toEqual([]);
|
||||
|
||||
// Switch to prefix
|
||||
run(`${path.join(installDir, 'bin', 'gstack-config')} set skill_prefix true`);
|
||||
run(`${path.join(installDir, 'bin', 'gstack-relink')}`, {
|
||||
GSTACK_INSTALL_DIR: installDir,
|
||||
GSTACK_SKILLS_DIR: skillsDir,
|
||||
});
|
||||
entries = fs.readdirSync(skillsDir);
|
||||
// Only gstack-* names
|
||||
expect(entries.sort()).toEqual([
|
||||
'gstack-qa', 'gstack-review', 'gstack-ship', 'gstack-upgrade',
|
||||
]);
|
||||
const leaked = entries.filter(e => !e.startsWith('gstack-'));
|
||||
expect(leaked).toEqual([]);
|
||||
});
|
||||
|
||||
// Test 13: cleans stale symlinks from opposite mode
|
||||
test('cleans up stale symlinks from opposite mode', () => {
|
||||
setupMockInstall(['qa', 'ship']);
|
||||
@@ -158,6 +325,66 @@ describe('gstack-relink (#578)', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('upgrade migrations', () => {
|
||||
const MIGRATIONS_DIR = path.join(ROOT, 'gstack-upgrade', 'migrations');
|
||||
|
||||
test('migrations directory exists', () => {
|
||||
expect(fs.existsSync(MIGRATIONS_DIR)).toBe(true);
|
||||
});
|
||||
|
||||
test('all migration scripts are executable and parse without syntax errors', () => {
|
||||
const scripts = fs.readdirSync(MIGRATIONS_DIR).filter(f => f.endsWith('.sh'));
|
||||
expect(scripts.length).toBeGreaterThan(0);
|
||||
for (const script of scripts) {
|
||||
const fullPath = path.join(MIGRATIONS_DIR, script);
|
||||
// Must be executable
|
||||
const stat = fs.statSync(fullPath);
|
||||
expect(stat.mode & 0o111).toBeGreaterThan(0);
|
||||
// Must parse without syntax errors (bash -n is a syntax check, doesn't execute)
|
||||
const result = execSync(`bash -n "${fullPath}" 2>&1`, { encoding: 'utf-8', timeout: 5000 });
|
||||
// bash -n outputs nothing on success
|
||||
}
|
||||
});
|
||||
|
||||
test('migration filenames follow v{VERSION}.sh pattern', () => {
|
||||
const scripts = fs.readdirSync(MIGRATIONS_DIR).filter(f => f.endsWith('.sh'));
|
||||
for (const script of scripts) {
|
||||
expect(script).toMatch(/^v\d+\.\d+\.\d+\.\d+\.sh$/);
|
||||
}
|
||||
});
|
||||
|
||||
test('v0.15.2.0 migration runs gstack-relink', () => {
|
||||
const content = fs.readFileSync(path.join(MIGRATIONS_DIR, 'v0.15.2.0.sh'), 'utf-8');
|
||||
expect(content).toContain('gstack-relink');
|
||||
});
|
||||
|
||||
test('v0.15.2.0 migration fixes stale directory symlinks', () => {
|
||||
setupMockInstall(['qa', 'ship', 'review']);
|
||||
// Simulate old state: directory symlinks (pre-v0.15.2.0 pattern)
|
||||
fs.symlinkSync(path.join(installDir, 'qa'), path.join(skillsDir, 'qa'));
|
||||
fs.symlinkSync(path.join(installDir, 'ship'), path.join(skillsDir, 'ship'));
|
||||
fs.symlinkSync(path.join(installDir, 'review'), path.join(skillsDir, 'review'));
|
||||
// Set no-prefix mode
|
||||
run(`${path.join(installDir, 'bin', 'gstack-config')} set skill_prefix false`);
|
||||
// Verify old state: symlinks
|
||||
expect(fs.lstatSync(path.join(skillsDir, 'qa')).isSymbolicLink()).toBe(true);
|
||||
|
||||
// Run the migration (it calls gstack-relink internally)
|
||||
run(`bash ${path.join(MIGRATIONS_DIR, 'v0.15.2.0.sh')}`, {
|
||||
GSTACK_INSTALL_DIR: installDir,
|
||||
GSTACK_SKILLS_DIR: skillsDir,
|
||||
});
|
||||
|
||||
// After migration: real directories with SKILL.md symlinks
|
||||
for (const skill of ['qa', 'ship', 'review']) {
|
||||
const skillPath = path.join(skillsDir, skill);
|
||||
expect(fs.lstatSync(skillPath).isSymbolicLink()).toBe(false);
|
||||
expect(fs.lstatSync(skillPath).isDirectory()).toBe(true);
|
||||
expect(fs.lstatSync(path.join(skillPath, 'SKILL.md')).isSymbolicLink()).toBe(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('gstack-patch-names (#620/#578)', () => {
|
||||
// Helper to read name: from SKILL.md frontmatter
|
||||
function readSkillName(skillDir: string): string | null {
|
||||
|
||||
@@ -0,0 +1,268 @@
|
||||
import { describe, test, expect, beforeAll, afterAll } from 'bun:test';
|
||||
import { runSkillTest } from './helpers/session-runner';
|
||||
import {
|
||||
ROOT, runId, evalsEnabled,
|
||||
describeIfSelected, testConcurrentIfSelected,
|
||||
copyDirSync, logCost, recordE2E,
|
||||
createEvalCollector, finalizeEvalCollector,
|
||||
} from './helpers/e2e-helpers';
|
||||
import { spawnSync } from 'child_process';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
|
||||
// Shared collector for eval results; finalized once in afterAll.
const evalCollector = createEvalCollector('e2e-session-intelligence');
|
||||
|
||||
// --- Session Intelligence E2E ---
|
||||
// Tests the core contract: timeline events flow in, context recovery flows out,
|
||||
// checkpoints round-trip.
|
||||
|
||||
// Session Intelligence E2E suite. Only runs when one of the listed touchfile
// keys is selected. Tests 2 and 3 drive a real LLM session via runSkillTest,
// so they assert on loose output signals rather than exact text.
describeIfSelected('Session Intelligence E2E', [
  'timeline-event-flow', 'context-recovery-artifacts', 'checkpoint-save-resume',
], () => {
  let workDir: string;     // temp git repo that acts as the "project"
  let gstackHome: string;  // isolated GSTACK_HOME inside workDir
  let slug: string;        // project slug derived from workDir basename

  beforeAll(() => {
    workDir = fs.mkdtempSync(path.join(os.tmpdir(), 'skill-e2e-session-intel-'));
    gstackHome = path.join(workDir, '.gstack-home');

    // Init git repo
    const run = (cmd: string, args: string[]) =>
      spawnSync(cmd, args, { cwd: workDir, stdio: 'pipe', timeout: 5000 });
    run('git', ['init', '-b', 'main']);
    run('git', ['config', 'user.email', 'test@test.com']);
    run('git', ['config', 'user.name', 'Test']);
    fs.writeFileSync(path.join(workDir, 'app.ts'), 'console.log("hello");\n');
    run('git', ['add', '.']);
    run('git', ['commit', '-m', 'initial']);

    // Copy bin scripts needed by timeline and checkpoint
    const binDir = path.join(workDir, 'bin');
    fs.mkdirSync(binDir, { recursive: true });
    for (const script of [
      'gstack-timeline-log', 'gstack-timeline-read', 'gstack-slug',
      'gstack-learnings-log', 'gstack-learnings-search',
    ]) {
      const src = path.join(ROOT, 'bin', script);
      if (fs.existsSync(src)) {
        fs.copyFileSync(src, path.join(binDir, script));
        // Ensure the copies are executable regardless of source permissions.
        fs.chmodSync(path.join(binDir, script), 0o755);
      }
    }

    // Compute slug (same logic as gstack-slug without git remote)
    slug = path.basename(workDir).replace(/[^a-zA-Z0-9._-]/g, '');
  });

  afterAll(() => {
    // Best-effort cleanup; never fail the suite on teardown.
    try { fs.rmSync(workDir, { recursive: true, force: true }); } catch {}
    finalizeEvalCollector(evalCollector);
  });

  // --- Test 1: Timeline event flow ---
  // Write a timeline event via gstack-timeline-log, read it back via gstack-timeline-read.
  // This is the foundational data flow test: events go in, they come back out.
  // No LLM involved — the binaries are exercised directly via spawnSync.
  testConcurrentIfSelected('timeline-event-flow', async () => {
    const projectDir = path.join(gstackHome, 'projects', slug);
    fs.mkdirSync(projectDir, { recursive: true });

    // Write two events via the binary
    const logBin = path.join(workDir, 'bin', 'gstack-timeline-log');
    const readBin = path.join(workDir, 'bin', 'gstack-timeline-read');
    const env = { ...process.env, GSTACK_HOME: gstackHome };
    const opts = { cwd: workDir, env, stdio: 'pipe' as const, timeout: 10000 };

    spawnSync(logBin, [JSON.stringify({
      skill: 'review', event: 'started', branch: 'main', session: 'test-1',
    })], opts);
    spawnSync(logBin, [JSON.stringify({
      skill: 'review', event: 'completed', branch: 'main',
      outcome: 'success', duration_s: 120, session: 'test-1',
    })], opts);

    // Read via gstack-timeline-read
    const readResult = spawnSync(readBin, ['--branch', 'main'], opts);
    const readOutput = readResult.stdout?.toString() || '';

    // Verify timeline.jsonl exists and has content
    const timelinePath = path.join(projectDir, 'timeline.jsonl');
    expect(fs.existsSync(timelinePath)).toBe(true);

    const lines = fs.readFileSync(timelinePath, 'utf-8').trim().split('\n');
    expect(lines.length).toBe(2);

    // Verify the events are valid JSON with expected fields
    const event1 = JSON.parse(lines[0]);
    expect(event1.skill).toBe('review');
    expect(event1.event).toBe('started');
    // ts is injected by the binary when absent from the input payload
    expect(event1.ts).toBeDefined();

    const event2 = JSON.parse(lines[1]);
    expect(event2.event).toBe('completed');
    expect(event2.outcome).toBe('success');

    // Verify gstack-timeline-read output includes the events
    expect(readOutput).toContain('review');

    // No LLM session here, so record a synthetic zero-cost result.
    recordE2E(evalCollector, 'timeline event flow', 'Session Intelligence E2E', {
      output: readOutput,
      exitReason: 'success',
      duration: 0,
      toolCalls: [],
      browseErrors: [],
      costEstimate: { inputChars: 0, outputChars: 0, estimatedTokens: 0, estimatedCost: 0, turnsUsed: 0 },
      transcript: [],
      model: 'direct',
      firstResponseMs: 0,
      maxInterTurnMs: 0,
    }, { passed: true });

    console.log(`Timeline flow: ${lines.length} events written, read output ${readOutput.length} chars`);
  }, 30_000);

  // --- Test 2: Context recovery with seeded artifacts ---
  // Seed CEO plans and timeline events, then run a skill and verify the preamble
  // outputs "RECENT ARTIFACTS" and "LAST_SESSION".
  testConcurrentIfSelected('context-recovery-artifacts', async () => {
    const projectDir = path.join(gstackHome, 'projects', slug);
    fs.mkdirSync(path.join(projectDir, 'ceo-plans'), { recursive: true });

    // Seed a CEO plan
    fs.writeFileSync(
      path.join(projectDir, 'ceo-plans', '2026-03-31-test-feature.md'),
      '---\nstatus: ACTIVE\n---\n# CEO Plan: Test Feature\nThis is a test plan.\n',
    );

    // Seed timeline with a completed event on main branch
    const timelineEntry = JSON.stringify({
      ts: new Date().toISOString(),
      skill: 'ship',
      event: 'completed',
      branch: 'main',
      outcome: 'success',
      duration_s: 60,
      session: 'prior-session',
    });
    fs.writeFileSync(path.join(projectDir, 'timeline.jsonl'), timelineEntry + '\n');

    // Copy the /learn skill (lightweight, tier-2 skill that runs context recovery)
    copyDirSync(path.join(ROOT, 'learn'), path.join(workDir, 'learn'));

    const result = await runSkillTest({
      prompt: `Read the file learn/SKILL.md for instructions.

Run the context recovery check — the preamble should show recent artifacts.

IMPORTANT:
- Use GSTACK_HOME="${gstackHome}" as an environment variable when running bin scripts.
- The bin scripts are at ./bin/ (relative to this directory), not at ~/.claude/skills/gstack/bin/.
  Replace any references to ~/.claude/skills/gstack/bin/ with ./bin/ when running commands.
- Do NOT use AskUserQuestion.
- Just run the preamble bash block and report what you see.
- Look for "RECENT ARTIFACTS" and "LAST_SESSION" in the output.`,
      workingDirectory: workDir,
      maxTurns: 10,
      allowedTools: ['Bash', 'Read', 'Write', 'Edit', 'Grep', 'Glob'],
      timeout: 120_000,
      testName: 'context-recovery-artifacts',
      runId,
    });

    logCost('context recovery', result);

    const output = result.output.toLowerCase();

    // The preamble should have found the seeded artifacts.
    // LLM output is nondeterministic, so each signal accepts alternates.
    const foundArtifacts = output.includes('recent artifacts') || output.includes('ceo-plans');
    const foundLastSession = output.includes('last_session') || output.includes('ship');
    const foundTimeline = output.includes('timeline') || output.includes('completed');

    // At least the CEO plan or timeline should be visible
    const foundCount = [foundArtifacts, foundLastSession, foundTimeline].filter(Boolean).length;

    // error_max_turns is tolerated: the preamble may have run before the cap hit.
    const exitOk = ['success', 'error_max_turns'].includes(result.exitReason);

    recordE2E(evalCollector, 'context recovery', 'Session Intelligence E2E', result, {
      passed: exitOk && foundCount >= 1,
    });

    expect(exitOk).toBe(true);
    expect(foundCount).toBeGreaterThanOrEqual(1);

    console.log(`Context recovery: artifacts=${foundArtifacts}, lastSession=${foundLastSession}, timeline=${foundTimeline}`);
  }, 180_000);

  // --- Test 3: Checkpoint save and resume ---
  // Run /checkpoint save via claude -p, verify file created. Then run /checkpoint resume
  // and verify it reads the checkpoint back.
  testConcurrentIfSelected('checkpoint-save-resume', async () => {
    const projectDir = path.join(gstackHome, 'projects', slug);
    fs.mkdirSync(path.join(projectDir, 'checkpoints'), { recursive: true });

    // Copy the /checkpoint skill
    copyDirSync(path.join(ROOT, 'checkpoint'), path.join(workDir, 'checkpoint'));

    // Add a staged change so /checkpoint has something to capture
    fs.writeFileSync(path.join(workDir, 'feature.ts'), 'export function newFeature() { return true; }\n');
    spawnSync('git', ['add', 'feature.ts'], { cwd: workDir, stdio: 'pipe', timeout: 5000 });

    // Extract the checkpoint save section from the skill template.
    // If '## Resume' precedes '## Save' (or is absent), take through end-of-file.
    const full = fs.readFileSync(path.join(ROOT, 'checkpoint', 'SKILL.md'), 'utf-8');
    const saveStart = full.indexOf('## Save');
    const resumeStart = full.indexOf('## Resume');
    const saveSection = full.slice(saveStart, resumeStart > saveStart ? resumeStart : undefined);

    const result = await runSkillTest({
      prompt: `You are testing the /checkpoint skill. Follow these instructions to save a checkpoint.

${saveSection.slice(0, 2000)}

IMPORTANT:
- Use GSTACK_HOME="${gstackHome}" as an environment variable when running bin scripts.
- The bin scripts are at ./bin/ (relative to this directory), not at ~/.claude/skills/gstack/bin/.
  Replace any references to ~/.claude/skills/gstack/bin/ with ./bin/ when running commands.
- Save the checkpoint to ${projectDir}/checkpoints/ with a filename like "20260401-test-checkpoint.md".
- Include YAML frontmatter with status, branch, and timestamp.
- Include a summary of what's being worked on (you can see from git status).
- Do NOT use AskUserQuestion.`,
      workingDirectory: workDir,
      maxTurns: 10,
      allowedTools: ['Bash', 'Read', 'Write', 'Edit', 'Grep', 'Glob'],
      timeout: 120_000,
      testName: 'checkpoint-save-resume',
      runId,
    });

    logCost('checkpoint save', result);

    // Check that a checkpoint file was created
    const checkpointDir = path.join(projectDir, 'checkpoints');
    const checkpointFiles = fs.existsSync(checkpointDir)
      ? fs.readdirSync(checkpointDir).filter(f => f.endsWith('.md'))
      : [];

    const exitOk = ['success', 'error_max_turns'].includes(result.exitReason);
    const checkpointCreated = checkpointFiles.length > 0;

    let checkpointContent = '';
    if (checkpointCreated) {
      checkpointContent = fs.readFileSync(path.join(checkpointDir, checkpointFiles[0]), 'utf-8');
    }

    // Verify checkpoint has expected structure
    const hasYamlFrontmatter = checkpointContent.includes('---') && checkpointContent.includes('status:');
    const hasBranch = checkpointContent.includes('branch:') || checkpointContent.includes('main');

    recordE2E(evalCollector, 'checkpoint save-resume', 'Session Intelligence E2E', result, {
      passed: exitOk && checkpointCreated && hasYamlFrontmatter,
    });

    expect(exitOk).toBe(true);
    expect(checkpointCreated).toBe(true);
    expect(hasYamlFrontmatter).toBe(true);

    console.log(`Checkpoint: ${checkpointFiles.length} files created, YAML frontmatter: ${hasYamlFrontmatter}, branch: ${hasBranch}`);
  }, 180_000);
});
|
||||
@@ -0,0 +1,154 @@
|
||||
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
|
||||
import { execSync, ExecSyncOptionsWithStringEncoding } from 'child_process';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
|
||||
// Repo root (this test file lives one level below it) and the bin directory
// holding the gstack-timeline-log / gstack-timeline-read binaries under test.
const ROOT = path.resolve(import.meta.dir, '..');
const BIN = path.join(ROOT, 'bin');

// Per-test temp GSTACK_HOME and its projects/ subdirectory (see beforeEach).
let tmpDir: string;
let slugDir: string;
|
||||
|
||||
function runLog(input: string, opts: { expectFail?: boolean } = {}): { stdout: string; exitCode: number } {
|
||||
const execOpts: ExecSyncOptionsWithStringEncoding = {
|
||||
cwd: ROOT,
|
||||
env: { ...process.env, GSTACK_HOME: tmpDir },
|
||||
encoding: 'utf-8',
|
||||
timeout: 15000,
|
||||
};
|
||||
try {
|
||||
const stdout = execSync(`${BIN}/gstack-timeline-log '${input.replace(/'/g, "'\\''")}'`, execOpts).trim();
|
||||
return { stdout, exitCode: 0 };
|
||||
} catch (e: any) {
|
||||
if (opts.expectFail) {
|
||||
return { stdout: e.stderr?.toString() || '', exitCode: e.status || 1 };
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
function runRead(args: string = ''): string {
|
||||
const execOpts: ExecSyncOptionsWithStringEncoding = {
|
||||
cwd: ROOT,
|
||||
env: { ...process.env, GSTACK_HOME: tmpDir },
|
||||
encoding: 'utf-8',
|
||||
timeout: 15000,
|
||||
};
|
||||
try {
|
||||
return execSync(`${BIN}/gstack-timeline-read ${args}`, execOpts).trim();
|
||||
} catch {
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'gstack-timeline-'));
|
||||
slugDir = path.join(tmpDir, 'projects');
|
||||
fs.mkdirSync(slugDir, { recursive: true });
|
||||
});
|
||||
|
||||
afterEach(() => {
  // Remove the per-test GSTACK_HOME; force avoids errors if already gone.
  fs.rmSync(tmpDir, { recursive: true, force: true });
});
|
||||
|
||||
function findTimelineFile(): string | null {
|
||||
const projectDirs = fs.readdirSync(slugDir);
|
||||
if (projectDirs.length === 0) return null;
|
||||
const f = path.join(slugDir, projectDirs[0], 'timeline.jsonl');
|
||||
return fs.existsSync(f) ? f : null;
|
||||
}
|
||||
|
||||
// Contract tests for gstack-timeline-log: the binary is deliberately
// non-blocking — bad input exits 0 and writes nothing, so a broken logger
// can never break a skill run.
describe('gstack-timeline-log', () => {
  test('accepts valid JSON and appends to timeline.jsonl', () => {
    const input = '{"skill":"review","event":"started","branch":"main"}';
    const result = runLog(input);
    expect(result.exitCode).toBe(0);

    // The binary creates the project dir and timeline.jsonl itself.
    const f = findTimelineFile();
    expect(f).not.toBeNull();
    const content = fs.readFileSync(f!, 'utf-8').trim();
    const parsed = JSON.parse(content);
    expect(parsed.skill).toBe('review');
    expect(parsed.event).toBe('started');
    expect(parsed.branch).toBe('main');
  });

  test('rejects invalid JSON with exit 0 (non-blocking)', () => {
    // No expectFail: the binary is expected to exit 0 even on garbage input.
    const result = runLog('not json at all');
    expect(result.exitCode).toBe(0);

    // No file should be created
    const f = findTimelineFile();
    expect(f).toBeNull();
  });

  test('injects timestamp when ts field is missing', () => {
    const input = '{"skill":"review","event":"started","branch":"main"}';
    runLog(input);

    const f = findTimelineFile();
    expect(f).not.toBeNull();
    const parsed = JSON.parse(fs.readFileSync(f!, 'utf-8').trim());
    expect(parsed.ts).toBeDefined();
    // ts must be a parseable date (epoch > 0), not just any string.
    expect(new Date(parsed.ts).getTime()).toBeGreaterThan(0);
  });

  test('preserves timestamp when ts field is present', () => {
    const input = '{"skill":"review","event":"completed","branch":"main","ts":"2025-06-15T10:00:00Z"}';
    runLog(input);

    const f = findTimelineFile();
    expect(f).not.toBeNull();
    const parsed = JSON.parse(fs.readFileSync(f!, 'utf-8').trim());
    // Caller-supplied ts must pass through unchanged.
    expect(parsed.ts).toBe('2025-06-15T10:00:00Z');
  });

  test('validates required fields (skill, event) - exits 0 if missing skill', () => {
    const result = runLog('{"event":"started","branch":"main"}');
    expect(result.exitCode).toBe(0);

    // Rejected payloads write nothing.
    const f = findTimelineFile();
    expect(f).toBeNull();
  });

  test('validates required fields (skill, event) - exits 0 if missing event', () => {
    const result = runLog('{"skill":"review","branch":"main"}');
    expect(result.exitCode).toBe(0);

    const f = findTimelineFile();
    expect(f).toBeNull();
  });
});
|
||||
|
||||
// Contract tests for gstack-timeline-read: missing data is not an error,
// and --branch / --limit shape the output.
describe('gstack-timeline-read', () => {
  test('returns empty output for missing file (exit 0)', () => {
    // NOTE(review): runRead maps ANY non-zero exit to '', so this asserts
    // empty output but cannot distinguish exit 0 from a crash — confirm
    // whether exit-code coverage is needed here.
    const output = runRead();
    expect(output).toBe('');
  });

  test('filters by --branch', () => {
    runLog(JSON.stringify({ skill: 'review', event: 'completed', branch: 'feature-a', outcome: 'approved', ts: '2026-03-28T10:00:00Z' }));
    runLog(JSON.stringify({ skill: 'ship', event: 'completed', branch: 'feature-b', outcome: 'merged', ts: '2026-03-28T11:00:00Z' }));

    // Only the feature-a event should surface.
    const output = runRead('--branch feature-a');
    expect(output).toContain('review');
    expect(output).not.toContain('feature-b');
  });

  test('limits output with --limit', () => {
    // Five events with distinct timestamps (2026-03-20 .. 2026-03-24).
    for (let i = 0; i < 5; i++) {
      runLog(JSON.stringify({ skill: 'review', event: 'completed', branch: 'main', outcome: 'approved', ts: `2026-03-2${i}T10:00:00Z` }));
    }

    const unlimited = runRead('--limit 20');
    const limited = runRead('--limit 2');

    // Count event lines (lines starting with "- ")
    const unlimitedEvents = unlimited.split('\n').filter(l => l.startsWith('- ')).length;
    const limitedEvents = limited.split('\n').filter(l => l.startsWith('- ')).length;

    expect(unlimitedEvents).toBe(5);
    expect(limitedEvents).toBe(2);
  });
});
|
||||
Reference in New Issue
Block a user