mirror of
https://github.com/garrytan/gstack.git
synced 2026-05-02 11:45:20 +02:00
562a67503a
* feat: session timeline binaries (gstack-timeline-log + gstack-timeline-read) New binaries for the Session Intelligence Layer. gstack-timeline-log appends JSONL events to ~/.gstack/projects/$SLUG/timeline.jsonl. gstack-timeline-read reads, filters, and formats timeline data for /retro consumption. Timeline is local-only project intelligence, never sent anywhere. Always-on regardless of telemetry setting. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> * feat: preamble context recovery + timeline events + predictive suggestions Layers 1-3 of the Session Intelligence Layer: - Timeline start/complete events injected into every skill via preamble - Context recovery (tier 2+): lists recent CEO plans, checkpoints, reviews - Cross-session injection: LAST_SESSION and LATEST_CHECKPOINT for branch - Predictive skill suggestion from recent timeline patterns - Welcome back message synthesis - Routing rules for /checkpoint and /health Timeline writes are NOT gated by telemetry (local project intelligence). Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> * feat: /checkpoint + /health skills (Layers 4-5) /checkpoint: save/resume/list working state snapshots. Supports cross-branch listing for Conductor workspace handoff. Session duration tracking. /health: code quality scorekeeper. Wraps project tools (tsc, biome, knip, shellcheck, tests), computes composite 0-10 score, tracks trends over time. Auto-detects tools or reads from CLAUDE.md ## Health Stack. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> * chore: regenerate SKILL.md files + add timeline tests 9 timeline tests (all passing) mirroring learnings.test.ts pattern. All 34 SKILL.md files regenerated with new preamble (context recovery, timeline events, routing rules for /checkpoint and /health). 
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> * chore: bump version and changelog (v0.15.0.0) Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> * docs: update self-learning roadmap post-Session Intelligence R1-R3 marked shipped with actual versions. R4 becomes Adaptive Ceremony (trust as separate policy engine, scope-aware, gradual degradation). R5 becomes /autoship (resumable state machine, not linear chain). R6-R7 unbundled from old R5. Added State Systems reference, Risk Register (Codex-reviewed), and validation metrics for R4. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> * test: E2E tests for Session Intelligence (timeline, recovery, checkpoint) 3 gate-tier E2E tests: - timeline-event-flow: binary data flow round-trip (no LLM) - context-recovery-artifacts: seeded artifacts appear in preamble - checkpoint-save-resume: checkpoint file created with YAML frontmatter Also fixes package.json version sync (0.14.6.0 → 0.15.0.0). Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> --------- Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
95 lines
2.6 KiB
Bash
Executable File
95 lines
2.6 KiB
Bash
Executable File
#!/usr/bin/env bash
# gstack-timeline-read — read and format project timeline
# Usage: gstack-timeline-read [--since "7 days ago"] [--limit N] [--branch NAME]
#
# Session timeline: local-only, never sent anywhere.
# Reads ~/.gstack/projects/$SLUG/timeline.jsonl, filters, formats.
# Exit 0 silently if no timeline file exists.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"

# gstack-slug is expected to print shell assignments (at least SLUG=...)
# identifying the current project; evaluate them into this shell.
# NOTE(review): assumes its output is trusted shell code — confirm upstream.
eval "$("$SCRIPT_DIR/gstack-slug" 2>/dev/null)" || true

# If slug detection failed, SLUG is unset and — under `set -u` — every later
# "$SLUG" expansion would abort with a noisy "unbound variable" error.
# This tool is best-effort and silent, so degrade to a clean no-op instead.
[ -n "${SLUG:-}" ] || exit 0

GSTACK_HOME="${GSTACK_HOME:-$HOME/.gstack}"
SINCE=""
LIMIT=20
BRANCH=""

# Parse options. Each flag takes one value. A flag given without a value
# (e.g. a trailing "--since") falls back to the default instead of
# dereferencing an unbound $2, which would abort under `set -u`.
# Unknown arguments are ignored, as before.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --since)  SINCE="${2-}" ;;
    --limit)  LIMIT="${2-}" ;;
    --branch) BRANCH="${2-}" ;;
    *) shift; continue ;;
  esac
  shift                             # consume the flag itself
  { [[ $# -gt 0 ]] && shift; } || true  # consume its value, if present
done

# LIMIT is consumed by the formatter as a number; refuse anything else so a
# malformed or malicious value cannot leak into the embedded script.
[[ "$LIMIT" =~ ^[0-9]+$ ]] || LIMIT=20
# Per-project timeline lives under $GSTACK_HOME/projects/<slug>/.
TIMELINE_FILE="$GSTACK_HOME/projects/$SLUG/timeline.jsonl"

# No timeline yet: nothing to report — stay silent per the header contract.
[ -f "$TIMELINE_FILE" ] || exit 0
# Format the timeline with an embedded Bun script. Filter values are handed
# over via the environment rather than spliced into the JavaScript source:
# a --since/--branch value containing a quote can no longer break (or inject
# code into) the script. stdin is redirected directly from the timeline file
# (no useless cat). Any formatter failure degrades to silent success.
TL_SINCE="$SINCE" TL_BRANCH="$BRANCH" TL_LIMIT="$LIMIT" bun -e "
const lines = (await Bun.stdin.text()).trim().split('\n').filter(Boolean);
const since = process.env.TL_SINCE || '';
const branch = process.env.TL_BRANCH || '';
const limit = Number.parseInt(process.env.TL_LIMIT || '20', 10) || 20;

// Resolve a relative '--since' phrase like '7 days ago' to an epoch-ms
// cutoff; unknown units fall back to days. Unparseable input means no cutoff.
let sinceMs = 0;
if (since) {
  const match = since.match(/(\d+)\s*(day|hour|minute|week|month)s?\s*ago/i);
  if (match) {
    const n = Number.parseInt(match[1], 10);
    const unit = match[2].toLowerCase();
    const ms = { minute: 60000, hour: 3600000, day: 86400000, week: 604800000, month: 2592000000 };
    sinceMs = Date.now() - n * (ms[unit] || 86400000);
  }
}

// Parse JSONL, silently dropping malformed lines and filtered-out entries.
const entries = [];
for (const line of lines) {
  try {
    const e = JSON.parse(line);
    if (sinceMs && new Date(e.ts).getTime() < sinceMs) continue;
    if (branch && e.branch !== branch) continue;
    entries.push(e);
  } catch {}
}

if (entries.length === 0) process.exit(0);

// Take last N entries
const recent = entries.slice(-limit);

// Skill counts (completed events only)
const counts = {};
const branches = new Set();
for (const e of entries) {
  if (e.event === 'completed') {
    counts[e.skill] = (counts[e.skill] || 0) + 1;
  }
  if (e.branch) branches.add(e.branch);
}

// Output summary line, most-used skills first.
const countStr = Object.entries(counts)
  .sort((a, b) => b[1] - a[1])
  .map(([s, n]) => n + ' /' + s)
  .join(', ');

if (countStr) {
  console.log('TIMELINE: ' + countStr + ' across ' + branches.size + ' branch' + (branches.size !== 1 ? 'es' : ''));
}

// Output recent events
console.log('');
console.log('## Recent Events');
for (const e of recent) {
  const ts = (e.ts || '').replace('T', ' ').replace(/\.\d+Z$/, 'Z');
  const dur = e.duration_s ? ' (' + e.duration_s + 's)' : '';
  const outcome = e.outcome ? ' [' + e.outcome + ']' : '';
  console.log('- ' + ts + ' /' + e.skill + ' ' + e.event + outcome + dur + (e.branch ? ' on ' + e.branch : ''));
}
" < "$TIMELINE_FILE" 2>/dev/null || exit 0