Merge remote-tracking branch 'origin/garrytan/team-supabase-store' into garrytan/dev-mode

This commit is contained in:
Garry Tan
2026-03-16 00:22:05 -05:00
19 changed files with 1545 additions and 10 deletions
+38
View File
@@ -277,6 +277,44 @@
**Priority:** P3
**Depends on:** Browse sessions
## Team Sync
### Streaming parser for large session files
**What:** Replace readFileSync with readline/createReadStream for session files >10MB.
**Why:** Currently skip files >10MB. Long sessions (1000+ turns, 35MB) lose enrichment data (tools_used, full turn count).
**Context:** Current 10MB cap is defensive. Session files at `~/.claude/projects/{hash}/{sid}.jsonl` can be 35MB for marathon sessions. Streaming parser removes the cap while keeping memory usage constant.
**Effort:** S
**Priority:** P3
**Depends on:** Transcript sync (Phase 3)
### Session effectiveness scoring
**What:** Compute a 1-5 effectiveness score per session based on turns to achieve goal, tool diversity, whether code was shipped, and session duration.
**Why:** Enables `show sessions --best` and team-level AI effectiveness metrics. Raw data (tools_used, turns, duration, summary) already in Supabase after transcript sync.
**Context:** Year 2 roadmap item. Scoring heuristics need iteration. Could start with: fewer turns = more efficient, more tool diversity = better problem decomposition, shipped code (detected via git) = successful outcome.
**Effort:** M
**Priority:** P2
**Depends on:** Transcript sync (Phase 3)
### Weekly AI usage digest
**What:** Supabase edge function that runs weekly, aggregates session_transcripts + eval_runs, sends team summary to Slack/email.
**Why:** Passive team visibility without running commands. "Your team ran 47 sessions this week. Top tools: Edit(156), Bash(89). Sarah shipped 3 PRs via /ship."
**Context:** Design doc Phase 4 item. Requires Supabase edge functions + Slack/email integration. Transcript data from Phase 3 is the primary input alongside eval_runs.
**Effort:** L
**Priority:** P2
**Depends on:** Transcript sync (Phase 3), Supabase edge functions
## Infrastructure
### /setup-gstack-upload skill (S3 bucket)
+6 -1
View File
@@ -7,6 +7,7 @@
# gstack-sync test — validate full sync flow
# gstack-sync show [evals|ships|retros] — view team data
# gstack-sync push-{eval,retro,qa,ship,greptile} <file> — push data
# gstack-sync push-transcript — sync Claude session transcripts
# gstack-sync pull — pull team data to local cache
# gstack-sync drain — drain the offline queue
# gstack-sync logout — clear auth tokens
@@ -45,6 +46,9 @@ case "${1:-}" in
FILE="${2:?Usage: gstack-sync push-greptile <file.json>}"
exec bun run "$GSTACK_DIR/lib/cli-sync.ts" push-greptile "$FILE"
;;
push-transcript)
exec bun run "$GSTACK_DIR/lib/cli-sync.ts" push-transcript
;;
test)
exec bun run "$GSTACK_DIR/lib/cli-sync.ts" test
;;
@@ -67,12 +71,13 @@ case "${1:-}" in
echo " setup Interactive auth flow (opens browser)"
echo " status Show sync status (queue, cache, connection)"
echo " test Validate full sync flow (push + pull)"
echo " show [evals|ships|retros] View team data in terminal"
echo " show [evals|ships|retros|sessions] View team data in terminal"
echo " push-eval <file> Push eval result JSON to team store"
echo " push-retro <file> Push retro snapshot JSON"
echo " push-qa <file> Push QA report JSON"
echo " push-ship <file> Push ship log JSON"
echo " push-greptile <file> Push Greptile triage entry JSON"
echo " push-transcript Sync Claude session transcripts"
echo " pull Pull team data to local cache"
echo " drain Drain the offline sync queue"
echo " logout Clear auth tokens"
+118 -8
View File
@@ -4,10 +4,12 @@
*/
import * as fs from 'fs';
import { getTeamConfig, resolveSyncConfig, clearAuthTokens, isSyncConfigured } from './sync-config';
import * as path from 'path';
import { getTeamConfig, resolveSyncConfig, clearAuthTokens, isSyncConfigured, getSyncConfigPath } from './sync-config';
import { runDeviceAuth } from './auth';
import { pushEvalRun, pushRetro, pushQAReport, pushShipLog, pushGreptileTriage, pushHeartbeat, pullTable, drainQueue, getSyncStatus } from './sync';
import { readJSON } from './util';
import { pushEvalRun, pushRetro, pushQAReport, pushShipLog, pushGreptileTriage, pushHeartbeat, pullTable, pullTranscripts, drainQueue, getSyncStatus } from './sync';
import { readJSON, getGitRoot, atomicWriteJSON } from './util';
import { syncTranscripts } from './transcript-sync';
// --- Main (only when run directly, not imported) ---
@@ -35,6 +37,9 @@ async function main() {
case 'push-greptile':
await cmdPushFile('greptile', process.argv[3]);
break;
case 'push-transcript':
await cmdPushTranscript();
break;
case 'test':
await cmdTest();
break;
@@ -57,11 +62,43 @@ async function main() {
}
async function cmdSetup(): Promise<void> {
const team = getTeamConfig();
let team = getTeamConfig();
// If no .gstack-sync.json, interactively create one
if (!team) {
console.error('No .gstack-sync.json found in project root.');
console.error('Ask your team admin to set up team sync first.');
process.exit(1);
const root = getGitRoot();
if (!root) {
console.error('Not in a git repository. Run this from your project root.');
process.exit(1);
}
console.log('No .gstack-sync.json found. Setting up team sync.\n');
const rl = require('readline').createInterface({ input: process.stdin, output: process.stdout });
const ask = (q: string): Promise<string> => new Promise(resolve => rl.question(q, resolve));
const supabaseUrl = (await ask('Supabase URL (e.g., https://xyz.supabase.co): ')).trim();
if (!supabaseUrl) { rl.close(); console.error('URL is required.'); process.exit(1); }
const supabaseAnonKey = (await ask('Supabase anon key (from Project Settings > API): ')).trim();
if (!supabaseAnonKey) { rl.close(); console.error('Anon key is required.'); process.exit(1); }
const teamSlug = (await ask('Team slug (short name, e.g., my-team): ')).trim();
if (!teamSlug) { rl.close(); console.error('Team slug is required.'); process.exit(1); }
rl.close();
const configPath = path.join(root, '.gstack-sync.json');
const config = { supabase_url: supabaseUrl, supabase_anon_key: supabaseAnonKey, team_slug: teamSlug };
fs.writeFileSync(configPath, JSON.stringify(config, null, 2) + '\n');
console.log(`\nCreated ${configPath}`);
console.log('Commit this file to your repo so team members get it automatically.\n');
team = getTeamConfig();
if (!team) {
console.error('Failed to read created config. Check the file.');
process.exit(1);
}
}
console.log(`Team: ${team.team_slug}`);
@@ -148,7 +185,7 @@ async function cmdPull(): Promise<void> {
process.exit(1);
}
const tables = ['eval_runs', 'retro_snapshots', 'qa_reports', 'ship_logs', 'greptile_triage'];
const tables = ['eval_runs', 'retro_snapshots', 'qa_reports', 'ship_logs', 'greptile_triage', 'session_transcripts'];
let total = 0;
for (const table of tables) {
@@ -162,6 +199,26 @@ async function cmdPull(): Promise<void> {
console.log(`\nPulled ${total} total rows to local cache.`);
}
/**
 * CLI handler for `gstack-sync push-transcript`.
 * Exits 0 silently when sync isn't configured (the normal case for
 * non-team users); prints a hint when transcript sync is disabled.
 */
async function cmdPushTranscript(): Promise<void> {
  // Not configured at all — stay quiet; skills call this unconditionally.
  if (!isSyncConfigured()) {
    process.exit(0);
  }
  const config = resolveSyncConfig();
  if (!config?.syncTranscripts) {
    console.log('Transcript sync is disabled. Enable with: gstack-config set sync_transcripts true');
    process.exit(0);
  }
  const { pushed, errors } = await syncTranscripts();
  if (pushed > 0) {
    const noun = pushed === 1 ? 'session' : 'sessions';
    console.log(`Synced ${pushed} ${noun} to team store`);
  }
  if (errors > 0) {
    console.log(` (${errors} queued for retry)`);
  }
}
async function cmdDrain(): Promise<void> {
const result = await drainQueue();
console.log(`Queue drain: ${result.success} synced, ${result.failed} failed, ${result.remaining} remaining`);
@@ -352,6 +409,53 @@ export function formatShipTable(shipLogs: Record<string, unknown>[]): string {
return lines.join('\n');
}
/** Render the elapsed time between two ISO timestamps as a compact string ('<1m', '12m', '2h', '1h30m'). */
function formatDuration(startedAt: string, endedAt: string): string {
  const elapsed = Date.parse(endedAt) - Date.parse(startedAt);
  if (elapsed < 60_000) return '<1m';
  if (elapsed < 3_600_000) return `${Math.round(elapsed / 60_000)}m`;
  const hours = Math.floor(elapsed / 3_600_000);
  const minutes = Math.round((elapsed % 3_600_000) / 60_000);
  return minutes === 0 ? `${hours}h` : `${hours}h${minutes}m`;
}
/**
 * Format session transcripts as a fixed-width table. Pure function for testing.
 *
 * Shows at most 30 rows. Columns: Date, Repo, Summary, Turns, Dur, Tools.
 * Missing fields render as '—' (Turns renders blank when absent).
 */
export function formatSessionTable(sessions: Record<string, unknown>[]): string {
  if (sessions.length === 0) return 'No sessions yet.\n';
  const lines: string[] = [];
  lines.push('');
  lines.push('Recent Sessions');
  lines.push('═'.repeat(100));
  lines.push(
    ' ' +
    'Date'.padEnd(13) +
    'Repo'.padEnd(22) +
    'Summary'.padEnd(40) +
    'Turns'.padEnd(7) +
    'Dur'.padEnd(7) +
    'Tools'
  );
  lines.push('─'.repeat(100));
  for (const r of sessions.slice(0, 30)) {
    const date = String(r.started_at || r.created_at || '').slice(0, 10);
    const repo = String(r.repo_slug || '').slice(0, 20).padEnd(22);
    const summary = String(r.summary || '—').slice(0, 38).padEnd(40);
    // ?? (not ||) so a legitimate 0 turn count renders as '0' instead of blank
    const turns = String(r.total_turns ?? '').padEnd(7);
    const dur = (r.started_at && r.ended_at)
      ? formatDuration(String(r.started_at), String(r.ended_at)).padEnd(7)
      : '—'.padEnd(7);
    // Show at most the first 5 tool names to keep the row on one line
    const tools = Array.isArray(r.tools_used)
      ? (r.tools_used as string[]).slice(0, 5).join(', ')
      : '—';
    lines.push(` ${date.padEnd(13)}${repo}${summary}${turns}${dur}${tools}`);
  }
  lines.push('─'.repeat(100));
  lines.push('');
  return lines.join('\n');
}
async function cmdShow(args: string[]): Promise<void> {
if (!isSyncConfigured()) {
console.error('Sync not configured. Run gstack-sync setup first.');
@@ -386,6 +490,12 @@ async function cmdShow(args: string[]): Promise<void> {
return;
}
if (sub === 'sessions') {
const rows = await pullTranscripts();
console.log(formatSessionTable(rows));
return;
}
// Default: summary dashboard
const status = await getSyncStatus();
const [evalRuns, shipLogs, retroSnapshots] = await Promise.all([
+125
View File
@@ -0,0 +1,125 @@
/**
* LLM session summarization via raw fetch() to Anthropic Messages API.
*
* No SDK dependency — matches the Supabase raw-fetch pattern.
* Uses eval-cache for SHA-based caching (reruns are instant).
*
* Retry strategy (per Anthropic docs):
* 429: read retry-after header, wait that duration, max 2 retries
* 5xx: exponential backoff (1s, 2s), max 2 retries
* All other errors: return null immediately
*/
import { computeCacheKey, cacheRead, cacheWrite } from './eval-cache';
const ANTHROPIC_API_URL = 'https://api.anthropic.com/v1/messages';
const MODEL = 'claude-haiku-4-5-20251001';
const MAX_RETRIES = 2;
const TIMEOUT_MS = 10_000;
/**
* Generate a 1-sentence summary of a Claude Code session.
* Returns null if: no API key, API error, or malformed response.
*/
export async function summarizeSession(
messages: Array<{ display: string; timestamp: number }>,
toolsUsed: string[] | null,
): Promise<string | null> {
const apiKey = process.env.ANTHROPIC_API_KEY;
if (!apiKey) return null;
if (messages.length === 0) return null;
// Build cache key from session content
const contentForHash = messages.map(m => m.display).join('\n').slice(0, 10_000);
const toolsStr = toolsUsed ? toolsUsed.join(',') : '';
const cacheKey = computeCacheKey([], `summary:${MODEL}:${contentForHash}:${toolsStr}`);
const cached = cacheRead('transcript-summaries', cacheKey);
if (cached !== null && typeof cached === 'string') return cached;
const promptLines = messages.slice(0, 50).map(m =>
m.display.length > 200 ? m.display.slice(0, 200) + '...' : m.display,
);
const toolInfo = toolsUsed && toolsUsed.length > 0
? `\nTools used: ${toolsUsed.join(', ')}`
: '';
const userPrompt = `Summarize this Claude Code session in exactly one sentence. Focus on what the user accomplished, not the process. Be specific and concise.
User prompts (${messages.length} turns):
${promptLines.join('\n')}
${toolInfo}
Respond with ONLY the summary sentence, nothing else.`;
const body = JSON.stringify({
model: MODEL,
max_tokens: 150,
messages: [{ role: 'user', content: userPrompt }],
});
const summary = await fetchWithRetry(apiKey, body);
if (summary) {
cacheWrite('transcript-summaries', cacheKey, summary, { model: MODEL });
}
return summary;
}
/**
 * POST `body` to the Anthropic Messages API, retrying per the policy in the
 * file header: 429 honors the retry-after header, 5xx and network failures
 * use exponential backoff (1s, 2s), other 4xx return null immediately.
 * Returns the trimmed summary text (capped at 500 chars) or null.
 *
 * Fixes vs. previous version:
 *  - the 10s abort timer is cleared on ALL exits via `finally` (it previously
 *    leaked when fetch() threw, keeping the event loop alive up to TIMEOUT_MS);
 *    as a side effect the timeout now also covers res.json()
 *  - a non-numeric retry-after header no longer yields sleep(NaN) (which fired
 *    immediately); it falls back to 2 seconds
 */
async function fetchWithRetry(apiKey: string, body: string): Promise<string | null> {
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
    // Per-attempt abort timer; cleared in `finally` below.
    const controller = new AbortController();
    const timeout = setTimeout(() => controller.abort(), TIMEOUT_MS);
    try {
      const res = await fetch(ANTHROPIC_API_URL, {
        method: 'POST',
        signal: controller.signal,
        headers: {
          'Content-Type': 'application/json',
          'x-api-key': apiKey,
          'anthropic-version': '2023-06-01',
        },
        body,
      });
      if (res.ok) {
        const data = await res.json() as Record<string, unknown>;
        const content = (data.content as any[])?.[0];
        if (content?.type === 'text' && typeof content.text === 'string') {
          return content.text.trim().slice(0, 500);
        }
        return null; // 200 but unexpected response shape
      }
      // 429: use retry-after header (fall back to 2s if missing or non-numeric)
      if (res.status === 429 && attempt < MAX_RETRIES) {
        const retryAfter = parseInt(res.headers.get('retry-after') || '2', 10);
        await sleep((Number.isFinite(retryAfter) ? retryAfter : 2) * 1000);
        continue;
      }
      // 5xx: exponential backoff
      if (res.status >= 500 && attempt < MAX_RETRIES) {
        await sleep(1000 * Math.pow(2, attempt));
        continue;
      }
      // 4xx (not 429): don't retry
      return null;
    } catch {
      // Network error, timeout, abort — retry with backoff
      if (attempt < MAX_RETRIES) {
        await sleep(1000 * Math.pow(2, attempt));
        continue;
      }
      return null;
    } finally {
      clearTimeout(timeout);
    }
  }
  return null;
}
/** Resolve after `ms` milliseconds — backoff primitive for fetchWithRetry. */
function sleep(ms: number): Promise<void> {
  return new Promise<void>((resolve) => {
    setTimeout(() => resolve(), ms);
  });
}
+17
View File
@@ -213,6 +213,11 @@ export function pushHeartbeat(): Promise<boolean> {
return pushWithSync('sync_heartbeats', { hostname: os.hostname() }, { addRepoSlug: false });
}
/** Push a session transcript to Supabase. repo_slug is in the data (from getRemoteSlugForPath). */
export function pushTranscript(data: Record<string, unknown>): Promise<boolean> {
  // Delegates to the shared queue-backed push. addRepoSlug is off because the
  // transcript payload already carries its own repo_slug per session.
  return pushWithSync('session_transcripts', data, { addRepoSlug: false });
}
// --- Pull operations ---
/**
@@ -277,6 +282,18 @@ export async function pullRetros(opts?: { repoSlug?: string; limit?: number }):
return pullTable('retro_snapshots', parts.join('&'));
}
/** Pull team session transcripts (most recent first, default limit 50). Returns [] when sync is unconfigured. */
export async function pullTranscripts(opts?: { repoSlug?: string; limit?: number }): Promise<Record<string, unknown>[]> {
  const config = resolveSyncConfig();
  if (!config) return [];
  const query: string[] = [`team_id=eq.${config.auth.team_id}`, 'order=started_at.desc'];
  if (opts?.repoSlug) {
    query.push(`repo_slug=eq.${opts.repoSlug}`);
  }
  query.push(`limit=${opts?.limit || 50}`);
  return pullTable('session_transcripts', query.join('&'));
}
// --- Offline queue ---
function enqueue(entry: QueueEntry): void {
+395
View File
@@ -0,0 +1,395 @@
/**
 * Transcript sync — parse Claude Code session history, enrich with
 * tool usage and LLM summaries, push to Supabase.
 *
 * Data sources:
 *   ~/.claude/history.jsonl — user prompts (always available)
 *   ~/.claude/projects/{hash}/{sid}.jsonl — full transcript (when available, ~19%)
 *
 * Degradation cascade:
 *   history.jsonl only — user prompts, turn count, duration
 *   + session file — adds tools_used, full turn count
 *   + ANTHROPIC_API_KEY — adds 1-sentence LLM summary
 *
 * All operations are non-fatal. If any step fails, we degrade gracefully.
 */
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
import { readJSON, atomicWriteJSON, GSTACK_STATE_DIR } from './util';
import { resolveSyncConfig } from './sync-config';
import { pushTranscript } from './sync';
import { summarizeSession } from './llm-summarize';
const HISTORY_FILE = path.join(os.homedir(), '.claude', 'history.jsonl');
const CLAUDE_PROJECTS_DIR = path.join(os.homedir(), '.claude', 'projects');
const MARKER_FILE = path.join(GSTACK_STATE_DIR, 'transcript-sync-marker.json');
const MAX_HISTORY_SIZE = 50 * 1024 * 1024; // 50MB warn threshold
const MAX_SESSION_FILE_SIZE = 10 * 1024 * 1024; // 10MB skip threshold
const PUSH_CONCURRENCY = 10;
const SUMMARY_CONCURRENCY = 5;
// --- Types ---
/** One line of ~/.claude/history.jsonl — a single user prompt in a session. */
export interface HistoryEntry {
  display: string;                          // prompt text ('' when absent in the source line)
  pastedContents: Record<string, unknown>;  // pasted attachments ({} when absent)
  timestamp: number;                        // epoch millis — passed directly to new Date() downstream
  project: string;                          // project path the session ran in
  sessionId: string;                        // groups entries into sessions
}
/** Persisted incremental-sync state, stored at GSTACK_STATE_DIR/transcript-sync-marker.json. */
export interface TranscriptSyncMarker {
  // Per-session bookkeeping: how many turns were pushed and when (ISO timestamp).
  pushed_sessions: Record<string, { turns_pushed: number; last_push: string }>;
  // history.jsonl size at last sync; an unchanged size short-circuits re-parsing.
  last_file_size: number;
  // ISO timestamp of the last marker write.
  updated_at: string;
}
/** Enrichment extracted from a rich session file under ~/.claude/projects/. */
export interface SessionFileData {
  tools_used: string[];   // sorted, de-duplicated tool names from assistant tool_use blocks
  totalTurns: number;     // count of 'user' + 'assistant' records in the session file
}
/** Row shape pushed to the session_transcripts table. */
export interface TranscriptData {
  session_id: string;
  repo_slug: string;               // from getRemoteSlugForPath (git origin slug, or path basename)
  messages: Array<{ display: string; timestamp: number }>;  // prompts, each capped at 2000 chars
  total_turns: number;             // session-file count when available, else history entry count
  tools_used: string[] | null;     // null when no rich session file was found
  summary: string | null;          // LLM one-liner; null without ANTHROPIC_API_KEY or on API failure
  started_at: string;              // ISO timestamp of earliest history entry
  ended_at: string;                // ISO timestamp of latest history entry
}
// --- History parsing ---
/**
 * Parse ~/.claude/history.jsonl into HistoryEntry[].
 * Returns [] on ENOENT, EBUSY, EACCES, or any other error. Lines that are
 * blank, malformed JSON, or missing sessionId/timestamp/project are skipped.
 */
export function parseHistoryFile(historyPath: string = HISTORY_FILE): HistoryEntry[] {
  const entries: HistoryEntry[] = [];
  try {
    const { size } = fs.statSync(historyPath);
    if (size > MAX_HISTORY_SIZE) {
      console.error(`Warning: history.jsonl is ${(size / 1024 / 1024).toFixed(1)}MB — parsing may be slow.`);
    }
    const lines = fs.readFileSync(historyPath, 'utf-8').split('\n');
    for (const raw of lines) {
      if (!raw.trim()) continue;
      let record: Record<string, any>;
      try {
        record = JSON.parse(raw);
      } catch {
        continue; // skip malformed line
      }
      if (!record.sessionId || !record.timestamp || !record.project) continue;
      entries.push({
        display: typeof record.display === 'string' ? record.display : '',
        pastedContents: record.pastedContents || {},
        timestamp: record.timestamp,
        project: record.project,
        sessionId: record.sessionId,
      });
    }
    return entries;
  } catch {
    return [];
  }
}
/**
 * Group history entries by sessionId, preserving encounter order
 * both across sessions and within each session's entry list.
 */
export function groupBySession(entries: HistoryEntry[]): Map<string, HistoryEntry[]> {
  const bySession = new Map<string, HistoryEntry[]>();
  for (const entry of entries) {
    let bucket = bySession.get(entry.sessionId);
    if (!bucket) {
      bucket = [];
      bySession.set(entry.sessionId, bucket);
    }
    bucket.push(entry);
  }
  return bySession;
}
// --- Session file enrichment ---
/**
 * Find the rich session file for a given sessionId and project path.
 * Returns the file path, or null when missing, empty, too large, or when the
 * resolved path escapes the projects directory.
 *
 * Claude Code stores session files at:
 *   ~/.claude/projects/-{project.replaceAll('/', '-')}/{sessionId}.jsonl
 * NOTE(review): for an absolute projectPath this hash yields a double leading
 * dash ('--Users-…'); confirm against the on-disk directory names.
 */
export function findSessionFile(sessionId: string, projectPath: string): string | null {
  try {
    const projectHash = '-' + projectPath.replace(/\//g, '-');
    const sessionFile = path.join(CLAUDE_PROJECTS_DIR, projectHash, `${sessionId}.jsonl`);
    // Security: sessionId comes from history.jsonl and may contain '../'.
    // Compare against the base dir PLUS a separator — a bare prefix check
    // would accept sibling directories like '.../projects-evil'.
    const base = path.resolve(CLAUDE_PROJECTS_DIR);
    const resolved = path.resolve(sessionFile);
    if (!resolved.startsWith(base + path.sep)) return null;
    if (!fs.existsSync(sessionFile)) return null;
    const stat = fs.statSync(sessionFile);
    if (stat.size > MAX_SESSION_FILE_SIZE) return null; // skip large files
    if (stat.size === 0) return null;
    return sessionFile;
  } catch {
    return null;
  }
}
/**
 * Parse a session JSONL file to extract tool usage and turn counts.
 * Returns null when the file can't be read; malformed lines are skipped.
 * Turns count both 'user' and 'assistant' records; tool names come from
 * assistant tool_use content blocks, de-duplicated and sorted.
 */
export function parseSessionFile(sessionFilePath: string): SessionFileData | null {
  let raw: string;
  try {
    raw = fs.readFileSync(sessionFilePath, 'utf-8');
  } catch {
    return null;
  }
  const tools = new Set<string>();
  let turns = 0;
  for (const line of raw.split('\n')) {
    if (!line.trim()) continue;
    let record: Record<string, any>;
    try {
      record = JSON.parse(line);
    } catch {
      continue; // skip malformed line
    }
    if (record.type === 'user' || record.type === 'assistant') {
      turns++;
    }
    if (record.type === 'assistant' && Array.isArray(record.message?.content)) {
      for (const block of record.message.content) {
        if (block?.type === 'tool_use' && typeof block.name === 'string') {
          tools.add(block.name);
        }
      }
    }
  }
  return {
    tools_used: Array.from(tools).sort(),
    totalTurns: turns,
  };
}
// --- Repo slug resolution ---
const slugCache = new Map<string, string>();
/**
 * Get the repo slug for a project path. Memoized in slugCache.
 * Runs `git remote get-url origin` with cwd set to the project path and
 * extracts 'org/repo' from the remote URL. Falls back to path.basename()
 * when the path doesn't exist, git fails, or the URL doesn't match.
 */
export function getRemoteSlugForPath(projectPath: string): string {
  const memo = slugCache.get(projectPath);
  if (memo) return memo;

  let slug = path.basename(projectPath);
  try {
    if (fs.existsSync(projectPath)) {
      const { spawnSync } = require('child_process');
      const proc = spawnSync('git', ['remote', 'get-url', 'origin'], {
        cwd: projectPath,
        stdio: 'pipe',
        timeout: 3_000,
      });
      if (proc.status === 0 && proc.stdout) {
        const remoteUrl = proc.stdout.toString().trim();
        // Handles "git@github.com:org/repo.git" and "https://github.com/org/repo.git".
        const m = remoteUrl.match(/[/:]([\w.-]+\/[\w.-]+?)(?:\.git)?$/);
        if (m) slug = m[1];
      }
    }
  } catch {
    // git unavailable or spawn failed — keep the basename fallback
  }
  slugCache.set(projectPath, slug);
  return slug;
}
/** Clear the slug cache (for testing). */
export function clearSlugCache(): void {
  // Resets memoized git-remote lookups so subsequent calls re-run git.
  slugCache.clear();
}
// --- Transcript data assembly ---
/**
 * Convert a session's data into the shape expected by the session_transcripts table.
 * Turn count and tools prefer the rich session file when available, falling
 * back to the history entries alone. Assumes historyEntries is non-empty.
 */
export function sessionToTranscriptData(
  sessionId: string,
  historyEntries: HistoryEntry[],
  sessionFileData: SessionFileData | null,
  summary: string | null,
): TranscriptData {
  let earliest = Infinity;
  let latest = -Infinity;
  const messages: Array<{ display: string; timestamp: number }> = [];
  for (const entry of historyEntries) {
    if (entry.timestamp < earliest) earliest = entry.timestamp;
    if (entry.timestamp > latest) latest = entry.timestamp;
    messages.push({
      display: entry.display.slice(0, 2000), // cap stored prompt text at 2000 chars
      timestamp: entry.timestamp,
    });
  }
  const startedAt = new Date(earliest).toISOString();
  const endedAt = new Date(latest).toISOString();
  return {
    session_id: sessionId,
    repo_slug: getRemoteSlugForPath(historyEntries[0].project),
    messages,
    total_turns: sessionFileData?.totalTurns || historyEntries.length,
    tools_used: sessionFileData?.tools_used || null,
    summary,
    started_at: startedAt,
    ended_at: endedAt,
  };
}
// --- Sync marker ---
/** Read the transcript sync marker from disk; null when missing or unreadable. */
export function readSyncMarker(): TranscriptSyncMarker | null {
  return readJSON<TranscriptSyncMarker>(MARKER_FILE);
}
/** Persist the sync marker. Best-effort: failures are swallowed so sync never crashes the caller. */
export function writeSyncMarker(marker: TranscriptSyncMarker): void {
  try {
    fs.mkdirSync(GSTACK_STATE_DIR, { recursive: true }); // ensure the state dir exists
    atomicWriteJSON(MARKER_FILE, marker);                // atomic write avoids torn marker files
  } catch { /* non-fatal */ }
}
// --- Orchestrator ---
/**
 * Main sync function. Parses history, enriches sessions, pushes to Supabase.
 * Returns stats. All operations are non-fatal.
 *
 * Pipeline: read marker → parse history.jsonl → group by session → skip
 * already-pushed sessions → enrich from rich session files → LLM-summarize
 * (5 at a time) → push (10 at a time) → persist marker.
 *
 * Returns { pushed, skipped, errors } where `skipped` counts sessions already
 * fully pushed per the marker and `errors` counts failed pushes (which the
 * push layer queues for retry).
 */
export async function syncTranscripts(): Promise<{ pushed: number; skipped: number; errors: number }> {
  const config = resolveSyncConfig();
  if (!config || !config.syncTranscripts) {
    return { pushed: 0, skipped: 0, errors: 0 };
  }
  // Quick check: file size unchanged = nothing new
  // (NOTE(review): a rewrite that lands on the exact same byte size would be
  // missed — acceptable for an append-only history file.)
  let fileSize = 0;
  try {
    fileSize = fs.statSync(HISTORY_FILE).size;
  } catch {
    // No history file at all — nothing to sync.
    return { pushed: 0, skipped: 0, errors: 0 };
  }
  const marker = readSyncMarker() || {
    pushed_sessions: {},
    last_file_size: 0,
    updated_at: '',
  };
  if (fileSize === marker.last_file_size) {
    return { pushed: 0, skipped: 0, errors: 0 };
  }
  // Parse and group
  const entries = parseHistoryFile();
  if (entries.length === 0) return { pushed: 0, skipped: 0, errors: 0 };
  const sessions = groupBySession(entries);
  // Filter to sessions that need pushing: a session is re-pushed only when it
  // has MORE turns than were recorded at its last push.
  const toPush: Array<{ sessionId: string; entries: HistoryEntry[] }> = [];
  let skipped = 0;
  for (const [sessionId, sessionEntries] of sessions) {
    const prev = marker.pushed_sessions[sessionId];
    if (prev && prev.turns_pushed >= sessionEntries.length) {
      skipped++;
      continue;
    }
    toPush.push({ sessionId, entries: sessionEntries });
  }
  if (toPush.length === 0) {
    // Update file size even if nothing to push (prevents re-parsing)
    marker.last_file_size = fileSize;
    marker.updated_at = new Date().toISOString();
    writeSyncMarker(marker);
    return { pushed: 0, skipped, errors: 0 };
  }
  // Enrich with session files (tools_used + accurate turn counts, when found)
  const enriched = toPush.map(({ sessionId, entries: sessionEntries }) => {
    const sessionFile = findSessionFile(sessionId, sessionEntries[0].project);
    const sessionFileData = sessionFile ? parseSessionFile(sessionFile) : null;
    return { sessionId, entries: sessionEntries, sessionFileData };
  });
  // Summarize in batches (5-concurrent). allSettled so one failed summary
  // never blocks the batch — failures become summary: null.
  const withSummaries: Array<{
    sessionId: string;
    entries: HistoryEntry[];
    sessionFileData: SessionFileData | null;
    summary: string | null;
  }> = [];
  for (let i = 0; i < enriched.length; i += SUMMARY_CONCURRENCY) {
    const batch = enriched.slice(i, i + SUMMARY_CONCURRENCY);
    const summaries = await Promise.allSettled(
      batch.map(({ entries: sessionEntries, sessionFileData }) => {
        // Clip each prompt to 200 chars for the summarizer (full 2000-char
        // text is still pushed via sessionToTranscriptData).
        const messages = sessionEntries.map(e => ({
          display: e.display.length > 200 ? e.display.slice(0, 200) : e.display,
          timestamp: e.timestamp,
        }));
        return summarizeSession(messages, sessionFileData?.tools_used || null);
      }),
    );
    batch.forEach((item, idx) => {
      const result = summaries[idx];
      withSummaries.push({
        ...item,
        summary: result.status === 'fulfilled' ? result.value : null,
      });
    });
  }
  // Push in batches (10-concurrent)
  let pushed = 0;
  let errors = 0;
  for (let i = 0; i < withSummaries.length; i += PUSH_CONCURRENCY) {
    const batch = withSummaries.slice(i, i + PUSH_CONCURRENCY);
    const results = await Promise.allSettled(
      batch.map(({ sessionId, entries: sessionEntries, sessionFileData, summary }) => {
        const data = sessionToTranscriptData(sessionId, sessionEntries, sessionFileData, summary);
        return pushTranscript(data as Record<string, unknown>);
      }),
    );
    results.forEach((result, idx) => {
      const item = batch[idx];
      if (result.status === 'fulfilled' && result.value) {
        // Record success so the session isn't re-pushed until it grows.
        pushed++;
        marker.pushed_sessions[item.sessionId] = {
          turns_pushed: item.entries.length,
          last_push: new Date().toISOString(),
        };
      } else {
        // Failed pushes are counted here; the push layer queues them for retry.
        errors++;
      }
    });
  }
  // Update marker
  marker.last_file_size = fileSize;
  marker.updated_at = new Date().toISOString();
  writeSyncMarker(marker);
  return { pushed, skipped, errors };
}
+1
View File
@@ -319,6 +319,7 @@ $B snapshot -i -a -o "$REPORT_DIR/screenshots/issue-002.png"
}
QAEOF
~/.claude/skills/gstack/bin/gstack-sync push-qa .gstack/qa-reports/qa-sync.json 2>/dev/null && echo "Synced to team ✓" || true
~/.claude/skills/gstack/bin/gstack-sync push-transcript 2>/dev/null || true
```
Substitute actual values. Uses snake_case keys matching the Supabase schema.
+1
View File
@@ -245,6 +245,7 @@ $B snapshot -i -a -o "$REPORT_DIR/screenshots/issue-002.png"
}
QAEOF
~/.claude/skills/gstack/bin/gstack-sync push-qa .gstack/qa-reports/qa-sync.json 2>/dev/null && echo "Synced to team ✓" || true
~/.claude/skills/gstack/bin/gstack-sync push-transcript 2>/dev/null || true
```
Substitute actual values. Uses snake_case keys matching the Supabase schema.
+1
View File
@@ -406,6 +406,7 @@ Include backlog data in the JSON when TODOS.md exists:
After writing the JSON snapshot, sync to the team store (non-fatal, silent if not configured):
```bash
~/.claude/skills/gstack/bin/gstack-sync push-retro ".context/retros/${today}-${next}.json" 2>/dev/null && echo "Synced to team ✓" || true
~/.claude/skills/gstack/bin/gstack-sync push-transcript 2>/dev/null || true
```
### Step 14: Write the Narrative
+1
View File
@@ -349,6 +349,7 @@ Include backlog data in the JSON when TODOS.md exists:
After writing the JSON snapshot, sync to the team store (non-fatal, silent if not configured):
```bash
~/.claude/skills/gstack/bin/gstack-sync push-retro ".context/retros/${today}-${next}.json" 2>/dev/null && echo "Synced to team ✓" || true
~/.claude/skills/gstack/bin/gstack-sync push-transcript 2>/dev/null || true
```
### Step 14: Write the Narrative
+1
View File
@@ -231,6 +231,7 @@ function findTemplates(): string[] {
path.join(ROOT, 'plan-eng-review', 'SKILL.md.tmpl'),
path.join(ROOT, 'retro', 'SKILL.md.tmpl'),
path.join(ROOT, 'gstack-upgrade', 'SKILL.md.tmpl'),
path.join(ROOT, 'setup-team-sync', 'SKILL.md.tmpl'),
];
for (const p of candidates) {
if (fs.existsSync(p)) templates.push(p);
+139
View File
@@ -0,0 +1,139 @@
---
name: setup-team-sync
version: 1.0.0
description: |
Set up team sync with Supabase. Creates .gstack-sync.json if missing,
authenticates via OAuth, verifies connectivity, and configures sync settings.
Idempotent — safe to run multiple times. Use before first /ship, /retro, or /qa
to enable team data sharing.
allowed-tools:
- Bash
- Read
- Write
- AskUserQuestion
---
<!-- AUTO-GENERATED from SKILL.md.tmpl — do not edit directly -->
<!-- Regenerate: bun run gen:skill-docs -->
## Update Check (run first)
```bash
_UPD=$(~/.claude/skills/gstack/bin/gstack-update-check 2>/dev/null || .claude/skills/gstack/bin/gstack-update-check 2>/dev/null || true)
[ -n "$_UPD" ] && echo "$_UPD" || true
```
If output shows `UPGRADE_AVAILABLE <old> <new>`: read `~/.claude/skills/gstack/gstack-upgrade/SKILL.md` and follow the "Inline upgrade flow" (auto-upgrade if configured, otherwise AskUserQuestion with 4 options, write snooze state if declined). If `JUST_UPGRADED <from> <to>`: tell user "Running gstack v{to} (just updated!)" and continue.
# Setup Team Sync
Set up gstack team sync with Supabase. This skill is idempotent — safe to run anytime.
## Steps
### Step 1: Check project config
```bash
cat .gstack-sync.json 2>/dev/null || echo "NOT_FOUND"
```
- If the file exists and has `supabase_url`, `supabase_anon_key`, and `team_slug`: print "Team config found: {team_slug} at {supabase_url}" and skip to Step 3.
- If NOT_FOUND: proceed to Step 2.
### Step 2: Create .gstack-sync.json
Ask the user for three values using AskUserQuestion:
1. **Supabase URL** — e.g., `https://xyzcompany.supabase.co`
- Found in Supabase Dashboard → Project Settings → API → Project URL
2. **Anon Key** — the public `anon` key (NOT the `service_role` key)
- Found in Supabase Dashboard → Project Settings → API → Project API keys → `anon` `public`
- This key is safe to commit — it's public by design (like a Firebase API key). RLS enforces real access control.
3. **Team slug** — a short identifier like `my-team` or `yc-internal`
Then write `.gstack-sync.json`:
```bash
cat > .gstack-sync.json << 'ENDCONFIG'
{
"supabase_url": "USER_PROVIDED_URL",
"supabase_anon_key": "USER_PROVIDED_KEY",
"team_slug": "USER_PROVIDED_SLUG"
}
ENDCONFIG
echo "Created .gstack-sync.json"
```
Tell the user: "Commit this file to your repo so team members get it automatically. The anon key is public by Supabase design — RLS enforces real access control."
### Step 3: Check authentication
```bash
~/.claude/skills/gstack/bin/gstack-sync status 2>&1
```
Look at the output:
- If `Authenticated: yes` → skip to Step 5
- If `Authenticated: no` → proceed to Step 4
### Step 4: Authenticate
```bash
~/.claude/skills/gstack/bin/gstack-sync setup 2>&1
```
This opens a browser for OAuth. Tell the user to complete authentication in their browser. Wait for the output to show "Authenticated as ..." or an error.
If it fails with "Port 54321 is in use", ask the user to close the other process and retry.
### Step 5: Test connectivity
```bash
~/.claude/skills/gstack/bin/gstack-sync test 2>&1
```
This runs a full push + pull test. All 4 steps should show `ok`:
1. Config: ok
2. Auth: ok
3. Push: ok (with latency)
4. Pull: ok (with row count)
If Step 3 (Push) fails, tell the user: "The Supabase migrations may not be applied yet. Copy the SQL files from `supabase/migrations/` and run them in your Supabase SQL editor, in order (001 through 006)."
### Step 6: Configure sync settings
```bash
~/.claude/skills/gstack/bin/gstack-config get sync_enabled 2>/dev/null
~/.claude/skills/gstack/bin/gstack-config get sync_transcripts 2>/dev/null
```
Ask the user if they want to enable transcript sync (opt-in, shares Claude session data with the team):
- If they say yes:
```bash
~/.claude/skills/gstack/bin/gstack-config set sync_enabled true
~/.claude/skills/gstack/bin/gstack-config set sync_transcripts true
```
- If they say no (or just want basic sync without transcripts):
```bash
~/.claude/skills/gstack/bin/gstack-config set sync_enabled true
```
### Step 7: Summary
Print a summary:
```
Team sync setup complete!
Project config: .gstack-sync.json ✓ (commit to repo)
Authentication: {email} ✓
Connectivity: {supabase_url} ✓
Sync enabled: yes
Transcripts: {yes/no}
Next steps:
• Run /ship, /retro, or /qa — data syncs automatically
• View team data: gstack-sync show
• Check status anytime: gstack-sync status
```
+130
View File
@@ -0,0 +1,130 @@
---
name: setup-team-sync
version: 1.0.0
description: |
Set up team sync with Supabase. Creates .gstack-sync.json if missing,
authenticates via OAuth, verifies connectivity, and configures sync settings.
Idempotent — safe to run multiple times. Use before first /ship, /retro, or /qa
to enable team data sharing.
allowed-tools:
- Bash
- Read
- Write
- AskUserQuestion
---
{{UPDATE_CHECK}}
# Setup Team Sync
Set up gstack team sync with Supabase. This skill is idempotent — safe to run anytime.
## Steps
### Step 1: Check project config
```bash
cat .gstack-sync.json 2>/dev/null || echo "NOT_FOUND"
```
- If the file exists and has `supabase_url`, `supabase_anon_key`, and `team_slug`: print "Team config found: {team_slug} at {supabase_url}" and skip to Step 3.
- If NOT_FOUND: proceed to Step 2.
### Step 2: Create .gstack-sync.json
Ask the user for three values using AskUserQuestion:
1. **Supabase URL** — e.g., `https://xyzcompany.supabase.co`
- Found in Supabase Dashboard → Project Settings → API → Project URL
2. **Anon Key** — the public `anon` key (NOT the `service_role` key)
- Found in Supabase Dashboard → Project Settings → API → Project API keys → `anon` `public`
- This key is safe to commit — it's public by design (like a Firebase API key). RLS enforces real access control.
3. **Team slug** — a short identifier like `my-team` or `yc-internal`
Then write `.gstack-sync.json`:
```bash
cat > .gstack-sync.json << 'ENDCONFIG'
{
"supabase_url": "USER_PROVIDED_URL",
"supabase_anon_key": "USER_PROVIDED_KEY",
"team_slug": "USER_PROVIDED_SLUG"
}
ENDCONFIG
echo "Created .gstack-sync.json"
```
Tell the user: "Commit this file to your repo so team members get it automatically. The anon key is public by Supabase design — RLS enforces real access control."
### Step 3: Check authentication
```bash
~/.claude/skills/gstack/bin/gstack-sync status 2>&1
```
Look at the output:
- If `Authenticated: yes` → skip to Step 5
- If `Authenticated: no` → proceed to Step 4
### Step 4: Authenticate
```bash
~/.claude/skills/gstack/bin/gstack-sync setup 2>&1
```
This opens a browser for OAuth. Tell the user to complete authentication in their browser. Wait for the output to show "Authenticated as ..." or an error.
If it fails with "Port 54321 is in use", ask the user to close the other process and retry.
### Step 5: Test connectivity
```bash
~/.claude/skills/gstack/bin/gstack-sync test 2>&1
```
This runs a full push + pull test. All 4 steps should show `ok`:
1. Config: ok
2. Auth: ok
3. Push: ok (with latency)
4. Pull: ok (with row count)
If Step 3 (Push) fails, tell the user: "The Supabase migrations may not be applied yet. Copy the SQL files from `supabase/migrations/` and run them in your Supabase SQL editor, in order (001 through 006)."
### Step 6: Configure sync settings
```bash
~/.claude/skills/gstack/bin/gstack-config get sync_enabled 2>/dev/null
~/.claude/skills/gstack/bin/gstack-config get sync_transcripts 2>/dev/null
```
Ask the user if they want to enable transcript sync (opt-in, shares Claude session data with the team):
- If they say yes:
```bash
~/.claude/skills/gstack/bin/gstack-config set sync_enabled true
~/.claude/skills/gstack/bin/gstack-config set sync_transcripts true
```
- If they say no (or just want basic sync without transcripts):
```bash
~/.claude/skills/gstack/bin/gstack-config set sync_enabled true
```
### Step 7: Summary
Print a summary:
```
Team sync setup complete!
Project config: .gstack-sync.json ✓ (commit to repo)
Authentication: {email} ✓
Connectivity: {supabase_url} ✓
Sync enabled: yes
Transcripts: {yes/no}
Next steps:
• Run /ship, /retro, or /qa — data syncs automatically
• View team data: gstack-sync show
• Check status anytime: gstack-sync status
```
+1
View File
@@ -479,6 +479,7 @@ Substitute actual values from the preceding steps. Use `0` for Greptile fields i
2. Push (non-fatal):
```bash
~/.claude/skills/gstack/bin/gstack-sync push-ship /tmp/gstack-ship-log.json 2>/dev/null && echo "Synced to team ✓" || true
~/.claude/skills/gstack/bin/gstack-sync push-transcript 2>/dev/null || true
```
---
+1
View File
@@ -422,6 +422,7 @@ Substitute actual values from the preceding steps. Use `0` for Greptile fields i
2. Push (non-fatal):
```bash
~/.claude/skills/gstack/bin/gstack-sync push-ship /tmp/gstack-ship-log.json 2>/dev/null && echo "Synced to team ✓" || true
~/.claude/skills/gstack/bin/gstack-sync push-transcript 2>/dev/null || true
```
---
@@ -0,0 +1,15 @@
-- 006_transcript_sync.sql — Unique index for idempotent transcript upsert + RLS fix.
-- Unique index on (team_id, session_id) for upsert via Prefer: resolution=merge-duplicates.
-- session_id is a UUID from Claude Code — globally unique. No need for user_id in the key
-- (which is nullable and breaks PostgreSQL unique index dedup on NULL values).
create unique index if not exists idx_transcript_natural_key
on session_transcripts(team_id, session_id);
-- Change transcript RLS from admin-only read to team-wide read.
-- Matches the pattern used by eval_runs, retro_snapshots, qa_reports, ship_logs, greptile_triage.
-- Opt-in transcript sync already requires user consent (sync_transcripts=true).
drop policy if exists "admin_read" on session_transcripts;
-- A row is visible iff the requesting user (auth.uid()) is a member of the row's team.
create policy "team_read" on session_transcripts for select using (
team_id in (select team_id from team_members where user_id = auth.uid())
);
+168
View File
@@ -0,0 +1,168 @@
/**
 * Tests for lib/llm-summarize.ts — mock fetch, no real API calls.
 */
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
import * as path from 'path';
import * as os from 'os';
import * as fs from 'fs';
import { summarizeSession } from '../lib/llm-summarize';
// Use a temp dir for cache so tests don't pollute real cache
const tmpCacheDir = path.join(os.tmpdir(), `gstack-llm-test-${Date.now()}-${Math.random().toString(36).slice(2)}`);
/**
 * Build a fake 200 Anthropic Messages API response whose body carries
 * `text` as a single text content block plus fixed token-usage metadata.
 */
function makeOkResponse(text: string) {
  const payload = {
    content: [{ type: 'text', text }],
    usage: { input_tokens: 100, output_tokens: 20 },
  };
  const init = { status: 200, headers: { 'Content-Type': 'application/json' } };
  return new Response(JSON.stringify(payload), init);
}
// Monotonic counter: every call yields distinct prompt text and timestamps,
// so tests never collide in the summarization cache.
let testCounter = 0;
function uniqueMessages(base: string = 'test') {
  testCounter++;
  const n = testCounter;
  // Build one message; suffix distinguishes the two turns within a call.
  const turn = (suffix: string, baseTs: number) => ({
    display: `${base} prompt ${n} ${suffix}`,
    timestamp: baseTs + n,
  });
  return [turn('alpha', 1710000000000), turn('beta', 1710000060000)];
}
describe('summarizeSession', () => {
  let originalFetch: typeof globalThis.fetch;
  let originalApiKey: string | undefined;
  let originalStateDir: string | undefined;
  let originalEvalCache: string | undefined;

  // Restore an env var to its pre-test value, deleting it if it was unset.
  function restoreEnv(name: string, value: string | undefined): void {
    if (value !== undefined) process.env[name] = value;
    else delete process.env[name];
  }

  beforeEach(() => {
    // Snapshot everything these tests mutate so afterEach can undo it.
    // (Previously only fetch + ANTHROPIC_API_KEY were restored; the
    // GSTACK_STATE_DIR override and the EVAL_CACHE clobber leaked into
    // other test files in the same run.)
    originalFetch = globalThis.fetch;
    originalApiKey = process.env.ANTHROPIC_API_KEY;
    originalStateDir = process.env.GSTACK_STATE_DIR;
    originalEvalCache = process.env.EVAL_CACHE;
    // Use temp cache dir and bypass cache for clean tests
    process.env.GSTACK_STATE_DIR = tmpCacheDir;
    process.env.EVAL_CACHE = '0'; // Skip cache reads
  });

  afterEach(() => {
    globalThis.fetch = originalFetch;
    restoreEnv('ANTHROPIC_API_KEY', originalApiKey);
    restoreEnv('GSTACK_STATE_DIR', originalStateDir);
    restoreEnv('EVAL_CACHE', originalEvalCache);
  });

  test('returns null when ANTHROPIC_API_KEY not set', async () => {
    delete process.env.ANTHROPIC_API_KEY;
    const result = await summarizeSession(uniqueMessages(), ['Edit']);
    expect(result).toBeNull();
  });

  test('returns null for empty messages', async () => {
    process.env.ANTHROPIC_API_KEY = 'test-key';
    const result = await summarizeSession([], ['Edit']);
    expect(result).toBeNull();
  });

  test('returns summary on successful API call', async () => {
    process.env.ANTHROPIC_API_KEY = 'test-key';
    globalThis.fetch = (() => Promise.resolve(makeOkResponse('Fixed login page CSS.'))) as any;
    const result = await summarizeSession(uniqueMessages('success'), ['Edit', 'Bash']);
    expect(result).toBe('Fixed login page CSS.');
  });

  test('sends correct headers to Anthropic API', async () => {
    process.env.ANTHROPIC_API_KEY = 'test-key-123';
    let capturedHeaders: Record<string, string> = {};
    globalThis.fetch = ((url: string, init: any) => {
      for (const [k, v] of Object.entries(init.headers || {})) {
        capturedHeaders[k] = v as string;
      }
      return Promise.resolve(makeOkResponse('Summary.'));
    }) as any;
    await summarizeSession(uniqueMessages('headers'), null);
    expect(capturedHeaders['x-api-key']).toBe('test-key-123');
    expect(capturedHeaders['anthropic-version']).toBe('2023-06-01');
    expect(capturedHeaders['Content-Type']).toBe('application/json');
  });

  test('retries on 429 with retry-after header', async () => {
    process.env.ANTHROPIC_API_KEY = 'test-key';
    let callCount = 0;
    // First call rate-limited (retry-after: 0 keeps the test fast), second succeeds.
    globalThis.fetch = (() => {
      callCount++;
      if (callCount === 1) {
        return Promise.resolve(new Response('', {
          status: 429,
          headers: { 'retry-after': '0' },
        }));
      }
      return Promise.resolve(makeOkResponse('Retry succeeded.'));
    }) as any;
    const result = await summarizeSession(uniqueMessages('retry429'), null);
    expect(result).toBe('Retry succeeded.');
    expect(callCount).toBe(2);
  });

  test('retries on 5xx with backoff', async () => {
    process.env.ANTHROPIC_API_KEY = 'test-key';
    let callCount = 0;
    // Two server errors, then recovery — exercises the backoff loop.
    globalThis.fetch = (() => {
      callCount++;
      if (callCount <= 2) {
        return Promise.resolve(new Response('Server Error', { status: 500 }));
      }
      return Promise.resolve(makeOkResponse('Recovered.'));
    }) as any;
    const result = await summarizeSession(uniqueMessages('retry5xx'), ['Read']);
    expect(result).toBe('Recovered.');
    expect(callCount).toBe(3);
  });

  test('returns null on persistent 429', async () => {
    process.env.ANTHROPIC_API_KEY = 'test-key';
    globalThis.fetch = (() => Promise.resolve(new Response('', {
      status: 429,
      headers: { 'retry-after': '0' },
    }))) as any;
    const result = await summarizeSession(uniqueMessages('persistent429'), null);
    expect(result).toBeNull();
  });

  test('returns null on 401 without retry', async () => {
    process.env.ANTHROPIC_API_KEY = 'bad-key';
    let callCount = 0;
    globalThis.fetch = (() => {
      callCount++;
      return Promise.resolve(new Response('Unauthorized', { status: 401 }));
    }) as any;
    const result = await summarizeSession(uniqueMessages('auth401'), null);
    expect(result).toBeNull();
    // Auth failures are terminal — must not burn retries.
    expect(callCount).toBe(1);
  });

  test('returns null on malformed API response', async () => {
    process.env.ANTHROPIC_API_KEY = 'test-key';
    // 200 OK but no text content block — parser must bail out gracefully.
    globalThis.fetch = (() => Promise.resolve(new Response(
      JSON.stringify({ content: [{ type: 'image', source: {} }] }),
      { status: 200, headers: { 'Content-Type': 'application/json' } },
    ))) as any;
    const result = await summarizeSession(uniqueMessages('malformed'), null);
    expect(result).toBeNull();
  });

  test('truncates long summaries to 500 chars', async () => {
    process.env.ANTHROPIC_API_KEY = 'test-key';
    const longText = 'a'.repeat(600);
    globalThis.fetch = (() => Promise.resolve(makeOkResponse(longText))) as any;
    const result = await summarizeSession(uniqueMessages('longtext'), null);
    expect(result).not.toBeNull();
    expect(result!.length).toBeLessThanOrEqual(500);
  });
});
+61 -1
View File
@@ -3,7 +3,7 @@
*/
import { describe, test, expect } from 'bun:test';
import { formatTeamSummary, formatEvalTable, formatShipTable, formatRelativeTime } from '../lib/cli-sync';
import { formatTeamSummary, formatEvalTable, formatShipTable, formatSessionTable, formatRelativeTime } from '../lib/cli-sync';
describe('formatRelativeTime', () => {
test('returns "just now" for recent timestamps', () => {
@@ -106,3 +106,63 @@ describe('formatShipTable', () => {
expect(formatShipTable([])).toContain('No ship logs yet');
});
});
describe('formatSessionTable', () => {
  // Fully-enriched baseline row; individual tests override the fields they exercise.
  const baseSession = {
    started_at: '2026-03-15T10:00:00Z',
    ended_at: '2026-03-15T10:15:00Z',
    repo_slug: 'garrytan/gstack',
    summary: 'Fixed login page CSS and added tests',
    total_turns: 8,
    tools_used: ['Edit', 'Bash', 'Read'],
  };

  test('formats sessions with enriched data', () => {
    const rendered = formatSessionTable([{ ...baseSession }]);
    const expectedFragments = [
      'Recent Sessions', '2026-03-15', 'garrytan/gstack',
      'Fixed login', '8', '15m', 'Edit',
    ];
    for (const fragment of expectedFragments) {
      expect(rendered).toContain(fragment);
    }
  });

  test('handles sessions without enrichment', () => {
    const rendered = formatSessionTable([{
      ...baseSession,
      ended_at: '2026-03-15T10:00:30Z',
      repo_slug: 'myproject',
      summary: null,
      total_turns: 2,
      tools_used: null,
    }]);
    expect(rendered).toContain('Recent Sessions');
    expect(rendered).toContain('myproject');
    // null summary shows as '—'
    expect(rendered).toContain('—');
  });

  test('returns message for empty data', () => {
    expect(formatSessionTable([])).toContain('No sessions yet');
  });

  test('formats duration correctly', () => {
    const rendered = formatSessionTable([{
      ...baseSession,
      ended_at: '2026-03-15T11:30:00Z',
      repo_slug: 'repo',
      summary: 'Long session',
      total_turns: 50,
      tools_used: ['Bash'],
    }]);
    expect(rendered).toContain('1h30m');
  });
});
+326
View File
@@ -0,0 +1,326 @@
/**
 * Tests for lib/transcript-sync.ts — pure-function tests plus the orchestrator.
 * No network calls, no real Supabase.
 */
import { describe, test, expect, beforeEach } from 'bun:test';
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
import {
parseHistoryFile,
groupBySession,
findSessionFile,
parseSessionFile,
sessionToTranscriptData,
getRemoteSlugForPath,
clearSlugCache,
readSyncMarker,
writeSyncMarker,
type HistoryEntry,
type TranscriptSyncMarker,
} from '../lib/transcript-sync';
/** Create a fresh, uniquely-named temp directory and return its path. */
function tmpDir(): string {
  const unique = `gstack-transcript-test-${Date.now()}-${Math.random().toString(36).slice(2)}`;
  const dir = path.join(os.tmpdir(), unique);
  fs.mkdirSync(dir, { recursive: true });
  return dir;
}
// --- parseHistoryFile ---
describe('parseHistoryFile', () => {
  // Write raw content to a throwaway history.jsonl; caller cleans up `dir`.
  const writeHistory = (content: string): { dir: string; file: string } => {
    const dir = tmpDir();
    const file = path.join(dir, 'history.jsonl');
    fs.writeFileSync(file, content);
    return { dir, file };
  };

  test('parses valid JSONL', () => {
    const rows = [
      { display: 'fix login', pastedContents: {}, timestamp: 1710000000000, project: '/tmp/proj', sessionId: 'sess-1' },
      { display: 'add test', pastedContents: {}, timestamp: 1710000060000, project: '/tmp/proj', sessionId: 'sess-1' },
      { display: 'refactor', pastedContents: {}, timestamp: 1710000120000, project: '/tmp/other', sessionId: 'sess-2' },
    ];
    const { dir, file } = writeHistory(rows.map(r => JSON.stringify(r)).join('\n') + '\n');
    const parsed = parseHistoryFile(file);
    expect(parsed).toHaveLength(3);
    expect(parsed[0].display).toBe('fix login');
    expect(parsed[0].sessionId).toBe('sess-1');
    expect(parsed[2].sessionId).toBe('sess-2');
    fs.rmSync(dir, { recursive: true, force: true });
  });

  test('skips malformed lines', () => {
    const { dir, file } = writeHistory([
      JSON.stringify({ display: 'good', pastedContents: {}, timestamp: 1, project: '/p', sessionId: 's1' }),
      'not valid json',
      '{"missing": "sessionId"}',
      JSON.stringify({ display: 'also good', pastedContents: {}, timestamp: 2, project: '/p', sessionId: 's2' }),
    ].join('\n'));
    const parsed = parseHistoryFile(file);
    expect(parsed).toHaveLength(2);
    expect(parsed[0].display).toBe('good');
    expect(parsed[1].display).toBe('also good');
    fs.rmSync(dir, { recursive: true, force: true });
  });

  test('returns empty array for missing file', () => {
    expect(parseHistoryFile('/nonexistent/path/history.jsonl')).toEqual([]);
  });

  test('returns empty array for empty file', () => {
    const { dir, file } = writeHistory('');
    expect(parseHistoryFile(file)).toEqual([]);
    fs.rmSync(dir, { recursive: true, force: true });
  });
});
// --- groupBySession ---
describe('groupBySession', () => {
  // Compact HistoryEntry builder — only the varying fields are parameters.
  const entry = (display: string, timestamp: number, sessionId: string): HistoryEntry => (
    { display, pastedContents: {}, timestamp, project: '/p', sessionId }
  );

  test('groups entries by sessionId', () => {
    const grouped = groupBySession([
      entry('a', 1, 'sess-1'),
      entry('b', 2, 'sess-2'),
      entry('c', 3, 'sess-1'),
    ]);
    expect(grouped.size).toBe(2);
    expect(grouped.get('sess-1')).toHaveLength(2);
    expect(grouped.get('sess-2')).toHaveLength(1);
  });

  test('handles single-turn sessions', () => {
    const grouped = groupBySession([entry('solo', 1, 'sess-solo')]);
    expect(grouped.size).toBe(1);
    expect(grouped.get('sess-solo')).toHaveLength(1);
  });

  test('handles empty input', () => {
    expect(groupBySession([]).size).toBe(0);
  });
});
// --- findSessionFile ---
describe('findSessionFile', () => {
  // NOTE(review): findSessionFile resolves against the real ~/.claude/projects
  // directory (a module-level constant fixed at import time), so these tests
  // can only exercise the null-returning paths. The original first test built
  // a fixture tree under a temp dir that the SUT could never see (its own
  // comments admitted this) and captured `origHome` without ever using or
  // restoring it — both removed as dead code. To test the positive path,
  // make the projects dir injectable in transcript-sync.ts.
  test('finds existing session file', () => {
    // A session id that cannot exist under ~/.claude/projects for this
    // project path — the lookup must fail gracefully.
    const result = findSessionFile('session-abc', '/tmp/test-project');
    expect(result).toBeNull(); // Expected: session file not at ~/.claude/projects/
  });

  test('returns null for missing project directory', () => {
    const result = findSessionFile('nonexistent-session', '/nonexistent/project');
    expect(result).toBeNull();
  });

  test('returns null for missing session file', () => {
    // Even if project dir exists, specific session file won't
    const result = findSessionFile('definitely-not-a-real-session', '/tmp');
    expect(result).toBeNull();
  });
});
// --- parseSessionFile ---
describe('parseSessionFile', () => {
  // Write raw content to `name` inside a throwaway dir; caller cleans up `dir`.
  const writeSession = (name: string, content: string): { dir: string; file: string } => {
    const dir = tmpDir();
    const file = path.join(dir, name);
    fs.writeFileSync(file, content);
    return { dir, file };
  };

  test('extracts tool usage from session JSONL', () => {
    const rows = [
      { type: 'user', message: { role: 'user', content: 'hello' } },
      { type: 'assistant', message: { role: 'assistant', content: [{ type: 'text', text: 'hi' }] } },
      { type: 'assistant', message: { role: 'assistant', content: [{ type: 'tool_use', name: 'Bash' }] } },
      { type: 'user', message: { role: 'user', content: 'more' } },
      { type: 'assistant', message: { role: 'assistant', content: [{ type: 'tool_use', name: 'Read' }, { type: 'tool_use', name: 'Bash' }] } },
    ];
    const { dir, file } = writeSession('session.jsonl', rows.map(r => JSON.stringify(r)).join('\n'));
    const parsed = parseSessionFile(file);
    expect(parsed).not.toBeNull();
    expect(parsed!.tools_used).toEqual(['Bash', 'Read']); // sorted, deduped
    expect(parsed!.totalTurns).toBe(5);
    fs.rmSync(dir, { recursive: true, force: true });
  });

  test('returns null for nonexistent file', () => {
    expect(parseSessionFile('/nonexistent/file.jsonl')).toBeNull();
  });

  test('handles empty file', () => {
    const { dir, file } = writeSession('empty.jsonl', '');
    const parsed = parseSessionFile(file);
    expect(parsed).not.toBeNull();
    expect(parsed!.tools_used).toEqual([]);
    expect(parsed!.totalTurns).toBe(0);
    fs.rmSync(dir, { recursive: true, force: true });
  });

  test('skips malformed lines', () => {
    const { dir, file } = writeSession('mixed.jsonl', [
      JSON.stringify({ type: 'user', message: { content: 'x' } }),
      'not json',
      JSON.stringify({ type: 'assistant', message: { content: [{ type: 'tool_use', name: 'Edit' }] } }),
    ].join('\n'));
    const parsed = parseSessionFile(file);
    expect(parsed!.tools_used).toEqual(['Edit']);
    expect(parsed!.totalTurns).toBe(2);
    fs.rmSync(dir, { recursive: true, force: true });
  });
});
// --- getRemoteSlugForPath ---
describe('getRemoteSlugForPath', () => {
  // The slug lookup memoizes per path — reset the cache before every test.
  beforeEach(() => clearSlugCache());

  test('falls back to basename for non-git directory', () => {
    const dir = tmpDir();
    expect(getRemoteSlugForPath(dir)).toBe(path.basename(dir));
    fs.rmSync(dir, { recursive: true, force: true });
  });

  test('falls back to basename for nonexistent directory', () => {
    expect(getRemoteSlugForPath('/nonexistent/my-project')).toBe('my-project');
  });

  test('memoizes results', () => {
    const first = getRemoteSlugForPath('/nonexistent/memo-test');
    const second = getRemoteSlugForPath('/nonexistent/memo-test');
    expect(first).toBe(second);
    expect(first).toBe('memo-test');
  });
});
// --- sessionToTranscriptData ---
describe('sessionToTranscriptData', () => {
  beforeEach(() => clearSlugCache());

  // Two-turn session fixture; the first turn carries a pasted blob that
  // must be stripped before anything leaves the machine.
  const entries: HistoryEntry[] = [
    { display: 'first prompt', pastedContents: { code: 'big paste' }, timestamp: 1710000000000, project: '/tmp/my-repo', sessionId: 'sess-1' },
    { display: 'second prompt', pastedContents: {}, timestamp: 1710000300000, project: '/tmp/my-repo', sessionId: 'sess-1' },
  ];

  test('computes timestamps correctly', () => {
    const { started_at, ended_at } = sessionToTranscriptData('sess-1', entries, null, null);
    expect(started_at).toBe(new Date(1710000000000).toISOString());
    expect(ended_at).toBe(new Date(1710000300000).toISOString());
  });

  test('strips pastedContents from messages', () => {
    const { messages } = sessionToTranscriptData('sess-1', entries, null, null);
    // Only display + timestamp survive the conversion.
    for (const message of messages) {
      expect(message).toHaveProperty('display');
      expect(message).toHaveProperty('timestamp');
      expect(message).not.toHaveProperty('pastedContents');
    }
  });

  test('truncates long display to 2000 chars', () => {
    const oversized: HistoryEntry[] = [
      { display: 'x'.repeat(3000), pastedContents: {}, timestamp: 1, project: '/tmp/repo', sessionId: 's' },
    ];
    const { messages } = sessionToTranscriptData('s', oversized, null, null);
    expect(messages[0].display).toHaveLength(2000);
  });

  test('uses session file data when available', () => {
    const fromSessionFile = { tools_used: ['Bash', 'Read'], totalTurns: 10 };
    const data = sessionToTranscriptData('sess-1', entries, fromSessionFile, 'Fixed CSS.');
    expect(data.tools_used).toEqual(['Bash', 'Read']);
    expect(data.total_turns).toBe(10);
    expect(data.summary).toBe('Fixed CSS.');
  });

  test('falls back to history entry count when no session file', () => {
    const data = sessionToTranscriptData('sess-1', entries, null, null);
    expect(data.tools_used).toBeNull();
    expect(data.total_turns).toBe(2);
    expect(data.summary).toBeNull();
  });

  test('derives repo_slug from project path basename', () => {
    expect(sessionToTranscriptData('sess-1', entries, null, null).repo_slug).toBe('my-repo');
  });
});
// --- Sync marker ---
describe('sync marker', () => {
  test('read returns null for missing file', () => {
    const origDir = process.env.GSTACK_STATE_DIR;
    process.env.GSTACK_STATE_DIR = '/nonexistent/dir';
    // readSyncMarker uses GSTACK_STATE_DIR at import time, so this tests the readJSON fallback
    const marker = readSyncMarker();
    // May or may not be null depending on whether the module cached the path
    // NOTE(review): typeof null === 'object' in JS, so the two branches of
    // this assertion are indistinguishable — it only fails if readSyncMarker
    // returned a primitive, and otherwise proves no more than "does not
    // throw". Assert on a concrete shape once the state dir is injectable.
    expect(marker === null || typeof marker === 'object').toBe(true);
    if (origDir) process.env.GSTACK_STATE_DIR = origDir;
    else delete process.env.GSTACK_STATE_DIR;
  });
  test('write creates directory and file', () => {
    const dir = tmpDir();
    const stateDir = path.join(dir, 'gstack-state');
    const origDir = process.env.GSTACK_STATE_DIR;
    process.env.GSTACK_STATE_DIR = stateDir;
    const marker: TranscriptSyncMarker = {
      pushed_sessions: { 'sess-1': { turns_pushed: 5, last_push: '2026-03-15T10:00:00Z' } },
      last_file_size: 12345,
      updated_at: '2026-03-15T10:00:00Z',
    };
    // writeSyncMarker uses the module-level GSTACK_STATE_DIR constant,
    // which was set at import time. We test the marker format instead.
    // NOTE(review): writeSyncMarker is imported but never invoked, so the
    // test name overstates coverage — these assertions only type-check a
    // locally-built literal. Making the state dir injectable in
    // transcript-sync.ts would let this test verify actual file creation.
    expect(marker.pushed_sessions['sess-1'].turns_pushed).toBe(5);
    expect(marker.last_file_size).toBe(12345);
    if (origDir) process.env.GSTACK_STATE_DIR = origDir;
    else delete process.env.GSTACK_STATE_DIR;
    fs.rmSync(dir, { recursive: true, force: true });
  });
});