Merge remote-tracking branch 'origin/garrytan/team-supabase-store' into garrytan/dev-mode

This commit is contained in:
Garry Tan
2026-03-15 20:41:33 -05:00
15 changed files with 714 additions and 76 deletions
+31 -19
View File
@@ -2,15 +2,14 @@
# gstack-sync — team data sync CLI.
#
# Usage:
# gstack-sync setup — interactive auth flow
# gstack-sync status — show sync status (queue, cache, connection)
# gstack-sync push-eval <file> — push an eval result JSON to Supabase
# gstack-sync push-retro <file> — push a retro snapshot JSON
# gstack-sync push-qa <file> — push a QA report JSON
# gstack-sync push-ship <file> — push a ship log JSON
# gstack-sync pull — pull team data to local cache
# gstack-sync drain — drain the offline queue
# gstack-sync logout — clear auth tokens
# gstack-sync setup — interactive auth flow
# gstack-sync status — show sync status
# gstack-sync test — validate full sync flow
# gstack-sync show [evals|ships|retros] — view team data
# gstack-sync push-{eval,retro,qa,ship,greptile} <file> — push data
# gstack-sync pull — pull team data to local cache
# gstack-sync drain — drain the offline queue
# gstack-sync logout — clear auth tokens
#
# Env overrides (for testing):
# GSTACK_DIR — override auto-detected gstack root
@@ -42,6 +41,16 @@ case "${1:-}" in
FILE="${2:?Usage: gstack-sync push-ship <file.json>}"
exec bun run "$GSTACK_DIR/lib/cli-sync.ts" push-ship "$FILE"
;;
push-greptile)
FILE="${2:?Usage: gstack-sync push-greptile <file.json>}"
exec bun run "$GSTACK_DIR/lib/cli-sync.ts" push-greptile "$FILE"
;;
test)
exec bun run "$GSTACK_DIR/lib/cli-sync.ts" test
;;
show)
exec bun run "$GSTACK_DIR/lib/cli-sync.ts" show "${@:2}"
;;
pull)
exec bun run "$GSTACK_DIR/lib/cli-sync.ts" pull
;;
@@ -52,18 +61,21 @@ case "${1:-}" in
exec bun run "$GSTACK_DIR/lib/cli-sync.ts" logout
;;
*)
echo "Usage: gstack-sync {setup|status|push-eval|push-retro|push-qa|push-ship|pull|drain|logout}"
echo "Usage: gstack-sync <command> [args]"
echo ""
echo "Commands:"
echo " setup Interactive auth flow (opens browser)"
echo " status Show sync status (queue, cache, connection)"
echo " push-eval <file> Push eval result JSON to team store"
echo " push-retro <file> Push retro snapshot JSON"
echo " push-qa <file> Push QA report JSON"
echo " push-ship <file> Push ship log JSON"
echo " pull Pull team data to local cache"
echo " drain Drain the offline sync queue"
echo " logout Clear auth tokens"
echo " setup Interactive auth flow (opens browser)"
echo " status Show sync status (queue, cache, connection)"
echo " test Validate full sync flow (push + pull)"
echo " show [evals|ships|retros] View team data in terminal"
echo " push-eval <file> Push eval result JSON to team store"
echo " push-retro <file> Push retro snapshot JSON"
echo " push-qa <file> Push QA report JSON"
echo " push-ship <file> Push ship log JSON"
echo " push-greptile <file> Push Greptile triage entry JSON"
echo " pull Pull team data to local cache"
echo " drain Drain the offline sync queue"
echo " logout Clear auth tokens"
exit 1
;;
esac
+132
View File
@@ -0,0 +1,132 @@
# Team Sync Setup Guide
Team sync lets your team share eval results, retro snapshots, QA reports, ship logs, and Greptile triage data via a shared Supabase store. All sync is optional and non-fatal — without it, everything works locally as before.
## Prerequisites
- A [Supabase](https://supabase.com) project (free tier works)
- gstack v0.3.10+
## Step 1: Create a Supabase project
1. Go to [supabase.com](https://supabase.com) and create a new project
2. Note your **Project URL** (e.g., `https://xxxx.supabase.co`)
3. Note your **anon/public key** from Settings > API
## Step 2: Run migrations
In the Supabase SQL Editor, run these files **in order**:
```
supabase/migrations/001_teams.sql
supabase/migrations/002_eval_runs.sql
supabase/migrations/003_data_tables.sql
supabase/migrations/004_eval_costs.sql
supabase/migrations/005_sync_heartbeats.sql
```
Copy-paste each file's contents into the SQL editor and run.
## Step 3: Create your team
In the SQL editor, create a team and add yourself:
```sql
-- Create team
INSERT INTO teams (name, slug) VALUES ('Your Team', 'your-team-slug');
-- After authenticating (Step 5), add yourself as owner:
-- INSERT INTO team_members (team_id, user_id, role)
-- VALUES ('<team-id>', '<your-user-id>', 'owner');
```
Note the team slug — you'll need it in the next step.
## Step 4: Configure your project
Copy the example config to your project root:
```bash
cp .gstack-sync.json.example .gstack-sync.json
```
Edit `.gstack-sync.json` with your Supabase details:
```json
{
"supabase_url": "https://YOUR_PROJECT.supabase.co",
"supabase_anon_key": "eyJ...",
"team_slug": "your-team-slug"
}
```
**Important:** Add `.gstack-sync.json` to `.gitignore` if it contains sensitive keys, or commit it if your team uses the same Supabase project (the anon key is safe to commit — RLS protects the data).
## Step 5: Authenticate
```bash
gstack-sync setup
```
This opens your browser for Supabase OAuth. After authenticating, tokens are saved to `~/.gstack/auth.json` (mode 0600).
**For CI/automation:** Set the `GSTACK_SUPABASE_ACCESS_TOKEN` env var instead of running setup.
## Step 6: Verify
```bash
gstack-sync test
```
Expected output:
```
gstack sync test
────────────────────────────────────
1. Config: ok (team: your-team-slug)
2. Auth: ok (you@email.com)
3. Push: ok (123ms)
4. Pull: ok (1 heartbeats, 95ms)
────────────────────────────────────
Sync test passed ✓
```
## Step 7: See your data
```bash
gstack-sync show # team summary dashboard
gstack-sync show evals # recent eval runs
gstack-sync show ships # recent ship logs
gstack-sync show retros # recent retro snapshots
gstack-sync status # sync health check
bun run eval:trend --team # team-wide test trends
```
## How it works
When sync is configured, skills automatically push data after completing their primary task:
- `/ship` pushes a ship log after PR creation (Step 8.5)
- `/retro` pushes the snapshot after saving to `.context/retros/` (Step 13)
- `/qa` pushes a report after computing the health score (Phase 6)
- `/review` pushes Greptile triage entries after history file writes
- Eval runs are pushed automatically by `EvalCollector.finalize()`
All pushes are non-fatal. If sync fails, entries are queued in `~/.gstack/sync-queue.json` and retried on the next push or via `gstack-sync drain`.
## Troubleshooting
| Problem | Fix |
|---|---|
| "No .gstack-sync.json found" | Copy `.gstack-sync.json.example` and fill in your values |
| "Not authenticated" | Run `gstack-sync setup` |
| Push fails with 404 | Run the migration SQL files in order |
| "Connection failed" | Check your Supabase URL and that the project is running |
| Queue growing | Run `gstack-sync drain` to flush |
## Adding team members
Each team member needs to:
1. Have `.gstack-sync.json` in their project (commit it or share it)
2. Run `gstack-sync setup` to authenticate
3. Be added to `team_members` in Supabase (by an admin)
+1 -1
View File
@@ -1,7 +1,7 @@
# Team Coordination Store: gstack as Engineering Intelligence Platform
> Design doc for the Supabase-backed team data store and universal eval infrastructure.
> Authored 2026-03-15. Status: approved, not yet implemented.
> Authored 2026-03-15. Status: Phase 1 complete. Phase 2 complete (skill hooks, sync test/show, team trends). Phase 3-4 not started.
## Table of Contents
+23 -2
View File
@@ -541,14 +541,35 @@ async function cmdTrend(args: string[]): Promise<void> {
let limit = 10;
let filterTier: string | undefined;
let filterTest: string | undefined;
let useTeam = false;
for (let i = 0; i < args.length; i++) {
if (args[i] === '--limit' && args[i + 1]) { limit = parseInt(args[++i], 10); }
else if (args[i] === '--tier' && args[i + 1]) { filterTier = args[++i]; }
else if (args[i] === '--test' && args[i + 1]) { filterTest = args[++i]; }
else if (args[i] === '--team') { useTeam = true; }
}
let results: EvalResult[];
if (useTeam) {
try {
const { isSyncConfigured } = await import('./sync-config');
const { pullEvalRuns } = await import('./sync');
if (!isSyncConfigured()) {
console.log('Team sync not configured — showing local data only. See docs/TEAM_SYNC_SETUP.md');
results = loadEvalResults<EvalResult>(undefined, limit);
} else {
const teamRows = await pullEvalRuns({ limit });
results = teamRows as unknown as EvalResult[];
}
} catch {
console.log('Team sync not available — showing local data only.');
results = loadEvalResults<EvalResult>(undefined, limit);
}
} else {
results = loadEvalResults<EvalResult>(undefined, limit);
}
const results = loadEvalResults<EvalResult>(undefined, limit);
if (results.length === 0) {
console.log('No eval runs yet. Run: EVALS=1 bun run test:evals');
return;
@@ -627,7 +648,7 @@ Commands:
summary [--limit N] Aggregate stats across all runs
push <file> Validate + save + sync an eval result
cost <file> Show per-model cost breakdown
trend [--limit N] [--tier X] [--test X] Per-test pass rate trends
trend [--limit N] [--tier X] [--test X] [--team] Per-test pass rate trends
cache read|write|stats|clear|verify Manage eval cache
watch Live E2E test dashboard
`);
+247 -6
View File
@@ -6,12 +6,13 @@
import * as fs from 'fs';
import { getTeamConfig, resolveSyncConfig, clearAuthTokens, isSyncConfigured } from './sync-config';
import { runDeviceAuth } from './auth';
import { pushEvalRun, pushRetro, pushQAReport, pushShipLog, pullTable, drainQueue, getSyncStatus } from './sync';
import { pushEvalRun, pushRetro, pushQAReport, pushShipLog, pushGreptileTriage, pushHeartbeat, pullTable, drainQueue, getSyncStatus } from './sync';
import { readJSON } from './util';
const command = process.argv[2];
// --- Main (only when run directly, not imported) ---
async function main() {
const command = process.argv[2];
switch (command) {
case 'setup':
await cmdSetup();
@@ -31,6 +32,15 @@ async function main() {
case 'push-ship':
await cmdPushFile('ship', process.argv[3]);
break;
case 'push-greptile':
await cmdPushFile('greptile', process.argv[3]);
break;
case 'test':
await cmdTest();
break;
case 'show':
await cmdShow(process.argv.slice(3));
break;
case 'pull':
await cmdPull();
break;
@@ -121,6 +131,9 @@ async function cmdPushFile(type: string, filePath: string): Promise<void> {
case 'ship':
ok = await pushShipLog(data);
break;
case 'greptile':
ok = await pushGreptileTriage(data);
break;
}
if (ok) {
@@ -165,7 +178,235 @@ function cmdLogout(): void {
console.log(`Cleared auth tokens for ${team.supabase_url}`);
}
main().catch(err => {
console.error(err.message);
process.exit(1);
});
// --- sync test ---
async function cmdTest(): Promise<void> {
console.log('gstack sync test');
console.log('─'.repeat(40));
// Step 1: Config
const team = getTeamConfig();
if (!team) {
console.log(' 1. Config: FAIL — no .gstack-sync.json');
console.log('\n See docs/TEAM_SYNC_SETUP.md for setup instructions.');
process.exit(1);
}
console.log(` 1. Config: ok (team: ${team.team_slug})`);
// Step 2: Auth
const config = resolveSyncConfig();
if (!config) {
console.log(' 2. Auth: FAIL — not authenticated');
console.log('\n Run: gstack-sync setup');
process.exit(1);
}
console.log(` 2. Auth: ok (${config.auth.email || config.auth.user_id})`);
// Step 3: Push heartbeat
const t0 = Date.now();
const pushOk = await pushHeartbeat();
const pushMs = Date.now() - t0;
if (!pushOk) {
console.log(` 3. Push: FAIL (${pushMs}ms)`);
console.log('\n Check that Supabase migrations have been run (especially 005_sync_heartbeats.sql).');
console.log(' See docs/TEAM_SYNC_SETUP.md for details.');
process.exit(1);
}
console.log(` 3. Push: ok (${pushMs}ms)`);
// Step 4: Pull
const t1 = Date.now();
const rows = await pullTable('sync_heartbeats');
const pullMs = Date.now() - t1;
if (rows.length === 0) {
console.log(` 4. Pull: FAIL — no rows returned (${pullMs}ms)`);
process.exit(1);
}
console.log(` 4. Pull: ok (${rows.length} heartbeats, ${pullMs}ms)`);
console.log('─'.repeat(40));
console.log(' Sync test passed ✓');
}
// --- sync show ---
/**
 * Format a relative time string (e.g., "2h ago").
 *
 * @param iso - ISO-8601 timestamp to compare against now.
 * @returns "just now" for deltas under a minute (including future timestamps),
 *          then "Nm ago" / "Nh ago" / "Nd ago"; "unknown" if `iso` is unparseable.
 */
export function formatRelativeTime(iso: string): string {
  const ms = Date.now() - new Date(iso).getTime();
  // Invalid date strings parse to NaN; without this guard every comparison
  // below is false and we'd return the nonsense string "NaNd ago".
  if (!Number.isFinite(ms)) return 'unknown';
  if (ms < 60_000) return 'just now';
  if (ms < 3_600_000) return `${Math.round(ms / 60_000)}m ago`;
  if (ms < 86_400_000) return `${Math.round(ms / 3_600_000)}h ago`;
  return `${Math.round(ms / 86_400_000)}d ago`;
}
/** Format team summary dashboard from pulled data. Pure function for testing. */
export function formatTeamSummary(opts: {
  teamSlug: string;
  evalRuns: Record<string, unknown>[];
  shipLogs: Record<string, unknown>[];
  retroSnapshots: Record<string, unknown>[];
  queueSize: number;
  cacheLastPull: string | null;
}): string {
  const rule = '═'.repeat(50);
  // ISO-8601 strings compare lexicographically, so plain > works for recency.
  const weekAgo = new Date(Date.now() - 7 * 86_400_000).toISOString();

  // Eval activity within the trailing week.
  const weekEvals = opts.evalRuns.filter((run) => (run.timestamp as string) > weekAgo);
  const contributors = new Set(weekEvals.map((run) => run.user_id).filter(Boolean));

  // Ship velocity within the trailing week (ship rows may use either timestamp key).
  const weekShips = opts.shipLogs.filter(
    (log) => ((log.created_at as string) || (log.timestamp as string) || '') > weekAgo,
  );

  const out: string[] = [
    '',
    `Team: ${opts.teamSlug}`,
    rule,
    ` Eval runs (7d): ${weekEvals.length} runs, ${contributors.size} contributors`,
    ` Ship velocity: ${weekShips.length} PRs this week`,
  ];

  // Average detection rate across all tests in this week's eval runs.
  const rates: number[] = [];
  for (const run of weekEvals) {
    for (const t of (run.tests as any[]) || []) {
      if (t.detection_rate != null) rates.push(t.detection_rate as number);
    }
  }
  if (rates.length > 0) {
    const avg = rates.reduce((sum, r) => sum + r, 0) / rates.length;
    out.push(` Avg detection: ${avg.toFixed(1)} bugs`);
  }

  // Most recent retro snapshot (caller supplies newest-first ordering — assumed; confirm against pullTable).
  if (opts.retroSnapshots.length > 0) {
    const newest = opts.retroSnapshots[0];
    const streak = (newest as any).streak_days;
    const date = (newest as any).date || (newest as any).timestamp;
    out.push(
      ` Latest retro: ${date ? String(date).slice(0, 10) : 'unknown'}${streak ? ` (streak: ${streak}d)` : ''}`,
    );
  }

  // Local sync health.
  out.push(` Sync queue: ${opts.queueSize} items`);
  out.push(` Last pull: ${opts.cacheLastPull ? formatRelativeTime(opts.cacheLastPull) : 'never'}`);
  out.push(rule, '');
  return out.join('\n');
}
/** Format eval runs table. Pure function for testing. */
export function formatEvalTable(evalRuns: Record<string, unknown>[]): string {
  if (evalRuns.length === 0) return 'No eval runs yet.\n';

  const heavy = '═'.repeat(80);
  const light = '─'.repeat(80);
  const header =
    ' ' +
    'Date'.padEnd(13) +
    'User'.padEnd(20) +
    'Branch'.padEnd(22) +
    'Pass'.padEnd(8) +
    'Cost'.padEnd(8) +
    'Tier';

  // Fixed-width columns: Date(13) User(20) Branch(22) Pass(8) Cost(8) Tier.
  // Cap output at the 20 most recent runs.
  const rows = evalRuns.slice(0, 20).map((run) => {
    const date = String(run.timestamp || '').slice(0, 10).padEnd(13);
    const user = String(run.email || run.user_id || '').slice(0, 18).padEnd(20);
    const branch = String(run.branch || '').slice(0, 20).padEnd(22);
    const pass = `${run.passed || 0}/${run.total_tests || 0}`.padEnd(8);
    const cost = `$${Number(run.total_cost_usd || 0).toFixed(2)}`.padEnd(8);
    const tier = String(run.tier || 'e2e');
    return ` ${date}${user}${branch}${pass}${cost}${tier}`;
  });

  return ['', 'Recent Eval Runs', heavy, header, light, ...rows, light, ''].join('\n');
}
/** Format ship logs table. Pure function for testing. */
export function formatShipTable(shipLogs: Record<string, unknown>[]): string {
  if (shipLogs.length === 0) return 'No ship logs yet.\n';

  const heavy = '═'.repeat(70);
  const light = '─'.repeat(70);
  const header = ' ' + 'Date'.padEnd(13) + 'Version'.padEnd(12) + 'Branch'.padEnd(25) + 'PR';

  // Fixed-width columns: Date(13) Version(12) Branch(25) PR.
  // Cap output at the 20 most recent logs; ship rows may use either timestamp key.
  const rows = shipLogs.slice(0, 20).map((log) => {
    const date = String(log.created_at || log.timestamp || '').slice(0, 10).padEnd(13);
    const version = String(log.version || '').padEnd(12);
    const branch = String(log.branch || '').slice(0, 23).padEnd(25);
    const pr = String(log.pr_url || '');
    return ` ${date}${version}${branch}${pr}`;
  });

  return ['', 'Recent Ship Logs', heavy, header, light, ...rows, light, ''].join('\n');
}
/**
 * `gstack-sync show [evals|ships|retros]` — render team data in the terminal.
 * With no (or an unrecognized) subcommand, prints the summary dashboard.
 * Exits 1 if sync is not configured.
 */
async function cmdShow(args: string[]): Promise<void> {
  if (!isSyncConfigured()) {
    console.error('Sync not configured. Run gstack-sync setup first.');
    console.error('See docs/TEAM_SYNC_SETUP.md for setup instructions.');
    process.exit(1);
  }

  // Non-null: isSyncConfigured() implies a team config exists.
  const team = getTeamConfig()!;

  switch (args[0]) {
    case 'evals':
      console.log(formatEvalTable(await pullTable('eval_runs')));
      return;

    case 'ships':
      console.log(formatShipTable(await pullTable('ship_logs')));
      return;

    case 'retros': {
      const rows = await pullTable('retro_snapshots');
      if (rows.length === 0) {
        console.log('No retro snapshots yet.');
        return;
      }
      // Retros have no table formatter; print a compact line per snapshot.
      for (const row of rows.slice(0, 10)) {
        const date = String((row as any).date || (row as any).timestamp || '').slice(0, 10);
        const streak = (row as any).streak_days;
        const commits = (row as any).metrics?.commits;
        console.log(` ${date} ${commits ? commits + ' commits' : ''} ${streak ? 'streak: ' + streak + 'd' : ''}`);
      }
      return;
    }

    default: {
      // Summary dashboard: pull the three data tables in parallel.
      const status = await getSyncStatus();
      const [evalRuns, shipLogs, retroSnapshots] = await Promise.all([
        pullTable('eval_runs'),
        pullTable('ship_logs'),
        pullTable('retro_snapshots'),
      ]);
      console.log(formatTeamSummary({
        teamSlug: team.team_slug,
        evalRuns,
        shipLogs,
        retroSnapshots,
        queueSize: status.queueSize,
        cacheLastPull: status.cacheLastPull,
      }));
    }
  }
}
if (import.meta.main) {
main().catch(err => {
console.error(err.message);
process.exit(1);
});
}
+34 -48
View File
@@ -154,77 +154,63 @@ export async function pushRow(table: string, data: Record<string, unknown>): Pro
}
}
/** Push an eval run result to Supabase. */
export async function pushEvalRun(evalResult: Record<string, unknown>): Promise<boolean> {
/**
* Common push helper: resolves sync config, injects team/user/repo fields, and pushes.
* Returns false (silently) if sync is not configured.
*/
function pushWithSync(
table: string,
data: Record<string, unknown>,
opts?: { addRepoSlug?: boolean; addHostname?: boolean },
): Promise<boolean> {
const config = resolveSyncConfig();
if (!config) return false;
const data = {
if (!config) return Promise.resolve(false);
const row: Record<string, unknown> = {
team_id: config.auth.team_id,
repo_slug: getRemoteSlug(),
user_id: config.auth.user_id,
...data,
};
if (opts?.addRepoSlug !== false) row.repo_slug = getRemoteSlug();
if (opts?.addHostname) row.hostname = os.hostname();
return pushRow(table, row);
}
/** Push an eval run result to Supabase. Strips transcripts to keep payload small. */
export async function pushEvalRun(evalResult: Record<string, unknown>): Promise<boolean> {
return pushWithSync('eval_runs', {
hostname: os.hostname(),
...evalResult,
// Strip full transcripts to keep payload small
tests: (evalResult.tests as any[])?.map(t => ({
...t,
transcript: undefined,
prompt: t.prompt ? t.prompt.slice(0, 500) : undefined,
})),
};
return pushRow('eval_runs', data);
});
}
/** Push a retro snapshot to Supabase. */
export async function pushRetro(retroData: Record<string, unknown>): Promise<boolean> {
const config = resolveSyncConfig();
if (!config) return false;
return pushRow('retro_snapshots', {
team_id: config.auth.team_id,
repo_slug: getRemoteSlug(),
user_id: config.auth.user_id,
...retroData,
});
export function pushRetro(retroData: Record<string, unknown>): Promise<boolean> {
return pushWithSync('retro_snapshots', retroData);
}
/** Push a QA report to Supabase. */
export async function pushQAReport(qaData: Record<string, unknown>): Promise<boolean> {
const config = resolveSyncConfig();
if (!config) return false;
return pushRow('qa_reports', {
team_id: config.auth.team_id,
repo_slug: getRemoteSlug(),
user_id: config.auth.user_id,
...qaData,
});
export function pushQAReport(qaData: Record<string, unknown>): Promise<boolean> {
return pushWithSync('qa_reports', qaData);
}
/** Push a ship log to Supabase. */
export async function pushShipLog(shipData: Record<string, unknown>): Promise<boolean> {
const config = resolveSyncConfig();
if (!config) return false;
return pushRow('ship_logs', {
team_id: config.auth.team_id,
repo_slug: getRemoteSlug(),
user_id: config.auth.user_id,
...shipData,
});
export function pushShipLog(shipData: Record<string, unknown>): Promise<boolean> {
return pushWithSync('ship_logs', shipData);
}
/** Push a Greptile triage entry to Supabase. */
export async function pushGreptileTriage(triageData: Record<string, unknown>): Promise<boolean> {
const config = resolveSyncConfig();
if (!config) return false;
export function pushGreptileTriage(triageData: Record<string, unknown>): Promise<boolean> {
return pushWithSync('greptile_triage', triageData, { addRepoSlug: false });
}
return pushRow('greptile_triage', {
team_id: config.auth.team_id,
user_id: config.auth.user_id,
...triageData,
});
/** Push a sync heartbeat (for connectivity testing). */
export function pushHeartbeat(): Promise<boolean> {
return pushWithSync('sync_heartbeats', { hostname: os.hostname() }, { addRepoSlug: false });
}
// --- Pull operations ---
+15
View File
@@ -307,6 +307,21 @@ $B snapshot -i -a -o "$REPORT_DIR/screenshots/issue-002.png"
}
```
7. **Sync to team** (non-fatal, silent if not configured):
```bash
cat > .gstack/qa-reports/qa-sync.json << 'QAEOF'
{
"url": "<target URL>",
"mode": "<full|quick|diff-aware|regression>",
"health_score": <N>,
"issues": [<issues array from step 6 above>],
"category_scores": {<category scores object>}
}
QAEOF
~/.claude/skills/gstack/bin/gstack-sync push-qa .gstack/qa-reports/qa-sync.json 2>/dev/null && echo "Synced to team ✓" || true
```
Substitute actual values. Uses snake_case keys matching the Supabase schema.
**Regression mode:** After writing the report, load the baseline file. Compare:
- Health score delta
- Issues fixed (in baseline but not current)
+15
View File
@@ -233,6 +233,21 @@ $B snapshot -i -a -o "$REPORT_DIR/screenshots/issue-002.png"
}
```
7. **Sync to team** (non-fatal, silent if not configured):
```bash
cat > .gstack/qa-reports/qa-sync.json << 'QAEOF'
{
"url": "<target URL>",
"mode": "<full|quick|diff-aware|regression>",
"health_score": <N>,
"issues": [<issues array from step 6 above>],
"category_scores": {<category scores object>}
}
QAEOF
~/.claude/skills/gstack/bin/gstack-sync push-qa .gstack/qa-reports/qa-sync.json 2>/dev/null && echo "Synced to team ✓" || true
```
Substitute actual values. Uses snake_case keys matching the Supabase schema.
**Regression mode:** After writing the report, load the baseline file. Compare:
- Health score delta
- Issues fixed (in baseline but not current)
+5
View File
@@ -403,6 +403,11 @@ Include backlog data in the JSON when TODOS.md exists:
}
```
After writing the JSON snapshot, sync to the team store (non-fatal, silent if not configured):
```bash
~/.claude/skills/gstack/bin/gstack-sync push-retro ".context/retros/${today}-${next}.json" 2>/dev/null && echo "Synced to team ✓" || true
```
### Step 14: Write the Narrative
Structure the output as:
+5
View File
@@ -346,6 +346,11 @@ Include backlog data in the JSON when TODOS.md exists:
}
```
After writing the JSON snapshot, sync to the team store (non-fatal, silent if not configured):
```bash
~/.claude/skills/gstack/bin/gstack-sync push-retro ".context/retros/${today}-${next}.json" 2>/dev/null && echo "Synced to team ✓" || true
```
### Step 14: Write the Narrative
Structure the output as:
+19
View File
@@ -204,6 +204,25 @@ Example entries:
2026-03-13 | garrytan/myapp | already-fixed | lib/payments.rb | error-handling
```
## Team Sync (non-fatal)
After appending to both history files, sync each triage entry to the team store. For each triaged comment, write a JSON entry and push:
```bash
cat > /tmp/gstack-greptile-entry.json << 'GEOF'
{
"date": "<YYYY-MM-DD>",
"repo": "<owner/repo from REMOTE_SLUG>",
"triage_type": "<fp|fix|already-fixed>",
"file_pattern": "<file-pattern>",
"category": "<category>"
}
GEOF
~/.claude/skills/gstack/bin/gstack-sync push-greptile /tmp/gstack-greptile-entry.json 2>/dev/null || true
```
If multiple comments were triaged, push each one individually (overwrite the temp file each time). Non-fatal — failures are queued for retry. Silent if sync is not configured.
---
## Output Format
+27
View File
@@ -456,6 +456,33 @@ EOF
---
## Step 8.5: Sync to Team (non-fatal)
After the PR is created, write a ship log and sync to the team store. This step is entirely silent if sync is not configured.
1. Write ship metadata to a temp file:
```bash
cat > /tmp/gstack-ship-log.json << 'SHIPEOF'
{
"version": "<new version from Step 4>",
"branch": "<current branch>",
"pr_url": "<PR URL from Step 8>",
"review_findings": { "critical": 0, "informational": 0 },
"greptile_stats": { "total": 0, "valid": 0, "fixed": 0, "fp": 0 },
"todos_completed": [],
"test_results": { "pass": true, "test_count": 0 }
}
SHIPEOF
```
Substitute actual values from the preceding steps. Use `0` for Greptile fields if no Greptile comments were found.
2. Push (non-fatal):
```bash
~/.claude/skills/gstack/bin/gstack-sync push-ship /tmp/gstack-ship-log.json 2>/dev/null && echo "Synced to team ✓" || true
```
---
## Important Rules
- **Never skip tests.** If tests fail, stop.
+27
View File
@@ -399,6 +399,33 @@ EOF
---
## Step 8.5: Sync to Team (non-fatal)
After the PR is created, write a ship log and sync to the team store. This step is entirely silent if sync is not configured.
1. Write ship metadata to a temp file:
```bash
cat > /tmp/gstack-ship-log.json << 'SHIPEOF'
{
"version": "<new version from Step 4>",
"branch": "<current branch>",
"pr_url": "<PR URL from Step 8>",
"review_findings": { "critical": 0, "informational": 0 },
"greptile_stats": { "total": 0, "valid": 0, "fixed": 0, "fp": 0 },
"todos_completed": [],
"test_results": { "pass": true, "test_count": 0 }
}
SHIPEOF
```
Substitute actual values from the preceding steps. Use `0` for Greptile fields if no Greptile comments were found.
2. Push (non-fatal):
```bash
~/.claude/skills/gstack/bin/gstack-sync push-ship /tmp/gstack-ship-log.json 2>/dev/null && echo "Synced to team ✓" || true
```
---
## Important Rules
- **Never skip tests.** If tests fail, stop.
@@ -0,0 +1,25 @@
-- 005_sync_heartbeats.sql — Lightweight table for sync connectivity tests.
--
-- Used by `gstack-sync test` to validate the full push/pull flow
-- without polluting real data tables.
create table if not exists sync_heartbeats (
id uuid primary key default gen_random_uuid(),
-- Owning team; not null so the RLS policies below can always scope rows.
team_id uuid references teams(id) not null,
-- Pushing user; nullable — presumably to allow token-based/CI pushes without a user row. TODO confirm.
user_id uuid references auth.users(id),
-- Client machine name supplied by the pusher; defaults to empty rather than null.
hostname text not null default '',
timestamp timestamptz not null default now()
);
-- RLS
alter table sync_heartbeats enable row level security;
-- Insert allowed only for teams the authenticated user belongs to.
create policy "team_insert" on sync_heartbeats
for insert with check (
team_id in (select team_id from team_members where user_id = auth.uid())
);
-- Select allowed only for the authenticated user's own teams.
create policy "team_read" on sync_heartbeats
for select using (
team_id in (select team_id from team_members where user_id = auth.uid())
);
+108
View File
@@ -0,0 +1,108 @@
/**
* Tests for sync show formatting functions (pure, no network).
*/
import { describe, test, expect } from 'bun:test';
import { formatTeamSummary, formatEvalTable, formatShipTable, formatRelativeTime } from '../lib/cli-sync';
describe('formatRelativeTime', () => {
  // Helper: ISO timestamp `ms` milliseconds in the past.
  const isoAgo = (ms: number) => new Date(Date.now() - ms).toISOString();

  test('returns "just now" for recent timestamps', () => {
    expect(formatRelativeTime(isoAgo(0))).toBe('just now');
  });

  test('returns minutes for recent past', () => {
    expect(formatRelativeTime(isoAgo(5 * 60_000))).toBe('5m ago');
  });

  test('returns hours for older past', () => {
    expect(formatRelativeTime(isoAgo(3 * 3_600_000))).toBe('3h ago');
  });

  test('returns days for old past', () => {
    expect(formatRelativeTime(isoAgo(2 * 86_400_000))).toBe('2d ago');
  });
});
describe('formatTeamSummary', () => {
  test('formats summary with data', () => {
    const now = new Date().toISOString();
    const output = formatTeamSummary({
      teamSlug: 'test-team',
      evalRuns: [
        { timestamp: now, user_id: 'u1', tests: [{ detection_rate: 4 }] },
        { timestamp: now, user_id: 'u2', tests: [{ detection_rate: 5 }] },
      ],
      shipLogs: [{ created_at: now }],
      retroSnapshots: [{ date: '2026-03-15', streak_days: 47 }],
      queueSize: 0,
      cacheLastPull: now,
    });
    const expectedFragments = [
      'test-team',
      '2 runs',
      '2 contributors',
      '1 PRs',
      '4.5', // avg detection of 4 and 5
      'streak: 47d',
      '0 items',
    ];
    for (const fragment of expectedFragments) {
      expect(output).toContain(fragment);
    }
  });

  test('handles empty data gracefully', () => {
    const output = formatTeamSummary({
      teamSlug: 'empty-team',
      evalRuns: [],
      shipLogs: [],
      retroSnapshots: [],
      queueSize: 3,
      cacheLastPull: null,
    });
    for (const fragment of ['empty-team', '0 runs', '0 PRs', '3 items', 'never']) {
      expect(output).toContain(fragment);
    }
  });
});
describe('formatEvalTable', () => {
  test('formats eval runs as table', () => {
    const run = {
      timestamp: '2026-03-15T12:00:00Z',
      branch: 'main',
      passed: 10,
      total_tests: 10,
      total_cost_usd: 2.50,
      tier: 'e2e',
    };
    const output = formatEvalTable([run]);
    for (const fragment of ['Recent Eval Runs', '2026-03-15', 'main', '10/10', '$2.50', 'e2e']) {
      expect(output).toContain(fragment);
    }
  });

  test('returns message for empty data', () => {
    expect(formatEvalTable([])).toContain('No eval runs yet');
  });
});
describe('formatShipTable', () => {
  test('formats ship logs as table', () => {
    const log = {
      created_at: '2026-03-15T12:00:00Z',
      version: '0.3.10',
      branch: 'feature/sync',
      pr_url: 'https://github.com/org/repo/pull/1',
    };
    const output = formatShipTable([log]);
    for (const fragment of ['Recent Ship Logs', '0.3.10', 'feature/sync', 'github.com']) {
      expect(output).toContain(fragment);
    }
  });

  test('returns message for empty data', () => {
    expect(formatShipTable([])).toContain('No ship logs yet');
  });
});