feat: add team sync infrastructure (config, auth, push/pull, CLI)

- lib/sync-config.ts: reads .gstack-sync.json + ~/.gstack/auth.json
- lib/auth.ts: device auth flow (browser OAuth, local HTTP callback)
- lib/sync.ts: Supabase push/pull via raw fetch(), offline queue, cache
- lib/cli-sync.ts: CLI handler for gstack-sync commands
- bin/gstack-sync: bash wrapper (setup, status, push-*, pull, drain)
- .gstack-sync.json.example: template for team setup

Zero new dependencies — uses raw fetch() against PostgREST API.
All sync is non-fatal, with short timeouts (5s push / 3s pull) and an offline queue fallback.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Garry Tan
2026-03-15 02:02:40 -05:00
parent caed287496
commit 3713c3b9b9
6 changed files with 1086 additions and 0 deletions
+5
View File
@@ -0,0 +1,5 @@
{
"supabase_url": "https://YOUR_PROJECT.supabase.co",
"supabase_anon_key": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.YOUR_ANON_KEY_HERE",
"team_slug": "your-team-name"
}
+69
View File
@@ -0,0 +1,69 @@
#!/usr/bin/env bash
# gstack-sync — team data sync CLI.
#
# Thin dispatcher: every command is implemented in lib/cli-sync.ts and run
# via `bun run`; this script only resolves paths and validates arguments.
#
# Usage:
#   gstack-sync setup             — interactive auth flow
#   gstack-sync status            — show sync status (queue, cache, connection)
#   gstack-sync push-eval <file>  — push an eval result JSON to Supabase
#   gstack-sync push-retro <file> — push a retro snapshot JSON
#   gstack-sync push-qa <file>    — push a QA report JSON
#   gstack-sync push-ship <file>  — push a ship log JSON
#   gstack-sync pull              — pull team data to local cache
#   gstack-sync drain             — drain the offline queue
#   gstack-sync logout            — clear auth tokens
#
# Env overrides (for testing):
#   GSTACK_DIR       — override auto-detected gstack root
#   GSTACK_STATE_DIR — override ~/.gstack state directory
set -euo pipefail

# Resolve the gstack root from this script's location unless overridden.
GSTACK_DIR="${GSTACK_DIR:-$(cd "$(dirname "$0")/.." && pwd)}"
CLI="$GSTACK_DIR/lib/cli-sync.ts"

CMD="${1:-}"
case "$CMD" in
  # Zero-argument commands: forward the subcommand as-is.
  setup|status|pull|drain|logout)
    exec bun run "$CLI" "$CMD"
    ;;
  # File-argument commands: require a JSON file path as $2.
  # ${2:?...} aborts with the usage message when the argument is missing.
  push-eval|push-retro|push-qa|push-ship)
    FILE="${2:?Usage: gstack-sync $CMD <file.json>}"
    exec bun run "$CLI" "$CMD" "$FILE"
    ;;
  *)
    echo "Usage: gstack-sync {setup|status|push-eval|push-retro|push-qa|push-ship|pull|drain|logout}"
    echo ""
    echo "Commands:"
    echo "  setup              Interactive auth flow (opens browser)"
    echo "  status             Show sync status (queue, cache, connection)"
    echo "  push-eval <file>   Push eval result JSON to team store"
    echo "  push-retro <file>  Push retro snapshot JSON"
    echo "  push-qa <file>     Push QA report JSON"
    echo "  push-ship <file>   Push ship log JSON"
    echo "  pull               Pull team data to local cache"
    echo "  drain              Drain the offline sync queue"
    echo "  logout             Clear auth tokens"
    exit 1
    ;;
esac
+211
View File
@@ -0,0 +1,211 @@
/**
* Device auth flow for team sync.
*
* Opens a browser for Supabase OAuth/magic link, polls for completion,
* and saves tokens to ~/.gstack/auth.json.
*
* Two modes:
* 1. Magic link: user enters email → receives link → CLI detects auth via polling
* 2. Browser OAuth: opens Supabase auth page → callback to localhost → CLI captures token
*
* For CI: set GSTACK_SUPABASE_ACCESS_TOKEN env var to skip interactive auth.
*/
import * as http from 'http';
import { saveAuthTokens, type TeamConfig, type AuthTokens } from './sync-config';
// Fixed localhost port for the auth callback server; buildAuthUrl embeds it
// in the redirect_to parameter, so both must agree.
const AUTH_CALLBACK_PORT = 54321;
const AUTH_TIMEOUT_MS = 300_000; // 5 minutes — abort the whole flow after this
/**
 * Run the interactive device auth flow.
 *
 * 1. Starts a local HTTP server on port 54321
 * 2. Opens the Supabase auth page in the browser (with redirect to localhost)
 * 3. Waits for the auth callback with tokens
 * 4. Saves tokens via saveAuthTokens() and resolves with them
 *
 * Rejects when the port is busy, saving tokens fails, or no callback arrives
 * within AUTH_TIMEOUT_MS.
 */
export async function runDeviceAuth(team: TeamConfig): Promise<AuthTokens> {
  return new Promise<AuthTokens>((resolve, reject) => {
    // Shared completion path for both callback shapes: respond first (callers
    // do that), then stop the server, persist, and settle the promise.
    const finish = (tokens: AuthTokens): void => {
      clearTimeout(timeout);
      server.close();
      try {
        saveAuthTokens(team.supabase_url, tokens);
      } catch (err: any) {
        reject(new Error(`Failed to save auth tokens: ${err.message}`));
        return;
      }
      resolve(tokens);
    };
    const server = http.createServer((req, res) => {
      const url = new URL(req.url || '/', `http://localhost:${AUTH_CALLBACK_PORT}`);
      // GET /auth/callback — Supabase redirects here. Tokens normally arrive
      // in the URL *fragment*, which browsers never send to the server (the
      // original's `url.hash` fallback was dead code for that reason), so when
      // no access_token query param is present we serve a page whose script
      // extracts the fragment and POSTs it to /auth/token.
      if (url.pathname === '/auth/callback') {
        const accessToken = url.searchParams.get('access_token');
        if (!accessToken) {
          res.writeHead(200, { 'Content-Type': 'text/html' });
          res.end(authCallbackHTML(AUTH_CALLBACK_PORT));
          return;
        }
        const expiresIn = parseInt(url.searchParams.get('expires_in') || '3600', 10);
        res.writeHead(200, { 'Content-Type': 'text/html' });
        res.end(authSuccessHTML());
        finish({
          access_token: accessToken,
          refresh_token: url.searchParams.get('refresh_token') || '',
          expires_at: Math.floor(Date.now() / 1000) + expiresIn,
          user_id: url.searchParams.get('user_id') || '',
          team_id: '', // filled in by sync.ts after first API call
          email: url.searchParams.get('email') || '',
        });
        return;
      }
      // POST /auth/token — the callback page posts the fragment tokens here.
      if (url.pathname === '/auth/token' && req.method === 'POST') {
        let body = '';
        req.on('data', (chunk: Buffer) => { body += chunk.toString(); });
        req.on('end', () => {
          let tokens: AuthTokens;
          try {
            const data = JSON.parse(body);
            tokens = {
              access_token: data.access_token || '',
              refresh_token: data.refresh_token || '',
              expires_at: Math.floor(Date.now() / 1000) + (data.expires_in || 3600),
              user_id: data.user?.id || '',
              team_id: '',
              email: data.user?.email || '',
            };
          } catch (err: any) {
            // Malformed JSON: report it and keep waiting for a valid callback.
            res.writeHead(400, { 'Content-Type': 'application/json' });
            res.end(JSON.stringify({ error: err.message }));
            return;
          }
          // Respond BEFORE persisting: the original called saveAuthTokens
          // inside the same try after res.end(), so a save failure hit the
          // catch and attempted a second writeHead on a finished response
          // (ERR_HTTP_HEADERS_SENT) instead of rejecting cleanly.
          res.writeHead(200, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify({ ok: true }));
          finish(tokens);
        });
        return;
      }
      res.writeHead(404);
      res.end('Not found');
    });
    // Declared after `server` so the timer callback never touches an
    // uninitialized binding (the original referenced `server` from a callback
    // created before its declaration, relying on the 5-minute delay).
    const timeout = setTimeout(() => {
      server.close();
      reject(new Error('Auth timed out after 5 minutes. Please try again.'));
    }, AUTH_TIMEOUT_MS);
    server.listen(AUTH_CALLBACK_PORT, '127.0.0.1', () => {
      const authUrl = buildAuthUrl(team.supabase_url, AUTH_CALLBACK_PORT);
      console.log(`\nOpening browser for authentication...`);
      console.log(`If the browser doesn't open, visit:\n  ${authUrl}\n`);
      openBrowser(authUrl);
    });
    server.on('error', (err: any) => {
      clearTimeout(timeout);
      if (err.code === 'EADDRINUSE') {
        reject(new Error(`Port ${AUTH_CALLBACK_PORT} is in use. Close the other process and try again.`));
      } else {
        reject(err);
      }
    });
  });
}
/** Build the Supabase GitHub-OAuth authorize URL redirecting to localhost. */
function buildAuthUrl(supabaseUrl: string, port: number): string {
  const encodedCallback = encodeURIComponent(`http://localhost:${port}/auth/callback`);
  return `${supabaseUrl}/auth/v1/authorize?provider=github&redirect_to=${encodedCallback}`;
}
/** Open a URL in the default browser (best-effort; silently no-ops on other platforms). */
function openBrowser(url: string): void {
  const { spawnSync } = require('child_process');
  switch (process.platform) {
    case 'darwin': // macOS
      spawnSync('open', [url], { stdio: 'ignore' });
      break;
    case 'linux':
      spawnSync('xdg-open', [url], { stdio: 'ignore' });
      break;
    case 'win32':
      spawnSync('cmd', ['/c', 'start', url], { stdio: 'ignore' });
      break;
    default:
      // Unknown platform: caller already printed the URL for manual opening.
      break;
  }
}
/**
 * HTML page that extracts tokens from URL hash and POSTs them to the local server.
 *
 * Needed because Supabase delivers tokens in the URL *fragment*, which the
 * browser never sends to the server; this page's inline script reads the
 * fragment client-side and POSTs it to /auth/token on the given port.
 * Note: user.id/email are posted as empty strings — only the fragment's
 * token fields are available here.
 */
function authCallbackHTML(port: number): string {
  return `<!DOCTYPE html>
<html>
<head><title>gstack auth</title></head>
<body>
<h2>Completing authentication...</h2>
<p id="status">Extracting tokens...</p>
<script>
const hash = window.location.hash.substring(1);
const params = new URLSearchParams(hash);
const data = {
access_token: params.get('access_token'),
refresh_token: params.get('refresh_token'),
expires_in: parseInt(params.get('expires_in') || '3600'),
user: { id: '', email: '' }
};
if (data.access_token) {
fetch('http://localhost:${port}/auth/token', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(data)
}).then(() => {
document.getElementById('status').textContent = 'Authenticated! You can close this tab.';
}).catch(err => {
document.getElementById('status').textContent = 'Error: ' + err.message;
});
} else {
document.getElementById('status').textContent = 'No tokens found in URL. Please try again.';
}
</script>
</body>
</html>`;
}
/**
 * HTML page shown after successful auth.
 * Served on the GET /auth/callback path when tokens arrived via query params.
 */
function authSuccessHTML(): string {
  return `<!DOCTYPE html>
<html>
<head><title>gstack auth</title></head>
<body>
<h2>Authenticated!</h2>
<p>You can close this tab and return to your terminal.</p>
</body>
</html>`;
}
/**
 * True when the token is expired or within 5 minutes of expiring.
 * Tokens with a falsy expires_at (env-var tokens) never expire.
 */
export function isTokenExpired(tokens: AuthTokens): boolean {
  if (!tokens.expires_at) return false;
  const EXPIRY_BUFFER_S = 300; // refresh 5 minutes early
  const nowS = Math.floor(Date.now() / 1000);
  return nowS >= tokens.expires_at - EXPIRY_BUFFER_S;
}
+171
View File
@@ -0,0 +1,171 @@
/**
* CLI handler for gstack-sync commands.
* Called by bin/gstack-sync via `bun run`.
*/
import * as fs from 'fs';
import { getTeamConfig, resolveSyncConfig, clearAuthTokens, isSyncConfigured } from './sync-config';
import { runDeviceAuth } from './auth';
import { pushEvalRun, pushRetro, pushQAReport, pushShipLog, pullTable, drainQueue, getSyncStatus } from './sync';
import { readJSON } from './util';
// Subcommand forwarded by bin/gstack-sync; file arguments arrive in argv[3].
const command = process.argv[2];

/** Dispatch table: subcommand name → handler. */
const handlers: Record<string, () => void | Promise<void>> = {
  'setup': cmdSetup,
  'status': cmdStatus,
  'push-eval': () => cmdPushFile('eval', process.argv[3]),
  'push-retro': () => cmdPushFile('retro', process.argv[3]),
  'push-qa': () => cmdPushFile('qa', process.argv[3]),
  'push-ship': () => cmdPushFile('ship', process.argv[3]),
  'pull': cmdPull,
  'drain': cmdDrain,
  'logout': cmdLogout,
};

async function main() {
  const handler = handlers[command];
  if (!handler) {
    console.error(`Unknown command: ${command}`);
    process.exit(1);
  }
  await handler();
}
/** `gstack-sync setup` — run the interactive browser auth flow. */
async function cmdSetup(): Promise<void> {
  const team = getTeamConfig();
  if (team === null) {
    console.error('No .gstack-sync.json found in project root.');
    console.error('Ask your team admin to set up team sync first.');
    process.exit(1);
  }
  console.log(`Team: ${team.team_slug}`);
  console.log(`Supabase: ${team.supabase_url}`);
  let tokens;
  try {
    tokens = await runDeviceAuth(team);
  } catch (err: any) {
    console.error(`\nAuth failed: ${err.message}`);
    process.exit(1);
  }
  console.log(`\nAuthenticated as ${tokens.email || tokens.user_id}`);
  console.log('Sync is now enabled. Run `gstack-sync status` to verify.');
}
/** `gstack-sync status` — print config/auth state, queue depth, and connectivity. */
async function cmdStatus(): Promise<void> {
  const status = await getSyncStatus();
  const configuredLabel = status.configured ? 'yes' : 'no (.gstack-sync.json not found)';
  const authLabel = status.authenticated ? 'yes' : 'no (run gstack-sync setup)';
  const enabledLabel = status.syncEnabled ? 'yes' : 'no';
  const connLabel = status.connectionOk ? 'ok' : 'failed';
  const queueSuffix = status.queueOldest ? ` (oldest: ${status.queueOldest})` : '';
  const cacheLabel = status.cacheLastPull ? `last pull ${status.cacheLastPull}` : 'never pulled';
  console.log('gstack sync status');
  console.log('─'.repeat(40));
  console.log(` Configured: ${configuredLabel}`);
  console.log(` Authenticated: ${authLabel}`);
  console.log(` Sync enabled: ${enabledLabel}`);
  console.log(` Connection: ${connLabel}`);
  console.log(` Queue: ${status.queueSize} items${queueSuffix}`);
  console.log(` Cache: ${cacheLabel}`);
  // Nudge the user to drain when the queue is deep or stale (> 24h).
  if (status.queueSize > 100) {
    console.log(`\n WARNING: Queue has ${status.queueSize} items. Run 'gstack-sync drain' to flush.`);
  }
  if (status.queueOldest) {
    const ageMs = Date.now() - new Date(status.queueOldest).getTime();
    if (ageMs > 86_400_000) {
      console.log(`\n WARNING: Oldest queue entry is ${Math.round(ageMs / 3_600_000)}h old. Run 'gstack-sync drain'.`);
    }
  }
}
/** Handle `push-*` commands: read a JSON file and push it to the team store. */
async function cmdPushFile(type: string, filePath: string): Promise<void> {
  if (!filePath) {
    console.error(`Usage: gstack-sync push-${type} <file.json>`);
    process.exit(1);
  }
  // Sync not configured is normal for solo users — exit quietly.
  if (!isSyncConfigured()) {
    process.exit(0);
  }
  const data = readJSON<Record<string, unknown>>(filePath);
  if (!data) {
    console.error(`Cannot read ${filePath}`);
    process.exit(1);
  }
  const pushers: Record<string, (d: Record<string, unknown>) => Promise<boolean>> = {
    eval: pushEvalRun,
    retro: pushRetro,
    qa: pushQAReport,
    ship: pushShipLog,
  };
  const push = pushers[type];
  const ok = push ? await push(data) : false;
  if (ok) {
    console.log(`Synced ${type} to team store`);
  }
  // Failures stay silent — the row was queued for retry.
}
/** `gstack-sync pull` — refresh the local cache for every team table. */
async function cmdPull(): Promise<void> {
  if (!isSyncConfigured()) {
    console.error('Sync not configured. Run gstack-sync setup first.');
    process.exit(1);
  }
  const tables = ['eval_runs', 'retro_snapshots', 'qa_reports', 'ship_logs', 'greptile_triage'];
  let total = 0;
  for (const table of tables) {
    const rows = await pullTable(table);
    total += rows.length;
    if (rows.length === 0) continue;
    console.log(` ${table}: ${rows.length} rows`);
  }
  console.log(`\nPulled ${total} total rows to local cache.`);
}
/** `gstack-sync drain` — flush the offline queue and report counts. */
async function cmdDrain(): Promise<void> {
  const { success, failed, remaining } = await drainQueue();
  console.log(`Queue drain: ${success} synced, ${failed} failed, ${remaining} remaining`);
}
/** `gstack-sync logout` — drop stored auth tokens for this team's Supabase URL. */
function cmdLogout(): void {
  const team = getTeamConfig();
  if (team === null) {
    console.log('No team config found — nothing to clear.');
    return;
  }
  clearAuthTokens(team.supabase_url);
  console.log(`Cleared auth tokens for ${team.supabase_url}`);
}
// Entry point: surface any unhandled error as a single message and exit
// non-zero so the bin/gstack-sync wrapper (and shell callers) see failure.
main().catch(err => {
  console.error(err.message);
  process.exit(1);
});
+179
View File
@@ -0,0 +1,179 @@
/**
* Team sync configuration resolution.
*
* Reads project-level config (.gstack-sync.json) and user-level auth
* (~/.gstack/auth.json). All functions return null/defaults when sync
* is not configured — zero impact on non-sync users.
*/
import * as fs from 'fs';
import * as path from 'path';
import { GSTACK_STATE_DIR, getGitRoot, readJSON, atomicWriteJSON } from './util';
// --- Interfaces ---
/** Project-level team settings read from .gstack-sync.json in the git root. */
export interface TeamConfig {
  supabase_url: string;      // e.g. https://PROJECT.supabase.co
  supabase_anon_key: string; // public anon key used as the PostgREST apikey
  team_slug: string;         // human-readable team identifier
}
/** Per-Supabase-URL credentials persisted in ~/.gstack/auth.json. */
export interface AuthTokens {
  access_token: string;
  refresh_token: string;
  expires_at: number; // epoch seconds; 0 means "never expires" (env tokens)
  user_id: string;
  team_id: string;    // filled in lazily after the first API call
  email: string;
}
/** Fully-resolved sync settings: team config + valid auth + user toggles. */
export interface SyncConfig {
  team: TeamConfig;
  auth: AuthTokens;
  syncEnabled: boolean;
  syncTranscripts: boolean; // opt-in: full transcripts are stripped by default
}
// --- Paths ---
// Auth file lives in the user state dir and is keyed by Supabase URL.
const AUTH_FILE = path.join(GSTACK_STATE_DIR, 'auth.json');
const SYNC_CONFIG_FILENAME = '.gstack-sync.json';
/** Resolve path to .gstack-sync.json in the project root. */
export function getSyncConfigPath(): string | null {
const root = getGitRoot();
if (!root) return null;
const configPath = path.join(root, SYNC_CONFIG_FILENAME);
return fs.existsSync(configPath) ? configPath : null;
}
// --- Team config ---
/**
 * Read and validate .gstack-sync.json from the project root.
 * Returns null when the file is missing, unreadable, or any field is
 * absent / not a non-empty string.
 */
export function getTeamConfig(): TeamConfig | null {
  const configPath = getSyncConfigPath();
  if (configPath === null) return null;
  const raw = readJSON<Record<string, unknown>>(configPath);
  if (!raw) return null;
  const isNonEmptyString = (v: unknown): v is string =>
    typeof v === 'string' && v.length > 0;
  const { supabase_url, supabase_anon_key, team_slug } = raw;
  if (!isNonEmptyString(supabase_url)) return null;
  if (!isNonEmptyString(supabase_anon_key)) return null;
  if (!isNonEmptyString(team_slug)) return null;
  return { supabase_url, supabase_anon_key, team_slug };
}
// --- Auth tokens ---
/**
 * Read auth tokens for a specific Supabase URL.
 * The auth file is keyed by URL so multiple teams/projects can coexist.
 * GSTACK_SUPABASE_ACCESS_TOKEN (CI/automation) short-circuits file auth.
 */
export function getAuthTokens(supabaseUrl: string): AuthTokens | null {
  const envToken = process.env.GSTACK_SUPABASE_ACCESS_TOKEN;
  if (envToken) {
    return {
      access_token: envToken,
      refresh_token: '',
      expires_at: 0, // env tokens never expire (see isTokenExpired)
      user_id: '',
      team_id: '',
      email: 'ci@automation',
    };
  }
  const store = readJSON<Record<string, AuthTokens>>(AUTH_FILE);
  const entry = store ? store[supabaseUrl] : undefined;
  // An entry without an access_token is useless — treat as unauthenticated.
  return entry && entry.access_token ? entry : null;
}
/** Save auth tokens for a Supabase URL, preserving other URLs' entries. Mode 0o600. */
export function saveAuthTokens(supabaseUrl: string, tokens: AuthTokens): void {
  const store = readJSON<Record<string, AuthTokens>>(AUTH_FILE) ?? {};
  store[supabaseUrl] = tokens;
  atomicWriteJSON(AUTH_FILE, store, 0o600);
}
/** Remove auth tokens for a Supabase URL; no-op when none are stored. */
export function clearAuthTokens(supabaseUrl: string): void {
  const store = readJSON<Record<string, AuthTokens>>(AUTH_FILE);
  if (!store || store[supabaseUrl] === undefined) return;
  delete store[supabaseUrl];
  atomicWriteJSON(AUTH_FILE, store, 0o600);
}
// --- User settings (via gstack-config) ---
/**
 * Read a user setting by shelling out to bin/gstack-config.
 * Returns '' when the script is missing, fails, or times out (2s cap),
 * so callers only ever compare against string values.
 */
function getUserSetting(key: string): string {
  try {
    const gstackDir = process.env.GSTACK_DIR || path.resolve(__dirname, '..');
    const configScript = path.join(gstackDir, 'bin', 'gstack-config');
    if (!fs.existsSync(configScript)) return '';
    const { spawnSync } = require('child_process');
    const result = spawnSync(configScript, ['get', key], {
      stdio: 'pipe',
      timeout: 2_000,
      // Forward the (possibly overridden) state dir to the child.
      env: { ...process.env, GSTACK_STATE_DIR },
    });
    const value = result.stdout?.toString().trim();
    return value || '';
  } catch {
    return ''; // best-effort: missing settings read as unset
  }
}
// --- Full config resolution ---
/**
 * Resolve the complete sync config. Returns null when sync is unconfigured
 * (no .gstack-sync.json), disabled (sync_enabled=false), or unauthenticated.
 */
export function resolveSyncConfig(): SyncConfig | null {
  const team = getTeamConfig();
  if (team === null) return null;
  // Opt-out toggle: anything other than the literal "false" means enabled.
  if (getUserSetting('sync_enabled') === 'false') return null;
  const auth = getAuthTokens(team.supabase_url);
  if (auth === null) return null;
  return {
    team,
    auth,
    syncEnabled: true, // guaranteed by the early return above
    syncTranscripts: getUserSetting('sync_transcripts') === 'true',
  };
}
/**
 * Check if sync is configured (team config exists and auth is present).
 * Lighter than resolveSyncConfig — skips the user-setting subprocess calls.
 */
export function isSyncConfigured(): boolean {
  const team = getTeamConfig();
  return team !== null && getAuthTokens(team.supabase_url) !== null;
}
// --- Cache paths ---
/** Team cache directory (.gstack/team-cache/ in the git root), or null outside a repo. */
export function getTeamCacheDir(): string | null {
  const root = getGitRoot();
  return root ? path.join(root, '.gstack', 'team-cache') : null;
}
/** Offline sync queue file (~/.gstack/sync-queue.json). */
export function getSyncQueuePath(): string {
  const queueFilename = 'sync-queue.json';
  return path.join(GSTACK_STATE_DIR, queueFilename);
}
+451
View File
@@ -0,0 +1,451 @@
/**
* Team sync client — push/pull data to/from Supabase.
*
* All operations are non-fatal. Push failures queue to sync-queue.json.
* Pull failures fall back to local data. Skills never block on sync.
*
* Uses raw fetch() instead of @supabase/supabase-js to avoid adding
* a dependency. The Supabase REST API is just PostgREST over HTTPS.
*/
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
import { resolveSyncConfig, getTeamConfig, getAuthTokens, saveAuthTokens, getSyncQueuePath, getTeamCacheDir, type SyncConfig, type AuthTokens } from './sync-config';
import { readJSON, atomicWriteJSON, getRemoteSlug } from './util';
import { isTokenExpired } from './auth';
// Network budgets: sync must never noticeably block a caller, so pushes get
// 5s and pulls 3s before fetchWithTimeout aborts the request.
const PUSH_TIMEOUT_MS = 5_000;
const PULL_TIMEOUT_MS = 3_000;
// Max requests in flight per batch when draining the offline queue.
const QUEUE_DRAIN_CONCURRENCY = 10;
// --- Types ---
/** One queued (previously failed) push, persisted in the sync queue file. */
export interface QueueEntry {
  table: string;                 // target Supabase table
  data: Record<string, unknown>; // row payload exactly as first pushed
  timestamp: string;             // ISO time the entry was enqueued
  retries: number;               // drain attempts so far (dropped at 5)
}
/** Cache bookkeeping written to .meta.json in the team cache dir. */
interface CacheMeta {
  last_pull: string;
  tables: Record<string, { rows: number; latest: string }>;
}
// --- Token refresh ---
/**
 * Refresh an expired access token using the refresh token.
 * Returns new tokens on success; null on HTTP failure, malformed payload,
 * network error, or timeout. Never throws.
 */
async function refreshToken(supabaseUrl: string, refreshToken: string, anonKey: string): Promise<AuthTokens | null> {
  try {
    const res = await fetchWithTimeout(`${supabaseUrl}/auth/v1/token?grant_type=refresh_token`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'apikey': anonKey,
      },
      body: JSON.stringify({ refresh_token: refreshToken }),
    }, PUSH_TIMEOUT_MS);
    if (!res.ok) return null;
    const data = await res.json() as Record<string, unknown>;
    // Guard: the original cast data.access_token with `as string` unchecked,
    // so a 200 without one produced (and persisted) access_token=undefined,
    // later sent as "Bearer undefined". Treat it as a failed refresh instead.
    if (typeof data.access_token !== 'string' || !data.access_token) return null;
    return {
      access_token: data.access_token,
      refresh_token: (data.refresh_token as string) || refreshToken,
      expires_at: Math.floor(Date.now() / 1000) + ((data.expires_in as number) || 3600),
      user_id: (data.user as any)?.id || '',
      team_id: '', // resolved separately after the first API call
      email: (data.user as any)?.email || '',
    };
  } catch {
    return null; // non-fatal by design — caller degrades gracefully
  }
}
/** Get a valid access token for `config`, transparently refreshing (and persisting) if expired. */
async function getValidToken(config: SyncConfig): Promise<string | null> {
  const current = config.auth;
  if (!isTokenExpired(current)) return current.access_token;
  if (!current.refresh_token) return null;
  const refreshed = await refreshToken(
    config.team.supabase_url,
    current.refresh_token,
    config.team.supabase_anon_key,
  );
  if (refreshed === null) return null;
  saveAuthTokens(config.team.supabase_url, refreshed); // persist for future processes
  config.auth = refreshed; // keep the in-memory config current too
  return refreshed.access_token;
}
// --- HTTP helpers ---
/** fetch() that aborts (rejects) after `timeoutMs`. */
async function fetchWithTimeout(url: string, init: RequestInit, timeoutMs: number): Promise<Response> {
  const aborter = new AbortController();
  const timer = setTimeout(() => aborter.abort(), timeoutMs);
  try {
    return await fetch(url, { ...init, signal: aborter.signal });
  } finally {
    clearTimeout(timer); // disarm on both success and failure
  }
}
/** PostgREST endpoint URL for a table. */
function restUrl(supabaseUrl: string, table: string): string {
  return [supabaseUrl, 'rest/v1', table].join('/');
}
/** Headers for authenticated PostgREST writes; Prefer header requests upsert semantics. */
function authHeaders(anonKey: string, accessToken: string): Record<string, string> {
  const headers: Record<string, string> = {
    'apikey': anonKey,
    'Authorization': `Bearer ${accessToken}`,
  };
  headers['Content-Type'] = 'application/json';
  headers['Prefer'] = 'resolution=merge-duplicates';
  return headers;
}
// --- Push operations ---
/**
 * Push a row to a Supabase table. Non-fatal — on token/HTTP/network failure
 * the row is appended to the offline queue and false is returned.
 * Upsert semantics come from the Prefer header (see authHeaders).
 */
export async function pushRow(table: string, data: Record<string, unknown>): Promise<boolean> {
  const queueForRetry = () =>
    enqueue({ table, data, timestamp: new Date().toISOString(), retries: 0 });
  try {
    const config = resolveSyncConfig();
    if (!config) return false;
    const token = await getValidToken(config);
    if (!token) {
      queueForRetry();
      return false;
    }
    const res = await fetchWithTimeout(
      restUrl(config.team.supabase_url, table),
      {
        method: 'POST',
        headers: authHeaders(config.team.supabase_anon_key, token),
        body: JSON.stringify(data),
      },
      PUSH_TIMEOUT_MS,
    );
    // Any 2xx (res.ok covers 201) or 409 (already upserted) counts as synced.
    if (res.ok || res.status === 409) return true;
    queueForRetry();
    return false;
  } catch {
    queueForRetry(); // network error or timeout — drainQueue retries later
    return false;
  }
}
/**
 * Push an eval run result to Supabase.
 *
 * Transcripts are stripped and prompts truncated to 500 chars to keep the
 * payload small. Returns false when sync is unconfigured; otherwise delegates
 * to pushRow (which queues on failure).
 */
export async function pushEvalRun(evalResult: Record<string, unknown>): Promise<boolean> {
  const config = resolveSyncConfig();
  if (!config) return false;
  // Defensive: eval files come from disk and may be malformed. The original
  // called t.prompt.slice() whenever prompt was truthy — throwing (outside any
  // try/catch) for non-string prompts — and `(tests as any[])?.map` threw for
  // truthy non-array values, both breaking the "sync is non-fatal" contract.
  const tests = Array.isArray(evalResult.tests)
    ? (evalResult.tests as any[]).map(t => ({
        ...t,
        transcript: undefined, // strip full transcripts to keep payload small
        prompt: typeof t?.prompt === 'string' && t.prompt ? t.prompt.slice(0, 500) : undefined,
      }))
    : undefined;
  const data = {
    team_id: config.auth.team_id,
    repo_slug: getRemoteSlug(),
    user_id: config.auth.user_id,
    hostname: os.hostname(),
    ...evalResult,
    tests, // placed after the spread so the sanitized copy wins
  };
  return pushRow('eval_runs', data);
}
/** Push a retro snapshot row; returns false (no-op) when sync is unconfigured. */
export async function pushRetro(retroData: Record<string, unknown>): Promise<boolean> {
  const config = resolveSyncConfig();
  if (config === null) return false;
  const row = {
    team_id: config.auth.team_id,
    repo_slug: getRemoteSlug(),
    user_id: config.auth.user_id,
    ...retroData,
  };
  return pushRow('retro_snapshots', row);
}
/** Push a QA report row; returns false (no-op) when sync is unconfigured. */
export async function pushQAReport(qaData: Record<string, unknown>): Promise<boolean> {
  const config = resolveSyncConfig();
  if (config === null) return false;
  const row = {
    team_id: config.auth.team_id,
    repo_slug: getRemoteSlug(),
    user_id: config.auth.user_id,
    ...qaData,
  };
  return pushRow('qa_reports', row);
}
/** Push a ship log row; returns false (no-op) when sync is unconfigured. */
export async function pushShipLog(shipData: Record<string, unknown>): Promise<boolean> {
  const config = resolveSyncConfig();
  if (config === null) return false;
  const row = {
    team_id: config.auth.team_id,
    repo_slug: getRemoteSlug(),
    user_id: config.auth.user_id,
    ...shipData,
  };
  return pushRow('ship_logs', row);
}
/** Push a Greptile triage row (no repo_slug column); false when unconfigured. */
export async function pushGreptileTriage(triageData: Record<string, unknown>): Promise<boolean> {
  const config = resolveSyncConfig();
  if (config === null) return false;
  const row = {
    team_id: config.auth.team_id,
    user_id: config.auth.user_id,
    ...triageData,
  };
  return pushRow('greptile_triage', row);
}
// --- Pull operations ---
/**
 * Pull rows from a Supabase table and refresh the local cache.
 * On any failure (no token, HTTP error, network, timeout) falls back to the
 * last cached copy — an empty array if the table was never cached.
 */
export async function pullTable(table: string, query?: string): Promise<Record<string, unknown>[]> {
  try {
    const config = resolveSyncConfig();
    if (!config) return [];
    const token = await getValidToken(config);
    if (!token) return readCachedTable(table);
    const base = restUrl(config.team.supabase_url, table);
    // Default query: this team's rows, newest first, capped at 500.
    const qs = query || `team_id=eq.${config.auth.team_id}&order=created_at.desc&limit=500`;
    const res = await fetchWithTimeout(`${base}?${qs}`, {
      method: 'GET',
      headers: {
        'apikey': config.team.supabase_anon_key,
        'Authorization': `Bearer ${token}`,
      },
    }, PULL_TIMEOUT_MS);
    if (!res.ok) return readCachedTable(table);
    const rows = await res.json() as Record<string, unknown>[];
    writeCachedTable(table, rows); // refresh the offline cache
    return rows;
  } catch {
    return readCachedTable(table);
  }
}
/** Pull team eval runs, optionally filtered by branch and/or repo slug. */
export async function pullEvalRuns(opts?: { branch?: string; repoSlug?: string; limit?: number }): Promise<Record<string, unknown>[]> {
  const config = resolveSyncConfig();
  if (!config) return [];
  const { branch, repoSlug, limit } = opts ?? {};
  const filters = [`team_id=eq.${config.auth.team_id}`, 'order=timestamp.desc'];
  if (branch) filters.push(`branch=eq.${branch}`);
  if (repoSlug) filters.push(`repo_slug=eq.${repoSlug}`);
  filters.push(`limit=${limit || 100}`);
  return pullTable('eval_runs', filters.join('&'));
}
/** Pull team retro snapshots, optionally filtered by repo slug. */
export async function pullRetros(opts?: { repoSlug?: string; limit?: number }): Promise<Record<string, unknown>[]> {
  const config = resolveSyncConfig();
  if (!config) return [];
  const { repoSlug, limit } = opts ?? {};
  const filters = [`team_id=eq.${config.auth.team_id}`, 'order=date.desc'];
  if (repoSlug) filters.push(`repo_slug=eq.${repoSlug}`);
  filters.push(`limit=${limit || 50}`);
  return pullTable('retro_snapshots', filters.join('&'));
}
// --- Offline queue ---
/** Append a failed push to the offline queue (best-effort; never throws). */
function enqueue(entry: QueueEntry): void {
  try {
    const queuePath = getSyncQueuePath();
    const pending = readJSON<QueueEntry[]>(queuePath) ?? [];
    atomicWriteJSON(queuePath, [...pending, entry]);
  } catch { /* non-fatal */ }
}
/**
 * Drain the offline queue, pushing up to QUEUE_DRAIN_CONCURRENCY entries in
 * parallel per batch.
 *
 * Entries that fail with a real push error get retries incremented and are
 * dropped after 5 attempts. When sync is unconfigured or no valid token is
 * available, the queue is left untouched and no retries are burned — the
 * original resolved config and fetched a token once PER ENTRY (wasteful, and
 * concurrent token refreshes could race) and counted "not configured" as a
 * retry, silently discarding queued data after five offline drains.
 */
export async function drainQueue(): Promise<{ success: number; failed: number; remaining: number }> {
  const queuePath = getSyncQueuePath();
  const queue = readJSON<QueueEntry[]>(queuePath) || [];
  if (queue.length === 0) return { success: 0, failed: 0, remaining: 0 };
  // Resolve config once per drain, not once per queued entry.
  const config = resolveSyncConfig();
  if (!config) return { success: 0, failed: queue.length, remaining: queue.length };
  let success = 0;
  let failed = 0;
  const remaining: QueueEntry[] = [];
  for (let i = 0; i < queue.length; i += QUEUE_DRAIN_CONCURRENCY) {
    const batch = queue.slice(i, i + QUEUE_DRAIN_CONCURRENCY);
    // One token check per batch so refreshes are serialized; getValidToken
    // transparently refreshes if the token expires mid-drain.
    const token = await getValidToken(config);
    if (!token) {
      failed += batch.length;
      remaining.push(...batch); // keep untouched; retry on the next drain
      continue;
    }
    const results = await Promise.allSettled(
      batch.map(async (entry) => {
        const res = await fetchWithTimeout(
          restUrl(config.team.supabase_url, entry.table),
          {
            method: 'POST',
            headers: authHeaders(config.team.supabase_anon_key, token),
            body: JSON.stringify(entry.data),
          },
          PUSH_TIMEOUT_MS,
        );
        // 2xx or 409 (already upserted) count as synced.
        if (!res.ok && res.status !== 201 && res.status !== 409) {
          throw new Error(`HTTP ${res.status}`);
        }
        return true;
      }),
    );
    results.forEach((result, idx) => {
      if (result.status === 'fulfilled') {
        success++;
        return;
      }
      failed++;
      const entry = batch[idx];
      entry.retries++;
      if (entry.retries < 5) remaining.push(entry); // drop after 5 real failures
    });
  }
  // Persist whatever is left for the next drain.
  atomicWriteJSON(queuePath, remaining);
  return { success, failed, remaining: remaining.length };
}
// --- Cache ---
/** Read a table's cached rows from the team cache dir; [] when absent. */
function readCachedTable(table: string): Record<string, unknown>[] {
  const cacheDir = getTeamCacheDir();
  if (cacheDir === null) return [];
  return readJSON<Record<string, unknown>[]>(path.join(cacheDir, `${table}.json`)) ?? [];
}
/** Write pulled rows to the local cache and refresh .meta.json (best-effort). */
function writeCachedTable(table: string, rows: Record<string, unknown>[]): void {
  try {
    const cacheDir = getTeamCacheDir();
    if (!cacheDir) return;
    fs.mkdirSync(cacheDir, { recursive: true });
    atomicWriteJSON(path.join(cacheDir, `${table}.json`), rows);
    const metaPath = path.join(cacheDir, '.meta.json');
    const meta = readJSON<CacheMeta>(metaPath) ?? { last_pull: '', tables: {} };
    meta.last_pull = new Date().toISOString();
    // Rows come back newest-first (order=created_at.desc), so rows[0] holds
    // the most recent created_at — TODO confirm for caller-supplied queries.
    const newest = rows.length > 0 ? (rows[0].created_at as string) : undefined;
    meta.tables[table] = {
      rows: rows.length,
      latest: newest || new Date().toISOString(),
    };
    atomicWriteJSON(metaPath, meta);
  } catch { /* cache write is best-effort */ }
}
// --- Status ---
/** Get sync status: queue size, cache freshness, connection health. */
export async function getSyncStatus(): Promise<{
configured: boolean;
authenticated: boolean;
syncEnabled: boolean;
queueSize: number;
queueOldest: string | null;
cacheLastPull: string | null;
connectionOk: boolean;
}> {
const team = getTeamConfig();
const configured = team !== null;
const auth = team ? getAuthTokens(team.supabase_url) : null;
const authenticated = auth !== null;
const config = resolveSyncConfig();
const syncEnabled = config !== null;
const queue = readJSON<QueueEntry[]>(getSyncQueuePath()) || [];
const queueSize = queue.length;
const queueOldest = queue.length > 0 ? queue[0].timestamp : null;
const cacheDir = getTeamCacheDir();
const meta = cacheDir ? readJSON<CacheMeta>(path.join(cacheDir, '.meta.json')) : null;
const cacheLastPull = meta?.last_pull || null;
// Quick connectivity check
let connectionOk = false;
if (config) {
try {
const token = await getValidToken(config);
if (token) {
const res = await fetchWithTimeout(
`${config.team.supabase_url}/rest/v1/`,
{
method: 'HEAD',
headers: {
'apikey': config.team.supabase_anon_key,
'Authorization': `Bearer ${token}`,
},
},
PULL_TIMEOUT_MS,
);
connectionOk = res.ok;
}
} catch { /* connection failed */ }
}
return {
configured,
authenticated,
syncEnabled,
queueSize,
queueOldest,
cacheLastPull,
connectionOk,
};
}