chore: stage pre-existing community tier changes

Community tier auth, backup/restore, and test updates that were already
on this branch before the telemetry sprint. Includes updated telemetry
prompt test to match 3-option community tier flow.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Garry Tan
2026-03-23 15:49:40 -07:00
parent 00ce4b7567
commit 3df8a77b00
8 changed files with 189 additions and 57 deletions
+2 -2
View File
@@ -56,11 +56,11 @@ TOKJSON
chmod 600 "$AUTH_FILE"
}
# ─── Helper: extract JSON field (portable, no jq dependency)
# ─── Helper: extract JSON field (using jq) ───────────────────
json_field() {
local json="$1"
local field="$2"
echo "$json" | grep -o "\"${field}\":[^,}]*" | head -1 | sed "s/\"${field}\"://;s/\"//g;s/ //g"
echo "$json" | jq -r ".${field}" 2>/dev/null | sed 's/null//'
}
# ─── Subcommand: status ─────────────────────────────────────
+1 -1
View File
@@ -29,7 +29,7 @@ AUTH_URL="${SUPABASE_URL}/auth/v1"
json_field() {
local json="$1"
local field="$2"
echo "$json" | grep -o "\"${field}\":[^,}]*" | head -1 | sed "s/\"${field}\"://;s/\"//g;s/ //g"
echo "$json" | jq -r ".${field}" 2>/dev/null | sed 's/null//'
}
# ─── Check auth file exists ─────────────────────────────────
+34 -48
View File
@@ -56,16 +56,9 @@ EMAIL="$(echo "$AUTH_JSON" | grep -o '"email":"[^"]*"' | head -1 | sed 's/"email
# ─── Build config snapshot ───────────────────────────────────
CONFIG_SNAPSHOT="{}"
if [ -f "$STATE_DIR/config.yaml" ]; then
# Convert YAML-like config to JSON
CONFIG_SNAPSHOT="{"
FIRST=true
while IFS=': ' read -r KEY VALUE; do
[ -z "$KEY" ] && continue
[ -z "$VALUE" ] && continue
if [ "$FIRST" = "true" ]; then FIRST=false; else CONFIG_SNAPSHOT="$CONFIG_SNAPSHOT,"; fi
CONFIG_SNAPSHOT="$CONFIG_SNAPSHOT\"$KEY\":\"$VALUE\""
done < "$STATE_DIR/config.yaml"
CONFIG_SNAPSHOT="$CONFIG_SNAPSHOT}"
# Convert YAML-like config to JSON safely using jq
CONFIG_SNAPSHOT="$(grep -v '^#' "$STATE_DIR/config.yaml" | grep ':' | \
jq -R 'split(": ") | {(.[0]): .[1]}' | jq -s 'add' || echo "{}")"
fi
# ─── Build analytics summary ────────────────────────────────
@@ -73,23 +66,18 @@ fi
ANALYTICS_SNAPSHOT="{\"skills\":{},\"recent_events\":[]}"
if [ -f "$JSONL_FILE" ]; then
# Count per-skill totals
SKILL_COUNTS="$(grep -o '"skill":"[^"]*"' "$JSONL_FILE" 2>/dev/null | awk -F'"' '{print $4}' | sort | uniq -c | sort -rn | head -20)"
SKILLS_JSON="{"
FIRST=true
while read -r COUNT SKILL; do
[ -z "$SKILL" ] && continue
if [ "$FIRST" = "true" ]; then FIRST=false; else SKILLS_JSON="$SKILLS_JSON,"; fi
SKILLS_JSON="$SKILLS_JSON\"$SKILL\":{\"total_runs\":$COUNT}"
done <<< "$SKILL_COUNTS"
SKILLS_JSON="$SKILLS_JSON}"
SKILL_COUNTS_JSON="$(grep -o '"skill":"[^"]*"' "$JSONL_FILE" 2>/dev/null | \
awk -F'"' '{print $4}' | sort | uniq -c | sort -rn | head -20 | \
jq -R 'capture("\\s+(?<count>\\d+)\\s+(?<skill>.+)") | {(.skill): {total_runs: (.count|tonumber)}}' | jq -s 'add')"
# Last 100 events (strip local-only fields)
RECENT="$(tail -100 "$JSONL_FILE" 2>/dev/null | sed \
-e 's/,"_repo_slug":"[^"]*"//g' \
-e 's/,"_branch":"[^"]*"//g' | tr '\n' ',' | sed 's/,$//')"
RECENT_JSON="$(tail -100 "$JSONL_FILE" 2>/dev/null | \
jq -c 'del(._repo_slug, ._branch)' | jq -s -c '.')"
ANALYTICS_SNAPSHOT="{\"skills\":${SKILLS_JSON},\"recent_events\":[${RECENT}]}"
ANALYTICS_SNAPSHOT="$(jq -n \
--argjson skills "${SKILL_COUNTS_JSON:-{}}" \
--argjson recent "${RECENT_JSON:-[]}" \
'{"skills": $skills, "recent_events": $recent}')"
fi
# ─── Build retro history snapshot ────────────────────────────
@@ -101,16 +89,7 @@ if [ -d "$STATE_DIR" ]; then
fi
if [ -n "$RETRO_FILES" ]; then
RETRO_SNAPSHOT="["
FIRST=true
while IFS= read -r RFILE; do
[ -f "$RFILE" ] || continue
CONTENT="$(cat "$RFILE" 2>/dev/null || true)"
[ -z "$CONTENT" ] && continue
if [ "$FIRST" = "true" ]; then FIRST=false; else RETRO_SNAPSHOT="$RETRO_SNAPSHOT,"; fi
RETRO_SNAPSHOT="$RETRO_SNAPSHOT$CONTENT"
done <<< "$RETRO_FILES"
RETRO_SNAPSHOT="$RETRO_SNAPSHOT]"
RETRO_SNAPSHOT="$(cat $RETRO_FILES 2>/dev/null | jq -s -c '.' || echo "[]")"
fi
# ─── Upsert to installations table ──────────────────────────
@@ -118,20 +97,27 @@ GSTACK_VERSION="$(cat "$GSTACK_DIR/VERSION" 2>/dev/null | tr -d '[:space:]' || e
OS="$(uname -s | tr '[:upper:]' '[:lower:]')"
NOW_ISO="$(date -u +%Y-%m-%dT%H:%M:%SZ)"
# Escape JSON strings that might contain special characters
# Config and retro snapshots are already JSON, analytics too
PAYLOAD="{
\"installation_id\": \"${USER_ID}\",
\"user_id\": \"${USER_ID}\",
\"email\": \"${EMAIL}\",
\"gstack_version\": \"${GSTACK_VERSION}\",
\"os\": \"${OS}\",
\"config_snapshot\": ${CONFIG_SNAPSHOT},
\"analytics_snapshot\": ${ANALYTICS_SNAPSHOT},
\"retro_history\": ${RETRO_SNAPSHOT},
\"last_backup_at\": \"${NOW_ISO}\",
\"last_seen\": \"${NOW_ISO}\"
}"
PAYLOAD="$(jq -n \
--arg id "$USER_ID" \
--arg email "$EMAIL" \
--arg version "$GSTACK_VERSION" \
--arg os "$OS" \
--argjson config "${CONFIG_SNAPSHOT:-{}}" \
--argjson analytics "${ANALYTICS_SNAPSHOT:-{}}" \
--argjson retro "${RETRO_SNAPSHOT:-[]}" \
--arg last_backup "$NOW_ISO" \
'{
installation_id: $id,
user_id: $id,
email: $email,
gstack_version: $version,
os: $os,
config_snapshot: $config,
analytics_snapshot: $analytics,
retro_history: $retro,
last_backup_at: $last_backup,
last_seen: $last_backup
}')"
# Upsert (POST with Prefer: resolution=merge-duplicates)
HTTP_CODE="$(curl -s -o /dev/null -w '%{http_code}' --max-time 15 \
+11 -3
View File
@@ -110,8 +110,8 @@ if [ -n "$ANALYTICS_DATA" ] && [ "$ANALYTICS_DATA" != "null" ] && [ "$ANALYTICS_
if [ "$DRY_RUN" = "false" ]; then
mkdir -p "$ANALYTICS_DIR"
# Extract recent_events array and write as JSONL
# This is a simplified restore — recent events from backup become local history
echo " Restoring recent events from backup..."
echo "$ANALYTICS_DATA" | jq -r '.recent_events[] | tojson' > "$JSONL_FILE" 2>/dev/null
echo " Restored $(wc -l < "$JSONL_FILE" | tr -d ' ') recent events from backup."
fi
fi
echo ""
@@ -123,7 +123,15 @@ RETRO_DATA="$(echo "$BACKUP" | grep -o '"retro_history":\[.*\]' | sed 's/"retro_
if [ -n "$RETRO_DATA" ] && [ "$RETRO_DATA" != "null" ] && [ "$RETRO_DATA" != "[]" ]; then
echo "Retro history found in backup."
if [ "$DRY_RUN" = "false" ]; then
echo " Retro history will be merged with local data."
# Merge: each retro in the array is a JSON object. Write as retro-restored-N.json
echo "$RETRO_DATA" | jq -c '.[]' | while read -r RETRO; do
[ -z "$RETRO" ] && continue
TS="$(echo "$RETRO" | jq -r .ts 2>/dev/null | tr -d ':-')"
[ -z "$TS" ] && TS="$(date +%s)"
RNAME="retro-restored-${TS}-$RANDOM.json"
echo "$RETRO" > "$STATE_DIR/$RNAME"
done
echo " Retro history merged with local data ($(echo "$RETRO_DATA" | jq 'length') entries restored)."
fi
echo ""
fi
+1 -1
View File
@@ -1,6 +1,6 @@
{
"name": "gstack",
"version": "0.11.9.0",
"version": "0.11.10.0",
"description": "Garry's Stack — Claude Code skills + fast headless browser. One repo, one install, entire AI engineering workflow.",
"license": "MIT",
"type": "module",
+1 -1
View File
@@ -6,7 +6,7 @@ ALTER TABLE telemetry_events ADD COLUMN error_message TEXT;
ALTER TABLE telemetry_events ADD COLUMN failed_step TEXT;
-- Add columns to installations for backup + email + auth identity
ALTER TABLE installations ADD COLUMN user_id UUID;
ALTER TABLE installations ADD COLUMN user_id UUID UNIQUE;
ALTER TABLE installations ADD COLUMN email TEXT;
ALTER TABLE installations ADD COLUMN config_snapshot JSONB;
ALTER TABLE installations ADD COLUMN analytics_snapshot JSONB;
+138
View File
@@ -0,0 +1,138 @@
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
import { execSync } from 'child_process';
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
// Repo root (one level up from this test file) and its CLI bin directory.
const ROOT = path.resolve(import.meta.dir, '..');
const BIN = path.join(ROOT, 'bin');
// Per-test temp state dir, exported to the CLIs as GSTACK_STATE_DIR;
// created fresh in beforeEach and removed in afterEach.
let tmpDir: string;
// Run a shell command with the sandboxed state dir wired into its environment.
// Returns trimmed stdout; on non-zero exit, returns whatever partial stdout
// the command produced (or the error message) instead of throwing.
function run(cmd: string, env: Record<string, string> = {}): string {
  const mergedEnv = { ...process.env, GSTACK_STATE_DIR: tmpDir, GSTACK_DIR: ROOT, ...env };
  try {
    const output = execSync(cmd, {
      cwd: ROOT,
      env: mergedEnv,
      encoding: 'utf-8',
      timeout: 10000,
    });
    return output.trim();
  } catch (err: any) {
    // Failed commands often still carry useful diagnostics on stdout.
    return err.stdout?.toString() || err.message;
  }
}
// Persist a config key/value via the gstack-config CLI in the temp state dir.
function setConfig(key: string, value: string) {
  const cmd = `${BIN}/gstack-config set ${key} ${value}`;
  run(cmd);
}
// Each test gets its own throwaway state dir, torn down afterwards so no
// auth tokens or analytics leak between tests.
beforeEach(() => {
  const prefix = path.join(os.tmpdir(), 'gstack-comm-');
  tmpDir = fs.mkdtempSync(prefix);
});
afterEach(() => {
  fs.rmSync(tmpDir, { force: true, recursive: true });
});
// Auth CLI: status reporting with no token, and logout cleanup.
describe('gstack-auth', () => {
  test('status shows not authenticated when no token file', () => {
    const output = run(`${BIN}/gstack-auth status`);
    expect(output).toContain('Not authenticated');
  });

  test('logout removes token file', () => {
    const tokenPath = path.join(tmpDir, 'auth-token.json');
    fs.writeFileSync(tokenPath, '{"access_token":"test"}');
    expect(fs.existsSync(tokenPath)).toBe(true);

    run(`${BIN}/gstack-auth logout`);

    expect(fs.existsSync(tokenPath)).toBe(false);
  });
});
// Token refresh: --check must exit 1 without auth and exit 0 with a valid
// (unexpired) token file. The original "succeeds" test captured execSync's
// output into an unused variable and asserted nothing; it now asserts
// explicitly that the command does not throw (i.e. exits zero).
describe('gstack-auth-refresh', () => {
  test('--check fails when not authenticated', () => {
    // execSync throws on non-zero exit code
    try {
      execSync(`${BIN}/gstack-auth-refresh --check`, {
        env: { ...process.env, GSTACK_STATE_DIR: tmpDir, GSTACK_DIR: ROOT }
      });
      expect(false).toBe(true); // Should not reach here
    } catch (e: any) {
      expect(e.status).toBe(1);
    }
  });

  test('--check succeeds when authenticated', () => {
    const authFile = path.join(tmpDir, 'auth-token.json');
    const expiresAt = Math.floor(Date.now() / 1000) + 3600; // one hour out
    fs.writeFileSync(authFile, JSON.stringify({
      access_token: 'valid',
      refresh_token: 'refresh',
      expires_at: expiresAt,
      email: 'test@example.com',
      user_id: 'user-123'
    }));
    // A non-zero exit would make execSync throw; assert it does not.
    expect(() => {
      execSync(`${BIN}/gstack-auth-refresh --check`, {
        env: { ...process.env, GSTACK_STATE_DIR: tmpDir, GSTACK_DIR: ROOT }
      });
    }).not.toThrow();
  });
});
// Community backup: tier gating, auth gating, and a snapshot-build smoke test.
describe('gstack-community-backup', () => {
  test('exits early if not community tier', () => {
    setConfig('telemetry', 'anonymous');
    expect(run(`${BIN}/gstack-community-backup`)).toBe('');
  });

  test('exits early if not authenticated', () => {
    setConfig('telemetry', 'community');
    expect(run(`${BIN}/gstack-community-backup`)).toBe('');
  });

  test('snapshot generation (dry run/mock check)', () => {
    setConfig('telemetry', 'community');
    const token = {
      access_token: 'valid',
      refresh_token: 'refresh',
      expires_at: Math.floor(Date.now() / 1000) + 3600,
      email: 'test@example.com',
      user_id: 'user-123'
    };
    fs.writeFileSync(path.join(tmpDir, 'auth-token.json'), JSON.stringify(token));

    // Seed a config file and analytics history so the snapshot builder has
    // real data (including embedded quotes) to serialize.
    fs.writeFileSync(path.join(tmpDir, 'config.yaml'), 'key: "value with \\"quotes\\""\n');
    const analyticsDir = path.join(tmpDir, 'analytics');
    fs.mkdirSync(analyticsDir);
    fs.writeFileSync(
      path.join(analyticsDir, 'skill-usage.jsonl'),
      '{"skill":"qa","duration_s":10,"outcome":"success"}\n'
    );

    // No way to mock the Supabase POST here; point at a dead endpoint and
    // verify the script survives and does not record a success marker.
    run(`${BIN}/gstack-community-backup`, { GSTACK_TELEMETRY_ENDPOINT: 'http://localhost:9999' });
    const marker = path.join(analyticsDir, '.last-backup-time');
    expect(fs.existsSync(marker)).toBe(false);
  });
});
// Benchmarks CLI: empty-state message vs. rendered comparison table.
describe('gstack-community-benchmarks', () => {
  test('shows no data message when no local analytics', () => {
    expect(run(`${BIN}/gstack-community-benchmarks`)).toContain('No local analytics data');
  });

  test('renders comparison table with local data', () => {
    const analyticsDir = path.join(tmpDir, 'analytics');
    fs.mkdirSync(analyticsDir);
    fs.writeFileSync(
      path.join(analyticsDir, 'skill-usage.jsonl'),
      '{"skill":"qa","duration_s":120,"outcome":"success"}\n'
    );

    const output = run(`${BIN}/gstack-community-benchmarks`);
    expect(output).toContain('/qa');
    expect(output).toContain('2m 0s'); // 120s formatted as minutes+seconds
  });
});
+1 -1
View File
@@ -1391,7 +1391,7 @@ describe('telemetry', () => {
test('generated SKILL.md contains telemetry opt-in prompt', () => {
const content = fs.readFileSync(path.join(ROOT, 'SKILL.md'), 'utf-8');
expect(content).toContain('.telemetry-prompted');
expect(content).toContain('Help gstack get better');
expect(content).toContain('gstack can share usage data');
expect(content).toContain('gstack-config set telemetry community');
expect(content).toContain('gstack-config set telemetry anonymous');
expect(content).toContain('gstack-config set telemetry off');