mirror of
https://github.com/garrytan/gstack.git
synced 2026-05-05 05:05:08 +02:00
feat: add telemetry-sync, community-dashboard, and integration tests
gstack-telemetry-sync: fire-and-forget JSONL → Supabase sync with privacy tier field stripping, batch limits, and cursor tracking. gstack-community-dashboard: CLI tool querying Supabase for skill popularity, crash clusters, and version distribution. 19 integration tests covering all telemetry scripts. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Executable
+110
@@ -0,0 +1,110 @@
|
||||
#!/usr/bin/env bash
# gstack-community-dashboard — community usage stats from Supabase
#
# Queries the Supabase REST API to show community-wide gstack usage:
# skill popularity, crash clusters, version distribution, retention.
#
# Env overrides (for testing):
#   GSTACK_DIR — override auto-detected gstack root
#   GSTACK_SUPABASE_URL — override Supabase project URL
#   GSTACK_SUPABASE_ANON_KEY — override Supabase anon key
set -uo pipefail

GSTACK_DIR="${GSTACK_DIR:-$(cd "$(dirname "$0")/.." && pwd)}"

# Supabase connection — will be populated once project is created.
# Both values must be present for the dashboard to run.
SUPABASE_URL="${GSTACK_SUPABASE_URL:-}"
ANON_KEY="${GSTACK_SUPABASE_ANON_KEY:-}"

# Not configured yet → print a friendly pointer and exit successfully
# (exit 0 on purpose: an unconfigured dashboard is not an error).
if [ -z "$SUPABASE_URL" ] || [ -z "$ANON_KEY" ]; then
    cat <<'UNCONFIGURED'
gstack community dashboard
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━

Supabase not configured yet. The community dashboard will be
available once the gstack Supabase project is set up.

For local analytics, run: gstack-analytics
UNCONFIGURED
    exit 0
fi
|
||||
|
||||
# ─── Helper: query Supabase REST API ─────────────────────────
# query TABLE [PARAMS]
# Issues a GET against the table's REST endpoint and prints the JSON
# body. On any failure (network error, non-2xx, timeout) it prints "[]"
# so callers always receive parseable JSON.
query() {
    local tbl="$1"
    local qs="${2:-}"
    local url="${SUPABASE_URL}/rest/v1/${tbl}?${qs}"

    curl -sf --max-time 10 \
        -H "apikey: ${ANON_KEY}" \
        -H "Authorization: Bearer ${ANON_KEY}" \
        "$url" \
        2>/dev/null || echo "[]"
}
|
||||
|
||||
echo "gstack community dashboard"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""

# ─── Weekly active installs ──────────────────────────────────
# BSD date (-v-7d) first, then GNU date (-d '7 days ago'); empty if neither works.
WEEK_AGO="$(date -u -v-7d +%Y-%m-%dT%H:%M:%SZ 2>/dev/null || date -u -d '7 days ago' +%Y-%m-%dT%H:%M:%SZ 2>/dev/null || echo "")"
if [ -n "$WEEK_AGO" ]; then
    # community-pulse is an edge function; falls back to zeroed JSON on failure.
    PULSE="$(curl -sf --max-time 10 \
        "${SUPABASE_URL}/functions/v1/community-pulse" \
        -H "Authorization: Bearer ${ANON_KEY}" \
        2>/dev/null || echo '{"weekly_active":0}')"

    # Crude JSON field extraction without jq (keeps the script dependency-free).
    WEEKLY="$(echo "$PULSE" | grep -o '"weekly_active":[0-9]*' | grep -o '[0-9]*' || echo "0")"
    CHANGE="$(echo "$PULSE" | grep -o '"change_pct":[0-9-]*' | grep -o '[0-9-]*' || echo "0")"

    echo "Weekly active installs: ${WEEKLY}"
    # 2>/dev/null guards the numeric test against a non-numeric CHANGE.
    if [ "$CHANGE" -gt 0 ] 2>/dev/null; then
        echo " Change: +${CHANGE}%"
    elif [ "$CHANGE" -lt 0 ] 2>/dev/null; then
        echo " Change: ${CHANGE}%"
    fi
    echo ""
fi

# ─── Skill popularity (top 10) ───────────────────────────────
echo "Top skills (last 7 days)"
echo "────────────────────────"

# Query telemetry_events once for both the skill and version sections.
# FIX: select must include gstack_version — the "Version distribution"
# section below greps "gstack_version" out of $EVENTS, so selecting only
# "skill" made that section permanently empty.
# Also: only apply the timestamp filter when WEEK_AGO resolved; an empty
# "gte." filter is invalid and would fail the whole request.
PARAMS="select=skill,gstack_version&event_type=eq.skill_run&limit=1000"
if [ -n "$WEEK_AGO" ]; then
    PARAMS="select=skill,gstack_version&event_type=eq.skill_run&event_timestamp=gte.${WEEK_AGO}&limit=1000"
fi
EVENTS="$(query "telemetry_events" "$PARAMS" 2>/dev/null || echo "[]")"

if [ "$EVENTS" != "[]" ] && [ -n "$EVENTS" ]; then
    # Count occurrences per skill, descending, top 10.
    echo "$EVENTS" | grep -o '"skill":"[^"]*"' | awk -F'"' '{print $4}' | sort | uniq -c | sort -rn | head -10 | while read -r COUNT SKILL; do
        printf " /%-20s %d runs\n" "$SKILL" "$COUNT"
    done
else
    echo " No data yet"
fi
echo ""

# ─── Crash clusters ──────────────────────────────────────────
echo "Top crash clusters"
echo "──────────────────"

CRASHES="$(query "crash_clusters" "select=error_class,gstack_version,count,unique_users&limit=5" 2>/dev/null || echo "[]")"

if [ "$CRASHES" != "[]" ] && [ -n "$CRASHES" ]; then
    # For each error class, pull the first matching "count" field from the
    # same JSON blob. NOTE(review): $ERR is interpolated into a grep
    # pattern — assumes error classes contain no regex metacharacters.
    echo "$CRASHES" | grep -o '"error_class":"[^"]*"' | awk -F'"' '{print $4}' | head -5 | while read -r ERR; do
        C="$(echo "$CRASHES" | grep -o "\"error_class\":\"$ERR\"[^}]*\"count\":[0-9]*" | grep -o '"count":[0-9]*' | head -1 | grep -o '[0-9]*')"
        printf " %-30s %s occurrences\n" "$ERR" "${C:-?}"
    done
else
    echo " No crashes reported"
fi
echo ""

# ─── Version distribution ────────────────────────────────────
echo "Version distribution (last 7 days)"
echo "───────────────────────────────────"

if [ "$EVENTS" != "[]" ] && [ -n "$EVENTS" ]; then
    echo "$EVENTS" | grep -o '"gstack_version":"[^"]*"' | awk -F'"' '{print $4}' | sort | uniq -c | sort -rn | head -5 | while read -r COUNT VER; do
        printf " v%-15s %d events\n" "$VER" "$COUNT"
    done
else
    echo " No data yet"
fi

echo ""
echo "For local analytics: gstack-analytics"
|
||||
Executable
+114
@@ -0,0 +1,114 @@
|
||||
#!/usr/bin/env bash
# gstack-telemetry-sync — sync local JSONL events to Supabase
#
# Fire-and-forget, backgrounded, rate-limited to once per 5 minutes.
# Strips local-only fields before sending. Respects privacy tiers.
#
# Env overrides (for testing):
#   GSTACK_STATE_DIR — override ~/.gstack state directory
#   GSTACK_DIR — override auto-detected gstack root
#   GSTACK_TELEMETRY_ENDPOINT — override Supabase endpoint URL
set -uo pipefail

# Resolve paths. GSTACK_DIR defaults to the parent of this script's bin/.
GSTACK_DIR="${GSTACK_DIR:-$(cd "$(dirname "$0")/.." && pwd)}"
STATE_DIR="${GSTACK_STATE_DIR:-$HOME/.gstack}"
ANALYTICS_DIR="$STATE_DIR/analytics"
JSONL_FILE="$ANALYTICS_DIR/skill-usage.jsonl"
CURSOR_FILE="$ANALYTICS_DIR/.last-sync-line"
RATE_FILE="$ANALYTICS_DIR/.last-sync-time"
CONFIG_CMD="$GSTACK_DIR/bin/gstack-config"

# Default endpoint — will be updated once Supabase project is created
ENDPOINT="${GSTACK_TELEMETRY_ENDPOINT:-}"

# ─── Pre-checks ──────────────────────────────────────────────
# All three checks exit 0 silently: this script is fire-and-forget and
# must never surface noise to the caller.

# No endpoint configured yet → nothing to do.
if [ -z "$ENDPOINT" ]; then
    exit 0
fi

# No JSONL file → nothing to sync.
if [ ! -f "$JSONL_FILE" ]; then
    exit 0
fi

# Rate limit: the marker file's mtime is the last sync time; `find -mmin +5`
# prints the path only when it is older than 5 minutes.
if [ -f "$RATE_FILE" ] && [ -z "$(find "$RATE_FILE" -mmin +5 2>/dev/null || true)" ]; then
    exit 0
fi
|
||||
|
||||
# ─── Read tier ───────────────────────────────────────────────
# Privacy tier comes from gstack-config; an unreadable or empty value is
# treated as "off", and tier "off" means: never send anything.
TIER="$("$CONFIG_CMD" get telemetry 2>/dev/null || true)"
if [ -z "$TIER" ] || [ "$TIER" = "off" ]; then
    exit 0
fi
|
||||
|
||||
# ─── Read cursor ─────────────────────────────────────────────
# The cursor is the count of JSONL lines already synced.
CURSOR=0
if [ -f "$CURSOR_FILE" ]; then
    CURSOR="$(cat "$CURSOR_FILE" 2>/dev/null | tr -d ' \n\r\t')"
    # Validate: must be a non-empty, non-negative integer.
    # FIX: the previous pattern (*[!0-9]*) did not match the empty
    # string, so an empty cursor file left CURSOR="" and skipped all
    # numeric sanity checks below.
    case "$CURSOR" in
        ''|*[!0-9]*) CURSOR=0 ;;
    esac
fi

# Safety: if cursor exceeds file length (file truncated/rotated), reset.
TOTAL_LINES="$(wc -l < "$JSONL_FILE" | tr -d ' \n\r\t')"
if [ "$CURSOR" -gt "$TOTAL_LINES" ] 2>/dev/null; then
    CURSOR=0
fi

# Nothing new to sync
[ "$CURSOR" -ge "$TOTAL_LINES" ] 2>/dev/null && exit 0

# ─── Read unsent lines ───────────────────────────────────────
# tail -n +K prints from line K onward, so skip past the cursor.
SKIP=$(( CURSOR + 1 ))
UNSENT="$(tail -n "+$SKIP" "$JSONL_FILE" 2>/dev/null || true)"
[ -z "$UNSENT" ] && exit 0
|
||||
|
||||
# ─── Strip local-only fields and build batch ─────────────────
BATCH="["
FIRST=true
COUNT=0      # events actually included in the batch
# FIX: track lines consumed from the file separately from events sent.
# The cursor must advance by CONSUMED, not COUNT: skipped empty or
# malformed lines were previously never counted, so the cursor lagged
# behind the read position and already-synced events were re-sent
# (duplicated) on every later run.
CONSUMED=0

while IFS= read -r LINE; do
    CONSUMED=$(( CONSUMED + 1 ))

    # Skip empty or malformed lines (still counted as consumed)
    [ -z "$LINE" ] && continue
    echo "$LINE" | grep -q '^{' || continue

    # Strip _repo_slug and _branch (local-only fields)
    CLEAN="$(echo "$LINE" | sed 's/,"_repo_slug":"[^"]*"//g; s/,"_branch":"[^"]*"//g')"

    # If anonymous tier, strip installation_id
    if [ "$TIER" = "anonymous" ]; then
        CLEAN="$(echo "$CLEAN" | sed 's/,"installation_id":"[^"]*"//g; s/,"installation_id":null//g')"
    fi

    if [ "$FIRST" = "true" ]; then
        FIRST=false
    else
        BATCH="$BATCH,"
    fi
    BATCH="$BATCH$CLEAN"
    COUNT=$(( COUNT + 1 ))

    # Batch size limit
    [ "$COUNT" -ge 100 ] && break
done <<< "$UNSENT"

BATCH="$BATCH]"

# Nothing to send after filtering: still advance the cursor past the
# consumed (unusable) lines so they are not rescanned on every run.
if [ "$COUNT" -eq 0 ]; then
    if [ "$CONSUMED" -gt 0 ]; then
        echo "$(( CURSOR + CONSUMED ))" > "$CURSOR_FILE" 2>/dev/null || true
    fi
    touch "$RATE_FILE" 2>/dev/null || true
    exit 0
fi

# ─── POST to Supabase ────────────────────────────────────────
RESPONSE="$(curl -sf --max-time 10 \
    -X POST "$ENDPOINT" \
    -H "Content-Type: application/json" \
    -d "$BATCH" 2>/dev/null || true)"

# ─── Update cursor on success ────────────────────────────────
# Advance by CONSUMED (not COUNT) — see comment above the loop.
if [ -n "$RESPONSE" ] && echo "$RESPONSE" | grep -q '"inserted"'; then
    NEW_CURSOR=$(( CURSOR + CONSUMED ))
    echo "$NEW_CURSOR" > "$CURSOR_FILE" 2>/dev/null || true
fi

# Update rate limit marker
touch "$RATE_FILE" 2>/dev/null || true

exit 0
|
||||
@@ -0,0 +1,248 @@
|
||||
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
|
||||
import { execSync } from 'child_process';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
|
||||
const ROOT = path.resolve(import.meta.dir, '..');
|
||||
const BIN = path.join(ROOT, 'bin');
|
||||
|
||||
// Each test gets a fresh temp directory for GSTACK_STATE_DIR
|
||||
let tmpDir: string;
|
||||
|
||||
function run(cmd: string, env: Record<string, string> = {}): string {
|
||||
return execSync(cmd, {
|
||||
cwd: ROOT,
|
||||
env: { ...process.env, GSTACK_STATE_DIR: tmpDir, GSTACK_DIR: ROOT, ...env },
|
||||
encoding: 'utf-8',
|
||||
timeout: 10000,
|
||||
}).trim();
|
||||
}
|
||||
|
||||
function setConfig(key: string, value: string) {
|
||||
run(`${BIN}/gstack-config set ${key} ${value}`);
|
||||
}
|
||||
|
||||
function readJsonl(): string[] {
|
||||
const file = path.join(tmpDir, 'analytics', 'skill-usage.jsonl');
|
||||
if (!fs.existsSync(file)) return [];
|
||||
return fs.readFileSync(file, 'utf-8').trim().split('\n').filter(Boolean);
|
||||
}
|
||||
|
||||
function parseJsonl(): any[] {
|
||||
return readJsonl().map(line => JSON.parse(line));
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'gstack-tel-'));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
// Integration tests for the gstack-telemetry-log binary: event shape,
// privacy-tier gating, installation_id handling, and directory creation.
describe('gstack-telemetry-log', () => {
  test('appends valid JSONL when tier=anonymous', () => {
    setConfig('telemetry', 'anonymous');
    run(`${BIN}/gstack-telemetry-log --skill qa --duration 142 --outcome success --session-id test-123`);

    const events = parseJsonl();
    expect(events).toHaveLength(1);
    // v is the event schema version.
    expect(events[0].v).toBe(1);
    expect(events[0].skill).toBe('qa');
    expect(events[0].duration_s).toBe(142);
    expect(events[0].outcome).toBe('success');
    expect(events[0].session_id).toBe('test-123');
    expect(events[0].event_type).toBe('skill_run');
    // os and gstack_version are auto-populated by the logger.
    expect(events[0].os).toBeTruthy();
    expect(events[0].gstack_version).toBeTruthy();
  });

  test('produces no output when tier=off', () => {
    setConfig('telemetry', 'off');
    run(`${BIN}/gstack-telemetry-log --skill ship --duration 30 --outcome success --session-id test-456`);

    expect(readJsonl()).toHaveLength(0);
  });

  test('defaults to off for invalid tier value', () => {
    // Unknown tier strings must fail closed (no logging).
    setConfig('telemetry', 'invalid_value');
    run(`${BIN}/gstack-telemetry-log --skill ship --duration 30 --outcome success --session-id test-789`);

    expect(readJsonl()).toHaveLength(0);
  });

  test('includes installation_id for community tier', () => {
    setConfig('telemetry', 'community');
    run(`${BIN}/gstack-telemetry-log --skill review --duration 100 --outcome success --session-id comm-123`);

    const events = parseJsonl();
    expect(events).toHaveLength(1);
    // installation_id should be a SHA-256 hash (64 hex chars)
    expect(events[0].installation_id).toMatch(/^[a-f0-9]{64}$/);
  });

  test('installation_id is null for anonymous tier', () => {
    setConfig('telemetry', 'anonymous');
    run(`${BIN}/gstack-telemetry-log --skill qa --duration 50 --outcome success --session-id anon-123`);

    const events = parseJsonl();
    expect(events[0].installation_id).toBeNull();
  });

  test('includes error_class when provided', () => {
    setConfig('telemetry', 'anonymous');
    run(`${BIN}/gstack-telemetry-log --skill browse --duration 10 --outcome error --error-class timeout --session-id err-123`);

    const events = parseJsonl();
    expect(events[0].error_class).toBe('timeout');
    expect(events[0].outcome).toBe('error');
  });

  test('handles missing duration gracefully', () => {
    setConfig('telemetry', 'anonymous');
    run(`${BIN}/gstack-telemetry-log --skill qa --outcome success --session-id nodur-123`);

    // Omitted --duration must serialize as null, not 0 or absent.
    const events = parseJsonl();
    expect(events[0].duration_s).toBeNull();
  });

  test('supports event_type flag', () => {
    setConfig('telemetry', 'anonymous');
    run(`${BIN}/gstack-telemetry-log --event-type upgrade_prompted --skill "" --outcome success --session-id up-123`);

    const events = parseJsonl();
    expect(events[0].event_type).toBe('upgrade_prompted');
  });

  test('includes local-only fields (_repo_slug, _branch)', () => {
    setConfig('telemetry', 'anonymous');
    run(`${BIN}/gstack-telemetry-log --skill qa --duration 50 --outcome success --session-id local-123`);

    const events = parseJsonl();
    // These should be present in local JSONL
    // (telemetry-sync strips them before upload).
    expect(events[0]).toHaveProperty('_repo_slug');
    expect(events[0]).toHaveProperty('_branch');
  });

  test('creates analytics directory if missing', () => {
    // Remove analytics dir
    const analyticsDir = path.join(tmpDir, 'analytics');
    if (fs.existsSync(analyticsDir)) fs.rmSync(analyticsDir, { recursive: true });

    setConfig('telemetry', 'anonymous');
    run(`${BIN}/gstack-telemetry-log --skill qa --duration 50 --outcome success --session-id mkdir-123`);

    expect(fs.existsSync(analyticsDir)).toBe(true);
    expect(readJsonl()).toHaveLength(1);
  });
});
|
||||
|
||||
// Tests for the .pending crash-marker protocol: a marker left behind by a
// run that never finished is finalized as outcome:unknown on the next log.
describe('.pending marker', () => {
  test('finalizes stale .pending as outcome:unknown', () => {
    setConfig('telemetry', 'anonymous');

    // Write a fake .pending marker
    const analyticsDir = path.join(tmpDir, 'analytics');
    fs.mkdirSync(analyticsDir, { recursive: true });
    fs.writeFileSync(
      path.join(analyticsDir, '.pending'),
      '{"skill":"old-skill","ts":"2026-03-18T00:00:00Z","session_id":"old-123","gstack_version":"0.6.4"}'
    );

    // Run telemetry-log — should finalize the pending marker first
    run(`${BIN}/gstack-telemetry-log --skill qa --duration 50 --outcome success --session-id new-456`);

    const events = parseJsonl();
    expect(events).toHaveLength(2);

    // First event: finalized pending
    expect(events[0].skill).toBe('old-skill');
    expect(events[0].outcome).toBe('unknown');
    expect(events[0].session_id).toBe('old-123');

    // Second event: new event
    expect(events[1].skill).toBe('qa');
    expect(events[1].outcome).toBe('success');
  });

  test('.pending file is removed after finalization', () => {
    setConfig('telemetry', 'anonymous');

    const analyticsDir = path.join(tmpDir, 'analytics');
    fs.mkdirSync(analyticsDir, { recursive: true });
    const pendingPath = path.join(analyticsDir, '.pending');
    fs.writeFileSync(pendingPath, '{"skill":"stale","ts":"2026-03-18T00:00:00Z","session_id":"s","gstack_version":"v"}');

    run(`${BIN}/gstack-telemetry-log --skill qa --duration 50 --outcome success --session-id new-456`);

    expect(fs.existsSync(pendingPath)).toBe(false);
  });

  test('tier=off still clears .pending', () => {
    // Even with telemetry disabled, the marker must be cleaned up so it
    // can't be "finalized" later if the user re-enables telemetry.
    setConfig('telemetry', 'off');

    const analyticsDir = path.join(tmpDir, 'analytics');
    fs.mkdirSync(analyticsDir, { recursive: true });
    const pendingPath = path.join(analyticsDir, '.pending');
    fs.writeFileSync(pendingPath, '{"skill":"stale","ts":"2026-03-18T00:00:00Z","session_id":"s","gstack_version":"v"}');

    run(`${BIN}/gstack-telemetry-log --skill qa --duration 50 --outcome success --session-id off-123`);

    expect(fs.existsSync(pendingPath)).toBe(false);
    // But no JSONL entries since tier=off
    expect(readJsonl()).toHaveLength(0);
  });
});
|
||||
|
||||
// Tests for the local analytics dashboard (reads the JSONL directly).
describe('gstack-analytics', () => {
  test('shows "no data" for empty JSONL', () => {
    const output = run(`${BIN}/gstack-analytics`);
    expect(output).toContain('no data');
  });

  test('renders usage dashboard with events', () => {
    setConfig('telemetry', 'anonymous');
    run(`${BIN}/gstack-telemetry-log --skill qa --duration 120 --outcome success --session-id a-1`);
    run(`${BIN}/gstack-telemetry-log --skill qa --duration 60 --outcome success --session-id a-2`);
    run(`${BIN}/gstack-telemetry-log --skill ship --duration 30 --outcome error --error-class timeout --session-id a-3`);

    const output = run(`${BIN}/gstack-analytics all`);
    expect(output).toContain('/qa');
    expect(output).toContain('/ship');
    expect(output).toContain('2 runs');
    expect(output).toContain('1 runs');
    // 2 successes out of 3 events → 66% (integer truncation).
    expect(output).toContain('Success rate: 66%');
    expect(output).toContain('Errors: 1');
  });

  test('filters by time window', () => {
    setConfig('telemetry', 'anonymous');
    run(`${BIN}/gstack-telemetry-log --skill qa --duration 60 --outcome success --session-id t-1`);

    // A just-written event must appear inside the 7-day window.
    const output7d = run(`${BIN}/gstack-analytics 7d`);
    expect(output7d).toContain('/qa');
    expect(output7d).toContain('last 7 days');
  });
});
|
||||
|
||||
// Tests for the sync script's silent-exit pre-checks (no network calls
// are made in either case, so no mock server is needed).
describe('gstack-telemetry-sync', () => {
  test('exits silently with no endpoint configured', () => {
    // Default: GSTACK_TELEMETRY_ENDPOINT is not set → exit 0
    const result = run(`${BIN}/gstack-telemetry-sync`);
    expect(result).toBe('');
  });

  test('exits silently with no JSONL file', () => {
    // Endpoint set but nothing has ever been logged → exit 0 before curl.
    const result = run(`${BIN}/gstack-telemetry-sync`, { GSTACK_TELEMETRY_ENDPOINT: 'http://localhost:9999' });
    expect(result).toBe('');
  });
});
|
||||
|
||||
// Tests for the community dashboard's unconfigured fallback path.
describe('gstack-community-dashboard', () => {
  test('shows unconfigured message when no Supabase URL', () => {
    const output = run(`${BIN}/gstack-community-dashboard`);
    expect(output).toContain('Supabase not configured');
    // Should point users at the local-only alternative.
    expect(output).toContain('gstack-analytics');
  });
});
|
||||
Reference in New Issue
Block a user