mirror of
https://github.com/garrytan/gstack.git
synced 2026-05-02 03:35:09 +02:00
feat: bin/gstack-developer-profile — unified profile with migration
bin/gstack-developer-profile supersedes bin/gstack-builder-profile. The old
binary becomes a one-line legacy shim delegating to --read for /office-hours
backward compat.
Subcommands:
--read legacy KEY:VALUE output (tier, session_count, etc)
--migrate folds ~/.gstack/builder-profile.jsonl into
~/.gstack/developer-profile.json. Atomic (temp + rename),
idempotent (no-op when target exists or source absent),
archives source as .migrated-YYYY-MM-DD-HHMMSS
--derive recomputes inferred dimensions from question-log.jsonl
using the signal map in scripts/psychographic-signals.ts
--profile full profile JSON
--gap declared vs inferred diff JSON
--trace <dim> event-level trace of what contributed to a dimension
--check-mismatch flags dimensions where declared and inferred disagree by
> 0.3 (requires >= 10 events first)
--vibe archetype name + description from scripts/archetypes.ts
--narrative (v2 stub)
Auto-migration on first read: if legacy file exists and new file doesn't,
migrate before reading. Creates a neutral (all-0.5) stub if nothing exists.
Unified schema (see docs/designs/PLAN_TUNING_V0.md §Architecture):
{identity, declared, inferred: {values, sample_size, diversity},
gap, overrides, sessions, signals_accumulated, schema_version}
25 new tests across subcommand behaviors:
- --read defaults + stub creation
- --migrate: 3 sessions preserved with signal tallies, idempotency, archival
- Tier calculation: welcome_back / regular / inner_circle boundaries
- --derive: neutral-when-empty, upward nudge on 'expand', downward on 'reduce',
recomputable (same input → same output), ad-hoc unregistered ids ignored
- --trace: contributing events, empty for untouched dims, error without arg
- --gap: empty when no declared, correctly computed otherwise
- --vibe: returns archetype name + description
- --check-mismatch: threshold behavior, 10+ sample requirement
- Unknown subcommand errors
25 pass, 0 fail, 60 expect() calls.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
+9
-130
#!/usr/bin/env bash
# gstack-builder-profile — LEGACY SHIM.
#
# Superseded by bin/gstack-developer-profile. This binary now delegates to
# `gstack-developer-profile --read` to keep /office-hours working during the
# transition. When all call sites have been updated, this file can be removed.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
exec "$SCRIPT_DIR/gstack-developer-profile" --read "$@"
+446
@@ -0,0 +1,446 @@
|
||||
#!/usr/bin/env bash
# gstack-developer-profile — unified developer profile access and derivation.
#
# Supersedes bin/gstack-builder-profile. The old binary remains as a legacy
# shim that delegates to `gstack-developer-profile --read`.
#
# Subcommands:
#   --read (default)   emit KEY: VALUE pairs in builder-profile format
#                      for /office-hours compatibility.
#   --derive           recompute inferred dimensions from question events;
#                      write updated ~/.gstack/developer-profile.json.
#   --profile          emit the full profile as JSON (all fields).
#   --gap              emit declared-vs-inferred gap as JSON.
#   --trace <dim>      show events that contributed to a dimension.
#   --narrative        (v2 stub) output a coach bio paragraph.
#   --vibe             (v2 stub) output the one-word archetype.
#   --check-mismatch   detect meaningful gaps between declared and observed.
#   --migrate          migrate builder-profile.jsonl → developer-profile.json.
#                      Idempotent; archives the source file on success.
#
# Profile file: ~/.gstack/developer-profile.json (unified schema — see
# docs/designs/PLAN_TUNING_V0.md). Event file: ~/.gstack/projects/{SLUG}/
# question-log.jsonl (the file do_derive/do_trace actually read).
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
ROOT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
GSTACK_HOME="${GSTACK_HOME:-$HOME/.gstack}"
PROFILE_FILE="$GSTACK_HOME/developer-profile.json"
LEGACY_FILE="$GSTACK_HOME/builder-profile.jsonl"
# gstack-slug emits SLUG=<project> assignments; tolerate its absence so the
# project-independent subcommands still work outside a project directory.
eval "$("$SCRIPT_DIR/gstack-slug" 2>/dev/null || true)"
SLUG="${SLUG:-unknown}"

CMD="${1:---read}"
shift || true  # no-op when invoked with zero args (default subcommand --read)
||||
|
||||
# -----------------------------------------------------------------------
# Migration: builder-profile.jsonl → developer-profile.json
# -----------------------------------------------------------------------
do_migrate() {
  # Nothing to do when the legacy log is absent.
  if [ ! -f "$LEGACY_FILE" ]; then
    echo "MIGRATE: no legacy file to migrate"
    return 0
  fi

  # Idempotent: never overwrite an existing unified profile.
  if [ -f "$PROFILE_FILE" ]; then
    # Already migrated — no-op (idempotent).
    echo "MIGRATE: already migrated (developer-profile.json exists)"
    return 0
  fi

  # Build the profile in a temp file, then atomic rename. The EXIT trap keeps
  # a failed run from leaving a partial developer-profile.json.*.tmp behind.
  local TMPOUT
  TMPOUT=$(mktemp "$GSTACK_HOME/developer-profile.json.XXXXXX.tmp")
  trap 'rm -f "$TMPOUT"' EXIT

  bun -e "
  const lines = (await Bun.stdin.text()).trim().split('\n').filter(Boolean);
  const sessions = [];
  const signalsAcc = {};
  const resources = new Set();
  const topics = new Set();
  for (const line of lines) {
    try {
      const e = JSON.parse(line);
      sessions.push(e);
      for (const s of (e.signals || [])) {
        signalsAcc[s] = (signalsAcc[s] || 0) + 1;
      }
      for (const r of (e.resources_shown || [])) resources.add(r);
      for (const t of (e.topics || [])) topics.add(t);
    } catch {}
  }
  const profile = {
    identity: {},
    declared: {},
    inferred: {
      values: {
        scope_appetite: 0.5,
        risk_tolerance: 0.5,
        detail_preference: 0.5,
        autonomy: 0.5,
        architecture_care: 0.5,
      },
      sample_size: 0,
      diversity: { skills_covered: 0, question_ids_covered: 0, days_span: 0 },
    },
    gap: {},
    overrides: {},
    sessions,
    signals_accumulated: signalsAcc,
    resources_shown: Array.from(resources),
    topics: Array.from(topics),
    migrated_at: new Date().toISOString(),
    schema_version: 1,
  };
  console.log(JSON.stringify(profile, null, 2));
  " < "$LEGACY_FILE" > "$TMPOUT"

  # Atomic rename.
  mv "$TMPOUT" "$PROFILE_FILE"
  trap - EXIT

  # Archive the legacy file (kept for audit/rollback, never deleted).
  local TS
  TS="$(date +%Y-%m-%d-%H%M%S)"
  mv "$LEGACY_FILE" "$LEGACY_FILE.migrated-$TS"

  # Report the migrated session count. The profile path is handed to bun via
  # the environment instead of being spliced into the JS source: a path
  # containing a quote or backslash would otherwise break (or inject code
  # into) the one-liner.
  local COUNT
  COUNT=$(PROFILE_JSON="$PROFILE_FILE" bun -e "console.log(JSON.parse(require('fs').readFileSync(process.env.PROFILE_JSON,'utf-8')).sessions.length)" 2>/dev/null || echo "?")
  echo "MIGRATE: ok — migrated $COUNT sessions from builder-profile.jsonl"
}
|
||||
|
||||
# -----------------------------------------------------------------------
# Load-or-migrate helper: guarantee developer-profile.json exists before any
# reader runs. Preference order: existing file, then auto-migration from the
# legacy builder-profile.jsonl, then a neutral (all-0.5) stub for new users.
# -----------------------------------------------------------------------
ensure_profile() {
  [ -f "$PROFILE_FILE" ] && return 0
  if [ -f "$LEGACY_FILE" ]; then
    # First read after upgrade: fold the legacy log in silently.
    do_migrate >/dev/null
    return 0
  fi
  # Brand-new user — write the minimal neutral stub.
  mkdir -p "$GSTACK_HOME"
  cat > "$PROFILE_FILE" <<EOF
{
  "identity": {},
  "declared": {},
  "inferred": {
    "values": {
      "scope_appetite": 0.5,
      "risk_tolerance": 0.5,
      "detail_preference": 0.5,
      "autonomy": 0.5,
      "architecture_care": 0.5
    },
    "sample_size": 0,
    "diversity": { "skills_covered": 0, "question_ids_covered": 0, "days_span": 0 }
  },
  "gap": {},
  "overrides": {},
  "sessions": [],
  "signals_accumulated": {},
  "schema_version": 1
}
EOF
}
|
||||
|
||||
# -----------------------------------------------------------------------
# Read: emit legacy KEY: VALUE output for /office-hours compat.
# -----------------------------------------------------------------------
do_read() {
  ensure_profile
  # Restores the legacy shim's graceful degradation: if bun is unavailable or
  # the profile is unreadable, emit introduction-tier defaults instead of
  # aborting under `set -euo pipefail` — --read exists purely for backward
  # compatibility with /office-hours, which expects these keys to always print.
  bun -e "
  const p = JSON.parse(await Bun.stdin.text());
  const sessions = p.sessions || [];
  const count = sessions.length;
  let tier = 'introduction';
  if (count >= 8) tier = 'inner_circle';
  else if (count >= 4) tier = 'regular';
  else if (count >= 1) tier = 'welcome_back';

  const last = sessions[count - 1] || {};
  const prev = sessions[count - 2] || {};
  const crossProject = prev.project_slug && last.project_slug
    ? prev.project_slug !== last.project_slug
    : false;

  const designs = sessions.map(e => e.design_doc || '').filter(Boolean);
  const designTitles = sessions
    .map(e => (e.design_doc ? (e.project_slug || 'unknown') : ''))
    .filter(Boolean);

  const signalCounts = p.signals_accumulated || {};
  let totalSignals = 0;
  for (const v of Object.values(signalCounts)) totalSignals += v;
  const signalStr = Object.entries(signalCounts).map(([k,v]) => k + ':' + v).join(',');

  const builderSessions = sessions.filter(e => e.mode !== 'startup').length;
  const nudgeEligible = builderSessions >= 3 && totalSignals >= 5;

  const resources = p.resources_shown || [];
  const topics = p.topics || [];

  console.log('SESSION_COUNT: ' + count);
  console.log('TIER: ' + tier);
  console.log('LAST_PROJECT: ' + (last.project_slug || ''));
  console.log('LAST_ASSIGNMENT: ' + (last.assignment || ''));
  console.log('LAST_DESIGN_TITLE: ' + (last.design_doc || ''));
  console.log('DESIGN_COUNT: ' + designs.length);
  console.log('DESIGN_TITLES: ' + JSON.stringify(designTitles));
  console.log('ACCUMULATED_SIGNALS: ' + signalStr);
  console.log('TOTAL_SIGNAL_COUNT: ' + totalSignals);
  console.log('CROSS_PROJECT: ' + crossProject);
  console.log('NUDGE_ELIGIBLE: ' + nudgeEligible);
  console.log('RESOURCES_SHOWN: ' + resources.join(','));
  console.log('RESOURCES_SHOWN_COUNT: ' + resources.length);
  console.log('TOPICS: ' + topics.join(','));
  " < "$PROFILE_FILE" 2>/dev/null || {
    # Fallback if bun is unavailable (same defaults the legacy script emitted).
    echo "SESSION_COUNT: 0"
    echo "TIER: introduction"
    echo "LAST_PROJECT:"
    echo "LAST_ASSIGNMENT:"
    echo "LAST_DESIGN_TITLE:"
    echo "DESIGN_COUNT: 0"
    echo "DESIGN_TITLES: []"
    echo "ACCUMULATED_SIGNALS:"
    echo "TOTAL_SIGNAL_COUNT: 0"
    echo "CROSS_PROJECT: false"
    echo "NUDGE_ELIGIBLE: false"
    echo "RESOURCES_SHOWN:"
    echo "RESOURCES_SHOWN_COUNT: 0"
    echo "TOPICS:"
  }
}
|
||||
|
||||
# -----------------------------------------------------------------------
# Profile: emit the full JSON
# -----------------------------------------------------------------------
do_profile() {
  # Print the stored unified profile verbatim. ensure_profile guarantees the
  # file exists first (auto-migrating the legacy log or writing a neutral stub).
  ensure_profile
  cat "$PROFILE_FILE"
}
|
||||
|
||||
# -----------------------------------------------------------------------
# Gap: declared vs inferred diff
# -----------------------------------------------------------------------
do_gap() {
  # Emit {declared, inferred, gap} as JSON, where gap[d] = |declared - inferred|
  # rounded to 3 decimals, only for dimensions present on BOTH sides.
  ensure_profile
  cat "$PROFILE_FILE" | bun -e "
  const p = JSON.parse(await Bun.stdin.text());
  const declared = p.declared || {};
  const inferred = (p.inferred && p.inferred.values) || {};
  const dims = ['scope_appetite','risk_tolerance','detail_preference','autonomy','architecture_care'];
  const gap = {};
  for (const d of dims) {
    if (declared[d] !== undefined && inferred[d] !== undefined) {
      gap[d] = +(Math.abs(declared[d] - inferred[d])).toFixed(3);
    }
  }
  console.log(JSON.stringify({ declared, inferred, gap }, null, 2));
  "
}
|
||||
|
||||
# -----------------------------------------------------------------------
# Derive: recompute inferred dimensions from question-events.jsonl
# -----------------------------------------------------------------------
do_derive() {
  # Recompute inferred.{values,sample_size,diversity} and gap from the
  # per-project question log, using the signal map in
  # scripts/psychographic-signals.ts. Recomputable: same input → same output.
  ensure_profile
  local EVENTS="$GSTACK_HOME/projects/$SLUG/question-log.jsonl"
  local REGISTRY="$ROOT_DIR/scripts/question-registry.ts"
  local SIGNALS="$ROOT_DIR/scripts/psychographic-signals.ts"
  if [ ! -f "$REGISTRY" ] || [ ! -f "$SIGNALS" ]; then
    echo "DERIVE: registry or signals file missing, cannot derive" >&2
    exit 1
  fi

  # bun resolves the ./scripts imports relative to the repo root.
  cd "$ROOT_DIR"
  # Paths go through the environment so quoting inside the JS stays trivial.
  PROFILE_FILE_PATH="$PROFILE_FILE" EVENTS_PATH="$EVENTS" bun -e "
  import('./scripts/question-registry.ts').then(async (regmod) => {
    const sigmod = await import('./scripts/psychographic-signals.ts');
    const fs = require('fs');
    const { QUESTIONS } = regmod;
    const { SIGNAL_MAP, applySignal, newDimensionTotals, normalizeToDimensionValue } = sigmod;

    const profilePath = process.env.PROFILE_FILE_PATH;
    const eventsPath = process.env.EVENTS_PATH;
    const profile = JSON.parse(fs.readFileSync(profilePath, 'utf-8'));

    let lines = [];
    if (fs.existsSync(eventsPath)) {
      lines = fs.readFileSync(eventsPath, 'utf-8').trim().split('\n').filter(Boolean);
    }

    const totals = newDimensionTotals();
    const skills = new Set();
    const qids = new Set();
    const days = new Set();
    let count = 0;
    for (const line of lines) {
      let e;
      try { e = JSON.parse(line); } catch { continue; }
      if (!e.question_id || !e.user_choice) continue;
      count++;
      skills.add(e.skill);
      qids.add(e.question_id);
      if (e.ts) days.add(String(e.ts).slice(0,10));
      const def = QUESTIONS[e.question_id];
      if (def && def.signal_key) {
        applySignal(totals, def.signal_key, e.user_choice);
      }
    }

    const values = {};
    for (const [dim, total] of Object.entries(totals)) {
      values[dim] = +normalizeToDimensionValue(total).toFixed(3);
    }

    profile.inferred = {
      values,
      sample_size: count,
      diversity: {
        skills_covered: skills.size,
        question_ids_covered: qids.size,
        days_span: days.size,
      },
    };

    // Recompute gap.
    const gap = {};
    for (const d of Object.keys(values)) {
      if (profile.declared && profile.declared[d] !== undefined) {
        gap[d] = +(Math.abs(profile.declared[d] - values[d])).toFixed(3);
      }
    }
    profile.gap = gap;
    profile.derived_at = new Date().toISOString();

    const tmp = profilePath + '.tmp';
    fs.writeFileSync(tmp, JSON.stringify(profile, null, 2));
    fs.renameSync(tmp, profilePath);
    console.log('DERIVE: ok — ' + count + ' events, ' + skills.size + ' skills, ' + qids.size + ' questions');
  }).catch(err => { console.error('DERIVE:', err.message); process.exit(1); });
  "
}
|
||||
|
||||
# -----------------------------------------------------------------------
# Trace: show events contributing to a dimension
# -----------------------------------------------------------------------
do_trace() {
  # Event-level audit trail: which logged question choices moved <dim>,
  # and by how much, per the SIGNAL_MAP deltas.
  local DIM="${1:-}"
  if [ -z "$DIM" ]; then
    echo "TRACE: missing dimension argument" >&2
    exit 1
  fi
  local EVENTS="$GSTACK_HOME/projects/$SLUG/question-log.jsonl"
  if [ ! -f "$EVENTS" ]; then
    echo "TRACE: no events for this project"
    return 0
  fi
  cd "$ROOT_DIR"
  EVENTS_PATH="$EVENTS" TRACE_DIM="$DIM" bun -e "
  import('./scripts/question-registry.ts').then(async (regmod) => {
    const sigmod = await import('./scripts/psychographic-signals.ts');
    const fs = require('fs');
    const { QUESTIONS } = regmod;
    const { SIGNAL_MAP } = sigmod;
    const target = process.env.TRACE_DIM;
    const lines = fs.readFileSync(process.env.EVENTS_PATH, 'utf-8').trim().split('\n').filter(Boolean);
    const rows = [];
    for (const line of lines) {
      let e;
      try { e = JSON.parse(line); } catch { continue; }
      const def = QUESTIONS[e.question_id];
      if (!def || !def.signal_key) continue;
      const deltas = SIGNAL_MAP[def.signal_key]?.[e.user_choice] || [];
      for (const d of deltas) {
        if (d.dim === target) {
          rows.push({ ts: e.ts, question_id: e.question_id, choice: e.user_choice, delta: d.delta });
        }
      }
    }
    if (rows.length === 0) {
      console.log('TRACE: no events contribute to ' + target);
    } else {
      console.log('TRACE: ' + rows.length + ' events for ' + target);
      for (const r of rows) {
        console.log(' ' + (r.ts || '').slice(0,19) + ' ' + r.question_id + ' → ' + r.choice + ' (' + (r.delta > 0 ? '+' : '') + r.delta + ')');
      }
    }
  }).catch(err => { console.error('TRACE:', err.message); process.exit(1); });
  "
}
|
||||
|
||||
# -----------------------------------------------------------------------
# Check mismatch: flag when declared ≠ inferred by > threshold
# -----------------------------------------------------------------------
do_check_mismatch() {
  # Flags dimensions where |declared - inferred| > 0.3, but only once 10+
  # question events have accumulated — small samples are too noisy to act on.
  ensure_profile
  cat "$PROFILE_FILE" | bun -e "
  const p = JSON.parse(await Bun.stdin.text());
  const declared = p.declared || {};
  const inferred = (p.inferred && p.inferred.values) || {};
  const sampleSize = (p.inferred && p.inferred.sample_size) || 0;
  const diversity = (p.inferred && p.inferred.diversity) || {};

  // Require enough data before reporting mismatch.
  if (sampleSize < 10) {
    console.log('MISMATCH: not enough data (' + sampleSize + ' events; need 10+)');
    process.exit(0);
  }

  const THRESHOLD = 0.3;
  const flagged = [];
  for (const d of Object.keys(declared)) {
    if (inferred[d] === undefined) continue;
    const gap = Math.abs(declared[d] - inferred[d]);
    if (gap > THRESHOLD) {
      flagged.push({ dim: d, declared: declared[d], inferred: inferred[d], gap: +gap.toFixed(3) });
    }
  }

  if (flagged.length === 0) {
    console.log('MISMATCH: none');
  } else {
    console.log('MISMATCH: ' + flagged.length + ' dimension(s) disagree (gap > ' + THRESHOLD + ')');
    for (const f of flagged) {
      console.log(' ' + f.dim + ': declared ' + f.declared + ' vs inferred ' + f.inferred + ' (gap ' + f.gap + ')');
    }
  }
  "
}
|
||||
|
||||
# -----------------------------------------------------------------------
# Narrative + Vibe (v2 stubs)
# -----------------------------------------------------------------------
do_narrative() {
  # Placeholder until the v2 narrative generator lands.
  printf '%s\n' "NARRATIVE: (v2 — not yet implemented; use /plan-tune profile for now)"
}
|
||||
|
||||
do_vibe() {
  # Match the inferred dimensions against scripts/archetypes.ts and print the
  # archetype's name and description (neutral 0.5s when nothing is inferred).
  ensure_profile
  cd "$ROOT_DIR"
  # The profile is handed to bun via the environment only; the previous
  # version also piped it to stdin, which the snippet never read.
  PROFILE_DATA="$(cat "$PROFILE_FILE")" bun -e "
  import('./scripts/archetypes.ts').then(async (mod) => {
    const p = JSON.parse(process.env.PROFILE_DATA);
    const dims = (p.inferred && p.inferred.values) || {
      scope_appetite: 0.5, risk_tolerance: 0.5, detail_preference: 0.5,
      autonomy: 0.5, architecture_care: 0.5,
    };
    const arch = mod.matchArchetype(dims);
    console.log(arch.name);
    console.log(arch.description);
  }).catch(err => { console.error('VIBE:', err.message); process.exit(1); });
  "
}
|
||||
|
||||
# -----------------------------------------------------------------------
# Dispatch
# -----------------------------------------------------------------------
case "$CMD" in
  --read) do_read ;;
  --profile) do_profile ;;
  --gap) do_gap ;;
  --derive) do_derive ;;
  --trace) do_trace "$@" ;;
  --narrative) do_narrative ;;
  --vibe) do_vibe ;;
  --check-mismatch) do_check_mismatch ;;
  --migrate) do_migrate ;;
  # Usage: print the header comment block (everything before `set -euo`),
  # stripping the leading '# ' markers.
  --help|-h) sed -n '1,/^set -euo/p' "$0" | sed 's|^# \?||' ;;
  *)
    echo "gstack-developer-profile: unknown subcommand '$CMD'" >&2
    echo "run --help for usage" >&2
    exit 1
    ;;
esac
|
||||
@@ -0,0 +1,441 @@
|
||||
/**
|
||||
* bin/gstack-developer-profile — subcommand behavior tests.
|
||||
*
|
||||
* Covers:
|
||||
* - --read (legacy /office-hours KEY: VALUE format, with defaults when no profile)
|
||||
* - --migrate (idempotent; preserves sessions + signals_accumulated)
|
||||
* - --derive (recomputes inferred from question-log events)
|
||||
* - --trace <dim> (shows contributing events)
|
||||
* - --gap (declared vs inferred)
|
||||
* - --vibe (archetype match from inferred)
|
||||
* - --check-mismatch (threshold behavior; requires 10+ samples)
|
||||
*/
|
||||
|
||||
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
import { spawnSync } from 'child_process';
|
||||
|
||||
const ROOT = path.resolve(import.meta.dir, '..');
const BIN_DEV = path.join(ROOT, 'bin', 'gstack-developer-profile');
const BIN_LOG = path.join(ROOT, 'bin', 'gstack-question-log');

// Each test gets an isolated GSTACK_HOME so profile state never leaks
// between tests; torn down unconditionally afterwards.
let tmpHome: string;

beforeEach(() => {
  tmpHome = fs.mkdtempSync(path.join(os.tmpdir(), 'gstack-test-'));
});

afterEach(() => {
  fs.rmSync(tmpHome, { recursive: true, force: true });
});
|
||||
|
||||
function runDev(...args: string[]): { stdout: string; stderr: string; status: number } {
|
||||
const res = spawnSync(BIN_DEV, args, {
|
||||
env: { ...process.env, GSTACK_HOME: tmpHome },
|
||||
encoding: 'utf-8',
|
||||
cwd: ROOT,
|
||||
});
|
||||
return {
|
||||
stdout: res.stdout ?? '',
|
||||
stderr: res.stderr ?? '',
|
||||
status: res.status ?? -1,
|
||||
};
|
||||
}
|
||||
|
||||
function logQuestion(payload: Record<string, unknown>): number {
|
||||
const res = spawnSync(BIN_LOG, [JSON.stringify(payload)], {
|
||||
env: { ...process.env, GSTACK_HOME: tmpHome },
|
||||
encoding: 'utf-8',
|
||||
cwd: ROOT,
|
||||
});
|
||||
return res.status ?? -1;
|
||||
}
|
||||
|
||||
function writeLegacyProfile(sessions: Array<Record<string, unknown>>) {
|
||||
const content = sessions.map((s) => JSON.stringify(s)).join('\n') + '\n';
|
||||
fs.writeFileSync(path.join(tmpHome, 'builder-profile.jsonl'), content);
|
||||
}
|
||||
|
||||
function readProfile(): Record<string, unknown> {
|
||||
const file = path.join(tmpHome, 'developer-profile.json');
|
||||
return JSON.parse(fs.readFileSync(file, 'utf-8'));
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
// --read (defaults + compat)
// -----------------------------------------------------------------------

describe('gstack-developer-profile --read', () => {
  test('emits defaults when no profile exists (creates stub)', () => {
    // Fresh GSTACK_HOME: --read must still exit 0 with introduction-tier defaults.
    const r = runDev('--read');
    expect(r.status).toBe(0);
    expect(r.stdout).toContain('SESSION_COUNT: 0');
    expect(r.stdout).toContain('TIER: introduction');
    expect(r.stdout).toContain('CROSS_PROJECT: false');
  });

  test('creates a stub profile file when missing', () => {
    // First read writes the neutral stub (schema_version 1) to disk.
    runDev('--read');
    const file = path.join(tmpHome, 'developer-profile.json');
    expect(fs.existsSync(file)).toBe(true);
    const p = readProfile();
    expect(p.schema_version).toBe(1);
  });

  test('omits --read flag and still returns default output', () => {
    // --read is the default subcommand when none is supplied.
    const r = runDev();
    expect(r.status).toBe(0);
    expect(r.stdout).toContain('TIER:');
  });
});
|
||||
|
||||
// -----------------------------------------------------------------------
// --migrate (legacy jsonl → unified profile)
// -----------------------------------------------------------------------

describe('gstack-developer-profile --migrate', () => {
  test('migrates 3 sessions with signals, resources, topics', () => {
    // Three legacy sessions across two projects; 'taste' and 'agency'
    // appear twice each so the tally assertions below are meaningful.
    writeLegacyProfile([
      {
        date: '2026-03-01',
        mode: 'builder',
        project_slug: 'alpha',
        signals: ['taste', 'agency'],
        resources_shown: ['https://a.example'],
        topics: ['onboarding'],
        design_doc: '/tmp/a.md',
        assignment: 'watch 3 users',
      },
      {
        date: '2026-03-10',
        mode: 'startup',
        project_slug: 'beta',
        signals: ['named_users', 'pushback', 'taste'],
        resources_shown: ['https://b.example'],
        topics: ['fit'],
        design_doc: '/tmp/b.md',
        assignment: 'interview 5',
      },
      {
        date: '2026-04-01',
        mode: 'builder',
        project_slug: 'alpha',
        signals: ['agency'],
        resources_shown: [],
        topics: ['iter'],
        design_doc: '/tmp/c.md',
        assignment: 'ship v1',
      },
    ]);

    const r = runDev('--migrate');
    expect(r.status).toBe(0);
    expect(r.stdout).toContain('migrated 3 sessions');

    const p = readProfile() as {
      sessions: Array<{ project_slug: string; signals: string[] }>;
      signals_accumulated: Record<string, number>;
      resources_shown: string[];
      topics: string[];
    };

    expect(p.sessions.length).toBe(3);
    // Accumulated signals are correctly tallied
    expect(p.signals_accumulated.taste).toBe(2);
    expect(p.signals_accumulated.agency).toBe(2);
    expect(p.signals_accumulated.named_users).toBe(1);
    expect(p.signals_accumulated.pushback).toBe(1);
    expect(p.resources_shown.length).toBe(2);
    expect(p.topics.length).toBe(3);
  });

  test('idempotent — second migrate is no-op when profile exists', () => {
    writeLegacyProfile([{ date: '2026-03-01', mode: 'builder', project_slug: 'x', signals: ['taste'] }]);
    runDev('--migrate');
    const p1 = readProfile();
    // Second run must report a no-op (either branch is acceptable since the
    // first run also archived the legacy file).
    const r2 = runDev('--migrate');
    expect(r2.stdout).toMatch(/no legacy file|already migrated/);
    const p2 = readProfile();
    // Sessions count should be identical — migration didn't duplicate
    expect((p1 as any).sessions.length).toBe((p2 as any).sessions.length);
  });

  test('archives legacy file after successful migration', () => {
    writeLegacyProfile([{ date: '2026-03-01', mode: 'builder', project_slug: 'x', signals: [] }]);
    runDev('--migrate');
    // Legacy file should be renamed to *.migrated-<timestamp>
    const files = fs.readdirSync(tmpHome);
    const archived = files.filter((f) => f.startsWith('builder-profile.jsonl.migrated-'));
    expect(archived.length).toBe(1);
    // Original name should no longer exist
    expect(fs.existsSync(path.join(tmpHome, 'builder-profile.jsonl'))).toBe(false);
  });

  test('no-op when no legacy file exists', () => {
    const r = runDev('--migrate');
    expect(r.status).toBe(0);
    expect(r.stdout).toContain('no legacy file');
  });
});

// -----------------------------------------------------------------------
// --read tier calculation
// -----------------------------------------------------------------------

describe('gstack-developer-profile tier calculation', () => {
|
||||
test('1-3 sessions → welcome_back', () => {
|
||||
writeLegacyProfile([
|
||||
{ date: 'x', mode: 'builder', project_slug: 'a', signals: [] },
|
||||
{ date: 'x', mode: 'builder', project_slug: 'a', signals: [] },
|
||||
{ date: 'x', mode: 'builder', project_slug: 'a', signals: [] },
|
||||
]);
|
||||
runDev('--migrate');
|
||||
const r = runDev('--read');
|
||||
expect(r.stdout).toContain('TIER: welcome_back');
|
||||
});
|
||||
|
||||
test('4-7 sessions → regular', () => {
|
||||
const sessions = Array.from({ length: 5 }, () => ({
|
||||
date: 'x',
|
||||
mode: 'builder',
|
||||
project_slug: 'a',
|
||||
signals: [],
|
||||
}));
|
||||
writeLegacyProfile(sessions);
|
||||
runDev('--migrate');
|
||||
const r = runDev('--read');
|
||||
expect(r.stdout).toContain('TIER: regular');
|
||||
});
|
||||
|
||||
test('8+ sessions → inner_circle', () => {
|
||||
const sessions = Array.from({ length: 9 }, () => ({
|
||||
date: 'x',
|
||||
mode: 'builder',
|
||||
project_slug: 'a',
|
||||
signals: [],
|
||||
}));
|
||||
writeLegacyProfile(sessions);
|
||||
runDev('--migrate');
|
||||
const r = runDev('--read');
|
||||
expect(r.stdout).toContain('TIER: inner_circle');
|
||||
});
|
||||
});
|
||||
|
||||
// -----------------------------------------------------------------------
// --derive: inferred dimensions from question-log events
// -----------------------------------------------------------------------

describe('gstack-developer-profile --derive', () => {
|
||||
test('derive with no events yields neutral (0.5) dimensions', () => {
|
||||
runDev('--derive');
|
||||
const p = readProfile() as {
|
||||
inferred: { values: Record<string, number>; sample_size: number };
|
||||
};
|
||||
expect(p.inferred.sample_size).toBe(0);
|
||||
expect(p.inferred.values.scope_appetite).toBeCloseTo(0.5, 2);
|
||||
});
|
||||
|
||||
test('derive nudges scope_appetite upward after expand choices', () => {
|
||||
for (let i = 0; i < 5; i++) {
|
||||
expect(
|
||||
logQuestion({
|
||||
skill: 'plan-ceo-review',
|
||||
question_id: 'plan-ceo-review-mode',
|
||||
question_summary: 'mode?',
|
||||
user_choice: 'expand',
|
||||
session_id: `s${i}`,
|
||||
ts: `2026-04-0${i + 1}T10:00:00Z`,
|
||||
}),
|
||||
).toBe(0);
|
||||
}
|
||||
runDev('--derive');
|
||||
const p = readProfile() as {
|
||||
inferred: { values: Record<string, number>; sample_size: number; diversity: Record<string, number> };
|
||||
};
|
||||
expect(p.inferred.sample_size).toBe(5);
|
||||
expect(p.inferred.values.scope_appetite).toBeGreaterThan(0.5);
|
||||
expect(p.inferred.diversity.question_ids_covered).toBe(1);
|
||||
expect(p.inferred.diversity.skills_covered).toBe(1);
|
||||
});
|
||||
|
||||
test('derive nudges scope_appetite downward after reduce choices', () => {
|
||||
for (let i = 0; i < 3; i++) {
|
||||
logQuestion({
|
||||
skill: 'plan-ceo-review',
|
||||
question_id: 'plan-ceo-review-mode',
|
||||
question_summary: 'mode?',
|
||||
user_choice: 'reduce',
|
||||
session_id: `s${i}`,
|
||||
});
|
||||
}
|
||||
runDev('--derive');
|
||||
const p = readProfile() as { inferred: { values: Record<string, number> } };
|
||||
expect(p.inferred.values.scope_appetite).toBeLessThan(0.5);
|
||||
});
|
||||
|
||||
test('derive is recomputable — same input, same output', () => {
|
||||
for (let i = 0; i < 3; i++) {
|
||||
logQuestion({
|
||||
skill: 'plan-ceo-review',
|
||||
question_id: 'plan-ceo-review-mode',
|
||||
question_summary: 'mode?',
|
||||
user_choice: 'expand',
|
||||
session_id: `s${i}`,
|
||||
});
|
||||
}
|
||||
runDev('--derive');
|
||||
const v1 = (readProfile() as any).inferred.values;
|
||||
runDev('--derive');
|
||||
const v2 = (readProfile() as any).inferred.values;
|
||||
expect(v1).toEqual(v2);
|
||||
});
|
||||
|
||||
test('derive ignores events for questions not in registry (ad-hoc ids)', () => {
|
||||
logQuestion({
|
||||
skill: 'plan-ceo-review',
|
||||
question_id: 'adhoc-unregistered-question',
|
||||
question_summary: 'mystery',
|
||||
user_choice: 'anything',
|
||||
session_id: 's1',
|
||||
});
|
||||
runDev('--derive');
|
||||
const p = readProfile() as { inferred: { values: Record<string, number>; sample_size: number } };
|
||||
// Sample size counts the log entry, but no signal delta applied
|
||||
expect(p.inferred.sample_size).toBe(1);
|
||||
expect(p.inferred.values.scope_appetite).toBeCloseTo(0.5, 2);
|
||||
});
|
||||
});
|
||||
|
||||
// -----------------------------------------------------------------------
// --trace
// -----------------------------------------------------------------------

describe('gstack-developer-profile --trace <dim>', () => {
|
||||
test('shows contributing events with delta values', () => {
|
||||
for (let i = 0; i < 3; i++) {
|
||||
logQuestion({
|
||||
skill: 'plan-ceo-review',
|
||||
question_id: 'plan-ceo-review-mode',
|
||||
question_summary: 'mode?',
|
||||
user_choice: 'expand',
|
||||
session_id: `s${i}`,
|
||||
});
|
||||
}
|
||||
const r = runDev('--trace', 'scope_appetite');
|
||||
expect(r.stdout).toContain('3 events for scope_appetite');
|
||||
expect(r.stdout).toContain('plan-ceo-review-mode');
|
||||
expect(r.stdout).toContain('expand');
|
||||
});
|
||||
|
||||
test('reports no contributions for untouched dimension', () => {
|
||||
logQuestion({
|
||||
skill: 'plan-ceo-review',
|
||||
question_id: 'plan-ceo-review-mode',
|
||||
question_summary: 'x',
|
||||
user_choice: 'expand',
|
||||
session_id: 's1',
|
||||
});
|
||||
const r = runDev('--trace', 'autonomy');
|
||||
expect(r.stdout).toContain('no events contribute to autonomy');
|
||||
});
|
||||
|
||||
test('errors without dimension argument', () => {
|
||||
const r = runDev('--trace');
|
||||
expect(r.status).not.toBe(0);
|
||||
expect(r.stderr).toContain('missing dimension');
|
||||
});
|
||||
});
|
||||
|
||||
// -----------------------------------------------------------------------
// --gap
// -----------------------------------------------------------------------

describe('gstack-developer-profile --gap', () => {
|
||||
test('gap is empty when nothing is declared', () => {
|
||||
runDev('--read');
|
||||
const r = runDev('--gap');
|
||||
expect(r.status).toBe(0);
|
||||
const out = JSON.parse(r.stdout);
|
||||
expect(out.gap).toEqual({});
|
||||
});
|
||||
|
||||
test('gap computed when declared and inferred both present', () => {
|
||||
runDev('--read');
|
||||
const file = path.join(tmpHome, 'developer-profile.json');
|
||||
const p = readProfile() as any;
|
||||
p.declared = { scope_appetite: 0.8 };
|
||||
p.inferred.values.scope_appetite = 0.55;
|
||||
fs.writeFileSync(file, JSON.stringify(p));
|
||||
const r = runDev('--gap');
|
||||
const out = JSON.parse(r.stdout);
|
||||
expect(out.gap.scope_appetite).toBeCloseTo(0.25, 2);
|
||||
});
|
||||
});
|
||||
|
||||
// -----------------------------------------------------------------------
// --vibe (archetype match)
// -----------------------------------------------------------------------

describe('gstack-developer-profile --vibe', () => {
|
||||
test('returns archetype name and description', () => {
|
||||
runDev('--read');
|
||||
const r = runDev('--vibe');
|
||||
expect(r.status).toBe(0);
|
||||
const lines = r.stdout.trim().split('\n');
|
||||
expect(lines.length).toBeGreaterThanOrEqual(1);
|
||||
// Default profile (all 0.5) is closest to Builder-Coach or Polymath
|
||||
expect(lines[0].length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
// -----------------------------------------------------------------------
// --check-mismatch
// -----------------------------------------------------------------------

describe('gstack-developer-profile --check-mismatch', () => {
|
||||
test('reports insufficient data when < 10 events', () => {
|
||||
runDev('--read');
|
||||
const r = runDev('--check-mismatch');
|
||||
expect(r.stdout).toContain('not enough data');
|
||||
});
|
||||
|
||||
test('reports no mismatch when declared tracks inferred closely', () => {
|
||||
runDev('--read');
|
||||
const file = path.join(tmpHome, 'developer-profile.json');
|
||||
const p = readProfile() as any;
|
||||
p.declared = { scope_appetite: 0.5, architecture_care: 0.5 };
|
||||
p.inferred.sample_size = 20;
|
||||
fs.writeFileSync(file, JSON.stringify(p));
|
||||
const r = runDev('--check-mismatch');
|
||||
expect(r.stdout).toContain('MISMATCH: none');
|
||||
});
|
||||
|
||||
test('flags dimensions with gap > 0.3 when enough data', () => {
|
||||
runDev('--read');
|
||||
const file = path.join(tmpHome, 'developer-profile.json');
|
||||
const p = readProfile() as any;
|
||||
p.declared = { scope_appetite: 0.9, autonomy: 0.2 };
|
||||
p.inferred.values.scope_appetite = 0.4;
|
||||
p.inferred.values.autonomy = 0.8;
|
||||
p.inferred.sample_size = 25;
|
||||
fs.writeFileSync(file, JSON.stringify(p));
|
||||
const r = runDev('--check-mismatch');
|
||||
expect(r.stdout).toContain('2 dimension(s) disagree');
|
||||
expect(r.stdout).toContain('scope_appetite');
|
||||
expect(r.stdout).toContain('autonomy');
|
||||
});
|
||||
});
|
||||
|
||||
// -----------------------------------------------------------------------
// Error handling
// -----------------------------------------------------------------------

describe('gstack-developer-profile errors', () => {
|
||||
test('unknown subcommand exits non-zero', () => {
|
||||
const r = runDev('--not-a-real-subcommand');
|
||||
expect(r.status).not.toBe(0);
|
||||
expect(r.stderr).toContain('unknown subcommand');
|
||||
});
|
||||
});
|
||||