merge: integrate origin/main (v1.1.0.0) — V1 + Puppeteer parity + /plan-tune

Big merge. Main shipped three releases while this branch was in flight:
- v0.19.0.0 /plan-tune skill (observational layer; dual-track dev profile)
- v1.0.0.0 V1 prompts (simpler, outcome-framed, jargon-glossed) + LOC receipts
- v1.1.0.0 browse Puppeteer parity (load-html, file://, --selector, --scale)

This branch bumps to v1.2.0.0 (above main's v1.1.0.0) per the
branch-scoped-version rule in CLAUDE.md. My "0.19.0.0" CHANGELOG entry
is renamed to "1.2.0.0" and dated 2026-04-18 to land above main's trail.

Conflicts resolved:
- VERSION / package.json: 1.2.0.0
- CHANGELOG.md: preserved my entry at top (renamed), kept main's 1.1.0.0
  / 1.0.0.0 / 0.19.0.0 / 0.18.4.0 trail below in correct order
- .github/docker/Dockerfile.ci: kept my xz-utils + nodejs.org tarball
  fix (real CI bug fix main didn't have); absorbed main's retry loop
  structure for both apt and the tarball curl
- bin/gstack-config: kept both my checkpoint_mode/push section and
  main's explain_level writing-style section
- scripts/resolvers/preamble.ts: kept my submodule refactor as the
  file shape; extracted main's new generateWritingStyle and
  generateWritingStyleMigration into scripts/resolvers/preamble/
  submodules; absorbed main's generateQuestionTuning import
- All generated SKILL.md files: resolved by regen via
  bun run gen:skill-docs --host all (per CLAUDE.md: never hand-merge
  generated files — resolve templates and regen)
- Ship golden fixtures (claude/codex/factory): refreshed

Tier 2 preamble composition now includes all 8 sections: context
recovery, ask-user-format, writing-style, completeness, confusion,
continuous checkpoint, context health, question tuning.

Main also brought new test files from /plan-tune: skill-e2e-plan-tune,
upgrade-migration-v1, v0-dormancy, writing-style-resolver. All absorbed.
468 tests pass.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Garry Tan
2026-04-18 23:35:36 +08:00
98 changed files with 14458 additions and 258 deletions
+9 -130
View File
@@ -1,134 +1,13 @@
#!/usr/bin/env bash
# gstack-builder-profile — read builder profile and output structured summary
# gstack-builder-profile — LEGACY SHIM.
#
# Reads ~/.gstack/builder-profile.jsonl (append-only session log from /office-hours).
# Outputs KEY: VALUE pairs for the template to consume. Computes tier, accumulated
# signals, cross-project detection, nudge eligibility, and resource dedup.
# Superseded by bin/gstack-developer-profile. This binary now delegates to
# `gstack-developer-profile --read` to keep /office-hours working during the
# transition. When all call sites have been updated, this file can be removed.
#
# Single source of truth for all closing state. No separate config keys or logs.
#
# Exit 0 with defaults if no profile exists (first-time user = introduction tier).
# The migration from ~/.gstack/builder-profile.jsonl to the unified
# ~/.gstack/developer-profile.json happens automatically on first read —
# see bin/gstack-developer-profile --migrate for details.
set -euo pipefail
GSTACK_HOME="${GSTACK_HOME:-$HOME/.gstack}"
PROFILE_FILE="$GSTACK_HOME/builder-profile.jsonl"
# Graceful default: no profile = introduction tier
if [ ! -f "$PROFILE_FILE" ] || [ ! -s "$PROFILE_FILE" ]; then
echo "SESSION_COUNT: 0"
echo "TIER: introduction"
echo "LAST_PROJECT:"
echo "LAST_ASSIGNMENT:"
echo "LAST_DESIGN_TITLE:"
echo "DESIGN_COUNT: 0"
echo "DESIGN_TITLES: []"
echo "ACCUMULATED_SIGNALS:"
echo "TOTAL_SIGNAL_COUNT: 0"
echo "CROSS_PROJECT: false"
echo "NUDGE_ELIGIBLE: false"
echo "RESOURCES_SHOWN:"
echo "RESOURCES_SHOWN_COUNT: 0"
echo "TOPICS:"
exit 0
fi
# Use bun for JSON parsing (same pattern as gstack-learnings-search).
# Fallback to defaults if bun is unavailable.
cat "$PROFILE_FILE" 2>/dev/null | bun -e "
const lines = (await Bun.stdin.text()).trim().split('\n').filter(Boolean);
const entries = [];
for (const line of lines) {
try { entries.push(JSON.parse(line)); } catch {}
}
const count = entries.length;
// Tier computation
let tier = 'introduction';
if (count >= 8) tier = 'inner_circle';
else if (count >= 4) tier = 'regular';
else if (count >= 1) tier = 'welcome_back';
// Last session data
const last = entries[count - 1] || {};
const prev = entries[count - 2] || {};
const crossProject = prev.project_slug && last.project_slug
? prev.project_slug !== last.project_slug
: false;
// Design docs
const designs = entries
.map(e => e.design_doc || '')
.filter(Boolean);
const designTitles = entries
.map(e => {
const doc = e.design_doc || '';
// Extract title from path: ...-design-DATETIME.md -> use the entry's topic or project
return doc ? (e.project_slug || 'unknown') : '';
})
.filter(Boolean);
// Accumulated signals
const signalCounts = {};
let totalSignals = 0;
for (const e of entries) {
for (const s of (e.signals || [])) {
signalCounts[s] = (signalCounts[s] || 0) + 1;
totalSignals++;
}
}
const signalStr = Object.entries(signalCounts)
.map(([k, v]) => k + ':' + v)
.join(',');
// Nudge eligibility: builder-mode + 5+ signals across 3+ sessions
const builderSessions = entries.filter(e => e.mode !== 'startup').length;
const nudgeEligible = builderSessions >= 3 && totalSignals >= 5;
// Resources shown (aggregate all)
const allResources = new Set();
for (const e of entries) {
for (const url of (e.resources_shown || [])) {
allResources.add(url);
}
}
// Topics (aggregate all)
const allTopics = new Set();
for (const e of entries) {
for (const t of (e.topics || [])) {
allTopics.add(t);
}
}
console.log('SESSION_COUNT: ' + count);
console.log('TIER: ' + tier);
console.log('LAST_PROJECT: ' + (last.project_slug || ''));
console.log('LAST_ASSIGNMENT: ' + (last.assignment || ''));
console.log('LAST_DESIGN_TITLE: ' + (last.design_doc || ''));
console.log('DESIGN_COUNT: ' + designs.length);
console.log('DESIGN_TITLES: ' + JSON.stringify(designTitles));
console.log('ACCUMULATED_SIGNALS: ' + signalStr);
console.log('TOTAL_SIGNAL_COUNT: ' + totalSignals);
console.log('CROSS_PROJECT: ' + crossProject);
console.log('NUDGE_ELIGIBLE: ' + nudgeEligible);
console.log('RESOURCES_SHOWN: ' + Array.from(allResources).join(','));
console.log('RESOURCES_SHOWN_COUNT: ' + allResources.size);
console.log('TOPICS: ' + Array.from(allTopics).join(','));
" 2>/dev/null || {
# Fallback if bun is unavailable
echo "SESSION_COUNT: 0"
echo "TIER: introduction"
echo "LAST_PROJECT:"
echo "LAST_ASSIGNMENT:"
echo "LAST_DESIGN_TITLE:"
echo "DESIGN_COUNT: 0"
echo "DESIGN_TITLES: []"
echo "ACCUMULATED_SIGNALS:"
echo "TOTAL_SIGNAL_COUNT: 0"
echo "CROSS_PROJECT: false"
echo "NUDGE_ELIGIBLE: false"
echo "RESOURCES_SHOWN:"
echo "RESOURCES_SHOWN_COUNT: 0"
echo "TOPICS:"
}
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
exec "$SCRIPT_DIR/gstack-developer-profile" --read "$@"
+13
View File
@@ -51,6 +51,14 @@ CONFIG_HEADER='# gstack configuration — edit freely, changes take effect on ne
# # false = keep WIP commits local only (default)
# # Pushing can trigger CI/deploy hooks — opt in carefully.
#
# ─── Writing style (V1) ──────────────────────────────────────────────
# explain_level: default # default = jargon-glossed, outcome-framed prose
# # (V1 default — more accessible for everyone)
# # terse = V0 prose style, no glosses, no outcome-framing layer
# # (for power users who know the terms)
# # Unknown values default to "default" with a warning.
# # See docs/designs/PLAN_TUNING_V1.md for rationale.
#
# ─── Advanced ────────────────────────────────────────────────────────
# codex_reviews: enabled # disabled = skip Codex adversarial reviews in /ship
# gstack_contributor: false # true = file field reports when gstack misbehaves
@@ -101,6 +109,11 @@ case "${1:-}" in
echo "Error: key must contain only alphanumeric characters and underscores" >&2
exit 1
fi
# V1: whitelist values for keys with closed value domains. Unknown values warn + default.
if [ "$KEY" = "explain_level" ] && [ "$VALUE" != "default" ] && [ "$VALUE" != "terse" ]; then
echo "Warning: explain_level '$VALUE' not recognized. Valid values: default, terse. Using default." >&2
VALUE="default"
fi
mkdir -p "$STATE_DIR"
# Write annotated header on first creation
if [ ! -f "$CONFIG_FILE" ]; then
+446
View File
@@ -0,0 +1,446 @@
#!/usr/bin/env bash
# gstack-developer-profile — unified developer profile access and derivation.
#
# Supersedes bin/gstack-builder-profile. The old binary remains as a legacy
# shim that delegates to `gstack-developer-profile --read`.
#
# Subcommands:
# --read (default) emit KEY: VALUE pairs in builder-profile format
# for /office-hours compatibility.
# --derive recompute inferred dimensions from question events;
# write updated ~/.gstack/developer-profile.json.
# --profile emit the full profile as JSON (all fields).
# --gap emit declared-vs-inferred gap as JSON.
# --trace <dim> show events that contributed to a dimension.
# --narrative (v2 stub) output a coach bio paragraph.
# --vibe (v2 stub) output the one-word archetype.
# --check-mismatch detect meaningful gaps between declared and observed.
# --migrate migrate builder-profile.jsonl → developer-profile.json.
# Idempotent; archives the source file on success.
#
# Profile file: ~/.gstack/developer-profile.json (unified schema — see
# docs/designs/PLAN_TUNING_V0.md). Event file: ~/.gstack/projects/{SLUG}/
# question-log.jsonl (appended by gstack-question-log; read by --derive and
# --trace). Note: question-events.jsonl is a separate file used only by
# gstack-question-preference for its own event log.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
ROOT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
# State root; honors a caller-provided GSTACK_HOME override.
GSTACK_HOME="${GSTACK_HOME:-$HOME/.gstack}"
PROFILE_FILE="$GSTACK_HOME/developer-profile.json"
LEGACY_FILE="$GSTACK_HOME/builder-profile.jsonl"
# Project slug for per-project event paths. gstack-slug failure is tolerated
# and SLUG falls back to "unknown" so set -u cannot trip on an unset SLUG.
eval "$("$SCRIPT_DIR/gstack-slug" 2>/dev/null || true)"
SLUG="${SLUG:-unknown}"
# First argument selects the subcommand; default to --read for legacy callers.
CMD="${1:---read}"
shift || true
# -----------------------------------------------------------------------
# Migration: builder-profile.jsonl → developer-profile.json
# -----------------------------------------------------------------------
# Converts the legacy /office-hours session log into the unified profile
# schema. Idempotent: no-ops when there is nothing to migrate or the unified
# profile already exists. On success the legacy file is archived (renamed
# with a timestamp suffix) so a re-run cannot double-migrate.
do_migrate() {
  if [ ! -f "$LEGACY_FILE" ]; then
    echo "MIGRATE: no legacy file to migrate"
    return 0
  fi
  if [ -f "$PROFILE_FILE" ]; then
    # Already migrated — no-op (idempotent).
    echo "MIGRATE: already migrated (developer-profile.json exists)"
    return 0
  fi
  # Run migration in a temp file, then atomic rename.
  local TMPOUT
  TMPOUT=$(mktemp "$GSTACK_HOME/developer-profile.json.XXXXXX.tmp")
  # Clean up via a RETURN trap with the path expanded NOW: a lazily-expanded
  # EXIT trap referencing the local TMPOUT can hit set -u after the function
  # has left the stack — the exact unbound-var race gstack-question-preference
  # documents in its --write path. A RETURN trap also cannot clobber an EXIT
  # trap installed by a caller (the old `trap - EXIT` would have).
  trap "rm -f '$TMPOUT'" RETURN
  cat "$LEGACY_FILE" | bun -e "
  const lines = (await Bun.stdin.text()).trim().split('\n').filter(Boolean);
  const sessions = [];
  const signalsAcc = {};
  const resources = new Set();
  const topics = new Set();
  for (const line of lines) {
    try {
      const e = JSON.parse(line);
      sessions.push(e);
      for (const s of (e.signals || [])) {
        signalsAcc[s] = (signalsAcc[s] || 0) + 1;
      }
      for (const r of (e.resources_shown || [])) resources.add(r);
      for (const t of (e.topics || [])) topics.add(t);
    } catch {}
  }
  const profile = {
    identity: {},
    declared: {},
    inferred: {
      values: {
        scope_appetite: 0.5,
        risk_tolerance: 0.5,
        detail_preference: 0.5,
        autonomy: 0.5,
        architecture_care: 0.5,
      },
      sample_size: 0,
      diversity: { skills_covered: 0, question_ids_covered: 0, days_span: 0 },
    },
    gap: {},
    overrides: {},
    sessions,
    signals_accumulated: signalsAcc,
    resources_shown: Array.from(resources),
    topics: Array.from(topics),
    migrated_at: new Date().toISOString(),
    schema_version: 1,
  };
  console.log(JSON.stringify(profile, null, 2));
  " > "$TMPOUT"
  # Atomic rename.
  mv "$TMPOUT" "$PROFILE_FILE"
  # Drop the cleanup trap — the temp path no longer exists after the rename.
  trap - RETURN
  # Archive the legacy file so the migration cannot run twice.
  local TS
  TS="$(date +%Y-%m-%d-%H%M%S)"
  mv "$LEGACY_FILE" "$LEGACY_FILE.migrated-$TS"
  # Best-effort session count for the status line ("?" if bun/parse fails).
  local COUNT
  COUNT=$(bun -e "console.log(JSON.parse(require('fs').readFileSync('$PROFILE_FILE','utf-8')).sessions.length)" 2>/dev/null || echo "?")
  echo "MIGRATE: ok — migrated $COUNT sessions from builder-profile.jsonl"
}
# -----------------------------------------------------------------------
# Load-or-migrate helper: ensure developer-profile.json exists.
# Auto-migrates from builder-profile.jsonl if present.
# Always returns 0; emits nothing — callers read $PROFILE_FILE afterwards.
# Creates a minimal stub profile if neither file exists (first-time user).
# -----------------------------------------------------------------------
ensure_profile() {
if [ -f "$PROFILE_FILE" ]; then
return 0
fi
if [ -f "$LEGACY_FILE" ]; then
# Silent auto-migration; do_migrate's status line is noise here.
do_migrate >/dev/null
return 0
fi
# Nothing yet — create a stub.
# All inferred dimensions start at the 0.5 midpoint with zero sample size,
# matching the migrated-profile defaults in do_migrate.
mkdir -p "$GSTACK_HOME"
cat > "$PROFILE_FILE" <<EOF
{
"identity": {},
"declared": {},
"inferred": {
"values": {
"scope_appetite": 0.5,
"risk_tolerance": 0.5,
"detail_preference": 0.5,
"autonomy": 0.5,
"architecture_care": 0.5
},
"sample_size": 0,
"diversity": { "skills_covered": 0, "question_ids_covered": 0, "days_span": 0 }
},
"gap": {},
"overrides": {},
"sessions": [],
"signals_accumulated": {},
"schema_version": 1
}
EOF
}
# -----------------------------------------------------------------------
# Read: emit legacy KEY: VALUE output for /office-hours compat.
# -----------------------------------------------------------------------
# Reproduces the exact line format the old gstack-builder-profile printed
# (tier thresholds, signal aggregation, nudge rule) but sources everything
# from the unified profile JSON instead of re-scanning the JSONL log.
# Tier thresholds: 8+ sessions = inner_circle, 4+ = regular, 1+ = welcome_back.
# Nudge rule: 3+ non-startup sessions AND 5+ total signals.
do_read() {
ensure_profile
# The bun script reads the profile on stdin and writes the KEY: VALUE pairs.
# Do not reformat its console.log lines — /office-hours parses them.
cat "$PROFILE_FILE" | bun -e "
const p = JSON.parse(await Bun.stdin.text());
const sessions = p.sessions || [];
const count = sessions.length;
let tier = 'introduction';
if (count >= 8) tier = 'inner_circle';
else if (count >= 4) tier = 'regular';
else if (count >= 1) tier = 'welcome_back';
const last = sessions[count - 1] || {};
const prev = sessions[count - 2] || {};
const crossProject = prev.project_slug && last.project_slug
? prev.project_slug !== last.project_slug
: false;
const designs = sessions.map(e => e.design_doc || '').filter(Boolean);
const designTitles = sessions
.map(e => (e.design_doc ? (e.project_slug || 'unknown') : ''))
.filter(Boolean);
const signalCounts = p.signals_accumulated || {};
let totalSignals = 0;
for (const v of Object.values(signalCounts)) totalSignals += v;
const signalStr = Object.entries(signalCounts).map(([k,v]) => k + ':' + v).join(',');
const builderSessions = sessions.filter(e => e.mode !== 'startup').length;
const nudgeEligible = builderSessions >= 3 && totalSignals >= 5;
const resources = p.resources_shown || [];
const topics = p.topics || [];
console.log('SESSION_COUNT: ' + count);
console.log('TIER: ' + tier);
console.log('LAST_PROJECT: ' + (last.project_slug || ''));
console.log('LAST_ASSIGNMENT: ' + (last.assignment || ''));
console.log('LAST_DESIGN_TITLE: ' + (last.design_doc || ''));
console.log('DESIGN_COUNT: ' + designs.length);
console.log('DESIGN_TITLES: ' + JSON.stringify(designTitles));
console.log('ACCUMULATED_SIGNALS: ' + signalStr);
console.log('TOTAL_SIGNAL_COUNT: ' + totalSignals);
console.log('CROSS_PROJECT: ' + crossProject);
console.log('NUDGE_ELIGIBLE: ' + nudgeEligible);
console.log('RESOURCES_SHOWN: ' + resources.join(','));
console.log('RESOURCES_SHOWN_COUNT: ' + resources.length);
console.log('TOPICS: ' + topics.join(','));
"
}
# -----------------------------------------------------------------------
# Profile: emit the full profile JSON, verbatim.
# -----------------------------------------------------------------------
# For consumers that want every field rather than the --read KEY: VALUE
# view. ensure_profile guarantees the file exists (stub on first use).
do_profile() {
  ensure_profile
  cat < "$PROFILE_FILE"
}
# -----------------------------------------------------------------------
# Gap: declared vs inferred diff
# -----------------------------------------------------------------------
# Emits { declared, inferred, gap } as pretty-printed JSON; gap holds the
# absolute declared/inferred distance (3 decimals) for each of the five
# dimensions that have values on BOTH sides.
do_gap() {
  ensure_profile
  cat "$PROFILE_FILE" | bun -e "
  const profile = JSON.parse(await Bun.stdin.text());
  const declared = profile.declared || {};
  const inferred = (profile.inferred || {}).values || {};
  const DIMS = ['scope_appetite','risk_tolerance','detail_preference','autonomy','architecture_care'];
  const gap = {};
  DIMS.filter((dim) => declared[dim] !== undefined && inferred[dim] !== undefined)
    .forEach((dim) => {
      gap[dim] = +(Math.abs(declared[dim] - inferred[dim])).toFixed(3);
    });
  console.log(JSON.stringify({ declared, inferred, gap }, null, 2));
  "
}
# -----------------------------------------------------------------------
# Derive: recompute inferred dimensions from question-events.jsonl
# -----------------------------------------------------------------------
# Replays this project's question-log.jsonl (written by gstack-question-log)
# through the TypeScript registry + signal map, writes the recomputed
# inferred values, diversity stats, and declared-vs-inferred gap back into
# developer-profile.json via a write-temp-then-rename.
do_derive() {
ensure_profile
local EVENTS="$GSTACK_HOME/projects/$SLUG/question-log.jsonl"
local REGISTRY="$ROOT_DIR/scripts/question-registry.ts"
local SIGNALS="$ROOT_DIR/scripts/psychographic-signals.ts"
if [ ! -f "$REGISTRY" ] || [ ! -f "$SIGNALS" ]; then
echo "DERIVE: registry or signals file missing, cannot derive" >&2
exit 1
fi
# bun must run from the repo root so the relative ./scripts imports resolve.
# NOTE(review): this cd persists for the rest of the process — harmless today
# because dispatch runs exactly one subcommand, but worth confirming if
# subcommands are ever chained.
cd "$ROOT_DIR"
# Paths go in via the environment (not string interpolation) so spaces and
# quotes in paths cannot break the embedded script.
PROFILE_FILE_PATH="$PROFILE_FILE" EVENTS_PATH="$EVENTS" bun -e "
import('./scripts/question-registry.ts').then(async (regmod) => {
const sigmod = await import('./scripts/psychographic-signals.ts');
const fs = require('fs');
const { QUESTIONS } = regmod;
const { SIGNAL_MAP, applySignal, newDimensionTotals, normalizeToDimensionValue } = sigmod;
const profilePath = process.env.PROFILE_FILE_PATH;
const eventsPath = process.env.EVENTS_PATH;
const profile = JSON.parse(fs.readFileSync(profilePath, 'utf-8'));
let lines = [];
if (fs.existsSync(eventsPath)) {
lines = fs.readFileSync(eventsPath, 'utf-8').trim().split('\n').filter(Boolean);
}
const totals = newDimensionTotals();
const skills = new Set();
const qids = new Set();
const days = new Set();
let count = 0;
for (const line of lines) {
let e;
try { e = JSON.parse(line); } catch { continue; }
if (!e.question_id || !e.user_choice) continue;
count++;
skills.add(e.skill);
qids.add(e.question_id);
if (e.ts) days.add(String(e.ts).slice(0,10));
const def = QUESTIONS[e.question_id];
if (def && def.signal_key) {
applySignal(totals, def.signal_key, e.user_choice);
}
}
const values = {};
for (const [dim, total] of Object.entries(totals)) {
values[dim] = +normalizeToDimensionValue(total).toFixed(3);
}
profile.inferred = {
values,
sample_size: count,
diversity: {
skills_covered: skills.size,
question_ids_covered: qids.size,
days_span: days.size,
},
};
// Recompute gap.
const gap = {};
for (const d of Object.keys(values)) {
if (profile.declared && profile.declared[d] !== undefined) {
gap[d] = +(Math.abs(profile.declared[d] - values[d])).toFixed(3);
}
}
profile.gap = gap;
profile.derived_at = new Date().toISOString();
const tmp = profilePath + '.tmp';
fs.writeFileSync(tmp, JSON.stringify(profile, null, 2));
fs.renameSync(tmp, profilePath);
console.log('DERIVE: ok — ' + count + ' events, ' + skills.size + ' skills, ' + qids.size + ' questions');
}).catch(err => { console.error('DERIVE:', err.message); process.exit(1); });
"
}
# -----------------------------------------------------------------------
# Trace: show events contributing to a dimension
# -----------------------------------------------------------------------
# Usage: --trace <dim>. Replays this project's question-log.jsonl through
# the registry + signal map and lists every event whose signal deltas touch
# the requested dimension (timestamp, question id, choice, delta).
do_trace() {
  local DIM="${1:-}"
  if [ -z "$DIM" ]; then
    echo "TRACE: missing dimension argument" >&2
    exit 1
  fi
  local EVENTS="$GSTACK_HOME/projects/$SLUG/question-log.jsonl"
  if [ ! -f "$EVENTS" ]; then
    echo "TRACE: no events for this project"
    return 0
  fi
  # The registry/signal modules are project TypeScript, so bun must run from
  # the repo root for the relative ./scripts imports to resolve.
  cd "$ROOT_DIR"
  EVENTS_PATH="$EVENTS" TRACE_DIM="$DIM" bun -e "
import('./scripts/question-registry.ts').then(async (regmod) => {
const sigmod = await import('./scripts/psychographic-signals.ts');
const fs = require('fs');
const { QUESTIONS } = regmod;
const { SIGNAL_MAP } = sigmod;
const target = process.env.TRACE_DIM;
const lines = fs.readFileSync(process.env.EVENTS_PATH, 'utf-8').trim().split('\n').filter(Boolean);
const rows = [];
for (const line of lines) {
let e;
try { e = JSON.parse(line); } catch { continue; }
const def = QUESTIONS[e.question_id];
if (!def || !def.signal_key) continue;
const deltas = SIGNAL_MAP[def.signal_key]?.[e.user_choice] || [];
for (const d of deltas) {
if (d.dim === target) {
rows.push({ ts: e.ts, question_id: e.question_id, choice: e.user_choice, delta: d.delta });
}
}
}
if (rows.length === 0) {
console.log('TRACE: no events contribute to ' + target);
} else {
console.log('TRACE: ' + rows.length + ' events for ' + target);
for (const r of rows) {
console.log(' ' + (r.ts || '').slice(0,19) + ' ' + r.question_id + ' → ' + r.choice + ' (' + (r.delta > 0 ? '+' : '') + r.delta + ')');
}
}
}).catch(err => { console.error('TRACE:', err.message); process.exit(1); });
"
}
# -----------------------------------------------------------------------
# Check mismatch: flag when declared ≠ inferred by > threshold
# -----------------------------------------------------------------------
# Compares each declared dimension against its inferred counterpart and
# reports those whose absolute gap exceeds 0.3, but only once 10+ question
# events back the inferred values (small samples would produce noise).
do_check_mismatch() {
ensure_profile
cat "$PROFILE_FILE" | bun -e "
const p = JSON.parse(await Bun.stdin.text());
const declared = p.declared || {};
const inferred = (p.inferred && p.inferred.values) || {};
const sampleSize = (p.inferred && p.inferred.sample_size) || 0;
const diversity = (p.inferred && p.inferred.diversity) || {};
// NOTE(review): diversity is read but never used below — dead binding,
// or a placeholder for a planned diversity gate; confirm intent.
// Require enough data before reporting mismatch.
if (sampleSize < 10) {
console.log('MISMATCH: not enough data (' + sampleSize + ' events; need 10+)');
process.exit(0);
}
const THRESHOLD = 0.3;
const flagged = [];
for (const d of Object.keys(declared)) {
if (inferred[d] === undefined) continue;
const gap = Math.abs(declared[d] - inferred[d]);
if (gap > THRESHOLD) {
flagged.push({ dim: d, declared: declared[d], inferred: inferred[d], gap: +gap.toFixed(3) });
}
}
if (flagged.length === 0) {
console.log('MISMATCH: none');
} else {
console.log('MISMATCH: ' + flagged.length + ' dimension(s) disagree (gap > ' + THRESHOLD + ')');
for (const f of flagged) {
console.log(' ' + f.dim + ': declared ' + f.declared + ' vs inferred ' + f.inferred + ' (gap ' + f.gap + ')');
}
}
"
}
# -----------------------------------------------------------------------
# Narrative + Vibe (v2 stubs)
# -----------------------------------------------------------------------
# v2 placeholder: no coach-bio generation yet; emit a fixed pointer message.
do_narrative() {
  printf '%s\n' "NARRATIVE: (v2 — not yet implemented; use /plan-tune profile for now)"
}
# Vibe: print the matched archetype name + description for the inferred
# dimensions (0.5 midpoints when no inferred values exist yet).
do_vibe() {
  ensure_profile
  # archetypes.ts is project TypeScript — bun must run from the repo root.
  cd "$ROOT_DIR"
  # The profile travels via the PROFILE_DATA environment variable; the bun
  # script never reads stdin, so the previous `cat "$PROFILE_FILE" |` pipe
  # was dead weight (and a second read of the same file). Removed.
  PROFILE_DATA="$(cat "$PROFILE_FILE")" bun -e "
import('./scripts/archetypes.ts').then(async (mod) => {
const p = JSON.parse(process.env.PROFILE_DATA);
const dims = (p.inferred && p.inferred.values) || {
scope_appetite: 0.5, risk_tolerance: 0.5, detail_preference: 0.5,
autonomy: 0.5, architecture_care: 0.5,
};
const arch = mod.matchArchetype(dims);
console.log(arch.name);
console.log(arch.description);
}).catch(err => { console.error('VIBE:', err.message); process.exit(1); });
"
}
# -----------------------------------------------------------------------
# Dispatch
# -----------------------------------------------------------------------
# CMD defaults to --read (set at the top of the file); unknown subcommands
# exit 1 with a pointer to --help.
case "$CMD" in
--read) do_read ;;
--profile) do_profile ;;
--gap) do_gap ;;
--derive) do_derive ;;
--trace) do_trace "$@" ;;
--narrative) do_narrative ;;
--vibe) do_vibe ;;
--check-mismatch) do_check_mismatch ;;
--migrate) do_migrate ;;
# --help prints this file's header comment block: everything from line 1
# through the `set -euo` line, with the leading "# " stripped.
--help|-h) sed -n '1,/^set -euo/p' "$0" | sed 's|^# \?||' ;;
*)
echo "gstack-developer-profile: unknown subcommand '$CMD'" >&2
echo "run --help for usage" >&2
exit 1
;;
esac
+167
View File
@@ -0,0 +1,167 @@
#!/usr/bin/env bash
# gstack-question-log — append an AskUserQuestion event to the project log.
#
# Usage:
# gstack-question-log '{"skill":"ship","question_id":"ship-test-failure-triage",\
# "question_summary":"Tests failed","options_count":3,"user_choice":"fix-now",\
# "recommended":"fix-now","session_id":"ppid"}'
#
# v1: log-only. Consumed by /plan-tune inspection and (in v2) by the
# inferred-dimension derivation pipeline.
#
# Schema (all fields validated):
# skill — skill name (kebab-case)
# question_id — either a registered id (preferred) or ad-hoc `{skill}-{slug}`
# question_summary — short one-liner of what was asked (<= 200 chars)
# category — approval | clarification | routing | cherry-pick | feedback-loop
# (optional — looked up from registry if omitted)
# door_type — one-way | two-way
# (optional — looked up from registry if omitted)
# options_count — number of options presented (positive integer)
# user_choice — key user selected (free string; registry-options preferred)
# recommended — option key the agent recommended (optional)
# followed_recommendation — bool (optional — computed if both present)
# session_id — stable session identifier
# ts — ISO 8601 timestamp (auto-injected if missing)
#
# Append-only JSONL. Dedup is at read time in gstack-question-sensitivity --read-log.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
# Resolve the project slug, tolerating gstack-slug failure with a fallback
# to "unknown" — the same guard gstack-developer-profile and
# gstack-question-preference use. Without it, a failing gstack-slug leaves
# SLUG unset and set -u aborts with a cryptic unbound-variable error.
eval "$("$SCRIPT_DIR/gstack-slug" 2>/dev/null || true)"
SLUG="${SLUG:-unknown}"
GSTACK_HOME="${GSTACK_HOME:-$HOME/.gstack}"
mkdir -p "$GSTACK_HOME/projects/$SLUG"
# Require the JSON payload argument up front with a real usage message
# (set -u would otherwise die with an opaque "$1: unbound variable").
if [ $# -lt 1 ]; then
  echo "gstack-question-log: missing JSON payload argument (see header for usage)" >&2
  exit 1
fi
INPUT="$1"
# Validate and enrich from registry.
TMPERR=$(mktemp)
trap 'rm -f "$TMPERR"' EXIT
# Validation runs in an embedded bun script that reads the payload on stdin
# and either prints the sanitized, enriched JSON line (stdout) or a reason
# on stderr with a nonzero exit. set +e so a validation failure does not
# kill the script before the tail below can surface the captured stderr;
# the tail restores set -e.
set +e
VALIDATED=$(printf '%s' "$INPUT" | bun -e "
const path = require('path');
const raw = await Bun.stdin.text();
let j;
try { j = JSON.parse(raw); } catch { process.stderr.write('gstack-question-log: invalid JSON\n'); process.exit(1); }
// Required: skill (kebab-case)
if (!j.skill || !/^[a-z0-9-]+\$/.test(j.skill)) {
process.stderr.write('gstack-question-log: invalid skill, must be kebab-case\n');
process.exit(1);
}
// Required: question_id (kebab-case, <=64 chars)
if (!j.question_id || !/^[a-z0-9-]+\$/.test(j.question_id) || j.question_id.length > 64) {
process.stderr.write('gstack-question-log: invalid question_id, must be kebab-case <=64 chars\n');
process.exit(1);
}
// Required: question_summary (non-empty, <=200 chars, no newlines)
if (typeof j.question_summary !== 'string' || !j.question_summary.length) {
process.stderr.write('gstack-question-log: question_summary required\n');
process.exit(1);
}
if (j.question_summary.length > 200) {
j.question_summary = j.question_summary.slice(0, 200);
}
if (j.question_summary.includes('\n')) {
j.question_summary = j.question_summary.replace(/\n+/g, ' ');
}
// Injection defense on the summary — same patterns as learnings-log.
const INJECTION_PATTERNS = [
/ignore\s+(all\s+)?previous\s+(instructions|context|rules)/i,
/you\s+are\s+now\s+/i,
/always\s+output\s+no\s+findings/i,
/skip\s+(all\s+)?(security|review|checks)/i,
/override[:\s]/i,
/\bsystem\s*:/i,
/\bassistant\s*:/i,
/\buser\s*:/i,
/do\s+not\s+(report|flag|mention)/i,
];
for (const pat of INJECTION_PATTERNS) {
if (pat.test(j.question_summary)) {
process.stderr.write('gstack-question-log: question_summary contains suspicious instruction-like content, rejected\n');
process.exit(1);
}
}
// Registry lookup for category + door_type enrichment.
// Registry file is at \$GSTACK_ROOT/scripts/question-registry.ts, but we don't import
// TypeScript at runtime here — we pass through what was provided and fill in defaults.
// The caller (the preamble resolver) is expected to pass category+door_type from
// the registry when it knows them; for ad-hoc ids both can be omitted.
const ALLOWED_CATEGORIES = ['approval', 'clarification', 'routing', 'cherry-pick', 'feedback-loop'];
if (j.category !== undefined) {
if (!ALLOWED_CATEGORIES.includes(j.category)) {
process.stderr.write('gstack-question-log: invalid category, must be one of: ' + ALLOWED_CATEGORIES.join(', ') + '\n');
process.exit(1);
}
}
const ALLOWED_DOORS = ['one-way', 'two-way'];
if (j.door_type !== undefined) {
if (!ALLOWED_DOORS.includes(j.door_type)) {
process.stderr.write('gstack-question-log: invalid door_type, must be one-way or two-way\n');
process.exit(1);
}
}
// options_count — positive integer if present
if (j.options_count !== undefined) {
const n = Number(j.options_count);
if (!Number.isInteger(n) || n < 1 || n > 26) {
process.stderr.write('gstack-question-log: options_count must be integer in [1, 26]\n');
process.exit(1);
}
j.options_count = n;
}
// user_choice — required; <= 64 chars; single-line; no injection patterns
if (typeof j.user_choice !== 'string' || !j.user_choice.length) {
process.stderr.write('gstack-question-log: user_choice required\n');
process.exit(1);
}
if (j.user_choice.length > 64) j.user_choice = j.user_choice.slice(0, 64);
j.user_choice = j.user_choice.replace(/\n+/g, ' ');
// recommended — optional, same constraints as user_choice
if (j.recommended !== undefined) {
if (typeof j.recommended !== 'string') {
process.stderr.write('gstack-question-log: recommended must be string\n');
process.exit(1);
}
if (j.recommended.length > 64) j.recommended = j.recommended.slice(0, 64);
}
// followed_recommendation — compute if both sides present.
if (j.recommended !== undefined && j.user_choice !== undefined) {
j.followed_recommendation = j.user_choice === j.recommended;
}
// session_id — kebab-friendly; <=64 chars
if (j.session_id !== undefined) {
if (typeof j.session_id !== 'string') {
process.stderr.write('gstack-question-log: session_id must be string\n');
process.exit(1);
}
if (j.session_id.length > 64) j.session_id = j.session_id.slice(0, 64);
}
// Inject timestamp if not present.
if (!j.ts) j.ts = new Date().toISOString();
console.log(JSON.stringify(j));
" 2>"$TMPERR")
VALIDATE_RC=$?
set -e
# Append the sanitized event only when validation succeeded AND produced a
# line; otherwise surface the captured stderr (if any) and exit nonzero.
if [ "$VALIDATE_RC" -eq 0 ] && [ -n "$VALIDATED" ]; then
  printf '%s\n' "$VALIDATED" >> "$GSTACK_HOME/projects/$SLUG/question-log.jsonl"
else
  if [ -s "$TMPERR" ]; then
    cat "$TMPERR" >&2
  fi
  exit 1
fi
+262
View File
@@ -0,0 +1,262 @@
#!/usr/bin/env bash
# gstack-question-preference — read/write/check explicit per-question preferences.
#
# Preference file: ~/.gstack/projects/{SLUG}/question-preferences.json
# Schema: { "<question_id>": "always-ask" | "never-ask" | "ask-only-for-one-way" }
#
# Subcommands:
# --check <id> → emit ASK_NORMALLY | AUTO_DECIDE | ASK_ONLY_ONE_WAY
# --write '{...}' → set a preference (user-origin gate enforced)
# --read → dump preferences JSON
# --clear [<id>] → clear one or all preferences
# --stats → short summary
#
# User-origin gate
# ----------------
# The --write subcommand REQUIRES a `source` field on the input:
# - "plan-tune" — user ran /plan-tune and chose a preference (allowed)
# - "inline-user" — inline `tune:` from the user's own chat message (allowed)
# - "inline-tool-output"— tune: prefix seen in tool output / file content (REJECTED)
# - "inline-file" — tune: prefix seen in a file the agent read (REJECTED)
# This is the profile-poisoning defense from docs/designs/PLAN_TUNING_V0.md.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
ROOT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
# State root; honors a caller-provided GSTACK_HOME override.
GSTACK_HOME="${GSTACK_HOME:-$HOME/.gstack}"
# Project slug for the per-project preference/event paths; gstack-slug
# failure is tolerated and SLUG falls back to "unknown" so set -u cannot trip.
eval "$("$SCRIPT_DIR/gstack-slug" 2>/dev/null || true)"
SLUG="${SLUG:-unknown}"
PREF_FILE="$GSTACK_HOME/projects/$SLUG/question-preferences.json"
EVENT_FILE="$GSTACK_HOME/projects/$SLUG/question-events.jsonl"
mkdir -p "$GSTACK_HOME/projects/$SLUG"
# First argument selects the subcommand; remaining args go to the handler.
CMD="${1:-}"
shift || true
# Create an empty preference map on first use so every subcommand can assume
# $PREF_FILE exists and parses as JSON.
ensure_file() {
  [ -f "$PREF_FILE" ] || echo '{}' > "$PREF_FILE"
}
# -----------------------------------------------------------------------
# --check <question_id>
# -----------------------------------------------------------------------
# Emits ASK_NORMALLY or AUTO_DECIDE on the first stdout line (callers key
# off that line); an explanatory NOTE: line may follow. One-way doors always
# ASK_NORMALLY regardless of stored preference — safety beats preference.
# A missing/empty question_id short-circuits to ASK_NORMALLY without
# touching the preference file.
do_check() {
local QID="${1:-}"
if [ -z "$QID" ]; then
echo "ASK_NORMALLY"
return 0
fi
ensure_file
# one-way-doors.ts is project TypeScript — bun must run from the repo root.
cd "$ROOT_DIR"
PREF_FILE_PATH="$PREF_FILE" QID="$QID" bun -e "
import('./scripts/one-way-doors.ts').then((oneway) => {
const fs = require('fs');
const qid = process.env.QID;
const prefs = JSON.parse(fs.readFileSync(process.env.PREF_FILE_PATH, 'utf-8'));
const pref = prefs[qid];
// Always check one-way status first — safety overrides preferences.
const oneWay = oneway.isOneWayDoor({ question_id: qid });
if (oneWay) {
console.log('ASK_NORMALLY');
if (pref === 'never-ask') {
console.log('NOTE: one-way door overrides your never-ask preference for safety.');
}
return;
}
switch (pref) {
case 'never-ask':
console.log('AUTO_DECIDE');
break;
case 'ask-only-for-one-way':
// Not one-way (we checked above) — auto-decide this two-way question.
console.log('AUTO_DECIDE');
break;
case 'always-ask':
case undefined:
case null:
console.log('ASK_NORMALLY');
break;
default:
console.log('ASK_NORMALLY');
console.log('NOTE: unknown preference value: ' + pref);
}
}).catch(err => { console.error('check:', err.message); process.exit(1); });
"
}
# -----------------------------------------------------------------------
# --write '{...}' (with user-origin gate)
#
# Persist one preference from a JSON payload. Required fields:
#   question_id  kebab-case id ([a-z0-9-]+), <=64 chars
#   preference   always-ask | never-ask | ask-only-for-one-way
#   source       plan-tune | inline-user — the user-origin gate; tool/file
#                provenance is rejected (exit 2) as profile-poisoning defense
# Optional:
#   free_text    sanitized note (<=300 chars, newlines collapsed,
#                injection-pattern screened)
# On success the preference is written and a 'preference-set' record is
# appended to the events log for audit + derivation.
# -----------------------------------------------------------------------
do_write() {
local INPUT="${1:-}"
if [ -z "$INPUT" ]; then
echo "gstack-question-preference: --write requires a JSON payload" >&2
exit 1
fi
ensure_file
local TMPERR
TMPERR=$(mktemp)
# Use function-local cleanup via RETURN trap so variable lookup only happens
# while the function is on the stack (avoids EXIT-trap unbound-var race).
trap "rm -f '$TMPERR'" RETURN
set +e
local RESULT
RESULT=$(printf '%s' "$INPUT" | PREF_FILE_PATH="$PREF_FILE" EVENT_FILE_PATH="$EVENT_FILE" bun -e "
const fs = require('fs');
const raw = await Bun.stdin.text();
let j;
try { j = JSON.parse(raw); } catch { process.stderr.write('gstack-question-preference: invalid JSON\n'); process.exit(1); }
// Required: question_id (kebab-case, <=64)
if (!j.question_id || !/^[a-z0-9-]+\$/.test(j.question_id) || j.question_id.length > 64) {
process.stderr.write('gstack-question-preference: invalid question_id\n');
process.exit(1);
}
// Required: preference
const ALLOWED_PREFS = ['always-ask', 'never-ask', 'ask-only-for-one-way'];
if (!ALLOWED_PREFS.includes(j.preference)) {
process.stderr.write('gstack-question-preference: invalid preference (must be one of: ' + ALLOWED_PREFS.join(', ') + ')\n');
process.exit(1);
}
// user-origin gate — REQUIRED on every write.
// See docs/designs/PLAN_TUNING_V0.md §Security model
const ALLOWED_SOURCES = ['plan-tune', 'inline-user'];
const REJECTED_SOURCES = ['inline-tool-output', 'inline-file', 'inline-file-content', 'inline-unknown'];
if (!j.source) {
process.stderr.write('gstack-question-preference: source field required (one of: ' + ALLOWED_SOURCES.join(', ') + ')\n');
process.exit(1);
}
if (REJECTED_SOURCES.includes(j.source)) {
process.stderr.write('gstack-question-preference: rejected — source \"' + j.source + '\" is not user-originated (profile poisoning defense)\n');
process.exit(2);
}
if (!ALLOWED_SOURCES.includes(j.source)) {
process.stderr.write('gstack-question-preference: invalid source \"' + j.source + '\"; allowed: ' + ALLOWED_SOURCES.join(', ') + '\n');
process.exit(1);
}
// Optional free_text — sanitize (no injection patterns, no newlines, <=300 chars)
if (j.free_text !== undefined) {
if (typeof j.free_text !== 'string') {
process.stderr.write('gstack-question-preference: free_text must be string\n');
process.exit(1);
}
if (j.free_text.length > 300) j.free_text = j.free_text.slice(0, 300);
j.free_text = j.free_text.replace(/\n+/g, ' ');
const INJECTION_PATTERNS = [
/ignore\s+(all\s+)?previous\s+(instructions|context|rules)/i,
/you\s+are\s+now\s+/i,
/override[:\s]/i,
/\bsystem\s*:/i,
/\bassistant\s*:/i,
/do\s+not\s+(report|flag|mention)/i,
];
for (const pat of INJECTION_PATTERNS) {
if (pat.test(j.free_text)) {
process.stderr.write('gstack-question-preference: free_text contains injection-like content, rejected\n');
process.exit(1);
}
}
}
// Write to preferences file
const prefs = JSON.parse(fs.readFileSync(process.env.PREF_FILE_PATH, 'utf-8'));
prefs[j.question_id] = j.preference;
fs.writeFileSync(process.env.PREF_FILE_PATH, JSON.stringify(prefs, null, 2));
// Also append a record to question-events.jsonl for audit + derivation.
const evt = {
ts: new Date().toISOString(),
event_type: 'preference-set',
question_id: j.question_id,
preference: j.preference,
source: j.source,
...(j.free_text ? { free_text: j.free_text } : {}),
};
fs.appendFileSync(process.env.EVENT_FILE_PATH, JSON.stringify(evt) + '\n');
console.log('OK: ' + j.question_id + ' → ' + j.preference + ' (source: ' + j.source + ')');
" 2>"$TMPERR")
local RC=$?
set -e
if [ $RC -ne 0 ]; then
cat "$TMPERR" >&2
# Bash RETURN traps fire on 'return', not on 'exit' — without this
# explicit removal, the error path leaks the mktemp file.
rm -f "$TMPERR"
exit $RC
fi
echo "$RESULT"
}
# -----------------------------------------------------------------------
# --read
#
# Dump the raw preferences JSON to stdout (creating the file first if it
# does not exist yet).
# -----------------------------------------------------------------------
do_read() {
ensure_file
cat < "$PREF_FILE"
}
# -----------------------------------------------------------------------
# --clear [<id>]
#
# With no argument, resets the entire preferences file to '{}'.
# With an id, removes just that one entry ('NOOP: ...' when absent).
# -----------------------------------------------------------------------
do_clear() {
local QID="${1:-}"
ensure_file
if [ -z "$QID" ]; then
echo '{}' > "$PREF_FILE"
echo "OK: cleared all preferences"
else
PREF_FILE_PATH="$PREF_FILE" QID="$QID" bun -e "
const fs = require('fs');
const prefs = JSON.parse(fs.readFileSync(process.env.PREF_FILE_PATH, 'utf-8'));
// Own-property check: an id like 'constructor' resolves through
// Object.prototype under '!== undefined' and would falsely report
// 'OK: cleared' for an entry that was never set.
if (Object.hasOwn(prefs, process.env.QID)) {
delete prefs[process.env.QID];
fs.writeFileSync(process.env.PREF_FILE_PATH, JSON.stringify(prefs, null, 2));
console.log('OK: cleared ' + process.env.QID);
} else {
console.log('NOOP: no preference set for ' + process.env.QID);
}
"
fi
}
# -----------------------------------------------------------------------
# --stats
#
# Print a tally of stored preferences, one KEY: count line per bucket;
# unrecognized values (possible from hand-edited files) land in OTHER.
# -----------------------------------------------------------------------
do_stats() {
ensure_file
# Feed the prefs file straight to bun's stdin (no 'cat |' pipe needed).
bun -e "
const prefs = JSON.parse(await Bun.stdin.text());
const entries = Object.entries(prefs);
const counts = { 'always-ask': 0, 'never-ask': 0, 'ask-only-for-one-way': 0, other: 0 };
for (const [, v] of entries) {
// Own-property check: a hand-edited value such as 'toString' would
// otherwise hit Object.prototype, increment a function to NaN, and
// never be tallied under 'other'.
if (Object.hasOwn(counts, v)) counts[v]++;
else counts.other++;
}
console.log('TOTAL: ' + entries.length);
console.log('ALWAYS_ASK: ' + counts['always-ask']);
console.log('NEVER_ASK: ' + counts['never-ask']);
console.log('ASK_ONLY_ONE_WAY: ' + counts['ask-only-for-one-way']);
if (counts.other) console.log('OTHER: ' + counts.other);
" < "$PREF_FILE"
}
# Subcommand dispatch. $CMD holds the first CLI argument; "$@" carries the
# subcommand's own arguments (presumably $CMD was shifted off above this
# window — verify against the argument-parsing section). No subcommand
# defaults to --read.
case "$CMD" in
--check) do_check "$@" ;;
--write) do_write "$@" ;;
--read|"") do_read ;;
--clear) do_clear "$@" ;;
--stats) do_stats ;;
# Self-documenting help: print this script's own header comment — every
# line up to the 'set -euo' line — with the leading '# ' stripped.
--help|-h) sed -n '1,/^set -euo/p' "$0" | sed 's|^# \?||' ;;
*)
echo "gstack-question-preference: unknown subcommand '$CMD'" >&2
exit 1
;;
esac