Merge origin/main, resolve CHANGELOG conflict, bump to v0.13.7.0

Main landed v0.13.6.0 (GStack Learns) while this branch had v0.13.6.0
(Community Wave). Resolved by keeping both entries and bumping this
branch to v0.13.7.0. Regenerated SKILL.md files to pick up new learn
skill and apply the find -exec rm fix from this branch.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Garry Tan
2026-03-29 20:03:18 -07:00
49 changed files with 2379 additions and 3 deletions
+30
View File
@@ -0,0 +1,30 @@
#!/usr/bin/env bash
# gstack-learnings-log — append a learning to the project learnings file
# Usage: gstack-learnings-log '{"skill":"review","type":"pitfall","key":"n-plus-one","insight":"...","confidence":8,"source":"observed"}'
#
# Append-only storage. Duplicates (same key+type) are resolved at read time
# by gstack-learnings-search ("latest winner" per key+type).
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
# gstack-slug prints an eval-able assignment that defines SLUG for this repo.
eval "$("$SCRIPT_DIR/gstack-slug" 2>/dev/null)"
GSTACK_HOME="${GSTACK_HOME:-$HOME/.gstack}"
mkdir -p "$GSTACK_HOME/projects/$SLUG"
# Require the JSON argument explicitly; without this check, `set -u` aborts
# on "$1" with an unhelpful "unbound variable" message.
if [ $# -lt 1 ]; then
  echo "gstack-learnings-log: missing JSON argument" >&2
  exit 1
fi
INPUT="$1"
# Validate: input must be parseable JSON
if ! printf '%s' "$INPUT" | bun -e "JSON.parse(await Bun.stdin.text())" 2>/dev/null; then
  echo "gstack-learnings-log: invalid JSON, skipping" >&2
  exit 1
fi
# Inject timestamp if not present. Assign through a temporary so a failed
# rewrite leaves INPUT intact — the previous `INPUT=$(...) || true` clobbered
# INPUT with the empty string on failure and appended a blank line.
if ! printf '%s' "$INPUT" | bun -e "const j=JSON.parse(await Bun.stdin.text()); if(!j.ts) process.exit(1)" 2>/dev/null; then
  if STAMPED=$(printf '%s' "$INPUT" | bun -e "
const j = JSON.parse(await Bun.stdin.text());
j.ts = new Date().toISOString();
console.log(JSON.stringify(j));
" 2>/dev/null); then
    INPUT="$STAMPED"
  fi
fi
# printf instead of echo: echo may interpret backslash escapes in some shells,
# which would corrupt JSON containing sequences like \n or \t.
printf '%s\n' "$INPUT" >> "$GSTACK_HOME/projects/$SLUG/learnings.jsonl"
+131
View File
@@ -0,0 +1,131 @@
#!/usr/bin/env bash
# gstack-learnings-search — read and filter project learnings
# Usage: gstack-learnings-search [--type TYPE] [--query KEYWORD] [--limit N] [--cross-project]
#
# Reads ~/.gstack/projects/$SLUG/learnings.jsonl, applies confidence decay,
# resolves duplicates (latest winner per key+type), and outputs formatted text.
# Exit 0 silently if no learnings file exists.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
eval "$("$SCRIPT_DIR/gstack-slug" 2>/dev/null)"
GSTACK_HOME="${GSTACK_HOME:-$HOME/.gstack}"
TYPE=""
QUERY=""
LIMIT=10
CROSS_PROJECT=false
while [[ $# -gt 0 ]]; do
  case "$1" in
    --type) TYPE="$2"; shift 2 ;;
    --query) QUERY="$2"; shift 2 ;;
    --limit) LIMIT="$2"; shift 2 ;;
    --cross-project) CROSS_PROJECT=true; shift ;;
    *) shift ;;
  esac
done
LEARNINGS_FILE="$GSTACK_HOME/projects/$SLUG/learnings.jsonl"
# Collect files to search, remembering for each whether it is the current
# project ("local") or another one ("cross"). Tagging at collection time
# replaces the old content heuristic (`line.includes(slug)`), which misfired
# whenever an entry's JSON happened to mention the slug and otherwise never
# identified provenance at all.
FILES=()
TAGS=()
if [ -f "$LEARNINGS_FILE" ]; then
  FILES+=("$LEARNINGS_FILE")
  TAGS+=("local")
fi
if [ "$CROSS_PROJECT" = true ]; then
  # Add other projects' learnings (max 5). while-read instead of
  # `for f in $(find ...)` so paths containing spaces are not word-split.
  count=0
  while IFS= read -r f; do
    FILES+=("$f")
    TAGS+=("cross")
    count=$((count + 1))
    [ "$count" -ge 5 ] && break
  done < <(find "$GSTACK_HOME/projects" -name "learnings.jsonl" -not -path "*/$SLUG/*" 2>/dev/null)
fi
if [ ${#FILES[@]} -eq 0 ]; then
  exit 0
fi
# Prefix every JSONL line with "<tag><TAB>" for the JS side. A raw tab can
# never appear inside a valid JSON line (JSON.stringify escapes control
# characters), so the first tab unambiguously separates tag from payload.
# Filters are passed via environment variables — never interpolated into the
# JS source — so quotes/backslashes in --type/--query cannot break or inject
# into the embedded program.
{
  for i in "${!FILES[@]}"; do
    awk -v tag="${TAGS[$i]}" '{print tag "\t" $0}' "${FILES[$i]}" 2>/dev/null || true
  done
} | TYPE="$TYPE" QUERY="$QUERY" LIMIT="$LIMIT" bun -e "
const lines = (await Bun.stdin.text()).trim().split('\n').filter(Boolean);
const now = Date.now();
const type = process.env.TYPE || '';
const query = (process.env.QUERY || '').toLowerCase();
// Non-numeric --limit falls back to 10 instead of becoming a JS syntax error.
const limit = Number.parseInt(process.env.LIMIT, 10) || 10;
const entries = [];
for (const raw of lines) {
  const tab = raw.indexOf('\t');
  if (tab < 0) continue;
  const tag = raw.slice(0, tab);
  const line = raw.slice(tab + 1);
  try {
    const e = JSON.parse(line);
    if (!e.key || !e.type) continue;
    // Apply confidence decay: observed/inferred lose 1pt per 30 days.
    // ?? (not ||) so an explicit confidence of 0 is respected; skip decay
    // when ts is missing — new Date(undefined) would yield NaN and poison
    // the sort below.
    let conf = e.confidence ?? 5;
    if ((e.source === 'observed' || e.source === 'inferred') && e.ts) {
      const days = Math.floor((now - new Date(e.ts).getTime()) / 86400000);
      if (Number.isFinite(days)) conf = Math.max(0, conf - Math.floor(days / 30));
    }
    e._effectiveConfidence = conf;
    e._crossProject = tag === 'cross';
    entries.push(e);
  } catch {}
}
// Dedup: latest winner per key+type
const seen = new Map();
for (const e of entries) {
  const dk = e.key + '|' + e.type;
  const existing = seen.get(dk);
  if (!existing || new Date(e.ts) > new Date(existing.ts)) {
    seen.set(dk, e);
  }
}
let results = Array.from(seen.values());
// Filter by type
if (type) results = results.filter(e => e.type === type);
// Filter by query
if (query) results = results.filter(e =>
  (e.key || '').toLowerCase().includes(query) ||
  (e.insight || '').toLowerCase().includes(query) ||
  (e.files || []).some(f => f.toLowerCase().includes(query))
);
// Sort by effective confidence desc, then recency
results.sort((a, b) => {
  if (b._effectiveConfidence !== a._effectiveConfidence) return b._effectiveConfidence - a._effectiveConfidence;
  return new Date(b.ts).getTime() - new Date(a.ts).getTime();
});
// Limit
results = results.slice(0, limit);
if (results.length === 0) process.exit(0);
// Group by type for sectioned output
const byType = {};
for (const e of results) {
  const t = e.type || 'unknown';
  if (!byType[t]) byType[t] = [];
  byType[t].push(e);
}
// Summary line
const counts = Object.entries(byType).map(([t, arr]) => arr.length + ' ' + t + (arr.length > 1 ? 's' : ''));
console.log('LEARNINGS: ' + results.length + ' loaded (' + counts.join(', ') + ')');
console.log('');
for (const [t, arr] of Object.entries(byType)) {
  console.log('## ' + t.charAt(0).toUpperCase() + t.slice(1) + 's');
  for (const e of arr) {
    const cross = e._crossProject ? ' [cross-project]' : '';
    const files = e.files?.length ? ' (files: ' + e.files.join(', ') + ')' : '';
    console.log('- [' + e.key + '] (confidence: ' + e._effectiveConfidence + '/10, ' + e.source + ', ' + (e.ts || '').split('T')[0] + ')' + cross);
    console.log('  ' + e.insight + files);
  }
  console.log('');
}
" 2>/dev/null || exit 0