mirror of
https://github.com/garrytan/gstack.git
synced 2026-05-02 03:35:09 +02:00
fix: close redundant PRs + friendly error on all design commands (v0.15.8.1) (#817)
* fix: friendly OpenAI org error on all design commands. Previously only generate.ts showed a user-friendly message when the OpenAI org wasn't verified. Now evolve, iterate, variants, and check all detect the 403 + "organization must be verified" pattern and show a clear message with the correct verification URL.

* test: regression test for >128KB Codex session_meta. Documents the current 128KB buffer limitation. When Codex embeds session_meta beyond 128KB, this test will fail, signaling the need for a streaming parse or a larger buffer.

* chore: bump version and changelog (v0.15.8.1)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -250,6 +250,44 @@ describe("gstack-global-discover", () => {
|
||||
expect(meta.type).toBe("session_meta");
|
||||
expect(meta.payload.cwd).toBe("/tmp/test-repo");
|
||||
});
|
||||
|
||||
// Regression guard documenting the current 128KB sniff-buffer limitation.
// The discovery code reads at most 131072 bytes (128KB) when looking for the
// session_meta line. If Codex ever embeds >128KB in session_meta, this test
// will start failing, signaling that the buffer must grow or be replaced
// with a streaming parse.
test("regression: session_meta beyond 128KB still needs streaming parse", () => {
  const padding = "x".repeat(140000); // ~140KB payload (ASCII, so bytes === chars)
  const sessionMeta = JSON.stringify({
    timestamp: new Date().toISOString(),
    type: "session_meta",
    payload: {
      id: "test-large",
      timestamp: new Date().toISOString(),
      cwd: "/tmp/large-test",
      originator: "codex_exec",
      cli_version: "0.200.0",
      source: "exec",
      model_provider: "openai",
      base_instructions: { text: padding },
    },
  });

  // Sanity check: the serialized line really exceeds the 128KB buffer.
  expect(sessionMeta.length).toBeGreaterThan(131072);

  const filePath = join(codexDir, "large-test.jsonl");
  writeFileSync(filePath, sessionMeta + "\n");

  // Simulate the 128KB buffer read: JSON.parse FAILS for >128KB lines
  // because the line is truncated mid-document (current limitation).
  const { openSync, readSync, closeSync } = require("fs");
  const fd = openSync(filePath, "r");
  const buf = Buffer.alloc(131072);
  try {
    readSync(fd, buf, 0, 131072, 0);
  } finally {
    // Always release the descriptor, even if readSync throws; the original
    // code leaked the fd on a read failure.
    closeSync(fd);
  }
  expect(() =>
    JSON.parse(buf.toString("utf-8").split("\n")[0])
  ).toThrow();
  // When this test starts passing (e.g., after implementing streaming parse),
  // update it to verify correct parsing instead of documenting the limitation.
});
|
||||
});
|
||||
|
||||
describe("discovery output structure", () => {
|
||||
|
||||
Reference in New Issue
Block a user