fix(discover): parse Codex sessions with large session_meta (>4KB) (#798)
Merged via the PR triage plan. Fixes Codex session discovery for CLI v0.117+, where session_meta lines reach 15KB+ and overflow the old 4KB read buffer. Follow-up: add a >128KB regression test.
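The shape of the fix, as a minimal standalone sketch (assumes Node/Bun's synchronous fs API; readFirstJsonlLine is a hypothetical helper name for illustration, the discover script inlines this logic in scanCodex): read up to 128KB from the start of the rollout .jsonl file, keep only the first line, and parse it as the session_meta record.

    import { openSync, readSync, closeSync } from "fs";

    function readFirstJsonlLine(filePath: string): unknown | null {
      // Hypothetical helper; the actual change inlines this in scanCodex().
      const fd = openSync(filePath, "r");
      const buf = Buffer.alloc(131072); // 128KB: covers ~15KB session_meta lines with headroom
      const bytesRead = readSync(fd, buf, 0, 131072, 0);
      closeSync(fd);
      const firstLine = buf.toString("utf-8", 0, bytesRead).split("\n")[0];
      if (!firstLine) return null;
      try {
        return JSON.parse(firstLine); // session_meta record; payload.cwd identifies the repo
      } catch {
        return null; // first line still exceeds the buffer, or is malformed JSON
      }
    }

Against a rollout file whose first line is ~15KB, the old 4KB read returns a truncated string and JSON.parse throws; with a 128KB read the full line fits and parsing succeeds.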
@@ -291,7 +291,7 @@ function extractCwdFromJsonl(filePath: string): string | null {
 }
 
 function scanCodex(since: Date): Session[] {
-  const sessionsDir = join(homedir(), ".codex", "sessions");
+  const sessionsDir = process.env.CODEX_SESSIONS_DIR || join(homedir(), ".codex", "sessions");
   if (!existsSync(sessionsDir)) return [];
 
   const sessions: Session[] = [];
@@ -326,11 +326,14 @@ function scanCodex(since: Date): Session[] {
       continue;
     }
 
-    // Read first line for session_meta (only first 4KB)
+    // Codex session_meta lines embed the full system prompt in
+    // base_instructions (~15KB as of CLI v0.117+). A 4KB buffer
+    // truncates the line and JSON.parse fails. 128KB covers current
+    // sizes with room for growth.
     try {
       const fd = openSync(filePath, "r");
-      const buf = Buffer.alloc(4096);
-      const bytesRead = readSync(fd, buf, 0, 4096, 0);
+      const buf = Buffer.alloc(131072);
+      const bytesRead = readSync(fd, buf, 0, 131072, 0);
      closeSync(fd);
      const firstLine = buf.toString("utf-8", 0, bytesRead).split("\n")[0];
      if (!firstLine) continue;
@@ -131,6 +131,127 @@ describe("gstack-global-discover", () => {
     });
   });
 
+  describe("codex large session_meta parsing", () => {
+    let codexDir: string;
+    let tmpDir: string;
+
+    beforeEach(() => {
+      tmpDir = mkdtempSync(join(tmpdir(), "gstack-codex-test-"));
+      // Build a realistic ~/.codex/sessions/YYYY/MM/DD structure
+      const now = new Date();
+      const y = now.getFullYear().toString();
+      const m = String(now.getMonth() + 1).padStart(2, "0");
+      const d = String(now.getDate()).padStart(2, "0");
+      codexDir = join(tmpDir, "codex-home", "sessions", y, m, d);
+      mkdirSync(codexDir, { recursive: true });
+    });
+
+    afterEach(() => {
+      rmSync(tmpDir, { recursive: true, force: true });
+    });
+
+    function writeCodexSession(
+      dir: string,
+      cwd: string,
+      baseInstructionsSize: number
+    ): string {
+      const padding = "x".repeat(baseInstructionsSize);
+      const line = JSON.stringify({
+        timestamp: new Date().toISOString(),
+        type: "session_meta",
+        payload: {
+          id: `test-${Date.now()}`,
+          timestamp: new Date().toISOString(),
+          cwd,
+          originator: "codex_exec",
+          cli_version: "0.118.0",
+          source: "exec",
+          model_provider: "openai",
+          base_instructions: { text: padding },
+        },
+      });
+      const name = `rollout-${new Date().toISOString().replace(/[:.]/g, "-")}-${Math.random().toString(36).slice(2)}.jsonl`;
+      const filePath = join(dir, name);
+      writeFileSync(filePath, line + "\n");
+      return filePath;
+    }
+
+    test("discovers codex sessions with >4KB session_meta via CLI", () => {
+      // Create a git repo as the session target
+      const repoDir = join(tmpDir, "fake-repo");
+      mkdirSync(repoDir);
+      spawnSync("git", ["init"], { cwd: repoDir, stdio: "pipe" });
+      spawnSync("git", ["commit", "--allow-empty", "-m", "init"], {
+        cwd: repoDir,
+        stdio: "pipe",
+      });
+
+      // Write a session with a 20KB first line (simulates Codex v0.117+)
+      writeCodexSession(codexDir, repoDir, 20000);
+
+      // Run discovery with CODEX_SESSIONS_DIR override
+      const result = spawnSync(
+        "bun",
+        ["run", scriptPath, "--since", "1h", "--format", "json"],
+        {
+          encoding: "utf-8",
+          timeout: 30000,
+          env: {
+            ...process.env,
+            CODEX_SESSIONS_DIR: join(tmpDir, "codex-home", "sessions"),
+          },
+        }
+      );
+
+      expect(result.status).toBe(0);
+      const json = JSON.parse(result.stdout);
+      expect(json.tools.codex.total_sessions).toBeGreaterThanOrEqual(1);
+    });
+
+    test("4KB buffer truncates session_meta, 128KB buffer parses it", () => {
+      const padding = "x".repeat(20000);
+      const sessionMeta = JSON.stringify({
+        timestamp: new Date().toISOString(),
+        type: "session_meta",
+        payload: {
+          id: "test-id",
+          timestamp: new Date().toISOString(),
+          cwd: "/tmp/test-repo",
+          originator: "codex_exec",
+          cli_version: "0.118.0",
+          source: "exec",
+          model_provider: "openai",
+          base_instructions: { text: padding },
+        },
+      });
+
+      expect(sessionMeta.length).toBeGreaterThan(4096);
+
+      const filePath = join(codexDir, "test.jsonl");
+      writeFileSync(filePath, sessionMeta + "\n");
+
+      // 4KB buffer: JSON.parse fails (the old bug)
+      const { openSync, readSync, closeSync } = require("fs");
+      const fd4k = openSync(filePath, "r");
+      const buf4k = Buffer.alloc(4096);
+      readSync(fd4k, buf4k, 0, 4096, 0);
+      closeSync(fd4k);
+      expect(() =>
+        JSON.parse(buf4k.toString("utf-8").split("\n")[0])
+      ).toThrow();
+
+      // 128KB buffer: JSON.parse succeeds (the fix)
+      const fd128k = openSync(filePath, "r");
+      const buf128k = Buffer.alloc(131072);
+      const bytesRead = readSync(fd128k, buf128k, 0, 131072, 0);
+      closeSync(fd128k);
+      const firstLine = buf128k.toString("utf-8", 0, bytesRead).split("\n")[0];
+      const meta = JSON.parse(firstLine);
+      expect(meta.type).toBe("session_meta");
+      expect(meta.payload.cwd).toBe("/tmp/test-repo");
+    });
+  });
+
   describe("discovery output structure", () => {
     test("repos have required fields", () => {
       const result = spawnSync(