Align error logging with admin dashboard field conventions

All warn/error log calls now use field names the dashboard's decorate()
function recognizes: `code` for the error code pill, `httpStatus` for the
status badge and severity bucket, `url` for the sidebar link, and
`repoId` for the repository link.

Key changes:
- Streamer errors surface code, httpStatus, url, and nested err in the Raw tab
- Nested `{ err: serializeError(e) }` replaced with spread pattern so
  error fields (name, message, status) appear at the top level
- Raw Error objects in catch blocks now go through serializeError()
- Rate limit, token, and PR 404 warnings include code + httpStatus
- Dashboard stack walker traverses both `cause` and `err` chains
- Dashboard Raw tab renders repoId, filePath, upstream*, err, and cause
- trimRawArg recursively trims stacks in nested err/cause chains
- clampPayload strips heavy nested fields before falling back to the
  truncated placeholder, preserving flat diagnostic fields
This commit is contained in:
tdurieux
2026-05-07 05:54:18 +03:00
parent b8cfe293ea
commit 9403f15ac3
8 changed files with 63 additions and 18 deletions
+15 -6
View File
@@ -984,13 +984,16 @@ angular
e._method = detail.method || null;
e._repoId = detail.repoId || detail.detail || null;
e._detail = detail.detail && detail.detail !== e._repoId ? detail.detail : null;
// Walk into `cause` to surface the deepest stack — for unhandled
// errors the inner cause is usually the actual JS error frame.
// Walk into `cause` (and `err` for streamer-style entries) to
// surface the deepest stack.
let s = typeof detail.stack === "string" ? detail.stack : null;
let c = detail.cause;
while (!s && c && typeof c === "object") {
if (typeof c.stack === "string") s = c.stack;
c = c.cause;
var roots = [detail.cause, detail.err].filter(Boolean);
for (var ri = 0; !s && ri < roots.length; ri++) {
var c = roots[ri];
while (!s && c && typeof c === "object") {
if (typeof c.stack === "string") s = c.stack;
c = c.cause;
}
}
e._stack = s;
} else {
@@ -1037,7 +1040,13 @@ angular
}
}
push("detail", detailValue);
push("repoId", detail && detail.repoId);
push("filePath", detail && detail.filePath);
push("upstreamStatus", detail && detail.upstreamStatus);
push("upstreamBody", detail && detail.upstreamBody);
push("url", entry._url);
push("err", detail && detail.err);
push("cause", detail && !detail.err && detail.cause);
push("ts", entry.ts);
if (!fields.length) return JSON.stringify(entry, null, 2);
const keyW = fields.reduce((w, f) => Math.max(w, f[0].length), 0);
+5
View File
@@ -69,10 +69,15 @@ function streamerErrorToAnonymous(
}
logger.warn("streamer fetch failed", {
code: errCode,
httpStatus,
repoId: context.repoId,
filePath: context.filePath,
upstreamStatus,
upstreamBody: upstreamBody?.slice(0, 500),
url: config.STREAMER_ENTRYPOINT
? join(config.STREAMER_ENTRYPOINT, "api")
: undefined,
err: serializeError(err),
});
+1 -1
View File
@@ -63,7 +63,7 @@ export default class Gist {
try {
return this._model.source.accessToken;
} catch {
logger.warn("invalid token", { gistId: this._model.source.gistId });
logger.warn("invalid token", { code: "invalid_token", httpStatus: 401, gistId: this._model.source.gistId });
}
}
return config.GITHUB_TOKEN;
+6 -1
View File
@@ -62,6 +62,8 @@ export function octokit(token: string) {
throttle: {
onRateLimit: (retryAfter, options, _o, retryCount) => {
logger.warn("github primary rate limit hit", {
code: "github_rate_limit",
httpStatus: 429,
method: options.method,
url: options.url,
retryAfter,
@@ -73,6 +75,8 @@ export function octokit(token: string) {
},
onSecondaryRateLimit: (retryAfter, options, _o, retryCount) => {
logger.warn("github secondary rate limit hit", {
code: "github_secondary_rate_limit",
httpStatus: 429,
method: options.method,
url: options.url,
retryAfter,
@@ -193,8 +197,9 @@ export async function getToken(repository: Repository) {
}
}
logger.warn("token refresh failed; falling back", {
code: "token_refresh_failed",
httpStatus: res.status,
username: repository.owner.model.username,
status: res.status,
});
// fall through to the checkToken path / config.GITHUB_TOKEN
}
+6
View File
@@ -43,6 +43,8 @@ export default class PullRequest {
return this._model.source.accessToken;
} catch {
logger.warn("invalid token", {
code: "invalid_token",
httpStatus: 401,
pullRequestId: this._model.source.pullRequestId,
});
}
@@ -92,6 +94,8 @@ export default class PullRequest {
}> => {
if ((err as { status?: number }).status === 404) {
logger.warn("PR comments 404, continuing without them", {
code: "pr_comments_not_found",
httpStatus: 404,
pr: `${owner}/${repo}#${pull_number}`,
});
return [];
@@ -104,6 +108,8 @@ export default class PullRequest {
).catch((err) => {
if (err instanceof HTTPError && err.response.statusCode === 404) {
logger.warn("PR diff 404, continuing without it", {
code: "pr_diff_not_found",
httpStatus: 404,
pr: `${owner}/${repo}#${pull_number}`,
});
return { body: "" };
+27 -7
View File
@@ -101,13 +101,20 @@ function trimStack(s: unknown): unknown {
}
return s;
}
function trimErrorLike(o: Record<string, unknown>): Record<string, unknown> {
const out = { ...o };
if (typeof out.stack === "string") out.stack = trimStack(out.stack);
if (out.cause && typeof out.cause === "object") {
out.cause = trimErrorLike(out.cause as Record<string, unknown>);
}
if (out.err && typeof out.err === "object") {
out.err = trimErrorLike(out.err as Record<string, unknown>);
}
return out;
}
function trimRawArg(a: unknown): unknown {
if (!a || typeof a !== "object") return a;
const o = a as Record<string, unknown>;
if (typeof o.stack === "string") {
return { ...o, stack: trimStack(o.stack) };
}
return o;
return trimErrorLike(a as Record<string, unknown>);
}
function clampPayload(entry: {
@@ -127,8 +134,21 @@ function clampPayload(entry: {
entry.raw = entry.raw.slice(0, 1);
s = JSON.stringify(entry);
if (s.length <= MAX_PAYLOAD_BYTES) return s;
// Step 2: replace the payload with a placeholder so the entry still shows
// up in the list but doesn't blow the cap.
// Step 2: strip heavy nested fields (err, cause, upstreamBody) but keep
// the flat diagnostic fields (code, httpStatus, repoId, url, …).
const first = entry.raw[0];
if (first && typeof first === "object" && !Array.isArray(first)) {
const slim = { ...(first as Record<string, unknown>) };
delete slim.err;
delete slim.cause;
delete slim.stack;
delete slim.upstreamBody;
entry.raw = [slim];
s = JSON.stringify(entry);
if (s.length <= MAX_PAYLOAD_BYTES) return s;
}
// Step 3: nothing left to trim — replace with a placeholder so the entry
// still shows up in the list.
entry.raw = [{ truncated: true, originalBytes: s.length }];
return JSON.stringify(entry);
}
+1 -1
View File
@@ -132,7 +132,7 @@ export default class S3Storage extends StorageBase {
try {
res.status(500).json({ error: "file_not_found" });
} catch (err) {
logger.error("send failed", { path, err: serializeError(err) });
logger.error("send failed", { ...serializeError(err), filePath: path });
}
}
}
+2 -2
View File
@@ -269,10 +269,10 @@ export default async function start() {
app.listen(config.PORT);
logger.info("server started", { port: config.PORT });
ensureTodaySnapshot().catch((err) =>
logger.error("ensureTodaySnapshot failed", { err })
logger.error("ensureTodaySnapshot failed", serializeError(err))
);
recoverStuckPreparing().catch((err) =>
logger.error("recoverStuckPreparing failed", { err })
logger.error("recoverStuckPreparing failed", serializeError(err))
);
}