error logging improvement, regex fix

This commit is contained in:
tdurieux
2026-05-06 11:09:17 +03:00
parent e34f45522f
commit c2d43164d0
39 changed files with 747 additions and 126 deletions
+1 -1
View File
File diff suppressed because one or more lines are too long
+20 -1
View File
@@ -4702,4 +4702,23 @@ textarea::selection {
}
.file.folder.truncated > a {
color: #d39e00;
}
}
/* Errors admin */
.errors-table .error-when time { font-variant-numeric: tabular-nums; color: #555; cursor: help; }
.errors-table .error-msg-line { display: flex; flex-wrap: wrap; gap: 6px; align-items: baseline; }
.errors-table .error-chip {
display: inline-flex; align-items: center; gap: 4px;
font-size: 0.78rem; padding: 1px 6px; border-radius: 999px;
background: #eef0f3; color: #333; border: 1px solid #dde0e4;
max-width: 36em; overflow: hidden; text-overflow: ellipsis; white-space: nowrap;
}
.errors-table .error-chip .chip-label { color: #777; font-size: 0.72rem; text-transform: uppercase; letter-spacing: 0.03em; }
.errors-table .error-chip.chip-err { background: #fdecec; border-color: #f5c2c2; color: #8a1f1f; }
.errors-table .error-chip.chip-warn { background: #fff5e1; border-color: #f3d9a4; color: #7a4d00; }
.errors-table .error-chip.chip-ok { background: #e9f6ec; border-color: #b8dfc1; color: #1f6b32; }
.errors-table .error-chip.chip-mono .chip-value { font-family: ui-monospace, SFMono-Regular, Menlo, monospace; font-size: 0.78rem; }
.errors-table .pill-module { font-family: ui-monospace, SFMono-Regular, Menlo, monospace; font-size: 0.78rem; background: #eef0f3; color: #333; padding: 1px 6px; border-radius: 4px; }
.errors-table .error-details { margin-top: 6px; }
.errors-table .error-details summary { cursor: pointer; color: #666; font-size: 0.82rem; }
.errors-table .error-details pre { background: #fafafa; border: 1px solid #ececec; border-radius: 4px; padding: 8px; font-size: 0.78rem; max-height: 18em; overflow: auto; }
.errors-table .error-context { color: #888; font-size: 0.78rem; font-style: italic; margin-left: 4px; }
+1
View File
@@ -7,6 +7,7 @@
<a href="/admin/users"><i class="fas fa-users"></i> Users</a>
<a href="/admin/conferences" class="active"><i class="fas fa-chalkboard-teacher"></i> Conferences</a>
<a href="/admin/queues"><i class="fas fa-tasks"></i> Queues</a>
<a href="/admin/errors"><i class="fas fa-bug"></i> Errors</a>
</nav>
<div class="admin-summary">
+77
View File
@@ -0,0 +1,77 @@
<!-- Admin › Errors page. Routed at /admin/errors (templateUrl
     /partials/admin/errors.htm) and driven by errorsAdminController, which
     supplies entries, filtered, modules, available, query, relTime/absTime,
     refreshNow and clearAll on the scope. -->
<div class="container paper-page admin-page">
<div class="paper-crumbs">Admin &nbsp;/&nbsp; <span class="here">Errors</span></div>
<h1 class="paper-page-title">Errors</h1>
<!-- Shared admin navigation; this page marks the Errors tab active. -->
<nav class="admin-nav">
<a href="/admin/"><i class="fas fa-code-branch"></i> Repositories</a>
<a href="/admin/users"><i class="fas fa-users"></i> Users</a>
<a href="/admin/conferences"><i class="fas fa-chalkboard-teacher"></i> Conferences</a>
<a href="/admin/queues"><i class="fas fa-tasks"></i> Queues</a>
<a href="/admin/errors" class="active"><i class="fas fa-bug"></i> Errors</a>
</nav>
<!-- Counts: entries currently matching the filters vs. total captured.
     "available" is false when the server reports the redis sink is down. -->
<div class="admin-summary">
<span class="summary-pill error">{{filtered.length}} shown</span>
<span class="summary-pill">{{entries.length}} captured</span>
<span class="summary-pill" ng-if="!available">redis sink unavailable</span>
</div>
<!-- Client-side filters (query.search / query.module) plus auto-refresh
     toggle, manual refresh, and a destructive clear-all action. -->
<form class="w-100 admin-filter-toolbar" aria-label="Error filters">
<div class="admin-filter-row">
<div class="search-wrap">
<input type="search" class="form-control" placeholder="Search message, module, or url…" ng-model="query.search" autocomplete="off" />
</div>
<span class="admin-filter-inline">
<label>Module</label>
<select class="form-control form-control-sm" ng-model="query.module">
<option value="">Any</option>
<option ng-repeat="m in modules" value="{{m}}">{{m}}</option>
</select>
</span>
<span class="admin-filter-spacer"></span>
<label class="admin-filter-inline" style="cursor:pointer;">
<input type="checkbox" ng-model="query.autoRefresh" />
Auto-refresh
</label>
<button class="btn btn-sm" type="button" ng-click="refreshNow()" title="Refresh now"><i class="fas fa-sync"></i></button>
<button class="btn btn-sm btn-danger" type="button" ng-click="clearAll()" title="Clear all errors"><i class="fas fa-trash"></i> Clear</button>
</div>
</form>
<div ng-if="!filtered.length" class="admin-empty">No errors captured.</div>
<table class="table errors-table" ng-if="filtered.length">
<thead>
<tr>
<th style="width: 9em;">When</th>
<th style="width: 9em;">Module</th>
<th>Message</th>
</tr>
</thead>
<tbody>
<tr ng-repeat="e in filtered track by $index">
<!-- Relative time in the cell, absolute timestamp in the tooltip. -->
<td class="error-when">
<time title="{{absTime(e.ts)}}">{{relTime(e.ts)}}</time>
</td>
<td><span class="pill pill-module">{{e.module}}</span></td>
<td class="error-msg">
<!-- displayMessage/displayContext and _chips are derived once per
     entry by the controller's decorate() — not computed in-template. -->
<div class="error-msg-line">
<strong>{{e.displayMessage}}</strong>
<span class="error-context" ng-if="e.displayContext && e.displayContext !== e.displayMessage">{{e.displayContext}}</span>
<span class="error-chip"
ng-repeat="c in e._chips track by $index"
ng-class="{'chip-err': c.kind === 'err', 'chip-warn': c.kind === 'warn', 'chip-ok': c.kind === 'ok', 'chip-mono': c.mono}"
title="{{c.label}}: {{c.value}}">
<span class="chip-label">{{c.label}}</span>
<span class="chip-value">{{c.value}}</span>
</span>
</div>
<!-- Collapsed raw JSON payload of the log call's extra arguments. -->
<details ng-if="e._detailJson" class="error-details">
<summary>raw</summary>
<pre>{{e._detailJson}}</pre>
</details>
</td>
</tr>
</tbody>
</table>
</div>
+1
View File
@@ -7,6 +7,7 @@
<a href="/admin/users"><i class="fas fa-users"></i> Users</a>
<a href="/admin/conferences"><i class="fas fa-chalkboard-teacher"></i> Conferences</a>
<a href="/admin/queues" class="active"><i class="fas fa-tasks"></i> Queues</a>
<a href="/admin/errors"><i class="fas fa-bug"></i> Errors</a>
</nav>
<div class="admin-summary">
+1
View File
@@ -7,6 +7,7 @@
<a href="/admin/users"><i class="fas fa-users"></i> Users</a>
<a href="/admin/conferences"><i class="fas fa-chalkboard-teacher"></i> Conferences</a>
<a href="/admin/queues"><i class="fas fa-tasks"></i> Queues</a>
<a href="/admin/errors"><i class="fas fa-bug"></i> Errors</a>
</nav>
<div class="admin-summary">
+1
View File
@@ -7,6 +7,7 @@
<a href="/admin/users" class="active"><i class="fas fa-users"></i> Users</a>
<a href="/admin/conferences"><i class="fas fa-chalkboard-teacher"></i> Conferences</a>
<a href="/admin/queues"><i class="fas fa-tasks"></i> Queues</a>
<a href="/admin/errors"><i class="fas fa-bug"></i> Errors</a>
</nav>
<div class="user-detail-card" ng-if="userInfo">
+1
View File
@@ -7,6 +7,7 @@
<a href="/admin/users" class="active"><i class="fas fa-users"></i> Users</a>
<a href="/admin/conferences"><i class="fas fa-chalkboard-teacher"></i> Conferences</a>
<a href="/admin/queues"><i class="fas fa-tasks"></i> Queues</a>
<a href="/admin/errors"><i class="fas fa-bug"></i> Errors</a>
</nav>
<div class="admin-summary">
+7
View File
@@ -9,6 +9,13 @@
<h1 class="paper-page-title pr-title">
<span ng-if="details.description" ng-bind="details.description"></span>
<span ng-if="!details.description" class="text-muted">Untitled gist</span>
<a
ng-if="options.isAdmin || options.isOwner"
ng-href="/gist-anonymize/{{gistId}}"
class="btn btn-sm"
aria-label="Edit"
><i class="far fa-edit"></i><span class="d-none d-md-inline"> Edit</span></a
>
</h1>
<div class="pr-header-meta">
<span class="paper-pill" ng-class="{'good': details.isPublic, 'warn': !details.isPublic}">
+7
View File
@@ -9,6 +9,13 @@
<h1 class="paper-page-title pr-title">
<span ng-if="details.title" ng-bind="details.title"></span>
<span ng-if="!details.title" class="text-muted">Untitled pull request</span>
<a
ng-if="options.isAdmin || options.isOwner"
ng-href="/pull-request-anonymize/{{pullRequestId}}"
class="btn btn-sm"
aria-label="Edit"
><i class="far fa-edit"></i><span class="d-none d-md-inline"> Edit</span></a
>
</h1>
<div class="pr-header-meta">
<span class="paper-pill" ng-class="{'good': details.merged, 'warn': details.state == 'open', 'bad': details.state == 'closed' && !details.merged}">
+146
View File
@@ -849,4 +849,150 @@ angular
);
$scope.$watch("query.state", getQueues);
},
])
.controller("errorsAdminController", [
"$scope",
"$http",
"$location",
"$interval",
function ($scope, $http, $location, $interval) {
$scope.$watch("user.status", () => {
if ($scope.user == null) {
$location.url("/");
}
});
if ($scope.user == null) {
$location.url("/");
}
$scope.entries = [];
$scope.filtered = [];
$scope.modules = [];
$scope.available = true;
$scope.query = {
search: "",
module: "",
autoRefresh: true,
};
$scope.relTime = (iso) => {
if (!iso) return "";
const t = new Date(iso).getTime();
if (isNaN(t)) return iso;
const diff = Math.max(0, Date.now() - t);
const s = Math.floor(diff / 1000);
if (s < 5) return "just now";
if (s < 60) return `${s}s ago`;
const m = Math.floor(s / 60);
if (m < 60) return `${m}m ago`;
const h = Math.floor(m / 60);
if (h < 24) return `${h}h ago`;
const d = Math.floor(h / 24);
if (d < 7) return `${d}d ago`;
return new Date(iso).toLocaleDateString();
};
$scope.absTime = (iso) => {
if (!iso) return "";
const d = new Date(iso);
if (isNaN(d.getTime())) return iso;
return d.toLocaleString();
};
// Decorate each entry once with derived display fields (chips + json).
// Returning a fresh array from a template-bound function each digest
// cycle triggers Angular's $rootScope:infdig — so we precompute on load.
function statusKind(s) {
const n = parseInt(s, 10);
if (!n) return "";
if (n >= 500) return "err";
if (n >= 400) return "warn";
return "ok";
}
// snake_case identifier looking like an error key (e.g. "repo_not_found").
const errorKeyRe = /^[a-z][a-z0-9]*(?:_[a-z0-9]+)+$/;
function decorate(e) {
const chips = [];
const detail = (e.raw || []).find(
(a) => a && typeof a === "object" && !Array.isArray(a)
);
if (detail) {
// Prefer the structured error key (e.g. "pull_request_not_found")
// over the generic logger message ("anonymous error", "http error").
if (detail.message && errorKeyRe.test(detail.message)) {
e.displayMessage = detail.message;
e.displayContext = e.message;
} else if (detail.code && errorKeyRe.test(String(detail.code))) {
e.displayMessage = String(detail.code);
e.displayContext = e.message;
} else {
e.displayMessage = e.message;
}
if (detail.httpStatus) chips.push({ label: "status", value: detail.httpStatus, kind: statusKind(detail.httpStatus) });
else if (detail.status) chips.push({ label: "status", value: detail.status, kind: statusKind(detail.status) });
if (detail.method) chips.push({ label: "method", value: detail.method });
if (detail.url) chips.push({ label: "url", value: detail.url, mono: true });
if (detail.repoId) chips.push({ label: "repo", value: detail.repoId, mono: true });
if (detail.code && detail.code !== detail.message && detail.code !== e.displayMessage) {
chips.push({ label: "code", value: detail.code });
}
} else {
e.displayMessage = e.message;
}
const tail = (e.raw || []).slice(1);
const detailJson = !tail.length
? ""
: tail.length === 1
? JSON.stringify(tail[0], null, 2)
: JSON.stringify(tail, null, 2);
e._chips = chips;
e._detailJson = detailJson;
return e;
}
function applyFilter() {
const q = ($scope.query.search || "").toLowerCase();
const mod = $scope.query.module || "";
$scope.filtered = $scope.entries.filter((e) => {
if (mod && e.module !== mod) return false;
if (!q) return true;
const hay = (
(e.displayMessage || e.message || "") +
" " +
e.module +
" " +
JSON.stringify(e.raw || [])
).toLowerCase();
return hay.indexOf(q) > -1;
});
}
function load() {
$http.get("/api/admin/errors").then(
(res) => {
$scope.entries = (res.data.entries || []).map(decorate);
$scope.available = !!res.data.available;
const set = new Set();
$scope.entries.forEach((e) => e.module && set.add(e.module));
$scope.modules = Array.from(set).sort();
applyFilter();
},
(err) => console.error(err)
);
}
$scope.refreshNow = load;
$scope.clearAll = () => {
if (!confirm("Clear all captured errors?")) return;
$http.delete("/api/admin/errors").then(load, (err) => console.error(err));
};
load();
const stop = $interval(() => {
if ($scope.query.autoRefresh) load();
}, 5000);
$scope.$on("$destroy", () => $interval.cancel(stop));
$scope.$watch("query.search", applyFilter);
$scope.$watch("query.module", applyFilter);
},
]);
+5
View File
@@ -137,6 +137,11 @@ angular
controller: "queuesAdminController",
title: "Admin · Queues Anonymous GitHub",
})
.when("/admin/errors", {
templateUrl: "/partials/admin/errors.htm",
controller: "errorsAdminController",
title: "Admin · Errors Anonymous GitHub",
})
.when("/404", {
templateUrl: "/partials/404.htm",
title: "Page not found Anonymous GitHub",
+1 -1
View File
File diff suppressed because one or more lines are too long
+5 -2
View File
@@ -8,6 +8,9 @@ import { IAnonymizedGistDocument } from "./model/anonymizedGists/anonymizedGists
import config from "../config";
import { octokit } from "./GitHubUtils";
import { ContentAnonimizer } from "./anonymize-utils";
import { createLogger } from "./logger";
const logger = createLogger("gist");
type GistPayload = {
description: string;
@@ -59,14 +62,14 @@ export default class Gist {
try {
return this._model.source.accessToken;
} catch {
console.debug("[ERROR] Token is invalid", this._model.source.gistId);
logger.warn("invalid token", { gistId: this._model.source.gistId });
}
}
return config.GITHUB_TOKEN;
}
async download() {
console.debug("[INFO] Downloading gist", this._model.source.gistId);
logger.info("downloading gist", { gistId: this._model.source.gistId });
const oct = octokit(await this.getToken());
const gist_id = this._model.source.gistId;
+8 -4
View File
@@ -3,6 +3,9 @@ import { Octokit } from "@octokit/rest";
import Repository from "./Repository";
import UserModel from "./model/users/users.model";
import config from "../config";
import { createLogger } from "./logger";
const logger = createLogger("github");
export function octokit(token: string) {
return new Octokit({
@@ -24,7 +27,7 @@ export async function checkToken(token: string) {
}
export async function getToken(repository: Repository) {
console.log("getToken", repository.repoId);
logger.debug("getToken", { repoId: repository.repoId });
// if (repository.model.source.accessToken) {
// // only check the token if the repo has been visited less than 10 minutes ago
// if (
@@ -101,9 +104,10 @@ export async function getToken(repository: Repository) {
return refreshed;
}
}
console.warn(
`[getToken] refresh failed for ${repository.owner.model.username} (status ${res.status}); falling back`
);
logger.warn("token refresh failed; falling back", {
username: repository.owner.model.username,
status: res.status,
});
// fall through to the checkToken path / config.GITHUB_TOKEN
}
const check = await checkToken(ownerAccessToken);
+15 -16
View File
@@ -9,6 +9,9 @@ import config from "../config";
import got, { HTTPError } from "got";
import { octokit } from "./GitHubUtils";
import { ContentAnonimizer } from "./anonymize-utils";
import { createLogger } from "./logger";
const logger = createLogger("pull-request");
export default class PullRequest {
private _model: IAnonymizedPullRequestDocument;
@@ -38,20 +41,18 @@ export default class PullRequest {
try {
return this._model.source.accessToken;
} catch {
console.debug(
"[ERROR] Token is invalid",
this._model.source.pullRequestId
);
logger.warn("invalid token", {
pullRequestId: this._model.source.pullRequestId,
});
}
}
return config.GITHUB_TOKEN;
}
async download() {
console.debug(
"[INFO] Downloading pull request",
this._model.source.pullRequestId
);
logger.info("downloading pull request", {
pullRequestId: this._model.source.pullRequestId,
});
const oct = octokit(await this.getToken());
const [owner, repo] = this._model.source.repositoryFullName.split("/");
@@ -89,10 +90,9 @@ export default class PullRequest {
user?: { login?: string } | null;
}> => {
if ((err as { status?: number }).status === 404) {
console.warn(
"[WARN] Failed to fetch PR comments (404), continuing without them",
`${owner}/${repo}#${pull_number}`
);
logger.warn("PR comments 404, continuing without them", {
pr: `${owner}/${repo}#${pull_number}`,
});
return [];
}
throw err;
@@ -102,10 +102,9 @@ export default class PullRequest {
`https://github.com/${owner}/${repo}/pull/${pull_number}.diff`
).catch((err) => {
if (err instanceof HTTPError && err.response.statusCode === 404) {
console.warn(
"[WARN] Failed to fetch PR diff (404), continuing without it",
`${owner}/${repo}#${pull_number}`
);
logger.warn("PR diff 404, continuing without it", {
pr: `${owner}/${repo}#${pull_number}`,
});
return { body: "" };
}
throw err;
+14 -9
View File
@@ -23,6 +23,9 @@ import { getToken } from "./GitHubUtils";
import config from "../config";
import FileModel from "./model/files/files.model";
import AnonymizedRepositoryModel from "./model/anonymizedRepositories/anonymizedRepositories.model";
import { createLogger, serializeError } from "./logger";
const logger = createLogger("repository");
import { IFile } from "./model/files/files.types";
import AnonymizedFile from "./AnonymizedFile";
import { FilterQuery } from "mongoose";
@@ -316,9 +319,10 @@ export default class Repository {
?.commit;
if (!newCommit) {
console.error(
`${branchName} for ${this.model.source.repositoryName} is not found`
);
logger.error("branch not found", {
branch: branchName,
repo: this.model.source.repositoryName,
});
await this.updateStatus(RepositoryStatus.ERROR, "branch_not_found");
await this.resetSate();
throw new AnonymousError("branch_not_found", {
@@ -330,7 +334,7 @@ export default class Repository {
this.model.source.commit == newCommit &&
this.status == RepositoryStatus.READY
) {
console.log(`[UPDATE] ${this._model.repoId} is up to date`);
logger.info("up to date", { repoId: this._model.repoId });
return;
}
this._model.source.commit = newCommit;
@@ -347,9 +351,10 @@ export default class Repository {
}
this.model.source.commit = newCommit;
this._model.anonymizeDate = new Date();
console.log(
`[UPDATE] ${this._model.repoId} will be updated to ${newCommit}`
);
logger.info("update queued", {
repoId: this._model.repoId,
commit: newCommit,
});
await this.resetSate(RepositoryStatus.PREPARING);
await downloadQueue.add(this.repoId, { repoId: this.repoId }, {
@@ -450,7 +455,7 @@ export default class Repository {
FileModel.deleteMany({ repoId: this.repoId }).exec(),
this.removeCache(),
]);
console.log(`[RESET] ${this._model.repoId} has been reset`);
logger.info("reset", { repoId: this._model.repoId });
}
/**
@@ -464,7 +469,7 @@ export default class Repository {
try {
await this.model.save();
} catch (error) {
console.error("[ERROR] removeCache save", error);
logger.error("removeCache save failed", serializeError(error));
}
}
}
+172
View File
@@ -0,0 +1,172 @@
import { createClient, RedisClientType } from "redis";
import config from "../config";
// Redis list key holding captured error-level log entries (newest first,
// via LPUSH in persistError below).
export const ERROR_LOG_KEY = "admin:errors";
// Cap on the redis list length; older entries are trimmed away.
export const ERROR_LOG_MAX = 1000;

// Public logger shape returned by createLogger(): one variadic method per level.
export type Logger = {
  debug: (...args: unknown[]) => void;
  info: (...args: unknown[]) => void;
  warn: (...args: unknown[]) => void;
  error: (...args: unknown[]) => void;
};

type Level = "debug" | "info" | "warn" | "error";

// Numeric severity ranking used for threshold comparisons in emit().
const LEVEL_ORDER: Record<Level, number> = {
  debug: 10,
  info: 20,
  warn: 30,
  error: 40,
};
/**
 * Compute the minimum severity that will be emitted.
 * An explicit LOG_LEVEL env var (case-insensitive) wins; otherwise default
 * to "info" in production and "debug" everywhere else.
 */
function resolveThreshold(): number {
  const requested = (process.env.LOG_LEVEL || "").toLowerCase();
  if (requested in LEVEL_ORDER) {
    return LEVEL_ORDER[requested as Level];
  }
  const isProduction = process.env.NODE_ENV === "production";
  return isProduction ? LEVEL_ORDER.info : LEVEL_ORDER.debug;
}

// Resolved once at module load; LOG_LEVEL changes at runtime have no effect.
const threshold = resolveThreshold();
/**
 * Render one log argument as text: strings pass through unchanged, Errors
 * are flattened through serializeError, and everything else is
 * JSON-encoded with a String() fallback for values JSON.stringify rejects
 * (circular structures, BigInt).
 */
function formatArg(a: unknown): string {
  if (typeof a === "string") {
    return a;
  }
  if (a instanceof Error) {
    return JSON.stringify(serializeError(a));
  }
  try {
    return JSON.stringify(a);
  } catch {
    return String(a);
  }
}
// Lazily-created shared client for the error sink; null until first use.
let redisClient: RedisClientType | null = null;
// Once true, the sink stays off for the rest of the process (fail-open:
// logging keeps working on the console, only persistence is skipped).
let redisDisabled = false;

/**
 * Return the shared redis client, creating it and starting an async
 * connect() on the first call. Returns null once redis has been disabled
 * after any failure. Note: the returned client may not be connected yet —
 * callers must check client.isOpen before using it.
 */
function getRedis(): RedisClientType | null {
  if (redisDisabled) return null;
  if (redisClient) return redisClient;
  try {
    redisClient = createClient({
      socket: {
        host: config.REDIS_HOSTNAME,
        port: config.REDIS_PORT,
        // Give up on first failure — we don't want the redis client's
        // reconnect timer keeping the event loop alive (breaks unit tests
        // that just import the logger), and we don't want logger.error to
        // recursively retrigger if redis is down.
        reconnectStrategy: false,
      },
    }) as RedisClientType;
    // Any client error permanently disables the sink and tears the
    // connection down; disconnect() may itself throw if never connected.
    redisClient.on("error", () => {
      if (!redisDisabled) {
        redisDisabled = true;
        try {
          redisClient?.disconnect();
        } catch {
          /* ignore */
        }
      }
    });
    // Fire-and-forget connect; a rejection disables the sink.
    redisClient.connect().catch(() => {
      redisDisabled = true;
    });
    return redisClient;
  } catch {
    redisDisabled = true;
    return null;
  }
}
/**
 * Mirror one error-level log entry into the redis-backed admin sink.
 * Best-effort by design: the entry is silently dropped when redis is
 * unavailable or the connection is not open yet — connect() in getRedis()
 * is async, so errors emitted before the first connection completes are
 * not persisted. The list is capped at ERROR_LOG_MAX entries by pairing
 * LPUSH with LTRIM in a single MULTI.
 */
function persistError(entry: {
  ts: string;
  module: string;
  message: string;
  raw: unknown[];
}) {
  const client = getRedis();
  if (!client || !client.isOpen) return;
  const payload = JSON.stringify(entry);
  client
    .multi()
    .lPush(ERROR_LOG_KEY, payload)
    .lTrim(ERROR_LOG_KEY, 0, ERROR_LOG_MAX - 1)
    .exec()
    // The logger must never throw; swallow redis write failures.
    .catch(() => undefined);
}
/**
 * Core log dispatcher: drop messages below the configured threshold,
 * print one formatted line to the matching console sink, and mirror
 * error-level entries into the redis admin sink via persistError().
 */
function emit(level: Level, module: string, args: unknown[]) {
  if (LEVEL_ORDER[level] < threshold) {
    return;
  }
  const ts = new Date().toISOString();
  const rendered = args.map(formatArg).join(" ");
  const line = `${ts} ${level.toUpperCase()} [${module}] ${rendered}`;
  // One console sink per level; info maps to plain console.log.
  const sinks: Record<Level, (msg: string) => void> = {
    debug: console.debug,
    info: console.log,
    warn: console.warn,
    error: console.error,
  };
  sinks[level](line);
  if (level !== "error") {
    return;
  }
  // Errors are additionally persisted; Errors in the args are flattened
  // to plain objects so they survive JSON round-tripping.
  persistError({
    ts,
    module,
    message: typeof args[0] === "string" ? args[0] : "",
    raw: args.map((a) => (a instanceof Error ? serializeError(a) : a)),
  });
}
export function createLogger(module: string): Logger {
return {
debug: (...args) => emit("debug", module, args),
info: (...args) => emit("info", module, args),
warn: (...args) => emit("warn", module, args),
error: (...args) => emit("error", module, args),
};
}
// Loose structural view of the error shapes we care about: plain Errors,
// Octokit/HTTP-style errors (status/request/response), and AnonymousError
// (httpStatus + cause).
type ErrorLike = {
  name?: string;
  message?: string;
  stack?: string;
  status?: number;
  httpStatus?: number;
  code?: string | number;
  cause?: unknown;
  request?: { url?: string; method?: string };
  response?: { url?: string; status?: number };
};

/**
 * Flatten an arbitrary thrown value into a small JSON-safe record.
 * Non-objects become { value }, HTTP-shaped errors surface status/url/
 * method (skipping the giant headers/body dump), causes are serialized
 * recursively, and the stack is kept only when no HTTP status identifies
 * the failure — keeping plain Errors debuggable without dumping stacks
 * for handled HTTP errors.
 */
export function serializeError(err: unknown): Record<string, unknown> {
  if (err == null) {
    return { value: err };
  }
  if (typeof err !== "object") {
    return { value: String(err) };
  }
  const src = err as ErrorLike;
  const result: Record<string, unknown> = {};
  if (src.name) {
    result.name = src.name;
  }
  if (src.message) {
    result.message = src.message;
  }
  // Octokit RequestError / HTTP-shaped errors: status + url + method.
  if (typeof src.status === "number") {
    result.status = src.status;
  }
  if (src.request?.url) {
    result.url = src.request.url;
  }
  if (src.request?.method) {
    result.method = src.request.method;
  }
  if (!src.request && src.response?.url) {
    result.url = src.response.url;
  }
  // AnonymousError carries an httpStatus and an inner cause.
  if (typeof src.httpStatus === "number") {
    result.httpStatus = src.httpStatus;
  }
  if (src.code !== undefined && src.code !== src.message) {
    result.code = src.code;
  }
  if (src.cause) {
    result.cause = serializeError(src.cause);
  }
  // Stack only when nothing else identifies the failure.
  if (!result.status && !result.httpStatus && src.stack) {
    result.stack = src.stack;
  }
  return result;
}
+9 -4
View File
@@ -14,6 +14,9 @@ import { FILE_TYPE } from "../storage/Storage";
import { octokit } from "../GitHubUtils";
import FileModel from "../model/files/files.model";
import { IFile } from "../model/files/files.types";
import { createLogger, serializeError } from "../logger";
const logger = createLogger("gh-stream");
export default class GitHubStream extends GitHubBase {
type: "GitHubDownload" | "GitHubStream" | "Zip" = "GitHubStream";
@@ -36,7 +39,7 @@ export default class GitHubStream extends GitHubBase {
repo: this.data.repoName,
file_sha: sha,
});
console.log("[GHStream] Downloading file", url);
logger.debug("downloading file", { url });
return got.stream(url, {
headers: {
"X-GitHub-Api-Version": "2022-11-28",
@@ -45,7 +48,7 @@ export default class GitHubStream extends GitHubBase {
},
});
} catch (error) {
console.error(error);
logger.error("downloadFile failed", serializeError(error));
throw new AnonymousError("repo_not_accessible", {
httpStatus: 404,
object: this.data,
@@ -60,7 +63,7 @@ export default class GitHubStream extends GitHubBase {
// as the fallback for LFS files (#95).
private downloadFileViaRaw(token: string, filePath: string) {
const url = `https://github.com/${this.data.organization}/${this.data.repoName}/raw/${this.data.commit}/${filePath}`;
console.log("[GHStream] Downloading via raw URL (LFS)", url);
logger.debug("downloading via raw URL (LFS)", { url });
return got.stream(url, {
headers: { authorization: `token ${token}` },
followRedirect: true,
@@ -267,9 +270,9 @@ export default class GitHubStream extends GitHubBase {
}
output.push(...this.tree2Tree(data.tree, parentPath));
} catch (error) {
console.log(error);
const status = (error as { status?: number }).status;
if (status === 409) {
logger.debug("getTree empty repo", serializeError(error));
throw new AnonymousError("repo_empty", {
httpStatus: 409,
object: this.data,
@@ -277,6 +280,7 @@ export default class GitHubStream extends GitHubBase {
});
}
if (status === 404) {
logger.debug("getTree miss", serializeError(error));
const code = await classifyGitHubMissError(error, this.data);
throw new AnonymousError(code, {
httpStatus: 404,
@@ -284,6 +288,7 @@ export default class GitHubStream extends GitHubBase {
cause: error as Error,
});
}
logger.warn("getTree failed", serializeError(error));
throw new AnonymousError("repo_not_found", {
httpStatus: status || 500,
object: this.data,
+4 -1
View File
@@ -10,6 +10,9 @@ import { lookup } from "mime-types";
import StorageBase, { FILE_TYPE } from "./Storage";
import FileModel from "../model/files/files.model";
import { IFile } from "../model/files/files.types";
import { createLogger, serializeError } from "../logger";
const logger = createLogger("fs");
export default class FileSystem extends StorageBase {
type = "FileSystem";
@@ -97,7 +100,7 @@ export default class FileSystem extends StorageBase {
}
await fs.promises.rename(tmpPath, fullPath);
} catch (err) {
console.error("[ERROR] FileSystem.write failed:", err);
logger.error("write failed", serializeError(err));
await fs.promises.rm(tmpPath, { force: true }).catch(() => undefined);
throw err;
}
+4 -1
View File
@@ -17,6 +17,9 @@ import AnonymousError from "../AnonymousError";
import StorageBase, { FILE_TYPE } from "./Storage";
import { IFile } from "../model/files/files.types";
import FileModel from "../model/files/files.model";
import { createLogger, serializeError } from "../logger";
const logger = createLogger("s3");
export default class S3Storage extends StorageBase {
type = "AWS";
@@ -129,7 +132,7 @@ export default class S3Storage extends StorageBase {
try {
res.status(500).json({ error: "file_not_found" });
} catch (err) {
console.error(`[ERROR] S3 send ${path}`, err);
logger.error("send failed", { path, err: serializeError(err) });
}
}
}
+21
View File
@@ -82,6 +82,7 @@ const UNICODE_ESCAPE_CHARS = new Set(
export function diacriticInsensitive(escapedTerm: string): string {
let out = "";
let i = 0;
let inClass = false;
while (i < escapedTerm.length) {
const c = escapedTerm[i];
if (c === "\\" && i + 1 < escapedTerm.length) {
@@ -96,6 +97,26 @@ export function diacriticInsensitive(escapedTerm: string): string {
i += 2;
continue;
}
// Inside a character class, leave letters alone — expanding them into
// bracketed alternatives would produce nested `[...]` which is a syntax
// error. The user's regex is responsible for its own char-class content.
if (c === "[" && !inClass) {
inClass = true;
out += c;
i += 1;
continue;
}
if (c === "]" && inClass) {
inClass = false;
out += c;
i += 1;
continue;
}
if (inClass) {
out += c;
i += 1;
continue;
}
const lower = c.toLowerCase();
out += DIACRITIC_CLASSES[lower] || c;
i += 1;
+7 -4
View File
@@ -8,6 +8,9 @@ import {
anonymizePathCompiled,
compileTerms,
} from "./anonymize-utils";
import { createLogger, serializeError } from "./logger";
const logger = createLogger("zip-stream");
export interface StreamAnonymizedZipOptions {
repoId: string;
@@ -82,7 +85,7 @@ export async function streamAnonymizedZip(
const downloadStream = got.stream(response.url);
res.on("error", (error) => {
console.error(error);
logger.error("response stream error", serializeError(error));
downloadStream.destroy();
});
res.on("close", () => {
@@ -101,7 +104,7 @@ export async function streamAnonymizedZip(
// bug as #694.
let upstreamSucceeded = false;
const fail = (error: Error) => {
console.error(error);
logger.error("upstream zipball failed", serializeError(error));
archive.abort();
const destroyable = res as unknown as {
destroy?: (err?: Error) => void;
@@ -140,7 +143,7 @@ export async function streamAnonymizedZip(
archive.append(st, { name: fileName });
} catch (error) {
entry.autodrain();
console.error(error);
logger.error("entry transform failed", serializeError(error));
}
} else {
entry.autodrain();
@@ -157,7 +160,7 @@ export async function streamAnonymizedZip(
});
archive.pipe(res).on("error", (error) => {
console.error(error);
logger.error("archive pipe error", serializeError(error));
if (!upstreamSucceeded) {
// archive errored while we were still depending on upstream bytes:
// treat as failure rather than truncating.
+18 -10
View File
@@ -3,6 +3,9 @@ import config from "../config";
import AnonymizedRepositoryModel from "../core/model/anonymizedRepositories/anonymizedRepositories.model";
import { RepositoryStatus } from "../core/types";
import * as path from "path";
import { createLogger, serializeError } from "../core/logger";
const logger = createLogger("queue");
// Minimal payload for queue jobs. Workers re-fetch the Repository from the
// database via getRepository(repoId), so passing the full Mongoose-backed
@@ -31,7 +34,10 @@ async function markErrorIfInFlight(repoId: string, message: string) {
}
).exec();
} catch (e) {
console.log("[QUEUE] markErrorIfInFlight error", repoId, e);
logger.error("markErrorIfInFlight failed", {
repoId,
err: serializeError(e),
});
}
}
@@ -58,13 +64,16 @@ export async function recoverStuckPreparing() {
}
}
await markErrorIfInFlight(doc.repoId, "preparation_interrupted");
console.log("[QUEUE] recovered stuck repo", doc.repoId);
logger.info("recovered stuck repo", { repoId: doc.repoId });
} catch (e) {
console.log("[QUEUE] recover error for", doc.repoId, e);
logger.warn("recover failed", {
repoId: doc.repoId,
err: serializeError(e),
});
}
}
} catch (e) {
console.log("[QUEUE] recoverStuckPreparing failed", e);
logger.error("recoverStuckPreparing failed", serializeError(e));
}
}
@@ -140,18 +149,17 @@ export function startWorker() {
if (!downloadWorker.isRunning()) downloadWorker.run();
downloadWorker.on("active", async (job) => {
console.log("[QUEUE] download repository start", job.data.repoId);
logger.info("download start", { repoId: job.data.repoId });
});
downloadWorker.on("completed", async (job) => {
console.log("[QUEUE] download repository completed", job.data.repoId);
logger.info("download completed", { repoId: job.data.repoId });
});
downloadWorker.on("failed", async (job, err) => {
const repoId = job?.data?.repoId;
console.log(
"[QUEUE] download repository failed",
logger.error("download failed", {
repoId,
err?.message || err
);
err: serializeError(err),
});
if (!repoId) return;
if (job && typeof job.attemptsMade === "number" && job.opts?.attempts) {
if (job.attemptsMade < job.opts.attempts) return;
+17 -10
View File
@@ -4,6 +4,9 @@ config();
import { getRepository as getRepositoryImport } from "../../server/database";
import { RepositoryStatus } from "../../core/types";
import { RepoJobData } from "../index";
import { createLogger, serializeError } from "../../core/logger";
const logger = createLogger("queue:download");
export default async function (job: SandboxedJob<RepoJobData, void>) {
const {
@@ -13,7 +16,7 @@ export default async function (job: SandboxedJob<RepoJobData, void>) {
connect: () => Promise<void>;
getRepository: typeof getRepositoryImport;
} = require("../../server/database");
console.log(`[QUEUE] ${job.data.repoId} is going to be downloaded`);
logger.info("queued for download", { repoId: job.data.repoId });
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let statusInterval: any = null;
await connect();
@@ -37,9 +40,10 @@ export default async function (job: SandboxedJob<RepoJobData, void>) {
repo.status &&
repo.model.statusMessage !== progress?.status
) {
console.log(
`[QUEUE] Progress: ${job.data.repoId} ${progress.status}`
);
logger.debug("progress", {
repoId: job.data.repoId,
status: progress.status,
});
await repo.updateStatus(repo.status, progress?.status || "");
}
} catch {
@@ -60,7 +64,7 @@ export default async function (job: SandboxedJob<RepoJobData, void>) {
clearInterval(statusInterval);
if (tickPromise) await tickPromise;
await repo.updateStatus(RepositoryStatus.READY, "");
console.log(`[QUEUE] ${job.data.repoId} is downloaded`);
logger.info("downloaded", { repoId: job.data.repoId });
} catch (error) {
clearInterval(statusInterval);
if (tickPromise) await tickPromise;
@@ -79,17 +83,20 @@ export default async function (job: SandboxedJob<RepoJobData, void>) {
await tickPromise;
} catch { /* ignored */ }
}
console.log(`[QUEUE] ${job.data.repoId} is finished with an error`, error);
logger.error("finished with error", {
repoId: job.data.repoId,
err: serializeError(error),
});
try {
await repo.updateStatus(
RepositoryStatus.ERROR,
error instanceof Error ? error.message : String(error)
);
} catch (persistError) {
console.log(
`[QUEUE] failed to persist ERROR status for ${job.data.repoId}`,
persistError
);
logger.error("failed to persist ERROR status", {
repoId: job.data.repoId,
err: serializeError(persistError),
});
}
throw error;
} finally {
+5 -4
View File
@@ -1,6 +1,9 @@
import { SandboxedJob } from "bullmq";
import { getRepository as getRepositoryImport } from "../../server/database";
import { RepoJobData } from "../index";
import { createLogger } from "../../core/logger";
const logger = createLogger("queue:cache");
export default async function (job: SandboxedJob<RepoJobData, void>) {
const {
@@ -12,14 +15,12 @@ export default async function (job: SandboxedJob<RepoJobData, void>) {
} = require("../../server/database");
try {
await connect();
console.log(
`[QUEUE] Cache of ${job.data.repoId} is going to be removed...`
);
logger.info("removing cache", { repoId: job.data.repoId });
const repo = await getRepository(job.data.repoId);
await repo.removeCache();
} catch {
// error already handled
} finally {
console.log(`[QUEUE] Cache of ${job.data.repoId} is removed.`);
logger.info("cache removed", { repoId: job.data.repoId });
}
}
+5 -2
View File
@@ -2,6 +2,9 @@ import { SandboxedJob } from "bullmq";
import { getRepository as getRepositoryImport } from "../../server/database";
import { RepositoryStatus } from "../../core/types";
import { RepoJobData } from "../index";
import { createLogger } from "../../core/logger";
const logger = createLogger("queue:remove");
export default async function (job: SandboxedJob<RepoJobData, void>) {
const {
@@ -13,7 +16,7 @@ export default async function (job: SandboxedJob<RepoJobData, void>) {
} = require("../../server/database");
try {
await connect();
console.log(`[QUEUE] ${job.data.repoId} is going to be removed`);
logger.info("removing repository", { repoId: job.data.repoId });
const repo = await getRepository(job.data.repoId);
await repo.updateStatus(RepositoryStatus.REMOVING, "");
try {
@@ -29,6 +32,6 @@ export default async function (job: SandboxedJob<RepoJobData, void>) {
} catch {
// error already handled
} finally {
console.log(`[QUEUE] ${job.data.repoId} is removed`);
logger.info("repository removed", { repoId: job.data.repoId });
}
}
+14 -9
View File
@@ -20,6 +20,9 @@ import { startWorker, recoverStuckPreparing } from "../queue";
import AnonymizedPullRequestModel from "../core/model/anonymizedPullRequests/anonymizedPullRequests.model";
import { getUser } from "./routes/route-utils";
import config from "../config";
import { createLogger, serializeError } from "../core/logger";
const logger = createLogger("server");
function indexResponse(req: express.Request, res: express.Response) {
if (
@@ -67,7 +70,9 @@ export default async function start() {
port: config.REDIS_PORT,
},
});
redisClient.on("error", (err) => console.log("Redis Client Error", err));
redisClient.on("error", (err) =>
logger.error("redis client error", serializeError(err))
);
await redisClient.connect();
@@ -79,7 +84,7 @@ export default async function start() {
return request.headers["cf-connecting-ip"] as string;
}
if (!request.ip && request.socket.remoteAddress) {
console.error("Warning: request.ip is missing!");
logger.warn("request.ip is missing");
return request.socket.remoteAddress;
}
// remove port number from IPv4 addresses
@@ -136,12 +141,12 @@ export default async function start() {
const start = Date.now();
res.on("finish", function () {
const time = Date.now() - start;
console.log(
`${req.method} ${res.statusCode} ${join(
req.baseUrl || "",
req.url || ""
)} ${time}ms`
);
logger.info("request", {
method: req.method,
status: res.statusCode,
url: join(req.baseUrl || "", req.url || ""),
ms: time,
});
});
next();
});
@@ -252,7 +257,7 @@ export default async function start() {
await connect();
await recoverStuckPreparing();
app.listen(config.PORT);
console.log("Database connected and Server started on port: " + config.PORT);
logger.info("server started", { port: config.PORT });
}
start();
+63 -2
View File
@@ -10,6 +10,30 @@ import { ensureAuthenticated } from "./connection";
import { handleError, getUser, isOwnerOrAdmin, getRepo } from "./route-utils";
import adminTokensRouter from "./admin-tokens";
import { octokit, getToken } from "../../core/GitHubUtils";
import { createLogger, serializeError, ERROR_LOG_KEY, ERROR_LOG_MAX } from "../../core/logger";
import { createClient, RedisClientType } from "redis";
import config from "../../config";
const logger = createLogger("admin");
// Lazily-created Redis client used only to read/clear the logger's error sink.
let errorLogClient: RedisClientType | null = null;

/**
 * Returns a connected Redis client for the error-log list, creating it on
 * first use and reusing it while the connection stays open.
 *
 * Returns null (after logging) when the connection cannot be established so
 * the /errors endpoints can degrade gracefully instead of failing the request.
 *
 * Fix over the original: the module-level cache is only assigned once the
 * client is actually connected. Previously a client whose connect() rejected
 * stayed cached and was silently abandoned on the next call, leaking a client
 * that may keep retrying the connection in the background.
 */
async function getErrorLogClient(): Promise<RedisClientType | null> {
  if (errorLogClient && errorLogClient.isOpen) return errorLogClient;
  const client = createClient({
    socket: {
      host: config.REDIS_HOSTNAME,
      port: config.REDIS_PORT,
    },
  }) as RedisClientType;
  // Swallow async socket 'error' events: failures surface via connect()
  // below or on the next command, and an unhandled 'error' would crash.
  client.on("error", () => undefined);
  try {
    await client.connect();
    errorLogClient = client;
    return errorLogClient;
  } catch (err) {
    // Tear down the failed client so it does not keep reconnecting.
    try {
      await client.disconnect();
    } catch {
      /* never connected / already closed */
    }
    errorLogClient = null;
    logger.error("error log redis connect failed", serializeError(err));
    return null;
  }
}
const router = express.Router();
@@ -203,6 +227,39 @@ router.get("/queues", async (req, res) => {
});
});
// Errors captured by the logger sink (last ERROR_LOG_MAX entries).
router.get("/errors", async (req, res) => {
  try {
    const client = await getErrorLogClient();
    if (!client) {
      // Redis is unreachable: report an empty, unavailable log.
      res.json({ entries: [], max: ERROR_LOG_MAX, available: false });
      return;
    }
    // Each list item is a JSON-serialized log entry; keep malformed lines
    // visible as synthetic entries instead of dropping them.
    const parseEntry = (line: string) => {
      try {
        return JSON.parse(line);
      } catch {
        return { ts: null, module: null, message: line, raw: [] };
      }
    };
    const lines = await client.lRange(ERROR_LOG_KEY, 0, ERROR_LOG_MAX - 1);
    res.json({
      entries: lines.map(parseEntry),
      max: ERROR_LOG_MAX,
      available: true,
    });
  } catch (error) {
    handleError(error, res, req);
  }
});
// Clear the captured error log and report how many entries were removed.
router.delete("/errors", async (req, res) => {
  try {
    const client = await getErrorLogClient();
    if (client) {
      // Read the length before deleting so the response can report it.
      const cleared = await client.lLen(ERROR_LOG_KEY);
      await client.del(ERROR_LOG_KEY);
      res.json({ ok: true, cleared });
    } else {
      // No Redis connection means there is nothing to clear.
      res.json({ ok: true, cleared: 0 });
    }
  } catch (error) {
    handleError(error, res, req);
  }
});
// Global stats endpoint: counts by status, total disk, recent failures
router.get("/stats", async (req, res) => {
try {
@@ -538,7 +595,9 @@ router.get(
localField: "repositories",
});
if (!model) {
req.logout((error) => console.error(error));
req.logout((error) =>
logger.error("logout failed", serializeError(error))
);
throw new AnonymousError("user_not_found", {
httpStatus: 404,
});
@@ -556,7 +615,9 @@ router.get(
try {
const model = await UserModel.findOne({ username: req.params.username });
if (!model) {
req.logout((error) => console.error(error));
req.logout((error) =>
logger.error("logout failed", serializeError(error))
);
throw new AnonymousError("user_not_found", {
httpStatus: 404,
});
+9 -4
View File
@@ -12,6 +12,9 @@ import { IUserDocument } from "../../core/model/users/users.types";
import AnonymousError from "../../core/AnonymousError";
import AnonymizedPullRequestModel from "../../core/model/anonymizedPullRequests/anonymizedPullRequests.model";
import { hashToken } from "./token-auth";
import { createLogger, serializeError } from "../../core/logger";
const logger = createLogger("auth");
export function ensureAuthenticated(
req: express.Request,
@@ -97,7 +100,7 @@ const verify = async (
user,
});
} catch (error) {
console.error(error);
logger.error("verify failed", serializeError(error));
done(
new AnonymousError("unable_to_connect_user", {
httpStatus: 500,
@@ -135,7 +138,9 @@ export function initSession() {
host: config.REDIS_HOSTNAME,
},
});
redisClient.on("error", (err) => console.log("Redis Client Error", err));
redisClient.on("error", (err) =>
logger.error("redis client error", serializeError(err))
);
redisClient.connect();
const redisStore = new RedisStore({
client: redisClient,
@@ -200,7 +205,7 @@ router.all(
};
req.login(synthUser, (err) => {
if (err) {
console.error("[login-token] req.login failed", err);
logger.error("login-token req.login failed", serializeError(err));
return res.status(500).json({ error: "login_failed" });
}
UserModel.updateOne(
@@ -211,7 +216,7 @@ router.all(
return res.json({ ok: true, username: model.username });
});
} catch (err) {
console.error("[login-token] error", err);
logger.error("login-token failed", serializeError(err));
res.status(500).json({ error: "server_error" });
}
}
+14 -2
View File
@@ -1,7 +1,8 @@
import * as express from "express";
import { getGist, handleError } from "./route-utils";
import { getGist, getUser, handleError } from "./route-utils";
import AnonymousError from "../../core/AnonymousError";
import User from "../../core/User";
const router = express.Router();
@@ -12,13 +13,22 @@ router.get(
res.header("Cache-Control", "no-cache");
const gist = await getGist(req, res, { nocheck: true });
if (!gist) return;
let user: User | undefined = undefined;
try {
user = await getUser(req);
} catch { /* not logged in */ }
const canEdit =
!!user && (user.isAdmin || user.id == gist.model.owner);
let redirectURL = null;
if (
!canEdit &&
gist.status == "expired" &&
gist.options.expirationMode == "redirect"
) {
redirectURL = `https://gist.github.com/${gist.source.gistId}`;
} else {
} else if (!canEdit) {
if (
gist.status == "expired" ||
gist.status == "expiring" ||
@@ -60,6 +70,8 @@ router.get(
res.json({
url: redirectURL,
lastUpdateDate: gist.model.statusDate,
isAdmin: user?.isAdmin === true,
isOwner: user?.id == gist.model.owner,
});
} catch (error) {
handleError(error, res, req);
+14 -3
View File
@@ -1,7 +1,8 @@
import * as express from "express";
import { getPullRequest, handleError } from "./route-utils";
import { getPullRequest, getUser, handleError } from "./route-utils";
import AnonymousError from "../../core/AnonymousError";
import User from "../../core/User";
const router = express.Router();
@@ -12,10 +13,18 @@ router.get(
res.header("Cache-Control", "no-cache");
const pr = await getPullRequest(req, res, { nocheck: true });
if (!pr) return;
let user: User | undefined = undefined;
try {
user = await getUser(req);
} catch { /* not logged in */ }
const canEdit =
!!user && (user.isAdmin || user.id == pr.model.owner);
let redirectURL = null;
if (pr.status == "expired" && pr.options.expirationMode == "redirect") {
if (!canEdit && pr.status == "expired" && pr.options.expirationMode == "redirect") {
redirectURL = `https://github.com/${pr.source.repositoryFullName}/pull/${pr.source.pullRequestId}`;
} else {
} else if (!canEdit) {
if (
pr.status == "expired" ||
pr.status == "expiring" ||
@@ -60,6 +69,8 @@ router.get(
res.json({
url: redirectURL,
lastUpdateDate: pr.model.statusDate,
isAdmin: user?.isAdmin === true,
isOwner: user?.id == pr.model.owner,
});
} catch (error) {
handleError(error, res, req);
+8 -2
View File
@@ -22,6 +22,9 @@ import User from "../../core/User";
import { RepositoryStatus } from "../../core/types";
import { IUserDocument } from "../../core/model/users/users.types";
import { checkToken, octokit } from "../../core/GitHubUtils";
import { createLogger, serializeError } from "../../core/logger";
const logger = createLogger("route:repo");
const router = express.Router();
@@ -55,7 +58,7 @@ async function getTokenForAdmin(user: User, req: express.Request) {
return existingRepo.source.accessToken;
}
} catch (error) {
console.log(error);
logger.warn("getToken lookup failed", serializeError(error));
}
}
}
@@ -116,7 +119,10 @@ router.post("/claim", async (req: express.Request, res: express.Response) => {
});
}
console.log(`${user.username} claims ${r.repository}.`);
logger.info("repo claimed", {
user: user.username,
repo: r.repository,
});
repoConfig.owner = user;
await AnonymizedRepositoryModel.updateOne(
+14 -7
View File
@@ -3,7 +3,7 @@ import config from "../../config";
import got from "got";
import { join } from "path";
import { getRepo, getUser, handleError } from "./route-utils";
import { getRepo, getUser, handleError, isCoauthor } from "./route-utils";
import AnonymousError from "../../core/AnonymousError";
import { downloadQueue } from "../../queue";
import { RepositoryStatus } from "../../core/types";
@@ -150,14 +150,26 @@ router.get(
nocheck: true,
});
if (!repo) return;
let user: User | undefined = undefined;
try {
user = await getUser(req);
} catch { /* not logged in */ }
const canEdit =
!!user &&
(user.isAdmin ||
user.id == repo.model.owner ||
isCoauthor(repo, user));
let redirectURL = null;
if (
!canEdit &&
repo.status == RepositoryStatus.EXPIRED &&
repo.options.expirationMode == "redirect" &&
repo.model.source.repositoryName
) {
redirectURL = `https://github.com/${repo.model.source.repositoryName}`;
} else {
} else if (!canEdit) {
if (
repo.status == RepositoryStatus.EXPIRED ||
repo.status == RepositoryStatus.EXPIRING ||
@@ -207,11 +219,6 @@ router.get(
if (!!config.ENABLE_DOWNLOAD && !!config.STREAMER_ENTRYPOINT) {
download = true;
}
let user: User | undefined = undefined;
try {
user = await getUser(req);
} catch { /* not logged in */ }
res.json({
url: redirectURL,
download: download || user?.isAdmin === true,
+14 -17
View File
@@ -6,6 +6,9 @@ import User from "../../core/User";
import Repository from "../../core/Repository";
import { HTTPError } from "got";
import { RepositoryStatus } from "../../core/types";
import { createLogger, serializeError } from "../../core/logger";
const logger = createLogger("route");
export async function getGist(
req: express.Request,
@@ -114,24 +117,18 @@ export function isOwnerCoauthorOrAdmin(repo: Repository, user: User) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function printError(error: any, req?: express.Request) {
if (error instanceof AnonymousError) {
let message = `[ERROR] ${error.toString()} ${error.stack
?.split("\n")[1]
.trim()}`;
if (req) {
message += ` ${req.originalUrl}`;
// ignore common error
if (req.originalUrl === "/api/repo/undefined/options") return;
}
console.error(message);
if (req?.originalUrl === "/api/repo/undefined/options") return;
logger.error("anonymous error", {
...serializeError(error),
url: req?.originalUrl,
});
} else if (error instanceof HTTPError) {
const message = `[ERROR] HTTP.${
error.code
} ${error.message.toString()} ${error.stack?.split("\n")[1].trim()}`;
console.error(message);
} else if (error instanceof Error) {
console.error(error);
logger.error("http error", {
code: error.code,
message: error.message,
});
} else {
console.error(error);
logger.error("unhandled error", serializeError(error));
}
}
@@ -172,7 +169,7 @@ export async function getUser(req: express.Request) {
function notConnected(): never {
req.logout((error) => {
if (error) {
console.error(`[ERROR] Error while logging out: ${error}`);
logger.error("logout failed", serializeError(error));
}
});
throw new AnonymousError("not_connected", {
+7 -2
View File
@@ -1,6 +1,9 @@
import * as express from "express";
import * as crypto from "crypto";
import UserModel from "../../core/model/users/users.model";
import { createLogger, serializeError } from "../../core/logger";
const logger = createLogger("token-auth");
export function hashToken(token: string): string {
return crypto.createHash("sha256").update(token).digest("hex");
@@ -38,9 +41,11 @@ export async function bearerTokenAuth(
UserModel.updateOne(
{ _id: model._id, "apiTokens.tokenHash": tokenHash },
{ $set: { "apiTokens.$.lastUsedAt": new Date() } }
).catch((err) => console.error("[token-auth] lastUsedAt update failed", err));
).catch((err) =>
logger.error("lastUsedAt update failed", serializeError(err))
);
} catch (err) {
console.error("[token-auth] lookup failed", err);
logger.error("lookup failed", serializeError(err));
}
return next();
}
+4 -1
View File
@@ -7,6 +7,9 @@ import User from "../../core/User";
import FileModel from "../../core/model/files/files.model";
import { isConnected } from "../database";
import { octokit } from "../../core/GitHubUtils";
import { createLogger, serializeError } from "../../core/logger";
const logger = createLogger("user");
const router = express.Router();
@@ -17,7 +20,7 @@ router.get("/logout", async (req: express.Request, res: express.Response) => {
try {
req.logout((error) => {
if (error) {
console.error(`[ERROR] Logout error: ${error}`);
logger.error("logout failed", serializeError(error));
}
});
res.redirect("/");
+9 -6
View File
@@ -3,6 +3,9 @@ import Conference from "../core/Conference";
import AnonymizedRepositoryModel from "../core/model/anonymizedRepositories/anonymizedRepositories.model";
import ConferenceModel from "../core/model/conference/conferences.model";
import Repository from "../core/Repository";
import { createLogger, serializeError } from "../core/logger";
const logger = createLogger("schedule");
export function conferenceStatusCheck() {
// check every 6 hours the status of the conferences
@@ -14,7 +17,7 @@ export function conferenceStatusCheck() {
try {
await conference.expire();
} catch (error) {
console.error(error);
logger.error("conference expire failed", serializeError(error));
}
}
}
@@ -25,7 +28,7 @@ export function conferenceStatusCheck() {
export function repositoryStatusCheck() {
// check every 6 hours the status of the repositories
schedule.scheduleJob("0 */6 * * *", async () => {
console.log("[schedule] Check repository status and unused repositories");
logger.info("checking repository status and unused repositories");
(
await AnonymizedRepositoryModel.find({
status: { $eq: "ready" },
@@ -36,16 +39,16 @@ export function repositoryStatusCheck() {
try {
repo.check();
} catch {
console.log(`Repository ${repo.repoId} is expired`);
logger.info("repository expired", { repoId: repo.repoId });
}
const fourMonthAgo = new Date();
fourMonthAgo.setMonth(fourMonthAgo.getMonth() - 4);
if (repo.model.lastView < fourMonthAgo) {
repo.removeCache().then(() => {
console.log(
`Repository ${repo.repoId} not visited for 4 months remove the cached files`
);
logger.info("removed cache for unused repository", {
repoId: repo.repoId,
});
});
}
});
+4 -1
View File
@@ -8,6 +8,9 @@ import config from "../config";
import router from "./route";
import { handleError } from "../server/routes/route-utils";
import AnonymousError from "../core/AnonymousError";
import { createLogger } from "../core/logger";
const logger = createLogger("streamer");
const app = express();
app.use(express.json());
@@ -31,5 +34,5 @@ app.all("*", (req, res) => {
);
});
app.listen(config.PORT, () => {
console.log(`Server started on http://streamer:${config.PORT}`);
logger.info("streamer started", { port: config.PORT });
});