Add missing error handlers on stream pipelines

- AnonymizedFile.anonymizedContent(): propagate content errors to the
  anonymizer so callers see the failure instead of hanging.
- AnonymizedFile.send() local path: add error handler on the anonymizer
  transform between content and response pipes.
- S3.send(): handle errors on the S3 body stream so an unhandled
  'error' event cannot crash the process.
- S3.archive() / FileSystem.archive(): propagate read-stream errors
  to the file transformer so archiver sees the failure.
Add frontend translations for the new error codes (storage_read_error, upstream_error).
This commit is contained in:
tdurieux
2026-05-07 07:47:29 +03:00
parent 7a163f2d35
commit 2de08c3df3
4 changed files with 22 additions and 8 deletions
+3 -1
View File
@@ -103,7 +103,9 @@
"username_not_defined": "A username must be provided.", "username_not_defined": "A username must be provided.",
"github_user_not_found": "The specified GitHub user could not be found.", "github_user_not_found": "The specified GitHub user could not be found.",
"cannot_coauthor_self": "You cannot add yourself as a co-author.", "cannot_coauthor_self": "You cannot add yourself as a co-author.",
"storage_write_size_mismatch": "The downloaded file was smaller than expected. The upstream source may have returned an incomplete response — please try again." "storage_write_size_mismatch": "The downloaded file was smaller than expected. The upstream source may have returned an incomplete response — please try again.",
"storage_read_error": "An error occurred while reading the file from storage — please try again.",
"upstream_error": "A temporary error occurred while fetching from GitHub — please try again."
}, },
"WARNINGS": { "WARNINGS": {
"page_not_enabled_on_repo": "GitHub Pages is not enabled on this repository. Enable it in the repository's Settings → Pages on GitHub, then refresh.", "page_not_enabled_on_repo": "GitHub Pages is not enabled on this repository. Enable it in the repository's Settings → Pages on GitHub, then refresh.",
+2
View File
@@ -269,6 +269,7 @@ export default class AnonymizedFile {
if (!config.STREAMER_ENTRYPOINT) { if (!config.STREAMER_ENTRYPOINT) {
// collect the content locally // collect the content locally
const content = await this.content(); const content = await this.content();
content.on("error", (err) => anonymizer.destroy(err));
return content.pipe(anonymizer); return content.pipe(anonymizer);
} }
@@ -412,6 +413,7 @@ export default class AnonymizedFile {
content content
.on("error", handleStreamError) .on("error", handleStreamError)
.pipe(anonymizer) .pipe(anonymizer)
.on("error", handleStreamError)
.pipe(res) .pipe(res)
.on("error", handleStreamError) .on("error", handleStreamError)
.on("finish", () => { .on("finish", () => {
+5 -3
View File
@@ -228,10 +228,12 @@ export default class FileSystem extends StorageBase {
await this.listFiles(repoId, dir, { await this.listFiles(repoId, dir, {
onEntry: async (file) => { onEntry: async (file) => {
let rs = await this.read(repoId, file.path); let rs: Readable = await this.read(repoId, file.path);
if (opt?.fileTransformer) { if (opt?.fileTransformer) {
// apply transformation on the stream const src = rs;
rs = rs.pipe(opt.fileTransformer(file.path)); const transformer = opt.fileTransformer(file.path);
src.on("error", (err) => transformer.destroy(err));
rs = src.pipe(transformer);
} }
const f = file.path.replace(fullPath, ""); const f = file.path.replace(fullPath, "");
archive.append(rs, { archive.append(rs, {
+12 -4
View File
@@ -124,7 +124,13 @@ export default class S3Storage extends StorageBase {
res.set("Content-Length", s.ContentLength.toString()); res.set("Content-Length", s.ContentLength.toString());
} }
if (s.Body) { if (s.Body) {
(s.Body as Readable)?.pipe(res); const body = s.Body as Readable;
body.on("error", (err) => {
logger.error("S3 body stream error", { ...serializeError(err), filePath: path });
if (!res.headersSent) res.status(502).json({ error: "storage_read_error" });
else res.destroy();
});
body.pipe(res);
} else { } else {
res.end(); res.end();
} }
@@ -344,10 +350,12 @@ export default class S3Storage extends StorageBase {
f.Key.replace(join(this.repoPath(repoId), dir), "") f.Key.replace(join(this.repoPath(repoId), dir), "")
); );
let rs = await this.read(repoId, f.Key); let rs: Readable = await this.read(repoId, f.Key);
if (opt?.fileTransformer) { if (opt?.fileTransformer) {
// apply transformation on the stream const src = rs;
rs = rs.pipe(opt.fileTransformer(f.Key)); const transformer = opt.fileTransformer(f.Key);
src.on("error", (err) => transformer.destroy(err));
rs = src.pipe(transformer);
} }
archive.append(rs, { archive.append(rs, {