chore: update dependencies and migrate the AWS S3 library to SDK v3

Author: tdurieux
Date: 2023-05-01 14:34:58 +02:00
Parent: 6226f32471
Commit: ee82d3c12a

11 changed files with 2409 additions and 2607 deletions

cli.ts (2 changes)

@@ -108,7 +108,7 @@ async function main() {
   await (repository.source as GitHubDownload).download(inq.token);
   const outputFileName = join(inq.output, generateRandomFileName(8) + ".zip");
   console.info("[INFO] Anonymizing repository and creation zip file...");
-  await writeFile(outputFileName, repository.zip());
+  await writeFile(outputFileName, await repository.zip());
   console.log(`Anonymized repository saved at ${outputFileName}`);
 }
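Note: the extra await is needed because zip() now returns a Promise<Readable> (see the Repository and storage changes below). A minimal sketch of the new calling pattern, with a stubbed zip() standing in for the real repository method:

import { Readable } from "stream";
import { writeFile } from "fs/promises";

// Stub standing in for Repository#zip(), which is now async because the
// storage layer has to await SDK v3 calls before it can produce the stream.
async function zip(): Promise<Readable> {
  return Readable.from(["example content"]);
}

async function save(outputFileName: string) {
  // await zip() unwraps the Promise; writeFile then consumes the stream.
  await writeFile(outputFileName, await zip());
}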

package-lock.json (generated, 4751 changes)

File diff suppressed because it is too large

package.json

@@ -30,15 +30,17 @@
     "build"
   ],
   "dependencies": {
-    "@octokit/oauth-app": "^4.1.0",
+    "@aws-sdk/client-s3": "^3.321.1",
+    "@aws-sdk/node-http-handler": "^3.321.1",
+    "@octokit/oauth-app": "^4.2.1",
     "@octokit/rest": "^19.0.5",
     "@pm2/io": "^5.0.0",
     "archiver": "^5.3.1",
-    "aws-sdk": "^2.1238.0",
+    "aws-sdk": "^2.1368.0",
     "bullmq": "^2.3.2",
     "compression": "^1.7.4",
-    "connect-redis": "^6.1.3",
-    "decompress-stream-to-s3": "^1.3.1",
+    "connect-redis": "^7.0.1",
+    "decompress-stream-to-s3": "^2.0.0",
     "dotenv": "^16.0.3",
     "express": "^4.18.2",
     "express-rate-limit": "^6.6.0",
@@ -49,14 +51,14 @@
     "istextorbinary": "^6.0.0",
     "marked": "^4.1.1",
     "mime-types": "^2.1.35",
-    "mongoose": "^6.6.7",
+    "mongoose": "^7.1.0",
     "node-schedule": "^2.1.0",
     "parse-github-url": "^1.0.2",
     "passport": "^0.6.0",
     "passport-github2": "^0.1.12",
     "rate-limit-redis": "^3.0.1",
-    "redis": "^4.3.1",
-    "textextensions": "^5.15.0",
+    "redis": "^4.6.6",
+    "textextensions": "^5.16.0",
     "ts-custom-error": "^3.3.0",
     "unzip-stream": "^0.3.1",
     "xml-flow": "^1.0.4"


@@ -209,8 +209,6 @@ export default class AnonymizedFile {
     const mime = lookup(this.anonymizedPath);
     if (mime && this.extension() != "ts") {
       res.contentType(mime);
-    } else if (this.extension() == "ts") {
-      res.contentType("application/x-typescript");
     } else if (isTextFile(this.anonymizedPath)) {
       res.contentType("text/plain");
     } else {
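Note: with the application/x-typescript branch removed, ".ts" files now fall through to the text check and are served as text/plain, which browsers render inline instead of downloading. A sketch of the resulting logic, using istextorbinary's isText in place of the project's own isTextFile helper and an assumed binary fallback:

import { lookup } from "mime-types";
import { isText } from "istextorbinary";

function contentTypeFor(path: string): string {
  const mime = lookup(path); // mime-types maps ".ts" to video/mp2t
  const extension = path.split(".").pop();
  if (mime && extension !== "ts") return mime;
  if (isText(path)) return "text/plain"; // ".ts" lands here now
  return "application/octet-stream"; // assumption: binary fallback
}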


@@ -164,7 +164,7 @@ export default class Repository {
    *
    * @returns A stream of anonymized repository compressed
    */
-  zip(): Readable {
+  zip(): Promise<Readable> {
     return storage.archive(this.originalCachePath, {
       format: "zip",
       fileTransformer: (filename: string) =>


@@ -1,7 +1,7 @@
 import { createClient } from "redis";
 import * as passport from "passport";
 import * as session from "express-session";
-import * as connectRedis from "connect-redis";
+import RedisStore from "connect-redis";
 import * as OAuth2Strategy from "passport-oauth2";
 import { Profile, Strategy } from "passport-github2";
 import * as express from "express";
@@ -89,9 +89,8 @@ passport.deserializeUser((user: Express.User, done) => {
 });

 export function initSession() {
-  const RedisStore = connectRedis(session);
   const redisClient = createClient({
-    legacyMode: true,
+    legacyMode: false,
     socket: {
       port: config.REDIS_PORT,
       host: config.REDIS_HOSTNAME,
@@ -99,12 +98,14 @@ export function initSession() {
   });
   redisClient.on("error", (err) => console.log("Redis Client Error", err));
   redisClient.connect();

+  const redisStore = new RedisStore({
+    client: redisClient,
+    prefix: "anoGH_session:",
+  });
   return session({
-    secret: "keyboard cat",
-    store: new RedisStore({
-      client: redisClient,
-    }),
+    secret: config.SESSION_SECRET,
+    store: redisStore,
     saveUninitialized: false,
     resave: false,
   });
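Note: connect-redis v7 exports the store class directly instead of a factory that wraps express-session, and it works with the node-redis v4 client without legacyMode. The hardcoded "keyboard cat" secret is also replaced with a config value. A minimal sketch of the v7 wiring, with hypothetical host, port, and prefix values:

import RedisStore from "connect-redis";
import * as session from "express-session";
import { createClient } from "redis";

const redisClient = createClient({
  socket: { host: "localhost", port: 6379 }, // hypothetical connection
});
redisClient.connect(); // fire-and-forget, as in the diff

// v7: construct the store directly; no connectRedis(session) factory call.
const store = new RedisStore({
  client: redisClient,
  prefix: "myapp_session:", // hypothetical key prefix
});

export const sessionMiddleware = session({
  secret: process.env.SESSION_SECRET ?? "change-me", // never hardcode secrets
  store,
  resave: false,
  saveUninitialized: false,
});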


@@ -505,7 +505,7 @@ router.post("/", async (req: express.Request, res: express.Response) => {
       new Date() > conf.endDate ||
       conf.status !== "ready"
     ) {
-      await repo.remove();
+      await repo.deleteOne();
       throw new AnonymousError("conf_not_activated", {
         object: conf,
         httpStatus: 400,
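Note: Mongoose 7 removed Document#remove(); deleteOne() is the drop-in replacement for deleting a loaded document. A minimal sketch with a hypothetical model:

import mongoose, { Schema } from "mongoose";

// Hypothetical model for illustration only.
const RepositoryModel = mongoose.model(
  "Repository",
  new Schema({ repoId: String })
);

async function removeRepository(repoId: string) {
  const repo = await RepositoryModel.findOne({ repoId });
  // was: await repo.remove(); — removed in Mongoose 7
  await repo?.deleteOne();
}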


@@ -51,7 +51,7 @@ router.get(
     // cache the file for 6 hours
     res.header("Cache-Control", "max-age=21600");
-    await pipeline(repo.zip(), res);
+    await pipeline(await repo.zip(), res);
   } catch (error) {
     handleError(error, res, req);
   }
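Note: same pattern as in cli.ts — the stream must be awaited before it is handed to the promisified pipeline. A minimal sketch, with a stubbed producer and a hypothetical output path:

import { pipeline } from "stream/promises";
import { Readable } from "stream";
import { createWriteStream } from "fs";

// Awaiting the producer first, then awaiting pipeline(), keeps error
// propagation and backpressure handling in one place.
async function download(zip: () => Promise<Readable>) {
  await pipeline(await zip(), createWriteStream("/tmp/out.zip"));
}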


@@ -22,12 +22,12 @@ export default class FileSystem implements StorageBase {
   }

   /** @override */
-  send(p: string, res: Response) {
+  async send(p: string, res: Response) {
     res.sendFile(join(config.FOLDER, p), { dotfiles: "allow" });
   }

   /** @override */
-  read(p: string): Readable {
+  async read(p: string): Promise<Readable> {
     return fs.createReadStream(join(config.FOLDER, p));
   }
@@ -38,7 +38,7 @@ export default class FileSystem implements StorageBase {
       lastModified: info.mtime,
       contentType: info.isDirectory()
         ? "application/x-directory"
-        : lookup(join(config.FOLDER, path)) as string,
+        : (lookup(join(config.FOLDER, path)) as string),
     };
   }
@@ -132,7 +132,7 @@ export default class FileSystem implements StorageBase {
   }

   /** @override */
-  archive(
+  async archive(
     dir: string,
     opt?: {
       format?: "zip" | "tar";
@@ -142,8 +142,8 @@ export default class FileSystem implements StorageBase {
     const archive = archiver(opt?.format || "zip", {});
     this.listFiles(dir, {
-      onEntry: (file) => {
-        let rs = this.read(file.path);
+      onEntry: async (file) => {
+        let rs = await this.read(file.path);
         if (opt?.fileTransformer) {
           // apply transformation on the stream
           rs = rs.pipe(opt.fileTransformer(file.path));
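Note: the filesystem backend is synchronous under the hood, so these methods become async purely to match the S3 implementation behind the shared StorageBase interface. A minimal sketch of the idea:

import { Readable } from "stream";
import * as fs from "fs";

// createReadStream() is synchronous, but returning a Promise<Readable>
// gives local storage the same contract as the S3 backend, so callers
// can always write: const rs = await storage.read(path);
async function read(path: string): Promise<Readable> {
  return fs.createReadStream(path);
}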


@@ -1,5 +1,11 @@
 import { SourceBase, StorageBase, Tree, TreeFile } from "../types";
-import { S3 } from "aws-sdk";
+import {
+  GetObjectCommand,
+  ListObjectsV2CommandOutput,
+  PutObjectCommandInput,
+  S3,
+} from "@aws-sdk/client-s3";
+import { NodeHttpHandler } from "@aws-sdk/node-http-handler";
 import config from "../../config";
 import { pipeline, Readable, Transform } from "stream";
 import ArchiveStreamToS3 from "decompress-stream-to-s3";
@@ -10,6 +16,7 @@ import * as archiver from "archiver";
 import { dirname, basename } from "path";
 import AnonymousError from "../AnonymousError";
 import AnonymizedFile from "../AnonymizedFile";
+import { AnonymizeTransformer } from "../anonymize-utils";

 export default class S3Storage implements StorageBase {
   type = "AWS";
@@ -22,14 +29,19 @@ export default class S3Storage implements StorageBase {
   }

   private client(timeout = 5000) {
     if (!config.S3_CLIENT_ID) throw new Error("S3_CLIENT_ID not set");
     if (!config.S3_CLIENT_SECRET) throw new Error("S3_CLIENT_SECRET not set");
     return new S3({
+      credentials: {
+        accessKeyId: config.S3_CLIENT_ID,
+        secretAccessKey: config.S3_CLIENT_SECRET,
+      },
       region: config.S3_REGION,
       endpoint: config.S3_ENDPOINT,
-      accessKeyId: config.S3_CLIENT_ID,
-      secretAccessKey: config.S3_CLIENT_SECRET,
-      httpOptions: {
-        timeout,
-      },
+      requestHandler: new NodeHttpHandler({
+        socketTimeout: timeout,
+        connectionTimeout: timeout,
+      }),
     });
   }
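Note: SDK v3 nests the access keys under credentials and replaces v2's httpOptions.timeout with a request handler that distinguishes connection setup from socket inactivity. A minimal sketch, with placeholder region and credentials:

import { S3 } from "@aws-sdk/client-s3";
import { NodeHttpHandler } from "@aws-sdk/node-http-handler";

function client(timeout = 5000) {
  return new S3({
    region: "us-east-1", // placeholder; the real code reads config values
    credentials: {
      accessKeyId: "AKIA_PLACEHOLDER",
      secretAccessKey: "SECRET_PLACEHOLDER",
    },
    requestHandler: new NodeHttpHandler({
      connectionTimeout: timeout, // time to establish the TCP connection
      socketTimeout: timeout, // max inactivity once connected
    }),
  });
}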
@@ -37,22 +49,18 @@
   async exists(path: string): Promise<boolean> {
     if (!config.S3_BUCKET) throw new Error("S3_BUCKET not set");
     try {
-      await this.client()
-        .headObject({
-          Bucket: config.S3_BUCKET,
-          Key: path,
-        })
-        .promise();
+      await this.client().headObject({
+        Bucket: config.S3_BUCKET,
+        Key: path,
+      });
       return true;
     } catch (err) {
       // check if it is a directory
-      const data = await this.client()
-        .listObjectsV2({
-          Bucket: config.S3_BUCKET,
-          Prefix: path,
-          MaxKeys: 1,
-        })
-        .promise();
+      const data = await this.client().listObjectsV2({
+        Bucket: config.S3_BUCKET,
+        Prefix: path,
+        MaxKeys: 1,
+      });
       return (data.Contents?.length || 0) > 0;
     }
   }
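Note: in v3 the aggregated S3 client's methods return Promises directly, so every .promise() call disappears; a missing key now surfaces as a rejected Promise. A minimal existence check in the same style:

import { S3 } from "@aws-sdk/client-s3";

async function exists(s3: S3, Bucket: string, Key: string): Promise<boolean> {
  try {
    await s3.headObject({ Bucket, Key }); // rejects (e.g. NotFound) if absent
    return true;
  } catch {
    return false;
  }
}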
@@ -65,13 +73,11 @@
   /** @override */
   async rm(dir: string): Promise<void> {
     if (!config.S3_BUCKET) throw new Error("S3_BUCKET not set");
-    const data = await this.client()
-      .listObjectsV2({
-        Bucket: config.S3_BUCKET,
-        Prefix: dir,
-        MaxKeys: 1000,
-      })
-      .promise();
+    const data = await this.client().listObjectsV2({
+      Bucket: config.S3_BUCKET,
+      Prefix: dir,
+      MaxKeys: 1000,
+    });

     const params = {
       Bucket: config.S3_BUCKET,
@@ -88,7 +94,7 @@
       // nothing to remove
       return;
     }
-    await this.client().deleteObjects(params).promise();
+    await this.client().deleteObjects(params);

     if (data.IsTruncated) {
       await this.rm(dir);
@@ -96,40 +102,38 @@
   }

   /** @override */
-  send(p: string, res: Response) {
+  async send(p: string, res: Response) {
     if (!config.S3_BUCKET) throw new Error("S3_BUCKET not set");
-    const s = this.client()
-      .getObject({
-        Bucket: config.S3_BUCKET,
-        Key: p,
-      })
-      .on("error", (error) => {
-        try {
-          res.status(error.statusCode || 500);
-        } catch (err) {
-          console.error(`[ERROR] S3 send ${p}`, err);
-        }
-      })
-      .on("httpHeaders", (statusCode, headers, response) => {
-        res.status(statusCode);
-        if (statusCode < 300) {
-          res.set("Content-Length", headers["content-length"]);
-          res.set("Content-Type", headers["content-type"]);
-        }
-        (response.httpResponse.createUnbufferedStream() as Readable).pipe(res);
-      });
-    s.send();
+    try {
+      const command = new GetObjectCommand({
+        Bucket: config.S3_BUCKET,
+        Key: p,
+      });
+      const s = await this.client().send(command);
+      res.status(200);
+      if (s.ContentType) {
+        res.contentType(s.ContentType);
+      }
+      if (s.ContentLength) {
+        res.set("Content-Length", s.ContentLength.toString());
+      }
+      (s.Body as Readable)?.pipe(res);
+    } catch (error) {
+      try {
+        res.status(500);
+      } catch (err) {
+        console.error(`[ERROR] S3 send ${p}`, err);
+      }
+    }
   }

   async fileInfo(path: string) {
     if (!config.S3_BUCKET) throw new Error("S3_BUCKET not set");
-    const info = await this.client(3000)
-      .headObject({
-        Bucket: config.S3_BUCKET,
-        Key: path,
-      })
-      .promise();
+    const info = await this.client(3000).headObject({
+      Bucket: config.S3_BUCKET,
+      Key: path,
+    });
     return {
       size: info.ContentLength,
       lastModified: info.LastModified,
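Note: v3's GetObject response exposes the body as a stream plus typed metadata fields, replacing the v2 dance of httpHeaders events and createUnbufferedStream(). A minimal sketch of streaming an object to an Express response:

import { GetObjectCommand, S3 } from "@aws-sdk/client-s3";
import { Readable } from "stream";
import type { Response } from "express";

async function sendObject(s3: S3, Bucket: string, Key: string, res: Response) {
  const obj = await s3.send(new GetObjectCommand({ Bucket, Key }));
  if (!obj.Body) throw new Error("empty body");
  if (obj.ContentType) res.contentType(obj.ContentType);
  if (obj.ContentLength) res.set("Content-Length", obj.ContentLength.toString());
  // In Node, Body is a Readable; pipe it straight to the client.
  (obj.Body as Readable).pipe(res);
}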
@@ -140,14 +144,20 @@
   }

   /** @override */
-  read(path: string): Readable {
+  async read(path: string): Promise<Readable> {
     if (!config.S3_BUCKET) throw new Error("S3_BUCKET not set");
-    return this.client(3000)
-      .getObject({
-        Bucket: config.S3_BUCKET,
-        Key: path,
-      })
-      .createReadStream();
+    const command = new GetObjectCommand({
+      Bucket: config.S3_BUCKET,
+      Key: path,
+    });
+    const res = (await this.client(3000).send(command)).Body;
+    if (!res) {
+      throw new AnonymousError("file_not_found", {
+        httpStatus: 404,
+        object: path,
+      });
+    }
+    return res as Readable;
   }
/** @override */
@@ -158,7 +168,7 @@ export default class S3Storage implements StorageBase {
source?: SourceBase
): Promise<void> {
if (!config.S3_BUCKET) throw new Error("S3_BUCKET not set");
const params: S3.PutObjectRequest = {
const params: PutObjectCommandInput = {
Bucket: config.S3_BUCKET,
Key: path,
Body: data,
@@ -168,7 +178,7 @@ export default class S3Storage implements StorageBase {
params.Tagging = `source=${source.type}`;
}
// 30s timeout
await this.client(30000).putObject(params).promise();
await this.client(30000).putObject(params);
return;
}
@@ -177,17 +187,15 @@
     if (!config.S3_BUCKET) throw new Error("S3_BUCKET not set");
     if (dir && dir[dir.length - 1] != "/") dir = dir + "/";
     const out: Tree = {};
-    let req: S3.ListObjectsV2Output;
+    let req: ListObjectsV2CommandOutput;
     let nextContinuationToken: string | undefined;
     do {
-      req = await this.client(30000)
-        .listObjectsV2({
-          Bucket: config.S3_BUCKET,
-          Prefix: dir,
-          MaxKeys: 250,
-          ContinuationToken: nextContinuationToken,
-        })
-        .promise();
+      req = await this.client(30000).listObjectsV2({
+        Bucket: config.S3_BUCKET,
+        Prefix: dir,
+        MaxKeys: 250,
+        ContinuationToken: nextContinuationToken,
+      });
       if (!req.Contents) return out;
       nextContinuationToken = req.NextContinuationToken;
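Note: the hand-rolled ContinuationToken loop is the direct translation of the v2 code; v3 also ships async paginator helpers that hide the token bookkeeping. A sketch of that alternative:

import { paginateListObjectsV2, S3 } from "@aws-sdk/client-s3";

async function listKeys(s3: S3, Bucket: string, Prefix: string) {
  const keys: string[] = [];
  // The paginator issues ListObjectsV2 calls until IsTruncated is false.
  for await (const page of paginateListObjectsV2({ client: s3 }, { Bucket, Prefix })) {
    for (const obj of page.Contents ?? []) {
      if (obj.Key) keys.push(obj.Key);
    }
  }
  return keys;
}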
@@ -235,6 +243,9 @@
           header.name = header.name.substr(header.name.indexOf("/") + 1);
           if (source) {
             header.Tagging = `source=${source.type}`;
+            header.Metadata = {
+              source: source.type,
+            };
           }
         },
       });
@@ -245,7 +256,7 @@
   }

   /** @override */
-  archive(
+  async archive(
     dir: string,
     opt?: {
       format?: "zip" | "tar";
@@ -255,31 +266,36 @@
     if (!config.S3_BUCKET) throw new Error("S3_BUCKET not set");
     const archive = archiver(opt?.format || "zip", {});
     if (dir && dir[dir.length - 1] != "/") dir = dir + "/";
-    const req = this.client(30000).listObjectsV2({
-      Bucket: config.S3_BUCKET,
-      Prefix: dir,
-    });
-    const filesStream = req.createReadStream();
-    const xmlStream = flow(filesStream);
-    const that = this;
-    xmlStream.on("tag:contents", function (file) {
-      let rs = that.read(file.key);
-      file.key = file.key.replace(dir, "");
-      const filename = basename(file.key);
-      if (filename == "") return;
-      if (opt?.fileTransformer) {
-        rs = rs.pipe(opt.fileTransformer(filename));
-      }
-      archive.append(rs, {
-        name: filename,
-        prefix: dirname(file.key),
-      });
-    });
-    xmlStream.on("end", () => {
-      archive.finalize();
-    });
+    let req: ListObjectsV2CommandOutput;
+    let nextContinuationToken: string | undefined;
+    do {
+      req = await this.client(30000).listObjectsV2({
+        Bucket: config.S3_BUCKET,
+        Prefix: dir,
+        MaxKeys: 250,
+        ContinuationToken: nextContinuationToken,
+      });
+      nextContinuationToken = req.NextContinuationToken;
+      for (const f of req.Contents || []) {
+        if (!f.Key) continue;
+        const filename = basename(f.Key);
+        const prefix = dirname(f.Key.replace(dir, ""));
+        let rs = await this.read(f.Key);
+        if (opt?.fileTransformer) {
+          // apply transformation on the stream
+          rs = rs.pipe(opt.fileTransformer(f.Key));
+        }
+        archive.append(rs, {
+          name: filename,
+          prefix,
+        });
+      }
+    } while (req && req.Contents?.length && req.IsTruncated);
+    archive.finalize();
     return archive;
   }
 }
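Note: the v2 version parsed the raw ListObjects XML with xml-flow and finalized the archive on the stream's end event; the v3 version pages explicitly and can only call finalize() after the last entry is queued. The core pattern, reduced to a sketch:

import * as archiver from "archiver";
import { Readable } from "stream";

async function buildZip(
  entries: AsyncIterable<{ name: string; body: Readable }>
) {
  const archive = archiver("zip", {});
  for await (const entry of entries) {
    archive.append(entry.body, { name: entry.name });
  }
  // Finalize only once every entry is queued, or the archive truncates.
  archive.finalize();
  return archive;
}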


@@ -44,13 +44,13 @@ export interface StorageBase {
    */
   exists(path: string): Promise<boolean>;

-  send(p: string, res: Response): void;
+  send(p: string, res: Response): Promise<void>;

   /**
    * Read the content of a file
    * @param path the path to the file
    */
-  read(path: string): Readable;
+  read(path: string): Promise<Readable>;

   fileInfo(path: string): Promise<{
     size: number | undefined;
@@ -115,7 +115,7 @@
      */
     fileTransformer?: (p: string) => Transform;
   }
-  ): archiver.Archiver;
+  ): Promise<archiver.Archiver>;

   /**
    * Create a directory
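Note: these interface changes are the contract that forces both backends async. A condensed sketch of the updated StorageBase surface:

import { Readable, Transform } from "stream";
import type { Response } from "express";
import * as archiver from "archiver";

// Every I/O entry point is now Promise-based, so the FileSystem and S3
// implementations expose an identical asynchronous contract.
interface StorageBaseSketch {
  exists(path: string): Promise<boolean>;
  send(p: string, res: Response): Promise<void>;
  read(path: string): Promise<Readable>;
  archive(
    dir: string,
    opt?: { format?: "zip" | "tar"; fileTransformer?: (p: string) => Transform }
  ): Promise<archiver.Archiver>;
}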