mirror of https://github.com/tdurieux/anonymous_github.git
migrate JavaScript to TypeScript
src/storage/FileSystem.ts (new file, 136 lines added)
@@ -0,0 +1,136 @@
import { StorageBase, Tree } from "../types";
import * as fs from "fs";
import * as tar from "tar-fs";
import * as path from "path";
import * as express from "express";
import config from "../../config";
import * as stream from "stream";
import * as gunzip from "gunzip-maybe";
import * as archiver from "archiver";

export default class FileSystem implements StorageBase {
  type = "FileSystem";

  constructor() {}

  /** @override */
  async exists(p: string): Promise<boolean> {
    return fs.existsSync(path.join(config.FOLDER, p));
  }

  /** @override */
  send(p: string, res: express.Response) {
    res.sendFile(path.join(config.FOLDER, p), { dotfiles: "allow" });
  }

  /** @override */
  read(p: string): stream.Readable {
    return fs.createReadStream(path.join(config.FOLDER, p));
  }

  /** @override */
  async write(p: string, data: Buffer): Promise<void> {
    if (!(await this.exists(path.dirname(p)))) {
      await fs.promises.mkdir(path.dirname(path.join(config.FOLDER, p)), {
        recursive: true,
      });
    }
    return fs.promises.writeFile(path.join(config.FOLDER, p), data);
  }

  /** @override */
  async rm(path: string): Promise<void> {
    await fs.promises.rm(path, { force: true, recursive: true });
  }

  /** @override */
  async mk(dir: string): Promise<void> {
    if (!(await this.exists(dir)))
      fs.promises.mkdir(path.join(config.FOLDER, dir), { recursive: true });
  }

  /** @override */
  async listFiles(
    dir: string,
    opt: {
      root?: string;
      onEntry?: (file: { path: string; size: number }) => void;
    } = {}
  ): Promise<Tree> {
    if (opt.root == null) {
      opt.root = config.FOLDER;
    }
    let files = await fs.promises.readdir(path.join(opt.root, dir));
    const output: Tree = {};
    for (let file of files) {
      let filePath = path.join(dir, file);
      try {
        const stats = await fs.promises.stat(path.join(opt.root, filePath));
        if (file[0] == "$") {
          file = "\\" + file;
        }
        if (stats.isDirectory()) {
          output[file] = await this.listFiles(filePath, opt);
        } else if (stats.isFile()) {
          if (opt.onEntry) {
            opt.onEntry({
              path: filePath,
              size: stats.size,
            });
          }
          output[file] = { size: stats.size, sha: stats.ino.toString() };
        }
      } catch (error) {
        console.error(error);
      }
    }
    return output;
  }

  /** @override */
  async extractTar(p: string, data: stream.Readable): Promise<void> {
    return new Promise((resolve, reject) => {
      data
        .pipe(gunzip())
        .pipe(
          tar.extract(path.join(config.FOLDER, p), {
            map: (header) => {
              header.name = header.name.substr(header.name.indexOf("/") + 1);
              return header;
            },
          })
        )
        .on("finish", resolve)
        .on("error", reject);
    });
  }

  /** @override */
  archive(
    dir: string,
    opt?: {
      format?: "zip" | "tar";
      fileTransformer?;
    }
  ) {
    const archive = archiver(opt?.format, {});

    this.listFiles(dir, {
      onEntry: (file) => {
        let rs = this.read(file.path);
        if (opt?.fileTransformer) {
          // apply transformation on the stream
          rs = rs.pipe(opt.fileTransformer(file.path));
        }
        const f = file.path.replace(dir, "");
        archive.append(rs, {
          name: path.basename(f),
          prefix: path.dirname(f),
        });
      },
    }).then(() => {
      archive.finalize();
    });
    return archive;
  }
}
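For context, a minimal usage sketch of the FileSystem backend introduced above. The demo() driver, the "repo-42" paths, and the relative import path are illustrative assumptions, not part of this commit; it also presumes config.FOLDER points at a writable directory.

import FileSystem from "./src/storage/FileSystem";

// Hypothetical driver exercising the StorageBase methods implemented above.
async function demo() {
  const storage = new FileSystem();

  // write() creates the missing parent directories before writing the buffer.
  await storage.write("repo-42/src/index.js", Buffer.from("console.log('hi');"));

  // listFiles() walks the folder recursively and reports each file to onEntry.
  const tree = await storage.listFiles("repo-42", {
    onEntry: (file) => console.log(`${file.path} (${file.size} bytes)`),
  });
  console.log(JSON.stringify(tree, null, 2));

  // archive() returns an archiver stream, so the zip can be piped without buffering it in memory.
  storage.archive("repo-42", { format: "zip" }).pipe(process.stdout);
}

demo().catch(console.error);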
src/storage/S3.ts (new file, 225 lines added)
@@ -0,0 +1,225 @@
import { StorageBase, Tree, TreeFile } from "../types";
import { S3 } from "aws-sdk";
import config from "../../config";
import * as stream from "stream";
import { ArchiveStreamToS3 } from "archive-stream-to-s3";
import * as express from "express";
import * as mime from "mime-types";
import * as flow from "xml-flow";
import * as archiver from "archiver";
import * as path from "path";
import * as gunzip from "gunzip-maybe";

const originalArchiveStreamToS3Entry: Function = (ArchiveStreamToS3 as any)
  .prototype.onEntry;

export default class S3Storage implements StorageBase {
  type = "AWS";
  client: S3;

  constructor() {
    if (!config.S3_BUCKET) throw new Error("s3_config_not_provided");
    this.client = new S3({
      region: config.S3_REGION,
      endpoint: config.S3_ENDPOINT,
      accessKeyId: config.S3_CLIENT_ID,
      secretAccessKey: config.S3_CLIENT_SECRET,
    });
  }

  /** @override */
  async exists(path: string): Promise<boolean> {
    try {
      await this.client
        .headObject({
          Bucket: config.S3_BUCKET,
          Key: path,
        })
        .promise();
      return true;
    } catch (err) {
      return false;
    }
  }

  /** @override */
  async mk(dir: string): Promise<void> {
    if (dir && dir[dir.length - 1] != "/") dir = dir + "/";

    await this.client
      .putObject({
        Bucket: config.S3_BUCKET,
        Key: dir,
      })
      .promise();
  }

  /** @override */
  async rm(dir: string): Promise<void> {
    const data = await this.client
      .listObjectsV2({
        Bucket: config.S3_BUCKET,
        Prefix: dir,
      })
      .promise();

    const params = { Bucket: config.S3_BUCKET, Delete: { Objects: [] } };

    data.Contents.forEach(function (content) {
      params.Delete.Objects.push({ Key: content.Key });
    });

    if (params.Delete.Objects.length == 0) {
      // nothing to remove
      return;
    }
    await this.client.deleteObjects(params).promise();

    if (data.IsTruncated) {
      await this.rm(dir);
    }
  }

  /** @override */
  send(p: string, res: express.Response) {
    const s = this.client
      .getObject({
        Bucket: config.S3_BUCKET,
        Key: p,
      })
      .on("httpHeaders", (statusCode, headers, response) => {
        res.status(statusCode);
        if (statusCode < 300) {
          res.set("Content-Length", headers["content-length"]);
          res.set("Content-Type", headers["content-type"]);
        }
        (
          response.httpResponse.createUnbufferedStream() as stream.Readable
        ).pipe(res);
      });

    s.send();
  }

  /** @override */
  read(path: string): stream.Readable {
    return this.client
      .getObject({
        Bucket: config.S3_BUCKET,
        Key: path,
      })
      .createReadStream();
  }

  /** @override */
  async write(path: string, data: Buffer): Promise<void> {
    await this.client
      .putObject({
        Bucket: config.S3_BUCKET,
        Key: path,
        Body: data,
        ContentType: mime.lookup(path).toString(),
      })
      .promise();
    return;
  }

  /** @override */
  async listFiles(dir: string): Promise<Tree> {
    if (dir && dir[dir.length - 1] != "/") dir = dir + "/";
    const out: Tree = {};
    const req = await this.client
      .listObjectsV2({
        Bucket: config.S3_BUCKET,
        Prefix: dir,
      })
      .promise();

    if (!req.Contents) return out;
    for (const f of req.Contents) {
      if (!f.Key) continue;
      f.Key = f.Key.replace(dir, "");
      const paths = f.Key.split("/");
      let current: Tree = out;
      for (let i = 0; i < paths.length - 1; i++) {
        let p = paths[i];
        if (!p) continue;
        if (!(current[p] as Tree)) {
          current[p] = {} as Tree;
        }
        current = current[p] as Tree;
      }

      const fileInfo: TreeFile = { size: f.Size || 0, sha: f.ETag };
      const fileName = paths[paths.length - 1];
      if (fileName) current[fileName] = fileInfo;
    }
    return out;
  }

  /** @override */
  async extractTar(p: string, data: stream.Readable): Promise<void> {
    return new Promise<void>((resolve, reject) => {
      const toS3 = new ArchiveStreamToS3(config.S3_BUCKET, p, this.client);

      let rootFolder = null;
      (ArchiveStreamToS3 as any).prototype.onEntry = function (
        header: any,
        stream: any,
        next: any
      ) {
        if (rootFolder == null) {
          rootFolder = header.name.substr(0, header.name.indexOf("/") + 1);
        }
        header.name = header.name.replace(rootFolder, "");
        originalArchiveStreamToS3Entry.call(toS3, header, stream, next);
      };

      toS3.on("finish", (result) => {
        resolve(result);
      });
      toS3.on("error", (e) => {
        reject(e);
      });
      data.pipe(gunzip()).pipe(toS3);
    });
  }

  /** @override */
  archive(
    dir: string,
    opt?: {
      format?: "zip" | "tar";
      fileTransformer?;
    }
  ) {
    const archive = archiver(opt?.format, {});
    if (dir && dir[dir.length - 1] != "/") dir = dir + "/";
    const req = this.client.listObjectsV2({
      Bucket: config.S3_BUCKET,
      Prefix: dir,
    });
    const filesStream = req.createReadStream();

    const xmlStream = flow(filesStream);

    const that = this;
    xmlStream.on("tag:contents", function (file) {
      let rs = that.read(file.key);
      file.key = file.key.replace(dir, "");
      const filename = path.basename(file.key);
      if (filename == "") return;
      if (opt?.fileTransformer) {
        rs = rs.pipe(opt.fileTransformer(filename));
      }
      archive.append(rs, {
        name: filename,
        prefix: path.dirname(file.key),
      });
    });
    xmlStream.on("end", () => {
      archive.finalize();
    });
    return archive;
  }
}
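Both classes implement the same StorageBase contract, so callers can be written against the interface and the concrete backend picked once at startup. A hedged sketch of that wiring, assuming a config.STORAGE flag and these import paths (neither is shown in this commit):

import { StorageBase } from "./src/types";
import FileSystem from "./src/storage/FileSystem";
import S3Storage from "./src/storage/S3";
import config from "./config";

// Choose the backend once; the rest of the application only sees StorageBase,
// so it never needs to know whether files live on local disk or in S3.
function buildStorage(): StorageBase {
  return config.STORAGE == "s3" ? new S3Storage() : new FileSystem();
}

const storage = buildStorage();
// e.g. await storage.write("repo-42/README.md", Buffer.from("# hello"));

Note that S3Storage.archive() streams the listObjectsV2 XML response through xml-flow and appends each object to the archiver as it is parsed, mirroring the onEntry callback that FileSystem.archive() uses for the same purpose.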