feat: refactor project

This commit is contained in:
Carlos
2024-11-23 14:56:07 -05:00
parent f0c2a50c18
commit 1c6db5818d
2351 changed files with 39323 additions and 60326 deletions


@@ -19,6 +19,7 @@ const memoize = require("../util/memoize");
const SerializerMiddleware = require("./SerializerMiddleware");
/** @typedef {typeof import("../util/Hash")} Hash */
/** @typedef {import("../util/fs").IStats} IStats */
/** @typedef {import("../util/fs").IntermediateFileSystem} IntermediateFileSystem */
/** @typedef {import("./types").BufferSerializableType} BufferSerializableType */
@@ -57,6 +58,7 @@ const hashForName = (buffers, hashFunction) => {
const COMPRESSION_CHUNK_SIZE = 100 * 1024 * 1024;
const DECOMPRESSION_CHUNK_SIZE = 100 * 1024 * 1024;
/** @type {function(Buffer, number, number): void} */
const writeUInt64LE = Buffer.prototype.writeBigUInt64LE
? (buf, value, offset) => {
buf.writeBigUInt64LE(BigInt(value), offset);
@@ -68,10 +70,9 @@ const writeUInt64LE = Buffer.prototype.writeBigUInt64LE
buf.writeUInt32LE(high, offset + 4);
};
/** @type {function(Buffer, number): void} */
const readUInt64LE = Buffer.prototype.readBigUInt64LE
-? (buf, offset) => {
-return Number(buf.readBigUInt64LE(offset));
-}
+? (buf, offset) => Number(buf.readBigUInt64LE(offset))
: (buf, offset) => {
const low = buf.readUInt32LE(offset);
const high = buf.readUInt32LE(offset + 4);
@@ -82,7 +83,7 @@ const readUInt64LE = Buffer.prototype.readBigUInt64LE
* @typedef {object} SerializeResult
* @property {string | false} name
* @property {number} size
-* @property {Promise=} backgroundJob
+* @property {Promise<any>=} backgroundJob
*/
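The `writeUInt64LE`/`readUInt64LE` pair in the hunks above feature-detects `Buffer.prototype.writeBigUInt64LE`, so the serializer still runs on engines without the BigInt accessors by falling back to two 32-bit halves. A minimal, self-contained sketch of that pattern (the fallback arithmetic here is reconstructed and slightly simplified, not copied from the diff):

```js
// Prefer the native BigInt accessors; otherwise combine two UInt32 accesses.
const writeUInt64LE = Buffer.prototype.writeBigUInt64LE
  ? (buf, value, offset) => {
      buf.writeBigUInt64LE(BigInt(value), offset);
    }
  : (buf, value, offset) => {
      // little-endian: low 32 bits first, then the high 32 bits
      buf.writeUInt32LE(value % 0x100000000, offset);
      buf.writeUInt32LE(Math.floor(value / 0x100000000), offset + 4);
    };
const readUInt64LE = Buffer.prototype.readBigUInt64LE
  ? (buf, offset) => Number(buf.readBigUInt64LE(offset))
  : (buf, offset) =>
      buf.readUInt32LE(offset) + buf.readUInt32LE(offset + 4) * 0x100000000;

const buf = Buffer.alloc(8);
writeUInt64LE(buf, 2 ** 40 + 7, 0);
console.log(readUInt64LE(buf, 0)); // 1099511627783
```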
/**
@@ -104,8 +105,8 @@ const serialize = async (
const processedData = [];
/** @type {WeakMap<SerializeResult, function(): any | Promise<any>>} */
const resultToLazy = new WeakMap();
-/** @type {Buffer[]} */
-let lastBuffers = undefined;
+/** @type {Buffer[] | undefined} */
+let lastBuffers;
for (const item of await data) {
if (typeof item === "function") {
if (!SerializerMiddleware.isLazy(item))
@@ -163,9 +164,8 @@ const serialize = async (
const backgroundJobs = [];
const resolvedData = (
await Promise.all(
-/** @type {Promise<Buffer[] | Buffer | SerializeResult>[]} */ (
-processedData
-)
+/** @type {Promise<Buffer[] | Buffer | SerializeResult>[]} */
+(processedData)
)
).map(item => {
if (Array.isArray(item) || Buffer.isBuffer(item)) return item;
@@ -195,7 +195,7 @@ const serialize = async (
} else if (item) {
lengths.push(-item.length);
} else {
throw new Error("Unexpected falsy value in resolved data " + item);
throw new Error(`Unexpected falsy value in resolved data ${item}`);
}
}
const header = Buffer.allocUnsafe(8 + lengths.length * 4);
@@ -237,12 +237,12 @@ const serialize = async (
*/
const deserialize = async (middleware, name, readFile) => {
const contents = await readFile(name);
-if (contents.length === 0) throw new Error("Empty file " + name);
+if (contents.length === 0) throw new Error(`Empty file ${name}`);
let contentsIndex = 0;
let contentItem = contents[0];
let contentItemLength = contentItem.length;
let contentPosition = 0;
-if (contentItemLength === 0) throw new Error("Empty file " + name);
+if (contentItemLength === 0) throw new Error(`Empty file ${name}`);
const nextContent = () => {
contentsIndex++;
contentItem = contents[contentsIndex];
@@ -378,18 +378,16 @@ const deserialize = async (middleware, name, readFile) => {
length -= l;
contentPosition = contentItemLength;
}
-} else {
-if (length >= contentItemLength) {
-result.push(contentItem);
-length -= contentItemLength;
-contentPosition = contentItemLength;
-} else {
-result.push(
-Buffer.from(contentItem.buffer, contentItem.byteOffset, length)
-);
-contentPosition += length;
-length = 0;
-}
-}
+} else if (length >= contentItemLength) {
+result.push(contentItem);
+length -= contentItemLength;
+contentPosition = contentItemLength;
+} else {
+result.push(
+Buffer.from(contentItem.buffer, contentItem.byteOffset, length)
+);
+contentPosition += length;
+length = 0;
+}
}
while (length > 0) {
nextContent();
@@ -410,6 +408,8 @@ const deserialize = async (middleware, name, readFile) => {
return result;
};
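The buffer-consuming branch reshaped in the hunk above is a small "take N bytes from a list of buffers" step: whole buffers are taken as long as they fit, and the final buffer is sliced with a zero-copy `Buffer.from(arrayBuffer, byteOffset, length)` view. A standalone sketch of that idea, using a hypothetical `takeBytes` helper that is not part of the diff:

```js
// Consume `length` bytes from a list of buffers, slicing only the last one.
const takeBytes = (buffers, length) => {
  const result = [];
  for (const buf of buffers) {
    if (length === 0) break;
    if (length >= buf.length) {
      result.push(buf); // the whole buffer fits, take it as-is
      length -= buf.length;
    } else {
      // Buffer.from(arrayBuffer, byteOffset, length) shares memory, no copy
      result.push(Buffer.from(buf.buffer, buf.byteOffset, length));
      length = 0;
    }
  }
  return result;
};

const parts = takeBytes([Buffer.alloc(4, 1), Buffer.alloc(4, 2)], 6);
console.log(parts.map(b => b.length)); // [ 4, 2 ]
```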
/** @typedef {{ filename: string, extension?: string }} FileMiddlewareContext */
/**
* @typedef {BufferSerializableType[]} DeserializedType
* @typedef {true} SerializedType
@@ -425,6 +425,7 @@ class FileMiddleware extends SerializerMiddleware {
this.fs = fs;
this._hashFunction = hashFunction;
}
/**
* @param {DeserializedType} data data
* @param {object} context context object
@@ -439,76 +440,92 @@ class FileMiddleware extends SerializerMiddleware {
// It's important that we don't touch existing files during serialization
// because serialize may read existing files (when deserializing)
const allWrittenFiles = new Set();
+/**
+ * @param {string | false} name name
+ * @param {Buffer[]} content content
+ * @param {number} size size
+ * @returns {Promise<void>}
+ */
const writeFile = async (name, content, size) => {
const file = name
? join(this.fs, filename, `../${name}${extension}`)
: filename;
-await new Promise((resolve, reject) => {
-let stream = this.fs.createWriteStream(file + "_");
-let compression;
-if (file.endsWith(".gz")) {
-compression = createGzip({
-chunkSize: COMPRESSION_CHUNK_SIZE,
-level: zConstants.Z_BEST_SPEED
-});
-} else if (file.endsWith(".br")) {
-compression = createBrotliCompress({
-chunkSize: COMPRESSION_CHUNK_SIZE,
-params: {
-[zConstants.BROTLI_PARAM_MODE]: zConstants.BROTLI_MODE_TEXT,
-[zConstants.BROTLI_PARAM_QUALITY]: 2,
-[zConstants.BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING]: true,
-[zConstants.BROTLI_PARAM_SIZE_HINT]: size
-}
-});
-}
-if (compression) {
-pipeline(compression, stream, reject);
-stream = compression;
-stream.on("finish", () => resolve());
-} else {
-stream.on("error", err => reject(err));
-stream.on("finish", () => resolve());
-}
-// split into chunks for WRITE_LIMIT_CHUNK size
-const chunks = [];
-for (const b of content) {
-if (b.length < WRITE_LIMIT_CHUNK) {
-chunks.push(b);
-} else {
-for (let i = 0; i < b.length; i += WRITE_LIMIT_CHUNK) {
-chunks.push(b.slice(i, i + WRITE_LIMIT_CHUNK));
-}
-}
-}
-const len = chunks.length;
-let i = 0;
-const batchWrite = err => {
-// will be handled in "on" error handler
-if (err) return;
-if (i === len) {
-stream.end();
-return;
-}
-// queue up a batch of chunks up to the write limit
-// end is exclusive
-let end = i;
-let sum = chunks[end++].length;
-while (end < len) {
-sum += chunks[end].length;
-if (sum > WRITE_LIMIT_TOTAL) break;
-end++;
-}
-while (i < end - 1) {
-stream.write(chunks[i++]);
-}
-stream.write(chunks[i++], batchWrite);
-};
-batchWrite();
-});
+await new Promise(
+/**
+ * @param {(value?: undefined) => void} resolve resolve
+ * @param {(reason?: Error | null) => void} reject reject
+ */
+(resolve, reject) => {
+let stream = this.fs.createWriteStream(`${file}_`);
+let compression;
+if (file.endsWith(".gz")) {
+compression = createGzip({
+chunkSize: COMPRESSION_CHUNK_SIZE,
+level: zConstants.Z_BEST_SPEED
+});
+} else if (file.endsWith(".br")) {
+compression = createBrotliCompress({
+chunkSize: COMPRESSION_CHUNK_SIZE,
+params: {
+[zConstants.BROTLI_PARAM_MODE]: zConstants.BROTLI_MODE_TEXT,
+[zConstants.BROTLI_PARAM_QUALITY]: 2,
+[zConstants.BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING]: true,
+[zConstants.BROTLI_PARAM_SIZE_HINT]: size
+}
+});
+}
+if (compression) {
+pipeline(compression, stream, reject);
+stream = compression;
+stream.on("finish", () => resolve());
+} else {
+stream.on("error", err => reject(err));
+stream.on("finish", () => resolve());
+}
+// split into chunks for WRITE_LIMIT_CHUNK size
+/** @type {TODO[]} */
+const chunks = [];
+for (const b of content) {
+if (b.length < WRITE_LIMIT_CHUNK) {
+chunks.push(b);
+} else {
+for (let i = 0; i < b.length; i += WRITE_LIMIT_CHUNK) {
+chunks.push(b.slice(i, i + WRITE_LIMIT_CHUNK));
+}
+}
+}
+const len = chunks.length;
+let i = 0;
+/**
+ * @param {(Error | null)=} err err
+ */
+const batchWrite = err => {
+// will be handled in "on" error handler
+if (err) return;
+if (i === len) {
+stream.end();
+return;
+}
+// queue up a batch of chunks up to the write limit
+// end is exclusive
+let end = i;
+let sum = chunks[end++].length;
+while (end < len) {
+sum += chunks[end].length;
+if (sum > WRITE_LIMIT_TOTAL) break;
+end++;
+}
+while (i < end - 1) {
+stream.write(chunks[i++]);
+}
+stream.write(chunks[i++], batchWrite);
+};
+batchWrite();
+}
+);
if (name) allWrittenFiles.add(file);
};
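`writeFile` above splits the payload into chunks smaller than `WRITE_LIMIT_CHUNK`, then writes them in batches whose total size stays under `WRITE_LIMIT_TOTAL`, attaching the `batchWrite` callback only to the last write of each batch so the next batch waits until the stream has flushed. A runnable sketch of the same backpressure pattern, with deliberately tiny illustrative limits and a hypothetical `writeBatched` helper:

```js
const { createWriteStream } = require("fs");

const WRITE_LIMIT_CHUNK = 8; // tiny values for demonstration only
const WRITE_LIMIT_TOTAL = 16;

const writeBatched = (stream, buffers, done) => {
  // split oversized buffers so no single write exceeds WRITE_LIMIT_CHUNK
  const chunks = [];
  for (const b of buffers) {
    for (let i = 0; i < b.length; i += WRITE_LIMIT_CHUNK) {
      chunks.push(b.slice(i, i + WRITE_LIMIT_CHUNK));
    }
  }
  let i = 0;
  const batchWrite = err => {
    if (err) return done(err);
    if (i === chunks.length) return stream.end(done);
    // queue chunks until the batch would exceed WRITE_LIMIT_TOTAL
    let end = i;
    let sum = chunks[end++].length;
    while (end < chunks.length && sum + chunks[end].length <= WRITE_LIMIT_TOTAL) {
      sum += chunks[end++].length;
    }
    while (i < end - 1) stream.write(chunks[i++]);
    // the callback on the last write schedules the next batch after a flush
    stream.write(chunks[i++], batchWrite);
  };
  batchWrite();
};

writeBatched(createWriteStream("/tmp/out.bin"), [Buffer.alloc(40, 1)], err => {
  if (err) throw err;
});
```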
@@ -518,10 +535,15 @@ class FileMiddleware extends SerializerMiddleware {
await backgroundJob;
// Rename the index file to disallow access during inconsistent file state
-await new Promise(resolve =>
-this.fs.rename(filename, filename + ".old", err => {
-resolve();
-})
+await new Promise(
+/**
+ * @param {(value?: undefined) => void} resolve resolve
+ */
+resolve => {
+this.fs.rename(filename, `${filename}.old`, err => {
+resolve();
+});
+}
);
// update all written files
@@ -529,22 +551,35 @@ class FileMiddleware extends SerializerMiddleware {
Array.from(
allWrittenFiles,
file =>
-new Promise((resolve, reject) => {
-this.fs.rename(file + "_", file, err => {
-if (err) return reject(err);
-resolve();
-});
-})
+new Promise(
+/**
+ * @param {(value?: undefined) => void} resolve resolve
+ * @param {(reason?: Error | null) => void} reject reject
+ * @returns {void}
+ */
+(resolve, reject) => {
+this.fs.rename(`${file}_`, file, err => {
+if (err) return reject(err);
+resolve();
+});
+}
+)
)
);
// As final step automatically update the index file to have a consistent pack again
-await new Promise(resolve => {
-this.fs.rename(filename + "_", filename, err => {
-if (err) return reject(err);
-resolve();
-});
-});
+await new Promise(
+/**
+ * @param {(value?: undefined) => void} resolve resolve
+ * @param {(reason?: Error | null) => void} reject reject
+ * @returns {void}
+ */
+(resolve, reject) => {
+this.fs.rename(`${filename}_`, filename, err => {
+if (err) return reject(err);
+resolve();
+});
+}
+);
return /** @type {true} */ (true);
}
)
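The tail of `serialize` above commits the pack atomically: every file is first written under a temporary `<file>_` name, the old index is parked as `.old`, the temporaries are renamed into place, and the index itself is renamed last, so a readable index only ever points at complete files. A compact sketch of that ordering using `fs/promises` and a hypothetical `commitPack` helper:

```js
const fs = require("fs/promises");

// indexFile is the pack index; writtenFiles hold finished "<file>_" payloads.
async function commitPack(indexFile, writtenFiles) {
  // park the old index first so no reader sees a half-updated pack
  await fs.rename(indexFile, `${indexFile}.old`).catch(() => {});
  // move every payload from its temporary "_" name into place
  await Promise.all(writtenFiles.map(file => fs.rename(`${file}_`, file)));
  // the index goes last: once it appears, the pack is consistent again
  await fs.rename(`${indexFile}_`, indexFile);
}
```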
@@ -560,6 +595,10 @@ class FileMiddleware extends SerializerMiddleware {
*/
deserialize(data, context) {
const { filename, extension = "" } = context;
/**
* @param {string | boolean} name name
* @returns {Promise<TODO>} result
*/
const readFile = name =>
new Promise((resolve, reject) => {
const file = name
@@ -570,11 +609,12 @@ class FileMiddleware extends SerializerMiddleware {
reject(err);
return;
}
-let remaining = /** @type {number} */ (stats.size);
+let remaining = /** @type {IStats} */ (stats).size;
/** @type {Buffer | undefined} */
let currentBuffer;
/** @type {number | undefined} */
let currentBufferUsed;
/** @type {any[]} */
const buf = [];
/** @type {import("zlib").Zlib & import("stream").Transform | undefined} */
let decompression;
@@ -588,7 +628,8 @@ class FileMiddleware extends SerializerMiddleware {
});
}
if (decompression) {
-let newResolve, newReject;
+let newResolve;
+let newReject;
resolve(
Promise.all([
new Promise((rs, rj) => {
@@ -605,11 +646,12 @@ class FileMiddleware extends SerializerMiddleware {
resolve = newResolve;
reject = newReject;
}
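When the pack is compressed, `deserialize` resolves the caller's promise early with a `Promise.all` over a fresh promise pair, then re-targets its local `resolve`/`reject` at that pair so the streaming decompressor can settle the final result later. The underlying "deferred" pattern, sketched standalone (the producer here is a stand-in, not part of the diff):

```js
// Create a promise whose resolve/reject can be handed to a later producer.
const deferred = () => {
  let resolve;
  let reject;
  const promise = new Promise((rs, rj) => {
    resolve = rs;
    reject = rj;
  });
  return { promise, resolve, reject };
};

const d = deferred();
setTimeout(() => d.resolve("done"), 10); // producer settles it later
d.promise.then(console.log);
```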
-this.fs.open(file, "r", (err, fd) => {
+this.fs.open(file, "r", (err, _fd) => {
if (err) {
reject(err);
return;
}
+const fd = /** @type {number} */ (_fd);
const read = () => {
if (currentBuffer === undefined) {
currentBuffer = Buffer.allocUnsafeSlow(
@@ -622,8 +664,10 @@ class FileMiddleware extends SerializerMiddleware {
currentBufferUsed = 0;
}
let readBuffer = currentBuffer;
-let readOffset = currentBufferUsed;
-let readLength = currentBuffer.length - currentBufferUsed;
+let readOffset = /** @type {number} */ (currentBufferUsed);
+let readLength =
+currentBuffer.length -
+/** @type {number} */ (currentBufferUsed);
// values passed to fs.read must be valid int32 values
if (readOffset > 0x7fffffff) {
readBuffer = currentBuffer.slice(readOffset);
@@ -645,9 +689,13 @@ class FileMiddleware extends SerializerMiddleware {
});
return;
}
-currentBufferUsed += bytesRead;
+/** @type {number} */
+(currentBufferUsed) += bytesRead;
remaining -= bytesRead;
-if (currentBufferUsed === currentBuffer.length) {
+if (
+currentBufferUsed ===
+/** @type {Buffer} */ (currentBuffer).length
+) {
if (decompression) {
decompression.write(currentBuffer);
} else {
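The read loop above guards every `fs.read` call because its offset and length arguments must be valid signed 32-bit integers even when the backing buffer is larger; the diff slices the buffer and caps the request size accordingly. A reduced sketch of reading a file of known size under that constraint, using a hypothetical `readWhole` helper:

```js
const fs = require("fs");

const readWhole = (fd, size, cb) => {
  const buffers = [];
  let remaining = size;
  const readNext = () => {
    if (remaining === 0) return cb(null, buffers);
    // cap each request so the length passed to fs.read fits in an int32
    const length = Math.min(remaining, 0x7fffffff);
    const buf = Buffer.allocUnsafe(length);
    fs.read(fd, buf, 0, length, null, (err, bytesRead) => {
      if (err) return cb(err);
      if (bytesRead === 0) return cb(new Error("unexpected end of file"));
      buffers.push(buf.slice(0, bytesRead));
      remaining -= bytesRead;
      readNext();
    });
  };
  readNext();
};
```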