| | |
| | | const memoize = require("../util/memoize"); |
| | | const SerializerMiddleware = require("./SerializerMiddleware"); |
| | | |
| | | /** @typedef {typeof import("../util/Hash")} Hash */ |
| | | /** @typedef {import("../util/Hash").HashFunction} HashFunction */ |
| | | /** @typedef {import("../util/fs").IStats} IStats */ |
| | | /** @typedef {import("../util/fs").IntermediateFileSystem} IntermediateFileSystem */ |
| | | /** @typedef {import("./types").BufferSerializableType} BufferSerializableType */ |
| | |
| | | const WRITE_LIMIT_CHUNK = 511 * 1024 * 1024; |
| | | |
| | | /** |
| | | * Returns hash. |
| | | * @param {Buffer[]} buffers buffers |
 * @param {HashFunction} hashFunction hash function to use
| | | * @returns {string} hash |
| | | */ |
| | | const hashForName = (buffers, hashFunction) => { |
| | |
| | | /** @typedef {Promise<void | void[]>} BackgroundJob */ |
| | | |
| | | /** |
| | | * Defines the serialize result type used by this module. |
| | | * @typedef {object} SerializeResult |
| | | * @property {string | false} name |
| | | * @property {number} size |
| | |
| | | |
| | | /** @typedef {{ name: string, size: number }} LazyOptions */ |
| | | /** |
| | | * Defines the lazy function type used by this module. |
| | | * @typedef {import("./SerializerMiddleware").LazyFunction<BufferSerializableType[], Buffer, FileMiddleware, LazyOptions>} LazyFunction |
| | | */ |
| | | |
| | | /** |
| | | * Serializes this instance into the provided serializer context. |
| | | * @param {FileMiddleware} middleware this |
| | | * @param {(BufferSerializableType | LazyFunction)[]} data data to be serialized |
| | | * @param {string | boolean} name file base name |
| | | * @param {(name: string | false, buffers: Buffer[], size: number) => Promise<void>} writeFile writes a file |
 * @param {HashFunction=} hashFunction hash function to use
| | | * @returns {Promise<SerializeResult>} resulting file pointer and promise |
| | | */ |
| | | const serialize = async ( |
| | |
| | | }; |
| | | |
| | | /** |
| | | * Restores this instance from the provided deserializer context. |
| | | * @param {FileMiddleware} middleware this |
| | | * @param {string | false} name filename |
| | | * @param {(name: string | false) => Promise<Buffer[]>} readFile read content of a file |
| | |
| | | contentPosition = 0; |
| | | }; |
	/**
	 * Ensures that at least `n` bytes of content are available to read from the current position.
	 * @param {number} n number of bytes to ensure
	 */
| | | const ensureData = (n) => { |
| | |
| | | nextContent(); |
| | | } |
| | | while (contentItemLength - contentPosition < n) { |
| | | const remaining = contentItem.slice(contentPosition); |
| | | const remaining = contentItem.subarray(contentPosition); |
| | | let lengthFromNext = n - remaining.length; |
| | | /** @type {Buffer[]} */ |
| | | const buffers = [remaining]; |
| | | for (let i = contentsIndex + 1; i < contents.length; i++) { |
| | | const l = contents[i].length; |
| | | if (l > lengthFromNext) { |
| | | buffers.push(contents[i].slice(0, lengthFromNext)); |
| | | contents[i] = contents[i].slice(lengthFromNext); |
| | | buffers.push(contents[i].subarray(0, lengthFromNext)); |
| | | contents[i] = contents[i].subarray(lengthFromNext); |
| | | lengthFromNext = 0; |
| | | break; |
| | | } else { |
| | |
| | | } |
| | | }; |
	/**
	 * Reads an unsigned 32-bit little-endian integer from the content stream and advances the position.
	 * @returns {number} the value read
	 */
| | | const readUInt32LE = () => { |
| | |
| | | return value; |
| | | }; |
	/**
	 * Reads a signed 32-bit little-endian integer from the content stream and advances the position.
	 * @returns {number} the value read
	 */
| | | const readInt32LE = () => { |
| | |
| | | return value; |
| | | }; |
	/**
	 * Reads the next `l` bytes from the content stream as a Buffer and advances the position.
	 * @param {number} l length in bytes
	 * @returns {Buffer} buffer containing the bytes read
	 */
| | |
| | | } |
| | | return result; |
| | | } |
| | | const result = contentItem.slice(contentPosition, contentPosition + l); |
| | | const result = contentItem.subarray(contentPosition, contentPosition + l); |
| | | contentPosition += l; |
| | | // we clone the buffer here to allow the original content to be garbage collected |
| | | return l * 2 < contentItem.buffer.byteLength ? Buffer.from(result) : result; |
| | |
| | | throw new Error("Invalid file version"); |
| | | } |
| | | const sectionCount = readUInt32LE(); |
| | | /** @type {number[]} */ |
| | | const lengths = []; |
| | | let lastLengthPositive = false; |
| | | for (let i = 0; i < sectionCount; i++) { |
| | |
| | | if (length < 0) { |
| | | const slice = readSlice(-length); |
| | | const size = Number(readUInt64LE(slice, 0)); |
| | | const nameBuffer = slice.slice(8); |
| | | const nameBuffer = slice.subarray(8); |
| | | const name = nameBuffer.toString(); |
| | | const lazy = |
| | | /** @type {LazyFunction} */ |
| | |
| | | /** @typedef {{ filename: string, extension?: string }} Context */ |
| | | |
| | | /** |
| | | * Represents FileMiddleware. |
| | | * @extends {SerializerMiddleware<DeserializedType, SerializedType, Context>} |
| | | */ |
| | | class FileMiddleware extends SerializerMiddleware { |
	/**
	 * Creates an instance of FileMiddleware.
	 * @param {IntermediateFileSystem} fs filesystem used to read and write pack files
	 * @param {HashFunction} hashFunction hash function to use
	 */
| | | constructor(fs, hashFunction = DEFAULTS.HASH_FUNCTION) { |
| | | super(); |
| | | /** @type {IntermediateFileSystem} */ |
| | | this.fs = fs; |
| | | /** @type {HashFunction} */ |
| | | this._hashFunction = hashFunction; |
| | | } |
| | | |
| | | /** |
| | | * Serializes this instance into the provided serializer context. |
| | | * @param {DeserializedType} data data |
| | | * @param {Context} context context object |
| | | * @returns {SerializedType | Promise<SerializedType> | null} serialized data |
| | |
| | | |
| | | // It's important that we don't touch existing files during serialization |
| | | // because serialize may read existing files (when deserializing) |
| | | /** @type {Set<string>} */ |
| | | const allWrittenFiles = new Set(); |
| | | /** |
| | | * Processes the provided name. |
| | | * @param {string | false} name name |
| | | * @param {Buffer[]} content content |
| | | * @param {number} size size |
| | |
| | | : filename; |
| | | await new Promise( |
| | | /** |
| | | * Handles the callback logic for this hook. |
| | | * @param {(value?: undefined) => void} resolve resolve |
| | | * @param {(reason?: Error | null) => void} reject reject |
| | | */ |
| | | (resolve, reject) => { |
| | | let stream = this.fs.createWriteStream(`${file}_`); |
| | | /** @type {undefined | import("zlib").Gzip | import("zlib").BrotliCompress} */ |
| | | let compression; |
| | | if (file.endsWith(".gz")) { |
| | | compression = createGzip({ |
| | |
| | | chunks.push(b); |
| | | } else { |
| | | for (let i = 0; i < b.length; i += WRITE_LIMIT_CHUNK) { |
| | | chunks.push(b.slice(i, i + WRITE_LIMIT_CHUNK)); |
| | | chunks.push(b.subarray(i, i + WRITE_LIMIT_CHUNK)); |
| | | } |
| | | } |
| | | } |
| | |
| | | const len = chunks.length; |
| | | let i = 0; |
			/**
			 * Callback invoked after a batch of chunk writes completes; `err` is set when a write failed.
			 * @param {(Error | null)=} err error from the previous write batch, if any
			 */
| | | const batchWrite = (err) => { |
| | |
| | | // Rename the index file to disallow access during inconsistent file state |
| | | await new Promise( |
| | | /** |
| | | * Handles the callback logic for this hook. |
| | | * @param {(value?: undefined) => void} resolve resolve |
| | | */ |
| | | (resolve) => { |
| | |
| | | (file) => |
| | | new Promise( |
| | | /** |
| | | * Handles the callback logic for this hook. |
| | | * @param {(value?: undefined) => void} resolve resolve |
| | | * @param {(reason?: Error | null) => void} reject reject |
| | | * @returns {void} |
| | |
| | | // As final step automatically update the index file to have a consistent pack again |
| | | await new Promise( |
| | | /** |
| | | * Handles the callback logic for this hook. |
| | | * @param {(value?: undefined) => void} resolve resolve |
| | | * @returns {void} |
| | | */ |
| | |
| | | } |
| | | |
| | | /** |
| | | * Restores this instance from the provided deserializer context. |
| | | * @param {SerializedType} data data |
| | | * @param {Context} context context object |
| | | * @returns {DeserializedType | Promise<DeserializedType>} deserialized data |
| | |
| | | deserialize(data, context) { |
| | | const { filename, extension = "" } = context; |
| | | /** |
| | | * Returns result. |
| | | * @param {string | boolean} name name |
| | | * @returns {Promise<Buffer[]>} result |
| | | */ |
| | |
| | | }), |
| | | new Promise( |
| | | /** |
| | | * Handles the chunk size callback for this hook. |
| | | * @param {(value?: undefined) => void} resolve resolve |
| | | * @param {(reason?: Error) => void} reject reject |
| | | */ |
| | |
| | | /** @type {number} */ (currentBufferUsed); |
| | | // values passed to fs.read must be valid int32 values |
| | | if (readOffset > 0x7fffffff) { |
| | | readBuffer = currentBuffer.slice(readOffset); |
| | | readBuffer = currentBuffer.subarray(readOffset); |
| | | readOffset = 0; |
| | | } |
| | | if (readLength > 0x7fffffff) { |