Skip to content

Commit

Permalink
Refactor: use the V8 serializer for the cache file
Browse files Browse the repository at this point in the history
  • Loading branch information
emmercm committed Aug 29, 2024
1 parent 27502fd commit 8888951
Show file tree
Hide file tree
Showing 2 changed files with 45 additions and 31 deletions.
42 changes: 29 additions & 13 deletions src/igir.ts
Original file line number Diff line number Diff line change
Expand Up @@ -230,13 +230,17 @@ export default class Igir {
}

private async getCachePath(): Promise<string | undefined> {
const defaultFileName = `${Package.NAME}.cache`;
const defaultFileName = process.versions.bun
// As of v1.1.26, Bun uses a different serializer than V8, making cache files incompatible
// @see https://bun.sh/docs/runtime/nodejs-apis
? `${Package.NAME}.bun.cache`
: `${Package.NAME}.cache`;

// Try to use the provided path
// First, try to use the provided path
let cachePath = this.options.getCachePath();
if (cachePath !== undefined && await FsPoly.isDirectory(cachePath)) {
cachePath = path.join(cachePath, defaultFileName);
this.logger.warn(`A directory was provided for cache path instead of a file, using '${cachePath}' instead`);
this.logger.warn(`A directory was provided for the cache path instead of a file, using '${cachePath}' instead`);
}
if (cachePath !== undefined) {
if (await FsPoly.isWritable(cachePath)) {
Expand All @@ -245,19 +249,31 @@ export default class Igir {
this.logger.warn('Provided cache path isn\'t writable, using the default path');
}

// Otherwise, use a default path
return [
const cachePathCandidates = [
path.join(path.resolve(Package.DIRECTORY), defaultFileName),
path.join(os.homedir(), defaultFileName),
path.join(process.cwd(), defaultFileName),
]
.filter((filePath) => filePath && !filePath.startsWith(os.tmpdir()))
.find(async (filePath) => {
if (await FsPoly.exists(filePath)) {
return true;
}
return FsPoly.isWritable(filePath);
});
].filter((filePath) => filePath && !filePath.startsWith(os.tmpdir()));

// Next, try to use an already existing path
const exists = await Promise.all(
cachePathCandidates.map(async (pathCandidate) => FsPoly.exists(pathCandidate)),
);
const existsCachePath = cachePathCandidates.find((_, idx) => exists[idx]);
if (existsCachePath !== undefined) {
return existsCachePath;
}

// Next, try to find a writable path
const writable = await Promise.all(
cachePathCandidates.map(async (pathCandidate) => FsPoly.isWritable(pathCandidate)),
);
const writableCachePath = cachePathCandidates.find((_, idx) => writable[idx]);
if (writableCachePath !== undefined) {
return writableCachePath;
}

return undefined;
}

private async processDATScanner(fileFactory: FileFactory): Promise<DAT[]> {
Expand Down
34 changes: 16 additions & 18 deletions src/types/cache.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import fs from 'node:fs';
import path from 'node:path';
import util from 'node:util';
import * as v8 from 'node:v8';
import * as zlib from 'node:zlib';

import { E_CANCELED, Mutex } from 'async-mutex';
Expand All @@ -9,10 +9,6 @@ import KeyedMutex from '../keyedMutex.js';
import FsPoly from '../polyfill/fsPoly.js';
import Timer from '../timer.js';

interface CacheData {
data: string,
}

export interface CacheProps {
filePath?: string,
fileFlushMillis?: number,
Expand Down Expand Up @@ -109,8 +105,11 @@ export default class Cache<V> {
}

/**
 * Set a key/value pair in the in-memory cache without acquiring any locks.
 *
 * Schedules a (debounced) write-back to disk only when the value actually
 * changed, avoiding redundant saves when the same value is set repeatedly.
 * NOTE(review): the change check uses strict equality, so structurally equal
 * but distinct objects still count as a change — confirm that's intended.
 */
private setUnsafe(key: string, val: V): void {
  const oldVal = this.keyValues.get(key);
  this.keyValues.set(key, val);
  // Only flush when the value differs from what was already stored
  if (val !== oldVal) {
    this.saveWithTimeout();
  }
}

/**
Expand Down Expand Up @@ -145,12 +144,13 @@ export default class Cache<V> {
}

try {
const cacheData = JSON.parse(
await fs.promises.readFile(this.filePath, { encoding: Cache.BUFFER_ENCODING }),
) as CacheData;
const compressed = Buffer.from(cacheData.data, Cache.BUFFER_ENCODING);
const decompressed = await util.promisify(zlib.inflate)(compressed);
const keyValuesObject = JSON.parse(decompressed.toString(Cache.BUFFER_ENCODING));
const compressed = await fs.promises.readFile(this.filePath);
if (compressed.length === 0) {
return this;
}
// NOTE(cemmer): util.promisify(zlib.inflate) seems to have issues not throwing correctly
const decompressed = zlib.inflateSync(compressed);
const keyValuesObject = v8.deserialize(decompressed);
const keyValuesEntries = Object.entries(keyValuesObject) as [string, V][];
this.keyValues = new Map(keyValuesEntries);
} catch { /* ignored */ }
Expand Down Expand Up @@ -187,11 +187,9 @@ export default class Cache<V> {
}

const keyValuesObject = Object.fromEntries(this.keyValues);
const decompressed = JSON.stringify(keyValuesObject);
const compressed = await util.promisify(zlib.deflate)(decompressed);
const cacheData = {
data: compressed.toString(Cache.BUFFER_ENCODING),
} satisfies CacheData;
const decompressed = v8.serialize(keyValuesObject);
// NOTE(cemmer): util.promisify(zlib.deflate) seems to have issues not throwing correctly
const compressed = zlib.deflateSync(decompressed);

// Ensure the directory exists
const dirPath = path.dirname(this.filePath);
Expand All @@ -203,7 +201,7 @@ export default class Cache<V> {
const tempFile = await FsPoly.mktemp(this.filePath);
await FsPoly.writeFile(
tempFile,
JSON.stringify(cacheData),
compressed,
{ encoding: Cache.BUFFER_ENCODING },
);

Expand Down

0 comments on commit 8888951

Please sign in to comment.