mirror of
https://github.com/unraid/api.git
synced 2026-05-01 20:54:27 -05:00
fix: flaky watch on boot drive's dynamix config (#1753)
On FAT32, `fs.stat()` updates access time, which means file reads are also writes, which means we can't use `usePoll` without degrading users' flash drives. To keep file reads lazy without a larger refactor, I override `getters.dynamix()` as the entrypoint to re-read the boot drive's dynamix config. Consecutive calls to `getters.dynamix()` are a common access pattern, which means we have to memoize to avoid many redundant file reads, so I used a TTL cache with a 250ms lifetime, hoping to scope config files to each request. `getters.dynamix()` is also used synchronously, so I bit the bullet and switched away from async reads for simplicity, considering that most reads will be occurring from memory, even during cache misses. <!-- This is an auto-generated comment: release notes by coderabbit.ai --> ## Summary by CodeRabbit * **New Features** * Added a TTL memoized loader utility with exported types. * Added a public function to load Dynamix configuration at startup. * **Refactor** * Startup now uses the deterministic, cached config loader; runtime file-watch for Dynamix config removed. * Simplified config state handling and load-status reporting for more predictable startup behavior. * **Tests** * Added tests for TTL caching, eviction, keying, and conditional caching. * **Chores** * Bumped package versions and updated changelog. <!-- end of auto-generated comment: release notes by coderabbit.ai --> --------- Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
This commit is contained in:
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@unraid/shared",
|
||||
"version": "1.0.0",
|
||||
"version": "4.25.3",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"type": "module",
|
||||
@@ -69,4 +69,4 @@
|
||||
"undici": "7.15.0",
|
||||
"ws": "8.18.3"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,5 +4,10 @@ export * from './graphql.model.js';
|
||||
export * from './tokens.js';
|
||||
export * from './use-permissions.directive.js';
|
||||
export * from './util/permissions.js';
|
||||
export { createTtlMemoizedLoader } from './util/create-ttl-memoized-loader.js';
|
||||
export type {
|
||||
CreateTtlMemoizedLoaderOptions,
|
||||
TtlMemoizedLoader,
|
||||
} from './util/create-ttl-memoized-loader.js';
|
||||
export type { InternalGraphQLClientFactory } from './types/internal-graphql-client.factory.js';
|
||||
export type { CanonicalInternalClientService } from './types/canonical-internal-client.interface.js';
|
||||
|
||||
@@ -0,0 +1,72 @@
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
|
||||
import { createTtlMemoizedLoader } from "../create-ttl-memoized-loader.js";
|
||||
|
||||
describe("createTtlMemoizedLoader", () => {
|
||||
it("reuses cached value within ttl window", () => {
|
||||
let loadCount = 0;
|
||||
const loader = createTtlMemoizedLoader<number, undefined>({
|
||||
ttlMs: 500,
|
||||
load: () => {
|
||||
loadCount += 1;
|
||||
return loadCount;
|
||||
},
|
||||
});
|
||||
|
||||
expect(loader.get(undefined)).toBe(1);
|
||||
expect(loader.get(undefined)).toBe(1);
|
||||
expect(loadCount).toBe(1);
|
||||
});
|
||||
|
||||
it("evicts cache entries once ttl expires", () => {
|
||||
vi.useFakeTimers();
|
||||
let loadCount = 0;
|
||||
const loader = createTtlMemoizedLoader<number, undefined>({
|
||||
ttlMs: 100,
|
||||
load: () => {
|
||||
loadCount += 1;
|
||||
return loadCount;
|
||||
},
|
||||
});
|
||||
|
||||
expect(loader.get(undefined)).toBe(1);
|
||||
vi.advanceTimersByTime(150);
|
||||
expect(loader.get(undefined)).toBe(2);
|
||||
expect(loadCount).toBe(2);
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it("treats different cache keys as independent entries", () => {
|
||||
let loadCount = 0;
|
||||
const loader = createTtlMemoizedLoader<number, { key: string }>({
|
||||
ttlMs: 500,
|
||||
getCacheKey: ({ key }) => key,
|
||||
load: ({ key }) => {
|
||||
loadCount += 1;
|
||||
return Number(`${loadCount}${key.length}`);
|
||||
},
|
||||
});
|
||||
|
||||
expect(loader.get({ key: "a" })).toBe(11);
|
||||
expect(loader.get({ key: "a" })).toBe(11);
|
||||
expect(loader.get({ key: "ab" })).toBe(22);
|
||||
expect(loadCount).toBe(2);
|
||||
});
|
||||
|
||||
it("skips caching when predicate returns false", () => {
|
||||
let loadCount = 0;
|
||||
const loader = createTtlMemoizedLoader<number, undefined>({
|
||||
ttlMs: 500,
|
||||
load: () => {
|
||||
loadCount += 1;
|
||||
return loadCount;
|
||||
},
|
||||
shouldCache: (value) => value % 2 === 0,
|
||||
});
|
||||
|
||||
expect(loader.get(undefined)).toBe(1);
|
||||
expect(loader.get(undefined)).toBe(2);
|
||||
expect(loader.get(undefined)).toBe(2);
|
||||
expect(loadCount).toBe(2);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,82 @@
|
||||
type CacheEntry<T, Key> = {
|
||||
value: T;
|
||||
expiresAt: number;
|
||||
key?: Key;
|
||||
};
|
||||
|
||||
export type CreateTtlMemoizedLoaderOptions<T, Args = void, Key = Args> = {
|
||||
/**
|
||||
* Function that produces the value to cache.
|
||||
*/
|
||||
load: (args: Args) => T;
|
||||
/**
|
||||
* Duration (in milliseconds) that the cached value remains valid.
|
||||
*/
|
||||
ttlMs: number;
|
||||
/**
|
||||
* Optional function that returns a cache key derived from the loader arguments.
|
||||
* When provided, the value is only reused when the key matches the cached entry.
|
||||
*/
|
||||
getCacheKey?: (args: Args) => Key;
|
||||
/**
|
||||
* Optional predicate to determine whether a loaded value should be cached.
|
||||
*/
|
||||
shouldCache?: (value: T) => boolean;
|
||||
};
|
||||
|
||||
export type TtlMemoizedLoader<T, Args = void> = {
|
||||
/**
|
||||
* Returns a cached value when available, otherwise calls the loader.
|
||||
*/
|
||||
get: (args: Args) => T;
|
||||
/**
|
||||
* Clears the cached value.
|
||||
*/
|
||||
clear: () => void;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a memoized loader with a time-to-live (TTL) cache.
|
||||
*
|
||||
* Subsequent calls within the TTL window reuse the cached value when the cache key matches.
|
||||
* The cache entry is invalidated automatically when the TTL expires, the key changes,
|
||||
or `shouldCache` returns false.
|
||||
*/
|
||||
export const createTtlMemoizedLoader = <T, Args = void, Key = Args>(
|
||||
options: CreateTtlMemoizedLoaderOptions<T, Args, Key>
|
||||
): TtlMemoizedLoader<T, Args> => {
|
||||
let cache: CacheEntry<T, Key> | undefined;
|
||||
|
||||
const clear = () => {
|
||||
cache = undefined;
|
||||
};
|
||||
|
||||
const get = (args: Args): T => {
|
||||
const now = Date.now();
|
||||
const key = options.getCacheKey?.(args);
|
||||
|
||||
if (cache) {
|
||||
const keyMatches =
|
||||
options.getCacheKey === undefined ? true : Object.is(cache.key, key);
|
||||
|
||||
if (keyMatches && now < cache.expiresAt) {
|
||||
return cache.value;
|
||||
}
|
||||
}
|
||||
|
||||
const value = options.load(args);
|
||||
if (!options.shouldCache || options.shouldCache(value)) {
|
||||
cache = {
|
||||
value,
|
||||
expiresAt: now + Math.max(0, options.ttlMs),
|
||||
key,
|
||||
};
|
||||
} else {
|
||||
cache = undefined;
|
||||
}
|
||||
|
||||
return value;
|
||||
};
|
||||
|
||||
return { get, clear };
|
||||
};
|
||||
Reference in New Issue
Block a user