feat: unified cache (#6520)

Victor Hugo dos Santos
2025-09-10 06:59:16 -03:00
committed by GitHub
parent feee22b5c3
commit c259a61f0e
67 changed files with 3965 additions and 1578 deletions

.eslintrc.cjs

@@ -0,0 +1,13 @@
module.exports = {
root: true,
ignorePatterns: ["node_modules/", "dist/", "coverage/"],
overrides: [
{
files: ["packages/cache/**/*.{ts,js}"],
extends: ["@formbricks/eslint-config/library.js"],
parserOptions: {
project: "./packages/cache/tsconfig.json",
},
},
],
};


@@ -1,4 +1,6 @@
{
"eslint.validate": ["javascript", "javascriptreact", "typescript", "typescriptreact"],
"eslint.workingDirectories": [{ "mode": "auto" }],
"javascript.updateImportsOnFileMove.enabled": "always",
"sonarlint.connectedMode.project": {
"connectionId": "formbricks",


@@ -1,9 +1,9 @@
import { cache } from "@/lib/cache";
import { getMonthlyOrganizationResponseCount } from "@/lib/organization/service";
import {
capturePosthogEnvironmentEvent,
sendPlanLimitsReachedEventToPosthogWeekly,
} from "@/lib/posthogServer";
import { withCache } from "@/modules/cache/lib/withCache";
import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
@@ -18,7 +18,11 @@ import { getEnvironmentState } from "./environmentState";
// Mock dependencies
vi.mock("@/lib/organization/service");
vi.mock("@/lib/posthogServer");
vi.mock("@/modules/cache/lib/withCache");
vi.mock("@/lib/cache", () => ({
cache: {
withCache: vi.fn(),
},
}));
vi.mock("@formbricks/database", () => ({
prisma: {
@@ -43,6 +47,15 @@ vi.mock("@/lib/constants", () => ({
ENTERPRISE_LICENSE_KEY: "mock_enterprise_license_key",
}));
// Mock @formbricks/cache
vi.mock("@formbricks/cache", () => ({
createCacheKey: {
environment: {
state: vi.fn((environmentId: string) => `fb:env:${environmentId}:state`),
},
},
}));
const environmentId = "test-environment-id";
const mockProject: TJsEnvironmentStateProject = {
@@ -116,7 +129,7 @@ const mockSurveys: TSurvey[] = [
variables: [],
createdBy: null,
recaptcha: { enabled: false, threshold: 0.5 },
},
} as unknown as TSurvey,
];
const mockActionClasses: TActionClass[] = [
@@ -152,8 +165,8 @@ describe("getEnvironmentState", () => {
beforeEach(() => {
vi.resetAllMocks();
// Mock withCache to simply execute the function without caching for tests
vi.mocked(withCache).mockImplementation((fn) => fn);
// Mock cache.withCache to simply execute the function without caching for tests
vi.mocked(cache.withCache).mockImplementation(async (fn) => await fn());
// Default mocks for successful retrieval
vi.mocked(getEnvironmentStateData).mockResolvedValue(mockEnvironmentStateData);
@@ -268,12 +281,129 @@ describe("getEnvironmentState", () => {
expect(result.data.recaptchaSiteKey).toBe("mock_recaptcha_site_key");
});
test("should use withCache for caching with correct cache key and TTL", () => {
test("should use cache.withCache for caching with correct cache key and TTL", () => {
getEnvironmentState(environmentId);
expect(withCache).toHaveBeenCalledWith(expect.any(Function), {
key: `fb:env:${environmentId}:state`,
ttl: 5 * 60 * 1000, // 5 minutes in milliseconds
});
expect(cache.withCache).toHaveBeenCalledWith(
expect.any(Function),
"fb:env:test-environment-id:state",
5 * 60 * 1000 // 5 minutes in milliseconds
);
});
test("should handle null response limit correctly (unlimited)", async () => {
const unlimitedOrgData = {
...mockEnvironmentStateData,
organization: {
...mockEnvironmentStateData.organization,
billing: {
...mockOrganization.billing,
limits: {
...mockOrganization.billing.limits,
monthly: {
...mockOrganization.billing.limits.monthly,
responses: null, // Unlimited
},
},
},
},
};
vi.mocked(getEnvironmentStateData).mockResolvedValue(unlimitedOrgData);
vi.mocked(getMonthlyOrganizationResponseCount).mockResolvedValue(999999); // High count
const result = await getEnvironmentState(environmentId);
// Should return surveys even with high count since limit is null (unlimited)
expect(result.data.surveys).toEqual(mockSurveys);
expect(sendPlanLimitsReachedEventToPosthogWeekly).not.toHaveBeenCalled();
});
test("should propagate database update errors", async () => {
const incompleteEnvironmentData = {
...mockEnvironmentStateData,
environment: {
...mockEnvironmentStateData.environment,
appSetupCompleted: false,
},
};
vi.mocked(getEnvironmentStateData).mockResolvedValue(incompleteEnvironmentData);
vi.mocked(prisma.environment.update).mockRejectedValue(new Error("Database error"));
// Should throw error since Promise.all will fail if database update fails
await expect(getEnvironmentState(environmentId)).rejects.toThrow("Database error");
});
test("should propagate PostHog event capture errors", async () => {
const incompleteEnvironmentData = {
...mockEnvironmentStateData,
environment: {
...mockEnvironmentStateData.environment,
appSetupCompleted: false,
},
};
vi.mocked(getEnvironmentStateData).mockResolvedValue(incompleteEnvironmentData);
vi.mocked(capturePosthogEnvironmentEvent).mockRejectedValue(new Error("PostHog error"));
// Should throw error since Promise.all will fail if PostHog event capture fails
await expect(getEnvironmentState(environmentId)).rejects.toThrow("PostHog error");
});
test("should include recaptchaSiteKey when IS_RECAPTCHA_CONFIGURED is true", async () => {
const result = await getEnvironmentState(environmentId);
expect(result.data).toHaveProperty("recaptchaSiteKey");
expect(result.data.recaptchaSiteKey).toBe("mock_recaptcha_site_key");
});
test("should handle different survey types and statuses", async () => {
const mixedSurveys = [
...mockSurveys,
{
...mockSurveys[0],
id: "survey-web-draft",
type: "app", // Use valid survey type
status: "draft",
} as TSurvey,
{
...mockSurveys[0],
id: "survey-link-completed",
type: "link",
status: "completed",
} as TSurvey,
];
const modifiedData = {
...mockEnvironmentStateData,
surveys: mixedSurveys,
};
vi.mocked(getEnvironmentStateData).mockResolvedValue(modifiedData);
const result = await getEnvironmentState(environmentId);
expect(result.data.surveys).toEqual(mixedSurveys);
});
test("should handle empty surveys array", async () => {
const emptyData = {
...mockEnvironmentStateData,
surveys: [],
};
vi.mocked(getEnvironmentStateData).mockResolvedValue(emptyData);
const result = await getEnvironmentState(environmentId);
expect(result.data.surveys).toEqual([]);
});
test("should handle empty actionClasses array", async () => {
const emptyData = {
...mockEnvironmentStateData,
actionClasses: [],
};
vi.mocked(getEnvironmentStateData).mockResolvedValue(emptyData);
const result = await getEnvironmentState(environmentId);
expect(result.data.actionClasses).toEqual([]);
});
});


@@ -1,12 +1,12 @@
import "server-only";
import { cache } from "@/lib/cache";
import { IS_FORMBRICKS_CLOUD, IS_RECAPTCHA_CONFIGURED, RECAPTCHA_SITE_KEY } from "@/lib/constants";
import { getMonthlyOrganizationResponseCount } from "@/lib/organization/service";
import {
capturePosthogEnvironmentEvent,
sendPlanLimitsReachedEventToPosthogWeekly,
} from "@/lib/posthogServer";
import { createCacheKey } from "@/modules/cache/lib/cacheKeys";
import { withCache } from "@/modules/cache/lib/withCache";
import { createCacheKey } from "@formbricks/cache";
import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
import { TJsEnvironmentState } from "@formbricks/types/js";
@@ -24,8 +24,7 @@ import { getEnvironmentStateData } from "./data";
export const getEnvironmentState = async (
environmentId: string
): Promise<{ data: TJsEnvironmentState["data"] }> => {
// Use withCache for efficient Redis caching with automatic fallback
const getCachedEnvironmentState = withCache(
return cache.withCache(
async () => {
// Single optimized database call replacing multiple service calls
const { environment, organization, surveys, actionClasses } =
@@ -80,13 +79,7 @@ export const getEnvironmentState = async (
return { data };
},
{
// Use enterprise-grade cache key pattern
key: createCacheKey.environment.state(environmentId),
// This is a temporary fix for the invalidation issues, will be changed later with a proper solution
ttl: 5 * 60 * 1000, // 5 minutes in milliseconds
}
createCacheKey.environment.state(environmentId),
5 * 60 * 1000 // 5 minutes in milliseconds
);
return getCachedEnvironmentState();
};


@@ -3,10 +3,11 @@ import * as Sentry from "@sentry/nextjs";
export const onRequestError = Sentry.captureRequestError;
// instrumentation.ts
export const register = async () => {
if (process.env.NEXT_RUNTIME === "nodejs" && PROMETHEUS_ENABLED) {
await import("./instrumentation-node");
if (process.env.NEXT_RUNTIME === "nodejs") {
if (PROMETHEUS_ENABLED) {
await import("./instrumentation-node");
}
}
if (process.env.NEXT_RUNTIME === "nodejs" && IS_PRODUCTION && SENTRY_DSN) {
await import("./sentry.server.config");

apps/web/lib/cache/index.test.ts

@@ -0,0 +1,363 @@
import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
import type { CacheKey } from "@formbricks/cache";
// Create mocks
const mockCacheService = {
get: vi.fn(),
set: vi.fn(),
del: vi.fn(),
exists: vi.fn(),
withCache: vi.fn(),
getRedisClient: vi.fn(),
};
const mockGetCacheService = vi.fn();
const mockLogger = {
info: vi.fn(),
error: vi.fn(),
warn: vi.fn(),
debug: vi.fn(),
};
// Mock all dependencies before importing the module under test
vi.mock("@formbricks/cache", () => ({
getCacheService: mockGetCacheService,
}));
vi.mock("@formbricks/logger", () => ({
logger: mockLogger,
}));
// Import the module under test after mocking
let cache: any;
describe("Cache Index", () => {
beforeEach(async () => {
vi.clearAllMocks();
vi.resetModules();
// Re-import the module to get a fresh instance
const cacheModule = await import("./index");
cache = cacheModule.cache;
});
afterEach(() => {
vi.clearAllMocks();
});
describe("Singleton Behavior", () => {
test("should call getCacheService for each method call", async () => {
mockGetCacheService.mockResolvedValue({
ok: true,
data: mockCacheService,
});
// Call a method multiple times
await cache.get("test-key-1" as CacheKey);
await cache.get("test-key-2" as CacheKey);
await cache.set("test-key" as CacheKey, "value", 1000);
// getCacheService should be called for each operation
expect(mockGetCacheService).toHaveBeenCalledTimes(3);
expect(mockCacheService.get).toHaveBeenCalledWith("test-key-1" as CacheKey);
expect(mockCacheService.get).toHaveBeenCalledWith("test-key-2" as CacheKey);
expect(mockCacheService.set).toHaveBeenCalledWith("test-key" as CacheKey, "value", 1000);
});
test("should proxy all cache methods correctly", async () => {
mockGetCacheService.mockResolvedValue({
ok: true,
data: mockCacheService,
});
// Multiple calls should use the cache service
await cache.get("key1" as CacheKey);
await cache.set("key2" as CacheKey, "value", 1000);
await cache.del(["key3" as CacheKey]);
expect(mockGetCacheService).toHaveBeenCalledTimes(3);
expect(mockCacheService.get).toHaveBeenCalledWith("key1" as CacheKey);
expect(mockCacheService.set).toHaveBeenCalledWith("key2" as CacheKey, "value", 1000);
expect(mockCacheService.del).toHaveBeenCalledWith(["key3" as CacheKey]);
});
});
describe("Cache Service Integration", () => {
test("should call getCacheService on each operation", async () => {
mockGetCacheService.mockResolvedValue({
ok: true,
data: mockCacheService,
});
// getCacheService should not be called until first access
expect(mockGetCacheService).not.toHaveBeenCalled();
// First access should trigger getCacheService call
await cache.get("test-key" as CacheKey);
expect(mockGetCacheService).toHaveBeenCalledTimes(1);
expect(mockCacheService.get).toHaveBeenCalledWith("test-key" as CacheKey);
});
test("should handle concurrent operations correctly", async () => {
mockGetCacheService.mockResolvedValue({
ok: true,
data: mockCacheService,
});
// Start multiple concurrent operations
const promise1 = cache.get("key1" as CacheKey);
const promise2 = cache.set("key2" as CacheKey, "value", 1000);
const promise3 = cache.exists("key3" as CacheKey);
// Wait for all operations to complete
await Promise.all([promise1, promise2, promise3]);
// Each operation should call getCacheService
expect(mockGetCacheService).toHaveBeenCalledTimes(3);
expect(mockCacheService.get).toHaveBeenCalledWith("key1" as CacheKey);
expect(mockCacheService.set).toHaveBeenCalledWith("key2" as CacheKey, "value", 1000);
expect(mockCacheService.exists).toHaveBeenCalledWith("key3" as CacheKey);
});
});
describe("Error Handling", () => {
test("should return error object when getCacheService fails", async () => {
const initError = {
ok: false,
error: { code: "REDIS_CONNECTION_ERROR" },
};
mockGetCacheService.mockResolvedValue(initError);
const result = await cache.get("test-key" as CacheKey);
expect(result).toEqual({ ok: false, error: initError.error });
expect(mockGetCacheService).toHaveBeenCalledTimes(1);
});
test("should handle getCacheService rejection", async () => {
const networkError = new Error("Network connection failed");
mockGetCacheService.mockRejectedValue(networkError);
await expect(cache.get("test-key" as CacheKey)).rejects.toThrow("Network connection failed");
});
test("should handle errors consistently across different methods", async () => {
const cacheError = {
ok: false,
error: { code: "CONNECTION_FAILED" },
};
mockGetCacheService.mockResolvedValue(cacheError);
// All methods should return the same error structure
const getResult = await cache.get("test-key" as CacheKey);
const setResult = await cache.set("test-key" as CacheKey, "value", 1000);
const delResult = await cache.del(["test-key" as CacheKey]);
const existsResult = await cache.exists("test-key" as CacheKey);
expect(getResult).toEqual({ ok: false, error: cacheError.error });
expect(setResult).toEqual({ ok: false, error: cacheError.error });
expect(delResult).toEqual({ ok: false, error: cacheError.error });
expect(existsResult).toEqual({ ok: false, error: cacheError.error });
});
});
describe("Proxy Functionality", () => {
beforeEach(() => {
mockGetCacheService.mockResolvedValue({
ok: true,
data: mockCacheService,
});
});
test("should proxy get method correctly", async () => {
mockCacheService.get.mockResolvedValue({ ok: true, data: "cached-value" });
const result = await cache.get("test-key" as CacheKey);
expect(mockCacheService.get).toHaveBeenCalledWith("test-key" as CacheKey);
expect(result).toEqual({ ok: true, data: "cached-value" });
});
test("should proxy set method correctly", async () => {
mockCacheService.set.mockResolvedValue({ ok: true, data: undefined });
const result = await cache.set("test-key" as CacheKey, "test-value", 5000);
expect(mockCacheService.set).toHaveBeenCalledWith("test-key" as CacheKey, "test-value", 5000);
expect(result).toEqual({ ok: true, data: undefined });
});
test("should proxy del method correctly", async () => {
mockCacheService.del.mockResolvedValue({ ok: true, data: undefined });
const result = await cache.del(["key1" as CacheKey, "key2" as CacheKey]);
expect(mockCacheService.del).toHaveBeenCalledWith(["key1" as CacheKey, "key2" as CacheKey]);
expect(result).toEqual({ ok: true, data: undefined });
});
test("should proxy exists method correctly", async () => {
mockCacheService.exists.mockResolvedValue({ ok: true, data: true });
const result = await cache.exists("test-key" as CacheKey);
expect(mockCacheService.exists).toHaveBeenCalledWith("test-key" as CacheKey);
expect(result).toEqual({ ok: true, data: true });
});
test("should proxy withCache method correctly when cache is available", async () => {
const mockFn = vi.fn().mockResolvedValue("function-result");
mockCacheService.withCache.mockResolvedValue("cached-result");
const result = await cache.withCache(mockFn, "cache-key" as CacheKey, 3000);
expect(mockCacheService.withCache).toHaveBeenCalledWith(mockFn, "cache-key" as CacheKey, 3000);
expect(result).toBe("cached-result");
});
test("should execute function directly when cache service fails", async () => {
const mockFn = vi.fn().mockResolvedValue("function-result");
mockGetCacheService.mockResolvedValue({
ok: false,
error: { code: "CACHE_UNAVAILABLE" },
});
const result = await cache.withCache(mockFn, "cache-key" as CacheKey, 3000);
expect(result).toBe("function-result");
expect(mockFn).toHaveBeenCalledTimes(1);
expect(mockCacheService.withCache).not.toHaveBeenCalled();
});
test("should execute function directly when cache service throws error", async () => {
const mockFn = vi.fn().mockResolvedValue("function-result");
const cacheError = new Error("Cache connection failed");
mockGetCacheService.mockRejectedValue(cacheError);
const result = await cache.withCache(mockFn, "cache-key" as CacheKey, 3000);
expect(result).toBe("function-result");
expect(mockFn).toHaveBeenCalledTimes(1);
expect(mockLogger.warn).toHaveBeenCalledWith(
{ error: cacheError },
"Cache unavailable; executing function directly"
);
});
test("should proxy getRedisClient method correctly", async () => {
const mockRedisClient = { ping: vi.fn() };
mockCacheService.getRedisClient.mockReturnValue(mockRedisClient);
const result = await cache.getRedisClient();
expect(mockCacheService.getRedisClient).toHaveBeenCalled();
expect(result).toBe(mockRedisClient);
});
test("should return null when cache service fails for getRedisClient", async () => {
mockGetCacheService.mockResolvedValue({
ok: false,
error: { code: "REDIS_CONNECTION_ERROR" },
});
const result = await cache.getRedisClient();
expect(result).toBeNull();
expect(mockCacheService.getRedisClient).not.toHaveBeenCalled();
});
test("should handle getRedisClient when cache service throws error", async () => {
const cacheError = new Error("Cache connection failed");
mockGetCacheService.mockRejectedValue(cacheError);
await expect(cache.getRedisClient()).rejects.toThrow("Cache connection failed");
});
test("should handle method errors correctly", async () => {
const cacheError = new Error("Cache operation failed");
mockCacheService.get.mockRejectedValue(cacheError);
await expect(cache.get("test-key" as CacheKey)).rejects.toThrow("Cache operation failed");
});
});
describe("Type Safety", () => {
test("should maintain type safety through proxy", async () => {
mockGetCacheService.mockResolvedValue({
ok: true,
data: mockCacheService,
});
// TypeScript should enforce correct method signatures
mockCacheService.get.mockResolvedValue({ ok: true, data: "string-value" });
mockCacheService.set.mockResolvedValue({ ok: true, data: undefined });
mockCacheService.exists.mockResolvedValue({ ok: true, data: true });
// These should compile without type errors
const getValue = cache.get("key" as CacheKey);
const setValue = cache.set("key" as CacheKey, "value", 1000);
const existsValue = cache.exists("key" as CacheKey);
const delValue = cache.del(["key" as CacheKey]);
// Verify the calls work
await getValue;
await setValue;
await existsValue;
await delValue;
expect(mockCacheService.get).toHaveBeenCalledWith("key" as CacheKey);
expect(mockCacheService.set).toHaveBeenCalledWith("key" as CacheKey, "value", 1000);
expect(mockCacheService.exists).toHaveBeenCalledWith("key" as CacheKey);
expect(mockCacheService.del).toHaveBeenCalledWith(["key" as CacheKey]);
});
});
describe("Integration Scenarios", () => {
test("should handle rapid successive calls", async () => {
mockGetCacheService.mockResolvedValue({
ok: true,
data: mockCacheService,
});
mockCacheService.get.mockResolvedValue({ ok: true, data: null });
mockCacheService.set.mockResolvedValue({ ok: true, data: undefined });
// Make many rapid calls
const promises = Array.from({ length: 10 }, (_, i) =>
Promise.all([
cache.get(`key-${i}`),
cache.set(`key-${i}`, `value-${i}`, 1000),
cache.exists(`key-${i}`),
])
);
await Promise.all(promises);
// Each operation calls getCacheService
expect(mockGetCacheService).toHaveBeenCalledTimes(30); // 10 * 3 operations
expect(mockCacheService.get).toHaveBeenCalledTimes(10);
expect(mockCacheService.set).toHaveBeenCalledTimes(10);
expect(mockCacheService.exists).toHaveBeenCalledTimes(10);
});
test("should work in server environment", async () => {
mockGetCacheService.mockResolvedValue({
ok: true,
data: mockCacheService,
});
// Simulate server-only environment (which is already mocked by the "server-only" import)
mockCacheService.get.mockResolvedValue({ ok: true, data: "server-value" });
const result = await cache.get("server-key" as CacheKey);
expect(result).toEqual({ ok: true, data: "server-value" });
expect(mockGetCacheService).toHaveBeenCalledTimes(1);
});
});
});

apps/web/lib/cache/index.ts

@@ -0,0 +1,59 @@
import "server-only";
import { type CacheKey, type CacheService, getCacheService } from "@formbricks/cache";
import { logger } from "@formbricks/logger";
// Expose an async variant of the service, since lazy initialization makes otherwise-sync members like getRedisClient asynchronous
type AsyncCacheService = Omit<CacheService, "getRedisClient"> & {
getRedisClient(): Promise<ReturnType<CacheService["getRedisClient"]>>;
};
/**
* Cache facade for the cache service
* Provides a proxy to the cache service methods
* Lazily initializes the cache service on first use
* Handles cache service initialization failures gracefully
* Avoids the need for double awaits when using the cache service (e.g. await (await cache).get(key))
*/
export const cache = new Proxy({} as AsyncCacheService, {
get(_target, prop: keyof CacheService) {
// Special-case: withCache must never fail; fall back to direct fn on init failure.
if (prop === "withCache") {
return async <T>(fn: () => Promise<T>, ...rest: [CacheKey, number]) => {
try {
const cacheServiceResult = await getCacheService();
if (!cacheServiceResult.ok) {
return await fn();
}
return cacheServiceResult.data.withCache(fn, ...rest);
} catch (error) {
logger.warn({ error }, "Cache unavailable; executing function directly");
return await fn();
}
};
}
if (prop === "getRedisClient") {
return async () => {
const cacheServiceResult = await getCacheService();
if (!cacheServiceResult.ok) {
return null;
}
return cacheServiceResult.data.getRedisClient();
};
}
// Default: lazily initialize and forward the call; returns a Promise for all methods
return async (...args: Parameters<CacheService[typeof prop]>) => {
const cacheServiceResult = await getCacheService();
if (!cacheServiceResult.ok) {
return { ok: false, error: cacheServiceResult.error };
}
const method = cacheServiceResult.data[prop];
return await method.apply(cacheServiceResult.data, args);
};
},
});
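
A minimal usage sketch of this facade (not part of the diff above): it assumes a server-side caller where "@/lib/cache" resolves to the file above, and the loader function is purely illustrative.

import { cache } from "@/lib/cache";
import { createCacheKey } from "@formbricks/cache";

// Hypothetical loader, used only for illustration
const loadEnvironmentState = async () => ({ surveys: [], actionClasses: [] });

export const getCachedEnvironmentState = async (environmentId: string) => {
  // cache.withCache(fn, key, ttlMs): serves the cached value when Redis is healthy,
  // otherwise executes fn directly (see the withCache branch of the proxy above)
  return cache.withCache(
    loadEnvironmentState,
    createCacheKey.environment.state(environmentId),
    5 * 60 * 1000 // 5 minutes in milliseconds
  );
};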


@@ -1,5 +1,6 @@
import { createCacheKey, withCache } from "@/modules/cache/lib/withCache";
import { cache } from "@/lib/cache";
import { PostHog } from "posthog-node";
import { createCacheKey } from "@formbricks/cache";
import { logger } from "@formbricks/logger";
import { TOrganizationBillingPlan, TOrganizationBillingPlanLimits } from "@formbricks/types/organizations";
import { IS_POSTHOG_CONFIGURED, IS_PRODUCTION, POSTHOG_API_HOST, POSTHOG_API_KEY } from "./constants";
@@ -31,14 +32,14 @@ export const capturePosthogEnvironmentEvent = async (
}
};
export const sendPlanLimitsReachedEventToPosthogWeekly = (
export const sendPlanLimitsReachedEventToPosthogWeekly = async (
environmentId: string,
billing: {
plan: TOrganizationBillingPlan;
limits: TOrganizationBillingPlanLimits;
}
) =>
withCache(
await cache.withCache(
async () => {
try {
await capturePosthogEnvironmentEvent(environmentId, "plan limit reached", {
@@ -50,8 +51,6 @@ export const sendPlanLimitsReachedEventToPosthogWeekly = (
throw error;
}
},
{
key: createCacheKey.custom("analytics", environmentId, `plan_limits_${billing.plan}`),
ttl: 60 * 60 * 24 * 7 * 1000, // 7 days in milliseconds
}
)();
createCacheKey.custom("analytics", environmentId, `plan_limits_${billing.plan}`),
60 * 60 * 24 * 7 * 1000 // 7 days in milliseconds
);


@@ -181,7 +181,6 @@
"created_at": "Erstellt am",
"created_by": "Erstellt von",
"customer_success": "Kundenerfolg",
"danger_zone": "Gefahrenzone",
"dark_overlay": "Dunkle Überlagerung",
"date": "Datum",
"default": "Standard",
@@ -240,11 +239,9 @@
"label": "Bezeichnung",
"language": "Sprache",
"learn_more": "Mehr erfahren",
"license": "Lizenz",
"light_overlay": "Helle Überlagerung",
"limits_reached": "Limits erreicht",
"link": "Link",
"link_and_email": "Link & E-Mail",
"link_survey": "Link-Umfrage",
"link_surveys": "Umfragen verknüpfen",
"load_more": "Mehr laden",
@@ -387,7 +384,6 @@
"survey_scheduled": "Umfrage geplant.",
"survey_type": "Umfragetyp",
"surveys": "Umfragen",
"switch_organization": "Organisation wechseln",
"switch_to": "Wechseln zu {environment}",
"table_items_deleted_successfully": "{type}s erfolgreich gelöscht",
"table_settings": "Tabelleinstellungen",
@@ -996,7 +992,6 @@
"free": "Kostenlos",
"free_description": "Unbegrenzte Umfragen, Teammitglieder und mehr.",
"get_2_months_free": "2 Monate gratis",
"get_in_touch": "Kontaktiere uns",
"hosted_in_frankfurt": "Gehostet in Frankfurt",
"ios_android_sdks": "iOS & Android SDK für mobile Umfragen",
"link_surveys": "Umfragen verlinken (teilbar)",


@@ -181,7 +181,6 @@
"created_at": "Created at",
"created_by": "Created by",
"customer_success": "Customer Success",
"danger_zone": "Danger Zone",
"dark_overlay": "Dark overlay",
"date": "Date",
"default": "Default",
@@ -240,11 +239,9 @@
"label": "Label",
"language": "Language",
"learn_more": "Learn more",
"license": "License",
"light_overlay": "Light overlay",
"limits_reached": "Limits Reached",
"link": "Link",
"link_and_email": "Link & Email",
"link_survey": "Link Survey",
"link_surveys": "Link Surveys",
"load_more": "Load more",
@@ -387,7 +384,6 @@
"survey_scheduled": "Survey scheduled.",
"survey_type": "Survey Type",
"surveys": "Surveys",
"switch_organization": "Switch organization",
"switch_to": "Switch to {environment}",
"table_items_deleted_successfully": "{type}s deleted successfully",
"table_settings": "Table settings",
@@ -996,7 +992,6 @@
"free": "Free",
"free_description": "Unlimited Surveys, Team Members, and more.",
"get_2_months_free": "Get 2 months free",
"get_in_touch": "Get in touch",
"hosted_in_frankfurt": "Hosted in Frankfurt",
"ios_android_sdks": "iOS & Android SDK for mobile surveys",
"link_surveys": "Link Surveys (Shareable)",


@@ -181,7 +181,6 @@
"created_at": "Créé le",
"created_by": "Créé par",
"customer_success": "Succès Client",
"danger_zone": "Zone de danger",
"dark_overlay": "Superposition sombre",
"date": "Date",
"default": "Par défaut",
@@ -240,11 +239,9 @@
"label": "Étiquette",
"language": "Langue",
"learn_more": "En savoir plus",
"license": "Licence",
"light_overlay": "Superposition légère",
"limits_reached": "Limites atteints",
"link": "Lien",
"link_and_email": "Liens et e-mail",
"link_survey": "Enquête de lien",
"link_surveys": "Sondages de lien",
"load_more": "Charger plus",
@@ -387,7 +384,6 @@
"survey_scheduled": "Sondage programmé.",
"survey_type": "Type de sondage",
"surveys": "Enquêtes",
"switch_organization": "Changer d'organisation",
"switch_to": "Passer à {environment}",
"table_items_deleted_successfully": "{type}s supprimés avec succès",
"table_settings": "Réglages de table",
@@ -996,7 +992,6 @@
"free": "Gratuit",
"free_description": "Sondages illimités, membres d'équipe, et plus encore.",
"get_2_months_free": "Obtenez 2 mois gratuits",
"get_in_touch": "Prenez contact",
"hosted_in_frankfurt": "Hébergé à Francfort",
"ios_android_sdks": "SDK iOS et Android pour les sondages mobiles",
"link_surveys": "Sondages par lien (partageables)",


@@ -181,7 +181,6 @@
"created_at": "作成日時",
"created_by": "作成者",
"customer_success": "カスタマーサクセス",
"danger_zone": "危険な操作",
"dark_overlay": "暗いオーバーレイ",
"date": "日付",
"default": "デフォルト",
@@ -240,11 +239,9 @@
"label": "ラベル",
"language": "言語",
"learn_more": "詳細を見る",
"license": "ライセンス",
"light_overlay": "明るいオーバーレイ",
"limits_reached": "上限に達しました",
"link": "リンク",
"link_and_email": "リンク&メール",
"link_survey": "リンクフォーム",
"link_surveys": "リンクフォーム",
"load_more": "さらに読み込む",
@@ -387,7 +384,6 @@
"survey_scheduled": "フォームはスケジュール済みです。",
"survey_type": "フォームの種類",
"surveys": "フォーム",
"switch_organization": "組織を切り替え",
"switch_to": "{environment}に切り替え",
"table_items_deleted_successfully": "{type}を正常に削除しました",
"table_settings": "テーブル設定",
@@ -996,7 +992,6 @@
"free": "無料",
"free_description": "無制限のフォーム、チームメンバー、その他多数。",
"get_2_months_free": "2ヶ月間無料",
"get_in_touch": "お問い合わせください",
"hosted_in_frankfurt": "フランクフルトでホスト",
"ios_android_sdks": "モバイルフォーム用iOS & Android SDK",
"link_surveys": "リンクフォーム(共有可能)",


@@ -181,7 +181,6 @@
"created_at": "Data de criação",
"created_by": "Criado por",
"customer_success": "Sucesso do Cliente",
"danger_zone": "Zona de Perigo",
"dark_overlay": "sobreposição escura",
"date": "Encontro",
"default": "Padrão",
@@ -240,11 +239,9 @@
"label": "Etiqueta",
"language": "Língua",
"learn_more": "Saiba mais",
"license": "Licença",
"light_overlay": "sobreposição leve",
"limits_reached": "Limites Atingidos",
"link": "link",
"link_and_email": "Link & E-mail",
"link_survey": "Pesquisa de Link",
"link_surveys": "Link de Pesquisas",
"load_more": "Carregar mais",
@@ -387,7 +384,6 @@
"survey_scheduled": "Pesquisa agendada.",
"survey_type": "Tipo de Pesquisa",
"surveys": "Pesquisas",
"switch_organization": "Mudar organização",
"switch_to": "Mudar para {environment}",
"table_items_deleted_successfully": "{type}s deletados com sucesso",
"table_settings": "Arrumação da mesa",
@@ -996,7 +992,6 @@
"free": "grátis",
"free_description": "Pesquisas ilimitadas, membros da equipe e mais.",
"get_2_months_free": "Ganhe 2 meses grátis",
"get_in_touch": "Entre em contato",
"hosted_in_frankfurt": "Hospedado em Frankfurt",
"ios_android_sdks": "SDK para iOS e Android para pesquisas móveis",
"link_surveys": "Link de Pesquisas (Compartilhável)",


@@ -181,7 +181,6 @@
"created_at": "Criado em",
"created_by": "Criado por",
"customer_success": "Sucesso do Cliente",
"danger_zone": "Zona de Perigo",
"dark_overlay": "Sobreposição escura",
"date": "Data",
"default": "Padrão",
@@ -240,11 +239,9 @@
"label": "Etiqueta",
"language": "Idioma",
"learn_more": "Saiba mais",
"license": "Licença",
"light_overlay": "Sobreposição leve",
"limits_reached": "Limites Atingidos",
"link": "Link",
"link_and_email": "Link e Email",
"link_survey": "Ligar Inquérito",
"link_surveys": "Ligar Inquéritos",
"load_more": "Carregar mais",
@@ -387,7 +384,6 @@
"survey_scheduled": "Inquérito agendado.",
"survey_type": "Tipo de Inquérito",
"surveys": "Inquéritos",
"switch_organization": "Mudar de organização",
"switch_to": "Mudar para {environment}",
"table_items_deleted_successfully": "{type}s eliminados com sucesso",
"table_settings": "Configurações da tabela",
@@ -996,7 +992,6 @@
"free": "Grátis",
"free_description": "Inquéritos ilimitados, membros da equipa e mais.",
"get_2_months_free": "Obtenha 2 meses grátis",
"get_in_touch": "Entre em contacto",
"hosted_in_frankfurt": "Hospedado em Frankfurt",
"ios_android_sdks": "SDK iOS e Android para inquéritos móveis",
"link_surveys": "Ligar Inquéritos (Partilhável)",


@@ -181,7 +181,6 @@
"created_at": "Creat la",
"created_by": "Creat de",
"customer_success": "Succesul Clientului",
"danger_zone": "Zonă periculoasă",
"dark_overlay": "Suprapunere întunecată",
"date": "Dată",
"default": "Implicit",
@@ -240,11 +239,9 @@
"label": "Etichetă",
"language": "Limba",
"learn_more": "Află mai multe",
"license": "Licență",
"light_overlay": "Suprapunere ușoară",
"limits_reached": "Limite atinse",
"link": "Legătura",
"link_and_email": "Link & email",
"link_survey": "Conectează chestionarul",
"link_surveys": "Conectează chestionarele",
"load_more": "Încarcă mai multe",
@@ -387,7 +384,6 @@
"survey_scheduled": "Chestionar programat.",
"survey_type": "Tip Chestionar",
"surveys": "Sondaje",
"switch_organization": "Comută organizația",
"switch_to": "Comută la {environment}",
"table_items_deleted_successfully": "\"{type} șterse cu succes\"",
"table_settings": "Setări tabel",
@@ -996,7 +992,6 @@
"free": "Gratuit",
"free_description": "Sondaje nelimitate, membri în echipă și altele.",
"get_2_months_free": "Primește 2 luni gratuite",
"get_in_touch": "Contactați-ne",
"hosted_in_frankfurt": "Găzduit în Frankfurt",
"ios_android_sdks": "SDK iOS & Android pentru sondaje mobile",
"link_surveys": "Sondaje Link (Distribuibil)",


@@ -181,7 +181,6 @@
"created_at": "建立時間",
"created_by": "建立者",
"customer_success": "客戶成功",
"danger_zone": "危險區域",
"dark_overlay": "深色覆蓋",
"date": "日期",
"default": "預設",
@@ -240,11 +239,9 @@
"label": "標籤",
"language": "語言",
"learn_more": "瞭解更多",
"license": "授權",
"light_overlay": "淺色覆蓋",
"limits_reached": "已達上限",
"link": "連結",
"link_and_email": "連結與電子郵件",
"link_survey": "連結問卷",
"link_surveys": "連結問卷",
"load_more": "載入更多",
@@ -387,7 +384,6 @@
"survey_scheduled": "問卷已排程。",
"survey_type": "問卷類型",
"surveys": "問卷",
"switch_organization": "切換組織",
"switch_to": "切換至 '{'environment'}'",
"table_items_deleted_successfully": "'{'type'}' 已成功刪除",
"table_settings": "表格設定",
@@ -996,7 +992,6 @@
"free": "免費",
"free_description": "無限問卷、團隊成員等。",
"get_2_months_free": "免費獲得 2 個月",
"get_in_touch": "取得聯繫",
"hosted_in_frankfurt": "託管在 Frankfurt",
"ios_android_sdks": "iOS 和 Android SDK 用於行動問卷",
"link_surveys": "連結問卷(可分享)",


@@ -42,13 +42,28 @@ vi.mock("@/lib/constants", () => ({
REDIS_URL: "redis://localhost:6379",
}));
// Mock Redis client
const { mockGetRedisClient } = vi.hoisted(() => ({
mockGetRedisClient: vi.fn(),
// Mock cache module
const { mockCache } = vi.hoisted(() => ({
mockCache: {
getRedisClient: vi.fn(),
},
}));
vi.mock("@/modules/cache/redis", () => ({
getRedisClient: mockGetRedisClient,
vi.mock("@/lib/cache", () => ({
cache: mockCache,
}));
// Mock @formbricks/cache
vi.mock("@formbricks/cache", () => ({
createCacheKey: {
custom: vi.fn((namespace: string, ...parts: string[]) => `${namespace}:${parts.join(":")}`),
rateLimit: {
core: vi.fn(
(namespace: string, identifier: string, bucketStart: number) =>
`rate_limit:${namespace}:${identifier}:${bucketStart}`
),
},
},
}));
describe("Auth Utils", () => {
@@ -81,6 +96,35 @@ describe("Auth Utils", () => {
const isValid = await verifyPassword("WrongPassword123!", hashedPassword);
expect(isValid).toBe(false);
});
test("should handle empty password correctly", async () => {
const isValid = await verifyPassword("", hashedPassword);
expect(isValid).toBe(false);
});
test("should handle empty hash correctly", async () => {
const isValid = await verifyPassword(password, "");
expect(isValid).toBe(false);
});
test("should generate different hashes for same password", async () => {
const hash1 = await hashPassword(password);
const hash2 = await hashPassword(password);
expect(hash1).not.toBe(hash2);
expect(await verifyPassword(password, hash1)).toBe(true);
expect(await verifyPassword(password, hash2)).toBe(true);
});
test("should hash complex passwords correctly", async () => {
const complexPassword = "MyC0mpl3x!P@ssw0rd#2024$%^&*()";
const hashedComplex = await hashPassword(complexPassword);
expect(typeof hashedComplex).toBe("string");
expect(hashedComplex.length).toBe(60);
expect(await verifyPassword(complexPassword, hashedComplex)).toBe(true);
expect(await verifyPassword("wrong", hashedComplex)).toBe(false);
});
});
describe("Audit Identifier Utils", () => {
@@ -110,20 +154,115 @@ describe("Auth Utils", () => {
const identifier = createAuditIdentifier("test@example.com");
expect(identifier).toMatch(/^actor_/);
});
test("should handle case-insensitive inputs consistently", () => {
const id1 = createAuditIdentifier("User@Example.COM", "email");
const id2 = createAuditIdentifier("user@example.com", "email");
expect(id1).toBe(id2);
});
test("should handle special characters in identifiers", () => {
const specialEmail = "user+test@example-domain.co.uk";
const identifier = createAuditIdentifier(specialEmail, "email");
expect(identifier).toMatch(/^email_/);
expect(identifier).not.toContain("user+test");
expect(identifier.length).toBe(38); // "email_" + 32 chars
});
test("should create different hashes for different prefixes", () => {
const input = "test@example.com";
const emailId = createAuditIdentifier(input, "email");
const ipId = createAuditIdentifier(input, "ip");
expect(emailId).not.toBe(ipId);
expect(emailId).toMatch(/^email_/);
expect(ipId).toMatch(/^ip_/);
});
test("should handle numeric identifiers", () => {
const numericId = "12345678";
const identifier = createAuditIdentifier(numericId, "user");
expect(identifier).toMatch(/^user_/);
expect(identifier).not.toContain("12345678");
});
});
describe("Rate Limiting", () => {
test("should always allow successful authentication logging", async () => {
// This test doesn't need Redis to be available as it short-circuits for success
mockGetRedisClient.mockResolvedValue(null);
mockCache.getRedisClient.mockResolvedValue(null);
expect(await shouldLogAuthFailure("user@example.com", true)).toBe(true);
expect(await shouldLogAuthFailure("user@example.com", true)).toBe(true);
});
describe("Bucket Time Alignment", () => {
test("should align timestamps to bucket boundaries for consistent keys across pods", async () => {
mockCache.getRedisClient.mockResolvedValue(null);
const RATE_LIMIT_WINDOW = 5 * 60 * 1000; // 5 minutes = 300000ms
// Test with a known aligned timestamp (start of hour for simplicity)
const alignedTime = 1700000000000; // Use this as our aligned bucket start
const bucketStart = Math.floor(alignedTime / RATE_LIMIT_WINDOW) * RATE_LIMIT_WINDOW;
// Verify bucket alignment logic with specific test cases
const testCases = [
{ timestamp: bucketStart, expected: bucketStart },
{ timestamp: bucketStart + 50000, expected: bucketStart }, // 50 seconds later
{ timestamp: bucketStart + 100000, expected: bucketStart }, // 1 min 40 sec later
{ timestamp: bucketStart + 200000, expected: bucketStart }, // 3 min 20 sec later
{ timestamp: bucketStart + RATE_LIMIT_WINDOW, expected: bucketStart + RATE_LIMIT_WINDOW }, // Next bucket
];
for (const { timestamp, expected } of testCases) {
const actualBucketStart = Math.floor(timestamp / RATE_LIMIT_WINDOW) * RATE_LIMIT_WINDOW;
expect(actualBucketStart).toBe(expected);
}
});
test("should create consistent cache keys with bucketed timestamps", async () => {
const { createCacheKey } = await import("@formbricks/cache");
const { createAuditIdentifier } = await import("./utils");
mockCache.getRedisClient.mockResolvedValue(null);
const identifier = "test@example.com";
const hashedIdentifier = createAuditIdentifier(identifier, "ratelimit");
const RATE_LIMIT_WINDOW = 5 * 60 * 1000; // 5 minutes = 300000ms
// Use a simple aligned time for testing
const baseTime = 1700000000000;
const bucketStart = Math.floor(baseTime / RATE_LIMIT_WINDOW) * RATE_LIMIT_WINDOW;
// Test that cache keys are consistent for the same bucket
const timestamp1 = bucketStart;
const timestamp2 = bucketStart + 60000; // 1 minute later in same bucket
const bucketStart1 = Math.floor(timestamp1 / RATE_LIMIT_WINDOW) * RATE_LIMIT_WINDOW;
const bucketStart2 = Math.floor(timestamp2 / RATE_LIMIT_WINDOW) * RATE_LIMIT_WINDOW;
// Both should align to the same bucket
expect(bucketStart1).toBe(bucketStart);
expect(bucketStart2).toBe(bucketStart);
// Both should generate the same cache key
const key1 = (createCacheKey.rateLimit.core as any)("auth", hashedIdentifier, bucketStart1);
const key2 = (createCacheKey.rateLimit.core as any)("auth", hashedIdentifier, bucketStart2);
expect(key1).toBe(key2);
const expectedKey = `rate_limit:auth:${hashedIdentifier}:${bucketStart}`;
expect(key1).toBe(expectedKey);
});
});
test("should implement fail-closed behavior when Redis is unavailable", async () => {
// Set Redis unavailable for this test
mockGetRedisClient.mockResolvedValue(null);
mockCache.getRedisClient.mockResolvedValue(null);
const email = "rate-limit-test@example.com";
@@ -167,8 +306,8 @@ describe("Auth Utils", () => {
};
// Reset the Redis mock for these specific tests
mockGetRedisClient.mockReset();
mockGetRedisClient.mockReturnValue(mockRedis); // Use mockReturnValue instead of mockResolvedValue
mockCache.getRedisClient.mockReset();
mockCache.getRedisClient.mockResolvedValue(mockRedis); // Use mockResolvedValue since it's now async
});
test("should handle Redis transaction failure - !results branch", async () => {
@@ -188,15 +327,15 @@ describe("Auth Utils", () => {
};
// Reset and setup mock for this specific test
mockGetRedisClient.mockReset();
mockGetRedisClient.mockReturnValue(testMockRedis);
mockCache.getRedisClient.mockReset();
mockCache.getRedisClient.mockResolvedValue(testMockRedis);
const email = "transaction-failure@example.com";
const result = await shouldLogAuthFailure(email, false);
// Function should return false when Redis transaction fails (fail-closed behavior)
expect(result).toBe(false);
expect(mockGetRedisClient).toHaveBeenCalled();
expect(mockCache.getRedisClient).toHaveBeenCalled();
expect(testMockRedis.multi).toHaveBeenCalled();
expect(testMockMulti.zRemRangeByScore).toHaveBeenCalled();
expect(testMockMulti.zCard).toHaveBeenCalled();
@@ -389,6 +528,23 @@ describe("Auth Utils", () => {
expect.any(Number)
);
});
test("should handle edge case with empty identifier", async () => {
const result = await shouldLogAuthFailure("", false);
expect(result).toBe(false);
});
test("should handle edge case with null identifier", async () => {
// @ts-expect-error - Testing runtime behavior with null
const result = await shouldLogAuthFailure(null, false);
expect(result).toBe(false);
});
test("should handle edge case with undefined identifier", async () => {
// @ts-expect-error - Testing runtime behavior with undefined
const result = await shouldLogAuthFailure(undefined, false);
expect(result).toBe(false);
});
});
});


@@ -1,10 +1,11 @@
import { cache } from "@/lib/cache";
import { IS_PRODUCTION, SENTRY_DSN } from "@/lib/constants";
import { getRedisClient } from "@/modules/cache/redis";
import { queueAuditEventBackground } from "@/modules/ee/audit-logs/lib/handler";
import { TAuditAction, TAuditStatus, UNKNOWN_DATA } from "@/modules/ee/audit-logs/types/audit-log";
import * as Sentry from "@sentry/nextjs";
import { compare, hash } from "bcryptjs";
import { createHash, randomUUID } from "crypto";
import { createCacheKey } from "@formbricks/cache";
import { logger } from "@formbricks/logger";
export const hashPassword = async (password: string) => {
@@ -13,8 +14,15 @@ export const hashPassword = async (password: string) => {
};
export const verifyPassword = async (password: string, hashedPassword: string) => {
const isValid = await compare(password, hashedPassword);
return isValid;
try {
const isValid = await compare(password, hashedPassword);
return isValid;
} catch (error) {
// Log warning for debugging purposes, but don't throw to maintain security
logger.warn("Password verification failed due to invalid hash format", { error });
// Return false for invalid hashes or other bcrypt errors
return false;
}
};
/**
@@ -225,12 +233,17 @@ export const shouldLogAuthFailure = async (
// Always log successful authentications
if (isSuccess) return true;
const rateLimitKey = `rate_limit:auth:${createAuditIdentifier(identifier, "ratelimit")}`;
const now = Date.now();
const bucketStart = Math.floor(now / RATE_LIMIT_WINDOW) * RATE_LIMIT_WINDOW;
const rateLimitKey = createCacheKey.rateLimit.core(
"auth",
createAuditIdentifier(identifier, "ratelimit"),
bucketStart
);
try {
// Get Redis client
const redis = getRedisClient();
const redis = await cache.getRedisClient();
if (!redis) {
logger.warn("Redis not available for rate limiting, not logging due to Redis requirement");
return false;
@@ -238,10 +251,9 @@ export const shouldLogAuthFailure = async (
// Use Redis for distributed rate limiting
const multi = redis.multi();
const windowStart = now - RATE_LIMIT_WINDOW;
// Remove expired entries and count recent failures
multi.zRemRangeByScore(rateLimitKey, 0, windowStart);
multi.zRemRangeByScore(rateLimitKey, 0, bucketStart);
multi.zCard(rateLimitKey);
multi.zAdd(rateLimitKey, { score: now, value: `${now}:${randomUUID()}` });
multi.expire(rateLimitKey, Math.ceil(RATE_LIMIT_WINDOW / 1000));
@@ -251,7 +263,7 @@ export const shouldLogAuthFailure = async (
throw new Error("Redis transaction failed");
}
const currentCount = results[1] as number;
const currentCount = results[1] as unknown as number;
// Apply throttling logic
if (currentCount <= AGGREGATION_THRESHOLD) {
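
A worked example of the bucket alignment used here (illustrative only; the 5-minute window matches the value the tests assume for RATE_LIMIT_WINDOW):

// Timestamps inside the same 5-minute window share one bucket start,
// so shouldLogAuthFailure derives the same rate-limit key on every pod.
const RATE_LIMIT_WINDOW = 5 * 60 * 1000; // 5 minutes in milliseconds

const toBucketStart = (timestamp: number): number =>
  Math.floor(timestamp / RATE_LIMIT_WINDOW) * RATE_LIMIT_WINDOW;

const t1 = 1_700_000_100_000; // already aligned to a bucket boundary
const t2 = t1 + 90_000; // 90 seconds later, same window

console.log(toBucketStart(t1) === toBucketStart(t2)); // true -> same rate-limit key
console.log(toBucketStart(t2 + RATE_LIMIT_WINDOW) === toBucketStart(t1)); // false -> next window, new key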


@@ -1,381 +0,0 @@
import { describe, expect, test } from "vitest";
import { createCacheKey, parseCacheKey, validateCacheKey } from "./cacheKeys";
describe("cacheKeys", () => {
describe("createCacheKey", () => {
describe("environment keys", () => {
test("should create environment state key", () => {
const key = createCacheKey.environment.state("env123");
expect(key).toBe("fb:env:env123:state");
});
test("should create environment surveys key", () => {
const key = createCacheKey.environment.surveys("env456");
expect(key).toBe("fb:env:env456:surveys");
});
test("should create environment actionClasses key", () => {
const key = createCacheKey.environment.actionClasses("env789");
expect(key).toBe("fb:env:env789:action_classes");
});
test("should create environment config key", () => {
const key = createCacheKey.environment.config("env101");
expect(key).toBe("fb:env:env101:config");
});
test("should create environment segments key", () => {
const key = createCacheKey.environment.segments("env202");
expect(key).toBe("fb:env:env202:segments");
});
});
describe("organization keys", () => {
test("should create organization billing key", () => {
const key = createCacheKey.organization.billing("org123");
expect(key).toBe("fb:org:org123:billing");
});
test("should create organization environments key", () => {
const key = createCacheKey.organization.environments("org456");
expect(key).toBe("fb:org:org456:environments");
});
test("should create organization config key", () => {
const key = createCacheKey.organization.config("org789");
expect(key).toBe("fb:org:org789:config");
});
test("should create organization limits key", () => {
const key = createCacheKey.organization.limits("org101");
expect(key).toBe("fb:org:org101:limits");
});
});
describe("license keys", () => {
test("should create license status key", () => {
const key = createCacheKey.license.status("org123");
expect(key).toBe("fb:license:org123:status");
});
test("should create license features key", () => {
const key = createCacheKey.license.features("org456");
expect(key).toBe("fb:license:org456:features");
});
test("should create license usage key", () => {
const key = createCacheKey.license.usage("org789");
expect(key).toBe("fb:license:org789:usage");
});
test("should create license check key", () => {
const key = createCacheKey.license.check("org123", "feature-x");
expect(key).toBe("fb:license:org123:check:feature-x");
});
test("should create license previous_result key", () => {
const key = createCacheKey.license.previous_result("org456");
expect(key).toBe("fb:license:org456:previous_result");
});
});
describe("user keys", () => {
test("should create user profile key", () => {
const key = createCacheKey.user.profile("user123");
expect(key).toBe("fb:user:user123:profile");
});
test("should create user preferences key", () => {
const key = createCacheKey.user.preferences("user456");
expect(key).toBe("fb:user:user456:preferences");
});
test("should create user organizations key", () => {
const key = createCacheKey.user.organizations("user789");
expect(key).toBe("fb:user:user789:organizations");
});
test("should create user permissions key", () => {
const key = createCacheKey.user.permissions("user123", "org456");
expect(key).toBe("fb:user:user123:org:org456:permissions");
});
});
describe("project keys", () => {
test("should create project config key", () => {
const key = createCacheKey.project.config("proj123");
expect(key).toBe("fb:project:proj123:config");
});
test("should create project environments key", () => {
const key = createCacheKey.project.environments("proj456");
expect(key).toBe("fb:project:proj456:environments");
});
test("should create project surveys key", () => {
const key = createCacheKey.project.surveys("proj789");
expect(key).toBe("fb:project:proj789:surveys");
});
});
describe("survey keys", () => {
test("should create survey metadata key", () => {
const key = createCacheKey.survey.metadata("survey123");
expect(key).toBe("fb:survey:survey123:metadata");
});
test("should create survey responses key", () => {
const key = createCacheKey.survey.responses("survey456");
expect(key).toBe("fb:survey:survey456:responses");
});
test("should create survey stats key", () => {
const key = createCacheKey.survey.stats("survey789");
expect(key).toBe("fb:survey:survey789:stats");
});
});
describe("session keys", () => {
test("should create session data key", () => {
const key = createCacheKey.session.data("session123");
expect(key).toBe("fb:session:session123:data");
});
test("should create session permissions key", () => {
const key = createCacheKey.session.permissions("session456");
expect(key).toBe("fb:session:session456:permissions");
});
});
describe("rate limit keys", () => {
test("should create rate limit api key", () => {
const key = createCacheKey.rateLimit.api("api-key-123", "endpoint-v1");
expect(key).toBe("fb:rate_limit:api:api-key-123:endpoint-v1");
});
test("should create rate limit login key", () => {
const key = createCacheKey.rateLimit.login("user-ip-hash");
expect(key).toBe("fb:rate_limit:login:user-ip-hash");
});
test("should create rate limit core key", () => {
const key = createCacheKey.rateLimit.core("auth:login", "user123", 1703174400);
expect(key).toBe("fb:rate_limit:auth:login:user123:1703174400");
});
});
describe("custom keys", () => {
test("should create custom key without subResource", () => {
const key = createCacheKey.custom("temp", "identifier123");
expect(key).toBe("fb:temp:identifier123");
});
test("should create custom key with subResource", () => {
const key = createCacheKey.custom("analytics", "user456", "daily-stats");
expect(key).toBe("fb:analytics:user456:daily-stats");
});
test("should work with all valid namespaces", () => {
const validNamespaces = ["temp", "analytics", "webhook", "integration", "backup"];
validNamespaces.forEach((namespace) => {
const key = createCacheKey.custom(namespace, "test-id");
expect(key).toBe(`fb:${namespace}:test-id`);
});
});
test("should throw error for invalid namespace", () => {
expect(() => createCacheKey.custom("invalid", "identifier")).toThrow(
"Invalid cache namespace: invalid. Use: temp, analytics, webhook, integration, backup"
);
});
test("should throw error for empty namespace", () => {
expect(() => createCacheKey.custom("", "identifier")).toThrow(
"Invalid cache namespace: . Use: temp, analytics, webhook, integration, backup"
);
});
});
});
describe("validateCacheKey", () => {
test("should validate correct cache keys", () => {
const validKeys = [
"fb:env:env123:state",
"fb:user:user456:profile",
"fb:org:org789:billing",
"fb:rate_limit:api:key123:endpoint",
"fb:custom:namespace:identifier:sub:resource",
];
validKeys.forEach((key) => {
expect(validateCacheKey(key)).toBe(true);
});
});
test("should reject keys without fb prefix", () => {
const invalidKeys = ["env:env123:state", "user:user456:profile", "redis:key:value", "cache:item:data"];
invalidKeys.forEach((key) => {
expect(validateCacheKey(key)).toBe(false);
});
});
test("should reject keys with insufficient parts", () => {
const invalidKeys = ["fb:", "fb:env", "fb:env:", "fb:user:user123:"];
invalidKeys.forEach((key) => {
expect(validateCacheKey(key)).toBe(false);
});
});
test("should reject keys with empty parts", () => {
const invalidKeys = ["fb::env123:state", "fb:env::state", "fb:env:env123:", "fb:user::profile"];
invalidKeys.forEach((key) => {
expect(validateCacheKey(key)).toBe(false);
});
});
test("should validate minimum valid key", () => {
expect(validateCacheKey("fb:a:b")).toBe(true);
});
});
describe("parseCacheKey", () => {
test("should parse basic cache key", () => {
const result = parseCacheKey("fb:env:env123:state");
expect(result).toEqual({
prefix: "fb",
resource: "env",
identifier: "env123",
subResource: "state",
full: "fb:env:env123:state",
});
});
test("should parse key without subResource", () => {
const result = parseCacheKey("fb:user:user123");
expect(result).toEqual({
prefix: "fb",
resource: "user",
identifier: "user123",
subResource: undefined,
full: "fb:user:user123",
});
});
test("should parse key with multiple subResource parts", () => {
const result = parseCacheKey("fb:user:user123:org:org456:permissions");
expect(result).toEqual({
prefix: "fb",
resource: "user",
identifier: "user123",
subResource: "org:org456:permissions",
full: "fb:user:user123:org:org456:permissions",
});
});
test("should parse rate limit key with timestamp", () => {
const result = parseCacheKey("fb:rate_limit:auth:login:user123:1703174400");
expect(result).toEqual({
prefix: "fb",
resource: "rate_limit",
identifier: "auth",
subResource: "login:user123:1703174400",
full: "fb:rate_limit:auth:login:user123:1703174400",
});
});
test("should throw error for invalid cache key", () => {
const invalidKeys = ["invalid:key:format", "fb:env", "fb::env123:state", "redis:user:profile"];
invalidKeys.forEach((key) => {
expect(() => parseCacheKey(key)).toThrow(`Invalid cache key format: ${key}`);
});
});
});
describe("cache key patterns and consistency", () => {
test("all environment keys should follow same pattern", () => {
const envId = "test-env-123";
const envKeys = [
createCacheKey.environment.state(envId),
createCacheKey.environment.surveys(envId),
createCacheKey.environment.actionClasses(envId),
createCacheKey.environment.config(envId),
createCacheKey.environment.segments(envId),
];
envKeys.forEach((key) => {
expect(key).toMatch(/^fb:env:test-env-123:.+$/);
expect(validateCacheKey(key)).toBe(true);
});
});
test("all organization keys should follow same pattern", () => {
const orgId = "test-org-456";
const orgKeys = [
createCacheKey.organization.billing(orgId),
createCacheKey.organization.environments(orgId),
createCacheKey.organization.config(orgId),
createCacheKey.organization.limits(orgId),
];
orgKeys.forEach((key) => {
expect(key).toMatch(/^fb:org:test-org-456:.+$/);
expect(validateCacheKey(key)).toBe(true);
});
});
test("all generated keys should be parseable", () => {
const testKeys = [
createCacheKey.environment.state("env123"),
createCacheKey.user.profile("user456"),
createCacheKey.organization.billing("org789"),
createCacheKey.survey.metadata("survey101"),
createCacheKey.session.data("session202"),
createCacheKey.rateLimit.core("auth:login", "user303", 1703174400),
createCacheKey.custom("temp", "temp404", "cleanup"),
];
testKeys.forEach((key) => {
expect(() => parseCacheKey(key)).not.toThrow();
const parsed = parseCacheKey(key);
expect(parsed.prefix).toBe("fb");
expect(parsed.full).toBe(key);
expect(parsed.resource).toBeTruthy();
expect(parsed.identifier).toBeTruthy();
});
});
test("keys should be unique across different resources", () => {
const keys = [
createCacheKey.environment.state("same-id"),
createCacheKey.user.profile("same-id"),
createCacheKey.organization.billing("same-id"),
createCacheKey.project.config("same-id"),
createCacheKey.survey.metadata("same-id"),
];
const uniqueKeys = new Set(keys);
expect(uniqueKeys.size).toBe(keys.length);
});
test("namespace validation should prevent collisions", () => {
// These should not throw (valid namespaces)
expect(() => createCacheKey.custom("temp", "id")).not.toThrow();
expect(() => createCacheKey.custom("analytics", "id")).not.toThrow();
// These should throw (reserved/invalid namespaces)
expect(() => createCacheKey.custom("env", "id")).toThrow();
expect(() => createCacheKey.custom("user", "id")).toThrow();
expect(() => createCacheKey.custom("org", "id")).toThrow();
});
});
});


@@ -1,126 +0,0 @@
import "server-only";
/**
* Enterprise-grade cache key generator following industry best practices
* Pattern: fb:{resource}:{identifier}[:{subresource}]
*
* Benefits:
* - Clear namespace hierarchy (fb = formbricks)
* - Collision-proof across environments
* - Easy debugging and monitoring
* - Predictable invalidation patterns
* - Multi-tenant safe
*/
export const createCacheKey = {
// Environment-related keys
environment: {
state: (environmentId: string) => `fb:env:${environmentId}:state`,
surveys: (environmentId: string) => `fb:env:${environmentId}:surveys`,
actionClasses: (environmentId: string) => `fb:env:${environmentId}:action_classes`,
config: (environmentId: string) => `fb:env:${environmentId}:config`,
segments: (environmentId: string) => `fb:env:${environmentId}:segments`,
},
// Organization-related keys
organization: {
billing: (organizationId: string) => `fb:org:${organizationId}:billing`,
environments: (organizationId: string) => `fb:org:${organizationId}:environments`,
config: (organizationId: string) => `fb:org:${organizationId}:config`,
limits: (organizationId: string) => `fb:org:${organizationId}:limits`,
},
// License and enterprise features
license: {
status: (organizationId: string) => `fb:license:${organizationId}:status`,
features: (organizationId: string) => `fb:license:${organizationId}:features`,
usage: (organizationId: string) => `fb:license:${organizationId}:usage`,
check: (organizationId: string, feature: string) => `fb:license:${organizationId}:check:${feature}`,
previous_result: (organizationId: string) => `fb:license:${organizationId}:previous_result`,
},
// User-related keys
user: {
profile: (userId: string) => `fb:user:${userId}:profile`,
preferences: (userId: string) => `fb:user:${userId}:preferences`,
organizations: (userId: string) => `fb:user:${userId}:organizations`,
permissions: (userId: string, organizationId: string) =>
`fb:user:${userId}:org:${organizationId}:permissions`,
},
// Project-related keys
project: {
config: (projectId: string) => `fb:project:${projectId}:config`,
environments: (projectId: string) => `fb:project:${projectId}:environments`,
surveys: (projectId: string) => `fb:project:${projectId}:surveys`,
},
// Survey-related keys
survey: {
metadata: (surveyId: string) => `fb:survey:${surveyId}:metadata`,
responses: (surveyId: string) => `fb:survey:${surveyId}:responses`,
stats: (surveyId: string) => `fb:survey:${surveyId}:stats`,
},
// Session and authentication
session: {
data: (sessionId: string) => `fb:session:${sessionId}:data`,
permissions: (sessionId: string) => `fb:session:${sessionId}:permissions`,
},
// Rate limiting and security
rateLimit: {
api: (identifier: string, endpoint: string) => `fb:rate_limit:api:${identifier}:${endpoint}`,
login: (identifier: string) => `fb:rate_limit:login:${identifier}`,
core: (namespace: string, identifier: string, windowStart: number) =>
`fb:rate_limit:${namespace}:${identifier}:${windowStart}`,
},
// Custom keys with validation
custom: (namespace: string, identifier: string, subResource?: string) => {
// Validate namespace to prevent collisions
const validNamespaces = ["temp", "analytics", "webhook", "integration", "backup"];
if (!validNamespaces.includes(namespace)) {
throw new Error(`Invalid cache namespace: ${namespace}. Use: ${validNamespaces.join(", ")}`);
}
const base = `fb:${namespace}:${identifier}`;
return subResource ? `${base}:${subResource}` : base;
},
};
/**
* Cache key validation helpers
*/
export const validateCacheKey = (key: string): boolean => {
// Must start with fb: prefix
if (!key.startsWith("fb:")) return false;
// Must have at least 3 parts (fb:resource:identifier)
const parts = key.split(":");
if (parts.length < 3) return false;
// No empty parts
if (parts.some((part) => part.length === 0)) return false;
return true;
};
/**
* Extract cache key components for debugging/monitoring
*/
export const parseCacheKey = (key: string) => {
if (!validateCacheKey(key)) {
throw new Error(`Invalid cache key format: ${key}`);
}
const [prefix, resource, identifier, ...subResources] = key.split(":");
return {
prefix,
resource,
identifier,
subResource: subResources.length > 0 ? subResources.join(":") : undefined,
full: key,
};
};
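To make the key pattern documented at the top of this module concrete, here is a minimal, illustrative usage sketch; the IDs are placeholders, and the same fb:{resource}:{identifier} shape carries over to createCacheKey in the new @formbricks/cache package.
import { createCacheKey, parseCacheKey, validateCacheKey } from "@/modules/cache/lib/cacheKeys";
// Illustrative only; placeholder IDs
const stateKey = createCacheKey.environment.state("env_123"); // "fb:env:env_123:state"
const permsKey = createCacheKey.user.permissions("user_456", "org_789"); // "fb:user:user_456:org:org_789:permissions"
validateCacheKey(stateKey); // true: "fb:" prefix, at least 3 parts, none empty
const parsed = parseCacheKey(permsKey);
// { prefix: "fb", resource: "user", identifier: "user_456",
//   subResource: "org:org_789:permissions", full: permsKey }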

View File

@@ -1,159 +0,0 @@
import KeyvRedis from "@keyv/redis";
import { createCache } from "cache-manager";
import { Keyv } from "keyv";
import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
import { logger } from "@formbricks/logger";
// Mock dependencies
vi.mock("keyv");
vi.mock("@keyv/redis");
vi.mock("cache-manager");
vi.mock("@formbricks/logger");
const mockCacheInstance = {
set: vi.fn(),
get: vi.fn(),
del: vi.fn(),
};
describe("Cache Service", () => {
let originalRedisUrl: string | undefined;
let originalNextRuntime: string | undefined;
beforeEach(() => {
originalRedisUrl = process.env.REDIS_URL;
originalNextRuntime = process.env.NEXT_RUNTIME;
// Ensure we're in runtime mode (not build time)
process.env.NEXT_RUNTIME = "nodejs";
vi.resetAllMocks();
vi.resetModules();
// Setup default mock implementations
vi.mocked(createCache).mockReturnValue(mockCacheInstance as any);
vi.mocked(Keyv).mockClear();
vi.mocked(KeyvRedis).mockClear();
vi.mocked(logger.warn).mockClear();
vi.mocked(logger.error).mockClear();
vi.mocked(logger.info).mockClear();
// Mock successful cache operations for Redis connection test
mockCacheInstance.set.mockResolvedValue(undefined);
mockCacheInstance.get.mockResolvedValue({ test: true });
mockCacheInstance.del.mockResolvedValue(undefined);
});
afterEach(() => {
process.env.REDIS_URL = originalRedisUrl;
process.env.NEXT_RUNTIME = originalNextRuntime;
});
describe("Initialization and getCache", () => {
test("should use Redis store and return it via getCache if REDIS_URL is set", async () => {
process.env.REDIS_URL = "redis://localhost:6379";
const { getCache } = await import("./service");
const cache = await getCache();
expect(KeyvRedis).toHaveBeenCalledWith("redis://localhost:6379");
expect(Keyv).toHaveBeenCalledWith({
store: expect.any(KeyvRedis),
});
expect(createCache).toHaveBeenCalledWith({
stores: [expect.any(Keyv)],
});
expect(logger.info).toHaveBeenCalledWith("Cache initialized with Redis");
expect(cache).toBe(mockCacheInstance);
});
test("should fall back to memory store if Redis connection fails", async () => {
process.env.REDIS_URL = "redis://localhost:6379";
const mockError = new Error("Connection refused");
// Mock cache operations to fail for Redis connection test
mockCacheInstance.get.mockRejectedValueOnce(mockError);
const { getCache } = await import("./service");
const cache = await getCache();
expect(KeyvRedis).toHaveBeenCalledWith("redis://localhost:6379");
expect(logger.warn).toHaveBeenCalledWith("Redis connection failed, using memory cache", {
error: mockError,
});
expect(cache).toBe(mockCacheInstance);
});
test("should use memory store and return it via getCache if REDIS_URL is not set", async () => {
delete process.env.REDIS_URL;
const { getCache } = await import("./service");
const cache = await getCache();
expect(KeyvRedis).not.toHaveBeenCalled();
expect(Keyv).toHaveBeenCalledWith();
expect(createCache).toHaveBeenCalledWith({
stores: [expect.any(Keyv)],
});
expect(cache).toBe(mockCacheInstance);
});
test("should use memory store and return it via getCache if REDIS_URL is an empty string", async () => {
process.env.REDIS_URL = "";
const { getCache } = await import("./service");
const cache = await getCache();
expect(KeyvRedis).not.toHaveBeenCalled();
expect(Keyv).toHaveBeenCalledWith();
expect(createCache).toHaveBeenCalledWith({
stores: [expect.any(Keyv)],
});
expect(cache).toBe(mockCacheInstance);
});
test("should return same instance on multiple calls to getCache", async () => {
process.env.REDIS_URL = "redis://localhost:6379";
const { getCache } = await import("./service");
const cache1 = await getCache();
const cache2 = await getCache();
expect(cache1).toBe(cache2);
expect(cache1).toBe(mockCacheInstance);
// Should only initialize once
expect(createCache).toHaveBeenCalledTimes(1);
});
test("should use memory cache during build time", async () => {
process.env.REDIS_URL = "redis://localhost:6379";
delete process.env.NEXT_RUNTIME; // Simulate build time
const { getCache } = await import("./service");
const cache = await getCache();
expect(KeyvRedis).not.toHaveBeenCalled();
expect(Keyv).toHaveBeenCalledWith();
expect(cache).toBe(mockCacheInstance);
});
test("should provide cache health information", async () => {
process.env.REDIS_URL = "redis://localhost:6379";
const { getCache, getCacheHealth } = await import("./service");
// Before initialization
let health = getCacheHealth();
expect(health.isInitialized).toBe(false);
expect(health.hasInstance).toBe(false);
// After initialization
await getCache();
health = getCacheHealth();
expect(health.isInitialized).toBe(true);
expect(health.hasInstance).toBe(true);
expect(health.isRedisConnected).toBe(true);
});
});
});

View File

@@ -1,135 +0,0 @@
import "server-only";
import KeyvRedis from "@keyv/redis";
import { type Cache, createCache } from "cache-manager";
import { Keyv } from "keyv";
import { logger } from "@formbricks/logger";
// Singleton state management
interface CacheState {
instance: Cache | null;
isInitialized: boolean;
isRedisConnected: boolean;
initializationPromise: Promise<Cache> | null;
}
const state: CacheState = {
instance: null,
isInitialized: false,
isRedisConnected: false,
initializationPromise: null,
};
/**
* Creates a memory cache fallback
*/
const createMemoryCache = (): Cache => {
return createCache({ stores: [new Keyv()] });
};
/**
* Creates Redis cache with proper async connection handling
*/
const createRedisCache = async (redisUrl: string): Promise<Cache> => {
const redisStore = new KeyvRedis(redisUrl);
const cache = createCache({ stores: [new Keyv({ store: redisStore })] });
// Test connection
const testKey = "__health_check__";
await cache.set(testKey, { test: true }, 5000);
const result = await cache.get<{ test: boolean }>(testKey);
await cache.del(testKey);
if (!result?.test) {
throw new Error("Redis connection test failed");
}
return cache;
};
/**
* Async cache initialization with proper singleton pattern
*/
const initializeCache = async (): Promise<Cache> => {
if (state.initializationPromise) {
return state.initializationPromise;
}
state.initializationPromise = (async () => {
try {
const redisUrl = process.env.REDIS_URL?.trim();
if (!redisUrl) {
state.instance = createMemoryCache();
state.isRedisConnected = false;
return state.instance;
}
try {
state.instance = await createRedisCache(redisUrl);
state.isRedisConnected = true;
logger.info("Cache initialized with Redis");
} catch (error) {
logger.warn("Redis connection failed, using memory cache", { error });
state.instance = createMemoryCache();
state.isRedisConnected = false;
}
return state.instance;
} catch (error) {
logger.error("Cache initialization failed", { error });
state.instance = createMemoryCache();
return state.instance;
} finally {
state.isInitialized = true;
state.initializationPromise = null;
}
})();
return state.initializationPromise;
};
/**
* Simple Next.js build environment detection
* Works in 99% of cases with minimal complexity
*/
const isBuildTime = () => !process.env.NEXT_RUNTIME;
/**
* Get cache instance with proper async initialization
* Always re-evaluates Redis URL at runtime to handle build-time vs runtime differences
*/
export const getCache = async (): Promise<Cache> => {
if (isBuildTime()) {
if (!state.instance) {
state.instance = createMemoryCache();
state.isInitialized = true;
state.isRedisConnected = false;
}
return state.instance;
}
const currentRedisUrl = process.env.REDIS_URL?.trim();
// Re-initialize if Redis URL is now available but we're using memory cache
if (state.instance && state.isInitialized && !state.isRedisConnected && currentRedisUrl) {
logger.info("Re-initializing cache with Redis");
state.instance = null;
state.isInitialized = false;
state.initializationPromise = null;
}
if (state.instance && state.isInitialized) {
return state.instance;
}
return initializeCache();
};
/**
* Cache health monitoring for diagnostics
*/
export const getCacheHealth = () => ({
isInitialized: state.isInitialized,
isRedisConnected: state.isRedisConnected,
hasInstance: !!state.instance,
});

View File

@@ -1,85 +0,0 @@
import "server-only";
import { logger } from "@formbricks/logger";
import { getCache } from "./service";
/**
* Options for the cache wrapper: the cache key and its TTL
*/
type CacheOptions = {
key: string;
ttl: number; // TTL in milliseconds
};
/**
* Simple cache wrapper for functions that return promises
*
* @example
* ```typescript
* const getCachedEnvironment = withCache(
* () => fetchEnvironmentFromDB(environmentId),
* {
* key: `env:${environmentId}`,
* ttl: 3600000 // 1 hour in milliseconds
* }
* );
* ```
*/
export const withCache = <T>(fn: () => Promise<T>, options: CacheOptions): (() => Promise<T>) => {
return async (): Promise<T> => {
const { key, ttl } = options;
try {
const cache = await getCache();
// Try to get from cache - cache-manager with Keyv handles serialization automatically
const cached = await cache.get<T>(key);
if (cached !== null && cached !== undefined) {
return cached;
}
// Cache miss - fetch fresh data
const fresh = await fn();
// Cache the result with proper TTL conversion
// cache-manager with Keyv expects TTL in milliseconds
await cache.set(key, fresh, ttl);
return fresh;
} catch (error) {
const err = error instanceof Error ? error : new Error(String(error));
// On cache error, still try to fetch fresh data
logger.warn({ key, error: err }, "Cache operation failed, fetching fresh data");
try {
return await fn();
} catch (fnError) {
const fnErr = fnError instanceof Error ? fnError : new Error(String(fnError));
logger.error("Failed to fetch fresh data after cache error", {
key,
cacheError: err,
functionError: fnErr,
});
throw fnErr;
}
}
};
};
/**
* Simple cache invalidation helper
* Prefer explicit key invalidation over complex tag systems
*/
export const invalidateCache = async (keys: string | string[]): Promise<void> => {
const cache = await getCache();
const keyArray = Array.isArray(keys) ? keys : [keys];
await Promise.all(keyArray.map((key) => cache.del(key)));
logger.info("Cache invalidated", { keys: keyArray });
};
// Re-export cache key utilities for backwards compatibility
export { createCacheKey, validateCacheKey, parseCacheKey } from "./cacheKeys";
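As a brief usage sketch of the helpers above (the data-access functions are hypothetical stand-ins): wrap the read path with withCache, then invalidate the same key explicitly after a write, matching the explicit-key approach recommended in the comment.
import { createCacheKey, invalidateCache, withCache } from "@/modules/cache/lib/withCache";
// Hypothetical data-access helpers, declared only so the sketch is self-contained
declare function fetchSegmentsFromDb(environmentId: string): Promise<{ id: string; filters: unknown }[]>;
declare function updateSegmentsInDb(environmentId: string, segments: unknown): Promise<void>;
const environmentId = "env_123"; // placeholder ID
const key = createCacheKey.environment.segments(environmentId);
// Read path: cache the DB lookup for 5 minutes
const getCachedSegments = withCache(() => fetchSegmentsFromDb(environmentId), {
  key,
  ttl: 5 * 60 * 1000,
});
const segments = await getCachedSegments();
// Write path: drop the cached entry so the next read refetches
await updateSegmentsInDb(environmentId, segments);
await invalidateCache(key);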

View File

@@ -1,261 +0,0 @@
import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
// Mock the logger
vi.mock("@formbricks/logger", () => ({
logger: {
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
},
}));
// Mock the redis client
const mockRedisClient = {
connect: vi.fn(),
disconnect: vi.fn(),
on: vi.fn(),
isReady: true,
get: vi.fn(),
set: vi.fn(),
del: vi.fn(),
exists: vi.fn(),
expire: vi.fn(),
ttl: vi.fn(),
keys: vi.fn(),
flushall: vi.fn(),
};
vi.mock("redis", () => ({
createClient: vi.fn(() => mockRedisClient),
}));
// Mock crypto for UUID generation
vi.mock("crypto", () => ({
randomUUID: vi.fn(() => "test-uuid-123"),
}));
describe("Redis module", () => {
beforeEach(() => {
vi.clearAllMocks();
// Reset environment variable
process.env.REDIS_URL = "redis://localhost:6379";
// Reset isReady state
mockRedisClient.isReady = true;
// Make connect resolve successfully
mockRedisClient.connect.mockResolvedValue(undefined);
});
afterEach(() => {
vi.resetModules();
process.env.REDIS_URL = undefined;
});
describe("Module initialization", () => {
test("should create Redis client when REDIS_URL is set", async () => {
const { createClient } = await import("redis");
// Re-import the module to trigger initialization
await import("./redis");
expect(createClient).toHaveBeenCalledWith({
url: "redis://localhost:6379",
socket: {
reconnectStrategy: expect.any(Function),
},
});
});
test("should not create Redis client when REDIS_URL is not set", async () => {
delete process.env.REDIS_URL;
const { createClient } = await import("redis");
// Clear the module cache and re-import
vi.resetModules();
await import("./redis");
expect(createClient).not.toHaveBeenCalled();
});
test("should set up event listeners", async () => {
// Re-import the module to trigger initialization
await import("./redis");
expect(mockRedisClient.on).toHaveBeenCalledWith("error", expect.any(Function));
expect(mockRedisClient.on).toHaveBeenCalledWith("connect", expect.any(Function));
expect(mockRedisClient.on).toHaveBeenCalledWith("reconnecting", expect.any(Function));
expect(mockRedisClient.on).toHaveBeenCalledWith("ready", expect.any(Function));
});
test("should attempt initial connection", async () => {
// Re-import the module to trigger initialization
await import("./redis");
expect(mockRedisClient.connect).toHaveBeenCalled();
});
});
describe("getRedisClient", () => {
test("should return client when ready", async () => {
mockRedisClient.isReady = true;
const { getRedisClient } = await import("./redis");
const client = getRedisClient();
expect(client).toBe(mockRedisClient);
});
test("should return null when client is not ready", async () => {
mockRedisClient.isReady = false;
const { getRedisClient } = await import("./redis");
const client = getRedisClient();
expect(client).toBeNull();
});
test("should return null when no REDIS_URL is set", async () => {
delete process.env.REDIS_URL;
vi.resetModules();
const { getRedisClient } = await import("./redis");
const client = getRedisClient();
expect(client).toBeNull();
});
});
describe("disconnectRedis", () => {
test("should disconnect the client", async () => {
const { disconnectRedis } = await import("./redis");
await disconnectRedis();
expect(mockRedisClient.disconnect).toHaveBeenCalled();
});
test("should handle case when client is null", async () => {
delete process.env.REDIS_URL;
vi.resetModules();
const { disconnectRedis } = await import("./redis");
await expect(disconnectRedis()).resolves.toBeUndefined();
});
});
describe("Reconnection strategy", () => {
test("should configure reconnection strategy properly", async () => {
const { createClient } = await import("redis");
// Re-import the module to trigger initialization
await import("./redis");
const createClientCall = vi.mocked(createClient).mock.calls[0];
const config = createClientCall[0] as any;
expect(config.socket.reconnectStrategy).toBeDefined();
expect(typeof config.socket.reconnectStrategy).toBe("function");
});
});
describe("Event handlers", () => {
test("should log error events", async () => {
const { logger } = await import("@formbricks/logger");
// Re-import the module to trigger initialization
await import("./redis");
// Find the error event handler
const errorCall = vi.mocked(mockRedisClient.on).mock.calls.find((call) => call[0] === "error");
const errorHandler = errorCall?.[1];
const testError = new Error("Test error");
errorHandler?.(testError);
expect(logger.error).toHaveBeenCalledWith("Redis client error:", testError);
});
test("should log connect events", async () => {
const { logger } = await import("@formbricks/logger");
// Re-import the module to trigger initialization
await import("./redis");
// Find the connect event handler
const connectCall = vi.mocked(mockRedisClient.on).mock.calls.find((call) => call[0] === "connect");
const connectHandler = connectCall?.[1];
connectHandler?.();
expect(logger.info).toHaveBeenCalledWith("Redis client connected");
});
test("should log reconnecting events", async () => {
const { logger } = await import("@formbricks/logger");
// Re-import the module to trigger initialization
await import("./redis");
// Find the reconnecting event handler
const reconnectingCall = vi
.mocked(mockRedisClient.on)
.mock.calls.find((call) => call[0] === "reconnecting");
const reconnectingHandler = reconnectingCall?.[1];
reconnectingHandler?.();
expect(logger.info).toHaveBeenCalledWith("Redis client reconnecting");
});
test("should log ready events", async () => {
const { logger } = await import("@formbricks/logger");
// Re-import the module to trigger initialization
await import("./redis");
// Find the ready event handler
const readyCall = vi.mocked(mockRedisClient.on).mock.calls.find((call) => call[0] === "ready");
const readyHandler = readyCall?.[1];
readyHandler?.();
expect(logger.info).toHaveBeenCalledWith("Redis client ready");
});
test("should log end events", async () => {
const { logger } = await import("@formbricks/logger");
// Re-import the module to trigger initialization
await import("./redis");
// Find the end event handler
const endCall = vi.mocked(mockRedisClient.on).mock.calls.find((call) => call[0] === "end");
const endHandler = endCall?.[1];
endHandler?.();
expect(logger.info).toHaveBeenCalledWith("Redis client disconnected");
});
});
describe("Connection failure handling", () => {
test("should handle initial connection failure", async () => {
const { logger } = await import("@formbricks/logger");
const connectionError = new Error("Connection failed");
mockRedisClient.connect.mockRejectedValue(connectionError);
vi.resetModules();
await import("./redis");
// Wait for the connection promise to resolve
await new Promise((resolve) => setTimeout(resolve, 0));
expect(logger.error).toHaveBeenCalledWith("Initial Redis connection failed:", connectionError);
});
});
});

View File

@@ -1,69 +0,0 @@
import { createClient } from "redis";
import { logger } from "@formbricks/logger";
type RedisClient = ReturnType<typeof createClient>;
const REDIS_URL = process.env.REDIS_URL;
let client: RedisClient | null = null;
if (REDIS_URL) {
client = createClient({
url: REDIS_URL,
socket: {
reconnectStrategy: (retries) => {
logger.info(`Redis reconnection attempt ${retries}`);
// For the first 5 attempts, use a linearly increasing delay (1s per attempt), capped at 5 seconds
if (retries <= 5) {
return Math.min(retries * 1000, 5000);
}
// After 5 attempts, use a longer delay but never give up
// This ensures the client keeps trying to reconnect when Redis comes back online
logger.info("Redis reconnection using extended delay (30 seconds)");
return 30000; // 30 second delay for persistent reconnection attempts
},
},
});
client.on("error", (err) => {
logger.error("Redis client error:", err);
});
client.on("connect", () => {
logger.info("Redis client connected");
});
client.on("reconnecting", () => {
logger.info("Redis client reconnecting");
});
client.on("ready", () => {
logger.info("Redis client ready");
});
client.on("end", () => {
logger.info("Redis client disconnected");
});
// Connect immediately
client.connect().catch((err) => {
logger.error("Initial Redis connection failed:", err);
});
}
export const getRedisClient = (): RedisClient | null => {
if (!client?.isReady) {
logger.warn("Redis client not ready, operations will be skipped");
return null;
}
return client;
};
export const disconnectRedis = async (): Promise<void> => {
if (client) {
await client.disconnect();
client = null;
}
};

View File

@@ -7,7 +7,7 @@ import { rateLimitConfigs } from "./rate-limit-configs";
const { mockEval, mockRedisClient, mockGetRedisClient } = vi.hoisted(() => {
const _mockEval = vi.fn();
const _mockRedisClient = { eval: _mockEval } as any;
const _mockGetRedisClient = vi.fn().mockReturnValue(_mockRedisClient);
const _mockGetRedisClient = vi.fn().mockResolvedValue(_mockRedisClient);
return { mockEval: _mockEval, mockRedisClient: _mockRedisClient, mockGetRedisClient: _mockGetRedisClient };
});
@@ -18,8 +18,10 @@ vi.mock("@/lib/constants", () => ({
SENTRY_DSN: "https://test@sentry.io/test",
}));
vi.mock("@/modules/cache/redis", () => ({
getRedisClient: mockGetRedisClient,
vi.mock("@/lib/cache", () => ({
cache: {
getRedisClient: mockGetRedisClient,
},
}));
vi.mock("@formbricks/logger", () => ({
@@ -35,7 +37,7 @@ vi.mock("@sentry/nextjs", () => ({
captureException: vi.fn(),
}));
vi.mock("@/modules/cache/lib/cacheKeys", () => ({
vi.mock("@formbricks/cache", () => ({
createCacheKey: {
rateLimit: {
core: vi.fn(
@@ -48,8 +50,8 @@ vi.mock("@/modules/cache/lib/cacheKeys", () => ({
describe("rateLimitConfigs", () => {
beforeEach(() => {
vi.clearAllMocks();
// Reset the mock to return our mock client
mockGetRedisClient.mockReturnValue(mockRedisClient);
// Reset the mock to return our mock client (async)
mockGetRedisClient.mockResolvedValue(mockRedisClient);
});
describe("Configuration Structure", () => {

View File

@@ -1,4 +1,4 @@
import { getRedisClient } from "@/modules/cache/redis";
import { cache } from "@/lib/cache";
import { afterAll, beforeAll, describe, expect, test } from "vitest";
import { applyRateLimit } from "./helpers";
import { checkRateLimit } from "./rate-limit";
@@ -10,7 +10,8 @@ let isRedisAvailable = false;
// Test Redis availability
async function checkRedisAvailability() {
try {
const redis = getRedisClient();
// eslint-disable-next-line @typescript-eslint/await-thenable
const redis = await cache.getRedisClient();
if (redis === null) {
console.log("Redis client is null - Redis not available");
return false;
@@ -146,7 +147,8 @@ describe("Rate Limiter Load Tests - Race Conditions", () => {
console.log("🟢 Rate Limiter Load Tests: Redis available - tests will run");
// Clear any existing test keys
const redis = getRedisClient();
// eslint-disable-next-line @typescript-eslint/await-thenable
const redis = await cache.getRedisClient();
if (redis) {
const testKeys = await redis.keys("fb:rate_limit:test:*");
if (testKeys.length > 0) {
@@ -157,7 +159,15 @@ describe("Rate Limiter Load Tests - Race Conditions", () => {
afterAll(async () => {
// Clean up test keys
const redis = getRedisClient();
isRedisAvailable = await checkRedisAvailability();
if (!isRedisAvailable) {
console.log("Skipping cleanup: Redis not available");
return;
}
// eslint-disable-next-line @typescript-eslint/await-thenable
const redis = await cache.getRedisClient();
if (redis) {
const testKeys = await redis.keys("fb:rate_limit:test:*");
if (testKeys.length > 0) {
@@ -319,7 +329,8 @@ describe("Rate Limiter Load Tests - Race Conditions", () => {
const identifier = "stress-test";
// Clear any existing keys first to ensure clean state
const redis = getRedisClient();
// eslint-disable-next-line @typescript-eslint/await-thenable
const redis = await cache.getRedisClient();
if (redis) {
const existingKeys = await redis.keys(`fb:rate_limit:${config.namespace}:*`);
if (existingKeys.length > 0) {
@@ -447,7 +458,8 @@ describe("Rate Limiter Load Tests - Race Conditions", () => {
const identifier = "ttl-test-user";
// Clear any existing keys first
const redis = getRedisClient();
// eslint-disable-next-line @typescript-eslint/await-thenable
const redis = await cache.getRedisClient();
if (redis) {
const existingKeys = await redis.keys(`fb:rate_limit:${config.namespace}:*`);
if (existingKeys.length > 0) {

View File

@@ -4,24 +4,26 @@ import { afterAll, afterEach, beforeEach, describe, expect, test, vi } from "vit
import { checkRateLimit } from "./rate-limit";
import { TRateLimitConfig } from "./types/rate-limit";
const { mockEval, mockRedisClient, mockGetRedisClient } = vi.hoisted(() => {
const { mockEval, mockRedisClient, mockCache } = vi.hoisted(() => {
const _mockEval = vi.fn();
const _mockRedisClient = {
eval: _mockEval,
} as any;
const _mockGetRedisClient = vi.fn().mockReturnValue(_mockRedisClient);
const _mockCache = {
getRedisClient: vi.fn().mockResolvedValue(_mockRedisClient),
};
return {
mockEval: _mockEval,
mockRedisClient: _mockRedisClient,
mockGetRedisClient: _mockGetRedisClient,
mockCache: _mockCache,
};
});
// Mock all dependencies (will use the hoisted mocks above)
vi.mock("@/modules/cache/redis", () => ({
getRedisClient: mockGetRedisClient,
vi.mock("@/lib/cache", () => ({
cache: mockCache,
}));
vi.mock("@/lib/constants", () => ({
@@ -52,7 +54,7 @@ describe("checkRateLimit", () => {
beforeEach(() => {
vi.clearAllMocks();
// Reset the mock to return our mock client
mockGetRedisClient.mockReturnValue(mockRedisClient);
mockCache.getRedisClient.mockResolvedValue(mockRedisClient);
});
afterEach(() => {
@@ -121,8 +123,10 @@ describe("checkRateLimit", () => {
test("should fail open when Redis is not configured", async () => {
vi.resetModules();
vi.doMock("@/modules/cache/redis", () => ({
getRedisClient: vi.fn().mockReturnValue(null),
vi.doMock("@/lib/cache", () => ({
cache: {
getRedisClient: vi.fn().mockResolvedValue(null),
},
}));
// Dynamic import after mocking
@@ -222,10 +226,12 @@ describe("checkRateLimit", () => {
},
}));
vi.doMock("@/modules/cache/redis", () => ({
getRedisClient: vi.fn().mockReturnValue({
eval: vi.fn().mockResolvedValue([6, 0]),
}),
vi.doMock("@/lib/cache", () => ({
cache: {
getRedisClient: vi.fn().mockResolvedValue({
eval: vi.fn().mockResolvedValue([6, 0]),
}),
},
}));
// Dynamic import after mocking
@@ -263,10 +269,12 @@ describe("checkRateLimit", () => {
},
}));
vi.doMock("@/modules/cache/redis", () => ({
getRedisClient: vi.fn().mockReturnValue({
eval: vi.fn().mockResolvedValue([6, 0]),
}),
vi.doMock("@/lib/cache", () => ({
cache: {
getRedisClient: vi.fn().mockResolvedValue({
eval: vi.fn().mockResolvedValue([6, 0]),
}),
},
}));
// Dynamic import after mocking
@@ -314,10 +322,12 @@ describe("checkRateLimit", () => {
}));
const redisError = new Error("Redis connection failed");
vi.doMock("@/modules/cache/redis", () => ({
getRedisClient: vi.fn().mockReturnValue({
eval: vi.fn().mockRejectedValue(redisError),
}),
vi.doMock("@/lib/cache", () => ({
cache: {
getRedisClient: vi.fn().mockResolvedValue({
eval: vi.fn().mockRejectedValue(redisError),
}),
},
}));
// Dynamic import after mocking

View File

@@ -1,7 +1,7 @@
import { cache } from "@/lib/cache";
import { RATE_LIMITING_DISABLED, SENTRY_DSN } from "@/lib/constants";
import { createCacheKey } from "@/modules/cache/lib/cacheKeys";
import { getRedisClient } from "@/modules/cache/redis";
import * as Sentry from "@sentry/nextjs";
import { createCacheKey } from "@formbricks/cache";
import { logger } from "@formbricks/logger";
import { Result, ok } from "@formbricks/types/error-handlers";
import { TRateLimitConfig, type TRateLimitResponse } from "./types/rate-limit";
@@ -24,7 +24,7 @@ export const checkRateLimit = async (
try {
// Get Redis client
const redis = getRedisClient();
const redis = await cache.getRedisClient();
if (!redis) {
logger.debug(`Redis unavailable`);
return ok({
@@ -38,8 +38,8 @@ export const checkRateLimit = async (
// Calculate TTL to expire exactly at window end, value in seconds
const windowEnd = windowStart + config.interval;
// Convert window end from seconds to milliseconds, subtract current time, then convert back to seconds for Redis EXPIRE
const ttlSeconds = Math.ceil((windowEnd * 1000 - now) / 1000);
// Convert window end from seconds to milliseconds, subtract current time, then convert back to seconds for Redis EXPIRE (at least 1 second)
const ttlSeconds = Math.max(1, Math.ceil((windowEnd * 1000 - now) / 1000));
// Lua script for atomic increment and conditional expire
// This prevents race conditions between INCR and EXPIRE operations
@@ -67,15 +67,18 @@ export const checkRateLimit = async (
const [currentCount, isAllowed] = result;
// Log debug information for every Redis count increase
logger.debug(`Rate limit check`, {
identifier,
currentCount,
limit: config.allowedPerInterval,
window: config.interval,
key,
allowed: isAllowed === 1,
windowEnd,
});
logger.debug(
{
identifier,
currentCount,
limit: config.allowedPerInterval,
window: config.interval,
key,
allowed: isAllowed === 1,
windowEnd,
},
`Rate limit check`
);
const response: TRateLimitResponse = {
allowed: isAllowed === 1,
@@ -92,7 +95,7 @@ export const checkRateLimit = async (
namespace: config.namespace,
};
logger.error(`Rate limit exceeded`, violationContext);
logger.error(violationContext, `Rate limit exceeded`);
if (SENTRY_DSN) {
// Breadcrumb because the exception will be captured in the error handler
@@ -109,7 +112,7 @@ export const checkRateLimit = async (
const errorMessage = `Rate limit check failed`;
const errorContext = { error, identifier, namespace: config.namespace };
logger.error(errorMessage, errorContext);
logger.error(errorContext, errorMessage);
if (SENTRY_DSN) {
// Log error to Sentry

View File

@@ -11,11 +11,13 @@ import { DatabaseError } from "@formbricks/types/errors";
import { TBaseFilter } from "@formbricks/types/segment";
// Mock the cache functions
vi.mock("@/modules/cache/lib/withCache", () => ({
withCache: vi.fn((fn) => fn), // Just execute the function without caching for tests
vi.mock("@/lib/cache", () => ({
cache: {
withCache: vi.fn(async (fn) => await fn()), // Just execute the function without caching for tests
},
}));
vi.mock("@/modules/cache/lib/cacheKeys", () => ({
vi.mock("@formbricks/cache", () => ({
createCacheKey: {
environment: {
segments: vi.fn((environmentId) => `segments-${environmentId}`),

View File

@@ -8,8 +8,10 @@ import { TBaseFilter } from "@formbricks/types/segment";
import { getPersonSegmentIds, getSegments } from "./segments";
// Mock the cache functions
vi.mock("@/modules/cache/lib/withCache", () => ({
withCache: vi.fn((fn) => fn), // Just execute the function without caching for tests
vi.mock("@/lib/cache", () => ({
cache: {
withCache: vi.fn(async (fn) => await fn()), // Just execute the function without caching for tests
},
}));
vi.mock("@/lib/utils/validate", () => ({

View File

@@ -1,43 +1,41 @@
import { cache } from "@/lib/cache";
import { validateInputs } from "@/lib/utils/validate";
import { createCacheKey } from "@/modules/cache/lib/cacheKeys";
import { withCache } from "@/modules/cache/lib/withCache";
import { evaluateSegment } from "@/modules/ee/contacts/segments/lib/segments";
import { Prisma } from "@prisma/client";
import { cache as reactCache } from "react";
import { createCacheKey } from "@formbricks/cache";
import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
import { ZId, ZString } from "@formbricks/types/common";
import { DatabaseError } from "@formbricks/types/errors";
import { TBaseFilter } from "@formbricks/types/segment";
export const getSegments = reactCache((environmentId: string) =>
withCache(
async () => {
try {
const segments = await prisma.segment.findMany({
where: { environmentId },
// Include all necessary fields for evaluateSegment to work
select: {
id: true,
filters: true,
},
});
export const getSegments = reactCache(
async (environmentId: string) =>
await cache.withCache(
async () => {
try {
const segments = await prisma.segment.findMany({
where: { environmentId },
// Include all necessary fields for evaluateSegment to work
select: {
id: true,
filters: true,
},
});
return segments || [];
} catch (error) {
if (error instanceof Prisma.PrismaClientKnownRequestError) {
throw new DatabaseError(error.message);
return segments || [];
} catch (error) {
if (error instanceof Prisma.PrismaClientKnownRequestError) {
throw new DatabaseError(error.message);
}
throw error;
}
throw error;
}
},
{
key: createCacheKey.environment.segments(environmentId),
// This is a temporary fix for the invalidation issues, will be changed later with a proper solution
ttl: 5 * 60 * 1000, // 5 minutes in milliseconds
}
)()
},
createCacheKey.environment.segments(environmentId),
5 * 60 * 1000 // 5 minutes in milliseconds
)
);
export const getPersonSegmentIds = async (

View File

@@ -6,8 +6,10 @@ import { getPersonSegmentIds } from "./segments";
import { updateUser } from "./update-user";
// Mock the cache functions
vi.mock("@/modules/cache/lib/withCache", () => ({
withCache: vi.fn((fn) => fn), // Just execute the function without caching for tests
vi.mock("@/lib/cache", () => ({
cache: {
withCache: vi.fn(async (fn) => await fn()), // Just execute the function without caching for tests
},
}));
vi.mock("@/modules/ee/contacts/lib/attributes", () => ({

View File

@@ -1,6 +1,6 @@
import { createCacheKey } from "@/modules/cache/lib/cacheKeys";
import { withCache } from "@/modules/cache/lib/withCache";
import { cache } from "@/lib/cache";
import { updateAttributes } from "@/modules/ee/contacts/lib/attributes";
import { createCacheKey } from "@formbricks/cache";
import { prisma } from "@formbricks/database";
import { ResourceNotFoundError } from "@formbricks/types/errors";
import { TJsPersonState } from "@formbricks/types/js";
@@ -9,19 +9,17 @@ import { getPersonSegmentIds } from "./segments";
/**
* Cached environment lookup - environments rarely change
*/
const getEnvironment = (environmentId: string) =>
withCache(
const getEnvironment = async (environmentId: string) =>
await cache.withCache(
async () => {
return prisma.environment.findUnique({
where: { id: environmentId },
select: { id: true, type: true },
});
},
{
key: createCacheKey.environment.config(environmentId),
ttl: 60 * 60 * 1000, // 1 hour TTL in milliseconds - environments rarely change
}
)();
createCacheKey.environment.config(environmentId),
60 * 60 * 1000 // 1 hour TTL in milliseconds - environments rarely change
);
/**
* Comprehensive contact data fetcher - gets everything needed in one query

View File

@@ -21,16 +21,17 @@ const mockCache = {
get: vi.fn(),
set: vi.fn(),
del: vi.fn(),
reset: vi.fn(),
store: { name: "memory" },
exists: vi.fn(),
withCache: vi.fn(),
getRedisClient: vi.fn(),
};
vi.mock("@/modules/cache/lib/service", () => ({
getCache: () => Promise.resolve(mockCache),
vi.mock("@/lib/cache", () => ({
cache: mockCache,
}));
// Mock the createCacheKey functions
vi.mock("@/modules/cache/lib/cacheKeys", () => ({
vi.mock("@formbricks/cache", () => ({
createCacheKey: {
license: {
status: (identifier: string) => `fb:license:${identifier}:status`,
@@ -78,6 +79,13 @@ describe("License Core Logic", () => {
mockCache.get.mockReset();
mockCache.set.mockReset();
mockCache.del.mockReset();
mockCache.withCache.mockReset();
// Set up default mock implementations for Result types
mockCache.get.mockResolvedValue({ ok: true, data: null });
mockCache.set.mockResolvedValue({ ok: true });
mockCache.withCache.mockImplementation(async (fn) => await fn());
vi.mocked(prisma.response.count).mockResolvedValue(100);
vi.clearAllMocks();
// Mock window to be undefined for server-side tests
@@ -123,16 +131,16 @@ describe("License Core Logic", () => {
const { getEnterpriseLicense } = await import("./license");
const fetch = (await import("node-fetch")).default as Mock;
mockCache.get.mockImplementation(async (key) => {
if (key.startsWith("fb:license:") && key.endsWith(":status")) {
return mockFetchedLicenseDetails;
}
return null;
});
// Mock cache.withCache to return cached license details (simulating cache hit)
mockCache.withCache.mockResolvedValue(mockFetchedLicenseDetails);
const license = await getEnterpriseLicense();
expect(license).toEqual(expectedActiveLicenseState);
expect(mockCache.get).toHaveBeenCalledWith(expect.stringContaining("fb:license:"));
expect(mockCache.withCache).toHaveBeenCalledWith(
expect.any(Function),
expect.stringContaining("fb:license:"),
expect.any(Number)
);
expect(fetch).not.toHaveBeenCalled();
});
@@ -140,8 +148,10 @@ describe("License Core Logic", () => {
const { getEnterpriseLicense } = await import("./license");
const fetch = (await import("node-fetch")).default as Mock;
mockCache.get.mockResolvedValue(null);
(fetch as Mock).mockResolvedValueOnce({
// Mock cache.withCache to execute the function (simulating cache miss)
mockCache.withCache.mockImplementation(async (fn) => await fn());
fetch.mockResolvedValueOnce({
ok: true,
json: async () => ({ data: mockFetchedLicenseDetails }),
} as any);
@@ -149,18 +159,9 @@ describe("License Core Logic", () => {
const license = await getEnterpriseLicense();
expect(fetch).toHaveBeenCalledTimes(1);
expect(mockCache.set).toHaveBeenCalledWith(
expect(mockCache.withCache).toHaveBeenCalledWith(
expect.any(Function),
expect.stringContaining("fb:license:"),
mockFetchedLicenseDetails,
expect.any(Number)
);
expect(mockCache.set).toHaveBeenCalledWith(
expect.stringContaining("fb:license:"),
{
active: true,
features: mockFetchedLicenseDetails.features,
lastChecked: expect.any(Date),
},
expect.any(Number)
);
expect(license).toEqual(expectedActiveLicenseState);
@@ -177,16 +178,23 @@ describe("License Core Logic", () => {
lastChecked: previousTime,
version: 1,
};
// Mock cache.withCache to return null (simulating fetch failure)
mockCache.withCache.mockResolvedValue(null);
// Mock cache.get to return previous result when requested
mockCache.get.mockImplementation(async (key) => {
if (key.startsWith("fb:license:") && key.endsWith(":status")) return null;
if (key.startsWith("fb:license:") && key.includes(":previous_result")) return mockPreviousResult;
return null;
if (key.includes(":previous_result")) {
return { ok: true, data: mockPreviousResult };
}
return { ok: true, data: null };
});
(fetch as Mock).mockResolvedValueOnce({ ok: false, status: 500 } as any);
fetch.mockResolvedValueOnce({ ok: false, status: 500 } as any);
const license = await getEnterpriseLicense();
expect(fetch).toHaveBeenCalledTimes(1);
expect(mockCache.withCache).toHaveBeenCalled();
expect(license).toEqual({
active: true,
features: mockPreviousResult.features,
@@ -207,16 +215,23 @@ describe("License Core Logic", () => {
lastChecked: previousTime,
version: 1,
};
// Mock cache.withCache to return null (simulating fetch failure)
mockCache.withCache.mockResolvedValue(null);
// Mock cache.get to return previous result when requested
mockCache.get.mockImplementation(async (key) => {
if (key.startsWith("fb:license:") && key.endsWith(":status")) return null;
if (key.startsWith("fb:license:") && key.includes(":previous_result")) return mockPreviousResult;
return null;
if (key.includes(":previous_result")) {
return { ok: true, data: mockPreviousResult };
}
return { ok: true, data: null };
});
(fetch as Mock).mockResolvedValueOnce({ ok: false, status: 500 } as any);
fetch.mockResolvedValueOnce({ ok: false, status: 500 } as any);
const license = await getEnterpriseLicense();
expect(fetch).toHaveBeenCalledTimes(1);
expect(mockCache.withCache).toHaveBeenCalled();
expect(mockCache.set).toHaveBeenCalledWith(
expect.stringContaining("fb:license:"),
{
@@ -269,8 +284,13 @@ describe("License Core Logic", () => {
const { getEnterpriseLicense } = await import("./license");
const fetch = (await import("node-fetch")).default as Mock;
mockCache.get.mockResolvedValue(null);
(fetch as Mock).mockRejectedValueOnce(new Error("Network error"));
// Mock cache.withCache to return null (simulating fetch failure)
mockCache.withCache.mockResolvedValue(null);
// Mock cache.get to return no previous result
mockCache.get.mockResolvedValue({ ok: true, data: null });
fetch.mockRejectedValueOnce(new Error("Network error"));
const license = await getEnterpriseLicense();
const expectedFeatures: TEnterpriseLicenseFeatures = {
@@ -312,6 +332,7 @@ describe("License Core Logic", () => {
vi.resetAllMocks();
mockCache.get.mockReset();
mockCache.set.mockReset();
mockCache.withCache.mockReset();
const fetch = (await import("node-fetch")).default as Mock;
fetch.mockReset();
@@ -339,14 +360,20 @@ describe("License Core Logic", () => {
});
expect(mockCache.get).not.toHaveBeenCalled();
expect(mockCache.set).not.toHaveBeenCalled();
expect(mockCache.withCache).not.toHaveBeenCalled();
});
test("should handle fetch throwing an error and use grace period or return inactive", async () => {
const { getEnterpriseLicense } = await import("./license");
const fetch = (await import("node-fetch")).default as Mock;
mockCache.get.mockResolvedValue(null);
(fetch as Mock).mockRejectedValueOnce(new Error("Network error"));
// Mock cache.withCache to return null (simulating fetch failure)
mockCache.withCache.mockResolvedValue(null);
// Mock cache.get to return no previous result
mockCache.get.mockResolvedValue({ ok: true, data: null });
fetch.mockRejectedValueOnce(new Error("Network error"));
const license = await getEnterpriseLicense();
expect(license).toEqual({
@@ -372,31 +399,22 @@ describe("License Core Logic", () => {
HTTP_PROXY: undefined,
},
}));
// Import hashString to compute the expected cache key
const { hashString } = await import("@/lib/hash-string");
const hashedKey = hashString("test-license-key");
const detailsKey = `fb:license:${hashedKey}:status`;
// Patch the cache mock to match the actual key logic
mockCache.get.mockImplementation(async (key) => {
if (key === detailsKey) {
return {
status: "active",
features: {
isMultiOrgEnabled: true,
contacts: true,
projects: 5,
whitelabel: true,
removeBranding: true,
twoFactorAuth: true,
sso: true,
saml: true,
spamProtection: true,
ai: true,
auditLogs: true,
},
};
}
return null;
// Mock cache.withCache to return license details
mockCache.withCache.mockResolvedValue({
status: "active",
features: {
isMultiOrgEnabled: true,
contacts: true,
projects: 5,
whitelabel: true,
removeBranding: true,
twoFactorAuth: true,
sso: true,
saml: true,
spamProtection: true,
ai: true,
auditLogs: true,
},
});
// Import after env and mocks are set
const { getLicenseFeatures } = await import("./license");
@@ -418,12 +436,9 @@ describe("License Core Logic", () => {
test("should return null if license is inactive", async () => {
const { getLicenseFeatures } = await import("./license");
mockCache.get.mockImplementation(async (key) => {
if (key.startsWith("fb:license:") && key.endsWith(":status")) {
return { status: "expired", features: null };
}
return null;
});
// Mock cache.withCache to return expired license
mockCache.withCache.mockResolvedValue({ status: "expired", features: null });
const features = await getLicenseFeatures();
expect(features).toBeNull();
@@ -431,7 +446,9 @@ describe("License Core Logic", () => {
test("should return null if getEnterpriseLicense throws", async () => {
const { getLicenseFeatures } = await import("./license");
mockCache.get.mockRejectedValue(new Error("Cache error"));
// Mock cache.withCache to throw an error
mockCache.withCache.mockRejectedValue(new Error("Cache error"));
const features = await getLicenseFeatures();
expect(features).toBeNull();
@@ -444,14 +461,23 @@ describe("License Core Logic", () => {
mockCache.get.mockReset();
mockCache.set.mockReset();
mockCache.del.mockReset();
mockCache.withCache.mockReset();
vi.resetModules();
});
test("should use 'browser' as cache key in browser environment", async () => {
vi.stubGlobal("window", {});
// Set up default mock for cache.withCache
mockCache.withCache.mockImplementation(async (fn) => await fn());
const { getEnterpriseLicense } = await import("./license");
await getEnterpriseLicense();
expect(mockCache.get).toHaveBeenCalledWith(expect.stringContaining("fb:license:browser:status"));
expect(mockCache.withCache).toHaveBeenCalledWith(
expect.any(Function),
expect.stringContaining("fb:license:browser:status"),
expect.any(Number)
);
});
test("should use 'no-license' as cache key when ENTERPRISE_LICENSE_KEY is not set", async () => {
@@ -470,6 +496,7 @@ describe("License Core Logic", () => {
await getEnterpriseLicense();
// The cache should NOT be accessed if there is no license key
expect(mockCache.get).not.toHaveBeenCalled();
expect(mockCache.withCache).not.toHaveBeenCalled();
});
test("should use hashed license key as cache key when ENTERPRISE_LICENSE_KEY is set", async () => {
@@ -485,12 +512,18 @@ describe("License Core Logic", () => {
HTTP_PROXY: undefined,
},
}));
// Set up default mock for cache.withCache
mockCache.withCache.mockImplementation(async (fn) => await fn());
const { hashString } = await import("@/lib/hash-string");
const expectedHash = hashString(testLicenseKey);
const { getEnterpriseLicense } = await import("./license");
await getEnterpriseLicense();
expect(mockCache.get).toHaveBeenCalledWith(
expect.stringContaining(`fb:license:${expectedHash}:status`)
expect(mockCache.withCache).toHaveBeenCalledWith(
expect.any(Function),
expect.stringContaining(`fb:license:${expectedHash}:status`),
expect.any(Number)
);
});
});

View File

@@ -1,8 +1,7 @@
import "server-only";
import { cache } from "@/lib/cache";
import { env } from "@/lib/env";
import { hashString } from "@/lib/hash-string";
import { createCacheKey } from "@/modules/cache/lib/cacheKeys";
import { getCache } from "@/modules/cache/lib/service";
import {
TEnterpriseLicenseDetails,
TEnterpriseLicenseFeatures,
@@ -11,6 +10,7 @@ import { HttpsProxyAgent } from "https-proxy-agent";
import fetch from "node-fetch";
import { cache as reactCache } from "react";
import { z } from "zod";
import { createCacheKey } from "@formbricks/cache";
import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
@@ -146,12 +146,11 @@ const getPreviousResult = async (): Promise<TPreviousResult> => {
}
try {
const formbricksCache = await getCache();
const cachedData = await formbricksCache.get<TPreviousResult>(getCacheKeys().PREVIOUS_RESULT_CACHE_KEY);
if (cachedData) {
const result = await cache.get<TPreviousResult>(getCacheKeys().PREVIOUS_RESULT_CACHE_KEY);
if (result.ok && result.data) {
return {
...cachedData,
lastChecked: new Date(cachedData.lastChecked),
...result.data,
lastChecked: new Date(result.data.lastChecked),
};
}
} catch (error) {
@@ -169,12 +168,14 @@ const setPreviousResult = async (previousResult: TPreviousResult) => {
if (typeof window !== "undefined") return;
try {
const formbricksCache = await getCache();
await formbricksCache.set(
const result = await cache.set(
getCacheKeys().PREVIOUS_RESULT_CACHE_KEY,
previousResult,
CONFIG.CACHE.PREVIOUS_RESULT_TTL_MS
);
if (!result.ok) {
logger.warn("Failed to cache previous result", { error: result.error });
}
} catch (error) {
logger.error("Failed to set previous result in cache", { error });
}
@@ -314,31 +315,13 @@ export const fetchLicense = async (): Promise<TEnterpriseLicenseDetails | null>
return null;
}
try {
const formbricksCache = await getCache();
const cachedLicense = await formbricksCache.get<TEnterpriseLicenseDetails>(
getCacheKeys().FETCH_LICENSE_CACHE_KEY
);
if (cachedLicense) {
return cachedLicense;
}
const licenseDetails = await fetchLicenseFromServerInternal();
if (licenseDetails) {
await formbricksCache.set(
getCacheKeys().FETCH_LICENSE_CACHE_KEY,
licenseDetails,
CONFIG.CACHE.FETCH_LICENSE_TTL_MS
);
}
return licenseDetails;
} catch (error) {
logger.error("Failed to fetch license due to cache error", { error });
// Fallback to direct API call without cache
return fetchLicenseFromServerInternal();
}
return await cache.withCache(
async () => {
return await fetchLicenseFromServerInternal();
},
getCacheKeys().FETCH_LICENSE_CACHE_KEY,
CONFIG.CACHE.FETCH_LICENSE_TTL_MS
);
};
export const getEnterpriseLicense = reactCache(

View File

@@ -1,9 +1,9 @@
import { createCacheKey } from "@/modules/cache/lib/cacheKeys";
import { withCache } from "@/modules/cache/lib/withCache";
import { cache } from "@/lib/cache";
import { transformPrismaSurvey } from "@/modules/survey/lib/utils";
import { Prisma } from "@prisma/client";
import "@testing-library/jest-dom/vitest";
import { beforeEach, describe, expect, test, vi } from "vitest";
import { createCacheKey } from "@formbricks/cache";
import { prisma } from "@formbricks/database";
import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSurvey } from "@formbricks/types/surveys/types";
@@ -17,19 +17,22 @@ import {
} from "./data";
// Mock dependencies
vi.mock("@/modules/cache/lib/cacheKeys", () => ({
vi.mock("@formbricks/cache", () => ({
createCacheKey: {
survey: {
metadata: vi.fn(),
},
organization: {
billing: vi.fn(),
},
custom: vi.fn(),
},
}));
vi.mock("@/modules/cache/lib/withCache", () => ({
withCache: vi.fn(),
// Helper to create branded CacheKey for tests
const mockCacheKey = (key: string) => key as any;
vi.mock("@/lib/cache", () => ({
cache: {
withCache: vi.fn(),
},
}));
vi.mock("@/modules/survey/lib/utils", () => ({
@@ -46,6 +49,7 @@ vi.mock("@formbricks/database", () => ({
},
organization: {
findFirst: vi.fn(),
findUnique: vi.fn(),
},
},
}));
@@ -472,33 +476,30 @@ describe("data", () => {
test("should fetch organization billing successfully", async () => {
const organizationId = "org-1";
const mockCacheFunction = vi.fn().mockResolvedValue(mockBilling);
vi.mocked(createCacheKey.organization.billing).mockReturnValue("billing-cache-key");
vi.mocked(withCache).mockReturnValue(mockCacheFunction);
vi.mocked(prisma.organization.findFirst).mockResolvedValue(mockOrganization as any);
vi.mocked(createCacheKey.organization.billing).mockReturnValue(mockCacheKey("billing-cache-key"));
vi.mocked(cache.withCache).mockResolvedValue(mockBilling);
vi.mocked(prisma.organization.findUnique).mockResolvedValue(mockOrganization as any);
const result = await getOrganizationBilling(organizationId);
expect(result).toEqual(mockBilling);
expect(createCacheKey.organization.billing).toHaveBeenCalledWith(organizationId);
expect(withCache).toHaveBeenCalledWith(expect.any(Function), {
key: "billing-cache-key",
ttl: 60 * 60 * 24 * 1000,
});
expect(cache.withCache).toHaveBeenCalledWith(
expect.any(Function),
"billing-cache-key",
60 * 60 * 24 * 1000
);
});
test("should throw ResourceNotFoundError when organization not found", async () => {
const organizationId = "nonexistent-org";
const mockCacheFunction = vi.fn().mockImplementation(async () => {
vi.mocked(prisma.organization.findFirst).mockResolvedValue(null);
const cacheFunction = vi.mocked(withCache).mock.calls[0][0];
return await cacheFunction();
vi.mocked(createCacheKey.organization.billing).mockReturnValue(mockCacheKey("billing-cache-key"));
vi.mocked(cache.withCache).mockImplementation(async (fn) => {
vi.mocked(prisma.organization.findUnique).mockResolvedValue(null);
return await fn();
});
vi.mocked(createCacheKey.organization.billing).mockReturnValue("billing-cache-key");
vi.mocked(withCache).mockReturnValue(mockCacheFunction);
await expect(getOrganizationBilling(organizationId)).rejects.toThrow(ResourceNotFoundError);
await expect(getOrganizationBilling(organizationId)).rejects.toThrow("Organization");
});
@@ -510,15 +511,12 @@ describe("data", () => {
clientVersion: "5.0.0",
});
const mockCacheFunction = vi.fn().mockImplementation(async () => {
vi.mocked(prisma.organization.findFirst).mockRejectedValue(prismaError);
const cacheFunction = vi.mocked(withCache).mock.calls[0][0];
return await cacheFunction();
vi.mocked(createCacheKey.organization.billing).mockReturnValue(mockCacheKey("billing-cache-key"));
vi.mocked(cache.withCache).mockImplementation(async (fn) => {
vi.mocked(prisma.organization.findUnique).mockRejectedValue(prismaError);
return await fn();
});
vi.mocked(createCacheKey.organization.billing).mockReturnValue("billing-cache-key");
vi.mocked(withCache).mockReturnValue(mockCacheFunction);
await expect(getOrganizationBilling(organizationId)).rejects.toThrow(DatabaseError);
});
@@ -526,15 +524,12 @@ describe("data", () => {
const organizationId = "org-1";
const genericError = new Error("Generic error");
const mockCacheFunction = vi.fn().mockImplementation(async () => {
vi.mocked(prisma.organization.findFirst).mockRejectedValue(genericError);
const cacheFunction = vi.mocked(withCache).mock.calls[0][0];
return await cacheFunction();
vi.mocked(createCacheKey.organization.billing).mockReturnValue(mockCacheKey("billing-cache-key"));
vi.mocked(cache.withCache).mockImplementation(async (fn) => {
vi.mocked(prisma.organization.findUnique).mockRejectedValue(genericError);
return await fn();
});
vi.mocked(createCacheKey.organization.billing).mockReturnValue("billing-cache-key");
vi.mocked(withCache).mockReturnValue(mockCacheFunction);
await expect(getOrganizationBilling(organizationId)).rejects.toThrow(genericError);
});
});

View File

@@ -1,9 +1,9 @@
import "server-only";
import { createCacheKey } from "@/modules/cache/lib/cacheKeys";
import { withCache } from "@/modules/cache/lib/withCache";
import { cache } from "@/lib/cache";
import { transformPrismaSurvey } from "@/modules/survey/lib/utils";
import { Prisma } from "@prisma/client";
import { cache as reactCache } from "react";
import { createCacheKey } from "@formbricks/cache";
import { prisma } from "@formbricks/database";
import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSurvey } from "@formbricks/types/surveys/types";
@@ -223,30 +223,29 @@ export const getExistingContactResponse = reactCache((surveyId: string, contactI
* Get organization billing information for survey limits
* Cached separately with longer TTL
*/
export const getOrganizationBilling = reactCache((organizationId: string) =>
withCache(
async () => {
try {
const organization = await prisma.organization.findFirst({
where: { id: organizationId },
select: { billing: true },
});
export const getOrganizationBilling = reactCache(
async (organizationId: string) =>
await cache.withCache(
async () => {
try {
const organization = await prisma.organization.findUnique({
where: { id: organizationId },
select: { billing: true },
});
if (!organization) {
throw new ResourceNotFoundError("Organization", organizationId);
}
if (!organization) {
throw new ResourceNotFoundError("Organization", organizationId);
}
return organization.billing;
} catch (error) {
if (error instanceof Prisma.PrismaClientKnownRequestError) {
throw new DatabaseError(error.message);
return organization.billing;
} catch (error) {
if (error instanceof Prisma.PrismaClientKnownRequestError) {
throw new DatabaseError(error.message);
}
throw error;
}
throw error;
}
},
{
key: createCacheKey.organization.billing(organizationId),
ttl: 60 * 60 * 24 * 1000, // 24 hours in milliseconds - billing info changes rarely
}
)()
},
createCacheKey.organization.billing(organizationId),
60 * 60 * 24 * 1000 // 24 hours in milliseconds - billing info changes rarely
)
);

View File

@@ -26,6 +26,7 @@
"@dnd-kit/modifiers": "9.0.0",
"@dnd-kit/sortable": "10.0.0",
"@dnd-kit/utilities": "3.2.2",
"@formbricks/cache": "workspace:*",
"@formbricks/database": "workspace:*",
"@formbricks/i18n-utils": "workspace:*",
"@formbricks/js-core": "workspace:*",

View File

@@ -0,0 +1,291 @@
# @formbricks/cache Package Rules
## Core Principles
### Redis-Only Architecture
- **Mandatory Redis**: All deployments MUST use Redis via `REDIS_URL` environment variable
- **Singleton Client**: Use `getCacheService()`, which returns a singleton instance per process
- **Result Types**: Core operations return `Result<T, CacheError>` for explicit error handling
- **Never-Failing Wrappers**: `withCache()` always returns the function result, handling cache errors internally
### Type Safety & Validation
- **Branded Cache Keys**: Use `CacheKey` type to prevent raw string usage (see the sketch after this list)
- **Runtime Validation**: Use `validateInputs()` function with Zod schemas
- **Error Codes**: Use `ErrorCode` enum for consistent error categorization
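The brand itself lives in `types/keys.ts` (not shown in this diff); a minimal sketch of the idea, with the brand symbol name assumed:
```typescript
// Sketch only - the actual brand name/shape in types/keys.ts may differ
declare const cacheKeyBrand: unique symbol;
export type CacheKey = string & { readonly [cacheKeyBrand]: "CacheKey" };

// A raw string is not assignable to CacheKey, so only helpers that cast
// internally (e.g. makeCacheKey) can produce a valid key.
```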
## File Organization
```text
src/
├── index.ts # Main exports (getCacheService, createCacheKey, types)
├── client.ts # Singleton cache service client with Redis connection
├── service.ts # Core CacheService class with Result types + withCache helpers
├── cache-keys.ts # Cache key generators with branded types
├── utils/
│ ├── validation.ts # Zod validation utilities
│ └── key.ts # makeCacheKey utility (not exported)
└── *.test.ts # Unit tests
types/
├── keys.ts # Branded CacheKey type & CustomCacheNamespace
├── client.ts # RedisClient type definition
├── service.ts # Zod schemas and validateInputs function
├── error.ts # Result type system and error definitions
└── *.test.ts # Type tests
```
## Required Patterns
### Singleton Client Pattern
```typescript
// ✅ GOOD - Use singleton client
import { getCacheService } from "@formbricks/cache";
const result = await getCacheService();
if (!result.ok) {
// Handle initialization error
throw new Error(`Cache failed: ${result.error.code}`);
}
const cacheService = result.data;
// ❌ BAD - CacheService class not exported for direct instantiation
import { CacheService } from "@formbricks/cache"; // Won't work!
```
### Result Type Error Handling
```typescript
// ✅ GOOD - Core operations return Result<T, CacheError>
const result = await cacheService.get<UserData>(key);
if (!result.ok) {
switch (result.error.code) {
case ErrorCode.CacheValidationError:
case ErrorCode.RedisOperationError:
case ErrorCode.CacheCorruptionError:
// Handle based on error code
}
return;
}
const data = result.data; // Type-safe access
// ✅ GOOD - withCache never fails, always returns function result
const environmentData = await cacheService.withCache(
() => fetchEnvironmentFromDB(environmentId),
createCacheKey.environment.state(environmentId),
60000
); // Returns T directly, handles cache errors internally
```
### Core Validation & Error Types
```typescript
// Unified error interface
interface CacheError { code: ErrorCode; }
enum ErrorCode {
Unknown = "unknown",
CacheValidationError = "cache_validation_error",
RedisConnectionError = "redis_connection_error",
RedisConfigurationError = "redis_configuration_error", // value assumed; returned when REDIS_URL is missing
RedisOperationError = "redis_operation_error",
CacheCorruptionError = "cache_corruption_error",
}
// Key validation: min 1 char, non-whitespace
export const ZCacheKey = z.string().min(1).refine(k => k.trim().length > 0);
// TTL validation: min 1000ms for Redis seconds conversion
export const ZTtlMs = z.number().int().min(1000).finite();
// Generic validation function
export function validateInputs(...pairs: [unknown, ZodType][]): Result<unknown[], CacheError>;
```
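For reference, the service layer composes these validators per operation; this is how `CacheService.set` in this commit guards its inputs:
```typescript
// Validate key and TTL in one call before touching Redis
const validation = validateInputs([key, ZCacheKey], [ttlMs, ZTtlMs]);
if (!validation.ok) {
  return validation; // CacheValidationError surfaces as a Result - nothing throws
}
```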
## Cache Key Generation
### Key Generators (cache-keys.ts)
```typescript
export const createCacheKey = {
environment: {
state: (environmentId: string): CacheKey,
config: (environmentId: string): CacheKey,
segments: (environmentId: string): CacheKey,
},
organization: {
billing: (organizationId: string): CacheKey,
},
license: {
status: (organizationId: string): CacheKey,
previous_result: (organizationId: string): CacheKey,
},
rateLimit: {
core: (namespace: string, identifier: string, windowStart: number): CacheKey,
},
custom: (namespace: CustomCacheNamespace, identifier: string, subResource?: string): CacheKey,
};
```
### Internal Key Utility (utils/key.ts)
- **Not exported** from package - internal only (a minimal sketch follows this list)
- **Validates** `fb:resource:identifier[:subresource]*` pattern
- **Prevents empty parts** and malformed keys
- **Runtime validation** with regex patterns
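A minimal sketch of what this internal utility might look like, inferred from the key format and error messages exercised in the tests; the real `utils/key.ts` may add stricter regex validation:
```typescript
import type { CacheKey } from "@/types/keys";

// Sketch only: joins parts under the shared "fb" prefix and rejects empty parts
export function makeCacheKey(...parts: string[]): CacheKey {
  if (parts.length === 0 || parts.some((part) => part.trim().length === 0)) {
    throw new Error("Invalid Cache key: Parts cannot be empty");
  }
  return ["fb", ...parts].join(":") as CacheKey;
}
```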
## Service API Methods
```typescript
// Core operations return Result<T, CacheError>
await cacheService.get<T>(key): Promise<Result<T | null, CacheError>>
await cacheService.set(key, value, ttlMs): Promise<Result<void, CacheError>>
await cacheService.del(keys: CacheKey[]): Promise<Result<void, CacheError>>
await cacheService.exists(key): Promise<Result<boolean, CacheError>>
// withCache never fails - returns T directly, handles cache errors internally
await cacheService.withCache<T>(fn, key, ttlMs): Promise<T>
// Direct Redis access for advanced operations (rate limiting, etc.)
cacheService.getRedisClient(): RedisClient | null
```
### Service Implementation - Cognitive Complexity Reduction
The `withCache` method is split into helper methods to reduce cognitive complexity:
```typescript
// Main method (simplified)
async withCache<T>(fn: () => Promise<T>, key: CacheKey, ttlMs: number): Promise<T> {
// Early returns for Redis availability and validation
const cachedValue = await this.tryGetCachedValue<T>(key, ttlMs);
if (cachedValue !== undefined) return cachedValue;
const fresh = await fn();
await this.trySetCache(key, fresh, ttlMs);
return fresh;
}
// Helper methods extract complex logic
private async tryGetCachedValue<T>(key, ttlMs): Promise<T | undefined>
private async trySetCache(key, value, ttlMs): Promise<void>
```
## Null vs Undefined Handling
### Caching Behavior
- **`null` values**: Cached normally (represents intentional absence)
- **`undefined` values**: NOT cached (preserves JavaScript semantics)
- **Cache miss**: Returns `null` (Redis returns null for missing keys)
```typescript
// ✅ GOOD - Null values are cached
const nullResult = await cacheService.withCache(
() => Promise.resolve(null), // Intentional null
key,
ttl
); // Returns null, value is cached
// ✅ GOOD - Undefined values are NOT cached
const undefinedResult = await cacheService.withCache(
() => Promise.resolve(undefined), // Undefined result
key,
ttl
); // Returns undefined, value is NOT cached
// ✅ GOOD - Cache miss detection
const result = await cacheService.get<string>(key);
if (result.ok && result.data === null) {
const exists = await cacheService.exists(key);
if (exists.ok && exists.data) {
// Key exists with null value (cached null)
} else {
// True cache miss
}
}
```
## Logging Standards
### Error Logging Strategy
- **Detailed logging at source** - Log full context where errors occur
- **Clean Result objects** - Only error codes in Result, not messages
- **Level strategy**:
- `debug`: Cache GET failures in withCache (expected fallback)
- `debug`: Cache SET failures in withCache (logged but not critical)
- `warn`: Cache unavailable in withCache (fallback to direct execution)
- `warn`: Data corruption (concerning but recoverable)
- `error`: Direct operation failures
```typescript
// ✅ GOOD - Rich logging, clean Result
logger.error("Cache validation failed", {
value,
error: "TTL must be at least 1000ms",
validationErrors: [...]
});
return err({ code: ErrorCode.CacheValidationError });
// ✅ GOOD - withCache handles errors gracefully
logger.warn({ error }, "Cache unavailable; executing function directly");
return await fn(); // Always return function result
```
## Testing Patterns
### Key Test Areas
- **Result error cases**: Validation, Redis, corruption errors
- **Null vs undefined**: Caching behavior differences
- **withCache fallbacks**: Cache failures gracefully handled (condensed example below)
- **Edge cases**: Empty arrays, invalid TTLs, malformed keys
- **Mock dependencies**: Redis client, logger with all levels
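A condensed example of the withCache fallback case, adapted from this commit's `service.test.ts` (mock shape abbreviated):
```typescript
import { expect, test, vi } from "vitest";
import type { RedisClient } from "../types/client";
import type { CacheKey } from "../types/keys";
import { CacheService } from "./service";

test("withCache returns the fresh value when the cache read fails", async () => {
  const mockRedis = {
    get: vi.fn().mockRejectedValue(new Error("Redis connection failed")),
    setEx: vi.fn(),
    del: vi.fn(),
    exists: vi.fn(),
    isReady: true,
    isOpen: true,
  };
  const service = new CacheService(mockRedis as unknown as RedisClient);
  const fn = vi.fn().mockResolvedValue({ data: "fresh" });

  const result = await service.withCache(fn, "test:key" as CacheKey, 60000);

  // Cache error is swallowed internally; the function result is returned
  expect(result).toEqual({ data: "fresh" });
  expect(fn).toHaveBeenCalledOnce();
});
```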
## Web App Integration Pattern
### Cache Facade (apps/web/lib/cache/index.ts)
The web app uses a simplified Proxy-based facade that calls `getCacheService()` directly:
```typescript
// ✅ GOOD - Use cache facade in web app
import { cache } from "@/lib/cache";
// Direct cache operations
const result = await cache.get<UserData>(key);
const success = await cache.set(key, data, ttl);
// Never-failing withCache
const environmentData = await cache.withCache(
() => fetchEnvironmentFromDB(environmentId),
createCacheKey.environment.state(environmentId),
60000
);
// Advanced Redis access for rate limiting
const redis = await cache.getRedisClient();
```
### Proxy Implementation
- **No Singleton Management**: Calls `getCacheService()` for each operation
- **Proxy Pattern**: Transparent method forwarding to underlying cache service
- **Graceful Degradation**: withCache falls back to direct execution on cache failure (sketched below)
- **Server-Only**: Uses "server-only" import to prevent client-side usage
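A simplified, non-Proxy sketch of the behavior described above (the real `apps/web/lib/cache/index.ts` forwards every method transparently via a `Proxy`; only two methods are shown here):
```typescript
import "server-only";
import { getCacheService, type CacheKey } from "@formbricks/cache";

export const cache = {
  // Never-failing wrapper: degrade to direct execution if the service is unavailable
  async withCache<T>(fn: () => Promise<T>, key: CacheKey, ttlMs: number): Promise<T> {
    const result = await getCacheService();
    if (!result.ok) return await fn();
    return result.data.withCache(fn, key, ttlMs);
  },
  // Advanced access for rate limiting; null when Redis is not ready
  async getRedisClient() {
    const result = await getCacheService();
    return result.ok ? result.data.getRedisClient() : null;
  },
};
```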
## Import/Export Standards
```typescript
// ✅ GOOD - Package root exports (index.ts)
export { getCacheService } from "./client";
export type { CacheService } from "./service";
export { createCacheKey } from "./cache-keys";
export type { CacheKey } from "../types/keys";
export type { Result, CacheError } from "../types/error";
export { CacheErrorClass, ErrorCode } from "../types/error";
// ❌ BAD - Don't export these (encapsulation)
// export { createRedisClientFromEnv } from "./client"; // Internal only
// export type { RedisClient } from "../types/client"; // Internal only
// export { CacheService } from "./service"; // Only type exported
```
## Key Rules Summary
1. **Singleton Client**: Use `getCacheService()` - returns singleton per process
2. **Result Types**: Core ops return `Result<T, CacheError>` - no throwing
3. **Never-Failing withCache**: Returns `T` directly, handles cache errors internally
4. **Validation**: Use `validateInputs()` function for all input validation
5. **Error Interface**: Single `CacheError` interface with just `code` field
6. **Logging**: Rich logging at source, clean Results for consumers
7. **TTL Minimum**: 1000ms minimum for Redis conversion (ms → seconds)
8. **Type Safety**: Branded `CacheKey` type prevents raw string usage
9. **Encapsulation**: RedisClient and createRedisClientFromEnv are internal only
10. **Cognitive Complexity**: Split complex methods into focused helper methods

7
packages/cache/.eslintrc.cjs vendored Normal file
View File

@@ -0,0 +1,7 @@
module.exports = {
extends: ["@formbricks/eslint-config/library.js"],
parserOptions: {
project: "tsconfig.json",
tsconfigRootDir: __dirname,
},
};

51
packages/cache/package.json vendored Normal file
View File

@@ -0,0 +1,51 @@
{
"name": "@formbricks/cache",
"private": true,
"type": "module",
"version": "0.1.0",
"homepage": "https://formbricks.com",
"description": "Unified Redis cache for Formbricks",
"main": "./dist/index.js",
"types": "./dist/src/index.d.ts",
"repository": {
"type": "git",
"url": "https://github.com/formbricks/formbricks"
},
"keywords": [
"Formbricks",
"cache",
"redis",
"caching"
],
"files": [
"dist"
],
"exports": {
"types": "./dist/src/index.d.ts",
"import": "./dist/index.js",
"require": "./dist/index.cjs"
},
"scripts": {
"clean": "rimraf .turbo node_modules coverage dist",
"lint": "eslint . --ext .ts,.js",
"lint:fix": "eslint . --ext .ts,.js --fix",
"lint:report": "eslint . --format json --output-file ../../lint-results/cache.json",
"build": "tsc && vite build",
"test": "vitest run",
"test:coverage": "vitest run --coverage",
"go": "vite build --watch --mode dev"
},
"author": "Formbricks <hola@formbricks.com>",
"dependencies": {
"@formbricks/logger": "workspace:*",
"redis": "5.8.1",
"zod": "3.24.4"
},
"devDependencies": {
"@formbricks/config-typescript": "workspace:*",
"@formbricks/eslint-config": "workspace:*",
"vite": "6.3.5",
"vitest": "3.1.3",
"@vitest/coverage-v8": "3.1.3"
}
}

219
packages/cache/src/cache-keys.test.ts vendored Normal file
View File

@@ -0,0 +1,219 @@
import { describe, expect, test } from "vitest";
import type { CacheKey } from "@/types/keys";
import { createCacheKey } from "./cache-keys";
describe("@formbricks/cache cacheKeys", () => {
describe("createCacheKey", () => {
describe("environment namespace", () => {
test("should create environment state key", () => {
const key = createCacheKey.environment.state("env-123");
expect(key).toBe("fb:env:env-123:state");
// Verify it returns branded CacheKey type
expect(typeof key).toBe("string");
});
test("should create environment config key", () => {
const key = createCacheKey.environment.config("env-abc");
expect(key).toBe("fb:env:env-abc:config");
});
test("should create environment segments key", () => {
const key = createCacheKey.environment.segments("env-def");
expect(key).toBe("fb:env:env-def:segments");
});
test("should handle special characters in environment IDs", () => {
const key = createCacheKey.environment.state("env-test_123-special");
expect(key).toBe("fb:env:env-test_123-special:state");
});
test("should throw error for empty environment ID", () => {
expect(() => createCacheKey.environment.state("")).toThrow(
"Invalid Cache key: Parts cannot be empty"
);
});
});
describe("organization namespace", () => {
test("should create organization billing key", () => {
const key = createCacheKey.organization.billing("org-123");
expect(key).toBe("fb:org:org-123:billing");
});
test("should handle complex organization IDs", () => {
const key = createCacheKey.organization.billing("org-enterprise-team_123");
expect(key).toBe("fb:org:org-enterprise-team_123:billing");
});
test("should throw error for empty organization ID", () => {
expect(() => createCacheKey.organization.billing("")).toThrow(
"Invalid Cache key: Parts cannot be empty"
);
});
});
describe("license namespace", () => {
test("should create license status key", () => {
const key = createCacheKey.license.status("org-123");
expect(key).toBe("fb:license:org-123:status");
});
test("should create license previous_result key", () => {
const key = createCacheKey.license.previous_result("org-def");
expect(key).toBe("fb:license:org-def:previous_result");
});
test("should handle UUID-style organization IDs", () => {
const key = createCacheKey.license.status("550e8400-e29b-41d4-a716-446655440000");
expect(key).toBe("fb:license:550e8400-e29b-41d4-a716-446655440000:status");
});
test("should throw error for empty organization ID in license keys", () => {
expect(() => createCacheKey.license.status("")).toThrow("Invalid Cache key: Parts cannot be empty");
expect(() => createCacheKey.license.previous_result("")).toThrow(
"Invalid Cache key: Parts cannot be empty"
);
});
});
describe("rateLimit namespace", () => {
test("should create rate limit core key", () => {
const key = createCacheKey.rateLimit.core("api", "user-123", 1640995200);
expect(key).toBe("fb:rate_limit:api:user-123:1640995200");
});
test("should handle different rate limit namespaces", () => {
const apiKey = createCacheKey.rateLimit.core("api", "key-abc", 1640995200);
expect(apiKey).toBe("fb:rate_limit:api:key-abc:1640995200");
const loginKey = createCacheKey.rateLimit.core("auth:login", "user-456", 1640995300);
expect(loginKey).toBe("fb:rate_limit:auth:login:user-456:1640995300");
});
test("should convert window start number to string", () => {
const key = createCacheKey.rateLimit.core("webhook", "endpoint-789", 0);
expect(key).toBe("fb:rate_limit:webhook:endpoint-789:0");
});
test("should throw error for empty parameters", () => {
expect(() => createCacheKey.rateLimit.core("", "user-123", 1640995200)).toThrow(
"Invalid Cache key: Parts cannot be empty"
);
expect(() => createCacheKey.rateLimit.core("api", "", 1640995200)).toThrow(
"Invalid Cache key: Parts cannot be empty"
);
});
});
describe("custom namespace", () => {
test("should create custom key with subResource", () => {
const key = createCacheKey.custom("analytics", "user-456", "daily-stats");
expect(key).toBe("fb:analytics:user-456:daily-stats");
});
test("should create custom key without subResource", () => {
const key = createCacheKey.custom("analytics", "user-789");
expect(key).toBe("fb:analytics:user-789");
});
test("should handle complex subResources", () => {
const key = createCacheKey.custom("analytics", "user-123", "dashboard:metrics:daily");
expect(key).toBe("fb:analytics:user-123:dashboard:metrics:daily");
});
test("should restrict to valid namespaces only", () => {
// TypeScript should prevent invalid namespaces at compile time
// Test with currently valid namespace
const key = createCacheKey.custom("analytics", "test-id");
expect(key).toBe("fb:analytics:test-id");
});
test("should throw error for empty identifier", () => {
expect(() => createCacheKey.custom("analytics", "")).toThrow(
"Invalid Cache key: Parts cannot be empty"
);
});
test("should throw error for empty subResource when provided", () => {
expect(() => createCacheKey.custom("analytics", "user-123", "")).toThrow(
"Invalid Cache key: Parts cannot be empty"
);
});
});
});
describe("CacheKey type safety", () => {
test("should return CacheKey branded type", () => {
const key = createCacheKey.environment.state("test-env");
// This function would only accept CacheKey, not raw string
const acceptsCacheKey = (cacheKey: CacheKey): string => cacheKey;
// This should work without TypeScript errors
expect(acceptsCacheKey(key)).toBe("fb:env:test-env:state");
// Raw string would not be accepted (TypeScript compile-time check)
// acceptsCacheKey("fb:env:test:state"); // This would cause TS error
});
test("should work with all namespace keys", () => {
const keys = [
createCacheKey.environment.state("env-1"),
createCacheKey.environment.config("env-1"),
createCacheKey.environment.segments("env-1"),
createCacheKey.organization.billing("org-1"),
createCacheKey.license.status("org-1"),
createCacheKey.license.previous_result("org-1"),
createCacheKey.rateLimit.core("api", "user-1", 123456),
createCacheKey.custom("analytics", "temp-1"),
createCacheKey.custom("analytics", "temp-1", "sub"),
];
keys.forEach((key) => {
expect(typeof key).toBe("string");
expect(key.startsWith("fb:")).toBe(true);
});
});
});
describe("validation and error handling", () => {
test("should validate all cache key structures", () => {
// All generated keys should follow the fb:resource:identifier[:subresource] pattern
const keys = [
createCacheKey.environment.state("env-123"),
createCacheKey.organization.billing("org-456"),
createCacheKey.license.status("license-789"),
createCacheKey.rateLimit.core("api", "user-101", 1640995200),
createCacheKey.custom("analytics", "analytics-102", "daily"),
];
keys.forEach((key) => {
// Should match the expected pattern: fb:resource:identifier[:subresource]*
expect(key).toMatch(/^fb:(?:[^:]+)(?::[^:]+)+$/);
expect(key.split(":").length).toBeGreaterThanOrEqual(3);
});
});
test("should throw consistent error messages for empty parts", () => {
const errorMessage = "Invalid Cache key: Parts cannot be empty";
expect(() => createCacheKey.environment.state("")).toThrow(errorMessage);
expect(() => createCacheKey.organization.billing("")).toThrow(errorMessage);
expect(() => createCacheKey.license.status("")).toThrow(errorMessage);
expect(() => createCacheKey.rateLimit.core("", "user", 123)).toThrow(errorMessage);
expect(() => createCacheKey.custom("analytics", "")).toThrow(errorMessage);
});
test("should handle edge case values safely", () => {
// Test with realistic edge case values
const specialChars = createCacheKey.environment.state("env_test-123.special");
expect(specialChars).toBe("fb:env:env_test-123.special:state");
const numeric = createCacheKey.organization.billing("12345");
expect(numeric).toBe("fb:org:12345:billing");
const longId = createCacheKey.license.status("very-long-organization-identifier-that-might-exist");
expect(longId).toBe("fb:license:very-long-organization-identifier-that-might-exist:status");
});
});
});

49
packages/cache/src/cache-keys.ts vendored Normal file
View File

@@ -0,0 +1,49 @@
import { type CacheKey, type CustomCacheNamespace } from "@/types/keys";
import { makeCacheKey } from "./utils/key";
/**
* Enterprise-grade cache key generator following industry best practices
* Pattern: fb:\{resource\}:\{identifier\}:\{subResource\}
*
* Benefits:
* - Clear namespace hierarchy (fb = formbricks)
* - Collision-proof across environments
* - Easy debugging and monitoring
* - Predictable invalidation patterns
* - Multi-tenant safe
* - Type-safe with branded CacheKey type
*/
export const createCacheKey = {
// Environment-related keys
environment: {
state: (environmentId: string): CacheKey => makeCacheKey("env", environmentId, "state"),
config: (environmentId: string): CacheKey => makeCacheKey("env", environmentId, "config"),
segments: (environmentId: string): CacheKey => makeCacheKey("env", environmentId, "segments"),
},
// Organization-related keys
organization: {
billing: (organizationId: string): CacheKey => makeCacheKey("org", organizationId, "billing"),
},
// License and enterprise features
license: {
status: (organizationId: string): CacheKey => makeCacheKey("license", organizationId, "status"),
previous_result: (organizationId: string): CacheKey =>
makeCacheKey("license", organizationId, "previous_result"),
},
// Rate limiting and security
rateLimit: {
core: (namespace: string, identifier: string, windowStart: number): CacheKey =>
makeCacheKey("rate_limit", namespace, identifier, String(windowStart)),
},
// Custom keys with validation
custom: (namespace: CustomCacheNamespace, identifier: string, subResource?: string): CacheKey => {
return subResource !== undefined
? makeCacheKey(namespace, identifier, subResource)
: makeCacheKey(namespace, identifier);
},
};

318
packages/cache/src/client.test.ts vendored Normal file
View File

@@ -0,0 +1,318 @@
import type { RedisClient } from "@/types/client";
import { ErrorCode } from "@/types/error";
import { createClient } from "redis";
import { beforeEach, describe, expect, test, vi } from "vitest";
import { createRedisClientFromEnv, getCacheService, resetCacheFactory } from "./client";
// Mock the redis module
vi.mock("redis", () => ({
createClient: vi.fn(),
}));
// Mock the logger
vi.mock("@formbricks/logger", () => ({
logger: {
info: vi.fn(),
error: vi.fn(),
warn: vi.fn(),
debug: vi.fn(),
},
}));
// Mock CacheService
vi.mock("./service", () => ({
CacheService: vi.fn().mockImplementation((redis: RedisClient | null = null) => ({
get: vi.fn(),
set: vi.fn(),
del: vi.fn(),
exists: vi.fn(),
withCache: vi.fn(),
getRedisClient: vi.fn().mockImplementation(() => {
if (!redis || !redis.isReady || !redis.isOpen) {
return null;
}
return redis;
}),
})),
}));
// Create a proper mock interface for Redis client
interface MockRedisClient {
isOpen: boolean;
isReady: boolean;
on: ReturnType<typeof vi.fn>;
connect: ReturnType<typeof vi.fn>;
destroy: ReturnType<typeof vi.fn>;
}
// Get typed mocks
const mockCreateClient = vi.mocked(createClient);
describe("@formbricks/cache factory", () => {
beforeEach(() => {
vi.clearAllMocks();
delete process.env.REDIS_URL;
resetCacheFactory();
});
describe("createRedisClientFromEnv", () => {
test("should return error when REDIS_URL is not set", async () => {
const result = await createRedisClientFromEnv();
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.RedisConfigurationError);
expect(typeof result.error).toBe("object");
expect(result.error).toHaveProperty("code");
}
});
test("should create client when REDIS_URL is set", async () => {
process.env.REDIS_URL = "redis://localhost:6379";
const mockClient: MockRedisClient = {
isOpen: true,
isReady: true,
on: vi.fn(),
connect: vi.fn().mockResolvedValue(undefined),
destroy: vi.fn().mockResolvedValue(undefined),
};
// @ts-expect-error - Mock client type incompatibility with Redis types
mockCreateClient.mockReturnValue(mockClient as unknown as RedisClient);
const result = await createRedisClientFromEnv();
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toBe(mockClient);
}
expect(mockCreateClient).toHaveBeenCalledWith({
url: "redis://localhost:6379",
socket: {
connectTimeout: 3000,
},
});
// Verify event handlers are set up
expect(mockClient.on).toHaveBeenCalledWith("error", expect.any(Function));
expect(mockClient.on).toHaveBeenCalledWith("connect", expect.any(Function));
expect(mockClient.on).toHaveBeenCalledWith("ready", expect.any(Function));
expect(mockClient.on).toHaveBeenCalledWith("end", expect.any(Function));
});
test("should return error when client connection fails", async () => {
process.env.REDIS_URL = "redis://localhost:6379";
const mockClient: MockRedisClient = {
isOpen: false,
isReady: false,
on: vi.fn(),
connect: vi.fn().mockRejectedValue(new Error("Connection failed")),
destroy: vi.fn().mockResolvedValue(undefined),
};
// @ts-expect-error - Mock client type incompatibility with Redis types
mockCreateClient.mockReturnValue(mockClient as unknown as RedisClient);
const result = await createRedisClientFromEnv();
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.RedisConnectionError);
expect(typeof result.error).toBe("object");
expect(result.error).toHaveProperty("code");
}
// Verify client was created and connect was attempted
expect(mockCreateClient).toHaveBeenCalledWith({
url: "redis://localhost:6379",
socket: {
connectTimeout: 3000,
},
});
expect(mockClient.connect).toHaveBeenCalled();
});
});
describe("getCacheService", () => {
test("should return error when environment client creation fails", async () => {
// Don't set REDIS_URL to trigger configuration error
const result = await getCacheService();
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.RedisConfigurationError);
expect(typeof result.error).toBe("object");
expect(result.error).toHaveProperty("code");
}
});
test("should create cache service successfully with valid environment", async () => {
process.env.REDIS_URL = "redis://localhost:6379";
const mockClient: MockRedisClient = {
isOpen: true,
isReady: true,
on: vi.fn(),
connect: vi.fn().mockResolvedValue(undefined),
destroy: vi.fn().mockResolvedValue(undefined),
};
// @ts-expect-error - Mock client type incompatibility with Redis types
mockCreateClient.mockReturnValue(mockClient as unknown as RedisClient);
const result = await getCacheService();
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toBeDefined();
}
});
test("should handle concurrent initialization safely", async () => {
process.env.REDIS_URL = "redis://localhost:6379";
const mockClient: MockRedisClient = {
isOpen: true,
isReady: true,
on: vi.fn(),
connect: vi.fn().mockResolvedValue(undefined),
destroy: vi.fn().mockResolvedValue(undefined),
};
// @ts-expect-error - Mock client type incompatibility with Redis types
mockCreateClient.mockReturnValue(mockClient as unknown as RedisClient);
// Start multiple concurrent calls
const promises = Array(3)
.fill(null)
.map(() => getCacheService());
const results = await Promise.all(promises);
// All should succeed and return the same instance
results.forEach((result) => {
expect(result.ok).toBe(true);
});
if (results[0].ok && results[1].ok && results[2].ok) {
expect(results[0].data).toBe(results[1].data);
expect(results[1].data).toBe(results[2].data);
}
// Only one client should have been created
expect(mockCreateClient).toHaveBeenCalledTimes(1);
});
test("should allow retry after failed initialization", async () => {
process.env.REDIS_URL = "redis://localhost:6379";
const mockClient: MockRedisClient = {
isOpen: true,
isReady: true,
on: vi.fn(),
connect: vi
.fn()
.mockRejectedValueOnce(new Error("Connection failed"))
.mockResolvedValueOnce(undefined),
destroy: vi.fn().mockResolvedValue(undefined),
};
// @ts-expect-error - Mock client type incompatibility with Redis types
mockCreateClient.mockReturnValue(mockClient as unknown as RedisClient);
// First call should fail
const firstResult = await getCacheService();
expect(firstResult.ok).toBe(false);
// Second call should succeed (after retry)
const secondResult = await getCacheService();
expect(secondResult.ok).toBe(true);
});
test("should handle connection failure and return error", async () => {
process.env.REDIS_URL = "redis://localhost:6379";
const mockClient: MockRedisClient = {
isOpen: false,
isReady: false,
on: vi.fn(),
connect: vi.fn().mockRejectedValue(new Error("Connection failed")),
destroy: vi.fn().mockResolvedValue(undefined),
};
// @ts-expect-error - Mock client type incompatibility with Redis types
mockCreateClient.mockReturnValue(mockClient as unknown as RedisClient);
// Call should fail
const result = await getCacheService();
expect(result.ok).toBe(false);
if (!result.ok) {
// The error should be a simple error object from createRedisClientFromEnv
expect(result.error.code).toBe(ErrorCode.RedisConnectionError);
expect(typeof result.error).toBe("object");
expect(result.error).toHaveProperty("code");
}
expect(mockClient.connect).toHaveBeenCalledTimes(1);
});
test("should handle connection errors gracefully", async () => {
process.env.REDIS_URL = "redis://localhost:6379";
const mockClient: MockRedisClient = {
isOpen: false,
isReady: false,
on: vi.fn(),
connect: vi.fn().mockRejectedValue(new Error("Connection failed")),
destroy: vi.fn().mockRejectedValue(new Error("Destroy failed")),
};
// @ts-expect-error - Mock client type incompatibility with Redis types
mockCreateClient.mockReturnValue(mockClient as unknown as RedisClient);
// Call should fail with connection error
const result = await getCacheService();
expect(result.ok).toBe(false);
if (!result.ok) {
// The error should be a simple error object from createRedisClientFromEnv
expect(result.error.code).toBe(ErrorCode.RedisConnectionError);
expect(typeof result.error).toBe("object");
expect(result.error).toHaveProperty("code");
}
});
});
describe("resetCacheFactory", () => {
test("should reset singleton and initializing state", async () => {
process.env.REDIS_URL = "redis://localhost:6379";
const mockClient: MockRedisClient = {
isOpen: true,
isReady: true,
on: vi.fn(),
connect: vi.fn().mockResolvedValue(undefined),
destroy: vi.fn().mockResolvedValue(undefined),
};
// @ts-expect-error - Mock client type incompatibility with Redis types
mockCreateClient.mockReturnValue(mockClient as unknown as RedisClient);
// Create initial service
const firstResult = await getCacheService();
expect(firstResult.ok).toBe(true);
// Reset the factory
resetCacheFactory();
// Create another service - should create a new instance
const secondResult = await getCacheService();
expect(secondResult.ok).toBe(true);
// Should have called createClient twice (once for each service)
expect(mockCreateClient).toHaveBeenCalledTimes(2);
});
});
});

126
packages/cache/src/client.ts vendored Normal file
View File

@@ -0,0 +1,126 @@
import type { RedisClient } from "@/types/client";
import { type CacheError, ErrorCode, type Result, err, ok } from "@/types/error";
import { createClient } from "redis";
import { logger } from "@formbricks/logger";
import { CacheService } from "./service";
/**
* Creates a Redis client from the REDIS_URL environment variable
* @returns Result containing RedisClient or RedisConfigurationError if REDIS_URL is not set
*/
export async function createRedisClientFromEnv(): Promise<Result<RedisClient, CacheError>> {
const url = process.env.REDIS_URL;
if (!url) {
logger.error("REDIS_URL is required to create the Redis client");
return err({
code: ErrorCode.RedisConfigurationError,
});
}
const client = createClient({
url,
socket: {
connectTimeout: 3000,
},
});
client.on("error", (error) => {
logger.error(error, "Redis client error");
try {
resetCacheFactory();
client.destroy();
} catch (e) {
logger.error(e, "Error destroying Redis client");
}
});
client.on("connect", () => {
logger.info("Redis client connected");
});
client.on("ready", () => {
logger.info("Redis client ready");
});
client.on("end", () => {
logger.info("Redis client disconnected");
});
try {
await client.connect();
return ok(client as RedisClient);
} catch (error) {
logger.error(error, "Redis client connection failed");
return err({ code: ErrorCode.RedisConnectionError });
}
}
// Global singleton with globalThis for cross-module sharing
const globalForCache = globalThis as unknown as {
formbricksCache: CacheService | undefined;
formbricksCacheInitializing: Promise<Result<CacheService, CacheError>> | undefined;
};
// Module-level singleton for performance
let singleton: CacheService | null = globalForCache.formbricksCache ?? null;
/**
* Returns existing instance immediately if available
* Creates a new cache service instance if none is available
* Fails fast if Redis is not available - consumers handle reconnection
*/
export async function getCacheService(): Promise<Result<CacheService, CacheError>> {
// Return existing instance immediately
if (singleton) {
const rc = singleton.getRedisClient();
if (rc?.isReady && rc.isOpen) return ok(singleton);
}
// Return existing instance from globalForCache if available
if (globalForCache.formbricksCache) {
const rc = globalForCache.formbricksCache.getRedisClient();
if (rc?.isReady && rc.isOpen) {
singleton = globalForCache.formbricksCache;
return ok(globalForCache.formbricksCache);
}
}
// Prevent concurrent initialization
if (globalForCache.formbricksCacheInitializing) {
const result = await globalForCache.formbricksCacheInitializing;
if (result.ok) {
singleton = result.data;
}
return result;
}
// Start initialization - fail fast approach
globalForCache.formbricksCacheInitializing = (async (): Promise<Result<CacheService, CacheError>> => {
const clientResult = await createRedisClientFromEnv();
if (!clientResult.ok) {
logger.error({ error: clientResult.error }, "Redis client creation failed");
return err({ code: clientResult.error.code });
}
const client = clientResult.data;
logger.debug("Redis connection established");
const svc = new CacheService(client);
singleton = svc;
globalForCache.formbricksCache = svc;
logger.debug("Cache service created");
return ok(svc);
})();
const result = await globalForCache.formbricksCacheInitializing;
if (!result.ok) {
globalForCache.formbricksCacheInitializing = undefined; // Allow retry
logger.error({ error: result.error }, "Cache service creation failed");
}
return result;
}
export function resetCacheFactory(): void {
singleton = null;
globalForCache.formbricksCache = undefined;
globalForCache.formbricksCacheInitializing = undefined;
}

28
packages/cache/src/index.test.ts vendored Normal file
View File

@@ -0,0 +1,28 @@
import { describe, expect, test } from "vitest";
import { createCacheKey, getCacheService } from "./index";
import type { CacheError, CacheKey, ErrorCode, Result } from "./index";
describe("@formbricks/cache index exports", () => {
test("should export all required functions and constants", () => {
expect(typeof getCacheService).toBe("function");
expect(typeof createCacheKey).toBe("object");
});
test("should export all required types without circular dependency issues", () => {
// This test passes if the types can be imported successfully
// The actual verification happens at compile/import time
const testTypes = {
CacheKey: "CacheKey" as keyof { CacheKey: CacheKey },
// RedisClient is no longer exported
Result: "Result" as keyof { Result: Result<unknown, unknown> },
CacheError: "CacheError" as keyof { CacheError: CacheError },
ErrorCode: "ErrorCode" as keyof { ErrorCode: ErrorCode },
};
expect(testTypes.CacheKey).toBe("CacheKey");
// RedisClient test removed since it's no longer exported
expect(testTypes.Result).toBe("Result");
expect(testTypes.CacheError).toBe("CacheError");
expect(testTypes.ErrorCode).toBe("ErrorCode");
});
});

11
packages/cache/src/index.ts vendored Normal file
View File

@@ -0,0 +1,11 @@
// Re-export everything from factory
export { getCacheService } from "./client";
export type { CacheService } from "./service";
// Export cache keys
export { createCacheKey } from "./cache-keys";
// Export types
export type { CacheKey } from "../types/keys";
export type { CacheError, Result } from "../types/error";
export { ErrorCode } from "../types/error";

631
packages/cache/src/service.test.ts vendored Normal file
View File

@@ -0,0 +1,631 @@
import { beforeEach, describe, expect, test, vi } from "vitest";
import { logger } from "@formbricks/logger";
import type { RedisClient } from "../types/client";
import { ErrorCode } from "../types/error";
import type { CacheKey } from "../types/keys";
import { CacheService } from "./service";
// Mock logger
vi.mock("@formbricks/logger", () => ({
logger: {
error: vi.fn(),
warn: vi.fn(),
info: vi.fn(),
debug: vi.fn(),
},
}));
interface MockRedisClient {
get: ReturnType<typeof vi.fn>;
setEx: ReturnType<typeof vi.fn>;
del: ReturnType<typeof vi.fn>;
exists: ReturnType<typeof vi.fn>;
isReady: boolean;
isOpen: boolean;
}
describe("CacheService", () => {
let mockRedis: MockRedisClient;
let cacheService: CacheService;
beforeEach(() => {
mockRedis = {
get: vi.fn(),
setEx: vi.fn(),
del: vi.fn(),
exists: vi.fn(),
isReady: true,
isOpen: true,
};
cacheService = new CacheService(mockRedis as unknown as RedisClient);
});
describe("get", () => {
test("should return parsed JSON value when found", async () => {
const key = "test:key" as CacheKey;
const value = { data: "test" };
mockRedis.get.mockResolvedValue(JSON.stringify(value));
const result = await cacheService.get(key);
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toEqual(value);
}
expect(mockRedis.get).toHaveBeenCalledWith(key);
});
test("should return null when key not found", async () => {
const key = "test:key" as CacheKey;
mockRedis.get.mockResolvedValue(null);
const result = await cacheService.get(key);
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toBeNull();
}
});
test("should return error when JSON parse fails (corrupted data)", async () => {
const key = "test:key" as CacheKey;
const corruptedValue = "invalid json {broken";
mockRedis.get.mockResolvedValue(corruptedValue);
const result = await cacheService.get(key);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.CacheCorruptionError);
}
expect(logger.warn).toHaveBeenCalledWith(
"Corrupted cache data detected, treating as cache miss",
expect.objectContaining({
key,
parseError: expect.objectContaining({
name: "SyntaxError",
message: expect.stringContaining("JSON") as string,
}) as Error,
})
);
});
test("should return validation error for empty key", async () => {
const key = "" as CacheKey;
const result = await cacheService.get(key);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.CacheValidationError);
}
});
test("should return validation error for whitespace-only key", async () => {
const key = " " as CacheKey;
const result = await cacheService.get(key);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.CacheValidationError);
}
});
test("should return error when Redis operation fails", async () => {
const key = "test:key" as CacheKey;
mockRedis.get.mockRejectedValue(new Error("Redis connection failed"));
const result = await cacheService.get(key);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.RedisOperationError);
}
expect(logger.error).toHaveBeenCalledWith(
{ error: expect.any(Error), key }, // eslint-disable-line @typescript-eslint/no-unsafe-assignment -- Testing error handling with any Error type
"Cache get operation failed"
);
});
test("should handle string values correctly", async () => {
const key = "test:key" as CacheKey;
const value = "simple string";
mockRedis.get.mockResolvedValue(JSON.stringify(value));
const result = await cacheService.get(key);
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toBe(value);
}
});
test("should handle number values correctly", async () => {
const key = "test:key" as CacheKey;
const value = 42;
mockRedis.get.mockResolvedValue(JSON.stringify(value));
const result = await cacheService.get(key);
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toBe(value);
}
});
test("should handle boolean values correctly", async () => {
const key = "test:key" as CacheKey;
const value = false;
mockRedis.get.mockResolvedValue(JSON.stringify(value));
const result = await cacheService.get(key);
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toBe(value);
}
});
test("should handle nested object values correctly", async () => {
const key = "test:key" as CacheKey;
const value = { nested: { deeply: { value: "test" } }, array: [1, 2, 3] };
mockRedis.get.mockResolvedValue(JSON.stringify(value));
const result = await cacheService.get(key);
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toEqual(value);
}
});
});
describe("exists", () => {
test("should return true when key exists", async () => {
const key = "test:key" as CacheKey;
mockRedis.exists.mockResolvedValue(1);
const result = await cacheService.exists(key);
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toBe(true);
}
expect(mockRedis.exists).toHaveBeenCalledWith(key);
});
test("should return false when key does not exist", async () => {
const key = "test:key" as CacheKey;
mockRedis.exists.mockResolvedValue(0);
const result = await cacheService.exists(key);
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toBe(false);
}
});
test("should return validation error for empty key", async () => {
const key = "" as CacheKey;
const result = await cacheService.exists(key);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.CacheValidationError);
}
});
test("should return error when Redis operation fails", async () => {
const key = "test:key" as CacheKey;
mockRedis.exists.mockRejectedValue(new Error("Redis connection failed"));
const result = await cacheService.exists(key);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.RedisOperationError);
}
expect(logger.error).toHaveBeenCalledWith(
{ error: expect.any(Error), key }, // eslint-disable-line @typescript-eslint/no-unsafe-assignment -- Testing error handling with any Error type
"Cache exists operation failed"
);
});
test("should handle multiple keys existing", async () => {
const key = "test:key" as CacheKey;
mockRedis.exists.mockResolvedValue(2); // Multiple keys exist
const result = await cacheService.exists(key);
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toBe(true);
}
});
});
describe("set", () => {
test("should store JSON serialized value with TTL", async () => {
const key = "test:key" as CacheKey;
const value = { data: "test" };
const ttlMs = 60000;
const result = await cacheService.set(key, value, ttlMs);
expect(result.ok).toBe(true);
expect(mockRedis.setEx).toHaveBeenCalledWith(key, 60, JSON.stringify(value));
});
test("should convert TTL from milliseconds to seconds", async () => {
const key = "test:key" as CacheKey;
const value = "test";
const ttlMs = 5500; // 5.5 seconds
const result = await cacheService.set(key, value, ttlMs);
expect(result.ok).toBe(true);
expect(mockRedis.setEx).toHaveBeenCalledWith(key, 5, JSON.stringify(value));
});
test("should normalize undefined to null and store as JSON", async () => {
const key = "test:key" as CacheKey;
const value = undefined;
const ttlMs = 60000;
const result = await cacheService.set(key, value, ttlMs);
expect(result.ok).toBe(true);
expect(mockRedis.setEx).toHaveBeenCalledWith(key, 60, "null");
});
test("should store null values as JSON", async () => {
const key = "test:key" as CacheKey;
const value = null;
const ttlMs = 60000;
const result = await cacheService.set(key, value, ttlMs);
expect(result.ok).toBe(true);
expect(mockRedis.setEx).toHaveBeenCalledWith(key, 60, "null");
});
test("should return validation error for invalid TTL", async () => {
const key = "test:key" as CacheKey;
const value = "test";
const result1 = await cacheService.set(key, value, 0);
const result2 = await cacheService.set(key, value, -1);
const result3 = await cacheService.set(key, value, 500); // Below 1000ms minimum
expect(result1.ok).toBe(false);
expect(result2.ok).toBe(false);
expect(result3.ok).toBe(false);
if (!result1.ok) {
expect(result1.error.code).toBe(ErrorCode.CacheValidationError);
}
});
test("should return validation error for empty key", async () => {
const key = "" as CacheKey;
const value = "test";
const result = await cacheService.set(key, value, 1000);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.CacheValidationError);
}
});
test("should return error when Redis operation fails", async () => {
const key = "test:key" as CacheKey;
const value = "test";
const ttlMs = 60000;
mockRedis.setEx.mockRejectedValue(new Error("Redis connection failed"));
const result = await cacheService.set(key, value, ttlMs);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.RedisOperationError);
}
expect(logger.error).toHaveBeenCalledWith(
{ error: expect.any(Error), key, ttlMs }, // eslint-disable-line @typescript-eslint/no-unsafe-assignment -- Testing error handling with any Error type
"Cache set operation failed"
);
});
test("should handle complex data types correctly", async () => {
const key = "test:key" as CacheKey;
const value = {
string: "test",
number: 42,
boolean: true,
array: [1, 2, 3],
nested: { level: { deep: "value" } },
nullValue: null,
};
const ttlMs = 60000;
const result = await cacheService.set(key, value, ttlMs);
expect(result.ok).toBe(true);
expect(mockRedis.setEx).toHaveBeenCalledWith(key, 60, JSON.stringify(value));
});
});
describe("del", () => {
test("should delete single key", async () => {
const key = "test:key" as CacheKey;
const result = await cacheService.del([key]);
expect(result.ok).toBe(true);
expect(mockRedis.del).toHaveBeenCalledWith([key]);
});
test("should delete multiple keys", async () => {
const keys = ["test:key1", "test:key2"] as CacheKey[];
const result = await cacheService.del(keys);
expect(result.ok).toBe(true);
expect(mockRedis.del).toHaveBeenCalledWith(keys);
});
test("should be idempotent (not throw if key missing)", async () => {
const key = "nonexistent:key" as CacheKey;
mockRedis.del.mockResolvedValue(0);
const result = await cacheService.del([key]);
expect(result.ok).toBe(true);
});
test("should handle empty array gracefully", async () => {
const result = await cacheService.del([]);
expect(result.ok).toBe(true);
expect(mockRedis.del).not.toHaveBeenCalled();
});
test("should return validation error for empty key in array", async () => {
const keys = ["valid:key", ""] as CacheKey[];
const result = await cacheService.del(keys);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.CacheValidationError);
}
});
test("should return error when Redis is not ready/open", async () => {
const keys = ["test:key1", "test:key2"] as CacheKey[];
mockRedis.isReady = false;
mockRedis.isOpen = false;
const result = await cacheService.del(keys);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.RedisConnectionError);
}
});
test("should return error when Redis operation fails", async () => {
const keys = ["test:key1", "test:key2"] as CacheKey[];
mockRedis.del.mockRejectedValue(new Error("Redis connection failed"));
const result = await cacheService.del(keys);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.RedisOperationError);
}
expect(logger.error).toHaveBeenCalledWith(
{ error: expect.any(Error), keys }, // eslint-disable-line @typescript-eslint/no-unsafe-assignment -- Testing error handling with any Error type
"Cache delete operation failed"
);
});
test("should validate all keys before deletion", async () => {
const keys = ["valid:key1", " ", "valid:key2"] as CacheKey[];
const result = await cacheService.del(keys);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.CacheValidationError);
}
expect(mockRedis.del).not.toHaveBeenCalled();
});
});
describe("getRedisClient", () => {
test("should return the Redis client instance when ready", () => {
const result = cacheService.getRedisClient();
expect(result).toBe(mockRedis);
});
test("should return null when Redis is not ready", () => {
mockRedis.isReady = false;
const result = cacheService.getRedisClient();
expect(result).toBeNull();
});
test("should return null when Redis is not open", () => {
mockRedis.isOpen = false;
const result = cacheService.getRedisClient();
expect(result).toBeNull();
});
});
describe("withCache", () => {
test("should return cached value when available", async () => {
const key = "test:key" as CacheKey;
const cachedValue = { data: "cached" };
const fn = vi.fn().mockResolvedValue({ data: "fresh" });
mockRedis.get.mockResolvedValue(JSON.stringify(cachedValue));
const result = await cacheService.withCache(fn, key, 60000);
expect(result).toEqual(cachedValue);
expect(fn).not.toHaveBeenCalled();
});
test("should compute and cache value when cache miss", async () => {
const key = "test:key" as CacheKey;
const freshValue = { data: "fresh" };
const fn = vi.fn().mockResolvedValue(freshValue);
mockRedis.get.mockResolvedValue(null);
mockRedis.exists.mockResolvedValue(0); // Key doesn't exist
const result = await cacheService.withCache(fn, key, 60000);
expect(result).toEqual(freshValue);
expect(fn).toHaveBeenCalledOnce();
expect(mockRedis.setEx).toHaveBeenCalledWith(key, 60, JSON.stringify(freshValue));
});
test("should return fresh value when cache operation fails", async () => {
const key = "test:key" as CacheKey;
const freshValue = { data: "fresh" };
const fn = vi.fn().mockResolvedValue(freshValue);
mockRedis.get.mockRejectedValue(new Error("Redis connection failed"));
const result = await cacheService.withCache(fn, key, 60000);
expect(result).toEqual(freshValue);
expect(fn).toHaveBeenCalledOnce();
});
test("should return cached null value without executing function", async () => {
const key = "test:key" as CacheKey;
const fn = vi.fn().mockResolvedValue({ data: "fresh" });
// Mock Redis returning stringified null (cached null value)
mockRedis.get.mockResolvedValue("null");
mockRedis.exists.mockResolvedValue(1); // Key exists
const result = await cacheService.withCache(fn, key, 60000);
expect(result).toBeNull();
expect(fn).not.toHaveBeenCalled(); // Function should not be executed
});
test("should execute function and cache null result", async () => {
const key = "test:key" as CacheKey;
const fn = vi.fn().mockResolvedValue(null); // Function returns null
// Mock cache miss
mockRedis.get.mockResolvedValue(null);
mockRedis.exists.mockResolvedValue(0); // Key doesn't exist
const result = await cacheService.withCache(fn, key, 60000);
expect(result).toBeNull();
expect(fn).toHaveBeenCalledOnce();
expect(mockRedis.setEx).toHaveBeenCalledWith(key, 60, "null");
});
test("should return undefined without caching when function returns undefined", async () => {
const key = "test:key" as CacheKey;
const fn = vi.fn().mockResolvedValue(undefined); // Function returns undefined
// Mock cache miss
mockRedis.get.mockResolvedValue(null);
mockRedis.exists.mockResolvedValue(0); // Key doesn't exist
const result = await cacheService.withCache(fn, key, 60000);
expect(result).toBeUndefined();
expect(fn).toHaveBeenCalledOnce();
// undefined should NOT be cached to preserve semantics
expect(mockRedis.setEx).not.toHaveBeenCalled();
});
test("should distinguish between null and undefined return values", async () => {
const nullKey = "test:null-key" as CacheKey;
const undefinedKey = "test:undefined-key" as CacheKey;
const nullFn = vi.fn().mockResolvedValue(null);
const undefinedFn = vi.fn().mockResolvedValue(undefined);
// Mock cache miss for both keys
mockRedis.get.mockResolvedValue(null);
mockRedis.exists.mockResolvedValue(0);
// Test null return value - should be cached
const nullResult = await cacheService.withCache(nullFn, nullKey, 60000);
expect(nullResult).toBeNull();
expect(nullFn).toHaveBeenCalledOnce();
expect(mockRedis.setEx).toHaveBeenCalledWith(nullKey, 60, "null");
// Reset mocks
vi.clearAllMocks();
mockRedis.get.mockResolvedValue(null);
mockRedis.exists.mockResolvedValue(0);
// Test undefined return value - should NOT be cached
const undefinedResult = await cacheService.withCache(undefinedFn, undefinedKey, 60000);
expect(undefinedResult).toBeUndefined();
expect(undefinedFn).toHaveBeenCalledOnce();
expect(mockRedis.setEx).not.toHaveBeenCalled();
});
test("should execute function directly when cache fails", async () => {
const key = "test:key" as CacheKey;
const expectedResult = { data: "result" };
const fn = vi.fn().mockResolvedValue(expectedResult);
mockRedis.get.mockRejectedValue(new Error("Redis connection failed"));
const result = await cacheService.withCache(fn, key, 60000);
// withCache now always returns the function result, even when cache fails
expect(result).toEqual(expectedResult);
expect(fn).toHaveBeenCalledOnce();
});
test("should execute function directly when validation fails", async () => {
const invalidKey = "" as CacheKey; // Empty key should fail validation
const expectedResult = { data: "result" };
const fn = vi.fn().mockResolvedValue(expectedResult);
const result = await cacheService.withCache(fn, invalidKey, 60000);
expect(result).toEqual(expectedResult);
expect(fn).toHaveBeenCalledOnce();
// Should not attempt any cache operations when validation fails
expect(mockRedis.get).not.toHaveBeenCalled();
expect(mockRedis.setEx).not.toHaveBeenCalled();
});
test("should execute function directly when TTL validation fails", async () => {
const key = "test:key" as CacheKey;
const invalidTtl = 500; // Below minimum TTL of 1000ms
const expectedResult = { data: "result" };
const fn = vi.fn().mockResolvedValue(expectedResult);
const result = await cacheService.withCache(fn, key, invalidTtl);
expect(result).toEqual(expectedResult);
expect(fn).toHaveBeenCalledOnce();
// Should not attempt any cache operations when validation fails
expect(mockRedis.get).not.toHaveBeenCalled();
expect(mockRedis.setEx).not.toHaveBeenCalled();
});
});
});

263
packages/cache/src/service.ts vendored Normal file
View File

@@ -0,0 +1,263 @@
import type { RedisClient } from "@/types/client";
import { type CacheError, CacheErrorClass, ErrorCode, type Result, err, ok } from "@/types/error";
import type { CacheKey } from "@/types/keys";
import { ZCacheKey } from "@/types/keys";
import { ZTtlMs } from "@/types/service";
import { logger } from "@formbricks/logger";
import { validateInputs } from "./utils/validation";
/**
* Core cache service providing basic Redis operations with JSON serialization
*/
export class CacheService {
constructor(private readonly redis: RedisClient) {}
/**
* Wraps Redis operations with connection check and timeout to prevent hanging
*/
private async withTimeout<T>(operation: Promise<T>, timeoutMs = 1000): Promise<T> {
return Promise.race([
operation,
new Promise<T>((_, reject) => {
setTimeout(() => {
reject(new CacheErrorClass(ErrorCode.RedisOperationError, "Cache operation timeout"));
}, timeoutMs);
}),
]);
}
/**
* Get the underlying Redis client for advanced operations (e.g., Lua scripts)
* Use with caution - prefer cache service methods when possible
* @returns The Redis client instance or null if not ready
*/
getRedisClient(): RedisClient | null {
if (!this.isRedisAvailable()) {
return null;
}
return this.redis;
}
/**
* Get a value from cache with automatic JSON deserialization
* @param key - Cache key to retrieve
* @returns Result containing parsed value, null if not found, or an error
*/
async get<T>(key: CacheKey): Promise<Result<T | null, CacheError>> {
// Check Redis availability first
if (!this.isRedisAvailable()) {
return err({
code: ErrorCode.RedisConnectionError,
});
}
const validation = validateInputs([key, ZCacheKey]);
if (!validation.ok) {
return validation;
}
try {
const value = await this.withTimeout(this.redis.get(key));
if (value === null) {
return ok(null);
}
// Parse JSON - all data should be valid JSON since we stringify on set
try {
return ok(JSON.parse(value) as T);
} catch (parseError) {
// JSON parse failure indicates corrupted cache data - treat as cache miss
logger.warn("Corrupted cache data detected, treating as cache miss", {
key,
parseError,
});
return err({
code: ErrorCode.CacheCorruptionError,
});
}
} catch (error) {
logger.error({ error, key }, "Cache get operation failed");
return err({
code: ErrorCode.RedisOperationError,
});
}
}
/**
* Check if a key exists in cache (for distinguishing cache miss from cached null)
* @param key - Cache key to check
* @returns Result containing boolean indicating if key exists
*/
async exists(key: CacheKey): Promise<Result<boolean, CacheError>> {
// Check Redis availability first
if (!this.isRedisAvailable()) {
return err({
code: ErrorCode.RedisConnectionError,
});
}
const validation = validateInputs([key, ZCacheKey]);
if (!validation.ok) {
return validation;
}
try {
const exists = await this.withTimeout(this.redis.exists(key));
return ok(exists > 0);
} catch (error) {
logger.error({ error, key }, "Cache exists operation failed");
return err({
code: ErrorCode.RedisOperationError,
});
}
}
/**
* Set a value in cache with automatic JSON serialization and TTL
* @param key - Cache key to store under
* @param value - Value to store
* @param ttlMs - Time to live in milliseconds
* @returns Result containing void or an error
*/
async set(key: CacheKey, value: unknown, ttlMs: number): Promise<Result<void, CacheError>> {
// Check Redis availability first
if (!this.isRedisAvailable()) {
return err({
code: ErrorCode.RedisConnectionError,
});
}
// Validate both key and TTL in one call
const validation = validateInputs([key, ZCacheKey], [ttlMs, ZTtlMs]);
if (!validation.ok) {
return validation;
}
try {
// Normalize undefined to null to maintain consistent cached-null semantics
const normalizedValue = value === undefined ? null : value;
const serialized = JSON.stringify(normalizedValue);
await this.withTimeout(this.redis.setEx(key, Math.floor(ttlMs / 1000), serialized));
return ok(undefined);
} catch (error) {
logger.error({ error, key, ttlMs }, "Cache set operation failed");
return err({
code: ErrorCode.RedisOperationError,
});
}
}
/**
* Delete one or more keys from cache (idempotent)
* @param keys - Array of keys to delete
* @returns Result containing void or an error
*/
async del(keys: CacheKey[]): Promise<Result<void, CacheError>> {
// Check Redis availability first
if (!this.isRedisAvailable()) {
return err({
code: ErrorCode.RedisConnectionError,
});
}
// Validate all keys using generic validation
for (const key of keys) {
const validation = validateInputs([key, ZCacheKey]);
if (!validation.ok) {
return validation;
}
}
try {
if (keys.length > 0) {
await this.withTimeout(this.redis.del(keys));
}
return ok(undefined);
} catch (error) {
logger.error({ error, keys }, "Cache delete operation failed");
return err({
code: ErrorCode.RedisOperationError,
});
}
}
/**
* Cache wrapper for functions (cache-aside).
* Never throws due to cache errors; function errors propagate without retry.
* T must include null when the wrapped function can resolve to null, so cached null values round-trip correctly.
* @param fn - Function to execute (and optionally cache).
* @param key - Cache key
* @param ttlMs - Time to live in milliseconds
* @returns Cached value if present, otherwise fresh result from fn()
*/
async withCache<T>(fn: () => Promise<T>, key: CacheKey, ttlMs: number): Promise<T> {
if (!this.isRedisAvailable()) {
return await fn();
}
const validation = validateInputs([key, ZCacheKey], [ttlMs, ZTtlMs]);
if (!validation.ok) {
logger.warn({ error: validation.error, key }, "Invalid cache inputs, executing function directly");
return await fn();
}
const cachedValue = await this.tryGetCachedValue<T>(key, ttlMs);
if (cachedValue !== undefined) {
return cachedValue;
}
const fresh = await fn();
await this.trySetCache(key, fresh, ttlMs);
return fresh;
}
private async tryGetCachedValue<T>(key: CacheKey, ttlMs: number): Promise<T | undefined> {
try {
const cacheResult = await this.get<T>(key);
if (cacheResult.ok && cacheResult.data !== null) {
return cacheResult.data;
}
if (cacheResult.ok && cacheResult.data === null) {
const existsResult = await this.exists(key);
if (existsResult.ok && existsResult.data) {
return null as T;
}
}
if (!cacheResult.ok) {
logger.debug(
{ error: cacheResult.error, key, ttlMs },
"Cache get operation failed, fetching fresh data"
);
}
} catch (error) {
logger.debug({ error, key, ttlMs }, "Cache get/exists threw; proceeding to compute fresh value");
}
return undefined;
}
private async trySetCache(key: CacheKey, value: unknown, ttlMs: number): Promise<void> {
if (typeof value === "undefined") {
return; // Skip caching undefined values
}
try {
const setResult = await this.set(key, value, ttlMs);
if (!setResult.ok) {
logger.debug(
{ error: setResult.error, key, ttlMs },
"Failed to cache fresh data, but returning result"
);
}
} catch (error) {
logger.debug({ error, key, ttlMs }, "Cache set threw; returning fresh result");
}
}
private isRedisAvailable(): boolean {
return this.redis.isReady && this.redis.isOpen;
}
}
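For reviewers, a minimal usage sketch of the service above wired to a node-redis v5 client with the cache-aside wrapper. Paths are relative to `packages/cache/src`; the `REDIS_URL` environment variable, the example key parts, and the inline loader are illustrative assumptions, not part of this diff:

```ts
import { createClient } from "redis";
import { CacheService } from "./service";
import { makeCacheKey } from "./utils/key";

async function main(): Promise<void> {
  // Standard node-redis client; depending on the module generics a cast to
  // RedisClient may be needed when passing it to the service.
  const redis = createClient({ url: process.env.REDIS_URL });
  await redis.connect();

  const cache = new CacheService(redis);

  // Branded key: "fb:env:env-123:state"
  const key = makeCacheKey("env", "env-123", "state");

  // Cache-aside: returns the cached value when present, otherwise runs the
  // loader, stores the result for 5 minutes, and returns it. Cache failures
  // are logged and swallowed; only loader errors propagate.
  const state = await cache.withCache(
    async () => ({ surveys: [], actionClasses: [] }), // stand-in loader for illustration
    key,
    5 * 60 * 1000
  );

  console.log(state);
}

main().catch(console.error);
```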

62
packages/cache/src/utils/key.test.ts vendored Normal file
View File

@@ -0,0 +1,62 @@
import { describe, expect, test } from "vitest";
import type { CacheKey } from "@/types/keys";
import { makeCacheKey } from "./key";
describe("@formbricks/cache utils/key", () => {
describe("makeCacheKey helper", () => {
test("should create cache key with automatic fb prefix", () => {
const key: CacheKey = makeCacheKey("env", "123", "state");
expect(key).toBe("fb:env:123:state");
expect(typeof key).toBe("string");
});
test("should work with minimum parts", () => {
const key: CacheKey = makeCacheKey("user", "456");
expect(key).toBe("fb:user:456");
});
test("should work with many parts", () => {
const key: CacheKey = makeCacheKey("user", "123", "org", "456", "permissions");
expect(key).toBe("fb:user:123:org:456:permissions");
});
test("should throw error if fb prefix is included", () => {
expect(() => makeCacheKey("fb", "env", "123")).toThrow(
"Invalid Cache key: Do not include 'fb' prefix, it's added automatically"
);
});
test("should throw error for empty parts", () => {
expect(() => makeCacheKey("env", "", "state")).toThrow("Invalid Cache key: Parts cannot be empty");
expect(() => makeCacheKey("", "123")).toThrow("Invalid Cache key: Parts cannot be empty");
});
test("should validate structure with regex", () => {
// Valid structures should pass
expect(() => makeCacheKey("env", "123")).not.toThrow();
expect(() => makeCacheKey("env", "123", "state")).not.toThrow();
expect(() => makeCacheKey("rate_limit", "api", "user", "123")).not.toThrow();
});
test("should return branded CacheKey type", () => {
const key: CacheKey = makeCacheKey("test", "123");
// Function that only accepts CacheKey
const acceptsCacheKey = (cacheKey: CacheKey): string => cacheKey;
// Should work without TypeScript errors
expect(acceptsCacheKey(key)).toBe("fb:test:123");
});
test("should be compatible with existing cache key patterns", () => {
// Test patterns that match existing createCacheKey outputs
expect(makeCacheKey("env", "env-123", "state")).toBe("fb:env:env-123:state");
expect(makeCacheKey("org", "org-456", "billing")).toBe("fb:org:org-456:billing");
expect(makeCacheKey("license", "org-789", "status")).toBe("fb:license:org-789:status");
expect(makeCacheKey("rate_limit", "api", "key-123", "endpoint")).toBe(
"fb:rate_limit:api:key-123:endpoint"
);
});
});
});

36
packages/cache/src/utils/key.ts vendored Normal file
View File

@@ -0,0 +1,36 @@
import { logger } from "@formbricks/logger";
import type { CacheKey } from "@/types/keys";
/**
* Helper function to create cache keys with runtime validation
* Automatically adds "fb:" prefix and validates structure
*
* @param parts - Cache key parts (resource, identifier, subresources...)
* @returns Validated CacheKey
*
* @example
* makeCacheKey("env", "123", "state") // Returns "fb:env:123:state"
* makeCacheKey("user", "456") // Returns "fb:user:456"
*/
export const makeCacheKey = (...parts: [first: string, ...rest: string[]]): CacheKey => {
if (parts[0] === "fb") {
logger.error("Invalid Cache key: Do not include 'fb' prefix, it's added automatically");
throw new Error("Invalid Cache key: Do not include 'fb' prefix, it's added automatically");
}
// Check for empty parts
if (parts.some((part) => part.length === 0)) {
logger.error("Invalid Cache key: Parts cannot be empty");
throw new Error("Invalid Cache key: Parts cannot be empty");
}
const key = `fb:${parts.join(":")}`;
// Valid format: starts with "fb:", has valid structure
if (!/^fb:(?:[^:]+)(?::[^:]+)*$/.test(key)) {
logger.error("Invalid Cache key: Invalid structure");
throw new Error("Invalid Cache key: Invalid structure");
}
return key as CacheKey;
};
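The `createCacheKey` patterns referenced in key.test.ts are not part of this hunk; purely for illustration, a typed builder in that spirit could sit on top of `makeCacheKey` like this (the names below are assumptions, not the shipped helper):

```ts
import type { CacheKey } from "@/types/keys";
import { makeCacheKey } from "./key";

// Illustrative only: a typed key builder matching the createCacheKey outputs
// exercised in key.test.ts. The real helper is defined elsewhere.
const exampleCacheKeys = {
  environment: {
    state: (environmentId: string): CacheKey => makeCacheKey("env", environmentId, "state"),
  },
  organization: {
    billing: (organizationId: string): CacheKey => makeCacheKey("org", organizationId, "billing"),
  },
} as const;

// exampleCacheKeys.environment.state("env-123") -> "fb:env:env-123:state"
```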

75
packages/cache/src/utils/validation.test.ts vendored Normal file
View File

@@ -0,0 +1,75 @@
import { describe, expect, test, vi } from "vitest";
import { z } from "zod";
import { ErrorCode } from "@/types/error";
import { validateInputs } from "./validation";
// Mock logger
vi.mock("@formbricks/logger", () => ({
logger: {
error: vi.fn(),
},
}));
describe("@formbricks/cache validation utils", () => {
describe("validateInputs", () => {
const stringSchema = z.string().min(1);
const numberSchema = z.number().positive();
test("should return success for valid inputs", () => {
const result = validateInputs(["test", stringSchema], [42, numberSchema]);
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toEqual(["test", 42]);
}
});
test("should return error for invalid first input", () => {
const result = validateInputs(["", stringSchema]);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.CacheValidationError);
}
});
test("should return error for invalid second input", () => {
const result = validateInputs(["valid", stringSchema], [-1, numberSchema]);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.CacheValidationError);
}
});
test("should work with single input", () => {
const result = validateInputs(["test", stringSchema]);
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toEqual(["test"]);
}
});
test("should work with no inputs", () => {
const result = validateInputs();
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toEqual([]);
}
});
test("should return error on first failure in multiple inputs", () => {
const result = validateInputs(
["", stringSchema], // This will fail
[42, numberSchema] // This would pass but shouldn't be reached
);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe(ErrorCode.CacheValidationError);
}
});
});
});

34
packages/cache/src/utils/validation.ts vendored Normal file
View File

@@ -0,0 +1,34 @@
import type { CacheError, Result } from "@/types/error";
import { ErrorCode, err, ok } from "@/types/error";
import type { z } from "zod";
import { logger } from "@formbricks/logger";
/**
* Generic validation function using Zod schemas with Result types
* @param pairs - Array of [value, schema] tuples to validate
* @returns Result with validated data or CacheValidationError
*/
export function validateInputs<T extends readonly [unknown, z.ZodType<unknown>][]>(
...pairs: T
): Result<{ [K in keyof T]: T[K] extends readonly [unknown, z.ZodType<infer U>] ? U : never }, CacheError> {
const results: unknown[] = [];
for (const [value, schema] of pairs) {
const result = schema.safeParse(value);
if (!result.success) {
logger.error(
{
error: result.error.issues[0]?.message || "Unknown validation error",
validationErrors: result.error.issues,
},
"Cache validation failed"
);
return err({
code: ErrorCode.CacheValidationError,
});
}
results.push(result.data);
}
return ok(results as { [K in keyof T]: T[K] extends readonly [unknown, z.ZodType<infer U>] ? U : never });
}

16
packages/cache/tsconfig.json vendored Normal file
View File

@@ -0,0 +1,16 @@
{
"compilerOptions": {
"allowImportingTsExtensions": true,
"baseUrl": ".",
"isolatedModules": true,
"noEmit": true,
"paths": {
"@/*": ["./*"]
},
"resolveJsonModule": true,
"strict": true
},
"exclude": ["node_modules"],
"extends": "@formbricks/config-typescript/js-library.json",
"include": ["src", "types", "package.json"]
}

6
packages/cache/types/client.ts vendored Normal file
View File

@@ -0,0 +1,6 @@
import type { RedisClientType } from "redis";
/**
* Redis client type used throughout the cache package
*/
export type RedisClient = RedisClientType;

296
packages/cache/types/error.test.ts vendored Normal file
View File

@@ -0,0 +1,296 @@
import { describe, expect, test } from "vitest";
import { type CacheError, CacheErrorClass, ErrorCode, type Result, err, ok } from "./error";
describe("Error types and utilities", () => {
describe("ok utility function", () => {
test("should create success Result with data", () => {
const data = { test: "value" };
const result = ok(data);
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toEqual(data);
}
});
test("should work with different data types", () => {
const stringResult = ok("test string");
const numberResult = ok(42);
const arrayResult = ok([1, 2, 3]);
const nullResult = ok(null);
expect(stringResult.ok).toBe(true);
expect(numberResult.ok).toBe(true);
expect(arrayResult.ok).toBe(true);
expect(nullResult.ok).toBe(true);
if (stringResult.ok) expect(stringResult.data).toBe("test string");
if (numberResult.ok) expect(numberResult.data).toBe(42);
if (arrayResult.ok) expect(arrayResult.data).toEqual([1, 2, 3]);
if (nullResult.ok) expect(nullResult.data).toBe(null);
});
});
describe("err utility function", () => {
test("should create error Result with error", () => {
const error: CacheError = { code: ErrorCode.Unknown };
const result = err(error);
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error).toEqual(error);
}
});
test("should work with different error types", () => {
const cacheError: CacheError = { code: ErrorCode.CacheValidationError };
const redisError: CacheError = { code: ErrorCode.RedisOperationError };
const cacheResult = err(cacheError);
const redisResult = err(redisError);
expect(cacheResult.ok).toBe(false);
expect(redisResult.ok).toBe(false);
if (!cacheResult.ok) expect(cacheResult.error.code).toBe(ErrorCode.CacheValidationError);
if (!redisResult.ok) expect(redisResult.error.code).toBe(ErrorCode.RedisOperationError);
});
});
describe("ErrorCode enum", () => {
test("should have all expected error codes", () => {
expect(ErrorCode.Unknown).toBe("unknown");
expect(ErrorCode.CacheValidationError).toBe("cache_validation_error");
expect(ErrorCode.RedisConnectionError).toBe("redis_connection_error");
expect(ErrorCode.RedisOperationError).toBe("redis_operation_error");
expect(ErrorCode.CacheCorruptionError).toBe("cache_corruption_error");
});
test("should be usable as object keys", () => {
const errorMap = {
[ErrorCode.Unknown]: "Unknown error occurred",
[ErrorCode.CacheValidationError]: "Validation failed",
[ErrorCode.RedisConnectionError]: "Connection failed",
[ErrorCode.RedisOperationError]: "Operation failed",
[ErrorCode.CacheCorruptionError]: "Data corrupted",
};
expect(errorMap[ErrorCode.Unknown]).toBe("Unknown error occurred");
expect(errorMap[ErrorCode.CacheValidationError]).toBe("Validation failed");
});
});
describe("CacheError interface", () => {
test("should work with all error codes", () => {
const errors: CacheError[] = [
{ code: ErrorCode.Unknown },
{ code: ErrorCode.CacheValidationError },
{ code: ErrorCode.RedisConnectionError },
{ code: ErrorCode.RedisOperationError },
{ code: ErrorCode.CacheCorruptionError },
];
errors.forEach((error) => {
expect(typeof error.code).toBe("string");
expect(Object.values(ErrorCode)).toContain(error.code);
});
});
});
describe("Result type", () => {
test("should discriminate between success and error states", () => {
const successResult: Result<string, CacheError> = ok("success");
const errorResult: Result<string, CacheError> = err({ code: ErrorCode.Unknown });
// TypeScript should narrow types correctly
if (successResult.ok) {
expect(typeof successResult.data).toBe("string");
expect(successResult.data).toBe("success");
}
if (!errorResult.ok) {
expect(typeof errorResult.error.code).toBe("string");
expect(errorResult.error.code).toBe(ErrorCode.Unknown);
}
});
test("should support type safety with different data types", () => {
// This test verifies type compatibility at compile time
const stringResult: Result<string, CacheError> = ok("test");
const numberResult: Result<number, CacheError> = ok(42);
const objectResult: Result<{ id: string }, CacheError> = ok({ id: "123" });
expect(stringResult.ok).toBe(true);
expect(numberResult.ok).toBe(true);
expect(objectResult.ok).toBe(true);
});
});
describe("CacheErrorClass", () => {
test("should create proper Error instances with code and message", () => {
const error = new CacheErrorClass(ErrorCode.RedisConnectionError, "Custom error message");
expect(error).toBeInstanceOf(Error);
expect(error).toBeInstanceOf(CacheErrorClass);
expect(error.name).toBe("CacheError");
expect(error.message).toBe("Custom error message");
expect(error.code).toBe(ErrorCode.RedisConnectionError);
expect(error.stack).toBeDefined();
expect(typeof error.stack).toBe("string");
});
test("should create Error instances with default message", () => {
const error = new CacheErrorClass(ErrorCode.CacheValidationError);
expect(error).toBeInstanceOf(Error);
expect(error).toBeInstanceOf(CacheErrorClass);
expect(error.name).toBe("CacheError");
expect(error.message).toBe("Cache error: cache_validation_error");
expect(error.code).toBe(ErrorCode.CacheValidationError);
expect(error.stack).toBeDefined();
});
test("should work with all ErrorCode values", () => {
// Test a representative sample to avoid deep nesting warnings
const testCodes = [
ErrorCode.Unknown,
ErrorCode.CacheValidationError,
ErrorCode.RedisConnectionError,
ErrorCode.RedisOperationError,
ErrorCode.CacheCorruptionError,
];
testCodes.forEach((code) => {
const error = new CacheErrorClass(code, `Test error for ${code}`);
expect(error.code).toBe(code);
expect(error.message).toBe(`Test error for ${code}`);
expect(error).toBeInstanceOf(Error);
expect(error).toBeInstanceOf(CacheErrorClass);
});
});
test("should implement CacheError interface", () => {
const error = new CacheErrorClass(ErrorCode.RedisOperationError, "Test message");
// Should be assignable to CacheError interface
const cacheError: CacheError = error;
expect(cacheError.code).toBe(ErrorCode.RedisOperationError);
});
test("should be throwable and catchable", () => {
expect(() => {
throw new CacheErrorClass(ErrorCode.CacheCorruptionError, "Data corrupted");
}).toThrow("Data corrupted");
try {
throw new CacheErrorClass(ErrorCode.RedisConnectionError, "Connection failed");
} catch (error) {
expect(error).toBeInstanceOf(CacheErrorClass);
expect(error).toBeInstanceOf(Error);
if (error instanceof CacheErrorClass) {
expect(error.code).toBe(ErrorCode.RedisConnectionError);
expect(error.message).toBe("Connection failed");
}
}
});
describe("fromCacheError static method", () => {
test("should convert plain CacheError to CacheErrorClass", () => {
const plainError: CacheError = { code: ErrorCode.CacheValidationError };
const errorClass = CacheErrorClass.fromCacheError(plainError);
expect(errorClass).toBeInstanceOf(Error);
expect(errorClass).toBeInstanceOf(CacheErrorClass);
expect(errorClass.code).toBe(ErrorCode.CacheValidationError);
expect(errorClass.message).toBe("Cache error: cache_validation_error");
expect(errorClass.name).toBe("CacheError");
});
test("should use custom message when provided", () => {
const plainError: CacheError = { code: ErrorCode.RedisOperationError };
const errorClass = CacheErrorClass.fromCacheError(plainError, "Custom conversion message");
expect(errorClass.code).toBe(ErrorCode.RedisOperationError);
expect(errorClass.message).toBe("Custom conversion message");
expect(errorClass).toBeInstanceOf(Error);
expect(errorClass).toBeInstanceOf(CacheErrorClass);
});
test("should preserve error code from plain object", () => {
// Test a few key error codes to avoid deep nesting warning
const testCodes = [
ErrorCode.CacheValidationError,
ErrorCode.RedisConnectionError,
ErrorCode.RedisOperationError,
];
testCodes.forEach((code) => {
const plainError: CacheError = { code };
const errorClass = CacheErrorClass.fromCacheError(plainError, `Converted ${code}`);
expect(errorClass.code).toBe(code);
expect(errorClass.message).toBe(`Converted ${code}`);
});
});
});
test("should maintain proper prototype chain", () => {
const error = new CacheErrorClass(ErrorCode.Unknown, "Test error");
// Verify prototype chain
expect(Object.getPrototypeOf(error)).toBe(CacheErrorClass.prototype);
expect(Object.getPrototypeOf(CacheErrorClass.prototype)).toBe(Error.prototype);
// Verify constructor
expect(error.constructor).toBe(CacheErrorClass);
});
test("should have enumerable code property", () => {
const error = new CacheErrorClass(ErrorCode.RedisConnectionError, "Test");
const descriptor = Object.getOwnPropertyDescriptor(error, "code");
expect(descriptor).toBeDefined();
expect(descriptor?.enumerable).toBe(true);
expect(descriptor?.value).toBe(ErrorCode.RedisConnectionError);
});
test("should work with JSON.stringify", () => {
const error = new CacheErrorClass(ErrorCode.CacheValidationError, "Validation failed");
// JSON.stringify should include the code property (public field)
const json = JSON.stringify(error);
const parsed = JSON.parse(json) as Record<string, unknown>;
expect(parsed.code).toBe(ErrorCode.CacheValidationError);
// Note: Error's message and name properties are not enumerable, so JSON.stringify omits them by default
// Only the public 'code' property is serialized
expect(parsed.code).toBeDefined();
});
});
describe("Module exports", () => {
test("should export all required types and utilities", () => {
// Verify functions are exported
expect(typeof ok).toBe("function");
expect(typeof err).toBe("function");
// Verify classes are exported
expect(typeof CacheErrorClass).toBe("function");
expect(CacheErrorClass.prototype).toBeInstanceOf(Error);
// Verify enum is exported
expect(typeof ErrorCode).toBe("object");
expect(ErrorCode).toBeDefined();
// Type exports verification (compile-time check)
const typeTest = {
Result: "Result" as keyof { Result: Result<unknown, unknown> },
CacheError: "CacheError" as keyof { CacheError: CacheError },
};
expect(typeTest.Result).toBe("Result");
expect(typeTest.CacheError).toBe("CacheError");
});
});
});

43
packages/cache/types/error.ts vendored Normal file
View File

@@ -0,0 +1,43 @@
// Result type system for cache operations
export type Result<T, E = Error> = { ok: true; data: T } | { ok: false; error: E };
// Utility functions for creating Result objects
export const ok = <T, E>(data: T): Result<T, E> => ({ ok: true, data });
export const err = <E = Error>(error: E): Result<never, E> => ({ ok: false, error });
// Error codes for cache operations
export enum ErrorCode {
Unknown = "unknown",
CacheValidationError = "cache_validation_error",
RedisConnectionError = "redis_connection_error",
RedisOperationError = "redis_operation_error",
CacheCorruptionError = "cache_corruption_error",
RedisConfigurationError = "redis_configuration_error",
}
// Generic error type for all cache operations
export interface CacheError {
code: ErrorCode;
}
// CacheError class that extends Error for proper error handling
export class CacheErrorClass extends Error implements CacheError {
constructor(
public code: ErrorCode,
message?: string
) {
super(message ?? `Cache error: ${code}`);
this.name = "CacheError";
// Maintains proper prototype chain in older environments
Object.setPrototypeOf(this, CacheErrorClass.prototype);
}
/**
* Creates a CacheErrorClass from a plain CacheError object
* Useful for converting existing error objects to proper Error instances
*/
static fromCacheError(error: CacheError, message?: string): CacheErrorClass {
return new CacheErrorClass(error.code, message);
}
}
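A short sketch of how a caller is expected to consume these `Result` values coming back from the service; the key literal and the fallback logic are illustrative, and the import paths assume the package's `@/` alias:

```ts
import { CacheService } from "@/src/service";
import { ErrorCode } from "@/types/error";
import { ZCacheKey } from "@/types/keys";

// Assumes an already-constructed CacheService instance.
async function readCachedUserCount(cache: CacheService): Promise<number | null> {
  const key = ZCacheKey.parse("fb:org:org-456:userCount"); // illustrative key

  const result = await cache.get<number>(key);
  if (!result.ok) {
    // Error branch of the discriminated union: only `error` is available here.
    if (result.error.code === ErrorCode.RedisConnectionError) {
      // e.g. fall back to the primary data store while Redis is unavailable
    }
    return null;
  }

  // Success branch: `data` is number | null (null covers both a miss and a cached null).
  return result.data;
}
```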

106
packages/cache/types/keys.test.ts vendored Normal file
View File

@@ -0,0 +1,106 @@
import { describe, expect, test } from "vitest";
import { type CacheKey, type CustomCacheNamespace, ZCacheKey } from "./keys";
describe("@formbricks/cache types/keys", () => {
describe("ZCacheKey schema", () => {
test("should validate valid cache keys", () => {
const validKeys = [
"fb:test:123:data",
"fb:env:test:state",
"analytics:user:123",
"custom:namespace:key",
];
validKeys.forEach((key) => {
const result = ZCacheKey.safeParse(key);
expect(result.success).toBe(true);
if (result.success) {
expect(result.data).toBe(key);
}
});
});
test("should reject invalid cache keys", () => {
const invalidKeys = [
"", // empty string
" ", // whitespace only
"\t", // tab only
"\n", // newline only
" \n ", // mixed whitespace
];
invalidKeys.forEach((key) => {
const result = ZCacheKey.safeParse(key);
expect(result.success).toBe(false);
});
});
test("should provide meaningful error messages", () => {
const emptyResult = ZCacheKey.safeParse("");
expect(emptyResult.success).toBe(false);
if (!emptyResult.success) {
expect(emptyResult.error.issues[0].message).toBe("Cache key cannot be empty");
}
const whitespaceResult = ZCacheKey.safeParse(" ");
expect(whitespaceResult.success).toBe(false);
if (!whitespaceResult.success) {
expect(whitespaceResult.error.issues[0].message).toBe("Cache key cannot be empty or whitespace only");
}
});
test("should create branded CacheKey type", () => {
const validKey = "fb:test:123:data";
const result = ZCacheKey.parse(validKey);
// Type assertion to ensure it's properly branded
const typedKey: CacheKey = result;
expect(typedKey).toBe(validKey);
});
});
describe("CacheKey type", () => {
test("should work with type-safe functions", () => {
// Helper function that only accepts CacheKey
const acceptsCacheKey = (key: CacheKey): string => key;
const validKey = ZCacheKey.parse("fb:env:test:state");
expect(acceptsCacheKey(validKey)).toBe("fb:env:test:state");
});
test("should maintain string behavior", () => {
const key = ZCacheKey.parse("fb:test:123");
// Should work with string methods
expect(key.length).toBe(11);
expect(key.startsWith("fb:")).toBe(true);
expect(key.split(":")).toEqual(["fb", "test", "123"]);
expect(key.includes("test")).toBe(true);
});
test("should be serializable", () => {
const key = ZCacheKey.parse("fb:serialization:test");
// Should serialize as regular string
expect(JSON.stringify({ cacheKey: key })).toBe('{"cacheKey":"fb:serialization:test"}');
// Should parse back correctly
const parsed = JSON.parse('{"cacheKey":"fb:serialization:test"}') as { cacheKey: string };
expect(parsed.cacheKey).toBe("fb:serialization:test");
});
});
describe("CustomCacheNamespace type", () => {
test("should include expected namespaces", () => {
// Type test - this will fail at compile time if types don't match
const analyticsNamespace: CustomCacheNamespace = "analytics";
expect(analyticsNamespace).toBe("analytics");
});
test("should be usable in cache key construction", () => {
const namespace: CustomCacheNamespace = "analytics";
const cacheKey = ZCacheKey.parse(`${namespace}:user:123`);
expect(cacheKey).toBe("analytics:user:123");
});
});
});

19
packages/cache/types/keys.ts vendored Normal file
View File

@@ -0,0 +1,19 @@
import { z } from "zod";
/**
* Branded type for cache keys to prevent raw string usage
* This ensures only properly generated cache keys can be used in cache operations
*/
export const ZCacheKey = z
.string()
.min(1, "Cache key cannot be empty")
.refine((key) => key.trim().length > 0, "Cache key cannot be empty or whitespace only")
.brand("CacheKey");
export type CacheKey = z.infer<typeof ZCacheKey>;
/**
* Possible namespaces for custom cache keys
* Add new namespaces here as they are introduced
*/
export type CustomCacheNamespace = "analytics";

38
packages/cache/types/service.test.ts vendored Normal file
View File

@@ -0,0 +1,38 @@
import { describe, expect, test } from "vitest";
import { ZTtlMs } from "./service";
describe("ZTtlMs", () => {
test("should validate valid TTL values", () => {
expect(ZTtlMs.parse(1000)).toBe(1000);
expect(ZTtlMs.parse(5000)).toBe(5000);
expect(ZTtlMs.parse(7200000)).toBe(7200000); // 2 hours
});
test("should reject values below 1000ms", () => {
expect(() => ZTtlMs.parse(0)).toThrow("TTL must be at least 1000ms (1 second)");
expect(() => ZTtlMs.parse(500)).toThrow("TTL must be at least 1000ms (1 second)");
expect(() => ZTtlMs.parse(999)).toThrow("TTL must be at least 1000ms (1 second)");
});
test("should reject negative numbers", () => {
expect(() => ZTtlMs.parse(-1)).toThrow("TTL must be at least 1000ms (1 second)");
expect(() => ZTtlMs.parse(-1000)).toThrow("TTL must be at least 1000ms (1 second)");
});
test("should reject non-integer values", () => {
expect(() => ZTtlMs.parse(1000.5)).toThrow("Expected integer, received float");
expect(() => ZTtlMs.parse(1500.25)).toThrow("Expected integer, received float");
});
test("should reject non-finite values", () => {
expect(() => ZTtlMs.parse(NaN)).toThrow("Expected number, received nan");
expect(() => ZTtlMs.parse(Infinity)).toThrow("TTL must be finite");
expect(() => ZTtlMs.parse(-Infinity)).toThrow("TTL must be finite");
});
test("should reject non-numeric values", () => {
expect(() => ZTtlMs.parse("1000")).toThrow("Expected number, received string");
expect(() => ZTtlMs.parse(null)).toThrow("Expected number, received null");
expect(() => ZTtlMs.parse(undefined)).toThrow("Required");
});
});

7
packages/cache/types/service.ts vendored Normal file
View File

@@ -0,0 +1,7 @@
import { z } from "zod";
export const ZTtlMs = z
.number()
.int()
.min(1000, "TTL must be at least 1000ms (1 second)")
.finite("TTL must be finite");

37
packages/cache/vite.config.ts vendored Normal file
View File

@@ -0,0 +1,37 @@
/// <reference types="vitest" />
import { resolve } from "path";
import dts from "vite-plugin-dts";
import { defineConfig } from "vitest/config";
export default defineConfig({
resolve: {
alias: {
"@": resolve(__dirname, "."),
},
},
build: {
lib: {
entry: resolve(__dirname, "src/index.ts"),
name: "formbricksCache",
fileName: "index",
formats: ["es", "cjs"],
},
rollupOptions: {
external: ["redis", "@formbricks/logger", "zod"],
},
},
test: {
environment: "node",
globals: true,
coverage: {
reporter: ["text", "json", "html", "lcov"],
},
},
plugins: [
dts({
include: ["src/**/*", "types/**/*"],
entryRoot: ".",
outDir: "dist",
}),
],
});

91
pnpm-lock.yaml generated
View File

@@ -123,6 +123,9 @@ importers:
'@dnd-kit/utilities':
specifier: 3.2.2
version: 3.2.2(react@19.1.0)
'@formbricks/cache':
specifier: workspace:*
version: link:../../packages/cache
'@formbricks/database':
specifier: workspace:*
version: link:../../packages/database
@@ -539,6 +542,34 @@ importers:
specifier: 3.1.0
version: 3.1.0(typescript@5.8.3)(vitest@3.1.3(@types/node@22.15.18)(jiti@2.4.2)(jsdom@26.1.0)(terser@5.39.1)(tsx@4.19.4)(yaml@2.8.0))
packages/cache:
dependencies:
'@formbricks/logger':
specifier: workspace:*
version: link:../logger
redis:
specifier: 5.8.1
version: 5.8.1
zod:
specifier: 3.24.4
version: 3.24.4
devDependencies:
'@formbricks/config-typescript':
specifier: workspace:*
version: link:../config-typescript
'@formbricks/eslint-config':
specifier: workspace:*
version: link:../config-eslint
'@vitest/coverage-v8':
specifier: 3.1.3
version: 3.1.3(vitest@3.1.3(@types/node@22.15.18)(jiti@2.5.1)(jsdom@26.1.0)(terser@5.39.1)(tsx@4.19.4)(yaml@2.8.0))
vite:
specifier: 6.3.5
version: 6.3.5(@types/node@22.15.18)(jiti@2.5.1)(terser@5.39.1)(tsx@4.19.4)(yaml@2.8.0)
vitest:
specifier: 3.1.3
version: 3.1.3(@types/node@22.15.18)(jiti@2.5.1)(jsdom@26.1.0)(terser@5.39.1)(tsx@4.19.4)(yaml@2.8.0)
packages/config-eslint:
devDependencies:
'@next/eslint-plugin-next':
@@ -3265,6 +3296,12 @@ packages:
peerDependencies:
'@redis/client': ^1.0.0
'@redis/bloom@5.8.1':
resolution: {integrity: sha512-hJOJr/yX6BttnyZ+nxD3Ddiu2lPig4XJjyAK1v7OSHOJNUTfn3RHBryB9wgnBMBdkg9glVh2AjItxIXmr600MA==}
engines: {node: '>= 18'}
peerDependencies:
'@redis/client': ^5.8.1
'@redis/client@1.6.0':
resolution: {integrity: sha512-aR0uffYI700OEEH4gYnitAnv3vzVGXCFvYfdpu/CJKvk4pHfLPEy/JSZyrpQ+15WhXe1yJRXLtfQ84s4mEXnPg==}
engines: {node: '>=14'}
@@ -3273,6 +3310,10 @@ packages:
resolution: {integrity: sha512-/KCsg3xSlR+nCK8/8ZYSknYxvXHwubJrU82F3Lm1Fp6789VQ0/3RJKfsmRXjqfaTA++23CvC3hqmqe/2GEt6Kw==}
engines: {node: '>=14'}
'@redis/client@5.8.1':
resolution: {integrity: sha512-hD5Tvv7G0t8b3w8ao3kQ4jEPUmUUC6pqA18c8ciYF5xZGfUGBg0olQHW46v6qSt4O5bxOuB3uV7pM6H5wEjBwA==}
engines: {node: '>= 18'}
'@redis/graph@1.1.1':
resolution: {integrity: sha512-FEMTcTHZozZciLRl6GiiIB4zGm5z5F3F6a6FZCyrfxdKOhFlGkiAqlexWMBzCi4DcRoyiOsuLfW+cjlGWyExOw==}
peerDependencies:
@@ -3283,16 +3324,34 @@ packages:
peerDependencies:
'@redis/client': ^1.0.0
'@redis/json@5.8.1':
resolution: {integrity: sha512-kyvM8Vn+WjJI++nRsIoI9TbdfCs1/TgD0Hp7Z7GiG6W4IEBzkXGQakli+R5BoJzUfgh7gED2fkncYy1NLprMNg==}
engines: {node: '>= 18'}
peerDependencies:
'@redis/client': ^5.8.1
'@redis/search@1.2.0':
resolution: {integrity: sha512-tYoDBbtqOVigEDMAcTGsRlMycIIjwMCgD8eR2t0NANeQmgK/lvxNAvYyb6bZDD4frHRhIHkJu2TBRvB0ERkOmw==}
peerDependencies:
'@redis/client': ^1.0.0
'@redis/search@5.8.1':
resolution: {integrity: sha512-CzuKNTInTNQkxqehSn7QiYcM+th+fhjQn5ilTvksP1wPjpxqK0qWt92oYg3XZc3tO2WuXkqDvTujc4D7kb6r/A==}
engines: {node: '>= 18'}
peerDependencies:
'@redis/client': ^5.8.1
'@redis/time-series@1.1.0':
resolution: {integrity: sha512-c1Q99M5ljsIuc4YdaCwfUEXsofakb9c8+Zse2qxTadu8TalLXuAESzLvFAvNVbkmSlvlzIQOLpBCmWI9wTOt+g==}
peerDependencies:
'@redis/client': ^1.0.0
'@redis/time-series@5.8.1':
resolution: {integrity: sha512-klvdR96U9oSOyqvcectoAGhYlMOnMS3I5UWUOgdBn1buMODiwM/E4Eds7gxldKmtowe4rLJSF1CyIqyZTjy8Ow==}
engines: {node: '>= 18'}
peerDependencies:
'@redis/client': ^5.8.1
'@resvg/resvg-wasm@2.4.0':
resolution: {integrity: sha512-C7c51Nn4yTxXFKvgh2txJFNweaVcfUPQxwEUFw4aWsCmfiBDJsTSwviIF8EcwjQ6k8bPyMWCl1vw4BdxE569Cg==}
engines: {node: '>= 10'}
@@ -8275,6 +8334,10 @@ packages:
redis@4.7.0:
resolution: {integrity: sha512-zvmkHEAdGMn+hMRXuMBtu4Vo5P6rHQjLoHftu+lBqq8ZTA3RCVC/WzD790bkKKiNFp7d5/9PcSD19fJyyRvOdQ==}
redis@5.8.1:
resolution: {integrity: sha512-RZjBKYX/qFF809x6vDcE5VA6L3MmiuT+BkbXbIyyyeU0lPD47V4z8qTzN+Z/kKFwpojwCItOfaItYuAjNs8pTQ==}
engines: {node: '>= 18'}
reflect-metadata@0.2.2:
resolution: {integrity: sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q==}
@@ -13013,6 +13076,10 @@ snapshots:
dependencies:
'@redis/client': 1.6.0
'@redis/bloom@5.8.1(@redis/client@5.8.1)':
dependencies:
'@redis/client': 5.8.1
'@redis/client@1.6.0':
dependencies:
cluster-key-slot: 1.1.2
@@ -13025,6 +13092,10 @@ snapshots:
generic-pool: 3.9.0
yallist: 4.0.0
'@redis/client@5.8.1':
dependencies:
cluster-key-slot: 1.1.2
'@redis/graph@1.1.1(@redis/client@1.6.0)':
dependencies:
'@redis/client': 1.6.0
@@ -13033,14 +13104,26 @@ snapshots:
dependencies:
'@redis/client': 1.6.0
'@redis/json@5.8.1(@redis/client@5.8.1)':
dependencies:
'@redis/client': 5.8.1
'@redis/search@1.2.0(@redis/client@1.6.0)':
dependencies:
'@redis/client': 1.6.0
'@redis/search@5.8.1(@redis/client@5.8.1)':
dependencies:
'@redis/client': 5.8.1
'@redis/time-series@1.1.0(@redis/client@1.6.0)':
dependencies:
'@redis/client': 1.6.0
'@redis/time-series@5.8.1(@redis/client@5.8.1)':
dependencies:
'@redis/client': 5.8.1
'@resvg/resvg-wasm@2.4.0': {}
'@rollup/plugin-commonjs@28.0.1(rollup@4.46.1)':
@@ -18660,6 +18743,14 @@ snapshots:
'@redis/search': 1.2.0(@redis/client@1.6.0)
'@redis/time-series': 1.1.0(@redis/client@1.6.0)
redis@5.8.1:
dependencies:
'@redis/bloom': 5.8.1(@redis/client@5.8.1)
'@redis/client': 5.8.1
'@redis/json': 5.8.1(@redis/client@5.8.1)
'@redis/search': 5.8.1(@redis/client@5.8.1)
'@redis/time-series': 5.8.1(@redis/client@5.8.1)
reflect-metadata@0.2.2: {}
reflect.getprototypeof@1.0.10:

View File

@@ -2,16 +2,16 @@ sonar.projectKey=formbricks_formbricks
sonar.organization=formbricks
# Sources
sonar.sources=apps/web,packages/surveys,packages/js-core
sonar.sources=apps/web,packages/surveys,packages/js-core,packages/cache
sonar.exclusions=**/node_modules/**,**/.next/**,**/dist/**,**/build/**,**/*.test.*,**/*.spec.*,**/__mocks__/**
# Tests
sonar.tests=apps/web,packages/surveys,packages/js-core
sonar.tests=apps/web,packages/surveys,packages/js-core,packages/cache
sonar.test.inclusions=**/*.test.*,**/*.spec.*
sonar.javascript.lcov.reportPaths=apps/web/coverage/lcov.info,packages/surveys/coverage/lcov.info,packages/js-core/coverage/lcov.info
sonar.javascript.lcov.reportPaths=apps/web/coverage/lcov.info,packages/surveys/coverage/lcov.info,packages/js-core/coverage/lcov.info,packages/cache/coverage/lcov.info
# TypeScript configuration
sonar.typescript.tsconfigPath=apps/web/tsconfig.json,packages/surveys/tsconfig.json,packages/js-core/tsconfig.json
sonar.typescript.tsconfigPath=apps/web/tsconfig.json,packages/surveys/tsconfig.json,packages/js-core/tsconfig.json,packages/cache/tsconfig.json
# SCM
sonar.scm.provider=git

View File

@@ -1,6 +1,21 @@
{
"$schema": "https://turborepo.org/schema.json",
"tasks": {
"@formbricks/cache#build": {
"dependsOn": ["@formbricks/logger#build"]
},
"@formbricks/cache#go": {
"dependsOn": ["@formbricks/logger#build"]
},
"@formbricks/cache#lint": {
"dependsOn": ["@formbricks/logger#build"]
},
"@formbricks/cache#test": {
"dependsOn": ["@formbricks/logger#build"]
},
"@formbricks/cache#test:coverage": {
"dependsOn": ["@formbricks/logger#build"]
},
"@formbricks/database#build": {
"dependsOn": ["^build"],
"outputs": ["dist/**"]
@@ -61,10 +76,10 @@
"persistent": true
},
"@formbricks/web#test": {
"dependsOn": ["@formbricks/logger#build", "@formbricks/database#build"]
"dependsOn": ["@formbricks/logger#build", "@formbricks/database#build", "@formbricks/cache#build"]
},
"@formbricks/web#test:coverage": {
"dependsOn": ["@formbricks/logger#build", "@formbricks/database#build"]
"dependsOn": ["@formbricks/logger#build", "@formbricks/database#build", "@formbricks/cache#build"]
},
"build": {
"dependsOn": ["^build"],

View File

@@ -1 +1 @@
export default ["packages/*/vite.config.mts", "apps/web/vite.config.mts"];
export default ["packages/*/vite.config.{ts,mts}", "apps/**/vite.config.{ts,mts}"];