Mirror of https://github.com/formbricks/formbricks.git (synced 2026-01-06 09:00:18 -06:00)
chore: update dependencies and improve logging format (#6672)
Committed by: GitHub
Parent: cdf0926c60
Commit: 76213af5d7
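
The logging changes below move every call from the message-first form, logger.error(message, contextObject), to pino's object-first signature, logger.error(contextObject, message). With pino, an object passed after the message string is treated as a printf-style interpolation argument rather than merged into the log record, so the context fields do not end up as structured properties on the log line. A minimal TypeScript sketch of the two call shapes, assuming a plain pino logger (the environmentId value is a made-up placeholder, not taken from the commit):

import pino from "pino";

const logger = pino();

// Old style used across the codebase: message first, context second.
// pino does not merge the trailing object into the log record here,
// so environmentId is not emitted as a structured field.
logger.error("Failed to fetch new Airtable token", { environmentId: "env_123" });

// New style this commit standardizes on: merging object first, message second,
// so environmentId shows up as a top-level property of the JSON log line.
logger.error({ environmentId: "env_123" }, "Failed to fetch new Airtable token");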
@@ -100,10 +100,13 @@ export const getAirtableToken = async (environmentId: string) => {
   });

   if (!newToken) {
-    logger.error("Failed to fetch new Airtable token", {
-      environmentId,
-      airtableIntegration,
-    });
+    logger.error(
+      {
+        environmentId,
+        airtableIntegration,
+      },
+      "Failed to fetch new Airtable token"
+    );
     throw new Error("Failed to fetch new Airtable token");
   }
@@ -121,10 +124,13 @@ export const getAirtableToken = async (environmentId: string) => {

     return access_token;
   } catch (error) {
-    logger.error("Failed to get Airtable token", {
-      environmentId,
-      error,
-    });
+    logger.error(
+      {
+        environmentId,
+        error,
+      },
+      "Failed to get Airtable token"
+    );
     throw new Error("Failed to get Airtable token");
   }
 };
@@ -1,13 +1,24 @@
 import { createCipheriv, randomBytes } from "crypto";
-import { describe, expect, test, vi } from "vitest";
+import { beforeEach, describe, expect, test, vi } from "vitest";
+import { logger } from "@formbricks/logger";
 import { getHash, symmetricDecrypt, symmetricEncrypt } from "./crypto";

 vi.mock("./constants", () => ({ ENCRYPTION_KEY: "0".repeat(32) }));

+vi.mock("@formbricks/logger", () => ({
+  logger: {
+    warn: vi.fn(),
+  },
+}));
+
 const key = "0".repeat(32);
 const plain = "hello";

 describe("crypto", () => {
+  beforeEach(() => {
+    vi.clearAllMocks();
+  });
+
   test("encrypt + decrypt roundtrip", () => {
     const cipher = symmetricEncrypt(plain, key);
     expect(symmetricDecrypt(cipher, key)).toBe(plain);
@@ -38,4 +49,84 @@ describe("crypto", () => {
     expect(typeof h).toBe("string");
     expect(h.length).toBeGreaterThan(0);
   });
+
+  test("logs warning and throws when GCM decryption fails with invalid auth tag", () => {
+    // Create a valid GCM payload but corrupt the auth tag
+    const iv = randomBytes(16);
+    const bufKey = Buffer.from(key, "utf8");
+    const cipher = createCipheriv("aes-256-gcm", bufKey, iv);
+    let enc = cipher.update(plain, "utf8", "hex");
+    enc += cipher.final("hex");
+    const validTag = cipher.getAuthTag().toString("hex");
+
+    // Corrupt the auth tag by flipping some bits
+    const corruptedTag = validTag
+      .split("")
+      .map((c, i) => (i < 4 ? (parseInt(c, 16) ^ 0xf).toString(16) : c))
+      .join("");
+
+    const corruptedPayload = `${iv.toString("hex")}:${enc}:${corruptedTag}`;
+
+    // Should throw an error and log a warning
+    expect(() => symmetricDecrypt(corruptedPayload, key)).toThrow();
+
+    // Verify logger.warn was called with the correct format (object first, message second)
+    expect(logger.warn).toHaveBeenCalledWith(
+      { err: expect.any(Error) },
+      "AES-GCM decryption failed; refusing to fall back to insecure CBC"
+    );
+    expect(logger.warn).toHaveBeenCalledTimes(1);
+  });
+
+  test("logs warning and throws when GCM decryption fails with corrupted encrypted data", () => {
+    // Create a payload with valid structure but corrupted encrypted data
+    const iv = randomBytes(16);
+    const bufKey = Buffer.from(key, "utf8");
+    const cipher = createCipheriv("aes-256-gcm", bufKey, iv);
+    let enc = cipher.update(plain, "utf8", "hex");
+    enc += cipher.final("hex");
+    const tag = cipher.getAuthTag().toString("hex");
+
+    // Corrupt the encrypted data
+    const corruptedEnc = enc
+      .split("")
+      .map((c, i) => (i < 4 ? (parseInt(c, 16) ^ 0xa).toString(16) : c))
+      .join("");
+
+    const corruptedPayload = `${iv.toString("hex")}:${corruptedEnc}:${tag}`;
+
+    // Should throw an error and log a warning
+    expect(() => symmetricDecrypt(corruptedPayload, key)).toThrow();
+
+    // Verify logger.warn was called
+    expect(logger.warn).toHaveBeenCalledWith(
+      { err: expect.any(Error) },
+      "AES-GCM decryption failed; refusing to fall back to insecure CBC"
+    );
+    expect(logger.warn).toHaveBeenCalledTimes(1);
+  });
+
+  test("logs warning and throws when GCM decryption fails with wrong key", () => {
+    // Create a valid GCM payload with one key
+    const iv = randomBytes(16);
+    const bufKey = Buffer.from(key, "utf8");
+    const cipher = createCipheriv("aes-256-gcm", bufKey, iv);
+    let enc = cipher.update(plain, "utf8", "hex");
+    enc += cipher.final("hex");
+    const tag = cipher.getAuthTag().toString("hex");
+    const payload = `${iv.toString("hex")}:${enc}:${tag}`;
+
+    // Try to decrypt with a different key
+    const wrongKey = "1".repeat(32);
+
+    // Should throw an error and log a warning
+    expect(() => symmetricDecrypt(payload, wrongKey)).toThrow();
+
+    // Verify logger.warn was called
+    expect(logger.warn).toHaveBeenCalledWith(
+      { err: expect.any(Error) },
+      "AES-GCM decryption failed; refusing to fall back to insecure CBC"
+    );
+    expect(logger.warn).toHaveBeenCalledTimes(1);
+  });
 });
@@ -85,7 +85,7 @@ export function symmetricDecrypt(payload: string, key: string): string {
   try {
     return symmetricDecryptV2(payload, key);
   } catch (err) {
-    logger.warn(err, "AES-GCM decryption failed; refusing to fall back to insecure CBC");
+    logger.warn({ err }, "AES-GCM decryption failed; refusing to fall back to insecure CBC");

     throw err;
   }
@@ -1,6 +1,6 @@
-import { logSignOut } from "@/modules/auth/lib/utils";
 import { beforeEach, describe, expect, test, vi } from "vitest";
 import { logger } from "@formbricks/logger";
+import { logSignOut } from "@/modules/auth/lib/utils";
 import { logSignOutAction } from "./sign-out";

 // Mock the dependencies
@@ -80,6 +80,7 @@ describe("logSignOutAction", () => {
       "email_change",
       "session_timeout",
       "forced_logout",
+      "password_reset",
     ] as const;

     for (const reason of reasons) {
@@ -100,11 +101,14 @@ describe("logSignOutAction", () => {

     await expect(() => logSignOutAction(mockUserId, mockUserEmail, mockContext)).rejects.toThrow(mockError);

-    expect(logger.error).toHaveBeenCalledWith("Failed to log sign out event", {
-      userId: mockUserId,
-      context: mockContext,
-      error: mockError.message,
-    });
+    expect(logger.error).toHaveBeenCalledWith(
+      {
+        userId: mockUserId,
+        context: mockContext,
+        error: mockError.message,
+      },
+      "Failed to log sign out event"
+    );
     expect(logger.error).toHaveBeenCalledTimes(1);
   });
@@ -116,11 +120,14 @@ describe("logSignOutAction", () => {

     await expect(() => logSignOutAction(mockUserId, mockUserEmail, mockContext)).rejects.toThrow(mockError);

-    expect(logger.error).toHaveBeenCalledWith("Failed to log sign out event", {
-      userId: mockUserId,
-      context: mockContext,
-      error: mockError,
-    });
+    expect(logger.error).toHaveBeenCalledWith(
+      {
+        userId: mockUserId,
+        context: mockContext,
+        error: mockError,
+      },
+      "Failed to log sign out event"
+    );
     expect(logger.error).toHaveBeenCalledTimes(1);
   });
@@ -133,11 +140,14 @@ describe("logSignOutAction", () => {

     await expect(() => logSignOutAction(mockUserId, mockUserEmail, emptyContext)).rejects.toThrow(mockError);

-    expect(logger.error).toHaveBeenCalledWith("Failed to log sign out event", {
-      userId: mockUserId,
-      context: emptyContext,
-      error: mockError.message,
-    });
+    expect(logger.error).toHaveBeenCalledWith(
+      {
+        userId: mockUserId,
+        context: emptyContext,
+        error: mockError.message,
+      },
+      "Failed to log sign out event"
+    );
     expect(logger.error).toHaveBeenCalledTimes(1);
   });
@@ -1,7 +1,7 @@
 "use server";

-import { logSignOut } from "@/modules/auth/lib/utils";
 import { logger } from "@formbricks/logger";
+import { logSignOut } from "@/modules/auth/lib/utils";

 /**
  * Logs a sign out event
@@ -27,11 +27,14 @@ export const logSignOutAction = async (
   try {
     logSignOut(userId, userEmail, context);
   } catch (error) {
-    logger.error("Failed to log sign out event", {
-      userId,
-      context,
-      error: error instanceof Error ? error.message : String(error),
-    });
+    logger.error(
+      {
+        userId,
+        context,
+        error: error instanceof Error ? error.message : String(error),
+      },
+      "Failed to log sign out event"
+    );
     // Re-throw to ensure callers are aware of the failure
     throw error;
   }
@@ -1,7 +1,7 @@
-import { queueAuditEventBackground } from "@/modules/ee/audit-logs/lib/handler";
-import { UNKNOWN_DATA } from "@/modules/ee/audit-logs/types/audit-log";
 import * as Sentry from "@sentry/nextjs";
 import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
+import { queueAuditEventBackground } from "@/modules/ee/audit-logs/lib/handler";
+import { UNKNOWN_DATA } from "@/modules/ee/audit-logs/types/audit-log";
 import {
   createAuditIdentifier,
   hashPassword,
@@ -43,16 +43,26 @@ vi.mock("@/lib/constants", () => ({
 }));

 // Mock cache module
-const { mockCache } = vi.hoisted(() => ({
+const { mockCache, mockLogger } = vi.hoisted(() => ({
   mockCache: {
     getRedisClient: vi.fn(),
   },
+  mockLogger: {
+    warn: vi.fn(),
+    error: vi.fn(),
+    info: vi.fn(),
+    debug: vi.fn(),
+  },
 }));

 vi.mock("@/lib/cache", () => ({
   cache: mockCache,
 }));

+vi.mock("@formbricks/logger", () => ({
+  logger: mockLogger,
+}));
+
 // Mock @formbricks/cache
 vi.mock("@formbricks/cache", () => ({
   createCacheKey: {
@@ -125,6 +135,38 @@ describe("Auth Utils", () => {
       expect(await verifyPassword(complexPassword, hashedComplex)).toBe(true);
       expect(await verifyPassword("wrong", hashedComplex)).toBe(false);
     });
+
+    test("should handle bcrypt errors gracefully and log warning", async () => {
+      // Save the original bcryptjs implementation
+      const originalModule = await import("bcryptjs");
+
+      // Mock bcryptjs to throw an error on compare
+      vi.doMock("bcryptjs", () => ({
+        ...originalModule,
+        compare: vi.fn().mockRejectedValue(new Error("Invalid salt version")),
+        hash: originalModule.hash, // Keep hash working
+      }));
+
+      // Re-import the utils module to use the mocked bcryptjs
+      const { verifyPassword: verifyPasswordMocked } = await import("./utils?t=" + Date.now());
+
+      const password = "testPassword";
+      const invalidHash = "invalid-hash-format";
+
+      const result = await verifyPasswordMocked(password, invalidHash);
+
+      // Should return false for security
+      expect(result).toBe(false);
+
+      // Should log warning
+      expect(mockLogger.warn).toHaveBeenCalledWith(
+        { error: expect.any(Error) },
+        "Password verification failed due to invalid hash format"
+      );
+
+      // Restore the module
+      vi.doUnmock("bcryptjs");
+    });
   });

   describe("Audit Identifier Utils", () => {
@@ -1,12 +1,12 @@
-import { cache } from "@/lib/cache";
-import { IS_PRODUCTION, SENTRY_DSN } from "@/lib/constants";
-import { queueAuditEventBackground } from "@/modules/ee/audit-logs/lib/handler";
-import { TAuditAction, TAuditStatus, UNKNOWN_DATA } from "@/modules/ee/audit-logs/types/audit-log";
 import * as Sentry from "@sentry/nextjs";
 import { compare, hash } from "bcryptjs";
 import { createHash, randomUUID } from "crypto";
 import { createCacheKey } from "@formbricks/cache";
 import { logger } from "@formbricks/logger";
+import { cache } from "@/lib/cache";
+import { IS_PRODUCTION, SENTRY_DSN } from "@/lib/constants";
+import { queueAuditEventBackground } from "@/modules/ee/audit-logs/lib/handler";
+import { TAuditAction, TAuditStatus, UNKNOWN_DATA } from "@/modules/ee/audit-logs/types/audit-log";

 export const hashPassword = async (password: string) => {
   const hashedPassword = await hash(password, 12);
@@ -19,7 +19,7 @@ export const verifyPassword = async (password: string, hashedPassword: string) =
     return isValid;
   } catch (error) {
     // Log warning for debugging purposes, but don't throw to maintain security
-    logger.warn("Password verification failed due to invalid hash format", { error });
+    logger.warn({ error }, "Password verification failed due to invalid hash format");
     // Return false for invalid hashes or other bcrypt errors
     return false;
   }
@@ -279,7 +279,7 @@ export const shouldLogAuthFailure = async (

     return currentCount % 10 === 0 || timeSinceLastLog > 60000;
   } catch (error) {
-    logger.warn("Redis rate limiting failed, not logging due to Redis requirement", { error });
+    logger.warn({ error }, "Redis rate limiting failed, not logging due to Redis requirement");
     // If Redis fails, do not log as Redis is required for audit logs
     return false;
   }
@@ -1,9 +1,9 @@
-import { hashString } from "@/lib/hash-string";
-// Import modules after mocking
-import { getClientIpFromHeaders } from "@/lib/utils/client-ip";
 import { beforeEach, describe, expect, test, vi } from "vitest";
 import { logger } from "@formbricks/logger";
 import { err, ok } from "@formbricks/types/error-handlers";
+import { hashString } from "@/lib/hash-string";
+// Import modules after mocking
+import { getClientIpFromHeaders } from "@/lib/utils/client-ip";
 import { applyIPRateLimit, applyRateLimit, getClientIdentifier } from "./helpers";
 import { checkRateLimit } from "./rate-limit";
@@ -67,8 +67,8 @@ describe("helpers", () => {

     await expect(getClientIdentifier()).rejects.toThrow("Failed to hash IP");

-    // Verify that the error was logged with proper context
-    expect(logger.error).toHaveBeenCalledWith("Failed to hash IP", { error: originalError });
+    // Verify that the error was logged with proper context (pino 10 format: object first, message second)
+    expect(logger.error).toHaveBeenCalledWith({ error: originalError }, "Failed to hash IP");
   });
 });
@@ -1,7 +1,7 @@
-import { hashString } from "@/lib/hash-string";
-import { getClientIpFromHeaders } from "@/lib/utils/client-ip";
 import { logger } from "@formbricks/logger";
 import { TooManyRequestsError } from "@formbricks/types/errors";
+import { hashString } from "@/lib/hash-string";
+import { getClientIpFromHeaders } from "@/lib/utils/client-ip";
 import { checkRateLimit } from "./rate-limit";
 import { type TRateLimitConfig } from "./types/rate-limit";
@@ -19,7 +19,7 @@ export const getClientIdentifier = async (): Promise<string> => {
     return hashString(ip);
   } catch (error) {
     const errorMessage = "Failed to hash IP";
-    logger.error(errorMessage, { error });
+    logger.error({ error }, errorMessage);
     throw new Error(errorMessage);
   }
 };
@@ -1,10 +1,10 @@
+import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
+import type { Mock } from "vitest";
+import { prisma } from "@formbricks/database";
 import {
   TEnterpriseLicenseDetails,
   TEnterpriseLicenseFeatures,
 } from "@/modules/ee/license-check/types/enterprise-license";
-import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
-import type { Mock } from "vitest";
-import { prisma } from "@formbricks/database";

 // Mock declarations must be at the top level
 vi.mock("@/lib/env", () => ({
@@ -59,6 +59,17 @@ vi.mock("@formbricks/database", () => ({
   },
 }));

+const mockLogger = {
+  error: vi.fn(),
+  warn: vi.fn(),
+  info: vi.fn(),
+  debug: vi.fn(),
+};
+
+vi.mock("@formbricks/logger", () => ({
+  logger: mockLogger,
+}));
+
 // Mock constants as they are used in the original license.ts indirectly
 vi.mock("@/lib/constants", async (importOriginal) => {
   const actual = await importOriginal();
@@ -80,6 +91,10 @@ describe("License Core Logic", () => {
     mockCache.set.mockReset();
     mockCache.del.mockReset();
     mockCache.withCache.mockReset();
+    mockLogger.error.mockReset();
+    mockLogger.warn.mockReset();
+    mockLogger.info.mockReset();
+    mockLogger.debug.mockReset();

     // Set up default mock implementations for Result types
     mockCache.get.mockResolvedValue({ ok: true, data: null });
@@ -527,4 +542,136 @@ describe("License Core Logic", () => {
       );
     });
   });
+
+  describe("Error and Warning Logging", () => {
+    test("should log warning when setPreviousResult cache.set fails (line 176-178)", async () => {
+      const { getEnterpriseLicense } = await import("./license");
+      const fetch = (await import("node-fetch")).default as Mock;
+
+      const mockFetchedLicenseDetails: TEnterpriseLicenseDetails = {
+        status: "active",
+        features: {
+          isMultiOrgEnabled: true,
+          contacts: true,
+          projects: 10,
+          whitelabel: true,
+          removeBranding: true,
+          twoFactorAuth: true,
+          sso: true,
+          saml: true,
+          spamProtection: true,
+          ai: false,
+          auditLogs: true,
+          multiLanguageSurveys: true,
+          accessControl: true,
+          quotas: true,
+        },
+      };
+
+      // Mock successful fetch from API
+      mockCache.withCache.mockResolvedValue(mockFetchedLicenseDetails);
+
+      // Mock cache.set to fail when saving previous result
+      mockCache.set.mockResolvedValue({
+        ok: false,
+        error: new Error("Redis connection failed"),
+      });
+
+      await getEnterpriseLicense();
+
+      // Verify that the warning was logged
+      expect(mockLogger.warn).toHaveBeenCalledWith(
+        { error: new Error("Redis connection failed") },
+        "Failed to cache previous result"
+      );
+    });
+
+    test("should log error when trackApiError is called (line 196-203)", async () => {
+      const { getEnterpriseLicense } = await import("./license");
+      const fetch = (await import("node-fetch")).default as Mock;
+
+      // Mock cache.withCache to execute the function (simulating cache miss)
+      mockCache.withCache.mockImplementation(async (fn) => await fn());
+
+      // Mock API response with 500 status
+      const mockStatus = 500;
+      fetch.mockResolvedValueOnce({
+        ok: false,
+        status: mockStatus,
+        json: async () => ({ error: "Internal Server Error" }),
+      } as any);
+
+      await getEnterpriseLicense();
+
+      // Verify that the API error was logged with correct structure
+      expect(mockLogger.error).toHaveBeenCalledWith(
+        expect.objectContaining({
+          status: mockStatus,
+          code: "API_ERROR",
+          timestamp: expect.any(String),
+        }),
+        expect.stringContaining("License API error:")
+      );
+    });
+
+    test("should log error when trackApiError is called with different status codes (line 196-203)", async () => {
+      const { getEnterpriseLicense } = await import("./license");
+      const fetch = (await import("node-fetch")).default as Mock;
+
+      // Test with 403 Forbidden
+      mockCache.withCache.mockImplementation(async (fn) => await fn());
+      const mockStatus = 403;
+      fetch.mockResolvedValueOnce({
+        ok: false,
+        status: mockStatus,
+        json: async () => ({ error: "Forbidden" }),
+      } as any);
+
+      await getEnterpriseLicense();
+
+      // Verify that the API error was logged with correct structure
+      expect(mockLogger.error).toHaveBeenCalledWith(
+        expect.objectContaining({
+          status: mockStatus,
+          code: "API_ERROR",
+          timestamp: expect.any(String),
+        }),
+        expect.stringContaining("License API error:")
+      );
+    });
+
+    test("should log info when trackFallbackUsage is called during grace period", async () => {
+      const { getEnterpriseLicense } = await import("./license");
+      const fetch = (await import("node-fetch")).default as Mock;
+
+      const previousTime = new Date(Date.now() - 1 * 24 * 60 * 60 * 1000); // 1 day ago
+      const mockPreviousResult = {
+        active: true,
+        features: { removeBranding: true, projects: 5 },
+        lastChecked: previousTime,
+        version: 1,
+      };
+
+      mockCache.withCache.mockResolvedValue(null);
+      mockCache.get.mockImplementation(async (key) => {
+        if (key.includes(":previous_result")) {
+          return { ok: true, data: mockPreviousResult };
+        }
+        return { ok: true, data: null };
+      });
+
+      fetch.mockResolvedValueOnce({ ok: false, status: 500 } as any);
+
+      await getEnterpriseLicense();
+
+      // Verify that the fallback info was logged
+      expect(mockLogger.info).toHaveBeenCalledWith(
+        expect.objectContaining({
+          fallbackLevel: "grace",
+          timestamp: expect.any(String),
+        }),
+        expect.stringContaining("Using license fallback level: grace")
+      );
+    });
+  });
 });
@@ -1,11 +1,4 @@
 import "server-only";
-import { cache } from "@/lib/cache";
-import { env } from "@/lib/env";
-import { hashString } from "@/lib/hash-string";
-import {
-  TEnterpriseLicenseDetails,
-  TEnterpriseLicenseFeatures,
-} from "@/modules/ee/license-check/types/enterprise-license";
 import { HttpsProxyAgent } from "https-proxy-agent";
 import fetch from "node-fetch";
 import { cache as reactCache } from "react";
@@ -13,6 +6,13 @@ import { z } from "zod";
 import { createCacheKey } from "@formbricks/cache";
 import { prisma } from "@formbricks/database";
 import { logger } from "@formbricks/logger";
+import { cache } from "@/lib/cache";
+import { env } from "@/lib/env";
+import { hashString } from "@/lib/hash-string";
+import {
+  TEnterpriseLicenseDetails,
+  TEnterpriseLicenseFeatures,
+} from "@/modules/ee/license-check/types/enterprise-license";

 // Configuration
 const CONFIG = {
@@ -154,7 +154,7 @@ const getPreviousResult = async (): Promise<TPreviousResult> => {
       };
     }
   } catch (error) {
-    logger.error("Failed to get previous result from cache", { error });
+    logger.error({ error }, "Failed to get previous result from cache");
   }

   return {
@@ -174,27 +174,33 @@ const setPreviousResult = async (previousResult: TPreviousResult) => {
       CONFIG.CACHE.PREVIOUS_RESULT_TTL_MS
     );
     if (!result.ok) {
-      logger.warn("Failed to cache previous result", { error: result.error });
+      logger.warn({ error: result.error }, "Failed to cache previous result");
     }
   } catch (error) {
-    logger.error("Failed to set previous result in cache", { error });
+    logger.error({ error }, "Failed to set previous result in cache");
   }
 };

 // Monitoring functions
 const trackFallbackUsage = (level: FallbackLevel) => {
-  logger.info(`Using license fallback level: ${level}`, {
-    fallbackLevel: level,
-    timestamp: new Date().toISOString(),
-  });
+  logger.info(
+    {
+      fallbackLevel: level,
+      timestamp: new Date().toISOString(),
+    },
+    `Using license fallback level: ${level}`
+  );
 };

 const trackApiError = (error: LicenseApiError) => {
-  logger.error(`License API error: ${error.message}`, {
-    status: error.status,
-    code: error.code,
-    timestamp: new Date().toISOString(),
-  });
+  logger.error(
+    {
+      status: error.status,
+      code: error.code,
+      timestamp: new Date().toISOString(),
+    },
+    `License API error: ${error.message}`
+  );
 };

 // Validation functions
packages/cache/src/service.test.ts (vendored): 4 lines changed
@@ -81,14 +81,14 @@ describe("CacheService", () => {
         expect(result.error.code).toBe(ErrorCode.CacheCorruptionError);
       }
       expect(logger.warn).toHaveBeenCalledWith(
-        "Corrupted cache data detected, treating as cache miss",
         expect.objectContaining({
           key,
           parseError: expect.objectContaining({
             name: "SyntaxError",
             message: expect.stringContaining("JSON") as string,
           }) as Error,
-        })
+        }),
+        "Corrupted cache data detected, treating as cache miss"
       );
     });
packages/cache/src/service.ts (vendored): 11 lines changed
@@ -67,10 +67,13 @@ export class CacheService {
         return ok(JSON.parse(value) as T);
       } catch (parseError) {
         // JSON parse failure indicates corrupted cache data - treat as cache miss
-        logger.warn("Corrupted cache data detected, treating as cache miss", {
-          key,
-          parseError,
-        });
+        logger.warn(
+          {
+            key,
+            parseError,
+          },
+          "Corrupted cache data detected, treating as cache miss"
+        );
         return err({
           code: ErrorCode.CacheCorruptionError,
         });
@@ -36,8 +36,8 @@
   "author": "Formbricks <hola@formbricks.com>",
   "dependencies": {
     "zod": "3.24.4",
-    "pino": "9.6.0",
-    "pino-pretty": "13.0.0"
+    "pino": "10.0.0",
+    "pino-pretty": "13.1.1"
   },
   "devDependencies": {
     "vite": "6.3.6",
pnpm-lock.yaml (generated): 57 lines changed
@@ -731,11 +731,11 @@ importers:
   packages/logger:
     dependencies:
       pino:
-        specifier: 9.6.0
-        version: 9.6.0
+        specifier: 10.0.0
+        version: 10.0.0
       pino-pretty:
-        specifier: 13.0.0
-        version: 13.0.0
+        specifier: 13.1.1
+        version: 13.1.1
       zod:
         specifier: 3.24.4
         version: 3.24.4
@@ -6446,10 +6446,6 @@ packages:
   fast-levenshtein@2.0.6:
     resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==}

-  fast-redact@3.5.0:
-    resolution: {integrity: sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A==}
-    engines: {node: '>=6'}
-
   fast-safe-stringify@2.1.1:
     resolution: {integrity: sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==}
@@ -8160,15 +8156,15 @@
   pino-abstract-transport@2.0.0:
     resolution: {integrity: sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw==}

-  pino-pretty@13.0.0:
-    resolution: {integrity: sha512-cQBBIVG3YajgoUjo1FdKVRX6t9XPxwB9lcNJVD5GCnNM4Y6T12YYx8c6zEejxQsU0wrg9TwmDulcE9LR7qcJqA==}
+  pino-pretty@13.1.1:
+    resolution: {integrity: sha512-TNNEOg0eA0u+/WuqH0MH0Xui7uqVk9D74ESOpjtebSQYbNWJk/dIxCXIxFsNfeN53JmtWqYHP2OrIZjT/CBEnA==}
     hasBin: true

   pino-std-serializers@7.0.0:
     resolution: {integrity: sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA==}

-  pino@9.6.0:
-    resolution: {integrity: sha512-i85pKRCt4qMjZ1+L7sy2Ag4t1atFcdbEt76+7iRJn1g2BvsnRMGu9p8pivl9fs63M2kF/A0OacFZhTub+m/qMg==}
+  pino@10.0.0:
+    resolution: {integrity: sha512-eI9pKwWEix40kfvSzqEP6ldqOoBIN7dwD/o91TY5z8vQI12sAffpR/pOqAD1IVVwIVHDpHjkq0joBPdJD0rafA==}
     hasBin: true

   pirates@4.0.7:
@@ -8418,8 +8414,8 @@
   process-nextick-args@2.0.1:
     resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==}

-  process-warning@4.0.1:
-    resolution: {integrity: sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q==}
+  process-warning@5.0.0:
+    resolution: {integrity: sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA==}

   process@0.11.10:
     resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==}
@@ -8874,8 +8870,8 @@
     resolution: {integrity: sha512-9BakfsO2aUQN2K9Fdbj87RJIEZ82Q9IGim7FqM5OsebfoFC6ZHXgDq/KvniuLTPdeM8wY2o6Dj3WQ7KeQCj3cA==}
     engines: {node: '>=0.10.0'}

-  secure-json-parse@2.7.0:
-    resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==}
+  secure-json-parse@4.1.0:
+    resolution: {integrity: sha512-l4KnYfEyqYJxDwlNVyRfO2E4NTHfMKAWdUuA8J0yve2Dz/E/PdBepY03RvyJpssIpRFwJoCD55wA+mEDs6ByWA==}

   selderee@0.11.0:
     resolution: {integrity: sha512-5TF+l7p4+OsnP8BCCvSyZiSPc4x4//p5uPwK8TCnVPJYRmU2aYKMpOXvw8zM5a5JvuuCGN1jmsMwuU2W02ukfA==}
@@ -9004,6 +9000,9 @@
     resolution: {integrity: sha512-iOBWFgUX7caIZiuutICxVgX1SdxwAVFFKwt1EvMYYec/NWO5meOJ6K5uQxhrYBdQJne4KxiqZc+KptFOWFSI9w==}
     engines: {node: '>=18'}

+  slow-redact@0.3.1:
+    resolution: {integrity: sha512-NvFvl1GuLZNW4U046Tfi8b26zXo8aBzgCAS2f7yVJR/fArN93mOqSA99cB9uITm92ajSz01bsu1K7SCVVjIMpQ==}
+
   smart-buffer@4.2.0:
     resolution: {integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==}
     engines: {node: '>= 6.0.0', npm: '>= 3.0.0'}
@@ -9207,6 +9206,10 @@
     resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==}
     engines: {node: '>=8'}

+  strip-json-comments@5.0.3:
+    resolution: {integrity: sha512-1tB5mhVo7U+ETBKNf92xT4hrQa3pm0MZ0PQvuDnWgAAGHDsfp4lPSpiS6psrSiet87wyGPh9ft6wmhOMQ0hDiw==}
+    engines: {node: '>=14.16'}
+
   stripe@16.12.0:
     resolution: {integrity: sha512-H7eFVLDxeTNNSn4JTRfL2//LzCbDrMSZ+2q1c7CanVWgK2qIW5TwS+0V7N9KcKZZNpYh/uCqK0PyZh/2UsaAtQ==}
     engines: {node: '>=12.*'}
@@ -17290,8 +17293,6 @@

   fast-levenshtein@2.0.6: {}

-  fast-redact@3.5.0: {}
-
   fast-safe-stringify@2.1.1: {}

   fast-shallow-equal@1.0.0: {}
@@ -19074,7 +19075,7 @@
     dependencies:
       split2: 4.2.0

-  pino-pretty@13.0.0:
+  pino-pretty@13.1.1:
     dependencies:
       colorette: 2.0.20
       dateformat: 4.6.3
@@ -19086,23 +19087,23 @@
       on-exit-leak-free: 2.1.2
       pino-abstract-transport: 2.0.0
       pump: 3.0.3
-      secure-json-parse: 2.7.0
+      secure-json-parse: 4.1.0
       sonic-boom: 4.2.0
-      strip-json-comments: 3.1.1
+      strip-json-comments: 5.0.3

   pino-std-serializers@7.0.0: {}

-  pino@9.6.0:
+  pino@10.0.0:
     dependencies:
       atomic-sleep: 1.0.0
-      fast-redact: 3.5.0
       on-exit-leak-free: 2.1.2
       pino-abstract-transport: 2.0.0
       pino-std-serializers: 7.0.0
-      process-warning: 4.0.1
+      process-warning: 5.0.0
       quick-format-unescaped: 4.0.4
       real-require: 0.2.0
       safe-stable-stringify: 2.5.0
+      slow-redact: 0.3.1
       sonic-boom: 4.2.0
       thread-stream: 3.1.0
@@ -19288,7 +19289,7 @@

   process-nextick-args@2.0.1: {}

-  process-warning@4.0.1: {}
+  process-warning@5.0.0: {}

   process@0.11.10: {}
@@ -19830,7 +19831,7 @@

   screenfull@5.2.0: {}

-  secure-json-parse@2.7.0: {}
+  secure-json-parse@4.1.0: {}

   selderee@0.11.0:
     dependencies:
@@ -20016,6 +20017,8 @@
       ansi-styles: 6.2.3
       is-fullwidth-code-point: 5.1.0

+  slow-redact@0.3.1: {}
+
   smart-buffer@4.2.0:
     optional: true
@@ -20265,6 +20268,8 @@

   strip-json-comments@3.1.1: {}

+  strip-json-comments@5.0.3: {}
+
   stripe@16.12.0:
     dependencies:
       '@types/node': 22.15.18