chore: removed integrity hash chain from audit logging (#6202)

Dhruwang Jariwala
2025-07-10 16:13:57 +05:30
committed by GitHub
parent 4e52556f7e
commit 599e847686
12 changed files with 23 additions and 406 deletions

View File

@@ -219,7 +219,7 @@ UNKEY_ROOT_KEY=
# Configure the maximum age for the session in seconds. Default is 86400 (24 hours)
# SESSION_MAX_AGE=86400
# Audit logs options. Requires REDIS_URL env variable. Default 0.
# Audit logs options. Default 0.
# AUDIT_LOG_ENABLED=0
# If the ip should be added in the log or not. Default 0
# AUDIT_LOG_GET_USER_IP=0

View File

@@ -297,11 +297,6 @@ export const PROMETHEUS_ENABLED = env.PROMETHEUS_ENABLED === "1";
export const USER_MANAGEMENT_MINIMUM_ROLE = env.USER_MANAGEMENT_MINIMUM_ROLE ?? "manager";
export const AUDIT_LOG_ENABLED =
env.AUDIT_LOG_ENABLED === "1" &&
env.REDIS_URL &&
env.REDIS_URL !== "" &&
env.ENCRYPTION_KEY &&
env.ENCRYPTION_KEY !== ""; // The audit log requires Redis to be configured
export const AUDIT_LOG_ENABLED = env.AUDIT_LOG_ENABLED === "1";
export const AUDIT_LOG_GET_USER_IP = env.AUDIT_LOG_GET_USER_IP === "1";
export const SESSION_MAX_AGE = Number(env.SESSION_MAX_AGE) || 86400;
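For context, the simplified flag above keeps the repo's string-comparison pattern for boolean environment variables; a minimal standalone sketch of that behavior (using `process.env` directly as a stand-in for the validated `env` object in the real module):

```ts
// Only the exact string "1" enables a feature; "0", "", and an unset variable all disable it.
const flag = (value: string | undefined): boolean => value === "1";

const AUDIT_LOG_ENABLED = flag(process.env.AUDIT_LOG_ENABLED);
const AUDIT_LOG_GET_USER_IP = flag(process.env.AUDIT_LOG_GET_USER_IP);

console.log({ AUDIT_LOG_ENABLED, AUDIT_LOG_GET_USER_IP });
```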

View File

@@ -1,113 +0,0 @@
import redis from "@/modules/cache/redis";
import { afterAll, beforeEach, describe, expect, test, vi } from "vitest";
import {
  AUDIT_LOG_HASH_KEY,
  getPreviousAuditLogHash,
  runAuditLogHashTransaction,
  setPreviousAuditLogHash,
} from "./cache";
// Mock redis module
vi.mock("@/modules/cache/redis", () => {
let store: Record<string, string | null> = {};
return {
default: {
del: vi.fn(async (key: string) => {
store[key] = null;
return 1;
}),
quit: vi.fn(async () => {
return "OK";
}),
get: vi.fn(async (key: string) => {
return store[key] ?? null;
}),
set: vi.fn(async (key: string, value: string) => {
store[key] = value;
return "OK";
}),
watch: vi.fn(async (_key: string) => {
return "OK";
}),
unwatch: vi.fn(async () => {
return "OK";
}),
multi: vi.fn(() => {
return {
set: vi.fn(function (key: string, value: string) {
store[key] = value;
return this;
}),
exec: vi.fn(async () => {
return [[null, "OK"]];
}),
} as unknown as import("ioredis").ChainableCommander;
}),
},
};
});
describe("audit log cache utils", () => {
beforeEach(async () => {
await redis?.del(AUDIT_LOG_HASH_KEY);
});
afterAll(async () => {
await redis?.quit();
});
test("should get and set the previous audit log hash", async () => {
expect(await getPreviousAuditLogHash()).toBeNull();
await setPreviousAuditLogHash("testhash");
expect(await getPreviousAuditLogHash()).toBe("testhash");
});
test("should run a successful audit log hash transaction", async () => {
let logCalled = false;
await runAuditLogHashTransaction(async (previousHash) => {
expect(previousHash).toBeNull();
return {
auditEvent: async () => {
logCalled = true;
},
integrityHash: "hash1",
};
});
expect(await getPreviousAuditLogHash()).toBe("hash1");
expect(logCalled).toBe(true);
});
test("should retry and eventually throw if the hash keeps changing", async () => {
// Simulate another process changing the hash every time
let callCount = 0;
const originalMulti = redis?.multi;
(redis?.multi as any).mockImplementation(() => {
return {
set: vi.fn(function () {
return this;
}),
exec: vi.fn(async () => {
callCount++;
return null; // Simulate transaction failure
}),
} as unknown as import("ioredis").ChainableCommander;
});
let errorCaught = false;
try {
await runAuditLogHashTransaction(async () => {
return {
auditEvent: async () => {},
integrityHash: "conflict-hash",
};
});
throw new Error("Error was not thrown by runAuditLogHashTransaction");
} catch (e) {
errorCaught = true;
expect((e as Error).message).toContain("Failed to update audit log hash after multiple retries");
}
expect(errorCaught).toBe(true);
expect(callCount).toBe(5);
// Restore
(redis?.multi as any).mockImplementation(originalMulti);
});
});

View File

@@ -1,67 +0,0 @@
import redis from "@/modules/cache/redis";
import { logger } from "@formbricks/logger";
export const AUDIT_LOG_HASH_KEY = "audit:lastHash";
export async function getPreviousAuditLogHash(): Promise<string | null> {
  if (!redis) {
    logger.error("Redis is not initialized");
    return null;
  }
  return (await redis.get(AUDIT_LOG_HASH_KEY)) ?? null;
}
export async function setPreviousAuditLogHash(hash: string): Promise<void> {
  if (!redis) {
    logger.error("Redis is not initialized");
    return;
  }
  await redis.set(AUDIT_LOG_HASH_KEY, hash);
}
/**
 * Runs a concurrency-safe Redis transaction for the audit log hash chain.
 * The callback receives the previous hash and should return the audit event to log.
 * Handles retries and atomicity.
 */
export async function runAuditLogHashTransaction(
  buildAndLogEvent: (previousHash: string | null) => Promise<{ auditEvent: any; integrityHash: string }>
): Promise<void> {
  let retry = 0;
  while (retry < 5) {
    if (!redis) {
      logger.error("Redis is not initialized");
      throw new Error("Redis is not initialized");
    }
    let result;
    let auditEvent;
    try {
      await redis.watch(AUDIT_LOG_HASH_KEY);
      const previousHash = await getPreviousAuditLogHash();
      const buildResult = await buildAndLogEvent(previousHash);
      auditEvent = buildResult.auditEvent;
      const integrityHash = buildResult.integrityHash;
      const tx = redis.multi();
      tx.set(AUDIT_LOG_HASH_KEY, integrityHash);
      result = await tx.exec();
    } finally {
      await redis.unwatch();
    }
    if (result) {
      // Success: now log the audit event
      await auditEvent();
      return;
    }
    // Retry if the hash was changed by another process
    retry++;
  }
  // Debug log for test diagnostics
  // eslint-disable-next-line no-console
  console.error("runAuditLogHashTransaction: throwing after 5 retries");
  throw new Error("Failed to update audit log hash after multiple retries (concurrency issue)");
}

View File

@@ -5,8 +5,6 @@ import * as OriginalHandler from "./handler";
// Use 'var' for all mock handles used in vi.mock factories to avoid hoisting/TDZ issues
var serviceLogAuditEventMockHandle: ReturnType<typeof vi.fn>; // NOSONAR / test code
var cacheRunAuditLogHashTransactionMockHandle: ReturnType<typeof vi.fn>; // NOSONAR / test code
var utilsComputeAuditLogHashMockHandle: ReturnType<typeof vi.fn>; // NOSONAR / test code
var loggerErrorMockHandle: ReturnType<typeof vi.fn>; // NOSONAR / test code
// Use 'var' for mutableConstants due to hoisting issues with vi.mock factories
@@ -23,7 +21,6 @@ vi.mock("@/lib/constants", () => ({
return mutableConstants ? mutableConstants.AUDIT_LOG_ENABLED : true; // Default to true if somehow undefined
},
AUDIT_LOG_GET_USER_IP: true,
ENCRYPTION_KEY: "testsecret",
}));
vi.mock("@/lib/utils/client-ip", () => ({
getClientIpFromHeaders: vi.fn().mockResolvedValue("127.0.0.1"),
@@ -35,19 +32,10 @@ vi.mock("@/modules/ee/audit-logs/lib/service", () => {
return { logAuditEvent: mock };
});
vi.mock("./cache", () => {
const mock = vi.fn((fn) => fn(null).then((res: any) => res.auditEvent())); // Keep original mock logic
cacheRunAuditLogHashTransactionMockHandle = mock;
return { runAuditLogHashTransaction: mock };
});
vi.mock("./utils", async () => {
const actualUtils = await vi.importActual("./utils");
const mock = vi.fn();
utilsComputeAuditLogHashMockHandle = mock;
return {
...(actualUtils as object),
computeAuditLogHash: mock, // This is the one we primarily care about controlling
redactPII: vi.fn((obj) => obj), // Keep others as simple mocks or actuals if needed
deepDiff: vi.fn((a, b) => ({ diff: true })),
};
@@ -139,12 +127,6 @@ const mockCtxBase = {
// Helper to clear all mock handles
function clearAllMockHandles() {
if (serviceLogAuditEventMockHandle) serviceLogAuditEventMockHandle.mockClear().mockResolvedValue(undefined);
if (cacheRunAuditLogHashTransactionMockHandle)
cacheRunAuditLogHashTransactionMockHandle
.mockClear()
.mockImplementation((fn) => fn(null).then((res: any) => res.auditEvent()));
if (utilsComputeAuditLogHashMockHandle)
utilsComputeAuditLogHashMockHandle.mockClear().mockReturnValue("testhash");
if (loggerErrorMockHandle) loggerErrorMockHandle.mockClear();
if (mutableConstants) {
// Check because it's a var and could be re-assigned (though not in this code)
@@ -164,25 +146,23 @@ describe("queueAuditEvent", () => {
await OriginalHandler.queueAuditEvent(baseEventParams);
// Now, OriginalHandler.queueAuditEvent will call the REAL OriginalHandler.buildAndLogAuditEvent
// We expect the MOCKED dependencies of buildAndLogAuditEvent to be called.
expect(cacheRunAuditLogHashTransactionMockHandle).toHaveBeenCalled();
expect(serviceLogAuditEventMockHandle).toHaveBeenCalled();
// Add more specific assertions on what serviceLogAuditEventMockHandle was called with if necessary
// This would be similar to the direct tests for buildAndLogAuditEvent
const logCall = serviceLogAuditEventMockHandle.mock.calls[0][0];
expect(logCall.action).toBe(baseEventParams.action);
expect(logCall.integrityHash).toBe("testhash");
});
test("handles errors from buildAndLogAuditEvent dependencies", async () => {
const testError = new Error("DB hash error in test");
cacheRunAuditLogHashTransactionMockHandle.mockImplementationOnce(() => {
const testError = new Error("Service error in test");
serviceLogAuditEventMockHandle.mockImplementationOnce(() => {
throw testError;
});
await OriginalHandler.queueAuditEvent(baseEventParams);
// queueAuditEvent should catch errors from buildAndLogAuditEvent and log them
// buildAndLogAuditEvent in turn logs errors from its dependencies
expect(loggerErrorMockHandle).toHaveBeenCalledWith(testError, "Failed to create audit log event");
expect(serviceLogAuditEventMockHandle).not.toHaveBeenCalled();
expect(serviceLogAuditEventMockHandle).toHaveBeenCalled();
});
});
@@ -197,11 +177,9 @@ describe("queueAuditEventBackground", () => {
test("correctly processes event in background and dependencies are called", async () => {
await OriginalHandler.queueAuditEventBackground(baseEventParams);
await new Promise(setImmediate); // Wait for setImmediate to run
expect(cacheRunAuditLogHashTransactionMockHandle).toHaveBeenCalled();
expect(serviceLogAuditEventMockHandle).toHaveBeenCalled();
const logCall = serviceLogAuditEventMockHandle.mock.calls[0][0];
expect(logCall.action).toBe(baseEventParams.action);
expect(logCall.integrityHash).toBe("testhash");
});
});
@@ -226,7 +204,6 @@ describe("withAuditLogging", () => {
expect(callArgs.action).toBe("created");
expect(callArgs.status).toBe("success");
expect(callArgs.target.id).toBe("t1");
expect(callArgs.integrityHash).toBe("testhash");
});
test("logs audit event for failed handler and throws", async () => {

View File

@@ -13,12 +13,11 @@ import {
} from "@/modules/ee/audit-logs/types/audit-log";
import { getIsAuditLogsEnabled } from "@/modules/ee/license-check/lib/utils";
import { logger } from "@formbricks/logger";
import { runAuditLogHashTransaction } from "./cache";
import { computeAuditLogHash, deepDiff, redactPII } from "./utils";
import { deepDiff, redactPII } from "./utils";
/**
* Builds an audit event and logs it.
* Redacts sensitive data from the old and new objects and computes the hash of the event before logging it.
* Redacts sensitive data from the old and new objects before logging.
*/
export const buildAndLogAuditEvent = async ({
action,
@@ -63,7 +62,7 @@ export const buildAndLogAuditEvent = async ({
changes = redactPII(oldObject);
}
const eventBase: Omit<TAuditLogEvent, "integrityHash" | "previousHash" | "chainStart"> = {
const auditEvent: TAuditLogEvent = {
actor: { id: userId, type: userType },
action,
target: { id: targetId, type: targetType },
@@ -76,20 +75,7 @@ export const buildAndLogAuditEvent = async ({
...(status === "failure" && eventId ? { eventId } : {}),
};
await runAuditLogHashTransaction(async (previousHash) => {
const isChainStart = !previousHash;
const integrityHash = computeAuditLogHash(eventBase, previousHash);
const auditEvent: TAuditLogEvent = {
...eventBase,
integrityHash,
previousHash,
...(isChainStart ? { chainStart: true } : {}),
};
return {
auditEvent: async () => await logAuditEvent(auditEvent),
integrityHash,
};
});
await logAuditEvent(auditEvent);
} catch (logError) {
logger.error(logError, "Failed to create audit log event");
}

View File

@@ -19,9 +19,6 @@ const validEvent = {
status: "success" as const,
timestamp: new Date().toISOString(),
organizationId: "org-1",
integrityHash: "hash",
previousHash: null,
chainStart: true,
};
describe("logAuditEvent", () => {

View File

@@ -183,118 +183,3 @@ describe("withAuditLogging", () => {
expect(handler).toHaveBeenCalled();
});
});
describe("runtime config checks", () => {
test("throws if AUDIT_LOG_ENABLED is true and ENCRYPTION_KEY is missing", async () => {
// Unset the secret and reload the module
process.env.ENCRYPTION_KEY = "";
vi.resetModules();
vi.doMock("@/lib/constants", () => ({
AUDIT_LOG_ENABLED: true,
AUDIT_LOG_GET_USER_IP: true,
ENCRYPTION_KEY: undefined,
}));
await expect(import("./utils")).rejects.toThrow(
/ENCRYPTION_KEY must be set when AUDIT_LOG_ENABLED is enabled/
);
// Restore for other tests
process.env.ENCRYPTION_KEY = "testsecret";
vi.resetModules();
vi.doMock("@/lib/constants", () => ({
AUDIT_LOG_ENABLED: true,
AUDIT_LOG_GET_USER_IP: true,
ENCRYPTION_KEY: "testsecret",
}));
});
});
describe("computeAuditLogHash", () => {
let utils: any;
beforeEach(async () => {
vi.unmock("crypto");
utils = await import("./utils");
});
test("produces deterministic hash for same input", () => {
const event = {
actor: { id: "u1", type: "user" },
action: "survey.created",
target: { id: "t1", type: "survey" },
timestamp: "2024-01-01T00:00:00.000Z",
organizationId: "org1",
status: "success",
ipAddress: "127.0.0.1",
apiUrl: "/api/test",
};
const hash1 = utils.computeAuditLogHash(event, null);
const hash2 = utils.computeAuditLogHash(event, null);
expect(hash1).toBe(hash2);
});
test("hash changes if previous hash changes", () => {
const event = {
actor: { id: "u1", type: "user" },
action: "survey.created",
target: { id: "t1", type: "survey" },
timestamp: "2024-01-01T00:00:00.000Z",
organizationId: "org1",
status: "success",
ipAddress: "127.0.0.1",
apiUrl: "/api/test",
};
const hash1 = utils.computeAuditLogHash(event, "prev1");
const hash2 = utils.computeAuditLogHash(event, "prev2");
expect(hash1).not.toBe(hash2);
});
});
describe("buildAndLogAuditEvent", () => {
let buildAndLogAuditEvent: any;
let redis: any;
let logAuditEvent: any;
beforeEach(async () => {
vi.resetModules();
(globalThis as any).__logAuditEvent = vi.fn().mockResolvedValue(undefined);
vi.mock("@/modules/cache/redis", () => ({
default: {
watch: vi.fn().mockResolvedValue("OK"),
multi: vi.fn().mockReturnValue({
set: vi.fn(),
exec: vi.fn().mockResolvedValue([["OK"]]),
}),
get: vi.fn().mockResolvedValue(null),
},
}));
vi.mock("@/lib/constants", () => ({
AUDIT_LOG_ENABLED: true,
AUDIT_LOG_GET_USER_IP: true,
ENCRYPTION_KEY: "testsecret",
}));
({ buildAndLogAuditEvent } = await import("./handler"));
redis = (await import("@/modules/cache/redis")).default;
logAuditEvent = (globalThis as any).__logAuditEvent;
});
afterEach(() => {
delete (globalThis as any).__logAuditEvent;
});
test("retries and logs error if hash update fails", async () => {
redis.multi.mockReturnValue({
set: vi.fn(),
exec: vi.fn().mockResolvedValue(null),
});
await buildAndLogAuditEvent({
actionType: "survey.created",
targetType: "survey",
userId: "u1",
userType: "user",
targetId: "t1",
organizationId: "org1",
ipAddress: "127.0.0.1",
status: "success",
oldObject: { foo: "bar" },
newObject: { foo: "baz" },
apiUrl: "/api/test",
});
expect(logAuditEvent).not.toHaveBeenCalled();
// The error is caught and logged, not thrown
});
});

View File

@@ -1,8 +1,3 @@
import { AUDIT_LOG_ENABLED, ENCRYPTION_KEY } from "@/lib/constants";
import { TAuditLogEvent } from "@/modules/ee/audit-logs/types/audit-log";
import { createHash } from "crypto";
import { logger } from "@formbricks/logger";
const SENSITIVE_KEYS = [
"email",
"name",
@@ -41,31 +36,6 @@ const SENSITIVE_KEYS = [
"fileName",
];
/**
 * Computes the hash of the audit log event using the SHA256 algorithm.
 * @param event - The audit log event.
 * @param prevHash - The previous hash of the audit log event.
 * @returns The hash of the audit log event. The hash is computed by concatenating the secret, the previous hash, and the event and then hashing the result.
 */
export const computeAuditLogHash = (
  event: Omit<TAuditLogEvent, "integrityHash" | "previousHash" | "chainStart">,
  prevHash: string | null
): string => {
  let secret = ENCRYPTION_KEY;
  if (!secret) {
    // Log an error but don't throw an error to avoid blocking the main request
    logger.error(
      "ENCRYPTION_KEY is not set, creating audit log hash without it. Please set ENCRYPTION_KEY in the environment variables to avoid security issues."
    );
    secret = "";
  }
  const hash = createHash("sha256");
  hash.update(secret + (prevHash ?? "") + JSON.stringify(event));
  return hash.digest("hex");
};
/**
* Redacts sensitive data from the object by replacing the sensitive keys with "********".
* @param obj - The object to redact.
@@ -120,9 +90,3 @@ export const deepDiff = (oldObj: any, newObj: any): any => {
}
return Object.keys(diff).length > 0 ? diff : undefined;
};
if (AUDIT_LOG_ENABLED && !ENCRYPTION_KEY) {
throw new Error(
"ENCRYPTION_KEY must be set when AUDIT_LOG_ENABLED is enabled. Refusing to start for security reasons."
);
}

View File

@@ -79,9 +79,6 @@ export const ZAuditLogEventSchema = z.object({
changes: z.record(z.any()).optional(),
eventId: z.string().optional(),
apiUrl: z.string().url().optional(),
integrityHash: z.string(),
previousHash: z.string().nullable(),
chainStart: z.boolean().optional(),
});
export type TAuditLogEvent = z.infer<typeof ZAuditLogEventSchema>;
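To illustrate what the trimmed event shape accepts, here is a hedged sketch that validates a minimal entry against an abbreviated stand-in for the schema (field list shortened; only fields visible in this diff and the docs example are included):

```ts
import { z } from "zod";

// Abbreviated stand-in for ZAuditLogEventSchema after this change (no hash-chain fields).
const ZAuditLogEventSketch = z.object({
  actor: z.object({ id: z.string(), type: z.string() }),
  action: z.string(),
  target: z.object({ id: z.string(), type: z.string() }),
  timestamp: z.string(),
  organizationId: z.string(),
  status: z.enum(["success", "failure"]),
  apiUrl: z.string().url().optional(),
});

// An entry without integrityHash / previousHash / chainStart now parses cleanly.
const event = ZAuditLogEventSketch.parse({
  actor: { id: "u1", type: "user" },
  action: "survey.created",
  target: { id: "t1", type: "survey" },
  timestamp: new Date().toISOString(),
  organizationId: "org-1",
  status: "success",
});
console.log(event.action);
```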

View File

@@ -183,8 +183,8 @@ x-environment: &environment
########################################## OPTIONAL (AUDIT LOGGING) ###########################################
# Set the below to 1 to enable audit logging. The audit log requires Redis to be configured with the REDIS_URL env variable.
# AUDIT_LOG_ENABLED: 1
# Set the below to 1 to enable audit logging.
# AUDIT_LOG_ENABLED: 1
# Set the below to get the ip address of the user from the request headers
# AUDIT_LOG_GET_USER_IP: 1

View File

@@ -1,7 +1,7 @@
---
title: Audit Logging
sidebarTitle: Audit Logging
description: Enable and use tamper-evident audit logs for your Formbricks instance.
description: Enable comprehensive audit logs for your Formbricks instance.
icon: file-shield
---
@@ -16,15 +16,7 @@ Audit logs record **who** did **what**, **when**, **from where**, and **with wha
- **Compliance readiness** — Many regulatory frameworks such as GDPR and SOC 2 require immutable records of user activity.
- **Security investigation support** — Audit logs provide clear visibility into user and system actions, helping teams respond quickly and confidently during security incidents.
- **Operational accountability** — Track changes across the system to answer common questions like "_who modified this?_ or "_when was this deleted?_".
---
## Prerequisites
| Requirement | Notes |
|-------------|-------|
| **`redis`** | Used internally to guarantee integrity under concurrency. |
- **Operational accountability** — Track changes across the system to answer common questions like "_who modified this?_" or "_when was this deleted?_".
---
@@ -35,8 +27,6 @@ Audit logs record **who** did **what**, **when**, **from where**, and **with wha
```bash title=".env"
# --- Audit logging ---
AUDIT_LOG_ENABLED=1
ENCRYPTION_KEY=your_encryption_key_here # required for integrity hashes and authentication logs
REDIS_URL=redis://redis:6379 # existing Redis instance
AUDIT_LOG_GET_USER_IP=1 # set to 1 to include user IP address in audit logs, 0 to omit (default: 0)
```
@@ -52,7 +42,7 @@ Audit logs are printed to **stdout** as JSON Lines format, making them easily ac
Audit logs are **JSON Lines** (one JSON object per line). A typical entry looks like this:
```json
{"level":"audit","time":1749207302158,"pid":20023,"hostname":"Victors-MacBook-Pro.local","name":"formbricks","actor":{"id":"cm90t4t7l0000vrws5hpo5ta5","type":"api"},"action":"created","target":{"id":"cmbkov4dn0000vrg72i7oznqv","type":"webhook"},"timestamp":"2025-06-06T10:55:02.145Z","organizationId":"cm8zovtbm0001vr3efa4n03ms","status":"success","ipAddress":"unknown","apiUrl":"http://localhost:3000/api/v1/webhooks","changes":{"id":"cmbkov4dn0000vrg72i7oznqv","name":"********","createdAt":"2025-06-06T10:55:02.123Z","updatedAt":"2025-06-06T10:55:02.123Z","url":"https://eoy8o887lmsqmhz.m.pipedream.net","source":"user","environmentId":"cm8zowv0b0009vr3ec56w2qf3","triggers":["responseCreated","responseUpdated","responseFinished"],"surveyIds":[]},"integrityHash":"eefa760bf03572c32d8caf7d5012d305bcea321d08b1929781b8c7e537f22aed","previousHash":"f6bc014e835be5499f2b3a0475ed6ec8b97903085059ff8482b16ab5bfd34062"}
{"level":"audit","time":1749207302158,"pid":20023,"hostname":"Victors-MacBook-Pro.local","name":"formbricks","actor":{"id":"cm90t4t7l0000vrws5hpo5ta5","type":"api"},"action":"created","target":{"id":"cmbkov4dn0000vrg72i7oznqv","type":"webhook"},"timestamp":"2025-06-06T10:55:02.145Z","organizationId":"cm8zovtbm0001vr3efa4n03ms","status":"success","ipAddress":"unknown","apiUrl":"http://localhost:3000/api/v1/webhooks","changes":{"id":"cmbkov4dn0000vrg72i7oznqv","name":"********","createdAt":"2025-06-06T10:55:02.123Z","updatedAt":"2025-06-06T10:55:02.123Z","url":"https://eoy8o887lmsqmhz.m.pipedream.net","source":"user","environmentId":"cm8zowv0b0009vr3ec56w2qf3","triggers":["responseCreated","responseUpdated","responseFinished"],"surveyIds":[]}}
```
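If you spot-check these lines yourself rather than in a logging platform, a minimal stdin filter could look like the sketch below (Node.js `readline`; the `"created"` action filter is just an example value):

```ts
import * as readline from "node:readline";

// Read JSON Lines from stdin and print only audit entries for one action type.
const rl = readline.createInterface({ input: process.stdin });

rl.on("line", (line) => {
  try {
    const entry = JSON.parse(line);
    if (entry.level === "audit" && entry.action === "created") {
      console.log(`${entry.timestamp} ${entry.actor?.id} ${entry.target?.type}/${entry.target?.id}`);
    }
  } catch {
    // Not a JSON line (e.g. plain text log output); skip it.
  }
});
```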
Key fields:
@@ -74,12 +64,18 @@ Key fields:
| `apiUrl` | (Optional) API endpoint URL if the log was generated through an API call |
| `eventId` | (Optional) Available on error logs. You can use it to refer to the system log with this eventId for more details on the error |
| `changes` | (Optional) Only the fields that actually changed (sensitive values redacted) |
| `integrityHash` | SHA256 hash chaining the entry to the previous one |
| `previousHash` | SHA256 hash of the previous audit log entry for chain integrity |
| `chainStart` | (Optional) Boolean indicating if this is the start of a new audit chain |
---
## Centralized logging and compliance
Formbricks audit logs are designed to work with modern centralized logging architectures:
- **Stdout delivery**: Logs are written to stdout for immediate collection by log forwarding agents
- **Centralized integrity**: Log integrity and immutability are handled by your centralized logging platform (ELK Stack, Splunk, CloudWatch, etc.)
- **Platform-level security**: Access controls and tamper detection are provided by your logging infrastructure
- **SOC 2 compliance**: Most SOC 2 auditors accept centralized logging without application-level integrity mechanisms
## Additional details
- **Redacted secrets:** Sensitive fields (emails, access tokens, passwords…) are replaced with `"********"` before being written.
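For illustration, redaction of this kind can be sketched as a recursive key check. This is illustrative only, not the actual `redactPII` implementation, and the key list is abbreviated:

```ts
// Abbreviated, hypothetical key list; the real SENSITIVE_KEYS list is much longer.
const SENSITIVE = new Set(["email", "name", "password", "apiKey", "fileName"]);

// Recursively replace values of sensitive keys with the redaction marker.
const redact = (value: unknown): unknown => {
  if (Array.isArray(value)) return value.map(redact);
  if (value && typeof value === "object") {
    return Object.fromEntries(
      Object.entries(value as Record<string, unknown>).map(([key, v]) =>
        SENSITIVE.has(key) ? [key, "********"] : [key, redact(v)]
      )
    );
  }
  return value;
};

console.log(redact({ name: "Jane", settings: { email: "jane@example.com", theme: "dark" } }));
// => { name: "********", settings: { email: "********", theme: "dark" } }
```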