feat: make cubejs mandatory for xm suite v5 (#7913)

This commit is contained in:
Bhagya Amarasinghe
2026-04-30 14:34:50 +05:30
committed by GitHub
30 changed files with 569 additions and 196 deletions
+14 -17
View File
@@ -65,6 +65,14 @@ HUB_API_KEY=dev-api-key
HUB_API_URL=http://localhost:8080
HUB_DATABASE_URL=postgresql://postgres:postgres@postgres:5432/postgres?sslmode=disable
###########################
# CUBE ANALYTICS (XM V5) #
###########################
# XM Suite v5 analysis features require Cube.js. The local dev stack exposes Cube on port 4000.
CUBEJS_API_URL=http://localhost:4000
# Generate with: openssl rand -hex 32. `pnpm dev:setup` will create/preserve this automatically.
CUBEJS_API_SECRET=
################
# MAIL SETUP #
################
@@ -296,24 +304,13 @@ REDIS_URL=redis://localhost:6379
# AUDIT_LOG_GET_USER_IP=0
# Cube.js Analytics (optional — only needed for the analytics/dashboard feature)
# Required when running the Cube service (docker-compose.dev.yml). Generate with: openssl rand -hex 32
# Use the same value for CUBEJS_API_TOKEN so the client can authenticate.
# CUBEJS_API_SECRET=
# URL where the Cube.js instance is running
# CUBEJS_API_URL=http://localhost:4000
# API token sent with each Cube.js request; must match CUBEJS_API_SECRET when CUBEJS_DEV_MODE is off
# CUBEJS_API_TOKEN=
#
# Cube connects to the Hub DB. When using docker-compose.dev.yml with the hub network,
# use the container name and internal port. Hub credentials: formbricks/formbricks_dev, db: hub
# CUBEJS_DB_HOST=formbricks_hub_postgres
# Optional overrides for running Cube against a non-default database.
# The official local docker-compose.dev.yml stack points Cube at the local `postgres` service automatically.
# CUBEJS_DB_HOST=postgres
# CUBEJS_DB_PORT=5432
# CUBEJS_DB_NAME=hub
# CUBEJS_DB_USER=formbricks
# CUBEJS_DB_PASS=formbricks_dev
#
# Alternative (when not on same Docker network): host.docker.internal and port 5433
# CUBEJS_DB_NAME=postgres
# CUBEJS_DB_USER=postgres
# CUBEJS_DB_PASS=postgres
# Lingo.dev API key for translation generation
LINGO_API_KEY=your_api_key_here
+3 -5
View File
@@ -57,16 +57,14 @@ runs:
if: steps.cache-build.outputs.cache-hit != 'true'
shell: bash
- name: create .env
run: cp .env.example .env
- name: Create .env
run: pnpm dev:setup
shell: bash
- name: Fill ENCRYPTION_KEY, ENTERPRISE_LICENSE_KEY and E2E_TESTING in .env
- name: Fill E2E_TESTING in .env
env:
E2E_TESTING_MODE: ${{ inputs.e2e_testing_mode }}
run: |
RANDOM_KEY=$(openssl rand -hex 32)
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
echo "E2E_TESTING=$E2E_TESTING_MODE" >> .env
shell: bash
+3 -7
View File
@@ -68,16 +68,12 @@ jobs:
run: pnpm install --config.platform=linux --config.architecture=x64
shell: bash
- name: create .env
run: cp .env.example .env
- name: Create .env
run: pnpm dev:setup
shell: bash
- name: Fill ENCRYPTION_KEY, ENTERPRISE_LICENSE_KEY and E2E_TESTING in .env
- name: Fill ENTERPRISE_LICENSE_KEY and E2E_TESTING in .env
run: |
RANDOM_KEY=$(openssl rand -hex 32)
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
sed -i "s/ENTERPRISE_LICENSE_KEY=.*/ENTERPRISE_LICENSE_KEY=${{ secrets.ENTERPRISE_LICENSE_KEY }}/" .env
sed -i "s|REDIS_URL=.*|REDIS_URL=redis://localhost:6379|" .env
echo "" >> .env
+2 -9
View File
@@ -31,15 +31,8 @@ jobs:
- name: Install dependencies
run: pnpm install --config.platform=linux --config.architecture=x64
- name: create .env
run: cp .env.example .env
- name: Generate Random ENCRYPTION_KEY, CRON_SECRET & NEXTAUTH_SECRET and fill in .env
run: |
RANDOM_KEY=$(openssl rand -hex 32)
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
- name: Create .env
run: pnpm dev:setup
- name: Lint
run: pnpm lint
+3 -7
View File
@@ -35,15 +35,11 @@ jobs:
- name: Install dependencies
run: pnpm install --config.platform=linux --config.architecture=x64
- name: create .env
run: cp .env.example .env
- name: Create .env
run: pnpm dev:setup
- name: Generate Random ENCRYPTION_KEY, CRON_SECRET & NEXTAUTH_SECRET and fill in .env
- name: Adjust CI-specific env values
run: |
RANDOM_KEY=$(openssl rand -hex 32)
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
sed -i "s|REDIS_URL=.*|REDIS_URL=|" .env
- name: Run tests with coverage
+3 -7
View File
@@ -32,15 +32,11 @@ jobs:
- name: Install dependencies
run: pnpm install --config.platform=linux --config.architecture=x64
- name: create .env
run: cp .env.example .env
- name: Create .env
run: pnpm dev:setup
- name: Generate Random ENCRYPTION_KEY, CRON_SECRET & NEXTAUTH_SECRET and fill in .env
- name: Adjust CI-specific env values
run: |
RANDOM_KEY=$(openssl rand -hex 32)
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
sed -i "s|REDIS_URL=.*|REDIS_URL=|" .env
- name: Test
+35
View File
@@ -9,6 +9,8 @@ const setTestEnv = (overrides: Record<string, string | undefined> = {}) => {
DATABASE_URL: "https://example.com/db",
ENCRYPTION_KEY: "12345678901234567890123456789012",
HUB_API_URL: "https://hub.formbricks.local",
CUBEJS_API_URL: "https://cube.formbricks.local",
CUBEJS_API_SECRET: "cube-secret",
...overrides,
};
};
@@ -77,6 +79,39 @@ describe("env", () => {
expect(env.DEBUG_SHOW_RESET_LINK).toBe("1");
});
test("uses the configured Cube environment variables", async () => {
setTestEnv();
const { env } = await import("./env");
expect(env.CUBEJS_API_URL).toBe("https://cube.formbricks.local");
expect(env.CUBEJS_API_SECRET).toBe("cube-secret");
});
test("fails to load when the Cube API secret is missing", async () => {
setTestEnv({
CUBEJS_API_SECRET: undefined,
});
await expect(import("./env")).rejects.toThrow("Invalid environment variables");
});
test("fails to load when the Cube API URL is missing", async () => {
setTestEnv({
CUBEJS_API_URL: undefined,
});
await expect(import("./env")).rejects.toThrow("Invalid environment variables");
});
test("fails to load when the Cube API URL is invalid", async () => {
setTestEnv({
CUBEJS_API_URL: "not-a-url",
CUBEJS_API_SECRET: "cube-secret",
});
await expect(import("./env")).rejects.toThrow("Invalid environment variables");
});
test("uses the default survey scheduling configuration when env vars are not set", async () => {
setTestEnv({
NEXT_PUBLIC_SURVEY_SCHEDULING_LOCAL_HOUR: undefined,
+4
View File
@@ -194,6 +194,8 @@ const parsedEnv = createEnv({
AI_AZURE_API_KEY: z.string().optional(),
AI_AZURE_API_VERSION: z.string().optional(),
AI_AZURE_RESOURCE_NAME: z.string().optional(),
CUBEJS_API_SECRET: z.string().trim().min(1),
CUBEJS_API_URL: z.url(),
HTTP_PROXY: z.url().optional(),
HTTPS_PROXY: z.url().optional(),
HUB_API_URL: z.url(),
@@ -349,6 +351,8 @@ const parsedEnv = createEnv({
AI_AZURE_API_KEY: process.env.AI_AZURE_API_KEY,
AI_AZURE_API_VERSION: process.env.AI_AZURE_API_VERSION,
AI_AZURE_RESOURCE_NAME: process.env.AI_AZURE_RESOURCE_NAME,
CUBEJS_API_SECRET: process.env.CUBEJS_API_SECRET,
CUBEJS_API_URL: process.env.CUBEJS_API_URL,
HTTP_PROXY: process.env.HTTP_PROXY,
HTTPS_PROXY: process.env.HTTPS_PROXY,
HUB_API_URL: process.env.HUB_API_URL,
@@ -5,11 +5,13 @@ import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
import {
AuthenticationError,
AuthorizationError,
ConfigurationError,
EXPECTED_ERROR_NAMES,
INVALID_PASSWORD_RESET_TOKEN_ERROR_CODE,
InvalidInputError,
InvalidPasswordResetTokenError,
OperationNotAllowedError,
QueryExecutionError,
ResourceNotFoundError,
TooManyRequestsError,
UnknownError,
@@ -72,6 +74,8 @@ describe("isExpectedError (shared helper)", () => {
"ValidationError",
"AuthenticationError",
"OperationNotAllowedError",
"ConfigurationError",
"QueryExecutionError",
"TooManyRequestsError",
"InvalidPasswordResetTokenError",
];
@@ -90,6 +94,8 @@ describe("isExpectedError (shared helper)", () => {
{ ErrorClass: InvalidInputError, args: ["Invalid input"] },
{ ErrorClass: ValidationError, args: ["Invalid data"] },
{ ErrorClass: OperationNotAllowedError, args: ["Not allowed"] },
{ ErrorClass: ConfigurationError, args: ["Cube is not configured"] },
{ ErrorClass: QueryExecutionError, args: ["Cube query failed. Details: connect ECONNREFUSED"] },
{ ErrorClass: InvalidPasswordResetTokenError, args: [INVALID_PASSWORD_RESET_TOKEN_ERROR_CODE] },
])("returns true for $ErrorClass.name", ({ ErrorClass, args }) => {
const error = new (ErrorClass as any)(...args);
@@ -179,6 +185,20 @@ describe("actionClient handleServerError", () => {
expect(Sentry.captureException).not.toHaveBeenCalled();
});
test("ConfigurationError returns its message and is not sent to Sentry", async () => {
const result = await executeThrowingAction(new ConfigurationError("Cube is not configured"));
expect(result?.serverError).toBe("Cube is not configured");
expect(Sentry.captureException).not.toHaveBeenCalled();
});
test("QueryExecutionError returns its message and is not sent to Sentry", async () => {
const result = await executeThrowingAction(
new QueryExecutionError("Cube query failed. Details: connect ECONNREFUSED")
);
expect(result?.serverError).toBe("Cube query failed. Details: connect ECONNREFUSED");
expect(Sentry.captureException).not.toHaveBeenCalled();
});
test("InvalidPasswordResetTokenError returns its message and is not sent to Sentry", async () => {
const result = await executeThrowingAction(
new InvalidPasswordResetTokenError(INVALID_PASSWORD_RESET_TOKEN_ERROR_CODE)
@@ -1,7 +1,14 @@
import { beforeEach, describe, expect, test, vi } from "vitest";
import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
vi.mock("server-only", () => ({}));
const mockLoad = vi.fn();
const mockTablePivot = vi.fn();
const globalForCube = globalThis as unknown as {
formbricksCubeClient: unknown;
formbricksCubeClientCacheKey: string | undefined;
formbricksCubeClientTokenExpiresAtMs: number | undefined;
};
vi.mock("@cubejs-client/core", () => ({
default: vi.fn(() => ({
@@ -11,13 +18,28 @@ vi.mock("@cubejs-client/core", () => ({
describe("executeQuery", () => {
beforeEach(() => {
vi.restoreAllMocks();
vi.clearAllMocks();
vi.resetModules();
vi.doUnmock("@/lib/env");
vi.stubEnv("NODE_ENV", "test");
vi.stubEnv("DATABASE_URL", "postgresql://postgres:postgres@localhost:5432/formbricks?schema=public");
vi.stubEnv("ENCRYPTION_KEY", "12345678901234567890123456789012");
vi.stubEnv("HUB_API_URL", "https://hub.formbricks.local");
vi.stubEnv("CUBEJS_API_URL", "https://cube.example.com");
vi.stubEnv("CUBEJS_API_SECRET", "cube-secret");
globalForCube.formbricksCubeClient = undefined;
globalForCube.formbricksCubeClientCacheKey = undefined;
globalForCube.formbricksCubeClientTokenExpiresAtMs = undefined;
const resultSet = { tablePivot: mockTablePivot };
mockLoad.mockResolvedValue(resultSet);
mockTablePivot.mockReturnValue([{ id: "1", count: 42 }]);
});
afterEach(() => {
vi.unstubAllEnvs();
});
test("loads query and returns tablePivot result", async () => {
const { executeQuery } = await import("./cube-client");
const query = { measures: ["FeedbackRecords.count"] };
@@ -40,4 +62,59 @@ describe("executeQuery", () => {
expect(cubejs).toHaveBeenCalledWith(expect.any(String), { apiUrl: fullUrl });
vi.unstubAllEnvs();
});
test("reuses the cached Cube client across queries while the JWT is still fresh", async () => {
let nowMs = 1_700_000_000_000;
vi.spyOn(Date, "now").mockImplementation(() => nowMs);
const { executeQuery } = await import("./cube-client");
await executeQuery({ measures: ["FeedbackRecords.count"] });
nowMs += 5 * 60 * 1000;
await executeQuery({ measures: ["FeedbackRecords.count"] });
// eslint-disable-next-line @typescript-eslint/no-require-imports
const cubejs = ((await vi.importMock("@cubejs-client/core")) as any).default;
expect(cubejs).toHaveBeenCalledTimes(1);
});
test("refreshes the cached Cube client once the JWT reaches the refresh window", async () => {
let nowMs = 1_700_000_000_000;
vi.spyOn(Date, "now").mockImplementation(() => nowMs);
const { CUBE_API_TOKEN_TTL_SECONDS } = await import("./cube-config");
const { executeQuery } = await import("./cube-client");
await executeQuery({ measures: ["FeedbackRecords.count"] });
nowMs += CUBE_API_TOKEN_TTL_SECONDS * 1000;
await executeQuery({ measures: ["FeedbackRecords.count"] });
// eslint-disable-next-line @typescript-eslint/no-require-imports
const cubejs = ((await vi.importMock("@cubejs-client/core")) as any).default;
expect(cubejs).toHaveBeenCalledTimes(2);
});
test("throws a configuration error when Cube env is missing", async () => {
vi.resetModules();
vi.unstubAllEnvs();
vi.doMock("@/lib/env", () => ({
env: {
CUBEJS_API_URL: undefined,
CUBEJS_API_SECRET: undefined,
},
}));
const { CUBE_CONFIGURATION_ERROR_MESSAGE } = await import("./cube-config");
const { executeQuery } = await import("./cube-client");
await expect(executeQuery({ measures: ["FeedbackRecords.count"] })).rejects.toThrow(
CUBE_CONFIGURATION_ERROR_MESSAGE
);
});
test("wraps Cube runtime failures in a query execution error with details", async () => {
mockLoad.mockRejectedValueOnce(new Error("connect ECONNREFUSED"));
const { executeQuery } = await import("./cube-client");
await expect(executeQuery({ measures: ["FeedbackRecords.count"] })).rejects.toThrow(
/Cube query failed\..*connect ECONNREFUSED/
);
});
});
@@ -1,26 +1,60 @@
import cubejs, { type CubeApi, type Query } from "@cubejs-client/core";
import { ConfigurationError, QueryExecutionError } from "@formbricks/types/errors";
import { createCubeApiToken, getCubeApiCredentials } from "./cube-config";
const getApiUrl = (): string => {
const baseUrl = process.env.CUBEJS_API_URL || "http://localhost:4000";
if (baseUrl.includes("/cubejs-api/v1")) {
return baseUrl;
}
return `${baseUrl.replace(/\/$/, "")}/cubejs-api/v1`;
const CUBE_QUERY_ERROR_MESSAGE =
"Cube query failed. Verify CUBEJS_API_URL and CUBEJS_API_SECRET, and ensure the Cube service is running.";
const CUBE_CLIENT_REFRESH_BUFFER_MS = 60 * 1000;
const globalForCube = globalThis as unknown as {
formbricksCubeClient: CubeApi | undefined;
formbricksCubeClientCacheKey: string | undefined;
formbricksCubeClientTokenExpiresAtMs: number | undefined;
};
let cubeClient: CubeApi | null = null;
let cubeClient: CubeApi | null = globalForCube.formbricksCubeClient ?? null;
let cubeClientCacheKey: string | null = globalForCube.formbricksCubeClientCacheKey ?? null;
let cubeClientTokenExpiresAtMs = globalForCube.formbricksCubeClientTokenExpiresAtMs ?? 0;
const isCachedClientReusable = (cacheKey: string, nowMs: number): boolean =>
cubeClient !== null &&
cubeClientCacheKey === cacheKey &&
cubeClientTokenExpiresAtMs > nowMs + CUBE_CLIENT_REFRESH_BUFFER_MS;
const cacheCubeClient = (client: CubeApi, cacheKey: string, tokenExpiresAtMs: number): CubeApi => {
cubeClient = client;
cubeClientCacheKey = cacheKey;
cubeClientTokenExpiresAtMs = tokenExpiresAtMs;
globalForCube.formbricksCubeClient = client;
globalForCube.formbricksCubeClientCacheKey = cacheKey;
globalForCube.formbricksCubeClientTokenExpiresAtMs = tokenExpiresAtMs;
return client;
};
function getCubeClient(): CubeApi {
if (!cubeClient) {
// TODO: This will fail silently if the token is not set. We need to fix this before going to production.
const token = process.env.CUBEJS_API_TOKEN ?? "";
cubeClient = cubejs(token, { apiUrl: getApiUrl() });
const { apiSecret, apiUrl } = getCubeApiCredentials();
const nowMs = Date.now();
const cacheKey = `${apiUrl}:${apiSecret}`;
if (isCachedClientReusable(cacheKey, nowMs)) {
return cubeClient as CubeApi;
}
return cubeClient;
const { token, tokenExpiresAtMs } = createCubeApiToken(apiSecret);
return cacheCubeClient(cubejs(token, { apiUrl }), cacheKey, tokenExpiresAtMs);
}
export async function executeQuery(query: Query) {
const client = getCubeClient();
const resultSet = await client.load(query);
return resultSet.tablePivot();
try {
const client = getCubeClient();
const resultSet = await client.load(query);
return resultSet.tablePivot();
} catch (error) {
if (error instanceof ConfigurationError) {
throw error;
}
const detail = error instanceof Error && error.message ? ` Details: ${error.message}` : "";
throw new QueryExecutionError(`${CUBE_QUERY_ERROR_MESSAGE}${detail}`);
}
}
@@ -0,0 +1,65 @@
import jwt from "jsonwebtoken";
import { beforeEach, describe, expect, test, vi } from "vitest";

// cube-config imports "server-only"; stub it so the module can load in a Node test runner.
vi.mock("server-only", () => ({}));

// Registers a mock for "@/lib/env" with sane Cube defaults; individual tests
// override single keys (including setting them to undefined) via `overrides`.
const mockCubeEnv = (overrides: { CUBEJS_API_URL?: string; CUBEJS_API_SECRET?: string } = {}) => {
  vi.doMock("@/lib/env", () => ({
    env: {
      CUBEJS_API_URL: "https://cube.formbricks.local",
      CUBEJS_API_SECRET: "cube-secret",
      ...overrides,
    },
  }));
};

describe("cube-config", () => {
  beforeEach(() => {
    // Reset the module registry so each test re-imports cube-config with its
    // own mocked env (vi.doMock only affects imports performed afterwards).
    vi.resetModules();
  });

  test("normalizes the Cube API URL and signs a JWT from CUBEJS_API_SECRET", async () => {
    mockCubeEnv();
    const { getCubeApiConfig } = await import("./cube-config");
    const config = getCubeApiConfig();
    expect(config.apiUrl).toBe("https://cube.formbricks.local/cubejs-api/v1");
    expect(config.tokenExpiresAtMs).toBeGreaterThan(Date.now());
    // Verifying with the same secret proves the token was signed with it;
    // the payload carries only the standard iat/exp claims.
    expect(jwt.verify(config.token, "cube-secret")).toEqual({
      exp: expect.any(Number),
      iat: expect.any(Number),
    });
  });

  test("preserves a full Cube API URL when it already contains /cubejs-api/v1", async () => {
    mockCubeEnv({
      CUBEJS_API_URL: "https://cube.formbricks.local/cubejs-api/v1",
    });
    const { getCubeApiConfig } = await import("./cube-config");
    expect(getCubeApiConfig().apiUrl).toBe("https://cube.formbricks.local/cubejs-api/v1");
  });

  test("throws a configuration error when CUBEJS_API_URL is missing", async () => {
    mockCubeEnv({
      CUBEJS_API_URL: undefined,
    });
    const { CUBE_CONFIGURATION_ERROR_MESSAGE, getCubeApiConfig } = await import("./cube-config");
    expect(() => getCubeApiConfig()).toThrow(CUBE_CONFIGURATION_ERROR_MESSAGE);
  });

  test("throws a configuration error when CUBEJS_API_SECRET is missing", async () => {
    mockCubeEnv({
      CUBEJS_API_SECRET: undefined,
    });
    const { CUBE_CONFIGURATION_ERROR_MESSAGE, getCubeApiConfig } = await import("./cube-config");
    expect(() => getCubeApiConfig()).toThrow(CUBE_CONFIGURATION_ERROR_MESSAGE);
  });
});
@@ -0,0 +1,50 @@
import "server-only";
import jwt from "jsonwebtoken";
import { ConfigurationError } from "@formbricks/types/errors";
import { env } from "@/lib/env";
// Surfaced to operators/actions when the mandatory Cube env vars are absent at runtime.
export const CUBE_CONFIGURATION_ERROR_MESSAGE =
  "Cube is not configured on this instance. Set CUBEJS_API_URL and CUBEJS_API_SECRET.";

// Lifetime of each signed Cube API JWT: one hour, in seconds.
export const CUBE_API_TOKEN_TTL_SECONDS = 60 * 60;
/**
 * Ensures a Cube base URL targets the REST API root (`/cubejs-api/v1`).
 *
 * URLs that already contain the API path are returned untouched; otherwise
 * the path is appended after dropping a single trailing slash so the result
 * never contains a double slash.
 */
export const normalizeCubeApiUrl = (baseUrl: string): string => {
  const apiPath = "/cubejs-api/v1";
  if (baseUrl.includes(apiPath)) {
    return baseUrl;
  }
  const trimmed = baseUrl.endsWith("/") ? baseUrl.slice(0, -1) : baseUrl;
  return trimmed + apiPath;
};
/**
 * Reads the mandatory Cube connection settings from the environment.
 *
 * @returns The normalized Cube REST API URL plus the shared signing secret.
 * @throws ConfigurationError when either CUBEJS_API_URL or CUBEJS_API_SECRET
 *         is unset, so callers surface a clear setup message instead of
 *         failing deep inside a query.
 */
export const getCubeApiCredentials = () => {
  const apiUrl = env.CUBEJS_API_URL;
  const apiSecret = env.CUBEJS_API_SECRET;
  if (!apiUrl || !apiSecret) {
    throw new ConfigurationError(CUBE_CONFIGURATION_ERROR_MESSAGE);
  }
  return { apiUrl: normalizeCubeApiUrl(apiUrl), apiSecret };
};
/**
 * Signs a short-lived HS256 JWT for authenticating against the Cube API.
 *
 * @param apiSecret - The CUBEJS_API_SECRET shared with the Cube service.
 * @returns The signed token together with its approximate expiry in epoch
 *          milliseconds (issue time + TTL), which callers use to decide
 *          when a cached client must be re-created.
 */
export const createCubeApiToken = (apiSecret: string) => {
  const issuedAtMs = Date.now();
  const token = jwt.sign({}, apiSecret, {
    algorithm: "HS256",
    expiresIn: CUBE_API_TOKEN_TTL_SECONDS,
  });
  return {
    token,
    tokenExpiresAtMs: issuedAtMs + CUBE_API_TOKEN_TTL_SECONDS * 1000,
  };
};
/**
 * Convenience accessor bundling the validated Cube credentials with a
 * freshly signed API token.
 *
 * @throws ConfigurationError when the Cube env vars are missing (propagated
 *         from getCubeApiCredentials).
 */
export const getCubeApiConfig = () => {
  const credentials = getCubeApiCredentials();
  return {
    ...credentials,
    ...createCubeApiToken(credentials.apiSecret),
  };
};
@@ -21,13 +21,17 @@ import { EmptyState } from "@/modules/ui/components/empty-state";
import { GoBackButton } from "@/modules/ui/components/go-back-button";
import { PageContentWrapper } from "@/modules/ui/components/page-content-wrapper";
import { updateDashboardAction, updateWidgetLayoutsAction } from "../actions";
import type { TDashboardWidgetError } from "../lib/widget-errors";
const ROW_HEIGHT = 80;
interface DashboardDetailClientProps {
workspaceId: string;
dashboard: TDashboardDetail;
widgetDataPromises: Map<string, Promise<{ data: TChartDataRow[]; query: TChartQuery } | { error: string }>>;
widgetDataPromises: Map<
string,
Promise<{ data: TChartDataRow[]; query: TChartQuery } | { error: TDashboardWidgetError }>
>;
directories: { id: string; name: string }[];
isReadOnly: boolean;
}
@@ -96,7 +100,7 @@ const MemoizedWidgetContent = memo(function WidgetContent({
dataPromise,
}: Readonly<{
widget: TDashboardWidget;
dataPromise?: Promise<{ data: TChartDataRow[]; query: TChartQuery } | { error: string }>;
dataPromise?: Promise<{ data: TChartDataRow[]; query: TChartQuery } | { error: TDashboardWidgetError }>;
}>) {
if (widget.chart && dataPromise) {
return (
@@ -122,7 +126,7 @@ const MemoizedWidgetItem = memo(function WidgetItem({
}: Readonly<{
widget: TDashboardWidget;
isEditing: boolean;
dataPromise?: Promise<{ data: TChartDataRow[]; query: TChartQuery } | { error: string }>;
dataPromise?: Promise<{ data: TChartDataRow[]; query: TChartQuery } | { error: TDashboardWidgetError }>;
onEdit?: () => void;
onResize?: () => void;
onRemove?: () => void;
@@ -5,9 +5,10 @@ import { useTranslation } from "react-i18next";
import { TChartQuery } from "@formbricks/types/analysis";
import { ChartRenderer } from "@/modules/ee/analysis/charts/components/chart-renderer";
import type { TChartDataRow, TChartType } from "@/modules/ee/analysis/types/analysis";
import type { TDashboardWidgetError } from "../lib/widget-errors";
interface DashboardWidgetDataProps {
dataPromise: Promise<{ data: TChartDataRow[] } | { error: string }>;
dataPromise: Promise<{ data: TChartDataRow[] } | { error: TDashboardWidgetError }>;
chartType: TChartType;
query: TChartQuery;
}
@@ -18,7 +19,7 @@ export function DashboardWidgetData({ dataPromise, chartType, query }: Readonly<
if ("error" in result) {
return (
<div className="flex h-full items-center justify-center text-sm text-red-500">
<div className="flex h-full items-center justify-center text-center text-sm text-red-500">
{t("workspace.analysis.dashboards.failed_to_load_chart_data")}
</div>
);
@@ -0,0 +1,3 @@
// Stable, non-sensitive error token passed from the server to the dashboard
// client when a widget's data query fails; the client maps it to a translated
// message instead of leaking raw error details to the browser.
export const DASHBOARD_WIDGET_LOAD_ERROR = "failed_to_load_chart_data" as const;
export type TDashboardWidgetError = typeof DASHBOARD_WIDGET_LOAD_ERROR;
@@ -1,4 +1,5 @@
import { notFound } from "next/navigation";
import { logger } from "@formbricks/logger";
import type { TChartQuery } from "@formbricks/types/analysis";
import { ResourceNotFoundError } from "@formbricks/types/errors";
import { executeQuery } from "@/modules/ee/analysis/api/lib/cube-client";
@@ -8,6 +9,7 @@ import { getFeedbackRecordDirectoriesByWorkspaceId } from "@/modules/ee/feedback
import { getWorkspaceAuth } from "@/modules/workspaces/lib/utils";
import { DashboardDetailClient } from "../components/dashboard-detail-client";
import { getDashboard } from "../lib/dashboards";
import { DASHBOARD_WIDGET_LOAD_ERROR, type TDashboardWidgetError } from "../lib/widget-errors";
interface WidgetQueryResult {
data: TChartDataRow[];
@@ -17,17 +19,19 @@ interface WidgetQueryResult {
async function executeWidgetQuery(
query: TChartQuery,
feedbackRecordDirectoryId: string
): Promise<WidgetQueryResult | { error: string }> {
): Promise<WidgetQueryResult | { error: TDashboardWidgetError }> {
try {
const scopedQuery = injectTenantFilter(query, feedbackRecordDirectoryId);
const data = await executeQuery(scopedQuery as Record<string, unknown>);
return { data: Array.isArray(data) ? data : [], query };
} catch (error) {
const message = error instanceof Error ? error.message : "Failed to load chart data";
return { error: message };
logger.error(error, "Failed to load dashboard widget data");
return { error: DASHBOARD_WIDGET_LOAD_ERROR };
}
}
type WidgetQueryPromiseResult = Promise<WidgetQueryResult | { error: TDashboardWidgetError }>;
export async function DashboardDetailPage({
params,
}: Readonly<{
@@ -47,7 +51,7 @@ export async function DashboardDetailPage({
throw error;
}
const widgetDataPromises = new Map<string, Promise<WidgetQueryResult | { error: string }>>();
const widgetDataPromises = new Map<string, WidgetQueryPromiseResult>();
const widgetsWithCharts = dashboard.widgets.filter(
(w): w is typeof w & { chart: NonNullable<typeof w.chart> } => !!w.chart
);
+8
View File
@@ -46,6 +46,14 @@ The intended defaults are:
- self-hosted / single-tenant clusters: bundled controller mode
- shared clusters with an existing platform controller: external-controller mode
## Cube.js for XM Suite v5
This chart does not deploy Cube.js. XM Suite v5 dashboard and analysis features require an external Cube instance.
- Set `deployment.env.CUBEJS_API_URL` to your Cube endpoint.
- Provide `CUBEJS_API_SECRET` through your existing secret management flow, such as the generated app secret override or `deployment.envFrom`.
- Keep Hub enabled. Cube should point at the same feedback records database that Hub writes to, unless you intentionally split that storage.
## Values
| Key | Type | Default | Description |
+6 -1
View File
@@ -95,7 +95,9 @@ deployment:
# type: secret
# nameSuffix: app-secrets
# Environment variables passed to the app container
# Environment variables passed to the app container.
# XM Suite v5 analytics requires an external Cube endpoint when using Helm:
# set deployment.env.CUBEJS_API_URL and provide CUBEJS_API_SECRET through a Secret referenced by envFrom/existingSecret.
env: {}
# Tolerations for scheduling pods on tainted nodes
@@ -577,6 +579,9 @@ hub:
# Optional env vars (non-secret). Use existingSecret for secret values such as DATABASE_URL and HUB_API_KEY.
env: {}
# Helm does not deploy Cube. XM Suite v5 analytics requires operators to provide an external Cube instance,
# set deployment.env.CUBEJS_API_URL, and supply CUBEJS_API_SECRET via an existing secret.
# Upgrade migration job runs goose + river before Helm upgrades Hub resources.
# Fresh installs run the same migrations through the Hub deployment init container.
migration:
+12 -15
View File
@@ -84,31 +84,28 @@ services:
image: cubejs/cube:v1.6.6
env_file:
- apps/web/.env
depends_on:
postgres:
condition: service_healthy
hub-migrate:
condition: service_completed_successfully
ports:
- 4000:4000
- 4001:4001 # Cube Playground UI (dev only)
environment:
CUBEJS_DB_TYPE: postgres
CUBEJS_DB_HOST: ${CUBEJS_DB_HOST:-formbricks_hub_postgres}
CUBEJS_DB_NAME: ${CUBEJS_DB_NAME:-hub}
CUBEJS_DB_USER: ${CUBEJS_DB_USER:-formbricks}
CUBEJS_DB_PASS: ${CUBEJS_DB_PASS:-formbricks_dev}
CUBEJS_DB_HOST: ${CUBEJS_DB_HOST:-postgres}
CUBEJS_DB_NAME: ${CUBEJS_DB_NAME:-postgres}
CUBEJS_DB_USER: ${CUBEJS_DB_USER:-postgres}
CUBEJS_DB_PASS: ${CUBEJS_DB_PASS:-postgres}
CUBEJS_DB_PORT: ${CUBEJS_DB_PORT:-5432}
CUBEJS_DEV_MODE: "true"
CUBEJS_API_SECRET: ${CUBEJS_API_SECRET}
CUBEJS_API_SECRET: ${CUBEJS_API_SECRET:?CUBEJS_API_SECRET is required to run Cube}
CUBEJS_CACHE_AND_QUEUE_DRIVER: memory
volumes:
- ./cube/cube.js:/cube/conf/cube.js
- ./cube/schema:/cube/conf/model
- ./docker/cube/cube.js:/cube/conf/cube.js:ro
- ./docker/cube/schema:/cube/conf/model:ro
restart: on-failure
networks:
- default
- hub
networks:
hub:
external: true
name: superset_formbricks_hub
volumes:
postgres:
+5 -5
View File
@@ -28,12 +28,12 @@ The script will prompt you for the following information:
That's it! After running the command and providing the required information, visit the domain name you entered, and you should see the Formbricks home wizard!
## Formbricks Hub
## Formbricks Hub and Cube
The stack includes the [Formbricks Hub](https://github.com/formbricks/hub) API (`ghcr.io/formbricks/hub`). Hub shares the same database as Formbricks by default.
The stack includes the [Formbricks Hub](https://github.com/formbricks/hub) API (`ghcr.io/formbricks/hub`) and a bundled Cube.js service for XM Suite v5 analytics. Hub and Cube share the same database as Formbricks by default.
- **Migrations**: A `hub-migrate` service runs Hub's database migrations (goose + river) before the Hub API starts. It runs on every `docker compose up` and is idempotent.
- **Production** (`docker/docker-compose.yml`): Set `HUB_API_KEY` (required). `HUB_API_URL` defaults to `http://hub:8080` so the Formbricks app can reach Hub inside the compose network. Override `HUB_DATABASE_URL` only if you want Hub to use a separate database.
- **Development** (`docker-compose.dev.yml`): Hub uses the same Postgres database; `HUB_API_KEY` defaults to `dev-api-key` (override with `HUB_API_KEY`) and the local Hub URL is `http://localhost:8080`.
- **Production** (`docker/docker-compose.yml`): Set `HUB_API_KEY` and `CUBEJS_API_SECRET` (required). `HUB_API_URL` defaults to `http://hub:8080` and `CUBEJS_API_URL` defaults to `http://cube:4000` so the Formbricks app can reach both services inside the compose network. Override `HUB_DATABASE_URL` and `CUBEJS_DB_*` only if Hub or Cube should use a separate database.
- **Development** (`docker-compose.dev.yml`): Hub and Cube use the same local Postgres database. `HUB_API_KEY` defaults to `dev-api-key`, `CUBEJS_API_URL` defaults to `http://localhost:4000`, and `pnpm dev:setup` generates `CUBEJS_API_SECRET` in the repo root `.env`.
In development, Hub is exposed locally on port **8080**. In production Docker Compose, Hub stays internal to the compose network and is reached via `http://hub:8080`.
In development, Hub is exposed locally on port **8080** and Cube on **4000** (with the Cube playground on **4001**). In production Docker Compose, Hub and Cube stay internal to the compose network and are reached via `http://hub:8080` and `http://cube:4000`.
@@ -18,7 +18,7 @@ cube(`FeedbackRecords`, {
detractorCount: {
type: `count`,
filters: [{ sql: `${CUBE}.value_number <= 6` }],
filters: [{ sql: `${CUBE}.value_number >= 0 AND ${CUBE}.value_number <= 6` }],
description: `Number of detractors (NPS score 0-6)`,
},
@@ -146,7 +146,7 @@ cube(`TopicsUnnested`, {
dimensions: {
id: {
sql: `feedback_record_id || '-' || topic`,
sql: `md5(feedback_record_id || '::' || topic)`,
type: `string`,
primaryKey: true,
},
+26
View File
@@ -38,6 +38,10 @@ x-environment: &environment
# Hub database URL (optional). Default: same Postgres as Formbricks. Set only if Hub uses a separate DB.
# HUB_DATABASE_URL:
# Cube.js analytics for XM Suite v5. Cube runs inside this compose stack by default.
CUBEJS_API_URL: ${CUBEJS_API_URL:-http://cube:4000}
CUBEJS_API_SECRET: ${CUBEJS_API_SECRET:?CUBEJS_API_SECRET is required to run XM Suite v5 analytics}
# Set the minimum log level(debug, info, warn, error, fatal)
# LOG_LEVEL: info
@@ -285,6 +289,28 @@ services:
API_KEY: ${HUB_API_KEY:?HUB_API_KEY is required to run Hub}
DATABASE_URL: ${HUB_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/formbricks?sslmode=disable}
# Cube.js analytics service for XM Suite v5. Shares the Hub database by default.
cube:
restart: always
image: cubejs/cube:v1.6.6
depends_on:
hub-migrate:
condition: service_completed_successfully
postgres:
condition: service_healthy
environment:
CUBEJS_DB_TYPE: postgres
CUBEJS_DB_HOST: ${CUBEJS_DB_HOST:-postgres}
CUBEJS_DB_NAME: ${CUBEJS_DB_NAME:-formbricks}
CUBEJS_DB_USER: ${CUBEJS_DB_USER:-postgres}
CUBEJS_DB_PASS: ${CUBEJS_DB_PASS:-postgres}
CUBEJS_DB_PORT: ${CUBEJS_DB_PORT:-5432}
CUBEJS_API_SECRET: ${CUBEJS_API_SECRET:?CUBEJS_API_SECRET is required to run Cube}
CUBEJS_CACHE_AND_QUEUE_DRIVER: memory
volumes:
- ./cube/cube.js:/cube/conf/cube.js:ro
- ./cube/schema:/cube/conf/model:ro
volumes:
postgres:
driver: local
+13 -1
View File
@@ -313,6 +313,10 @@ EOT
echo "📥 Downloading docker-compose.yml from Formbricks GitHub repository..."
curl -fsSL -o docker-compose.yml https://raw.githubusercontent.com/formbricks/formbricks/stable/docker/docker-compose.yml
mkdir -p cube/schema
echo "📥 Downloading Cube.js configuration for XM Suite v5 analytics..."
curl -fsSL -o cube/cube.js https://raw.githubusercontent.com/formbricks/formbricks/stable/docker/cube/cube.js
curl -fsSL -o cube/schema/FeedbackRecords.js https://raw.githubusercontent.com/formbricks/formbricks/stable/docker/cube/schema/FeedbackRecords.js
echo "🚙 Updating docker-compose.yml with your custom inputs..."
sed -i "/WEBAPP_URL:/s|WEBAPP_URL:.*|WEBAPP_URL: \"https://$domain_name\"|" docker-compose.yml
@@ -326,6 +330,14 @@ EOT
cron_secret=$(openssl rand -hex 32) && sed -i "/CRON_SECRET:$/s/CRON_SECRET:.*/CRON_SECRET: $cron_secret/" docker-compose.yml
echo "🚗 CRON_SECRET updated successfully!"
hub_api_key=$(openssl rand -hex 32)
cubejs_api_secret=$(openssl rand -hex 32)
cat <<EOF > .env
HUB_API_KEY=$hub_api_key
CUBEJS_API_SECRET=$cubejs_api_secret
EOF
echo "🚗 Generated Hub and Cube secrets in .env successfully!"
if [[ -n $mail_from ]]; then
sed -i "s|# MAIL_FROM:|MAIL_FROM: \"$mail_from\"|" docker-compose.yml
@@ -796,4 +808,4 @@ uninstall)
echo "🚀 Executing default step of installing Formbricks"
install_formbricks
;;
esac
esac
@@ -10,91 +10,91 @@ These variables are present inside your machine's docker-compose file. Restart t
For `AI_PROVIDER=google`, use a Gemini model ID such as `gemini-2.5-flash` together with Google Cloud credentials. Formbricks uses Google Cloud naming here, even though the underlying SDK still talks to Vertex AI endpoints for Gemini model access.
| Variable | Description | Required | Default |
| --------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ------------------------------------------------------------------------- |
| WEBAPP_URL | Base URL of the site. | required | http://localhost:3000 |
| PUBLIC_URL | Base URL for the public domain where surveys and public-facing content are served. If not set, uses WEBAPP_URL. | optional | WEBAPP_URL |
| NEXTAUTH_URL | Location of the auth server. This should normally be the same as WEBAPP_URL | required | http://localhost:3000 |
| DATABASE_URL | Database URL with credentials. | required | |
| NEXTAUTH_SECRET | Secret for NextAuth, used for session signing and encryption. | required | (Generated by the user, must not exceed 32 bytes, `openssl rand -hex 32`) |
| ENCRYPTION_KEY | Secret used by Formbricks for data encryption and audit log hashing. | required | (Generated by the user, must not exceed 32 bytes, `openssl rand -hex 32`) |
| CRON_SECRET | API Secret for running cron jobs. | required | (Generated by the user, must not exceed 32 bytes, `openssl rand -hex 32`) |
| LOG_LEVEL | Minimum log level (debug, info, warn, error, fatal) | optional | info |
| S3_ACCESS_KEY | Access key for S3. | optional | (resolved by the AWS SDK) |
| S3_SECRET_KEY | Secret key for S3. | optional | (resolved by the AWS SDK) |
| S3_REGION | Region for S3. | optional | (resolved by the AWS SDK) |
| S3_BUCKET_NAME | S3 bucket name for data storage. Formbricks enables S3 storage when this is set. | optional (required if S3 is enabled) | |
| S3_ENDPOINT_URL | Endpoint for S3. | optional | (resolved by the AWS SDK) |
| SAML_DATABASE_URL | Database URL for SAML. | optional | postgres://postgres:@localhost:5432/formbricks-saml |
| PRIVACY_URL | URL for privacy policy. | optional | |
| TERMS_URL | URL for terms of service. | optional | |
| IMPRINT_URL | URL for imprint. | optional | |
| IMPRINT_ADDRESS | Address for imprint. | optional | |
| EMAIL_AUTH_DISABLED | Disables the ability for users to signup or login via email and password if set to 1. | optional | |
| PASSWORD_RESET_DISABLED | Disables password reset functionality if set to 1. | optional | |
| PASSWORD_RESET_TOKEN_LIFETIME_MINUTES | Configures how long password reset links remain valid in minutes. Accepted values are integers from 5 to 120. | optional | 30 |
| EMAIL_VERIFICATION_DISABLED | Disables email verification if set to 1. | optional | |
| RATE_LIMITING_DISABLED | Disables rate limiting if set to 1. | optional | |
| TELEMETRY_DISABLED | Disables telemetry reporting if set to 1. Ignored when an Enterprise License is active. | optional | |
| DANGEROUSLY_ALLOW_WEBHOOK_INTERNAL_URLS | Allows webhook URLs to point to internal/private network addresses (e.g. localhost, 192.168.x.x) if set to 1. Useful for self-hosted instances that need to send webhooks to internal services. | optional | |
| INVITE_DISABLED | Disables the ability for invited users to create an account if set to 1. | optional | |
| MAIL_FROM | Email address to send emails from. | optional (required if email services are to be enabled) | |
| MAIL_FROM_NAME | Email name/title to send emails from. | optional (required if email services are to be enabled) | |
| SMTP_HOST | Host URL of your SMTP server. | optional (required if email services are to be enabled) | |
| SMTP_PORT | Host Port of your SMTP server. | optional (required if email services are to be enabled) | |
| SMTP_USER | Username for your SMTP Server. | optional (required if email services are to be enabled) | |
| SMTP_PASSWORD | Password for your SMTP Server. | optional (required if email services are to be enabled) | |
| SMTP_AUTHENTICATED | If set to 0, the server will not require SMTP_USER and SMTP_PASSWORD(default is 1) | optional | |
| SMTP_SECURE_ENABLED | SMTP secure connection. For using TLS, set to 1 else to 0. | optional (required if email services are to be enabled) | |
| SMTP_REJECT_UNAUTHORIZED_TLS | If set to 0, the server will accept connections without requiring authorization from the list of supplied CAs. | optional | 1 |
| TURNSTILE_SITE_KEY | Site key for Turnstile. | optional | |
| TURNSTILE_SECRET_KEY | Secret key for Turnstile. | optional | |
| RECAPTCHA_SITE_KEY | Site key for survey responses recaptcha bot protection | optional | |
| RECAPTCHA_SECRET_KEY | Secret key for recaptcha bot protection. | optional | |
| GITHUB_ID | Client ID for GitHub. | optional (required if GitHub auth is enabled) | |
| GITHUB_SECRET | Secret for GitHub. | optional (required if GitHub auth is enabled) | |
| GOOGLE_CLIENT_ID | Client ID for Google. | optional (required if Google auth is enabled) | |
| GOOGLE_CLIENT_SECRET | Secret for Google. | optional (required if Google auth is enabled) | |
| AI_PROVIDER | Instance-level AI provider used in the background. Supported values: `aws`, `google`, `azure`. | optional (required if AI is enabled) | |
| AI_MODEL | Instance-level AI model or deployment name used by the active provider. | optional (required if `AI_PROVIDER` is set) | |
| AI_GOOGLE_CLOUD_PROJECT | Google Cloud project ID for the `google` AI provider. | optional (required if `AI_PROVIDER=google`) | |
| AI_GOOGLE_CLOUD_LOCATION | Google Cloud location for `google` AI requests. | optional (required if `AI_PROVIDER=google`) | |
| AI_GOOGLE_CLOUD_CREDENTIALS_JSON | Service account credentials JSON for the `google` AI provider. | optional (one of this or `AI_GOOGLE_CLOUD_APPLICATION_CREDENTIALS` required if `AI_PROVIDER=google`) | |
| AI_GOOGLE_CLOUD_APPLICATION_CREDENTIALS | Path to Google Application Default Credentials used by the `google` AI provider. | optional (one of this or `AI_GOOGLE_CLOUD_CREDENTIALS_JSON` required if `AI_PROVIDER=google`) | |
| AI_AWS_REGION | AWS region for Amazon Bedrock. | optional (required if `AI_PROVIDER=aws`) | |
| AI_AWS_ACCESS_KEY_ID | AWS access key ID for Amazon Bedrock. | optional (required if `AI_PROVIDER=aws`) | |
| AI_AWS_SECRET_ACCESS_KEY | AWS secret access key for Amazon Bedrock. | optional (required if `AI_PROVIDER=aws`) | |
| AI_AWS_SESSION_TOKEN | AWS session token for Amazon Bedrock temporary credentials. | optional | |
| AI_AZURE_BASE_URL | Azure OpenAI / Foundry base URL. When set, this is preferred over `AI_AZURE_RESOURCE_NAME`. | optional | |
| AI_AZURE_RESOURCE_NAME | Azure resource name used to assemble the Azure OpenAI URL. | optional | |
| AI_AZURE_API_KEY | API key for Azure OpenAI / Foundry. | optional (required if `AI_PROVIDER=azure`) | |
| AI_AZURE_API_VERSION | Azure API version for OpenAI-compatible calls. | optional | v1 |
| STRIPE_SECRET_KEY | Secret key for Stripe integration. | optional | |
| STRIPE_WEBHOOK_SECRET | Webhook secret for Stripe integration. | optional | |
| DEFAULT_BRAND_COLOR | Default brand color for your app (Can be overwritten from the UI as well). | optional | #64748b |
| DEFAULT_ORGANIZATION_ID | Automatically assign new users to a specific organization when joining | optional | |
| OIDC_DISPLAY_NAME | Display name for Custom OpenID Connect Provider | optional | |
| OIDC_CLIENT_ID | Client ID for Custom OpenID Connect Provider | optional (required if OIDC auth is enabled) | |
| OIDC_CLIENT_SECRET | Secret for Custom OpenID Connect Provider | optional (required if OIDC auth is enabled) | |
| OIDC_ISSUER | Issuer URL for Custom OpenID Connect Provider (should have .well-known configured at this) | optional (required if OIDC auth is enabled) | |
| OIDC_SIGNING_ALGORITHM | Signing Algorithm for Custom OpenID Connect Provider | optional | RS256 |
| OTEL_EXPORTER_OTLP_ENDPOINT | Base OTLP HTTP endpoint for traces and metrics export (e.g. http://collector:4318). | optional | |
| OTEL_EXPORTER_OTLP_PROTOCOL | OTLP protocol to use for export. | optional | http/protobuf |
| OTEL_SERVICE_NAME | Service name reported in OpenTelemetry resource attributes. | optional | formbricks |
| OTEL_RESOURCE_ATTRIBUTES | Comma-separated resource attributes in OTel format (`key=value,key2=value2`). | optional | |
| OTEL_TRACES_SAMPLER | Trace sampler strategy (`always_on`, `always_off`, `traceidratio`, `parentbased_traceidratio`). | optional | always_on |
| OTEL_TRACES_SAMPLER_ARG | Sampling argument used by ratio-based samplers (`0` to `1`). | optional | |
| PROMETHEUS_ENABLED | Enables Prometheus metrics if set to 1. | optional | |
| PROMETHEUS_EXPORTER_PORT | Port for Prometheus metrics. | optional | 9090 |
| DEFAULT_TEAM_ID | Default team ID for new users. | optional | |
| SENTRY_DSN | Set this to track errors and monitor performance in Sentry. | optional | |
| SENTRY_ENVIRONMENT | Set this to identify the environment in Sentry | optional | |
| SENTRY_AUTH_TOKEN | Set this if you want to make errors more readable in Sentry. | optional | |
| SESSION_MAX_AGE | Configure the maximum age for the session in seconds. | optional | 86400 (24 hours) |
| USER_MANAGEMENT_MINIMUM_ROLE | Set this to control which roles can access user management features. Accepted values: "owner", "manager", "disabled" | optional | manager |
| REDIS_URL | Redis URL for caching, rate limiting, and audit logging. Application will not start without this. | required | redis://localhost:6379 |
| AUDIT_LOG_ENABLED | Set this to 1 to enable audit logging. Requires Redis to be configured with the REDIS_URL env variable. | optional | 0 |
| AUDIT_LOG_GET_USER_IP | Set to 1 to include user IP addresses in audit logs from request headers | optional | 0 |
| Variable | Description | Required | Default |
| --------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------- |
| WEBAPP_URL | Base URL of the site. | required | http://localhost:3000 |
| PUBLIC_URL | Base URL for the public domain where surveys and public-facing content are served. If not set, uses WEBAPP_URL. | optional | WEBAPP_URL |
| NEXTAUTH_URL | Location of the auth server. This should normally be the same as WEBAPP_URL | required | http://localhost:3000 |
| DATABASE_URL | Database URL with credentials. | required | |
| NEXTAUTH_SECRET | Secret for NextAuth, used for session signing and encryption. | required | (Generated by the user, must not exceed 32 bytes, `openssl rand -hex 32`) |
| ENCRYPTION_KEY | Secret used by Formbricks for data encryption and audit log hashing. | required | (Generated by the user, must not exceed 32 bytes, `openssl rand -hex 32`) |
| CRON_SECRET | API Secret for running cron jobs. | required | (Generated by the user, must not exceed 32 bytes, `openssl rand -hex 32`) |
| LOG_LEVEL | Minimum log level (debug, info, warn, error, fatal) | optional | info |
| S3_ACCESS_KEY | Access key for S3. | optional | (resolved by the AWS SDK) |
| S3_SECRET_KEY | Secret key for S3. | optional | (resolved by the AWS SDK) |
| S3_REGION | Region for S3. | optional | (resolved by the AWS SDK) |
| S3_BUCKET_NAME | S3 bucket name for data storage. Formbricks enables S3 storage when this is set. | optional (required if S3 is enabled) | |
| S3_ENDPOINT_URL | Endpoint for S3. | optional | (resolved by the AWS SDK) |
| SAML_DATABASE_URL | Database URL for SAML. | optional | postgres://postgres:@localhost:5432/formbricks-saml |
| PRIVACY_URL | URL for privacy policy. | optional | |
| TERMS_URL | URL for terms of service. | optional | |
| IMPRINT_URL | URL for imprint. | optional | |
| IMPRINT_ADDRESS | Address for imprint. | optional | |
| EMAIL_AUTH_DISABLED | Disables the ability for users to signup or login via email and password if set to 1. | optional | |
| PASSWORD_RESET_DISABLED | Disables password reset functionality if set to 1. | optional | |
| PASSWORD_RESET_TOKEN_LIFETIME_MINUTES | Configures how long password reset links remain valid in minutes. Accepted values are integers from 5 to 120. | optional | 30 |
| EMAIL_VERIFICATION_DISABLED | Disables email verification if set to 1. | optional | |
| RATE_LIMITING_DISABLED | Disables rate limiting if set to 1. | optional | |
| TELEMETRY_DISABLED | Disables telemetry reporting if set to 1. Ignored when an Enterprise License is active. | optional | |
| DANGEROUSLY_ALLOW_WEBHOOK_INTERNAL_URLS | Allows webhook URLs to point to internal/private network addresses (e.g. localhost, 192.168.x.x) if set to 1. Useful for self-hosted instances that need to send webhooks to internal services. | optional | |
| INVITE_DISABLED | Disables the ability for invited users to create an account if set to 1. | optional | |
| MAIL_FROM | Email address to send emails from. | optional (required if email services are to be enabled) | |
| MAIL_FROM_NAME | Email name/title to send emails from. | optional (required if email services are to be enabled) | |
| SMTP_HOST | Host URL of your SMTP server. | optional (required if email services are to be enabled) | |
| SMTP_PORT | Host Port of your SMTP server. | optional (required if email services are to be enabled) | |
| SMTP_USER | Username for your SMTP Server. | optional (required if email services are to be enabled) | |
| SMTP_PASSWORD | Password for your SMTP Server. | optional (required if email services are to be enabled) | |
| SMTP_AUTHENTICATED                       | If set to 0, the server will not require SMTP_USER and SMTP_PASSWORD (default is 1)                                                                                                              | optional                                                                                             |                                                                           |
| SMTP_SECURE_ENABLED | SMTP secure connection. For using TLS, set to 1 else to 0. | optional (required if email services are to be enabled) | |
| SMTP_REJECT_UNAUTHORIZED_TLS | If set to 0, the server will accept connections without requiring authorization from the list of supplied CAs. | optional | 1 |
| TURNSTILE_SITE_KEY | Site key for Turnstile. | optional | |
| TURNSTILE_SECRET_KEY | Secret key for Turnstile. | optional | |
| RECAPTCHA_SITE_KEY | Site key for survey responses recaptcha bot protection | optional | |
| RECAPTCHA_SECRET_KEY | Secret key for recaptcha bot protection. | optional | |
| GITHUB_ID | Client ID for GitHub. | optional (required if GitHub auth is enabled) | |
| GITHUB_SECRET | Secret for GitHub. | optional (required if GitHub auth is enabled) | |
| GOOGLE_CLIENT_ID | Client ID for Google. | optional (required if Google auth is enabled) | |
| GOOGLE_CLIENT_SECRET | Secret for Google. | optional (required if Google auth is enabled) | |
| AI_PROVIDER | Instance-level AI provider used in the background. Supported values: `aws`, `google`, `azure`. | optional (required if AI is enabled) | |
| AI_MODEL | Instance-level AI model or deployment name used by the active provider. | optional (required if `AI_PROVIDER` is set) | |
| AI_GOOGLE_CLOUD_PROJECT | Google Cloud project ID for the `google` AI provider. | optional (required if `AI_PROVIDER=google`) | |
| AI_GOOGLE_CLOUD_LOCATION | Google Cloud location for `google` AI requests. | optional (required if `AI_PROVIDER=google`) | |
| AI_GOOGLE_CLOUD_CREDENTIALS_JSON | Service account credentials JSON for the `google` AI provider. | optional (one of this or `AI_GOOGLE_CLOUD_APPLICATION_CREDENTIALS` required if `AI_PROVIDER=google`) | |
| AI_GOOGLE_CLOUD_APPLICATION_CREDENTIALS | Path to Google Application Default Credentials used by the `google` AI provider. | optional (one of this or `AI_GOOGLE_CLOUD_CREDENTIALS_JSON` required if `AI_PROVIDER=google`) | |
| AI_AWS_REGION | AWS region for Amazon Bedrock. | optional (required if `AI_PROVIDER=aws`) | |
| AI_AWS_ACCESS_KEY_ID | AWS access key ID for Amazon Bedrock. | optional (required if `AI_PROVIDER=aws`) | |
| AI_AWS_SECRET_ACCESS_KEY | AWS secret access key for Amazon Bedrock. | optional (required if `AI_PROVIDER=aws`) | |
| AI_AWS_SESSION_TOKEN | AWS session token for Amazon Bedrock temporary credentials. | optional | |
| AI_AZURE_BASE_URL | Azure OpenAI / Foundry base URL. When set, this is preferred over `AI_AZURE_RESOURCE_NAME`. | optional | |
| AI_AZURE_RESOURCE_NAME | Azure resource name used to assemble the Azure OpenAI URL. | optional | |
| AI_AZURE_API_KEY | API key for Azure OpenAI / Foundry. | optional (required if `AI_PROVIDER=azure`) | |
| AI_AZURE_API_VERSION | Azure API version for OpenAI-compatible calls. | optional | v1 |
| STRIPE_SECRET_KEY | Secret key for Stripe integration. | optional | |
| STRIPE_WEBHOOK_SECRET | Webhook secret for Stripe integration. | optional | |
| DEFAULT_BRAND_COLOR | Default brand color for your app (Can be overwritten from the UI as well). | optional | #64748b |
| DEFAULT_ORGANIZATION_ID | Automatically assign new users to a specific organization when joining | optional | |
| OIDC_DISPLAY_NAME | Display name for Custom OpenID Connect Provider | optional | |
| OIDC_CLIENT_ID | Client ID for Custom OpenID Connect Provider | optional (required if OIDC auth is enabled) | |
| OIDC_CLIENT_SECRET | Secret for Custom OpenID Connect Provider | optional (required if OIDC auth is enabled) | |
| OIDC_ISSUER | Issuer URL for Custom OpenID Connect Provider (should have .well-known configured at this) | optional (required if OIDC auth is enabled) | |
| OIDC_SIGNING_ALGORITHM | Signing Algorithm for Custom OpenID Connect Provider | optional | RS256 |
| OTEL_EXPORTER_OTLP_ENDPOINT | Base OTLP HTTP endpoint for traces and metrics export (e.g. http://collector:4318). | optional | |
| OTEL_EXPORTER_OTLP_PROTOCOL | OTLP protocol to use for export. | optional | http/protobuf |
| OTEL_SERVICE_NAME | Service name reported in OpenTelemetry resource attributes. | optional | formbricks |
| OTEL_RESOURCE_ATTRIBUTES | Comma-separated resource attributes in OTel format (`key=value,key2=value2`). | optional | |
| OTEL_TRACES_SAMPLER | Trace sampler strategy (`always_on`, `always_off`, `traceidratio`, `parentbased_traceidratio`). | optional | always_on |
| OTEL_TRACES_SAMPLER_ARG | Sampling argument used by ratio-based samplers (`0` to `1`). | optional | |
| PROMETHEUS_ENABLED | Enables Prometheus metrics if set to 1. | optional | |
| PROMETHEUS_EXPORTER_PORT | Port for Prometheus metrics. | optional | 9090 |
| DEFAULT_TEAM_ID | Default team ID for new users. | optional | |
| SENTRY_DSN | Set this to track errors and monitor performance in Sentry. | optional | |
| SENTRY_ENVIRONMENT | Set this to identify the environment in Sentry | optional | |
| SENTRY_AUTH_TOKEN | Set this if you want to make errors more readable in Sentry. | optional | |
| SESSION_MAX_AGE | Configure the maximum age for the session in seconds. | optional | 86400 (24 hours) |
| USER_MANAGEMENT_MINIMUM_ROLE | Set this to control which roles can access user management features. Accepted values: "owner", "manager", "disabled" | optional | manager |
| REDIS_URL | Redis URL for caching, rate limiting, and audit logging. Application will not start without this. | required | redis://localhost:6379 |
| AUDIT_LOG_ENABLED | Set this to 1 to enable audit logging. Requires Redis to be configured with the REDIS_URL env variable. | optional | 0 |
| AUDIT_LOG_GET_USER_IP | Set to 1 to include user IP addresses in audit logs from request headers | optional | 0 |
#### Formbricks Hub
@@ -106,4 +106,22 @@ When running the stack with [Formbricks Hub](https://github.com/formbricks/hub)
| HUB_API_URL | Base URL the Formbricks app uses to call Hub. Use `http://localhost:8080` locally. | required | `http://localhost:8080` in local dev |
| HUB_DATABASE_URL | PostgreSQL connection URL for Hub. Omit to use the same database as Formbricks. | optional | Same as Formbricks `DATABASE_URL` (shared database) |
#### Cube.js Analytics for XM Suite v5
XM Suite v5 dashboard and analysis features require a reachable Cube.js instance. Formbricks generates the backend
Cube JWT from `CUBEJS_API_SECRET`, so `CUBEJS_API_TOKEN` is not part of the supported setup contract.
| Variable | Description | Required | Default |
| ----------------- | ----------------------------------------------------------------------------------------------------- | ---------------------------------- | ------------------------------------ |
| CUBEJS_API_URL | Base URL the Formbricks app uses to call Cube. Use `http://localhost:4000` locally. | required for XM Suite v5 analytics | `http://localhost:4000` in local dev |
| CUBEJS_API_SECRET | Shared secret Formbricks uses to sign Cube API JWTs. Generate with `openssl rand -hex 32`. | required for XM Suite v5 analytics | |
| CUBEJS_DB_HOST | Database host for the Cube service. Only needed when you run Cube yourself and override defaults. | optional | Depends on your Cube deployment |
| CUBEJS_DB_PORT | Database port for the Cube service. Only needed when you run Cube yourself and override defaults. | optional | Depends on your Cube deployment |
| CUBEJS_DB_NAME | Database name for the Cube service. Only needed when you run Cube yourself and override defaults. | optional | Depends on your Cube deployment |
| CUBEJS_DB_USER | Database user for the Cube service. Only needed when you run Cube yourself and override defaults. | optional | Depends on your Cube deployment |
| CUBEJS_DB_PASS | Database password for the Cube service. Only needed when you run Cube yourself and override defaults. | optional | Depends on your Cube deployment |
For Helm deployments, Formbricks does not deploy Cube for you in this chart. Provide an external Cube endpoint with
`CUBEJS_API_URL` and supply `CUBEJS_API_SECRET` through your existing secret management setup.
Note: If you want to configure something that is not possible via the above, please open an issue on our GitHub repository or reach out to us on GitHub Discussions and we'll try our best to work out a solution with you.
+22 -8
View File
@@ -25,12 +25,26 @@ Make sure Docker and Docker Compose are installed on your system. These are usua
mkdir formbricks-quickstart && cd formbricks-quickstart
```
1. **Download the Docker-Compose File**
1. **Download the Docker Files**
Get the docker-compose file from the Formbricks repository by running:
Get the Docker Compose file plus the Cube.js configuration shipped with the XM Suite v5 stack:
```bash
mkdir -p cube/schema
curl -o docker-compose.yml https://raw.githubusercontent.com/formbricks/formbricks/stable/docker/docker-compose.yml
curl -o cube/cube.js https://raw.githubusercontent.com/formbricks/formbricks/stable/docker/cube/cube.js
curl -o cube/schema/FeedbackRecords.js https://raw.githubusercontent.com/formbricks/formbricks/stable/docker/cube/schema/FeedbackRecords.js
```
1. **Generate Hub and Cube Secrets**
XM Suite v5 analytics requires Formbricks Hub and Cube.js. Create a local `.env` file for the required shared secrets:
```bash
cat <<EOF > .env
HUB_API_KEY=$(openssl rand -hex 32)
CUBEJS_API_SECRET=$(openssl rand -hex 32)
EOF
```
1. **Generate NextAuth Secret**
@@ -83,7 +97,7 @@ Make sure Docker and Docker Compose are installed on your system. These are usua
1. **Start the Docker Setup**
Now, you're ready to run Formbricks with Docker. Use the command below to start Formbricks along with a PostgreSQL database using Docker Compose:
Now, you're ready to run Formbricks with Docker. Use the command below to start Formbricks together with PostgreSQL, Redis, Formbricks Hub, and Cube.js:
```bash
docker compose up -d
@@ -95,6 +109,11 @@ Make sure Docker and Docker Compose are installed on your system. These are usua
Once the setup is running, open [**http://localhost:3000**](http://localhost:3000) in your browser to access Formbricks. The first time you visit, you'll see a setup wizard. Follow the steps to create your first user and start using Formbricks.
<Note>
The bundled Docker stack keeps Formbricks Hub and Cube.js internal to the compose network. The app reaches
them through `http://hub:8080` and `http://cube:4000`.
</Note>
## Update
Please take a look at our [migration guide](/self-hosting/advanced/migration) for version specific steps to update Formbricks.
@@ -151,12 +170,10 @@ The fastest way to test MinIO with Formbricks is to use the included `docker-com
Open http://localhost:9001 in your browser.
Login credentials:
- Username: `devminio`
- Password: `devminio123`
3. **Create Bucket**
- Click "Buckets" in the left sidebar
- Click "Create Bucket"
- Name it: `formbricks`
@@ -178,7 +195,6 @@ The fastest way to test MinIO with Formbricks is to use the included `docker-com
5. **Verify in MinIO Console**
After uploading files in Formbricks, view them at http://localhost:9001:
- Navigate to Buckets → formbricks → Browse
- Your uploaded files will appear here
@@ -245,7 +261,6 @@ If you prefer to add MinIO to your own `docker-compose.yml`, follow these steps:
```
4. **Open the MinIO Console & Create a Bucket**
- Visit **http://localhost:9001**
- Log in with:
- **Username:** `formbricks-root`
@@ -275,7 +290,6 @@ If you prefer to add MinIO to your own `docker-compose.yml`, follow these steps:
6. **Verify uploads**
After uploading a file in Formbricks, check **http://localhost:9001**:
- **Buckets → formbricks → Browse**
You should see your uploaded files.
+20
View File
@@ -31,6 +31,22 @@ class ValidationError extends Error {
}
}
/**
 * Error raised when required server-side configuration is missing or invalid
 * (e.g. the Cube.js analytics settings for XM Suite v5).
 * Carries HTTP 503 (Service Unavailable) — presumably consumed by API error
 * handlers to pick the response status; confirm against the callers.
 */
class ConfigurationError extends Error {
  /** HTTP status code associated with this error type. */
  statusCode = 503;

  constructor(message: string) {
    super(message);
    // Keep `name` in sync with the class so name-based error filtering
    // (e.g. EXPECTED_ERROR_NAMES lookups) matches this error.
    this.name = "ConfigurationError";
  }
}
/**
 * Error raised when executing a query fails at runtime.
 * Carries HTTP 500 (Internal Server Error) — presumably consumed by API error
 * handlers to pick the response status; confirm against the callers.
 */
class QueryExecutionError extends Error {
  /** HTTP status code associated with this error type. */
  statusCode = 500;

  constructor(message: string) {
    super(message);
    // Keep `name` in sync with the class so name-based error filtering
    // (e.g. EXPECTED_ERROR_NAMES lookups) matches this error.
    this.name = "QueryExecutionError";
  }
}
class UnknownError extends Error {
statusCode = 500;
constructor(message: string) {
@@ -135,6 +151,8 @@ export {
ResourceNotFoundError,
InvalidInputError,
ValidationError,
ConfigurationError,
QueryExecutionError,
DatabaseError,
UniqueConstraintError,
UnknownError,
@@ -156,6 +174,8 @@ export const EXPECTED_ERROR_NAMES = new Set([
"AuthorizationError",
"InvalidInputError",
"ValidationError",
"ConfigurationError",
"QueryExecutionError",
"AuthenticationError",
"OperationNotAllowedError",
"TooManyRequestsError",
+1 -1
View File
@@ -5,7 +5,7 @@ readonly SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
readonly REPO_ROOT="$(cd -- "${SCRIPT_DIR}/.." && pwd)"
readonly ENV_TEMPLATE_PATH="${REPO_ROOT}/.env.example"
readonly ENV_PATH="${REPO_ROOT}/.env"
readonly REQUIRED_GENERATED_KEYS=("ENCRYPTION_KEY" "NEXTAUTH_SECRET" "CRON_SECRET")
readonly REQUIRED_GENERATED_KEYS=("ENCRYPTION_KEY" "NEXTAUTH_SECRET" "CRON_SECRET" "CUBEJS_API_SECRET")
TEMP_FILE=""
+1 -1
View File
@@ -211,7 +211,7 @@
"BREVO_API_KEY",
"BREVO_LIST_ID",
"CRON_SECRET",
"CUBEJS_API_TOKEN",
"CUBEJS_API_SECRET",
"CUBEJS_API_URL",
"DANGEROUSLY_ALLOW_WEBHOOK_INTERNAL_URLS",
"DATABASE_URL",