Compare commits

..

15 Commits

Author SHA1 Message Date
Dhruwang
abdd378ad5 chore: dual-write projectId in all create/upsert paths
Add projectId alongside environmentId in all resource creation and
upsert code paths. This is Phase 3 of the environment deprecation plan.

For 15 call sites, replace verbose getEnvironment() + null-check
boilerplate with the existing getProjectIdFromEnvironmentId() helper,
which encapsulates the same logic in a single call.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-26 16:12:22 +05:30
Dhruwang
83cc7708f3 feat(db): add batched data migration to backfill projectId on environment-owned models
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-26 15:10:44 +05:30
Matti Nannt
6cf7ec262c feat: port hub xm-suite config to epic/v5 (#7578) 2026-03-26 15:10:44 +05:30
Dhruwang
cd02baa992 fix: omit projectId from webhook update API schema
The ZWebhookUpdateSchema derived from ZWebhook via .omit() did not exclude
projectId, making it a required field in the update payload. This caused
the webhook update E2E test to fail since it doesn't send projectId.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-26 15:10:43 +05:30
Dhruwang
d624925fbd fix: standardize column naming in migration indices for consistency
Update migration.sql to change column names from camelCase to snake_case for createdAt and updatedAt in the indices of ActionClass, ContactAttributeKey, and Survey models. This change ensures consistency across the database schema and aligns with recent updates to the projectId field.
2026-03-26 15:10:43 +05:30
Dhruwang
30500f09bc fix: add explicit select to xm_segment_migration to prevent projectId query
The migration runner uses a Prisma Client generated from the latest
schema, but applies migrations in timestamp order. Without explicit
select, findMany/update would query the projectId column which doesn't
exist yet when this older migration runs.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-26 15:10:43 +05:30
Dhruwang
10a31677f9 fix: update database indices to include projectId with relevant fields
Refactor database schema to modify indices for ActionClass, ContactAttributeKey, and Survey models, incorporating projectId alongside createdAt or updatedAt as appropriate. This change enhances query performance and aligns with recent updates to the projectId field across environment-owned models.
2026-03-26 15:10:43 +05:30
Dhruwang
cf8f960876 fix: add projectId to all Zod types, select objects, and inline constructors
Update all TypeScript types (Zod schemas in packages/types and
packages/database/zod) and Prisma select objects to include the new
nullable projectId field added to the 8 environment-owned models.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-26 15:10:43 +05:30
Dhruwang
1f36eb3d32 fix: add projectId to Prisma select objects for affected models
Add projectId: true to all explicit select objects for ActionClass,
Survey, Contact, and Segment models so returned types include the
new nullable projectId field.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-26 15:10:43 +05:30
Dhruwang
0c0ad1ee4a fix: add projectId to ActionClass mock in js-core widget test
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-26 15:10:43 +05:30
Dhruwang
9b4e41cc48 chore(db): add nullable projectId to environment-owned models
Phase 1 of environment deprecation: adds optional projectId column
with FK, index, and cascade delete to all 8 environment-owned models
(Survey, Contact, ActionClass, ContactAttributeKey, Webhook, Tag,
Segment, Integration) and reverse relations on Project.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-26 15:10:43 +05:30
Dhruwang
9a12320a2a docs: revise deprecate environments migration plan
Remove premature resolver abstraction (old Phase 1). Renumber phases 2-12
to 1-11. Add detailed explanations for why each phase is needed.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-26 15:10:42 +05:30
Dhruwang
07f2249fc7 docs: add deprecate environments migration plan
Add detailed phased plan for collapsing the Environment layer so resources
live directly under Project. Covers 12 phases from resolution abstraction
through SDK updates.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-26 15:10:42 +05:30
Dhruwang
2c9fbf83e4 chore: merge epic/v5 into chore/deprecate-environments 2026-03-26 15:10:31 +05:30
Matti Nannt
81272b96e1 feat: port hub xm-suite config to epic/v5 (#7578) 2026-03-25 11:04:42 +00:00
74 changed files with 1256 additions and 223 deletions

View File

@@ -38,6 +38,15 @@ LOG_LEVEL=info
DATABASE_URL='postgresql://postgres:postgres@localhost:5432/formbricks?schema=public'
#################
# HUB (DEV) #
#################
# The dev stack (pnpm db:up / pnpm go) runs Formbricks Hub on port 8080.
# Set explicitly to avoid confusion; override as needed when using docker-compose.dev.yml.
HUB_API_KEY=dev-api-key
HUB_API_URL=http://localhost:8080
HUB_DATABASE_URL=postgresql://postgres:postgres@postgres:5432/postgres?sslmode=disable
################
# MAIL SETUP #
################

View File

@@ -2,6 +2,7 @@ import { Prisma } from "@prisma/client";
import { prisma } from "@formbricks/database";
import { TDisplayCreateInput, ZDisplayCreateInput } from "@formbricks/types/displays";
import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { validateInputs } from "@/lib/utils/validate";
import { getContactByUserId } from "./contact";
@@ -15,9 +16,11 @@ export const createDisplay = async (displayInput: TDisplayCreateInput): Promise<
if (userId) {
contact = await getContactByUserId(environmentId, userId);
if (!contact) {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
contact = await prisma.contact.create({
data: {
environment: { connect: { id: environmentId } },
project: { connect: { id: projectId } },
attributes: {
create: {
attributeKey: {

View File

@@ -45,6 +45,7 @@ export const responseSelection = {
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -19,6 +19,7 @@ const selectActionClass = {
key: true,
noCodeConfig: true,
environmentId: true,
projectId: true,
} satisfies Prisma.ActionClassSelect;
export const getActionClasses = reactCache(async (environmentIds: string[]): Promise<TActionClass[]> => {

View File

@@ -50,6 +50,7 @@ export const responseSelection = {
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -5,6 +5,7 @@ import { DatabaseError, InvalidInputError } from "@formbricks/types/errors";
import { TWebhookInput, ZWebhookInput } from "@/app/api/v1/webhooks/types/webhooks";
import { ITEMS_PER_PAGE } from "@/lib/constants";
import { generateWebhookSecret } from "@/lib/crypto";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { validateInputs } from "@/lib/utils/validate";
import { validateWebhookUrl } from "@/lib/utils/validate-webhook-url";
@@ -12,6 +13,8 @@ export const createWebhook = async (webhookInput: TWebhookInput): Promise<Webhoo
validateInputs([webhookInput, ZWebhookInput]);
await validateWebhookUrl(webhookInput.url);
const projectId = await getProjectIdFromEnvironmentId(webhookInput.environmentId);
try {
const secret = generateWebhookSecret();
@@ -23,11 +26,8 @@ export const createWebhook = async (webhookInput: TWebhookInput): Promise<Webhoo
surveyIds: webhookInput.surveyIds || [],
triggers: webhookInput.triggers || [],
secret,
environment: {
connect: {
id: webhookInput.environmentId,
},
},
environmentId: webhookInput.environmentId,
projectId,
},
});

View File

@@ -4823,6 +4823,7 @@ export const previewSurvey = (projectName: string, t: TFunction): TSurvey => {
name: t("templates.preview_survey_name"),
type: "link" as const,
environmentId: "cltwumfcz0009echxg02fh7oa",
projectId: null,
createdBy: "cltwumfbz0000echxysz6ptvq",
status: "inProgress" as const,
welcomeCard: {

View File

@@ -9,6 +9,7 @@ import { TActionClass, TActionClassInput, ZActionClassInput } from "@formbricks/
import { ZId, ZOptionalNumber, ZString } from "@formbricks/types/common";
import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
import { ITEMS_PER_PAGE } from "../constants";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
import { validateInputs } from "../utils/validate";
const selectActionClass = {
@@ -21,6 +22,7 @@ const selectActionClass = {
key: true,
noCodeConfig: true,
environmentId: true,
projectId: true,
} satisfies Prisma.ActionClassSelect;
export const getActionClasses = reactCache(
@@ -113,10 +115,13 @@ export const createActionClass = async (
const { environmentId: _, ...actionClassInput } = actionClass;
try {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const actionClassPrisma = await prisma.actionClass.create({
data: {
...actionClassInput,
environment: { connect: { id: environmentId } },
environmentId,
projectId,
key: actionClassInput.type === "code" ? actionClassInput.key : undefined,
noCodeConfig:
actionClassInput.type === "noCode"

View File

@@ -40,6 +40,8 @@ export const GITHUB_ID = env.GITHUB_ID;
export const GITHUB_SECRET = env.GITHUB_SECRET;
export const GOOGLE_CLIENT_ID = env.GOOGLE_CLIENT_ID;
export const GOOGLE_CLIENT_SECRET = env.GOOGLE_CLIENT_SECRET;
export const HUB_API_URL = env.HUB_API_URL;
export const HUB_API_KEY = env.HUB_API_KEY;
export const AZUREAD_CLIENT_ID = env.AZUREAD_CLIENT_ID;
export const AZUREAD_CLIENT_SECRET = env.AZUREAD_CLIENT_SECRET;

View File

@@ -33,6 +33,8 @@ export const env = createEnv({
GOOGLE_SHEETS_REDIRECT_URL: z.string().optional(),
HTTP_PROXY: z.url().optional(),
HTTPS_PROXY: z.url().optional(),
HUB_API_URL: z.url(),
HUB_API_KEY: z.string().optional(),
IMPRINT_URL: z
.url()
.optional()
@@ -159,6 +161,8 @@ export const env = createEnv({
GOOGLE_SHEETS_REDIRECT_URL: process.env.GOOGLE_SHEETS_REDIRECT_URL,
HTTP_PROXY: process.env.HTTP_PROXY,
HTTPS_PROXY: process.env.HTTPS_PROXY,
HUB_API_URL: process.env.HUB_API_URL,
HUB_API_KEY: process.env.HUB_API_KEY,
IMPRINT_URL: process.env.IMPRINT_URL,
IMPRINT_ADDRESS: process.env.IMPRINT_ADDRESS,
INVITE_DISABLED: process.env.INVITE_DISABLED,

View File

@@ -7,6 +7,7 @@ import { ZId, ZOptionalNumber, ZString } from "@formbricks/types/common";
import { DatabaseError } from "@formbricks/types/errors";
import { TIntegration, TIntegrationInput, ZIntegrationType } from "@formbricks/types/integration";
import { ITEMS_PER_PAGE } from "../constants";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
import { validateInputs } from "../utils/validate";
const transformIntegration = (integration: TIntegration): TIntegration => {
@@ -28,6 +29,8 @@ export const createOrUpdateIntegration = async (
): Promise<TIntegration> => {
validateInputs([environmentId, ZId]);
const projectId = await getProjectIdFromEnvironmentId(environmentId);
try {
const integration = await prisma.integration.upsert({
where: {
@@ -38,11 +41,13 @@ export const createOrUpdateIntegration = async (
},
update: {
...integrationData,
environment: { connect: { id: environmentId } },
environmentId,
projectId,
},
create: {
...integrationData,
environment: { connect: { id: environmentId } },
environmentId,
projectId,
},
});
return integration;

View File

@@ -75,6 +75,7 @@ export const responseSelection = {
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -19,6 +19,7 @@ const selectContact = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
attributes: {
select: {
value: true,
@@ -41,6 +42,7 @@ const commonMockProperties = {
createdAt: currentDate,
updatedAt: currentDate,
environmentId: mockId,
projectId: null,
};
type SurveyMock = Prisma.SurveyGetPayload<{

View File

@@ -14,6 +14,7 @@ import {
import { TriggerUpdate } from "@/modules/survey/editor/types/survey-trigger";
import { getActionClasses } from "../actionClass/service";
import { ITEMS_PER_PAGE } from "../constants";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
import { validateInputs } from "../utils/validate";
import {
checkForInvalidImagesInQuestions,
@@ -30,6 +31,7 @@ export const selectSurvey = {
name: true,
type: true,
environmentId: true,
projectId: true,
createdBy: true,
status: true,
welcomeCard: true,
@@ -84,6 +86,7 @@ export const selectSurvey = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
name: true,
description: true,
type: true,
@@ -471,6 +474,11 @@ export const updateSurveyInternal = async (
id: environmentId,
},
},
project: {
connect: {
id: currentSurvey.projectId!,
},
},
},
},
},
@@ -624,7 +632,10 @@ export const createSurvey = async (
};
}
const organization = await getOrganizationByEnvironmentId(parsedEnvironmentId);
const [organization, projectId] = await Promise.all([
getOrganizationByEnvironmentId(parsedEnvironmentId),
getProjectIdFromEnvironmentId(parsedEnvironmentId),
]);
if (!organization) {
throw new ResourceNotFoundError("Organization", null);
}
@@ -659,6 +670,11 @@ export const createSurvey = async (
id: parsedEnvironmentId,
},
},
project: {
connect: {
id: projectId,
},
},
},
select: selectSurvey,
});
@@ -670,11 +686,8 @@ export const createSurvey = async (
title: survey.id,
filters: [],
isPrivate: true,
environment: {
connect: {
id: parsedEnvironmentId,
},
},
environmentId: parsedEnvironmentId,
projectId,
},
});

View File

@@ -6,6 +6,7 @@ import { PrismaErrorType } from "@formbricks/database/types/error";
import { ZId, ZOptionalNumber, ZString } from "@formbricks/types/common";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { TTag } from "@formbricks/types/tags";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { TagError } from "@/modules/projects/settings/types/tag";
import { ITEMS_PER_PAGE } from "../constants";
import { validateInputs } from "../utils/validate";
@@ -52,11 +53,14 @@ export const createTag = async (
): Promise<Result<TTag, { code: TagError; message: string; meta?: Record<string, string> }>> => {
validateInputs([environmentId, ZId], [name, ZString]);
const projectId = await getProjectIdFromEnvironmentId(environmentId);
try {
const tag = await prisma.tag.create({
data: {
name,
environmentId,
projectId,
},
});

View File

@@ -3,6 +3,7 @@ import { cache as reactCache } from "react";
import { prisma } from "@formbricks/database";
import { PrismaErrorType } from "@formbricks/database/types/error";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { getEnvironment } from "@/lib/environment/service";
import { formatSnakeCaseToTitleCase } from "@/lib/utils/safe-identifier";
import { getContactAttributeKeysQuery } from "@/modules/api/v2/management/contact-attribute-keys/lib/utils";
import {
@@ -45,6 +46,14 @@ export const createContactAttributeKey = async (
): Promise<Result<ContactAttributeKey, ApiErrorResponseV2>> => {
const { environmentId, name, description, key, dataType } = contactAttributeKey;
const environment = await getEnvironment(environmentId);
if (!environment) {
return err({
type: "not_found",
details: [{ field: "environment", issue: "not found" }],
});
}
try {
const prismaData: Prisma.ContactAttributeKeyCreateInput = {
environment: {
@@ -52,6 +61,11 @@ export const createContactAttributeKey = async (
id: environmentId,
},
},
project: {
connect: {
id: environment.projectId,
},
},
name: name ?? formatSnakeCaseToTitleCase(key),
description,
key,

View File

@@ -58,6 +58,7 @@ export const getResponseForPipeline = async (
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -184,6 +184,7 @@ describe("Response Lib", () => {
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -17,6 +17,7 @@ export const ZWebhookUpdateSchema = ZWebhook.omit({
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
secret: true,
}).meta({
id: "webhookUpdate",

View File

@@ -3,6 +3,7 @@ import { prisma } from "@formbricks/database";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { InvalidInputError } from "@formbricks/types/errors";
import { generateWebhookSecret } from "@/lib/crypto";
import { getEnvironment } from "@/lib/environment/service";
import { validateWebhookUrl } from "@/lib/utils/validate-webhook-url";
import { getWebhooksQuery } from "@/modules/api/v2/management/webhooks/lib/utils";
import { TGetWebhooksFilter, TWebhookInput } from "@/modules/api/v2/management/webhooks/types/webhooks";
@@ -68,6 +69,14 @@ export const createWebhook = async (webhook: TWebhookInput): Promise<Result<Webh
});
}
const environment = await getEnvironment(environmentId);
if (!environment) {
return err({
type: "not_found",
details: [{ field: "environment", issue: "not_found" }],
});
}
try {
const secret = generateWebhookSecret();
@@ -77,6 +86,11 @@ export const createWebhook = async (webhook: TWebhookInput): Promise<Result<Webh
id: environmentId,
},
},
project: {
connect: {
id: environment.projectId,
},
},
name,
url,
source,

View File

@@ -106,7 +106,10 @@ describe("billing actions", () => {
});
expect(mocks.getOrganization).toHaveBeenCalledWith("org_1");
expect(mocks.ensureStripeCustomerForOrganization).toHaveBeenCalledWith("org_1");
expect(mocks.reconcileCloudStripeSubscriptionsForOrganization).toHaveBeenCalledWith("org_1");
expect(mocks.reconcileCloudStripeSubscriptionsForOrganization).toHaveBeenCalledWith(
"org_1",
"start-hobby"
);
expect(mocks.syncOrganizationBillingFromStripe).toHaveBeenCalledWith("org_1");
expect(result).toEqual({ success: true });
});
@@ -125,7 +128,10 @@ describe("billing actions", () => {
} as any);
expect(mocks.ensureStripeCustomerForOrganization).not.toHaveBeenCalled();
expect(mocks.reconcileCloudStripeSubscriptionsForOrganization).toHaveBeenCalledWith("org_1");
expect(mocks.reconcileCloudStripeSubscriptionsForOrganization).toHaveBeenCalledWith(
"org_1",
"start-hobby"
);
expect(mocks.syncOrganizationBillingFromStripe).toHaveBeenCalledWith("org_1");
expect(result).toEqual({ success: true });
});
@@ -139,7 +145,7 @@ describe("billing actions", () => {
expect(mocks.getOrganization).toHaveBeenCalledWith("org_1");
expect(mocks.ensureStripeCustomerForOrganization).toHaveBeenCalledWith("org_1");
expect(mocks.createProTrialSubscription).toHaveBeenCalledWith("org_1", "cus_1");
expect(mocks.reconcileCloudStripeSubscriptionsForOrganization).toHaveBeenCalledWith("org_1");
expect(mocks.reconcileCloudStripeSubscriptionsForOrganization).toHaveBeenCalledWith("org_1", "pro-trial");
expect(mocks.syncOrganizationBillingFromStripe).toHaveBeenCalledWith("org_1");
expect(result).toEqual({ success: true });
});
@@ -159,7 +165,7 @@ describe("billing actions", () => {
expect(mocks.ensureStripeCustomerForOrganization).not.toHaveBeenCalled();
expect(mocks.createProTrialSubscription).toHaveBeenCalledWith("org_1", "cus_existing");
expect(mocks.reconcileCloudStripeSubscriptionsForOrganization).toHaveBeenCalledWith("org_1");
expect(mocks.reconcileCloudStripeSubscriptionsForOrganization).toHaveBeenCalledWith("org_1", "pro-trial");
expect(mocks.syncOrganizationBillingFromStripe).toHaveBeenCalledWith("org_1");
expect(result).toEqual({ success: true });
});

View File

@@ -216,7 +216,7 @@ export const startHobbyAction = authenticatedActionClient
throw new ResourceNotFoundError("OrganizationBilling", parsedInput.organizationId);
}
await reconcileCloudStripeSubscriptionsForOrganization(parsedInput.organizationId);
await reconcileCloudStripeSubscriptionsForOrganization(parsedInput.organizationId, "start-hobby");
await syncOrganizationBillingFromStripe(parsedInput.organizationId);
return { success: true };
});
@@ -248,7 +248,7 @@ export const startProTrialAction = authenticatedActionClient
}
await createProTrialSubscription(parsedInput.organizationId, customerId);
await reconcileCloudStripeSubscriptionsForOrganization(parsedInput.organizationId);
await reconcileCloudStripeSubscriptionsForOrganization(parsedInput.organizationId, "pro-trial");
await syncOrganizationBillingFromStripe(parsedInput.organizationId);
return { success: true };
});

View File

@@ -150,7 +150,7 @@ export const webhookHandler = async (requestBody: string, stripeSignature: strin
await handleSetupCheckoutCompleted(event.data.object, stripe);
}
await reconcileCloudStripeSubscriptionsForOrganization(organizationId);
await reconcileCloudStripeSubscriptionsForOrganization(organizationId, event.id);
await syncOrganizationBillingFromStripe(organizationId, {
id: event.id,
created: event.created,

View File

@@ -1905,7 +1905,7 @@ describe("organization-billing", () => {
items: [{ price: "price_hobby_monthly", quantity: 1 }],
metadata: { organizationId: "org_1" },
},
{ idempotencyKey: "ensure-hobby-subscription-org_1-0" }
{ idempotencyKey: "ensure-hobby-subscription-org_1-bootstrap" }
);
expect(mocks.prismaOrganizationBillingUpdate).toHaveBeenCalledWith({
where: { organizationId: "org_1" },
@@ -1974,7 +1974,7 @@ describe("organization-billing", () => {
],
});
await reconcileCloudStripeSubscriptionsForOrganization("org_1");
await reconcileCloudStripeSubscriptionsForOrganization("org_1", "evt_123");
expect(mocks.subscriptionsCancel).toHaveBeenCalledWith("sub_hobby", { prorate: false });
expect(mocks.subscriptionsCreate).not.toHaveBeenCalled();

View File

@@ -458,21 +458,18 @@ const resolvePendingChangeEffectiveAt = (
const ensureHobbySubscription = async (
organizationId: string,
customerId: string,
subscriptionCount: number
idempotencySuffix: string
): Promise<void> => {
if (!stripeClient) return;
const hobbyItems = await getCatalogItemsForPlan("hobby", "monthly");
// Include subscriptionCount so the key is stable across concurrent calls (same
// count → same key → Stripe deduplicates) but changes after a cancellation
// (count increases → new key → allows legitimate re-creation).
await stripeClient.subscriptions.create(
{
customer: customerId,
items: hobbyItems,
metadata: { organizationId },
},
{ idempotencyKey: `ensure-hobby-subscription-${organizationId}-${subscriptionCount}` }
{ idempotencyKey: `ensure-hobby-subscription-${organizationId}-${idempotencySuffix}` }
);
};
@@ -1267,7 +1264,8 @@ export const findOrganizationIdByStripeCustomerId = async (customerId: string):
};
export const reconcileCloudStripeSubscriptionsForOrganization = async (
organizationId: string
organizationId: string,
idempotencySuffix = "reconcile"
): Promise<void> => {
const client = stripeClient;
if (!IS_FORMBRICKS_CLOUD || !client) return;
@@ -1344,14 +1342,12 @@ export const reconcileCloudStripeSubscriptionsForOrganization = async (
// (e.g. webhook + bootstrap) both seeing 0 and creating duplicate hobbies.
const freshSubscriptions = await client.subscriptions.list({
customer: customerId,
status: "all",
limit: 20,
status: "active",
limit: 1,
});
const freshActive = freshSubscriptions.data.filter((sub) => ACTIVE_SUBSCRIPTION_STATUSES.has(sub.status));
if (freshActive.length === 0) {
await ensureHobbySubscription(organizationId, customerId, freshSubscriptions.data.length);
if (freshSubscriptions.data.length === 0) {
await ensureHobbySubscription(organizationId, customerId, idempotencySuffix);
}
}
};
@@ -1359,6 +1355,6 @@ export const reconcileCloudStripeSubscriptionsForOrganization = async (
export const ensureCloudStripeSetupForOrganization = async (organizationId: string): Promise<void> => {
if (!IS_FORMBRICKS_CLOUD || !stripeClient) return;
await ensureStripeCustomerForOrganization(organizationId);
await reconcileCloudStripeSubscriptionsForOrganization(organizationId);
await reconcileCloudStripeSubscriptionsForOrganization(organizationId, "bootstrap");
await syncOrganizationBillingFromStripe(organizationId);
};

View File

@@ -15,7 +15,7 @@ const getEnvironment = async (environmentId: string) =>
async () => {
return prisma.environment.findUnique({
where: { id: environmentId },
select: { id: true, type: true },
select: { id: true, type: true, projectId: true },
});
},
createCacheKey.environment.config(environmentId),
@@ -63,12 +63,15 @@ const getContactWithFullData = async (environmentId: string, userId: string) =>
/**
* Creates contact with comprehensive data structure
*/
const createContact = async (environmentId: string, userId: string) => {
const createContact = async (environmentId: string, projectId: string, userId: string) => {
return prisma.contact.create({
data: {
environment: {
connect: { id: environmentId },
},
project: {
connect: { id: projectId },
},
attributes: {
create: [
{
@@ -164,7 +167,7 @@ export const updateUser = async (
// Create contact if doesn't exist
if (!contactData) {
contactData = await createContact(environmentId, userId);
contactData = await createContact(environmentId, environment.projectId, userId);
}
// Process contact attributes efficiently (single pass)

View File

@@ -5,6 +5,7 @@ import { PrismaErrorType } from "@formbricks/database/types/error";
import { TContactAttributeKey } from "@formbricks/types/contact-attribute-key";
import { DatabaseError, OperationNotAllowedError } from "@formbricks/types/errors";
import { MAX_ATTRIBUTE_CLASSES_PER_ENVIRONMENT } from "@/lib/constants";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { formatSnakeCaseToTitleCase } from "@/lib/utils/safe-identifier";
import { TContactAttributeKeyCreateInput } from "@/modules/ee/contacts/api/v1/management/contact-attribute-keys/[contactAttributeKeyId]/types/contact-attribute-keys";
@@ -29,6 +30,8 @@ export const createContactAttributeKey = async (
environmentId: string,
data: TContactAttributeKeyCreateInput
): Promise<TContactAttributeKey | null> => {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const contactAttributeKeysCount = await prisma.contactAttributeKey.count({
where: {
environmentId,
@@ -54,6 +57,11 @@ export const createContactAttributeKey = async (
id: environmentId,
},
},
project: {
connect: {
id: projectId,
},
},
},
});

View File

@@ -4,6 +4,7 @@ import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
import { TContactAttributeDataType } from "@formbricks/types/contact-attribute-key";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { getEnvironment } from "@/lib/environment/service";
import { isSafeIdentifier } from "@/lib/utils/safe-identifier";
import { ApiErrorResponseV2 } from "@/modules/api/v2/types/api-error";
import { prepareAttributeColumnsForStorage } from "@/modules/ee/contacts/lib/attribute-storage";
@@ -406,6 +407,7 @@ const upsertAttributeKeysInBatches = async (
tx: Prisma.TransactionClient,
keysToUpsert: Map<string, { key: string; name: string; dataType: TContactAttributeDataType }>,
environmentId: string,
projectId: string,
attributeKeyMap: Record<string, string>
): Promise<void> => {
const keysArray = Array.from(keysToUpsert.values());
@@ -414,17 +416,18 @@ const upsertAttributeKeysInBatches = async (
const batch = keysArray.slice(i, i + BATCH_SIZE);
const upsertedKeys = await tx.$queryRaw<{ id: string; key: string }[]>`
INSERT INTO "ContactAttributeKey" ("id", "key", "name", "environmentId", "dataType", "created_at", "updated_at")
SELECT
INSERT INTO "ContactAttributeKey" ("id", "key", "name", "environmentId", "projectId", "dataType", "created_at", "updated_at")
SELECT
unnest(${Prisma.sql`ARRAY[${batch.map(() => createId())}]`}),
unnest(${Prisma.sql`ARRAY[${batch.map((k) => k.key)}]`}),
unnest(${Prisma.sql`ARRAY[${batch.map((k) => k.name)}]`}),
${environmentId},
${projectId},
unnest(${Prisma.sql`ARRAY[${batch.map((k) => k.dataType)}]`}::text[]::"ContactAttributeDataType"[]),
NOW(),
NOW()
ON CONFLICT ("key", "environmentId")
DO UPDATE SET
ON CONFLICT ("key", "environmentId")
DO UPDATE SET
"name" = EXCLUDED."name",
"updated_at" = NOW()
RETURNING "id", "key"
@@ -490,6 +493,16 @@ export const upsertBulkContacts = async (
>
> => {
const contactIdxWithConflictingUserIds: number[] = [];
const environment = await getEnvironment(environmentId);
if (!environment) {
return err({
type: "not_found",
details: [{ field: "environment", issue: "not found" }],
});
}
const { projectId } = environment;
const { userIdsInContacts, attributeKeys } = extractContactMetadata(contacts);
const [existingUserIds, existingContactsByEmail, existingAttributeKeys] = await Promise.all([
@@ -624,11 +637,11 @@ export const upsertBulkContacts = async (
// Upsert attribute keys in batches
if (keysToUpsert.size > 0) {
await upsertAttributeKeysInBatches(tx, keysToUpsert, environmentId, attributeKeyMap);
await upsertAttributeKeysInBatches(tx, keysToUpsert, environmentId, projectId, attributeKeyMap);
}
// Create new contacts
const newContacts = contactsToCreate.map(() => ({ id: createId(), environmentId }));
const newContacts = contactsToCreate.map(() => ({ id: createId(), environmentId, projectId }));
if (newContacts.length > 0) {
await tx.contact.createMany({ data: newContacts });

View File

@@ -1,5 +1,6 @@
import { prisma } from "@formbricks/database";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { getEnvironment } from "@/lib/environment/service";
import { ApiErrorResponseV2 } from "@/modules/api/v2/types/api-error";
import { readAttributeValue } from "@/modules/ee/contacts/lib/attribute-storage";
import { TContactCreateRequest, TContactResponse } from "@/modules/ee/contacts/types/contact";
@@ -18,6 +19,14 @@ export const createContact = async (
});
}
const environment = await getEnvironment(environmentId);
if (!environment) {
return err({
type: "not_found",
details: [{ field: "environment", issue: "not found" }],
});
}
// Extract userId if present
const userId = attributes.userId;
@@ -98,6 +107,7 @@ export const createContact = async (
const result = await prisma.contact.create({
data: {
environmentId,
projectId: environment.projectId,
attributes: {
createMany: {
data: attributeData,

View File

@@ -4,6 +4,7 @@ import { ZId, ZString } from "@formbricks/types/common";
import { TContactAttributesInput, ZContactAttributesInput } from "@formbricks/types/contact-attribute";
import { TContactAttributeKey } from "@formbricks/types/contact-attribute-key";
import { MAX_ATTRIBUTE_CLASSES_PER_ENVIRONMENT } from "@/lib/constants";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { formatSnakeCaseToTitleCase, isSafeIdentifier } from "@/lib/utils/safe-identifier";
import { validateInputs } from "@/lib/utils/validate";
import { prepareNewSDKAttributeForStorage } from "@/modules/ee/contacts/lib/attribute-storage";
@@ -145,14 +146,20 @@ export const updateAttributes = async (
? null
: String(contactAttributesParam.userId);
// Fetch current attributes, contact attribute keys, and email/userId checks in parallel
const [currentAttributes, contactAttributeKeys, existingEmailAttribute, existingUserIdAttribute] =
await Promise.all([
getContactAttributes(contactId),
getContactAttributeKeys(environmentId),
emailValue ? hasEmailAttribute(emailValue, environmentId, contactId) : Promise.resolve(null),
userIdValue ? hasUserIdAttribute(userIdValue, environmentId, contactId) : Promise.resolve(null),
]);
// Fetch current attributes, contact attribute keys, environment, and email/userId checks in parallel
const [
currentAttributes,
contactAttributeKeys,
projectId,
existingEmailAttribute,
existingUserIdAttribute,
] = await Promise.all([
getContactAttributes(contactId),
getContactAttributeKeys(environmentId),
getProjectIdFromEnvironmentId(environmentId),
emailValue ? hasEmailAttribute(emailValue, environmentId, contactId) : Promise.resolve(null),
userIdValue ? hasUserIdAttribute(userIdValue, environmentId, contactId) : Promise.resolve(null),
]);
// Process email and userId existence early
const emailExists = !!existingEmailAttribute;
@@ -360,6 +367,7 @@ export const updateAttributes = async (
type: "custom",
dataType,
environment: { connect: { id: environmentId } },
project: { connect: { id: projectId } },
attributes: {
create: {
contactId,

View File

@@ -3,6 +3,7 @@ import { prisma } from "@formbricks/database";
import { PrismaErrorType } from "@formbricks/database/types/error";
import { TContactAttributeDataType, TContactAttributeKey } from "@formbricks/types/contact-attribute-key";
import { InvalidInputError, OperationNotAllowedError, ResourceNotFoundError } from "@formbricks/types/errors";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { formatSnakeCaseToTitleCase } from "@/lib/utils/safe-identifier";
export const getContactAttributeKeys = reactCache(
@@ -31,6 +32,8 @@ export const createContactAttributeKey = async (data: {
description?: string;
dataType?: TContactAttributeDataType;
}): Promise<TContactAttributeKey> => {
const projectId = await getProjectIdFromEnvironmentId(data.environmentId);
try {
const contactAttributeKey = await prisma.contactAttributeKey.create({
data: {
@@ -38,6 +41,7 @@ export const createContactAttributeKey = async (data: {
name: data.name ?? formatSnakeCaseToTitleCase(data.key),
description: data.description ?? null,
environmentId: data.environmentId,
projectId,
type: "custom",
...(data.dataType && { dataType: data.dataType }),
},

View File

@@ -7,6 +7,7 @@ import { ZId, ZOptionalNumber, ZOptionalString } from "@formbricks/types/common"
import { TContactAttributeDataType } from "@formbricks/types/contact-attribute-key";
import { DatabaseError, ValidationError } from "@formbricks/types/errors";
import { ITEMS_PER_PAGE } from "@/lib/constants";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { formatSnakeCaseToTitleCase, isSafeIdentifier } from "@/lib/utils/safe-identifier";
import { validateInputs } from "@/lib/utils/validate";
import { prepareAttributeColumnsForStorage } from "@/modules/ee/contacts/lib/attribute-storage";
@@ -98,6 +99,7 @@ const selectContact = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
attributes: {
select: {
value: true,
@@ -398,7 +400,8 @@ const createMissingAttributeKeys = async (
lowercaseToActualKeyMap: Map<string, string>,
attributeKeyMap: Map<string, string>,
attributeTypeMap: Map<string, TAttributeTypeInfo>,
environmentId: string
environmentId: string,
projectId: string
): Promise<void> => {
const missingKeys = Array.from(csvKeys).filter((key) => !lowercaseToActualKeyMap.has(key.toLowerCase()));
@@ -427,6 +430,7 @@ const createMissingAttributeKeys = async (
name: formatSnakeCaseToTitleCase(key),
dataType: attributeTypeMap.get(key)?.dataType ?? "string",
environmentId,
projectId,
})),
skipDuplicates: true,
});
@@ -461,6 +465,7 @@ type TCsvProcessingContext = {
attributeTypeMap: Map<string, TAttributeTypeInfo>;
duplicateContactsAction: "skip" | "update" | "overwrite";
environmentId: string;
projectId: string;
};
/**
@@ -478,6 +483,7 @@ const processCsvRecord = async (
attributeTypeMap,
duplicateContactsAction,
environmentId,
projectId,
} = ctx;
// Map CSV keys to actual DB keys (case-insensitive matching)
const mappedRecord: Record<string, string> = {};
@@ -500,6 +506,7 @@ const processCsvRecord = async (
return prisma.contact.create({
data: {
environmentId,
projectId,
attributes: {
create: createAttributeConnections(mappedRecord, environmentId, attributeTypeMap),
},
@@ -610,10 +617,13 @@ export const createContactsFromCSV = async (
);
try {
// Step 1: Extract metadata from CSV data
// Step 1: Resolve projectId from environment
const projectId = await getProjectIdFromEnvironmentId(environmentId);
// Step 2: Extract metadata from CSV data
const { csvEmails, csvUserIds, csvKeys, attributeValuesByKey } = extractCsvMetadata(csvData);
// Step 2: Fetch existing data from database
// Step 3: Fetch existing data from database
const [existingContactsByEmail, existingUserIds, existingAttributeKeys] = await Promise.all([
prisma.contact.findMany({
where: {
@@ -668,7 +678,8 @@ export const createContactsFromCSV = async (
lowercaseToActualKeyMap,
attributeKeyMap,
attributeTypeMap,
environmentId
environmentId,
projectId
);
// Step 6: Process each CSV record
@@ -680,6 +691,7 @@ export const createContactsFromCSV = async (
attributeTypeMap,
duplicateContactsAction,
environmentId,
projectId,
};
const CHUNK_SIZE = 50;

View File

@@ -45,6 +45,7 @@ export function CreateSegmentModal({
isPrivate: false,
filters: [],
environmentId,
projectId: null,
id: "",
surveys: [],
createdAt: new Date(),

View File

@@ -32,6 +32,7 @@ import {
ZSegmentUpdateInput,
} from "@formbricks/types/segment";
import { getSurvey } from "@/lib/survey/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { validateInputs } from "@/lib/utils/validate";
import { isResourceFilter, searchForAttributeKeyInSegment } from "@/modules/ee/contacts/segments/lib/utils";
import { isSameDay, subtractTimeUnit } from "./date-utils";
@@ -55,6 +56,7 @@ export const selectSegment = {
title: true,
description: true,
environmentId: true,
projectId: true,
filters: true,
isPrivate: true,
surveys: {
@@ -138,6 +140,8 @@ export const createSegment = async (segmentCreateInput: TSegmentCreateInput): Pr
const surveyConnect = surveyId ? { surveys: { connect: { id: surveyId } } } : {};
const projectId = await getProjectIdFromEnvironmentId(environmentId);
try {
// Private segments use upsert because auto-save may have already created a
// default (empty-filter) segment via connectOrCreate before the user publishes.
@@ -156,11 +160,13 @@ export const createSegment = async (segmentCreateInput: TSegmentCreateInput): Pr
description,
isPrivate,
filters,
projectId,
...surveyConnect,
},
update: {
description,
filters,
projectId,
...surveyConnect,
},
select: selectSegment,
@@ -176,6 +182,7 @@ export const createSegment = async (segmentCreateInput: TSegmentCreateInput): Pr
description,
isPrivate,
filters,
projectId,
...surveyConnect,
},
select: selectSegment,
@@ -233,6 +240,7 @@ export const cloneSegment = async (segmentId: string, surveyId: string): Promise
isPrivate: segment.isPrivate,
environmentId: segment.environmentId,
filters: segment.filters,
projectId: segment.projectId,
surveys: {
connect: {
id: surveyId,
@@ -327,7 +335,8 @@ export const resetSegmentInSurvey = async (surveyId: string): Promise<TSegment>
isPrivate: true,
filters: [],
surveys: { connect: { id: surveyId } },
environment: { connect: { id: survey?.environmentId } },
environmentId: survey.environmentId,
projectId: survey.projectId,
},
select: selectSegment,
});

View File

@@ -27,6 +27,7 @@ export const WebhookTable = ({
const { t } = useTranslation();
const [activeWebhook, setActiveWebhook] = useState<Webhook>({
environmentId: environment.id,
projectId: null,
id: "",
name: "",
url: "",

View File

@@ -10,6 +10,7 @@ import {
UnknownError,
} from "@formbricks/types/errors";
import { generateStandardWebhookSignature, generateWebhookSecret } from "@/lib/crypto";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { validateInputs } from "@/lib/utils/validate";
import { validateWebhookUrl } from "@/lib/utils/validate-webhook-url";
import { getTranslate } from "@/lingodotdev/server";
@@ -105,6 +106,8 @@ export const createWebhook = async (
): Promise<Webhook> => {
await validateWebhookUrl(webhookInput.url);
const projectId = await getProjectIdFromEnvironmentId(environmentId);
try {
if (isDiscordWebhook(webhookInput.url)) {
throw new UnknownError("Discord webhooks are currently not supported.");
@@ -117,11 +120,8 @@ export const createWebhook = async (
...webhookInput,
surveyIds: webhookInput.surveyIds || [],
secret: signingSecret,
environment: {
connect: {
id: environmentId,
},
},
environmentId,
projectId,
},
});

View File

@@ -8,6 +8,7 @@ import {
subscribeOrganizationMembersToSurveyResponses,
} from "@/lib/organization/service";
import { validateMediaAndPrepareBlocks } from "@/lib/survey/utils";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { TriggerUpdate } from "@/modules/survey/editor/types/survey-trigger";
import { getActionClasses } from "@/modules/survey/lib/action-class";
import { selectSurvey } from "@/modules/survey/lib/survey";
@@ -44,7 +45,10 @@ export const createSurvey = async (
};
}
const organization = await getOrganizationByEnvironmentId(environmentId);
const [organization, projectId] = await Promise.all([
getOrganizationByEnvironmentId(environmentId),
getProjectIdFromEnvironmentId(environmentId),
]);
if (!organization) {
throw new ResourceNotFoundError("Organization", null);
}
@@ -75,6 +79,11 @@ export const createSurvey = async (
id: environmentId,
},
},
project: {
connect: {
id: projectId,
},
},
},
select: selectSurvey,
});
@@ -86,11 +95,8 @@ export const createSurvey = async (
title: survey.id,
filters: [],
isPrivate: true,
environment: {
connect: {
id: environmentId,
},
},
environmentId,
projectId,
},
});

View File

@@ -49,6 +49,7 @@ export const HowToSendCard = ({ localSurvey, setLocalSurvey, environment }: HowT
isPrivate: true,
title: localSurvey.id,
environmentId: environment.id,
projectId: null,
surveys: [localSurvey.id],
filters: [],
createdAt: new Date(),

View File

@@ -3,6 +3,7 @@ import { prisma } from "@formbricks/database";
import { PrismaErrorType } from "@formbricks/database/types/error";
import { TActionClassInput } from "@formbricks/types/action-classes";
import { DatabaseError } from "@formbricks/types/errors";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
export const createActionClass = async (
environmentId: string,
@@ -11,10 +12,13 @@ export const createActionClass = async (
const { environmentId: _, ...actionClassInput } = actionClass;
try {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const actionClassPrisma = await prisma.actionClass.create({
data: {
...actionClassInput,
environment: { connect: { id: environmentId } },
environmentId,
projectId,
key: actionClassInput.type === "code" ? actionClassInput.key : undefined,
noCodeConfig:
actionClassInput.type === "noCode"

View File

@@ -6,6 +6,7 @@ import { TSegment, ZSegmentFilters } from "@formbricks/types/segment";
import { TSurvey } from "@formbricks/types/surveys/types";
import { updateSurveyInternal } from "@/lib/survey/service";
import { validateMediaAndPrepareBlocks } from "@/lib/survey/utils";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { TriggerUpdate } from "@/modules/survey/editor/types/survey-trigger";
import { getActionClasses } from "@/modules/survey/lib/action-class";
import { getOrganizationAIKeys, getOrganizationIdFromEnvironmentId } from "@/modules/survey/lib/organization";
@@ -161,6 +162,8 @@ export const updateSurvey = async (updatedSurvey: TSurvey): Promise<TSurvey> =>
}
} else if (type === "app") {
if (!currentSurvey.segment) {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
await prisma.survey.update({
where: {
id: surveyId,
@@ -183,6 +186,11 @@ export const updateSurvey = async (updatedSurvey: TSurvey): Promise<TSurvey> =>
id: environmentId,
},
},
project: {
connect: {
id: projectId,
},
},
},
},
},

View File

@@ -14,6 +14,7 @@ export const selectSurvey = {
name: true,
type: true,
environmentId: true,
projectId: true,
createdBy: true,
status: true,
welcomeCard: true,
@@ -69,6 +70,7 @@ export const selectSurvey = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
name: true,
description: true,
type: true,
@@ -84,6 +86,7 @@ export const selectSurvey = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
title: true,
description: true,
isPrivate: true,

View File

@@ -15,6 +15,7 @@ export const surveySelect = {
status: true,
singleUse: true,
environmentId: true,
projectId: true,
_count: {
select: { responses: true },
},

View File

@@ -380,6 +380,7 @@ export const copySurveyToOtherEnvironment = async (
const baseActionClassData = {
name: modifiedName,
environment: { connect: { id: targetEnvironmentId } },
project: { connect: { id: targetProject.id } },
description: trigger.actionClass.description,
type: trigger.actionClass.type,
};
@@ -444,6 +445,11 @@ export const copySurveyToOtherEnvironment = async (
id: targetEnvironmentId,
},
},
project: {
connect: {
id: targetProject.id,
},
},
creator: {
connect: {
id: userId,
@@ -493,6 +499,7 @@ export const copySurveyToOtherEnvironment = async (
isPrivate: true,
filters: existingSurvey.segment.filters,
environment: { connect: { id: targetEnvironmentId } },
project: { connect: { id: targetProject.id } },
},
};
} else if (isSameEnvironment) {
@@ -514,6 +521,7 @@ export const copySurveyToOtherEnvironment = async (
isPrivate: false,
filters: existingSurvey.segment.filters,
environment: { connect: { id: targetEnvironmentId } },
project: { connect: { id: targetProject.id } },
},
};
}

View File

@@ -9,6 +9,7 @@ export const getMinimalSurvey = (t: TFunction): TSurvey => ({
name: "Minimal Survey",
type: "app",
environmentId: "someEnvId1",
projectId: null,
createdBy: null,
status: "draft",
displayOption: "displayOnce",

View File

@@ -1,6 +1,6 @@
# formbricks
![Version: 0.0.0-dev](https://img.shields.io/badge/Version-0.0.0--dev-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square)
![Version: 0.0.0-dev](https://img.shields.io/badge/Version-0.0.0--dev-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 3.7.0](https://img.shields.io/badge/AppVersion-3.7.0-informational?style=flat-square)
A Helm chart for Formbricks with PostgreSQL and Redis
@@ -8,150 +8,178 @@ A Helm chart for Formbricks with PostgreSQL, Redis
## Maintainers
| Name | Email | Url |
| ---------- | --------------------- | --- |
| Formbricks | <info@formbricks.com> | |
| Name | Email | Url |
| ---- | ------ | --- |
| Formbricks | <info@formbricks.com> | |
## Requirements
| Repository | Name | Version |
| ---------------------------------------- | ---------- | ------- |
| Repository | Name | Version |
|------------|------|---------|
| oci://registry-1.docker.io/bitnamicharts | postgresql | 16.4.16 |
| oci://registry-1.docker.io/bitnamicharts | redis | 20.11.2 |
| oci://registry-1.docker.io/bitnamicharts | redis | 20.11.2 |
## Values
| Key | Type | Default | Description |
| ------------------------------------------------------------------ | ------ | --------------------------------- | ----------- |
| autoscaling.additionalLabels | object | `{}` | |
| autoscaling.annotations | object | `{}` | |
| autoscaling.enabled | bool | `true` | |
| autoscaling.maxReplicas | int | `10` | |
| autoscaling.metrics[0].resource.name | string | `"cpu"` | |
| autoscaling.metrics[0].resource.target.averageUtilization | int | `60` | |
| autoscaling.metrics[0].resource.target.type | string | `"Utilization"` | |
| autoscaling.metrics[0].type | string | `"Resource"` | |
| autoscaling.metrics[1].resource.name | string | `"memory"` | |
| autoscaling.metrics[1].resource.target.averageUtilization | int | `60` | |
| autoscaling.metrics[1].resource.target.type | string | `"Utilization"` | |
| autoscaling.metrics[1].type | string | `"Resource"` | |
| autoscaling.minReplicas | int | `1` | |
| componentOverride | string | `""` | |
| cronJob.enabled | bool | `false` | |
| cronJob.jobs | object | `{}` | |
| deployment.additionalLabels | object | `{}` | |
| deployment.additionalPodAnnotations | object | `{}` | |
| deployment.additionalPodLabels | object | `{}` | |
| deployment.affinity | object | `{}` | |
| deployment.annotations | object | `{}` | |
| deployment.args | list | `[]` | |
| deployment.command | list | `[]` | |
| deployment.containerSecurityContext.readOnlyRootFilesystem | bool | `true` | |
| deployment.containerSecurityContext.runAsNonRoot | bool | `true` | |
| deployment.env.EMAIL_VERIFICATION_DISABLED.value | string | `"1"` | |
| deployment.env.PASSWORD_RESET_DISABLED.value | string | `"1"` | |
| deployment.envFrom | string | `nil` | |
| deployment.image.digest | string | `""` | |
| deployment.image.pullPolicy | string | `"IfNotPresent"` | |
| deployment.image.repository | string | `"ghcr.io/formbricks/formbricks"` | |
| deployment.imagePullSecrets | string | `""` | |
| deployment.nodeSelector | object | `{}` | |
| deployment.ports.http.containerPort | int | `3000` | |
| deployment.ports.http.exposed | bool | `true` | |
| deployment.ports.http.protocol | string | `"TCP"` | |
| deployment.ports.metrics.containerPort | int | `9464` | |
| deployment.ports.metrics.exposed | bool | `true` | |
| deployment.ports.metrics.protocol | string | `"TCP"` | |
| deployment.probes.livenessProbe.failureThreshold | int | `5` | |
| deployment.probes.livenessProbe.httpGet.path | string | `"/health"` | |
| deployment.probes.livenessProbe.httpGet.port | int | `3000` | |
| deployment.probes.livenessProbe.initialDelaySeconds | int | `10` | |
| deployment.probes.livenessProbe.periodSeconds | int | `10` | |
| deployment.probes.livenessProbe.successThreshold | int | `1` | |
| deployment.probes.livenessProbe.timeoutSeconds | int | `5` | |
| deployment.probes.readinessProbe.failureThreshold | int | `5` | |
| deployment.probes.readinessProbe.httpGet.path | string | `"/health"` | |
| deployment.probes.readinessProbe.httpGet.port | int | `3000` | |
| deployment.probes.readinessProbe.initialDelaySeconds | int | `10` | |
| deployment.probes.readinessProbe.periodSeconds | int | `10` | |
| deployment.probes.readinessProbe.successThreshold | int | `1` | |
| deployment.probes.readinessProbe.timeoutSeconds | int | `5` | |
| deployment.probes.startupProbe.failureThreshold | int | `30` | |
| deployment.probes.startupProbe.periodSeconds | int | `10` | |
| deployment.probes.startupProbe.tcpSocket.port | int | `3000` | |
| deployment.reloadOnChange | bool | `false` | |
| deployment.replicas | int | `1` | |
| deployment.resources.limits.memory | string | `"2Gi"` | |
| deployment.resources.requests.cpu | string | `"1"` | |
| deployment.resources.requests.memory | string | `"1Gi"` | |
| deployment.revisionHistoryLimit | int | `2` | |
| deployment.securityContext | object | `{}` | |
| deployment.strategy.type | string | `"RollingUpdate"` | |
| deployment.tolerations | list | `[]` | |
| deployment.topologySpreadConstraints | list | `[]` | |
| enterprise.enabled | bool | `false` | |
| enterprise.licenseKey | string | `""` | |
| externalSecret.enabled | bool | `false` | |
| externalSecret.files | object | `{}` | |
| externalSecret.refreshInterval | string | `"1h"` | |
| externalSecret.secretStore.kind | string | `"ClusterSecretStore"` | |
| externalSecret.secretStore.name | string | `"aws-secrets-manager"` | |
| ingress.annotations | object | `{}` | |
| ingress.enabled | bool | `false` | |
| ingress.hosts[0].host | string | `"k8s.formbricks.com"` | |
| ingress.hosts[0].paths[0].path | string | `"/"` | |
| ingress.hosts[0].paths[0].pathType | string | `"Prefix"` | |
| ingress.hosts[0].paths[0].serviceName | string | `"formbricks"` | |
| ingress.ingressClassName | string | `"alb"` | |
| nameOverride | string | `""` | |
| partOfOverride | string | `""` | |
| postgresql.auth.database | string | `"formbricks"` | |
| postgresql.auth.existingSecret | string | `"formbricks-app-secrets"` | |
| postgresql.auth.secretKeys.adminPasswordKey | string | `"POSTGRES_ADMIN_PASSWORD"` | |
| postgresql.auth.secretKeys.userPasswordKey | string | `"POSTGRES_USER_PASSWORD"` | |
| postgresql.auth.username | string | `"formbricks"` | |
| postgresql.enabled | bool | `true` | |
| postgresql.externalDatabaseUrl | string | `""` | |
| postgresql.fullnameOverride | string | `"formbricks-postgresql"` | |
| postgresql.global.security.allowInsecureImages | bool | `true` | |
| postgresql.image.repository | string | `"pgvector/pgvector"` | |
| postgresql.image.tag | string | `"0.8.0-pg17"` | |
| postgresql.primary.containerSecurityContext.enabled | bool | `true` | |
| postgresql.primary.containerSecurityContext.readOnlyRootFilesystem | bool | `false` | |
| postgresql.primary.containerSecurityContext.runAsUser | int | `1001` | |
| postgresql.primary.networkPolicy.enabled | bool | `false` | |
| postgresql.primary.persistence.enabled | bool | `true` | |
| postgresql.primary.persistence.size | string | `"10Gi"` | |
| postgresql.primary.podSecurityContext.enabled | bool | `true` | |
| postgresql.primary.podSecurityContext.fsGroup | int | `1001` | |
| postgresql.primary.podSecurityContext.runAsUser | int | `1001` | |
| rbac.enabled | bool | `false` | |
| rbac.serviceAccount.additionalLabels | object | `{}` | |
| rbac.serviceAccount.annotations | object | `{}` | |
| rbac.serviceAccount.enabled | bool | `false` | |
| rbac.serviceAccount.name | string | `""` | |
| redis.architecture | string | `"standalone"` | |
| redis.auth.enabled | bool | `true` | |
| redis.auth.existingSecret | string | `"formbricks-app-secrets"` | |
| redis.auth.existingSecretPasswordKey | string | `"REDIS_PASSWORD"` | |
| redis.enabled | bool | `true` | |
| redis.externalRedisUrl | string | `""` | |
| redis.fullnameOverride | string | `"formbricks-redis"` | |
| redis.master.persistence.enabled | bool | `true` | |
| redis.networkPolicy.enabled | bool | `false` | |
| secret.enabled | bool | `true` | |
| service.additionalLabels | object | `{}` | |
| service.annotations | object | `{}` | |
| service.enabled | bool | `true` | |
| service.ports | list | `[]` | |
| service.type | string | `"ClusterIP"` | |
| serviceMonitor.additionalLabels | string | `nil` | |
| serviceMonitor.annotations | string | `nil` | |
| serviceMonitor.enabled | bool | `true` | |
| serviceMonitor.endpoints[0].interval | string | `"5s"` | |
| serviceMonitor.endpoints[0].path | string | `"/metrics"` | |
| serviceMonitor.endpoints[0].port | string | `"metrics"` | |
---
Autogenerated from chart metadata using [helm-docs v1.14.2](https://github.com/norwoodj/helm-docs/releases/v1.14.2)
| Key | Type | Default | Description |
|-----|------|---------|-------------|
| autoscaling.additionalLabels | object | `{}` | |
| autoscaling.annotations | object | `{}` | |
| autoscaling.behavior.scaleDown.policies[0].periodSeconds | int | `120` | |
| autoscaling.behavior.scaleDown.policies[0].type | string | `"Pods"` | |
| autoscaling.behavior.scaleDown.policies[0].value | int | `1` | |
| autoscaling.behavior.scaleDown.stabilizationWindowSeconds | int | `300` | |
| autoscaling.behavior.scaleUp.policies[0].periodSeconds | int | `60` | |
| autoscaling.behavior.scaleUp.policies[0].type | string | `"Pods"` | |
| autoscaling.behavior.scaleUp.policies[0].value | int | `2` | |
| autoscaling.behavior.scaleUp.stabilizationWindowSeconds | int | `60` | |
| autoscaling.enabled | bool | `true` | |
| autoscaling.maxReplicas | int | `10` | |
| autoscaling.metrics[0].resource.name | string | `"cpu"` | |
| autoscaling.metrics[0].resource.target.averageUtilization | int | `60` | |
| autoscaling.metrics[0].resource.target.type | string | `"Utilization"` | |
| autoscaling.metrics[0].type | string | `"Resource"` | |
| autoscaling.metrics[1].resource.name | string | `"memory"` | |
| autoscaling.metrics[1].resource.target.averageUtilization | int | `60` | |
| autoscaling.metrics[1].resource.target.type | string | `"Utilization"` | |
| autoscaling.metrics[1].type | string | `"Resource"` | |
| autoscaling.minReplicas | int | `1` | |
| componentOverride | string | `""` | |
| deployment.additionalLabels | object | `{}` | |
| deployment.additionalPodAnnotations | object | `{}` | |
| deployment.additionalPodLabels | object | `{}` | |
| deployment.affinity | object | `{}` | |
| deployment.annotations | object | `{}` | |
| deployment.args | list | `[]` | |
| deployment.command | list | `[]` | |
| deployment.containerSecurityContext.readOnlyRootFilesystem | bool | `true` | |
| deployment.containerSecurityContext.runAsNonRoot | bool | `true` | |
| deployment.env | object | `{}` | |
| deployment.envFrom | string | `nil` | |
| deployment.image.digest | string | `""` | |
| deployment.image.pullPolicy | string | `"IfNotPresent"` | |
| deployment.image.repository | string | `"ghcr.io/formbricks/formbricks"` | |
| deployment.image.tag | string | `""` | |
| deployment.imagePullSecrets | string | `""` | |
| deployment.nodeSelector | object | `{}` | |
| deployment.ports.http.containerPort | int | `3000` | |
| deployment.ports.http.exposed | bool | `true` | |
| deployment.ports.http.protocol | string | `"TCP"` | |
| deployment.ports.metrics.containerPort | int | `9464` | |
| deployment.ports.metrics.exposed | bool | `true` | |
| deployment.ports.metrics.protocol | string | `"TCP"` | |
| deployment.probes.livenessProbe.failureThreshold | int | `5` | |
| deployment.probes.livenessProbe.httpGet.path | string | `"/health"` | |
| deployment.probes.livenessProbe.httpGet.port | int | `3000` | |
| deployment.probes.livenessProbe.initialDelaySeconds | int | `10` | |
| deployment.probes.livenessProbe.periodSeconds | int | `10` | |
| deployment.probes.livenessProbe.successThreshold | int | `1` | |
| deployment.probes.livenessProbe.timeoutSeconds | int | `5` | |
| deployment.probes.readinessProbe.failureThreshold | int | `5` | |
| deployment.probes.readinessProbe.httpGet.path | string | `"/health"` | |
| deployment.probes.readinessProbe.httpGet.port | int | `3000` | |
| deployment.probes.readinessProbe.initialDelaySeconds | int | `10` | |
| deployment.probes.readinessProbe.periodSeconds | int | `10` | |
| deployment.probes.readinessProbe.successThreshold | int | `1` | |
| deployment.probes.readinessProbe.timeoutSeconds | int | `5` | |
| deployment.probes.startupProbe.failureThreshold | int | `30` | |
| deployment.probes.startupProbe.periodSeconds | int | `10` | |
| deployment.probes.startupProbe.tcpSocket.port | int | `3000` | |
| deployment.reloadOnChange | bool | `false` | |
| deployment.replicas | int | `1` | |
| deployment.resources.limits.memory | string | `"2Gi"` | |
| deployment.resources.requests.cpu | string | `"1"` | |
| deployment.resources.requests.memory | string | `"1Gi"` | |
| deployment.revisionHistoryLimit | int | `2` | |
| deployment.securityContext | object | `{}` | |
| deployment.strategy.type | string | `"RollingUpdate"` | |
| deployment.tolerations | list | `[]` | |
| deployment.topologySpreadConstraints | list | `[]` | |
| enterprise.enabled | bool | `false` | |
| enterprise.licenseKey | string | `""` | |
| externalSecret.enabled | bool | `false` | |
| externalSecret.files | object | `{}` | |
| externalSecret.refreshInterval | string | `"1h"` | |
| externalSecret.secretStore.kind | string | `"ClusterSecretStore"` | |
| externalSecret.secretStore.name | string | `"aws-secrets-manager"` | |
| formbricks.publicUrl | string | `""` | |
| formbricks.webappUrl | string | `""` | |
| hub.enabled | bool | `true` | |
| hub.env | object | `{}` | |
| hub.existingSecret | string | `""` | |
| hub.image.pullPolicy | string | `"IfNotPresent"` | |
| hub.image.repository | string | `"ghcr.io/formbricks/hub"` | |
| hub.image.tag | string | `"1.0.0"` | |
| hub.migration.activeDeadlineSeconds | int | `900` | |
| hub.migration.backoffLimit | int | `3` | |
| hub.migration.ttlSecondsAfterFinished | int | `300` | |
| hub.replicas | int | `1` | |
| hub.resources.limits.memory | string | `"512Mi"` | |
| hub.resources.requests.cpu | string | `"100m"` | |
| hub.resources.requests.memory | string | `"256Mi"` | |
| ingress.annotations | object | `{}` | |
| ingress.enabled | bool | `false` | |
| ingress.hosts[0].host | string | `"k8s.formbricks.com"` | |
| ingress.hosts[0].paths[0].path | string | `"/"` | |
| ingress.hosts[0].paths[0].pathType | string | `"Prefix"` | |
| ingress.hosts[0].paths[0].serviceName | string | `"formbricks"` | |
| ingress.ingressClassName | string | `"alb"` | |
| migration.annotations | object | `{}` | |
| migration.backoffLimit | int | `3` | |
| migration.enabled | bool | `true` | |
| migration.resources.limits.memory | string | `"512Mi"` | |
| migration.resources.requests.cpu | string | `"100m"` | |
| migration.resources.requests.memory | string | `"256Mi"` | |
| migration.ttlSecondsAfterFinished | int | `300` | |
| nameOverride | string | `""` | |
| partOfOverride | string | `""` | |
| pdb.additionalLabels | object | `{}` | |
| pdb.annotations | object | `{}` | |
| pdb.enabled | bool | `true` | |
| pdb.minAvailable | int | `1` | |
| postgresql.auth.database | string | `"formbricks"` | |
| postgresql.auth.existingSecret | string | `"formbricks-app-secrets"` | |
| postgresql.auth.secretKeys.adminPasswordKey | string | `"POSTGRES_ADMIN_PASSWORD"` | |
| postgresql.auth.secretKeys.userPasswordKey | string | `"POSTGRES_USER_PASSWORD"` | |
| postgresql.auth.username | string | `"formbricks"` | |
| postgresql.enabled | bool | `true` | |
| postgresql.externalDatabaseUrl | string | `""` | |
| postgresql.fullnameOverride | string | `"formbricks-postgresql"` | |
| postgresql.global.security.allowInsecureImages | bool | `true` | |
| postgresql.image.repository | string | `"pgvector/pgvector"` | |
| postgresql.image.tag | string | `"pg17"` | |
| postgresql.primary.containerSecurityContext.enabled | bool | `true` | |
| postgresql.primary.containerSecurityContext.readOnlyRootFilesystem | bool | `false` | |
| postgresql.primary.containerSecurityContext.runAsUser | int | `1001` | |
| postgresql.primary.networkPolicy.enabled | bool | `false` | |
| postgresql.primary.persistence.enabled | bool | `true` | |
| postgresql.primary.persistence.size | string | `"10Gi"` | |
| postgresql.primary.podSecurityContext.enabled | bool | `true` | |
| postgresql.primary.podSecurityContext.fsGroup | int | `1001` | |
| postgresql.primary.podSecurityContext.runAsUser | int | `1001` | |
| rbac.enabled | bool | `false` | |
| rbac.serviceAccount.additionalLabels | object | `{}` | |
| rbac.serviceAccount.annotations | object | `{}` | |
| rbac.serviceAccount.enabled | bool | `false` | |
| rbac.serviceAccount.name | string | `""` | |
| redis.architecture | string | `"standalone"` | |
| redis.auth.enabled | bool | `true` | |
| redis.auth.existingSecret | string | `"formbricks-app-secrets"` | |
| redis.auth.existingSecretPasswordKey | string | `"REDIS_PASSWORD"` | |
| redis.enabled | bool | `true` | |
| redis.externalRedisUrl | string | `""` | |
| redis.fullnameOverride | string | `"formbricks-redis"` | |
| redis.master.persistence.enabled | bool | `true` | |
| redis.networkPolicy.enabled | bool | `false` | |
| secret.enabled | bool | `true` | |
| service.additionalLabels | object | `{}` | |
| service.annotations | object | `{}` | |
| service.enabled | bool | `true` | |
| service.ports | list | `[]` | |
| service.type | string | `"ClusterIP"` | |
| serviceMonitor.additionalLabels | string | `nil` | |
| serviceMonitor.annotations | string | `nil` | |
| serviceMonitor.enabled | bool | `true` | |
| serviceMonitor.endpoints[0].interval | string | `"5s"` | |
| serviceMonitor.endpoints[0].path | string | `"/metrics"` | |
| serviceMonitor.endpoints[0].port | string | `"metrics"` | |

View File

@@ -8,6 +8,15 @@ It also truncates the name to a maximum of 63 characters and removes trailing hy
{{- end }}
{{/*
Hub resource name: base name truncated to 59 chars then "-hub" so the suffix is never lost (63 char limit).
*/}}
{{- define "formbricks.hubname" -}}
{{- $base := include "formbricks.name" . | trunc 59 | trimSuffix "-" }}
{{- printf "%s-hub" $base | trimSuffix "-" }}
{{- end }}
{{/*
Define the application version to be used in labels.
The version is taken from `.Values.deployment.image.tag` if provided, otherwise it defaults to `.Chart.Version`.
@@ -85,9 +94,17 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- default .Release.Namespace .Values.namespaceOverride -}}
{{- end -}}
{{- define "formbricks.appSecretName" -}}
{{- printf "%s-app-secrets" (include "formbricks.name" .) -}}
{{- end }}
{{- define "formbricks.hubSecretName" -}}
{{- default (include "formbricks.appSecretName" .) .Values.hub.existingSecret -}}
{{- end }}
{{- define "formbricks.postgresAdminPassword" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if and $secret (index $secret.data "POSTGRES_ADMIN_PASSWORD") }}
{{- index $secret.data "POSTGRES_ADMIN_PASSWORD" | b64dec -}}
{{- else }}
@@ -96,7 +113,7 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.postgresUserPassword" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if and $secret (index $secret.data "POSTGRES_USER_PASSWORD") }}
{{- index $secret.data "POSTGRES_USER_PASSWORD" | b64dec -}}
{{- else }}
@@ -105,7 +122,7 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.redisPassword" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if and $secret (index $secret.data "REDIS_PASSWORD") }}
{{- index $secret.data "REDIS_PASSWORD" | b64dec -}}
{{- else }}
@@ -114,7 +131,7 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.cronSecret" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if $secret }}
{{- index $secret.data "CRON_SECRET" | b64dec -}}
{{- else }}
@@ -123,7 +140,7 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.encryptionKey" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if $secret }}
{{- index $secret.data "ENCRYPTION_KEY" | b64dec -}}
{{- else }}
@@ -132,10 +149,19 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.nextAuthSecret" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if $secret }}
{{- index $secret.data "NEXTAUTH_SECRET" | b64dec -}}
{{- else }}
{{- randAlphaNum 32 -}}
{{- end -}}
{{- end }}
{{/*
Hub API key: reuse HUB_API_KEY from the existing app secret when present (keeps
the value stable across `helm upgrade`); otherwise generate a random 32-char key.
*/}}
{{- define "formbricks.hubApiKey" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if and $secret (index $secret.data "HUB_API_KEY") }}
{{- index $secret.data "HUB_API_KEY" | b64dec -}}
{{- else }}
{{- randAlphaNum 32 -}}
{{- end -}}
{{- end }}

View File

@@ -131,6 +131,10 @@ spec:
- name: SKIP_STARTUP_MIGRATION
value: "true"
{{- end }}
{{- if not (hasKey .Values.deployment.env "HUB_API_URL") }}
- name: HUB_API_URL
value: "http://{{ include "formbricks.hubname" . }}:8080"
{{- end }}
{{- range $key, $value := .Values.deployment.env }}
- name: {{ include "formbricks.tplvalues.render" ( dict "value" $key "context" $ ) }}
{{- if kindIs "string" $value }}

View File

@@ -0,0 +1,100 @@
{{- if not .Values.hub.enabled }}
{{- fail "hub.enabled=false is not supported in Formbricks 5; Hub is mandatory." }}
{{- end }}
---
# Formbricks Hub API Deployment. Migrations (goose + river) run in an init
# container on every pod start; upgrades additionally run them in the
# pre-upgrade hub-migration Job.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "formbricks.hubname" . }}
  labels:
    helm.sh/chart: {{ include "formbricks.chart" . }}
    app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
    app.kubernetes.io/instance: {{ .Release.Name }}
    app.kubernetes.io/component: hub
    app.kubernetes.io/managed-by: {{ .Release.Service }}
    app.kubernetes.io/part-of: {{ .Values.partOfOverride | default (include "formbricks.name" .) }}
spec:
  replicas: {{ .Values.hub.replicas | default 1 }}
  selector:
    matchLabels:
      app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
      app.kubernetes.io/instance: {{ .Release.Name }}
  template:
    metadata:
      labels:
        app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
        app.kubernetes.io/instance: {{ .Release.Name }}
        app.kubernetes.io/component: hub
    spec:
      {{- if .Values.deployment.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml .Values.deployment.imagePullSecrets | nindent 8 }}
      {{- end }}
      initContainers:
        # Runs Hub DB migrations before the API starts (covers fresh installs,
        # where the pre-upgrade migration Job does not fire).
        - name: hub-migrate
          image: {{ .Values.hub.image.repository }}:{{ .Values.hub.image.tag | default "latest" }}
          imagePullPolicy: {{ .Values.hub.image.pullPolicy }}
          securityContext:
            readOnlyRootFilesystem: true
            runAsNonRoot: true
            # Match the hub-migration Job's hardening: no Linux capabilities needed.
            capabilities:
              drop: ["ALL"]
          command:
            - sh
            - -c
            - |
              /usr/local/bin/goose -dir /app/migrations postgres "$DATABASE_URL" up && \
              /usr/local/bin/river migrate-up --database-url "$DATABASE_URL"
          envFrom:
            # Secret must provide DATABASE_URL (hub.existingSecret or the generated app secret).
            - secretRef:
                name: {{ include "formbricks.hubSecretName" . }}
      containers:
        - name: hub
          image: {{ .Values.hub.image.repository }}:{{ .Values.hub.image.tag | default "latest" }}
          imagePullPolicy: {{ .Values.hub.image.pullPolicy }}
          securityContext:
            readOnlyRootFilesystem: true
            runAsNonRoot: true
            # Match the hub-migration Job's hardening: no Linux capabilities needed.
            capabilities:
              drop: ["ALL"]
          ports:
            - name: http
              containerPort: 8080
              protocol: TCP
          envFrom:
            - secretRef:
                name: {{ include "formbricks.hubSecretName" . }}
          env:
            # Hub reads its auth key as API_KEY; the secret stores it as HUB_API_KEY.
            - name: API_KEY
              valueFrom:
                secretKeyRef:
                  name: {{ include "formbricks.hubSecretName" . }}
                  key: HUB_API_KEY
            # Non-secret env vars from values; use the secret for sensitive values.
            {{- range $key, $value := .Values.hub.env }}
            - name: {{ $key }}
              value: {{ $value | quote }}
            {{- end }}
          {{- if .Values.hub.resources }}
          resources:
            {{- toYaml .Values.hub.resources | nindent 12 }}
          {{- end }}
          readinessProbe:
            httpGet:
              path: /health
              port: 8080
            initialDelaySeconds: 10
            periodSeconds: 10
            failureThreshold: 5
            timeoutSeconds: 5
            successThreshold: 1
          livenessProbe:
            httpGet:
              path: /health
              port: 8080
            initialDelaySeconds: 10
            periodSeconds: 10
            failureThreshold: 5
            timeoutSeconds: 5
            successThreshold: 1
          # Allows up to 30 * 10s = 5 min for first startup before liveness kicks in.
          startupProbe:
            httpGet:
              path: /health
              port: 8080
            failureThreshold: 30
            periodSeconds: 10

View File

@@ -0,0 +1,54 @@
{{- if not .Values.hub.enabled }}
{{- fail "hub.enabled=false is not supported in Formbricks 5; Hub is mandatory." }}
{{- end }}
---
# Pre-upgrade Job that runs Hub's database migrations (goose + river) before
# Helm upgrades the Hub resources. Fresh installs run the same migrations via
# the Hub Deployment's init container, since pre-upgrade hooks do not fire on
# install.
apiVersion: batch/v1
kind: Job
metadata:
  name: {{ include "formbricks.hubname" . }}-migration
  labels:
    helm.sh/chart: {{ include "formbricks.chart" . }}
    app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
    app.kubernetes.io/instance: {{ .Release.Name }}
    app.kubernetes.io/component: hub-migration
    app.kubernetes.io/managed-by: {{ .Release.Service }}
  annotations:
    # Run before the upgrade; replace any leftover hook Job and delete on success.
    helm.sh/hook: pre-upgrade
    helm.sh/hook-weight: "-5"
    helm.sh/hook-delete-policy: before-hook-creation,hook-succeeded
spec:
  # Tunables documented under hub.migration in values.yaml.
  ttlSecondsAfterFinished: {{ .Values.hub.migration.ttlSecondsAfterFinished | default 300 }}
  backoffLimit: {{ .Values.hub.migration.backoffLimit | default 3 }}
  activeDeadlineSeconds: {{ .Values.hub.migration.activeDeadlineSeconds | default 900 }}
  template:
    metadata:
      labels:
        app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
        app.kubernetes.io/instance: {{ .Release.Name }}
        app.kubernetes.io/component: hub-migration
    spec:
      restartPolicy: Never
      securityContext:
        runAsNonRoot: true
        runAsUser: 1000
      {{- if .Values.deployment.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml .Values.deployment.imagePullSecrets | nindent 8 }}
      {{- end }}
      containers:
        - name: hub-migrate
          # Same Hub image as the Deployment; it ships the goose/river binaries.
          image: {{ .Values.hub.image.repository }}:{{ .Values.hub.image.tag | default "latest" }}
          imagePullPolicy: {{ .Values.hub.image.pullPolicy }}
          securityContext:
            readOnlyRootFilesystem: true
            capabilities:
              drop: ["ALL"]
          command:
            - sh
            - -c
            - |
              /usr/local/bin/goose -dir /app/migrations postgres "$DATABASE_URL" up && \
              /usr/local/bin/river migrate-up --database-url "$DATABASE_URL"
          envFrom:
            # Secret must provide DATABASE_URL (hub.existingSecret or the generated app secret).
            - secretRef:
                name: {{ include "formbricks.hubSecretName" . }}

View File

@@ -0,0 +1,25 @@
{{- if not .Values.hub.enabled }}
{{- fail "hub.enabled=false is not supported in Formbricks 5; Hub is mandatory." }}
{{- end }}
---
# Cluster-internal Service in front of the Hub Deployment. The Formbricks app
# reaches Hub at http://<hubname>:8080 (the HUB_API_URL default in the app
# deployment template).
apiVersion: v1
kind: Service
metadata:
  name: {{ include "formbricks.hubname" . }}
  labels:
    helm.sh/chart: {{ include "formbricks.chart" . }}
    app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
    app.kubernetes.io/instance: {{ .Release.Name }}
    app.kubernetes.io/component: hub
    app.kubernetes.io/managed-by: {{ .Release.Service }}
    app.kubernetes.io/part-of: {{ .Values.partOfOverride | default (include "formbricks.name" .) }}
spec:
  # Hub is intentionally not exposed outside the cluster.
  type: ClusterIP
  selector:
    app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
    app.kubernetes.io/instance: {{ .Release.Name }}
  ports:
    - name: http
      port: 8080
      targetPort: 8080
      protocol: TCP

View File

@@ -4,11 +4,13 @@
{{- $postgresUserPassword := include "formbricks.postgresUserPassword" . }}
{{- $redisPassword := include "formbricks.redisPassword" . }}
{{- $webappUrl := required "formbricks.webappUrl is required. Set it to your Formbricks instance URL (e.g., https://formbricks.example.com)" .Values.formbricks.webappUrl }}
{{- $hubApiKey := include "formbricks.hubApiKey" . }}
{{- $includeHubApiKeyInAppSecret := or (not .Values.hub.existingSecret) (eq .Values.hub.existingSecret (include "formbricks.appSecretName" .)) }}
---
apiVersion: v1
kind: Secret
metadata:
name: {{ template "formbricks.name" . }}-app-secrets
name: {{ include "formbricks.appSecretName" . }}
labels:
{{- include "formbricks.labels" . | nindent 4 }}
data:
@@ -28,6 +30,9 @@ data:
{{- else }}
DATABASE_URL: {{ .Values.postgresql.externalDatabaseUrl | b64enc }}
{{- end }}
{{- if $includeHubApiKeyInAppSecret }}
HUB_API_KEY: {{ $hubApiKey | b64enc }}
{{- end }}
CRON_SECRET: {{ include "formbricks.cronSecret" . | b64enc }}
ENCRYPTION_KEY: {{ include "formbricks.encryptionKey" . | b64enc }}
NEXTAUTH_SECRET: {{ include "formbricks.nextAuthSecret" . | b64enc }}

View File

@@ -340,6 +340,43 @@ serviceMonitor:
path: /metrics
port: metrics
##########################################################
# Hub API Configuration
# Formbricks Hub image: ghcr.io/formbricks/hub
##########################################################
hub:
# Hub is mandatory in Formbricks 5. Keep this enabled.
enabled: true
replicas: 1
image:
repository: "ghcr.io/formbricks/hub"
# Pin to a semver tag for reproducible deployments; update on each Hub release.
tag: "1.0.0"
pullPolicy: IfNotPresent
# Optional override for the secret Hub reads from.
# Defaults to the generated app secret (<release>-app-secrets), which contains DATABASE_URL and HUB_API_KEY.
# If you set this, the custom secret must provide DATABASE_URL and HUB_API_KEY.
existingSecret: ""
# Optional env vars (non-secret). Use existingSecret for secret values such as DATABASE_URL and HUB_API_KEY.
env: {}
# Upgrade migration job runs goose + river before Helm upgrades Hub resources.
# Fresh installs run the same migrations through the Hub deployment init container.
migration:
ttlSecondsAfterFinished: 300
backoffLimit: 3
activeDeadlineSeconds: 900
resources:
limits:
memory: 512Mi
requests:
memory: 256Mi
cpu: "100m"
##########################################################
# PostgreSQL Configuration
##########################################################
@@ -352,7 +389,7 @@ postgresql:
fullnameOverride: "formbricks-postgresql"
image:
repository: pgvector/pgvector
tag: 0.8.0-pg17
tag: pg17
auth:
username: formbricks
database: formbricks

View File

@@ -1,14 +1,24 @@
services:
# PostgreSQL must load the vector library so Hub (and Formbricks) can use the pgvector extension.
postgres:
image: pgvector/pgvector:pg17
image: pgvector/pgvector:pg18
volumes:
- postgres:/var/lib/postgresql/data
- postgres:/var/lib/postgresql
environment:
- POSTGRES_DB=postgres
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres
ports:
- 5432:5432
command: >
postgres
-c shared_preload_libraries=vector
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres -d postgres || exit 1"]
interval: 5s
timeout: 3s
retries: 30
start_period: 10s
mailhog:
image: arjenz/mailhog
@@ -36,6 +46,40 @@ services:
volumes:
- minio-data:/data
# Run Hub DB migrations (goose + river) before the API starts. Idempotent; runs on every compose up.
hub-migrate:
image: ghcr.io/formbricks/hub:latest
restart: "no"
entrypoint: ["sh", "-c"]
command:
[
'if [ -x /usr/local/bin/goose ] && [ -x /usr/local/bin/river ]; then /usr/local/bin/goose -dir /app/migrations postgres "$$DATABASE_URL" up && /usr/local/bin/river migrate-up --database-url "$$DATABASE_URL"; else echo ''Migration tools (goose/river) not in image.''; exit 1; fi',
]
environment:
DATABASE_URL: postgresql://postgres:postgres@postgres:5432/postgres?sslmode=disable
depends_on:
postgres:
condition: service_healthy
# Formbricks Hub API (ghcr.io/formbricks/hub). Shares the same Postgres database as Formbricks by default.
hub:
image: ghcr.io/formbricks/hub:latest
depends_on:
hub-migrate:
condition: service_completed_successfully
ports:
- "8080:8080"
environment:
API_KEY: ${HUB_API_KEY:-dev-api-key}
DATABASE_URL: postgresql://postgres:postgres@postgres:5432/postgres?sslmode=disable
# Explicit Postgres env so migrations and any libpq fallback use the service host, not localhost
PGHOST: postgres
PGPORT: "5432"
PGUSER: postgres
PGPASSWORD: postgres
PGDATABASE: postgres
PGSSLMODE: disable
volumes:
postgres:
driver: local

View File

@@ -27,3 +27,13 @@ The script will prompt you for the following information:
3. **Domain Name**: Enter the domain name that Traefik will use to create the SSL certificate and forward requests to Formbricks.
That's it! After running the command and providing the required information, visit the domain name you entered, and you should see the Formbricks home wizard!
## Formbricks Hub
The stack includes the [Formbricks Hub](https://github.com/formbricks/hub) API (`ghcr.io/formbricks/hub`). Hub shares the same database as Formbricks by default.
- **Migrations**: A `hub-migrate` service runs Hub's database migrations (goose + river) before the Hub API starts. It runs on every `docker compose up` and is idempotent.
- **Production** (`docker/docker-compose.yml`): Set `HUB_API_KEY` (required). `HUB_API_URL` defaults to `http://hub:8080` so the Formbricks app can reach Hub inside the compose network. Override `HUB_DATABASE_URL` only if you want Hub to use a separate database.
- **Development** (`docker-compose.dev.yml`): Hub uses the same Postgres database; `HUB_API_KEY` defaults to `dev-api-key` (override with `HUB_API_KEY`) and the local Hub URL is `http://localhost:8080`.
In development, Hub is exposed locally on port **8080**. In production Docker Compose, Hub stays internal to the compose network and is reached via `http://hub:8080`.

View File

@@ -29,6 +29,15 @@ x-environment: &environment
# To use external Redis/Valkey: remove the redis service below and update this URL
REDIS_URL: redis://redis:6379
# Formbricks Hub (port 8080): API key required. Use e.g. openssl rand -hex 32
HUB_API_KEY:
# Base URL the Formbricks app uses to reach Hub. Defaults to the internal Hub service.
HUB_API_URL: ${HUB_API_URL:-http://hub:8080}
# Hub database URL (optional). Default: same Postgres as Formbricks. Set only if Hub uses a separate DB.
# HUB_DATABASE_URL:
# Set the minimum log level(debug, info, warn, error, fatal)
# LOG_LEVEL: info
@@ -202,7 +211,7 @@ x-environment: &environment
services:
postgres:
restart: always
image: pgvector/pgvector:pg17
image: pgvector/pgvector:pg18
volumes:
- postgres:/var/lib/postgresql/data
environment:
@@ -245,6 +254,31 @@ services:
- ./saml-connection:/home/nextjs/apps/web/saml-connection
<<: *environment
# Run Hub DB migrations (goose + river) before the API starts. Uses same image; migrations are idempotent.
hub-migrate:
image: ghcr.io/formbricks/hub:latest
restart: "no"
entrypoint: ["sh", "-c"]
command: ["if [ -x /usr/local/bin/goose ] && [ -x /usr/local/bin/river ]; then /usr/local/bin/goose -dir /app/migrations postgres \"$$DATABASE_URL\" up && /usr/local/bin/river migrate-up --database-url \"$$DATABASE_URL\"; else echo 'Migration tools (goose/river) not in image.'; exit 1; fi"]
environment:
DATABASE_URL: ${HUB_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/formbricks?sslmode=disable}
depends_on:
postgres:
condition: service_healthy
# Formbricks Hub API (ghcr.io/formbricks/hub). Set HUB_API_KEY. By default shares the Formbricks database; set HUB_DATABASE_URL to use a separate DB.
hub:
restart: always
image: ghcr.io/formbricks/hub:latest
depends_on:
hub-migrate:
condition: service_completed_successfully
postgres:
condition: service_healthy
environment:
API_KEY: ${HUB_API_KEY:?HUB_API_KEY is required to run Hub}
DATABASE_URL: ${HUB_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/formbricks?sslmode=disable}
volumes:
postgres:
driver: local

View File

@@ -0,0 +1,275 @@
# Plan: Deprecate Environments in Formbricks
**Issue**: https://github.com/formbricks/internal/issues/1501
## Context
Formbricks currently has a 4-level hierarchy: **Organization → Project → Environment (prod/dev) → Resources**. The "Environment" layer adds complexity with minimal value — the only real difference between prod and dev is separate API keys and a UI badge. The UI already calls "Project" a "Workspace".
**Goal**: Collapse the Environment layer so resources live directly under Project. The production environment merges into the workspace identity. Dev environments with data become separate new workspaces.
**Key decisions**:
- DB model stays as `Project` (no table rename)
- SDK will accept `workspaceId` as new param, `environmentId` as deprecated alias
- Dev environments with data get promoted to separate workspaces
---
## Current State
```
Organization
└── Project ("Workspace" in UI)
├── Environment (production) ──→ surveys, contacts, webhooks, tags, ...
└── Environment (development) ──→ surveys, contacts, webhooks, tags, ...
```
Every project always has exactly 2 environments. The only differences between them:
- Separate data (contacts, responses, attributes, integrations, webhooks, segments, etc.)
- Separate API keys (`ApiKeyEnvironment` grants per-environment permissions)
- A red warning banner in the dev UI, plus an environment switcher breadcrumb
Key metrics:
- **564 files** in `apps/web` reference `environmentId`
- **52 files** in `packages` reference `environmentId`
- **68+ route directories** under `/environments/[environmentId]/`
- **22 API endpoint directories** keyed by `[environmentId]`
- **8 resource tables** FK to Environment: `Survey`, `Contact`, `ActionClass`, `ContactAttributeKey`, `Webhook`, `Tag`, `Segment`, `Integration`
- **SDK** requires `environmentId` to initialize, all client APIs use `/api/v1/client/[environmentId]/...`
- **Storage** paths: `private/${environmentId}/${fileName}`
---
## Phase 1: Add `projectId` Column to All Environment-Owned Models (PR 1 — Small, Low Risk)
Add an **optional** `projectId` column alongside the existing `environmentId` on every model that currently only references Environment.
**Why**: Today, Survey has `environmentId` pointing to Environment, and you have to join through Environment to reach Project. We need Survey to point directly to Project. But we can't just switch the FK in one shot — that would break everything. So we add a new nullable `projectId` column alongside the existing `environmentId`. No code changes, no runtime impact. Just schema preparation.
**Modify**: `packages/database/schema.prisma`
- Add `projectId String?` + FK to Project + index to: `Survey`, `Contact`, `ActionClass`, `ContactAttributeKey`, `Webhook`, `Tag`, `Segment`, `Integration`
- Add reverse relations on the `Project` model
- New Prisma migration file
No code changes. No runtime behavior change. All new columns are NULL.
---
## Phase 2: Backfill `projectId` (PR 2 — Small, Medium Risk)
Data migration to populate `projectId` on every existing row.
**Why**: The new `projectId` columns are all NULL. We need to populate them by joining through the Environment table: `Survey.environmentId → Environment.id → Environment.projectId`. After this, every row has both `environmentId` (old) and `projectId` (new) filled in, but the app still only reads `environmentId`.
```sql
UPDATE "Survey" s SET "projectId" = e."projectId"
FROM "Environment" e WHERE s."environmentId" = e."id" AND s."projectId" IS NULL;
-- Repeat for all 8 tables
```
**Create**: Migration script (idempotent — only updates rows where `projectId IS NULL`)
App behavior unchanged. New columns now populated but not yet read.
---
## Phase 3: Dual-Write (PR 3 — Large, Medium Risk)
All create/update operations write both `environmentId` AND `projectId`.
**Why**: New rows created after the backfill would still have `projectId = NULL` because the app code doesn't know about the new column yet. We update every `prisma.survey.create(...)`, `prisma.contact.create(...)`, etc. to write both `environmentId` and `projectId`. Now every new row gets both values. Old code still reads `environmentId` — nothing breaks.
**Key files to modify**:
- `apps/web/lib/survey/service.ts``createSurvey`
- `apps/web/lib/environment/service.ts``createEnvironment` (creates default ContactAttributeKeys)
- `apps/web/modules/projects/settings/lib/project.ts``createProject`
- `apps/web/modules/survey/list/lib/survey.ts``copySurveyToOtherEnvironment`
- `apps/web/modules/survey/components/template-list/lib/survey.ts``createSurvey`
- `apps/web/lib/actionClass/service.ts``createActionClass`
- `apps/web/modules/survey/editor/lib/action-class.ts``createActionClass`
- `apps/web/modules/ee/contacts/lib/contacts.ts``processCsvRecord`, `createMissingAttributeKeys`
- `apps/web/modules/ee/contacts/api/v2/management/contacts/lib/contact.ts``createContact`
- `apps/web/app/api/v1/client/[environmentId]/displays/lib/display.ts``createDisplay` (creates contacts)
- `apps/web/modules/ee/contacts/lib/contact-attribute-keys.ts``createContactAttributeKey`
- `apps/web/modules/api/v2/management/contact-attribute-keys/lib/contact-attribute-key.ts``createContactAttributeKey`
- `apps/web/modules/ee/contacts/api/v1/management/contact-attribute-keys/lib/contact-attribute-keys.ts``createContactAttributeKey`
- `apps/web/modules/integrations/webhooks/lib/webhook.ts``createWebhook`
- `apps/web/modules/api/v2/management/webhooks/lib/webhook.ts``createWebhook`
- `apps/web/app/api/v1/webhooks/lib/webhook.ts``createWebhook`
- `apps/web/lib/tag/service.ts``createTag`
- `apps/web/modules/ee/contacts/segments/lib/segments.ts``createSegment`, `cloneSegment`, `resetSegmentInSurvey`
- `apps/web/lib/integration/service.ts``createOrUpdateIntegration`
Pattern:
```typescript
// Resolve environmentId to projectId using existing getEnvironment()
const environment = await getEnvironment(environmentId);
const projectId = environment.projectId;
await prisma.survey.create({ data: { environmentId, projectId, ...rest } });
```
---
## Phase 4: Switch Internal Reads to `projectId` (PR 4 — Very Large, High Risk)
Change internal (non-API) queries from `WHERE environmentId = ?` to `WHERE projectId = ?`.
**Why**: This is the actual migration. Every query that says `WHERE environmentId = X` changes to `WHERE projectId = X`. Functions like `getSurveys(environmentId)` become `getSurveys(projectId)`. The layout at `/environments/[environmentId]/layout.tsx` resolves the environmentId from the URL to a projectId early on and passes projectId downstream. After this phase, the app internally thinks in terms of projects, not environments, even though URLs still say `[environmentId]`.
**Key files**:
- `apps/web/modules/survey/list/lib/survey.ts``getSurveys(environmentId)``getSurveys(projectId)`
- `apps/web/app/api/v1/client/[environmentId]/environment/lib/data.ts``getEnvironmentStateData`
- `apps/web/modules/environments/lib/utils.ts``getEnvironmentAuth`, `getEnvironmentLayoutData`
- `apps/web/app/(app)/environments/[environmentId]/layout.tsx` — resolve `projectId` early, pass to context
- `apps/web/app/(app)/environments/[environmentId]/context/environment-context.tsx` — add `projectId`
- All page server components that pass `environmentId` to service functions
URL still has `[environmentId]`. Each page resolves `environmentId → projectId` at the top.
**This PR can be split further** by migrating one resource type at a time (surveys first, then contacts, then actions, etc.).
---
## Phase 5: Client API Backwards Compatibility (PR 5 — Medium, Medium Risk)
Make `/api/v1/client/[environmentId]/...` and `/api/v2/client/[environmentId]/...` accept either an `environmentId` or a `projectId`.
**Why**: The SDK sends requests to `/api/v1/client/[environmentId]/...`. Existing deployed SDKs will keep sending environmentIds. New SDKs will send projectIds. Each route handler needs to accept either and resolve to a projectId internally. This ensures old SDK versions don't break.
**Add fallback resolution at top of each route handler**:
```typescript
// Try Environment table first, fall back to Project table
let projectId: string;
const environment = await prisma.environment.findUnique({ where: { id: params.environmentId } });
if (environment) {
projectId = environment.projectId;
} else {
projectId = params.environmentId; // caller passed a projectId directly
}
```
**Files**:
- `apps/web/app/api/v1/client/[environmentId]/environment/route.ts`
- `apps/web/app/api/v1/client/[environmentId]/displays/route.ts`
- `apps/web/app/api/v1/client/[environmentId]/responses/route.ts`
- `apps/web/app/api/v1/client/[environmentId]/storage/route.ts`
- `apps/web/app/api/v1/client/[environmentId]/user/route.ts`
- `apps/web/app/api/v2/client/[environmentId]/` — all routes
---
## Phase 6: Management API + API Key Migration (PR 6 — Medium, Medium Risk)
**Why**: The `ApiKeyEnvironment` model grants per-environment permissions. API keys used by integrations (Zapier, Make, etc.) reference environmentIds. These need to work at the project level. The management API endpoints that accept `environmentId` in request bodies need to also accept `projectId`.
- Modify `ApiKeyEnvironment` to also support project-level permissions (or add `projectId` to the model)
- Update `apps/web/app/api/v1/auth.ts``authenticateRequest` resolves environment permissions to project
- Management route handlers accept `environmentId` OR `projectId` in request bodies
- API key management UI in `modules/organization/settings/api-keys/`
---
## Phase 7: Storage Path Migration (PR 7 — Medium, Medium Risk)
**Why**: Uploaded files are stored at paths like `private/{environmentId}/{fileName}`. New uploads should use `{projectId}/...`, but old files still live at the old paths. Downloads need to check both locations for backward compatibility.
- New uploads use `{projectId}/{accessType}/{fileName}`
- Downloads check both `{projectId}/...` and `{environmentId}/...` paths for backwards compat
- `apps/web/modules/storage/service.ts`
- `apps/web/app/storage/[environmentId]/[accessType]/[fileName]/route.ts`
---
## Phase 8: Dev Environment Data Migration (PR 8 — Large, High Risk)
**Why**: Currently each project has a prod and dev environment. After the migration, there's no "environment" concept — just projects. Dev environments with no data can be discarded. Dev environments with data need to be promoted into new standalone projects so that data isn't lost.
For each Project with a development Environment that has data:
1. Create new Project named `{name} (Dev)` in the same Organization
2. Create a production Environment for the new Project
3. Re-parent all dev environment resources to the new Project (update `projectId`)
4. Re-parent resources to the new production environment (update `environmentId`)
For development environments with NO data: leave as-is (will be cleaned up later).
**Create**: Idempotent migration script in `packages/database/migration/` or `scripts/`
---
## Phase 9: New `/workspaces/[projectId]/` Routes + Redirects (PR 9 — Very Large, High Risk)
**Why**: The URL currently says `/environments/[environmentId]/surveys/...`. After the migration, it should say `/workspaces/[projectId]/surveys/...`. This phase creates the new route group mirroring the old structure, removes the environment switcher breadcrumb, and adds redirects so old bookmarked URLs still work.
- Create `/apps/web/app/(app)/workspaces/[projectId]/` route group mirroring the environments structure
- New layout resolves `projectId` directly
- Old `/environments/[environmentId]/...` routes redirect to `/workspaces/{projectId}/...`
- Update `apps/web/app/page.tsx` to redirect to workspace URLs
- Remove environment switcher breadcrumb
**Can be split** into sub-PRs: layout first, then surveys, then settings, etc.
---
## Phase 10: Make `projectId` NOT NULL (PR 10 — Small, Low Risk)
**Why**: At this point, every row has `projectId` populated (backfill + dual-write), and all reads use `projectId`. Now we can safely make it required in the schema. This is a safety net — the DB will reject any row that somehow doesn't have a projectId.
```sql
ALTER TABLE "Survey" ALTER COLUMN "projectId" SET NOT NULL;
-- Repeat for all 8 tables
```
Pre-check: verify no NULL values remain.
---
## Phase 11: JS SDK Update (PR 11 — Medium, Low Risk)
**Why**: Add `workspaceId` as the new init parameter. `environmentId` keeps working as a deprecated alias. Existing integrations don't break.
- `packages/js-core/src/types/config.ts` — add `workspaceId` to `TConfigInput`
- `packages/js-core/src/lib/common/setup.ts` — accept `workspaceId`, fall back to `environmentId`
- `environmentId` continues working as deprecated alias indefinitely
```typescript
// New:
formbricks.init({ workspaceId: "cxxx", appUrl: "..." })
// Old (still works):
formbricks.init({ environmentId: "cxxx", appUrl: "..." })
```
---
## Verification
After each PR:
1. `pnpm build` passes
2. Existing tests pass (`pnpm test`)
3. Manual smoke test: create survey, submit response, check dashboard
4. SDK initialization works with existing `environmentId`
After full migration:
- Old environment URLs redirect correctly
- Old API keys work
- Old SDK `environmentId` init works
- New `workspaceId` SDK init works
- Storage files accessible via both old and new paths
- Dev environments with data are separate workspaces
---
## PR Summary
| PR | Phase | Description | Size | Risk |
|----|-------|-------------|------|------|
| 1 | 1 | Add nullable `projectId` columns | S | Low |
| 2 | 2 | Backfill `projectId` data migration | S | Med |
| 3 | 3 | Dual-write `projectId` on all creates | L | Med |
| 4 | 4 | Switch reads to `projectId` | XL | High |
| 5 | 5 | Client API backwards compat | M | Med |
| 6 | 6 | Management API + API key migration | M | Med |
| 7 | 7 | Storage path migration | M | Med |
| 8 | 8 | Dev environment → workspace promotion | L | High |
| 9 | 9 | New workspace routes + redirects | XL | High |
| 10 | 10 | Make `projectId` NOT NULL | S | Low |
| 11 | 11 | JS SDK `workspaceId` support | M | Low |

View File

@@ -77,4 +77,14 @@ These variables are present inside your machine's docker-compose file. Restart t
| AUDIT_LOG_ENABLED | Set this to 1 to enable audit logging. Requires Redis to be configured with the REDIS_URL env variable. | optional | 0 |
| AUDIT_LOG_GET_USER_IP | Set to 1 to include user IP addresses in audit logs from request headers | optional | 0 |
#### Formbricks Hub
When running the stack with [Formbricks Hub](https://github.com/formbricks/hub) (for example via Docker Compose or Helm), the following variables apply:
| Variable | Description | Required | Default |
| ---------------- | ------------------------------------------------------------------------------------------------ | -------------------------- | ----------------------------------------------------- |
| HUB_API_KEY | API key used by the Formbricks Hub API (port 8080). Generate one with e.g. `openssl rand -hex 32`. | required | (none) |
| HUB_API_URL | Base URL the Formbricks app uses to call Hub. Use `http://localhost:8080` in local dev. | optional | `http://hub:8080` (Compose/Helm internal Hub service) |
| HUB_DATABASE_URL | PostgreSQL connection URL for Hub. Omit to use the same database as Formbricks. | optional | Same as Formbricks `DATABASE_URL` (shared database) |
Note: If you want to configure something that is not possible via above, please open an issue on our GitHub repo here or reach out to us on Github Discussions and we'll try our best to work out a solution with you.

View File

@@ -47,7 +47,9 @@ export const xmSegmentMigration: MigrationScript = {
id: "s644oyyqccstfdeejc4fluye",
name: "20241209110456_xm_segment_migration",
run: async ({ tx }) => {
const allSegments = await tx.segment.findMany();
const allSegments = await tx.segment.findMany({
select: { id: true, filters: true },
});
const updationPromises = [];
for (const segment of allSegments) {
updationPromises.push(
@@ -56,6 +58,7 @@ export const xmSegmentMigration: MigrationScript = {
data: {
filters: findAndReplace(segment.filters),
},
select: { id: true },
})
);
}

View File

@@ -0,0 +1,71 @@
-- Environment-deprecation migration (schema phase): add a nullable "projectId"
-- column to every environment-owned model, plus supporting indexes and foreign keys.
-- Columns are added as nullable so existing rows stay valid; a separate batched
-- data migration backfills the values before a later migration enforces NOT NULL.
-- AlterTable
ALTER TABLE "ActionClass" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "Contact" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "ContactAttributeKey" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "Integration" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "Segment" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "Survey" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "Tag" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "Webhook" ADD COLUMN "projectId" TEXT;
-- Indexes mirror the existing environmentId indexes on each model.
-- "created_at" / "updated_at" are the snake_case database column names that the
-- Prisma schema maps createdAt / updatedAt to.
-- CreateIndex
CREATE INDEX "ActionClass_projectId_createdAt_idx" ON "ActionClass"("projectId", "created_at");
-- CreateIndex
CREATE INDEX "Contact_projectId_idx" ON "Contact"("projectId");
-- CreateIndex
CREATE INDEX "ContactAttributeKey_projectId_createdAt_idx" ON "ContactAttributeKey"("projectId", "created_at");
-- CreateIndex
CREATE INDEX "Integration_projectId_idx" ON "Integration"("projectId");
-- CreateIndex
CREATE INDEX "Segment_projectId_idx" ON "Segment"("projectId");
-- CreateIndex
CREATE INDEX "Survey_projectId_updatedAt_idx" ON "Survey"("projectId", "updated_at");
-- CreateIndex
CREATE INDEX "Tag_projectId_idx" ON "Tag"("projectId");
-- CreateIndex
CREATE INDEX "Webhook_projectId_idx" ON "Webhook"("projectId");
-- Foreign keys use ON DELETE CASCADE, matching the cascade behavior the same
-- models already have through their environmentId relation.
-- AddForeignKey
ALTER TABLE "Webhook" ADD CONSTRAINT "Webhook_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "ContactAttributeKey" ADD CONSTRAINT "ContactAttributeKey_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Contact" ADD CONSTRAINT "Contact_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Tag" ADD CONSTRAINT "Tag_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Survey" ADD CONSTRAINT "Survey_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "ActionClass" ADD CONSTRAINT "ActionClass_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Integration" ADD CONSTRAINT "Integration_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Segment" ADD CONSTRAINT "Segment_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -0,0 +1,73 @@
/* eslint-disable no-constant-condition -- Required for the while loop */
/* eslint-disable @typescript-eslint/no-unnecessary-condition -- Required for a while loop here */
import { logger } from "@formbricks/logger";
import type { MigrationScript } from "../../src/scripts/migration-runner";
// Table names come from a hardcoded const array, never from user input.
// $executeRawUnsafe is required because Postgres does not support parameterized identifiers.
const TABLES_TO_BACKFILL = [
  "Survey",
  "Contact",
  "ActionClass",
  "ContactAttributeKey",
  "Webhook",
  "Tag",
  "Segment",
  "Integration",
] as const;

const BATCH_SIZE = 10_000;

/**
 * Data migration: copy the owning Environment's projectId onto every
 * environment-owned row whose projectId is still NULL, in batches per table,
 * then verify that no NULL projectId rows remain.
 */
export const backfillProjectId: MigrationScript = {
  type: "data",
  id: "snae9apsx7e74yo9ncmhjl47",
  name: "20260325151230_backfill_project_id",
  run: async ({ tx }) => {
    for (const table of TABLES_TO_BACKFILL) {
      let backfilled = 0;
      let batchCount: number;
      // Update in batches so a single statement never rewrites an entire
      // large table at once; a short batch means the table is done.
      do {
        batchCount = await tx.$executeRawUnsafe(`
          UPDATE "${table}" t
          SET "projectId" = e."projectId"
          FROM "Environment" e
          WHERE t."environmentId" = e."id"
          AND t."projectId" IS NULL
          AND t.id IN (
            SELECT id FROM "${table}"
            WHERE "projectId" IS NULL
            LIMIT ${BATCH_SIZE.toString()}
          )
        `);
        backfilled += batchCount;
        if (batchCount >= BATCH_SIZE) {
          // Only log progress when another batch is coming; the final total
          // is logged once the table is finished.
          logger.info(`${table}: backfilled ${backfilled.toString()} rows so far...`);
        }
      } while (batchCount >= BATCH_SIZE);
      logger.info(`Backfilled ${backfilled.toString()} rows in ${table}`);
    }
    // Verify no rows were missed.
    // Any remaining NULL projectId indicates orphaned rows (environmentId references a
    // non-existent Environment). The FK cascade should prevent this, but we check anyway.
    const failures: string[] = [];
    for (const table of TABLES_TO_BACKFILL) {
      const result: [{ count: bigint }] = await tx.$queryRawUnsafe(`
        SELECT COUNT(*) as count FROM "${table}" WHERE "projectId" IS NULL
      `);
      const remaining = result[0].count;
      if (remaining > 0n) {
        failures.push(`${table}: ${remaining.toString()} rows with NULL projectId`);
      }
    }
    if (failures.length > 0) {
      throw new Error(`Backfill verification failed:\n${failures.join("\n")}`);
    }
  },
};

View File

@@ -49,11 +49,14 @@ model Webhook {
source WebhookSource @default(user)
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
environmentId String
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
triggers PipelineTriggers[]
surveyIds String[]
secret String?
@@index([environmentId])
@@index([projectId])
}
/// Represents an attribute value associated with a contact.
@@ -116,11 +119,14 @@ model ContactAttributeKey {
dataType ContactAttributeDataType @default(string)
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
environmentId String
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
attributes ContactAttribute[]
attributeFilters SurveyAttributeFilter[]
@@unique([key, environmentId])
@@index([environmentId, createdAt])
@@index([projectId, createdAt])
}
/// Represents a person or user who can receive and respond to surveys.
@@ -137,11 +143,14 @@ model Contact {
updatedAt DateTime @updatedAt @map(name: "updated_at")
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
environmentId String
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
responses Response[]
attributes ContactAttribute[]
displays Display[]
@@index([environmentId])
@@index([projectId])
}
/// Stores a user's response to a survey, including their answers and metadata.
@@ -204,8 +213,11 @@ model Tag {
responses TagsOnResponses[]
environmentId String
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
@@unique([environmentId, name])
@@index([projectId])
}
/// Junction table linking tags to responses.
@@ -350,6 +362,8 @@ model Survey {
type SurveyType @default(app)
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
environmentId String
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
creator User? @relation(fields: [createdBy], references: [id])
createdBy String?
status SurveyStatus @default(draft)
@@ -413,6 +427,7 @@ model Survey {
@@index([environmentId, updatedAt])
@@index([segmentId])
@@index([projectId, updatedAt])
}
/// Represents a quota configuration for a survey.
@@ -507,11 +522,14 @@ model ActionClass {
noCodeConfig Json?
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
environmentId String
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
surveyTriggers SurveyTrigger[]
@@unique([key, environmentId])
@@unique([name, environmentId])
@@index([environmentId, createdAt])
@@index([projectId, createdAt])
}
enum EnvironmentType {
@@ -540,9 +558,12 @@ model Integration {
/// [IntegrationConfig]
config Json
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
@@unique([type, environmentId])
@@index([environmentId])
@@index([projectId])
}
enum DataMigrationStatus {
@@ -648,6 +669,16 @@ model Project {
projectTeams ProjectTeam[]
customHeadScripts String? // Custom HTML scripts for link surveys (self-hosted only)
// Direct resource relations (for environment deprecation migration)
surveys Survey[]
contacts Contact[]
actionClasses ActionClass[]
contactAttributeKeys ContactAttributeKey[]
webhooks Webhook[]
tags Tag[]
segments Segment[]
integrations Integration[]
@@unique([organizationId, name])
}
@@ -910,9 +941,12 @@ model Segment {
filters Json @default("[]")
environmentId String
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
surveys Survey[]
@@unique([environmentId, title])
@@index([projectId])
}
/// Represents a supported language in the system.

View File

@@ -54,6 +54,7 @@ export const ZContactAttributeKey = z.object({
})
.describe("The data type of the attribute (string, number, date)"),
environmentId: z.cuid2().describe("The ID of the environment this attribute belongs to"),
projectId: z.string().nullable().describe("The ID of the project this attribute belongs to"),
}) satisfies z.ZodType<ContactAttributeKey>;
ZContactAttributeKey.meta({

View File

@@ -17,6 +17,7 @@ export const ZContact = z.object({
})
.describe("When the contact was last updated"),
environmentId: z.string().describe("The environment this contact belongs to"),
projectId: z.string().nullable().describe("The project this contact belongs to"),
}) satisfies z.ZodType<Contact>;
ZContact.meta({

View File

@@ -72,6 +72,7 @@ const ZSurveyBase = z.object({
pin: z.string().nullable().describe("The pin of the survey"),
createdBy: z.string().nullable().describe("The user who created the survey"),
environmentId: z.cuid2().describe("The environment ID of the survey"),
projectId: z.string().nullable().describe("The project ID of the survey"),
questions: z.array(ZSurveyQuestion).describe("The questions of the survey"),
blocks: ZSurveyBlocks.prefault([]).describe("The blocks of the survey"),
endings: z.array(ZSurveyEnding).prefault([]).describe("The endings of the survey"),

View File

@@ -19,6 +19,7 @@ export const ZWebhook = z.object({
url: z.url().describe("The URL of the webhook"),
source: z.enum(["user", "zapier", "make", "n8n"]).describe("The source of the webhook"),
environmentId: z.cuid2().describe("The ID of the environment"),
projectId: z.string().nullable().describe("The ID of the project"),
triggers: z
.array(z.enum(["responseFinished", "responseCreated", "responseUpdated"]))
.describe("The triggers of the webhook")

View File

@@ -62,6 +62,7 @@ export const mockSurvey: TEnvironmentStateSurvey = {
createdAt: new Date("2025-01-01T10:00:00Z"),
updatedAt: new Date("2025-01-01T10:00:00Z"),
environmentId: mockEnvironmentId,
projectId: null,
description: "Manual Trigger",
noCodeConfig: {
elementSelector: { cssSelector: ".btn", innerHtml: "Click me" },

View File

@@ -135,6 +135,7 @@ export const ZActionClass = z.object({
key: z.string().trim().min(1).nullable(),
noCodeConfig: ZActionClassNoCodeConfig.nullable(),
environmentId: z.string(),
projectId: z.string().nullable(),
createdAt: z.coerce.date(),
updatedAt: z.coerce.date(),
});

View File

@@ -19,6 +19,7 @@ export const ZContactAttributeKey = z.object({
type: ZContactAttributeKeyType,
dataType: ZContactAttributeDataType.prefault("string"),
environmentId: z.string(),
projectId: z.string().nullable(),
});
export type TContactAttributeKey = z.infer<typeof ZContactAttributeKey>;

View File

@@ -19,6 +19,7 @@ export type TIntegrationConfig = z.infer<typeof ZIntegrationConfig>;
export const ZIntegrationBase = z.object({
id: z.string(),
environmentId: z.string(),
projectId: z.string().nullable(),
});
export const ZIntegration = ZIntegrationBase.extend({

View File

@@ -3,6 +3,7 @@ import { z } from "zod";
export const ZIntegrationBase = z.object({
id: z.string(),
environmentId: z.string(),
projectId: z.string().nullable(),
});
export const ZIntegrationBaseSurveyData = z.object({

View File

@@ -344,6 +344,7 @@ export const ZSegment = z.object({
isPrivate: z.boolean().prefault(true),
filters: ZSegmentFilters,
environmentId: z.string(),
projectId: z.string().nullable(),
createdAt: z.date(),
updatedAt: z.date(),
surveys: z.array(z.string()),

View File

@@ -826,6 +826,7 @@ export const ZSurveyBase = z.object({
name: z.string(),
type: ZSurveyType,
environmentId: z.string(),
projectId: z.string().nullable(),
createdBy: z.string().nullable(),
status: ZSurveyStatus,
displayOption: ZSurveyDisplayOption,

View File

@@ -6,6 +6,7 @@ export const ZTag = z.object({
updatedAt: z.date(),
name: z.string(),
environmentId: z.string(),
projectId: z.string().nullable(),
});
export type TTag = z.infer<typeof ZTag>;

View File

@@ -156,6 +156,8 @@
"GOOGLE_SHEETS_CLIENT_ID",
"GOOGLE_SHEETS_CLIENT_SECRET",
"GOOGLE_SHEETS_REDIRECT_URL",
"HUB_API_KEY",
"HUB_API_URL",
"NOTION_OAUTH_CLIENT_ID",
"NOTION_OAUTH_CLIENT_SECRET",
"HEROKU_APP_NAME",