Compare commits

...

6 Commits

Author SHA1 Message Date
Dhruwang
9d8fa1878a chore: switch internal reads from environmentId to projectId
Migrate all internal (non-API) read queries from WHERE environmentId
to WHERE projectId across surveys, contacts, action classes, tags,
webhooks, segments, integrations, and contact attribute keys.

Service functions renamed:
- getTagsByEnvironmentId -> getTagsByProjectId
- getActionClassByEnvironmentIdAndName -> getActionClassByProjectIdAndName
- getWebhookCountBySource(environmentId) -> getWebhookCountBySource(projectId)
- getPublishedLinkSurveys(environmentId) -> getPublishedLinkSurveys(projectId)

All page components resolve projectId from environment.projectId
early and pass it downstream. Tests updated to match.
2026-03-27 12:50:10 +05:30
Dhruwang
d202b9263f chore: dual-write projectId in all create/upsert paths
Add projectId alongside environmentId in all resource creation and
upsert code paths. This is Phase 3 of the environment deprecation plan.

For 15 call sites, replaced the verbose getEnvironment() + null-check
boilerplate with the existing getProjectIdFromEnvironmentId() helper,
which encapsulates the same logic in a single call.

Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-27 12:50:10 +05:30
Dhruwang Jariwala
71cca557fc chore(db): add nullable projectId to environment-owned models (#7588) 2026-03-27 12:33:44 +05:30
Dhruwang Jariwala
1500b6f7f3 docs: deprecate environments migration plan (#7586)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-26 16:34:40 +04:00
Dhruwang
2c9fbf83e4 chore: merge epic/v5 into chore/deprecate-environments 2026-03-26 15:10:31 +05:30
Matti Nannt
81272b96e1 feat: port hub xm-suite config to epic/v5 (#7578) 2026-03-25 11:04:42 +00:00
103 changed files with 1411 additions and 386 deletions

View File

@@ -38,6 +38,15 @@ LOG_LEVEL=info
DATABASE_URL='postgresql://postgres:postgres@localhost:5432/formbricks?schema=public'
#################
# HUB (DEV) #
#################
# The dev stack (pnpm db:up / pnpm go) runs Formbricks Hub on port 8080.
# Set explicitly to avoid confusion; override as needed when using docker-compose.dev.yml.
HUB_API_KEY=dev-api-key
HUB_API_URL=http://localhost:8080
HUB_DATABASE_URL=postgresql://postgres:postgres@postgres:5432/postgres?sslmode=disable
################
# MAIL SETUP #
################

View File

@@ -6,7 +6,7 @@ import { IS_FORMBRICKS_CLOUD, IS_STORAGE_CONFIGURED, RESPONSES_PER_PAGE } from "
import { getPublicDomain } from "@/lib/getPublicUrl";
import { getResponseCountBySurveyId, getResponses } from "@/lib/response/service";
import { getSurvey } from "@/lib/survey/service";
import { getTagsByEnvironmentId } from "@/lib/tag/service";
import { getTagsByProjectId } from "@/lib/tag/service";
import { getUser } from "@/lib/user/service";
import { getTranslate } from "@/lingodotdev/server";
import { getSegments } from "@/modules/ee/contacts/segments/lib/segments";
@@ -23,10 +23,12 @@ const Page = async (props: { params: Promise<{ environmentId: string; surveyId:
const { session, environment, organization, isReadOnly } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const [survey, user, tags, isContactsEnabled, responseCount] = await Promise.all([
getSurvey(params.surveyId),
getUser(session.user.id),
getTagsByEnvironmentId(params.environmentId),
getTagsByProjectId(projectId),
getIsContactsEnabled(organization.id),
getResponseCountBySurveyId(params.surveyId),
]);
@@ -43,7 +45,7 @@ const Page = async (props: { params: Promise<{ environmentId: string; surveyId:
throw new ResourceNotFoundError(t("common.organization"), null);
}
const segments = isContactsEnabled ? await getSegments(params.environmentId) : [];
const segments = isContactsEnabled ? await getSegments(projectId) : [];
const publicDomain = getPublicDomain();

View File

@@ -24,6 +24,8 @@ const SurveyPage = async (props: { params: Promise<{ environmentId: string; surv
const { session, environment, isReadOnly } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const surveyId = params.surveyId;
if (!surveyId) {
@@ -44,7 +46,7 @@ const SurveyPage = async (props: { params: Promise<{ environmentId: string; surv
const organizationId = await getOrganizationIdFromEnvironmentId(environment.id);
const isContactsEnabled = await getIsContactsEnabled(organizationId);
const segments = isContactsEnabled ? await getSegments(environment.id) : [];
const segments = isContactsEnabled ? await getSegments(projectId) : [];
if (!organizationId) {
throw new ResourceNotFoundError(t("common.organization"), null);

View File

@@ -6,7 +6,7 @@ import { ResourceNotFoundError } from "@formbricks/types/errors";
import { ZResponseFilterCriteria } from "@formbricks/types/responses";
import { getResponseDownloadFile, getResponseFilteringValues } from "@/lib/response/service";
import { getSurvey } from "@/lib/survey/service";
import { getTagsByEnvironmentId } from "@/lib/tag/service";
import { getTagsByProjectId } from "@/lib/tag/service";
import { authenticatedActionClient } from "@/lib/utils/action-client";
import { checkAuthorizationUpdated } from "@/lib/utils/action-client/action-client-middleware";
import { getOrganizationIdFromSurveyId, getProjectIdFromSurveyId } from "@/lib/utils/helper";
@@ -84,8 +84,10 @@ export const getSurveyFilterDataAction = authenticatedActionClient
const isQuotasAllowed = await getIsQuotasEnabled(organizationId);
const projectId = survey.projectId!;
const [tags, { contactAttributes: attributes, meta, hiddenFields }, quotas = []] = await Promise.all([
getTagsByEnvironmentId(survey.environmentId),
getTagsByProjectId(projectId),
getResponseFilteringValues(parsedInput.surveyId),
isQuotasAllowed ? getQuotas(parsedInput.surveyId) : [],
]);

View File

@@ -20,9 +20,11 @@ const Page = async (props: { params: Promise<{ environmentId: string }> }) => {
const { isReadOnly, environment, session } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const [surveys, integrations, locale] = await Promise.all([
getSurveys(params.environmentId),
getIntegrations(params.environmentId),
getSurveys(projectId),
getIntegrations(projectId),
getUserLocale(session.user.id),
]);

View File

@@ -19,6 +19,8 @@ const ZValidateGoogleSheetsConnectionAction = z.object({
export const validateGoogleSheetsConnectionAction = authenticatedActionClient
.inputSchema(ZValidateGoogleSheetsConnectionAction)
.action(async ({ ctx, parsedInput }) => {
const projectId = await getProjectIdFromEnvironmentId(parsedInput.environmentId);
await checkAuthorizationUpdated({
userId: ctx.user.id,
organizationId: await getOrganizationIdFromEnvironmentId(parsedInput.environmentId),
@@ -29,13 +31,13 @@ export const validateGoogleSheetsConnectionAction = authenticatedActionClient
},
{
type: "projectTeam",
projectId: await getProjectIdFromEnvironmentId(parsedInput.environmentId),
projectId,
minPermission: "readWrite",
},
],
});
const integration = await getIntegrationByType(parsedInput.environmentId, "googleSheets");
const integration = await getIntegrationByType(projectId, "googleSheets");
if (!integration) {
return { data: false };
}

View File

@@ -24,9 +24,11 @@ const Page = async (props: { params: Promise<{ environmentId: string }> }) => {
const { isReadOnly, environment, session } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const [surveys, integrations, locale] = await Promise.all([
getSurveys(params.environmentId),
getIntegrations(params.environmentId),
getSurveys(projectId),
getIntegrations(projectId),
getUserLocale(session.user.id),
]);

View File

@@ -6,15 +6,15 @@ import { DatabaseError } from "@formbricks/types/errors";
import { validateInputs } from "@/lib/utils/validate";
export const getWebhookCountBySource = async (
environmentId: string,
projectId: string,
source?: Webhook["source"]
): Promise<number> => {
validateInputs([environmentId, ZId], [source, z.string().optional()]);
validateInputs([projectId, ZId], [source, z.string().optional()]);
try {
const count = await prisma.webhook.count({
where: {
environmentId,
projectId,
source,
},
});

View File

@@ -31,9 +31,11 @@ const Page = async (props: { params: Promise<{ environmentId: string }> }) => {
const { isReadOnly, environment, session } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const [surveys, notionIntegration, locale] = await Promise.all([
getSurveys(params.environmentId),
getIntegrationByType(params.environmentId, "notion"),
getSurveys(projectId),
getIntegrationByType(projectId, "notion"),
getUserLocale(session.user.id),
]);

View File

@@ -33,6 +33,8 @@ const Page = async (props: { params: Promise<{ environmentId: string }> }) => {
const { isReadOnly, environment, isBilling } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const [
integrations,
userWebhookCount,
@@ -41,12 +43,12 @@ const Page = async (props: { params: Promise<{ environmentId: string }> }) => {
n8nwebhookCount,
activePiecesWebhookCount,
] = await Promise.all([
getIntegrations(params.environmentId),
getWebhookCountBySource(params.environmentId, "user"),
getWebhookCountBySource(params.environmentId, "zapier"),
getWebhookCountBySource(params.environmentId, "make"),
getWebhookCountBySource(params.environmentId, "n8n"),
getWebhookCountBySource(params.environmentId, "activepieces"),
getIntegrations(projectId),
getWebhookCountBySource(projectId, "user"),
getWebhookCountBySource(projectId, "zapier"),
getWebhookCountBySource(projectId, "make"),
getWebhookCountBySource(projectId, "n8n"),
getWebhookCountBySource(projectId, "activepieces"),
]);
const isIntegrationConnected = (type: TIntegrationType) =>

View File

@@ -19,9 +19,11 @@ const Page = async (props: { params: Promise<{ environmentId: string }> }) => {
const { isReadOnly, environment, session } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const [surveys, slackIntegration, locale] = await Promise.all([
getSurveys(params.environmentId),
getIntegrationByType(params.environmentId, "slack"),
getSurveys(projectId),
getIntegrationByType(projectId, "slack"),
getUserLocale(session.user.id),
]);

View File

@@ -15,6 +15,7 @@ import { getOrganizationByEnvironmentId } from "@/lib/organization/service";
import { getResponseCountBySurveyId } from "@/lib/response/service";
import { getSurvey, updateSurvey } from "@/lib/survey/service";
import { convertDatesInObject } from "@/lib/time";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { validateWebhookUrl } from "@/lib/utils/validate-webhook-url";
import { queueAuditEvent } from "@/modules/ee/audit-logs/lib/handler";
import { TAuditStatus, UNKNOWN_DATA } from "@/modules/ee/audit-logs/types/audit-log";
@@ -152,8 +153,9 @@ export const POST = async (request: Request) => {
if (event === "responseFinished") {
// Fetch integrations and responseCount in parallel
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const [integrations, responseCount] = await Promise.all([
getIntegrations(environmentId),
getIntegrations(projectId),
getResponseCountBySurveyId(surveyId),
]);

View File

@@ -10,6 +10,7 @@ import {
} from "@/lib/constants";
import { hasUserEnvironmentAccess } from "@/lib/environment/auth";
import { createOrUpdateIntegration, getIntegrationByType } from "@/lib/integration/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { authOptions } from "@/modules/auth/lib/authOptions";
export const GET = async (req: Request) => {
@@ -67,7 +68,8 @@ export const GET = async (req: Request) => {
}
const integrationType = "googleSheets" as const;
const existingIntegration = await getIntegrationByType(environmentId, integrationType);
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const existingIntegration = await getIntegrationByType(projectId, integrationType);
const existingConfig = existingIntegration?.config as TIntegrationGoogleSheetsConfig;
const googleSheetIntegration = {

View File

@@ -2,6 +2,7 @@ import { Prisma } from "@prisma/client";
import { prisma } from "@formbricks/database";
import { TDisplayCreateInput, ZDisplayCreateInput } from "@formbricks/types/displays";
import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { validateInputs } from "@/lib/utils/validate";
import { getContactByUserId } from "./contact";
@@ -15,9 +16,11 @@ export const createDisplay = async (displayInput: TDisplayCreateInput): Promise<
if (userId) {
contact = await getContactByUserId(environmentId, userId);
if (!contact) {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
contact = await prisma.contact.create({
data: {
environment: { connect: { id: environmentId } },
project: { connect: { id: projectId } },
attributes: {
create: {
attributeKey: {

View File

@@ -45,6 +45,7 @@ export const responseSelection = {
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -5,6 +5,7 @@ import { withV1ApiWrapper } from "@/app/lib/api/with-api-logging";
import { getTables } from "@/lib/airtable/service";
import { hasUserEnvironmentAccess } from "@/lib/environment/auth";
import { getIntegrationByType } from "@/lib/integration/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
export const GET = withV1ApiWrapper({
handler: async ({ req, authentication }) => {
@@ -36,7 +37,8 @@ export const GET = withV1ApiWrapper({
};
}
const integration = (await getIntegrationByType(environmentId, "airtable")) as TIntegrationAirtable;
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const integration = (await getIntegrationByType(projectId, "airtable")) as TIntegrationAirtable;
if (!integration) {
return {

View File

@@ -11,6 +11,7 @@ import {
import { symmetricEncrypt } from "@/lib/crypto";
import { hasUserEnvironmentAccess } from "@/lib/environment/auth";
import { createOrUpdateIntegration, getIntegrationByType } from "@/lib/integration/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
export const GET = withV1ApiWrapper({
handler: async ({ req, authentication }) => {
@@ -88,7 +89,8 @@ export const GET = withV1ApiWrapper({
},
};
const existingIntegration = await getIntegrationByType(environmentId, "notion");
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const existingIntegration = await getIntegrationByType(projectId, "notion");
if (existingIntegration) {
notionIntegration.config.data = existingIntegration.config.data as TIntegrationNotionConfigData[];
}

View File

@@ -8,6 +8,7 @@ import { withV1ApiWrapper } from "@/app/lib/api/with-api-logging";
import { SLACK_CLIENT_ID, SLACK_CLIENT_SECRET, SLACK_REDIRECT_URI, WEBAPP_URL } from "@/lib/constants";
import { hasUserEnvironmentAccess } from "@/lib/environment/auth";
import { createOrUpdateIntegration, getIntegrationByType } from "@/lib/integration/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
export const GET = withV1ApiWrapper({
handler: async ({ req, authentication }) => {
@@ -88,7 +89,8 @@ export const GET = withV1ApiWrapper({
team: data.team,
};
const slackIntegration = await getIntegrationByType(environmentId, "slack");
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const slackIntegration = await getIntegrationByType(projectId, "slack");
const slackConfiguration: TIntegrationSlackConfig = {
data: (slackIntegration?.config.data as TIntegrationSlackConfigData[]) ?? [],

View File

@@ -19,6 +19,7 @@ const selectActionClass = {
key: true,
noCodeConfig: true,
environmentId: true,
projectId: true,
} satisfies Prisma.ActionClassSelect;
export const getActionClasses = reactCache(async (environmentIds: string[]): Promise<TActionClass[]> => {

View File

@@ -50,6 +50,7 @@ export const responseSelection = {
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -5,6 +5,7 @@ import { DatabaseError, InvalidInputError } from "@formbricks/types/errors";
import { TWebhookInput, ZWebhookInput } from "@/app/api/v1/webhooks/types/webhooks";
import { ITEMS_PER_PAGE } from "@/lib/constants";
import { generateWebhookSecret } from "@/lib/crypto";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { validateInputs } from "@/lib/utils/validate";
import { validateWebhookUrl } from "@/lib/utils/validate-webhook-url";
@@ -12,6 +13,8 @@ export const createWebhook = async (webhookInput: TWebhookInput): Promise<Webhoo
validateInputs([webhookInput, ZWebhookInput]);
await validateWebhookUrl(webhookInput.url);
const projectId = await getProjectIdFromEnvironmentId(webhookInput.environmentId);
try {
const secret = generateWebhookSecret();
@@ -23,11 +26,8 @@ export const createWebhook = async (webhookInput: TWebhookInput): Promise<Webhoo
surveyIds: webhookInput.surveyIds || [],
triggers: webhookInput.triggers || [],
secret,
environment: {
connect: {
id: webhookInput.environmentId,
},
},
environmentId: webhookInput.environmentId,
projectId,
},
});

View File

@@ -44,16 +44,16 @@ export const GET = withV3ApiWrapper({
return authResult;
}
const { environmentId } = authResult;
const { projectId } = authResult;
const [{ surveys, nextCursor }, totalCount] = await Promise.all([
getSurveyListPage(environmentId, {
getSurveyListPage(projectId, {
limit: parsed.limit,
cursor: parsed.cursor,
sortBy: parsed.sortBy,
filterCriteria: parsed.filterCriteria,
}),
getSurveyCount(environmentId, parsed.filterCriteria),
getSurveyCount(projectId, parsed.filterCriteria),
]);
return successListResponse(

View File

@@ -4823,6 +4823,7 @@ export const previewSurvey = (projectName: string, t: TFunction): TSurvey => {
name: t("templates.preview_survey_name"),
type: "link" as const,
environmentId: "cltwumfcz0009echxg02fh7oa",
projectId: null,
createdBy: "cltwumfbz0000echxysz6ptvq",
status: "inProgress" as const,
welcomeCard: {

View File

@@ -5,7 +5,7 @@ import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
import {
deleteActionClass,
getActionClass,
getActionClassByEnvironmentIdAndName,
getActionClassByProjectIdAndName,
getActionClasses,
} from "./service";
@@ -49,7 +49,7 @@ describe("ActionClass Service", () => {
const result = await getActionClasses("env1");
expect(result).toEqual(mockActionClasses);
expect(prisma.actionClass.findMany).toHaveBeenCalledWith({
where: { environmentId: "env1" },
where: { projectId: "env1" },
select: expect.any(Object),
take: undefined,
skip: undefined,
@@ -63,7 +63,7 @@ describe("ActionClass Service", () => {
});
});
describe("getActionClassByEnvironmentIdAndName", () => {
describe("getActionClassByProjectIdAndName", () => {
test("should return action class when found", async () => {
const mockActionClass: TActionClass = {
id: "id2",
@@ -83,10 +83,10 @@ describe("ActionClass Service", () => {
if (!prisma.actionClass.findFirst) prisma.actionClass.findFirst = vi.fn();
vi.mocked(prisma.actionClass.findFirst).mockResolvedValue(mockActionClass);
const result = await getActionClassByEnvironmentIdAndName("env2", "Action 2");
const result = await getActionClassByProjectIdAndName("env2", "Action 2");
expect(result).toEqual(mockActionClass);
expect(prisma.actionClass.findFirst).toHaveBeenCalledWith({
where: { name: "Action 2", environmentId: "env2" },
where: { name: "Action 2", projectId: "env2" },
select: expect.any(Object),
});
});
@@ -94,14 +94,14 @@ describe("ActionClass Service", () => {
test("should return null when not found", async () => {
if (!prisma.actionClass.findFirst) prisma.actionClass.findFirst = vi.fn();
vi.mocked(prisma.actionClass.findFirst).mockResolvedValue(null);
const result = await getActionClassByEnvironmentIdAndName("env2", "Action 2");
const result = await getActionClassByProjectIdAndName("env2", "Action 2");
expect(result).toBeNull();
});
test("should throw DatabaseError when prisma throws", async () => {
if (!prisma.actionClass.findFirst) prisma.actionClass.findFirst = vi.fn();
vi.mocked(prisma.actionClass.findFirst).mockRejectedValue(new Error("fail"));
await expect(getActionClassByEnvironmentIdAndName("env2", "Action 2")).rejects.toThrow(DatabaseError);
await expect(getActionClassByProjectIdAndName("env2", "Action 2")).rejects.toThrow(DatabaseError);
});
});

View File

@@ -9,6 +9,7 @@ import { TActionClass, TActionClassInput, ZActionClassInput } from "@formbricks/
import { ZId, ZOptionalNumber, ZString } from "@formbricks/types/common";
import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
import { ITEMS_PER_PAGE } from "../constants";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
import { validateInputs } from "../utils/validate";
const selectActionClass = {
@@ -21,16 +22,17 @@ const selectActionClass = {
key: true,
noCodeConfig: true,
environmentId: true,
projectId: true,
} satisfies Prisma.ActionClassSelect;
export const getActionClasses = reactCache(
async (environmentId: string, page?: number): Promise<TActionClass[]> => {
validateInputs([environmentId, ZId], [page, ZOptionalNumber]);
async (projectId: string, page?: number): Promise<TActionClass[]> => {
validateInputs([projectId, ZId], [page, ZOptionalNumber]);
try {
return await prisma.actionClass.findMany({
where: {
environmentId: environmentId,
projectId,
},
select: selectActionClass,
take: page ? ITEMS_PER_PAGE : undefined,
@@ -40,21 +42,21 @@ export const getActionClasses = reactCache(
},
});
} catch (error) {
throw new DatabaseError(`Database error when fetching actions for environment ${environmentId}`);
throw new DatabaseError(`Database error when fetching actions for project ${projectId}`);
}
}
);
// This function is used to get an action by its name and environmentId(it can return private actions as well)
export const getActionClassByEnvironmentIdAndName = reactCache(
async (environmentId: string, name: string): Promise<TActionClass | null> => {
validateInputs([environmentId, ZId], [name, ZString]);
// This function is used to get an action by its name and projectId(it can return private actions as well)
export const getActionClassByProjectIdAndName = reactCache(
async (projectId: string, name: string): Promise<TActionClass | null> => {
validateInputs([projectId, ZId], [name, ZString]);
try {
const actionClass = await prisma.actionClass.findFirst({
where: {
name,
environmentId,
projectId,
},
select: selectActionClass,
});
@@ -113,10 +115,13 @@ export const createActionClass = async (
const { environmentId: _, ...actionClassInput } = actionClass;
try {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const actionClassPrisma = await prisma.actionClass.create({
data: {
...actionClassInput,
environment: { connect: { id: environmentId } },
environmentId,
projectId,
key: actionClassInput.type === "code" ? actionClassInput.key : undefined,
noCodeConfig:
actionClassInput.type === "noCode"

View File

@@ -14,6 +14,7 @@ import {
} from "@formbricks/types/integration/airtable";
import { AIRTABLE_CLIENT_ID, AIRTABLE_MESSAGE_LIMIT } from "../constants";
import { createOrUpdateIntegration, getIntegrationByType } from "../integration/service";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
import { delay } from "../utils/promises";
import { truncateText } from "../utils/strings";
@@ -78,10 +79,8 @@ export const fetchAirtableAuthToken = async (formData: Record<string, any>) => {
export const getAirtableToken = async (environmentId: string) => {
try {
const airtableIntegration = (await getIntegrationByType(
environmentId,
"airtable"
)) as TIntegrationAirtable;
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const airtableIntegration = (await getIntegrationByType(projectId, "airtable")) as TIntegrationAirtable;
const { access_token, expiry_date, refresh_token } = ZIntegrationAirtableCredential.parse(
airtableIntegration?.config.key

View File

@@ -40,6 +40,8 @@ export const GITHUB_ID = env.GITHUB_ID;
export const GITHUB_SECRET = env.GITHUB_SECRET;
export const GOOGLE_CLIENT_ID = env.GOOGLE_CLIENT_ID;
export const GOOGLE_CLIENT_SECRET = env.GOOGLE_CLIENT_SECRET;
export const HUB_API_URL = env.HUB_API_URL;
export const HUB_API_KEY = env.HUB_API_KEY;
export const AZUREAD_CLIENT_ID = env.AZUREAD_CLIENT_ID;
export const AZUREAD_CLIENT_SECRET = env.AZUREAD_CLIENT_SECRET;

View File

@@ -33,6 +33,8 @@ export const env = createEnv({
GOOGLE_SHEETS_REDIRECT_URL: z.string().optional(),
HTTP_PROXY: z.url().optional(),
HTTPS_PROXY: z.url().optional(),
HUB_API_URL: z.url(),
HUB_API_KEY: z.string().optional(),
IMPRINT_URL: z
.url()
.optional()
@@ -159,6 +161,8 @@ export const env = createEnv({
GOOGLE_SHEETS_REDIRECT_URL: process.env.GOOGLE_SHEETS_REDIRECT_URL,
HTTP_PROXY: process.env.HTTP_PROXY,
HTTPS_PROXY: process.env.HTTPS_PROXY,
HUB_API_URL: process.env.HUB_API_URL,
HUB_API_KEY: process.env.HUB_API_KEY,
IMPRINT_URL: process.env.IMPRINT_URL,
IMPRINT_ADDRESS: process.env.IMPRINT_ADDRESS,
INVITE_DISABLED: process.env.INVITE_DISABLED,

View File

@@ -7,6 +7,7 @@ import { ZId, ZOptionalNumber, ZString } from "@formbricks/types/common";
import { DatabaseError } from "@formbricks/types/errors";
import { TIntegration, TIntegrationInput, ZIntegrationType } from "@formbricks/types/integration";
import { ITEMS_PER_PAGE } from "../constants";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
import { validateInputs } from "../utils/validate";
const transformIntegration = (integration: TIntegration): TIntegration => {
@@ -28,6 +29,8 @@ export const createOrUpdateIntegration = async (
): Promise<TIntegration> => {
validateInputs([environmentId, ZId]);
const projectId = await getProjectIdFromEnvironmentId(environmentId);
try {
const integration = await prisma.integration.upsert({
where: {
@@ -38,11 +41,13 @@ export const createOrUpdateIntegration = async (
},
update: {
...integrationData,
environment: { connect: { id: environmentId } },
environmentId,
projectId,
},
create: {
...integrationData,
environment: { connect: { id: environmentId } },
environmentId,
projectId,
},
});
return integration;
@@ -56,13 +61,13 @@ export const createOrUpdateIntegration = async (
};
export const getIntegrations = reactCache(
async (environmentId: string, page?: number): Promise<TIntegration[]> => {
validateInputs([environmentId, ZId], [page, ZOptionalNumber]);
async (projectId: string, page?: number): Promise<TIntegration[]> => {
validateInputs([projectId, ZId], [page, ZOptionalNumber]);
try {
const integrations = await prisma.integration.findMany({
where: {
environmentId,
projectId,
},
take: page ? ITEMS_PER_PAGE : undefined,
skip: page ? ITEMS_PER_PAGE * (page - 1) : undefined,
@@ -94,16 +99,14 @@ export const getIntegration = reactCache(async (integrationId: string): Promise<
});
export const getIntegrationByType = reactCache(
async (environmentId: string, type: TIntegrationInput["type"]): Promise<TIntegration | null> => {
validateInputs([environmentId, ZId], [type, ZIntegrationType]);
async (projectId: string, type: TIntegrationInput["type"]): Promise<TIntegration | null> => {
validateInputs([projectId, ZId], [type, ZIntegrationType]);
try {
const integration = await prisma.integration.findUnique({
const integration = await prisma.integration.findFirst({
where: {
type_environmentId: {
environmentId,
type,
},
projectId,
type,
},
});
return integration ? transformIntegration(integration) : null;

View File

@@ -6,6 +6,7 @@ import {
import { ENCRYPTION_KEY } from "@/lib/constants";
import { symmetricDecrypt } from "@/lib/crypto";
import { getIntegrationByType } from "../integration/service";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
const fetchPages = async (config: TIntegrationNotionConfig) => {
try {
@@ -29,7 +30,8 @@ const fetchPages = async (config: TIntegrationNotionConfig) => {
export const getNotionDatabases = async (environmentId: string): Promise<TIntegrationNotionDatabase[]> => {
let results: TIntegrationNotionDatabase[] = [];
try {
const notionIntegration = (await getIntegrationByType(environmentId, "notion")) as TIntegrationNotion;
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const notionIntegration = (await getIntegrationByType(projectId, "notion")) as TIntegrationNotion;
if (notionIntegration && notionIntegration.config?.key.bot_id) {
results = await fetchPages(notionIntegration.config);
}

View File

@@ -75,6 +75,7 @@ export const responseSelection = {
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -4,6 +4,7 @@ import { TIntegration, TIntegrationItem } from "@formbricks/types/integration";
import { TIntegrationSlack, TIntegrationSlackCredential } from "@formbricks/types/integration/slack";
import { SLACK_MESSAGE_LIMIT } from "../constants";
import { deleteIntegration, getIntegrationByType } from "../integration/service";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
import { truncateText } from "../utils/strings";
export const fetchChannels = async (slackIntegration: TIntegration): Promise<TIntegrationItem[]> => {
@@ -58,7 +59,8 @@ export const fetchChannels = async (slackIntegration: TIntegration): Promise<TIn
export const getSlackChannels = async (environmentId: string): Promise<TIntegrationItem[]> => {
let channels: TIntegrationItem[] = [];
try {
const slackIntegration = (await getIntegrationByType(environmentId, "slack")) as TIntegrationSlack;
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const slackIntegration = (await getIntegrationByType(projectId, "slack")) as TIntegrationSlack;
if (slackIntegration && slackIntegration.config?.key) {
channels = await fetchChannels(slackIntegration);
}

View File

@@ -19,6 +19,7 @@ const selectContact = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
attributes: {
select: {
value: true,
@@ -41,6 +42,7 @@ const commonMockProperties = {
createdAt: currentDate,
updatedAt: currentDate,
environmentId: mockId,
projectId: null,
};
type SurveyMock = Prisma.SurveyGetPayload<{

View File

@@ -14,6 +14,7 @@ import {
import { TriggerUpdate } from "@/modules/survey/editor/types/survey-trigger";
import { getActionClasses } from "../actionClass/service";
import { ITEMS_PER_PAGE } from "../constants";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
import { validateInputs } from "../utils/validate";
import {
checkForInvalidImagesInQuestions,
@@ -30,6 +31,7 @@ export const selectSurvey = {
name: true,
type: true,
environmentId: true,
projectId: true,
createdBy: true,
status: true,
welcomeCard: true,
@@ -84,6 +86,7 @@ export const selectSurvey = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
name: true,
description: true,
type: true,
@@ -243,13 +246,13 @@ export const getSurveysByActionClassId = reactCache(
);
export const getSurveys = reactCache(
async (environmentId: string, limit?: number, offset?: number): Promise<TSurvey[]> => {
validateInputs([environmentId, ZId], [limit, ZOptionalNumber], [offset, ZOptionalNumber]);
async (projectId: string, limit?: number, offset?: number): Promise<TSurvey[]> => {
validateInputs([projectId, ZId], [limit, ZOptionalNumber], [offset, ZOptionalNumber]);
try {
const surveysPrisma = await prisma.survey.findMany({
where: {
environmentId,
projectId,
},
select: selectSurvey,
orderBy: {
@@ -270,12 +273,12 @@ export const getSurveys = reactCache(
}
);
export const getSurveyCount = reactCache(async (environmentId: string): Promise<number> => {
validateInputs([environmentId, ZId]);
export const getSurveyCount = reactCache(async (projectId: string): Promise<number> => {
validateInputs([projectId, ZId]);
try {
const surveyCount = await prisma.survey.count({
where: {
environmentId: environmentId,
projectId,
},
});
@@ -471,6 +474,11 @@ export const updateSurveyInternal = async (
id: environmentId,
},
},
project: {
connect: {
id: currentSurvey.projectId!,
},
},
},
},
},
@@ -624,7 +632,10 @@ export const createSurvey = async (
};
}
const organization = await getOrganizationByEnvironmentId(parsedEnvironmentId);
const [organization, projectId] = await Promise.all([
getOrganizationByEnvironmentId(parsedEnvironmentId),
getProjectIdFromEnvironmentId(parsedEnvironmentId),
]);
if (!organization) {
throw new ResourceNotFoundError("Organization", null);
}
@@ -659,6 +670,11 @@ export const createSurvey = async (
id: parsedEnvironmentId,
},
},
project: {
connect: {
id: projectId,
},
},
},
select: selectSurvey,
});
@@ -670,11 +686,8 @@ export const createSurvey = async (
title: survey.id,
filters: [],
isPrivate: true,
environment: {
connect: {
id: parsedEnvironmentId,
},
},
environmentId: parsedEnvironmentId,
projectId,
},
});

View File

@@ -4,7 +4,7 @@ import { prisma } from "@formbricks/database";
import { PrismaErrorType } from "@formbricks/database/types/error";
import { TTag } from "@formbricks/types/tags";
import { TagError } from "@/modules/projects/settings/types/tag";
import { createTag, getTag, getTagsByEnvironmentId } from "./service";
import { createTag, getTag, getTagsByProjectId } from "./service";
vi.mock("@formbricks/database", () => ({
prisma: {
@@ -21,8 +21,8 @@ describe("Tag Service", () => {
vi.clearAllMocks();
});
describe("getTagsByEnvironmentId", () => {
test("should return tags for a given environment ID", async () => {
describe("getTagsByProjectId", () => {
test("should return tags for a given project ID", async () => {
const mockTags: TTag[] = [
{
id: "tag1",
@@ -35,11 +35,11 @@ describe("Tag Service", () => {
vi.mocked(prisma.tag.findMany).mockResolvedValue(mockTags);
const result = await getTagsByEnvironmentId("env1");
const result = await getTagsByProjectId("env1");
expect(result).toEqual(mockTags);
expect(prisma.tag.findMany).toHaveBeenCalledWith({
where: {
environmentId: "env1",
projectId: "env1",
},
take: undefined,
skip: undefined,
@@ -59,11 +59,11 @@ describe("Tag Service", () => {
vi.mocked(prisma.tag.findMany).mockResolvedValue(mockTags);
const result = await getTagsByEnvironmentId("env1", 1);
const result = await getTagsByProjectId("env1", 1);
expect(result).toEqual(mockTags);
expect(prisma.tag.findMany).toHaveBeenCalledWith({
where: {
environmentId: "env1",
projectId: "env1",
},
take: 30,
skip: 0,

View File

@@ -6,29 +6,28 @@ import { PrismaErrorType } from "@formbricks/database/types/error";
import { ZId, ZOptionalNumber, ZString } from "@formbricks/types/common";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { TTag } from "@formbricks/types/tags";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { TagError } from "@/modules/projects/settings/types/tag";
import { ITEMS_PER_PAGE } from "../constants";
import { validateInputs } from "../utils/validate";
export const getTagsByEnvironmentId = reactCache(
async (environmentId: string, page?: number): Promise<TTag[]> => {
validateInputs([environmentId, ZId], [page, ZOptionalNumber]);
export const getTagsByProjectId = reactCache(async (projectId: string, page?: number): Promise<TTag[]> => {
validateInputs([projectId, ZId], [page, ZOptionalNumber]);
try {
const tags = await prisma.tag.findMany({
where: {
environmentId,
},
take: page ? ITEMS_PER_PAGE : undefined,
skip: page ? ITEMS_PER_PAGE * (page - 1) : undefined,
});
try {
const tags = await prisma.tag.findMany({
where: {
projectId,
},
take: page ? ITEMS_PER_PAGE : undefined,
skip: page ? ITEMS_PER_PAGE * (page - 1) : undefined,
});
return tags;
} catch (error) {
throw error;
}
return tags;
} catch (error) {
throw error;
}
);
});
export const getTag = reactCache(async (id: string): Promise<TTag | null> => {
validateInputs([id, ZId]);
@@ -52,11 +51,14 @@ export const createTag = async (
): Promise<Result<TTag, { code: TagError; message: string; meta?: Record<string, string> }>> => {
validateInputs([environmentId, ZId], [name, ZString]);
const projectId = await getProjectIdFromEnvironmentId(environmentId);
try {
const tag = await prisma.tag.create({
data: {
name,
environmentId,
projectId,
},
});

View File

@@ -3,6 +3,7 @@ import { cache as reactCache } from "react";
import { prisma } from "@formbricks/database";
import { PrismaErrorType } from "@formbricks/database/types/error";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { getEnvironment } from "@/lib/environment/service";
import { formatSnakeCaseToTitleCase } from "@/lib/utils/safe-identifier";
import { getContactAttributeKeysQuery } from "@/modules/api/v2/management/contact-attribute-keys/lib/utils";
import {
@@ -45,6 +46,14 @@ export const createContactAttributeKey = async (
): Promise<Result<ContactAttributeKey, ApiErrorResponseV2>> => {
const { environmentId, name, description, key, dataType } = contactAttributeKey;
const environment = await getEnvironment(environmentId);
if (!environment) {
return err({
type: "not_found",
details: [{ field: "environment", issue: "not found" }],
});
}
try {
const prismaData: Prisma.ContactAttributeKeyCreateInput = {
environment: {
@@ -52,6 +61,11 @@ export const createContactAttributeKey = async (
id: environmentId,
},
},
project: {
connect: {
id: environment.projectId,
},
},
name: name ?? formatSnakeCaseToTitleCase(key),
description,
key,

View File

@@ -58,6 +58,7 @@ export const getResponseForPipeline = async (
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -184,6 +184,7 @@ describe("Response Lib", () => {
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -17,6 +17,7 @@ export const ZWebhookUpdateSchema = ZWebhook.omit({
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
secret: true,
}).meta({
id: "webhookUpdate",

View File

@@ -3,6 +3,7 @@ import { prisma } from "@formbricks/database";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { InvalidInputError } from "@formbricks/types/errors";
import { generateWebhookSecret } from "@/lib/crypto";
import { getEnvironment } from "@/lib/environment/service";
import { validateWebhookUrl } from "@/lib/utils/validate-webhook-url";
import { getWebhooksQuery } from "@/modules/api/v2/management/webhooks/lib/utils";
import { TGetWebhooksFilter, TWebhookInput } from "@/modules/api/v2/management/webhooks/types/webhooks";
@@ -68,6 +69,14 @@ export const createWebhook = async (webhook: TWebhookInput): Promise<Result<Webh
});
}
const environment = await getEnvironment(environmentId);
if (!environment) {
return err({
type: "not_found",
details: [{ field: "environment", issue: "not_found" }],
});
}
try {
const secret = generateWebhookSecret();
@@ -77,6 +86,11 @@ export const createWebhook = async (webhook: TWebhookInput): Promise<Result<Webh
id: environmentId,
},
},
project: {
connect: {
id: environment.projectId,
},
},
name,
url,
source,

View File

@@ -21,16 +21,21 @@ interface ActivitySectionProps {
}
export const ActivitySection = async ({ environment, contactId, environmentTags }: ActivitySectionProps) => {
const [responses, displays] = await Promise.all([
const [responses, displays, project] = await Promise.all([
getResponsesByContactId(contactId),
getDisplaysByContactId(contactId),
getProjectByEnvironmentId(environment.id),
]);
if (!project) {
throw new ResourceNotFoundError("Project", null);
}
const allSurveyIds = [
...new Set([...(responses?.map((r) => r.surveyId) || []), ...displays.map((d) => d.surveyId)]),
];
const surveys: TSurvey[] = allSurveyIds.length === 0 ? [] : ((await getSurveys(environment.id)) ?? []);
const surveys: TSurvey[] = allSurveyIds.length === 0 ? [] : ((await getSurveys(project.id)) ?? []);
const session = await getServerSession(authOptions);
const t = await getTranslate();
@@ -48,11 +53,6 @@ export const ActivitySection = async ({ environment, contactId, environmentTags
throw new Error(t("environments.contacts.no_responses_found"));
}
const project = await getProjectByEnvironmentId(environment.id);
if (!project) {
throw new ResourceNotFoundError(t("common.workspace"), null);
}
const projectPermission = await getProjectPermissionByUserId(session.user.id, project.id);
const locale = user.locale ?? DEFAULT_LOCALE;

View File

@@ -1,5 +1,6 @@
import { ResourceNotFoundError } from "@formbricks/types/errors";
import { getTagsByEnvironmentId } from "@/lib/tag/service";
import { getTagsByProjectId } from "@/lib/tag/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { getTranslate } from "@/lingodotdev/server";
import { AttributesSection } from "@/modules/ee/contacts/[contactId]/components/attributes-section";
import { ContactControlBar } from "@/modules/ee/contacts/[contactId]/components/contact-control-bar";
@@ -22,13 +23,15 @@ export const SingleContactPage = async (props: {
const { environment, isReadOnly, organization } = await getEnvironmentAuth(params.environmentId);
const projectId = await getProjectIdFromEnvironmentId(params.environmentId);
const [environmentTags, contact, publishedLinkSurveys, attributesWithKeyInfo, allAttributeKeys] =
await Promise.all([
getTagsByEnvironmentId(params.environmentId),
getTagsByProjectId(projectId),
getContact(params.contactId),
getPublishedLinkSurveys(params.environmentId),
getPublishedLinkSurveys(projectId),
getContactAttributesWithKeyInfo(params.contactId),
getContactAttributeKeys(params.environmentId),
getContactAttributeKeys(projectId),
]);
if (!contact) {

View File

@@ -30,6 +30,8 @@ const ZGetContactsAction = z.object({
export const getContactsAction = authenticatedActionClient
.inputSchema(ZGetContactsAction)
.action(async ({ ctx, parsedInput }) => {
const projectId = await getProjectIdFromEnvironmentId(parsedInput.environmentId);
await checkAuthorizationUpdated({
userId: ctx.user.id,
organizationId: await getOrganizationIdFromEnvironmentId(parsedInput.environmentId),
@@ -41,12 +43,12 @@ export const getContactsAction = authenticatedActionClient
{
type: "projectTeam",
minPermission: "read",
projectId: await getProjectIdFromEnvironmentId(parsedInput.environmentId),
projectId,
},
],
});
return getContacts(parsedInput.environmentId, parsedInput.offset, parsedInput.searchValue);
return getContacts(projectId, parsedInput.offset, parsedInput.searchValue);
});
const ZContactDeleteAction = z.object({

View File

@@ -15,7 +15,7 @@ const getEnvironment = async (environmentId: string) =>
async () => {
return prisma.environment.findUnique({
where: { id: environmentId },
select: { id: true, type: true },
select: { id: true, type: true, projectId: true },
});
},
createCacheKey.environment.config(environmentId),
@@ -63,12 +63,15 @@ const getContactWithFullData = async (environmentId: string, userId: string) =>
/**
* Creates contact with comprehensive data structure
*/
const createContact = async (environmentId: string, userId: string) => {
const createContact = async (environmentId: string, projectId: string, userId: string) => {
return prisma.contact.create({
data: {
environment: {
connect: { id: environmentId },
},
project: {
connect: { id: projectId },
},
attributes: {
create: [
{
@@ -164,7 +167,7 @@ export const updateUser = async (
// Create contact if doesn't exist
if (!contactData) {
contactData = await createContact(environmentId, userId);
contactData = await createContact(environmentId, environment.projectId, userId);
}
// Process contact attributes efficiently (single pass)

View File

@@ -5,6 +5,7 @@ import { PrismaErrorType } from "@formbricks/database/types/error";
import { TContactAttributeKey } from "@formbricks/types/contact-attribute-key";
import { DatabaseError, OperationNotAllowedError } from "@formbricks/types/errors";
import { MAX_ATTRIBUTE_CLASSES_PER_ENVIRONMENT } from "@/lib/constants";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { formatSnakeCaseToTitleCase } from "@/lib/utils/safe-identifier";
import { TContactAttributeKeyCreateInput } from "@/modules/ee/contacts/api/v1/management/contact-attribute-keys/[contactAttributeKeyId]/types/contact-attribute-keys";
@@ -29,6 +30,8 @@ export const createContactAttributeKey = async (
environmentId: string,
data: TContactAttributeKeyCreateInput
): Promise<TContactAttributeKey | null> => {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const contactAttributeKeysCount = await prisma.contactAttributeKey.count({
where: {
environmentId,
@@ -54,6 +57,11 @@ export const createContactAttributeKey = async (
id: environmentId,
},
},
project: {
connect: {
id: projectId,
},
},
},
});

View File

@@ -4,6 +4,7 @@ import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
import { TContactAttributeDataType } from "@formbricks/types/contact-attribute-key";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { getEnvironment } from "@/lib/environment/service";
import { isSafeIdentifier } from "@/lib/utils/safe-identifier";
import { ApiErrorResponseV2 } from "@/modules/api/v2/types/api-error";
import { prepareAttributeColumnsForStorage } from "@/modules/ee/contacts/lib/attribute-storage";
@@ -406,6 +407,7 @@ const upsertAttributeKeysInBatches = async (
tx: Prisma.TransactionClient,
keysToUpsert: Map<string, { key: string; name: string; dataType: TContactAttributeDataType }>,
environmentId: string,
projectId: string,
attributeKeyMap: Record<string, string>
): Promise<void> => {
const keysArray = Array.from(keysToUpsert.values());
@@ -414,17 +416,18 @@ const upsertAttributeKeysInBatches = async (
const batch = keysArray.slice(i, i + BATCH_SIZE);
const upsertedKeys = await tx.$queryRaw<{ id: string; key: string }[]>`
INSERT INTO "ContactAttributeKey" ("id", "key", "name", "environmentId", "dataType", "created_at", "updated_at")
SELECT
INSERT INTO "ContactAttributeKey" ("id", "key", "name", "environmentId", "projectId", "dataType", "created_at", "updated_at")
SELECT
unnest(${Prisma.sql`ARRAY[${batch.map(() => createId())}]`}),
unnest(${Prisma.sql`ARRAY[${batch.map((k) => k.key)}]`}),
unnest(${Prisma.sql`ARRAY[${batch.map((k) => k.name)}]`}),
${environmentId},
${projectId},
unnest(${Prisma.sql`ARRAY[${batch.map((k) => k.dataType)}]`}::text[]::"ContactAttributeDataType"[]),
NOW(),
NOW()
ON CONFLICT ("key", "environmentId")
DO UPDATE SET
ON CONFLICT ("key", "environmentId")
DO UPDATE SET
"name" = EXCLUDED."name",
"updated_at" = NOW()
RETURNING "id", "key"
@@ -490,6 +493,16 @@ export const upsertBulkContacts = async (
>
> => {
const contactIdxWithConflictingUserIds: number[] = [];
const environment = await getEnvironment(environmentId);
if (!environment) {
return err({
type: "not_found",
details: [{ field: "environment", issue: "not found" }],
});
}
const { projectId } = environment;
const { userIdsInContacts, attributeKeys } = extractContactMetadata(contacts);
const [existingUserIds, existingContactsByEmail, existingAttributeKeys] = await Promise.all([
@@ -624,11 +637,11 @@ export const upsertBulkContacts = async (
// Upsert attribute keys in batches
if (keysToUpsert.size > 0) {
await upsertAttributeKeysInBatches(tx, keysToUpsert, environmentId, attributeKeyMap);
await upsertAttributeKeysInBatches(tx, keysToUpsert, environmentId, projectId, attributeKeyMap);
}
// Create new contacts
const newContacts = contactsToCreate.map(() => ({ id: createId(), environmentId }));
const newContacts = contactsToCreate.map(() => ({ id: createId(), environmentId, projectId }));
if (newContacts.length > 0) {
await tx.contact.createMany({ data: newContacts });

View File

@@ -1,5 +1,6 @@
import { prisma } from "@formbricks/database";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { getEnvironment } from "@/lib/environment/service";
import { ApiErrorResponseV2 } from "@/modules/api/v2/types/api-error";
import { readAttributeValue } from "@/modules/ee/contacts/lib/attribute-storage";
import { TContactCreateRequest, TContactResponse } from "@/modules/ee/contacts/types/contact";
@@ -18,6 +19,14 @@ export const createContact = async (
});
}
const environment = await getEnvironment(environmentId);
if (!environment) {
return err({
type: "not_found",
details: [{ field: "environment", issue: "not found" }],
});
}
// Extract userId if present
const userId = attributes.userId;
@@ -98,6 +107,7 @@ export const createContact = async (
const result = await prisma.contact.create({
data: {
environmentId,
projectId: environment.projectId,
attributes: {
createMany: {
data: attributeData,

View File

@@ -1,3 +1,4 @@
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { getLocale } from "@/lingodotdev/language";
import { getTranslate } from "@/lingodotdev/server";
import { ContactsPageLayout } from "@/modules/ee/contacts/components/contacts-page-layout";
@@ -15,9 +16,10 @@ export const AttributesPage = async ({
const params = await paramsProps;
const locale = await getLocale();
const t = await getTranslate();
const projectId = await getProjectIdFromEnvironmentId(params.environmentId);
const [{ isReadOnly, organization }, contactAttributeKeys] = await Promise.all([
getEnvironmentAuth(params.environmentId),
getContactAttributeKeys(params.environmentId),
getContactAttributeKeys(projectId),
]);
const isContactsEnabled = await getIsContactsEnabled(organization.id);

View File

@@ -4,6 +4,7 @@ import { ZId, ZString } from "@formbricks/types/common";
import { TContactAttributesInput, ZContactAttributesInput } from "@formbricks/types/contact-attribute";
import { TContactAttributeKey } from "@formbricks/types/contact-attribute-key";
import { MAX_ATTRIBUTE_CLASSES_PER_ENVIRONMENT } from "@/lib/constants";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { formatSnakeCaseToTitleCase, isSafeIdentifier } from "@/lib/utils/safe-identifier";
import { validateInputs } from "@/lib/utils/validate";
import { prepareNewSDKAttributeForStorage } from "@/modules/ee/contacts/lib/attribute-storage";
@@ -145,14 +146,20 @@ export const updateAttributes = async (
? null
: String(contactAttributesParam.userId);
// Fetch current attributes, contact attribute keys, and email/userId checks in parallel
const [currentAttributes, contactAttributeKeys, existingEmailAttribute, existingUserIdAttribute] =
await Promise.all([
getContactAttributes(contactId),
getContactAttributeKeys(environmentId),
emailValue ? hasEmailAttribute(emailValue, environmentId, contactId) : Promise.resolve(null),
userIdValue ? hasUserIdAttribute(userIdValue, environmentId, contactId) : Promise.resolve(null),
]);
// Fetch current attributes, contact attribute keys, environment, and email/userId checks in parallel
const [
currentAttributes,
contactAttributeKeys,
projectId,
existingEmailAttribute,
existingUserIdAttribute,
] = await Promise.all([
getContactAttributes(contactId),
getContactAttributeKeys(environmentId),
getProjectIdFromEnvironmentId(environmentId),
emailValue ? hasEmailAttribute(emailValue, environmentId, contactId) : Promise.resolve(null),
userIdValue ? hasUserIdAttribute(userIdValue, environmentId, contactId) : Promise.resolve(null),
]);
// Process email and userId existence early
const emailExists = !!existingEmailAttribute;
@@ -360,6 +367,7 @@ export const updateAttributes = async (
type: "custom",
dataType,
environment: { connect: { id: environmentId } },
project: { connect: { id: projectId } },
attributes: {
create: {
contactId,

View File

@@ -3,12 +3,13 @@ import { prisma } from "@formbricks/database";
import { PrismaErrorType } from "@formbricks/database/types/error";
import { TContactAttributeDataType, TContactAttributeKey } from "@formbricks/types/contact-attribute-key";
import { InvalidInputError, OperationNotAllowedError, ResourceNotFoundError } from "@formbricks/types/errors";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { formatSnakeCaseToTitleCase } from "@/lib/utils/safe-identifier";
export const getContactAttributeKeys = reactCache(
async (environmentId: string): Promise<TContactAttributeKey[]> => {
async (projectId: string): Promise<TContactAttributeKey[]> => {
return await prisma.contactAttributeKey.findMany({
where: { environmentId },
where: { projectId },
});
}
);
@@ -31,6 +32,8 @@ export const createContactAttributeKey = async (data: {
description?: string;
dataType?: TContactAttributeDataType;
}): Promise<TContactAttributeKey> => {
const projectId = await getProjectIdFromEnvironmentId(data.environmentId);
try {
const contactAttributeKey = await prisma.contactAttributeKey.create({
data: {
@@ -38,6 +41,7 @@ export const createContactAttributeKey = async (data: {
name: data.name ?? formatSnakeCaseToTitleCase(data.key),
description: data.description ?? null,
environmentId: data.environmentId,
projectId,
type: "custom",
...(data.dataType && { dataType: data.dataType }),
},

View File

@@ -7,6 +7,7 @@ import { ZId, ZOptionalNumber, ZOptionalString } from "@formbricks/types/common"
import { TContactAttributeDataType } from "@formbricks/types/contact-attribute-key";
import { DatabaseError, ValidationError } from "@formbricks/types/errors";
import { ITEMS_PER_PAGE } from "@/lib/constants";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { formatSnakeCaseToTitleCase, isSafeIdentifier } from "@/lib/utils/safe-identifier";
import { validateInputs } from "@/lib/utils/validate";
import { prepareAttributeColumnsForStorage } from "@/modules/ee/contacts/lib/attribute-storage";
@@ -98,6 +99,7 @@ const selectContact = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
attributes: {
select: {
value: true,
@@ -114,8 +116,8 @@ const selectContact = {
},
} satisfies Prisma.ContactSelect;
export const buildContactWhereClause = (environmentId: string, search?: string): Prisma.ContactWhereInput => {
const whereClause: Prisma.ContactWhereInput = { environmentId };
export const buildContactWhereClause = (projectId: string, search?: string): Prisma.ContactWhereInput => {
const whereClause: Prisma.ContactWhereInput = { projectId };
if (search) {
whereClause.OR = [
@@ -142,12 +144,12 @@ export const buildContactWhereClause = (environmentId: string, search?: string):
};
export const getContacts = reactCache(
async (environmentId: string, offset?: number, searchValue?: string): Promise<TContactWithAttributes[]> => {
validateInputs([environmentId, ZId], [offset, ZOptionalNumber], [searchValue, ZOptionalString]);
async (projectId: string, offset?: number, searchValue?: string): Promise<TContactWithAttributes[]> => {
validateInputs([projectId, ZId], [offset, ZOptionalNumber], [searchValue, ZOptionalString]);
try {
const contacts = await prisma.contact.findMany({
where: buildContactWhereClause(environmentId, searchValue),
where: buildContactWhereClause(projectId, searchValue),
select: selectContact,
take: ITEMS_PER_PAGE,
skip: offset,
@@ -398,7 +400,8 @@ const createMissingAttributeKeys = async (
lowercaseToActualKeyMap: Map<string, string>,
attributeKeyMap: Map<string, string>,
attributeTypeMap: Map<string, TAttributeTypeInfo>,
environmentId: string
environmentId: string,
projectId: string
): Promise<void> => {
const missingKeys = Array.from(csvKeys).filter((key) => !lowercaseToActualKeyMap.has(key.toLowerCase()));
@@ -427,6 +430,7 @@ const createMissingAttributeKeys = async (
name: formatSnakeCaseToTitleCase(key),
dataType: attributeTypeMap.get(key)?.dataType ?? "string",
environmentId,
projectId,
})),
skipDuplicates: true,
});
@@ -461,6 +465,7 @@ type TCsvProcessingContext = {
attributeTypeMap: Map<string, TAttributeTypeInfo>;
duplicateContactsAction: "skip" | "update" | "overwrite";
environmentId: string;
projectId: string;
};
/**
@@ -478,6 +483,7 @@ const processCsvRecord = async (
attributeTypeMap,
duplicateContactsAction,
environmentId,
projectId,
} = ctx;
// Map CSV keys to actual DB keys (case-insensitive matching)
const mappedRecord: Record<string, string> = {};
@@ -500,6 +506,7 @@ const processCsvRecord = async (
return prisma.contact.create({
data: {
environmentId,
projectId,
attributes: {
create: createAttributeConnections(mappedRecord, environmentId, attributeTypeMap),
},
@@ -610,14 +617,17 @@ export const createContactsFromCSV = async (
);
try {
// Step 1: Extract metadata from CSV data
// Step 1: Resolve projectId from environment
const projectId = await getProjectIdFromEnvironmentId(environmentId);
// Step 2: Extract metadata from CSV data
const { csvEmails, csvUserIds, csvKeys, attributeValuesByKey } = extractCsvMetadata(csvData);
// Step 2: Fetch existing data from database
// Step 3: Fetch existing data from database
const [existingContactsByEmail, existingUserIds, existingAttributeKeys] = await Promise.all([
prisma.contact.findMany({
where: {
environmentId,
projectId,
attributes: { some: { attributeKey: { key: "email" }, value: { in: csvEmails } } },
},
select: {
@@ -626,11 +636,11 @@ export const createContactsFromCSV = async (
},
}),
prisma.contactAttribute.findMany({
where: { attributeKey: { key: "userId", environmentId }, value: { in: csvUserIds } },
where: { attributeKey: { key: "userId", projectId }, value: { in: csvUserIds } },
select: { value: true, contactId: true },
}),
prisma.contactAttributeKey.findMany({
where: { environmentId },
where: { projectId },
select: { key: true, id: true, dataType: true },
}),
]);
@@ -668,7 +678,8 @@ export const createContactsFromCSV = async (
lowercaseToActualKeyMap,
attributeKeyMap,
attributeTypeMap,
environmentId
environmentId,
projectId
);
// Step 6: Process each CSV record
@@ -680,6 +691,7 @@ export const createContactsFromCSV = async (
attributeTypeMap,
duplicateContactsAction,
environmentId,
projectId,
};
const CHUNK_SIZE = 50;

View File

@@ -10,10 +10,10 @@ export interface PublishedLinkSurvey {
}
export const getPublishedLinkSurveys = reactCache(
async (environmentId: string): Promise<PublishedLinkSurvey[]> => {
async (projectId: string): Promise<PublishedLinkSurvey[]> => {
try {
const surveys = await prisma.survey.findMany({
where: { environmentId, status: "inProgress", type: "link" },
where: { projectId, status: "inProgress", type: "link" },
select: {
id: true,
name: true,

View File

@@ -1,4 +1,5 @@
import { ITEMS_PER_PAGE } from "@/lib/constants";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { getTranslate } from "@/lingodotdev/server";
import { ContactsPageLayout } from "@/modules/ee/contacts/components/contacts-page-layout";
import { UploadContactsCSVButton } from "@/modules/ee/contacts/components/upload-contacts-button";
@@ -19,12 +20,14 @@ export const ContactsPage = async ({
const t = await getTranslate();
const projectId = await getProjectIdFromEnvironmentId(params.environmentId);
const isContactsEnabled = await getIsContactsEnabled(organization.id);
const isQuotasAllowed = await getIsQuotasEnabled(organization.id);
const contactAttributeKeys = await getContactAttributeKeys(params.environmentId);
const initialContacts = await getContacts(params.environmentId, 0);
const contactAttributeKeys = await getContactAttributeKeys(projectId);
const initialContacts = await getContacts(projectId, 0);
const AddContactsButton = (
<UploadContactsCSVButton environmentId={environment.id} contactAttributeKeys={contactAttributeKeys} />

View File

@@ -45,6 +45,7 @@ export function CreateSegmentModal({
isPrivate: false,
filters: [],
environmentId,
projectId: null,
id: "",
surveys: [],
createdAt: new Date(),

View File

@@ -32,6 +32,7 @@ import {
ZSegmentUpdateInput,
} from "@formbricks/types/segment";
import { getSurvey } from "@/lib/survey/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { validateInputs } from "@/lib/utils/validate";
import { isResourceFilter, searchForAttributeKeyInSegment } from "@/modules/ee/contacts/segments/lib/utils";
import { isSameDay, subtractTimeUnit } from "./date-utils";
@@ -55,6 +56,7 @@ export const selectSegment = {
title: true,
description: true,
environmentId: true,
projectId: true,
filters: true,
isPrivate: true,
surveys: {
@@ -107,12 +109,12 @@ export const getSegment = reactCache(async (segmentId: string): Promise<TSegment
}
});
export const getSegments = reactCache(async (environmentId: string): Promise<TSegmentWithSurveyRefs[]> => {
validateInputs([environmentId, ZId]);
export const getSegments = reactCache(async (projectId: string): Promise<TSegmentWithSurveyRefs[]> => {
validateInputs([projectId, ZId]);
try {
const segments = await prisma.segment.findMany({
where: {
environmentId,
projectId,
},
select: selectSegment,
});
@@ -138,6 +140,8 @@ export const createSegment = async (segmentCreateInput: TSegmentCreateInput): Pr
const surveyConnect = surveyId ? { surveys: { connect: { id: surveyId } } } : {};
const projectId = await getProjectIdFromEnvironmentId(environmentId);
try {
// Private segments use upsert because auto-save may have already created a
// default (empty-filter) segment via connectOrCreate before the user publishes.
@@ -156,11 +160,13 @@ export const createSegment = async (segmentCreateInput: TSegmentCreateInput): Pr
description,
isPrivate,
filters,
projectId,
...surveyConnect,
},
update: {
description,
filters,
projectId,
...surveyConnect,
},
select: selectSegment,
@@ -176,6 +182,7 @@ export const createSegment = async (segmentCreateInput: TSegmentCreateInput): Pr
description,
isPrivate,
filters,
projectId,
...surveyConnect,
},
select: selectSegment,
@@ -233,6 +240,7 @@ export const cloneSegment = async (segmentId: string, surveyId: string): Promise
isPrivate: segment.isPrivate,
environmentId: segment.environmentId,
filters: segment.filters,
projectId: segment.projectId,
surveys: {
connect: {
id: surveyId,
@@ -327,7 +335,8 @@ export const resetSegmentInSurvey = async (surveyId: string): Promise<TSegment>
isPrivate: true,
filters: [],
surveys: { connect: { id: surveyId } },
environment: { connect: { id: survey?.environmentId } },
environmentId: survey.environmentId,
projectId: survey.projectId,
},
select: selectSegment,
});
@@ -385,13 +394,13 @@ export const updateSegment = async (segmentId: string, data: TSegmentUpdateInput
}
};
export const getSegmentsByAttributeKey = reactCache(async (environmentId: string, attributeKey: string) => {
validateInputs([environmentId, ZId], [attributeKey, ZString]);
export const getSegmentsByAttributeKey = reactCache(async (projectId: string, attributeKey: string) => {
validateInputs([projectId, ZId], [attributeKey, ZString]);
try {
const segments = await prisma.segment.findMany({
where: {
environmentId,
projectId,
},
select: selectSegment,
});

View File

@@ -1,3 +1,4 @@
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { getTranslate } from "@/lingodotdev/server";
import { ContactsPageLayout } from "@/modules/ee/contacts/components/contacts-page-layout";
import { getContactAttributeKeys } from "@/modules/ee/contacts/lib/contact-attribute-keys";
@@ -17,9 +18,11 @@ export const SegmentsPage = async ({
const { isReadOnly, organization } = await getEnvironmentAuth(params.environmentId);
const projectId = await getProjectIdFromEnvironmentId(params.environmentId);
const [segments, contactAttributeKeys] = await Promise.all([
getSegments(params.environmentId),
getContactAttributeKeys(params.environmentId),
getSegments(projectId),
getContactAttributeKeys(projectId),
]);
const isContactsEnabled = await getIsContactsEnabled(organization.id);

View File

@@ -27,6 +27,7 @@ export const WebhookTable = ({
const { t } = useTranslation();
const [activeWebhook, setActiveWebhook] = useState<Webhook>({
environmentId: environment.id,
projectId: null,
id: "",
name: "",
url: "",

View File

@@ -10,6 +10,7 @@ import {
UnknownError,
} from "@formbricks/types/errors";
import { generateStandardWebhookSignature, generateWebhookSecret } from "@/lib/crypto";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { validateInputs } from "@/lib/utils/validate";
import { validateWebhookUrl } from "@/lib/utils/validate-webhook-url";
import { getTranslate } from "@/lingodotdev/server";
@@ -105,6 +106,8 @@ export const createWebhook = async (
): Promise<Webhook> => {
await validateWebhookUrl(webhookInput.url);
const projectId = await getProjectIdFromEnvironmentId(environmentId);
try {
if (isDiscordWebhook(webhookInput.url)) {
throw new UnknownError("Discord webhooks are currently not supported.");
@@ -117,11 +120,8 @@ export const createWebhook = async (
...webhookInput,
surveyIds: webhookInput.surveyIds || [],
secret: signingSecret,
environment: {
connect: {
id: environmentId,
},
},
environmentId,
projectId,
},
});
@@ -139,13 +139,13 @@ export const createWebhook = async (
}
};
export const getWebhooks = async (environmentId: string): Promise<Webhook[]> => {
validateInputs([environmentId, ZId]);
export const getWebhooks = async (projectId: string): Promise<Webhook[]> => {
validateInputs([projectId, ZId]);
try {
const webhooks = await prisma.webhook.findMany({
where: {
environmentId: environmentId,
projectId,
},
orderBy: {
createdAt: "desc",

View File

@@ -16,9 +16,11 @@ export const WebhooksPage = async (props: { params: Promise<{ environmentId: str
const { isReadOnly, environment } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const [webhooks, surveys] = await Promise.all([
getWebhooks(params.environmentId),
getSurveys(params.environmentId, 200), // HOTFIX: not getting all surveys for now since it's maxing out the prisma accelerate limit
getWebhooks(projectId),
getSurveys(projectId, 200), // HOTFIX: not getting all surveys for now since it's maxing out the prisma accelerate limit
]);
const renderAddWebhookButton = () => <AddWebhookButton environment={environment} surveys={surveys} />;

View File

@@ -1,6 +1,7 @@
import { SettingsCard } from "@/app/(app)/environments/[environmentId]/settings/components/SettingsCard";
import { getTagsByEnvironmentId } from "@/lib/tag/service";
import { getTagsByProjectId } from "@/lib/tag/service";
import { getTagsOnResponsesCount } from "@/lib/tagOnResponse/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { getTranslate } from "@/lingodotdev/server";
import { getEnvironmentAuth } from "@/modules/environments/lib/utils";
import { ProjectConfigNavigation } from "@/modules/projects/settings/components/project-config-navigation";
@@ -14,8 +15,10 @@ export const TagsPage = async (props: { params: Promise<{ environmentId: string
const { isReadOnly } = await getEnvironmentAuth(params.environmentId);
const projectId = await getProjectIdFromEnvironmentId(params.environmentId);
const [tags, environmentTagsCount] = await Promise.all([
getTagsByEnvironmentId(params.environmentId),
getTagsByProjectId(projectId),
getTagsOnResponsesCount(params.environmentId),
]);

View File

@@ -8,6 +8,7 @@ import {
subscribeOrganizationMembersToSurveyResponses,
} from "@/lib/organization/service";
import { validateMediaAndPrepareBlocks } from "@/lib/survey/utils";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { TriggerUpdate } from "@/modules/survey/editor/types/survey-trigger";
import { getActionClasses } from "@/modules/survey/lib/action-class";
import { selectSurvey } from "@/modules/survey/lib/survey";
@@ -24,7 +25,12 @@ export const createSurvey = async (
delete restSurveyBody.languages;
}
const actionClasses = await getActionClasses(environmentId);
const [organization, projectId] = await Promise.all([
getOrganizationByEnvironmentId(environmentId),
getProjectIdFromEnvironmentId(environmentId),
]);
const actionClasses = await getActionClasses(projectId);
// @ts-expect-error
let data: Omit<Prisma.SurveyCreateInput, "environment"> = {
@@ -43,8 +49,6 @@ export const createSurvey = async (
},
};
}
const organization = await getOrganizationByEnvironmentId(environmentId);
if (!organization) {
throw new ResourceNotFoundError("Organization", null);
}
@@ -75,6 +79,11 @@ export const createSurvey = async (
id: environmentId,
},
},
project: {
connect: {
id: projectId,
},
},
},
select: selectSurvey,
});
@@ -86,11 +95,8 @@ export const createSurvey = async (
title: survey.id,
filters: [],
isPrivate: true,
environment: {
connect: {
id: environmentId,
},
},
environmentId,
projectId,
},
});

View File

@@ -49,6 +49,7 @@ export const HowToSendCard = ({ localSurvey, setLocalSurvey, environment }: HowT
isPrivate: true,
title: localSurvey.id,
environmentId: environment.id,
projectId: null,
surveys: [localSurvey.id],
filters: [],
createdAt: new Date(),

View File

@@ -3,6 +3,7 @@ import { prisma } from "@formbricks/database";
import { PrismaErrorType } from "@formbricks/database/types/error";
import { TActionClassInput } from "@formbricks/types/action-classes";
import { DatabaseError } from "@formbricks/types/errors";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
export const createActionClass = async (
environmentId: string,
@@ -11,10 +12,13 @@ export const createActionClass = async (
const { environmentId: _, ...actionClassInput } = actionClass;
try {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const actionClassPrisma = await prisma.actionClass.create({
data: {
...actionClassInput,
environment: { connect: { id: environmentId } },
environmentId,
projectId,
key: actionClassInput.type === "code" ? actionClassInput.key : undefined,
noCodeConfig:
actionClassInput.type === "noCode"

View File

@@ -6,6 +6,7 @@ import { TSegment, ZSegmentFilters } from "@formbricks/types/segment";
import { TSurvey } from "@formbricks/types/surveys/types";
import { updateSurveyInternal } from "@/lib/survey/service";
import { validateMediaAndPrepareBlocks } from "@/lib/survey/utils";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { TriggerUpdate } from "@/modules/survey/editor/types/survey-trigger";
import { getActionClasses } from "@/modules/survey/lib/action-class";
import { getOrganizationAIKeys, getOrganizationIdFromEnvironmentId } from "@/modules/survey/lib/organization";
@@ -21,8 +22,12 @@ export const updateSurvey = async (updatedSurvey: TSurvey): Promise<TSurvey> =>
const surveyId = updatedSurvey.id;
let data: any = {};
const actionClasses = await getActionClasses(updatedSurvey.environmentId);
const currentSurvey = await getSurvey(surveyId);
const [actionClasses, currentSurvey] = await Promise.all([
getProjectIdFromEnvironmentId(updatedSurvey.environmentId).then((projectId) =>
getActionClasses(projectId)
),
getSurvey(surveyId),
]);
if (!currentSurvey) {
throw new ResourceNotFoundError("Survey", surveyId);
@@ -161,6 +166,8 @@ export const updateSurvey = async (updatedSurvey: TSurvey): Promise<TSurvey> =>
}
} else if (type === "app") {
if (!currentSurvey.segment) {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
await prisma.survey.update({
where: {
id: surveyId,
@@ -183,6 +190,11 @@ export const updateSurvey = async (updatedSurvey: TSurvey): Promise<TSurvey> =>
id: environmentId,
},
},
project: {
connect: {
id: projectId,
},
},
},
},
},

View File

@@ -8,6 +8,7 @@ import {
UNSPLASH_ACCESS_KEY,
} from "@/lib/constants";
import { getPublicDomain } from "@/lib/getPublicUrl";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { getTranslate } from "@/lingodotdev/server";
import { getContactAttributeKeys } from "@/modules/ee/contacts/lib/contact-attribute-keys";
import { getSegments } from "@/modules/ee/contacts/segments/lib/segments";
@@ -50,14 +51,16 @@ export const SurveyEditorPage = async (props: {
await getEnvironmentAuth(params.environmentId);
const t = await getTranslate();
const projectId = await getProjectIdFromEnvironmentId(params.environmentId);
const [survey, projectWithTeamIds, actionClasses, contactAttributeKeys, responseCount, segments] =
await Promise.all([
getSurvey(params.surveyId),
getProjectWithTeamIdsByEnvironmentId(params.environmentId),
getActionClasses(params.environmentId),
getContactAttributeKeys(params.environmentId),
getActionClasses(projectId),
getContactAttributeKeys(projectId),
getResponseCountBySurveyId(params.surveyId),
getSegments(params.environmentId),
getSegments(projectId),
]);
if (!projectWithTeamIds) {

View File

@@ -5,19 +5,19 @@ import { prisma } from "@formbricks/database";
import { DatabaseError } from "@formbricks/types/errors";
import { validateInputs } from "@/lib/utils/validate";
export const getActionClasses = reactCache(async (environmentId: string): Promise<ActionClass[]> => {
validateInputs([environmentId, z.cuid2()]);
export const getActionClasses = reactCache(async (projectId: string): Promise<ActionClass[]> => {
validateInputs([projectId, z.cuid2()]);
try {
return await prisma.actionClass.findMany({
where: {
environmentId: environmentId,
projectId,
},
orderBy: {
createdAt: "asc",
},
});
} catch (error) {
throw new DatabaseError(`Database error when fetching actions for environment ${environmentId}`);
throw new DatabaseError(`Database error when fetching actions for project ${projectId}`);
}
});

View File

@@ -14,6 +14,7 @@ export const selectSurvey = {
name: true,
type: true,
environmentId: true,
projectId: true,
createdBy: true,
status: true,
welcomeCard: true,
@@ -69,6 +70,7 @@ export const selectSurvey = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
name: true,
description: true,
type: true,
@@ -84,6 +86,7 @@ export const selectSurvey = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
title: true,
description: true,
isPrivate: true,

View File

@@ -148,12 +148,12 @@ function buildStandardCursorWhere(
}
function buildBaseWhere(
environmentId: string,
projectId: string,
filterCriteria?: TSurveyFilterCriteria,
extraWhere?: Prisma.SurveyWhereInput
): Prisma.SurveyWhereInput {
return {
environmentId,
projectId,
...buildWhereClause(filterCriteria),
...extraWhere,
};
@@ -197,7 +197,7 @@ function getRelevanceNextCursor(survey: TSurveyRow, bucket: TRelevanceBucket): T
}
async function findSurveyRows(
environmentId: string,
projectId: string,
limit: number,
sortBy: TStandardSurveyListSort,
filterCriteria?: TSurveyFilterCriteria,
@@ -207,7 +207,7 @@ async function findSurveyRows(
const cursorWhere = cursor ? buildStandardCursorWhere(sortBy, cursor) : undefined;
return prisma.survey.findMany({
where: buildBaseWhere(environmentId, filterCriteria, {
where: buildBaseWhere(projectId, filterCriteria, {
...extraWhere,
...cursorWhere,
}),
@@ -237,11 +237,11 @@ function buildSurveyListPage(rows: TSurveyRow[], cursor: TSurveyListPageCursor |
}
async function getStandardSurveyListPage(
environmentId: string,
projectId: string,
options: TGetSurveyListPageOptions & { sortBy: TStandardSurveyListSort }
): Promise<TSurveyListPage> {
const surveyRows = await findSurveyRows(
environmentId,
projectId,
options.limit,
options.sortBy,
options.filterCriteria,
@@ -258,7 +258,7 @@ async function getStandardSurveyListPage(
}
async function findRelevanceRows(
environmentId: string,
projectId: string,
limit: number,
filterCriteria: TSurveyFilterCriteria | undefined,
bucket: TRelevanceBucket,
@@ -271,7 +271,7 @@ async function findRelevanceRows(
: undefined;
return prisma.survey.findMany({
where: buildBaseWhere(environmentId, filterCriteria, {
where: buildBaseWhere(projectId, filterCriteria, {
...statusWhere,
...cursorWhere,
}),
@@ -282,10 +282,10 @@ async function findRelevanceRows(
}
async function hasMoreRelevanceRowsInOtherBucket(
environmentId: string,
projectId: string,
filterCriteria?: TSurveyFilterCriteria
): Promise<boolean> {
const otherRows = await findRelevanceRows(environmentId, 1, filterCriteria, OTHER_BUCKET, null);
const otherRows = await findRelevanceRows(projectId, 1, filterCriteria, OTHER_BUCKET, null);
return otherRows.length > 0;
}
@@ -315,13 +315,13 @@ function buildRelevancePage(rows: TSurveyRow[], bucket: TRelevanceBucket | null)
}
async function getInProgressRelevanceStep(
environmentId: string,
projectId: string,
limit: number,
filterCriteria: TSurveyFilterCriteria | undefined,
cursor: TRelevanceSurveyListCursor | null
): Promise<{ pageRows: TSurveyRow[]; remaining: number; response: TSurveyListPage | null }> {
const inProgressRows = await findRelevanceRows(
environmentId,
projectId,
limit,
filterCriteria,
IN_PROGRESS_BUCKET,
@@ -337,7 +337,7 @@ async function getInProgressRelevanceStep(
}
async function buildInProgressOnlyRelevancePage(
environmentId: string,
projectId: string,
rows: TSurveyRow[],
filterCriteria: TSurveyFilterCriteria | undefined,
cursor: TRelevanceSurveyListCursor | null
@@ -345,13 +345,13 @@ async function buildInProgressOnlyRelevancePage(
const hasOtherRows =
rows.length > 0 &&
shouldReadInProgressBucket(cursor) &&
(await hasMoreRelevanceRowsInOtherBucket(environmentId, filterCriteria));
(await hasMoreRelevanceRowsInOtherBucket(projectId, filterCriteria));
return buildRelevancePage(rows, hasOtherRows ? IN_PROGRESS_BUCKET : null);
}
async function getRelevanceSurveyListPage(
environmentId: string,
projectId: string,
options: TGetSurveyListPageOptions & { sortBy: "relevance" }
): Promise<TSurveyListPage> {
const relevanceCursor = getRelevanceCursor(options.cursor);
@@ -360,7 +360,7 @@ async function getRelevanceSurveyListPage(
if (shouldReadInProgressBucket(relevanceCursor)) {
const inProgressStep = await getInProgressRelevanceStep(
environmentId,
projectId,
remaining,
options.filterCriteria,
relevanceCursor
@@ -376,7 +376,7 @@ async function getRelevanceSurveyListPage(
if (remaining <= 0) {
return await buildInProgressOnlyRelevancePage(
environmentId,
projectId,
pageRows,
options.filterCriteria,
relevanceCursor
@@ -384,7 +384,7 @@ async function getRelevanceSurveyListPage(
}
const otherRows = await findRelevanceRows(
environmentId,
projectId,
remaining,
options.filterCriteria,
OTHER_BUCKET,
@@ -397,18 +397,18 @@ async function getRelevanceSurveyListPage(
}
export async function getSurveyListPage(
environmentId: string,
projectId: string,
options: TGetSurveyListPageOptions
): Promise<TSurveyListPage> {
try {
if (options.sortBy === "relevance") {
return await getRelevanceSurveyListPage(environmentId, {
return await getRelevanceSurveyListPage(projectId, {
...options,
sortBy: "relevance",
});
}
return await getStandardSurveyListPage(environmentId, {
return await getStandardSurveyListPage(projectId, {
...options,
sortBy: options.sortBy,
});

View File

@@ -15,6 +15,7 @@ export const surveySelect = {
status: true,
singleUse: true,
environmentId: true,
projectId: true,
_count: {
select: { responses: true },
},

View File

@@ -21,7 +21,7 @@ import { mapSurveyRowToSurvey, mapSurveyRowsToSurveys, surveySelect } from "./su
export const getSurveys = reactCache(
async (
environmentId: string,
projectId: string,
limit?: number,
offset?: number,
filterCriteria?: TSurveyFilterCriteria
@@ -29,13 +29,13 @@ export const getSurveys = reactCache(
try {
if (filterCriteria?.sortBy === "relevance") {
// Call the sortByRelevance function
return await getSurveysSortedByRelevance(environmentId, limit, offset ?? 0, filterCriteria);
return await getSurveysSortedByRelevance(projectId, limit, offset ?? 0, filterCriteria);
}
// Fetch surveys normally with pagination and include response count
const surveysPrisma = await prisma.survey.findMany({
where: {
environmentId,
projectId,
...buildWhereClause(filterCriteria),
},
select: surveySelect,
@@ -57,7 +57,7 @@ export const getSurveys = reactCache(
export const getSurveysSortedByRelevance = reactCache(
async (
environmentId: string,
projectId: string,
limit?: number,
offset?: number,
filterCriteria?: TSurveyFilterCriteria
@@ -67,7 +67,7 @@ export const getSurveysSortedByRelevance = reactCache(
const inProgressSurveyCount = await prisma.survey.count({
where: {
environmentId,
projectId,
status: "inProgress",
...buildWhereClause(filterCriteria),
},
@@ -79,7 +79,7 @@ export const getSurveysSortedByRelevance = reactCache(
? []
: await prisma.survey.findMany({
where: {
environmentId,
projectId,
status: "inProgress",
...buildWhereClause(filterCriteria),
},
@@ -97,7 +97,7 @@ export const getSurveysSortedByRelevance = reactCache(
const newOffset = Math.max(0, offset - inProgressSurveyCount);
const additionalSurveys = await prisma.survey.findMany({
where: {
environmentId,
projectId,
status: { not: "inProgress" },
...buildWhereClause(filterCriteria),
},
@@ -288,10 +288,10 @@ export const copySurveyToOtherEnvironment = async (
if (!targetProject) throw new ResourceNotFoundError("Project", targetEnvironmentId);
}
// Fetch existing action classes in target environment for name conflict checks
// Fetch existing action classes in target project for name conflict checks
const existingActionClasses = !isSameEnvironment
? await prisma.actionClass.findMany({
where: { environmentId: targetEnvironmentId },
where: { projectId: targetProject.id },
select: { name: true, type: true, key: true, noCodeConfig: true, id: true },
})
: [];
@@ -380,6 +380,7 @@ export const copySurveyToOtherEnvironment = async (
const baseActionClassData = {
name: modifiedName,
environment: { connect: { id: targetEnvironmentId } },
project: { connect: { id: targetProject.id } },
description: trigger.actionClass.description,
type: trigger.actionClass.type,
};
@@ -444,6 +445,11 @@ export const copySurveyToOtherEnvironment = async (
id: targetEnvironmentId,
},
},
project: {
connect: {
id: targetProject.id,
},
},
creator: {
connect: {
id: userId,
@@ -493,6 +499,7 @@ export const copySurveyToOtherEnvironment = async (
isPrivate: true,
filters: existingSurvey.segment.filters,
environment: { connect: { id: targetEnvironmentId } },
project: { connect: { id: targetProject.id } },
},
};
} else if (isSameEnvironment) {
@@ -502,7 +509,7 @@ export const copySurveyToOtherEnvironment = async (
where: {
title: existingSurvey.segment.title,
isPrivate: false,
environmentId: targetEnvironmentId,
projectId: targetProject.id,
},
});
@@ -514,6 +521,7 @@ export const copySurveyToOtherEnvironment = async (
isPrivate: false,
filters: existingSurvey.segment.filters,
environment: { connect: { id: targetEnvironmentId } },
project: { connect: { id: targetProject.id } },
},
};
}
@@ -569,14 +577,14 @@ export const copySurveyToOtherEnvironment = async (
}
};
/** Count surveys in an environment, optionally with the same filter as getSurveys (so total matches list). */
/** Count surveys in a project, optionally with the same filter as getSurveys (so total matches list). */
export const getSurveyCount = reactCache(
async (environmentId: string, filterCriteria?: TSurveyFilterCriteria): Promise<number> => {
validateInputs([environmentId, z.cuid2()]);
async (projectId: string, filterCriteria?: TSurveyFilterCriteria): Promise<number> => {
validateInputs([projectId, z.cuid2()]);
try {
const surveyCount = await prisma.survey.count({
where: {
environmentId,
projectId,
...buildWhereClause(filterCriteria),
},
});

View File

@@ -6,6 +6,7 @@ import { ResourceNotFoundError } from "@formbricks/types/errors";
import { DEFAULT_LOCALE, SURVEYS_PER_PAGE } from "@/lib/constants";
import { getPublicDomain } from "@/lib/getPublicUrl";
import { getUserLocale } from "@/lib/user/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { getTranslate } from "@/lingodotdev/server";
import { getEnvironmentAuth } from "@/modules/environments/lib/utils";
import { getProjectWithTeamIdsByEnvironmentId } from "@/modules/survey/lib/project";
@@ -43,7 +44,8 @@ export const SurveysPage = async ({ params: paramsProps }: SurveyTemplateProps)
return redirect(`/environments/${params.environmentId}/settings/billing`);
}
const surveyCount = await getSurveyCount(params.environmentId);
const projectId = await getProjectIdFromEnvironmentId(params.environmentId);
const surveyCount = await getSurveyCount(projectId);
const currentProjectChannel = project.config.channel ?? null;
const locale = (await getUserLocale(session.user.id)) ?? DEFAULT_LOCALE;

View File

@@ -9,6 +9,7 @@ export const getMinimalSurvey = (t: TFunction): TSurvey => ({
name: "Minimal Survey",
type: "app",
environmentId: "someEnvId1",
projectId: null,
createdBy: null,
status: "draft",
displayOption: "displayOnce",

View File

@@ -1,6 +1,6 @@
# formbricks
![Version: 0.0.0-dev](https://img.shields.io/badge/Version-0.0.0--dev-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square)
![Version: 0.0.0-dev](https://img.shields.io/badge/Version-0.0.0--dev-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 3.7.0](https://img.shields.io/badge/AppVersion-3.7.0-informational?style=flat-square)
A Helm chart for Formbricks with PostgreSQL, Redis
@@ -8,150 +8,178 @@ A Helm chart for Formbricks with PostgreSQL, Redis
## Maintainers
| Name | Email | Url |
| ---------- | --------------------- | --- |
| Formbricks | <info@formbricks.com> | |
| Name | Email | Url |
| ---- | ------ | --- |
| Formbricks | <info@formbricks.com> | |
## Requirements
| Repository | Name | Version |
| ---------------------------------------- | ---------- | ------- |
| Repository | Name | Version |
|------------|------|---------|
| oci://registry-1.docker.io/bitnamicharts | postgresql | 16.4.16 |
| oci://registry-1.docker.io/bitnamicharts | redis | 20.11.2 |
| oci://registry-1.docker.io/bitnamicharts | redis | 20.11.2 |
## Values
| Key | Type | Default | Description |
| ------------------------------------------------------------------ | ------ | --------------------------------- | ----------- |
| autoscaling.additionalLabels | object | `{}` | |
| autoscaling.annotations | object | `{}` | |
| autoscaling.enabled | bool | `true` | |
| autoscaling.maxReplicas | int | `10` | |
| autoscaling.metrics[0].resource.name | string | `"cpu"` | |
| autoscaling.metrics[0].resource.target.averageUtilization | int | `60` | |
| autoscaling.metrics[0].resource.target.type | string | `"Utilization"` | |
| autoscaling.metrics[0].type | string | `"Resource"` | |
| autoscaling.metrics[1].resource.name | string | `"memory"` | |
| autoscaling.metrics[1].resource.target.averageUtilization | int | `60` | |
| autoscaling.metrics[1].resource.target.type | string | `"Utilization"` | |
| autoscaling.metrics[1].type | string | `"Resource"` | |
| autoscaling.minReplicas | int | `1` | |
| componentOverride | string | `""` | |
| cronJob.enabled | bool | `false` | |
| cronJob.jobs | object | `{}` | |
| deployment.additionalLabels | object | `{}` | |
| deployment.additionalPodAnnotations | object | `{}` | |
| deployment.additionalPodLabels | object | `{}` | |
| deployment.affinity | object | `{}` | |
| deployment.annotations | object | `{}` | |
| deployment.args | list | `[]` | |
| deployment.command | list | `[]` | |
| deployment.containerSecurityContext.readOnlyRootFilesystem | bool | `true` | |
| deployment.containerSecurityContext.runAsNonRoot | bool | `true` | |
| deployment.env.EMAIL_VERIFICATION_DISABLED.value | string | `"1"` | |
| deployment.env.PASSWORD_RESET_DISABLED.value | string | `"1"` | |
| deployment.envFrom | string | `nil` | |
| deployment.image.digest | string | `""` | |
| deployment.image.pullPolicy | string | `"IfNotPresent"` | |
| deployment.image.repository | string | `"ghcr.io/formbricks/formbricks"` | |
| deployment.imagePullSecrets | string | `""` | |
| deployment.nodeSelector | object | `{}` | |
| deployment.ports.http.containerPort | int | `3000` | |
| deployment.ports.http.exposed | bool | `true` | |
| deployment.ports.http.protocol | string | `"TCP"` | |
| deployment.ports.metrics.containerPort | int | `9464` | |
| deployment.ports.metrics.exposed | bool | `true` | |
| deployment.ports.metrics.protocol | string | `"TCP"` | |
| deployment.probes.livenessProbe.failureThreshold | int | `5` | |
| deployment.probes.livenessProbe.httpGet.path | string | `"/health"` | |
| deployment.probes.livenessProbe.httpGet.port | int | `3000` | |
| deployment.probes.livenessProbe.initialDelaySeconds | int | `10` | |
| deployment.probes.livenessProbe.periodSeconds | int | `10` | |
| deployment.probes.livenessProbe.successThreshold | int | `1` | |
| deployment.probes.livenessProbe.timeoutSeconds | int | `5` | |
| deployment.probes.readinessProbe.failureThreshold | int | `5` | |
| deployment.probes.readinessProbe.httpGet.path | string | `"/health"` | |
| deployment.probes.readinessProbe.httpGet.port | int | `3000` | |
| deployment.probes.readinessProbe.initialDelaySeconds | int | `10` | |
| deployment.probes.readinessProbe.periodSeconds | int | `10` | |
| deployment.probes.readinessProbe.successThreshold | int | `1` | |
| deployment.probes.readinessProbe.timeoutSeconds | int | `5` | |
| deployment.probes.startupProbe.failureThreshold | int | `30` | |
| deployment.probes.startupProbe.periodSeconds | int | `10` | |
| deployment.probes.startupProbe.tcpSocket.port | int | `3000` | |
| deployment.reloadOnChange | bool | `false` | |
| deployment.replicas | int | `1` | |
| deployment.resources.limits.memory | string | `"2Gi"` | |
| deployment.resources.requests.cpu | string | `"1"` | |
| deployment.resources.requests.memory | string | `"1Gi"` | |
| deployment.revisionHistoryLimit | int | `2` | |
| deployment.securityContext | object | `{}` | |
| deployment.strategy.type | string | `"RollingUpdate"` | |
| deployment.tolerations | list | `[]` | |
| deployment.topologySpreadConstraints | list | `[]` | |
| enterprise.enabled | bool | `false` | |
| enterprise.licenseKey | string | `""` | |
| externalSecret.enabled | bool | `false` | |
| externalSecret.files | object | `{}` | |
| externalSecret.refreshInterval | string | `"1h"` | |
| externalSecret.secretStore.kind | string | `"ClusterSecretStore"` | |
| externalSecret.secretStore.name | string | `"aws-secrets-manager"` | |
| ingress.annotations | object | `{}` | |
| ingress.enabled | bool | `false` | |
| ingress.hosts[0].host | string | `"k8s.formbricks.com"` | |
| ingress.hosts[0].paths[0].path | string | `"/"` | |
| ingress.hosts[0].paths[0].pathType | string | `"Prefix"` | |
| ingress.hosts[0].paths[0].serviceName | string | `"formbricks"` | |
| ingress.ingressClassName | string | `"alb"` | |
| nameOverride | string | `""` | |
| partOfOverride | string | `""` | |
| postgresql.auth.database | string | `"formbricks"` | |
| postgresql.auth.existingSecret | string | `"formbricks-app-secrets"` | |
| postgresql.auth.secretKeys.adminPasswordKey | string | `"POSTGRES_ADMIN_PASSWORD"` | |
| postgresql.auth.secretKeys.userPasswordKey | string | `"POSTGRES_USER_PASSWORD"` | |
| postgresql.auth.username | string | `"formbricks"` | |
| postgresql.enabled | bool | `true` | |
| postgresql.externalDatabaseUrl | string | `""` | |
| postgresql.fullnameOverride | string | `"formbricks-postgresql"` | |
| postgresql.global.security.allowInsecureImages | bool | `true` | |
| postgresql.image.repository | string | `"pgvector/pgvector"` | |
| postgresql.image.tag | string | `"0.8.0-pg17"` | |
| postgresql.primary.containerSecurityContext.enabled | bool | `true` | |
| postgresql.primary.containerSecurityContext.readOnlyRootFilesystem | bool | `false` | |
| postgresql.primary.containerSecurityContext.runAsUser | int | `1001` | |
| postgresql.primary.networkPolicy.enabled | bool | `false` | |
| postgresql.primary.persistence.enabled | bool | `true` | |
| postgresql.primary.persistence.size | string | `"10Gi"` | |
| postgresql.primary.podSecurityContext.enabled | bool | `true` | |
| postgresql.primary.podSecurityContext.fsGroup | int | `1001` | |
| postgresql.primary.podSecurityContext.runAsUser | int | `1001` | |
| rbac.enabled | bool | `false` | |
| rbac.serviceAccount.additionalLabels | object | `{}` | |
| rbac.serviceAccount.annotations | object | `{}` | |
| rbac.serviceAccount.enabled | bool | `false` | |
| rbac.serviceAccount.name | string | `""` | |
| redis.architecture | string | `"standalone"` | |
| redis.auth.enabled | bool | `true` | |
| redis.auth.existingSecret | string | `"formbricks-app-secrets"` | |
| redis.auth.existingSecretPasswordKey | string | `"REDIS_PASSWORD"` | |
| redis.enabled | bool | `true` | |
| redis.externalRedisUrl | string | `""` | |
| redis.fullnameOverride | string | `"formbricks-redis"` | |
| redis.master.persistence.enabled | bool | `true` | |
| redis.networkPolicy.enabled | bool | `false` | |
| secret.enabled | bool | `true` | |
| service.additionalLabels | object | `{}` | |
| service.annotations | object | `{}` | |
| service.enabled | bool | `true` | |
| service.ports | list | `[]` | |
| service.type | string | `"ClusterIP"` | |
| serviceMonitor.additionalLabels | string | `nil` | |
| serviceMonitor.annotations | string | `nil` | |
| serviceMonitor.enabled | bool | `true` | |
| serviceMonitor.endpoints[0].interval | string | `"5s"` | |
| serviceMonitor.endpoints[0].path | string | `"/metrics"` | |
| serviceMonitor.endpoints[0].port | string | `"metrics"` | |
---
Autogenerated from chart metadata using [helm-docs v1.14.2](https://github.com/norwoodj/helm-docs/releases/v1.14.2)
| Key | Type | Default | Description |
|-----|------|---------|-------------|
| autoscaling.additionalLabels | object | `{}` | |
| autoscaling.annotations | object | `{}` | |
| autoscaling.behavior.scaleDown.policies[0].periodSeconds | int | `120` | |
| autoscaling.behavior.scaleDown.policies[0].type | string | `"Pods"` | |
| autoscaling.behavior.scaleDown.policies[0].value | int | `1` | |
| autoscaling.behavior.scaleDown.stabilizationWindowSeconds | int | `300` | |
| autoscaling.behavior.scaleUp.policies[0].periodSeconds | int | `60` | |
| autoscaling.behavior.scaleUp.policies[0].type | string | `"Pods"` | |
| autoscaling.behavior.scaleUp.policies[0].value | int | `2` | |
| autoscaling.behavior.scaleUp.stabilizationWindowSeconds | int | `60` | |
| autoscaling.enabled | bool | `true` | |
| autoscaling.maxReplicas | int | `10` | |
| autoscaling.metrics[0].resource.name | string | `"cpu"` | |
| autoscaling.metrics[0].resource.target.averageUtilization | int | `60` | |
| autoscaling.metrics[0].resource.target.type | string | `"Utilization"` | |
| autoscaling.metrics[0].type | string | `"Resource"` | |
| autoscaling.metrics[1].resource.name | string | `"memory"` | |
| autoscaling.metrics[1].resource.target.averageUtilization | int | `60` | |
| autoscaling.metrics[1].resource.target.type | string | `"Utilization"` | |
| autoscaling.metrics[1].type | string | `"Resource"` | |
| autoscaling.minReplicas | int | `1` | |
| componentOverride | string | `""` | |
| deployment.additionalLabels | object | `{}` | |
| deployment.additionalPodAnnotations | object | `{}` | |
| deployment.additionalPodLabels | object | `{}` | |
| deployment.affinity | object | `{}` | |
| deployment.annotations | object | `{}` | |
| deployment.args | list | `[]` | |
| deployment.command | list | `[]` | |
| deployment.containerSecurityContext.readOnlyRootFilesystem | bool | `true` | |
| deployment.containerSecurityContext.runAsNonRoot | bool | `true` | |
| deployment.env | object | `{}` | |
| deployment.envFrom | string | `nil` | |
| deployment.image.digest | string | `""` | |
| deployment.image.pullPolicy | string | `"IfNotPresent"` | |
| deployment.image.repository | string | `"ghcr.io/formbricks/formbricks"` | |
| deployment.image.tag | string | `""` | |
| deployment.imagePullSecrets | string | `""` | |
| deployment.nodeSelector | object | `{}` | |
| deployment.ports.http.containerPort | int | `3000` | |
| deployment.ports.http.exposed | bool | `true` | |
| deployment.ports.http.protocol | string | `"TCP"` | |
| deployment.ports.metrics.containerPort | int | `9464` | |
| deployment.ports.metrics.exposed | bool | `true` | |
| deployment.ports.metrics.protocol | string | `"TCP"` | |
| deployment.probes.livenessProbe.failureThreshold | int | `5` | |
| deployment.probes.livenessProbe.httpGet.path | string | `"/health"` | |
| deployment.probes.livenessProbe.httpGet.port | int | `3000` | |
| deployment.probes.livenessProbe.initialDelaySeconds | int | `10` | |
| deployment.probes.livenessProbe.periodSeconds | int | `10` | |
| deployment.probes.livenessProbe.successThreshold | int | `1` | |
| deployment.probes.livenessProbe.timeoutSeconds | int | `5` | |
| deployment.probes.readinessProbe.failureThreshold | int | `5` | |
| deployment.probes.readinessProbe.httpGet.path | string | `"/health"` | |
| deployment.probes.readinessProbe.httpGet.port | int | `3000` | |
| deployment.probes.readinessProbe.initialDelaySeconds | int | `10` | |
| deployment.probes.readinessProbe.periodSeconds | int | `10` | |
| deployment.probes.readinessProbe.successThreshold | int | `1` | |
| deployment.probes.readinessProbe.timeoutSeconds | int | `5` | |
| deployment.probes.startupProbe.failureThreshold | int | `30` | |
| deployment.probes.startupProbe.periodSeconds | int | `10` | |
| deployment.probes.startupProbe.tcpSocket.port | int | `3000` | |
| deployment.reloadOnChange | bool | `false` | |
| deployment.replicas | int | `1` | |
| deployment.resources.limits.memory | string | `"2Gi"` | |
| deployment.resources.requests.cpu | string | `"1"` | |
| deployment.resources.requests.memory | string | `"1Gi"` | |
| deployment.revisionHistoryLimit | int | `2` | |
| deployment.securityContext | object | `{}` | |
| deployment.strategy.type | string | `"RollingUpdate"` | |
| deployment.tolerations | list | `[]` | |
| deployment.topologySpreadConstraints | list | `[]` | |
| enterprise.enabled | bool | `false` | |
| enterprise.licenseKey | string | `""` | |
| externalSecret.enabled | bool | `false` | |
| externalSecret.files | object | `{}` | |
| externalSecret.refreshInterval | string | `"1h"` | |
| externalSecret.secretStore.kind | string | `"ClusterSecretStore"` | |
| externalSecret.secretStore.name | string | `"aws-secrets-manager"` | |
| formbricks.publicUrl | string | `""` | |
| formbricks.webappUrl | string | `""` | |
| hub.enabled | bool | `true` | |
| hub.env | object | `{}` | |
| hub.existingSecret | string | `""` | |
| hub.image.pullPolicy | string | `"IfNotPresent"` | |
| hub.image.repository | string | `"ghcr.io/formbricks/hub"` | |
| hub.image.tag | string | `"1.0.0"` | |
| hub.migration.activeDeadlineSeconds | int | `900` | |
| hub.migration.backoffLimit | int | `3` | |
| hub.migration.ttlSecondsAfterFinished | int | `300` | |
| hub.replicas | int | `1` | |
| hub.resources.limits.memory | string | `"512Mi"` | |
| hub.resources.requests.cpu | string | `"100m"` | |
| hub.resources.requests.memory | string | `"256Mi"` | |
| ingress.annotations | object | `{}` | |
| ingress.enabled | bool | `false` | |
| ingress.hosts[0].host | string | `"k8s.formbricks.com"` | |
| ingress.hosts[0].paths[0].path | string | `"/"` | |
| ingress.hosts[0].paths[0].pathType | string | `"Prefix"` | |
| ingress.hosts[0].paths[0].serviceName | string | `"formbricks"` | |
| ingress.ingressClassName | string | `"alb"` | |
| migration.annotations | object | `{}` | |
| migration.backoffLimit | int | `3` | |
| migration.enabled | bool | `true` | |
| migration.resources.limits.memory | string | `"512Mi"` | |
| migration.resources.requests.cpu | string | `"100m"` | |
| migration.resources.requests.memory | string | `"256Mi"` | |
| migration.ttlSecondsAfterFinished | int | `300` | |
| nameOverride | string | `""` | |
| partOfOverride | string | `""` | |
| pdb.additionalLabels | object | `{}` | |
| pdb.annotations | object | `{}` | |
| pdb.enabled | bool | `true` | |
| pdb.minAvailable | int | `1` | |
| postgresql.auth.database | string | `"formbricks"` | |
| postgresql.auth.existingSecret | string | `"formbricks-app-secrets"` | |
| postgresql.auth.secretKeys.adminPasswordKey | string | `"POSTGRES_ADMIN_PASSWORD"` | |
| postgresql.auth.secretKeys.userPasswordKey | string | `"POSTGRES_USER_PASSWORD"` | |
| postgresql.auth.username | string | `"formbricks"` | |
| postgresql.enabled | bool | `true` | |
| postgresql.externalDatabaseUrl | string | `""` | |
| postgresql.fullnameOverride | string | `"formbricks-postgresql"` | |
| postgresql.global.security.allowInsecureImages | bool | `true` | |
| postgresql.image.repository | string | `"pgvector/pgvector"` | |
| postgresql.image.tag | string | `"pg17"` | |
| postgresql.primary.containerSecurityContext.enabled | bool | `true` | |
| postgresql.primary.containerSecurityContext.readOnlyRootFilesystem | bool | `false` | |
| postgresql.primary.containerSecurityContext.runAsUser | int | `1001` | |
| postgresql.primary.networkPolicy.enabled | bool | `false` | |
| postgresql.primary.persistence.enabled | bool | `true` | |
| postgresql.primary.persistence.size | string | `"10Gi"` | |
| postgresql.primary.podSecurityContext.enabled | bool | `true` | |
| postgresql.primary.podSecurityContext.fsGroup | int | `1001` | |
| postgresql.primary.podSecurityContext.runAsUser | int | `1001` | |
| rbac.enabled | bool | `false` | |
| rbac.serviceAccount.additionalLabels | object | `{}` | |
| rbac.serviceAccount.annotations | object | `{}` | |
| rbac.serviceAccount.enabled | bool | `false` | |
| rbac.serviceAccount.name | string | `""` | |
| redis.architecture | string | `"standalone"` | |
| redis.auth.enabled | bool | `true` | |
| redis.auth.existingSecret | string | `"formbricks-app-secrets"` | |
| redis.auth.existingSecretPasswordKey | string | `"REDIS_PASSWORD"` | |
| redis.enabled | bool | `true` | |
| redis.externalRedisUrl | string | `""` | |
| redis.fullnameOverride | string | `"formbricks-redis"` | |
| redis.master.persistence.enabled | bool | `true` | |
| redis.networkPolicy.enabled | bool | `false` | |
| secret.enabled | bool | `true` | |
| service.additionalLabels | object | `{}` | |
| service.annotations | object | `{}` | |
| service.enabled | bool | `true` | |
| service.ports | list | `[]` | |
| service.type | string | `"ClusterIP"` | |
| serviceMonitor.additionalLabels | string | `nil` | |
| serviceMonitor.annotations | string | `nil` | |
| serviceMonitor.enabled | bool | `true` | |
| serviceMonitor.endpoints[0].interval | string | `"5s"` | |
| serviceMonitor.endpoints[0].path | string | `"/metrics"` | |
| serviceMonitor.endpoints[0].port | string | `"metrics"` | |

View File

@@ -8,6 +8,15 @@ It also truncates the name to a maximum of 63 characters and removes trailing hy
{{- end }}
{{/*
Hub resource name: base name truncated to 59 chars then "-hub" so the suffix is never lost (63 char limit).
*/}}
{{- define "formbricks.hubname" -}}
{{- $base := include "formbricks.name" . | trunc 59 | trimSuffix "-" }}
{{- printf "%s-hub" $base | trimSuffix "-" }}
{{- end }}
{{/*
Define the application version to be used in labels.
The version is taken from `.Values.deployment.image.tag` if provided, otherwise it defaults to `.Chart.Version`.
@@ -85,9 +94,17 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- default .Release.Namespace .Values.namespaceOverride -}}
{{- end -}}
{{- define "formbricks.appSecretName" -}}
{{- printf "%s-app-secrets" (include "formbricks.name" .) -}}
{{- end }}
{{- define "formbricks.hubSecretName" -}}
{{- default (include "formbricks.appSecretName" .) .Values.hub.existingSecret -}}
{{- end }}
{{- define "formbricks.postgresAdminPassword" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if and $secret (index $secret.data "POSTGRES_ADMIN_PASSWORD") }}
{{- index $secret.data "POSTGRES_ADMIN_PASSWORD" | b64dec -}}
{{- else }}
@@ -96,7 +113,7 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.postgresUserPassword" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if and $secret (index $secret.data "POSTGRES_USER_PASSWORD") }}
{{- index $secret.data "POSTGRES_USER_PASSWORD" | b64dec -}}
{{- else }}
@@ -105,7 +122,7 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.redisPassword" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if and $secret (index $secret.data "REDIS_PASSWORD") }}
{{- index $secret.data "REDIS_PASSWORD" | b64dec -}}
{{- else }}
@@ -114,7 +131,7 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.cronSecret" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if $secret }}
{{- index $secret.data "CRON_SECRET" | b64dec -}}
{{- else }}
@@ -123,7 +140,7 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.encryptionKey" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if $secret }}
{{- index $secret.data "ENCRYPTION_KEY" | b64dec -}}
{{- else }}
@@ -132,10 +149,19 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.nextAuthSecret" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if $secret }}
{{- index $secret.data "NEXTAUTH_SECRET" | b64dec -}}
{{- else }}
{{- randAlphaNum 32 -}}
{{- end -}}
{{- end }}
{{- define "formbricks.hubApiKey" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if and $secret (index $secret.data "HUB_API_KEY") }}
{{- index $secret.data "HUB_API_KEY" | b64dec -}}
{{- else }}
{{- randAlphaNum 32 -}}
{{- end -}}
{{- end }}

View File

@@ -131,6 +131,10 @@ spec:
- name: SKIP_STARTUP_MIGRATION
value: "true"
{{- end }}
{{- if not (hasKey .Values.deployment.env "HUB_API_URL") }}
- name: HUB_API_URL
value: "http://{{ include "formbricks.hubname" . }}:8080"
{{- end }}
{{- range $key, $value := .Values.deployment.env }}
- name: {{ include "formbricks.tplvalues.render" ( dict "value" $key "context" $ ) }}
{{- if kindIs "string" $value }}

View File

@@ -0,0 +1,100 @@
{{- if not .Values.hub.enabled }}
{{- fail "hub.enabled=false is not supported in Formbricks 5; Hub is mandatory." }}
{{- end }}
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "formbricks.hubname" . }}
labels:
helm.sh/chart: {{ include "formbricks.chart" . }}
app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/component: hub
app.kubernetes.io/managed-by: {{ .Release.Service }}
app.kubernetes.io/part-of: {{ .Values.partOfOverride | default (include "formbricks.name" .) }}
spec:
replicas: {{ .Values.hub.replicas | default 1 }}
selector:
matchLabels:
app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
template:
metadata:
labels:
app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/component: hub
spec:
{{- if .Values.deployment.imagePullSecrets }}
imagePullSecrets:
{{- toYaml .Values.deployment.imagePullSecrets | nindent 8 }}
{{- end }}
initContainers:
- name: hub-migrate
image: {{ .Values.hub.image.repository }}:{{ .Values.hub.image.tag | default "latest" }}
imagePullPolicy: {{ .Values.hub.image.pullPolicy }}
securityContext:
readOnlyRootFilesystem: true
runAsNonRoot: true
command:
- sh
- -c
- |
/usr/local/bin/goose -dir /app/migrations postgres "$DATABASE_URL" up && \
/usr/local/bin/river migrate-up --database-url "$DATABASE_URL"
envFrom:
- secretRef:
name: {{ include "formbricks.hubSecretName" . }}
containers:
- name: hub
image: {{ .Values.hub.image.repository }}:{{ .Values.hub.image.tag | default "latest" }}
imagePullPolicy: {{ .Values.hub.image.pullPolicy }}
securityContext:
readOnlyRootFilesystem: true
runAsNonRoot: true
ports:
- name: http
containerPort: 8080
protocol: TCP
envFrom:
- secretRef:
name: {{ include "formbricks.hubSecretName" . }}
env:
- name: API_KEY
valueFrom:
secretKeyRef:
name: {{ include "formbricks.hubSecretName" . }}
key: HUB_API_KEY
{{- range $key, $value := .Values.hub.env }}
- name: {{ $key }}
value: {{ $value | quote }}
{{- end }}
{{- if .Values.hub.resources }}
resources:
{{- toYaml .Values.hub.resources | nindent 12 }}
{{- end }}
readinessProbe:
httpGet:
path: /health
port: 8080
initialDelaySeconds: 10
periodSeconds: 10
failureThreshold: 5
timeoutSeconds: 5
successThreshold: 1
livenessProbe:
httpGet:
path: /health
port: 8080
initialDelaySeconds: 10
periodSeconds: 10
failureThreshold: 5
timeoutSeconds: 5
successThreshold: 1
startupProbe:
httpGet:
path: /health
port: 8080
failureThreshold: 30
periodSeconds: 10

View File

@@ -0,0 +1,54 @@
{{- if not .Values.hub.enabled }}
{{- fail "hub.enabled=false is not supported in Formbricks 5; Hub is mandatory." }}
{{- end }}
---
apiVersion: batch/v1
kind: Job
metadata:
name: {{ include "formbricks.hubname" . }}-migration
labels:
helm.sh/chart: {{ include "formbricks.chart" . }}
app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/component: hub-migration
app.kubernetes.io/managed-by: {{ .Release.Service }}
annotations:
helm.sh/hook: pre-upgrade
helm.sh/hook-weight: "-5"
helm.sh/hook-delete-policy: before-hook-creation,hook-succeeded
spec:
ttlSecondsAfterFinished: {{ .Values.hub.migration.ttlSecondsAfterFinished | default 300 }}
backoffLimit: {{ .Values.hub.migration.backoffLimit | default 3 }}
activeDeadlineSeconds: {{ .Values.hub.migration.activeDeadlineSeconds | default 900 }}
template:
metadata:
labels:
app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/component: hub-migration
spec:
restartPolicy: Never
securityContext:
runAsNonRoot: true
runAsUser: 1000
{{- if .Values.deployment.imagePullSecrets }}
imagePullSecrets:
{{- toYaml .Values.deployment.imagePullSecrets | nindent 8 }}
{{- end }}
containers:
- name: hub-migrate
image: {{ .Values.hub.image.repository }}:{{ .Values.hub.image.tag | default "latest" }}
imagePullPolicy: {{ .Values.hub.image.pullPolicy }}
securityContext:
readOnlyRootFilesystem: true
capabilities:
drop: ["ALL"]
command:
- sh
- -c
- |
/usr/local/bin/goose -dir /app/migrations postgres "$DATABASE_URL" up && \
/usr/local/bin/river migrate-up --database-url "$DATABASE_URL"
envFrom:
- secretRef:
name: {{ include "formbricks.hubSecretName" . }}

View File

@@ -0,0 +1,25 @@
{{- if not .Values.hub.enabled }}
{{- fail "hub.enabled=false is not supported in Formbricks 5; Hub is mandatory." }}
{{- end }}
---
apiVersion: v1
kind: Service
metadata:
name: {{ include "formbricks.hubname" . }}
labels:
helm.sh/chart: {{ include "formbricks.chart" . }}
app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/component: hub
app.kubernetes.io/managed-by: {{ .Release.Service }}
app.kubernetes.io/part-of: {{ .Values.partOfOverride | default (include "formbricks.name" .) }}
spec:
type: ClusterIP
selector:
app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
ports:
- name: http
port: 8080
targetPort: 8080
protocol: TCP

View File

@@ -4,11 +4,13 @@
{{- $postgresUserPassword := include "formbricks.postgresUserPassword" . }}
{{- $redisPassword := include "formbricks.redisPassword" . }}
{{- $webappUrl := required "formbricks.webappUrl is required. Set it to your Formbricks instance URL (e.g., https://formbricks.example.com)" .Values.formbricks.webappUrl }}
{{- $hubApiKey := include "formbricks.hubApiKey" . }}
{{- $includeHubApiKeyInAppSecret := or (not .Values.hub.existingSecret) (eq .Values.hub.existingSecret (include "formbricks.appSecretName" .)) }}
---
apiVersion: v1
kind: Secret
metadata:
name: {{ template "formbricks.name" . }}-app-secrets
name: {{ include "formbricks.appSecretName" . }}
labels:
{{- include "formbricks.labels" . | nindent 4 }}
data:
@@ -28,6 +30,9 @@ data:
{{- else }}
DATABASE_URL: {{ .Values.postgresql.externalDatabaseUrl | b64enc }}
{{- end }}
{{- if $includeHubApiKeyInAppSecret }}
HUB_API_KEY: {{ $hubApiKey | b64enc }}
{{- end }}
CRON_SECRET: {{ include "formbricks.cronSecret" . | b64enc }}
ENCRYPTION_KEY: {{ include "formbricks.encryptionKey" . | b64enc }}
NEXTAUTH_SECRET: {{ include "formbricks.nextAuthSecret" . | b64enc }}

View File

@@ -340,6 +340,43 @@ serviceMonitor:
path: /metrics
port: metrics
##########################################################
# Hub API Configuration
# Formbricks Hub image: ghcr.io/formbricks/hub
##########################################################
hub:
# Hub is mandatory in Formbricks 5. Keep this enabled.
enabled: true
replicas: 1
image:
repository: "ghcr.io/formbricks/hub"
# Pin to a semver tag for reproducible deployments; update on each Hub release.
tag: "1.0.0"
pullPolicy: IfNotPresent
# Optional override for the secret Hub reads from.
# Defaults to the generated app secret (<release>-app-secrets), which contains DATABASE_URL and HUB_API_KEY.
# If you set this, the custom secret must provide DATABASE_URL and HUB_API_KEY.
existingSecret: ""
# Optional env vars (non-secret). Use existingSecret for secret values such as DATABASE_URL and HUB_API_KEY.
env: {}
# Upgrade migration job runs goose + river before Helm upgrades Hub resources.
# Fresh installs run the same migrations through the Hub deployment init container.
migration:
ttlSecondsAfterFinished: 300
backoffLimit: 3
activeDeadlineSeconds: 900
resources:
limits:
memory: 512Mi
requests:
memory: 256Mi
cpu: "100m"
##########################################################
# PostgreSQL Configuration
##########################################################
@@ -352,7 +389,7 @@ postgresql:
fullnameOverride: "formbricks-postgresql"
image:
repository: pgvector/pgvector
tag: 0.8.0-pg17
tag: pg17
auth:
username: formbricks
database: formbricks

View File

@@ -1,14 +1,24 @@
services:
# PostgreSQL must load the vector library so Hub (and Formbricks) can use the pgvector extension.
postgres:
image: pgvector/pgvector:pg17
image: pgvector/pgvector:pg18
volumes:
- postgres:/var/lib/postgresql/data
- postgres:/var/lib/postgresql
environment:
- POSTGRES_DB=postgres
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres
ports:
- 5432:5432
command: >
postgres
-c shared_preload_libraries=vector
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres -d postgres || exit 1"]
interval: 5s
timeout: 3s
retries: 30
start_period: 10s
mailhog:
image: arjenz/mailhog
@@ -36,6 +46,40 @@ services:
volumes:
- minio-data:/data
# Run Hub DB migrations (goose + river) before the API starts. Idempotent; runs on every compose up.
hub-migrate:
image: ghcr.io/formbricks/hub:latest
restart: "no"
entrypoint: ["sh", "-c"]
command:
[
'if [ -x /usr/local/bin/goose ] && [ -x /usr/local/bin/river ]; then /usr/local/bin/goose -dir /app/migrations postgres "$$DATABASE_URL" up && /usr/local/bin/river migrate-up --database-url "$$DATABASE_URL"; else echo ''Migration tools (goose/river) not in image.''; exit 1; fi',
]
environment:
DATABASE_URL: postgresql://postgres:postgres@postgres:5432/postgres?sslmode=disable
depends_on:
postgres:
condition: service_healthy
# Formbricks Hub API (ghcr.io/formbricks/hub). Shares the same Postgres database as Formbricks by default.
hub:
image: ghcr.io/formbricks/hub:latest
depends_on:
hub-migrate:
condition: service_completed_successfully
ports:
- "8080:8080"
environment:
API_KEY: ${HUB_API_KEY:-dev-api-key}
DATABASE_URL: postgresql://postgres:postgres@postgres:5432/postgres?sslmode=disable
# Explicit Postgres env so migrations and any libpq fallback use the service host, not localhost
PGHOST: postgres
PGPORT: "5432"
PGUSER: postgres
PGPASSWORD: postgres
PGDATABASE: postgres
PGSSLMODE: disable
volumes:
postgres:
driver: local

View File

@@ -27,3 +27,13 @@ The script will prompt you for the following information:
3. **Domain Name**: Enter the domain name that Traefik will use to create the SSL certificate and forward requests to Formbricks.
That's it! After running the command and providing the required information, visit the domain name you entered, and you should see the Formbricks home wizard!
## Formbricks Hub
The stack includes the [Formbricks Hub](https://github.com/formbricks/hub) API (`ghcr.io/formbricks/hub`). Hub shares the same database as Formbricks by default.
- **Migrations**: A `hub-migrate` service runs Hub's database migrations (goose + river) before the Hub API starts. It runs on every `docker compose up` and is idempotent.
- **Production** (`docker/docker-compose.yml`): Set `HUB_API_KEY` (required). `HUB_API_URL` defaults to `http://hub:8080` so the Formbricks app can reach Hub inside the compose network. Override `HUB_DATABASE_URL` only if you want Hub to use a separate database.
- **Development** (`docker-compose.dev.yml`): Hub uses the same Postgres database; the API key defaults to `dev-api-key` (override by setting the `HUB_API_KEY` environment variable) and the local Hub URL is `http://localhost:8080`.
In development, Hub is exposed locally on port **8080**. In production Docker Compose, Hub stays internal to the compose network and is reached via `http://hub:8080`.

View File

@@ -29,6 +29,15 @@ x-environment: &environment
# To use external Redis/Valkey: remove the redis service below and update this URL
REDIS_URL: redis://redis:6379
# Formbricks Hub (port 8080): API key required. Use e.g. openssl rand -hex 32
HUB_API_KEY:
# Base URL the Formbricks app uses to reach Hub. Defaults to the internal Hub service.
HUB_API_URL: ${HUB_API_URL:-http://hub:8080}
# Hub database URL (optional). Default: same Postgres as Formbricks. Set only if Hub uses a separate DB.
# HUB_DATABASE_URL:
# Set the minimum log level(debug, info, warn, error, fatal)
# LOG_LEVEL: info
@@ -202,7 +211,7 @@ x-environment: &environment
services:
postgres:
restart: always
image: pgvector/pgvector:pg17
image: pgvector/pgvector:pg18
volumes:
- postgres:/var/lib/postgresql/data
environment:
@@ -245,6 +254,31 @@ services:
- ./saml-connection:/home/nextjs/apps/web/saml-connection
<<: *environment
# Run Hub DB migrations (goose + river) before the API starts. Uses same image; migrations are idempotent.
hub-migrate:
image: ghcr.io/formbricks/hub:latest
restart: "no"
entrypoint: ["sh", "-c"]
command: ["if [ -x /usr/local/bin/goose ] && [ -x /usr/local/bin/river ]; then /usr/local/bin/goose -dir /app/migrations postgres \"$$DATABASE_URL\" up && /usr/local/bin/river migrate-up --database-url \"$$DATABASE_URL\"; else echo 'Migration tools (goose/river) not in image.'; exit 1; fi"]
environment:
DATABASE_URL: ${HUB_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/formbricks?sslmode=disable}
depends_on:
postgres:
condition: service_healthy
# Formbricks Hub API (ghcr.io/formbricks/hub). Set HUB_API_KEY. By default shares the Formbricks database; set HUB_DATABASE_URL to use a separate DB.
hub:
restart: always
image: ghcr.io/formbricks/hub:latest
depends_on:
hub-migrate:
condition: service_completed_successfully
postgres:
condition: service_healthy
environment:
API_KEY: ${HUB_API_KEY:?HUB_API_KEY is required to run Hub}
DATABASE_URL: ${HUB_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/formbricks?sslmode=disable}
volumes:
postgres:
driver: local

View File

@@ -0,0 +1,275 @@
# Plan: Deprecate Environments in Formbricks
**Issue**: https://github.com/formbricks/internal/issues/1501
## Context
Formbricks currently has a 4-level hierarchy: **Organization → Project → Environment (prod/dev) → Resources**. The "Environment" layer adds complexity with minimal value — the differences between prod and dev are separate data, separate API keys, and a UI badge. The UI already calls "Project" a "Workspace".
**Goal**: Collapse the Environment layer so resources live directly under Project. The production environment merges into the workspace identity. Dev environments with data become separate new workspaces.
**Key decisions**:
- DB model stays as `Project` (no table rename)
- SDK will accept `workspaceId` as new param, `environmentId` as deprecated alias
- Dev environments with data get promoted to separate workspaces
---
## Current State
```
Organization
└── Project ("Workspace" in UI)
├── Environment (production) ──→ surveys, contacts, webhooks, tags, ...
└── Environment (development) ──→ surveys, contacts, webhooks, tags, ...
```
Every project always has exactly 2 environments. The only differences between them:
- Separate data (contacts, responses, attributes, integrations, webhooks, segments, etc.)
- Separate API keys (`ApiKeyEnvironment` grants per-environment permissions)
- A red warning banner in the dev UI, plus an environment switcher breadcrumb
Key metrics:
- **564 files** in `apps/web` reference `environmentId`
- **52 files** in `packages` reference `environmentId`
- **68+ route directories** under `/environments/[environmentId]/`
- **22 API endpoint directories** keyed by `[environmentId]`
- **8 resource tables** FK to Environment: `Survey`, `Contact`, `ActionClass`, `ContactAttributeKey`, `Webhook`, `Tag`, `Segment`, `Integration`
- **SDK** requires `environmentId` to initialize, all client APIs use `/api/v1/client/[environmentId]/...`
- **Storage** paths: `private/${environmentId}/${fileName}`
---
## Phase 1: Add `projectId` Column to All Environment-Owned Models (PR 1 — Small, Low Risk)
Add an **optional** `projectId` column alongside the existing `environmentId` on every model that currently only references Environment.
**Why**: Today, Survey has `environmentId` pointing to Environment, and you have to join through Environment to reach Project. We need Survey to point directly to Project. But we can't just switch the FK in one shot — that would break everything. So we add a new nullable `projectId` column alongside the existing `environmentId`. No code changes, no runtime impact. Just schema preparation.
**Modify**: `packages/database/schema.prisma`
- Add `projectId String?` + FK to Project + index to: `Survey`, `Contact`, `ActionClass`, `ContactAttributeKey`, `Webhook`, `Tag`, `Segment`, `Integration`
- Add reverse relations on the `Project` model
- New Prisma migration file
No code changes. No runtime behavior change. All new columns are NULL.
---
## Phase 2: Backfill `projectId` (PR 2 — Small, Medium Risk)
Data migration to populate `projectId` on every existing row.
**Why**: The new `projectId` columns are all NULL. We need to populate them by joining through the Environment table: `Survey.environmentId → Environment.id → Environment.projectId`. After this, every row has both `environmentId` (old) and `projectId` (new) filled in, but the app still only reads `environmentId`.
```sql
UPDATE "Survey" s SET "projectId" = e."projectId"
FROM "Environment" e WHERE s."environmentId" = e."id" AND s."projectId" IS NULL;
-- Repeat for all 8 tables
```
**Create**: Migration script (idempotent — only updates rows where `projectId IS NULL`)
App behavior unchanged. New columns now populated but not yet read.
---
## Phase 3: Dual-Write (PR 3 — Large, Medium Risk)
All create/update operations write both `environmentId` AND `projectId`.
**Why**: New rows created after the backfill would still have `projectId = NULL` because the app code doesn't know about the new column yet. We update every `prisma.survey.create(...)`, `prisma.contact.create(...)`, etc. to write both `environmentId` and `projectId`. Now every new row gets both values. Old code still reads `environmentId` — nothing breaks.
**Key files to modify**:
- `apps/web/lib/survey/service.ts``createSurvey`
- `apps/web/lib/environment/service.ts``createEnvironment` (creates default ContactAttributeKeys)
- `apps/web/modules/projects/settings/lib/project.ts``createProject`
- `apps/web/modules/survey/list/lib/survey.ts``copySurveyToOtherEnvironment`
- `apps/web/modules/survey/components/template-list/lib/survey.ts``createSurvey`
- `apps/web/lib/actionClass/service.ts``createActionClass`
- `apps/web/modules/survey/editor/lib/action-class.ts``createActionClass`
- `apps/web/modules/ee/contacts/lib/contacts.ts``processCsvRecord`, `createMissingAttributeKeys`
- `apps/web/modules/ee/contacts/api/v2/management/contacts/lib/contact.ts``createContact`
- `apps/web/app/api/v1/client/[environmentId]/displays/lib/display.ts``createDisplay` (creates contacts)
- `apps/web/modules/ee/contacts/lib/contact-attribute-keys.ts``createContactAttributeKey`
- `apps/web/modules/api/v2/management/contact-attribute-keys/lib/contact-attribute-key.ts``createContactAttributeKey`
- `apps/web/modules/ee/contacts/api/v1/management/contact-attribute-keys/lib/contact-attribute-keys.ts``createContactAttributeKey`
- `apps/web/modules/integrations/webhooks/lib/webhook.ts``createWebhook`
- `apps/web/modules/api/v2/management/webhooks/lib/webhook.ts``createWebhook`
- `apps/web/app/api/v1/webhooks/lib/webhook.ts``createWebhook`
- `apps/web/lib/tag/service.ts``createTag`
- `apps/web/modules/ee/contacts/segments/lib/segments.ts``createSegment`, `cloneSegment`, `resetSegmentInSurvey`
- `apps/web/lib/integration/service.ts``createOrUpdateIntegration`
Pattern:
```typescript
// Resolve environmentId to projectId using existing getEnvironment()
const environment = await getEnvironment(environmentId);
const projectId = environment.projectId;
await prisma.survey.create({ data: { environmentId, projectId, ...rest } });
```
---
## Phase 4: Switch Internal Reads to `projectId` (PR 4 — Very Large, High Risk)
Change internal (non-API) queries from `WHERE environmentId = ?` to `WHERE projectId = ?`.
**Why**: This is the actual migration. Every query that says `WHERE environmentId = X` changes to `WHERE projectId = X`. Functions like `getSurveys(environmentId)` become `getSurveys(projectId)`. The layout at `/environments/[environmentId]/layout.tsx` resolves the environmentId from the URL to a projectId early on and passes projectId downstream. After this phase, the app internally thinks in terms of projects, not environments, even though URLs still say `[environmentId]`.
**Key files**:
- `apps/web/modules/survey/list/lib/survey.ts``getSurveys(environmentId)``getSurveys(projectId)`
- `apps/web/app/api/v1/client/[environmentId]/environment/lib/data.ts``getEnvironmentStateData`
- `apps/web/modules/environments/lib/utils.ts``getEnvironmentAuth`, `getEnvironmentLayoutData`
- `apps/web/app/(app)/environments/[environmentId]/layout.tsx` — resolve `projectId` early, pass to context
- `apps/web/app/(app)/environments/[environmentId]/context/environment-context.tsx` — add `projectId`
- All page server components that pass `environmentId` to service functions
URL still has `[environmentId]`. Each page resolves `environmentId → projectId` at the top.
**This PR can be split further** by migrating one resource type at a time (surveys first, then contacts, then actions, etc.).
---
## Phase 5: Client API Backwards Compatibility (PR 5 — Medium, Medium Risk)
Make `/api/v1/client/[environmentId]/...` and `/api/v2/client/[environmentId]/...` accept either an `environmentId` or a `projectId`.
**Why**: The SDK sends requests to `/api/v1/client/[environmentId]/...`. Existing deployed SDKs will keep sending environmentIds. New SDKs will send projectIds. Each route handler needs to accept either and resolve to a projectId internally. This ensures old SDK versions don't break.
**Add fallback resolution at top of each route handler**:
```typescript
// Try Environment table first, fall back to Project table
let projectId: string;
const environment = await prisma.environment.findUnique({ where: { id: params.environmentId } });
if (environment) {
projectId = environment.projectId;
} else {
projectId = params.environmentId; // caller passed a projectId directly
}
```
**Files**:
- `apps/web/app/api/v1/client/[environmentId]/environment/route.ts`
- `apps/web/app/api/v1/client/[environmentId]/displays/route.ts`
- `apps/web/app/api/v1/client/[environmentId]/responses/route.ts`
- `apps/web/app/api/v1/client/[environmentId]/storage/route.ts`
- `apps/web/app/api/v1/client/[environmentId]/user/route.ts`
- `apps/web/app/api/v2/client/[environmentId]/` — all routes
---
## Phase 6: Management API + API Key Migration (PR 6 — Medium, Medium Risk)
**Why**: The `ApiKeyEnvironment` model grants per-environment permissions. API keys used by integrations (Zapier, Make, etc.) reference environmentIds. These need to work at the project level. The management API endpoints that accept `environmentId` in request bodies need to also accept `projectId`.
- Modify `ApiKeyEnvironment` to also support project-level permissions (or add `projectId` to the model)
- Update `apps/web/app/api/v1/auth.ts``authenticateRequest` resolves environment permissions to project
- Management route handlers accept `environmentId` OR `projectId` in request bodies
- API key management UI in `modules/organization/settings/api-keys/`
---
## Phase 7: Storage Path Migration (PR 7 — Medium, Medium Risk)
**Why**: Uploaded files are stored at paths like `private/{environmentId}/{fileName}`. New uploads should use `{projectId}/...`, but old files still live at the old paths. Downloads need to check both locations for backward compatibility.
- New uploads use `{projectId}/{accessType}/{fileName}`
- Downloads check both `{projectId}/...` and `{environmentId}/...` paths for backwards compat
- `apps/web/modules/storage/service.ts`
- `apps/web/app/storage/[environmentId]/[accessType]/[fileName]/route.ts`
---
## Phase 8: Dev Environment Data Migration (PR 8 — Large, High Risk)
**Why**: Currently each project has a prod and dev environment. After the migration, there's no "environment" concept — just projects. Dev environments with no data can be discarded. Dev environments with data need to be promoted into new standalone projects so that data isn't lost.
For each Project with a development Environment that has data:
1. Create new Project named `{name} (Dev)` in the same Organization
2. Create a production Environment for the new Project
3. Re-parent all dev environment resources to the new Project (update `projectId`)
4. Re-parent resources to the new production environment (update `environmentId`)
For development environments with NO data: leave as-is (will be cleaned up later).
**Create**: Idempotent migration script in `packages/database/migration/` or `scripts/`
---
## Phase 9: New `/workspaces/[projectId]/` Routes + Redirects (PR 9 — Very Large, High Risk)
**Why**: The URL currently says `/environments/[environmentId]/surveys/...`. After the migration, it should say `/workspaces/[projectId]/surveys/...`. This phase creates the new route group mirroring the old structure, removes the environment switcher breadcrumb, and adds redirects so old bookmarked URLs still work.
- Create `/apps/web/app/(app)/workspaces/[projectId]/` route group mirroring the environments structure
- New layout resolves `projectId` directly
- Old `/environments/[environmentId]/...` routes redirect to `/workspaces/{projectId}/...`
- Update `apps/web/app/page.tsx` to redirect to workspace URLs
- Remove environment switcher breadcrumb
**Can be split** into sub-PRs: layout first, then surveys, then settings, etc.
---
## Phase 10: Make `projectId` NOT NULL (PR 10 — Small, Low Risk)
**Why**: At this point, every row has `projectId` populated (backfill + dual-write), and all reads use `projectId`. Now we can safely make it required in the schema. This is a safety net — the DB will reject any row that somehow doesn't have a projectId.
```sql
ALTER TABLE "Survey" ALTER COLUMN "projectId" SET NOT NULL;
-- Repeat for all 8 resource tables: Survey, Contact, ActionClass,
-- ContactAttributeKey, Webhook, Tag, Segment, Integration
-- (ApiKeyEnvironment.projectId is handled separately and may stay nullable)
```
Pre-check: verify no NULL values remain.
---
## Phase 11: JS SDK Update (PR 11 — Medium, Low Risk)
**Why**: Add `workspaceId` as the new init parameter. `environmentId` keeps working as a deprecated alias. Existing integrations don't break.
- `packages/js-core/src/types/config.ts` — add `workspaceId` to `TConfigInput`
- `packages/js-core/src/lib/common/setup.ts` — accept `workspaceId`, fall back to `environmentId`
- `environmentId` continues working as deprecated alias indefinitely
```typescript
// New:
formbricks.init({ workspaceId: "cxxx", appUrl: "..." })
// Old (still works):
formbricks.init({ environmentId: "cxxx", appUrl: "..." })
```
---
## Verification
After each PR:
1. `pnpm build` passes
2. Existing tests pass (`pnpm test`)
3. Manual smoke test: create survey, submit response, check dashboard
4. SDK initialization works with existing `environmentId`
After full migration:
- Old environment URLs redirect correctly
- Old API keys work
- Old SDK `environmentId` init works
- New `workspaceId` SDK init works
- Storage files accessible via both old and new paths
- Dev environments with data are separate workspaces
---
## PR Summary
| PR | Phase | Description | Size | Risk |
|----|-------|-------------|------|------|
| 1 | 1 | Add nullable `projectId` columns | S | Low |
| 2 | 2 | Backfill `projectId` data migration | S | Med |
| 3 | 3 | Dual-write `projectId` on all creates | L | Med |
| 4 | 4 | Switch reads to `projectId` | XL | High |
| 5 | 5 | Client API backwards compat | M | Med |
| 6 | 6 | Management API + API key migration | M | Med |
| 7 | 7 | Storage path migration | M | Med |
| 8 | 8 | Dev environment → workspace promotion | L | High |
| 9 | 9 | New workspace routes + redirects | XL | High |
| 10 | 10 | Make `projectId` NOT NULL | S | Low |
| 11 | 11 | JS SDK `workspaceId` support | M | Low |

View File

@@ -77,4 +77,14 @@ These variables are present inside your machine's docker-compose file. Restart t
| AUDIT_LOG_ENABLED | Set this to 1 to enable audit logging. Requires Redis to be configured with the REDIS_URL env variable. | optional | 0 |
| AUDIT_LOG_GET_USER_IP | Set to 1 to include user IP addresses in audit logs from request headers | optional | 0 |
#### Formbricks Hub
When running the stack with [Formbricks Hub](https://github.com/formbricks/hub) (for example via Docker Compose or Helm), the following variables apply:
| Variable | Description | Required | Default |
| ---------------- | ------------------------------------------------------------------------------------------------ | -------------------------- | ----------------------------------------------------- |
| HUB_API_KEY | API key used by the Formbricks Hub API (port 8080). Generate one with e.g. `openssl rand -hex 32`. | required | (none) |
| HUB_API_URL | Base URL the Formbricks app uses to call Hub. Use `http://localhost:8080` locally. | required | `http://localhost:8080` in local dev |
| HUB_DATABASE_URL | PostgreSQL connection URL for Hub. Omit to use the same database as Formbricks. | optional | Same as Formbricks `DATABASE_URL` (shared database) |
Note: If you want to configure something that is not possible via the variables above, please open an issue on our GitHub repo or reach out to us on GitHub Discussions and we'll try our best to work out a solution with you.

View File

@@ -47,7 +47,9 @@ export const xmSegmentMigration: MigrationScript = {
id: "s644oyyqccstfdeejc4fluye",
name: "20241209110456_xm_segment_migration",
run: async ({ tx }) => {
const allSegments = await tx.segment.findMany();
const allSegments = await tx.segment.findMany({
select: { id: true, filters: true },
});
const updationPromises = [];
for (const segment of allSegments) {
updationPromises.push(
@@ -56,6 +58,7 @@ export const xmSegmentMigration: MigrationScript = {
data: {
filters: findAndReplace(segment.filters),
},
select: { id: true },
})
);
}

View File

@@ -0,0 +1,80 @@
-- AlterTable
ALTER TABLE "ActionClass" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "Contact" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "ContactAttributeKey" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "Integration" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "Segment" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "Survey" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "Tag" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "Webhook" ADD COLUMN "projectId" TEXT;
-- AlterTable
ALTER TABLE "ApiKeyEnvironment" ADD COLUMN "projectId" TEXT;
-- CreateIndex
CREATE INDEX "ActionClass_projectId_createdAt_idx" ON "ActionClass"("projectId", "created_at");
-- CreateIndex
CREATE INDEX "Contact_projectId_idx" ON "Contact"("projectId");
-- CreateIndex
CREATE INDEX "ContactAttributeKey_projectId_createdAt_idx" ON "ContactAttributeKey"("projectId", "created_at");
-- CreateIndex
CREATE INDEX "Integration_projectId_idx" ON "Integration"("projectId");
-- CreateIndex
CREATE INDEX "Segment_projectId_idx" ON "Segment"("projectId");
-- CreateIndex
CREATE INDEX "Survey_projectId_updatedAt_idx" ON "Survey"("projectId", "updated_at");
-- CreateIndex
CREATE INDEX "Tag_projectId_idx" ON "Tag"("projectId");
-- CreateIndex
CREATE INDEX "Webhook_projectId_idx" ON "Webhook"("projectId");
-- CreateIndex
CREATE INDEX "ApiKeyEnvironment_projectId_idx" ON "ApiKeyEnvironment"("projectId");
-- AddForeignKey
ALTER TABLE "Webhook" ADD CONSTRAINT "Webhook_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "ContactAttributeKey" ADD CONSTRAINT "ContactAttributeKey_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Contact" ADD CONSTRAINT "Contact_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Tag" ADD CONSTRAINT "Tag_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Survey" ADD CONSTRAINT "Survey_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "ActionClass" ADD CONSTRAINT "ActionClass_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Integration" ADD CONSTRAINT "Integration_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Segment" ADD CONSTRAINT "Segment_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "ApiKeyEnvironment" ADD CONSTRAINT "ApiKeyEnvironment_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -49,11 +49,14 @@ model Webhook {
source WebhookSource @default(user)
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
environmentId String
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
triggers PipelineTriggers[]
surveyIds String[]
secret String?
@@index([environmentId])
@@index([projectId])
}
/// Represents an attribute value associated with a contact.
@@ -116,11 +119,14 @@ model ContactAttributeKey {
dataType ContactAttributeDataType @default(string)
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
environmentId String
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
attributes ContactAttribute[]
attributeFilters SurveyAttributeFilter[]
@@unique([key, environmentId])
@@index([environmentId, createdAt])
@@index([projectId, createdAt])
}
/// Represents a person or user who can receive and respond to surveys.
@@ -137,11 +143,14 @@ model Contact {
updatedAt DateTime @updatedAt @map(name: "updated_at")
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
environmentId String
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
responses Response[]
attributes ContactAttribute[]
displays Display[]
@@index([environmentId])
@@index([projectId])
}
/// Stores a user's response to a survey, including their answers and metadata.
@@ -204,8 +213,11 @@ model Tag {
responses TagsOnResponses[]
environmentId String
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
@@unique([environmentId, name])
@@index([projectId])
}
/// Junction table linking tags to responses.
@@ -350,6 +362,8 @@ model Survey {
type SurveyType @default(app)
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
environmentId String
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
creator User? @relation(fields: [createdBy], references: [id])
createdBy String?
status SurveyStatus @default(draft)
@@ -413,6 +427,7 @@ model Survey {
@@index([environmentId, updatedAt])
@@index([segmentId])
@@index([projectId, updatedAt])
}
/// Represents a quota configuration for a survey.
@@ -507,11 +522,14 @@ model ActionClass {
noCodeConfig Json?
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
environmentId String
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
surveyTriggers SurveyTrigger[]
@@unique([key, environmentId])
@@unique([name, environmentId])
@@index([environmentId, createdAt])
@@index([projectId, createdAt])
}
enum EnvironmentType {
@@ -540,9 +558,12 @@ model Integration {
/// [IntegrationConfig]
config Json
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
@@unique([type, environmentId])
@@index([environmentId])
@@index([projectId])
}
enum DataMigrationStatus {
@@ -648,6 +669,17 @@ model Project {
projectTeams ProjectTeam[]
customHeadScripts String? // Custom HTML scripts for link surveys (self-hosted only)
// Direct resource relations (for environment deprecation migration)
surveys Survey[]
contacts Contact[]
actionClasses ActionClass[]
contactAttributeKeys ContactAttributeKey[]
webhooks Webhook[]
tags Tag[]
segments Segment[]
integrations Integration[]
apiKeyEnvironments ApiKeyEnvironment[]
@@unique([organizationId, name])
}
@@ -807,10 +839,13 @@ model ApiKeyEnvironment {
apiKey ApiKey @relation(fields: [apiKeyId], references: [id], onDelete: Cascade)
environmentId String
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
permission ApiKeyPermission
@@unique([apiKeyId, environmentId])
@@index([environmentId])
@@index([projectId])
}
enum IdentityProvider {
@@ -910,9 +945,12 @@ model Segment {
filters Json @default("[]")
environmentId String
environment Environment @relation(fields: [environmentId], references: [id], onDelete: Cascade)
project Project? @relation(fields: [projectId], references: [id], onDelete: Cascade)
projectId String?
surveys Survey[]
@@unique([environmentId, title])
@@index([projectId])
}
/// Represents a supported language in the system.

View File

@@ -10,7 +10,7 @@ export const ZApiKeyEnvironment = z.object({
updatedAt: z.date(),
apiKeyId: z.cuid2(),
environmentId: z.cuid2(),
projectId: z.cuid2(),
projectId: z.cuid2().nullable(),
projectName: z.string(),
environmentType: z.enum(EnvironmentType),
permission: ZApiKeyPermission,

View File

@@ -54,6 +54,7 @@ export const ZContactAttributeKey = z.object({
})
.describe("The data type of the attribute (string, number, date)"),
environmentId: z.cuid2().describe("The ID of the environment this attribute belongs to"),
projectId: z.string().nullable().describe("The ID of the project this attribute belongs to"),
}) satisfies z.ZodType<ContactAttributeKey>;
ZContactAttributeKey.meta({

View File

@@ -17,6 +17,7 @@ export const ZContact = z.object({
})
.describe("When the contact was last updated"),
environmentId: z.string().describe("The environment this contact belongs to"),
projectId: z.string().nullable().describe("The project this contact belongs to"),
}) satisfies z.ZodType<Contact>;
ZContact.meta({

View File

@@ -72,6 +72,7 @@ const ZSurveyBase = z.object({
pin: z.string().nullable().describe("The pin of the survey"),
createdBy: z.string().nullable().describe("The user who created the survey"),
environmentId: z.cuid2().describe("The environment ID of the survey"),
projectId: z.string().nullable().describe("The project ID of the survey"),
questions: z.array(ZSurveyQuestion).describe("The questions of the survey"),
blocks: ZSurveyBlocks.prefault([]).describe("The blocks of the survey"),
endings: z.array(ZSurveyEnding).prefault([]).describe("The endings of the survey"),

View File

@@ -19,6 +19,7 @@ export const ZWebhook = z.object({
url: z.url().describe("The URL of the webhook"),
source: z.enum(["user", "zapier", "make", "n8n"]).describe("The source of the webhook"),
environmentId: z.cuid2().describe("The ID of the environment"),
projectId: z.string().nullable().describe("The ID of the project"),
triggers: z
.array(z.enum(["responseFinished", "responseCreated", "responseUpdated"]))
.describe("The triggers of the webhook")

View File

@@ -62,6 +62,7 @@ export const mockSurvey: TEnvironmentStateSurvey = {
createdAt: new Date("2025-01-01T10:00:00Z"),
updatedAt: new Date("2025-01-01T10:00:00Z"),
environmentId: mockEnvironmentId,
projectId: null,
description: "Manual Trigger",
noCodeConfig: {
elementSelector: { cssSelector: ".btn", innerHtml: "Click me" },

View File

@@ -135,6 +135,7 @@ export const ZActionClass = z.object({
key: z.string().trim().min(1).nullable(),
noCodeConfig: ZActionClassNoCodeConfig.nullable(),
environmentId: z.string(),
projectId: z.string().nullable(),
createdAt: z.coerce.date(),
updatedAt: z.coerce.date(),
});

View File

@@ -19,6 +19,7 @@ export const ZContactAttributeKey = z.object({
type: ZContactAttributeKeyType,
dataType: ZContactAttributeDataType.prefault("string"),
environmentId: z.string(),
projectId: z.string().nullable(),
});
export type TContactAttributeKey = z.infer<typeof ZContactAttributeKey>;

View File

@@ -19,6 +19,7 @@ export type TIntegrationConfig = z.infer<typeof ZIntegrationConfig>;
export const ZIntegrationBase = z.object({
id: z.string(),
environmentId: z.string(),
projectId: z.string().nullable(),
});
export const ZIntegration = ZIntegrationBase.extend({

View File

@@ -3,6 +3,7 @@ import { z } from "zod";
export const ZIntegrationBase = z.object({
id: z.string(),
environmentId: z.string(),
projectId: z.string().nullable(),
});
export const ZIntegrationBaseSurveyData = z.object({

View File

@@ -344,6 +344,7 @@ export const ZSegment = z.object({
isPrivate: z.boolean().prefault(true),
filters: ZSegmentFilters,
environmentId: z.string(),
projectId: z.string().nullable(),
createdAt: z.date(),
updatedAt: z.date(),
surveys: z.array(z.string()),

Some files were not shown because too many files have changed in this diff Show More