Compare commits

..

4 Commits

Author SHA1 Message Date
Tiago Farto
f190cfc246 chore: align river implementation 2026-03-25 16:06:55 +00:00
Tiago Farto
b5b5da4fdc Merge branch 'main' into feat/workflows-service 2026-03-25 13:58:13 +00:00
Tiago Farto
c44e96e4ed chore: date format fix 2026-03-25 11:37:14 +00:00
Tiago Farto
90b26935a9 feat: River workflows POC 2026-03-25 10:53:46 +00:00
128 changed files with 2526 additions and 3324 deletions

View File

@@ -38,15 +38,6 @@ LOG_LEVEL=info
DATABASE_URL='postgresql://postgres:postgres@localhost:5432/formbricks?schema=public'
#################
# HUB (DEV) #
#################
# The dev stack (pnpm db:up / pnpm go) runs Formbricks Hub on port 8080.
# Set explicitly to avoid confusion; override as needed when using docker-compose.dev.yml.
HUB_API_KEY=dev-api-key
HUB_API_URL=http://localhost:8080
HUB_DATABASE_URL=postgresql://postgres:postgres@postgres:5432/postgres?sslmode=disable
################
# MAIL SETUP #
################

3
.gitignore vendored
View File

@@ -66,3 +66,6 @@ i18n.cache
stats.html
# next-agents-md
.next-docs/
# Golang
.cache

View File

@@ -6,7 +6,7 @@ import { IS_FORMBRICKS_CLOUD, IS_STORAGE_CONFIGURED, RESPONSES_PER_PAGE } from "
import { getPublicDomain } from "@/lib/getPublicUrl";
import { getResponseCountBySurveyId, getResponses } from "@/lib/response/service";
import { getSurvey } from "@/lib/survey/service";
import { getTagsByProjectId } from "@/lib/tag/service";
import { getTagsByEnvironmentId } from "@/lib/tag/service";
import { getUser } from "@/lib/user/service";
import { getTranslate } from "@/lingodotdev/server";
import { getSegments } from "@/modules/ee/contacts/segments/lib/segments";
@@ -23,12 +23,10 @@ const Page = async (props: { params: Promise<{ environmentId: string; surveyId:
const { session, environment, organization, isReadOnly } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const [survey, user, tags, isContactsEnabled, responseCount] = await Promise.all([
getSurvey(params.surveyId),
getUser(session.user.id),
getTagsByProjectId(projectId),
getTagsByEnvironmentId(params.environmentId),
getIsContactsEnabled(organization.id),
getResponseCountBySurveyId(params.surveyId),
]);
@@ -45,7 +43,7 @@ const Page = async (props: { params: Promise<{ environmentId: string; surveyId:
throw new ResourceNotFoundError(t("common.organization"), null);
}
const segments = isContactsEnabled ? await getSegments(projectId) : [];
const segments = isContactsEnabled ? await getSegments(params.environmentId) : [];
const publicDomain = getPublicDomain();

View File

@@ -24,8 +24,6 @@ const SurveyPage = async (props: { params: Promise<{ environmentId: string; surv
const { session, environment, isReadOnly } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const surveyId = params.surveyId;
if (!surveyId) {
@@ -46,7 +44,7 @@ const SurveyPage = async (props: { params: Promise<{ environmentId: string; surv
const organizationId = await getOrganizationIdFromEnvironmentId(environment.id);
const isContactsEnabled = await getIsContactsEnabled(organizationId);
const segments = isContactsEnabled ? await getSegments(projectId) : [];
const segments = isContactsEnabled ? await getSegments(environment.id) : [];
if (!organizationId) {
throw new ResourceNotFoundError(t("common.organization"), null);

View File

@@ -6,7 +6,7 @@ import { ResourceNotFoundError } from "@formbricks/types/errors";
import { ZResponseFilterCriteria } from "@formbricks/types/responses";
import { getResponseDownloadFile, getResponseFilteringValues } from "@/lib/response/service";
import { getSurvey } from "@/lib/survey/service";
import { getTagsByProjectId } from "@/lib/tag/service";
import { getTagsByEnvironmentId } from "@/lib/tag/service";
import { authenticatedActionClient } from "@/lib/utils/action-client";
import { checkAuthorizationUpdated } from "@/lib/utils/action-client/action-client-middleware";
import { getOrganizationIdFromSurveyId, getProjectIdFromSurveyId } from "@/lib/utils/helper";
@@ -84,10 +84,8 @@ export const getSurveyFilterDataAction = authenticatedActionClient
const isQuotasAllowed = await getIsQuotasEnabled(organizationId);
const projectId = survey.projectId!;
const [tags, { contactAttributes: attributes, meta, hiddenFields }, quotas = []] = await Promise.all([
getTagsByProjectId(projectId),
getTagsByEnvironmentId(survey.environmentId),
getResponseFilteringValues(parsedInput.surveyId),
isQuotasAllowed ? getQuotas(parsedInput.surveyId) : [],
]);

View File

@@ -20,11 +20,9 @@ const Page = async (props: { params: Promise<{ environmentId: string }> }) => {
const { isReadOnly, environment, session } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const [surveys, integrations, locale] = await Promise.all([
getSurveys(projectId),
getIntegrations(projectId),
getSurveys(params.environmentId),
getIntegrations(params.environmentId),
getUserLocale(session.user.id),
]);

View File

@@ -19,8 +19,6 @@ const ZValidateGoogleSheetsConnectionAction = z.object({
export const validateGoogleSheetsConnectionAction = authenticatedActionClient
.inputSchema(ZValidateGoogleSheetsConnectionAction)
.action(async ({ ctx, parsedInput }) => {
const projectId = await getProjectIdFromEnvironmentId(parsedInput.environmentId);
await checkAuthorizationUpdated({
userId: ctx.user.id,
organizationId: await getOrganizationIdFromEnvironmentId(parsedInput.environmentId),
@@ -31,13 +29,13 @@ export const validateGoogleSheetsConnectionAction = authenticatedActionClient
},
{
type: "projectTeam",
projectId,
projectId: await getProjectIdFromEnvironmentId(parsedInput.environmentId),
minPermission: "readWrite",
},
],
});
const integration = await getIntegrationByType(projectId, "googleSheets");
const integration = await getIntegrationByType(parsedInput.environmentId, "googleSheets");
if (!integration) {
return { data: false };
}

View File

@@ -24,11 +24,9 @@ const Page = async (props: { params: Promise<{ environmentId: string }> }) => {
const { isReadOnly, environment, session } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const [surveys, integrations, locale] = await Promise.all([
getSurveys(projectId),
getIntegrations(projectId),
getSurveys(params.environmentId),
getIntegrations(params.environmentId),
getUserLocale(session.user.id),
]);

View File

@@ -6,15 +6,15 @@ import { DatabaseError } from "@formbricks/types/errors";
import { validateInputs } from "@/lib/utils/validate";
export const getWebhookCountBySource = async (
projectId: string,
environmentId: string,
source?: Webhook["source"]
): Promise<number> => {
validateInputs([projectId, ZId], [source, z.string().optional()]);
validateInputs([environmentId, ZId], [source, z.string().optional()]);
try {
const count = await prisma.webhook.count({
where: {
projectId,
environmentId,
source,
},
});

View File

@@ -31,11 +31,9 @@ const Page = async (props: { params: Promise<{ environmentId: string }> }) => {
const { isReadOnly, environment, session } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const [surveys, notionIntegration, locale] = await Promise.all([
getSurveys(projectId),
getIntegrationByType(projectId, "notion"),
getSurveys(params.environmentId),
getIntegrationByType(params.environmentId, "notion"),
getUserLocale(session.user.id),
]);

View File

@@ -33,8 +33,6 @@ const Page = async (props: { params: Promise<{ environmentId: string }> }) => {
const { isReadOnly, environment, isBilling } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const [
integrations,
userWebhookCount,
@@ -43,12 +41,12 @@ const Page = async (props: { params: Promise<{ environmentId: string }> }) => {
n8nwebhookCount,
activePiecesWebhookCount,
] = await Promise.all([
getIntegrations(projectId),
getWebhookCountBySource(projectId, "user"),
getWebhookCountBySource(projectId, "zapier"),
getWebhookCountBySource(projectId, "make"),
getWebhookCountBySource(projectId, "n8n"),
getWebhookCountBySource(projectId, "activepieces"),
getIntegrations(params.environmentId),
getWebhookCountBySource(params.environmentId, "user"),
getWebhookCountBySource(params.environmentId, "zapier"),
getWebhookCountBySource(params.environmentId, "make"),
getWebhookCountBySource(params.environmentId, "n8n"),
getWebhookCountBySource(params.environmentId, "activepieces"),
]);
const isIntegrationConnected = (type: TIntegrationType) =>

View File

@@ -19,11 +19,9 @@ const Page = async (props: { params: Promise<{ environmentId: string }> }) => {
const { isReadOnly, environment, session } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const [surveys, slackIntegration, locale] = await Promise.all([
getSurveys(projectId),
getIntegrationByType(projectId, "slack"),
getSurveys(params.environmentId),
getIntegrationByType(params.environmentId, "slack"),
getUserLocale(session.user.id),
]);

View File

@@ -15,7 +15,6 @@ import { getOrganizationByEnvironmentId } from "@/lib/organization/service";
import { getResponseCountBySurveyId } from "@/lib/response/service";
import { getSurvey, updateSurvey } from "@/lib/survey/service";
import { convertDatesInObject } from "@/lib/time";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { validateWebhookUrl } from "@/lib/utils/validate-webhook-url";
import { queueAuditEvent } from "@/modules/ee/audit-logs/lib/handler";
import { TAuditStatus, UNKNOWN_DATA } from "@/modules/ee/audit-logs/types/audit-log";
@@ -153,9 +152,8 @@ export const POST = async (request: Request) => {
if (event === "responseFinished") {
// Fetch integrations and responseCount in parallel
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const [integrations, responseCount] = await Promise.all([
getIntegrations(projectId),
getIntegrations(environmentId),
getResponseCountBySurveyId(surveyId),
]);

View File

@@ -10,7 +10,6 @@ import {
} from "@/lib/constants";
import { hasUserEnvironmentAccess } from "@/lib/environment/auth";
import { createOrUpdateIntegration, getIntegrationByType } from "@/lib/integration/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { authOptions } from "@/modules/auth/lib/authOptions";
export const GET = async (req: Request) => {
@@ -68,8 +67,7 @@ export const GET = async (req: Request) => {
}
const integrationType = "googleSheets" as const;
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const existingIntegration = await getIntegrationByType(projectId, integrationType);
const existingIntegration = await getIntegrationByType(environmentId, integrationType);
const existingConfig = existingIntegration?.config as TIntegrationGoogleSheetsConfig;
const googleSheetIntegration = {

View File

@@ -2,7 +2,6 @@ import { Prisma } from "@prisma/client";
import { prisma } from "@formbricks/database";
import { TDisplayCreateInput, ZDisplayCreateInput } from "@formbricks/types/displays";
import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { validateInputs } from "@/lib/utils/validate";
import { getContactByUserId } from "./contact";
@@ -16,11 +15,9 @@ export const createDisplay = async (displayInput: TDisplayCreateInput): Promise<
if (userId) {
contact = await getContactByUserId(environmentId, userId);
if (!contact) {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
contact = await prisma.contact.create({
data: {
environment: { connect: { id: environmentId } },
project: { connect: { id: projectId } },
attributes: {
create: {
attributeKey: {

View File

@@ -45,7 +45,6 @@ export const responseSelection = {
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -5,7 +5,6 @@ import { withV1ApiWrapper } from "@/app/lib/api/with-api-logging";
import { getTables } from "@/lib/airtable/service";
import { hasUserEnvironmentAccess } from "@/lib/environment/auth";
import { getIntegrationByType } from "@/lib/integration/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
export const GET = withV1ApiWrapper({
handler: async ({ req, authentication }) => {
@@ -37,8 +36,7 @@ export const GET = withV1ApiWrapper({
};
}
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const integration = (await getIntegrationByType(projectId, "airtable")) as TIntegrationAirtable;
const integration = (await getIntegrationByType(environmentId, "airtable")) as TIntegrationAirtable;
if (!integration) {
return {

View File

@@ -11,7 +11,6 @@ import {
import { symmetricEncrypt } from "@/lib/crypto";
import { hasUserEnvironmentAccess } from "@/lib/environment/auth";
import { createOrUpdateIntegration, getIntegrationByType } from "@/lib/integration/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
export const GET = withV1ApiWrapper({
handler: async ({ req, authentication }) => {
@@ -89,8 +88,7 @@ export const GET = withV1ApiWrapper({
},
};
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const existingIntegration = await getIntegrationByType(projectId, "notion");
const existingIntegration = await getIntegrationByType(environmentId, "notion");
if (existingIntegration) {
notionIntegration.config.data = existingIntegration.config.data as TIntegrationNotionConfigData[];
}

View File

@@ -8,7 +8,6 @@ import { withV1ApiWrapper } from "@/app/lib/api/with-api-logging";
import { SLACK_CLIENT_ID, SLACK_CLIENT_SECRET, SLACK_REDIRECT_URI, WEBAPP_URL } from "@/lib/constants";
import { hasUserEnvironmentAccess } from "@/lib/environment/auth";
import { createOrUpdateIntegration, getIntegrationByType } from "@/lib/integration/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
export const GET = withV1ApiWrapper({
handler: async ({ req, authentication }) => {
@@ -89,8 +88,7 @@ export const GET = withV1ApiWrapper({
team: data.team,
};
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const slackIntegration = await getIntegrationByType(projectId, "slack");
const slackIntegration = await getIntegrationByType(environmentId, "slack");
const slackConfiguration: TIntegrationSlackConfig = {
data: (slackIntegration?.config.data as TIntegrationSlackConfigData[]) ?? [],

View File

@@ -19,7 +19,6 @@ const selectActionClass = {
key: true,
noCodeConfig: true,
environmentId: true,
projectId: true,
} satisfies Prisma.ActionClassSelect;
export const getActionClasses = reactCache(async (environmentIds: string[]): Promise<TActionClass[]> => {

View File

@@ -50,7 +50,6 @@ export const responseSelection = {
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -3,33 +3,40 @@ import { z } from "zod";
import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
import { DatabaseError } from "@formbricks/types/errors";
import { deleteSurveyLifecycleJobs } from "@/lib/river/survey-lifecycle";
import { validateInputs } from "@/lib/utils/validate";
export const deleteSurvey = async (surveyId: string) => {
validateInputs([surveyId, z.cuid2()]);
try {
const deletedSurvey = await prisma.survey.delete({
where: {
id: surveyId,
},
include: {
segment: true,
triggers: {
include: {
actionClass: true,
const deletedSurvey = await prisma.$transaction(async (tx) => {
await deleteSurveyLifecycleJobs({ tx, surveyId });
const removedSurvey = await tx.survey.delete({
where: {
id: surveyId,
},
include: {
segment: true,
triggers: {
include: {
actionClass: true,
},
},
},
},
});
if (deletedSurvey.type === "app" && deletedSurvey.segment?.isPrivate) {
await prisma.segment.delete({
where: {
id: deletedSurvey.segment.id,
},
});
}
if (removedSurvey.type === "app" && removedSurvey.segment?.isPrivate) {
await tx.segment.delete({
where: {
id: removedSurvey.segment.id,
},
});
}
return removedSurvey;
});
return deletedSurvey;
} catch (error) {

View File

@@ -5,7 +5,6 @@ import { DatabaseError, InvalidInputError } from "@formbricks/types/errors";
import { TWebhookInput, ZWebhookInput } from "@/app/api/v1/webhooks/types/webhooks";
import { ITEMS_PER_PAGE } from "@/lib/constants";
import { generateWebhookSecret } from "@/lib/crypto";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { validateInputs } from "@/lib/utils/validate";
import { validateWebhookUrl } from "@/lib/utils/validate-webhook-url";
@@ -13,8 +12,6 @@ export const createWebhook = async (webhookInput: TWebhookInput): Promise<Webhoo
validateInputs([webhookInput, ZWebhookInput]);
await validateWebhookUrl(webhookInput.url);
const projectId = await getProjectIdFromEnvironmentId(webhookInput.environmentId);
try {
const secret = generateWebhookSecret();
@@ -26,8 +23,11 @@ export const createWebhook = async (webhookInput: TWebhookInput): Promise<Webhoo
surveyIds: webhookInput.surveyIds || [],
triggers: webhookInput.triggers || [],
secret,
environmentId: webhookInput.environmentId,
projectId,
environment: {
connect: {
id: webhookInput.environmentId,
},
},
},
});

View File

@@ -44,16 +44,16 @@ export const GET = withV3ApiWrapper({
return authResult;
}
const { projectId } = authResult;
const { environmentId } = authResult;
const [{ surveys, nextCursor }, totalCount] = await Promise.all([
getSurveyListPage(projectId, {
getSurveyListPage(environmentId, {
limit: parsed.limit,
cursor: parsed.cursor,
sortBy: parsed.sortBy,
filterCriteria: parsed.filterCriteria,
}),
getSurveyCount(projectId, parsed.filterCriteria),
getSurveyCount(environmentId, parsed.filterCriteria),
]);
return successListResponse(

View File

@@ -4823,7 +4823,6 @@ export const previewSurvey = (projectName: string, t: TFunction): TSurvey => {
name: t("templates.preview_survey_name"),
type: "link" as const,
environmentId: "cltwumfcz0009echxg02fh7oa",
projectId: null,
createdBy: "cltwumfbz0000echxysz6ptvq",
status: "inProgress" as const,
welcomeCard: {

View File

@@ -5,7 +5,7 @@ import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
import {
deleteActionClass,
getActionClass,
getActionClassByProjectIdAndName,
getActionClassByEnvironmentIdAndName,
getActionClasses,
} from "./service";
@@ -49,7 +49,7 @@ describe("ActionClass Service", () => {
const result = await getActionClasses("env1");
expect(result).toEqual(mockActionClasses);
expect(prisma.actionClass.findMany).toHaveBeenCalledWith({
where: { projectId: "env1" },
where: { environmentId: "env1" },
select: expect.any(Object),
take: undefined,
skip: undefined,
@@ -63,7 +63,7 @@ describe("ActionClass Service", () => {
});
});
describe("getActionClassByProjectIdAndName", () => {
describe("getActionClassByEnvironmentIdAndName", () => {
test("should return action class when found", async () => {
const mockActionClass: TActionClass = {
id: "id2",
@@ -83,10 +83,10 @@ describe("ActionClass Service", () => {
if (!prisma.actionClass.findFirst) prisma.actionClass.findFirst = vi.fn();
vi.mocked(prisma.actionClass.findFirst).mockResolvedValue(mockActionClass);
const result = await getActionClassByProjectIdAndName("env2", "Action 2");
const result = await getActionClassByEnvironmentIdAndName("env2", "Action 2");
expect(result).toEqual(mockActionClass);
expect(prisma.actionClass.findFirst).toHaveBeenCalledWith({
where: { name: "Action 2", projectId: "env2" },
where: { name: "Action 2", environmentId: "env2" },
select: expect.any(Object),
});
});
@@ -94,14 +94,14 @@ describe("ActionClass Service", () => {
test("should return null when not found", async () => {
if (!prisma.actionClass.findFirst) prisma.actionClass.findFirst = vi.fn();
vi.mocked(prisma.actionClass.findFirst).mockResolvedValue(null);
const result = await getActionClassByProjectIdAndName("env2", "Action 2");
const result = await getActionClassByEnvironmentIdAndName("env2", "Action 2");
expect(result).toBeNull();
});
test("should throw DatabaseError when prisma throws", async () => {
if (!prisma.actionClass.findFirst) prisma.actionClass.findFirst = vi.fn();
vi.mocked(prisma.actionClass.findFirst).mockRejectedValue(new Error("fail"));
await expect(getActionClassByProjectIdAndName("env2", "Action 2")).rejects.toThrow(DatabaseError);
await expect(getActionClassByEnvironmentIdAndName("env2", "Action 2")).rejects.toThrow(DatabaseError);
});
});

View File

@@ -9,7 +9,6 @@ import { TActionClass, TActionClassInput, ZActionClassInput } from "@formbricks/
import { ZId, ZOptionalNumber, ZString } from "@formbricks/types/common";
import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
import { ITEMS_PER_PAGE } from "../constants";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
import { validateInputs } from "../utils/validate";
const selectActionClass = {
@@ -22,17 +21,16 @@ const selectActionClass = {
key: true,
noCodeConfig: true,
environmentId: true,
projectId: true,
} satisfies Prisma.ActionClassSelect;
export const getActionClasses = reactCache(
async (projectId: string, page?: number): Promise<TActionClass[]> => {
validateInputs([projectId, ZId], [page, ZOptionalNumber]);
async (environmentId: string, page?: number): Promise<TActionClass[]> => {
validateInputs([environmentId, ZId], [page, ZOptionalNumber]);
try {
return await prisma.actionClass.findMany({
where: {
projectId,
environmentId: environmentId,
},
select: selectActionClass,
take: page ? ITEMS_PER_PAGE : undefined,
@@ -42,21 +40,21 @@ export const getActionClasses = reactCache(
},
});
} catch (error) {
throw new DatabaseError(`Database error when fetching actions for project ${projectId}`);
throw new DatabaseError(`Database error when fetching actions for environment ${environmentId}`);
}
}
);
// This function is used to get an action by its name and projectId(it can return private actions as well)
export const getActionClassByProjectIdAndName = reactCache(
async (projectId: string, name: string): Promise<TActionClass | null> => {
validateInputs([projectId, ZId], [name, ZString]);
// This function is used to get an action by its name and environmentId(it can return private actions as well)
export const getActionClassByEnvironmentIdAndName = reactCache(
async (environmentId: string, name: string): Promise<TActionClass | null> => {
validateInputs([environmentId, ZId], [name, ZString]);
try {
const actionClass = await prisma.actionClass.findFirst({
where: {
name,
projectId,
environmentId,
},
select: selectActionClass,
});
@@ -115,13 +113,10 @@ export const createActionClass = async (
const { environmentId: _, ...actionClassInput } = actionClass;
try {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const actionClassPrisma = await prisma.actionClass.create({
data: {
...actionClassInput,
environmentId,
projectId,
environment: { connect: { id: environmentId } },
key: actionClassInput.type === "code" ? actionClassInput.key : undefined,
noCodeConfig:
actionClassInput.type === "noCode"

View File

@@ -14,7 +14,6 @@ import {
} from "@formbricks/types/integration/airtable";
import { AIRTABLE_CLIENT_ID, AIRTABLE_MESSAGE_LIMIT } from "../constants";
import { createOrUpdateIntegration, getIntegrationByType } from "../integration/service";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
import { delay } from "../utils/promises";
import { truncateText } from "../utils/strings";
@@ -79,8 +78,10 @@ export const fetchAirtableAuthToken = async (formData: Record<string, any>) => {
export const getAirtableToken = async (environmentId: string) => {
try {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const airtableIntegration = (await getIntegrationByType(projectId, "airtable")) as TIntegrationAirtable;
const airtableIntegration = (await getIntegrationByType(
environmentId,
"airtable"
)) as TIntegrationAirtable;
const { access_token, expiry_date, refresh_token } = ZIntegrationAirtableCredential.parse(
airtableIntegration?.config.key

View File

@@ -40,8 +40,6 @@ export const GITHUB_ID = env.GITHUB_ID;
export const GITHUB_SECRET = env.GITHUB_SECRET;
export const GOOGLE_CLIENT_ID = env.GOOGLE_CLIENT_ID;
export const GOOGLE_CLIENT_SECRET = env.GOOGLE_CLIENT_SECRET;
export const HUB_API_URL = env.HUB_API_URL;
export const HUB_API_KEY = env.HUB_API_KEY;
export const AZUREAD_CLIENT_ID = env.AZUREAD_CLIENT_ID;
export const AZUREAD_CLIENT_SECRET = env.AZUREAD_CLIENT_SECRET;

View File

@@ -33,8 +33,6 @@ export const env = createEnv({
GOOGLE_SHEETS_REDIRECT_URL: z.string().optional(),
HTTP_PROXY: z.url().optional(),
HTTPS_PROXY: z.url().optional(),
HUB_API_URL: z.url(),
HUB_API_KEY: z.string().optional(),
IMPRINT_URL: z
.url()
.optional()
@@ -161,8 +159,6 @@ export const env = createEnv({
GOOGLE_SHEETS_REDIRECT_URL: process.env.GOOGLE_SHEETS_REDIRECT_URL,
HTTP_PROXY: process.env.HTTP_PROXY,
HTTPS_PROXY: process.env.HTTPS_PROXY,
HUB_API_URL: process.env.HUB_API_URL,
HUB_API_KEY: process.env.HUB_API_KEY,
IMPRINT_URL: process.env.IMPRINT_URL,
IMPRINT_ADDRESS: process.env.IMPRINT_ADDRESS,
INVITE_DISABLED: process.env.INVITE_DISABLED,

View File

@@ -7,7 +7,6 @@ import { ZId, ZOptionalNumber, ZString } from "@formbricks/types/common";
import { DatabaseError } from "@formbricks/types/errors";
import { TIntegration, TIntegrationInput, ZIntegrationType } from "@formbricks/types/integration";
import { ITEMS_PER_PAGE } from "../constants";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
import { validateInputs } from "../utils/validate";
const transformIntegration = (integration: TIntegration): TIntegration => {
@@ -29,8 +28,6 @@ export const createOrUpdateIntegration = async (
): Promise<TIntegration> => {
validateInputs([environmentId, ZId]);
const projectId = await getProjectIdFromEnvironmentId(environmentId);
try {
const integration = await prisma.integration.upsert({
where: {
@@ -41,13 +38,11 @@ export const createOrUpdateIntegration = async (
},
update: {
...integrationData,
environmentId,
projectId,
environment: { connect: { id: environmentId } },
},
create: {
...integrationData,
environmentId,
projectId,
environment: { connect: { id: environmentId } },
},
});
return integration;
@@ -61,13 +56,13 @@ export const createOrUpdateIntegration = async (
};
export const getIntegrations = reactCache(
async (projectId: string, page?: number): Promise<TIntegration[]> => {
validateInputs([projectId, ZId], [page, ZOptionalNumber]);
async (environmentId: string, page?: number): Promise<TIntegration[]> => {
validateInputs([environmentId, ZId], [page, ZOptionalNumber]);
try {
const integrations = await prisma.integration.findMany({
where: {
projectId,
environmentId,
},
take: page ? ITEMS_PER_PAGE : undefined,
skip: page ? ITEMS_PER_PAGE * (page - 1) : undefined,
@@ -99,14 +94,16 @@ export const getIntegration = reactCache(async (integrationId: string): Promise<
});
export const getIntegrationByType = reactCache(
async (projectId: string, type: TIntegrationInput["type"]): Promise<TIntegration | null> => {
validateInputs([projectId, ZId], [type, ZIntegrationType]);
async (environmentId: string, type: TIntegrationInput["type"]): Promise<TIntegration | null> => {
validateInputs([environmentId, ZId], [type, ZIntegrationType]);
try {
const integration = await prisma.integration.findFirst({
const integration = await prisma.integration.findUnique({
where: {
projectId,
type,
type_environmentId: {
environmentId,
type,
},
},
});
return integration ? transformIntegration(integration) : null;

View File

@@ -6,7 +6,6 @@ import {
import { ENCRYPTION_KEY } from "@/lib/constants";
import { symmetricDecrypt } from "@/lib/crypto";
import { getIntegrationByType } from "../integration/service";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
const fetchPages = async (config: TIntegrationNotionConfig) => {
try {
@@ -30,8 +29,7 @@ const fetchPages = async (config: TIntegrationNotionConfig) => {
export const getNotionDatabases = async (environmentId: string): Promise<TIntegrationNotionDatabase[]> => {
let results: TIntegrationNotionDatabase[] = [];
try {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const notionIntegration = (await getIntegrationByType(projectId, "notion")) as TIntegrationNotion;
const notionIntegration = (await getIntegrationByType(environmentId, "notion")) as TIntegrationNotion;
if (notionIntegration && notionIntegration.config?.key.bot_id) {
results = await fetchPages(notionIntegration.config);
}

View File

@@ -75,7 +75,6 @@ export const responseSelection = {
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -0,0 +1,10 @@
// Shared constants for the River-backed survey lifecycle job queue.
// Server-only: importing this module from client code fails at build time.
import "server-only";
// Postgres schema holding River's tables — assumes the migration creates it; TODO confirm.
export const RIVER_SCHEMA = "river";
// River queue that survey start/end jobs are inserted into.
export const RIVER_SURVEY_LIFECYCLE_QUEUE = "survey_lifecycle";
// Job kinds for the two lifecycle transitions (survey goes live / survey closes).
export const RIVER_SURVEY_START_KIND = "survey.start";
export const RIVER_SURVEY_END_KIND = "survey.end";
// Retry budget applied when inserting lifecycle jobs.
export const RIVER_SURVEY_LIFECYCLE_MAX_ATTEMPTS = 3;
// NOTIFY channel name; publishers prefix it with "river." when calling pg_notify.
export const RIVER_INSERT_NOTIFICATION_CHANNEL = "river_insert";
// River job states considered "pending" (not yet finished); used when deleting queued jobs.
export const RIVER_PENDING_JOB_STATES = ["available", "scheduled", "retryable"] as const;

View File

@@ -0,0 +1,502 @@
import { Prisma } from "@prisma/client";
import type { PrismaClient } from "@prisma/client";
import { randomUUID } from "crypto";
import { Socket } from "net";
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, test, vi } from "vitest";
import { logger } from "@formbricks/logger";
import {
RIVER_INSERT_NOTIFICATION_CHANNEL,
RIVER_PENDING_JOB_STATES,
RIVER_SURVEY_END_KIND,
RIVER_SURVEY_LIFECYCLE_MAX_ATTEMPTS,
RIVER_SURVEY_LIFECYCLE_QUEUE,
RIVER_SURVEY_START_KIND,
} from "./constants";
import { deleteSurveyLifecycleJobs, enqueueSurveyLifecycleJobs } from "./survey-lifecycle";
// Stub out "server-only" so the module under test can be imported in a test runtime.
vi.mock("server-only", () => ({}));
// Replace the logger with a spy so error logging can be asserted without real output.
vi.mock("@formbricks/logger", () => ({
logger: {
error: vi.fn(),
},
}));
/**
 * Builds a minimal transaction-client stub whose only member is a mocked
 * `$executeRaw`, which is all the code under test touches.
 */
const createMockTx = (): Prisma.TransactionClient => {
  const stub = { $executeRaw: vi.fn() };
  // Double cast: the stub intentionally implements only a sliver of the client surface.
  return stub as unknown as Prisma.TransactionClient;
};
/**
 * Returns the bound parameter values of the `callIndex`-th `$executeRaw`
 * invocation recorded on the mocked transaction client.
 */
const getQueryValues = (callIndex: number, tx: Prisma.TransactionClient) => {
  const rawCall = vi.mocked(tx.$executeRaw).mock.calls[callIndex];
  const sql = rawCall[0] as Prisma.Sql;
  return sql.values;
};
describe("enqueueSurveyLifecycleJobs", () => {
beforeEach(() => {
vi.mocked(logger.error).mockReset();
});
test("enqueues a start job when startsAt is set on create", async () => {
const tx = createMockTx();
const startsAt = new Date("2026-04-01T12:00:00.000Z");
await enqueueSurveyLifecycleJobs({
tx,
now: new Date("2026-03-31T12:00:00.000Z"),
survey: {
id: "survey_1",
environmentId: "env_1",
startsAt,
endsAt: null,
},
});
expect(tx.$executeRaw).toHaveBeenCalledTimes(2);
expect(getQueryValues(0, tx)).toEqual([
JSON.stringify({
surveyId: "survey_1",
environmentId: "env_1",
scheduledFor: startsAt.toISOString(),
}),
RIVER_SURVEY_START_KIND,
RIVER_SURVEY_LIFECYCLE_MAX_ATTEMPTS,
RIVER_SURVEY_LIFECYCLE_QUEUE,
startsAt,
]);
expect(getQueryValues(1, tx)).toEqual([
`river.${RIVER_INSERT_NOTIFICATION_CHANNEL}`,
JSON.stringify({ queue: RIVER_SURVEY_LIFECYCLE_QUEUE }),
]);
});
test("enqueues an end job when endsAt is set on create", async () => {
const tx = createMockTx();
const endsAt = new Date("2026-04-02T12:00:00.000Z");
await enqueueSurveyLifecycleJobs({
tx,
now: new Date("2026-03-31T12:00:00.000Z"),
survey: {
id: "survey_1",
environmentId: "env_1",
startsAt: null,
endsAt,
},
});
expect(tx.$executeRaw).toHaveBeenCalledTimes(2);
expect(getQueryValues(0, tx)).toEqual([
JSON.stringify({
surveyId: "survey_1",
environmentId: "env_1",
scheduledFor: endsAt.toISOString(),
}),
RIVER_SURVEY_END_KIND,
RIVER_SURVEY_LIFECYCLE_MAX_ATTEMPTS,
RIVER_SURVEY_LIFECYCLE_QUEUE,
endsAt,
]);
});
// Both dates set on create: two INSERTs (start first, then end) plus one notify.
test("enqueues both lifecycle jobs when both dates are set on create", async () => {
  const tx = createMockTx();
  const startsAt = new Date("2026-04-01T12:00:00.000Z");
  const endsAt = new Date("2026-04-02T12:00:00.000Z");
  await enqueueSurveyLifecycleJobs({
    tx,
    now: new Date("2026-03-31T12:00:00.000Z"),
    survey: {
      id: "survey_1",
      environmentId: "env_1",
      startsAt,
      endsAt,
    },
  });
  expect(tx.$executeRaw).toHaveBeenCalledTimes(3);
  // Index 1 of each bind array is the job kind (second INSERT column).
  expect(getQueryValues(0, tx)[1]).toBe(RIVER_SURVEY_START_KIND);
  expect(getQueryValues(1, tx)[1]).toBe(RIVER_SURVEY_END_KIND);
});

// No dates at all: nothing is inserted and no notify is sent.
test("does nothing when neither lifecycle date is set", async () => {
  const tx = createMockTx();
  await enqueueSurveyLifecycleJobs({
    tx,
    survey: {
      id: "survey_1",
      environmentId: "env_1",
      startsAt: null,
      endsAt: null,
    },
  });
  expect(tx.$executeRaw).not.toHaveBeenCalled();
});

// Update path: a null -> Date transition counts as a first-time assignment
// and enqueues a job (INSERT + notify = 2 calls).
test("enqueues a lifecycle job when a date transitions from null to a value", async () => {
  const tx = createMockTx();
  const startsAt = new Date("2026-04-01T12:00:00.000Z");
  await enqueueSurveyLifecycleJobs({
    tx,
    now: new Date("2026-03-31T12:00:00.000Z"),
    survey: {
      id: "survey_1",
      environmentId: "env_1",
      startsAt,
      endsAt: null,
    },
    previousSurvey: {
      startsAt: null,
      endsAt: null,
    },
  });
  expect(tx.$executeRaw).toHaveBeenCalledTimes(2);
});
// Date -> different Date is NOT a transition; the already-enqueued job is left alone.
test("does not enqueue when a lifecycle date changes after already being set", async () => {
  const tx = createMockTx();
  await enqueueSurveyLifecycleJobs({
    tx,
    survey: {
      id: "survey_1",
      environmentId: "env_1",
      startsAt: new Date("2026-04-02T12:00:00.000Z"),
      endsAt: null,
    },
    previousSurvey: {
      startsAt: new Date("2026-04-01T12:00:00.000Z"),
      endsAt: null,
    },
  });
  expect(tx.$executeRaw).not.toHaveBeenCalled();
});

// Date -> null (clearing) is not a transition either; nothing is enqueued.
test("does not enqueue when a lifecycle date is cleared", async () => {
  const tx = createMockTx();
  await enqueueSurveyLifecycleJobs({
    tx,
    survey: {
      id: "survey_1",
      environmentId: "env_1",
      startsAt: null,
      endsAt: null,
    },
    previousSurvey: {
      startsAt: new Date("2026-04-01T12:00:00.000Z"),
      endsAt: null,
    },
  });
  expect(tx.$executeRaw).not.toHaveBeenCalled();
});

// A failing INSERT must be logged with the survey id and then rethrown so the
// enclosing Prisma transaction rolls back.
test("logs and rethrows SQL errors", async () => {
  const tx = createMockTx();
  const queryError = new Error("insert failed");
  vi.mocked(tx.$executeRaw).mockRejectedValueOnce(queryError);
  await expect(
    enqueueSurveyLifecycleJobs({
      tx,
      survey: {
        id: "survey_1",
        environmentId: "env_1",
        startsAt: new Date("2026-04-01T12:00:00.000Z"),
        endsAt: null,
      },
    })
  ).rejects.toThrow(queryError);
  expect(logger.error).toHaveBeenCalledWith(
    { error: queryError, surveyId: "survey_1" },
    "Failed to enqueue survey lifecycle jobs"
  );
});
});
// Unit tests for deleteSurveyLifecycleJobs: one DELETE statement scoped to the
// survey's pending jobs, with failures logged and rethrown.
describe("deleteSurveyLifecycleJobs", () => {
  beforeEach(() => {
    vi.mocked(logger.error).mockReset();
  });

  test("deletes pending lifecycle jobs for the survey", async () => {
    const mockTx = createMockTx();

    await deleteSurveyLifecycleJobs({ tx: mockTx, surveyId: "survey_1" });

    expect(mockTx.$executeRaw).toHaveBeenCalledTimes(1);
    // Bind order: both job kinds, then the survey id, then every pending state.
    const expectedBindings = [
      RIVER_SURVEY_START_KIND,
      RIVER_SURVEY_END_KIND,
      "survey_1",
      ...RIVER_PENDING_JOB_STATES,
    ];
    expect(getQueryValues(0, mockTx)).toEqual(expectedBindings);
  });

  test("logs and rethrows delete failures", async () => {
    const mockTx = createMockTx();
    const deleteFailure = new Error("delete failed");
    vi.mocked(mockTx.$executeRaw).mockRejectedValueOnce(deleteFailure);

    const deletion = deleteSurveyLifecycleJobs({ tx: mockTx, surveyId: "survey_1" });

    await expect(deletion).rejects.toThrow(deleteFailure);
    expect(logger.error).toHaveBeenCalledWith(
      { error: deleteFailure, surveyId: "survey_1" },
      "Failed to delete pending survey lifecycle jobs"
    );
  });
});
/**
 * Best-effort TCP reachability probe for the Postgres host in a connection URL.
 * Used below to decide whether the integration suite can run at all.
 *
 * @param databaseURL - Postgres connection string; `undefined`/empty short-circuits to `false`.
 * @param timeoutMs - How long to wait for the TCP handshake before giving up (default 500 ms).
 * @returns `true` only when a raw TCP connection to host:port succeeds within the timeout.
 */
const canReachPostgres = async (databaseURL?: string, timeoutMs = 500): Promise<boolean> => {
  if (!databaseURL) {
    return false;
  }
  try {
    const parsedURL = new URL(databaseURL);
    // Connection strings may omit the port; fall back to the Postgres default.
    const port = Number(parsedURL.port || "5432");
    await new Promise<void>((resolve, reject) => {
      const socket = new Socket();
      socket.setTimeout(timeoutMs);
      socket.once("connect", () => {
        socket.destroy();
        resolve();
      });
      socket.once("timeout", () => {
        socket.destroy();
        reject(new Error("timeout"));
      });
      socket.once("error", (error) => {
        socket.destroy();
        reject(error);
      });
      socket.connect(port, parsedURL.hostname);
    });
    return true;
  } catch {
    // Any parse or connect failure simply means "not reachable" — never throw.
    return false;
  }
};
// Only run the integration suite when a real Postgres is reachable; otherwise skip it.
const describeIfDatabase = (await canReachPostgres(process.env.DATABASE_URL)) ? describe : describe.skip;

describeIfDatabase("survey lifecycle integration", () => {
  let integrationPrisma: PrismaClient;
  let schema: string;
  // Local quoting helper for test fixtures; schema names here are generated and trusted.
  const quoteIdentifier = (identifier: string) => `"${identifier}"`;

  beforeAll(() => {
    // Use the real Prisma client (the module-level one is mocked in this file).
    return vi
      .importActual<typeof import("@prisma/client")>("@prisma/client")
      .then(({ PrismaClient: ActualPrismaClient }) => {
        integrationPrisma = new ActualPrismaClient({
          datasources: {
            db: {
              url: process.env.DATABASE_URL,
            },
          },
        });
      });
  });

  beforeEach(async () => {
    // Fresh randomly-named schema per test so runs are isolated and parallel-safe.
    schema = `river_test_${randomUUID().replace(/-/g, "_")}`;
    await integrationPrisma.$executeRaw(Prisma.raw(`CREATE SCHEMA ${quoteIdentifier(schema)}`));
    // Minimal replica of River's job-state enum — only the states these tests touch.
    await integrationPrisma.$executeRaw(
      Prisma.raw(`
        CREATE TYPE ${quoteIdentifier(schema)}.${quoteIdentifier("river_job_state")} AS ENUM (
          'available',
          'scheduled',
          'retryable',
          'completed'
        )
      `)
    );
    // Minimal replica of River's river_job table; scheduled_at defaults to NOW()
    // so immediate inserts (which omit the column) get the database clock.
    await integrationPrisma.$executeRaw(
      Prisma.raw(`
        CREATE TABLE ${quoteIdentifier(schema)}.${quoteIdentifier("river_job")} (
          id BIGSERIAL PRIMARY KEY,
          state ${quoteIdentifier(schema)}.${quoteIdentifier("river_job_state")} NOT NULL DEFAULT 'available',
          args JSONB NOT NULL,
          kind TEXT NOT NULL,
          max_attempts SMALLINT NOT NULL,
          queue TEXT NOT NULL,
          scheduled_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
          created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
        )
      `)
    );
  });

  afterEach(async () => {
    // Drop the per-test schema and everything in it.
    await integrationPrisma.$executeRaw(
      Prisma.raw(`DROP SCHEMA IF EXISTS ${quoteIdentifier(schema)} CASCADE`)
    );
  });

  afterAll(async () => {
    await integrationPrisma.$disconnect();
  });
// End-to-end check against a real river_job table: a future startsAt becomes a
// job scheduled at that timestamp; a past endsAt becomes an immediately-due job
// whose scheduled_at falls back to the table's NOW() default.
test("persists scheduled and immediate lifecycle jobs with the expected payload", async () => {
  const surveyId = "survey_1";
  const environmentId = "env_1";
  const startsAt = new Date("2026-05-01T12:00:00.000Z");
  // Deliberately in the past relative to `now` to exercise the immediate path.
  const endsAt = new Date("2026-03-01T12:00:00.000Z");
  const beforeInsert = new Date();
  await integrationPrisma.$transaction(async (tx) => {
    // Create-style call: only startsAt set -> one scheduled start job.
    await enqueueSurveyLifecycleJobs({
      tx,
      schema,
      now: new Date("2026-04-01T12:00:00.000Z"),
      survey: {
        id: surveyId,
        environmentId,
        startsAt,
        endsAt: null,
      },
    });
    // Update-style call: endsAt transitions null -> Date, startsAt unchanged,
    // so only the (immediately due) end job is added.
    await enqueueSurveyLifecycleJobs({
      tx,
      schema,
      now: new Date("2026-04-01T12:00:00.000Z"),
      survey: {
        id: surveyId,
        environmentId,
        startsAt,
        endsAt,
      },
      previousSurvey: {
        startsAt,
        endsAt: null,
      },
    });
  });
  const afterInsert = new Date();
  const jobs = await integrationPrisma.$queryRaw<
    Array<{
      kind: string;
      queue: string;
      args: Record<string, string>;
      scheduled_at: Date;
      max_attempts: number;
    }>
  >(
    Prisma.raw(
      `SELECT kind, queue, args, scheduled_at, max_attempts
       FROM ${quoteIdentifier(schema)}.${quoteIdentifier("river_job")}
       ORDER BY kind ASC`
    )
  );
  expect(jobs).toHaveLength(2);
  expect(jobs).toEqual(
    expect.arrayContaining([
      // End job: payload only — its scheduled_at is the DB default, checked below.
      expect.objectContaining({
        kind: RIVER_SURVEY_END_KIND,
        queue: RIVER_SURVEY_LIFECYCLE_QUEUE,
        args: {
          surveyId,
          environmentId,
          scheduledFor: endsAt.toISOString(),
        },
        max_attempts: RIVER_SURVEY_LIFECYCLE_MAX_ATTEMPTS,
      }),
      // Start job: scheduled exactly at startsAt.
      expect.objectContaining({
        kind: RIVER_SURVEY_START_KIND,
        queue: RIVER_SURVEY_LIFECYCLE_QUEUE,
        args: {
          surveyId,
          environmentId,
          scheduledFor: startsAt.toISOString(),
        },
        scheduled_at: startsAt,
        max_attempts: RIVER_SURVEY_LIFECYCLE_MAX_ATTEMPTS,
      }),
    ])
  );
  // The immediate job's scheduled_at must land inside the insert window
  // (±1 s slack for clock skew between test host and database).
  const immediateJob = jobs.find((job) => job.kind === RIVER_SURVEY_END_KIND);
  expect(immediateJob?.scheduled_at.getTime()).toBeGreaterThanOrEqual(beforeInsert.getTime() - 1000);
  expect(immediateJob?.scheduled_at.getTime()).toBeLessThanOrEqual(afterInsert.getTime() + 1000);
});
// The delete helper must remove only pending-state rows belonging to the target
// survey; completed history and other surveys' jobs stay untouched.
test("removes only pending lifecycle jobs for the target survey", async () => {
  const surveyId = "survey_1";
  // Seed one deletable row (available/survey_1) and two that must survive:
  // a completed row for survey_1 and a retryable row for survey_2.
  await integrationPrisma.$executeRaw(
    Prisma.raw(`
      INSERT INTO ${quoteIdentifier(schema)}.${quoteIdentifier("river_job")}
      (state, args, kind, max_attempts, queue)
      VALUES
      ('available', '{"surveyId":"survey_1","environmentId":"env_1","scheduledFor":"2026-04-01T12:00:00.000Z"}', '${RIVER_SURVEY_START_KIND}', 3, '${RIVER_SURVEY_LIFECYCLE_QUEUE}'),
      ('completed', '{"surveyId":"survey_1","environmentId":"env_1","scheduledFor":"2026-04-02T12:00:00.000Z"}', '${RIVER_SURVEY_END_KIND}', 3, '${RIVER_SURVEY_LIFECYCLE_QUEUE}'),
      ('retryable', '{"surveyId":"survey_2","environmentId":"env_1","scheduledFor":"2026-04-03T12:00:00.000Z"}', '${RIVER_SURVEY_START_KIND}', 3, '${RIVER_SURVEY_LIFECYCLE_QUEUE}')
    `)
  );
  await integrationPrisma.$transaction(async (tx) => {
    await deleteSurveyLifecycleJobs({
      tx,
      surveyId,
      schema,
    });
  });
  const remainingJobs = await integrationPrisma.$queryRaw<
    Array<{ state: string; kind: string; args: { surveyId: string } }>
  >(
    Prisma.raw(
      `SELECT state, kind, args
       FROM ${quoteIdentifier(schema)}.${quoteIdentifier("river_job")}
       ORDER BY state ASC, kind ASC`
    )
  );
  // Exactly the two non-deletable rows remain.
  expect(remainingJobs).toEqual([
    {
      state: "completed",
      kind: RIVER_SURVEY_END_KIND,
      args: {
        surveyId: "survey_1",
        environmentId: "env_1",
        scheduledFor: "2026-04-02T12:00:00.000Z",
      },
    },
    {
      state: "retryable",
      kind: RIVER_SURVEY_START_KIND,
      args: {
        surveyId: "survey_2",
        environmentId: "env_1",
        scheduledFor: "2026-04-03T12:00:00.000Z",
      },
    },
  ]);
});
});

View File

@@ -0,0 +1,181 @@
import "server-only";
import { Prisma } from "@prisma/client";
import { logger } from "@formbricks/logger";
import { TSurvey } from "@formbricks/types/surveys/types";
import {
RIVER_INSERT_NOTIFICATION_CHANNEL,
RIVER_PENDING_JOB_STATES,
RIVER_SCHEMA,
RIVER_SURVEY_END_KIND,
RIVER_SURVEY_LIFECYCLE_MAX_ATTEMPTS,
RIVER_SURVEY_LIFECYCLE_QUEUE,
RIVER_SURVEY_START_KIND,
} from "./constants";
/** The two River job kinds that drive survey lifecycle transitions. */
export type SurveyLifecycleJobKind = typeof RIVER_SURVEY_START_KIND | typeof RIVER_SURVEY_END_KIND;

/** JSON payload stored in `river_job.args` for a lifecycle job. */
export interface SurveyLifecycleJobArgs {
  surveyId: string;
  environmentId: string;
  // ISO-8601 timestamp of the moment the transition should happen.
  scheduledFor: string;
}

/** Minimal survey projection needed to schedule lifecycle jobs. */
export interface SurveyLifecycleSurvey {
  id: TSurvey["id"];
  environmentId: TSurvey["environmentId"];
  startsAt?: TSurvey["startsAt"];
  endsAt?: TSurvey["endsAt"];
}

interface EnqueueSurveyLifecycleJobsOptions {
  // Caller-owned Prisma transaction; inserts roll back with the caller on failure.
  tx: Prisma.TransactionClient;
  survey: SurveyLifecycleSurvey;
  // Pre-update snapshot; omit (or pass null) on create so set dates count as transitions.
  previousSurvey?: Pick<SurveyLifecycleSurvey, "startsAt" | "endsAt"> | null;
  // Injectable clock, mainly for tests; defaults to the wall clock.
  now?: Date;
  // Postgres schema containing the river_job table; defaults to RIVER_SCHEMA.
  schema?: string;
}

interface DeleteSurveyLifecycleJobsOptions {
  tx: Prisma.TransactionClient;
  surveyId: string;
  schema?: string;
  // Job kinds to delete; defaults to both lifecycle kinds.
  kinds?: SurveyLifecycleJobKind[];
}
// Conservative identifier whitelist: leading letter/underscore, then word characters only.
const identifierPattern = /^[A-Za-z_][A-Za-z0-9_]*$/;

/** Double-quotes a SQL identifier after validating it against the whitelist; throws otherwise. */
const quoteIdentifier = (identifier: string): string => {
  if (identifierPattern.test(identifier)) {
    return `"${identifier}"`;
  }
  throw new Error(`Invalid SQL identifier: ${identifier}`);
};
/** Builds a schema-qualified, quoted reference to the river_job table as a raw SQL fragment. */
const getQualifiedRiverJobTable = (schema: string): Prisma.Sql => {
  const qualifiedTable = `${quoteIdentifier(schema)}.${quoteIdentifier("river_job")}`;
  return Prisma.raw(qualifiedTable);
};
/**
 * Builds the schema-qualified pg_notify channel name River listens on.
 * The schema is validated (but not quoted) because channel names are plain strings.
 */
const getQualifiedInsertNotificationChannel = (schema: string): string => {
  if (!identifierPattern.test(schema)) {
    throw new Error(`Invalid SQL identifier: ${schema}`);
  }
  return [schema, RIVER_INSERT_NOTIFICATION_CHANNEL].join(".");
};
/**
 * True only for a null/undefined -> Date transition. A change between two dates,
 * or clearing a date, intentionally does not requeue a job.
 */
const shouldEnqueueTransition = (previousValue?: Date | null, nextValue?: Date | null): nextValue is Date => {
  const wasUnset = previousValue == null;
  return wasUnset && nextValue != null;
};
/** Assembles the serializable args payload stored on a lifecycle job row. */
const buildJobArgs = (survey: SurveyLifecycleSurvey, scheduledFor: Date): SurveyLifecycleJobArgs => {
  const { id: surveyId, environmentId } = survey;
  return {
    surveyId,
    environmentId,
    scheduledFor: scheduledFor.toISOString(),
  };
};
/**
 * Inserts a single River lifecycle job row for one start/end transition.
 *
 * Jobs whose `scheduledFor` lies in the future bind that timestamp as
 * `scheduled_at` so River runs them on time; jobs already due bind the database
 * clock (`NOW()`, matching the column default) so River picks them up
 * immediately. Both cases share one INSERT instead of two duplicated ones.
 *
 * @param tx - Caller-owned transaction the INSERT participates in.
 * @param kind - Start or end job kind.
 * @param survey - Survey the job belongs to (id + environmentId go into args).
 * @param scheduledFor - The lifecycle timestamp being scheduled.
 * @param schema - Postgres schema holding river_job (validated before use).
 * @param now - Reference clock used to decide future vs. immediate.
 */
const enqueueLifecycleJob = async (
  tx: Prisma.TransactionClient,
  {
    kind,
    survey,
    scheduledFor,
    schema,
    now,
  }: {
    kind: SurveyLifecycleJobKind;
    survey: SurveyLifecycleSurvey;
    scheduledFor: Date;
    schema: string;
    now: Date;
  }
): Promise<void> => {
  const args = JSON.stringify(buildJobArgs(survey, scheduledFor));
  const riverJobTable = getQualifiedRiverJobTable(schema);
  // Future jobs bind the target timestamp; due jobs use the database clock so
  // behavior matches the column's NOW() default.
  const scheduledAt =
    scheduledFor.getTime() > now.getTime() ? Prisma.sql`${scheduledFor}` : Prisma.raw("NOW()");
  await tx.$executeRaw(
    Prisma.sql`
      INSERT INTO ${riverJobTable} (args, kind, max_attempts, queue, scheduled_at)
      VALUES (
        ${args}::jsonb,
        ${kind},
        ${RIVER_SURVEY_LIFECYCLE_MAX_ATTEMPTS},
        ${RIVER_SURVEY_LIFECYCLE_QUEUE},
        ${scheduledAt}
      )
    `
  );
};
/** Emits a pg_notify on River's insert channel so a listening client checks the lifecycle queue. */
const notifyLifecycleQueue = async (tx: Prisma.TransactionClient, schema: string): Promise<void> => {
  const channel = getQualifiedInsertNotificationChannel(schema);
  const payload = JSON.stringify({ queue: RIVER_SURVEY_LIFECYCLE_QUEUE });
  await tx.$executeRaw(Prisma.sql`SELECT pg_notify(${channel}, ${payload})`);
};
/**
 * Schedules River start/end jobs for a survey inside the caller's transaction.
 *
 * A job is enqueued only when the corresponding date transitions from unset to
 * set (create, or first-time assignment on update); changed or cleared dates
 * are ignored. After all inserts, the lifecycle queue is notified exactly once.
 * Failures are logged with the survey id and rethrown so the transaction rolls back.
 */
export const enqueueSurveyLifecycleJobs = async ({
  tx,
  survey,
  previousSurvey,
  now = new Date(),
  schema = RIVER_SCHEMA,
}: EnqueueSurveyLifecycleJobsOptions): Promise<void> => {
  const previousStartsAt = previousSurvey?.startsAt ?? null;
  const previousEndsAt = previousSurvey?.endsAt ?? null;

  const transitions: Array<{ kind: SurveyLifecycleJobKind; scheduledFor: Date }> = [];
  if (shouldEnqueueTransition(previousStartsAt, survey.startsAt ?? null)) {
    transitions.push({ kind: RIVER_SURVEY_START_KIND, scheduledFor: survey.startsAt as Date });
  }
  if (shouldEnqueueTransition(previousEndsAt, survey.endsAt ?? null)) {
    transitions.push({ kind: RIVER_SURVEY_END_KIND, scheduledFor: survey.endsAt as Date });
  }
  if (transitions.length === 0) {
    return;
  }

  try {
    for (const transition of transitions) {
      await enqueueLifecycleJob(tx, { ...transition, survey, schema, now });
    }
    await notifyLifecycleQueue(tx, schema);
  } catch (error) {
    logger.error({ error, surveyId: survey.id }, "Failed to enqueue survey lifecycle jobs");
    throw error;
  }
};
/**
 * Removes not-yet-run lifecycle jobs for a survey (e.g. when the survey is
 * deleted or its schedule becomes irrelevant). Only rows in a pending River
 * state are touched; completed job history is preserved. Failures are logged
 * and rethrown so the caller's transaction rolls back.
 */
export const deleteSurveyLifecycleJobs = async ({
  tx,
  surveyId,
  schema = RIVER_SCHEMA,
  kinds = [RIVER_SURVEY_START_KIND, RIVER_SURVEY_END_KIND],
}: DeleteSurveyLifecycleJobsOptions): Promise<void> => {
  const riverJobTable = getQualifiedRiverJobTable(schema);
  const kindList = Prisma.join(kinds);
  const pendingStates = Prisma.join(RIVER_PENDING_JOB_STATES);
  try {
    await tx.$executeRaw(
      Prisma.sql`
        DELETE FROM ${riverJobTable}
        WHERE kind IN (${kindList})
          AND args->>'surveyId' = ${surveyId}
          AND state IN (${pendingStates})
      `
    );
  } catch (error) {
    logger.error({ error, surveyId }, "Failed to delete pending survey lifecycle jobs");
    throw error;
  }
};

View File

@@ -4,7 +4,6 @@ import { TIntegration, TIntegrationItem } from "@formbricks/types/integration";
import { TIntegrationSlack, TIntegrationSlackCredential } from "@formbricks/types/integration/slack";
import { SLACK_MESSAGE_LIMIT } from "../constants";
import { deleteIntegration, getIntegrationByType } from "../integration/service";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
import { truncateText } from "../utils/strings";
export const fetchChannels = async (slackIntegration: TIntegration): Promise<TIntegrationItem[]> => {
@@ -59,8 +58,7 @@ export const fetchChannels = async (slackIntegration: TIntegration): Promise<TIn
export const getSlackChannels = async (environmentId: string): Promise<TIntegrationItem[]> => {
let channels: TIntegrationItem[] = [];
try {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const slackIntegration = (await getIntegrationByType(projectId, "slack")) as TIntegrationSlack;
const slackIntegration = (await getIntegrationByType(environmentId, "slack")) as TIntegrationSlack;
if (slackIntegration && slackIntegration.config?.key) {
channels = await fetchChannels(slackIntegration);
}

View File

@@ -19,7 +19,6 @@ const selectContact = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
attributes: {
select: {
value: true,
@@ -42,7 +41,6 @@ const commonMockProperties = {
createdAt: currentDate,
updatedAt: currentDate,
environmentId: mockId,
projectId: null,
};
type SurveyMock = Prisma.SurveyGetPayload<{
@@ -195,6 +193,8 @@ const mockWelcomeCard: TSurveyWelcomeCard = {
const baseSurveyProperties = {
id: mockId,
name: "Mock Survey",
startsAt: null,
endsAt: null,
autoClose: 10,
delay: 0,
autoComplete: 7,

View File

@@ -1,11 +1,17 @@
import { prisma } from "@/lib/__mocks__/database";
import { prisma } from "../__mocks__/database";
import { ActionClass, Prisma, Survey } from "@prisma/client";
import { beforeEach, describe, expect, test, vi } from "vitest";
import { testInputValidation } from "vitestSetup";
import { PrismaErrorType } from "@formbricks/database/types/error";
import { TSurveyFollowUp } from "@formbricks/database/types/survey-follow-up";
import { logger } from "@formbricks/logger";
import { TActionClass } from "@formbricks/types/action-classes";
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
import {
DatabaseError,
InvalidInputError,
ResourceNotFoundError,
ValidationError,
} from "@formbricks/types/errors";
import { TSegment } from "@formbricks/types/segment";
import { TSurvey, TSurveyCreateInput, TSurveyQuestionTypeEnum } from "@formbricks/types/surveys/types";
import { getActionClasses } from "@/lib/actionClass/service";
@@ -13,6 +19,7 @@ import {
getOrganizationByEnvironmentId,
subscribeOrganizationMembersToSurveyResponses,
} from "@/lib/organization/service";
import { enqueueSurveyLifecycleJobs } from "@/lib/river/survey-lifecycle";
import { evaluateLogic } from "@/lib/surveyLogic/utils";
import {
mockActionClass,
@@ -36,6 +43,8 @@ import {
updateSurveyInternal,
} from "./service";
vi.mock("server-only", () => ({}));
// Mock organization service
vi.mock("@/lib/organization/service", () => ({
getOrganizationByEnvironmentId: vi.fn().mockResolvedValue({
@@ -49,8 +58,21 @@ vi.mock("@/lib/actionClass/service", () => ({
getActionClasses: vi.fn(),
}));
vi.mock("@/lib/river/survey-lifecycle", () => ({
enqueueSurveyLifecycleJobs: vi.fn(),
}));
vi.mock("@formbricks/logger", () => ({
logger: {
error: vi.fn(),
},
}));
beforeEach(() => {
prisma.survey.count.mockResolvedValue(1);
prisma.$transaction.mockImplementation(async (callback: any) => callback(prisma));
vi.mocked(enqueueSurveyLifecycleJobs).mockReset();
vi.mocked(logger.error).mockReset();
});
describe("evaluateLogic with mockSurveyWithLogic", () => {
@@ -309,6 +331,35 @@ describe("Tests for updateSurvey", () => {
expect(updatedSurvey).toEqual(mockTransformedSurveyOutput);
});
test("enqueues lifecycle jobs when a date is assigned for the first time", async () => {
const startsAt = new Date("2026-04-01T12:00:00.000Z");
const currentSurvey = { ...mockSurveyOutput, startsAt: null, endsAt: null };
const persistedSurvey = { ...mockSurveyOutput, startsAt, endsAt: null };
prisma.survey.findUnique.mockResolvedValueOnce(currentSurvey);
prisma.survey.update.mockResolvedValueOnce(persistedSurvey);
await updateSurvey({
...updateSurveyInput,
startsAt,
endsAt: null,
});
expect(enqueueSurveyLifecycleJobs).toHaveBeenCalledWith({
tx: prisma,
survey: {
id: persistedSurvey.id,
environmentId: persistedSurvey.environmentId,
startsAt,
endsAt: null,
},
previousSurvey: {
startsAt: null,
endsAt: null,
},
});
});
// Note: Language handling tests (for languages.length > 0 fix) are covered in
// apps/web/modules/survey/editor/lib/survey.test.ts where we have better control
// over the test mocks. The key fix ensures languages.length > 0 (not > 1) is used.
@@ -341,6 +392,31 @@ describe("Tests for updateSurvey", () => {
prisma.survey.update.mockRejectedValue(new Error(mockErrorMessage));
await expect(updateSurvey(updateSurveyInput)).rejects.toThrow(Error);
});
test("logs and rethrows lifecycle enqueue failures", async () => {
const enqueueError = new Error("enqueue failed");
prisma.survey.findUnique.mockResolvedValueOnce({
...mockSurveyOutput,
startsAt: null,
endsAt: null,
});
prisma.survey.update.mockResolvedValueOnce({
...mockSurveyOutput,
startsAt: new Date("2026-04-01T12:00:00.000Z"),
endsAt: null,
});
vi.mocked(enqueueSurveyLifecycleJobs).mockRejectedValueOnce(enqueueError);
await expect(
updateSurvey({
...updateSurveyInput,
startsAt: new Date("2026-04-01T12:00:00.000Z"),
})
).rejects.toThrow(enqueueError);
expect(logger.error).toHaveBeenCalledWith(enqueueError, "Error updating survey");
});
});
});
@@ -647,6 +723,36 @@ describe("Tests for createSurvey", () => {
expect(subscribeOrganizationMembersToSurveyResponses).toHaveBeenCalled();
});
test("passes start and end dates to the lifecycle scheduler", async () => {
const startsAt = new Date("2026-04-02T08:00:00.000Z");
const endsAt = new Date("2026-04-03T08:00:00.000Z");
vi.mocked(getOrganizationByEnvironmentId).mockResolvedValueOnce(mockOrganizationOutput);
prisma.survey.create.mockResolvedValueOnce({
...mockSurveyOutput,
startsAt,
endsAt,
type: "link",
});
await createSurvey(mockEnvironmentId, {
...mockCreateSurveyInput,
type: "link",
startsAt,
endsAt,
});
expect(enqueueSurveyLifecycleJobs).toHaveBeenCalledWith({
tx: prisma,
survey: {
id: mockSurveyOutput.id,
environmentId: mockSurveyOutput.environmentId,
startsAt,
endsAt,
},
});
});
test("creates a private segment for app surveys", async () => {
vi.mocked(getOrganizationByEnvironmentId).mockResolvedValueOnce(mockOrganizationOutput);
prisma.survey.create.mockResolvedValueOnce({
@@ -663,6 +769,10 @@ describe("Tests for createSurvey", () => {
createdAt: new Date(),
updatedAt: new Date(),
} as unknown as TSegment);
prisma.survey.update.mockResolvedValueOnce({
...mockSurveyOutput,
type: "app",
});
await createSurvey(mockEnvironmentId, {
...mockCreateSurveyInput,
@@ -725,6 +835,20 @@ describe("Tests for createSurvey", () => {
describe("Sad Path", () => {
testInputValidation(createSurvey, "123#", mockCreateSurveyInput);
test("rejects surveys whose start date is not before the end date", async () => {
const startsAt = new Date("2026-04-03T08:00:00.000Z");
const endsAt = new Date("2026-04-02T08:00:00.000Z");
await expect(
createSurvey(mockEnvironmentId, {
...mockCreateSurveyInput,
type: "link",
startsAt,
endsAt,
})
).rejects.toThrow(ValidationError);
});
test("throws ResourceNotFoundError if organization not found", async () => {
vi.mocked(getOrganizationByEnvironmentId).mockResolvedValueOnce(null);
await expect(createSurvey(mockEnvironmentId, mockCreateSurveyInput)).rejects.toThrow(

View File

@@ -11,10 +11,10 @@ import {
getOrganizationByEnvironmentId,
subscribeOrganizationMembersToSurveyResponses,
} from "@/lib/organization/service";
import { enqueueSurveyLifecycleJobs } from "@/lib/river/survey-lifecycle";
import { TriggerUpdate } from "@/modules/survey/editor/types/survey-trigger";
import { getActionClasses } from "../actionClass/service";
import { ITEMS_PER_PAGE } from "../constants";
import { getProjectIdFromEnvironmentId } from "../utils/helper";
import { validateInputs } from "../utils/validate";
import {
checkForInvalidImagesInQuestions,
@@ -31,9 +31,10 @@ export const selectSurvey = {
name: true,
type: true,
environmentId: true,
projectId: true,
createdBy: true,
status: true,
startsAt: true,
endsAt: true,
welcomeCard: true,
questions: true,
blocks: true,
@@ -86,7 +87,6 @@ export const selectSurvey = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
name: true,
description: true,
type: true,
@@ -246,13 +246,13 @@ export const getSurveysByActionClassId = reactCache(
);
export const getSurveys = reactCache(
async (projectId: string, limit?: number, offset?: number): Promise<TSurvey[]> => {
validateInputs([projectId, ZId], [limit, ZOptionalNumber], [offset, ZOptionalNumber]);
async (environmentId: string, limit?: number, offset?: number): Promise<TSurvey[]> => {
validateInputs([environmentId, ZId], [limit, ZOptionalNumber], [offset, ZOptionalNumber]);
try {
const surveysPrisma = await prisma.survey.findMany({
where: {
projectId,
environmentId,
},
select: selectSurvey,
orderBy: {
@@ -273,12 +273,12 @@ export const getSurveys = reactCache(
}
);
export const getSurveyCount = reactCache(async (projectId: string): Promise<number> => {
validateInputs([projectId, ZId]);
export const getSurveyCount = reactCache(async (environmentId: string): Promise<number> => {
validateInputs([environmentId, ZId]);
try {
const surveyCount = await prisma.survey.count({
where: {
projectId,
environmentId: environmentId,
},
});
@@ -303,8 +303,6 @@ export const updateSurveyInternal = async (
try {
const surveyId = updatedSurvey.id;
let data: any = {};
const actionClasses = await getActionClasses(updatedSurvey.environmentId);
const currentSurvey = await getSurvey(surveyId);
@@ -327,132 +325,139 @@ export const updateSurveyInternal = async (
}
}
if (languages) {
// Process languages update logic here
// Extract currentLanguageIds and updatedLanguageIds
const currentLanguageIds = currentSurvey.languages
? currentSurvey.languages.map((l) => l.language.id)
: [];
const updatedLanguageIds =
languages.length > 0 ? updatedSurvey.languages.map((l) => l.language.id) : [];
const enabledLanguageIds = languages.map((language) => {
if (language.enabled) return language.language.id;
});
// Determine languages to add and remove
const languagesToAdd = updatedLanguageIds.filter((id) => !currentLanguageIds.includes(id));
const languagesToRemove = currentLanguageIds.filter((id) => !updatedLanguageIds.includes(id));
const defaultLanguageId = updatedSurvey.languages.find((l) => l.default)?.language.id;
// Prepare data for Prisma update
data.languages = {};
// Update existing languages for default value changes
data.languages.updateMany = currentSurvey.languages.map((surveyLanguage) => ({
where: { languageId: surveyLanguage.language.id },
data: {
default: surveyLanguage.language.id === defaultLanguageId,
enabled: enabledLanguageIds.includes(surveyLanguage.language.id),
},
}));
// Add new languages
if (languagesToAdd.length > 0) {
data.languages.create = languagesToAdd.map((languageId) => ({
languageId: languageId,
default: languageId === defaultLanguageId,
enabled: enabledLanguageIds.includes(languageId),
}));
}
// Remove languages no longer associated with the survey
if (languagesToRemove.length > 0) {
data.languages.deleteMany = languagesToRemove.map((languageId) => ({
languageId: languageId,
enabled: enabledLanguageIds.includes(languageId),
}));
}
const organization = await getOrganizationByEnvironmentId(environmentId);
if (!organization) {
throw new ResourceNotFoundError("Organization", null);
}
if (triggers) {
data.triggers = handleTriggerUpdates(triggers, currentSurvey.triggers, actionClasses);
}
const prismaSurvey = await prisma.$transaction(async (tx) => {
let data: any = {};
// if the survey body has type other than "app" but has a private segment, we delete that segment, and if it has a public segment, we disconnect from to the survey
if (segment) {
if (type === "app") {
// parse the segment filters:
const parsedFilters = ZSegmentFilters.safeParse(segment.filters);
if (!skipValidation && !parsedFilters.success) {
throw new InvalidInputError("Invalid user segment filters");
if (languages) {
// Process languages update logic here
// Extract currentLanguageIds and updatedLanguageIds
const currentLanguageIds = currentSurvey.languages
? currentSurvey.languages.map((l) => l.language.id)
: [];
const updatedLanguageIds =
languages.length > 0 ? updatedSurvey.languages.map((l) => l.language.id) : [];
const enabledLanguageIds = languages.map((language) => {
if (language.enabled) return language.language.id;
});
// Determine languages to add and remove
const languagesToAdd = updatedLanguageIds.filter((id) => !currentLanguageIds.includes(id));
const languagesToRemove = currentLanguageIds.filter((id) => !updatedLanguageIds.includes(id));
const defaultLanguageId = updatedSurvey.languages.find((l) => l.default)?.language.id;
// Prepare data for Prisma update
data.languages = {};
// Update existing languages for default value changes
data.languages.updateMany = currentSurvey.languages.map((surveyLanguage) => ({
where: { languageId: surveyLanguage.language.id },
data: {
default: surveyLanguage.language.id === defaultLanguageId,
enabled: enabledLanguageIds.includes(surveyLanguage.language.id),
},
}));
// Add new languages
if (languagesToAdd.length > 0) {
data.languages.create = languagesToAdd.map((languageId) => ({
languageId: languageId,
default: languageId === defaultLanguageId,
enabled: enabledLanguageIds.includes(languageId),
}));
}
try {
// update the segment:
let updatedInput: Prisma.SegmentUpdateInput = {
...segment,
surveys: undefined,
};
// Remove languages no longer associated with the survey
if (languagesToRemove.length > 0) {
data.languages.deleteMany = languagesToRemove.map((languageId) => ({
languageId: languageId,
enabled: enabledLanguageIds.includes(languageId),
}));
}
}
if (segment.surveys) {
updatedInput = {
...segment,
surveys: {
connect: segment.surveys.map((surveyId) => ({ id: surveyId })),
},
};
if (triggers) {
data.triggers = handleTriggerUpdates(triggers, currentSurvey.triggers, actionClasses);
}
// if the survey body has type other than "app" but has a private segment, we delete that segment, and if it has a public segment, we disconnect from to the survey
if (segment) {
if (type === "app") {
// parse the segment filters:
const parsedFilters = ZSegmentFilters.safeParse(segment.filters);
if (!skipValidation && !parsedFilters.success) {
throw new InvalidInputError("Invalid user segment filters");
}
await prisma.segment.update({
where: { id: segment.id },
data: updatedInput,
select: {
surveys: { select: { id: true } },
environmentId: true,
id: true,
},
});
} catch (error) {
logger.error(error, "Error updating survey");
throw new Error("Error updating survey");
}
} else {
if (segment.isPrivate) {
// disconnect the private segment first and then delete:
await prisma.segment.update({
where: { id: segment.id },
data: {
surveys: {
disconnect: {
id: surveyId,
try {
// update the segment:
let updatedInput: Prisma.SegmentUpdateInput = {
...segment,
surveys: undefined,
};
if (segment.surveys) {
updatedInput = {
...segment,
surveys: {
connect: segment.surveys.map((segmentSurveyId) => ({ id: segmentSurveyId })),
},
};
}
await tx.segment.update({
where: { id: segment.id },
data: updatedInput,
select: {
surveys: { select: { id: true } },
environmentId: true,
id: true,
},
});
} catch (error) {
logger.error(error, "Error updating survey");
throw new Error("Error updating survey");
}
} else {
if (segment.isPrivate) {
// disconnect the private segment first and then delete:
await tx.segment.update({
where: { id: segment.id },
data: {
surveys: {
disconnect: {
id: surveyId,
},
},
},
},
});
});
// delete the private segment:
await prisma.segment.delete({
where: {
id: segment.id,
},
});
} else {
await prisma.survey.update({
where: {
id: surveyId,
},
data: {
segment: {
disconnect: true,
// delete the private segment:
await tx.segment.delete({
where: {
id: segment.id,
},
},
});
});
} else {
await tx.survey.update({
where: {
id: surveyId,
},
data: {
segment: {
disconnect: true,
},
},
});
}
}
}
} else if (type === "app") {
if (!currentSurvey.segment) {
await prisma.survey.update({
} else if (type === "app" && !currentSurvey.segment) {
await tx.survey.update({
where: {
id: surveyId,
},
@@ -474,113 +479,104 @@ export const updateSurveyInternal = async (
id: environmentId,
},
},
project: {
connect: {
id: currentSurvey.projectId!,
},
},
},
},
},
},
});
}
}
if (followUps) {
// Separate follow-ups into categories based on deletion flag
const deletedFollowUps = followUps.filter((followUp) => followUp.deleted);
const nonDeletedFollowUps = followUps.filter((followUp) => !followUp.deleted);
if (followUps) {
// Separate follow-ups into categories based on deletion flag
const deletedFollowUps = followUps.filter((followUp) => followUp.deleted);
const nonDeletedFollowUps = followUps.filter((followUp) => !followUp.deleted);
// Get set of existing follow-up IDs from currentSurvey
const existingFollowUpIds = new Set(currentSurvey.followUps.map((f) => f.id));
// Get set of existing follow-up IDs from currentSurvey
const existingFollowUpIds = new Set(currentSurvey.followUps.map((f) => f.id));
// Separate non-deleted follow-ups into new and existing
const existingFollowUps = nonDeletedFollowUps.filter((followUp) =>
existingFollowUpIds.has(followUp.id)
);
const newFollowUps = nonDeletedFollowUps.filter((followUp) => !existingFollowUpIds.has(followUp.id));
// Separate non-deleted follow-ups into new and existing
const existingFollowUps = nonDeletedFollowUps.filter((followUp) =>
existingFollowUpIds.has(followUp.id)
);
const newFollowUps = nonDeletedFollowUps.filter((followUp) => !existingFollowUpIds.has(followUp.id));
data.followUps = {
// Update existing follow-ups
updateMany: existingFollowUps.map((followUp) => ({
where: {
id: followUp.id,
},
data: {
name: followUp.name,
trigger: followUp.trigger,
action: followUp.action,
},
})),
// Create new follow-ups
createMany:
newFollowUps.length > 0
? {
data: newFollowUps.map((followUp) => ({
data.followUps = {
// Update existing follow-ups
updateMany: existingFollowUps.map((followUp) => ({
where: {
id: followUp.id,
},
data: {
name: followUp.name,
trigger: followUp.trigger,
action: followUp.action,
},
})),
// Create new follow-ups
createMany:
newFollowUps.length > 0
? {
data: newFollowUps.map((followUp) => ({
id: followUp.id,
name: followUp.name,
trigger: followUp.trigger,
action: followUp.action,
})),
}
: undefined,
// Delete follow-ups marked as deleted, regardless of whether they exist in DB
deleteMany:
deletedFollowUps.length > 0
? deletedFollowUps.map((followUp) => ({
id: followUp.id,
name: followUp.name,
trigger: followUp.trigger,
action: followUp.action,
})),
}
: undefined,
// Delete follow-ups marked as deleted, regardless of whether they exist in DB
deleteMany:
deletedFollowUps.length > 0
? deletedFollowUps.map((followUp) => ({
id: followUp.id,
}))
: undefined,
};
}
}))
: undefined,
};
}
data.questions = questions.map((question) => {
const { isDraft, ...rest } = question;
return rest;
data.questions = questions.map((question) => {
const { isDraft, ...rest } = question;
return rest;
});
// Strip isDraft from elements before saving
if (updatedSurvey.blocks && updatedSurvey.blocks.length > 0) {
data.blocks = stripIsDraftFromBlocks(updatedSurvey.blocks);
}
surveyData.updatedAt = new Date();
data = {
...surveyData,
...data,
type,
};
delete data.createdBy;
const prismaSurvey = await tx.survey.update({
where: { id: surveyId },
data,
select: selectSurvey,
});
await enqueueSurveyLifecycleJobs({
tx,
survey: {
id: prismaSurvey.id,
environmentId: prismaSurvey.environmentId,
startsAt: prismaSurvey.startsAt,
endsAt: prismaSurvey.endsAt,
},
previousSurvey: {
startsAt: currentSurvey.startsAt ?? null,
endsAt: currentSurvey.endsAt ?? null,
},
});
return prismaSurvey;
});
// Strip isDraft from elements before saving
if (updatedSurvey.blocks && updatedSurvey.blocks.length > 0) {
data.blocks = stripIsDraftFromBlocks(updatedSurvey.blocks);
}
const organization = await getOrganizationByEnvironmentId(environmentId);
if (!organization) {
throw new ResourceNotFoundError("Organization", null);
}
surveyData.updatedAt = new Date();
data = {
...surveyData,
...data,
type,
};
delete data.createdBy;
const prismaSurvey = await prisma.survey.update({
where: { id: surveyId },
data,
select: selectSurvey,
});
let surveySegment: TSegment | null = null;
if (prismaSurvey.segment) {
surveySegment = {
...prismaSurvey.segment,
surveys: prismaSurvey.segment.surveys.map((survey) => survey.id),
};
}
const modifiedSurvey: TSurvey = {
...prismaSurvey, // Properties from prismaSurvey
displayPercentage: Number(prismaSurvey.displayPercentage) || null,
segment: surveySegment,
customHeadScriptsMode: prismaSurvey.customHeadScriptsMode,
};
return modifiedSurvey;
return transformPrismaSurvey<TSurvey>(prismaSurvey);
} catch (error) {
logger.error(error, "Error updating survey");
if (error instanceof Prisma.PrismaClientKnownRequestError) {
@@ -632,10 +628,7 @@ export const createSurvey = async (
};
}
const [organization, projectId] = await Promise.all([
getOrganizationByEnvironmentId(parsedEnvironmentId),
getProjectIdFromEnvironmentId(parsedEnvironmentId),
]);
const organization = await getOrganizationByEnvironmentId(parsedEnvironmentId);
if (!organization) {
throw new ResourceNotFoundError("Organization", null);
}
@@ -662,66 +655,69 @@ export const createSurvey = async (
data.blocks = validateMediaAndPrepareBlocks(data.blocks);
}
const survey = await prisma.survey.create({
data: {
...data,
environment: {
connect: {
id: parsedEnvironmentId,
},
},
project: {
connect: {
id: projectId,
},
},
},
select: selectSurvey,
});
// if the survey created is an "app" survey, we also create a private segment for it.
if (survey.type === "app") {
const newSegment = await prisma.segment.create({
const survey = await prisma.$transaction(async (tx) => {
const createdSurvey = await tx.survey.create({
data: {
title: survey.id,
filters: [],
isPrivate: true,
environmentId: parsedEnvironmentId,
projectId,
},
});
await prisma.survey.update({
where: {
id: survey.id,
},
data: {
segment: {
...data,
environment: {
connect: {
id: newSegment.id,
id: parsedEnvironmentId,
},
},
},
select: selectSurvey,
});
}
// TODO: Fix this, this happens because the survey type "web" is no longer in the zod types but its required in the schema for migration
// @ts-expect-error
const transformedSurvey: TSurvey = {
...survey,
...(survey.segment && {
segment: {
...survey.segment,
surveys: survey.segment.surveys.map((survey) => survey.id),
let createdOrUpdatedSurvey = createdSurvey;
// if the survey created is an "app" survey, we also create a private segment for it.
if (createdSurvey.type === "app") {
const newSegment = await tx.segment.create({
data: {
title: createdSurvey.id,
filters: [],
isPrivate: true,
environment: {
connect: {
id: parsedEnvironmentId,
},
},
},
});
createdOrUpdatedSurvey = await tx.survey.update({
where: {
id: createdSurvey.id,
},
data: {
segment: {
connect: {
id: newSegment.id,
},
},
},
select: selectSurvey,
});
}
await enqueueSurveyLifecycleJobs({
tx,
survey: {
id: createdOrUpdatedSurvey.id,
environmentId: createdOrUpdatedSurvey.environmentId,
startsAt: createdOrUpdatedSurvey.startsAt,
endsAt: createdOrUpdatedSurvey.endsAt,
},
}),
};
});
return createdOrUpdatedSurvey;
});
if (createdBy) {
await subscribeOrganizationMembersToSurveyResponses(survey.id, createdBy, organization.id);
}
return transformedSurvey;
return transformPrismaSurvey<TSurvey>(survey);
} catch (error) {
if (error instanceof Prisma.PrismaClientKnownRequestError) {
logger.error(error, "Error creating survey");

View File

@@ -4,7 +4,7 @@ import { prisma } from "@formbricks/database";
import { PrismaErrorType } from "@formbricks/database/types/error";
import { TTag } from "@formbricks/types/tags";
import { TagError } from "@/modules/projects/settings/types/tag";
import { createTag, getTag, getTagsByProjectId } from "./service";
import { createTag, getTag, getTagsByEnvironmentId } from "./service";
vi.mock("@formbricks/database", () => ({
prisma: {
@@ -21,8 +21,8 @@ describe("Tag Service", () => {
vi.clearAllMocks();
});
describe("getTagsByProjectId", () => {
test("should return tags for a given project ID", async () => {
describe("getTagsByEnvironmentId", () => {
test("should return tags for a given environment ID", async () => {
const mockTags: TTag[] = [
{
id: "tag1",
@@ -35,11 +35,11 @@ describe("Tag Service", () => {
vi.mocked(prisma.tag.findMany).mockResolvedValue(mockTags);
const result = await getTagsByProjectId("env1");
const result = await getTagsByEnvironmentId("env1");
expect(result).toEqual(mockTags);
expect(prisma.tag.findMany).toHaveBeenCalledWith({
where: {
projectId: "env1",
environmentId: "env1",
},
take: undefined,
skip: undefined,
@@ -59,11 +59,11 @@ describe("Tag Service", () => {
vi.mocked(prisma.tag.findMany).mockResolvedValue(mockTags);
const result = await getTagsByProjectId("env1", 1);
const result = await getTagsByEnvironmentId("env1", 1);
expect(result).toEqual(mockTags);
expect(prisma.tag.findMany).toHaveBeenCalledWith({
where: {
projectId: "env1",
environmentId: "env1",
},
take: 30,
skip: 0,

View File

@@ -6,28 +6,29 @@ import { PrismaErrorType } from "@formbricks/database/types/error";
import { ZId, ZOptionalNumber, ZString } from "@formbricks/types/common";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { TTag } from "@formbricks/types/tags";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { TagError } from "@/modules/projects/settings/types/tag";
import { ITEMS_PER_PAGE } from "../constants";
import { validateInputs } from "../utils/validate";
export const getTagsByProjectId = reactCache(async (projectId: string, page?: number): Promise<TTag[]> => {
validateInputs([projectId, ZId], [page, ZOptionalNumber]);
export const getTagsByEnvironmentId = reactCache(
async (environmentId: string, page?: number): Promise<TTag[]> => {
validateInputs([environmentId, ZId], [page, ZOptionalNumber]);
try {
const tags = await prisma.tag.findMany({
where: {
projectId,
},
take: page ? ITEMS_PER_PAGE : undefined,
skip: page ? ITEMS_PER_PAGE * (page - 1) : undefined,
});
try {
const tags = await prisma.tag.findMany({
where: {
environmentId,
},
take: page ? ITEMS_PER_PAGE : undefined,
skip: page ? ITEMS_PER_PAGE * (page - 1) : undefined,
});
return tags;
} catch (error) {
throw error;
return tags;
} catch (error) {
throw error;
}
}
});
);
export const getTag = reactCache(async (id: string): Promise<TTag | null> => {
validateInputs([id, ZId]);
@@ -51,14 +52,11 @@ export const createTag = async (
): Promise<Result<TTag, { code: TagError; message: string; meta?: Record<string, string> }>> => {
validateInputs([environmentId, ZId], [name, ZString]);
const projectId = await getProjectIdFromEnvironmentId(environmentId);
try {
const tag = await prisma.tag.create({
data: {
name,
environmentId,
projectId,
},
});

View File

@@ -3,7 +3,6 @@ import { cache as reactCache } from "react";
import { prisma } from "@formbricks/database";
import { PrismaErrorType } from "@formbricks/database/types/error";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { getEnvironment } from "@/lib/environment/service";
import { formatSnakeCaseToTitleCase } from "@/lib/utils/safe-identifier";
import { getContactAttributeKeysQuery } from "@/modules/api/v2/management/contact-attribute-keys/lib/utils";
import {
@@ -46,14 +45,6 @@ export const createContactAttributeKey = async (
): Promise<Result<ContactAttributeKey, ApiErrorResponseV2>> => {
const { environmentId, name, description, key, dataType } = contactAttributeKey;
const environment = await getEnvironment(environmentId);
if (!environment) {
return err({
type: "not_found",
details: [{ field: "environment", issue: "not found" }],
});
}
try {
const prismaData: Prisma.ContactAttributeKeyCreateInput = {
environment: {
@@ -61,11 +52,6 @@ export const createContactAttributeKey = async (
id: environmentId,
},
},
project: {
connect: {
id: environment.projectId,
},
},
name: name ?? formatSnakeCaseToTitleCase(key),
description,
key,

View File

@@ -58,7 +58,6 @@ export const getResponseForPipeline = async (
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -184,7 +184,6 @@ describe("Response Lib", () => {
updatedAt: true,
name: true,
environmentId: true,
projectId: true,
},
},
},

View File

@@ -31,6 +31,8 @@ export const ZSurveyInput = ZSurveyWithoutQuestionType.pick({
environmentId: true,
questions: true,
blocks: true,
startsAt: true,
endsAt: true,
endings: true,
hiddenFields: true,
variables: true,
@@ -59,6 +61,8 @@ export const ZSurveyInput = ZSurveyWithoutQuestionType.pick({
displayLimit: true,
autoClose: true,
autoComplete: true,
startsAt: true,
endsAt: true,
surveyClosedMessage: true,
styling: true,
projectOverwrites: true,

View File

@@ -17,7 +17,6 @@ export const ZWebhookUpdateSchema = ZWebhook.omit({
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
secret: true,
}).meta({
id: "webhookUpdate",

View File

@@ -3,7 +3,6 @@ import { prisma } from "@formbricks/database";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { InvalidInputError } from "@formbricks/types/errors";
import { generateWebhookSecret } from "@/lib/crypto";
import { getEnvironment } from "@/lib/environment/service";
import { validateWebhookUrl } from "@/lib/utils/validate-webhook-url";
import { getWebhooksQuery } from "@/modules/api/v2/management/webhooks/lib/utils";
import { TGetWebhooksFilter, TWebhookInput } from "@/modules/api/v2/management/webhooks/types/webhooks";
@@ -69,14 +68,6 @@ export const createWebhook = async (webhook: TWebhookInput): Promise<Result<Webh
});
}
const environment = await getEnvironment(environmentId);
if (!environment) {
return err({
type: "not_found",
details: [{ field: "environment", issue: "not_found" }],
});
}
try {
const secret = generateWebhookSecret();
@@ -86,11 +77,6 @@ export const createWebhook = async (webhook: TWebhookInput): Promise<Result<Webh
id: environmentId,
},
},
project: {
connect: {
id: environment.projectId,
},
},
name,
url,
source,

View File

@@ -21,21 +21,16 @@ interface ActivitySectionProps {
}
export const ActivitySection = async ({ environment, contactId, environmentTags }: ActivitySectionProps) => {
const [responses, displays, project] = await Promise.all([
const [responses, displays] = await Promise.all([
getResponsesByContactId(contactId),
getDisplaysByContactId(contactId),
getProjectByEnvironmentId(environment.id),
]);
if (!project) {
throw new ResourceNotFoundError("Project", null);
}
const allSurveyIds = [
...new Set([...(responses?.map((r) => r.surveyId) || []), ...displays.map((d) => d.surveyId)]),
];
const surveys: TSurvey[] = allSurveyIds.length === 0 ? [] : ((await getSurveys(project.id)) ?? []);
const surveys: TSurvey[] = allSurveyIds.length === 0 ? [] : ((await getSurveys(environment.id)) ?? []);
const session = await getServerSession(authOptions);
const t = await getTranslate();
@@ -53,6 +48,11 @@ export const ActivitySection = async ({ environment, contactId, environmentTags
throw new Error(t("environments.contacts.no_responses_found"));
}
const project = await getProjectByEnvironmentId(environment.id);
if (!project) {
throw new ResourceNotFoundError(t("common.workspace"), null);
}
const projectPermission = await getProjectPermissionByUserId(session.user.id, project.id);
const locale = user.locale ?? DEFAULT_LOCALE;

View File

@@ -1,6 +1,5 @@
import { ResourceNotFoundError } from "@formbricks/types/errors";
import { getTagsByProjectId } from "@/lib/tag/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { getTagsByEnvironmentId } from "@/lib/tag/service";
import { getTranslate } from "@/lingodotdev/server";
import { AttributesSection } from "@/modules/ee/contacts/[contactId]/components/attributes-section";
import { ContactControlBar } from "@/modules/ee/contacts/[contactId]/components/contact-control-bar";
@@ -23,15 +22,13 @@ export const SingleContactPage = async (props: {
const { environment, isReadOnly, organization } = await getEnvironmentAuth(params.environmentId);
const projectId = await getProjectIdFromEnvironmentId(params.environmentId);
const [environmentTags, contact, publishedLinkSurveys, attributesWithKeyInfo, allAttributeKeys] =
await Promise.all([
getTagsByProjectId(projectId),
getTagsByEnvironmentId(params.environmentId),
getContact(params.contactId),
getPublishedLinkSurveys(projectId),
getPublishedLinkSurveys(params.environmentId),
getContactAttributesWithKeyInfo(params.contactId),
getContactAttributeKeys(projectId),
getContactAttributeKeys(params.environmentId),
]);
if (!contact) {

View File

@@ -30,8 +30,6 @@ const ZGetContactsAction = z.object({
export const getContactsAction = authenticatedActionClient
.inputSchema(ZGetContactsAction)
.action(async ({ ctx, parsedInput }) => {
const projectId = await getProjectIdFromEnvironmentId(parsedInput.environmentId);
await checkAuthorizationUpdated({
userId: ctx.user.id,
organizationId: await getOrganizationIdFromEnvironmentId(parsedInput.environmentId),
@@ -43,12 +41,12 @@ export const getContactsAction = authenticatedActionClient
{
type: "projectTeam",
minPermission: "read",
projectId,
projectId: await getProjectIdFromEnvironmentId(parsedInput.environmentId),
},
],
});
return getContacts(projectId, parsedInput.offset, parsedInput.searchValue);
return getContacts(parsedInput.environmentId, parsedInput.offset, parsedInput.searchValue);
});
const ZContactDeleteAction = z.object({

View File

@@ -15,7 +15,7 @@ const getEnvironment = async (environmentId: string) =>
async () => {
return prisma.environment.findUnique({
where: { id: environmentId },
select: { id: true, type: true, projectId: true },
select: { id: true, type: true },
});
},
createCacheKey.environment.config(environmentId),
@@ -63,15 +63,12 @@ const getContactWithFullData = async (environmentId: string, userId: string) =>
/**
* Creates contact with comprehensive data structure
*/
const createContact = async (environmentId: string, projectId: string, userId: string) => {
const createContact = async (environmentId: string, userId: string) => {
return prisma.contact.create({
data: {
environment: {
connect: { id: environmentId },
},
project: {
connect: { id: projectId },
},
attributes: {
create: [
{
@@ -167,7 +164,7 @@ export const updateUser = async (
// Create contact if doesn't exist
if (!contactData) {
contactData = await createContact(environmentId, environment.projectId, userId);
contactData = await createContact(environmentId, userId);
}
// Process contact attributes efficiently (single pass)

View File

@@ -5,7 +5,6 @@ import { PrismaErrorType } from "@formbricks/database/types/error";
import { TContactAttributeKey } from "@formbricks/types/contact-attribute-key";
import { DatabaseError, OperationNotAllowedError } from "@formbricks/types/errors";
import { MAX_ATTRIBUTE_CLASSES_PER_ENVIRONMENT } from "@/lib/constants";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { formatSnakeCaseToTitleCase } from "@/lib/utils/safe-identifier";
import { TContactAttributeKeyCreateInput } from "@/modules/ee/contacts/api/v1/management/contact-attribute-keys/[contactAttributeKeyId]/types/contact-attribute-keys";
@@ -30,8 +29,6 @@ export const createContactAttributeKey = async (
environmentId: string,
data: TContactAttributeKeyCreateInput
): Promise<TContactAttributeKey | null> => {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const contactAttributeKeysCount = await prisma.contactAttributeKey.count({
where: {
environmentId,
@@ -57,11 +54,6 @@ export const createContactAttributeKey = async (
id: environmentId,
},
},
project: {
connect: {
id: projectId,
},
},
},
});

View File

@@ -4,7 +4,6 @@ import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
import { TContactAttributeDataType } from "@formbricks/types/contact-attribute-key";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { getEnvironment } from "@/lib/environment/service";
import { isSafeIdentifier } from "@/lib/utils/safe-identifier";
import { ApiErrorResponseV2 } from "@/modules/api/v2/types/api-error";
import { prepareAttributeColumnsForStorage } from "@/modules/ee/contacts/lib/attribute-storage";
@@ -407,7 +406,6 @@ const upsertAttributeKeysInBatches = async (
tx: Prisma.TransactionClient,
keysToUpsert: Map<string, { key: string; name: string; dataType: TContactAttributeDataType }>,
environmentId: string,
projectId: string,
attributeKeyMap: Record<string, string>
): Promise<void> => {
const keysArray = Array.from(keysToUpsert.values());
@@ -416,18 +414,17 @@ const upsertAttributeKeysInBatches = async (
const batch = keysArray.slice(i, i + BATCH_SIZE);
const upsertedKeys = await tx.$queryRaw<{ id: string; key: string }[]>`
INSERT INTO "ContactAttributeKey" ("id", "key", "name", "environmentId", "projectId", "dataType", "created_at", "updated_at")
SELECT
INSERT INTO "ContactAttributeKey" ("id", "key", "name", "environmentId", "dataType", "created_at", "updated_at")
SELECT
unnest(${Prisma.sql`ARRAY[${batch.map(() => createId())}]`}),
unnest(${Prisma.sql`ARRAY[${batch.map((k) => k.key)}]`}),
unnest(${Prisma.sql`ARRAY[${batch.map((k) => k.name)}]`}),
${environmentId},
${projectId},
unnest(${Prisma.sql`ARRAY[${batch.map((k) => k.dataType)}]`}::text[]::"ContactAttributeDataType"[]),
NOW(),
NOW()
ON CONFLICT ("key", "environmentId")
DO UPDATE SET
ON CONFLICT ("key", "environmentId")
DO UPDATE SET
"name" = EXCLUDED."name",
"updated_at" = NOW()
RETURNING "id", "key"
@@ -493,16 +490,6 @@ export const upsertBulkContacts = async (
>
> => {
const contactIdxWithConflictingUserIds: number[] = [];
const environment = await getEnvironment(environmentId);
if (!environment) {
return err({
type: "not_found",
details: [{ field: "environment", issue: "not found" }],
});
}
const { projectId } = environment;
const { userIdsInContacts, attributeKeys } = extractContactMetadata(contacts);
const [existingUserIds, existingContactsByEmail, existingAttributeKeys] = await Promise.all([
@@ -637,11 +624,11 @@ export const upsertBulkContacts = async (
// Upsert attribute keys in batches
if (keysToUpsert.size > 0) {
await upsertAttributeKeysInBatches(tx, keysToUpsert, environmentId, projectId, attributeKeyMap);
await upsertAttributeKeysInBatches(tx, keysToUpsert, environmentId, attributeKeyMap);
}
// Create new contacts
const newContacts = contactsToCreate.map(() => ({ id: createId(), environmentId, projectId }));
const newContacts = contactsToCreate.map(() => ({ id: createId(), environmentId }));
if (newContacts.length > 0) {
await tx.contact.createMany({ data: newContacts });

View File

@@ -1,6 +1,5 @@
import { prisma } from "@formbricks/database";
import { Result, err, ok } from "@formbricks/types/error-handlers";
import { getEnvironment } from "@/lib/environment/service";
import { ApiErrorResponseV2 } from "@/modules/api/v2/types/api-error";
import { readAttributeValue } from "@/modules/ee/contacts/lib/attribute-storage";
import { TContactCreateRequest, TContactResponse } from "@/modules/ee/contacts/types/contact";
@@ -19,14 +18,6 @@ export const createContact = async (
});
}
const environment = await getEnvironment(environmentId);
if (!environment) {
return err({
type: "not_found",
details: [{ field: "environment", issue: "not found" }],
});
}
// Extract userId if present
const userId = attributes.userId;
@@ -107,7 +98,6 @@ export const createContact = async (
const result = await prisma.contact.create({
data: {
environmentId,
projectId: environment.projectId,
attributes: {
createMany: {
data: attributeData,

View File

@@ -1,4 +1,3 @@
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { getLocale } from "@/lingodotdev/language";
import { getTranslate } from "@/lingodotdev/server";
import { ContactsPageLayout } from "@/modules/ee/contacts/components/contacts-page-layout";
@@ -16,10 +15,9 @@ export const AttributesPage = async ({
const params = await paramsProps;
const locale = await getLocale();
const t = await getTranslate();
const projectId = await getProjectIdFromEnvironmentId(params.environmentId);
const [{ isReadOnly, organization }, contactAttributeKeys] = await Promise.all([
getEnvironmentAuth(params.environmentId),
getContactAttributeKeys(projectId),
getContactAttributeKeys(params.environmentId),
]);
const isContactsEnabled = await getIsContactsEnabled(organization.id);

View File

@@ -4,7 +4,6 @@ import { ZId, ZString } from "@formbricks/types/common";
import { TContactAttributesInput, ZContactAttributesInput } from "@formbricks/types/contact-attribute";
import { TContactAttributeKey } from "@formbricks/types/contact-attribute-key";
import { MAX_ATTRIBUTE_CLASSES_PER_ENVIRONMENT } from "@/lib/constants";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { formatSnakeCaseToTitleCase, isSafeIdentifier } from "@/lib/utils/safe-identifier";
import { validateInputs } from "@/lib/utils/validate";
import { prepareNewSDKAttributeForStorage } from "@/modules/ee/contacts/lib/attribute-storage";
@@ -146,20 +145,14 @@ export const updateAttributes = async (
? null
: String(contactAttributesParam.userId);
// Fetch current attributes, contact attribute keys, environment, and email/userId checks in parallel
const [
currentAttributes,
contactAttributeKeys,
projectId,
existingEmailAttribute,
existingUserIdAttribute,
] = await Promise.all([
getContactAttributes(contactId),
getContactAttributeKeys(environmentId),
getProjectIdFromEnvironmentId(environmentId),
emailValue ? hasEmailAttribute(emailValue, environmentId, contactId) : Promise.resolve(null),
userIdValue ? hasUserIdAttribute(userIdValue, environmentId, contactId) : Promise.resolve(null),
]);
// Fetch current attributes, contact attribute keys, and email/userId checks in parallel
const [currentAttributes, contactAttributeKeys, existingEmailAttribute, existingUserIdAttribute] =
await Promise.all([
getContactAttributes(contactId),
getContactAttributeKeys(environmentId),
emailValue ? hasEmailAttribute(emailValue, environmentId, contactId) : Promise.resolve(null),
userIdValue ? hasUserIdAttribute(userIdValue, environmentId, contactId) : Promise.resolve(null),
]);
// Process email and userId existence early
const emailExists = !!existingEmailAttribute;
@@ -367,7 +360,6 @@ export const updateAttributes = async (
type: "custom",
dataType,
environment: { connect: { id: environmentId } },
project: { connect: { id: projectId } },
attributes: {
create: {
contactId,

View File

@@ -3,13 +3,12 @@ import { prisma } from "@formbricks/database";
import { PrismaErrorType } from "@formbricks/database/types/error";
import { TContactAttributeDataType, TContactAttributeKey } from "@formbricks/types/contact-attribute-key";
import { InvalidInputError, OperationNotAllowedError, ResourceNotFoundError } from "@formbricks/types/errors";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { formatSnakeCaseToTitleCase } from "@/lib/utils/safe-identifier";
export const getContactAttributeKeys = reactCache(
async (projectId: string): Promise<TContactAttributeKey[]> => {
async (environmentId: string): Promise<TContactAttributeKey[]> => {
return await prisma.contactAttributeKey.findMany({
where: { projectId },
where: { environmentId },
});
}
);
@@ -32,8 +31,6 @@ export const createContactAttributeKey = async (data: {
description?: string;
dataType?: TContactAttributeDataType;
}): Promise<TContactAttributeKey> => {
const projectId = await getProjectIdFromEnvironmentId(data.environmentId);
try {
const contactAttributeKey = await prisma.contactAttributeKey.create({
data: {
@@ -41,7 +38,6 @@ export const createContactAttributeKey = async (data: {
name: data.name ?? formatSnakeCaseToTitleCase(data.key),
description: data.description ?? null,
environmentId: data.environmentId,
projectId,
type: "custom",
...(data.dataType && { dataType: data.dataType }),
},

View File

@@ -7,7 +7,6 @@ import { ZId, ZOptionalNumber, ZOptionalString } from "@formbricks/types/common"
import { TContactAttributeDataType } from "@formbricks/types/contact-attribute-key";
import { DatabaseError, ValidationError } from "@formbricks/types/errors";
import { ITEMS_PER_PAGE } from "@/lib/constants";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { formatSnakeCaseToTitleCase, isSafeIdentifier } from "@/lib/utils/safe-identifier";
import { validateInputs } from "@/lib/utils/validate";
import { prepareAttributeColumnsForStorage } from "@/modules/ee/contacts/lib/attribute-storage";
@@ -99,7 +98,6 @@ const selectContact = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
attributes: {
select: {
value: true,
@@ -116,8 +114,8 @@ const selectContact = {
},
} satisfies Prisma.ContactSelect;
export const buildContactWhereClause = (projectId: string, search?: string): Prisma.ContactWhereInput => {
const whereClause: Prisma.ContactWhereInput = { projectId };
export const buildContactWhereClause = (environmentId: string, search?: string): Prisma.ContactWhereInput => {
const whereClause: Prisma.ContactWhereInput = { environmentId };
if (search) {
whereClause.OR = [
@@ -144,12 +142,12 @@ export const buildContactWhereClause = (projectId: string, search?: string): Pri
};
export const getContacts = reactCache(
async (projectId: string, offset?: number, searchValue?: string): Promise<TContactWithAttributes[]> => {
validateInputs([projectId, ZId], [offset, ZOptionalNumber], [searchValue, ZOptionalString]);
async (environmentId: string, offset?: number, searchValue?: string): Promise<TContactWithAttributes[]> => {
validateInputs([environmentId, ZId], [offset, ZOptionalNumber], [searchValue, ZOptionalString]);
try {
const contacts = await prisma.contact.findMany({
where: buildContactWhereClause(projectId, searchValue),
where: buildContactWhereClause(environmentId, searchValue),
select: selectContact,
take: ITEMS_PER_PAGE,
skip: offset,
@@ -400,8 +398,7 @@ const createMissingAttributeKeys = async (
lowercaseToActualKeyMap: Map<string, string>,
attributeKeyMap: Map<string, string>,
attributeTypeMap: Map<string, TAttributeTypeInfo>,
environmentId: string,
projectId: string
environmentId: string
): Promise<void> => {
const missingKeys = Array.from(csvKeys).filter((key) => !lowercaseToActualKeyMap.has(key.toLowerCase()));
@@ -430,7 +427,6 @@ const createMissingAttributeKeys = async (
name: formatSnakeCaseToTitleCase(key),
dataType: attributeTypeMap.get(key)?.dataType ?? "string",
environmentId,
projectId,
})),
skipDuplicates: true,
});
@@ -465,7 +461,6 @@ type TCsvProcessingContext = {
attributeTypeMap: Map<string, TAttributeTypeInfo>;
duplicateContactsAction: "skip" | "update" | "overwrite";
environmentId: string;
projectId: string;
};
/**
@@ -483,7 +478,6 @@ const processCsvRecord = async (
attributeTypeMap,
duplicateContactsAction,
environmentId,
projectId,
} = ctx;
// Map CSV keys to actual DB keys (case-insensitive matching)
const mappedRecord: Record<string, string> = {};
@@ -506,7 +500,6 @@ const processCsvRecord = async (
return prisma.contact.create({
data: {
environmentId,
projectId,
attributes: {
create: createAttributeConnections(mappedRecord, environmentId, attributeTypeMap),
},
@@ -617,17 +610,14 @@ export const createContactsFromCSV = async (
);
try {
// Step 1: Resolve projectId from environment
const projectId = await getProjectIdFromEnvironmentId(environmentId);
// Step 2: Extract metadata from CSV data
// Step 1: Extract metadata from CSV data
const { csvEmails, csvUserIds, csvKeys, attributeValuesByKey } = extractCsvMetadata(csvData);
// Step 3: Fetch existing data from database
// Step 2: Fetch existing data from database
const [existingContactsByEmail, existingUserIds, existingAttributeKeys] = await Promise.all([
prisma.contact.findMany({
where: {
projectId,
environmentId,
attributes: { some: { attributeKey: { key: "email" }, value: { in: csvEmails } } },
},
select: {
@@ -636,11 +626,11 @@ export const createContactsFromCSV = async (
},
}),
prisma.contactAttribute.findMany({
where: { attributeKey: { key: "userId", projectId }, value: { in: csvUserIds } },
where: { attributeKey: { key: "userId", environmentId }, value: { in: csvUserIds } },
select: { value: true, contactId: true },
}),
prisma.contactAttributeKey.findMany({
where: { projectId },
where: { environmentId },
select: { key: true, id: true, dataType: true },
}),
]);
@@ -678,8 +668,7 @@ export const createContactsFromCSV = async (
lowercaseToActualKeyMap,
attributeKeyMap,
attributeTypeMap,
environmentId,
projectId
environmentId
);
// Step 6: Process each CSV record
@@ -691,7 +680,6 @@ export const createContactsFromCSV = async (
attributeTypeMap,
duplicateContactsAction,
environmentId,
projectId,
};
const CHUNK_SIZE = 50;

View File

@@ -10,10 +10,10 @@ export interface PublishedLinkSurvey {
}
export const getPublishedLinkSurveys = reactCache(
async (projectId: string): Promise<PublishedLinkSurvey[]> => {
async (environmentId: string): Promise<PublishedLinkSurvey[]> => {
try {
const surveys = await prisma.survey.findMany({
where: { projectId, status: "inProgress", type: "link" },
where: { environmentId, status: "inProgress", type: "link" },
select: {
id: true,
name: true,

View File

@@ -1,5 +1,4 @@
import { ITEMS_PER_PAGE } from "@/lib/constants";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { getTranslate } from "@/lingodotdev/server";
import { ContactsPageLayout } from "@/modules/ee/contacts/components/contacts-page-layout";
import { UploadContactsCSVButton } from "@/modules/ee/contacts/components/upload-contacts-button";
@@ -20,14 +19,12 @@ export const ContactsPage = async ({
const t = await getTranslate();
const projectId = await getProjectIdFromEnvironmentId(params.environmentId);
const isContactsEnabled = await getIsContactsEnabled(organization.id);
const isQuotasAllowed = await getIsQuotasEnabled(organization.id);
const contactAttributeKeys = await getContactAttributeKeys(projectId);
const initialContacts = await getContacts(projectId, 0);
const contactAttributeKeys = await getContactAttributeKeys(params.environmentId);
const initialContacts = await getContacts(params.environmentId, 0);
const AddContactsButton = (
<UploadContactsCSVButton environmentId={environment.id} contactAttributeKeys={contactAttributeKeys} />

View File

@@ -45,7 +45,6 @@ export function CreateSegmentModal({
isPrivate: false,
filters: [],
environmentId,
projectId: null,
id: "",
surveys: [],
createdAt: new Date(),

View File

@@ -32,7 +32,6 @@ import {
ZSegmentUpdateInput,
} from "@formbricks/types/segment";
import { getSurvey } from "@/lib/survey/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { validateInputs } from "@/lib/utils/validate";
import { isResourceFilter, searchForAttributeKeyInSegment } from "@/modules/ee/contacts/segments/lib/utils";
import { isSameDay, subtractTimeUnit } from "./date-utils";
@@ -56,7 +55,6 @@ export const selectSegment = {
title: true,
description: true,
environmentId: true,
projectId: true,
filters: true,
isPrivate: true,
surveys: {
@@ -109,12 +107,12 @@ export const getSegment = reactCache(async (segmentId: string): Promise<TSegment
}
});
export const getSegments = reactCache(async (projectId: string): Promise<TSegmentWithSurveyRefs[]> => {
validateInputs([projectId, ZId]);
export const getSegments = reactCache(async (environmentId: string): Promise<TSegmentWithSurveyRefs[]> => {
validateInputs([environmentId, ZId]);
try {
const segments = await prisma.segment.findMany({
where: {
projectId,
environmentId,
},
select: selectSegment,
});
@@ -140,8 +138,6 @@ export const createSegment = async (segmentCreateInput: TSegmentCreateInput): Pr
const surveyConnect = surveyId ? { surveys: { connect: { id: surveyId } } } : {};
const projectId = await getProjectIdFromEnvironmentId(environmentId);
try {
// Private segments use upsert because auto-save may have already created a
// default (empty-filter) segment via connectOrCreate before the user publishes.
@@ -160,13 +156,11 @@ export const createSegment = async (segmentCreateInput: TSegmentCreateInput): Pr
description,
isPrivate,
filters,
projectId,
...surveyConnect,
},
update: {
description,
filters,
projectId,
...surveyConnect,
},
select: selectSegment,
@@ -182,7 +176,6 @@ export const createSegment = async (segmentCreateInput: TSegmentCreateInput): Pr
description,
isPrivate,
filters,
projectId,
...surveyConnect,
},
select: selectSegment,
@@ -240,7 +233,6 @@ export const cloneSegment = async (segmentId: string, surveyId: string): Promise
isPrivate: segment.isPrivate,
environmentId: segment.environmentId,
filters: segment.filters,
projectId: segment.projectId,
surveys: {
connect: {
id: surveyId,
@@ -335,8 +327,7 @@ export const resetSegmentInSurvey = async (surveyId: string): Promise<TSegment>
isPrivate: true,
filters: [],
surveys: { connect: { id: surveyId } },
environmentId: survey.environmentId,
projectId: survey.projectId,
environment: { connect: { id: survey?.environmentId } },
},
select: selectSegment,
});
@@ -394,13 +385,13 @@ export const updateSegment = async (segmentId: string, data: TSegmentUpdateInput
}
};
export const getSegmentsByAttributeKey = reactCache(async (projectId: string, attributeKey: string) => {
validateInputs([projectId, ZId], [attributeKey, ZString]);
export const getSegmentsByAttributeKey = reactCache(async (environmentId: string, attributeKey: string) => {
validateInputs([environmentId, ZId], [attributeKey, ZString]);
try {
const segments = await prisma.segment.findMany({
where: {
projectId,
environmentId,
},
select: selectSegment,
});

View File

@@ -1,4 +1,3 @@
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { getTranslate } from "@/lingodotdev/server";
import { ContactsPageLayout } from "@/modules/ee/contacts/components/contacts-page-layout";
import { getContactAttributeKeys } from "@/modules/ee/contacts/lib/contact-attribute-keys";
@@ -18,11 +17,9 @@ export const SegmentsPage = async ({
const { isReadOnly, organization } = await getEnvironmentAuth(params.environmentId);
const projectId = await getProjectIdFromEnvironmentId(params.environmentId);
const [segments, contactAttributeKeys] = await Promise.all([
getSegments(projectId),
getContactAttributeKeys(projectId),
getSegments(params.environmentId),
getContactAttributeKeys(params.environmentId),
]);
const isContactsEnabled = await getIsContactsEnabled(organization.id);

View File

@@ -27,7 +27,6 @@ export const WebhookTable = ({
const { t } = useTranslation();
const [activeWebhook, setActiveWebhook] = useState<Webhook>({
environmentId: environment.id,
projectId: null,
id: "",
name: "",
url: "",

View File

@@ -10,7 +10,6 @@ import {
UnknownError,
} from "@formbricks/types/errors";
import { generateStandardWebhookSignature, generateWebhookSecret } from "@/lib/crypto";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { validateInputs } from "@/lib/utils/validate";
import { validateWebhookUrl } from "@/lib/utils/validate-webhook-url";
import { getTranslate } from "@/lingodotdev/server";
@@ -106,8 +105,6 @@ export const createWebhook = async (
): Promise<Webhook> => {
await validateWebhookUrl(webhookInput.url);
const projectId = await getProjectIdFromEnvironmentId(environmentId);
try {
if (isDiscordWebhook(webhookInput.url)) {
throw new UnknownError("Discord webhooks are currently not supported.");
@@ -120,8 +117,11 @@ export const createWebhook = async (
...webhookInput,
surveyIds: webhookInput.surveyIds || [],
secret: signingSecret,
environmentId,
projectId,
environment: {
connect: {
id: environmentId,
},
},
},
});
@@ -139,13 +139,13 @@ export const createWebhook = async (
}
};
export const getWebhooks = async (projectId: string): Promise<Webhook[]> => {
validateInputs([projectId, ZId]);
export const getWebhooks = async (environmentId: string): Promise<Webhook[]> => {
validateInputs([environmentId, ZId]);
try {
const webhooks = await prisma.webhook.findMany({
where: {
projectId,
environmentId: environmentId,
},
orderBy: {
createdAt: "desc",

View File

@@ -16,11 +16,9 @@ export const WebhooksPage = async (props: { params: Promise<{ environmentId: str
const { isReadOnly, environment } = await getEnvironmentAuth(params.environmentId);
const projectId = environment.projectId;
const [webhooks, surveys] = await Promise.all([
getWebhooks(projectId),
getSurveys(projectId, 200), // HOTFIX: not getting all surveys for now since it's maxing out the prisma accelerate limit
getWebhooks(params.environmentId),
getSurveys(params.environmentId, 200), // HOTFIX: not getting all surveys for now since it's maxing out the prisma accelerate limit
]);
const renderAddWebhookButton = () => <AddWebhookButton environment={environment} surveys={surveys} />;

View File

@@ -1,7 +1,6 @@
import { SettingsCard } from "@/app/(app)/environments/[environmentId]/settings/components/SettingsCard";
import { getTagsByProjectId } from "@/lib/tag/service";
import { getTagsByEnvironmentId } from "@/lib/tag/service";
import { getTagsOnResponsesCount } from "@/lib/tagOnResponse/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { getTranslate } from "@/lingodotdev/server";
import { getEnvironmentAuth } from "@/modules/environments/lib/utils";
import { ProjectConfigNavigation } from "@/modules/projects/settings/components/project-config-navigation";
@@ -15,10 +14,8 @@ export const TagsPage = async (props: { params: Promise<{ environmentId: string
const { isReadOnly } = await getEnvironmentAuth(params.environmentId);
const projectId = await getProjectIdFromEnvironmentId(params.environmentId);
const [tags, environmentTagsCount] = await Promise.all([
getTagsByProjectId(projectId),
getTagsByEnvironmentId(params.environmentId),
getTagsOnResponsesCount(params.environmentId),
]);

View File

@@ -1,284 +1,43 @@
import { ActionClass, Prisma } from "@prisma/client";
import "@testing-library/jest-dom/vitest";
import { beforeEach, describe, expect, test, vi } from "vitest";
import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSurveyCreateInput } from "@formbricks/types/surveys/types";
import { DatabaseError } from "@formbricks/types/errors";
import { TSurvey, TSurveyCreateInput } from "@formbricks/types/surveys/types";
import {
getOrganizationByEnvironmentId,
subscribeOrganizationMembersToSurveyResponses,
} from "@/lib/organization/service";
import { getActionClasses } from "@/modules/survey/lib/action-class";
import { selectSurvey } from "@/modules/survey/lib/survey";
createSurvey as createSurveyFromService,
handleTriggerUpdates as handleTriggerUpdatesFromService,
} from "@/lib/survey/service";
import { createSurvey, handleTriggerUpdates } from "./survey";
// Mock dependencies
vi.mock("@/lib/survey/utils", () => ({
checkForInvalidImagesInQuestions: vi.fn(),
vi.mock("@/lib/survey/service", () => ({
createSurvey: vi.fn(),
handleTriggerUpdates: vi.fn(),
}));
vi.mock("@/lib/organization/service", () => ({
subscribeOrganizationMembersToSurveyResponses: vi.fn(),
getOrganizationByEnvironmentId: vi.fn(),
}));
describe("template list survey wrappers", () => {
const environmentId = "env_1";
const surveyBody = { name: "Survey" } as TSurveyCreateInput;
const createdSurvey = { id: "survey_1" } as TSurvey;
vi.mock("@/modules/survey/lib/action-class", () => ({
getActionClasses: vi.fn(),
}));
vi.mock("@/modules/survey/lib/survey", () => ({
selectSurvey: {
id: true,
createdAt: true,
updatedAt: true,
name: true,
type: true,
status: true,
environmentId: true,
segment: true,
},
}));
vi.mock("@formbricks/database", () => ({
prisma: {
survey: {
create: vi.fn(),
update: vi.fn(),
},
segment: {
create: vi.fn(),
},
},
}));
vi.mock("@formbricks/logger", () => ({
logger: {
error: vi.fn(),
},
}));
describe("survey module", () => {
beforeEach(() => {
vi.resetAllMocks();
vi.clearAllMocks();
});
describe("createSurvey", () => {
test("creates a survey successfully", async () => {
// Mock input data
const environmentId = "env-123";
const surveyBody: TSurveyCreateInput = {
name: "Test Survey",
type: "app",
status: "draft",
questions: [],
createdBy: "user-123",
};
// Mock dependencies
const mockActionClasses: ActionClass[] = [];
vi.mocked(getActionClasses).mockResolvedValue(mockActionClasses);
vi.mocked(getOrganizationByEnvironmentId).mockResolvedValue({ id: "org-123", name: "Org" } as any);
const mockCreatedSurvey = {
id: "survey-123",
environmentId,
type: "app",
segment: {
surveys: [{ id: "survey-123" }],
},
} as any;
vi.mocked(prisma.survey.create).mockResolvedValue(mockCreatedSurvey);
const mockSegment = { id: "segment-123" } as any;
vi.mocked(prisma.segment.create).mockResolvedValue(mockSegment);
// Execute function
const result = await createSurvey(environmentId, surveyBody);
// Verify results
expect(getActionClasses).toHaveBeenCalledWith(environmentId);
expect(getOrganizationByEnvironmentId).toHaveBeenCalledWith(environmentId);
expect(prisma.survey.create).toHaveBeenCalledWith({
data: expect.objectContaining({
name: surveyBody.name,
type: surveyBody.type,
environment: { connect: { id: environmentId } },
creator: { connect: { id: surveyBody.createdBy } },
}),
select: selectSurvey,
});
expect(prisma.segment.create).toHaveBeenCalled();
expect(prisma.survey.update).toHaveBeenCalled();
expect(subscribeOrganizationMembersToSurveyResponses).toHaveBeenCalledWith(
"survey-123",
"user-123",
"org-123"
);
expect(result).toBeDefined();
expect(result.id).toBe("survey-123");
});
test("handles empty languages array", async () => {
const environmentId = "env-123";
const surveyBody: TSurveyCreateInput = {
name: "Test Survey",
type: "app",
status: "draft",
languages: [],
questions: [],
};
vi.mocked(getActionClasses).mockResolvedValue([]);
vi.mocked(getOrganizationByEnvironmentId).mockResolvedValue({ id: "org-123" } as any);
vi.mocked(prisma.survey.create).mockResolvedValue({
id: "survey-123",
environmentId,
type: "link",
segment: null,
} as any);
await createSurvey(environmentId, surveyBody);
expect(prisma.survey.create).toHaveBeenCalledWith(
expect.objectContaining({
data: expect.not.objectContaining({ languages: [] }),
})
);
});
test("handles follow-ups properly", async () => {
const environmentId = "env-123";
const surveyBody: TSurveyCreateInput = {
name: "Test Survey",
type: "app",
status: "draft",
questions: [],
followUps: [{ name: "Follow Up 1", trigger: "trigger1", action: "action1" } as any],
};
vi.mocked(getActionClasses).mockResolvedValue([]);
vi.mocked(getOrganizationByEnvironmentId).mockResolvedValue({ id: "org-123" } as any);
vi.mocked(prisma.survey.create).mockResolvedValue({
id: "survey-123",
environmentId,
type: "link",
segment: null,
} as any);
await createSurvey(environmentId, surveyBody);
expect(prisma.survey.create).toHaveBeenCalledWith(
expect.objectContaining({
data: expect.objectContaining({
followUps: {
create: [{ name: "Follow Up 1", trigger: "trigger1", action: "action1" }],
},
}),
})
);
});
test("throws error when organization not found", async () => {
const environmentId = "env-123";
const surveyBody: TSurveyCreateInput = {
name: "Test Survey",
type: "app",
status: "draft",
questions: [],
};
vi.mocked(getActionClasses).mockResolvedValue([]);
vi.mocked(getOrganizationByEnvironmentId).mockResolvedValue(null);
await expect(createSurvey(environmentId, surveyBody)).rejects.toThrow(ResourceNotFoundError);
});
test("handles database errors", async () => {
const environmentId = "env-123";
const surveyBody: TSurveyCreateInput = {
name: "Test Survey",
type: "app",
status: "draft",
questions: [],
};
vi.mocked(getActionClasses).mockResolvedValue([]);
vi.mocked(getOrganizationByEnvironmentId).mockResolvedValue({ id: "org-123" } as any);
const prismaError = new Prisma.PrismaClientKnownRequestError("Database error", {
code: "P2002",
clientVersion: "5.0.0",
});
vi.mocked(prisma.survey.create).mockRejectedValue(prismaError);
await expect(createSurvey(environmentId, surveyBody)).rejects.toThrow(DatabaseError);
expect(logger.error).toHaveBeenCalled();
});
test("re-exports the shared trigger update helper", () => {
expect(handleTriggerUpdates).toBe(handleTriggerUpdatesFromService);
});
describe("handleTriggerUpdates", () => {
test("handles empty triggers", () => {
const result = handleTriggerUpdates(undefined as any, [], []);
expect(result).toEqual({});
});
test("delegates createSurvey to the shared survey service", async () => {
vi.mocked(createSurveyFromService).mockResolvedValueOnce(createdSurvey);
test("adds new triggers", () => {
const updatedTriggers = [
{ actionClass: { id: "action-1" } },
{ actionClass: { id: "action-2" } },
] as any;
const currentTriggers = [] as any;
const actionClasses = [{ id: "action-1" }, { id: "action-2" }] as ActionClass[];
const result = await createSurvey(environmentId, surveyBody);
const result = handleTriggerUpdates(updatedTriggers, currentTriggers, actionClasses);
expect(createSurveyFromService).toHaveBeenCalledWith(environmentId, surveyBody);
expect(result).toBe(createdSurvey);
});
expect(result).toEqual({
create: [{ actionClassId: "action-1" }, { actionClassId: "action-2" }],
});
});
test("propagates service errors", async () => {
const error = new DatabaseError("database error");
vi.mocked(createSurveyFromService).mockRejectedValueOnce(error);
test("removes triggers", () => {
const updatedTriggers = [] as any;
const currentTriggers = [
{ actionClass: { id: "action-1" } },
{ actionClass: { id: "action-2" } },
] as any;
const actionClasses = [{ id: "action-1" }, { id: "action-2" }] as ActionClass[];
const result = handleTriggerUpdates(updatedTriggers, currentTriggers, actionClasses);
expect(result).toEqual({
deleteMany: {
actionClassId: {
in: ["action-1", "action-2"],
},
},
});
});
test("throws error for invalid trigger", () => {
const updatedTriggers = [{ actionClass: { id: "action-3" } }] as any;
const currentTriggers = [] as any;
const actionClasses = [{ id: "action-1" }] as ActionClass[];
expect(() => handleTriggerUpdates(updatedTriggers, currentTriggers, actionClasses)).toThrow(
InvalidInputError
);
});
test("throws error for duplicate triggers", () => {
const updatedTriggers = [
{ actionClass: { id: "action-1" } },
{ actionClass: { id: "action-1" } },
] as any;
const currentTriggers = [] as any;
const actionClasses = [{ id: "action-1" }] as ActionClass[];
expect(() => handleTriggerUpdates(updatedTriggers, currentTriggers, actionClasses)).toThrow(
InvalidInputError
);
});
await expect(createSurvey(environmentId, surveyBody)).rejects.toThrow(error);
});
});

View File

@@ -1,211 +1,11 @@
import { Prisma } from "@prisma/client";
import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSurvey, TSurveyCreateInput } from "@formbricks/types/surveys/types";
import {
getOrganizationByEnvironmentId,
subscribeOrganizationMembersToSurveyResponses,
} from "@/lib/organization/service";
import { validateMediaAndPrepareBlocks } from "@/lib/survey/utils";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { TriggerUpdate } from "@/modules/survey/editor/types/survey-trigger";
import { getActionClasses } from "@/modules/survey/lib/action-class";
import { selectSurvey } from "@/modules/survey/lib/survey";
import { createSurvey as createSurveyFromService, handleTriggerUpdates } from "@/lib/survey/service";
export { handleTriggerUpdates };
export const createSurvey = async (
environmentId: string,
surveyBody: TSurveyCreateInput
): Promise<TSurvey> => {
try {
const { createdBy, ...restSurveyBody } = surveyBody;
// empty languages array
if (!restSurveyBody.languages?.length) {
delete restSurveyBody.languages;
}
const [organization, projectId] = await Promise.all([
getOrganizationByEnvironmentId(environmentId),
getProjectIdFromEnvironmentId(environmentId),
]);
const actionClasses = await getActionClasses(projectId);
// @ts-expect-error
let data: Omit<Prisma.SurveyCreateInput, "environment"> = {
...restSurveyBody,
// TODO: Create with attributeFilters
triggers: restSurveyBody.triggers
? handleTriggerUpdates(restSurveyBody.triggers, [], actionClasses)
: undefined,
attributeFilters: undefined,
};
if (createdBy) {
data.creator = {
connect: {
id: createdBy,
},
};
}
if (!organization) {
throw new ResourceNotFoundError("Organization", null);
}
// Survey follow-ups
if (restSurveyBody.followUps?.length) {
data.followUps = {
create: restSurveyBody.followUps.map((followUp) => ({
name: followUp.name,
trigger: followUp.trigger,
action: followUp.action,
})),
};
} else {
delete data.followUps;
}
// Validate and prepare blocks
if (data.blocks && data.blocks.length > 0) {
data.blocks = validateMediaAndPrepareBlocks(data.blocks);
}
const survey = await prisma.survey.create({
data: {
...data,
environment: {
connect: {
id: environmentId,
},
},
project: {
connect: {
id: projectId,
},
},
},
select: selectSurvey,
});
// if the survey created is an "app" survey, we also create a private segment for it.
if (survey.type === "app") {
const newSegment = await prisma.segment.create({
data: {
title: survey.id,
filters: [],
isPrivate: true,
environmentId,
projectId,
},
});
await prisma.survey.update({
where: {
id: survey.id,
},
data: {
segment: {
connect: {
id: newSegment.id,
},
},
},
});
}
// TODO: Fix this, this happens because the survey type "web" is no longer in the zod types but its required in the schema for migration
// @ts-expect-error
const transformedSurvey: TSurvey = {
...survey,
...(survey.segment && {
segment: {
...survey.segment,
surveys: survey.segment.surveys.map((survey) => survey.id),
},
}),
};
if (createdBy) {
await subscribeOrganizationMembersToSurveyResponses(survey.id, createdBy, organization.id);
}
return transformedSurvey;
} catch (error) {
if (error instanceof Prisma.PrismaClientKnownRequestError) {
logger.error(error, "Error creating survey");
throw new DatabaseError(error.message);
}
throw error;
}
};
const getTriggerIds = (triggers: unknown): string[] | null => {
if (!triggers) return null;
if (!Array.isArray(triggers)) {
throw new InvalidInputError("Invalid trigger id");
}
return triggers.map((trigger) => {
const actionClassId = (trigger as { actionClass?: { id?: unknown } })?.actionClass?.id;
if (typeof actionClassId !== "string") {
throw new InvalidInputError("Invalid trigger id");
}
return actionClassId;
});
};
const checkTriggersValidity = (triggers: unknown, actionClasses: Array<{ id: string }>) => {
const triggerIds = getTriggerIds(triggers);
if (!triggerIds) return;
// check if all the triggers are valid
triggerIds.forEach((triggerId) => {
if (!actionClasses.find((actionClass) => actionClass.id === triggerId)) {
throw new InvalidInputError("Invalid trigger id");
}
});
if (new Set(triggerIds).size !== triggerIds.length) {
throw new InvalidInputError("Duplicate trigger id");
}
};
export const handleTriggerUpdates = (
updatedTriggers: unknown,
currentTriggers: unknown,
actionClasses: Array<{ id: string }>
) => {
const updatedTriggerIds = getTriggerIds(updatedTriggers);
if (!updatedTriggerIds) return {};
checkTriggersValidity(updatedTriggers, actionClasses);
const currentTriggerIds = getTriggerIds(currentTriggers) ?? [];
// added triggers are triggers that are not in the current triggers and are there in the new triggers
const addedTriggerIds = updatedTriggerIds.filter((triggerId) => !currentTriggerIds.includes(triggerId));
// deleted triggers are triggers that are not in the new triggers and are there in the current triggers
const deletedTriggerIds = currentTriggerIds.filter((triggerId) => !updatedTriggerIds.includes(triggerId));
// Construct the triggers update object
const triggersUpdate: TriggerUpdate = {};
if (addedTriggerIds.length > 0) {
triggersUpdate.create = addedTriggerIds.map((triggerId) => ({
actionClassId: triggerId,
}));
}
if (deletedTriggerIds.length > 0) {
// disconnect the public triggers from the survey
triggersUpdate.deleteMany = {
actionClassId: {
in: deletedTriggerIds,
},
};
}
return triggersUpdate;
return createSurveyFromService(environmentId, surveyBody);
};

View File

@@ -49,7 +49,6 @@ export const HowToSendCard = ({ localSurvey, setLocalSurvey, environment }: HowT
isPrivate: true,
title: localSurvey.id,
environmentId: environment.id,
projectId: null,
surveys: [localSurvey.id],
filters: [],
createdAt: new Date(),

View File

@@ -3,7 +3,6 @@ import { prisma } from "@formbricks/database";
import { PrismaErrorType } from "@formbricks/database/types/error";
import { TActionClassInput } from "@formbricks/types/action-classes";
import { DatabaseError } from "@formbricks/types/errors";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
export const createActionClass = async (
environmentId: string,
@@ -12,13 +11,10 @@ export const createActionClass = async (
const { environmentId: _, ...actionClassInput } = actionClass;
try {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
const actionClassPrisma = await prisma.actionClass.create({
data: {
...actionClassInput,
environmentId,
projectId,
environment: { connect: { id: environmentId } },
key: actionClassInput.type === "code" ? actionClassInput.key : undefined,
noCodeConfig:
actionClassInput.type === "noCode"

View File

@@ -1,837 +1,59 @@
import { ActionClass, Prisma } from "@prisma/client";
import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
import { prisma } from "@formbricks/database";
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSegment } from "@formbricks/types/segment";
import { TSurvey, TSurveyQuestionTypeEnum } from "@formbricks/types/surveys/types";
import { updateSurveyInternal } from "@/lib/survey/service";
import { getActionClasses } from "@/modules/survey/lib/action-class";
import { getOrganizationAIKeys, getOrganizationIdFromEnvironmentId } from "@/modules/survey/lib/organization";
import { getSurvey } from "@/modules/survey/lib/survey";
import { checkTriggersValidity, handleTriggerUpdates, updateSurvey, updateSurveyDraft } from "./survey";
// Mock dependencies
vi.mock("@formbricks/database", () => ({
prisma: {
survey: {
update: vi.fn(),
},
segment: {
update: vi.fn(),
delete: vi.fn(),
},
},
}));
vi.mock("@/lib/survey/utils", () => ({
checkForInvalidImagesInQuestions: vi.fn(),
}));
import { beforeEach, describe, expect, test, vi } from "vitest";
import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSurvey } from "@formbricks/types/surveys/types";
import {
handleTriggerUpdates as handleTriggerUpdatesFromService,
updateSurvey as updateSurveyFromService,
updateSurveyInternal,
} from "@/lib/survey/service";
import { handleTriggerUpdates, updateSurvey, updateSurveyDraft } from "./survey";
vi.mock("@/lib/survey/service", () => ({
handleTriggerUpdates: vi.fn(),
updateSurvey: vi.fn(),
updateSurveyInternal: vi.fn(),
}));
vi.mock("@/modules/survey/lib/action-class", () => ({
getActionClasses: vi.fn(),
}));
describe("survey editor wrappers", () => {
const survey = { id: "survey_1" } as TSurvey;
vi.mock("@/modules/survey/lib/organization", () => ({
getOrganizationIdFromEnvironmentId: vi.fn(),
getOrganizationAIKeys: vi.fn(),
}));
vi.mock("@/modules/survey/lib/survey", () => ({
getSurvey: vi.fn(),
selectSurvey: {
id: true,
createdAt: true,
updatedAt: true,
name: true,
type: true,
environmentId: true,
},
}));
vi.mock("@formbricks/logger", () => ({
logger: {
error: vi.fn(),
},
}));
describe("Survey Editor Library Tests", () => {
afterEach(() => {
beforeEach(() => {
vi.clearAllMocks();
});
describe("updateSurvey", () => {
const mockSurvey = {
id: "survey123",
createdAt: new Date(),
updatedAt: new Date(),
name: "Test Survey",
type: "app",
environmentId: "env123",
createdBy: "user123",
status: "draft",
displayOption: "displayOnce",
questions: [
{
id: "q1",
type: TSurveyQuestionTypeEnum.OpenText,
headline: { default: "Question 1" },
required: false,
inputType: "text",
charLimit: { enabled: false },
},
],
welcomeCard: {
enabled: false,
timeToFinish: true,
showResponseCount: false,
},
triggers: [],
endings: [],
hiddenFields: { enabled: false },
delay: 0,
autoComplete: null,
projectOverwrites: null,
styling: null,
showLanguageSwitch: false,
segment: null,
surveyClosedMessage: null,
singleUse: null,
isVerifyEmailEnabled: false,
recaptcha: null,
isSingleResponsePerEmailEnabled: false,
isBackButtonHidden: false,
pin: null,
displayPercentage: null,
languages: [
{
language: {
id: "en",
code: "en",
createdAt: new Date(),
updatedAt: new Date(),
alias: null,
projectId: "project1",
},
default: true,
enabled: true,
},
],
variables: [],
followUps: [],
} as unknown as TSurvey;
const mockCurrentSurvey = { ...mockSurvey };
const mockActionClasses: ActionClass[] = [
{
id: "action1",
name: "Code Action",
description: "Action from code",
type: "code" as const,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
},
];
const mockOrganizationId = "org123";
const mockOrganization = {
id: mockOrganizationId,
name: "Test Organization",
ownerUserId: "user123",
billing: {
stripeCustomerId: "cust_123",
features: {},
usageCycleAnchor: new Date(),
},
isAIEnabled: false,
};
beforeEach(() => {
vi.mocked(prisma.survey.update).mockResolvedValue(mockSurvey as any);
vi.mocked(prisma.segment.update).mockResolvedValue({
id: "segment1",
environmentId: "env123",
surveys: [{ id: "survey123" }],
} as any);
vi.mocked(getSurvey).mockResolvedValue(mockCurrentSurvey);
vi.mocked(getActionClasses).mockResolvedValue(mockActionClasses);
vi.mocked(getOrganizationIdFromEnvironmentId).mockResolvedValue(mockOrganizationId);
vi.mocked(getOrganizationAIKeys).mockResolvedValue(mockOrganization as any);
});
test("should handle languages update with multiple languages", async () => {
const updatedSurvey: TSurvey = {
...mockSurvey,
languages: [
{
language: {
id: "en",
code: "en",
createdAt: new Date(),
updatedAt: new Date(),
alias: null,
projectId: "project1",
},
default: true,
enabled: true,
},
{
language: {
id: "es",
code: "es",
createdAt: new Date(),
updatedAt: new Date(),
alias: null,
projectId: "project1",
},
default: false,
enabled: true,
},
],
};
await updateSurvey(updatedSurvey);
expect(prisma.survey.update).toHaveBeenCalledWith({
where: { id: "survey123" },
data: expect.objectContaining({
languages: {
updateMany: expect.any(Array),
create: expect.arrayContaining([
expect.objectContaining({
languageId: "es",
default: false,
enabled: true,
}),
]),
},
}),
select: expect.any(Object),
});
});
test("should handle languages update with single default language", async () => {
// This tests the fix for the bug where languages.length === 1 would incorrectly
// set updatedLanguageIds to [] causing the default language to be removed
const updatedSurvey: TSurvey = {
...mockSurvey,
languages: [
{
language: {
id: "en",
code: "en",
createdAt: new Date(),
updatedAt: new Date(),
alias: null,
projectId: "project1",
},
default: true,
enabled: true,
},
],
};
await updateSurvey(updatedSurvey);
// Verify that prisma.survey.update was called
expect(prisma.survey.update).toHaveBeenCalled();
const updateCall = vi.mocked(prisma.survey.update).mock.calls[0][0];
// The key test: when languages.length === 1, we should still process language updates
// and NOT delete the language. Before the fix, languages.length > 1 would fail this case.
expect(updateCall).toBeDefined();
expect(updateCall.where).toEqual({ id: "survey123" });
expect(updateCall.data).toBeDefined();
});
test("should remove all languages when empty array is passed", async () => {
const updatedSurvey: TSurvey = {
...mockSurvey,
languages: [],
};
await updateSurvey(updatedSurvey);
// Verify that prisma.survey.update was called
expect(prisma.survey.update).toHaveBeenCalled();
const updateCall = vi.mocked(prisma.survey.update).mock.calls[0][0];
// When languages is empty array, all existing languages should be removed
expect(updateCall).toBeDefined();
expect(updateCall.where).toEqual({ id: "survey123" });
expect(updateCall.data).toBeDefined();
});
test("should delete private segment for non-app type surveys", async () => {
const mockSegment: TSegment = {
id: "segment1",
title: "Test Segment",
isPrivate: true,
environmentId: "env123",
surveys: ["survey123"],
createdAt: new Date(),
updatedAt: new Date(),
description: null,
filters: [{ id: "filter1" } as any],
};
const updatedSurvey: TSurvey = {
...mockSurvey,
type: "link",
segment: mockSegment,
};
await updateSurvey(updatedSurvey);
expect(prisma.segment.update).toHaveBeenCalledWith({
where: { id: "segment1" },
data: {
surveys: {
disconnect: {
id: "survey123",
},
},
},
});
expect(prisma.segment.delete).toHaveBeenCalledWith({
where: {
id: "segment1",
},
});
});
test("should disconnect public segment for non-app type surveys", async () => {
const mockSegment: TSegment = {
id: "segment1",
title: "Test Segment",
isPrivate: false,
environmentId: "env123",
surveys: ["survey123"],
createdAt: new Date(),
updatedAt: new Date(),
description: null,
filters: [],
};
const updatedSurvey: TSurvey = {
...mockSurvey,
type: "link",
segment: mockSegment,
};
await updateSurvey(updatedSurvey);
expect(prisma.survey.update).toHaveBeenCalledWith({
where: {
id: "survey123",
},
data: {
segment: {
disconnect: true,
},
},
});
});
test("should handle followUps updates", async () => {
const updatedSurvey: TSurvey = {
...mockSurvey,
followUps: [
{
id: "f1",
name: "Existing Follow Up",
createdAt: new Date(),
updatedAt: new Date(),
surveyId: "survey123",
trigger: {
type: "response",
properties: {
endingIds: ["ending1"],
},
},
action: {
type: "send-email",
properties: {
to: "test@example.com",
subject: "Test",
body: "Test body",
from: "test@formbricks.com",
replyTo: ["reply@formbricks.com"],
attachResponseData: false,
},
},
deleted: false,
},
{
id: "f2",
name: "New Follow Up",
createdAt: new Date(),
updatedAt: new Date(),
surveyId: "survey123",
trigger: {
type: "response",
properties: {
endingIds: ["ending1"],
},
},
action: {
type: "send-email",
properties: {
to: "new@example.com",
subject: "New Test",
body: "New test body",
from: "test@formbricks.com",
replyTo: ["reply@formbricks.com"],
attachResponseData: false,
},
},
deleted: false,
},
{
id: "f3",
name: "Follow Up To Delete",
createdAt: new Date(),
updatedAt: new Date(),
surveyId: "survey123",
trigger: {
type: "response",
properties: {
endingIds: ["ending1"],
},
},
action: {
type: "send-email",
properties: {
to: "delete@example.com",
subject: "Delete Test",
body: "Delete test body",
from: "test@formbricks.com",
replyTo: ["reply@formbricks.com"],
attachResponseData: false,
},
},
deleted: true,
},
],
};
// Mock current survey with existing followUps
vi.mocked(getSurvey).mockResolvedValueOnce({
...mockCurrentSurvey,
followUps: [
{
id: "f1",
name: "Existing Follow Up",
trigger: {
type: "response",
properties: {
endingIds: ["ending1"],
},
},
action: {
type: "send-email",
properties: {
to: "test@example.com",
subject: "Test",
body: "Test body",
from: "test@formbricks.com",
replyTo: ["reply@formbricks.com"],
attachResponseData: false,
},
},
},
],
} as any);
await updateSurvey(updatedSurvey);
expect(prisma.survey.update).toHaveBeenCalledWith({
where: { id: "survey123" },
data: expect.objectContaining({
followUps: {
updateMany: [
{
where: {
id: "f1",
},
data: expect.objectContaining({
name: "Existing Follow Up",
}),
},
],
createMany: {
data: [
expect.objectContaining({
name: "New Follow Up",
}),
],
},
deleteMany: [
{
id: "f3",
},
],
},
}),
select: expect.any(Object),
});
});
test("should throw ResourceNotFoundError when survey is not found", async () => {
vi.mocked(getSurvey).mockResolvedValueOnce(null as unknown as TSurvey);
await expect(updateSurvey(mockSurvey)).rejects.toThrow(ResourceNotFoundError);
expect(getSurvey).toHaveBeenCalledWith("survey123");
});
test("should throw ResourceNotFoundError when organization is not found", async () => {
vi.mocked(getOrganizationAIKeys).mockResolvedValueOnce(null);
await expect(updateSurvey(mockSurvey)).rejects.toThrow(ResourceNotFoundError);
});
test("should throw DatabaseError when Prisma throws a known request error", async () => {
const prismaError = new Prisma.PrismaClientKnownRequestError("Database error", {
code: "P2002",
clientVersion: "4.0.0",
});
vi.mocked(prisma.survey.update).mockRejectedValueOnce(prismaError);
await expect(updateSurvey(mockSurvey)).rejects.toThrow(DatabaseError);
});
test("should rethrow other errors", async () => {
const genericError = new Error("Some other error");
vi.mocked(prisma.survey.update).mockRejectedValueOnce(genericError);
await expect(updateSurvey(mockSurvey)).rejects.toThrow(genericError);
});
test("should throw InvalidInputError for invalid segment filters", async () => {
const updatedSurvey: TSurvey = {
...mockSurvey,
segment: {
id: "segment1",
title: "Test Segment",
isPrivate: false,
environmentId: "env123",
surveys: ["survey123"],
createdAt: new Date(),
updatedAt: new Date(),
description: null,
filters: "invalid filters" as any,
},
};
await expect(updateSurvey(updatedSurvey)).rejects.toThrow(InvalidInputError);
});
test("should handle error in segment update", async () => {
vi.mocked(prisma.segment.update).mockRejectedValueOnce(new Error("Error updating survey"));
const updatedSurvey: TSurvey = {
...mockSurvey,
segment: {
id: "segment1",
title: "Test Segment",
isPrivate: false,
environmentId: "env123",
surveys: ["survey123"],
createdAt: new Date(),
updatedAt: new Date(),
description: null,
filters: [],
},
};
await expect(updateSurvey(updatedSurvey)).rejects.toThrow("Error updating survey");
});
test("re-exports the shared trigger update helper", () => {
expect(handleTriggerUpdates).toBe(handleTriggerUpdatesFromService);
});
describe("checkTriggersValidity", () => {
const mockActionClasses: ActionClass[] = [
{
id: "action1",
name: "Action 1",
description: "Test Action 1",
type: "code" as const,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
},
{
id: "action2",
name: "Action 2",
description: "Test Action 2",
type: "noCode" as const,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
},
];
test("delegates updateSurvey to the shared survey service", async () => {
vi.mocked(updateSurveyFromService).mockResolvedValueOnce(survey);
const createFullActionClass = (id: string, type: "code" | "noCode" = "code"): ActionClass => ({
id,
name: `Action ${id}`,
description: `Test Action ${id}`,
type,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
});
const result = await updateSurvey(survey);
test("should not throw error for valid triggers", () => {
const triggers = [
{ actionClass: createFullActionClass("action1") },
{ actionClass: createFullActionClass("action2", "noCode") },
];
expect(() => checkTriggersValidity(triggers as any, mockActionClasses)).not.toThrow();
});
test("should throw error for invalid trigger id", () => {
const triggers = [
{ actionClass: createFullActionClass("action1") },
{ actionClass: createFullActionClass("invalid") },
];
expect(() => checkTriggersValidity(triggers as any, mockActionClasses)).toThrow(InvalidInputError);
expect(() => checkTriggersValidity(triggers as any, mockActionClasses)).toThrow("Invalid trigger id");
});
test("should throw error for duplicate trigger ids", () => {
const triggers = [
{ actionClass: createFullActionClass("action1") },
{ actionClass: createFullActionClass("action1") },
];
expect(() => checkTriggersValidity(triggers as any, mockActionClasses)).toThrow(InvalidInputError);
expect(() => checkTriggersValidity(triggers as any, mockActionClasses)).toThrow("Duplicate trigger id");
});
test("should do nothing when triggers are undefined", () => {
expect(() => checkTriggersValidity(undefined as any, mockActionClasses)).not.toThrow();
});
expect(updateSurveyFromService).toHaveBeenCalledWith(survey);
expect(result).toBe(survey);
});
describe("handleTriggerUpdates", () => {
const mockActionClasses: ActionClass[] = [
{
id: "action1",
name: "Action 1",
description: "Test Action 1",
type: "code" as const,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
},
{
id: "action2",
name: "Action 2",
description: "Test Action 2",
type: "noCode" as const,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
},
{
id: "action3",
name: "Action 3",
description: "Test Action 3",
type: "noCode" as const,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
},
];
test("delegates draft saves to updateSurveyInternal with skipValidation enabled", async () => {
vi.mocked(updateSurveyInternal).mockResolvedValueOnce(survey);
const createActionClassObj = (id: string, type: "code" | "noCode" = "code"): ActionClass => ({
id,
name: `Action ${id}`,
description: `Test Action ${id}`,
type,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
});
const result = await updateSurveyDraft(survey);
test("should return empty object when updatedTriggers is undefined", () => {
const result = handleTriggerUpdates(undefined as any, [], mockActionClasses);
expect(result).toEqual({});
});
test("should identify added triggers correctly", () => {
const currentTriggers = [{ actionClass: createActionClassObj("action1") }];
const updatedTriggers = [
{ actionClass: createActionClassObj("action1") },
{ actionClass: createActionClassObj("action2", "noCode") },
];
const result = handleTriggerUpdates(updatedTriggers as any, currentTriggers as any, mockActionClasses);
expect(result).toEqual({
create: [{ actionClassId: "action2" }],
});
});
test("should identify deleted triggers correctly", () => {
const currentTriggers = [
{ actionClass: createActionClassObj("action1") },
{ actionClass: createActionClassObj("action2", "noCode") },
];
const updatedTriggers = [{ actionClass: createActionClassObj("action1") }];
const result = handleTriggerUpdates(updatedTriggers as any, currentTriggers as any, mockActionClasses);
expect(result).toEqual({
deleteMany: {
actionClassId: {
in: ["action2"],
},
},
});
});
test("should handle both added and deleted triggers", () => {
const currentTriggers = [
{ actionClass: createActionClassObj("action1") },
{ actionClass: createActionClassObj("action2", "noCode") },
];
const updatedTriggers = [
{ actionClass: createActionClassObj("action1") },
{ actionClass: createActionClassObj("action3", "noCode") },
];
const result = handleTriggerUpdates(updatedTriggers as any, currentTriggers as any, mockActionClasses);
expect(result).toEqual({
create: [{ actionClassId: "action3" }],
deleteMany: {
actionClassId: {
in: ["action2"],
},
},
});
});
test("should validate triggers before processing", () => {
const currentTriggers = [{ actionClass: createActionClassObj("action1") }];
const updatedTriggers = [
{ actionClass: createActionClassObj("action1") },
{ actionClass: createActionClassObj("invalid") },
];
expect(() =>
handleTriggerUpdates(updatedTriggers as any, currentTriggers as any, mockActionClasses)
).toThrow(InvalidInputError);
});
expect(updateSurveyInternal).toHaveBeenCalledWith(survey, true);
expect(result).toBe(survey);
});
describe("updateSurveyDraft", () => {
const mockSurvey = {
id: "survey123",
createdAt: new Date(),
updatedAt: new Date(),
name: "Draft Survey",
type: "app",
environmentId: "env123",
createdBy: "user123",
status: "draft",
displayOption: "displayOnce",
questions: [
{
id: "q1",
type: TSurveyQuestionTypeEnum.OpenText,
headline: { default: "Question 1" },
required: false,
inputType: "text",
charLimit: { enabled: false },
},
],
welcomeCard: {
enabled: false,
timeToFinish: true,
showResponseCount: false,
},
triggers: [],
endings: [],
hiddenFields: { enabled: false },
delay: 0,
autoComplete: null,
projectOverwrites: null,
styling: null,
showLanguageSwitch: false,
segment: null,
surveyClosedMessage: null,
singleUse: null,
isVerifyEmailEnabled: false,
recaptcha: null,
isSingleResponsePerEmailEnabled: false,
isBackButtonHidden: false,
pin: null,
displayPercentage: null,
languages: [],
variables: [],
followUps: [],
} as unknown as TSurvey;
test("propagates service errors for updateSurvey", async () => {
const error = new DatabaseError("database error");
vi.mocked(updateSurveyFromService).mockRejectedValueOnce(error);
beforeEach(() => {
vi.mocked(updateSurveyInternal).mockResolvedValue(mockSurvey);
});
await expect(updateSurvey(survey)).rejects.toThrow(error);
});
test("should call updateSurveyInternal with skipValidation=true", async () => {
await updateSurveyDraft(mockSurvey);
test("propagates service errors for updateSurveyDraft", async () => {
const error = new ResourceNotFoundError("Survey", "survey_1");
vi.mocked(updateSurveyInternal).mockRejectedValueOnce(error);
expect(updateSurveyInternal).toHaveBeenCalledWith(mockSurvey, true);
expect(updateSurveyInternal).toHaveBeenCalledTimes(1);
});
test("should return the survey from updateSurveyInternal", async () => {
const result = await updateSurveyDraft(mockSurvey);
expect(result).toEqual(mockSurvey);
});
test("should propagate errors from updateSurveyInternal", async () => {
const error = new Error("Internal update failed");
vi.mocked(updateSurveyInternal).mockRejectedValueOnce(error);
await expect(updateSurveyDraft(mockSurvey)).rejects.toThrow("Internal update failed");
});
test("should propagate ResourceNotFoundError from updateSurveyInternal", async () => {
vi.mocked(updateSurveyInternal).mockRejectedValueOnce(new ResourceNotFoundError("Survey", "survey123"));
await expect(updateSurveyDraft(mockSurvey)).rejects.toThrow(ResourceNotFoundError);
});
test("should propagate DatabaseError from updateSurveyInternal", async () => {
vi.mocked(updateSurveyInternal).mockRejectedValueOnce(new DatabaseError("Database connection failed"));
await expect(updateSurveyDraft(mockSurvey)).rejects.toThrow(DatabaseError);
});
await expect(updateSurveyDraft(survey)).rejects.toThrow(error);
});
});

View File

@@ -1,369 +1,16 @@
import { Prisma } from "@prisma/client";
import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSegment, ZSegmentFilters } from "@formbricks/types/segment";
import { TSurvey } from "@formbricks/types/surveys/types";
import { updateSurveyInternal } from "@/lib/survey/service";
import { validateMediaAndPrepareBlocks } from "@/lib/survey/utils";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { TriggerUpdate } from "@/modules/survey/editor/types/survey-trigger";
import { getActionClasses } from "@/modules/survey/lib/action-class";
import { getOrganizationAIKeys, getOrganizationIdFromEnvironmentId } from "@/modules/survey/lib/organization";
import { getSurvey, selectSurvey } from "@/modules/survey/lib/survey";
import {
handleTriggerUpdates,
updateSurvey as updateSurveyFromService,
updateSurveyInternal,
} from "@/lib/survey/service";
export { handleTriggerUpdates };
export const updateSurveyDraft = async (updatedSurvey: TSurvey): Promise<TSurvey> => {
// Use internal version with skipValidation=true to allow incomplete drafts
return updateSurveyInternal(updatedSurvey, true);
};
export const updateSurvey = async (updatedSurvey: TSurvey): Promise<TSurvey> => {
try {
const surveyId = updatedSurvey.id;
let data: any = {};
const [actionClasses, currentSurvey] = await Promise.all([
getProjectIdFromEnvironmentId(updatedSurvey.environmentId).then((projectId) =>
getActionClasses(projectId)
),
getSurvey(surveyId),
]);
if (!currentSurvey) {
throw new ResourceNotFoundError("Survey", surveyId);
}
const { triggers, environmentId, segment, questions, languages, type, followUps, ...surveyData } =
updatedSurvey;
// Validate and prepare blocks for persistence
if (updatedSurvey.blocks && updatedSurvey.blocks.length > 0) {
data.blocks = validateMediaAndPrepareBlocks(updatedSurvey.blocks);
}
if (languages) {
// Process languages update logic here
// Extract currentLanguageIds and updatedLanguageIds
const currentLanguageIds = currentSurvey.languages
? currentSurvey.languages.map((l) => l.language.id)
: [];
const updatedLanguageIds =
languages.length > 0 ? updatedSurvey.languages.map((l) => l.language.id) : [];
const enabledLanguageIds = languages.map((language) => {
if (language.enabled) return language.language.id;
});
// Determine languages to add and remove
const languagesToAdd = updatedLanguageIds.filter((id) => !currentLanguageIds.includes(id));
const languagesToRemove = currentLanguageIds.filter((id) => !updatedLanguageIds.includes(id));
const defaultLanguageId = updatedSurvey.languages.find((l) => l.default)?.language.id;
// Prepare data for Prisma update
data.languages = {};
// Update existing languages for default value changes
data.languages.updateMany = currentSurvey.languages.map((surveyLanguage) => ({
where: { languageId: surveyLanguage.language.id },
data: {
default: surveyLanguage.language.id === defaultLanguageId,
enabled: enabledLanguageIds.includes(surveyLanguage.language.id),
},
}));
// Add new languages
if (languagesToAdd.length > 0) {
data.languages.create = languagesToAdd.map((languageId) => ({
languageId: languageId,
default: languageId === defaultLanguageId,
enabled: enabledLanguageIds.includes(languageId),
}));
}
// Remove languages no longer associated with the survey
if (languagesToRemove.length > 0) {
data.languages.deleteMany = languagesToRemove.map((languageId) => ({
languageId: languageId,
enabled: enabledLanguageIds.includes(languageId),
}));
}
}
if (triggers) {
data.triggers = handleTriggerUpdates(triggers, currentSurvey.triggers, actionClasses);
}
// if the survey body has type other than "app" but has a private segment, we delete that segment, and if it has a public segment, we disconnect from to the survey
if (segment) {
if (type === "app") {
// parse the segment filters:
const parsedFilters = ZSegmentFilters.safeParse(segment.filters);
if (!parsedFilters.success) {
throw new InvalidInputError("Invalid user segment filters");
}
try {
// update the segment:
let updatedInput: Prisma.SegmentUpdateInput = {
...segment,
surveys: undefined,
};
if (segment.surveys) {
updatedInput = {
...segment,
surveys: {
connect: segment.surveys.map((surveyId) => ({ id: surveyId })),
},
};
}
await prisma.segment.update({
where: { id: segment.id },
data: updatedInput,
select: {
surveys: { select: { id: true } },
environmentId: true,
id: true,
},
});
} catch (error) {
logger.error(error, "Error updating survey");
throw new Error("Error updating survey");
}
} else {
if (segment.isPrivate) {
// disconnect the private segment first and then delete:
await prisma.segment.update({
where: { id: segment.id },
data: {
surveys: {
disconnect: {
id: surveyId,
},
},
},
});
// delete the private segment:
await prisma.segment.delete({
where: {
id: segment.id,
},
});
} else {
await prisma.survey.update({
where: {
id: surveyId,
},
data: {
segment: {
disconnect: true,
},
},
});
}
}
} else if (type === "app") {
if (!currentSurvey.segment) {
const projectId = await getProjectIdFromEnvironmentId(environmentId);
await prisma.survey.update({
where: {
id: surveyId,
},
data: {
segment: {
connectOrCreate: {
where: {
environmentId_title: {
environmentId,
title: surveyId,
},
},
create: {
title: surveyId,
isPrivate: true,
filters: [],
environment: {
connect: {
id: environmentId,
},
},
project: {
connect: {
id: projectId,
},
},
},
},
},
},
});
}
}
if (followUps) {
// Separate follow-ups into categories based on deletion flag
const deletedFollowUps = followUps.filter((followUp) => followUp.deleted);
const nonDeletedFollowUps = followUps.filter((followUp) => !followUp.deleted);
// Get set of existing follow-up IDs from currentSurvey
const existingFollowUpIds = new Set(currentSurvey.followUps.map((f) => f.id));
// Separate non-deleted follow-ups into new and existing
const existingFollowUps = nonDeletedFollowUps.filter((followUp) =>
existingFollowUpIds.has(followUp.id)
);
const newFollowUps = nonDeletedFollowUps.filter((followUp) => !existingFollowUpIds.has(followUp.id));
data.followUps = {
// Update existing follow-ups
updateMany: existingFollowUps.map((followUp) => ({
where: {
id: followUp.id,
},
data: {
name: followUp.name,
trigger: followUp.trigger,
action: followUp.action,
},
})),
// Create new follow-ups
createMany:
newFollowUps.length > 0
? {
data: newFollowUps.map((followUp) => ({
id: followUp.id,
name: followUp.name,
trigger: followUp.trigger,
action: followUp.action,
})),
}
: undefined,
// Delete follow-ups marked as deleted, regardless of whether they exist in DB
deleteMany:
deletedFollowUps.length > 0
? deletedFollowUps.map((followUp) => ({
id: followUp.id,
}))
: undefined,
};
}
const organizationId = await getOrganizationIdFromEnvironmentId(environmentId);
const organization = await getOrganizationAIKeys(organizationId);
if (!organization) {
throw new ResourceNotFoundError("Organization", null);
}
surveyData.updatedAt = new Date();
data = {
...surveyData,
...data,
type,
};
delete data.createdBy;
const prismaSurvey = await prisma.survey.update({
where: { id: surveyId },
data,
select: selectSurvey,
});
let surveySegment: TSegment | null = null;
if (prismaSurvey.segment) {
surveySegment = {
...prismaSurvey.segment,
surveys: prismaSurvey.segment.surveys.map((survey) => survey.id),
};
}
const modifiedSurvey: TSurvey = {
...prismaSurvey, // Properties from prismaSurvey
displayPercentage: Number(prismaSurvey.displayPercentage) || null,
segment: surveySegment,
customHeadScriptsMode: prismaSurvey.customHeadScriptsMode,
};
return modifiedSurvey;
} catch (error) {
if (error instanceof Prisma.PrismaClientKnownRequestError) {
logger.error(error, "Error updating survey");
throw new DatabaseError(error.message);
}
throw error;
}
};
const getTriggerIds = (triggers: unknown): string[] | null => {
if (!triggers) return null;
if (!Array.isArray(triggers)) {
throw new InvalidInputError("Invalid trigger id");
}
return triggers.map((trigger) => {
const actionClassId = (trigger as { actionClass?: { id?: unknown } })?.actionClass?.id;
if (typeof actionClassId !== "string") {
throw new InvalidInputError("Invalid trigger id");
}
return actionClassId;
});
};
export const checkTriggersValidity = (triggers: unknown, actionClasses: Array<{ id: string }>) => {
const triggerIds = getTriggerIds(triggers);
if (!triggerIds) return;
// check if all the triggers are valid
triggerIds.forEach((triggerId) => {
if (!actionClasses.find((actionClass) => actionClass.id === triggerId)) {
throw new InvalidInputError("Invalid trigger id");
}
});
if (new Set(triggerIds).size !== triggerIds.length) {
throw new InvalidInputError("Duplicate trigger id");
}
};
export const handleTriggerUpdates = (
updatedTriggers: unknown,
currentTriggers: unknown,
actionClasses: Array<{ id: string }>
) => {
const updatedTriggerIds = getTriggerIds(updatedTriggers);
if (!updatedTriggerIds) return {};
checkTriggersValidity(updatedTriggers, actionClasses);
const currentTriggerIds = getTriggerIds(currentTriggers) ?? [];
// added triggers are triggers that are not in the current triggers and are there in the new triggers
const addedTriggerIds = updatedTriggerIds.filter((triggerId) => !currentTriggerIds.includes(triggerId));
// deleted triggers are triggers that are not in the new triggers and are there in the current triggers
const deletedTriggerIds = currentTriggerIds.filter((triggerId) => !updatedTriggerIds.includes(triggerId));
// Construct the triggers update object
const triggersUpdate: TriggerUpdate = {};
if (addedTriggerIds.length > 0) {
triggersUpdate.create = addedTriggerIds.map((triggerId) => ({
actionClassId: triggerId,
}));
}
if (deletedTriggerIds.length > 0) {
// disconnect the public triggers from the survey
triggersUpdate.deleteMany = {
actionClassId: {
in: deletedTriggerIds,
},
};
}
return triggersUpdate;
return updateSurveyFromService(updatedSurvey);
};

View File

@@ -8,7 +8,6 @@ import {
UNSPLASH_ACCESS_KEY,
} from "@/lib/constants";
import { getPublicDomain } from "@/lib/getPublicUrl";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { getTranslate } from "@/lingodotdev/server";
import { getContactAttributeKeys } from "@/modules/ee/contacts/lib/contact-attribute-keys";
import { getSegments } from "@/modules/ee/contacts/segments/lib/segments";
@@ -51,16 +50,14 @@ export const SurveyEditorPage = async (props: {
await getEnvironmentAuth(params.environmentId);
const t = await getTranslate();
const projectId = await getProjectIdFromEnvironmentId(params.environmentId);
const [survey, projectWithTeamIds, actionClasses, contactAttributeKeys, responseCount, segments] =
await Promise.all([
getSurvey(params.surveyId),
getProjectWithTeamIdsByEnvironmentId(params.environmentId),
getActionClasses(projectId),
getContactAttributeKeys(projectId),
getActionClasses(params.environmentId),
getContactAttributeKeys(params.environmentId),
getResponseCountBySurveyId(params.surveyId),
getSegments(projectId),
getSegments(params.environmentId),
]);
if (!projectWithTeamIds) {

View File

@@ -5,19 +5,19 @@ import { prisma } from "@formbricks/database";
import { DatabaseError } from "@formbricks/types/errors";
import { validateInputs } from "@/lib/utils/validate";
export const getActionClasses = reactCache(async (projectId: string): Promise<ActionClass[]> => {
validateInputs([projectId, z.cuid2()]);
export const getActionClasses = reactCache(async (environmentId: string): Promise<ActionClass[]> => {
validateInputs([environmentId, z.cuid2()]);
try {
return await prisma.actionClass.findMany({
where: {
projectId,
environmentId: environmentId,
},
orderBy: {
createdAt: "asc",
},
});
} catch (error) {
throw new DatabaseError(`Database error when fetching actions for project ${projectId}`);
throw new DatabaseError(`Database error when fetching actions for environment ${environmentId}`);
}
});

View File

@@ -14,9 +14,10 @@ export const selectSurvey = {
name: true,
type: true,
environmentId: true,
projectId: true,
createdBy: true,
status: true,
startsAt: true,
endsAt: true,
welcomeCard: true,
questions: true,
blocks: true,
@@ -70,7 +71,6 @@ export const selectSurvey = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
name: true,
description: true,
type: true,
@@ -86,7 +86,6 @@ export const selectSurvey = {
createdAt: true,
updatedAt: true,
environmentId: true,
projectId: true,
title: true,
description: true,
isPrivate: true,

View File

@@ -148,12 +148,12 @@ function buildStandardCursorWhere(
}
function buildBaseWhere(
projectId: string,
environmentId: string,
filterCriteria?: TSurveyFilterCriteria,
extraWhere?: Prisma.SurveyWhereInput
): Prisma.SurveyWhereInput {
return {
projectId,
environmentId,
...buildWhereClause(filterCriteria),
...extraWhere,
};
@@ -197,7 +197,7 @@ function getRelevanceNextCursor(survey: TSurveyRow, bucket: TRelevanceBucket): T
}
async function findSurveyRows(
projectId: string,
environmentId: string,
limit: number,
sortBy: TStandardSurveyListSort,
filterCriteria?: TSurveyFilterCriteria,
@@ -207,7 +207,7 @@ async function findSurveyRows(
const cursorWhere = cursor ? buildStandardCursorWhere(sortBy, cursor) : undefined;
return prisma.survey.findMany({
where: buildBaseWhere(projectId, filterCriteria, {
where: buildBaseWhere(environmentId, filterCriteria, {
...extraWhere,
...cursorWhere,
}),
@@ -237,11 +237,11 @@ function buildSurveyListPage(rows: TSurveyRow[], cursor: TSurveyListPageCursor |
}
async function getStandardSurveyListPage(
projectId: string,
environmentId: string,
options: TGetSurveyListPageOptions & { sortBy: TStandardSurveyListSort }
): Promise<TSurveyListPage> {
const surveyRows = await findSurveyRows(
projectId,
environmentId,
options.limit,
options.sortBy,
options.filterCriteria,
@@ -258,7 +258,7 @@ async function getStandardSurveyListPage(
}
async function findRelevanceRows(
projectId: string,
environmentId: string,
limit: number,
filterCriteria: TSurveyFilterCriteria | undefined,
bucket: TRelevanceBucket,
@@ -271,7 +271,7 @@ async function findRelevanceRows(
: undefined;
return prisma.survey.findMany({
where: buildBaseWhere(projectId, filterCriteria, {
where: buildBaseWhere(environmentId, filterCriteria, {
...statusWhere,
...cursorWhere,
}),
@@ -282,10 +282,10 @@ async function findRelevanceRows(
}
async function hasMoreRelevanceRowsInOtherBucket(
projectId: string,
environmentId: string,
filterCriteria?: TSurveyFilterCriteria
): Promise<boolean> {
const otherRows = await findRelevanceRows(projectId, 1, filterCriteria, OTHER_BUCKET, null);
const otherRows = await findRelevanceRows(environmentId, 1, filterCriteria, OTHER_BUCKET, null);
return otherRows.length > 0;
}
@@ -315,13 +315,13 @@ function buildRelevancePage(rows: TSurveyRow[], bucket: TRelevanceBucket | null)
}
async function getInProgressRelevanceStep(
projectId: string,
environmentId: string,
limit: number,
filterCriteria: TSurveyFilterCriteria | undefined,
cursor: TRelevanceSurveyListCursor | null
): Promise<{ pageRows: TSurveyRow[]; remaining: number; response: TSurveyListPage | null }> {
const inProgressRows = await findRelevanceRows(
projectId,
environmentId,
limit,
filterCriteria,
IN_PROGRESS_BUCKET,
@@ -337,7 +337,7 @@ async function getInProgressRelevanceStep(
}
async function buildInProgressOnlyRelevancePage(
projectId: string,
environmentId: string,
rows: TSurveyRow[],
filterCriteria: TSurveyFilterCriteria | undefined,
cursor: TRelevanceSurveyListCursor | null
@@ -345,13 +345,13 @@ async function buildInProgressOnlyRelevancePage(
const hasOtherRows =
rows.length > 0 &&
shouldReadInProgressBucket(cursor) &&
(await hasMoreRelevanceRowsInOtherBucket(projectId, filterCriteria));
(await hasMoreRelevanceRowsInOtherBucket(environmentId, filterCriteria));
return buildRelevancePage(rows, hasOtherRows ? IN_PROGRESS_BUCKET : null);
}
async function getRelevanceSurveyListPage(
projectId: string,
environmentId: string,
options: TGetSurveyListPageOptions & { sortBy: "relevance" }
): Promise<TSurveyListPage> {
const relevanceCursor = getRelevanceCursor(options.cursor);
@@ -360,7 +360,7 @@ async function getRelevanceSurveyListPage(
if (shouldReadInProgressBucket(relevanceCursor)) {
const inProgressStep = await getInProgressRelevanceStep(
projectId,
environmentId,
remaining,
options.filterCriteria,
relevanceCursor
@@ -376,7 +376,7 @@ async function getRelevanceSurveyListPage(
if (remaining <= 0) {
return await buildInProgressOnlyRelevancePage(
projectId,
environmentId,
pageRows,
options.filterCriteria,
relevanceCursor
@@ -384,7 +384,7 @@ async function getRelevanceSurveyListPage(
}
const otherRows = await findRelevanceRows(
projectId,
environmentId,
remaining,
options.filterCriteria,
OTHER_BUCKET,
@@ -397,18 +397,18 @@ async function getRelevanceSurveyListPage(
}
export async function getSurveyListPage(
projectId: string,
environmentId: string,
options: TGetSurveyListPageOptions
): Promise<TSurveyListPage> {
try {
if (options.sortBy === "relevance") {
return await getRelevanceSurveyListPage(projectId, {
return await getRelevanceSurveyListPage(environmentId, {
...options,
sortBy: "relevance",
});
}
return await getStandardSurveyListPage(projectId, {
return await getStandardSurveyListPage(environmentId, {
...options,
sortBy: options.sortBy,
});

View File

@@ -15,7 +15,6 @@ export const surveySelect = {
status: true,
singleUse: true,
environmentId: true,
projectId: true,
_count: {
select: { responses: true },
},

View File

@@ -7,6 +7,7 @@ import { logger } from "@formbricks/logger";
import { TActionClassType } from "@formbricks/types/action-classes";
import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
import { getOrganizationByEnvironmentId } from "@/lib/organization/service";
import { deleteSurveyLifecycleJobs } from "@/lib/river/survey-lifecycle";
import { checkForInvalidMediaInBlocks } from "@/lib/survey/utils";
import { validateInputs } from "@/lib/utils/validate";
import { getIsQuotasEnabled } from "@/modules/ee/license-check/lib/utils";
@@ -25,6 +26,8 @@ import {
} from "./survey";
import { surveySelect } from "./survey-record";
vi.mock("server-only", () => ({}));
vi.mock("react", async (importOriginal) => {
const actual = await importOriginal<typeof import("react")>();
return {
@@ -37,6 +40,10 @@ vi.mock("@/lib/survey/utils", () => ({
checkForInvalidMediaInBlocks: vi.fn(() => ({ ok: true, data: undefined })),
}));
vi.mock("@/lib/river/survey-lifecycle", () => ({
deleteSurveyLifecycleJobs: vi.fn(),
}));
vi.mock("@/lib/utils/validate", () => ({
validateInputs: vi.fn(),
}));
@@ -76,6 +83,7 @@ vi.mock("@/lingodotdev/server", () => ({
vi.mock("@formbricks/database", () => ({
prisma: {
$transaction: vi.fn(),
survey: {
findMany: vi.fn(),
findUnique: vi.fn(),
@@ -126,9 +134,11 @@ const resetMocks = () => {
vi.mocked(prisma.survey.count).mockReset();
vi.mocked(prisma.survey.delete).mockReset();
vi.mocked(prisma.survey.create).mockReset();
vi.mocked(prisma.$transaction).mockReset();
vi.mocked(prisma.segment.delete).mockReset();
vi.mocked(prisma.segment.findFirst).mockReset();
vi.mocked(prisma.actionClass.findMany).mockReset();
vi.mocked(deleteSurveyLifecycleJobs).mockReset();
vi.mocked(logger.error).mockClear();
};
@@ -423,6 +433,7 @@ describe("getSurveysSortedByRelevance", () => {
describe("deleteSurvey", () => {
beforeEach(() => {
resetMocks();
vi.mocked(prisma.$transaction).mockImplementation(async (callback: any) => callback(prisma));
});
const mockDeletedSurveyData = {
@@ -438,6 +449,7 @@ describe("deleteSurvey", () => {
const result = await deleteSurvey(surveyId);
expect(result).toBe(true);
expect(deleteSurveyLifecycleJobs).toHaveBeenCalledWith({ tx: prisma, surveyId });
expect(prisma.survey.delete).toHaveBeenCalledWith({
where: { id: surveyId },
select: expect.objectContaining({ id: true, environmentId: true, segment: expect.anything() }),
@@ -454,19 +466,20 @@ describe("deleteSurvey", () => {
await deleteSurvey(surveyId);
expect(deleteSurveyLifecycleJobs).toHaveBeenCalledWith({ tx: prisma, surveyId });
expect(prisma.segment.delete).not.toHaveBeenCalled();
});
test("should throw DatabaseError on Prisma error", async () => {
const prismaError = makePrismaKnownError();
vi.mocked(prisma.survey.delete).mockRejectedValue(prismaError);
vi.mocked(prisma.$transaction).mockRejectedValue(prismaError);
await expect(deleteSurvey(surveyId)).rejects.toThrow(DatabaseError);
expect(logger.error).toHaveBeenCalledWith(prismaError, "Error deleting survey");
});
test("should rethrow unknown error", async () => {
const unknownError = new Error("Unknown error");
vi.mocked(prisma.survey.delete).mockRejectedValue(unknownError);
vi.mocked(prisma.$transaction).mockRejectedValue(unknownError);
await expect(deleteSurvey(surveyId)).rejects.toThrow(unknownError);
});
});

View File

@@ -8,6 +8,7 @@ import { logger } from "@formbricks/logger";
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSurveyFilterCriteria } from "@formbricks/types/surveys/types";
import { getOrganizationByEnvironmentId } from "@/lib/organization/service";
import { deleteSurveyLifecycleJobs } from "@/lib/river/survey-lifecycle";
import { checkForInvalidMediaInBlocks } from "@/lib/survey/utils";
import { validateInputs } from "@/lib/utils/validate";
import { getTranslate } from "@/lingodotdev/server";
@@ -21,7 +22,7 @@ import { mapSurveyRowToSurvey, mapSurveyRowsToSurveys, surveySelect } from "./su
export const getSurveys = reactCache(
async (
projectId: string,
environmentId: string,
limit?: number,
offset?: number,
filterCriteria?: TSurveyFilterCriteria
@@ -29,13 +30,13 @@ export const getSurveys = reactCache(
try {
if (filterCriteria?.sortBy === "relevance") {
// Call the sortByRelevance function
return await getSurveysSortedByRelevance(projectId, limit, offset ?? 0, filterCriteria);
return await getSurveysSortedByRelevance(environmentId, limit, offset ?? 0, filterCriteria);
}
// Fetch surveys normally with pagination and include response count
const surveysPrisma = await prisma.survey.findMany({
where: {
projectId,
environmentId,
...buildWhereClause(filterCriteria),
},
select: surveySelect,
@@ -57,7 +58,7 @@ export const getSurveys = reactCache(
export const getSurveysSortedByRelevance = reactCache(
async (
projectId: string,
environmentId: string,
limit?: number,
offset?: number,
filterCriteria?: TSurveyFilterCriteria
@@ -67,7 +68,7 @@ export const getSurveysSortedByRelevance = reactCache(
const inProgressSurveyCount = await prisma.survey.count({
where: {
projectId,
environmentId,
status: "inProgress",
...buildWhereClause(filterCriteria),
},
@@ -79,7 +80,7 @@ export const getSurveysSortedByRelevance = reactCache(
? []
: await prisma.survey.findMany({
where: {
projectId,
environmentId,
status: "inProgress",
...buildWhereClause(filterCriteria),
},
@@ -97,7 +98,7 @@ export const getSurveysSortedByRelevance = reactCache(
const newOffset = Math.max(0, offset - inProgressSurveyCount);
const additionalSurveys = await prisma.survey.findMany({
where: {
projectId,
environmentId,
status: { not: "inProgress" },
...buildWhereClause(filterCriteria),
},
@@ -147,39 +148,45 @@ export const getSurvey = reactCache(async (surveyId: string): Promise<TSurvey |
export const deleteSurvey = async (surveyId: string): Promise<boolean> => {
try {
const deletedSurvey = await prisma.survey.delete({
where: {
id: surveyId,
},
select: {
id: true,
environmentId: true,
segment: {
select: {
id: true,
isPrivate: true,
},
const deletedSurvey = await prisma.$transaction(async (tx) => {
await deleteSurveyLifecycleJobs({ tx, surveyId });
const removedSurvey = await tx.survey.delete({
where: {
id: surveyId,
},
type: true,
triggers: {
select: {
actionClass: {
select: {
id: true,
select: {
id: true,
environmentId: true,
segment: {
select: {
id: true,
isPrivate: true,
},
},
type: true,
triggers: {
select: {
actionClass: {
select: {
id: true,
},
},
},
},
},
},
});
if (deletedSurvey.type === "app" && deletedSurvey.segment?.isPrivate) {
await prisma.segment.delete({
where: {
id: deletedSurvey.segment.id,
},
});
}
if (removedSurvey.type === "app" && removedSurvey.segment?.isPrivate) {
await tx.segment.delete({
where: {
id: removedSurvey.segment.id,
},
});
}
return removedSurvey;
});
return true;
} catch (error) {
@@ -288,10 +295,10 @@ export const copySurveyToOtherEnvironment = async (
if (!targetProject) throw new ResourceNotFoundError("Project", targetEnvironmentId);
}
// Fetch existing action classes in target project for name conflict checks
// Fetch existing action classes in target environment for name conflict checks
const existingActionClasses = !isSameEnvironment
? await prisma.actionClass.findMany({
where: { projectId: targetProject.id },
where: { environmentId: targetEnvironmentId },
select: { name: true, type: true, key: true, noCodeConfig: true, id: true },
})
: [];
@@ -380,7 +387,6 @@ export const copySurveyToOtherEnvironment = async (
const baseActionClassData = {
name: modifiedName,
environment: { connect: { id: targetEnvironmentId } },
project: { connect: { id: targetProject.id } },
description: trigger.actionClass.description,
type: trigger.actionClass.type,
};
@@ -445,11 +451,6 @@ export const copySurveyToOtherEnvironment = async (
id: targetEnvironmentId,
},
},
project: {
connect: {
id: targetProject.id,
},
},
creator: {
connect: {
id: userId,
@@ -499,7 +500,6 @@ export const copySurveyToOtherEnvironment = async (
isPrivate: true,
filters: existingSurvey.segment.filters,
environment: { connect: { id: targetEnvironmentId } },
project: { connect: { id: targetProject.id } },
},
};
} else if (isSameEnvironment) {
@@ -509,7 +509,7 @@ export const copySurveyToOtherEnvironment = async (
where: {
title: existingSurvey.segment.title,
isPrivate: false,
projectId: targetProject.id,
environmentId: targetEnvironmentId,
},
});
@@ -521,7 +521,6 @@ export const copySurveyToOtherEnvironment = async (
isPrivate: false,
filters: existingSurvey.segment.filters,
environment: { connect: { id: targetEnvironmentId } },
project: { connect: { id: targetProject.id } },
},
};
}
@@ -577,14 +576,14 @@ export const copySurveyToOtherEnvironment = async (
}
};
/** Count surveys in a project, optionally with the same filter as getSurveys (so total matches list). */
/** Count surveys in an environment, optionally with the same filter as getSurveys (so total matches list). */
export const getSurveyCount = reactCache(
async (projectId: string, filterCriteria?: TSurveyFilterCriteria): Promise<number> => {
validateInputs([projectId, z.cuid2()]);
async (environmentId: string, filterCriteria?: TSurveyFilterCriteria): Promise<number> => {
validateInputs([environmentId, z.cuid2()]);
try {
const surveyCount = await prisma.survey.count({
where: {
projectId,
environmentId,
...buildWhereClause(filterCriteria),
},
});

View File

@@ -6,7 +6,6 @@ import { ResourceNotFoundError } from "@formbricks/types/errors";
import { DEFAULT_LOCALE, SURVEYS_PER_PAGE } from "@/lib/constants";
import { getPublicDomain } from "@/lib/getPublicUrl";
import { getUserLocale } from "@/lib/user/service";
import { getProjectIdFromEnvironmentId } from "@/lib/utils/helper";
import { getTranslate } from "@/lingodotdev/server";
import { getEnvironmentAuth } from "@/modules/environments/lib/utils";
import { getProjectWithTeamIdsByEnvironmentId } from "@/modules/survey/lib/project";
@@ -44,8 +43,7 @@ export const SurveysPage = async ({ params: paramsProps }: SurveyTemplateProps)
return redirect(`/environments/${params.environmentId}/settings/billing`);
}
const projectId = await getProjectIdFromEnvironmentId(params.environmentId);
const surveyCount = await getSurveyCount(projectId);
const surveyCount = await getSurveyCount(params.environmentId);
const currentProjectChannel = project.config.channel ?? null;
const locale = (await getUserLocale(session.user.id)) ?? DEFAULT_LOCALE;

View File

@@ -9,7 +9,6 @@ export const getMinimalSurvey = (t: TFunction): TSurvey => ({
name: "Minimal Survey",
type: "app",
environmentId: "someEnvId1",
projectId: null,
createdBy: null,
status: "draft",
displayOption: "displayOnce",

View File

@@ -42,14 +42,14 @@ export interface ButtonProps
}
const Button = React.forwardRef<HTMLButtonElement, ButtonProps>(
({ className, variant, size, loading, asChild = false, disabled, children, ...props }, ref) => {
({ className, variant, size, loading, asChild = false, children, ...props }, ref) => {
const Comp = asChild ? Slot : "button";
return (
<Comp
className={cn(buttonVariants({ variant, size, loading, className }))}
disabled={loading}
ref={ref}
{...props}
disabled={loading || disabled}>
{...props}>
{loading ? (
<>
<Loader2 className="animate-spin" />

View File

@@ -1,6 +1,6 @@
# formbricks
![Version: 0.0.0-dev](https://img.shields.io/badge/Version-0.0.0--dev-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 3.7.0](https://img.shields.io/badge/AppVersion-3.7.0-informational?style=flat-square)
![Version: 0.0.0-dev](https://img.shields.io/badge/Version-0.0.0--dev-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square)
A Helm chart for Formbricks with PostgreSQL, Redis
@@ -8,178 +8,150 @@ A Helm chart for Formbricks with PostgreSQL, Redis
## Maintainers
| Name | Email | Url |
| ---- | ------ | --- |
| Formbricks | <info@formbricks.com> | |
| Name | Email | Url |
| ---------- | --------------------- | --- |
| Formbricks | <info@formbricks.com> | |
## Requirements
| Repository | Name | Version |
|------------|------|---------|
| Repository | Name | Version |
| ---------------------------------------- | ---------- | ------- |
| oci://registry-1.docker.io/bitnamicharts | postgresql | 16.4.16 |
| oci://registry-1.docker.io/bitnamicharts | redis | 20.11.2 |
| oci://registry-1.docker.io/bitnamicharts | redis | 20.11.2 |
## Values
| Key | Type | Default | Description |
|-----|------|---------|-------------|
| autoscaling.additionalLabels | object | `{}` | |
| autoscaling.annotations | object | `{}` | |
| autoscaling.behavior.scaleDown.policies[0].periodSeconds | int | `120` | |
| autoscaling.behavior.scaleDown.policies[0].type | string | `"Pods"` | |
| autoscaling.behavior.scaleDown.policies[0].value | int | `1` | |
| autoscaling.behavior.scaleDown.stabilizationWindowSeconds | int | `300` | |
| autoscaling.behavior.scaleUp.policies[0].periodSeconds | int | `60` | |
| autoscaling.behavior.scaleUp.policies[0].type | string | `"Pods"` | |
| autoscaling.behavior.scaleUp.policies[0].value | int | `2` | |
| autoscaling.behavior.scaleUp.stabilizationWindowSeconds | int | `60` | |
| autoscaling.enabled | bool | `true` | |
| autoscaling.maxReplicas | int | `10` | |
| autoscaling.metrics[0].resource.name | string | `"cpu"` | |
| autoscaling.metrics[0].resource.target.averageUtilization | int | `60` | |
| autoscaling.metrics[0].resource.target.type | string | `"Utilization"` | |
| autoscaling.metrics[0].type | string | `"Resource"` | |
| autoscaling.metrics[1].resource.name | string | `"memory"` | |
| autoscaling.metrics[1].resource.target.averageUtilization | int | `60` | |
| autoscaling.metrics[1].resource.target.type | string | `"Utilization"` | |
| autoscaling.metrics[1].type | string | `"Resource"` | |
| autoscaling.minReplicas | int | `1` | |
| componentOverride | string | `""` | |
| deployment.additionalLabels | object | `{}` | |
| deployment.additionalPodAnnotations | object | `{}` | |
| deployment.additionalPodLabels | object | `{}` | |
| deployment.affinity | object | `{}` | |
| deployment.annotations | object | `{}` | |
| deployment.args | list | `[]` | |
| deployment.command | list | `[]` | |
| deployment.containerSecurityContext.readOnlyRootFilesystem | bool | `true` | |
| deployment.containerSecurityContext.runAsNonRoot | bool | `true` | |
| deployment.env | object | `{}` | |
| deployment.envFrom | string | `nil` | |
| deployment.image.digest | string | `""` | |
| deployment.image.pullPolicy | string | `"IfNotPresent"` | |
| deployment.image.repository | string | `"ghcr.io/formbricks/formbricks"` | |
| deployment.image.tag | string | `""` | |
| deployment.imagePullSecrets | string | `""` | |
| deployment.nodeSelector | object | `{}` | |
| deployment.ports.http.containerPort | int | `3000` | |
| deployment.ports.http.exposed | bool | `true` | |
| deployment.ports.http.protocol | string | `"TCP"` | |
| deployment.ports.metrics.containerPort | int | `9464` | |
| deployment.ports.metrics.exposed | bool | `true` | |
| deployment.ports.metrics.protocol | string | `"TCP"` | |
| deployment.probes.livenessProbe.failureThreshold | int | `5` | |
| deployment.probes.livenessProbe.httpGet.path | string | `"/health"` | |
| deployment.probes.livenessProbe.httpGet.port | int | `3000` | |
| deployment.probes.livenessProbe.initialDelaySeconds | int | `10` | |
| deployment.probes.livenessProbe.periodSeconds | int | `10` | |
| deployment.probes.livenessProbe.successThreshold | int | `1` | |
| deployment.probes.livenessProbe.timeoutSeconds | int | `5` | |
| deployment.probes.readinessProbe.failureThreshold | int | `5` | |
| deployment.probes.readinessProbe.httpGet.path | string | `"/health"` | |
| deployment.probes.readinessProbe.httpGet.port | int | `3000` | |
| deployment.probes.readinessProbe.initialDelaySeconds | int | `10` | |
| deployment.probes.readinessProbe.periodSeconds | int | `10` | |
| deployment.probes.readinessProbe.successThreshold | int | `1` | |
| deployment.probes.readinessProbe.timeoutSeconds | int | `5` | |
| deployment.probes.startupProbe.failureThreshold | int | `30` | |
| deployment.probes.startupProbe.periodSeconds | int | `10` | |
| deployment.probes.startupProbe.tcpSocket.port | int | `3000` | |
| deployment.reloadOnChange | bool | `false` | |
| deployment.replicas | int | `1` | |
| deployment.resources.limits.memory | string | `"2Gi"` | |
| deployment.resources.requests.cpu | string | `"1"` | |
| deployment.resources.requests.memory | string | `"1Gi"` | |
| deployment.revisionHistoryLimit | int | `2` | |
| deployment.securityContext | object | `{}` | |
| deployment.strategy.type | string | `"RollingUpdate"` | |
| deployment.tolerations | list | `[]` | |
| deployment.topologySpreadConstraints | list | `[]` | |
| enterprise.enabled | bool | `false` | |
| enterprise.licenseKey | string | `""` | |
| externalSecret.enabled | bool | `false` | |
| externalSecret.files | object | `{}` | |
| externalSecret.refreshInterval | string | `"1h"` | |
| externalSecret.secretStore.kind | string | `"ClusterSecretStore"` | |
| externalSecret.secretStore.name | string | `"aws-secrets-manager"` | |
| formbricks.publicUrl | string | `""` | |
| formbricks.webappUrl | string | `""` | |
| hub.enabled | bool | `true` | |
| hub.env | object | `{}` | |
| hub.existingSecret | string | `""` | |
| hub.image.pullPolicy | string | `"IfNotPresent"` | |
| hub.image.repository | string | `"ghcr.io/formbricks/hub"` | |
| hub.image.tag | string | `"1.0.0"` | |
| hub.migration.activeDeadlineSeconds | int | `900` | |
| hub.migration.backoffLimit | int | `3` | |
| hub.migration.ttlSecondsAfterFinished | int | `300` | |
| hub.replicas | int | `1` | |
| hub.resources.limits.memory | string | `"512Mi"` | |
| hub.resources.requests.cpu | string | `"100m"` | |
| hub.resources.requests.memory | string | `"256Mi"` | |
| ingress.annotations | object | `{}` | |
| ingress.enabled | bool | `false` | |
| ingress.hosts[0].host | string | `"k8s.formbricks.com"` | |
| ingress.hosts[0].paths[0].path | string | `"/"` | |
| ingress.hosts[0].paths[0].pathType | string | `"Prefix"` | |
| ingress.hosts[0].paths[0].serviceName | string | `"formbricks"` | |
| ingress.ingressClassName | string | `"alb"` | |
| migration.annotations | object | `{}` | |
| migration.backoffLimit | int | `3` | |
| migration.enabled | bool | `true` | |
| migration.resources.limits.memory | string | `"512Mi"` | |
| migration.resources.requests.cpu | string | `"100m"` | |
| migration.resources.requests.memory | string | `"256Mi"` | |
| migration.ttlSecondsAfterFinished | int | `300` | |
| nameOverride | string | `""` | |
| partOfOverride | string | `""` | |
| pdb.additionalLabels | object | `{}` | |
| pdb.annotations | object | `{}` | |
| pdb.enabled | bool | `true` | |
| pdb.minAvailable | int | `1` | |
| postgresql.auth.database | string | `"formbricks"` | |
| postgresql.auth.existingSecret | string | `"formbricks-app-secrets"` | |
| postgresql.auth.secretKeys.adminPasswordKey | string | `"POSTGRES_ADMIN_PASSWORD"` | |
| postgresql.auth.secretKeys.userPasswordKey | string | `"POSTGRES_USER_PASSWORD"` | |
| postgresql.auth.username | string | `"formbricks"` | |
| postgresql.enabled | bool | `true` | |
| postgresql.externalDatabaseUrl | string | `""` | |
| postgresql.fullnameOverride | string | `"formbricks-postgresql"` | |
| postgresql.global.security.allowInsecureImages | bool | `true` | |
| postgresql.image.repository | string | `"pgvector/pgvector"` | |
| postgresql.image.tag | string | `"pg17"` | |
| postgresql.primary.containerSecurityContext.enabled | bool | `true` | |
| postgresql.primary.containerSecurityContext.readOnlyRootFilesystem | bool | `false` | |
| postgresql.primary.containerSecurityContext.runAsUser | int | `1001` | |
| postgresql.primary.networkPolicy.enabled | bool | `false` | |
| postgresql.primary.persistence.enabled | bool | `true` | |
| postgresql.primary.persistence.size | string | `"10Gi"` | |
| postgresql.primary.podSecurityContext.enabled | bool | `true` | |
| postgresql.primary.podSecurityContext.fsGroup | int | `1001` | |
| postgresql.primary.podSecurityContext.runAsUser | int | `1001` | |
| rbac.enabled | bool | `false` | |
| rbac.serviceAccount.additionalLabels | object | `{}` | |
| rbac.serviceAccount.annotations | object | `{}` | |
| rbac.serviceAccount.enabled | bool | `false` | |
| rbac.serviceAccount.name | string | `""` | |
| redis.architecture | string | `"standalone"` | |
| redis.auth.enabled | bool | `true` | |
| redis.auth.existingSecret | string | `"formbricks-app-secrets"` | |
| redis.auth.existingSecretPasswordKey | string | `"REDIS_PASSWORD"` | |
| redis.enabled | bool | `true` | |
| redis.externalRedisUrl | string | `""` | |
| redis.fullnameOverride | string | `"formbricks-redis"` | |
| redis.master.persistence.enabled | bool | `true` | |
| redis.networkPolicy.enabled | bool | `false` | |
| secret.enabled | bool | `true` | |
| service.additionalLabels | object | `{}` | |
| service.annotations | object | `{}` | |
| service.enabled | bool | `true` | |
| service.ports | list | `[]` | |
| service.type | string | `"ClusterIP"` | |
| serviceMonitor.additionalLabels | string | `nil` | |
| serviceMonitor.annotations | string | `nil` | |
| serviceMonitor.enabled | bool | `true` | |
| serviceMonitor.endpoints[0].interval | string | `"5s"` | |
| serviceMonitor.endpoints[0].path | string | `"/metrics"` | |
| serviceMonitor.endpoints[0].port | string | `"metrics"` | |
| Key | Type | Default | Description |
| ------------------------------------------------------------------ | ------ | --------------------------------- | ----------- |
| autoscaling.additionalLabels | object | `{}` | |
| autoscaling.annotations | object | `{}` | |
| autoscaling.enabled | bool | `true` | |
| autoscaling.maxReplicas | int | `10` | |
| autoscaling.metrics[0].resource.name | string | `"cpu"` | |
| autoscaling.metrics[0].resource.target.averageUtilization | int | `60` | |
| autoscaling.metrics[0].resource.target.type | string | `"Utilization"` | |
| autoscaling.metrics[0].type | string | `"Resource"` | |
| autoscaling.metrics[1].resource.name | string | `"memory"` | |
| autoscaling.metrics[1].resource.target.averageUtilization | int | `60` | |
| autoscaling.metrics[1].resource.target.type | string | `"Utilization"` | |
| autoscaling.metrics[1].type | string | `"Resource"` | |
| autoscaling.minReplicas | int | `1` | |
| componentOverride | string | `""` | |
| cronJob.enabled | bool | `false` | |
| cronJob.jobs | object | `{}` | |
| deployment.additionalLabels | object | `{}` | |
| deployment.additionalPodAnnotations | object | `{}` | |
| deployment.additionalPodLabels | object | `{}` | |
| deployment.affinity | object | `{}` | |
| deployment.annotations | object | `{}` | |
| deployment.args | list | `[]` | |
| deployment.command | list | `[]` | |
| deployment.containerSecurityContext.readOnlyRootFilesystem | bool | `true` | |
| deployment.containerSecurityContext.runAsNonRoot | bool | `true` | |
| deployment.env.EMAIL_VERIFICATION_DISABLED.value | string | `"1"` | |
| deployment.env.PASSWORD_RESET_DISABLED.value | string | `"1"` | |
| deployment.envFrom | string | `nil` | |
| deployment.image.digest | string | `""` | |
| deployment.image.pullPolicy | string | `"IfNotPresent"` | |
| deployment.image.repository | string | `"ghcr.io/formbricks/formbricks"` | |
| deployment.imagePullSecrets | string | `""` | |
| deployment.nodeSelector | object | `{}` | |
| deployment.ports.http.containerPort | int | `3000` | |
| deployment.ports.http.exposed | bool | `true` | |
| deployment.ports.http.protocol | string | `"TCP"` | |
| deployment.ports.metrics.containerPort | int | `9464` | |
| deployment.ports.metrics.exposed | bool | `true` | |
| deployment.ports.metrics.protocol | string | `"TCP"` | |
| deployment.probes.livenessProbe.failureThreshold | int | `5` | |
| deployment.probes.livenessProbe.httpGet.path | string | `"/health"` | |
| deployment.probes.livenessProbe.httpGet.port | int | `3000` | |
| deployment.probes.livenessProbe.initialDelaySeconds | int | `10` | |
| deployment.probes.livenessProbe.periodSeconds | int | `10` | |
| deployment.probes.livenessProbe.successThreshold | int | `1` | |
| deployment.probes.livenessProbe.timeoutSeconds | int | `5` | |
| deployment.probes.readinessProbe.failureThreshold | int | `5` | |
| deployment.probes.readinessProbe.httpGet.path | string | `"/health"` | |
| deployment.probes.readinessProbe.httpGet.port | int | `3000` | |
| deployment.probes.readinessProbe.initialDelaySeconds | int | `10` | |
| deployment.probes.readinessProbe.periodSeconds | int | `10` | |
| deployment.probes.readinessProbe.successThreshold | int | `1` | |
| deployment.probes.readinessProbe.timeoutSeconds | int | `5` | |
| deployment.probes.startupProbe.failureThreshold | int | `30` | |
| deployment.probes.startupProbe.periodSeconds | int | `10` | |
| deployment.probes.startupProbe.tcpSocket.port | int | `3000` | |
| deployment.reloadOnChange | bool | `false` | |
| deployment.replicas | int | `1` | |
| deployment.resources.limits.memory | string | `"2Gi"` | |
| deployment.resources.requests.cpu | string | `"1"` | |
| deployment.resources.requests.memory | string | `"1Gi"` | |
| deployment.revisionHistoryLimit | int | `2` | |
| deployment.securityContext | object | `{}` | |
| deployment.strategy.type | string | `"RollingUpdate"` | |
| deployment.tolerations | list | `[]` | |
| deployment.topologySpreadConstraints | list | `[]` | |
| enterprise.enabled | bool | `false` | |
| enterprise.licenseKey | string | `""` | |
| externalSecret.enabled | bool | `false` | |
| externalSecret.files | object | `{}` | |
| externalSecret.refreshInterval | string | `"1h"` | |
| externalSecret.secretStore.kind | string | `"ClusterSecretStore"` | |
| externalSecret.secretStore.name | string | `"aws-secrets-manager"` | |
| ingress.annotations | object | `{}` | |
| ingress.enabled | bool | `false` | |
| ingress.hosts[0].host | string | `"k8s.formbricks.com"` | |
| ingress.hosts[0].paths[0].path | string | `"/"` | |
| ingress.hosts[0].paths[0].pathType | string | `"Prefix"` | |
| ingress.hosts[0].paths[0].serviceName | string | `"formbricks"` | |
| ingress.ingressClassName | string | `"alb"` | |
| nameOverride | string | `""` | |
| partOfOverride | string | `""` | |
| postgresql.auth.database | string | `"formbricks"` | |
| postgresql.auth.existingSecret | string | `"formbricks-app-secrets"` | |
| postgresql.auth.secretKeys.adminPasswordKey | string | `"POSTGRES_ADMIN_PASSWORD"` | |
| postgresql.auth.secretKeys.userPasswordKey | string | `"POSTGRES_USER_PASSWORD"` | |
| postgresql.auth.username | string | `"formbricks"` | |
| postgresql.enabled | bool | `true` | |
| postgresql.externalDatabaseUrl | string | `""` | |
| postgresql.fullnameOverride | string | `"formbricks-postgresql"` | |
| postgresql.global.security.allowInsecureImages | bool | `true` | |
| postgresql.image.repository | string | `"pgvector/pgvector"` | |
| postgresql.image.tag | string | `"0.8.0-pg17"` | |
| postgresql.primary.containerSecurityContext.enabled | bool | `true` | |
| postgresql.primary.containerSecurityContext.readOnlyRootFilesystem | bool | `false` | |
| postgresql.primary.containerSecurityContext.runAsUser | int | `1001` | |
| postgresql.primary.networkPolicy.enabled | bool | `false` | |
| postgresql.primary.persistence.enabled | bool | `true` | |
| postgresql.primary.persistence.size | string | `"10Gi"` | |
| postgresql.primary.podSecurityContext.enabled | bool | `true` | |
| postgresql.primary.podSecurityContext.fsGroup | int | `1001` | |
| postgresql.primary.podSecurityContext.runAsUser | int | `1001` | |
| rbac.enabled | bool | `false` | |
| rbac.serviceAccount.additionalLabels | object | `{}` | |
| rbac.serviceAccount.annotations | object | `{}` | |
| rbac.serviceAccount.enabled | bool | `false` | |
| rbac.serviceAccount.name | string | `""` | |
| redis.architecture | string | `"standalone"` | |
| redis.auth.enabled | bool | `true` | |
| redis.auth.existingSecret | string | `"formbricks-app-secrets"` | |
| redis.auth.existingSecretPasswordKey | string | `"REDIS_PASSWORD"` | |
| redis.enabled | bool | `true` | |
| redis.externalRedisUrl | string | `""` | |
| redis.fullnameOverride | string | `"formbricks-redis"` | |
| redis.master.persistence.enabled | bool | `true` | |
| redis.networkPolicy.enabled | bool | `false` | |
| secret.enabled | bool | `true` | |
| service.additionalLabels | object | `{}` | |
| service.annotations | object | `{}` | |
| service.enabled | bool | `true` | |
| service.ports | list | `[]` | |
| service.type | string | `"ClusterIP"` | |
| serviceMonitor.additionalLabels | string | `nil` | |
| serviceMonitor.annotations | string | `nil` | |
| serviceMonitor.enabled | bool | `true` | |
| serviceMonitor.endpoints[0].interval | string | `"5s"` | |
| serviceMonitor.endpoints[0].path | string | `"/metrics"` | |
| serviceMonitor.endpoints[0].port | string | `"metrics"` | |
---
Autogenerated from chart metadata using [helm-docs v1.14.2](https://github.com/norwoodj/helm-docs/releases/v1.14.2)

View File

@@ -8,15 +8,6 @@ It also truncates the name to a maximum of 63 characters and removes trailing hy
{{- end }}
{{/*
Hub resource name: base name truncated to 59 chars then "-hub" so the suffix is never lost (63 char limit).
*/}}
{{- define "formbricks.hubname" -}}
{{- $base := include "formbricks.name" . | trunc 59 | trimSuffix "-" }}
{{- printf "%s-hub" $base | trimSuffix "-" }}
{{- end }}
{{/*
Define the application version to be used in labels.
The version is taken from `.Values.deployment.image.tag` if provided, otherwise it defaults to `.Chart.Version`.
@@ -94,17 +85,9 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- default .Release.Namespace .Values.namespaceOverride -}}
{{- end -}}
{{- define "formbricks.appSecretName" -}}
{{- printf "%s-app-secrets" (include "formbricks.name" .) -}}
{{- end }}
{{- define "formbricks.hubSecretName" -}}
{{- default (include "formbricks.appSecretName" .) .Values.hub.existingSecret -}}
{{- end }}
{{- define "formbricks.postgresAdminPassword" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- if and $secret (index $secret.data "POSTGRES_ADMIN_PASSWORD") }}
{{- index $secret.data "POSTGRES_ADMIN_PASSWORD" | b64dec -}}
{{- else }}
@@ -113,7 +96,7 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.postgresUserPassword" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- if and $secret (index $secret.data "POSTGRES_USER_PASSWORD") }}
{{- index $secret.data "POSTGRES_USER_PASSWORD" | b64dec -}}
{{- else }}
@@ -122,7 +105,7 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.redisPassword" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- if and $secret (index $secret.data "REDIS_PASSWORD") }}
{{- index $secret.data "REDIS_PASSWORD" | b64dec -}}
{{- else }}
@@ -131,7 +114,7 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.cronSecret" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- if $secret }}
{{- index $secret.data "CRON_SECRET" | b64dec -}}
{{- else }}
@@ -140,7 +123,7 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.encryptionKey" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- if $secret }}
{{- index $secret.data "ENCRYPTION_KEY" | b64dec -}}
{{- else }}
@@ -149,19 +132,10 @@ If `namespaceOverride` is provided, it will be used; otherwise, it defaults to `
{{- end }}
{{- define "formbricks.nextAuthSecret" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (printf "%s-app-secrets" (include "formbricks.name" .))) }}
{{- if $secret }}
{{- index $secret.data "NEXTAUTH_SECRET" | b64dec -}}
{{- else }}
{{- randAlphaNum 32 -}}
{{- end -}}
{{- end }}
{{- define "formbricks.hubApiKey" -}}
{{- $secret := (lookup "v1" "Secret" .Release.Namespace (include "formbricks.appSecretName" .)) }}
{{- if and $secret (index $secret.data "HUB_API_KEY") }}
{{- index $secret.data "HUB_API_KEY" | b64dec -}}
{{- else }}
{{- randAlphaNum 32 -}}
{{- end -}}
{{- end }}

View File

@@ -131,10 +131,6 @@ spec:
- name: SKIP_STARTUP_MIGRATION
value: "true"
{{- end }}
{{- if not (hasKey .Values.deployment.env "HUB_API_URL") }}
- name: HUB_API_URL
value: "http://{{ include "formbricks.hubname" . }}:8080"
{{- end }}
{{- range $key, $value := .Values.deployment.env }}
- name: {{ include "formbricks.tplvalues.render" ( dict "value" $key "context" $ ) }}
{{- if kindIs "string" $value }}

View File

@@ -1,100 +0,0 @@
{{- if not .Values.hub.enabled }}
{{- fail "hub.enabled=false is not supported in Formbricks 5; Hub is mandatory." }}
{{- end }}
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "formbricks.hubname" . }}
labels:
helm.sh/chart: {{ include "formbricks.chart" . }}
app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/component: hub
app.kubernetes.io/managed-by: {{ .Release.Service }}
app.kubernetes.io/part-of: {{ .Values.partOfOverride | default (include "formbricks.name" .) }}
spec:
replicas: {{ .Values.hub.replicas | default 1 }}
selector:
matchLabels:
app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
template:
metadata:
labels:
app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/component: hub
spec:
{{- if .Values.deployment.imagePullSecrets }}
imagePullSecrets:
{{- toYaml .Values.deployment.imagePullSecrets | nindent 8 }}
{{- end }}
initContainers:
- name: hub-migrate
image: {{ .Values.hub.image.repository }}:{{ .Values.hub.image.tag | default "latest" }}
imagePullPolicy: {{ .Values.hub.image.pullPolicy }}
securityContext:
readOnlyRootFilesystem: true
runAsNonRoot: true
command:
- sh
- -c
- |
/usr/local/bin/goose -dir /app/migrations postgres "$DATABASE_URL" up && \
/usr/local/bin/river migrate-up --database-url "$DATABASE_URL"
envFrom:
- secretRef:
name: {{ include "formbricks.hubSecretName" . }}
containers:
- name: hub
image: {{ .Values.hub.image.repository }}:{{ .Values.hub.image.tag | default "latest" }}
imagePullPolicy: {{ .Values.hub.image.pullPolicy }}
securityContext:
readOnlyRootFilesystem: true
runAsNonRoot: true
ports:
- name: http
containerPort: 8080
protocol: TCP
envFrom:
- secretRef:
name: {{ include "formbricks.hubSecretName" . }}
env:
- name: API_KEY
valueFrom:
secretKeyRef:
name: {{ include "formbricks.hubSecretName" . }}
key: HUB_API_KEY
{{- range $key, $value := .Values.hub.env }}
- name: {{ $key }}
value: {{ $value | quote }}
{{- end }}
{{- if .Values.hub.resources }}
resources:
{{- toYaml .Values.hub.resources | nindent 12 }}
{{- end }}
readinessProbe:
httpGet:
path: /health
port: 8080
initialDelaySeconds: 10
periodSeconds: 10
failureThreshold: 5
timeoutSeconds: 5
successThreshold: 1
livenessProbe:
httpGet:
path: /health
port: 8080
initialDelaySeconds: 10
periodSeconds: 10
failureThreshold: 5
timeoutSeconds: 5
successThreshold: 1
startupProbe:
httpGet:
path: /health
port: 8080
failureThreshold: 30
periodSeconds: 10

View File

@@ -1,54 +0,0 @@
{{- if not .Values.hub.enabled }}
{{- fail "hub.enabled=false is not supported in Formbricks 5; Hub is mandatory." }}
{{- end }}
---
apiVersion: batch/v1
kind: Job
metadata:
name: {{ include "formbricks.hubname" . }}-migration
labels:
helm.sh/chart: {{ include "formbricks.chart" . }}
app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/component: hub-migration
app.kubernetes.io/managed-by: {{ .Release.Service }}
annotations:
helm.sh/hook: pre-upgrade
helm.sh/hook-weight: "-5"
helm.sh/hook-delete-policy: before-hook-creation,hook-succeeded
spec:
ttlSecondsAfterFinished: {{ .Values.hub.migration.ttlSecondsAfterFinished | default 300 }}
backoffLimit: {{ .Values.hub.migration.backoffLimit | default 3 }}
activeDeadlineSeconds: {{ .Values.hub.migration.activeDeadlineSeconds | default 900 }}
template:
metadata:
labels:
app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/component: hub-migration
spec:
restartPolicy: Never
securityContext:
runAsNonRoot: true
runAsUser: 1000
{{- if .Values.deployment.imagePullSecrets }}
imagePullSecrets:
{{- toYaml .Values.deployment.imagePullSecrets | nindent 8 }}
{{- end }}
containers:
- name: hub-migrate
image: {{ .Values.hub.image.repository }}:{{ .Values.hub.image.tag | default "latest" }}
imagePullPolicy: {{ .Values.hub.image.pullPolicy }}
securityContext:
readOnlyRootFilesystem: true
capabilities:
drop: ["ALL"]
command:
- sh
- -c
- |
/usr/local/bin/goose -dir /app/migrations postgres "$DATABASE_URL" up && \
/usr/local/bin/river migrate-up --database-url "$DATABASE_URL"
envFrom:
- secretRef:
name: {{ include "formbricks.hubSecretName" . }}

View File

@@ -1,25 +0,0 @@
{{- if not .Values.hub.enabled }}
{{- fail "hub.enabled=false is not supported in Formbricks 5; Hub is mandatory." }}
{{- end }}
---
apiVersion: v1
kind: Service
metadata:
name: {{ include "formbricks.hubname" . }}
labels:
helm.sh/chart: {{ include "formbricks.chart" . }}
app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/component: hub
app.kubernetes.io/managed-by: {{ .Release.Service }}
app.kubernetes.io/part-of: {{ .Values.partOfOverride | default (include "formbricks.name" .) }}
spec:
type: ClusterIP
selector:
app.kubernetes.io/name: {{ include "formbricks.hubname" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
ports:
- name: http
port: 8080
targetPort: 8080
protocol: TCP

View File

@@ -4,13 +4,11 @@
{{- $postgresUserPassword := include "formbricks.postgresUserPassword" . }}
{{- $redisPassword := include "formbricks.redisPassword" . }}
{{- $webappUrl := required "formbricks.webappUrl is required. Set it to your Formbricks instance URL (e.g., https://formbricks.example.com)" .Values.formbricks.webappUrl }}
{{- $hubApiKey := include "formbricks.hubApiKey" . }}
{{- $includeHubApiKeyInAppSecret := or (not .Values.hub.existingSecret) (eq .Values.hub.existingSecret (include "formbricks.appSecretName" .)) }}
---
apiVersion: v1
kind: Secret
metadata:
name: {{ include "formbricks.appSecretName" . }}
name: {{ template "formbricks.name" . }}-app-secrets
labels:
{{- include "formbricks.labels" . | nindent 4 }}
data:
@@ -30,9 +28,6 @@ data:
{{- else }}
DATABASE_URL: {{ .Values.postgresql.externalDatabaseUrl | b64enc }}
{{- end }}
{{- if $includeHubApiKeyInAppSecret }}
HUB_API_KEY: {{ $hubApiKey | b64enc }}
{{- end }}
CRON_SECRET: {{ include "formbricks.cronSecret" . | b64enc }}
ENCRYPTION_KEY: {{ include "formbricks.encryptionKey" . | b64enc }}
NEXTAUTH_SECRET: {{ include "formbricks.nextAuthSecret" . | b64enc }}

View File

@@ -340,43 +340,6 @@ serviceMonitor:
path: /metrics
port: metrics
##########################################################
# Hub API Configuration
# Formbricks Hub image: ghcr.io/formbricks/hub
##########################################################
hub:
# Hub is mandatory in Formbricks 5. Keep this enabled.
enabled: true
replicas: 1
image:
repository: "ghcr.io/formbricks/hub"
# Pin to a semver tag for reproducible deployments; update on each Hub release.
tag: "1.0.0"
pullPolicy: IfNotPresent
# Optional override for the secret Hub reads from.
# Defaults to the generated app secret (<release>-app-secrets), which contains DATABASE_URL and HUB_API_KEY.
# If you set this, the custom secret must provide DATABASE_URL and HUB_API_KEY.
existingSecret: ""
# Optional env vars (non-secret). Use existingSecret for secret values such as DATABASE_URL and HUB_API_KEY.
env: {}
# Upgrade migration job runs goose + river before Helm upgrades Hub resources.
# Fresh installs run the same migrations through the Hub deployment init container.
migration:
ttlSecondsAfterFinished: 300
backoffLimit: 3
activeDeadlineSeconds: 900
resources:
limits:
memory: 512Mi
requests:
memory: 256Mi
cpu: "100m"
##########################################################
# PostgreSQL Configuration
##########################################################
@@ -389,7 +352,7 @@ postgresql:
fullnameOverride: "formbricks-postgresql"
image:
repository: pgvector/pgvector
tag: pg17
tag: 0.8.0-pg17
auth:
username: formbricks
database: formbricks

View File

@@ -1,24 +1,14 @@
services:
# PostgreSQL must load the vector library so Hub (and Formbricks) can use the pgvector extension.
postgres:
image: pgvector/pgvector:pg18
image: pgvector/pgvector:pg17
volumes:
- postgres:/var/lib/postgresql
- postgres:/var/lib/postgresql/data
environment:
- POSTGRES_DB=postgres
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres
ports:
- 5432:5432
command: >
postgres
-c shared_preload_libraries=vector
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres -d postgres || exit 1"]
interval: 5s
timeout: 3s
retries: 30
start_period: 10s
mailhog:
image: arjenz/mailhog
@@ -46,40 +36,6 @@ services:
volumes:
- minio-data:/data
# Run Hub DB migrations (goose + river) before the API starts. Idempotent; runs on every compose up.
hub-migrate:
image: ghcr.io/formbricks/hub:latest
restart: "no"
entrypoint: ["sh", "-c"]
command:
[
'if [ -x /usr/local/bin/goose ] && [ -x /usr/local/bin/river ]; then /usr/local/bin/goose -dir /app/migrations postgres "$$DATABASE_URL" up && /usr/local/bin/river migrate-up --database-url "$$DATABASE_URL"; else echo ''Migration tools (goose/river) not in image.''; exit 1; fi',
]
environment:
DATABASE_URL: postgresql://postgres:postgres@postgres:5432/postgres?sslmode=disable
depends_on:
postgres:
condition: service_healthy
# Formbricks Hub API (ghcr.io/formbricks/hub). Shares the same Postgres database as Formbricks by default.
hub:
image: ghcr.io/formbricks/hub:latest
depends_on:
hub-migrate:
condition: service_completed_successfully
ports:
- "8080:8080"
environment:
API_KEY: ${HUB_API_KEY:-dev-api-key}
DATABASE_URL: postgresql://postgres:postgres@postgres:5432/postgres?sslmode=disable
# Explicit Postgres env so migrations and any libpq fallback use the service host, not localhost
PGHOST: postgres
PGPORT: "5432"
PGUSER: postgres
PGPASSWORD: postgres
PGDATABASE: postgres
PGSSLMODE: disable
volumes:
postgres:
driver: local

View File

@@ -27,13 +27,3 @@ The script will prompt you for the following information:
3. **Domain Name**: Enter the domain name that Traefik will use to create the SSL certificate and forward requests to Formbricks.
That's it! After running the command and providing the required information, visit the domain name you entered, and you should see the Formbricks home wizard!
## Formbricks Hub
The stack includes the [Formbricks Hub](https://github.com/formbricks/hub) API (`ghcr.io/formbricks/hub`). Hub shares the same database as Formbricks by default.
- **Migrations**: A `hub-migrate` service runs Hub's database migrations (goose + river) before the Hub API starts. It runs on every `docker compose up` and is idempotent.
- **Production** (`docker/docker-compose.yml`): Set `HUB_API_KEY` (required). `HUB_API_URL` defaults to `http://hub:8080` so the Formbricks app can reach Hub inside the compose network. Override `HUB_DATABASE_URL` only if you want Hub to use a separate database.
- **Development** (`docker-compose.dev.yml`): Hub uses the same Postgres database; `HUB_API_KEY` defaults to `dev-api-key` (override with `HUB_API_KEY`) and the local Hub URL is `http://localhost:8080`.
In development, Hub is exposed locally on port **8080**. In production Docker Compose, Hub stays internal to the compose network and is reached via `http://hub:8080`.

View File

@@ -29,15 +29,6 @@ x-environment: &environment
# To use external Redis/Valkey: remove the redis service below and update this URL
REDIS_URL: redis://redis:6379
# Formbricks Hub (port 8080): API key required. Use e.g. openssl rand -hex 32
HUB_API_KEY:
# Base URL the Formbricks app uses to reach Hub. Defaults to the internal Hub service.
HUB_API_URL: ${HUB_API_URL:-http://hub:8080}
# Hub database URL (optional). Default: same Postgres as Formbricks. Set only if Hub uses a separate DB.
# HUB_DATABASE_URL:
# Set the minimum log level(debug, info, warn, error, fatal)
# LOG_LEVEL: info
@@ -211,7 +202,7 @@ x-environment: &environment
services:
postgres:
restart: always
image: pgvector/pgvector:pg18
image: pgvector/pgvector:pg17
volumes:
- postgres:/var/lib/postgresql/data
environment:
@@ -254,31 +245,6 @@ services:
- ./saml-connection:/home/nextjs/apps/web/saml-connection
<<: *environment
# Run Hub DB migrations (goose + river) before the API starts. Uses same image; migrations are idempotent.
hub-migrate:
image: ghcr.io/formbricks/hub:latest
restart: "no"
entrypoint: ["sh", "-c"]
command: ["if [ -x /usr/local/bin/goose ] && [ -x /usr/local/bin/river ]; then /usr/local/bin/goose -dir /app/migrations postgres \"$$DATABASE_URL\" up && /usr/local/bin/river migrate-up --database-url \"$$DATABASE_URL\"; else echo 'Migration tools (goose/river) not in image.'; exit 1; fi"]
environment:
DATABASE_URL: ${HUB_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/formbricks?sslmode=disable}
depends_on:
postgres:
condition: service_healthy
# Formbricks Hub API (ghcr.io/formbricks/hub). Set HUB_API_KEY. By default shares the Formbricks database; set HUB_DATABASE_URL to use a separate DB.
hub:
restart: always
image: ghcr.io/formbricks/hub:latest
depends_on:
hub-migrate:
condition: service_completed_successfully
postgres:
condition: service_healthy
environment:
API_KEY: ${HUB_API_KEY:?HUB_API_KEY is required to run Hub}
DATABASE_URL: ${HUB_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/formbricks?sslmode=disable}
volumes:
postgres:
driver: local

View File

@@ -1,275 +0,0 @@
# Plan: Deprecate Environments in Formbricks
**Issue**: https://github.com/formbricks/internal/issues/1501
## Context
Formbricks currently has a 4-level hierarchy: **Organization → Project → Environment (prod/dev) → Resources**. The "Environment" layer adds complexity with minimal value — the only real difference between prod and dev is separate API keys and a UI badge. The UI already calls "Project" a "Workspace".
**Goal**: Collapse the Environment layer so resources live directly under Project. The production environment merges into the workspace identity. Dev environments with data become separate new workspaces.
**Key decisions**:
- DB model stays as `Project` (no table rename)
- SDK will accept `workspaceId` as new param, `environmentId` as deprecated alias
- Dev environments with data get promoted to separate workspaces
---
## Current State
```
Organization
└── Project ("Workspace" in UI)
├── Environment (production) ──→ surveys, contacts, webhooks, tags, ...
└── Environment (development) ──→ surveys, contacts, webhooks, tags, ...
```
Every project always has exactly 2 environments. The only differences between them:
- Separate data (contacts, responses, attributes, integrations, webhooks, segments, etc.)
- Separate API keys (`ApiKeyEnvironment` grants per-environment permissions)
- A red warning banner in the dev UI, plus an environment switcher breadcrumb
Key metrics:
- **564 files** in `apps/web` reference `environmentId`
- **52 files** in `packages` reference `environmentId`
- **68+ route directories** under `/environments/[environmentId]/`
- **22 API endpoint directories** keyed by `[environmentId]`
- **8 resource tables** FK to Environment: `Survey`, `Contact`, `ActionClass`, `ContactAttributeKey`, `Webhook`, `Tag`, `Segment`, `Integration`
- **SDK** requires `environmentId` to initialize, all client APIs use `/api/v1/client/[environmentId]/...`
- **Storage** paths: `private/${environmentId}/${fileName}`
---
## Phase 1: Add `projectId` Column to All Environment-Owned Models (PR 1 — Small, Low Risk)
Add an **optional** `projectId` column alongside the existing `environmentId` on every model that currently only references Environment.
**Why**: Today, Survey has `environmentId` pointing to Environment, and you have to join through Environment to reach Project. We need Survey to point directly to Project. But we can't just switch the FK in one shot — that would break everything. So we add a new nullable `projectId` column alongside the existing `environmentId`. No code changes, no runtime impact. Just schema preparation.
**Modify**: `packages/database/schema.prisma`
- Add `projectId String?` + FK to Project + index to: `Survey`, `Contact`, `ActionClass`, `ContactAttributeKey`, `Webhook`, `Tag`, `Segment`, `Integration`
- Add reverse relations on the `Project` model
- New Prisma migration file
No code changes. No runtime behavior change. All new columns are NULL.
---
## Phase 2: Backfill `projectId` (PR 2 — Small, Medium Risk)
Data migration to populate `projectId` on every existing row.
**Why**: The new `projectId` columns are all NULL. We need to populate them by joining through the Environment table: `Survey.environmentId → Environment.id → Environment.projectId`. After this, every row has both `environmentId` (old) and `projectId` (new) filled in, but the app still only reads `environmentId`.
```sql
UPDATE "Survey" s SET "projectId" = e."projectId"
FROM "Environment" e WHERE s."environmentId" = e."id" AND s."projectId" IS NULL;
-- Repeat for all 8 tables
```
**Create**: Migration script (idempotent — only updates rows where `projectId IS NULL`)
App behavior unchanged. New columns now populated but not yet read.
---
## Phase 3: Dual-Write (PR 3 — Large, Medium Risk)
All create/update operations write both `environmentId` AND `projectId`.
**Why**: New rows created after the backfill would still have `projectId = NULL` because the app code doesn't know about the new column yet. We update every `prisma.survey.create(...)`, `prisma.contact.create(...)`, etc. to write both `environmentId` and `projectId`. Now every new row gets both values. Old code still reads `environmentId` — nothing breaks.
**Key files to modify**:
- `apps/web/lib/survey/service.ts``createSurvey`
- `apps/web/lib/environment/service.ts``createEnvironment` (creates default ContactAttributeKeys)
- `apps/web/modules/projects/settings/lib/project.ts``createProject`
- `apps/web/modules/survey/list/lib/survey.ts``copySurveyToOtherEnvironment`
- `apps/web/modules/survey/components/template-list/lib/survey.ts``createSurvey`
- `apps/web/lib/actionClass/service.ts``createActionClass`
- `apps/web/modules/survey/editor/lib/action-class.ts``createActionClass`
- `apps/web/modules/ee/contacts/lib/contacts.ts``processCsvRecord`, `createMissingAttributeKeys`
- `apps/web/modules/ee/contacts/api/v2/management/contacts/lib/contact.ts``createContact`
- `apps/web/app/api/v1/client/[environmentId]/displays/lib/display.ts``createDisplay` (creates contacts)
- `apps/web/modules/ee/contacts/lib/contact-attribute-keys.ts``createContactAttributeKey`
- `apps/web/modules/api/v2/management/contact-attribute-keys/lib/contact-attribute-key.ts``createContactAttributeKey`
- `apps/web/modules/ee/contacts/api/v1/management/contact-attribute-keys/lib/contact-attribute-keys.ts``createContactAttributeKey`
- `apps/web/modules/integrations/webhooks/lib/webhook.ts``createWebhook`
- `apps/web/modules/api/v2/management/webhooks/lib/webhook.ts``createWebhook`
- `apps/web/app/api/v1/webhooks/lib/webhook.ts``createWebhook`
- `apps/web/lib/tag/service.ts``createTag`
- `apps/web/modules/ee/contacts/segments/lib/segments.ts``createSegment`, `cloneSegment`, `resetSegmentInSurvey`
- `apps/web/lib/integration/service.ts``createOrUpdateIntegration`
Pattern:
```typescript
// Resolve environmentId to projectId using existing getEnvironment()
const environment = await getEnvironment(environmentId);
const projectId = environment.projectId;
await prisma.survey.create({ data: { environmentId, projectId, ...rest } });
```
---
## Phase 4: Switch Internal Reads to `projectId` (PR 4 — Very Large, High Risk)
Change internal (non-API) queries from `WHERE environmentId = ?` to `WHERE projectId = ?`.
**Why**: This is the actual migration. Every query that says `WHERE environmentId = X` changes to `WHERE projectId = X`. Functions like `getSurveys(environmentId)` become `getSurveys(projectId)`. The layout at `/environments/[environmentId]/layout.tsx` resolves the environmentId from the URL to a projectId early on and passes projectId downstream. After this phase, the app internally thinks in terms of projects, not environments, even though URLs still say `[environmentId]`.
**Key files**:
- `apps/web/modules/survey/list/lib/survey.ts``getSurveys(environmentId)``getSurveys(projectId)`
- `apps/web/app/api/v1/client/[environmentId]/environment/lib/data.ts``getEnvironmentStateData`
- `apps/web/modules/environments/lib/utils.ts``getEnvironmentAuth`, `getEnvironmentLayoutData`
- `apps/web/app/(app)/environments/[environmentId]/layout.tsx` — resolve `projectId` early, pass to context
- `apps/web/app/(app)/environments/[environmentId]/context/environment-context.tsx` — add `projectId`
- All page server components that pass `environmentId` to service functions
URL still has `[environmentId]`. Each page resolves `environmentId → projectId` at the top.
**This PR can be split further** by migrating one resource type at a time (surveys first, then contacts, then actions, etc.).
---
## Phase 5: Client API Backwards Compatibility (PR 5 — Medium, Medium Risk)
Make `/api/v1/client/[environmentId]/...` and `/api/v2/client/[environmentId]/...` accept either an `environmentId` or a `projectId`.
**Why**: The SDK sends requests to `/api/v1/client/[environmentId]/...`. Existing deployed SDKs will keep sending environmentIds. New SDKs will send projectIds. Each route handler needs to accept either and resolve to a projectId internally. This ensures old SDK versions don't break.
**Add fallback resolution at top of each route handler**:
```typescript
// Try Environment table first, fall back to Project table
let projectId: string;
const environment = await prisma.environment.findUnique({ where: { id: params.environmentId } });
if (environment) {
projectId = environment.projectId;
} else {
projectId = params.environmentId; // caller passed a projectId directly
}
```
**Files**:
- `apps/web/app/api/v1/client/[environmentId]/environment/route.ts`
- `apps/web/app/api/v1/client/[environmentId]/displays/route.ts`
- `apps/web/app/api/v1/client/[environmentId]/responses/route.ts`
- `apps/web/app/api/v1/client/[environmentId]/storage/route.ts`
- `apps/web/app/api/v1/client/[environmentId]/user/route.ts`
- `apps/web/app/api/v2/client/[environmentId]/` — all routes
---
## Phase 6: Management API + API Key Migration (PR 6 — Medium, Medium Risk)
**Why**: The `ApiKeyEnvironment` model grants per-environment permissions. API keys used by integrations (Zapier, Make, etc.) reference environmentIds. These need to work at the project level. The management API endpoints that accept `environmentId` in request bodies need to also accept `projectId`.
- Modify `ApiKeyEnvironment` to also support project-level permissions (or add `projectId` to the model)
- Update `apps/web/app/api/v1/auth.ts``authenticateRequest` resolves environment permissions to project
- Management route handlers accept `environmentId` OR `projectId` in request bodies
- API key management UI in `modules/organization/settings/api-keys/`
---
## Phase 7: Storage Path Migration (PR 7 — Medium, Medium Risk)
**Why**: Uploaded files are stored at paths like `private/{environmentId}/{fileName}`. New uploads should use `{projectId}/...`, but old files still live at the old paths. Downloads need to check both locations for backward compatibility.
- New uploads use `{projectId}/{accessType}/{fileName}`
- Downloads check both `{projectId}/...` and `{environmentId}/...` paths for backwards compat
- `apps/web/modules/storage/service.ts`
- `apps/web/app/storage/[environmentId]/[accessType]/[fileName]/route.ts`
---
## Phase 8: Dev Environment Data Migration (PR 8 — Large, High Risk)
**Why**: Currently each project has a prod and dev environment. After the migration, there's no "environment" concept — just projects. Dev environments with no data can be discarded. Dev environments with data need to be promoted into new standalone projects so that data isn't lost.
For each Project with a development Environment that has data:
1. Create new Project named `{name} (Dev)` in the same Organization
2. Create a production Environment for the new Project
3. Re-parent all dev environment resources to the new Project (update `projectId`)
4. Re-parent resources to the new production environment (update `environmentId`)
For development environments with NO data: leave as-is (will be cleaned up later).
**Create**: Idempotent migration script in `packages/database/migration/` or `scripts/`
---
## Phase 9: New `/workspaces/[projectId]/` Routes + Redirects (PR 9 — Very Large, High Risk)
**Why**: The URL currently says `/environments/[environmentId]/surveys/...`. After the migration, it should say `/workspaces/[projectId]/surveys/...`. This phase creates the new route group mirroring the old structure, removes the environment switcher breadcrumb, and adds redirects so old bookmarked URLs still work.
- Create `/apps/web/app/(app)/workspaces/[projectId]/` route group mirroring the environments structure
- New layout resolves `projectId` directly
- Old `/environments/[environmentId]/...` routes redirect to `/workspaces/{projectId}/...`
- Update `apps/web/app/page.tsx` to redirect to workspace URLs
- Remove environment switcher breadcrumb
**Can be split** into sub-PRs: layout first, then surveys, then settings, etc.
---
## Phase 10: Make `projectId` NOT NULL (PR 10 — Small, Low Risk)
**Why**: At this point, every row has `projectId` populated (backfill + dual-write), and all reads use `projectId`. Now we can safely make it required in the schema. This is a safety net — the DB will reject any row that somehow doesn't have a projectId.
```sql
ALTER TABLE "Survey" ALTER COLUMN "projectId" SET NOT NULL;
-- Repeat for all 8 tables
```
Pre-check: verify no NULL values remain.
---
## Phase 11: JS SDK Update (PR 11 — Medium, Low Risk)
**Why**: Add `workspaceId` as the new init parameter. `environmentId` keeps working as a deprecated alias. Existing integrations don't break.
- `packages/js-core/src/types/config.ts` — add `workspaceId` to `TConfigInput`
- `packages/js-core/src/lib/common/setup.ts` — accept `workspaceId`, fall back to `environmentId`
- `environmentId` continues working as deprecated alias indefinitely
```typescript
// New:
formbricks.init({ workspaceId: "cxxx", appUrl: "..." })
// Old (still works):
formbricks.init({ environmentId: "cxxx", appUrl: "..." })
```
---
## Verification
After each PR:
1. `pnpm build` passes
2. Existing tests pass (`pnpm test`)
3. Manual smoke test: create survey, submit response, check dashboard
4. SDK initialization works with existing `environmentId`
After full migration:
- Old environment URLs redirect correctly
- Old API keys work
- Old SDK `environmentId` init works
- New `workspaceId` SDK init works
- Storage files accessible via both old and new paths
- Dev environments with data are separate workspaces
---
## PR Summary
| PR | Phase | Description | Size | Risk |
|----|-------|-------------|------|------|
| 1 | 1 | Add nullable `projectId` columns | S | Low |
| 2 | 2 | Backfill `projectId` data migration | S | Med |
| 3 | 3 | Dual-write `projectId` on all creates | L | Med |
| 4 | 4 | Switch reads to `projectId` | XL | High |
| 5 | 5 | Client API backwards compat | M | Med |
| 6 | 6 | Management API + API key migration | M | Med |
| 7 | 7 | Storage path migration | M | Med |
| 8 | 8 | Dev environment → workspace promotion | L | High |
| 9 | 9 | New workspace routes + redirects | XL | High |
| 10 | 10 | Make `projectId` NOT NULL | S | Low |
| 11 | 11 | JS SDK `workspaceId` support | M | Low |

View File

@@ -77,14 +77,4 @@ These variables are present inside your machine's docker-compose file. Restart t
| AUDIT_LOG_ENABLED | Set this to 1 to enable audit logging. Requires Redis to be configured with the REDIS_URL env variable. | optional | 0 |
| AUDIT_LOG_GET_USER_IP | Set to 1 to include user IP addresses in audit logs from request headers | optional | 0 |
#### Formbricks Hub
When running the stack with [Formbricks Hub](https://github.com/formbricks/hub) (for example via Docker Compose or Helm), the following variables apply:
| Variable | Description | Required | Default |
| ---------------- | ------------------------------------------------------------------------------------------------ | -------------------------- | ----------------------------------------------------- |
| HUB_API_KEY | API key used by the Formbricks Hub API (port 8080). | required | (e.g. `openssl rand -hex 32`) |
| HUB_API_URL | Base URL the Formbricks app uses to call Hub. Use `http://localhost:8080` locally. | required | `http://localhost:8080` in local dev |
| HUB_DATABASE_URL | PostgreSQL connection URL for Hub. Omit to use the same database as Formbricks. | optional | Same as Formbricks `DATABASE_URL` (shared database) |
Note: If you want to configure something that is not possible via the options above, please open an issue on our GitHub repo or reach out to us on GitHub Discussions and we'll try our best to work out a solution with you.

View File

@@ -4,7 +4,8 @@
"private": true,
"workspaces": [
"apps/*",
"packages/*"
"packages/*",
"services/*"
],
"prisma": {
"schema": "packages/database/schema.prisma"

View File

@@ -47,9 +47,7 @@ export const xmSegmentMigration: MigrationScript = {
id: "s644oyyqccstfdeejc4fluye",
name: "20241209110456_xm_segment_migration",
run: async ({ tx }) => {
const allSegments = await tx.segment.findMany({
select: { id: true, filters: true },
});
const allSegments = await tx.segment.findMany();
const updationPromises = [];
for (const segment of allSegments) {
updationPromises.push(
@@ -58,7 +56,6 @@ export const xmSegmentMigration: MigrationScript = {
data: {
filters: findAndReplace(segment.filters),
},
select: { id: true },
})
);
}

View File

@@ -0,0 +1,213 @@
-- Add an optional scheduling window to surveys: "startsAt" / "endsAt" are
-- both nullable, so existing rows are unaffected and either bound may be
-- left open-ended.
ALTER TABLE "Survey"
ADD COLUMN "startsAt" TIMESTAMP(3),
ADD COLUMN "endsAt" TIMESTAMP(3);
-- Reject inverted windows. The check only fires when BOTH bounds are set;
-- a NULL on either side short-circuits the constraint to true.
ALTER TABLE "Survey"
ADD CONSTRAINT "Survey_startsAt_before_endsAt"
CHECK ("startsAt" IS NULL OR "endsAt" IS NULL OR "startsAt" < "endsAt");
-- All River job-queue objects live in their own schema so they do not
-- collide with application tables managed by Prisma.
-- NOTE(review): the SQL below mirrors River's canonical "main" line
-- migrations (002-006); keep it byte-compatible with upstream rather than
-- hand-editing, so a River client pointed at this database agrees on the
-- schema version.
CREATE SCHEMA IF NOT EXISTS "river";
-- River main migration 002 [up]
-- Lifecycle states of a job. 'pending' is added later in migration 004.
CREATE TYPE river.river_job_state AS ENUM(
'available',
'cancelled',
'completed',
'discarded',
'retryable',
'running',
'scheduled'
);
-- Core job table: one row per enqueued job.
CREATE TABLE river.river_job(
id bigserial PRIMARY KEY,
state river.river_job_state NOT NULL DEFAULT 'available',
attempt smallint NOT NULL DEFAULT 0,
max_attempts smallint NOT NULL,
attempted_at timestamptz,
created_at timestamptz NOT NULL DEFAULT NOW(),
finalized_at timestamptz,
scheduled_at timestamptz NOT NULL DEFAULT NOW(),
priority smallint NOT NULL DEFAULT 1,
-- Worker-supplied payload and bookkeeping; args/metadata are tightened to
-- NOT NULL by later migrations.
args jsonb,
attempted_by text[],
errors jsonb[],
kind text NOT NULL,
metadata jsonb NOT NULL DEFAULT '{}',
queue text NOT NULL DEFAULT 'default',
tags varchar(255)[],
-- finalized_at must be set exactly when a job reaches a terminal state
-- (this formulation is loosened here and replaced by a stricter
-- bidirectional check in migration 004 below).
CONSTRAINT finalized_or_finalized_at_null CHECK (
(state IN ('cancelled', 'completed', 'discarded') AND finalized_at IS NOT NULL) OR finalized_at IS NULL
),
CONSTRAINT max_attempts_is_positive CHECK (max_attempts > 0),
CONSTRAINT priority_in_range CHECK (priority >= 1 AND priority <= 4),
CONSTRAINT queue_length CHECK (char_length(queue) > 0 AND char_length(queue) < 128),
CONSTRAINT kind_length CHECK (char_length(kind) > 0 AND char_length(kind) < 128)
);
CREATE INDEX river_job_kind ON river.river_job USING btree(kind);
-- Partial index: only finalized jobs, used for cleanup/retention queries.
CREATE INDEX river_job_state_and_finalized_at_index ON river.river_job USING btree(state, finalized_at)
WHERE finalized_at IS NOT NULL;
-- Composite index matching the order-by of River's job fetch query.
CREATE INDEX river_job_prioritized_fetching_index ON river.river_job USING btree(
state,
queue,
priority,
scheduled_at,
id
);
CREATE INDEX river_job_args_index ON river.river_job USING GIN(args);
CREATE INDEX river_job_metadata_index ON river.river_job USING GIN(metadata);
-- Notify listeners on 'river_insert' whenever a job is inserted directly in
-- the 'available' state, so idle workers can wake immediately instead of
-- polling. (Dropped again in migration 004 below.)
CREATE OR REPLACE FUNCTION river.river_job_notify()
RETURNS TRIGGER
AS $$
DECLARE
payload json;
BEGIN
IF NEW.state = 'available' THEN
payload = json_build_object('queue', NEW.queue);
PERFORM pg_notify('river_insert', payload::text);
END IF;
RETURN NULL;
END;
$$
LANGUAGE plpgsql;
CREATE TRIGGER river_notify
AFTER INSERT ON river.river_job
FOR EACH ROW
EXECUTE PROCEDURE river.river_job_notify();
-- Leader-election record. UNLOGGED: contents are ephemeral coordination
-- state and survive neither crash recovery nor replication, which is fine
-- because a new leader is simply elected.
CREATE UNLOGGED TABLE river.river_leader(
elected_at timestamptz NOT NULL,
expires_at timestamptz NOT NULL,
leader_id text NOT NULL,
name text PRIMARY KEY,
CONSTRAINT name_length CHECK (char_length(name) > 0 AND char_length(name) < 128),
CONSTRAINT leader_id_length CHECK (char_length(leader_id) > 0 AND char_length(leader_id) < 128)
);
-- River main migration 003 [up]
-- Backfill-then-tighten pattern: give 'tags' a default, rewrite existing
-- NULLs, then enforce NOT NULL so new code never has to handle NULL arrays.
ALTER TABLE river.river_job ALTER COLUMN tags SET DEFAULT '{}';
UPDATE river.river_job SET tags = '{}' WHERE tags IS NULL;
ALTER TABLE river.river_job ALTER COLUMN tags SET NOT NULL;
-- River main migration 004 [up]
-- Same backfill-then-tighten for 'args'; the default is dropped afterwards
-- because callers are expected to always supply args explicitly.
ALTER TABLE river.river_job ALTER COLUMN args SET DEFAULT '{}';
UPDATE river.river_job SET args = '{}' WHERE args IS NULL;
ALTER TABLE river.river_job ALTER COLUMN args SET NOT NULL;
ALTER TABLE river.river_job ALTER COLUMN args DROP DEFAULT;
ALTER TABLE river.river_job ALTER COLUMN metadata SET DEFAULT '{}';
UPDATE river.river_job SET metadata = '{}' WHERE metadata IS NULL;
ALTER TABLE river.river_job ALTER COLUMN metadata SET NOT NULL;
-- New 'pending' state inserted between 'discarded' and 'retryable'.
ALTER TYPE river.river_job_state ADD VALUE IF NOT EXISTS 'pending' AFTER 'discarded';
-- Tighten the finalization invariant to be bidirectional: finalized_at is
-- set if AND ONLY IF the job is in a terminal state (the original check
-- allowed a terminal state without finalized_at... actually allowed
-- finalized_at to be NULL in any state; this closes that gap).
ALTER TABLE river.river_job DROP CONSTRAINT finalized_or_finalized_at_null;
ALTER TABLE river.river_job ADD CONSTRAINT finalized_or_finalized_at_null CHECK (
(finalized_at IS NULL AND state NOT IN ('cancelled', 'completed', 'discarded')) OR
(finalized_at IS NOT NULL AND state IN ('cancelled', 'completed', 'discarded'))
);
-- The insert trigger from migration 002 is removed; NOTIFY is handled
-- differently from here on (the trigger-based wakeup is no longer used).
DROP TRIGGER river_notify ON river.river_job;
DROP FUNCTION river.river_job_notify;
-- Per-queue record, e.g. for pausing a queue via paused_at.
CREATE TABLE river.river_queue (
name text PRIMARY KEY NOT NULL,
created_at timestamptz NOT NULL DEFAULT now(),
metadata jsonb NOT NULL DEFAULT '{}'::jsonb,
paused_at timestamptz,
updated_at timestamptz NOT NULL
);
-- Leader election is collapsed to a single well-known row: 'default' is now
-- the only permitted name.
ALTER TABLE river.river_leader
ALTER COLUMN name SET DEFAULT 'default',
DROP CONSTRAINT name_length,
ADD CONSTRAINT name_length CHECK (name = 'default');
-- River main migration 005 [up]
-- Rebuild river_migration with a (line, version) composite primary key so
-- multiple migration "lines" can coexist. Guarded by to_regclass so this is
-- a no-op on fresh databases where the table was never created (as here,
-- since this file starts at migration 002).
DO
$body$
BEGIN
IF (SELECT to_regclass('river.river_migration') IS NOT NULL) THEN
ALTER TABLE river.river_migration
RENAME TO river_migration_old;
CREATE TABLE river.river_migration(
line TEXT NOT NULL,
version bigint NOT NULL,
created_at timestamptz NOT NULL DEFAULT NOW(),
CONSTRAINT line_length CHECK (char_length(line) > 0 AND char_length(line) < 128),
CONSTRAINT version_gte_1 CHECK (version >= 1),
PRIMARY KEY (line, version)
);
-- Existing rows all belonged to the implicit 'main' line.
INSERT INTO river.river_migration (created_at, line, version)
SELECT created_at, 'main', version
FROM river.river_migration_old;
DROP TABLE river.river_migration_old;
END IF;
END;
$body$
LANGUAGE 'plpgsql';
-- unique_key supports de-duplicated ("unique") job insertion, scoped per
-- kind; NULL means the job opted out of uniqueness, and the partial index
-- keeps those rows out of the unique constraint entirely.
ALTER TABLE river.river_job
ADD COLUMN IF NOT EXISTS unique_key bytea;
CREATE UNIQUE INDEX IF NOT EXISTS river_job_kind_unique_key_idx
ON river.river_job (kind, unique_key)
WHERE unique_key IS NOT NULL;
-- Ephemeral per-client bookkeeping (UNLOGGED, like river_leader):
-- presumably one row per connected River client — TODO confirm against the
-- River client implementation.
CREATE UNLOGGED TABLE river.river_client (
id text PRIMARY KEY NOT NULL,
created_at timestamptz NOT NULL DEFAULT now(),
metadata jsonb NOT NULL DEFAULT '{}',
paused_at timestamptz,
updated_at timestamptz NOT NULL,
CONSTRAINT name_length CHECK (char_length(id) > 0 AND char_length(id) < 128)
);
-- Per-client, per-queue counters; rows are removed with their client via
-- ON DELETE CASCADE.
CREATE UNLOGGED TABLE river.river_client_queue (
river_client_id text NOT NULL REFERENCES river.river_client (id) ON DELETE CASCADE,
name text NOT NULL,
created_at timestamptz NOT NULL DEFAULT now(),
max_workers bigint NOT NULL DEFAULT 0,
metadata jsonb NOT NULL DEFAULT '{}',
num_jobs_completed bigint NOT NULL DEFAULT 0,
num_jobs_running bigint NOT NULL DEFAULT 0,
updated_at timestamptz NOT NULL,
PRIMARY KEY (river_client_id, name),
CONSTRAINT name_length CHECK (char_length(name) > 0 AND char_length(name) < 128),
CONSTRAINT num_jobs_completed_zero_or_positive CHECK (num_jobs_completed >= 0),
CONSTRAINT num_jobs_running_zero_or_positive CHECK (num_jobs_running >= 0)
);
-- River main migration 006 [up]
-- Maps each job state to a fixed bit position in an 8-bit mask and reports
-- whether that state's bit is set. This lets each job carry (in
-- unique_states below) the set of states in which its unique_key should be
-- enforced. IMMUTABLE so it is usable in the partial index predicate.
-- Bit layout (bit 7 is the leftmost via get_bit): available=7, cancelled=6,
-- completed=5, discarded=4, pending=3, retryable=2, running=1, scheduled=0.
CREATE OR REPLACE FUNCTION river.river_job_state_in_bitmask(bitmask BIT(8), state river.river_job_state)
RETURNS boolean
LANGUAGE SQL
IMMUTABLE
AS $$
SELECT CASE state
WHEN 'available' THEN get_bit(bitmask, 7)
WHEN 'cancelled' THEN get_bit(bitmask, 6)
WHEN 'completed' THEN get_bit(bitmask, 5)
WHEN 'discarded' THEN get_bit(bitmask, 4)
WHEN 'pending' THEN get_bit(bitmask, 3)
WHEN 'retryable' THEN get_bit(bitmask, 2)
WHEN 'running' THEN get_bit(bitmask, 1)
WHEN 'scheduled' THEN get_bit(bitmask, 0)
ELSE 0
END = 1;
$$;
ALTER TABLE river.river_job ADD COLUMN IF NOT EXISTS unique_states BIT(8);
-- State-aware uniqueness: unique_key only conflicts while the job is in one
-- of its declared unique_states. Jobs with NULL unique_key or NULL
-- unique_states are excluded from the index and never conflict.
CREATE UNIQUE INDEX IF NOT EXISTS river_job_unique_idx ON river.river_job (unique_key)
WHERE unique_key IS NOT NULL
AND unique_states IS NOT NULL
AND river.river_job_state_in_bitmask(unique_states, state);
-- The kind-scoped uniqueness index from migration 005 is superseded by the
-- state-aware index above.
DROP INDEX river.river_job_kind_unique_key_idx;

Some files were not shown because too many files have changed in this diff Show More