Compare commits

...

4 Commits

Author SHA1 Message Date
Tiago Farto 2084465bc9 chore: docker compose fix 2026-03-25 14:22:56 +00:00
Tiago Farto 80353d641e chore: docker fixes 2026-03-25 14:11:53 +00:00
Tiago Farto 3ec627cf3c Merge branch 'main' into feat/workflows-service-inngest 2026-03-25 13:57:50 +00:00
Tiago Farto 5fd6ee64c0 chore: inngest workflows poc 2026-03-25 13:55:43 +00:00
43 changed files with 2076 additions and 1864 deletions
+4
View File
@@ -57,6 +57,10 @@ packages/database/migrations
branch.json
.vercel
# Golang
.cache
services/**/bin/
# IntelliJ IDEA
/.idea/
/*.iml
+4
View File
@@ -38,6 +38,8 @@ export const env = createEnv({
.optional()
.or(z.string().refine((str) => str === "")),
IMPRINT_ADDRESS: z.string().optional(),
INNGEST_BASE_URL: z.url().optional(),
INNGEST_EVENT_KEY: z.string().optional(),
INVITE_DISABLED: z.enum(["1", "0"]).optional(),
CHATWOOT_WEBSITE_TOKEN: z.string().optional(),
CHATWOOT_BASE_URL: z.url().optional(),
@@ -161,6 +163,8 @@ export const env = createEnv({
HTTPS_PROXY: process.env.HTTPS_PROXY,
IMPRINT_URL: process.env.IMPRINT_URL,
IMPRINT_ADDRESS: process.env.IMPRINT_ADDRESS,
INNGEST_BASE_URL: process.env.INNGEST_BASE_URL,
INNGEST_EVENT_KEY: process.env.INNGEST_EVENT_KEY,
INVITE_DISABLED: process.env.INVITE_DISABLED,
CHATWOOT_WEBSITE_TOKEN: process.env.CHATWOOT_WEBSITE_TOKEN,
CHATWOOT_BASE_URL: process.env.CHATWOOT_BASE_URL,
@@ -0,0 +1,113 @@
import { type IncomingMessage, type Server, type ServerResponse, createServer } from "node:http";
import { AddressInfo } from "node:net";
import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
// Snapshot of one HTTP request received by the stub Inngest event server.
interface CapturedRequest {
  method?: string;
  url?: string;
  headers: IncomingMessage["headers"];
  body: string;
}
describe("sendInngestEvents", () => {
  let server: Server;
  let capturedRequests: CapturedRequest[];
  beforeEach(async () => {
    capturedRequests = [];
    // Stub Inngest event API: records every request and replies with a
    // canned success payload.
    server = createServer(async (req: IncomingMessage, res: ServerResponse) => {
      const chunks: Buffer[] = [];
      for await (const chunk of req) {
        chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
      }
      capturedRequests.push({
        method: req.method,
        url: req.url,
        headers: req.headers,
        body: Buffer.concat(chunks).toString("utf8"),
      });
      res.writeHead(200, { "Content-Type": "application/json" });
      res.end(JSON.stringify({ status: 200, ids: ["evt_1", "evt_2"] }));
    });
    // Port 0 lets the OS pick a free port; the test reads it back later.
    await new Promise<void>((resolve) => {
      server.listen(0, "127.0.0.1", () => resolve());
    });
  });
  afterEach(async () => {
    // Undo the per-test env mock and drop the cached module so the next
    // test gets a fresh client.
    vi.resetModules();
    vi.doUnmock("@/lib/env");
    await new Promise<void>((resolve, reject) => {
      server.close((error) => {
        if (error) {
          reject(error);
          return;
        }
        resolve();
      });
    });
  });
  test("posts events to the self-hosted event API using the configured event key and timestamp", async () => {
    const address = server.address() as AddressInfo;
    const baseUrl = `http://127.0.0.1:${address.port}/`;
    // Point the client at the stub server; must be mocked before the
    // dynamic import below so the module reads the mocked env.
    vi.doMock("@/lib/env", () => ({
      env: {
        INNGEST_BASE_URL: baseUrl,
        INNGEST_EVENT_KEY: "test-event-key",
      },
    }));
    const { resetInngestClientForTests, sendInngestEvents } = await import("./client");
    await sendInngestEvents([
      {
        name: "survey.start",
        data: {
          surveyId: "survey_1",
          environmentId: "env_1",
          scheduledFor: "2026-04-01T12:00:00.000Z",
        },
        ts: 1775044800000,
      },
      {
        name: "survey.end.cancelled",
        data: {
          surveyId: "survey_1",
          environmentId: "env_1",
        },
      },
    ]);
    resetInngestClientForTests();
    // Both events go out in one batched POST to /e/<event key>.
    expect(capturedRequests).toHaveLength(1);
    expect(capturedRequests[0]?.method).toBe("POST");
    expect(capturedRequests[0]?.url).toBe("/e/test-event-key");
    expect(capturedRequests[0]?.headers["content-type"]).toContain("application/json");
    expect(JSON.parse(capturedRequests[0]?.body ?? "[]")).toEqual([
      {
        name: "survey.start",
        data: {
          surveyId: "survey_1",
          environmentId: "env_1",
          scheduledFor: "2026-04-01T12:00:00.000Z",
        },
        ts: 1775044800000,
      },
      {
        name: "survey.end.cancelled",
        data: {
          surveyId: "survey_1",
          environmentId: "env_1",
        },
        // The SDK stamps a send-time ts when the caller omits one.
        ts: expect.any(Number),
      },
    ]);
  });
});
+79
View File
@@ -0,0 +1,79 @@
import "server-only";
import { Inngest } from "inngest";
import { env } from "@/lib/env";
import { INNGEST_POC_APP_ID } from "./constants";
// Payload for lifecycle events that carry a target time (survey start/end).
export interface InngestScheduledEventData {
  surveyId: string;
  environmentId: string;
  // ISO-8601 timestamp the lifecycle transition is scheduled for.
  scheduledFor: string;
}
// Payload for cancellation events; no schedule is attached.
export interface InngestCancelledEventData {
  surveyId: string;
  environmentId: string;
}
// Event shape accepted by sendInngestEvents. `ts` is the optional event
// timestamp in epoch milliseconds; when omitted the event is treated as
// immediate. NOTE(review): both union arms share `name: string`, so the
// union only distinguishes the `data` shape — consider literal name types.
export type InngestSendableEvent =
  | {
      name: string;
      data: InngestScheduledEventData;
      ts?: number;
    }
  | {
      name: string;
      data: InngestCancelledEventData;
      ts?: number;
    };
// Narrow view of the Inngest SDK client: only `send` is used here.
interface InngestEventClient {
  send: (payload: InngestSendableEvent | InngestSendableEvent[]) => Promise<unknown>;
}
// Module-level singleton; reset via resetInngestClientForTests().
let inngestClient: InngestEventClient | null = null;
/**
 * Reads the Inngest connection settings from the environment.
 * Throws a descriptive error when either value is missing so callers fail
 * fast instead of silently dropping lifecycle events.
 */
const getRequiredEnv = (): { baseUrl: string; eventKey: string } => {
  const baseUrl = env.INNGEST_BASE_URL;
  const eventKey = env.INNGEST_EVENT_KEY;
  if (!baseUrl) {
    throw new Error("INNGEST_BASE_URL is required to publish survey lifecycle events");
  }
  if (!eventKey) {
    throw new Error("INNGEST_EVENT_KEY is required to publish survey lifecycle events");
  }
  return { baseUrl, eventKey };
};
// Builds a fresh Inngest SDK client pointed at the self-hosted base URL.
// `isDev: false` keeps event-key auth on even outside production.
// NOTE(review): the double assertion (`as unknown as`) narrows the SDK
// client to the minimal `send` surface used here — verify the SDK's `send`
// signature still matches when upgrading the `inngest` package.
const createInngestClient = (): InngestEventClient => {
  const { baseUrl, eventKey } = getRequiredEnv();
  return new Inngest({
    id: INNGEST_POC_APP_ID,
    baseUrl,
    eventKey,
    isDev: false,
  }) as unknown as InngestEventClient;
};
// Lazily creates and memoizes the module-level client.
const getInngestClient = (): InngestEventClient => {
  inngestClient ??= createInngestClient();
  return inngestClient;
};
// Test hook: drops the memoized client so the next send re-reads the env.
export const resetInngestClientForTests = (): void => {
  inngestClient = null;
};
/**
 * Publishes a batch of lifecycle events to Inngest.
 * Short-circuits on an empty batch so no client is constructed (and no env
 * validation runs) when there is nothing to send.
 */
export const sendInngestEvents = async (events: InngestSendableEvent[]): Promise<unknown> => {
  if (!events.length) {
    return [];
  }
  const client = getInngestClient();
  return client.send(events);
};
+5
View File
@@ -0,0 +1,5 @@
// Inngest app id under which the POC client/functions register.
export const INNGEST_POC_APP_ID = "formbricks-inngest-poc";
// Lifecycle event names fired when a survey is scheduled to start or end…
export const INNGEST_SURVEY_START_EVENT = "survey.start";
export const INNGEST_SURVEY_END_EVENT = "survey.end";
// …and their cancellation counterparts, published when a survey is deleted.
export const INNGEST_SURVEY_START_CANCELLED_EVENT = "survey.start.cancelled";
export const INNGEST_SURVEY_END_CANCELLED_EVENT = "survey.end.cancelled";
@@ -0,0 +1,261 @@
import { beforeEach, describe, expect, test, vi } from "vitest";
import { logger } from "@formbricks/logger";
import {
INNGEST_SURVEY_END_CANCELLED_EVENT,
INNGEST_SURVEY_END_EVENT,
INNGEST_SURVEY_START_CANCELLED_EVENT,
INNGEST_SURVEY_START_EVENT,
} from "./constants";
import {
getSurveyLifecycleCancellationEvents,
getSurveyLifecycleEvents,
publishSurveyLifecycleCancellationEvents,
publishSurveyLifecycleEvents,
} from "./survey-lifecycle";
// vitest hoists vi.mock calls; stub server-only and the logger so the
// module under test can load outside a Next.js server context.
vi.mock("server-only", () => ({}));
vi.mock("@formbricks/logger", () => ({
  logger: {
    error: vi.fn(),
  },
}));
describe("survey lifecycle inngest events", () => {
  beforeEach(() => {
    vi.mocked(logger.error).mockReset();
  });
  // Create path: each set lifecycle date yields one scheduled event.
  test("builds a start event when startsAt is set on create", () => {
    const startsAt = new Date("2026-04-01T12:00:00.000Z");
    expect(
      getSurveyLifecycleEvents({
        survey: {
          id: "survey_1",
          environmentId: "env_1",
          startsAt,
          endsAt: null,
        },
        now: new Date("2026-03-31T12:00:00.000Z"),
      })
    ).toEqual([
      {
        name: INNGEST_SURVEY_START_EVENT,
        data: {
          surveyId: "survey_1",
          environmentId: "env_1",
          scheduledFor: startsAt.toISOString(),
        },
        // Future date -> event carries the scheduled timestamp.
        ts: startsAt.getTime(),
      },
    ]);
  });
  test("builds an end event when endsAt is set on create", () => {
    const endsAt = new Date("2026-04-02T12:00:00.000Z");
    expect(
      getSurveyLifecycleEvents({
        survey: {
          id: "survey_1",
          environmentId: "env_1",
          startsAt: null,
          endsAt,
        },
        now: new Date("2026-03-31T12:00:00.000Z"),
      })
    ).toEqual([
      {
        name: INNGEST_SURVEY_END_EVENT,
        data: {
          surveyId: "survey_1",
          environmentId: "env_1",
          scheduledFor: endsAt.toISOString(),
        },
        ts: endsAt.getTime(),
      },
    ]);
  });
  test("builds both lifecycle events when both dates are set on create", () => {
    const startsAt = new Date("2026-04-01T12:00:00.000Z");
    const endsAt = new Date("2026-04-02T12:00:00.000Z");
    const events = getSurveyLifecycleEvents({
      survey: {
        id: "survey_1",
        environmentId: "env_1",
        startsAt,
        endsAt,
      },
      now: new Date("2026-03-31T12:00:00.000Z"),
    });
    // Start event is built before the end event.
    expect(events).toHaveLength(2);
    expect(events[0]?.name).toBe(INNGEST_SURVEY_START_EVENT);
    expect(events[1]?.name).toBe(INNGEST_SURVEY_END_EVENT);
  });
  test("does nothing when neither lifecycle date is set", () => {
    expect(
      getSurveyLifecycleEvents({
        survey: {
          id: "survey_1",
          environmentId: "env_1",
          startsAt: null,
          endsAt: null,
        },
      })
    ).toEqual([]);
  });
  // Update path: only the null -> Date transition publishes.
  test("builds a lifecycle event when a date transitions from null to a value", () => {
    const startsAt = new Date("2026-04-01T12:00:00.000Z");
    expect(
      getSurveyLifecycleEvents({
        survey: {
          id: "survey_1",
          environmentId: "env_1",
          startsAt,
          endsAt: null,
        },
        previousSurvey: {
          startsAt: null,
          endsAt: null,
        },
        now: new Date("2026-03-31T12:00:00.000Z"),
      })
    ).toHaveLength(1);
  });
  test("does not build events when a lifecycle date changes after already being set", () => {
    expect(
      getSurveyLifecycleEvents({
        survey: {
          id: "survey_1",
          environmentId: "env_1",
          startsAt: new Date("2026-04-02T12:00:00.000Z"),
          endsAt: null,
        },
        previousSurvey: {
          startsAt: new Date("2026-04-01T12:00:00.000Z"),
          endsAt: null,
        },
      })
    ).toEqual([]);
  });
  test("does not build events when a lifecycle date is cleared", () => {
    expect(
      getSurveyLifecycleEvents({
        survey: {
          id: "survey_1",
          environmentId: "env_1",
          startsAt: null,
          endsAt: null,
        },
        previousSurvey: {
          startsAt: new Date("2026-04-01T12:00:00.000Z"),
          endsAt: null,
        },
      })
    ).toEqual([]);
  });
  // Past dates publish immediately: the event has no `ts` key at all.
  test("publishes immediate events without a scheduled timestamp when the date is in the past", async () => {
    const sender = vi.fn().mockResolvedValue(undefined);
    const startsAt = new Date("2026-03-30T12:00:00.000Z");
    await publishSurveyLifecycleEvents({
      survey: {
        id: "survey_1",
        environmentId: "env_1",
        startsAt,
        endsAt: null,
      },
      now: new Date("2026-03-31T12:00:00.000Z"),
      sender,
    });
    expect(sender).toHaveBeenCalledWith([
      {
        name: INNGEST_SURVEY_START_EVENT,
        data: {
          surveyId: "survey_1",
          environmentId: "env_1",
          scheduledFor: startsAt.toISOString(),
        },
      },
    ]);
  });
  test("builds lifecycle cancellation events for survey deletion", () => {
    expect(
      getSurveyLifecycleCancellationEvents({
        surveyId: "survey_1",
        environmentId: "env_1",
      })
    ).toEqual([
      {
        name: INNGEST_SURVEY_START_CANCELLED_EVENT,
        data: {
          surveyId: "survey_1",
          environmentId: "env_1",
        },
      },
      {
        name: INNGEST_SURVEY_END_CANCELLED_EVENT,
        data: {
          surveyId: "survey_1",
          environmentId: "env_1",
        },
      },
    ]);
  });
  // Failures are logged with context and rethrown to the caller.
  test("logs and rethrows publish failures", async () => {
    const sender = vi.fn().mockRejectedValue(new Error("send failed"));
    await expect(
      publishSurveyLifecycleEvents({
        survey: {
          id: "survey_1",
          environmentId: "env_1",
          startsAt: new Date("2026-04-01T12:00:00.000Z"),
          endsAt: null,
        },
        sender,
      })
    ).rejects.toThrow("send failed");
    expect(logger.error).toHaveBeenCalledWith(
      {
        error: expect.any(Error),
        surveyId: "survey_1",
      },
      "Failed to publish survey lifecycle events"
    );
  });
  test("logs and rethrows cancellation publish failures", async () => {
    const sender = vi.fn().mockRejectedValue(new Error("cancel failed"));
    await expect(
      publishSurveyLifecycleCancellationEvents({
        surveyId: "survey_1",
        environmentId: "env_1",
        sender,
      })
    ).rejects.toThrow("cancel failed");
    expect(logger.error).toHaveBeenCalledWith(
      {
        error: expect.any(Error),
        surveyId: "survey_1",
      },
      "Failed to publish survey lifecycle cancellation events"
    );
  });
});
+119
View File
@@ -0,0 +1,119 @@
import "server-only";
import { logger } from "@formbricks/logger";
import { TSurvey } from "@formbricks/types/surveys/types";
import { type InngestSendableEvent, sendInngestEvents } from "./client";
import {
INNGEST_SURVEY_END_CANCELLED_EVENT,
INNGEST_SURVEY_END_EVENT,
INNGEST_SURVEY_START_CANCELLED_EVENT,
INNGEST_SURVEY_START_EVENT,
} from "./constants";
// Subset of survey fields needed to build lifecycle events.
interface SurveyLifecycleSurvey {
  id: TSurvey["id"];
  environmentId: TSurvey["environmentId"];
  startsAt?: TSurvey["startsAt"];
  endsAt?: TSurvey["endsAt"];
}
// Options for publishing start/end events after a create or update.
interface PublishSurveyLifecycleEventsOptions {
  survey: SurveyLifecycleSurvey;
  // Pre-update snapshot; omit (or pass null) on create so any set date publishes.
  previousSurvey?: Pick<SurveyLifecycleSurvey, "startsAt" | "endsAt"> | null;
  // Injectable clock for deterministic tests; defaults to `new Date()`.
  now?: Date;
  // Injectable transport for tests; defaults to sendInngestEvents.
  sender?: (events: InngestSendableEvent[]) => Promise<unknown>;
}
// Options for publishing cancellation events on survey deletion.
interface PublishSurveyLifecycleCancellationEventsOptions {
  surveyId: string;
  environmentId: string;
  sender?: (events: InngestSendableEvent[]) => Promise<unknown>;
}
/**
 * A lifecycle event is published only on the null/undefined -> Date
 * transition; changing or clearing an already-set date publishes nothing.
 */
const shouldPublishTransition = (previousValue?: Date | null, nextValue?: Date | null): nextValue is Date => {
  const wasUnset = previousValue === null || previousValue === undefined;
  const isNowSet = nextValue !== null && nextValue !== undefined;
  return wasUnset && isNowSet;
};
/**
 * Builds a scheduled lifecycle event for one survey date.
 * A `ts` (epoch millis) is attached only when the date is in the future;
 * past dates produce an immediate event with no timestamp key.
 */
const buildScheduledEvent = (
  name: string,
  survey: SurveyLifecycleSurvey,
  scheduledFor: Date,
  now: Date
): InngestSendableEvent => {
  const event: InngestSendableEvent = {
    name,
    data: {
      surveyId: survey.id,
      environmentId: survey.environmentId,
      scheduledFor: scheduledFor.toISOString(),
    },
  };
  if (scheduledFor.getTime() > now.getTime()) {
    event.ts = scheduledFor.getTime();
  }
  return event;
};
/**
 * Derives the start/end lifecycle events implied by a survey create/update.
 * Only null -> Date transitions produce events (see shouldPublishTransition);
 * on create, pass no previousSurvey so any set date publishes.
 */
export const getSurveyLifecycleEvents = ({
  survey,
  previousSurvey,
  now = new Date(),
}: Omit<PublishSurveyLifecycleEventsOptions, "sender">): InngestSendableEvent[] => {
  // Bind to locals so the type predicate narrows them to Date below.
  const startsAt = survey.startsAt ?? null;
  const endsAt = survey.endsAt ?? null;
  const events: InngestSendableEvent[] = [];
  if (shouldPublishTransition(previousSurvey?.startsAt ?? null, startsAt)) {
    events.push(buildScheduledEvent(INNGEST_SURVEY_START_EVENT, survey, startsAt, now));
  }
  if (shouldPublishTransition(previousSurvey?.endsAt ?? null, endsAt)) {
    events.push(buildScheduledEvent(INNGEST_SURVEY_END_EVENT, survey, endsAt, now));
  }
  return events;
};
/**
 * Computes and publishes lifecycle events for a survey.
 * No-op when the create/update implies no events; transport failures are
 * logged with the survey id and rethrown so callers can surface them.
 */
export const publishSurveyLifecycleEvents = async ({
  survey,
  previousSurvey,
  now = new Date(),
  sender = sendInngestEvents,
}: PublishSurveyLifecycleEventsOptions): Promise<void> => {
  const events = getSurveyLifecycleEvents({ survey, previousSurvey, now });
  if (!events.length) {
    return;
  }
  try {
    await sender(events);
  } catch (error) {
    logger.error({ error, surveyId: survey.id }, "Failed to publish survey lifecycle events");
    throw error;
  }
};
/**
 * Builds the pair of cancellation events (start + end) emitted when a
 * survey is deleted, so any pending scheduled work can be cancelled.
 */
export const getSurveyLifecycleCancellationEvents = ({
  surveyId,
  environmentId,
}: Omit<PublishSurveyLifecycleCancellationEventsOptions, "sender">): InngestSendableEvent[] =>
  [INNGEST_SURVEY_START_CANCELLED_EVENT, INNGEST_SURVEY_END_CANCELLED_EVENT].map((name) => ({
    name,
    // Fresh data object per event; no shared references.
    data: { surveyId, environmentId },
  }));
/**
 * Publishes both cancellation events for a deleted survey.
 * Transport failures are logged with the survey id and rethrown.
 */
export const publishSurveyLifecycleCancellationEvents = async ({
  surveyId,
  environmentId,
  sender = sendInngestEvents,
}: PublishSurveyLifecycleCancellationEventsOptions): Promise<void> => {
  const events = getSurveyLifecycleCancellationEvents({ surveyId, environmentId });
  try {
    await sender(events);
  } catch (error) {
    logger.error({ error, surveyId }, "Failed to publish survey lifecycle cancellation events");
    throw error;
  }
};
@@ -193,6 +193,8 @@ const mockWelcomeCard: TSurveyWelcomeCard = {
const baseSurveyProperties = {
id: mockId,
name: "Mock Survey",
startsAt: null,
endsAt: null,
autoClose: 10,
delay: 0,
autoComplete: 7,
+67 -1
View File
@@ -4,11 +4,18 @@ import { beforeEach, describe, expect, test, vi } from "vitest";
import { testInputValidation } from "vitestSetup";
import { PrismaErrorType } from "@formbricks/database/types/error";
import { TSurveyFollowUp } from "@formbricks/database/types/survey-follow-up";
import { logger } from "@formbricks/logger";
import { TActionClass } from "@formbricks/types/action-classes";
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
import {
DatabaseError,
InvalidInputError,
ResourceNotFoundError,
ValidationError,
} from "@formbricks/types/errors";
import { TSegment } from "@formbricks/types/segment";
import { TSurvey, TSurveyCreateInput, TSurveyQuestionTypeEnum } from "@formbricks/types/surveys/types";
import { getActionClasses } from "@/lib/actionClass/service";
import { publishSurveyLifecycleEvents } from "@/lib/inngest/survey-lifecycle";
import {
getOrganizationByEnvironmentId,
subscribeOrganizationMembersToSurveyResponses,
@@ -49,8 +56,23 @@ vi.mock("@/lib/actionClass/service", () => ({
getActionClasses: vi.fn(),
}));
vi.mock("@/lib/inngest/survey-lifecycle", () => ({
publishSurveyLifecycleEvents: vi.fn(),
}));
vi.mock("@formbricks/logger", () => ({
logger: {
error: vi.fn(),
},
}));
beforeEach(() => {
prisma.survey.count.mockResolvedValue(1);
prisma.$transaction.mockImplementation(async (callback: (tx: typeof prisma) => Promise<unknown>) =>
callback(prisma)
);
vi.mocked(publishSurveyLifecycleEvents).mockReset();
vi.mocked(logger.error).mockReset();
});
describe("evaluateLogic with mockSurveyWithLogic", () => {
@@ -307,6 +329,18 @@ describe("Tests for updateSurvey", () => {
prisma.survey.update.mockResolvedValueOnce(mockSurveyOutput);
const updatedSurvey = await updateSurvey(updateSurveyInput);
expect(updatedSurvey).toEqual(mockTransformedSurveyOutput);
expect(publishSurveyLifecycleEvents).toHaveBeenCalledWith({
survey: {
id: mockTransformedSurveyOutput.id,
environmentId: mockTransformedSurveyOutput.environmentId,
startsAt: mockTransformedSurveyOutput.startsAt,
endsAt: mockTransformedSurveyOutput.endsAt,
},
previousSurvey: {
startsAt: mockTransformedSurveyOutput.startsAt,
endsAt: mockTransformedSurveyOutput.endsAt,
},
});
});
// Note: Language handling tests (for languages.length > 0 fix) are covered in
@@ -341,6 +375,26 @@ describe("Tests for updateSurvey", () => {
prisma.survey.update.mockRejectedValue(new Error(mockErrorMessage));
await expect(updateSurvey(updateSurveyInput)).rejects.toThrow(Error);
});
test("surfaces post-commit Inngest publish failures", async () => {
prisma.survey.findUnique.mockResolvedValueOnce(mockSurveyOutput);
prisma.survey.update.mockResolvedValueOnce(mockSurveyOutput);
vi.mocked(publishSurveyLifecycleEvents).mockRejectedValueOnce(new Error("send failed"));
await expect(updateSurvey(updateSurveyInput)).rejects.toThrow("send failed");
expect(prisma.survey.update).toHaveBeenCalled();
expect(logger.error).toHaveBeenCalledWith(expect.any(Error), "Error updating survey");
});
test("throws a validation error when startsAt is not before endsAt", async () => {
await expect(
updateSurvey({
...updateSurveyInput,
startsAt: new Date("2026-04-02T12:00:00.000Z"),
endsAt: new Date("2026-04-01T12:00:00.000Z"),
})
).rejects.toThrow(ValidationError);
});
});
});
@@ -644,6 +698,14 @@ describe("Tests for createSurvey", () => {
expect(prisma.survey.create).toHaveBeenCalled();
expect(result.name).toEqual(mockSurveyOutput.name);
expect(publishSurveyLifecycleEvents).toHaveBeenCalledWith({
survey: {
id: mockTransformedSurveyOutput.id,
environmentId: mockTransformedSurveyOutput.environmentId,
startsAt: mockTransformedSurveyOutput.startsAt,
endsAt: mockTransformedSurveyOutput.endsAt,
},
});
expect(subscribeOrganizationMembersToSurveyResponses).toHaveBeenCalled();
});
@@ -663,6 +725,10 @@ describe("Tests for createSurvey", () => {
createdAt: new Date(),
updatedAt: new Date(),
} as unknown as TSegment);
prisma.survey.update.mockResolvedValueOnce({
...mockSurveyOutput,
type: "app",
});
await createSurvey(mockEnvironmentId, {
...mockCreateSurveyInput,
+192 -209
View File
@@ -7,6 +7,7 @@ import { ZId, ZOptionalNumber } from "@formbricks/types/common";
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSegment, ZSegmentFilters } from "@formbricks/types/segment";
import { TSurvey, TSurveyCreateInput, ZSurvey, ZSurveyCreateInput } from "@formbricks/types/surveys/types";
import { publishSurveyLifecycleEvents } from "@/lib/inngest/survey-lifecycle";
import {
getOrganizationByEnvironmentId,
subscribeOrganizationMembersToSurveyResponses,
@@ -32,6 +33,8 @@ export const selectSurvey = {
environmentId: true,
createdBy: true,
status: true,
startsAt: true,
endsAt: true,
welcomeCard: true,
questions: true,
blocks: true,
@@ -300,8 +303,6 @@ export const updateSurveyInternal = async (
try {
const surveyId = updatedSurvey.id;
let data: any = {};
const actionClasses = await getActionClasses(updatedSurvey.environmentId);
const currentSurvey = await getSurvey(surveyId);
@@ -324,100 +325,95 @@ export const updateSurveyInternal = async (
}
}
if (languages) {
// Process languages update logic here
// Extract currentLanguageIds and updatedLanguageIds
const currentLanguageIds = currentSurvey.languages
? currentSurvey.languages.map((l) => l.language.id)
: [];
const updatedLanguageIds =
languages.length > 0 ? updatedSurvey.languages.map((l) => l.language.id) : [];
const enabledLanguageIds = languages.map((language) => {
if (language.enabled) return language.language.id;
});
// Determine languages to add and remove
const languagesToAdd = updatedLanguageIds.filter((id) => !currentLanguageIds.includes(id));
const languagesToRemove = currentLanguageIds.filter((id) => !updatedLanguageIds.includes(id));
const defaultLanguageId = updatedSurvey.languages.find((l) => l.default)?.language.id;
// Prepare data for Prisma update
data.languages = {};
// Update existing languages for default value changes
data.languages.updateMany = currentSurvey.languages.map((surveyLanguage) => ({
where: { languageId: surveyLanguage.language.id },
data: {
default: surveyLanguage.language.id === defaultLanguageId,
enabled: enabledLanguageIds.includes(surveyLanguage.language.id),
},
}));
// Add new languages
if (languagesToAdd.length > 0) {
data.languages.create = languagesToAdd.map((languageId) => ({
languageId: languageId,
default: languageId === defaultLanguageId,
enabled: enabledLanguageIds.includes(languageId),
}));
}
// Remove languages no longer associated with the survey
if (languagesToRemove.length > 0) {
data.languages.deleteMany = languagesToRemove.map((languageId) => ({
languageId: languageId,
enabled: enabledLanguageIds.includes(languageId),
}));
}
const organization = await getOrganizationByEnvironmentId(environmentId);
if (!organization) {
throw new ResourceNotFoundError("Organization", null);
}
if (triggers) {
data.triggers = handleTriggerUpdates(triggers, currentSurvey.triggers, actionClasses);
}
const prismaSurvey = await prisma.$transaction(async (tx) => {
let data: Prisma.SurveyUpdateInput = {};
// if the survey body has type other than "app" but has a private segment, we delete that segment, and if it has a public segment, we disconnect from to the survey
if (segment) {
if (type === "app") {
// parse the segment filters:
const parsedFilters = ZSegmentFilters.safeParse(segment.filters);
if (!skipValidation && !parsedFilters.success) {
throw new InvalidInputError("Invalid user segment filters");
}
if (languages) {
const currentLanguageIds = currentSurvey.languages
? currentSurvey.languages.map((language) => language.language.id)
: [];
const updatedLanguageIds =
languages.length > 0 ? updatedSurvey.languages.map((language) => language.language.id) : [];
const enabledLanguageIds = languages.flatMap((language) =>
language.enabled ? [language.language.id] : []
);
try {
// update the segment:
let updatedInput: Prisma.SegmentUpdateInput = {
...segment,
surveys: undefined,
};
const languagesToAdd = updatedLanguageIds.filter((id) => !currentLanguageIds.includes(id));
const languagesToRemove = currentLanguageIds.filter((id) => !updatedLanguageIds.includes(id));
const defaultLanguageId = updatedSurvey.languages.find((language) => language.default)?.language.id;
if (segment.surveys) {
updatedInput = {
...segment,
surveys: {
connect: segment.surveys.map((surveyId) => ({ id: surveyId })),
},
};
data.languages = {
updateMany: currentSurvey.languages.map((surveyLanguage) => ({
where: { languageId: surveyLanguage.language.id },
data: {
default: surveyLanguage.language.id === defaultLanguageId,
enabled: enabledLanguageIds.includes(surveyLanguage.language.id),
},
})),
create:
languagesToAdd.length > 0
? languagesToAdd.map((languageId) => ({
languageId,
default: languageId === defaultLanguageId,
enabled: enabledLanguageIds.includes(languageId),
}))
: undefined,
deleteMany:
languagesToRemove.length > 0
? languagesToRemove.map((languageId) => ({
languageId,
enabled: enabledLanguageIds.includes(languageId),
}))
: undefined,
};
}
if (triggers) {
data.triggers = handleTriggerUpdates(triggers, currentSurvey.triggers, actionClasses);
}
if (segment) {
if (type === "app") {
const parsedFilters = ZSegmentFilters.safeParse(segment.filters);
if (!skipValidation && !parsedFilters.success) {
throw new InvalidInputError("Invalid user segment filters");
}
await prisma.segment.update({
where: { id: segment.id },
data: updatedInput,
select: {
surveys: { select: { id: true } },
environmentId: true,
id: true,
},
});
} catch (error) {
logger.error(error, "Error updating survey");
throw new Error("Error updating survey");
}
} else {
if (segment.isPrivate) {
// disconnect the private segment first and then delete:
await prisma.segment.update({
try {
let updatedInput: Prisma.SegmentUpdateInput = {
...segment,
surveys: undefined,
};
if (segment.surveys) {
updatedInput = {
...segment,
surveys: {
connect: segment.surveys.map((segmentSurveyId) => ({ id: segmentSurveyId })),
},
};
}
await tx.segment.update({
where: { id: segment.id },
data: updatedInput,
select: {
surveys: { select: { id: true } },
environmentId: true,
id: true,
},
});
} catch (error) {
logger.error(error, "Error updating survey");
throw new Error("Error updating survey");
}
} else if (segment.isPrivate) {
await tx.segment.update({
where: { id: segment.id },
data: {
surveys: {
@@ -428,14 +424,13 @@ export const updateSurveyInternal = async (
},
});
// delete the private segment:
await prisma.segment.delete({
await tx.segment.delete({
where: {
id: segment.id,
},
});
} else {
await prisma.survey.update({
await tx.survey.update({
where: {
id: surveyId,
},
@@ -446,10 +441,8 @@ export const updateSurveyInternal = async (
},
});
}
}
} else if (type === "app") {
if (!currentSurvey.segment) {
await prisma.survey.update({
} else if (type === "app" && !currentSurvey.segment) {
await tx.survey.update({
where: {
id: surveyId,
},
@@ -477,102 +470,89 @@ export const updateSurveyInternal = async (
},
});
}
}
if (followUps) {
// Separate follow-ups into categories based on deletion flag
const deletedFollowUps = followUps.filter((followUp) => followUp.deleted);
const nonDeletedFollowUps = followUps.filter((followUp) => !followUp.deleted);
if (followUps) {
const deletedFollowUps = followUps.filter((followUp) => followUp.deleted);
const nonDeletedFollowUps = followUps.filter((followUp) => !followUp.deleted);
const existingFollowUpIds = new Set(currentSurvey.followUps.map((followUp) => followUp.id));
// Get set of existing follow-up IDs from currentSurvey
const existingFollowUpIds = new Set(currentSurvey.followUps.map((f) => f.id));
const existingFollowUps = nonDeletedFollowUps.filter((followUp) =>
existingFollowUpIds.has(followUp.id)
);
const newFollowUps = nonDeletedFollowUps.filter((followUp) => !existingFollowUpIds.has(followUp.id));
// Separate non-deleted follow-ups into new and existing
const existingFollowUps = nonDeletedFollowUps.filter((followUp) =>
existingFollowUpIds.has(followUp.id)
);
const newFollowUps = nonDeletedFollowUps.filter((followUp) => !existingFollowUpIds.has(followUp.id));
data.followUps = {
// Update existing follow-ups
updateMany: existingFollowUps.map((followUp) => ({
where: {
id: followUp.id,
},
data: {
name: followUp.name,
trigger: followUp.trigger,
action: followUp.action,
},
})),
// Create new follow-ups
createMany:
newFollowUps.length > 0
? {
data: newFollowUps.map((followUp) => ({
data.followUps = {
updateMany: existingFollowUps.map((followUp) => ({
where: {
id: followUp.id,
},
data: {
name: followUp.name,
trigger: followUp.trigger,
action: followUp.action,
},
})),
createMany:
newFollowUps.length > 0
? {
data: newFollowUps.map((followUp) => ({
id: followUp.id,
name: followUp.name,
trigger: followUp.trigger,
action: followUp.action,
})),
}
: undefined,
deleteMany:
deletedFollowUps.length > 0
? deletedFollowUps.map((followUp) => ({
id: followUp.id,
name: followUp.name,
trigger: followUp.trigger,
action: followUp.action,
})),
}
: undefined,
// Delete follow-ups marked as deleted, regardless of whether they exist in DB
deleteMany:
deletedFollowUps.length > 0
? deletedFollowUps.map((followUp) => ({
id: followUp.id,
}))
: undefined,
};
}
}))
: undefined,
};
}
data.questions = questions.map((question) => {
const { isDraft, ...rest } = question;
return rest;
data.questions = questions.map((question) => {
const { isDraft, ...rest } = question;
return rest;
});
if (updatedSurvey.blocks && updatedSurvey.blocks.length > 0) {
data.blocks = stripIsDraftFromBlocks(updatedSurvey.blocks);
}
surveyData.updatedAt = new Date();
data = {
...surveyData,
...data,
type,
};
delete data.createdBy;
return tx.survey.update({
where: { id: surveyId },
data,
select: selectSurvey,
});
});
// Strip isDraft from elements before saving
if (updatedSurvey.blocks && updatedSurvey.blocks.length > 0) {
data.blocks = stripIsDraftFromBlocks(updatedSurvey.blocks);
}
const transformedSurvey = transformPrismaSurvey<TSurvey>(prismaSurvey);
const organization = await getOrganizationByEnvironmentId(environmentId);
if (!organization) {
throw new ResourceNotFoundError("Organization", null);
}
surveyData.updatedAt = new Date();
data = {
...surveyData,
...data,
type,
};
delete data.createdBy;
const prismaSurvey = await prisma.survey.update({
where: { id: surveyId },
data,
select: selectSurvey,
await publishSurveyLifecycleEvents({
survey: {
id: transformedSurvey.id,
environmentId: transformedSurvey.environmentId,
startsAt: transformedSurvey.startsAt,
endsAt: transformedSurvey.endsAt,
},
previousSurvey: {
startsAt: currentSurvey.startsAt ?? null,
endsAt: currentSurvey.endsAt ?? null,
},
});
let surveySegment: TSegment | null = null;
if (prismaSurvey.segment) {
surveySegment = {
...prismaSurvey.segment,
surveys: prismaSurvey.segment.surveys.map((survey) => survey.id),
};
}
const modifiedSurvey: TSurvey = {
...prismaSurvey, // Properties from prismaSurvey
displayPercentage: Number(prismaSurvey.displayPercentage) || null,
segment: surveySegment,
customHeadScriptsMode: prismaSurvey.customHeadScriptsMode,
};
return modifiedSurvey;
return transformedSurvey;
} catch (error) {
logger.error(error, "Error updating survey");
if (error instanceof Prisma.PrismaClientKnownRequestError) {
@@ -651,23 +631,26 @@ export const createSurvey = async (
data.blocks = validateMediaAndPrepareBlocks(data.blocks);
}
const survey = await prisma.survey.create({
data: {
...data,
environment: {
connect: {
id: parsedEnvironmentId,
const survey = await prisma.$transaction(async (tx) => {
const createdSurvey = await tx.survey.create({
data: {
...data,
environment: {
connect: {
id: parsedEnvironmentId,
},
},
},
},
select: selectSurvey,
});
select: selectSurvey,
});
// if the survey created is an "app" survey, we also create a private segment for it.
if (survey.type === "app") {
const newSegment = await prisma.segment.create({
if (createdSurvey.type !== "app") {
return createdSurvey;
}
const newSegment = await tx.segment.create({
data: {
title: survey.id,
title: createdSurvey.id,
filters: [],
isPrivate: true,
environment: {
@@ -678,9 +661,9 @@ export const createSurvey = async (
},
});
await prisma.survey.update({
return tx.survey.update({
where: {
id: survey.id,
id: createdSurvey.id,
},
data: {
segment: {
@@ -689,20 +672,20 @@ export const createSurvey = async (
},
},
},
select: selectSurvey,
});
}
});
// TODO: Fix this, this happens because the survey type "web" is no longer in the zod types but its required in the schema for migration
// @ts-expect-error
const transformedSurvey: TSurvey = {
...survey,
...(survey.segment && {
segment: {
...survey.segment,
surveys: survey.segment.surveys.map((survey) => survey.id),
},
}),
};
const transformedSurvey = transformPrismaSurvey<TSurvey>(survey);
await publishSurveyLifecycleEvents({
survey: {
id: transformedSurvey.id,
environmentId: transformedSurvey.environmentId,
startsAt: transformedSurvey.startsAt,
endsAt: transformedSurvey.endsAt,
},
});
if (createdBy) {
await subscribeOrganizationMembersToSurveyResponses(survey.id, createdBy, organization.id);
@@ -31,6 +31,8 @@ export const ZSurveyInput = ZSurveyWithoutQuestionType.pick({
environmentId: true,
questions: true,
blocks: true,
startsAt: true,
endsAt: true,
endings: true,
hiddenFields: true,
variables: true,
@@ -59,6 +61,8 @@ export const ZSurveyInput = ZSurveyWithoutQuestionType.pick({
displayLimit: true,
autoClose: true,
autoComplete: true,
startsAt: true,
endsAt: true,
surveyClosedMessage: true,
styling: true,
projectOverwrites: true,
@@ -1,284 +1,43 @@
import { ActionClass, Prisma } from "@prisma/client";
import "@testing-library/jest-dom/vitest";
import { beforeEach, describe, expect, test, vi } from "vitest";
import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSurveyCreateInput } from "@formbricks/types/surveys/types";
import { DatabaseError } from "@formbricks/types/errors";
import { TSurvey, TSurveyCreateInput } from "@formbricks/types/surveys/types";
import {
getOrganizationByEnvironmentId,
subscribeOrganizationMembersToSurveyResponses,
} from "@/lib/organization/service";
import { getActionClasses } from "@/modules/survey/lib/action-class";
import { selectSurvey } from "@/modules/survey/lib/survey";
createSurvey as createSurveyFromService,
handleTriggerUpdates as handleTriggerUpdatesFromService,
} from "@/lib/survey/service";
import { createSurvey, handleTriggerUpdates } from "./survey";
// Mock dependencies
vi.mock("@/lib/survey/utils", () => ({
checkForInvalidImagesInQuestions: vi.fn(),
vi.mock("@/lib/survey/service", () => ({
createSurvey: vi.fn(),
handleTriggerUpdates: vi.fn(),
}));
vi.mock("@/lib/organization/service", () => ({
subscribeOrganizationMembersToSurveyResponses: vi.fn(),
getOrganizationByEnvironmentId: vi.fn(),
}));
describe("template list survey wrappers", () => {
const environmentId = "env_1";
const surveyBody = { name: "Survey" } as TSurveyCreateInput;
const createdSurvey = { id: "survey_1" } as TSurvey;
vi.mock("@/modules/survey/lib/action-class", () => ({
getActionClasses: vi.fn(),
}));
vi.mock("@/modules/survey/lib/survey", () => ({
selectSurvey: {
id: true,
createdAt: true,
updatedAt: true,
name: true,
type: true,
status: true,
environmentId: true,
segment: true,
},
}));
vi.mock("@formbricks/database", () => ({
prisma: {
survey: {
create: vi.fn(),
update: vi.fn(),
},
segment: {
create: vi.fn(),
},
},
}));
vi.mock("@formbricks/logger", () => ({
logger: {
error: vi.fn(),
},
}));
describe("survey module", () => {
beforeEach(() => {
vi.resetAllMocks();
vi.clearAllMocks();
});
describe("createSurvey", () => {
test("creates a survey successfully", async () => {
// Mock input data
const environmentId = "env-123";
const surveyBody: TSurveyCreateInput = {
name: "Test Survey",
type: "app",
status: "draft",
questions: [],
createdBy: "user-123",
};
// Mock dependencies
const mockActionClasses: ActionClass[] = [];
vi.mocked(getActionClasses).mockResolvedValue(mockActionClasses);
vi.mocked(getOrganizationByEnvironmentId).mockResolvedValue({ id: "org-123", name: "Org" } as any);
const mockCreatedSurvey = {
id: "survey-123",
environmentId,
type: "app",
segment: {
surveys: [{ id: "survey-123" }],
},
} as any;
vi.mocked(prisma.survey.create).mockResolvedValue(mockCreatedSurvey);
const mockSegment = { id: "segment-123" } as any;
vi.mocked(prisma.segment.create).mockResolvedValue(mockSegment);
// Execute function
const result = await createSurvey(environmentId, surveyBody);
// Verify results
expect(getActionClasses).toHaveBeenCalledWith(environmentId);
expect(getOrganizationByEnvironmentId).toHaveBeenCalledWith(environmentId);
expect(prisma.survey.create).toHaveBeenCalledWith({
data: expect.objectContaining({
name: surveyBody.name,
type: surveyBody.type,
environment: { connect: { id: environmentId } },
creator: { connect: { id: surveyBody.createdBy } },
}),
select: selectSurvey,
});
expect(prisma.segment.create).toHaveBeenCalled();
expect(prisma.survey.update).toHaveBeenCalled();
expect(subscribeOrganizationMembersToSurveyResponses).toHaveBeenCalledWith(
"survey-123",
"user-123",
"org-123"
);
expect(result).toBeDefined();
expect(result.id).toBe("survey-123");
});
test("handles empty languages array", async () => {
const environmentId = "env-123";
const surveyBody: TSurveyCreateInput = {
name: "Test Survey",
type: "app",
status: "draft",
languages: [],
questions: [],
};
vi.mocked(getActionClasses).mockResolvedValue([]);
vi.mocked(getOrganizationByEnvironmentId).mockResolvedValue({ id: "org-123" } as any);
vi.mocked(prisma.survey.create).mockResolvedValue({
id: "survey-123",
environmentId,
type: "link",
segment: null,
} as any);
await createSurvey(environmentId, surveyBody);
expect(prisma.survey.create).toHaveBeenCalledWith(
expect.objectContaining({
data: expect.not.objectContaining({ languages: [] }),
})
);
});
test("handles follow-ups properly", async () => {
const environmentId = "env-123";
const surveyBody: TSurveyCreateInput = {
name: "Test Survey",
type: "app",
status: "draft",
questions: [],
followUps: [{ name: "Follow Up 1", trigger: "trigger1", action: "action1" } as any],
};
vi.mocked(getActionClasses).mockResolvedValue([]);
vi.mocked(getOrganizationByEnvironmentId).mockResolvedValue({ id: "org-123" } as any);
vi.mocked(prisma.survey.create).mockResolvedValue({
id: "survey-123",
environmentId,
type: "link",
segment: null,
} as any);
await createSurvey(environmentId, surveyBody);
expect(prisma.survey.create).toHaveBeenCalledWith(
expect.objectContaining({
data: expect.objectContaining({
followUps: {
create: [{ name: "Follow Up 1", trigger: "trigger1", action: "action1" }],
},
}),
})
);
});
test("throws error when organization not found", async () => {
const environmentId = "env-123";
const surveyBody: TSurveyCreateInput = {
name: "Test Survey",
type: "app",
status: "draft",
questions: [],
};
vi.mocked(getActionClasses).mockResolvedValue([]);
vi.mocked(getOrganizationByEnvironmentId).mockResolvedValue(null);
await expect(createSurvey(environmentId, surveyBody)).rejects.toThrow(ResourceNotFoundError);
});
test("handles database errors", async () => {
const environmentId = "env-123";
const surveyBody: TSurveyCreateInput = {
name: "Test Survey",
type: "app",
status: "draft",
questions: [],
};
vi.mocked(getActionClasses).mockResolvedValue([]);
vi.mocked(getOrganizationByEnvironmentId).mockResolvedValue({ id: "org-123" } as any);
const prismaError = new Prisma.PrismaClientKnownRequestError("Database error", {
code: "P2002",
clientVersion: "5.0.0",
});
vi.mocked(prisma.survey.create).mockRejectedValue(prismaError);
await expect(createSurvey(environmentId, surveyBody)).rejects.toThrow(DatabaseError);
expect(logger.error).toHaveBeenCalled();
});
test("re-exports the shared trigger update helper", () => {
expect(handleTriggerUpdates).toBe(handleTriggerUpdatesFromService);
});
describe("handleTriggerUpdates", () => {
test("handles empty triggers", () => {
const result = handleTriggerUpdates(undefined as any, [], []);
expect(result).toEqual({});
});
test("delegates createSurvey to the shared survey service", async () => {
vi.mocked(createSurveyFromService).mockResolvedValueOnce(createdSurvey);
test("adds new triggers", () => {
const updatedTriggers = [
{ actionClass: { id: "action-1" } },
{ actionClass: { id: "action-2" } },
] as any;
const currentTriggers = [] as any;
const actionClasses = [{ id: "action-1" }, { id: "action-2" }] as ActionClass[];
const result = await createSurvey(environmentId, surveyBody);
const result = handleTriggerUpdates(updatedTriggers, currentTriggers, actionClasses);
expect(createSurveyFromService).toHaveBeenCalledWith(environmentId, surveyBody);
expect(result).toBe(createdSurvey);
});
expect(result).toEqual({
create: [{ actionClassId: "action-1" }, { actionClassId: "action-2" }],
});
});
test("propagates service errors", async () => {
const error = new DatabaseError("database error");
vi.mocked(createSurveyFromService).mockRejectedValueOnce(error);
test("removes triggers", () => {
const updatedTriggers = [] as any;
const currentTriggers = [
{ actionClass: { id: "action-1" } },
{ actionClass: { id: "action-2" } },
] as any;
const actionClasses = [{ id: "action-1" }, { id: "action-2" }] as ActionClass[];
const result = handleTriggerUpdates(updatedTriggers, currentTriggers, actionClasses);
expect(result).toEqual({
deleteMany: {
actionClassId: {
in: ["action-1", "action-2"],
},
},
});
});
test("throws error for invalid trigger", () => {
const updatedTriggers = [{ actionClass: { id: "action-3" } }] as any;
const currentTriggers = [] as any;
const actionClasses = [{ id: "action-1" }] as ActionClass[];
expect(() => handleTriggerUpdates(updatedTriggers, currentTriggers, actionClasses)).toThrow(
InvalidInputError
);
});
test("throws error for duplicate triggers", () => {
const updatedTriggers = [
{ actionClass: { id: "action-1" } },
{ actionClass: { id: "action-1" } },
] as any;
const currentTriggers = [] as any;
const actionClasses = [{ id: "action-1" }] as ActionClass[];
expect(() => handleTriggerUpdates(updatedTriggers, currentTriggers, actionClasses)).toThrow(
InvalidInputError
);
});
await expect(createSurvey(environmentId, surveyBody)).rejects.toThrow(error);
});
});
@@ -1,205 +1,11 @@
import { Prisma } from "@prisma/client";
import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSurvey, TSurveyCreateInput } from "@formbricks/types/surveys/types";
import {
getOrganizationByEnvironmentId,
subscribeOrganizationMembersToSurveyResponses,
} from "@/lib/organization/service";
import { validateMediaAndPrepareBlocks } from "@/lib/survey/utils";
import { TriggerUpdate } from "@/modules/survey/editor/types/survey-trigger";
import { getActionClasses } from "@/modules/survey/lib/action-class";
import { selectSurvey } from "@/modules/survey/lib/survey";
import { createSurvey as createSurveyFromService, handleTriggerUpdates } from "@/lib/survey/service";
export { handleTriggerUpdates };
export const createSurvey = async (
environmentId: string,
surveyBody: TSurveyCreateInput
): Promise<TSurvey> => {
try {
const { createdBy, ...restSurveyBody } = surveyBody;
// empty languages array
if (!restSurveyBody.languages?.length) {
delete restSurveyBody.languages;
}
const actionClasses = await getActionClasses(environmentId);
// @ts-expect-error
let data: Omit<Prisma.SurveyCreateInput, "environment"> = {
...restSurveyBody,
// TODO: Create with attributeFilters
triggers: restSurveyBody.triggers
? handleTriggerUpdates(restSurveyBody.triggers, [], actionClasses)
: undefined,
attributeFilters: undefined,
};
if (createdBy) {
data.creator = {
connect: {
id: createdBy,
},
};
}
const organization = await getOrganizationByEnvironmentId(environmentId);
if (!organization) {
throw new ResourceNotFoundError("Organization", null);
}
// Survey follow-ups
if (restSurveyBody.followUps?.length) {
data.followUps = {
create: restSurveyBody.followUps.map((followUp) => ({
name: followUp.name,
trigger: followUp.trigger,
action: followUp.action,
})),
};
} else {
delete data.followUps;
}
// Validate and prepare blocks
if (data.blocks && data.blocks.length > 0) {
data.blocks = validateMediaAndPrepareBlocks(data.blocks);
}
const survey = await prisma.survey.create({
data: {
...data,
environment: {
connect: {
id: environmentId,
},
},
},
select: selectSurvey,
});
// if the survey created is an "app" survey, we also create a private segment for it.
if (survey.type === "app") {
const newSegment = await prisma.segment.create({
data: {
title: survey.id,
filters: [],
isPrivate: true,
environment: {
connect: {
id: environmentId,
},
},
},
});
await prisma.survey.update({
where: {
id: survey.id,
},
data: {
segment: {
connect: {
id: newSegment.id,
},
},
},
});
}
// TODO: Fix this, this happens because the survey type "web" is no longer in the zod types but its required in the schema for migration
// @ts-expect-error
const transformedSurvey: TSurvey = {
...survey,
...(survey.segment && {
segment: {
...survey.segment,
surveys: survey.segment.surveys.map((survey) => survey.id),
},
}),
};
if (createdBy) {
await subscribeOrganizationMembersToSurveyResponses(survey.id, createdBy, organization.id);
}
return transformedSurvey;
} catch (error) {
if (error instanceof Prisma.PrismaClientKnownRequestError) {
logger.error(error, "Error creating survey");
throw new DatabaseError(error.message);
}
throw error;
}
};
const getTriggerIds = (triggers: unknown): string[] | null => {
if (!triggers) return null;
if (!Array.isArray(triggers)) {
throw new InvalidInputError("Invalid trigger id");
}
return triggers.map((trigger) => {
const actionClassId = (trigger as { actionClass?: { id?: unknown } })?.actionClass?.id;
if (typeof actionClassId !== "string") {
throw new InvalidInputError("Invalid trigger id");
}
return actionClassId;
});
};
const checkTriggersValidity = (triggers: unknown, actionClasses: Array<{ id: string }>) => {
const triggerIds = getTriggerIds(triggers);
if (!triggerIds) return;
// check if all the triggers are valid
triggerIds.forEach((triggerId) => {
if (!actionClasses.find((actionClass) => actionClass.id === triggerId)) {
throw new InvalidInputError("Invalid trigger id");
}
});
if (new Set(triggerIds).size !== triggerIds.length) {
throw new InvalidInputError("Duplicate trigger id");
}
};
export const handleTriggerUpdates = (
updatedTriggers: unknown,
currentTriggers: unknown,
actionClasses: Array<{ id: string }>
) => {
const updatedTriggerIds = getTriggerIds(updatedTriggers);
if (!updatedTriggerIds) return {};
checkTriggersValidity(updatedTriggers, actionClasses);
const currentTriggerIds = getTriggerIds(currentTriggers) ?? [];
// added triggers are triggers that are not in the current triggers and are there in the new triggers
const addedTriggerIds = updatedTriggerIds.filter((triggerId) => !currentTriggerIds.includes(triggerId));
// deleted triggers are triggers that are not in the new triggers and are there in the current triggers
const deletedTriggerIds = currentTriggerIds.filter((triggerId) => !updatedTriggerIds.includes(triggerId));
// Construct the triggers update object
const triggersUpdate: TriggerUpdate = {};
if (addedTriggerIds.length > 0) {
triggersUpdate.create = addedTriggerIds.map((triggerId) => ({
actionClassId: triggerId,
}));
}
if (deletedTriggerIds.length > 0) {
// disconnect the public triggers from the survey
triggersUpdate.deleteMany = {
actionClassId: {
in: deletedTriggerIds,
},
};
}
return triggersUpdate;
return createSurveyFromService(environmentId, surveyBody);
};
+35 -813
View File
@@ -1,837 +1,59 @@
import { ActionClass, Prisma } from "@prisma/client";
import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
import { prisma } from "@formbricks/database";
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSegment } from "@formbricks/types/segment";
import { TSurvey, TSurveyQuestionTypeEnum } from "@formbricks/types/surveys/types";
import { updateSurveyInternal } from "@/lib/survey/service";
import { getActionClasses } from "@/modules/survey/lib/action-class";
import { getOrganizationAIKeys, getOrganizationIdFromEnvironmentId } from "@/modules/survey/lib/organization";
import { getSurvey } from "@/modules/survey/lib/survey";
import { checkTriggersValidity, handleTriggerUpdates, updateSurvey, updateSurveyDraft } from "./survey";
// Mock dependencies
vi.mock("@formbricks/database", () => ({
prisma: {
survey: {
update: vi.fn(),
},
segment: {
update: vi.fn(),
delete: vi.fn(),
},
},
}));
vi.mock("@/lib/survey/utils", () => ({
checkForInvalidImagesInQuestions: vi.fn(),
}));
import { beforeEach, describe, expect, test, vi } from "vitest";
import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSurvey } from "@formbricks/types/surveys/types";
import {
handleTriggerUpdates as handleTriggerUpdatesFromService,
updateSurvey as updateSurveyFromService,
updateSurveyInternal,
} from "@/lib/survey/service";
import { handleTriggerUpdates, updateSurvey, updateSurveyDraft } from "./survey";
vi.mock("@/lib/survey/service", () => ({
handleTriggerUpdates: vi.fn(),
updateSurvey: vi.fn(),
updateSurveyInternal: vi.fn(),
}));
vi.mock("@/modules/survey/lib/action-class", () => ({
getActionClasses: vi.fn(),
}));
describe("survey editor wrappers", () => {
const survey = { id: "survey_1" } as TSurvey;
vi.mock("@/modules/survey/lib/organization", () => ({
getOrganizationIdFromEnvironmentId: vi.fn(),
getOrganizationAIKeys: vi.fn(),
}));
vi.mock("@/modules/survey/lib/survey", () => ({
getSurvey: vi.fn(),
selectSurvey: {
id: true,
createdAt: true,
updatedAt: true,
name: true,
type: true,
environmentId: true,
},
}));
vi.mock("@formbricks/logger", () => ({
logger: {
error: vi.fn(),
},
}));
describe("Survey Editor Library Tests", () => {
afterEach(() => {
beforeEach(() => {
vi.clearAllMocks();
});
describe("updateSurvey", () => {
const mockSurvey = {
id: "survey123",
createdAt: new Date(),
updatedAt: new Date(),
name: "Test Survey",
type: "app",
environmentId: "env123",
createdBy: "user123",
status: "draft",
displayOption: "displayOnce",
questions: [
{
id: "q1",
type: TSurveyQuestionTypeEnum.OpenText,
headline: { default: "Question 1" },
required: false,
inputType: "text",
charLimit: { enabled: false },
},
],
welcomeCard: {
enabled: false,
timeToFinish: true,
showResponseCount: false,
},
triggers: [],
endings: [],
hiddenFields: { enabled: false },
delay: 0,
autoComplete: null,
projectOverwrites: null,
styling: null,
showLanguageSwitch: false,
segment: null,
surveyClosedMessage: null,
singleUse: null,
isVerifyEmailEnabled: false,
recaptcha: null,
isSingleResponsePerEmailEnabled: false,
isBackButtonHidden: false,
pin: null,
displayPercentage: null,
languages: [
{
language: {
id: "en",
code: "en",
createdAt: new Date(),
updatedAt: new Date(),
alias: null,
projectId: "project1",
},
default: true,
enabled: true,
},
],
variables: [],
followUps: [],
} as unknown as TSurvey;
const mockCurrentSurvey = { ...mockSurvey };
const mockActionClasses: ActionClass[] = [
{
id: "action1",
name: "Code Action",
description: "Action from code",
type: "code" as const,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
},
];
const mockOrganizationId = "org123";
const mockOrganization = {
id: mockOrganizationId,
name: "Test Organization",
ownerUserId: "user123",
billing: {
stripeCustomerId: "cust_123",
features: {},
usageCycleAnchor: new Date(),
},
isAIEnabled: false,
};
beforeEach(() => {
vi.mocked(prisma.survey.update).mockResolvedValue(mockSurvey as any);
vi.mocked(prisma.segment.update).mockResolvedValue({
id: "segment1",
environmentId: "env123",
surveys: [{ id: "survey123" }],
} as any);
vi.mocked(getSurvey).mockResolvedValue(mockCurrentSurvey);
vi.mocked(getActionClasses).mockResolvedValue(mockActionClasses);
vi.mocked(getOrganizationIdFromEnvironmentId).mockResolvedValue(mockOrganizationId);
vi.mocked(getOrganizationAIKeys).mockResolvedValue(mockOrganization as any);
});
test("should handle languages update with multiple languages", async () => {
const updatedSurvey: TSurvey = {
...mockSurvey,
languages: [
{
language: {
id: "en",
code: "en",
createdAt: new Date(),
updatedAt: new Date(),
alias: null,
projectId: "project1",
},
default: true,
enabled: true,
},
{
language: {
id: "es",
code: "es",
createdAt: new Date(),
updatedAt: new Date(),
alias: null,
projectId: "project1",
},
default: false,
enabled: true,
},
],
};
await updateSurvey(updatedSurvey);
expect(prisma.survey.update).toHaveBeenCalledWith({
where: { id: "survey123" },
data: expect.objectContaining({
languages: {
updateMany: expect.any(Array),
create: expect.arrayContaining([
expect.objectContaining({
languageId: "es",
default: false,
enabled: true,
}),
]),
},
}),
select: expect.any(Object),
});
});
test("should handle languages update with single default language", async () => {
// This tests the fix for the bug where languages.length === 1 would incorrectly
// set updatedLanguageIds to [] causing the default language to be removed
const updatedSurvey: TSurvey = {
...mockSurvey,
languages: [
{
language: {
id: "en",
code: "en",
createdAt: new Date(),
updatedAt: new Date(),
alias: null,
projectId: "project1",
},
default: true,
enabled: true,
},
],
};
await updateSurvey(updatedSurvey);
// Verify that prisma.survey.update was called
expect(prisma.survey.update).toHaveBeenCalled();
const updateCall = vi.mocked(prisma.survey.update).mock.calls[0][0];
// The key test: when languages.length === 1, we should still process language updates
// and NOT delete the language. Before the fix, languages.length > 1 would fail this case.
expect(updateCall).toBeDefined();
expect(updateCall.where).toEqual({ id: "survey123" });
expect(updateCall.data).toBeDefined();
});
test("should remove all languages when empty array is passed", async () => {
const updatedSurvey: TSurvey = {
...mockSurvey,
languages: [],
};
await updateSurvey(updatedSurvey);
// Verify that prisma.survey.update was called
expect(prisma.survey.update).toHaveBeenCalled();
const updateCall = vi.mocked(prisma.survey.update).mock.calls[0][0];
// When languages is empty array, all existing languages should be removed
expect(updateCall).toBeDefined();
expect(updateCall.where).toEqual({ id: "survey123" });
expect(updateCall.data).toBeDefined();
});
test("should delete private segment for non-app type surveys", async () => {
const mockSegment: TSegment = {
id: "segment1",
title: "Test Segment",
isPrivate: true,
environmentId: "env123",
surveys: ["survey123"],
createdAt: new Date(),
updatedAt: new Date(),
description: null,
filters: [{ id: "filter1" } as any],
};
const updatedSurvey: TSurvey = {
...mockSurvey,
type: "link",
segment: mockSegment,
};
await updateSurvey(updatedSurvey);
expect(prisma.segment.update).toHaveBeenCalledWith({
where: { id: "segment1" },
data: {
surveys: {
disconnect: {
id: "survey123",
},
},
},
});
expect(prisma.segment.delete).toHaveBeenCalledWith({
where: {
id: "segment1",
},
});
});
test("should disconnect public segment for non-app type surveys", async () => {
const mockSegment: TSegment = {
id: "segment1",
title: "Test Segment",
isPrivate: false,
environmentId: "env123",
surveys: ["survey123"],
createdAt: new Date(),
updatedAt: new Date(),
description: null,
filters: [],
};
const updatedSurvey: TSurvey = {
...mockSurvey,
type: "link",
segment: mockSegment,
};
await updateSurvey(updatedSurvey);
expect(prisma.survey.update).toHaveBeenCalledWith({
where: {
id: "survey123",
},
data: {
segment: {
disconnect: true,
},
},
});
});
test("should handle followUps updates", async () => {
const updatedSurvey: TSurvey = {
...mockSurvey,
followUps: [
{
id: "f1",
name: "Existing Follow Up",
createdAt: new Date(),
updatedAt: new Date(),
surveyId: "survey123",
trigger: {
type: "response",
properties: {
endingIds: ["ending1"],
},
},
action: {
type: "send-email",
properties: {
to: "test@example.com",
subject: "Test",
body: "Test body",
from: "test@formbricks.com",
replyTo: ["reply@formbricks.com"],
attachResponseData: false,
},
},
deleted: false,
},
{
id: "f2",
name: "New Follow Up",
createdAt: new Date(),
updatedAt: new Date(),
surveyId: "survey123",
trigger: {
type: "response",
properties: {
endingIds: ["ending1"],
},
},
action: {
type: "send-email",
properties: {
to: "new@example.com",
subject: "New Test",
body: "New test body",
from: "test@formbricks.com",
replyTo: ["reply@formbricks.com"],
attachResponseData: false,
},
},
deleted: false,
},
{
id: "f3",
name: "Follow Up To Delete",
createdAt: new Date(),
updatedAt: new Date(),
surveyId: "survey123",
trigger: {
type: "response",
properties: {
endingIds: ["ending1"],
},
},
action: {
type: "send-email",
properties: {
to: "delete@example.com",
subject: "Delete Test",
body: "Delete test body",
from: "test@formbricks.com",
replyTo: ["reply@formbricks.com"],
attachResponseData: false,
},
},
deleted: true,
},
],
};
// Mock current survey with existing followUps
vi.mocked(getSurvey).mockResolvedValueOnce({
...mockCurrentSurvey,
followUps: [
{
id: "f1",
name: "Existing Follow Up",
trigger: {
type: "response",
properties: {
endingIds: ["ending1"],
},
},
action: {
type: "send-email",
properties: {
to: "test@example.com",
subject: "Test",
body: "Test body",
from: "test@formbricks.com",
replyTo: ["reply@formbricks.com"],
attachResponseData: false,
},
},
},
],
} as any);
await updateSurvey(updatedSurvey);
expect(prisma.survey.update).toHaveBeenCalledWith({
where: { id: "survey123" },
data: expect.objectContaining({
followUps: {
updateMany: [
{
where: {
id: "f1",
},
data: expect.objectContaining({
name: "Existing Follow Up",
}),
},
],
createMany: {
data: [
expect.objectContaining({
name: "New Follow Up",
}),
],
},
deleteMany: [
{
id: "f3",
},
],
},
}),
select: expect.any(Object),
});
});
test("should throw ResourceNotFoundError when survey is not found", async () => {
vi.mocked(getSurvey).mockResolvedValueOnce(null as unknown as TSurvey);
await expect(updateSurvey(mockSurvey)).rejects.toThrow(ResourceNotFoundError);
expect(getSurvey).toHaveBeenCalledWith("survey123");
});
test("should throw ResourceNotFoundError when organization is not found", async () => {
vi.mocked(getOrganizationAIKeys).mockResolvedValueOnce(null);
await expect(updateSurvey(mockSurvey)).rejects.toThrow(ResourceNotFoundError);
});
test("should throw DatabaseError when Prisma throws a known request error", async () => {
const prismaError = new Prisma.PrismaClientKnownRequestError("Database error", {
code: "P2002",
clientVersion: "4.0.0",
});
vi.mocked(prisma.survey.update).mockRejectedValueOnce(prismaError);
await expect(updateSurvey(mockSurvey)).rejects.toThrow(DatabaseError);
});
test("should rethrow other errors", async () => {
const genericError = new Error("Some other error");
vi.mocked(prisma.survey.update).mockRejectedValueOnce(genericError);
await expect(updateSurvey(mockSurvey)).rejects.toThrow(genericError);
});
test("should throw InvalidInputError for invalid segment filters", async () => {
const updatedSurvey: TSurvey = {
...mockSurvey,
segment: {
id: "segment1",
title: "Test Segment",
isPrivate: false,
environmentId: "env123",
surveys: ["survey123"],
createdAt: new Date(),
updatedAt: new Date(),
description: null,
filters: "invalid filters" as any,
},
};
await expect(updateSurvey(updatedSurvey)).rejects.toThrow(InvalidInputError);
});
test("should handle error in segment update", async () => {
vi.mocked(prisma.segment.update).mockRejectedValueOnce(new Error("Error updating survey"));
const updatedSurvey: TSurvey = {
...mockSurvey,
segment: {
id: "segment1",
title: "Test Segment",
isPrivate: false,
environmentId: "env123",
surveys: ["survey123"],
createdAt: new Date(),
updatedAt: new Date(),
description: null,
filters: [],
},
};
await expect(updateSurvey(updatedSurvey)).rejects.toThrow("Error updating survey");
});
test("re-exports the shared trigger update helper", () => {
expect(handleTriggerUpdates).toBe(handleTriggerUpdatesFromService);
});
describe("checkTriggersValidity", () => {
const mockActionClasses: ActionClass[] = [
{
id: "action1",
name: "Action 1",
description: "Test Action 1",
type: "code" as const,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
},
{
id: "action2",
name: "Action 2",
description: "Test Action 2",
type: "noCode" as const,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
},
];
test("delegates updateSurvey to the shared survey service", async () => {
vi.mocked(updateSurveyFromService).mockResolvedValueOnce(survey);
const createFullActionClass = (id: string, type: "code" | "noCode" = "code"): ActionClass => ({
id,
name: `Action ${id}`,
description: `Test Action ${id}`,
type,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
});
const result = await updateSurvey(survey);
test("should not throw error for valid triggers", () => {
const triggers = [
{ actionClass: createFullActionClass("action1") },
{ actionClass: createFullActionClass("action2", "noCode") },
];
expect(() => checkTriggersValidity(triggers as any, mockActionClasses)).not.toThrow();
});
test("should throw error for invalid trigger id", () => {
const triggers = [
{ actionClass: createFullActionClass("action1") },
{ actionClass: createFullActionClass("invalid") },
];
expect(() => checkTriggersValidity(triggers as any, mockActionClasses)).toThrow(InvalidInputError);
expect(() => checkTriggersValidity(triggers as any, mockActionClasses)).toThrow("Invalid trigger id");
});
test("should throw error for duplicate trigger ids", () => {
const triggers = [
{ actionClass: createFullActionClass("action1") },
{ actionClass: createFullActionClass("action1") },
];
expect(() => checkTriggersValidity(triggers as any, mockActionClasses)).toThrow(InvalidInputError);
expect(() => checkTriggersValidity(triggers as any, mockActionClasses)).toThrow("Duplicate trigger id");
});
test("should do nothing when triggers are undefined", () => {
expect(() => checkTriggersValidity(undefined as any, mockActionClasses)).not.toThrow();
});
expect(updateSurveyFromService).toHaveBeenCalledWith(survey);
expect(result).toBe(survey);
});
describe("handleTriggerUpdates", () => {
const mockActionClasses: ActionClass[] = [
{
id: "action1",
name: "Action 1",
description: "Test Action 1",
type: "code" as const,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
},
{
id: "action2",
name: "Action 2",
description: "Test Action 2",
type: "noCode" as const,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
},
{
id: "action3",
name: "Action 3",
description: "Test Action 3",
type: "noCode" as const,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
},
];
test("delegates draft saves to updateSurveyInternal with skipValidation enabled", async () => {
vi.mocked(updateSurveyInternal).mockResolvedValueOnce(survey);
const createActionClassObj = (id: string, type: "code" | "noCode" = "code"): ActionClass => ({
id,
name: `Action ${id}`,
description: `Test Action ${id}`,
type,
environmentId: "env123",
createdAt: new Date(),
updatedAt: new Date(),
key: null,
noCodeConfig: null,
});
const result = await updateSurveyDraft(survey);
test("should return empty object when updatedTriggers is undefined", () => {
const result = handleTriggerUpdates(undefined as any, [], mockActionClasses);
expect(result).toEqual({});
});
test("should identify added triggers correctly", () => {
const currentTriggers = [{ actionClass: createActionClassObj("action1") }];
const updatedTriggers = [
{ actionClass: createActionClassObj("action1") },
{ actionClass: createActionClassObj("action2", "noCode") },
];
const result = handleTriggerUpdates(updatedTriggers as any, currentTriggers as any, mockActionClasses);
expect(result).toEqual({
create: [{ actionClassId: "action2" }],
});
});
test("should identify deleted triggers correctly", () => {
const currentTriggers = [
{ actionClass: createActionClassObj("action1") },
{ actionClass: createActionClassObj("action2", "noCode") },
];
const updatedTriggers = [{ actionClass: createActionClassObj("action1") }];
const result = handleTriggerUpdates(updatedTriggers as any, currentTriggers as any, mockActionClasses);
expect(result).toEqual({
deleteMany: {
actionClassId: {
in: ["action2"],
},
},
});
});
test("should handle both added and deleted triggers", () => {
const currentTriggers = [
{ actionClass: createActionClassObj("action1") },
{ actionClass: createActionClassObj("action2", "noCode") },
];
const updatedTriggers = [
{ actionClass: createActionClassObj("action1") },
{ actionClass: createActionClassObj("action3", "noCode") },
];
const result = handleTriggerUpdates(updatedTriggers as any, currentTriggers as any, mockActionClasses);
expect(result).toEqual({
create: [{ actionClassId: "action3" }],
deleteMany: {
actionClassId: {
in: ["action2"],
},
},
});
});
test("should validate triggers before processing", () => {
const currentTriggers = [{ actionClass: createActionClassObj("action1") }];
const updatedTriggers = [
{ actionClass: createActionClassObj("action1") },
{ actionClass: createActionClassObj("invalid") },
];
expect(() =>
handleTriggerUpdates(updatedTriggers as any, currentTriggers as any, mockActionClasses)
).toThrow(InvalidInputError);
});
expect(updateSurveyInternal).toHaveBeenCalledWith(survey, true);
expect(result).toBe(survey);
});
describe("updateSurveyDraft", () => {
const mockSurvey = {
id: "survey123",
createdAt: new Date(),
updatedAt: new Date(),
name: "Draft Survey",
type: "app",
environmentId: "env123",
createdBy: "user123",
status: "draft",
displayOption: "displayOnce",
questions: [
{
id: "q1",
type: TSurveyQuestionTypeEnum.OpenText,
headline: { default: "Question 1" },
required: false,
inputType: "text",
charLimit: { enabled: false },
},
],
welcomeCard: {
enabled: false,
timeToFinish: true,
showResponseCount: false,
},
triggers: [],
endings: [],
hiddenFields: { enabled: false },
delay: 0,
autoComplete: null,
projectOverwrites: null,
styling: null,
showLanguageSwitch: false,
segment: null,
surveyClosedMessage: null,
singleUse: null,
isVerifyEmailEnabled: false,
recaptcha: null,
isSingleResponsePerEmailEnabled: false,
isBackButtonHidden: false,
pin: null,
displayPercentage: null,
languages: [],
variables: [],
followUps: [],
} as unknown as TSurvey;
test("propagates service errors for updateSurvey", async () => {
const error = new DatabaseError("database error");
vi.mocked(updateSurveyFromService).mockRejectedValueOnce(error);
beforeEach(() => {
vi.mocked(updateSurveyInternal).mockResolvedValue(mockSurvey);
});
await expect(updateSurvey(survey)).rejects.toThrow(error);
});
test("should call updateSurveyInternal with skipValidation=true", async () => {
await updateSurveyDraft(mockSurvey);
test("propagates service errors for updateSurveyDraft", async () => {
const error = new ResourceNotFoundError("Survey", "survey_1");
vi.mocked(updateSurveyInternal).mockRejectedValueOnce(error);
expect(updateSurveyInternal).toHaveBeenCalledWith(mockSurvey, true);
expect(updateSurveyInternal).toHaveBeenCalledTimes(1);
});
test("should return the survey from updateSurveyInternal", async () => {
const result = await updateSurveyDraft(mockSurvey);
expect(result).toEqual(mockSurvey);
});
test("should propagate errors from updateSurveyInternal", async () => {
const error = new Error("Internal update failed");
vi.mocked(updateSurveyInternal).mockRejectedValueOnce(error);
await expect(updateSurveyDraft(mockSurvey)).rejects.toThrow("Internal update failed");
});
test("should propagate ResourceNotFoundError from updateSurveyInternal", async () => {
vi.mocked(updateSurveyInternal).mockRejectedValueOnce(new ResourceNotFoundError("Survey", "survey123"));
await expect(updateSurveyDraft(mockSurvey)).rejects.toThrow(ResourceNotFoundError);
});
test("should propagate DatabaseError from updateSurveyInternal", async () => {
vi.mocked(updateSurveyInternal).mockRejectedValueOnce(new DatabaseError("Database connection failed"));
await expect(updateSurveyDraft(mockSurvey)).rejects.toThrow(DatabaseError);
});
await expect(updateSurveyDraft(survey)).rejects.toThrow(error);
});
});
+8 -349
View File
@@ -1,357 +1,16 @@
import { Prisma } from "@prisma/client";
import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSegment, ZSegmentFilters } from "@formbricks/types/segment";
import { TSurvey } from "@formbricks/types/surveys/types";
import { updateSurveyInternal } from "@/lib/survey/service";
import { validateMediaAndPrepareBlocks } from "@/lib/survey/utils";
import { TriggerUpdate } from "@/modules/survey/editor/types/survey-trigger";
import { getActionClasses } from "@/modules/survey/lib/action-class";
import { getOrganizationAIKeys, getOrganizationIdFromEnvironmentId } from "@/modules/survey/lib/organization";
import { getSurvey, selectSurvey } from "@/modules/survey/lib/survey";
import {
handleTriggerUpdates,
updateSurvey as updateSurveyFromService,
updateSurveyInternal,
} from "@/lib/survey/service";
export { handleTriggerUpdates };
/**
 * Persists a draft survey without running full survey validation, so partially
 * configured drafts can still be saved (skipValidation = true on the internal
 * update path).
 */
export const updateSurveyDraft = async (updatedSurvey: TSurvey): Promise<TSurvey> => {
  const savedDraft = await updateSurveyInternal(updatedSurvey, true);
  return savedDraft;
};
/**
 * Updates an existing survey and all of its dependent relations (languages,
 * triggers, segment, follow-ups) in one pass, then returns the updated survey.
 *
 * Throws ResourceNotFoundError when the survey or its organization cannot be
 * found, InvalidInputError for invalid segment filters / trigger ids, and
 * DatabaseError for known Prisma request errors.
 */
export const updateSurvey = async (updatedSurvey: TSurvey): Promise<TSurvey> => {
  try {
    const surveyId = updatedSurvey.id;
    // Accumulates the Prisma nested-write payload built up section by section below.
    let data: any = {};
    const actionClasses = await getActionClasses(updatedSurvey.environmentId);
    const currentSurvey = await getSurvey(surveyId);
    if (!currentSurvey) {
      throw new ResourceNotFoundError("Survey", surveyId);
    }
    // Relations that need special handling are split off; the rest of the
    // scalar fields flow into `surveyData` and are written as-is.
    const { triggers, environmentId, segment, questions, languages, type, followUps, ...surveyData } =
      updatedSurvey;
    // Validate and prepare blocks for persistence
    if (updatedSurvey.blocks && updatedSurvey.blocks.length > 0) {
      data.blocks = validateMediaAndPrepareBlocks(updatedSurvey.blocks);
    }
    if (languages) {
      // Process languages update logic here
      // Extract currentLanguageIds and updatedLanguageIds
      const currentLanguageIds = currentSurvey.languages
        ? currentSurvey.languages.map((l) => l.language.id)
        : [];
      const updatedLanguageIds =
        languages.length > 0 ? updatedSurvey.languages.map((l) => l.language.id) : [];
      // NOTE(review): entries for disabled languages map to `undefined` here; the
      // later `includes(...)` checks compare against string ids, so the undefined
      // entries never match — harmless, but a filter+map would be cleaner.
      const enabledLanguageIds = languages.map((language) => {
        if (language.enabled) return language.language.id;
      });
      // Determine languages to add and remove
      const languagesToAdd = updatedLanguageIds.filter((id) => !currentLanguageIds.includes(id));
      const languagesToRemove = currentLanguageIds.filter((id) => !updatedLanguageIds.includes(id));
      const defaultLanguageId = updatedSurvey.languages.find((l) => l.default)?.language.id;
      // Prepare data for Prisma update
      data.languages = {};
      // Update existing languages for default value changes
      data.languages.updateMany = currentSurvey.languages.map((surveyLanguage) => ({
        where: { languageId: surveyLanguage.language.id },
        data: {
          default: surveyLanguage.language.id === defaultLanguageId,
          enabled: enabledLanguageIds.includes(surveyLanguage.language.id),
        },
      }));
      // Add new languages
      if (languagesToAdd.length > 0) {
        data.languages.create = languagesToAdd.map((languageId) => ({
          languageId: languageId,
          default: languageId === defaultLanguageId,
          enabled: enabledLanguageIds.includes(languageId),
        }));
      }
      // Remove languages no longer associated with the survey
      if (languagesToRemove.length > 0) {
        data.languages.deleteMany = languagesToRemove.map((languageId) => ({
          languageId: languageId,
          enabled: enabledLanguageIds.includes(languageId),
        }));
      }
    }
    if (triggers) {
      data.triggers = handleTriggerUpdates(triggers, currentSurvey.triggers, actionClasses);
    }
    // If the survey body has a type other than "app" but has a private segment, we
    // delete that segment; if it has a public segment, we disconnect it from the survey.
    if (segment) {
      if (type === "app") {
        // parse the segment filters:
        const parsedFilters = ZSegmentFilters.safeParse(segment.filters);
        if (!parsedFilters.success) {
          throw new InvalidInputError("Invalid user segment filters");
        }
        try {
          // update the segment:
          let updatedInput: Prisma.SegmentUpdateInput = {
            ...segment,
            surveys: undefined,
          };
          if (segment.surveys) {
            updatedInput = {
              ...segment,
              surveys: {
                connect: segment.surveys.map((surveyId) => ({ id: surveyId })),
              },
            };
          }
          await prisma.segment.update({
            where: { id: segment.id },
            data: updatedInput,
            select: {
              surveys: { select: { id: true } },
              environmentId: true,
              id: true,
            },
          });
        } catch (error) {
          logger.error(error, "Error updating survey");
          throw new Error("Error updating survey");
        }
      } else {
        if (segment.isPrivate) {
          // disconnect the private segment first and then delete:
          await prisma.segment.update({
            where: { id: segment.id },
            data: {
              surveys: {
                disconnect: {
                  id: surveyId,
                },
              },
            },
          });
          // delete the private segment:
          await prisma.segment.delete({
            where: {
              id: segment.id,
            },
          });
        } else {
          await prisma.survey.update({
            where: {
              id: surveyId,
            },
            data: {
              segment: {
                disconnect: true,
              },
            },
          });
        }
      }
    } else if (type === "app") {
      // App surveys must always have a segment: lazily create a private one
      // (titled with the survey id) when none exists yet.
      if (!currentSurvey.segment) {
        await prisma.survey.update({
          where: {
            id: surveyId,
          },
          data: {
            segment: {
              connectOrCreate: {
                where: {
                  environmentId_title: {
                    environmentId,
                    title: surveyId,
                  },
                },
                create: {
                  title: surveyId,
                  isPrivate: true,
                  filters: [],
                  environment: {
                    connect: {
                      id: environmentId,
                    },
                  },
                },
              },
            },
          },
        });
      }
    }
    if (followUps) {
      // Separate follow-ups into categories based on deletion flag
      const deletedFollowUps = followUps.filter((followUp) => followUp.deleted);
      const nonDeletedFollowUps = followUps.filter((followUp) => !followUp.deleted);
      // Get set of existing follow-up IDs from currentSurvey
      const existingFollowUpIds = new Set(currentSurvey.followUps.map((f) => f.id));
      // Separate non-deleted follow-ups into new and existing
      const existingFollowUps = nonDeletedFollowUps.filter((followUp) =>
        existingFollowUpIds.has(followUp.id)
      );
      const newFollowUps = nonDeletedFollowUps.filter((followUp) => !existingFollowUpIds.has(followUp.id));
      data.followUps = {
        // Update existing follow-ups
        updateMany: existingFollowUps.map((followUp) => ({
          where: {
            id: followUp.id,
          },
          data: {
            name: followUp.name,
            trigger: followUp.trigger,
            action: followUp.action,
          },
        })),
        // Create new follow-ups
        createMany:
          newFollowUps.length > 0
            ? {
                data: newFollowUps.map((followUp) => ({
                  id: followUp.id,
                  name: followUp.name,
                  trigger: followUp.trigger,
                  action: followUp.action,
                })),
              }
            : undefined,
        // Delete follow-ups marked as deleted, regardless of whether they exist in DB
        deleteMany:
          deletedFollowUps.length > 0
            ? deletedFollowUps.map((followUp) => ({
                id: followUp.id,
              }))
            : undefined,
      };
    }
    // The organization lookup is only used as an existence check here; its
    // AI-key fields are not read in this function.
    const organizationId = await getOrganizationIdFromEnvironmentId(environmentId);
    const organization = await getOrganizationAIKeys(organizationId);
    if (!organization) {
      throw new ResourceNotFoundError("Organization", null);
    }
    surveyData.updatedAt = new Date();
    data = {
      ...surveyData,
      ...data,
      type,
    };
    // The creator must never change on update.
    delete data.createdBy;
    const prismaSurvey = await prisma.survey.update({
      where: { id: surveyId },
      data,
      select: selectSurvey,
    });
    // Flatten the segment's related surveys to an id array for the API shape.
    let surveySegment: TSegment | null = null;
    if (prismaSurvey.segment) {
      surveySegment = {
        ...prismaSurvey.segment,
        surveys: prismaSurvey.segment.surveys.map((survey) => survey.id),
      };
    }
    const modifiedSurvey: TSurvey = {
      ...prismaSurvey, // Properties from prismaSurvey
      displayPercentage: Number(prismaSurvey.displayPercentage) || null,
      segment: surveySegment,
      customHeadScriptsMode: prismaSurvey.customHeadScriptsMode,
    };
    return modifiedSurvey;
  } catch (error) {
    // Wrap known Prisma request errors; re-throw everything else unchanged.
    if (error instanceof Prisma.PrismaClientKnownRequestError) {
      logger.error(error, "Error updating survey");
      throw new DatabaseError(error.message);
    }
    throw error;
  }
};
/**
 * Extracts the action class id of every trigger entry.
 *
 * Returns null when no triggers value was provided (falsy input); throws
 * InvalidInputError when the input is not an array or any entry lacks a
 * string action class id.
 */
const getTriggerIds = (triggers: unknown): string[] | null => {
  if (!triggers) {
    return null;
  }
  if (!Array.isArray(triggers)) {
    throw new InvalidInputError("Invalid trigger id");
  }
  const ids: string[] = [];
  for (const entry of triggers) {
    const candidate = (entry as { actionClass?: { id?: unknown } })?.actionClass?.id;
    if (typeof candidate !== "string") {
      throw new InvalidInputError("Invalid trigger id");
    }
    ids.push(candidate);
  }
  return ids;
};
/**
 * Validates that every trigger references a known action class and that no
 * action class id is referenced more than once.
 *
 * No-op when the triggers value is absent; throws InvalidInputError for an
 * unknown or duplicate trigger id.
 */
export const checkTriggersValidity = (triggers: unknown, actionClasses: Array<{ id: string }>) => {
  const triggerIds = getTriggerIds(triggers);
  if (!triggerIds) {
    return;
  }
  // Every referenced id must belong to an existing action class.
  const knownIds = new Set(actionClasses.map((actionClass) => actionClass.id));
  for (const triggerId of triggerIds) {
    if (!knownIds.has(triggerId)) {
      throw new InvalidInputError("Invalid trigger id");
    }
  }
  // Reject duplicate references to the same action class.
  if (new Set(triggerIds).size !== triggerIds.length) {
    throw new InvalidInputError("Duplicate trigger id");
  }
};
/**
 * Computes the Prisma nested-write object that reconciles a survey's stored
 * triggers with an updated trigger list.
 *
 * Returns an empty object when no updated triggers were supplied; otherwise a
 * TriggerUpdate with `create` entries for newly added action classes and a
 * `deleteMany` filter for removed ones. Throws InvalidInputError (via
 * checkTriggersValidity) for unknown or duplicate trigger ids.
 */
export const handleTriggerUpdates = (
  updatedTriggers: unknown,
  currentTriggers: unknown,
  actionClasses: Array<{ id: string }>
) => {
  const updatedTriggerIds = getTriggerIds(updatedTriggers);
  // Nothing supplied -> nothing to change.
  if (!updatedTriggerIds) return {};
  checkTriggersValidity(updatedTriggers, actionClasses);
  const currentTriggerIds = getTriggerIds(currentTriggers) ?? [];
  // added triggers are triggers that are not in the current triggers and are there in the new triggers
  const addedTriggerIds = updatedTriggerIds.filter((triggerId) => !currentTriggerIds.includes(triggerId));
  // deleted triggers are triggers that are not in the new triggers and are there in the current triggers
  const deletedTriggerIds = currentTriggerIds.filter((triggerId) => !updatedTriggerIds.includes(triggerId));
  // Construct the triggers update object
  const triggersUpdate: TriggerUpdate = {};
  if (addedTriggerIds.length > 0) {
    triggersUpdate.create = addedTriggerIds.map((triggerId) => ({
      actionClassId: triggerId,
    }));
  }
  if (deletedTriggerIds.length > 0) {
    // disconnect the public triggers from the survey
    triggersUpdate.deleteMany = {
      actionClassId: {
        in: deletedTriggerIds,
      },
    };
  }
  return triggersUpdate;
  // NOTE(review): the statement below is unreachable (it follows a return) and
  // references `updatedSurvey`, which is not in scope in this function — it
  // appears to be a merge/diff artifact from the new `updateSurvey` wrapper.
  // Confirm against the intended version of this file and remove.
  return updateSurveyFromService(updatedSurvey);
};
+2
View File
@@ -16,6 +16,8 @@ export const selectSurvey = {
environmentId: true,
createdBy: true,
status: true,
startsAt: true,
endsAt: true,
welcomeCard: true,
questions: true,
blocks: true,
@@ -7,6 +7,8 @@ export const surveySelect = {
updatedAt: true,
name: true,
type: true,
startsAt: true,
endsAt: true,
creator: {
select: {
name: true,
@@ -6,6 +6,7 @@ import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
import { TActionClassType } from "@formbricks/types/action-classes";
import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
import { publishSurveyLifecycleCancellationEvents } from "@/lib/inngest/survey-lifecycle";
import { getOrganizationByEnvironmentId } from "@/lib/organization/service";
import { checkForInvalidMediaInBlocks } from "@/lib/survey/utils";
import { validateInputs } from "@/lib/utils/validate";
@@ -37,6 +38,10 @@ vi.mock("@/lib/survey/utils", () => ({
checkForInvalidMediaInBlocks: vi.fn(() => ({ ok: true, data: undefined })),
}));
vi.mock("@/lib/inngest/survey-lifecycle", () => ({
publishSurveyLifecycleCancellationEvents: vi.fn(),
}));
vi.mock("@/lib/utils/validate", () => ({
validateInputs: vi.fn(),
}));
@@ -76,6 +81,7 @@ vi.mock("@/lingodotdev/server", () => ({
vi.mock("@formbricks/database", () => ({
prisma: {
$transaction: vi.fn(),
survey: {
findMany: vi.fn(),
findUnique: vi.fn(),
@@ -126,9 +132,11 @@ const resetMocks = () => {
vi.mocked(prisma.survey.count).mockReset();
vi.mocked(prisma.survey.delete).mockReset();
vi.mocked(prisma.survey.create).mockReset();
vi.mocked(prisma.$transaction).mockReset();
vi.mocked(prisma.segment.delete).mockReset();
vi.mocked(prisma.segment.findFirst).mockReset();
vi.mocked(prisma.actionClass.findMany).mockReset();
vi.mocked(publishSurveyLifecycleCancellationEvents).mockReset();
vi.mocked(logger.error).mockClear();
};
@@ -423,6 +431,9 @@ describe("getSurveysSortedByRelevance", () => {
describe("deleteSurvey", () => {
beforeEach(() => {
resetMocks();
vi.mocked(prisma.$transaction).mockImplementation(
async (callback: (tx: typeof prisma) => Promise<unknown>) => callback(prisma)
);
});
const mockDeletedSurveyData = {
@@ -442,6 +453,10 @@ describe("deleteSurvey", () => {
where: { id: surveyId },
select: expect.objectContaining({ id: true, environmentId: true, segment: expect.anything() }),
});
expect(publishSurveyLifecycleCancellationEvents).toHaveBeenCalledWith({
surveyId,
environmentId,
});
expect(prisma.segment.delete).not.toHaveBeenCalled();
});
+36 -26
View File
@@ -7,6 +7,7 @@ import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
import { TSurveyFilterCriteria } from "@formbricks/types/surveys/types";
import { publishSurveyLifecycleCancellationEvents } from "@/lib/inngest/survey-lifecycle";
import { getOrganizationByEnvironmentId } from "@/lib/organization/service";
import { checkForInvalidMediaInBlocks } from "@/lib/survey/utils";
import { validateInputs } from "@/lib/utils/validate";
@@ -147,39 +148,48 @@ export const getSurvey = reactCache(async (surveyId: string): Promise<TSurvey |
export const deleteSurvey = async (surveyId: string): Promise<boolean> => {
try {
const deletedSurvey = await prisma.survey.delete({
where: {
id: surveyId,
},
select: {
id: true,
environmentId: true,
segment: {
select: {
id: true,
isPrivate: true,
},
const deletedSurvey = await prisma.$transaction(async (tx) => {
const removedSurvey = await tx.survey.delete({
where: {
id: surveyId,
},
type: true,
triggers: {
select: {
actionClass: {
select: {
id: true,
select: {
id: true,
environmentId: true,
segment: {
select: {
id: true,
isPrivate: true,
},
},
type: true,
triggers: {
select: {
actionClass: {
select: {
id: true,
},
},
},
},
},
},
});
if (removedSurvey.type === "app" && removedSurvey.segment?.isPrivate) {
await tx.segment.delete({
where: {
id: removedSurvey.segment.id,
},
});
}
return removedSurvey;
});
if (deletedSurvey.type === "app" && deletedSurvey.segment?.isPrivate) {
await prisma.segment.delete({
where: {
id: deletedSurvey.segment.id,
},
});
}
await publishSurveyLifecycleCancellationEvents({
surveyId: deletedSurvey.id,
environmentId: deletedSurvey.environmentId,
});
return true;
} catch (error) {
@@ -8,6 +8,8 @@ export const ZSurvey = z.object({
environmentId: z.string(),
type: z.enum(["link", "app", "website", "web"]), //we can replace this with ZSurveyType after we remove "web" from schema
status: ZSurveyStatus,
startsAt: z.date().nullable().optional(),
endsAt: z.date().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
responseCount: z.number(),
+1
View File
@@ -89,6 +89,7 @@
"i18next": "25.8.18",
"i18next-icu": "2.4.3",
"i18next-resources-to-backend": "1.2.1",
"inngest": "4.0.5",
"jiti": "2.6.1",
"jsonwebtoken": "9.0.3",
"lexical": "0.41.0",
+55
View File
@@ -9,6 +9,11 @@ services:
- POSTGRES_PASSWORD=postgres
ports:
- 5432:5432
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres -d postgres || exit 1"]
interval: 5s
timeout: 5s
retries: 5
mailhog:
image: arjenz/mailhog
@@ -23,6 +28,11 @@ services:
- 6379:6379
volumes:
- valkey-data:/data
healthcheck:
test: ["CMD", "valkey-cli", "ping"]
interval: 5s
timeout: 3s
retries: 5
minio:
image: minio/minio:RELEASE.2025-09-07T16-13-09Z
@@ -36,6 +46,51 @@ services:
volumes:
- minio-data:/data
inngest:
profiles: ["inngest-poc"]
image: inngest/inngest
command: "inngest start"
ports:
- 8288:8288
- 8289:8289
environment:
- INNGEST_EVENT_KEY=${INNGEST_EVENT_KEY:?INNGEST_EVENT_KEY is required}
- INNGEST_SIGNING_KEY=${INNGEST_SIGNING_KEY:?INNGEST_SIGNING_KEY is required}
- INNGEST_POSTGRES_URI=postgres://postgres:postgres@postgres:5432/postgres
- INNGEST_REDIS_URI=redis://valkey:6379
- INNGEST_SDK_URL=http://inngest-poc-worker:8287/api/inngest
- INNGEST_POLL_INTERVAL=60
depends_on:
postgres:
condition: service_healthy
valkey:
condition: service_healthy
inngest-poc-worker:
condition: service_started
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8288/health"]
interval: 30s
timeout: 10s
retries: 3
start_period: 40s
inngest-poc-worker:
profiles: ["inngest-poc"]
build:
context: .
dockerfile: services/inngest-poc-worker/Dockerfile
environment:
- INNGEST_BASE_URL=http://inngest:8288
- INNGEST_SIGNING_KEY=${INNGEST_SIGNING_KEY:?INNGEST_SIGNING_KEY is required}
- PORT=8287
ports:
- 8287:8287
healthcheck:
test: ["CMD", "wget", "-qO-", "http://localhost:8287/health"]
interval: 10s
timeout: 3s
retries: 5
volumes:
postgres:
driver: local
+6
View File
@@ -27,3 +27,9 @@ The script will prompt you for the following information:
3. **Domain Name**: Enter the domain name that Traefik will use to create the SSL certificate and forward requests to Formbricks.
That's it! After running the command and providing the required information, visit the domain name you entered, and you should see the Formbricks home wizard!
## Optional Inngest POC Profile
The experimental `inngest-poc` Docker Compose profile adds a self-hosted Inngest server and the Go worker used for the survey start/end lifecycle proof of concept. It reuses the same Postgres and Redis services that already back Formbricks in this stack.
Because the worker is built from source, this profile is meant to be used from a full checkout of the Formbricks repository rather than the one-file quickstart flow above.
+53
View File
@@ -199,6 +199,14 @@ x-environment: &environment
# Configure the maximum age for the session in seconds. Default is 86400 (24 hours)
# SESSION_MAX_AGE: 86400
########################################## OPTIONAL (INNGEST POC PROFILE) ##########################################
# Only required when starting docker compose with --profile inngest-poc
# Replace both values with your own `openssl rand -hex 32` output before exposing this profile.
INNGEST_BASE_URL: http://inngest:8288
INNGEST_EVENT_KEY: 0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef
INNGEST_SIGNING_KEY: abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789
services:
postgres:
restart: always
@@ -245,6 +253,51 @@ services:
- ./saml-connection:/home/nextjs/apps/web/saml-connection
<<: *environment
inngest:
profiles: ["inngest-poc"]
restart: always
image: inngest/inngest
command: "inngest start"
depends_on:
postgres:
condition: service_healthy
redis:
condition: service_healthy
inngest-poc-worker:
condition: service_started
ports:
- 8288:8288
- 8289:8289
environment:
- INNGEST_EVENT_KEY=${INNGEST_EVENT_KEY:?INNGEST_EVENT_KEY is required}
- INNGEST_SIGNING_KEY=${INNGEST_SIGNING_KEY:?INNGEST_SIGNING_KEY is required}
- INNGEST_POSTGRES_URI=postgres://postgres:postgres@postgres:5432/postgres
- INNGEST_REDIS_URI=redis://redis:6379
- INNGEST_SDK_URL=http://inngest-poc-worker:8287/api/inngest
- INNGEST_POLL_INTERVAL=60
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8288/health"]
interval: 30s
timeout: 10s
retries: 3
start_period: 40s
inngest-poc-worker:
profiles: ["inngest-poc"]
restart: always
build:
context: ..
dockerfile: services/inngest-poc-worker/Dockerfile
environment:
- INNGEST_BASE_URL=http://inngest:8288
- INNGEST_SIGNING_KEY=${INNGEST_SIGNING_KEY:?INNGEST_SIGNING_KEY is required}
- PORT=8287
healthcheck:
test: ["CMD", "wget", "-qO-", "http://localhost:8287/health"]
interval: 10s
timeout: 3s
retries: 5
volumes:
postgres:
driver: local
+45
View File
@@ -117,6 +117,51 @@ Please take a look at our [migration guide](/self-hosting/advanced/migration) fo
docker compose up -d
```
## Optional: Self-hosted Inngest POC Profile
This repository also includes an experimental `inngest-poc` Docker Compose profile for the survey lifecycle proof of concept. It adds:
- A self-hosted Inngest server on port `8288`
- The Go `inngest-poc-worker` service that exposes `/api/inngest`
The profile intentionally reuses the same Postgres and Redis/Valkey services that already back Formbricks in the
Compose stack instead of starting separate infrastructure just for Inngest.
<Note>
This profile builds the worker from the repository source, so it is intended for local evaluation or a full
repository checkout. It is not part of the one-file quickstart flow.
</Note>
### Local Development Stack
From the repository root, start the self-hosted Inngest services with:
```bash
docker compose -f docker-compose.dev.yml --profile inngest-poc up -d
```
Then set these environment variables for the web app before creating surveys with `startsAt` or `endsAt`:
```bash
INNGEST_BASE_URL=http://localhost:8288
INNGEST_EVENT_KEY=0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef
```
### Docker Production-style Stack
From a repository checkout, start the optional profile with:
```bash
docker compose -f docker/docker-compose.yml --profile inngest-poc up -d
```
Before doing that, replace the `INNGEST_EVENT_KEY` and `INNGEST_SIGNING_KEY` placeholder values in
`docker/docker-compose.yml` with your own `openssl rand -hex 32` output.
In both Compose files, the `inngest-poc` profile points Inngest at the existing Formbricks Postgres and Redis
services. If you prefer different connection strings or stronger isolation, override `INNGEST_POSTGRES_URI` and
`INNGEST_REDIS_URI` in your own deployment setup.
## Optional: Adding MinIO for File Storage
MinIO provides S3-compatible object storage for file uploads in Formbricks. If you want to enable features like image uploads, file uploads in surveys, or custom logos, you can add MinIO to your Docker setup.
+2 -1
View File
@@ -4,7 +4,8 @@
"private": true,
"workspaces": [
"apps/*",
"packages/*"
"packages/*",
"services/*"
],
"prisma": {
"schema": "packages/database/schema.prisma"
+2
View File
@@ -353,6 +353,8 @@ model Survey {
creator User? @relation(fields: [createdBy], references: [id])
createdBy String?
status SurveyStatus @default(draft)
startsAt DateTime?
endsAt DateTime?
/// [SurveyWelcomeCard]
welcomeCard Json @default("{\"enabled\": false}")
/// [SurveyQuestions]
+2
View File
@@ -53,6 +53,8 @@ const ZSurveyBase = z.object({
redirectUrl: z.url().nullable().describe("The URL to redirect to after the survey is completed"),
type: z.enum(SurveyType).describe("The type of the survey"),
status: z.enum(SurveyStatus).describe("The status of the survey"),
startsAt: z.coerce.date().nullable().optional().describe("When the survey should start"),
endsAt: z.coerce.date().nullable().optional().describe("When the survey should end"),
thankYouMessage: z.string().nullable().describe("The thank you message of the survey"),
showLanguageSwitch: z.boolean().nullable().describe("Whether to show the language switch"),
showThankYouMessage: z.boolean().nullable().describe("Whether to show the thank you message"),
+11 -1
View File
@@ -828,6 +828,8 @@ export const ZSurveyBase = z.object({
environmentId: z.string(),
createdBy: z.string().nullable(),
status: ZSurveyStatus,
startsAt: z.coerce.date().nullable().optional(),
endsAt: z.coerce.date().nullable().optional(),
displayOption: ZSurveyDisplayOption,
autoClose: z.number().nullable(),
triggers: z.array(z.object({ actionClass: ZActionClass })),
@@ -930,7 +932,15 @@ export const ZSurveyBase = z.object({
});
export const surveyRefinement = (survey: z.infer<typeof ZSurveyBase>, ctx: z.RefinementCtx): void => {
const { questions, blocks, languages, welcomeCard, endings, isBackButtonHidden } = survey;
const { questions, blocks, languages, welcomeCard, endings, isBackButtonHidden, startsAt, endsAt } = survey;
if (startsAt && endsAt && startsAt >= endsAt) {
ctx.addIssue({
code: "custom",
message: "Survey start date must be before end date",
path: ["startsAt"],
});
}
// Validate: must have questions OR blocks with elements, not both
const hasQuestions = questions.length > 0;
+235
View File
@@ -324,6 +324,9 @@ importers:
i18next-resources-to-backend:
specifier: 1.2.1
version: 1.2.1
inngest:
specifier: 4.0.5
version: 4.0.5(@opentelemetry/core@2.6.0(@opentelemetry/api@1.9.0))(encoding@0.1.13)(hono@4.12.7)(next@16.1.7(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)(zod@4.3.6)
jiti:
specifier: 2.6.1
version: 2.6.1
@@ -1025,6 +1028,12 @@ importers:
specifier: 7.3.1
version: 7.3.1(@types/node@25.4.0)(jiti@2.6.1)(lightningcss@1.31.1)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)
services/inngest-poc-worker:
devDependencies:
dotenv-cli:
specifier: 11.0.0
version: 11.0.0
packages:
'@acemir/cssom@0.9.31':
@@ -1625,6 +1634,9 @@ packages:
resolution: {integrity: sha512-ctxtJ/eA+t+6q2++vj5j7FYX3nRu311q1wfYH3xjlLOsczhlhxAg2FWNUXhpGvAw3BWo1xBcvOV6/YLc2r5FJw==}
hasBin: true
'@bufbuild/protobuf@2.11.0':
resolution: {integrity: sha512-sBXGT13cpmPR5BMgHE6UEEfEaShh5Ror6rfN3yEK5si7QVrtZg8LEPQb0VVhiLRUslD2yLnXtnRzG035J/mZXQ==}
'@calcom/embed-core@1.5.3':
resolution: {integrity: sha512-GeId9gaByJ5EWiPmuvelZOvFWPOTWkcWZr5vGTCbIUTX125oE5yn0n8lDF1MJk5Xj1WO+/dk9jKIE08Ad9ytiQ==}
@@ -2170,6 +2182,9 @@ packages:
cpu: [x64]
os: [win32]
'@inngest/ai@0.1.7':
resolution: {integrity: sha512-5xWatW441jacGf9czKEZdgAmkvoy7GS2tp7X8GSbdGeRXzjisHR6vM+q8DQbv6rqRsmQoCQ5iShh34MguELvUQ==}
'@isaacs/cliui@8.0.2':
resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==}
engines: {node: '>=12'}
@@ -2187,6 +2202,9 @@ packages:
typescript:
optional: true
'@jpwilliams/waitgroup@2.1.1':
resolution: {integrity: sha512-0CxRhNfkvFCTLZBKGvKxY2FYtYW1yWhO2McLqBL0X5UWvYjIf9suH8anKW/DNutl369A75Ewyoh2iJMwBZ2tRg==}
'@jridgewell/gen-mapping@0.3.13':
resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==}
@@ -2441,6 +2459,10 @@ packages:
engines: {node: '>=10'}
deprecated: This functionality has been moved to @npmcli/fs
'@opentelemetry/api-logs@0.203.0':
resolution: {integrity: sha512-9B9RU0H7Ya1Dx/Rkyc4stuBZSGVQF27WigitInx2QQoj6KUpEFYPKoWjdFTunJYxmXmh17HeBvbMa1EhGyPmqQ==}
engines: {node: '>=8.0.0'}
'@opentelemetry/api-logs@0.207.0':
resolution: {integrity: sha512-lAb0jQRVyleQQGiuuvCOTDVspc14nx6XJjP4FspJ1sNARo3Regq4ZZbrc3rN4b1TYSuUCvgH+UXUPug4SLOqEQ==}
engines: {node: '>=8.0.0'}
@@ -2998,6 +3020,12 @@ packages:
peerDependencies:
'@opentelemetry/api': ^1.3.0
'@opentelemetry/instrumentation@0.203.0':
resolution: {integrity: sha512-ke1qyM+3AK2zPuBPb6Hk/GCsc5ewbLvPNkEuELx/JmANeEp6ZjnZ+wypPAJSucTw0wvCGrUaibDSdcrGFoWxKQ==}
engines: {node: ^18.19.0 || >=20.6.0}
peerDependencies:
'@opentelemetry/api': ^1.3.0
'@opentelemetry/instrumentation@0.207.0':
resolution: {integrity: sha512-y6eeli9+TLKnznrR8AZlQMSJT7wILpXH+6EYq5Vf/4Ao+huI7EedxQHwRgVUOMLFbe7VFDvHJrX9/f4lcwnJsA==}
engines: {node: ^18.19.0 || >=20.6.0}
@@ -5199,6 +5227,14 @@ packages:
resolution: {integrity: sha512-VyMVKRrpHTT8PnotUeV8L/mDaMwD5DaAKCFLP73zAqAtvF0FCqky+Ki7BYbFCYQmqFyTe9316Ed5zS70QUR9eg==}
engines: {node: '>= 10'}
'@traceloop/ai-semantic-conventions@0.20.0':
resolution: {integrity: sha512-bvivhZU6U8TW4TKktYnjdTi+7GE4WxI8epaGjawalSKDunmxaA+4UVFQ+4tSCBvp2Scby+gnYNaTZSrtABfOlQ==}
engines: {node: '>=14'}
'@traceloop/instrumentation-anthropic@0.20.0':
resolution: {integrity: sha512-xQcPxVrKr3yT9+ZEM3skYXikJc/ocZlGDIcsBQ3mMwL3Weq1QL7jx/uGLXvrSO2Yh0DWUjWI6Q/oiRCEUM6P8w==}
engines: {node: '>=14'}
'@trivago/prettier-plugin-sort-imports@6.0.2':
resolution: {integrity: sha512-3DgfkukFyC/sE/VuYjaUUWoFfuVjPK55vOFDsxD56XXynFMCZDYFogH2l/hDfOsQAm1myoU/1xByJ3tWqtulXA==}
engines: {node: '>= 20'}
@@ -5266,6 +5302,9 @@ packages:
'@types/cors@2.8.19':
resolution: {integrity: sha512-mFNylyeyqN93lfe/9CSxOGREz8cpzAhH+E93xJ4xWQf62V8sQ/24reV2nyzUWM6H6Xji+GGHpkbLe7pVoUEskg==}
'@types/debug@4.1.13':
resolution: {integrity: sha512-KSVgmQmzMwPlmtljOomayoR89W4FynCAi3E8PPs7vmDVPe84hT+vGPKkJfThkmXs0x0jAaa9U8uW8bbfyS2fWw==}
'@types/deep-eql@4.0.2':
resolution: {integrity: sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==}
@@ -6284,6 +6323,9 @@ packages:
caniuse-lite@1.0.30001776:
resolution: {integrity: sha512-sg01JDPzZ9jGshqKSckOQthXnYwOEP50jeVFhaSFbZcOy05TiuuaffDOfcwtCisJ9kNQuLBFibYywv2Bgm9osw==}
canonicalize@1.0.8:
resolution: {integrity: sha512-0CNTVCLZggSh7bc5VkX5WWPWO+cyZbNd07IHIsSXLia/eAq+r836hgk+8BKoEh7949Mda87VUOitx5OddVj64A==}
chai@5.3.3:
resolution: {integrity: sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==}
engines: {node: '>=18'}
@@ -6349,6 +6391,9 @@ packages:
citty@0.1.6:
resolution: {integrity: sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ==}
cjs-module-lexer@1.4.3:
resolution: {integrity: sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==}
cjs-module-lexer@2.2.0:
resolution: {integrity: sha512-4bHTS2YuzUvtoLjdy+98ykbNB5jS0+07EvFNXerqZQJ89F7DI6ET7OQo/HJuW6K0aVsKA9hj9/RVb2kQVOrPDQ==}
@@ -6502,6 +6547,9 @@ packages:
engines: {node: '>=20'}
hasBin: true
cross-fetch@4.1.0:
resolution: {integrity: sha512-uKm5PU+MHTootlWEY+mZ4vvXoCn4fLQxT9dSc1sXVMSFkINTJVN8cAQROpwcKm8bJ/c7rgZVIBWzH5T78sNZZw==}
cross-spawn@7.0.6:
resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==}
engines: {node: '>= 8'}
@@ -7590,6 +7638,9 @@ packages:
resolution: {integrity: sha512-Bb33KbowVTIj5s7Ked1OsqHUeCpz//tPwR+E2zJgJKo9Z5XolZ9b6bdUgjmYlwnWhoOQKoTd1TYToZGn5mAYOg==}
engines: {node: '>= 0.8'}
hash.js@1.1.7:
resolution: {integrity: sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==}
hasown@2.0.2:
resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==}
engines: {node: '>= 0.4'}
@@ -7712,6 +7763,9 @@ packages:
resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==}
engines: {node: '>=6'}
import-in-the-middle@1.15.0:
resolution: {integrity: sha512-bpQy+CrsRmYmoPMAE/0G33iwRqwW4ouqdRg8jgbH3aKuCtOc8lxgmYXg2dMM92CRiGP660EtBcymH/eVUpCSaA==}
import-in-the-middle@2.0.6:
resolution: {integrity: sha512-3vZV3jX0XRFW3EJDTwzWoZa+RH1b8eTTx6YOCjglrLyPuepwoBti1k3L2dKwdCUrnVEfc5CuRuGstaC/uQJJaw==}
@@ -7747,6 +7801,43 @@ packages:
inline-style-prefixer@7.0.1:
resolution: {integrity: sha512-lhYo5qNTQp3EvSSp3sRvXMbVQTLrvGV6DycRMJ5dm2BLMiJ30wpXKdDdgX+GmJZ5uQMucwRKHamXSst3Sj/Giw==}
inngest@4.0.5:
resolution: {integrity: sha512-NK0YP7m1ni27ef4bxvLXwudHPMAgQVDncWW5yzm7eQh3EH9Hmshy4IRKq2Kp8x6SKUZCEwq0AKWw8aNoNa2/5g==}
engines: {node: '>=20'}
peerDependencies:
'@sveltejs/kit': '>=1.27.3'
'@vercel/node': '>=2.15.9'
aws-lambda: '>=1.0.7'
express: '>=4.19.2'
fastify: '>=4.21.0'
h3: '>=1.8.1'
hono: 4.12.7
koa: '>=2.14.2'
next: '>=12.0.0'
typescript: '>=5.8.0'
zod: ^3.25.0 || ^4.0.0
peerDependenciesMeta:
'@sveltejs/kit':
optional: true
'@vercel/node':
optional: true
aws-lambda:
optional: true
express:
optional: true
fastify:
optional: true
h3:
optional: true
hono:
optional: true
koa:
optional: true
next:
optional: true
typescript:
optional: true
internal-slot@1.1.0:
resolution: {integrity: sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==}
engines: {node: '>= 0.4'}
@@ -8077,6 +8168,9 @@ packages:
json-stable-stringify-without-jsonify@1.0.1:
resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==}
json-stringify-safe@5.0.1:
resolution: {integrity: sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==}
json5@1.0.2:
resolution: {integrity: sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==}
hasBin: true
@@ -8433,6 +8527,9 @@ packages:
resolution: {integrity: sha512-r9deDe9p5FJUPZAk3A59wGH7Ii9YrjjWw0jmw/liSbHl2CHiyXj6FcDXDu2K3TjVAXqiJdaw3xxwlZZr9E6nHg==}
hasBin: true
minimalistic-assert@1.0.1:
resolution: {integrity: sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==}
minimatch@10.2.4:
resolution: {integrity: sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==}
engines: {node: 18 || 20 || >=22}
@@ -9568,6 +9665,10 @@ packages:
resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==}
engines: {node: '>=0.10.0'}
require-in-the-middle@7.5.2:
resolution: {integrity: sha512-gAZ+kLqBdHarXB64XpAe2VCjB7rIRv+mU8tfRWziHRJ5umKsIHN2tLLv6EtMw7WCdP19S0ERVMldNvxYCHnhSQ==}
engines: {node: '>=8.6.0'}
require-in-the-middle@8.0.1:
resolution: {integrity: sha512-QT7FVMXfWOYFbeRBF6nu+I6tr2Tf3u0q8RIEjNob/heKY/nh7drD/k7eeMFmSQgnTtCzLDcCu/XEnpW2wk4xCQ==}
engines: {node: '>=9.3.0 || >=8.10.0 <9.0.0'}
@@ -9737,6 +9838,10 @@ packages:
seq-queue@0.0.5:
resolution: {integrity: sha512-hr3Wtp/GZIc/6DAGPDcV4/9WoZhjrkXsi5B/07QgX8tsdc6ilr7BFM6PM6rbdAX1kFSDYeZGLipIZZKyQP0O5Q==}
serialize-error-cjs@0.1.4:
resolution: {integrity: sha512-6a6dNqipzbCPlTFgztfNP2oG+IGcflMe/01zSzGrQcxGMKbIjOemBBD85pH92klWaJavAUWxAh9Z0aU28zxW6A==}
deprecated: Rolling release, please update to 0.2.0
server-only@0.0.1:
resolution: {integrity: sha512-qepMx2JxAa5jjfzxG79yPPq+8BuFToHd1hm7kI+Z4zAq1ftQiP7HcxMhDDItrbtwVeLg/cY2JnKnrcFkmiswNA==}
@@ -10152,6 +10257,12 @@ packages:
resolution: {integrity: sha512-g7jC56o3MzLkE3lHkaFe2ZdOVFBahq5bsB60/M4NYUbocw/MCrS89IOEQUFr+ba6pb8ZHczZ/VqCyYeYq0xBAg==}
engines: {node: '>=18'}
temporal-polyfill@0.2.5:
resolution: {integrity: sha512-ye47xp8Cb0nDguAhrrDS1JT1SzwEV9e26sSsrWzVu+yPZ7LzceEcH0i2gci9jWfOfSCCgM3Qv5nOYShVUUFUXA==}
temporal-spec@0.2.4:
resolution: {integrity: sha512-lDMFv4nKQrSjlkHKAlHVqKrBG4DyFfa9F74cmBZ3Iy3ed8yvWnlWSIdi4IKfSqwmazAohBNwiN64qGx4y5Q3IQ==}
terser-webpack-plugin@5.3.17:
resolution: {integrity: sha512-YR7PtUp6GMU91BgSJmlaX/rS2lGDbAF7D+Wtq7hRO+MiljNmodYvqslzCFiYVAgW+Qoaaia/QUIP4lGXufjdZw==}
engines: {node: '>= 10.13.0'}
@@ -10498,6 +10609,10 @@ packages:
resolution: {integrity: sha512-rvKSBiC5zqCCiDZ9kAOszZcDvdAHwwIKJG33Ykj43OKcWsnmcBRL09YTU4nOeHZ8Y2a7l1MgTd08SBe9A8Qj6A==}
engines: {node: '>=18'}
ulid@2.4.0:
resolution: {integrity: sha512-fIRiVTJNcSRmXKPZtGzFQv9WRrZ3M9eoptl/teFJvjOzmpU+/K/JH6HZ8deBfb5vMEpicJcLn7JmvdknlMq7Zg==}
hasBin: true
unbox-primitive@1.1.0:
resolution: {integrity: sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==}
engines: {node: '>= 0.4'}
@@ -12423,6 +12538,8 @@ snapshots:
dependencies:
css-tree: 3.1.0
'@bufbuild/protobuf@2.11.0': {}
'@calcom/embed-core@1.5.3': {}
'@calcom/embed-snippet@1.3.3':
@@ -12872,6 +12989,11 @@ snapshots:
'@img/sharp-win32-x64@0.34.5':
optional: true
'@inngest/ai@0.1.7':
dependencies:
'@types/node': 22.19.13
typescript: 5.9.3
'@isaacs/cliui@8.0.2':
dependencies:
string-width: 5.1.2
@@ -12893,6 +13015,8 @@ snapshots:
optionalDependencies:
typescript: 5.9.3
'@jpwilliams/waitgroup@2.1.1': {}
'@jridgewell/gen-mapping@0.3.13':
dependencies:
'@jridgewell/sourcemap-codec': 1.5.5
@@ -13265,6 +13389,10 @@ snapshots:
rimraf: 3.0.2
optional: true
'@opentelemetry/api-logs@0.203.0':
dependencies:
'@opentelemetry/api': 1.9.0
'@opentelemetry/api-logs@0.207.0':
dependencies:
'@opentelemetry/api': 1.9.0
@@ -14088,6 +14216,15 @@ snapshots:
transitivePeerDependencies:
- supports-color
'@opentelemetry/instrumentation@0.203.0(@opentelemetry/api@1.9.0)':
dependencies:
'@opentelemetry/api': 1.9.0
'@opentelemetry/api-logs': 0.203.0
import-in-the-middle: 1.15.0
require-in-the-middle: 7.5.2
transitivePeerDependencies:
- supports-color
'@opentelemetry/instrumentation@0.207.0(@opentelemetry/api@1.9.0)':
dependencies:
'@opentelemetry/api': 1.9.0
@@ -16586,6 +16723,21 @@ snapshots:
'@tootallnate/once@3.0.1':
optional: true
'@traceloop/ai-semantic-conventions@0.20.0':
dependencies:
'@opentelemetry/api': 1.9.0
'@traceloop/instrumentation-anthropic@0.20.0':
dependencies:
'@opentelemetry/api': 1.9.0
'@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0)
'@opentelemetry/instrumentation': 0.203.0(@opentelemetry/api@1.9.0)
'@opentelemetry/semantic-conventions': 1.40.0
'@traceloop/ai-semantic-conventions': 0.20.0
tslib: 2.8.1
transitivePeerDependencies:
- supports-color
'@trivago/prettier-plugin-sort-imports@6.0.2(prettier@3.8.1)':
dependencies:
'@babel/generator': 7.29.1
@@ -16661,6 +16813,10 @@ snapshots:
dependencies:
'@types/node': 25.4.0
'@types/debug@4.1.13':
dependencies:
'@types/ms': 2.1.0
'@types/deep-eql@4.0.2': {}
'@types/doctrine@0.0.9': {}
@@ -17848,6 +18004,8 @@ snapshots:
caniuse-lite@1.0.30001776: {}
canonicalize@1.0.8: {}
chai@5.3.3:
dependencies:
assertion-error: 2.0.1
@@ -17910,6 +18068,8 @@ snapshots:
dependencies:
consola: 3.4.2
cjs-module-lexer@1.4.3: {}
cjs-module-lexer@2.2.0: {}
class-variance-authority@0.7.1:
@@ -18060,6 +18220,12 @@ snapshots:
'@epic-web/invariant': 1.0.0
cross-spawn: 7.0.6
cross-fetch@4.1.0(encoding@0.1.13):
dependencies:
node-fetch: 2.7.0(encoding@0.1.13)
transitivePeerDependencies:
- encoding
cross-spawn@7.0.6:
dependencies:
path-key: 3.1.1
@@ -19391,6 +19557,11 @@ snapshots:
safe-buffer: 5.2.1
to-buffer: 1.2.2
hash.js@1.1.7:
dependencies:
inherits: 2.0.4
minimalistic-assert: 1.0.1
hasown@2.0.2:
dependencies:
function-bind: 1.1.2
@@ -19534,6 +19705,13 @@ snapshots:
parent-module: 1.0.1
resolve-from: 4.0.0
import-in-the-middle@1.15.0:
dependencies:
acorn: 8.16.0
acorn-import-attributes: 1.9.5(acorn@8.16.0)
cjs-module-lexer: 1.4.3
module-details-from-path: 1.0.4
import-in-the-middle@2.0.6:
dependencies:
acorn: 8.16.0
@@ -19570,6 +19748,41 @@ snapshots:
dependencies:
css-in-js-utils: 3.1.0
inngest@4.0.5(@opentelemetry/core@2.6.0(@opentelemetry/api@1.9.0))(encoding@0.1.13)(hono@4.12.7)(next@16.1.7(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)(zod@4.3.6):
dependencies:
'@bufbuild/protobuf': 2.11.0
'@inngest/ai': 0.1.7
'@jpwilliams/waitgroup': 2.1.1
'@opentelemetry/api': 1.9.0
'@opentelemetry/auto-instrumentations-node': 0.71.0(@opentelemetry/api@1.9.0)(@opentelemetry/core@2.6.0(@opentelemetry/api@1.9.0))
'@opentelemetry/context-async-hooks': 2.6.0(@opentelemetry/api@1.9.0)
'@opentelemetry/exporter-trace-otlp-http': 0.213.0(@opentelemetry/api@1.9.0)
'@opentelemetry/instrumentation': 0.213.0(@opentelemetry/api@1.9.0)
'@opentelemetry/resources': 2.6.0(@opentelemetry/api@1.9.0)
'@opentelemetry/sdk-trace-base': 2.6.0(@opentelemetry/api@1.9.0)
'@standard-schema/spec': 1.1.0
'@traceloop/instrumentation-anthropic': 0.20.0
'@types/debug': 4.1.13
'@types/ms': 2.1.0
canonicalize: 1.0.8
cross-fetch: 4.1.0(encoding@0.1.13)
debug: 4.4.3
hash.js: 1.1.7
json-stringify-safe: 5.0.1
ms: 2.1.3
serialize-error-cjs: 0.1.4
temporal-polyfill: 0.2.5
ulid: 2.4.0
zod: 4.3.6
optionalDependencies:
hono: 4.12.7
next: 16.1.7(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
typescript: 5.9.3
transitivePeerDependencies:
- '@opentelemetry/core'
- encoding
- supports-color
internal-slot@1.1.0:
dependencies:
es-errors: 1.3.0
@@ -19890,6 +20103,8 @@ snapshots:
json-stable-stringify-without-jsonify@1.0.1: {}
json-stringify-safe@5.0.1: {}
json5@1.0.2:
dependencies:
minimist: 1.2.8
@@ -20233,6 +20448,8 @@ snapshots:
mini-svg-data-uri@1.4.4: {}
minimalistic-assert@1.0.1: {}
minimatch@10.2.4:
dependencies:
brace-expansion: 5.0.4
@@ -21482,6 +21699,14 @@ snapshots:
require-from-string@2.0.2: {}
require-in-the-middle@7.5.2:
dependencies:
debug: 4.4.3
module-details-from-path: 1.0.4
resolve: 1.22.11
transitivePeerDependencies:
- supports-color
require-in-the-middle@8.0.1:
dependencies:
debug: 4.4.3
@@ -21675,6 +21900,8 @@ snapshots:
seq-queue@0.0.5: {}
serialize-error-cjs@0.1.4: {}
server-only@0.0.1: {}
set-blocking@2.0.0: {}
@@ -22219,6 +22446,12 @@ snapshots:
transitivePeerDependencies:
- supports-color
temporal-polyfill@0.2.5:
dependencies:
temporal-spec: 0.2.4
temporal-spec@0.2.4: {}
terser-webpack-plugin@5.3.17(esbuild@0.27.3)(webpack@5.105.4(esbuild@0.27.3)):
dependencies:
'@jridgewell/trace-mapping': 0.3.31
@@ -22513,6 +22746,8 @@ snapshots:
uint8array-extras@1.5.0: {}
ulid@2.4.0: {}
unbox-primitive@1.1.0:
dependencies:
call-bound: 1.0.4
+1
View File
@@ -1,6 +1,7 @@
packages:
- "apps/*"
- "packages/*"
- "services/*"
# Allow lifecycle scripts for packages that need to build native binaries
# Required for pnpm v10+ which blocks scripts by default
+21
View File
@@ -0,0 +1,21 @@
# Build stage: compile the worker with the Go toolchain on Alpine.
FROM golang:1.25.1-alpine AS builder
WORKDIR /app
# Copy module files first so the dependency download layer is cached between
# builds; go.sum* tolerates the file being absent.
COPY services/inngest-poc-worker/go.mod services/inngest-poc-worker/go.sum* ./
RUN go mod download
COPY services/inngest-poc-worker ./
# Static linux/amd64 binary (CGO disabled) so it runs on a bare Alpine base.
RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o /inngest-poc-worker ./cmd/inngest-poc-worker

# Runtime stage: minimal image, running as an unprivileged user.
FROM alpine:3.22
RUN adduser -D -u 10001 appuser
COPY --from=builder /inngest-poc-worker /usr/local/bin/inngest-poc-worker
USER appuser
# Documents the worker's HTTP port (the actual port comes from the PORT env var).
EXPOSE 8287
ENTRYPOINT ["inngest-poc-worker"]
@@ -0,0 +1,60 @@
package main
import (
"context"
"errors"
"log/slog"
"net/http"
"os"
"os/signal"
"syscall"
"time"
"github.com/formbricks/formbricks/services/inngest-poc-worker/internal/config"
"github.com/formbricks/formbricks/services/inngest-poc-worker/internal/inngestapp"
)
// main boots the inngest-poc-worker: it loads configuration from the
// environment, wires up the Inngest app, and serves its HTTP routes until
// SIGINT/SIGTERM, at which point it shuts down gracefully.
func main() {
	logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelInfo}))

	cfg, err := config.LoadFromEnv()
	if err != nil {
		logger.Error("failed to load configuration", slog.Any("error", err))
		os.Exit(1)
	}

	app, err := inngestapp.New(cfg, logger)
	if err != nil {
		logger.Error("failed to initialize Inngest application", slog.Any("error", err))
		os.Exit(1)
	}

	server := &http.Server{
		Addr:              ":" + cfg.Port,
		Handler:           app.Routes(),
		ReadHeaderTimeout: 10 * time.Second,
	}

	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
	defer stop()

	// BUG FIX: previously main could exit as soon as ListenAndServe returned,
	// before Shutdown had finished draining in-flight requests. The goroutine
	// now signals completion and main waits on it before exiting.
	shutdownDone := make(chan struct{})
	go func() {
		defer close(shutdownDone)
		<-ctx.Done()
		shutdownCtx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
		defer cancel()
		if err := server.Shutdown(shutdownCtx); err != nil {
			logger.Error("failed to shut down server cleanly", slog.Any("error", err))
		}
	}()

	logger.Info("inngest-poc-worker started", slog.String("addr", server.Addr))
	// ListenAndServe returns http.ErrServerClosed after Shutdown; anything else
	// is a startup/serve failure.
	if err := server.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {
		logger.Error("worker server stopped unexpectedly", slog.Any("error", err))
		os.Exit(1)
	}
	// Wait for graceful shutdown to finish draining connections.
	<-shutdownDone
	logger.Info("inngest-poc-worker stopped")
}
+27
View File
@@ -0,0 +1,27 @@
module github.com/formbricks/formbricks/services/inngest-poc-worker
go 1.25.1
require github.com/inngest/inngestgo v0.15.1
require (
github.com/coder/websocket v1.8.12 // indirect
github.com/fatih/structs v1.1.0 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/gosimple/slug v1.12.0 // indirect
github.com/gosimple/unidecode v1.0.1 // indirect
github.com/gowebpki/jcs v1.0.0 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/inngest/inngest v1.13.5 // indirect
github.com/oklog/ulid/v2 v2.1.1 // indirect
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 // indirect
github.com/xhit/go-str2duration/v2 v2.1.0 // indirect
golang.org/x/net v0.41.0 // indirect
golang.org/x/sync v0.15.0 // indirect
golang.org/x/sys v0.36.0 // indirect
golang.org/x/text v0.26.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
google.golang.org/grpc v1.73.0 // indirect
google.golang.org/protobuf v1.36.6 // indirect
)
+77
View File
@@ -0,0 +1,77 @@
github.com/coder/websocket v1.8.12 h1:5bUXkEPPIbewrnkU8LTCLVaxi4N4J8ahufH2vlo4NAo=
github.com/coder/websocket v1.8.12/go.mod h1:LNVeNrXQZfe5qhS9ALED3uA+l5pPqvwXg3CKoDBB2gs=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gosimple/slug v1.12.0 h1:xzuhj7G7cGtd34NXnW/yF0l+AGNfWqwgh/IXgFy7dnc=
github.com/gosimple/slug v1.12.0/go.mod h1:UiRaFH+GEilHstLUmcBgWcI42viBN7mAb818JrYOeFQ=
github.com/gosimple/unidecode v1.0.1 h1:hZzFTMMqSswvf0LBJZCZgThIZrpDHFXux9KeGmn6T/o=
github.com/gosimple/unidecode v1.0.1/go.mod h1:CP0Cr1Y1kogOtx0bJblKzsVWrqYaqfNOnHzpgWw4Awc=
github.com/gowebpki/jcs v1.0.0 h1:0pZtOgGetfH/L7yXb4KWcJqIyZNA43WXFyMd7ftZACw=
github.com/gowebpki/jcs v1.0.0/go.mod h1:CID1cNZ+sHp1CCpAR8mPf6QRtagFBgPJE0FCUQ6+BrI=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/inngest/inngest v1.13.5 h1:2kcz62tYL5bsYss4L612I5AY65E+095Yrm4rvvlPVo8=
github.com/inngest/inngest v1.13.5/go.mod h1:EcufIFCh08d/ififXs6gWfNb5R9gSapd6Pi7yRgSh08=
github.com/inngest/inngestgo v0.15.1 h1:JccdXQj5x1SZ7TOVgeUEeAzSugzPzUFzuYUQ9hB0jY0=
github.com/inngest/inngestgo v0.15.1/go.mod h1:2Qm4ULk506Zwt8MJXHfTZ4lthY1WTpYksXK1z6lEM/U=
github.com/oklog/ulid/v2 v2.1.1 h1:suPZ4ARWLOJLegGFiZZ1dFAkqzhMjL3J1TzI+5wHz8s=
github.com/oklog/ulid/v2 v2.1.1/go.mod h1:rcEKHmBBKfef9DhnvX7y1HZBYxjXb0cP5ExxNsTT1QQ=
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0=
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y=
github.com/pborman/getopt v0.0.0-20170112200414-7148bc3a4c30/go.mod h1:85jBQOZwpVEaDAr341tbn15RS4fCAsIst0qp7i8ex1o=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sashabaranov/go-openai v1.35.6 h1:oi0rwCvyxMxgFALDGnyqFTyCJm6n72OnEG3sybIFR0g=
github.com/sashabaranov/go-openai v1.35.6/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/xhit/go-str2duration/v2 v2.1.0 h1:lxklc02Drh6ynqX+DdPyp5pCKLUQpRT8bp8Ydu2Bstc=
github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU=
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ=
go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I=
go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE=
go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E=
go.opentelemetry.io/otel/sdk v1.37.0 h1:ItB0QUqnjesGRvNcmAcU0LyvkVyGJ2xftD29bWdDvKI=
go.opentelemetry.io/otel/sdk v1.37.0/go.mod h1:VredYzxUvuo2q3WRcDnKDjbdvmO0sCzOvVAiY+yUkAg=
go.opentelemetry.io/otel/sdk/metric v1.37.0 h1:90lI228XrB9jCMuSdA0673aubgRobVZFhbjxHHspCPc=
go.opentelemetry.io/otel/sdk/metric v1.37.0/go.mod h1:cNen4ZWfiD37l5NhS+Keb5RXVWZWpRE+9WyVCpbo5ps=
go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4=
go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0=
golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw=
golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA=
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k=
golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 h1:fc6jSaCT0vBduLYZHYrBBNY4dsWuvgyff9noRNDdBeE=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
google.golang.org/grpc v1.73.0 h1:VIWSmpI2MegBtTuFt5/JWy2oXxtjJ/e89Z70ImfD2ok=
google.golang.org/grpc v1.73.0/go.mod h1:50sbHOUqWoCQGI8V2HQLJM0B+LMlIUjNSZmow7EVBQc=
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
@@ -0,0 +1,53 @@
package config
import (
"errors"
"fmt"
"net/url"
"os"
"strings"
)
// Sentinel errors returned by LoadFromEnv when a required environment
// variable is missing or empty.
var (
	ErrInngestBaseURLRequired    = errors.New("INNGEST_BASE_URL is required")
	ErrInngestSigningKeyRequired = errors.New("INNGEST_SIGNING_KEY is required")
	ErrPortRequired              = errors.New("PORT is required")
)

// Config holds the worker's runtime settings, loaded from the environment.
type Config struct {
	BaseURL    string // Inngest server base URL, normalized without a trailing slash
	SigningKey string // signing key shared with the Inngest server
	Port       string // TCP port the worker's HTTP server listens on
}
// LoadFromEnv builds a Config from the INNGEST_BASE_URL, INNGEST_SIGNING_KEY
// and PORT environment variables. Every value is whitespace-trimmed; a missing
// value yields its sentinel error, and a base URL lacking a scheme or host is
// rejected. The returned BaseURL has trailing slashes stripped.
func LoadFromEnv() (Config, error) {
	// Required variables, checked in a fixed order so error precedence matches
	// the documented contract (base URL, then signing key, then port).
	required := []struct {
		key     string
		missing error
	}{
		{"INNGEST_BASE_URL", ErrInngestBaseURLRequired},
		{"INNGEST_SIGNING_KEY", ErrInngestSigningKeyRequired},
		{"PORT", ErrPortRequired},
	}

	values := make(map[string]string, len(required))
	for _, entry := range required {
		value := strings.TrimSpace(os.Getenv(entry.key))
		if value == "" {
			return Config{}, entry.missing
		}
		values[entry.key] = value
	}

	rawBase := values["INNGEST_BASE_URL"]
	parsed, err := url.Parse(rawBase)
	if err != nil || parsed.Scheme == "" || parsed.Host == "" {
		return Config{}, fmt.Errorf("invalid INNGEST_BASE_URL: %s", rawBase)
	}

	return Config{
		BaseURL:    strings.TrimRight(rawBase, "/"),
		SigningKey: values["INNGEST_SIGNING_KEY"],
		Port:       values["PORT"],
	}, nil
}
// RegisterURL returns the Inngest endpoint used to register this worker's
// functions, derived from the normalized base URL.
func (c Config) RegisterURL() string {
	return fmt.Sprintf("%s/fn/register", c.BaseURL)
}
@@ -0,0 +1,65 @@
package config
import "testing"
func TestLoadFromEnv(t *testing.T) {
t.Run("returns an error when INNGEST_BASE_URL is missing", func(t *testing.T) {
t.Setenv("INNGEST_BASE_URL", "")
t.Setenv("INNGEST_SIGNING_KEY", "signkey-test-1234")
t.Setenv("PORT", "8287")
_, err := LoadFromEnv()
if err != ErrInngestBaseURLRequired {
t.Fatalf("expected ErrInngestBaseURLRequired, got %v", err)
}
})
t.Run("returns an error when INNGEST_SIGNING_KEY is missing", func(t *testing.T) {
t.Setenv("INNGEST_BASE_URL", "http://localhost:8288")
t.Setenv("INNGEST_SIGNING_KEY", "")
t.Setenv("PORT", "8287")
_, err := LoadFromEnv()
if err != ErrInngestSigningKeyRequired {
t.Fatalf("expected ErrInngestSigningKeyRequired, got %v", err)
}
})
t.Run("returns an error when PORT is missing", func(t *testing.T) {
t.Setenv("INNGEST_BASE_URL", "http://localhost:8288")
t.Setenv("INNGEST_SIGNING_KEY", "signkey-test-1234")
t.Setenv("PORT", "")
_, err := LoadFromEnv()
if err != ErrPortRequired {
t.Fatalf("expected ErrPortRequired, got %v", err)
}
})
t.Run("loads worker configuration from the environment", func(t *testing.T) {
t.Setenv("INNGEST_BASE_URL", " http://localhost:8288/ ")
t.Setenv("INNGEST_SIGNING_KEY", " signkey-test-1234 ")
t.Setenv("PORT", " 8287 ")
cfg, err := LoadFromEnv()
if err != nil {
t.Fatalf("expected no error, got %v", err)
}
if got, want := cfg.BaseURL, "http://localhost:8288"; got != want {
t.Fatalf("expected BaseURL %q, got %q", want, got)
}
if got, want := cfg.SigningKey, "signkey-test-1234"; got != want {
t.Fatalf("expected SigningKey %q, got %q", want, got)
}
if got, want := cfg.Port, "8287"; got != want {
t.Fatalf("expected Port %q, got %q", want, got)
}
if got, want := cfg.RegisterURL(), "http://localhost:8288/fn/register"; got != want {
t.Fatalf("expected RegisterURL %q, got %q", want, got)
}
})
}
@@ -0,0 +1,62 @@
package inngestapp
import (
"fmt"
"log/slog"
"net/http"
"github.com/inngest/inngestgo"
"github.com/formbricks/formbricks/services/inngest-poc-worker/internal/config"
"github.com/formbricks/formbricks/services/inngest-poc-worker/internal/workers"
)
const (
	// AppID identifies this worker application to the Inngest server.
	AppID = "formbricks-inngest-poc-worker"
	// ServePath is the HTTP path where the Inngest SDK handler is mounted.
	ServePath = "/api/inngest"
)

// App bundles the configured Inngest client with the worker's logger.
type App struct {
	client inngestgo.Client
	logger *slog.Logger
}
// New constructs the worker's Inngest App: it creates a client pointed at the
// configured Inngest server and registers the survey lifecycle functions.
// Returns a wrapped error if either step fails.
func New(cfg config.Config, logger *slog.Logger) (*App, error) {
	client, err := inngestgo.NewClient(inngestgo.ClientOpts{
		AppID: AppID,
		// API and Event API both point at the same self-hosted base URL.
		APIBaseURL:      inngestgo.StrPtr(cfg.BaseURL),
		EventAPIBaseURL: inngestgo.StrPtr(cfg.BaseURL),
		RegisterURL:     inngestgo.StrPtr(cfg.RegisterURL()),
		SigningKey:      inngestgo.StrPtr(cfg.SigningKey),
		Logger:          logger,
		// Dev explicitly disabled — presumably to force signed, non-dev-server
		// behavior even locally; confirm against inngestgo docs.
		Dev: inngestgo.BoolPtr(false),
	})
	if err != nil {
		return nil, fmt.Errorf("create inngest client: %w", err)
	}
	// Attach the survey start/end functions before the app starts serving.
	if _, err := workers.Register(client, logger); err != nil {
		return nil, fmt.Errorf("register survey lifecycle functions: %w", err)
	}
	return &App{
		client: client,
		logger: logger,
	}, nil
}
// Handler returns the Inngest SDK's HTTP handler, configured to serve at
// ServePath.
func (a *App) Handler() http.Handler {
	opts := inngestgo.ServeOpts{
		Path: inngestgo.StrPtr(ServePath),
	}
	return a.client.ServeWithOpts(opts)
}
// Routes builds the worker's HTTP mux: the Inngest handler at ServePath plus
// a trivial /health endpoint for liveness checks.
func (a *App) Routes() http.Handler {
	router := http.NewServeMux()
	router.HandleFunc("/health", func(w http.ResponseWriter, _ *http.Request) {
		w.WriteHeader(http.StatusOK)
		_, _ = w.Write([]byte("ok"))
	})
	router.Handle(ServePath, a.Handler())
	return router
}
@@ -0,0 +1,84 @@
package inngestapp
import (
"bytes"
"context"
"encoding/json"
"fmt"
"log/slog"
"net/http"
"net/http/httptest"
"strings"
"testing"
"time"
"github.com/inngest/inngestgo"
"github.com/formbricks/formbricks/services/inngest-poc-worker/internal/config"
"github.com/formbricks/formbricks/services/inngest-poc-worker/internal/workers"
)
// TestRoutesInvokeSignedFunction drives the full HTTP path end to end: it
// builds a signed Inngest invocation request for the survey-start function,
// serves it through App.Routes, and asserts the handler ran by inspecting the
// captured structured logs.
func TestRoutesInvokeSignedFunction(t *testing.T) {
	// Capture logs so the handler's output can be asserted on.
	var logs bytes.Buffer
	logger := slog.New(slog.NewTextHandler(&logs, &slog.HandlerOptions{Level: slog.LevelInfo}))
	signingKey := "signkey-test-0123456789abcdef0123456789abcdef"
	app, err := New(
		config.Config{
			BaseURL:    "http://inngest:8288",
			SigningKey: signingKey,
			Port:       "8287",
		},
		logger,
	)
	if err != nil {
		t.Fatalf("expected app to initialize, got %v", err)
	}
	// Shape mirrors the payload the Inngest server posts to the SDK: the event
	// plus a ctx block carrying the function/run identifiers.
	body, err := json.Marshal(map[string]any{
		"event": workers.SurveyLifecycleScheduledEvent{
			Name: workers.SurveyStartEventName,
			Data: workers.SurveyLifecycleScheduledEventData{
				SurveyID:      "survey_1",
				EnvironmentID: "env_1",
				ScheduledFor:  "2026-04-01T12:00:00.000Z",
			},
		},
		"ctx": map[string]any{
			// fn_id must be a UUID-shaped value for the SDK to accept it.
			"fn_id":  "6ba7b810-9dad-11d1-80b4-00c04fd430c8",
			"run_id": "run-id",
		},
	})
	if err != nil {
		t.Fatalf("expected request body to marshal, got %v", err)
	}
	// Sign the body with the same key the app was configured with, so the
	// SDK's signature verification passes.
	signature, err := inngestgo.Sign(context.Background(), time.Now(), []byte(signingKey), body)
	if err != nil {
		t.Fatalf("expected request to be signed, got %v", err)
	}
	// The fnId query parameter is "<app id>-<function id>" per the SDK's slug
	// convention.
	req := httptest.NewRequest(
		http.MethodPost,
		fmt.Sprintf("%s?fnId=%s-%s", ServePath, AppID, workers.SurveyStartFunctionID),
		bytes.NewReader(body),
	)
	req.Header.Set(inngestgo.HeaderKeySignature, signature)
	req.Header.Set(inngestgo.HeaderKeyContentType, "application/json")
	recorder := httptest.NewRecorder()
	app.Routes().ServeHTTP(recorder, req)
	if recorder.Code != http.StatusOK {
		t.Fatalf("expected status %d, got %d with body %q", http.StatusOK, recorder.Code, recorder.Body.String())
	}
	// Assert on the structured log fields the start handler emits.
	output := logs.String()
	if !strings.Contains(output, "STARTING SURVEY") {
		t.Fatalf("expected logs to contain STARTING SURVEY, got %q", output)
	}
	if !strings.Contains(output, "event_kind="+workers.SurveyStartEventName) {
		t.Fatalf("expected logs to contain event kind, got %q", output)
	}
}
@@ -0,0 +1,100 @@
package workers
import (
"context"
"log/slog"
"github.com/inngest/inngestgo"
)
const (
	// Inngest function IDs for the two lifecycle functions.
	SurveyStartFunctionID = "survey-start"
	SurveyEndFunctionID   = "survey-end"
	// Event names that trigger the lifecycle functions.
	SurveyStartEventName = "survey.start"
	SurveyEndEventName   = "survey.end"
	// Event names that cancel a pending start/end run for a matching survey.
	SurveyStartCancelledEvent = "survey.start.cancelled"
	SurveyEndCancelledEvent   = "survey.end.cancelled"
)

// SurveyLifecycleScheduledEventData is the payload carried by survey
// lifecycle events.
type SurveyLifecycleScheduledEventData struct {
	SurveyID      string `json:"surveyId"`
	EnvironmentID string `json:"environmentId"`
	// Timestamp string (tests use "2026-04-01T12:00:00.000Z"); not parsed here.
	ScheduledFor string `json:"scheduledFor"`
}

// SurveyLifecycleScheduledEvent is the typed Inngest event wrapping the
// payload above.
type SurveyLifecycleScheduledEvent = inngestgo.GenericEvent[SurveyLifecycleScheduledEventData]
// Register creates the survey start and end functions on the given client and
// returns them in that order. Each function carries a cancellation rule keyed
// on its matching "*.cancelled" event.
func Register(client inngestgo.Client, logger *slog.Logger) ([]inngestgo.ServableFunction, error) {
	// The two functions were previously built from duplicated option blocks;
	// a shared constructor keeps their cancellation expressions in lockstep.
	startFunction, err := newLifecycleFunction(
		client,
		SurveyStartFunctionID,
		SurveyStartEventName,
		SurveyStartCancelledEvent,
		NewSurveyStartHandler(logger),
	)
	if err != nil {
		return nil, err
	}
	endFunction, err := newLifecycleFunction(
		client,
		SurveyEndFunctionID,
		SurveyEndEventName,
		SurveyEndCancelledEvent,
		NewSurveyEndHandler(logger),
	)
	if err != nil {
		return nil, err
	}
	return []inngestgo.ServableFunction{startFunction, endFunction}, nil
}

// newLifecycleFunction builds one lifecycle function: triggered by eventName,
// cancelled by cancelEvent when the cancellation event targets the same
// survey and environment.
func newLifecycleFunction(
	client inngestgo.Client,
	functionID string,
	eventName string,
	cancelEvent string,
	handler inngestgo.SDKFunction[SurveyLifecycleScheduledEventData],
) (inngestgo.ServableFunction, error) {
	return inngestgo.CreateFunction(
		client,
		inngestgo.FunctionOpts{
			ID: functionID,
			Cancel: []inngestgo.ConfigCancel{
				{
					Event: cancelEvent,
					If: inngestgo.StrPtr(
						"event.data.surveyId == async.data.surveyId && event.data.environmentId == async.data.environmentId",
					),
				},
			},
		},
		inngestgo.EventTrigger(eventName, nil),
		handler,
	)
}
// NewSurveyStartHandler returns the handler for survey.start events. It is a
// proof of concept: it only logs the event payload.
func NewSurveyStartHandler(logger *slog.Logger) inngestgo.SDKFunction[SurveyLifecycleScheduledEventData] {
	return func(ctx context.Context, in inngestgo.Input[SurveyLifecycleScheduledEventData]) (any, error) {
		payload := in.Event.Data
		logLifecycle(ctx, logger, "STARTING SURVEY", SurveyStartEventName, payload)
		return nil, nil
	}
}
// NewSurveyEndHandler returns the handler for survey.end events. It is a
// proof of concept: it only logs the event payload.
func NewSurveyEndHandler(logger *slog.Logger) inngestgo.SDKFunction[SurveyLifecycleScheduledEventData] {
	return func(ctx context.Context, in inngestgo.Input[SurveyLifecycleScheduledEventData]) (any, error) {
		payload := in.Event.Data
		logLifecycle(ctx, logger, "ENDING SURVEY", SurveyEndEventName, payload)
		return nil, nil
	}
}
// logLifecycle emits one structured info record describing a survey lifecycle
// event: its kind, the survey/environment identifiers, and the scheduled
// timestamp string.
func logLifecycle(
	ctx context.Context,
	logger *slog.Logger,
	message string,
	eventKind string,
	payload SurveyLifecycleScheduledEventData,
) {
	fields := []any{
		slog.String("event_kind", eventKind),
		slog.String("survey_id", payload.SurveyID),
		slog.String("environment_id", payload.EnvironmentID),
		slog.String("scheduled_for", payload.ScheduledFor),
	}
	logger.InfoContext(ctx, message, fields...)
}
@@ -0,0 +1,118 @@
package workers
import (
"bytes"
"context"
"log/slog"
"strings"
"testing"
"github.com/inngest/inngestgo"
)
// TestRegisterAddsCancellationRules verifies Register returns the start and
// end functions (in that order) and that each carries exactly one
// cancellation rule keyed on its matching cancelled event.
func TestRegisterAddsCancellationRules(t *testing.T) {
	// Dev mode avoids needing a reachable Inngest server or signing key.
	client, err := inngestgo.NewClient(inngestgo.ClientOpts{
		AppID: "test-app",
		Dev:   inngestgo.BoolPtr(true),
	})
	if err != nil {
		t.Fatalf("expected client to initialize, got %v", err)
	}
	functions, err := Register(client, slog.New(slog.NewTextHandler(&bytes.Buffer{}, nil)))
	if err != nil {
		t.Fatalf("expected functions to register, got %v", err)
	}
	if len(functions) != 2 {
		t.Fatalf("expected 2 functions, got %d", len(functions))
	}
	// Register returns [start, end]; check each function's cancel config.
	startCancel := functions[0].Config().Cancel
	if len(startCancel) != 1 || startCancel[0].Event != SurveyStartCancelledEvent {
		t.Fatalf("expected survey start cancellation config, got %#v", startCancel)
	}
	endCancel := functions[1].Config().Cancel
	if len(endCancel) != 1 || endCancel[0].Event != SurveyEndCancelledEvent {
		t.Fatalf("expected survey end cancellation config, got %#v", endCancel)
	}
}
// TestSurveyLifecycleHandlersLogStructuredEvents runs each handler against a
// capturing logger and asserts the structured fields it emits.
//
// Fix: the previous table carried a pre-built `handler` field constructed
// with a throwaway logger that was never used — the loop silently rebuilt the
// handlers by comparing expectedKind. The table now carries the handler
// constructor itself, so the capturing logger is injected directly and the
// dead field is gone.
func TestSurveyLifecycleHandlersLogStructuredEvents(t *testing.T) {
	testCases := []struct {
		name            string
		newHandler      func(*slog.Logger) inngestgo.SDKFunction[SurveyLifecycleScheduledEventData]
		message         string
		expectedKind    string
		expectedPayload SurveyLifecycleScheduledEventData
	}{
		{
			name:         "start handler logs survey start",
			newHandler:   NewSurveyStartHandler,
			message:      "STARTING SURVEY",
			expectedKind: SurveyStartEventName,
			expectedPayload: SurveyLifecycleScheduledEventData{
				SurveyID:      "survey_start_1",
				EnvironmentID: "env_1",
				ScheduledFor:  "2026-04-01T12:00:00.000Z",
			},
		},
		{
			name:         "end handler logs survey end",
			newHandler:   NewSurveyEndHandler,
			message:      "ENDING SURVEY",
			expectedKind: SurveyEndEventName,
			expectedPayload: SurveyLifecycleScheduledEventData{
				SurveyID:      "survey_end_1",
				EnvironmentID: "env_2",
				ScheduledFor:  "2026-04-02T12:00:00.000Z",
			},
		},
	}
	for _, testCase := range testCases {
		t.Run(testCase.name, func(t *testing.T) {
			var logs bytes.Buffer
			logger := slog.New(slog.NewTextHandler(&logs, &slog.HandlerOptions{Level: slog.LevelInfo}))
			// Build the handler under test with the capturing logger.
			handler := testCase.newHandler(logger)
			if _, err := handler(
				context.Background(),
				inngestgo.Input[SurveyLifecycleScheduledEventData]{
					Event: SurveyLifecycleScheduledEvent{
						Name: testCase.expectedKind,
						Data: testCase.expectedPayload,
					},
				},
			); err != nil {
				t.Fatalf("expected no error, got %v", err)
			}
			output := logs.String()
			if !strings.Contains(output, testCase.message) {
				t.Fatalf("expected log output to contain %q, got %q", testCase.message, output)
			}
			if !strings.Contains(output, "event_kind="+testCase.expectedKind) {
				t.Fatalf("expected log output to contain kind %q, got %q", testCase.expectedKind, output)
			}
			if !strings.Contains(output, "survey_id="+testCase.expectedPayload.SurveyID) {
				t.Fatalf("expected log output to contain survey_id, got %q", output)
			}
			if !strings.Contains(output, "environment_id="+testCase.expectedPayload.EnvironmentID) {
				t.Fatalf("expected log output to contain environment_id, got %q", output)
			}
			if !strings.Contains(output, "scheduled_for="+testCase.expectedPayload.ScheduledFor) {
				t.Fatalf("expected log output to contain scheduled_for, got %q", output)
			}
		})
	}
}
+14
View File
@@ -0,0 +1,14 @@
{
"name": "@formbricks/inngest-poc-worker",
"version": "0.0.0",
"private": true,
"packageManager": "pnpm@10.32.1",
"scripts": {
"go": "dotenv -e ../../.env -- go run ./cmd/inngest-poc-worker",
"test": "dotenv -e ../../.env -- go test ./...",
"test:coverage": "dotenv -e ../../.env -- go test ./... -covermode=atomic -coverprofile=coverage.out"
},
"devDependencies": {
"dotenv-cli": "11.0.0"
}
}
+3
View File
@@ -163,6 +163,9 @@
"HTTPS_PROXY",
"IMPRINT_URL",
"IMPRINT_ADDRESS",
"INNGEST_BASE_URL",
"INNGEST_EVENT_KEY",
"INNGEST_SIGNING_KEY",
"INVITE_DISABLED",
"IS_FORMBRICKS_CLOUD",
"CHATWOOT_WEBSITE_TOKEN",