mirror of https://github.com/formbricks/formbricks.git (synced 2026-02-18 10:09:49 -06:00)

Compare commits (3 commits): codex/hub-... to fix/segmen...

- e3ba952154
- aa538a3a51
- 817e108ff5
@@ -5,10 +5,14 @@ import { getSegment } from "../segments";
 import { segmentFilterToPrismaQuery } from "./prisma-query";
 
 const mockQueryRawUnsafe = vi.fn();
+const mockFindFirst = vi.fn();
 
 vi.mock("@formbricks/database", () => ({
   prisma: {
     $queryRawUnsafe: (...args: unknown[]) => mockQueryRawUnsafe(...args),
+    contactAttribute: {
+      findFirst: (...args: unknown[]) => mockFindFirst(...args),
+    },
   },
 }));
 
@@ -26,7 +30,9 @@ describe("segmentFilterToPrismaQuery", () => {
 
   beforeEach(() => {
     vi.clearAllMocks();
-    // Default mock: number filter raw SQL returns one matching contact
+    // Default: backfill is complete, no un-migrated rows
+    mockFindFirst.mockResolvedValue(null);
+    // Fallback path mock: raw SQL returns one matching contact when un-migrated rows exist
     mockQueryRawUnsafe.mockResolvedValue([{ contactId: "mock-contact-1" }]);
   });
 
@@ -145,7 +151,16 @@ describe("segmentFilterToPrismaQuery", () => {
           },
         },
       ],
-      OR: [{ id: { in: ["mock-contact-1"] } }],
+      OR: [
+        {
+          attributes: {
+            some: {
+              attributeKey: { key: "age" },
+              valueNumber: { gt: 30 },
+            },
+          },
+        },
+      ],
     });
   }
 });
@@ -757,7 +772,12 @@ describe("segmentFilterToPrismaQuery", () => {
     });
 
     expect(subgroup.AND[0].AND[2]).toStrictEqual({
-      id: { in: ["mock-contact-1"] },
+      attributes: {
+        some: {
+          attributeKey: { key: "age" },
+          valueNumber: { gte: 18 },
+        },
+      },
     });
 
     // Segment inclusion
@@ -1158,10 +1178,23 @@ describe("segmentFilterToPrismaQuery", () => {
       },
     });
 
-    // Second subgroup (numeric operators - now use raw SQL subquery returning contact IDs)
+    // Second subgroup (numeric operators - uses clean Prisma filter post-backfill)
     const secondSubgroup = whereClause.AND?.[0];
     expect(secondSubgroup.AND[1].AND).toContainEqual({
-      id: { in: ["mock-contact-1"] },
+      attributes: {
+        some: {
+          attributeKey: { key: "loginCount" },
+          valueNumber: { gt: 5 },
+        },
+      },
     });
+    expect(secondSubgroup.AND[1].AND).toContainEqual({
+      attributes: {
+        some: {
+          attributeKey: { key: "purchaseAmount" },
+          valueNumber: { lte: 1000 },
+        },
+      },
+    });
 
     // Third subgroup (negation operators in OR clause)
@@ -1196,6 +1229,103 @@ describe("segmentFilterToPrismaQuery", () => {
     }
   });
 
+  test("number filter falls back to raw SQL when un-migrated rows exist", async () => {
+    mockFindFirst.mockResolvedValue({ id: "unmigrated-row-1" });
+    mockQueryRawUnsafe.mockResolvedValue([{ contactId: "mock-contact-1" }]);
+
+    const filters: TBaseFilters = [
+      {
+        id: "filter_1",
+        connector: null,
+        resource: {
+          id: "attr_1",
+          root: {
+            type: "attribute" as const,
+            contactAttributeKey: "age",
+          },
+          value: 25,
+          qualifier: {
+            operator: "greaterThan",
+          },
+        },
+      },
+    ];
+
+    const result = await segmentFilterToPrismaQuery(mockSegmentId, filters, mockEnvironmentId);
+
+    expect(result.ok).toBe(true);
+    if (result.ok) {
+      const filterClause = result.data.whereClause.AND?.[1] as any;
+      expect(filterClause.AND[0]).toEqual({
+        OR: [
+          {
+            attributes: {
+              some: {
+                attributeKey: { key: "age" },
+                valueNumber: { gt: 25 },
+              },
+            },
+          },
+          { id: { in: ["mock-contact-1"] } },
+        ],
+      });
+    }
+
+    expect(mockFindFirst).toHaveBeenCalledWith({
+      where: {
+        attributeKey: {
+          key: "age",
+          environmentId: mockEnvironmentId,
+        },
+        valueNumber: null,
+      },
+      select: { id: true },
+    });
+
+    expect(mockQueryRawUnsafe).toHaveBeenCalled();
+    const sqlCall = mockQueryRawUnsafe.mock.calls[0];
+    expect(sqlCall[0]).toContain('cak."environmentId" = $4');
+    expect(sqlCall[4]).toBe(mockEnvironmentId);
+  });
+
+  test("number filter uses clean Prisma query when backfill is complete", async () => {
+    const filters: TBaseFilters = [
+      {
+        id: "filter_1",
+        connector: null,
+        resource: {
+          id: "attr_1",
+          root: {
+            type: "attribute" as const,
+            contactAttributeKey: "score",
+          },
+          value: 100,
+          qualifier: {
+            operator: "lessEqual",
+          },
+        },
+      },
+    ];
+
+    const result = await segmentFilterToPrismaQuery(mockSegmentId, filters, mockEnvironmentId);
+
+    expect(result.ok).toBe(true);
+    if (result.ok) {
+      const filterClause = result.data.whereClause.AND?.[1] as any;
+      expect(filterClause.AND[0]).toEqual({
+        attributes: {
+          some: {
+            attributeKey: { key: "score" },
+            valueNumber: { lte: 100 },
+          },
+        },
+      });
+    }
+
+    expect(mockFindFirst).toHaveBeenCalled();
+    expect(mockQueryRawUnsafe).not.toHaveBeenCalled();
+  });
+
 // ==========================================
 // DATE FILTER TESTS
 // ==========================================
@@ -1638,8 +1768,15 @@ describe("segmentFilterToPrismaQuery", () => {
       mode: "insensitive",
     });
 
-    // Number filter uses raw SQL subquery (transition code) returning contact IDs
-    expect(andConditions[1]).toEqual({ id: { in: ["mock-contact-1"] } });
+    // Number filter uses clean Prisma filter post-backfill
+    expect(andConditions[1]).toEqual({
+      attributes: {
+        some: {
+          attributeKey: { key: "purchaseCount" },
+          valueNumber: { gt: 5 },
+        },
+      },
+    });
 
     // Date filter uses OR fallback with 'valueDate' and string 'value'
     expect((andConditions[2] as unknown as any).attributes.some.OR[0].valueDate).toHaveProperty("gte");
@@ -116,59 +116,100 @@ const buildDateAttributeFilterWhereClause = (filter: TSegmentAttributeFilter): P
 
 /**
  * Builds a Prisma where clause for number attribute filters.
- * Uses a raw SQL subquery to handle both migrated rows (valueNumber populated)
- * and un-migrated rows (valueNumber NULL, value contains numeric string).
- * This is transition code for the deferred value backfill.
+ * Uses a clean Prisma query when all rows have valueNumber populated (post-backfill).
+ * Falls back to a raw SQL subquery for un-migrated rows (valueNumber NULL, value contains numeric string).
  *
  * TODO: After the backfill script has been run and all valueNumber columns are populated,
- * revert this to the clean Prisma-only version that queries valueNumber directly.
+ * remove the un-migrated fallback path entirely.
  */
 const buildNumberAttributeFilterWhereClause = async (
-  filter: TSegmentAttributeFilter
+  filter: TSegmentAttributeFilter,
+  environmentId: string
 ): Promise<Prisma.ContactWhereInput> => {
   const { root, qualifier, value } = filter;
   const { contactAttributeKey } = root;
   const { operator } = qualifier;
 
   const numericValue = typeof value === "number" ? value : Number(value);
-  const sqlOp = SQL_OPERATORS[operator];
 
-  if (!sqlOp) {
-    return {};
+  let valueNumberCondition: Prisma.FloatNullableFilter;
+
+  switch (operator) {
+    case "greaterThan":
+      valueNumberCondition = { gt: numericValue };
+      break;
+    case "greaterEqual":
+      valueNumberCondition = { gte: numericValue };
+      break;
+    case "lessThan":
+      valueNumberCondition = { lt: numericValue };
+      break;
+    case "lessEqual":
+      valueNumberCondition = { lte: numericValue };
+      break;
+    default:
+      return {};
   }
 
-  const matchingContactIds = await prisma.$queryRawUnsafe<{ contactId: string }[]>(
+  const migratedFilter: Prisma.ContactWhereInput = {
+    attributes: {
+      some: {
+        attributeKey: { key: contactAttributeKey },
+        valueNumber: valueNumberCondition,
+      },
+    },
+  };
+
+  const hasUnmigratedRows = await prisma.contactAttribute.findFirst({
+    where: {
+      attributeKey: {
+        key: contactAttributeKey,
+        environmentId,
+      },
+      valueNumber: null,
+    },
+    select: { id: true },
+  });
+
+  if (!hasUnmigratedRows) {
+    return migratedFilter;
+  }
+
+  const sqlOp = SQL_OPERATORS[operator];
+  const unmigratedMatchingIds = await prisma.$queryRawUnsafe<{ contactId: string }[]>(
     `
     SELECT DISTINCT ca."contactId"
     FROM "ContactAttribute" ca
     JOIN "ContactAttributeKey" cak ON ca."attributeKeyId" = cak.id
     WHERE cak.key = $1
-      AND (
-        (ca."valueNumber" IS NOT NULL AND ca."valueNumber" ${sqlOp} $2)
-        OR
-        (ca."valueNumber" IS NULL AND ca.value ~ $3 AND ca.value::double precision ${sqlOp} $2)
-      )
+      AND cak."environmentId" = $4
+      AND ca."valueNumber" IS NULL
+      AND ca.value ~ $3
+      AND ca.value::double precision ${sqlOp} $2
    `,
    contactAttributeKey,
    numericValue,
-    NUMBER_PATTERN_SQL
+    NUMBER_PATTERN_SQL,
+    environmentId
  );
 
-  const contactIds = matchingContactIds.map((r) => r.contactId);
-
-  if (contactIds.length === 0) {
-    // Return an impossible condition so the filter correctly excludes all contacts
-    return { id: "__NUMBER_FILTER_NO_MATCH__" };
+  if (unmigratedMatchingIds.length === 0) {
+    return migratedFilter;
  }
 
-  return { id: { in: contactIds } };
+  const contactIds = unmigratedMatchingIds.map((r) => r.contactId);
+
+  return {
+    OR: [migratedFilter, { id: { in: contactIds } }],
+  };
 };
 
 /**
  * Builds a Prisma where clause from a segment attribute filter
  */
 const buildAttributeFilterWhereClause = async (
-  filter: TSegmentAttributeFilter
+  filter: TSegmentAttributeFilter,
+  environmentId: string
 ): Promise<Prisma.ContactWhereInput> => {
   const { root, qualifier, value } = filter;
   const { contactAttributeKey } = root;
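For orientation: the rewritten function above now returns one of two where-clause shapes, pinned down by the test expectations earlier in this compare. A reference sketch only (the literal values are the mock fixtures from the tests, not additional code from the commits):

```typescript
// Backfill complete: a single typed-column condition.
const cleanShape = {
  attributes: { some: { attributeKey: { key: "score" }, valueNumber: { lte: 100 } } },
};

// Un-migrated rows present: the typed condition OR-ed with contact ids found via raw SQL.
const fallbackShape = {
  OR: [
    { attributes: { some: { attributeKey: { key: "age" }, valueNumber: { gt: 25 } } } },
    { id: { in: ["mock-contact-1"] } },
  ],
};
```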
@@ -215,7 +256,7 @@ const buildAttributeFilterWhereClause = (
 
   // Handle number operators
   if (["greaterThan", "greaterEqual", "lessThan", "lessEqual"].includes(operator)) {
-    return await buildNumberAttributeFilterWhereClause(filter);
+    return await buildNumberAttributeFilterWhereClause(filter, environmentId);
   }
 
   // For string operators, ensure value is a primitive (not an object or array)
@@ -253,7 +294,8 @@ const buildAttributeFilterWhereClause = (
  * Builds a Prisma where clause from a person filter
  */
 const buildPersonFilterWhereClause = async (
-  filter: TSegmentPersonFilter
+  filter: TSegmentPersonFilter,
+  environmentId: string
 ): Promise<Prisma.ContactWhereInput> => {
   const { personIdentifier } = filter.root;
 
@@ -265,7 +307,7 @@ const buildPersonFilterWhereClause = (
         contactAttributeKey: personIdentifier,
       },
     };
-    return await buildAttributeFilterWhereClause(personFilter);
+    return await buildAttributeFilterWhereClause(personFilter, environmentId);
   }
 
   return {};
@@ -314,6 +356,7 @@ const buildDeviceFilterWhereClause = (
 const buildSegmentFilterWhereClause = async (
   filter: TSegmentSegmentFilter,
   segmentPath: Set<string>,
+  environmentId: string,
   deviceType?: "phone" | "desktop"
 ): Promise<Prisma.ContactWhereInput> => {
   const { root } = filter;
@@ -337,7 +380,7 @@ const buildSegmentFilterWhereClause = async (
   const newPath = new Set(segmentPath);
   newPath.add(segmentId);
 
-  return processFilters(segment.filters, newPath, deviceType);
+  return processFilters(segment.filters, newPath, environmentId, deviceType);
 };
 
 /**
@@ -346,19 +389,25 @@ const buildSegmentFilterWhereClause = async (
 const processSingleFilter = async (
   filter: TSegmentFilter,
   segmentPath: Set<string>,
+  environmentId: string,
   deviceType?: "phone" | "desktop"
 ): Promise<Prisma.ContactWhereInput> => {
   const { root } = filter;
 
   switch (root.type) {
     case "attribute":
-      return await buildAttributeFilterWhereClause(filter as TSegmentAttributeFilter);
+      return await buildAttributeFilterWhereClause(filter as TSegmentAttributeFilter, environmentId);
     case "person":
-      return await buildPersonFilterWhereClause(filter as TSegmentPersonFilter);
+      return await buildPersonFilterWhereClause(filter as TSegmentPersonFilter, environmentId);
     case "device":
       return buildDeviceFilterWhereClause(filter as TSegmentDeviceFilter, deviceType);
     case "segment":
-      return await buildSegmentFilterWhereClause(filter as TSegmentSegmentFilter, segmentPath, deviceType);
+      return await buildSegmentFilterWhereClause(
+        filter as TSegmentSegmentFilter,
+        segmentPath,
+        environmentId,
+        deviceType
+      );
     default:
       return {};
   }
@@ -370,6 +419,7 @@ const processSingleFilter = async (
 const processFilters = async (
   filters: TBaseFilters,
   segmentPath: Set<string>,
+  environmentId: string,
   deviceType?: "phone" | "desktop"
 ): Promise<Prisma.ContactWhereInput> => {
   if (filters.length === 0) return {};
@@ -386,10 +436,10 @@ const processFilters = async (
     // Process the resource based on its type
     if (isResourceFilter(resource)) {
       // If it's a single filter, process it directly
-      whereClause = await processSingleFilter(resource, segmentPath, deviceType);
+      whereClause = await processSingleFilter(resource, segmentPath, environmentId, deviceType);
     } else {
       // If it's a group of filters, process it recursively
-      whereClause = await processFilters(resource, segmentPath, deviceType);
+      whereClause = await processFilters(resource, segmentPath, environmentId, deviceType);
     }
 
     if (Object.keys(whereClause).length === 0) continue;
@@ -432,7 +482,7 @@ export const segmentFilterToPrismaQuery = reactCache(
 
     // Initialize an empty stack for tracking the current evaluation path
     const segmentPath = new Set<string>([segmentId]);
-    const filtersWhereClause = await processFilters(filters, segmentPath, deviceType);
+    const filtersWhereClause = await processFilters(filters, segmentPath, environmentId, deviceType);
 
     const whereClause = {
       AND: [baseWhereClause, filtersWhereClause],
@@ -37,6 +37,7 @@ vi.mock("@formbricks/database", () => ({
       create: vi.fn(),
       delete: vi.fn(),
       update: vi.fn(),
+      upsert: vi.fn(),
       findFirst: vi.fn(),
     },
     survey: {
@@ -206,6 +207,73 @@ describe("Segment Service Tests", () => {
     vi.mocked(prisma.segment.create).mockRejectedValue(new Error("DB error"));
     await expect(createSegment(mockSegmentCreateInput)).rejects.toThrow(Error);
   });
 
+  test("should upsert a private segment without surveyId", async () => {
+    const privateInput: TSegmentCreateInput = {
+      ...mockSegmentCreateInput,
+      isPrivate: true,
+    };
+    const privateSegmentPrisma = { ...mockSegmentPrisma, isPrivate: true };
+    vi.mocked(prisma.segment.upsert).mockResolvedValue(privateSegmentPrisma);
+    const segment = await createSegment(privateInput);
+    expect(segment).toEqual({ ...mockSegment, isPrivate: true });
+    expect(prisma.segment.upsert).toHaveBeenCalledWith({
+      where: {
+        environmentId_title: {
+          environmentId,
+          title: privateInput.title,
+        },
+      },
+      create: {
+        environmentId,
+        title: privateInput.title,
+        description: undefined,
+        isPrivate: true,
+        filters: [],
+      },
+      update: {
+        description: undefined,
+        filters: [],
+      },
+      select: selectSegment,
+    });
+    expect(prisma.segment.create).not.toHaveBeenCalled();
+  });
+
+  test("should upsert a private segment with surveyId", async () => {
+    const privateInputWithSurvey: TSegmentCreateInput = {
+      ...mockSegmentCreateInput,
+      isPrivate: true,
+      surveyId,
+    };
+    const privateSegmentPrisma = { ...mockSegmentPrisma, isPrivate: true };
+    vi.mocked(prisma.segment.upsert).mockResolvedValue(privateSegmentPrisma);
+    const segment = await createSegment(privateInputWithSurvey);
+    expect(segment).toEqual({ ...mockSegment, isPrivate: true });
+    expect(prisma.segment.upsert).toHaveBeenCalledWith({
+      where: {
+        environmentId_title: {
+          environmentId,
+          title: privateInputWithSurvey.title,
+        },
+      },
+      create: {
+        environmentId,
+        title: privateInputWithSurvey.title,
+        description: undefined,
+        isPrivate: true,
+        filters: [],
+        surveys: { connect: { id: surveyId } },
+      },
+      update: {
+        description: undefined,
+        filters: [],
+        surveys: { connect: { id: surveyId } },
+      },
+      select: selectSegment,
+    });
+    expect(prisma.segment.create).not.toHaveBeenCalled();
+  });
 });
 
 describe("cloneSegment", () => {
@@ -136,28 +136,48 @@ export const createSegment = async (segmentCreateInput: TSegmentCreateInput): Pr
 
   const { description, environmentId, filters, isPrivate, surveyId, title } = segmentCreateInput;
 
-  let data: Prisma.SegmentCreateArgs["data"] = {
-    environmentId,
-    title,
-    description,
-    isPrivate,
-    filters,
-  };
-
-  if (surveyId) {
-    data = {
-      ...data,
-      surveys: {
-        connect: {
-          id: surveyId,
-        },
-      },
-    };
-  }
+  const surveyConnect = surveyId ? { surveys: { connect: { id: surveyId } } } : {};
 
   try {
+    // Private segments use upsert because auto-save may have already created a
+    // default (empty-filter) segment via connectOrCreate before the user publishes.
+    // Without upsert the second create hits the (environmentId, title) unique constraint.
+    if (isPrivate) {
+      const segment = await prisma.segment.upsert({
+        where: {
+          environmentId_title: {
+            environmentId,
+            title,
+          },
+        },
+        create: {
+          environmentId,
+          title,
+          description,
+          isPrivate,
+          filters,
+          ...surveyConnect,
+        },
+        update: {
+          description,
+          filters,
+          ...surveyConnect,
+        },
+        select: selectSegment,
+      });
+
+      return transformPrismaSegment(segment);
+    }
+
     const segment = await prisma.segment.create({
-      data,
+      data: {
+        environmentId,
+        title,
+        description,
+        isPrivate,
+        filters,
+        ...surveyConnect,
+      },
       select: selectSegment,
     });
 
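To picture the race the comment in the hunk above describes, here is a hypothetical call sequence; `createSegment` is the real export, but the import path, the title value, and the wrapper function are invented for illustration:

```typescript
// Hypothetical illustration only — assumes segments.ts exports createSegment.
import { createSegment } from "./segments";

export const demoPrivateSegmentFlow = async (environmentId: string, surveyId: string) => {
  // Auto-save creates the default private segment first...
  await createSegment({ environmentId, surveyId, title: surveyId, isPrivate: true, filters: [] });
  // ...then publishing sends the same (environmentId, title) pair again.
  // With plain `create` this second call would throw Prisma error P2002
  // (unique constraint violation); with `upsert` it updates the existing segment.
  await createSegment({ environmentId, surveyId, title: surveyId, isPrivate: true, filters: [] });
};
```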
@@ -405,14 +405,6 @@ const nextConfig = {
   },
   async rewrites() {
     return [
-      {
-        source: "/hub",
-        destination: "https://hub.stldocs.app",
-      },
-      {
-        source: "/hub/:path*",
-        destination: "https://hub.stldocs.app/:path*",
-      },
       {
         source: "/api/packages/website",
         destination: "/js/formbricks.umd.cjs",
@@ -490,4 +482,5 @@ const sentryOptions = {
 // Runtime Sentry reporting still depends on DSN being set via environment variables
 const exportConfig = process.env.SENTRY_AUTH_TOKEN ? withSentryConfig(nextConfig, sentryOptions) : nextConfig;
 
+
 export default exportConfig;
@@ -4,12 +4,182 @@ description: "Formbricks Self-hosted version migration"
icon: "arrow-right"
---

## v4.7

Formbricks v4.7 introduces **typed contact attributes** with native `number` and `date` data types. This enables comparison-based segment filters (e.g. "signup date before 2025-01-01") that were previously not possible with string-only attribute values.
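For example, a "greater than" comparison on a numeric attribute uses the same segment filter shape exercised by the tests in this compare. A minimal sketch (ids, the attribute key, and the import path are illustrative assumptions):

```typescript
// Shape mirrors the TBaseFilters fixtures in the tests above; import path assumed.
import type { TBaseFilters } from "@formbricks/types/segment";

const ageOver30: TBaseFilters = [
  {
    id: "filter_1",
    connector: null,
    resource: {
      id: "attr_1",
      root: { type: "attribute" as const, contactAttributeKey: "age" },
      value: 30,
      qualifier: { operator: "greaterThan" },
    },
  },
];

// Once the backfill is complete, this compiles to a typed Prisma condition:
// { attributes: { some: { attributeKey: { key: "age" }, valueNumber: { gt: 30 } } } }
```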
### What Happens Automatically

When Formbricks v4.7 starts for the first time, the data migration will:

1. Analyze all existing contact attribute keys and infer their data types (`text`, `number`, or `date`) based on the stored values
2. Update the `ContactAttributeKey` table with the detected `dataType` for each key
3. **If your instance has fewer than 1,000,000 contact attribute rows**: backfill the new `valueNumber` and `valueDate` columns inline. No manual action is needed.
4. **If your instance has 1,000,000 or more contact attribute rows**: the value backfill is skipped to avoid hitting the migration timeout. You will need to run a standalone backfill script after the upgrade (see the row-count sketch below to check where you stand).

<Info>
  Most self-hosted instances have far fewer than 1,000,000 contact attribute rows (a typical setup with 100K
  contacts and 5-10 attributes each lands around 500K-1M rows). If you are below the threshold, the migration
  handles everything automatically and you can skip the manual backfill step below.
</Info>
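To see which side of the threshold your instance is on before upgrading, you can count the rows yourself. A one-off sketch using the Prisma client (assumes you can run a Node script with `@formbricks/database` available; the threshold value comes from the migration described above):

```typescript
import { prisma } from "@formbricks/database";

const main = async (): Promise<void> => {
  // ContactAttribute is the table the value backfill operates on.
  const rows = await prisma.contactAttribute.count();
  console.log(`ContactAttribute rows: ${rows}`);
  console.log(rows < 1_000_000 ? "Inline backfill will run." : "Standalone backfill script needed (step 4).");
};

void main().finally(() => prisma.$disconnect());
```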
### Steps to Migrate

**1. Backup your Database**

<Tabs>
  <Tab title="Docker">
    Before running these steps, navigate to the `formbricks` directory where your `docker-compose.yml` file is located.

    ```bash
    docker exec formbricks-postgres-1 pg_dump -Fc -U postgres -d formbricks > formbricks_pre_v4.7_$(date +%Y%m%d_%H%M%S).dump
    ```

    <Info>
      If you run into "**No such container**", use `docker ps` to find your container name, e.g.
      `formbricks_postgres_1`.
    </Info>
  </Tab>
  <Tab title="Kubernetes">
    If you are using the **in-cluster PostgreSQL** deployed by the Helm chart:

    ```bash
    kubectl exec -n formbricks formbricks-postgresql-0 -- pg_dump -Fc -U formbricks -d formbricks > formbricks_pre_v4.7_$(date +%Y%m%d_%H%M%S).dump
    ```

    <Info>
      If your PostgreSQL pod has a different name, run `kubectl get pods -n formbricks` to find it.
    </Info>

    If you are using a **managed PostgreSQL** service (e.g. AWS RDS, Cloud SQL), use your provider's backup/snapshot feature or run `pg_dump` directly against the external host.
  </Tab>
</Tabs>

**2. Upgrade to Formbricks v4.7**

<Tabs>
  <Tab title="Docker">
    ```bash
    # Pull the latest version
    docker compose pull

    # Stop the current instance
    docker compose down

    # Start with Formbricks v4.7
    docker compose up -d
    ```
  </Tab>
  <Tab title="Kubernetes">
    ```bash
    helm upgrade formbricks oci://ghcr.io/formbricks/helm-charts/formbricks \
      -n formbricks \
      --set deployment.image.tag=v4.7.0
    ```

    <Info>
      The Helm chart includes a migration Job that automatically runs Prisma schema migrations as a
      PreSync hook before the new pods start. No manual migration step is needed.
    </Info>
  </Tab>
</Tabs>

**3. Check the Migration Logs**

After Formbricks starts, check the logs to see whether the value backfill was completed or skipped:

<Tabs>
  <Tab title="Docker">
    ```bash
    docker compose logs formbricks | grep -i "backfill"
    ```
  </Tab>
  <Tab title="Kubernetes">
    ```bash
    # Check the application pod logs
    kubectl logs -n formbricks -l app.kubernetes.io/name=formbricks --tail=200 | grep -i "backfill"
    ```

    If the Helm migration Job ran, you can also inspect its logs:

    ```bash
    kubectl logs -n formbricks job/formbricks-migration
    ```
  </Tab>
</Tabs>

If you see a message like `Skipping value backfill (X rows >= 1000000 threshold)`, proceed to step 4. Otherwise, the migration is complete and no further action is needed.

**4. Run the Backfill Script (large datasets only)**

If the migration skipped the value backfill, run the standalone backfill script inside the running Formbricks container:

<Tabs>
  <Tab title="Docker">
    ```bash
    docker exec formbricks node packages/database/dist/scripts/backfill-attribute-values.js
    ```

    <Info>Replace `formbricks` with your actual container name if it differs. Use `docker ps` to find it.</Info>
  </Tab>
  <Tab title="Kubernetes">
    ```bash
    kubectl exec -n formbricks deploy/formbricks -- node packages/database/dist/scripts/backfill-attribute-values.js
    ```

    <Info>
      If your Formbricks deployment has a different name, run `kubectl get deploy -n formbricks` to find it.
    </Info>
  </Tab>
</Tabs>

The script will output progress as it runs:

```
========================================
Attribute Value Backfill Script
========================================

Fetching number-type attribute keys...
Found 12 number-type keys. Backfilling valueNumber...
Number backfill progress: 10/12 keys (48230 rows updated)
Number backfill progress: 12/12 keys (52104 rows updated)

Fetching date-type attribute keys...
Found 5 date-type keys. Backfilling valueDate...
Date backfill progress: 5/5 keys (31200 rows updated)

========================================
Backfill Complete!
========================================
valueNumber rows updated: 52104
valueDate rows updated: 31200
Duration: 42.3s
========================================
```
Key characteristics of the backfill script:

- **Safe to run while Formbricks is live** -- it does not lock the entire table or wrap work in a long transaction
- **Idempotent** -- it only updates rows where the typed columns are still `NULL`, so you can safely run it multiple times (the batch pattern is sketched after this list)
- **Resumable** -- each batch commits independently, so if the process is interrupted you can re-run it and it picks up where it left off
- **No timeout risk** -- unlike the migration, this script runs outside the migration transaction and has no time limit
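A rough sketch of that idempotent, batch-committed pattern -- not the actual script. Model and field names follow the schema seen elsewhere in this compare; the batch size, cursor strategy, and helper function are assumptions:

```typescript
import { prisma } from "@formbricks/database";

const BATCH_SIZE = 1000;

// Backfill valueNumber for one attribute key, one batch at a time.
// Safe to re-run: only rows with valueNumber still NULL are touched.
export async function backfillValueNumberForKey(attributeKeyId: string): Promise<number> {
  let updated = 0;
  let lastId = "";
  for (;;) {
    // Keyset pagination: only un-migrated rows past the last processed id.
    const batch = await prisma.contactAttribute.findMany({
      where: { attributeKeyId, valueNumber: null, id: { gt: lastId } },
      select: { id: true, value: true },
      orderBy: { id: "asc" },
      take: BATCH_SIZE,
    });
    if (batch.length === 0) return updated;
    lastId = batch[batch.length - 1].id;
    for (const row of batch) {
      const parsed = row.value.trim() === "" ? NaN : Number(row.value);
      if (Number.isNaN(parsed)) continue; // leave non-numeric values untouched
      await prisma.contactAttribute.update({
        where: { id: row.id },
        data: { valueNumber: parsed },
      });
      updated += 1;
    }
  }
}
```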
**5. Verify the Upgrade**

- Access your Formbricks instance at the same URL as before
- If you use contact segments with number or date filters, verify they return the expected results
- Check that existing surveys and response data are intact

---

## v4.0

<Warning>
  **Important: Migration Required**

  Formbricks 4 introduces additional requirements for self-hosting setups and makes a dedicated Redis cache as well as S3-compatible file storage mandatory.
</Warning>

Formbricks 4.0 is a **major milestone** that sets up the technical foundation for future iterations and feature improvements. This release focuses on modernizing core infrastructure components to improve reliability, scalability, and enable advanced features going forward.
@@ -17,9 +187,11 @@ Formbricks 4.0 is a **major milestone** that sets up the technical foundation fo
### What's New in Formbricks 4.0

**🚀 New Enterprise Features:**

- **Quotas Management**: Advanced quota controls for enterprise users

**🏗️ Technical Foundation Improvements:**

- **Enhanced File Storage**: Improved file handling with better performance and reliability
- **Improved Caching**: New caching functionality improving speed, extensibility and reliability
- **Database Optimization**: Removal of unused database tables and fields for better performance
@@ -39,7 +211,8 @@ These services are already included in the updated one-click setup for self-host
We know this represents more moving parts in your infrastructure and might even introduce more complexity in hosting Formbricks, and we don't take this decision lightly. As Formbricks grows into a comprehensive Survey and Experience Management platform, we've reached a point where the simple, single-service approach was holding back our ability to deliver the reliable, feature-rich product our users demand and deserve.

By moving to dedicated, professional-grade services for these critical functions, we're building the foundation needed to deliver:

- **Enterprise-grade reliability** with proper redundancy and backup capabilities
- **Advanced features** that require sophisticated caching and file processing
- **Better performance** through optimized, dedicated services
- **Future scalability** to support larger deployments and more complex use cases without the need to maintain two different approaches
@@ -52,7 +225,7 @@ Additional migration steps are needed if you are using a self-hosted Formbricks
### One-Click Setup

For users of our official one-click setup, we provide an automated migration script:

```bash
# Download the latest script
@@ -67,11 +240,11 @@ chmod +x migrate-to-v4.sh
```

This script guides you through the infrastructure migration and does the following:

- Adds a Redis service to your setup and configures it
- Adds a MinIO service (open source S3-alternative) to your setup, configures it and migrates local files to it
- Pulls the latest Formbricks image and updates your instance

### Manual Setup

If you use a different setup to host your Formbricks instance, you need to make the necessary adjustments to run Formbricks 4.0.
@@ -87,6 +260,7 @@ You need to configure the `REDIS_URL` environment variable and point it to your
To use file storage (e.g., file upload questions, image choice questions, custom survey backgrounds, etc.), you need to have S3-compatible file storage set up and connected to Formbricks.

Formbricks supports multiple storage providers (among many other S3-compatible storages):

- AWS S3
- Digital Ocean Spaces
- Hetzner Object Storage
@@ -101,6 +275,7 @@ Please make sure to set up a storage bucket with one of these solutions and then
S3_BUCKET_NAME: formbricks-uploads
S3_ENDPOINT_URL: http://minio:9000 # not needed for AWS S3
```

#### Upgrade Process

**1. Backup your Database**
@@ -112,8 +287,8 @@ docker exec formbricks-postgres-1 pg_dump -Fc -U postgres -d formbricks > formbr
```

<Info>
  If you run into "**No such container**", use `docker ps` to find your container name, e.g.
  `formbricks_postgres_1`.
</Info>

**2. Upgrade to Formbricks 4.0**
@@ -134,6 +309,7 @@ docker compose up -d
**3. Automatic Database Migration**

When you start Formbricks 4.0 for the first time, it will **automatically**:

- Detect and apply required database schema updates
- Remove unused database tables and fields
- Optimize the database structure for better performance