Mirror of https://github.com/formbricks/formbricks.git (synced 2026-02-20 10:09:20 -06:00)

Compare commits (13 commits): feat/crud- ... cursor/cus

| SHA1 |
|---|
| 0a1816786b |
| 219883266c |
| 55fc2b2bc8 |
| 6e4ef9a099 |
| ebf7d1e3a1 |
| 998162bc48 |
| 4fadc54b4e |
| f4ac9a8292 |
| 7c8a7606b7 |
| 225217330b |
| 589c04a530 |
| aa538a3a51 |
| 817e108ff5 |

.github/workflows/translation-check.yml (vendored, 42 changes)
@@ -6,19 +6,9 @@ permissions:
on:
pull_request:
types: [opened, synchronize, reopened]
paths:
- "apps/web/**/*.ts"
- "apps/web/**/*.tsx"
- "apps/web/locales/**/*.json"
- "scan-translations.ts"
push:
branches:
- main
paths:
- "apps/web/**/*.ts"
- "apps/web/**/*.tsx"
- "apps/web/locales/**/*.json"
- "scan-translations.ts"

jobs:
validate-translations:
@@ -33,30 +23,38 @@ jobs:
egress-policy: audit

- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Check for relevant changes
id: changes
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
with:
fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
filters: |
translations:
- 'apps/web/**/*.ts'
- 'apps/web/**/*.tsx'
- 'apps/web/locales/**/*.json'
- 'packages/surveys/src/**/*.{ts,tsx}'
- 'packages/surveys/locales/**/*.json'
- 'packages/email/**/*.{ts,tsx}'

- name: Setup Node.js 22.x
if: steps.changes.outputs.translations == 'true'
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af
with:
node-version: 22.x

- name: Install pnpm
if: steps.changes.outputs.translations == 'true'
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Install dependencies
if: steps.changes.outputs.translations == 'true'
run: pnpm install --config.platform=linux --config.architecture=x64

- name: Validate translation keys
run: |
echo ""
echo "🔍 Validating translation keys..."
echo ""
pnpm run scan-translations
if: steps.changes.outputs.translations == 'true'
run: pnpm run scan-translations

- name: Summary
if: success()
run: |
echo ""
echo "✅ Translation validation completed successfully!"
echo ""
- name: Skip (no translation-related changes)
if: steps.changes.outputs.translations != 'true'
run: echo "No translation-related files changed — skipping validation."
@@ -1,40 +1 @@
# Load environment variables from .env files
if [ -f .env ]; then
set -a
. .env
set +a
fi

pnpm lint-staged

# Run Lingo.dev i18n workflow if LINGODOTDEV_API_KEY is set
if [ -n "$LINGODOTDEV_API_KEY" ]; then
echo ""
echo "🌍 Running Lingo.dev translation workflow..."
echo ""

# Run translation generation and validation
if pnpm run i18n; then
echo ""
echo "✅ Translation validation passed"
echo ""
# Add updated locale files to git
git add apps/web/locales/*.json
else
echo ""
echo "❌ Translation validation failed!"
echo ""
echo "Please fix the translation issues above before committing:"
echo " • Add missing translation keys to your locale files"
echo " • Remove unused translation keys"
echo ""
echo "Or run 'pnpm i18n' to see the detailed report"
echo ""
exit 1
fi
else
echo ""
echo "⚠️ Skipping translation validation: LINGODOTDEV_API_KEY is not set"
echo " (This is expected for community contributors)"
echo ""
fi
pnpm lint-staged
@@ -30,7 +30,7 @@ export const NotificationSwitch = ({
const isChecked =
notificationType === "unsubscribedOrganizationIds"
? !notificationSettings.unsubscribedOrganizationIds?.includes(surveyOrProjectOrOrganizationId)
: notificationSettings[notificationType][surveyOrProjectOrOrganizationId] === true;
: notificationSettings[notificationType]?.[surveyOrProjectOrOrganizationId] === true;

const handleSwitchChange = async () => {
setIsLoading(true);
@@ -49,8 +49,11 @@ export const NotificationSwitch = ({
];
}
} else {
updatedNotificationSettings[notificationType][surveyOrProjectOrOrganizationId] =
!updatedNotificationSettings[notificationType][surveyOrProjectOrOrganizationId];
updatedNotificationSettings[notificationType] = {
...updatedNotificationSettings[notificationType],
[surveyOrProjectOrOrganizationId]:
!updatedNotificationSettings[notificationType]?.[surveyOrProjectOrOrganizationId],
};
}

const updatedNotificationSettingsActionResponse = await updateNotificationSettingsAction({
@@ -78,7 +81,7 @@ export const NotificationSwitch = ({
) {
switch (notificationType) {
case "alert":
if (notificationSettings[notificationType][surveyOrProjectOrOrganizationId] === true) {
if (notificationSettings[notificationType]?.[surveyOrProjectOrOrganizationId] === true) {
handleSwitchChange();
toast.success(
t(
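The NotificationSwitch hunks above swap direct bracket access for optional chaining and replace the in-place mutation with a spread copy. A minimal sketch of why that matters, assuming a settings map in which a notification type may not have an entry yet (the type and variable names below are illustrative, not taken from the diff):

// Illustrative sketch, not part of the diff.
type NotificationSettingsSketch = Record<string, Record<string, boolean> | undefined>;

const settings: NotificationSettingsSketch = { alert: { "survey-1": true } };

// Direct access throws if the notification type has no entry yet:
// settings["weeklySummary"]["survey-1"] -> TypeError: Cannot read properties of undefined

// Optional chaining degrades to undefined instead:
const isChecked = settings["weeklySummary"]?.["survey-1"] === true; // false

// Immutable toggle: copy the nested object instead of mutating it in place,
// which also creates the missing map on the fly and plays nicely with React state updates.
const updated: NotificationSettingsSketch = {
  ...settings,
  weeklySummary: {
    ...settings["weeklySummary"],
    "survey-1": !settings["weeklySummary"]?.["survey-1"],
  },
};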
@@ -1,12 +1,49 @@
"use server";

import { z } from "zod";
import { ZIntegrationGoogleSheets } from "@formbricks/types/integration/google-sheet";
import { getSpreadsheetNameById } from "@/lib/googleSheet/service";
import { ZId } from "@formbricks/types/common";
import {
TIntegrationGoogleSheets,
ZIntegrationGoogleSheets,
} from "@formbricks/types/integration/google-sheet";
import { getSpreadsheetNameById, validateGoogleSheetsConnection } from "@/lib/googleSheet/service";
import { getIntegrationByType } from "@/lib/integration/service";
import { authenticatedActionClient } from "@/lib/utils/action-client";
import { checkAuthorizationUpdated } from "@/lib/utils/action-client/action-client-middleware";
import { getOrganizationIdFromEnvironmentId, getProjectIdFromEnvironmentId } from "@/lib/utils/helper";

const ZValidateGoogleSheetsConnectionAction = z.object({
environmentId: ZId,
});

export const validateGoogleSheetsConnectionAction = authenticatedActionClient
.schema(ZValidateGoogleSheetsConnectionAction)
.action(async ({ ctx, parsedInput }) => {
await checkAuthorizationUpdated({
userId: ctx.user.id,
organizationId: await getOrganizationIdFromEnvironmentId(parsedInput.environmentId),
access: [
{
type: "organization",
roles: ["owner", "manager"],
},
{
type: "projectTeam",
projectId: await getProjectIdFromEnvironmentId(parsedInput.environmentId),
minPermission: "readWrite",
},
],
});

const integration = await getIntegrationByType(parsedInput.environmentId, "googleSheets");
if (!integration) {
return { data: false };
}

await validateGoogleSheetsConnection(integration as TIntegrationGoogleSheets);
return { data: true };
});

const ZGetSpreadsheetNameByIdAction = z.object({
googleSheetIntegration: ZIntegrationGoogleSheets,
environmentId: z.string(),
@@ -20,6 +20,10 @@ import {
isValidGoogleSheetsUrl,
} from "@/app/(app)/environments/[environmentId]/workspace/integrations/google-sheets/lib/util";
import GoogleSheetLogo from "@/images/googleSheetsLogo.png";
import {
GOOGLE_SHEET_INTEGRATION_INSUFFICIENT_PERMISSION,
GOOGLE_SHEET_INTEGRATION_INVALID_GRANT,
} from "@/lib/googleSheet/constants";
import { getFormattedErrorMessage } from "@/lib/utils/helper";
import { recallToHeadline } from "@/lib/utils/recall";
import { getElementsFromBlocks } from "@/modules/survey/lib/client-utils";
@@ -118,6 +122,17 @@ export const AddIntegrationModal = ({
resetForm();
}, [selectedIntegration, surveys]);

const showErrorMessageToast = (response: Awaited<ReturnType<typeof getSpreadsheetNameByIdAction>>) => {
const errorMessage = getFormattedErrorMessage(response);
if (errorMessage === GOOGLE_SHEET_INTEGRATION_INVALID_GRANT) {
toast.error(t("environments.integrations.google_sheets.token_expired_error"));
} else if (errorMessage === GOOGLE_SHEET_INTEGRATION_INSUFFICIENT_PERMISSION) {
toast.error(t("environments.integrations.google_sheets.spreadsheet_permission_error"));
} else {
toast.error(errorMessage);
}
};

const linkSheet = async () => {
try {
if (!isValidGoogleSheetsUrl(spreadsheetUrl)) {
@@ -129,6 +144,7 @@ export const AddIntegrationModal = ({
if (selectedElements.length === 0) {
throw new Error(t("environments.integrations.select_at_least_one_question_error"));
}
setIsLinkingSheet(true);
const spreadsheetId = extractSpreadsheetIdFromUrl(spreadsheetUrl);
const spreadsheetNameResponse = await getSpreadsheetNameByIdAction({
googleSheetIntegration,
@@ -137,13 +153,11 @@ export const AddIntegrationModal = ({
});

if (!spreadsheetNameResponse?.data) {
const errorMessage = getFormattedErrorMessage(spreadsheetNameResponse);
throw new Error(errorMessage);
showErrorMessageToast(spreadsheetNameResponse);
return;
}

const spreadsheetName = spreadsheetNameResponse.data;

setIsLinkingSheet(true);
integrationData.spreadsheetId = spreadsheetId;
integrationData.spreadsheetName = spreadsheetName;
integrationData.surveyId = selectedSurvey.id;
@@ -280,7 +294,7 @@ export const AddIntegrationModal = ({
<div className="space-y-4">
<div>
<Label htmlFor="Surveys">{t("common.questions")}</Label>
<div className="mt-1 max-h-[15vh] overflow-x-hidden overflow-y-auto rounded-lg border border-slate-200">
<div className="mt-1 max-h-[15vh] overflow-y-auto overflow-x-hidden rounded-lg border border-slate-200">
<div className="grid content-center rounded-lg bg-slate-50 p-3 text-left text-sm text-slate-900">
{surveyElements.map((question) => (
<div key={question.id} className="my-1 flex items-center space-x-2">
@@ -1,6 +1,6 @@
"use client";

import { useState } from "react";
import { useCallback, useEffect, useState } from "react";
import { TEnvironment } from "@formbricks/types/environment";
import {
TIntegrationGoogleSheets,
@@ -8,9 +8,11 @@ import {
} from "@formbricks/types/integration/google-sheet";
import { TSurvey } from "@formbricks/types/surveys/types";
import { TUserLocale } from "@formbricks/types/user";
import { validateGoogleSheetsConnectionAction } from "@/app/(app)/environments/[environmentId]/workspace/integrations/google-sheets/actions";
import { ManageIntegration } from "@/app/(app)/environments/[environmentId]/workspace/integrations/google-sheets/components/ManageIntegration";
import { authorize } from "@/app/(app)/environments/[environmentId]/workspace/integrations/google-sheets/lib/google";
import googleSheetLogo from "@/images/googleSheetsLogo.png";
import { GOOGLE_SHEET_INTEGRATION_INVALID_GRANT } from "@/lib/googleSheet/constants";
import { ConnectIntegration } from "@/modules/ui/components/connect-integration";
import { AddIntegrationModal } from "./AddIntegrationModal";

@@ -35,10 +37,23 @@ export const GoogleSheetWrapper = ({
googleSheetIntegration ? googleSheetIntegration.config?.key : false
);
const [isModalOpen, setIsModalOpen] = useState<boolean>(false);
const [showReconnectButton, setShowReconnectButton] = useState<boolean>(false);
const [selectedIntegration, setSelectedIntegration] = useState<
(TIntegrationGoogleSheetsConfigData & { index: number }) | null
>(null);

const validateConnection = useCallback(async () => {
if (!isConnected || !googleSheetIntegration) return;
const response = await validateGoogleSheetsConnectionAction({ environmentId: environment.id });
if (response?.serverError === GOOGLE_SHEET_INTEGRATION_INVALID_GRANT) {
setShowReconnectButton(true);
}
}, [environment.id, isConnected, googleSheetIntegration]);

useEffect(() => {
validateConnection();
}, [validateConnection]);

const handleGoogleAuthorization = async () => {
authorize(environment.id, webAppUrl).then((url: string) => {
if (url) {
@@ -64,6 +79,8 @@ export const GoogleSheetWrapper = ({
setOpenAddIntegrationModal={setIsModalOpen}
setIsConnected={setIsConnected}
setSelectedIntegration={setSelectedIntegration}
showReconnectButton={showReconnectButton}
handleGoogleAuthorization={handleGoogleAuthorization}
locale={locale}
/>
</>
@@ -1,6 +1,6 @@
"use client";

import { Trash2Icon } from "lucide-react";
import { RefreshCcwIcon, Trash2Icon } from "lucide-react";
import { useState } from "react";
import toast from "react-hot-toast";
import { useTranslation } from "react-i18next";
@@ -12,15 +12,19 @@ import { TUserLocale } from "@formbricks/types/user";
import { deleteIntegrationAction } from "@/app/(app)/environments/[environmentId]/workspace/integrations/actions";
import { timeSince } from "@/lib/time";
import { getFormattedErrorMessage } from "@/lib/utils/helper";
import { Alert, AlertButton, AlertDescription } from "@/modules/ui/components/alert";
import { Button } from "@/modules/ui/components/button";
import { DeleteDialog } from "@/modules/ui/components/delete-dialog";
import { EmptyState } from "@/modules/ui/components/empty-state";
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/modules/ui/components/tooltip";

interface ManageIntegrationProps {
googleSheetIntegration: TIntegrationGoogleSheets;
setOpenAddIntegrationModal: (v: boolean) => void;
setIsConnected: (v: boolean) => void;
setSelectedIntegration: (v: (TIntegrationGoogleSheetsConfigData & { index: number }) | null) => void;
showReconnectButton: boolean;
handleGoogleAuthorization: () => void;
locale: TUserLocale;
}

@@ -29,6 +33,8 @@ export const ManageIntegration = ({
setOpenAddIntegrationModal,
setIsConnected,
setSelectedIntegration,
showReconnectButton,
handleGoogleAuthorization,
locale,
}: ManageIntegrationProps) => {
const { t } = useTranslation();
@@ -68,7 +74,17 @@ export const ManageIntegration = ({

return (
<div className="mt-6 flex w-full flex-col items-center justify-center p-6">
<div className="flex w-full justify-end">
{showReconnectButton && (
<Alert variant="warning" size="small" className="mb-4 w-full">
<AlertDescription>
{t("environments.integrations.google_sheets.reconnect_button_description")}
</AlertDescription>
<AlertButton onClick={handleGoogleAuthorization}>
{t("environments.integrations.google_sheets.reconnect_button")}
</AlertButton>
</Alert>
)}
<div className="flex w-full justify-end space-x-2">
<div className="mr-6 flex items-center">
<span className="mr-4 h-4 w-4 rounded-full bg-green-600"></span>
<span className="text-slate-500">
@@ -77,6 +93,19 @@ export const ManageIntegration = ({
})}
</span>
</div>
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<Button variant="outline" onClick={handleGoogleAuthorization}>
<RefreshCcwIcon className="mr-2 h-4 w-4" />
{t("environments.integrations.google_sheets.reconnect_button")}
</Button>
</TooltipTrigger>
<TooltipContent>
{t("environments.integrations.google_sheets.reconnect_button_tooltip")}
</TooltipContent>
</Tooltip>
</TooltipProvider>
<Button
onClick={() => {
setSelectedIntegration(null);
@@ -21,6 +21,7 @@ import { getElementsFromBlocks } from "@/lib/survey/utils";
import { getFormattedDateTimeString } from "@/lib/utils/datetime";
import { parseRecallInfo } from "@/lib/utils/recall";
import { truncateText } from "@/lib/utils/strings";
import { resolveStorageUrlAuto } from "@/modules/storage/utils";

const convertMetaObjectToString = (metadata: TResponseMeta): string => {
let result: string[] = [];
@@ -256,10 +257,16 @@ const processElementResponse = (
const selectedChoiceIds = responseValue as string[];
return element.choices
.filter((choice) => selectedChoiceIds.includes(choice.id))
.map((choice) => choice.imageUrl)
.map((choice) => resolveStorageUrlAuto(choice.imageUrl))
.join("\n");
}

if (element.type === TSurveyElementTypeEnum.FileUpload && Array.isArray(responseValue)) {
return responseValue
.map((url) => (typeof url === "string" ? resolveStorageUrlAuto(url) : url))
.join("; ");
}

return processResponseData(responseValue);
};

@@ -368,7 +375,7 @@ const buildNotionPayloadProperties = (

responses[resp] = (pictureElement as any)?.choices
.filter((choice) => selectedChoiceIds.includes(choice.id))
.map((choice) => choice.imageUrl);
.map((choice) => resolveStorageUrlAuto(choice.imageUrl));
}
});

@@ -18,6 +18,7 @@ import { convertDatesInObject } from "@/lib/time";
import { queueAuditEvent } from "@/modules/ee/audit-logs/lib/handler";
import { TAuditStatus, UNKNOWN_DATA } from "@/modules/ee/audit-logs/types/audit-log";
import { sendResponseFinishedEmail } from "@/modules/email";
import { resolveStorageUrlsInObject } from "@/modules/storage/utils";
import { sendFollowUpsForResponse } from "@/modules/survey/follow-ups/lib/follow-ups";
import { FollowUpSendError } from "@/modules/survey/follow-ups/types/follow-up";
import { handleIntegrations } from "./lib/handleIntegrations";
@@ -95,12 +96,15 @@ export const POST = async (request: Request) => {
]);
};

const resolvedResponseData = resolveStorageUrlsInObject(response.data);

const webhookPromises = webhooks.map((webhook) => {
const body = JSON.stringify({
webhookId: webhook.id,
event,
data: {
...response,
data: resolvedResponseData,
survey: {
title: survey.name,
type: survey.type,
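Several hunks in this compare pipe response and survey payloads through resolveStorageUrlsInObject before they leave the server: the webhook bodies above, the V1 response and survey routes, the JS environment state, and the response download further down. The diff only shows call sites; as a rough mental model, such a helper walks a JSON-like value and rewrites stored file references into resolvable URLs. A hedged sketch under that assumption (the real implementation lives in @/modules/storage/utils and may differ in shape and in how it recognizes storage references):

// Illustrative sketch only; not the helper from "@/modules/storage/utils".
// Assumption: stored file references look like relative "/storage/..." paths
// that need to be prefixed with an absolute base URL.
const STORAGE_BASE_URL = "https://app.example.com"; // hypothetical

const resolveUrlSketch = (value: string): string =>
  value.startsWith("/storage/") ? `${STORAGE_BASE_URL}${value}` : value;

export const resolveStorageUrlsInObjectSketch = <T>(input: T): T => {
  if (typeof input === "string") return resolveUrlSketch(input) as unknown as T;
  if (Array.isArray(input)) {
    return input.map((item) => resolveStorageUrlsInObjectSketch(item)) as unknown as T;
  }
  if (input !== null && typeof input === "object") {
    return Object.fromEntries(
      Object.entries(input as Record<string, unknown>).map(([key, value]) => [
        key,
        resolveStorageUrlsInObjectSketch(value),
      ])
    ) as T;
  }
  return input; // numbers, booleans, null and undefined pass through untouched
};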
@@ -1,5 +1,6 @@
import { google } from "googleapis";
import { getServerSession } from "next-auth";
import { TIntegrationGoogleSheetsConfig } from "@formbricks/types/integration/google-sheet";
import { responses } from "@/app/lib/api/response";
import {
GOOGLE_SHEETS_CLIENT_ID,
@@ -8,7 +9,7 @@ import {
WEBAPP_URL,
} from "@/lib/constants";
import { hasUserEnvironmentAccess } from "@/lib/environment/auth";
import { createOrUpdateIntegration } from "@/lib/integration/service";
import { createOrUpdateIntegration, getIntegrationByType } from "@/lib/integration/service";
import { authOptions } from "@/modules/auth/lib/authOptions";

export const GET = async (req: Request) => {
@@ -42,33 +43,39 @@ export const GET = async (req: Request) => {
if (!redirect_uri) return responses.internalServerErrorResponse("Google redirect url is missing");
const oAuth2Client = new google.auth.OAuth2(client_id, client_secret, redirect_uri);

let key;
let userEmail;

if (code) {
const token = await oAuth2Client.getToken(code);
key = token.res?.data;

// Set credentials using the provided token
oAuth2Client.setCredentials({
access_token: key.access_token,
});

// Fetch user's email
const oauth2 = google.oauth2({
auth: oAuth2Client,
version: "v2",
});
const userInfo = await oauth2.userinfo.get();
userEmail = userInfo.data.email;
if (!code) {
return Response.redirect(
`${WEBAPP_URL}/environments/${environmentId}/workspace/integrations/google-sheets`
);
}

const token = await oAuth2Client.getToken(code);
const key = token.res?.data;
if (!key) {
return Response.redirect(
`${WEBAPP_URL}/environments/${environmentId}/workspace/integrations/google-sheets`
);
}

oAuth2Client.setCredentials({ access_token: key.access_token });
const oauth2 = google.oauth2({ auth: oAuth2Client, version: "v2" });
const userInfo = await oauth2.userinfo.get();
const userEmail = userInfo.data.email;

if (!userEmail) {
return responses.internalServerErrorResponse("Failed to get user email");
}

const integrationType = "googleSheets" as const;
const existingIntegration = await getIntegrationByType(environmentId, integrationType);
const existingConfig = existingIntegration?.config as TIntegrationGoogleSheetsConfig;

const googleSheetIntegration = {
type: "googleSheets" as "googleSheets",
type: integrationType,
environment: environmentId,
config: {
key,
data: [],
data: existingConfig?.data ?? [],
email: userEmail,
},
};
@@ -10,6 +10,7 @@ import {
TJsEnvironmentStateSurvey,
} from "@formbricks/types/js";
import { validateInputs } from "@/lib/utils/validate";
import { resolveStorageUrlsInObject } from "@/modules/storage/utils";
import { transformPrismaSurvey } from "@/modules/survey/lib/utils";

/**
@@ -177,14 +178,14 @@ export const getEnvironmentStateData = async (environmentId: string): Promise<En
overlay: environmentData.project.overlay,
placement: environmentData.project.placement,
inAppSurveyBranding: environmentData.project.inAppSurveyBranding,
styling: environmentData.project.styling,
styling: resolveStorageUrlsInObject(environmentData.project.styling),
},
},
organization: {
id: environmentData.project.organization.id,
billing: environmentData.project.organization.billing,
},
surveys: transformedSurveys,
surveys: resolveStorageUrlsInObject(transformedSurveys),
actionClasses: environmentData.actionClasses as TJsEnvironmentStateActionClass[],
};
} catch (error) {
@@ -44,13 +44,10 @@ const validateResponse = (
...responseUpdateInput.data,
};

const isFinished = responseUpdateInput.finished ?? false;

const validationErrors = validateResponseData(
survey.blocks,
mergedData,
responseUpdateInput.language ?? response.language ?? "en",
isFinished,
survey.questions
);

@@ -41,7 +41,6 @@ const validateResponse = (responseInputData: TResponseInput, survey: TSurvey) =>
survey.blocks,
responseInputData.data,
responseInputData.language ?? "en",
responseInputData.finished,
survey.questions
);
@@ -10,7 +10,7 @@ import { deleteResponse, getResponse } from "@/lib/response/service";
import { getSurvey } from "@/lib/survey/service";
import { formatValidationErrorsForV1Api, validateResponseData } from "@/modules/api/lib/validation";
import { hasPermission } from "@/modules/organization/settings/api-keys/lib/utils";
import { validateFileUploads } from "@/modules/storage/utils";
import { resolveStorageUrlsInObject, validateFileUploads } from "@/modules/storage/utils";
import { updateResponseWithQuotaEvaluation } from "./lib/response";

async function fetchAndAuthorizeResponse(
@@ -57,7 +57,10 @@ export const GET = withV1ApiWrapper({
}

return {
response: responses.successResponse(result.response),
response: responses.successResponse({
...result.response,
data: resolveStorageUrlsInObject(result.response.data),
}),
};
} catch (error) {
return {
@@ -146,7 +149,6 @@ export const PUT = withV1ApiWrapper({
result.survey.blocks,
responseUpdate.data,
responseUpdate.language ?? "en",
responseUpdate.finished,
result.survey.questions
);

@@ -190,7 +192,7 @@ export const PUT = withV1ApiWrapper({
}

return {
response: responses.successResponse(updated),
response: responses.successResponse({ ...updated, data: resolveStorageUrlsInObject(updated.data) }),
};
} catch (error) {
return {
@@ -9,7 +9,7 @@ import { sendToPipeline } from "@/app/lib/pipelines";
import { getSurvey } from "@/lib/survey/service";
import { formatValidationErrorsForV1Api, validateResponseData } from "@/modules/api/lib/validation";
import { hasPermission } from "@/modules/organization/settings/api-keys/lib/utils";
import { validateFileUploads } from "@/modules/storage/utils";
import { resolveStorageUrlsInObject, validateFileUploads } from "@/modules/storage/utils";
import {
createResponseWithQuotaEvaluation,
getResponses,
@@ -54,7 +54,9 @@ export const GET = withV1ApiWrapper({
allResponses.push(...environmentResponses);
}
return {
response: responses.successResponse(allResponses),
response: responses.successResponse(
allResponses.map((r) => ({ ...r, data: resolveStorageUrlsInObject(r.data) }))
),
};
} catch (error) {
if (error instanceof DatabaseError) {
@@ -155,7 +157,6 @@ export const POST = withV1ApiWrapper({
surveyResult.survey.blocks,
responseInput.data,
responseInput.language ?? "en",
responseInput.finished,
surveyResult.survey.questions
);
@@ -16,6 +16,7 @@ import { TApiAuditLog, TApiKeyAuthentication, withV1ApiWrapper } from "@/app/lib
import { getOrganizationByEnvironmentId } from "@/lib/organization/service";
import { getSurvey, updateSurvey } from "@/lib/survey/service";
import { hasPermission } from "@/modules/organization/settings/api-keys/lib/utils";
import { resolveStorageUrlsInObject } from "@/modules/storage/utils";

const fetchAndAuthorizeSurvey = async (
surveyId: string,
@@ -58,16 +59,18 @@ export const GET = withV1ApiWrapper({

if (shouldTransformToQuestions) {
return {
response: responses.successResponse({
...result.survey,
questions: transformBlocksToQuestions(result.survey.blocks, result.survey.endings),
blocks: [],
}),
response: responses.successResponse(
resolveStorageUrlsInObject({
...result.survey,
questions: transformBlocksToQuestions(result.survey.blocks, result.survey.endings),
blocks: [],
})
),
};
}

return {
response: responses.successResponse(result.survey),
response: responses.successResponse(resolveStorageUrlsInObject(result.survey)),
};
} catch (error) {
return {
@@ -202,12 +205,12 @@ export const PUT = withV1ApiWrapper({
};

return {
response: responses.successResponse(surveyWithQuestions),
response: responses.successResponse(resolveStorageUrlsInObject(surveyWithQuestions)),
};
}

return {
response: responses.successResponse(updatedSurvey),
response: responses.successResponse(resolveStorageUrlsInObject(updatedSurvey)),
};
} catch (error) {
return {
@@ -14,6 +14,7 @@ import { TApiAuditLog, TApiKeyAuthentication, withV1ApiWrapper } from "@/app/lib
import { getOrganizationByEnvironmentId } from "@/lib/organization/service";
import { createSurvey } from "@/lib/survey/service";
import { hasPermission } from "@/modules/organization/settings/api-keys/lib/utils";
import { resolveStorageUrlsInObject } from "@/modules/storage/utils";
import { getSurveys } from "./lib/surveys";

export const GET = withV1ApiWrapper({
@@ -55,7 +56,7 @@ export const GET = withV1ApiWrapper({
});

return {
response: responses.successResponse(surveysWithQuestions),
response: responses.successResponse(resolveStorageUrlsInObject(surveysWithQuestions)),
};
} catch (error) {
if (error instanceof DatabaseError) {
@@ -112,7 +112,6 @@ export const POST = async (request: Request, context: Context): Promise<Response
survey.blocks,
responseInputData.data,
responseInputData.language ?? "en",
responseInputData.finished,
survey.questions
);
@@ -257,6 +257,7 @@ describe("endpoint-validator", () => {
expect(isAuthProtectedRoute("/api/v1/client/test")).toBe(false);
expect(isAuthProtectedRoute("/")).toBe(false);
expect(isAuthProtectedRoute("/s/survey123")).toBe(false);
expect(isAuthProtectedRoute("/p/pretty-url")).toBe(false);
expect(isAuthProtectedRoute("/c/jwt-token")).toBe(false);
expect(isAuthProtectedRoute("/health")).toBe(false);
});
@@ -312,6 +313,19 @@ describe("endpoint-validator", () => {
expect(isPublicDomainRoute("/c")).toBe(false);
expect(isPublicDomainRoute("/contact/token")).toBe(false);
});

test("should return true for pretty URL survey routes", () => {
expect(isPublicDomainRoute("/p/pretty123")).toBe(true);
expect(isPublicDomainRoute("/p/pretty-name-with-dashes")).toBe(true);
expect(isPublicDomainRoute("/p/survey_id_with_underscores")).toBe(true);
expect(isPublicDomainRoute("/p/abc123def456")).toBe(true);
});

test("should return false for malformed pretty URL survey routes", () => {
expect(isPublicDomainRoute("/p/")).toBe(false);
expect(isPublicDomainRoute("/p")).toBe(false);
expect(isPublicDomainRoute("/pretty/123")).toBe(false);
});

test("should return true for client API routes", () => {
expect(isPublicDomainRoute("/api/v1/client/something")).toBe(true);
@@ -375,6 +389,8 @@ describe("endpoint-validator", () => {
expect(isAdminDomainRoute("/s/survey-id-with-dashes")).toBe(false);
expect(isAdminDomainRoute("/c/jwt-token")).toBe(false);
expect(isAdminDomainRoute("/c/very-long-jwt-token-123")).toBe(false);
expect(isAdminDomainRoute("/p/pretty123")).toBe(false);
expect(isAdminDomainRoute("/p/pretty-name-with-dashes")).toBe(false);
expect(isAdminDomainRoute("/api/v1/client/test")).toBe(false);
expect(isAdminDomainRoute("/api/v2/client/other")).toBe(false);
});
@@ -390,6 +406,7 @@ describe("endpoint-validator", () => {
test("should allow public routes on public domain", () => {
expect(isRouteAllowedForDomain("/s/survey123", true)).toBe(true);
expect(isRouteAllowedForDomain("/c/jwt-token", true)).toBe(true);
expect(isRouteAllowedForDomain("/p/pretty123", true)).toBe(true);
expect(isRouteAllowedForDomain("/api/v1/client/test", true)).toBe(true);
expect(isRouteAllowedForDomain("/api/v2/client/other", true)).toBe(true);
expect(isRouteAllowedForDomain("/health", true)).toBe(true);
@@ -426,6 +443,8 @@ describe("endpoint-validator", () => {
expect(isRouteAllowedForDomain("/s/survey-id-with-dashes", false)).toBe(false);
expect(isRouteAllowedForDomain("/c/jwt-token", false)).toBe(false);
expect(isRouteAllowedForDomain("/c/very-long-jwt-token-123", false)).toBe(false);
expect(isRouteAllowedForDomain("/p/pretty123", false)).toBe(false);
expect(isRouteAllowedForDomain("/p/pretty-name-with-dashes", false)).toBe(false);
expect(isRouteAllowedForDomain("/api/v1/client/test", false)).toBe(false);
expect(isRouteAllowedForDomain("/api/v2/client/other", false)).toBe(false);
});
@@ -440,6 +459,8 @@ describe("endpoint-validator", () => {
test("should handle paths with query parameters and fragments", () => {
expect(isRouteAllowedForDomain("/s/survey123?param=value", true)).toBe(true);
expect(isRouteAllowedForDomain("/s/survey123#section", true)).toBe(true);
expect(isRouteAllowedForDomain("/p/pretty123?param=value", true)).toBe(true);
expect(isRouteAllowedForDomain("/p/pretty123#section", true)).toBe(true);
expect(isRouteAllowedForDomain("/environments/123?tab=settings", true)).toBe(false);
expect(isRouteAllowedForDomain("/environments/123?tab=settings", false)).toBe(true);
});
@@ -450,6 +471,7 @@ describe("endpoint-validator", () => {
describe("URL parsing edge cases", () => {
test("should handle paths with query parameters", () => {
expect(isPublicDomainRoute("/s/survey123?param=value&other=test")).toBe(true);
expect(isPublicDomainRoute("/p/pretty123?param=value&other=test")).toBe(true);
expect(isPublicDomainRoute("/api/v1/client/test?query=data")).toBe(true);
expect(isPublicDomainRoute("/environments/123?tab=settings")).toBe(false);
expect(isAuthProtectedRoute("/environments/123?tab=overview")).toBe(true);
@@ -458,12 +480,14 @@ describe("endpoint-validator", () => {
test("should handle paths with fragments", () => {
expect(isPublicDomainRoute("/s/survey123#section")).toBe(true);
expect(isPublicDomainRoute("/c/jwt-token#top")).toBe(true);
expect(isPublicDomainRoute("/p/pretty123#section")).toBe(true);
expect(isPublicDomainRoute("/environments/123#overview")).toBe(false);
expect(isAuthProtectedRoute("/organizations/456#settings")).toBe(true);
});

test("should handle trailing slashes", () => {
expect(isPublicDomainRoute("/s/survey123/")).toBe(true);
expect(isPublicDomainRoute("/p/pretty123/")).toBe(true);
expect(isPublicDomainRoute("/api/v1/client/test/")).toBe(true);
expect(isManagementApiRoute("/api/v1/management/test/")).toEqual({
isManagementApi: true,
@@ -478,6 +502,9 @@ describe("endpoint-validator", () => {
expect(isPublicDomainRoute("/s/survey123/preview")).toBe(true);
expect(isPublicDomainRoute("/s/survey123/embed")).toBe(true);
expect(isPublicDomainRoute("/s/survey123/thank-you")).toBe(true);
expect(isPublicDomainRoute("/p/pretty123/preview")).toBe(true);
expect(isPublicDomainRoute("/p/pretty123/embed")).toBe(true);
expect(isPublicDomainRoute("/p/pretty123/thank-you")).toBe(true);
});

test("should handle nested client API routes", () => {
@@ -529,6 +556,7 @@ describe("endpoint-validator", () => {
test("should handle special characters in survey IDs", () => {
expect(isPublicDomainRoute("/s/survey-123_test.v2")).toBe(true);
expect(isPublicDomainRoute("/c/jwt.token.with.dots")).toBe(true);
expect(isPublicDomainRoute("/p/pretty-123_test.v2")).toBe(true);
});
});
@@ -536,6 +564,7 @@ describe("endpoint-validator", () => {
test("should properly validate malicious or injection-like URLs", () => {
// SQL injection-like attempts
expect(isPublicDomainRoute("/s/'; DROP TABLE users; --")).toBe(true); // Still valid survey ID format
expect(isPublicDomainRoute("/p/'; DROP TABLE users; --")).toBe(true);
expect(isManagementApiRoute("/api/v1/management/'; DROP TABLE users; --")).toEqual({
isManagementApi: true,
authenticationMethod: AuthenticationMethod.ApiKey,
@@ -543,10 +572,12 @@ describe("endpoint-validator", () => {

// Path traversal attempts
expect(isPublicDomainRoute("/s/../../../etc/passwd")).toBe(true); // Still matches pattern
expect(isPublicDomainRoute("/p/../../../etc/passwd")).toBe(true);
expect(isAuthProtectedRoute("/environments/../../../etc/passwd")).toBe(true);

// XSS-like attempts
expect(isPublicDomainRoute("/s/<script>alert('xss')</script>")).toBe(true);
expect(isPublicDomainRoute("/p/<script>alert('xss')</script>")).toBe(true);
expect(isClientSideApiRoute("/api/v1/client/<script>alert('xss')</script>")).toEqual({
isClientSideApi: true,
isRateLimited: true,
@@ -556,6 +587,7 @@ describe("endpoint-validator", () => {
test("should handle URL encoding", () => {
expect(isPublicDomainRoute("/s/survey%20123")).toBe(true);
expect(isPublicDomainRoute("/c/jwt%2Etoken")).toBe(true);
expect(isPublicDomainRoute("/p/pretty%20123")).toBe(true);
expect(isAuthProtectedRoute("/environments%2F123")).toBe(true);
expect(isManagementApiRoute("/api/v1/management/test%20route")).toEqual({
isManagementApi: true,
@@ -591,6 +623,7 @@ describe("endpoint-validator", () => {
// These should not match due to case sensitivity
expect(isPublicDomainRoute("/S/survey123")).toBe(false);
expect(isPublicDomainRoute("/C/jwt-token")).toBe(false);
expect(isPublicDomainRoute("/P/pretty123")).toBe(false);
expect(isClientSideApiRoute("/API/V1/CLIENT/test")).toEqual({
isClientSideApi: false,
isRateLimited: true,
@@ -7,6 +7,7 @@ const PUBLIC_ROUTES = {
SURVEY_ROUTES: [
/^\/s\/[^/]+/, // /s/[surveyId] - survey pages
/^\/c\/[^/]+/, // /c/[jwt] - contact survey pages
/^\/p\/[^/]+/, // /p/[prettyUrl] - pretty URL pages
],

// API routes accessible from public domain
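The new /^\/p\/[^/]+/ pattern is what makes the /p/... expectations in the test file above pass. A minimal sketch of how a path could be tested against these patterns (illustrative only; the actual isPublicDomainRoute implementation is not part of this hunk):

// Illustrative sketch mirroring the route patterns added above, not the real implementation.
const SURVEY_ROUTES_SKETCH: RegExp[] = [
  /^\/s\/[^/]+/, // /s/[surveyId] - survey pages
  /^\/c\/[^/]+/, // /c/[jwt] - contact survey pages
  /^\/p\/[^/]+/, // /p/[prettyUrl] - pretty URL pages
];

const isSurveyRouteSketch = (pathname: string): boolean =>
  SURVEY_ROUTES_SKETCH.some((pattern) => pattern.test(pathname));

// isSurveyRouteSketch("/p/pretty123") -> true
// isSurveyRouteSketch("/p/")          -> false (at least one non-slash character is required)
// isSurveyRouteSketch("/pretty/123")  -> false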
@@ -1,5 +1,12 @@
import { LinkSurveyLayout, viewport } from "@/modules/survey/link/layout";
import { Viewport } from "next";
import { LinkSurveyLayout } from "@/modules/survey/link/layout";

export { viewport };
export const viewport: Viewport = {
width: "device-width",
initialScale: 1.0,
maximumScale: 1.0,
userScalable: false,
viewportFit: "contain",
};

export default LinkSurveyLayout;
@@ -711,7 +711,12 @@ checksums:
environments/integrations/google_sheets/link_google_sheet: fa78146ae26ce5b1d2aaf2678f628943
environments/integrations/google_sheets/link_new_sheet: 8ad2ea8708f50ed184c00b84577b325e
environments/integrations/google_sheets/no_integrations_yet: ea46f7747937baf48a47a4c1b1776aee
environments/integrations/google_sheets/reconnect_button: 8992a0f250278c116cb26be448b68ba2
environments/integrations/google_sheets/reconnect_button_description: 851fd2fda57211293090f371d5b2c734
environments/integrations/google_sheets/reconnect_button_tooltip: 210dd97470fde8264d2c076db3c98fde
environments/integrations/google_sheets/spreadsheet_permission_error: 94f0007a187d3b9a7ab8200fe26aad20
environments/integrations/google_sheets/spreadsheet_url: b1665f96e6ecce23ea2d9196f4a3e5dd
environments/integrations/google_sheets/token_expired_error: 555d34c18c554ec8ac66614f21bd44fc
environments/integrations/include_created_at: 8011355b13e28e638d74e6f3d68a2bbf
environments/integrations/include_hidden_fields: 25f0ea5ca1c6ead2cd121f8754cb8d72
environments/integrations/include_metadata: 750091d965d7cc8d02468b5239816dc5
apps/web/lib/googleSheet/constants.ts (Normal file, 6 changes)

@@ -0,0 +1,6 @@
/**
* Error codes returned by Google Sheets integration.
* Use these constants when comparing error responses to avoid typos and enable reuse.
*/
export const GOOGLE_SHEET_INTEGRATION_INVALID_GRANT = "invalid_grant";
export const GOOGLE_SHEET_INTEGRATION_INSUFFICIENT_PERMISSION = "insufficient_permission";
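These two string codes are what the client components and the service layer in this compare check against: "invalid_grant" drives the reconnect banner and the token-expired toast, "insufficient_permission" drives the spreadsheet-permission toast. A small hedged usage sketch, with a hypothetical errorMessage variable standing in for whatever the formatted server error returns:

// Illustrative sketch showing the intended comparison pattern; not code from the diff.
import {
  GOOGLE_SHEET_INTEGRATION_INSUFFICIENT_PERMISSION,
  GOOGLE_SHEET_INTEGRATION_INVALID_GRANT,
} from "@/lib/googleSheet/constants";

const describeGoogleSheetsError = (errorMessage: string): string => {
  if (errorMessage === GOOGLE_SHEET_INTEGRATION_INVALID_GRANT) {
    return "Token expired or revoked: ask the user to reconnect the integration.";
  }
  if (errorMessage === GOOGLE_SHEET_INTEGRATION_INSUFFICIENT_PERMISSION) {
    return "The connected Google account cannot access this spreadsheet.";
  }
  return errorMessage; // fall back to the raw message for unknown errors
};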
@@ -2,7 +2,12 @@ import "server-only";
import { Prisma } from "@prisma/client";
import { z } from "zod";
import { ZString } from "@formbricks/types/common";
import { DatabaseError, UnknownError } from "@formbricks/types/errors";
import {
AuthenticationError,
DatabaseError,
OperationNotAllowedError,
UnknownError,
} from "@formbricks/types/errors";
import {
TIntegrationGoogleSheets,
ZIntegrationGoogleSheets,
@@ -11,8 +16,12 @@ import {
GOOGLE_SHEETS_CLIENT_ID,
GOOGLE_SHEETS_CLIENT_SECRET,
GOOGLE_SHEETS_REDIRECT_URL,
GOOGLE_SHEET_MESSAGE_LIMIT,
} from "@/lib/constants";
import { GOOGLE_SHEET_MESSAGE_LIMIT } from "@/lib/constants";
import {
GOOGLE_SHEET_INTEGRATION_INSUFFICIENT_PERMISSION,
GOOGLE_SHEET_INTEGRATION_INVALID_GRANT,
} from "@/lib/googleSheet/constants";
import { createOrUpdateIntegration } from "@/lib/integration/service";
import { truncateText } from "../utils/strings";
import { validateInputs } from "../utils/validate";
@@ -81,6 +90,17 @@ export const writeData = async (
}
};

export const validateGoogleSheetsConnection = async (
googleSheetIntegrationData: TIntegrationGoogleSheets
): Promise<void> => {
validateInputs([googleSheetIntegrationData, ZIntegrationGoogleSheets]);
const integrationData = structuredClone(googleSheetIntegrationData);
integrationData.config.data.forEach((data) => {
data.createdAt = new Date(data.createdAt);
});
await authorize(integrationData);
};

export const getSpreadsheetNameById = async (
googleSheetIntegrationData: TIntegrationGoogleSheets,
spreadsheetId: string
@@ -94,7 +114,17 @@ export const getSpreadsheetNameById = async (
return new Promise((resolve, reject) => {
sheets.spreadsheets.get({ spreadsheetId }, (err, response) => {
if (err) {
reject(new UnknownError(`Error while fetching spreadsheet data: ${err.message}`));
const msg = err.message?.toLowerCase() ?? "";
const isPermissionError =
msg.includes("permission") ||
msg.includes("caller does not have") ||
msg.includes("insufficient permission") ||
msg.includes("access denied");
if (isPermissionError) {
reject(new OperationNotAllowedError(GOOGLE_SHEET_INTEGRATION_INSUFFICIENT_PERMISSION));
} else {
reject(new UnknownError(`Error while fetching spreadsheet data: ${err.message}`));
}
return;
}
const spreadsheetTitle = response.data.properties.title;
@@ -109,26 +139,70 @@ export const getSpreadsheetNameById = async (
}
};

const isInvalidGrantError = (error: unknown): boolean => {
const err = error as { message?: string; response?: { data?: { error?: string } } };
return (
typeof err?.message === "string" &&
err.message.toLowerCase().includes(GOOGLE_SHEET_INTEGRATION_INVALID_GRANT)
);
};

/** Buffer in ms before expiry_date to consider token near-expired (5 minutes). */
const TOKEN_EXPIRY_BUFFER_MS = 5 * 60 * 1000;

const GOOGLE_TOKENINFO_URL = "https://www.googleapis.com/oauth2/v1/tokeninfo";

/**
* Verifies that the access token is still valid and not revoked (e.g. user removed app access).
* Returns true if token is valid, false if invalid/revoked.
*/
const isAccessTokenValid = async (accessToken: string): Promise<boolean> => {
try {
const res = await fetch(`${GOOGLE_TOKENINFO_URL}?access_token=${encodeURIComponent(accessToken)}`);
return res.ok;
} catch {
return false;
}
};

const authorize = async (googleSheetIntegrationData: TIntegrationGoogleSheets) => {
const client_id = GOOGLE_SHEETS_CLIENT_ID;
const client_secret = GOOGLE_SHEETS_CLIENT_SECRET;
const redirect_uri = GOOGLE_SHEETS_REDIRECT_URL;
const oAuth2Client = new google.auth.OAuth2(client_id, client_secret, redirect_uri);
const refresh_token = googleSheetIntegrationData.config.key.refresh_token;
oAuth2Client.setCredentials({
refresh_token,
});
const { credentials } = await oAuth2Client.refreshAccessToken();
await createOrUpdateIntegration(googleSheetIntegrationData.environmentId, {
type: "googleSheets",
config: {
data: googleSheetIntegrationData.config?.data ?? [],
email: googleSheetIntegrationData.config?.email ?? "",
key: credentials,
},
});
const key = googleSheetIntegrationData.config.key;

oAuth2Client.setCredentials(credentials);
const hasStoredCredentials =
key.access_token && key.expiry_date && key.expiry_date > Date.now() + TOKEN_EXPIRY_BUFFER_MS;

return oAuth2Client;
if (hasStoredCredentials && (await isAccessTokenValid(key.access_token))) {
oAuth2Client.setCredentials(key);
return oAuth2Client;
}

oAuth2Client.setCredentials({ refresh_token: key.refresh_token });

try {
const { credentials } = await oAuth2Client.refreshAccessToken();
const mergedCredentials = {
...credentials,
refresh_token: credentials.refresh_token ?? key.refresh_token,
};
await createOrUpdateIntegration(googleSheetIntegrationData.environmentId, {
type: "googleSheets",
config: {
data: googleSheetIntegrationData.config?.data ?? [],
email: googleSheetIntegrationData.config?.email ?? "",
key: mergedCredentials,
},
});

oAuth2Client.setCredentials(mergedCredentials);
return oAuth2Client;
} catch (error) {
if (isInvalidGrantError(error)) {
throw new AuthenticationError(GOOGLE_SHEET_INTEGRATION_INVALID_GRANT);
}
throw error;
}
};
@@ -22,6 +22,7 @@ import { getElementsFromBlocks } from "@/lib/survey/utils";
import { getIsQuotasEnabled } from "@/modules/ee/license-check/lib/utils";
import { reduceQuotaLimits } from "@/modules/ee/quotas/lib/quotas";
import { deleteFile } from "@/modules/storage/service";
import { resolveStorageUrlsInObject } from "@/modules/storage/utils";
import { getOrganizationIdFromEnvironmentId } from "@/modules/survey/lib/organization";
import { getOrganizationBilling } from "@/modules/survey/lib/survey";
import { ITEMS_PER_PAGE } from "../constants";
@@ -408,9 +409,10 @@ export const getResponseDownloadFile = async (
if (survey.isVerifyEmailEnabled) {
headers.push("Verified Email");
}
const resolvedResponses = responses.map((r) => ({ ...r, data: resolveStorageUrlsInObject(r.data) }));
const jsonData = getResponsesJson(
survey,
responses,
resolvedResponses,
elements,
userAttributes,
hiddenFields,
@@ -752,7 +752,12 @@
"link_google_sheet": "Tabelle verlinken",
"link_new_sheet": "Neues Blatt verknüpfen",
"no_integrations_yet": "Deine verknüpften Tabellen werden hier angezeigt, sobald Du sie hinzufügst ⏲️",
"spreadsheet_url": "Tabellen-URL"
"reconnect_button": "Erneut verbinden",
"reconnect_button_description": "Deine Google Sheets-Verbindung ist abgelaufen. Bitte verbinde dich erneut, um weiterhin Antworten zu synchronisieren. Deine bestehenden Tabellen-Links und Daten bleiben erhalten.",
"reconnect_button_tooltip": "Verbinde die Integration erneut, um deinen Zugriff zu aktualisieren. Deine bestehenden Tabellen-Links und Daten bleiben erhalten.",
"spreadsheet_permission_error": "Du hast keine Berechtigung, auf diese Tabelle zuzugreifen. Bitte stelle sicher, dass die Tabelle mit deinem Google-Konto geteilt ist und du Schreibzugriff auf die Tabelle hast.",
"spreadsheet_url": "Tabellen-URL",
"token_expired_error": "Das Google Sheets-Aktualisierungstoken ist abgelaufen oder wurde widerrufen. Bitte verbinde die Integration erneut."
},
"include_created_at": "Erstellungsdatum einbeziehen",
"include_hidden_fields": "Versteckte Felder (hidden fields) einbeziehen",

@@ -752,7 +752,12 @@
"link_google_sheet": "Link Google Sheet",
"link_new_sheet": "Link new Sheet",
"no_integrations_yet": "Your google sheet integrations will appear here as soon as you add them. ⏲️",
"spreadsheet_url": "Spreadsheet URL"
"reconnect_button": "Reconnect",
"reconnect_button_description": "Your Google Sheets connection has expired. Please reconnect to continue syncing responses. Your existing spreadsheet links and data will be preserved.",
"reconnect_button_tooltip": "Reconnect the integration to refresh your access. Your existing spreadsheet links and data will be preserved.",
"spreadsheet_permission_error": "You don't have permission to access this spreadsheet. Please ensure the spreadsheet is shared with your Google account and you have write access to the spreadsheet.",
"spreadsheet_url": "Spreadsheet URL",
"token_expired_error": "Google Sheets refresh token has expired or been revoked. Please reconnect the integration."
},
"include_created_at": "Include Created At",
"include_hidden_fields": "Include Hidden Fields",

@@ -752,7 +752,12 @@
"link_google_sheet": "Vincular Google Sheet",
"link_new_sheet": "Vincular nueva hoja",
"no_integrations_yet": "Tus integraciones de Google Sheet aparecerán aquí tan pronto como las añadas. ⏲️",
"spreadsheet_url": "URL de la hoja de cálculo"
"reconnect_button": "Reconectar",
"reconnect_button_description": "Tu conexión con Google Sheets ha caducado. Reconecta para continuar sincronizando respuestas. Tus enlaces de hojas de cálculo y datos existentes se conservarán.",
"reconnect_button_tooltip": "Reconecta la integración para actualizar tu acceso. Tus enlaces de hojas de cálculo y datos existentes se conservarán.",
"spreadsheet_permission_error": "No tienes permiso para acceder a esta hoja de cálculo. Asegúrate de que la hoja de cálculo esté compartida con tu cuenta de Google y de que tengas acceso de escritura a la hoja de cálculo.",
"spreadsheet_url": "URL de la hoja de cálculo",
"token_expired_error": "El token de actualización de Google Sheets ha caducado o ha sido revocado. Reconecta la integración."
},
"include_created_at": "Incluir fecha de creación",
"include_hidden_fields": "Incluir campos ocultos",

@@ -752,7 +752,12 @@
"link_google_sheet": "Lien Google Sheet",
"link_new_sheet": "Lier une nouvelle feuille",
"no_integrations_yet": "Vos intégrations Google Sheets apparaîtront ici dès que vous les ajouterez. ⏲️",
"spreadsheet_url": "URL de la feuille de calcul"
"reconnect_button": "Reconnecter",
"reconnect_button_description": "Votre connexion Google Sheets a expiré. Veuillez vous reconnecter pour continuer à synchroniser les réponses. Vos liens de feuilles de calcul et données existants seront préservés.",
"reconnect_button_tooltip": "Reconnectez l'intégration pour actualiser votre accès. Vos liens de feuilles de calcul et données existants seront préservés.",
"spreadsheet_permission_error": "Vous n'avez pas la permission d'accéder à cette feuille de calcul. Veuillez vous assurer que la feuille de calcul est partagée avec votre compte Google et que vous disposez d'un accès en écriture.",
"spreadsheet_url": "URL de la feuille de calcul",
"token_expired_error": "Le jeton d'actualisation Google Sheets a expiré ou a été révoqué. Veuillez reconnecter l'intégration."
},
"include_created_at": "Inclure la date de création",
"include_hidden_fields": "Inclure les champs cachés",

@@ -752,7 +752,12 @@
"link_google_sheet": "Google Táblázatok összekapcsolása",
"link_new_sheet": "Új táblázat összekapcsolása",
"no_integrations_yet": "A Google Táblázatok integrációi itt fognak megjelenni, amint hozzáadja azokat. ⏲️",
"spreadsheet_url": "Táblázat URL-e"
"reconnect_button": "Újrakapcsolódás",
"reconnect_button_description": "A Google Táblázatok kapcsolata lejárt. Kérjük, csatlakozzon újra a válaszok szinkronizálásának folytatásához. A meglévő táblázathivatkozások és adatok megmaradnak.",
"reconnect_button_tooltip": "Csatlakoztassa újra az integrációt a hozzáférés frissítéséhez. A meglévő táblázathivatkozások és adatok megmaradnak.",
"spreadsheet_permission_error": "Nincs jogosultsága a táblázat eléréséhez. Kérjük, győződjön meg arról, hogy a táblázat meg van osztva a Google-fiókjával, és írási jogosultsággal rendelkezik a táblázathoz.",
"spreadsheet_url": "Táblázat URL-e",
"token_expired_error": "A Google Táblázatok frissítési tokenje lejárt vagy visszavonásra került. Kérjük, csatlakoztassa újra az integrációt."
},
"include_created_at": "Létrehozva felvétele",
"include_hidden_fields": "Rejtett mezők felvétele",

@@ -752,7 +752,12 @@
"link_google_sheet": "スプレッドシートをリンク",
"link_new_sheet": "新しいシートをリンク",
"no_integrations_yet": "Google スプレッドシート連携は、追加するとここに表示されます。⏲️",
"spreadsheet_url": "スプレッドシートURL"
"reconnect_button": "再接続",
"reconnect_button_description": "Google Sheetsの接続が期限切れになりました。回答の同期を続けるには再接続してください。既存のスプレッドシートリンクとデータは保持されます。",
"reconnect_button_tooltip": "統合を再接続してアクセスを更新します。既存のスプレッドシートリンクとデータは保持されます。",
"spreadsheet_permission_error": "このスプレッドシートにアクセスする権限がありません。スプレッドシートがGoogleアカウントと共有されており、書き込みアクセス権があることを確認してください。",
"spreadsheet_url": "スプレッドシートURL",
"token_expired_error": "Google Sheetsのリフレッシュトークンが期限切れになったか、取り消されました。統合を再接続してください。"
},
"include_created_at": "作成日時を含める",
"include_hidden_fields": "非表示フィールドを含める",

@@ -752,7 +752,12 @@
"link_google_sheet": "Link Google Spreadsheet",
"link_new_sheet": "Nieuw blad koppelen",
"no_integrations_yet": "Uw Google Spreadsheet-integraties verschijnen hier zodra u ze toevoegt. ⏲️",
"spreadsheet_url": "Spreadsheet-URL"
"reconnect_button": "Maak opnieuw verbinding",
"reconnect_button_description": "Je Google Sheets-verbinding is verlopen. Maak opnieuw verbinding om door te gaan met het synchroniseren van antwoorden. Je bestaande spreadsheetlinks en gegevens blijven behouden.",
"reconnect_button_tooltip": "Maak opnieuw verbinding met de integratie om je toegang te vernieuwen. Je bestaande spreadsheetlinks en gegevens blijven behouden.",
"spreadsheet_permission_error": "Je hebt geen toestemming om deze spreadsheet te openen. Zorg ervoor dat de spreadsheet is gedeeld met je Google-account en dat je schrijftoegang hebt tot de spreadsheet.",
"spreadsheet_url": "Spreadsheet-URL",
"token_expired_error": "Het vernieuwingstoken van Google Sheets is verlopen of ingetrokken. Maak opnieuw verbinding met de integratie."
},
"include_created_at": "Inclusief gemaakt op",
"include_hidden_fields": "Inclusief verborgen velden",

@@ -752,7 +752,12 @@
"link_google_sheet": "Link da Planilha do Google",
"link_new_sheet": "Vincular nova planilha",
"no_integrations_yet": "Suas integrações do Google Sheets vão aparecer aqui assim que você adicioná-las. ⏲️",
"spreadsheet_url": "URL da planilha"
"reconnect_button": "Reconectar",
"reconnect_button_description": "Sua conexão com o Google Sheets expirou. Reconecte para continuar sincronizando respostas. Seus links de planilhas e dados existentes serão preservados.",
"reconnect_button_tooltip": "Reconecte a integração para atualizar seu acesso. Seus links de planilhas e dados existentes serão preservados.",
"spreadsheet_permission_error": "Você não tem permissão para acessar esta planilha. Certifique-se de que a planilha está compartilhada com sua conta do Google e que você tem acesso de escrita à planilha.",
"spreadsheet_url": "URL da planilha",
"token_expired_error": "O token de atualização do Google Sheets expirou ou foi revogado. Reconecte a integração."
},
"include_created_at": "Incluir Data de Criação",
"include_hidden_fields": "Incluir Campos Ocultos",

@@ -752,7 +752,12 @@
"link_google_sheet": "Ligar Folha do Google",
"link_new_sheet": "Ligar nova Folha",
"no_integrations_yet": "As suas integrações com o Google Sheets aparecerão aqui assim que as adicionar. ⏲️",
"spreadsheet_url": "URL da folha de cálculo"
"reconnect_button": "Reconectar",
"reconnect_button_description": "A tua ligação ao Google Sheets expirou. Por favor, reconecta para continuar a sincronizar respostas. As tuas ligações de folhas de cálculo e dados existentes serão preservados.",
"reconnect_button_tooltip": "Reconecta a integração para atualizar o teu acesso. As tuas ligações de folhas de cálculo e dados existentes serão preservados.",
"spreadsheet_permission_error": "Não tens permissão para aceder a esta folha de cálculo. Por favor, certifica-te de que a folha de cálculo está partilhada com a tua conta Google e que tens acesso de escrita à folha de cálculo.",
"spreadsheet_url": "URL da folha de cálculo",
"token_expired_error": "O token de atualização do Google Sheets expirou ou foi revogado. Por favor, reconecta a integração."
},
"include_created_at": "Incluir Criado Em",
"include_hidden_fields": "Incluir Campos Ocultos",

@@ -752,7 +752,12 @@
"link_google_sheet": "Leagă Google Sheet",
"link_new_sheet": "Leagă un nou Sheet",
"no_integrations_yet": "Integrațiile tale Google Sheet vor apărea aici de îndată ce le vei adăuga. ⏲️",
"spreadsheet_url": "URL foaie de calcul"
"reconnect_button": "Reconectează",
"reconnect_button_description": "Conexiunea ta cu Google Sheets a expirat. Te rugăm să te reconectezi pentru a continua sincronizarea răspunsurilor. Linkurile și datele existente din foile de calcul vor fi păstrate.",
"reconnect_button_tooltip": "Reconectează integrarea pentru a-ți reîmprospăta accesul. Linkurile și datele existente din foile de calcul vor fi păstrate.",
"spreadsheet_permission_error": "Nu ai permisiunea de a accesa această foaie de calcul. Asigură-te că foaia de calcul este partajată cu contul tău Google și că ai acces de scriere la aceasta.",
"spreadsheet_url": "URL foaie de calcul",
"token_expired_error": "Tokenul de reîmprospătare Google Sheets a expirat sau a fost revocat. Te rugăm să reconectezi integrarea."
},
"include_created_at": "Include data creării",
"include_hidden_fields": "Include câmpuri ascunse",

@@ -752,7 +752,12 @@
"link_google_sheet": "Связать с Google Sheet",
"link_new_sheet": "Связать с новой таблицей",
|
||||
"no_integrations_yet": "Ваши интеграции с Google Sheet появятся здесь, как только вы их добавите. ⏲️",
|
||||
"spreadsheet_url": "URL таблицы"
|
||||
"reconnect_button": "Переподключить",
|
||||
"reconnect_button_description": "Срок действия подключения к Google Sheets истёк. Пожалуйста, переподключись, чтобы продолжить синхронизацию ответов. Все существующие ссылки на таблицы и данные будут сохранены.",
|
||||
"reconnect_button_tooltip": "Переподключи интеграцию, чтобы обновить доступ. Все существующие ссылки на таблицы и данные будут сохранены.",
|
||||
"spreadsheet_permission_error": "У тебя нет доступа к этой таблице. Убедись, что таблица открыта для твоего Google-аккаунта и у тебя есть права на запись.",
|
||||
"spreadsheet_url": "URL таблицы",
|
||||
"token_expired_error": "Срок действия токена обновления Google Sheets истёк или он был отозван. Пожалуйста, переподключи интеграцию."
|
||||
},
|
||||
"include_created_at": "Включить дату создания",
|
||||
"include_hidden_fields": "Включить скрытые поля",
|
||||
|
||||
@@ -752,7 +752,12 @@
|
||||
"link_google_sheet": "Länka Google Kalkylark",
|
||||
"link_new_sheet": "Länka nytt kalkylark",
|
||||
"no_integrations_yet": "Dina Google Kalkylark-integrationer visas här så snart du lägger till dem. ⏲️",
|
||||
"spreadsheet_url": "Kalkylblads-URL"
|
||||
"reconnect_button": "Återanslut",
|
||||
"reconnect_button_description": "Din Google Sheets-anslutning har gått ut. Återanslut för att fortsätta synkronisera svar. Dina befintliga kalkylarkslänkar och data kommer att sparas.",
|
||||
"reconnect_button_tooltip": "Återanslut integrationen för att uppdatera din åtkomst. Dina befintliga kalkylarkslänkar och data kommer att sparas.",
|
||||
"spreadsheet_permission_error": "Du har inte behörighet att komma åt det här kalkylarket. Kontrollera att kalkylarket är delat med ditt Google-konto och att du har skrivrättigheter till kalkylarket.",
|
||||
"spreadsheet_url": "Kalkylblads-URL",
|
||||
"token_expired_error": "Google Sheets refresh token har gått ut eller återkallats. Återanslut integrationen."
|
||||
},
|
||||
"include_created_at": "Inkludera Skapad vid",
|
||||
"include_hidden_fields": "Inkludera dolda fält",
|
||||
|
||||
@@ -752,7 +752,12 @@
|
||||
"link_google_sheet": "链接 Google 表格",
|
||||
"link_new_sheet": "链接 新 表格",
|
||||
"no_integrations_yet": "您的 Google Sheet 集成会在您 添加 后 出现在这里。 ⏲️",
|
||||
"spreadsheet_url": "电子表格 URL"
|
||||
"reconnect_button": "重新连接",
|
||||
"reconnect_button_description": "你的 Google Sheets 连接已过期。请重新连接以继续同步回复。你现有的表格链接和数据会被保留。",
|
||||
"reconnect_button_tooltip": "重新连接集成以刷新你的访问权限。你现有的表格链接和数据会被保留。",
|
||||
"spreadsheet_permission_error": "你没有权限访问此表格。请确保该表格已与你的 Google 账号共享,并且你拥有该表格的编辑权限。",
|
||||
"spreadsheet_url": "电子表格 URL",
|
||||
"token_expired_error": "Google Sheets 的刷新令牌已过期或被撤销。请重新连接集成。"
|
||||
},
|
||||
"include_created_at": "包括 创建 于",
|
||||
"include_hidden_fields": "包括 隐藏 字段",
|
||||
|
||||
@@ -752,7 +752,12 @@
|
||||
"link_google_sheet": "連結 Google 試算表",
|
||||
"link_new_sheet": "連結新試算表",
|
||||
"no_integrations_yet": "您的 Google 試算表整合將在您新增後立即顯示在此處。⏲️",
|
||||
"spreadsheet_url": "試算表網址"
|
||||
"reconnect_button": "重新連線",
|
||||
"reconnect_button_description": "你的 Google Sheets 連線已過期。請重新連線以繼續同步回應。你現有的試算表連結和資料都會被保留。",
|
||||
"reconnect_button_tooltip": "重新連線整合以刷新存取權限。你現有的試算表連結和資料都會被保留。",
|
||||
"spreadsheet_permission_error": "你沒有權限存取這個試算表。請確認該試算表已與你的 Google 帳戶分享,且你擁有寫入權限。",
|
||||
"spreadsheet_url": "試算表網址",
|
||||
"token_expired_error": "Google Sheets 的刷新權杖已過期或被撤銷。請重新連線整合。"
|
||||
},
|
||||
"include_created_at": "包含建立於",
|
||||
"include_hidden_fields": "包含隱藏欄位",
|
||||
|
||||
@@ -95,7 +95,7 @@ describe("validateResponseData", () => {
|
||||
mockGetElementsFromBlocks.mockReturnValue(mockElements);
|
||||
mockValidateBlockResponses.mockReturnValue({});
|
||||
|
||||
validateResponseData([], mockResponseData, "en", true, mockQuestions);
|
||||
validateResponseData([], mockResponseData, "en", mockQuestions);
|
||||
|
||||
expect(mockTransformQuestionsToBlocks).toHaveBeenCalledWith(mockQuestions, []);
|
||||
expect(mockGetElementsFromBlocks).toHaveBeenCalledWith(transformedBlocks);
|
||||
@@ -105,15 +105,15 @@ describe("validateResponseData", () => {
|
||||
mockGetElementsFromBlocks.mockReturnValue(mockElements);
|
||||
mockValidateBlockResponses.mockReturnValue({});
|
||||
|
||||
validateResponseData(mockBlocks, mockResponseData, "en", true, mockQuestions);
|
||||
validateResponseData(mockBlocks, mockResponseData, "en", mockQuestions);
|
||||
|
||||
expect(mockTransformQuestionsToBlocks).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test("should return null when both blocks and questions are empty", () => {
|
||||
expect(validateResponseData([], mockResponseData, "en", true, [])).toBeNull();
|
||||
expect(validateResponseData(null, mockResponseData, "en", true, [])).toBeNull();
|
||||
expect(validateResponseData(undefined, mockResponseData, "en", true, null)).toBeNull();
|
||||
expect(validateResponseData([], mockResponseData, "en", [])).toBeNull();
|
||||
expect(validateResponseData(null, mockResponseData, "en", [])).toBeNull();
|
||||
expect(validateResponseData(undefined, mockResponseData, "en", null)).toBeNull();
|
||||
});
|
||||
|
||||
test("should use default language code", () => {
|
||||
@@ -125,25 +125,58 @@ describe("validateResponseData", () => {
|
||||
expect(mockValidateBlockResponses).toHaveBeenCalledWith(mockElements, mockResponseData, "en");
|
||||
});
|
||||
|
||||
test("should validate only present fields when finished is false", () => {
|
||||
test("should validate only fields present in responseData", () => {
|
||||
const partialResponseData: TResponseData = { element1: "test" };
|
||||
const partialElements = [mockElements[0]];
|
||||
const elementsToValidate = [mockElements[0]];
|
||||
mockGetElementsFromBlocks.mockReturnValue(mockElements);
|
||||
mockValidateBlockResponses.mockReturnValue({});
|
||||
|
||||
validateResponseData(mockBlocks, partialResponseData, "en", false);
|
||||
validateResponseData(mockBlocks, partialResponseData, "en");
|
||||
|
||||
expect(mockValidateBlockResponses).toHaveBeenCalledWith(partialElements, partialResponseData, "en");
|
||||
expect(mockValidateBlockResponses).toHaveBeenCalledWith(elementsToValidate, partialResponseData, "en");
|
||||
});
|
||||
|
||||
test("should validate all fields when finished is true", () => {
|
||||
const partialResponseData: TResponseData = { element1: "test" };
|
||||
mockGetElementsFromBlocks.mockReturnValue(mockElements);
|
||||
test("should never validate elements not in responseData", () => {
|
||||
const blocksWithTwoElements: TSurveyBlock[] = [
|
||||
...mockBlocks,
|
||||
{
|
||||
id: "block2",
|
||||
name: "Block 2",
|
||||
elements: [
|
||||
{
|
||||
id: "element2",
|
||||
type: TSurveyElementTypeEnum.OpenText,
|
||||
headline: { default: "Q2" },
|
||||
required: true,
|
||||
inputType: "text",
|
||||
charLimit: { enabled: false },
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
const allElements = [
|
||||
...mockElements,
|
||||
{
|
||||
id: "element2",
|
||||
type: TSurveyElementTypeEnum.OpenText,
|
||||
headline: { default: "Q2" },
|
||||
required: true,
|
||||
inputType: "text",
|
||||
charLimit: { enabled: false },
|
||||
},
|
||||
];
|
||||
const responseDataWithOnlyElement1: TResponseData = { element1: "test" };
|
||||
mockGetElementsFromBlocks.mockReturnValue(allElements);
|
||||
mockValidateBlockResponses.mockReturnValue({});
|
||||
|
||||
validateResponseData(mockBlocks, partialResponseData, "en", true);
|
||||
validateResponseData(blocksWithTwoElements, responseDataWithOnlyElement1, "en");
|
||||
|
||||
expect(mockValidateBlockResponses).toHaveBeenCalledWith(mockElements, partialResponseData, "en");
|
||||
// Only element1 should be validated, not element2 (even though it's required)
|
||||
expect(mockValidateBlockResponses).toHaveBeenCalledWith(
|
||||
[allElements[0]],
|
||||
responseDataWithOnlyElement1,
|
||||
"en"
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -9,13 +9,13 @@ import { getElementsFromBlocks } from "@/lib/survey/utils";
|
||||
import { ApiErrorDetails } from "@/modules/api/v2/types/api-error";
|
||||
|
||||
/**
|
||||
* Validates response data against survey validation rules
|
||||
* Handles partial responses (in-progress) by only validating present fields when finished is false
|
||||
* Validates response data against survey validation rules.
* Only elements that have data in responseData are validated; the full set of
* survey elements is never validated, regardless of completion status.
|
||||
*
|
||||
* @param blocks - Survey blocks containing elements with validation rules (preferred)
|
||||
* @param responseData - Response data to validate (keyed by element ID)
|
||||
* @param languageCode - Language code for error messages (defaults to "en")
|
||||
* @param finished - Whether the response is finished (defaults to true for management APIs)
|
||||
* @param questions - Survey questions (legacy format, used as fallback if blocks are empty)
|
||||
* @returns Validation error map keyed by element ID, or null if validation passes
|
||||
*/
|
||||
@@ -23,7 +23,6 @@ export const validateResponseData = (
|
||||
blocks: TSurveyBlock[] | undefined | null,
|
||||
responseData: TResponseData,
|
||||
languageCode: string = "en",
|
||||
finished: boolean = true,
|
||||
questions?: TSurveyQuestion[] | undefined | null
|
||||
): TValidationErrorMap | null => {
|
||||
// Use blocks if available, otherwise transform questions to blocks
|
||||
@@ -42,11 +41,8 @@ export const validateResponseData = (
|
||||
// Extract elements from blocks
|
||||
const allElements = getElementsFromBlocks(blocksToUse);
|
||||
|
||||
// If response is not finished, only validate elements that are present in the response data
|
||||
// This prevents "required" errors for fields the user hasn't reached yet
|
||||
const elementsToValidate = finished
|
||||
? allElements
|
||||
: allElements.filter((element) => Object.keys(responseData).includes(element.id));
|
||||
// Always validate only elements that are present in responseData
|
||||
const elementsToValidate = allElements.filter((element) => Object.keys(responseData).includes(element.id));
|
||||
|
||||
// Validate selected elements
|
||||
const errorMap = validateBlockResponses(elementsToValidate, responseData, languageCode);
|
||||
|
||||
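For clarity, here is a minimal sketch of how the updated four-argument `validateResponseData` behaves after this change. The import path and the inline survey fixture are assumptions for illustration only; the element shape loosely mirrors the test fixtures above.

```typescript
// Sketch only: the import path and fixture are illustrative, not taken from this PR.
import { validateResponseData } from "@/modules/api/v2/lib/validation";

const surveyBlocks = [
  {
    id: "block1",
    name: "Block 1",
    elements: [
      // type written as a string here; the real code uses TSurveyElementTypeEnum.OpenText
      { id: "element1", type: "openText", headline: { default: "Q1" }, required: true, inputType: "text", charLimit: { enabled: false } },
      { id: "element2", type: "openText", headline: { default: "Q2" }, required: true, inputType: "text", charLimit: { enabled: false } },
    ],
  },
] as any; // fixture typed loosely for the sketch

// An in-progress response: only element1 has been answered so far.
const errors = validateResponseData(surveyBlocks, { element1: "Hello" }, "en");

// Only element1 is validated. element2 produces no "required" error because it has
// no entry in responseData yet; `errors` is null when all present fields pass.
```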
@@ -15,7 +15,7 @@ import {
|
||||
import { getSurveyQuestions } from "@/modules/api/v2/management/responses/[responseId]/lib/survey";
|
||||
import { ApiErrorResponseV2 } from "@/modules/api/v2/types/api-error";
|
||||
import { hasPermission } from "@/modules/organization/settings/api-keys/lib/utils";
|
||||
import { validateFileUploads } from "@/modules/storage/utils";
|
||||
import { resolveStorageUrlsInObject, validateFileUploads } from "@/modules/storage/utils";
|
||||
import { ZResponseIdSchema, ZResponseUpdateSchema } from "./types/responses";
|
||||
|
||||
export const GET = async (request: Request, props: { params: Promise<{ responseId: string }> }) =>
|
||||
@@ -51,7 +51,10 @@ export const GET = async (request: Request, props: { params: Promise<{ responseI
|
||||
return handleApiError(request, response.error as ApiErrorResponseV2);
|
||||
}
|
||||
|
||||
return responses.successResponse(response);
|
||||
return responses.successResponse({
|
||||
...response,
|
||||
data: { ...response.data, data: resolveStorageUrlsInObject(response.data.data) },
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
@@ -198,7 +201,6 @@ export const PUT = (request: Request, props: { params: Promise<{ responseId: str
|
||||
questionsResponse.data.blocks,
|
||||
body.data,
|
||||
body.language ?? "en",
|
||||
body.finished,
|
||||
questionsResponse.data.questions
|
||||
);
|
||||
|
||||
@@ -244,7 +246,10 @@ export const PUT = (request: Request, props: { params: Promise<{ responseId: str
|
||||
auditLog.newObject = response.data;
|
||||
}
|
||||
|
||||
return responses.successResponse(response);
|
||||
return responses.successResponse({
|
||||
...response,
|
||||
data: { ...response.data, data: resolveStorageUrlsInObject(response.data.data) },
|
||||
});
|
||||
},
|
||||
action: "updated",
|
||||
targetType: "response",
|
||||
|
||||
@@ -12,7 +12,7 @@ import { getSurveyQuestions } from "@/modules/api/v2/management/responses/[respo
|
||||
import { ZGetResponsesFilter, ZResponseInput } from "@/modules/api/v2/management/responses/types/responses";
|
||||
import { ApiErrorResponseV2 } from "@/modules/api/v2/types/api-error";
|
||||
import { hasPermission } from "@/modules/organization/settings/api-keys/lib/utils";
|
||||
import { validateFileUploads } from "@/modules/storage/utils";
|
||||
import { resolveStorageUrlsInObject, validateFileUploads } from "@/modules/storage/utils";
|
||||
import { createResponseWithQuotaEvaluation, getResponses } from "./lib/response";
|
||||
|
||||
export const GET = async (request: NextRequest) =>
|
||||
@@ -44,7 +44,9 @@ export const GET = async (request: NextRequest) =>
|
||||
|
||||
environmentResponses.push(...res.data.data);
|
||||
|
||||
return responses.successResponse({ data: environmentResponses });
|
||||
return responses.successResponse({
|
||||
data: environmentResponses.map((r) => ({ ...r, data: resolveStorageUrlsInObject(r.data) })),
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
@@ -134,7 +136,6 @@ export const POST = async (request: Request) =>
|
||||
surveyQuestions.data.blocks,
|
||||
body.data,
|
||||
body.language ?? "en",
|
||||
body.finished,
|
||||
surveyQuestions.data.questions
|
||||
);
|
||||
|
||||
|
||||
@@ -54,7 +54,6 @@ export const prepareNewSDKAttributeForStorage = (
|
||||
};
|
||||
|
||||
const handleStringType = (value: TRawValue): TAttributeStorageColumns => {
|
||||
// String type - only use value column
|
||||
let stringValue: string;
|
||||
|
||||
if (value instanceof Date) {
|
||||
|
||||
@@ -437,4 +437,22 @@ describe("updateAttributes", () => {
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.messages).toContainEqual({ code: "email_or_userid_required", params: {} });
|
||||
});
|
||||
|
||||
test("coerces boolean attribute values to strings", async () => {
|
||||
vi.mocked(getContactAttributeKeys).mockResolvedValue(attributeKeys);
|
||||
vi.mocked(getContactAttributes).mockResolvedValue({ name: "Jane", email: "jane@example.com" });
|
||||
vi.mocked(hasEmailAttribute).mockResolvedValue(false);
|
||||
vi.mocked(hasUserIdAttribute).mockResolvedValue(false);
|
||||
vi.mocked(prisma.$transaction).mockResolvedValue(undefined);
|
||||
vi.mocked(prisma.contactAttribute.deleteMany).mockResolvedValue({ count: 0 });
|
||||
|
||||
const attributes = { name: true, email: "john@example.com" };
|
||||
const result = await updateAttributes(contactId, userId, environmentId, attributes);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(prisma.$transaction).toHaveBeenCalled();
|
||||
const transactionCall = vi.mocked(prisma.$transaction).mock.calls[0][0];
|
||||
// Both name (coerced from boolean) and email should be upserted
|
||||
expect(transactionCall).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -130,7 +130,12 @@ export const updateAttributes = async (
|
||||
const messages: TAttributeUpdateMessage[] = [];
|
||||
const errors: TAttributeUpdateMessage[] = [];
|
||||
|
||||
// Convert email and userId to strings for lookup (they should always be strings, but handle numbers gracefully)
|
||||
// Coerce boolean values to strings (SDK may send booleans for string attributes)
|
||||
const coercedAttributes: Record<string, string | number> = {};
|
||||
for (const [key, value] of Object.entries(contactAttributesParam)) {
|
||||
coercedAttributes[key] = typeof value === "boolean" ? String(value) : value;
|
||||
}
|
||||
|
||||
const emailValue =
|
||||
contactAttributesParam.email === null || contactAttributesParam.email === undefined
|
||||
? null
|
||||
@@ -154,7 +159,7 @@ export const updateAttributes = async (
|
||||
const userIdExists = !!existingUserIdAttribute;
|
||||
|
||||
// Remove email and/or userId from attributes if they already exist on another contact
|
||||
let contactAttributes = { ...contactAttributesParam };
|
||||
let contactAttributes = { ...coercedAttributes };
|
||||
|
||||
// Determine what the final email and userId values will be after this update
|
||||
// Only consider a value as "submitted" if it was explicitly included in the attributes
|
||||
|
||||
@@ -5,10 +5,14 @@ import { getSegment } from "../segments";
|
||||
import { segmentFilterToPrismaQuery } from "./prisma-query";
|
||||
|
||||
const mockQueryRawUnsafe = vi.fn();
|
||||
const mockFindFirst = vi.fn();
|
||||
|
||||
vi.mock("@formbricks/database", () => ({
|
||||
prisma: {
|
||||
$queryRawUnsafe: (...args: unknown[]) => mockQueryRawUnsafe(...args),
|
||||
contactAttribute: {
|
||||
findFirst: (...args: unknown[]) => mockFindFirst(...args),
|
||||
},
|
||||
},
|
||||
}));
|
||||
|
||||
@@ -26,7 +30,9 @@ describe("segmentFilterToPrismaQuery", () => {
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
// Default mock: number filter raw SQL returns one matching contact
|
||||
// Default: backfill is complete, no un-migrated rows
|
||||
mockFindFirst.mockResolvedValue(null);
|
||||
// Fallback path mock: raw SQL returns one matching contact when un-migrated rows exist
|
||||
mockQueryRawUnsafe.mockResolvedValue([{ contactId: "mock-contact-1" }]);
|
||||
});
|
||||
|
||||
@@ -145,7 +151,16 @@ describe("segmentFilterToPrismaQuery", () => {
|
||||
},
|
||||
},
|
||||
],
|
||||
OR: [{ id: { in: ["mock-contact-1"] } }],
|
||||
OR: [
|
||||
{
|
||||
attributes: {
|
||||
some: {
|
||||
attributeKey: { key: "age" },
|
||||
valueNumber: { gt: 30 },
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
}
|
||||
});
|
||||
@@ -757,7 +772,12 @@ describe("segmentFilterToPrismaQuery", () => {
|
||||
});
|
||||
|
||||
expect(subgroup.AND[0].AND[2]).toStrictEqual({
|
||||
id: { in: ["mock-contact-1"] },
|
||||
attributes: {
|
||||
some: {
|
||||
attributeKey: { key: "age" },
|
||||
valueNumber: { gte: 18 },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Segment inclusion
|
||||
@@ -1158,10 +1178,23 @@ describe("segmentFilterToPrismaQuery", () => {
|
||||
},
|
||||
});
|
||||
|
||||
// Second subgroup (numeric operators - now use raw SQL subquery returning contact IDs)
|
||||
// Second subgroup (numeric operators - uses clean Prisma filter post-backfill)
|
||||
const secondSubgroup = whereClause.AND?.[0];
|
||||
expect(secondSubgroup.AND[1].AND).toContainEqual({
|
||||
id: { in: ["mock-contact-1"] },
|
||||
attributes: {
|
||||
some: {
|
||||
attributeKey: { key: "loginCount" },
|
||||
valueNumber: { gt: 5 },
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(secondSubgroup.AND[1].AND).toContainEqual({
|
||||
attributes: {
|
||||
some: {
|
||||
attributeKey: { key: "purchaseAmount" },
|
||||
valueNumber: { lte: 1000 },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Third subgroup (negation operators in OR clause)
|
||||
@@ -1196,6 +1229,104 @@ describe("segmentFilterToPrismaQuery", () => {
|
||||
}
|
||||
});
|
||||
|
||||
test("number filter falls back to raw SQL when un-migrated rows exist", async () => {
|
||||
mockFindFirst.mockResolvedValue({ id: "unmigrated-row-1" });
|
||||
mockQueryRawUnsafe.mockResolvedValue([{ contactId: "mock-contact-1" }]);
|
||||
|
||||
const filters: TBaseFilters = [
|
||||
{
|
||||
id: "filter_1",
|
||||
connector: null,
|
||||
resource: {
|
||||
id: "attr_1",
|
||||
root: {
|
||||
type: "attribute" as const,
|
||||
contactAttributeKey: "age",
|
||||
},
|
||||
value: 25,
|
||||
qualifier: {
|
||||
operator: "greaterThan",
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const result = await segmentFilterToPrismaQuery(mockSegmentId, filters, mockEnvironmentId);
|
||||
|
||||
expect(result.ok).toBe(true);
|
||||
if (result.ok) {
|
||||
const filterClause = result.data.whereClause.AND?.[1] as any;
|
||||
expect(filterClause.AND[0]).toEqual({
|
||||
OR: [
|
||||
{
|
||||
attributes: {
|
||||
some: {
|
||||
attributeKey: { key: "age" },
|
||||
valueNumber: { gt: 25 },
|
||||
},
|
||||
},
|
||||
},
|
||||
{ id: { in: ["mock-contact-1"] } },
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
expect(mockFindFirst).toHaveBeenCalledWith({
|
||||
where: {
|
||||
attributeKey: {
|
||||
key: "age",
|
||||
environmentId: mockEnvironmentId,
|
||||
dataType: "number",
|
||||
},
|
||||
valueNumber: null,
|
||||
},
|
||||
select: { id: true },
|
||||
});
|
||||
|
||||
expect(mockQueryRawUnsafe).toHaveBeenCalled();
|
||||
const sqlCall = mockQueryRawUnsafe.mock.calls[0];
|
||||
expect(sqlCall[0]).toContain('cak."environmentId" = $4');
|
||||
expect(sqlCall[4]).toBe(mockEnvironmentId);
|
||||
});
|
||||
|
||||
test("number filter uses clean Prisma query when backfill is complete", async () => {
|
||||
const filters: TBaseFilters = [
|
||||
{
|
||||
id: "filter_1",
|
||||
connector: null,
|
||||
resource: {
|
||||
id: "attr_1",
|
||||
root: {
|
||||
type: "attribute" as const,
|
||||
contactAttributeKey: "score",
|
||||
},
|
||||
value: 100,
|
||||
qualifier: {
|
||||
operator: "lessEqual",
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const result = await segmentFilterToPrismaQuery(mockSegmentId, filters, mockEnvironmentId);
|
||||
|
||||
expect(result.ok).toBe(true);
|
||||
if (result.ok) {
|
||||
const filterClause = result.data.whereClause.AND?.[1] as any;
|
||||
expect(filterClause.AND[0]).toEqual({
|
||||
attributes: {
|
||||
some: {
|
||||
attributeKey: { key: "score" },
|
||||
valueNumber: { lte: 100 },
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
expect(mockFindFirst).toHaveBeenCalled();
|
||||
expect(mockQueryRawUnsafe).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
// ==========================================
|
||||
// DATE FILTER TESTS
|
||||
// ==========================================
|
||||
@@ -1232,7 +1363,7 @@ describe("segmentFilterToPrismaQuery", () => {
|
||||
{
|
||||
attributes: {
|
||||
some: {
|
||||
attributeKey: { key: "purchaseDate" },
|
||||
attributeKey: { key: "purchaseDate", dataType: "date" },
|
||||
OR: [
|
||||
{ valueDate: { lt: new Date(targetDate) } },
|
||||
{ valueDate: null, value: { lt: new Date(targetDate).toISOString() } },
|
||||
@@ -1276,7 +1407,7 @@ describe("segmentFilterToPrismaQuery", () => {
|
||||
{
|
||||
attributes: {
|
||||
some: {
|
||||
attributeKey: { key: "signupDate" },
|
||||
attributeKey: { key: "signupDate", dataType: "date" },
|
||||
OR: [
|
||||
{ valueDate: { gt: new Date(targetDate) } },
|
||||
{ valueDate: null, value: { gt: new Date(targetDate).toISOString() } },
|
||||
@@ -1321,7 +1452,7 @@ describe("segmentFilterToPrismaQuery", () => {
|
||||
{
|
||||
attributes: {
|
||||
some: {
|
||||
attributeKey: { key: "lastActivityDate" },
|
||||
attributeKey: { key: "lastActivityDate", dataType: "date" },
|
||||
OR: [
|
||||
{ valueDate: { gte: new Date(startDate), lte: new Date(endDate) } },
|
||||
{
|
||||
@@ -1638,8 +1769,15 @@ describe("segmentFilterToPrismaQuery", () => {
|
||||
mode: "insensitive",
|
||||
});
|
||||
|
||||
// Number filter uses raw SQL subquery (transition code) returning contact IDs
|
||||
expect(andConditions[1]).toEqual({ id: { in: ["mock-contact-1"] } });
|
||||
// Number filter uses clean Prisma filter post-backfill
|
||||
expect(andConditions[1]).toEqual({
|
||||
attributes: {
|
||||
some: {
|
||||
attributeKey: { key: "purchaseCount" },
|
||||
valueNumber: { gt: 5 },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Date filter uses OR fallback with 'valueDate' and string 'value'
|
||||
expect((andConditions[2] as unknown as any).attributes.some.OR[0].valueDate).toHaveProperty("gte");
|
||||
|
||||
@@ -107,7 +107,7 @@ const buildDateAttributeFilterWhereClause = (filter: TSegmentAttributeFilter): P
|
||||
return {
|
||||
attributes: {
|
||||
some: {
|
||||
attributeKey: { key: contactAttributeKey },
|
||||
attributeKey: { key: contactAttributeKey, dataType: "date" },
|
||||
OR: [{ valueDate: dateCondition }, { valueDate: null, value: stringDateCondition }],
|
||||
},
|
||||
},
|
||||
@@ -116,59 +116,102 @@ const buildDateAttributeFilterWhereClause = (filter: TSegmentAttributeFilter): P
|
||||
|
||||
/**
|
||||
* Builds a Prisma where clause for number attribute filters.
|
||||
* Uses a raw SQL subquery to handle both migrated rows (valueNumber populated)
|
||||
* and un-migrated rows (valueNumber NULL, value contains numeric string).
|
||||
* This is transition code for the deferred value backfill.
|
||||
* Uses a clean Prisma query when all rows have valueNumber populated (post-backfill).
|
||||
* Falls back to a raw SQL subquery for un-migrated rows (valueNumber NULL, value contains numeric string).
|
||||
*
|
||||
* TODO: After the backfill script has been run and all valueNumber columns are populated,
|
||||
* revert this to the clean Prisma-only version that queries valueNumber directly.
|
||||
* remove the un-migrated fallback path entirely.
|
||||
*/
|
||||
const buildNumberAttributeFilterWhereClause = async (
|
||||
filter: TSegmentAttributeFilter
|
||||
filter: TSegmentAttributeFilter,
|
||||
environmentId: string
|
||||
): Promise<Prisma.ContactWhereInput> => {
|
||||
const { root, qualifier, value } = filter;
|
||||
const { contactAttributeKey } = root;
|
||||
const { operator } = qualifier;
|
||||
|
||||
const numericValue = typeof value === "number" ? value : Number(value);
|
||||
const sqlOp = SQL_OPERATORS[operator];
|
||||
|
||||
if (!sqlOp) {
|
||||
return {};
|
||||
let valueNumberCondition: Prisma.FloatNullableFilter;
|
||||
|
||||
switch (operator) {
|
||||
case "greaterThan":
|
||||
valueNumberCondition = { gt: numericValue };
|
||||
break;
|
||||
case "greaterEqual":
|
||||
valueNumberCondition = { gte: numericValue };
|
||||
break;
|
||||
case "lessThan":
|
||||
valueNumberCondition = { lt: numericValue };
|
||||
break;
|
||||
case "lessEqual":
|
||||
valueNumberCondition = { lte: numericValue };
|
||||
break;
|
||||
default:
|
||||
return {};
|
||||
}
|
||||
|
||||
const matchingContactIds = await prisma.$queryRawUnsafe<{ contactId: string }[]>(
|
||||
const migratedFilter: Prisma.ContactWhereInput = {
|
||||
attributes: {
|
||||
some: {
|
||||
attributeKey: { key: contactAttributeKey },
|
||||
valueNumber: valueNumberCondition,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const hasUnmigratedRows = await prisma.contactAttribute.findFirst({
|
||||
where: {
|
||||
attributeKey: {
|
||||
key: contactAttributeKey,
|
||||
environmentId,
|
||||
dataType: "number",
|
||||
},
|
||||
valueNumber: null,
|
||||
},
|
||||
select: { id: true },
|
||||
});
|
||||
|
||||
if (!hasUnmigratedRows) {
|
||||
return migratedFilter;
|
||||
}
|
||||
|
||||
const sqlOp = SQL_OPERATORS[operator];
|
||||
const unmigratedMatchingIds = await prisma.$queryRawUnsafe<{ contactId: string }[]>(
|
||||
`
|
||||
SELECT DISTINCT ca."contactId"
|
||||
FROM "ContactAttribute" ca
|
||||
JOIN "ContactAttributeKey" cak ON ca."attributeKeyId" = cak.id
|
||||
WHERE cak.key = $1
|
||||
AND (
|
||||
(ca."valueNumber" IS NOT NULL AND ca."valueNumber" ${sqlOp} $2)
|
||||
OR
|
||||
(ca."valueNumber" IS NULL AND ca.value ~ $3 AND ca.value::double precision ${sqlOp} $2)
|
||||
)
|
||||
AND cak."environmentId" = $4
|
||||
AND cak."dataType" = 'number'
|
||||
AND ca."valueNumber" IS NULL
|
||||
AND ca.value ~ $3
|
||||
AND ca.value::double precision ${sqlOp} $2
|
||||
`,
|
||||
contactAttributeKey,
|
||||
numericValue,
|
||||
NUMBER_PATTERN_SQL
|
||||
NUMBER_PATTERN_SQL,
|
||||
environmentId
|
||||
);
|
||||
|
||||
const contactIds = matchingContactIds.map((r) => r.contactId);
|
||||
|
||||
if (contactIds.length === 0) {
|
||||
// Return an impossible condition so the filter correctly excludes all contacts
|
||||
return { id: "__NUMBER_FILTER_NO_MATCH__" };
|
||||
if (unmigratedMatchingIds.length === 0) {
|
||||
return migratedFilter;
|
||||
}
|
||||
|
||||
return { id: { in: contactIds } };
|
||||
const contactIds = unmigratedMatchingIds.map((r) => r.contactId);
|
||||
|
||||
return {
|
||||
OR: [migratedFilter, { id: { in: contactIds } }],
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Builds a Prisma where clause from a segment attribute filter
|
||||
*/
|
||||
const buildAttributeFilterWhereClause = async (
|
||||
filter: TSegmentAttributeFilter
|
||||
filter: TSegmentAttributeFilter,
|
||||
environmentId: string
|
||||
): Promise<Prisma.ContactWhereInput> => {
|
||||
const { root, qualifier, value } = filter;
|
||||
const { contactAttributeKey } = root;
|
||||
@@ -215,7 +258,7 @@ const buildAttributeFilterWhereClause = async (
|
||||
|
||||
// Handle number operators
|
||||
if (["greaterThan", "greaterEqual", "lessThan", "lessEqual"].includes(operator)) {
|
||||
return await buildNumberAttributeFilterWhereClause(filter);
|
||||
return await buildNumberAttributeFilterWhereClause(filter, environmentId);
|
||||
}
|
||||
|
||||
// For string operators, ensure value is a primitive (not an object or array)
|
||||
@@ -253,7 +296,8 @@ const buildAttributeFilterWhereClause = async (
|
||||
* Builds a Prisma where clause from a person filter
|
||||
*/
|
||||
const buildPersonFilterWhereClause = async (
|
||||
filter: TSegmentPersonFilter
|
||||
filter: TSegmentPersonFilter,
|
||||
environmentId: string
|
||||
): Promise<Prisma.ContactWhereInput> => {
|
||||
const { personIdentifier } = filter.root;
|
||||
|
||||
@@ -265,7 +309,7 @@ const buildPersonFilterWhereClause = async (
|
||||
contactAttributeKey: personIdentifier,
|
||||
},
|
||||
};
|
||||
return await buildAttributeFilterWhereClause(personFilter);
|
||||
return await buildAttributeFilterWhereClause(personFilter, environmentId);
|
||||
}
|
||||
|
||||
return {};
|
||||
@@ -314,6 +358,7 @@ const buildDeviceFilterWhereClause = (
|
||||
const buildSegmentFilterWhereClause = async (
|
||||
filter: TSegmentSegmentFilter,
|
||||
segmentPath: Set<string>,
|
||||
environmentId: string,
|
||||
deviceType?: "phone" | "desktop"
|
||||
): Promise<Prisma.ContactWhereInput> => {
|
||||
const { root } = filter;
|
||||
@@ -337,7 +382,7 @@ const buildSegmentFilterWhereClause = async (
|
||||
const newPath = new Set(segmentPath);
|
||||
newPath.add(segmentId);
|
||||
|
||||
return processFilters(segment.filters, newPath, deviceType);
|
||||
return processFilters(segment.filters, newPath, environmentId, deviceType);
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -346,19 +391,25 @@ const buildSegmentFilterWhereClause = async (
|
||||
const processSingleFilter = async (
|
||||
filter: TSegmentFilter,
|
||||
segmentPath: Set<string>,
|
||||
environmentId: string,
|
||||
deviceType?: "phone" | "desktop"
|
||||
): Promise<Prisma.ContactWhereInput> => {
|
||||
const { root } = filter;
|
||||
|
||||
switch (root.type) {
|
||||
case "attribute":
|
||||
return await buildAttributeFilterWhereClause(filter as TSegmentAttributeFilter);
|
||||
return await buildAttributeFilterWhereClause(filter as TSegmentAttributeFilter, environmentId);
|
||||
case "person":
|
||||
return await buildPersonFilterWhereClause(filter as TSegmentPersonFilter);
|
||||
return await buildPersonFilterWhereClause(filter as TSegmentPersonFilter, environmentId);
|
||||
case "device":
|
||||
return buildDeviceFilterWhereClause(filter as TSegmentDeviceFilter, deviceType);
|
||||
case "segment":
|
||||
return await buildSegmentFilterWhereClause(filter as TSegmentSegmentFilter, segmentPath, deviceType);
|
||||
return await buildSegmentFilterWhereClause(
|
||||
filter as TSegmentSegmentFilter,
|
||||
segmentPath,
|
||||
environmentId,
|
||||
deviceType
|
||||
);
|
||||
default:
|
||||
return {};
|
||||
}
|
||||
@@ -370,6 +421,7 @@ const processSingleFilter = async (
|
||||
const processFilters = async (
|
||||
filters: TBaseFilters,
|
||||
segmentPath: Set<string>,
|
||||
environmentId: string,
|
||||
deviceType?: "phone" | "desktop"
|
||||
): Promise<Prisma.ContactWhereInput> => {
|
||||
if (filters.length === 0) return {};
|
||||
@@ -386,10 +438,10 @@ const processFilters = async (
|
||||
// Process the resource based on its type
|
||||
if (isResourceFilter(resource)) {
|
||||
// If it's a single filter, process it directly
|
||||
whereClause = await processSingleFilter(resource, segmentPath, deviceType);
|
||||
whereClause = await processSingleFilter(resource, segmentPath, environmentId, deviceType);
|
||||
} else {
|
||||
// If it's a group of filters, process it recursively
|
||||
whereClause = await processFilters(resource, segmentPath, deviceType);
|
||||
whereClause = await processFilters(resource, segmentPath, environmentId, deviceType);
|
||||
}
|
||||
|
||||
if (Object.keys(whereClause).length === 0) continue;
|
||||
@@ -432,7 +484,7 @@ export const segmentFilterToPrismaQuery = reactCache(
|
||||
|
||||
// Initialize an empty stack for tracking the current evaluation path
|
||||
const segmentPath = new Set<string>([segmentId]);
|
||||
const filtersWhereClause = await processFilters(filters, segmentPath, deviceType);
|
||||
const filtersWhereClause = await processFilters(filters, segmentPath, environmentId, deviceType);
|
||||
|
||||
const whereClause = {
|
||||
AND: [baseWhereClause, filtersWhereClause],
|
||||
|
||||
@@ -37,6 +37,7 @@ vi.mock("@formbricks/database", () => ({
|
||||
create: vi.fn(),
|
||||
delete: vi.fn(),
|
||||
update: vi.fn(),
|
||||
upsert: vi.fn(),
|
||||
findFirst: vi.fn(),
|
||||
},
|
||||
survey: {
|
||||
@@ -206,6 +207,73 @@ describe("Segment Service Tests", () => {
|
||||
vi.mocked(prisma.segment.create).mockRejectedValue(new Error("DB error"));
|
||||
await expect(createSegment(mockSegmentCreateInput)).rejects.toThrow(Error);
|
||||
});
|
||||
|
||||
test("should upsert a private segment without surveyId", async () => {
|
||||
const privateInput: TSegmentCreateInput = {
|
||||
...mockSegmentCreateInput,
|
||||
isPrivate: true,
|
||||
};
|
||||
const privateSegmentPrisma = { ...mockSegmentPrisma, isPrivate: true };
|
||||
vi.mocked(prisma.segment.upsert).mockResolvedValue(privateSegmentPrisma);
|
||||
const segment = await createSegment(privateInput);
|
||||
expect(segment).toEqual({ ...mockSegment, isPrivate: true });
|
||||
expect(prisma.segment.upsert).toHaveBeenCalledWith({
|
||||
where: {
|
||||
environmentId_title: {
|
||||
environmentId,
|
||||
title: privateInput.title,
|
||||
},
|
||||
},
|
||||
create: {
|
||||
environmentId,
|
||||
title: privateInput.title,
|
||||
description: undefined,
|
||||
isPrivate: true,
|
||||
filters: [],
|
||||
},
|
||||
update: {
|
||||
description: undefined,
|
||||
filters: [],
|
||||
},
|
||||
select: selectSegment,
|
||||
});
|
||||
expect(prisma.segment.create).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test("should upsert a private segment with surveyId", async () => {
|
||||
const privateInputWithSurvey: TSegmentCreateInput = {
|
||||
...mockSegmentCreateInput,
|
||||
isPrivate: true,
|
||||
surveyId,
|
||||
};
|
||||
const privateSegmentPrisma = { ...mockSegmentPrisma, isPrivate: true };
|
||||
vi.mocked(prisma.segment.upsert).mockResolvedValue(privateSegmentPrisma);
|
||||
const segment = await createSegment(privateInputWithSurvey);
|
||||
expect(segment).toEqual({ ...mockSegment, isPrivate: true });
|
||||
expect(prisma.segment.upsert).toHaveBeenCalledWith({
|
||||
where: {
|
||||
environmentId_title: {
|
||||
environmentId,
|
||||
title: privateInputWithSurvey.title,
|
||||
},
|
||||
},
|
||||
create: {
|
||||
environmentId,
|
||||
title: privateInputWithSurvey.title,
|
||||
description: undefined,
|
||||
isPrivate: true,
|
||||
filters: [],
|
||||
surveys: { connect: { id: surveyId } },
|
||||
},
|
||||
update: {
|
||||
description: undefined,
|
||||
filters: [],
|
||||
surveys: { connect: { id: surveyId } },
|
||||
},
|
||||
select: selectSegment,
|
||||
});
|
||||
expect(prisma.segment.create).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe("cloneSegment", () => {
|
||||
|
||||
@@ -136,28 +136,48 @@ export const createSegment = async (segmentCreateInput: TSegmentCreateInput): Pr
|
||||
|
||||
const { description, environmentId, filters, isPrivate, surveyId, title } = segmentCreateInput;
|
||||
|
||||
let data: Prisma.SegmentCreateArgs["data"] = {
|
||||
environmentId,
|
||||
title,
|
||||
description,
|
||||
isPrivate,
|
||||
filters,
|
||||
};
|
||||
|
||||
if (surveyId) {
|
||||
data = {
|
||||
...data,
|
||||
surveys: {
|
||||
connect: {
|
||||
id: surveyId,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
const surveyConnect = surveyId ? { surveys: { connect: { id: surveyId } } } : {};
|
||||
|
||||
try {
|
||||
// Private segments use upsert because auto-save may have already created a
|
||||
// default (empty-filter) segment via connectOrCreate before the user publishes.
|
||||
// Without upsert the second create hits the (environmentId, title) unique constraint.
|
||||
if (isPrivate) {
|
||||
const segment = await prisma.segment.upsert({
|
||||
where: {
|
||||
environmentId_title: {
|
||||
environmentId,
|
||||
title,
|
||||
},
|
||||
},
|
||||
create: {
|
||||
environmentId,
|
||||
title,
|
||||
description,
|
||||
isPrivate,
|
||||
filters,
|
||||
...surveyConnect,
|
||||
},
|
||||
update: {
|
||||
description,
|
||||
filters,
|
||||
...surveyConnect,
|
||||
},
|
||||
select: selectSegment,
|
||||
});
|
||||
|
||||
return transformPrismaSegment(segment);
|
||||
}
|
||||
|
||||
const segment = await prisma.segment.create({
|
||||
data,
|
||||
data: {
|
||||
environmentId,
|
||||
title,
|
||||
description,
|
||||
isPrivate,
|
||||
filters,
|
||||
...surveyConnect,
|
||||
},
|
||||
select: selectSegment,
|
||||
});
|
||||
|
||||
|
||||
@@ -8,6 +8,8 @@ import {
|
||||
isValidFileTypeForExtension,
|
||||
isValidImageFile,
|
||||
resolveStorageUrl,
|
||||
resolveStorageUrlAuto,
|
||||
resolveStorageUrlsInObject,
|
||||
sanitizeFileName,
|
||||
validateFileUploads,
|
||||
validateSingleFile,
|
||||
@@ -406,7 +408,7 @@ describe("storage utils", () => {
|
||||
expect(resolveStorageUrl("")).toBe("");
|
||||
});
|
||||
|
||||
test("should return absolute URL unchanged (backward compatibility)", () => {
|
||||
test("should return absolute URL unchanged", () => {
|
||||
const httpsUrl = "https://example.com/storage/env-123/public/image.jpg";
|
||||
const httpUrl = "http://example.com/storage/env-123/public/image.jpg";
|
||||
|
||||
@@ -415,14 +417,12 @@ describe("storage utils", () => {
|
||||
});
|
||||
|
||||
test("should resolve relative /storage/ path to absolute URL", async () => {
|
||||
// Use actual implementation with mocked dependencies
|
||||
const { resolveStorageUrl: actualResolveStorageUrl } =
|
||||
await vi.importActual<typeof import("@/modules/storage/utils")>("@/modules/storage/utils");
|
||||
|
||||
const relativePath = "/storage/env-123/public/image.jpg";
|
||||
const result = actualResolveStorageUrl(relativePath);
|
||||
|
||||
// Should prepend the base URL (from mocked WEBAPP_URL or getPublicDomain)
|
||||
expect(result).toContain("/storage/env-123/public/image.jpg");
|
||||
expect(result.startsWith("http")).toBe(true);
|
||||
});
|
||||
@@ -432,4 +432,209 @@ describe("storage utils", () => {
|
||||
expect(resolveStorageUrl("relative/path.jpg")).toBe("relative/path.jpg");
|
||||
});
|
||||
});
|
||||
|
||||
describe("resolveStorageUrlAuto", () => {
|
||||
test("should return non-storage strings unchanged", () => {
|
||||
expect(resolveStorageUrlAuto("hello world")).toBe("hello world");
|
||||
expect(resolveStorageUrlAuto("/some/other/path")).toBe("/some/other/path");
|
||||
expect(resolveStorageUrlAuto("https://example.com/image.jpg")).toBe("https://example.com/image.jpg");
|
||||
});
|
||||
|
||||
test("should NOT transform free-text values that merely start with /storage/", () => {
|
||||
expect(resolveStorageUrlAuto("/storage/help")).toBe("/storage/help");
|
||||
expect(resolveStorageUrlAuto("/storage/")).toBe("/storage/");
|
||||
expect(resolveStorageUrlAuto("/storage/some-text")).toBe("/storage/some-text");
|
||||
expect(resolveStorageUrlAuto("/storage/foo/bar")).toBe("/storage/foo/bar");
|
||||
});
|
||||
|
||||
test("should resolve public storage URL", async () => {
|
||||
const { resolveStorageUrlAuto: actual } =
|
||||
await vi.importActual<typeof import("@/modules/storage/utils")>("@/modules/storage/utils");
|
||||
|
||||
const result = actual("/storage/env-123/public/image.jpg");
|
||||
expect(result).toContain("/storage/env-123/public/image.jpg");
|
||||
expect(result.startsWith("http")).toBe(true);
|
||||
});
|
||||
|
||||
test("should detect private access type from URL path", () => {
|
||||
const privateUrl = "/storage/env-123/private/file.pdf";
|
||||
const publicUrl = "/storage/env-123/public/image.jpg";
|
||||
|
||||
expect(privateUrl.includes("/private/")).toBe(true);
|
||||
expect(publicUrl.includes("/private/")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("resolveStorageUrlsInObject", () => {
|
||||
test("should return null and undefined as-is", () => {
|
||||
expect(resolveStorageUrlsInObject(null)).toBeNull();
|
||||
expect(resolveStorageUrlsInObject(undefined)).toBeUndefined();
|
||||
});
|
||||
|
||||
test("should return primitive values unchanged", () => {
|
||||
expect(resolveStorageUrlsInObject(42)).toBe(42);
|
||||
expect(resolveStorageUrlsInObject(true)).toBe(true);
|
||||
expect(resolveStorageUrlsInObject("hello")).toBe("hello");
|
||||
});
|
||||
|
||||
test("should NOT transform free-text that merely starts with /storage/", () => {
|
||||
expect(resolveStorageUrlsInObject("/storage/help")).toBe("/storage/help");
|
||||
expect(resolveStorageUrlsInObject("/storage/")).toBe("/storage/");
|
||||
|
||||
const input = {
|
||||
questionId1: "/storage/",
|
||||
questionId2: "/storage/help",
|
||||
questionId3: "/storage/some-text",
|
||||
questionId4: "/storage/foo/bar",
|
||||
realUrl: "/storage/env-123/public/image.jpg",
|
||||
};
|
||||
const result = resolveStorageUrlsInObject(input);
|
||||
expect(result.questionId1).toBe("/storage/");
|
||||
expect(result.questionId2).toBe("/storage/help");
|
||||
expect(result.questionId3).toBe("/storage/some-text");
|
||||
expect(result.questionId4).toBe("/storage/foo/bar");
|
||||
// realUrl still gets resolved because it matches the actual format
|
||||
expect(result.realUrl).not.toBe("/storage/env-123/public/image.jpg");
|
||||
});
|
||||
|
||||
test("should preserve Date instances", () => {
|
||||
const date = new Date("2026-01-01");
|
||||
expect(resolveStorageUrlsInObject(date)).toBe(date);
|
||||
});
|
||||
|
||||
test("should resolve storage URL strings", async () => {
|
||||
const { resolveStorageUrlsInObject: actual } =
|
||||
await vi.importActual<typeof import("@/modules/storage/utils")>("@/modules/storage/utils");
|
||||
|
||||
const result = actual("/storage/env-123/public/image.jpg");
|
||||
expect(typeof result).toBe("string");
|
||||
expect(result).toContain("/storage/env-123/public/image.jpg");
|
||||
expect((result as string).startsWith("http")).toBe(true);
|
||||
});
|
||||
|
||||
test("should resolve URLs in arrays", async () => {
|
||||
const { resolveStorageUrlsInObject: actual } =
|
||||
await vi.importActual<typeof import("@/modules/storage/utils")>("@/modules/storage/utils");
|
||||
|
||||
const input = ["/storage/env-123/public/a.jpg", "plain text"];
|
||||
const result = actual(input);
|
||||
|
||||
expect(result[0]).toContain("/storage/env-123/public/a.jpg");
|
||||
expect(result[0].startsWith("http")).toBe(true);
|
||||
expect(result[1]).toBe("plain text");
|
||||
});
|
||||
|
||||
test("should resolve URLs in nested objects", async () => {
|
||||
const { resolveStorageUrlsInObject: actual } =
|
||||
await vi.importActual<typeof import("@/modules/storage/utils")>("@/modules/storage/utils");
|
||||
|
||||
const input = {
|
||||
name: "Test Survey",
|
||||
welcomeCard: {
|
||||
fileUrl: "/storage/env-123/public/welcome.png",
|
||||
headline: "Hello",
|
||||
},
|
||||
elements: [
|
||||
{
|
||||
imageUrl: "/storage/env-123/public/q1.jpg",
|
||||
choices: [
|
||||
{ id: "c1", imageUrl: "/storage/env-123/public/choice1.jpg" },
|
||||
{ id: "c2", imageUrl: "https://external.com/image.jpg" },
|
||||
],
|
||||
},
|
||||
],
|
||||
count: 5,
|
||||
createdAt: new Date("2026-01-01"),
|
||||
};
|
||||
|
||||
const result = actual(input);
|
||||
|
||||
expect(result.welcomeCard.fileUrl.startsWith("http")).toBe(true);
|
||||
expect(result.welcomeCard.headline).toBe("Hello");
|
||||
expect(result.elements[0].imageUrl.startsWith("http")).toBe(true);
|
||||
expect(result.elements[0].choices[0].imageUrl.startsWith("http")).toBe(true);
|
||||
expect(result.elements[0].choices[1].imageUrl).toBe("https://external.com/image.jpg");
|
||||
expect(result.count).toBe(5);
|
||||
expect(result.createdAt).toEqual(new Date("2026-01-01"));
|
||||
expect(result.name).toBe("Test Survey");
|
||||
});
|
||||
|
||||
test("should resolve URLs in deeply nested objects", async () => {
|
||||
const { resolveStorageUrlsInObject: actual } =
|
||||
await vi.importActual<typeof import("@/modules/storage/utils")>("@/modules/storage/utils");
|
||||
|
||||
const input = {
|
||||
level1: {
|
||||
level2: {
|
||||
level3: {
|
||||
level4: {
|
||||
level5: {
|
||||
imageUrl: "/storage/env-123/public/deep.png",
|
||||
items: [
|
||||
{
|
||||
nested: {
|
||||
url: "/storage/env-123/public/nested.jpg",
|
||||
label: "keep me",
|
||||
},
|
||||
},
|
||||
"plain string",
|
||||
42,
|
||||
null,
|
||||
],
|
||||
},
|
||||
},
|
||||
sibling: "/storage/env-123/public/sibling.png",
|
||||
},
|
||||
},
|
||||
untouched: { a: { b: { c: "no change" } } },
|
||||
},
|
||||
};
|
||||
|
||||
const result = actual(input);
|
||||
|
||||
expect(result.level1.level2.level3.level4.level5.imageUrl).toContain(
|
||||
"/storage/env-123/public/deep.png"
|
||||
);
|
||||
expect(result.level1.level2.level3.level4.level5.imageUrl.startsWith("http")).toBe(true);
|
||||
|
||||
// @ts-expect-error - items is an array of unknown types
|
||||
expect(result.level1.level2.level3.level4.level5.items[0].nested.url).toContain(
|
||||
"/storage/env-123/public/nested.jpg"
|
||||
);
|
||||
// @ts-expect-error - items is an array of unknown types
|
||||
expect(result.level1.level2.level3.level4.level5.items[0].nested.url.startsWith("http")).toBe(true);
|
||||
// @ts-expect-error - items is an array of unknown types
|
||||
expect(result.level1.level2.level3.level4.level5.items[0].nested.label).toBe("keep me");
|
||||
|
||||
expect(result.level1.level2.level3.level4.level5.items[1]).toBe("plain string");
|
||||
expect(result.level1.level2.level3.level4.level5.items[2]).toBe(42);
|
||||
expect(result.level1.level2.level3.level4.level5.items[3]).toBeNull();
|
||||
|
||||
expect(result.level1.level2.level3.sibling).toContain("/storage/env-123/public/sibling.png");
|
||||
expect(result.level1.level2.level3.sibling.startsWith("http")).toBe(true);
|
||||
|
||||
expect(result.level1.untouched.a.b.c).toBe("no change");
|
||||
});
|
||||
|
||||
test("should handle response data with file upload URLs", async () => {
|
||||
const { resolveStorageUrlsInObject: actual } =
|
||||
await vi.importActual<typeof import("@/modules/storage/utils")>("@/modules/storage/utils");
|
||||
|
||||
const responseData = {
|
||||
questionId1: "text answer",
|
||||
questionId2: 42,
|
||||
fileUploadId: ["/storage/env-123/public/doc.pdf", "/storage/env-123/public/img.png"],
|
||||
};
|
||||
|
||||
const result = actual(responseData);
|
||||
|
||||
expect(result.questionId1).toBe("text answer");
|
||||
expect(result.questionId2).toBe(42);
|
||||
const fileUrls = result.fileUploadId;
|
||||
expect(fileUrls[0]).toContain("/storage/env-123/public/doc.pdf");
|
||||
expect(fileUrls[0].startsWith("http")).toBe(true);
|
||||
expect(fileUrls[1]).toContain("/storage/env-123/public/img.png");
|
||||
expect(fileUrls[1].startsWith("http")).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -151,7 +151,7 @@ export const getErrorResponseFromStorageError = (
|
||||
|
||||
/**
|
||||
* Resolves a storage URL to an absolute URL.
|
||||
* - If already absolute, returns as-is (backward compatibility for old data)
|
||||
* - If already absolute, returns as-is
|
||||
* - If relative (/storage/...), prepends the appropriate base URL
|
||||
* @param url The storage URL (relative or absolute)
|
||||
* @param accessType The access type to determine which base URL to use (defaults to "public")
|
||||
@@ -163,7 +163,7 @@ export const resolveStorageUrl = (
|
||||
): string => {
|
||||
if (!url) return "";
|
||||
|
||||
// Already absolute URL - return as-is (backward compatibility for old data)
|
||||
// Already absolute URL - return as-is
|
||||
if (url.startsWith("http://") || url.startsWith("https://")) {
|
||||
return url;
|
||||
}
|
||||
@@ -176,3 +176,41 @@ export const resolveStorageUrl = (
|
||||
|
||||
return url;
|
||||
};
|
||||
|
||||
// Matches the actual storage URL format: /storage/{environmentId}/{public|private}/{filename...}
|
||||
const STORAGE_URL_PATTERN = /^\/storage\/[^/]+\/(public|private)\/.+/;
|
||||
|
||||
const isStorageUrl = (value: string): boolean => STORAGE_URL_PATTERN.test(value);
|
||||
|
||||
export const resolveStorageUrlAuto = (url: string): string => {
|
||||
if (!isStorageUrl(url)) return url;
|
||||
const accessType = url.includes("/private/") ? "private" : "public";
|
||||
return resolveStorageUrl(url, accessType);
|
||||
};
|
||||
|
||||
/**
|
||||
* Recursively walks an object/array and resolves all relative storage URLs
|
||||
* Preserves the original structure; skips Date instances and non-object primitives.
|
||||
*/
|
||||
export const resolveStorageUrlsInObject = <T>(obj: T): T => {
|
||||
if (obj === null || obj === undefined) return obj;
|
||||
|
||||
if (typeof obj === "string") {
|
||||
return resolveStorageUrlAuto(obj) as T;
|
||||
}
|
||||
|
||||
if (typeof obj !== "object") return obj;
|
||||
|
||||
if (obj instanceof Date) return obj;
|
||||
|
||||
if (Array.isArray(obj)) {
|
||||
return obj.map((item) => resolveStorageUrlsInObject(item)) as T;
|
||||
}
|
||||
|
||||
const result: Record<string, unknown> = {};
|
||||
for (const [key, value] of Object.entries(obj as Record<string, unknown>)) {
|
||||
result[key] = resolveStorageUrlsInObject(value);
|
||||
}
|
||||
|
||||
return result as T;
|
||||
};
|
||||
|
||||
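A short usage sketch of the new helper as it is applied to API response payloads in the routes above; the resolved domain in the comments is a placeholder that depends on the instance's configured base URL:

```typescript
import { resolveStorageUrlsInObject } from "@/modules/storage/utils";

const responseData = {
  openTextAnswer: "free text",
  note: "/storage/help", // does not match /storage/{env}/{public|private}/{file}, so it is left alone
  fileUpload: ["/storage/env-123/public/doc.pdf"],
};

const resolved = resolveStorageUrlsInObject(responseData);
// resolved.fileUpload[0]  -> "https://<your-instance-domain>/storage/env-123/public/doc.pdf"
// resolved.note           -> "/storage/help" (unchanged)
// resolved.openTextAnswer -> "free text" (unchanged)
```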
@@ -1,13 +1,3 @@
|
||||
import { Viewport } from "next";
|
||||
|
||||
export const viewport: Viewport = {
|
||||
width: "device-width",
|
||||
initialScale: 1.0,
|
||||
maximumScale: 1.0,
|
||||
userScalable: false,
|
||||
viewportFit: "contain",
|
||||
};
|
||||
|
||||
export const LinkSurveyLayout = ({ children }) => {
|
||||
return <div className="h-dvh">{children}</div>;
|
||||
};
|
||||
|
||||
@@ -4,12 +4,182 @@ description: "Formbricks Self-hosted version migration"
|
||||
icon: "arrow-right"
|
||||
---
|
||||
|
||||
## v4.7
|
||||
|
||||
Formbricks v4.7 introduces **typed contact attributes** with native `number` and `date` data types. This enables comparison-based segment filters (e.g. "signup date before 2025-01-01") that were previously not possible with string-only attribute values.
|
||||
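For illustration, an attribute filter using one of the new comparison operators looks roughly like this (the shape mirrors the segment test fixtures in this release; treat it as a sketch, not a schema reference):

```typescript
// "contacts whose `age` attribute is greater than 25" - only expressible now that
// `age` can be stored as a number instead of a string
const filters = [
  {
    id: "filter_1",
    connector: null,
    resource: {
      id: "attr_1",
      root: { type: "attribute", contactAttributeKey: "age" },
      value: 25,
      qualifier: { operator: "greaterThan" },
    },
  },
];
```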
|
||||
### What Happens Automatically
|
||||
|
||||
When Formbricks v4.7 starts for the first time, the data migration will:
|
||||
|
||||
1. Analyze all existing contact attribute keys and infer their data types (`text`, `number`, or `date`) based on the stored values
|
||||
2. Update the `ContactAttributeKey` table with the detected `dataType` for each key
|
||||
3. **If your instance has fewer than 1,000,000 contact attribute rows**: backfill the new `valueNumber` and `valueDate` columns inline. No manual action is needed.
|
||||
4. **If your instance has 1,000,000 or more contact attribute rows**: the value backfill is skipped to avoid hitting the migration timeout. You will need to run a standalone backfill script after the upgrade.
|
||||
|
||||
<Info>
|
||||
Most self-hosted instances have far fewer than 1,000,000 contact attribute rows (a typical setup with 100K
|
||||
contacts and 5-10 attributes each lands around 500K-1M rows). If you are below the threshold, the migration
|
||||
handles everything automatically and you can skip the manual backfill step below.
|
||||
</Info>
|
||||
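If you want to know ahead of the upgrade which side of the threshold you are on, a simple count of the contact attribute rows is enough. A rough sketch using the Formbricks Prisma client (the model name comes from the codebase; run it however you normally execute one-off scripts against your database):

```typescript
import { prisma } from "@formbricks/database";

const main = async () => {
  const rowCount = await prisma.contactAttribute.count();
  console.log(
    rowCount < 1_000_000
      ? `${rowCount} rows: the inline backfill will run automatically`
      : `${rowCount} rows: plan to run the standalone backfill script (step 4 below)`
  );
};

void main();
```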
|
||||
### Steps to Migrate
|
||||
|
||||
**1. Backup your Database**
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Docker">
|
||||
Before running these steps, navigate to the `formbricks` directory where your `docker-compose.yml` file is located.
|
||||
|
||||
```bash
|
||||
docker exec formbricks-postgres-1 pg_dump -Fc -U postgres -d formbricks > formbricks_pre_v4.7_$(date +%Y%m%d_%H%M%S).dump
|
||||
```
|
||||
|
||||
<Info>
|
||||
If you run into "**No such container**", use `docker ps` to find your container name, e.g.
|
||||
`formbricks_postgres_1`.
|
||||
</Info>
|
||||
</Tab>
|
||||
<Tab title="Kubernetes">
|
||||
If you are using the **in-cluster PostgreSQL** deployed by the Helm chart:
|
||||
|
||||
```bash
|
||||
kubectl exec -n formbricks formbricks-postgresql-0 -- pg_dump -Fc -U formbricks -d formbricks > formbricks_pre_v4.7_$(date +%Y%m%d_%H%M%S).dump
|
||||
```
|
||||
|
||||
<Info>
|
||||
If your PostgreSQL pod has a different name, run `kubectl get pods -n formbricks` to find it.
|
||||
</Info>
|
||||
|
||||
If you are using a **managed PostgreSQL** service (e.g. AWS RDS, Cloud SQL), use your provider's backup/snapshot feature or run `pg_dump` directly against the external host.
|
||||
</Tab>
|
||||
</Tabs>

**2. Upgrade to Formbricks v4.7**

<Tabs>
<Tab title="Docker">
```bash
# Pull the latest version
docker compose pull

# Stop the current instance
docker compose down

# Start with Formbricks v4.7
docker compose up -d
```
</Tab>
<Tab title="Kubernetes">
```bash
helm upgrade formbricks oci://ghcr.io/formbricks/helm-charts/formbricks \
  -n formbricks \
  --set deployment.image.tag=v4.7.0
```

<Info>
The Helm chart includes a migration Job that automatically runs Prisma schema migrations as a
PreSync hook before the new pods start. No manual migration step is needed.
</Info>
</Tab>
</Tabs>

**3. Check the Migration Logs**

After Formbricks starts, check the logs to see whether the value backfill was completed or skipped:

<Tabs>
<Tab title="Docker">
```bash
docker compose logs formbricks | grep -i "backfill"
```
</Tab>
<Tab title="Kubernetes">
```bash
# Check the application pod logs
kubectl logs -n formbricks -l app.kubernetes.io/name=formbricks --tail=200 | grep -i "backfill"
```

If the Helm migration Job ran, you can also inspect its logs:

```bash
kubectl logs -n formbricks job/formbricks-migration
```
</Tab>
</Tabs>

If you see a message like `Skipping value backfill (X rows >= 1000000 threshold)`, proceed to step 4. Otherwise, the migration is complete and no further action is needed.

**4. Run the Backfill Script (large datasets only)**

If the migration skipped the value backfill, run the standalone backfill script inside the running Formbricks container:

<Tabs>
<Tab title="Docker">
```bash
docker exec formbricks node packages/database/dist/scripts/backfill-attribute-values.js
```

<Info>Replace `formbricks` with your actual container name if it differs. Use `docker ps` to find it.</Info>
</Tab>
<Tab title="Kubernetes">
```bash
kubectl exec -n formbricks deploy/formbricks -- node packages/database/dist/scripts/backfill-attribute-values.js
```

<Info>
If your Formbricks deployment has a different name, run `kubectl get deploy -n formbricks` to find it.
</Info>
</Tab>
</Tabs>

The script will output progress as it runs:

```
========================================
Attribute Value Backfill Script
========================================

Fetching number-type attribute keys...
Found 12 number-type keys. Backfilling valueNumber...
Number backfill progress: 10/12 keys (48230 rows updated)
Number backfill progress: 12/12 keys (52104 rows updated)

Fetching date-type attribute keys...
Found 5 date-type keys. Backfilling valueDate...
Date backfill progress: 5/5 keys (31200 rows updated)

========================================
Backfill Complete!
========================================
valueNumber rows updated: 52104
valueDate rows updated: 31200
Duration: 42.3s
========================================
```

Key characteristics of the backfill script (a rough sketch of the batching pattern follows this list):

- **Safe to run while Formbricks is live** -- it does not lock the entire table or wrap work in a long transaction
- **Idempotent** -- it only updates rows where the typed columns are still `NULL`, so you can safely run it multiple times
- **Resumable** -- each batch commits independently, so if the process is interrupted you can re-run it and it picks up where it left off
- **No timeout risk** -- unlike the migration, this script runs outside the migration transaction and has no time limit
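
To make these properties concrete, here is a rough, hypothetical sketch of the batched update pattern in TypeScript with Prisma. Only the `valueNumber`/`valueDate` columns are named in this guide; the `ContactAttribute` table and the `id`, `value`, and `attributeKeyId` columns are assumptions for illustration, and the real script may differ:

```ts
import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();
const BATCH_SIZE = 10_000;

// Backfill valueNumber for one attribute key, one batch at a time.
const backfillNumberValuesForKey = async (attributeKeyId: string): Promise<number> => {
  let totalUpdated = 0;
  for (;;) {
    // Each statement runs in its own implicit transaction, so progress is
    // committed batch by batch and an interrupted run can simply be restarted.
    const updated = await prisma.$executeRaw`
      UPDATE "ContactAttribute"
      SET "valueNumber" = "value"::numeric
      WHERE "id" IN (
        SELECT "id" FROM "ContactAttribute"
        WHERE "attributeKeyId" = ${attributeKeyId}
          AND "valueNumber" IS NULL -- idempotent: already backfilled rows are skipped
          AND "value" ~ '^-?[0-9]+(\\.[0-9]+)?$'
        LIMIT ${BATCH_SIZE}
      )`;
    totalUpdated += updated;
    if (updated < BATCH_SIZE) break; // last (partial) batch processed
  }
  return totalUpdated;
};
```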

**5. Verify the Upgrade**

- Access your Formbricks instance at the same URL as before
- If you use contact segments with number or date filters, verify they return the expected results
- Check that existing surveys and response data are intact

---

## v4.0

<Warning>
**Important: Migration Required**

Formbricks 4 introduces additional requirements for self-hosting setups and makes a dedicated Redis cache as well as S3-compatible file storage mandatory.

</Warning>

Formbricks 4.0 is a **major milestone** that sets up the technical foundation for future iterations and feature improvements. This release focuses on modernizing core infrastructure components to improve reliability, scalability, and enable advanced features going forward.
@@ -17,9 +187,11 @@ Formbricks 4.0 is a **major milestone** that sets up the technical foundation fo
### What's New in Formbricks 4.0

**🚀 New Enterprise Features:**

- **Quotas Management**: Advanced quota controls for enterprise users

**🏗️ Technical Foundation Improvements:**

- **Enhanced File Storage**: Improved file handling with better performance and reliability
- **Improved Caching**: New caching functionality improving speed, extensibility and reliability
- **Database Optimization**: Removal of unused database tables and fields for better performance
@@ -39,7 +211,8 @@ These services are already included in the updated one-click setup for self-host
We know this represents more moving parts in your infrastructure and might even introduce more complexity in hosting Formbricks, and we don't take this decision lightly. As Formbricks grows into a comprehensive Survey and Experience Management platform, we've reached a point where the simple, single-service approach was holding back our ability to deliver the reliable, feature-rich product our users demand and deserve.

By moving to dedicated, professional-grade services for these critical functions, we're building the foundation needed to deliver:

- **Enterprise-grade reliability** with proper redundancy and backup capabilities
- **Advanced features** that require sophisticated caching and file processing
- **Better performance** through optimized, dedicated services
- **Future scalability** to support larger deployments and more complex use cases without the need to maintain two different approaches
@@ -52,7 +225,7 @@ Additional migration steps are needed if you are using a self-hosted Formbricks

### One-Click Setup

For users of our official one-click setup, we provide an automated migration script:

```bash
# Download the latest script
@@ -67,11 +240,11 @@ chmod +x migrate-to-v4.sh
```

This script guides you through the infrastructure migration and does the following:

- Adds a Redis service to your setup and configures it
- Adds a MinIO service (an open-source S3 alternative) to your setup, configures it, and migrates local files to it
- Pulls the latest Formbricks image and updates your instance

### Manual Setup

If you use a different setup to host your Formbricks instance, you need to make the necessary adjustments to run Formbricks 4.0.
@@ -87,6 +260,7 @@ You need to configure the `REDIS_URL` environment variable and point it to your
To use file storage (e.g., file upload questions, image choice questions, custom survey backgrounds, etc.), you need to have S3-compatible file storage set up and connected to Formbricks.

Formbricks supports multiple storage providers (among many other S3-compatible storages):

- AWS S3
- Digital Ocean Spaces
- Hetzner Object Storage
@@ -101,6 +275,7 @@ Please make sure to set up a storage bucket with one of these solutions and then
S3_BUCKET_NAME: formbricks-uploads
S3_ENDPOINT_URL: http://minio:9000 # not needed for AWS S3
```

#### Upgrade Process

**1. Backup your Database**
@@ -112,8 +287,8 @@ docker exec formbricks-postgres-1 pg_dump -Fc -U postgres -d formbricks > formbr
```

<Info>
If you run into "**No such container**", use `docker ps` to find your container name, e.g.
`formbricks_postgres_1`.
</Info>

**2. Upgrade to Formbricks 4.0**
@@ -134,6 +309,7 @@ docker compose up -d
**3. Automatic Database Migration**

When you start Formbricks 4.0 for the first time, it will **automatically**:

- Detect and apply required database schema updates
- Remove unused database tables and fields
- Optimize the database structure for better performance

@@ -85,6 +85,7 @@ When PUBLIC_URL is configured, the following routes are automatically served fro

- `/s/{surveyId}` - Individual survey access
- `/c/{jwt}` - Personalized link survey access (JWT-based access)
- `/p/{survey-slug}` - Pretty URL survey access
- Embedded survey endpoints

#### API Routes

@@ -137,6 +137,11 @@ const checkRequiredField = (
    return null;
  }

  // CTA elements never block progression (informational only)
  if (element.type === TSurveyElementTypeEnum.CTA) {
    return null;
  }

  if (element.type === TSurveyElementTypeEnum.Ranking) {
    return validateRequiredRanking(value, t);
  }

@@ -16,5 +16,5 @@ export type TContactAttribute = z.infer<typeof ZContactAttribute>;
export const ZContactAttributes = z.record(z.string());
export type TContactAttributes = z.infer<typeof ZContactAttributes>;

export const ZContactAttributesInput = z.record(z.union([z.string(), z.number()]));
export const ZContactAttributesInput = z.record(z.union([z.string(), z.number(), z.boolean()]));
export type TContactAttributesInput = z.infer<typeof ZContactAttributesInput>;