Mirror of https://github.com/formbricks/formbricks.git (synced 2025-12-22 06:00:51 -06:00)

Compare commits: fix-hidden ... x-frame-op (26 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | d4f155b6bc |  |
|  | da001834f5 |  |
|  | f54352dd82 |  |
|  | 0fba0fae73 |  |
|  | 406ec88515 |  |
|  | b97957d166 |  |
|  | 655ad6b9e0 |  |
|  | f5ce42fc2d |  |
|  | 709cdf260d |  |
|  | 5c583028e0 |  |
|  | c70008d1be |  |
|  | 13fa716fe8 |  |
|  | c3af5b428f |  |
|  | 40e2f28e94 |  |
|  | 2964f2e079 |  |
|  | e1a5291123 |  |
|  | ef41f35209 |  |
|  | 2f64b202c1 |  |
|  | 2500c739ae |  |
|  | 63a9a6135b |  |
|  | 417005c6e9 |  |
|  | cd1739c901 |  |
|  | 709917eb8f |  |
|  | 3ba70122d5 |  |
|  | 5ff025543e |  |
|  | 896d5bad12 |  |
@@ -80,6 +80,9 @@ S3_ENDPOINT_URL=
 # Force path style for S3 compatible storage (0 for disabled, 1 for enabled)
 S3_FORCE_PATH_STYLE=0
 
+# Set this URL to add a custom domain to your survey links(default is WEBAPP_URL)
+# SURVEY_URL=https://survey.example.com
+
 #####################
 # Disable Features #
 #####################
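The new `SURVEY_URL` entry is optional: later hunks in this compare replace `WEBAPP_URL`-based link building with a `getSurveyDomain()` helper from `@formbricks/lib/getSurveyUrl`. That helper's source is not part of this compare, so the following is only a hedged sketch of the fallback the comment above describes (custom survey domain when set, otherwise the web app URL):

```typescript
// Hypothetical sketch of the fallback described by the .env comment above;
// the real getSurveyDomain() in @formbricks/lib/getSurveyUrl is not shown in this diff.
const WEBAPP_URL = process.env.WEBAPP_URL ?? "http://localhost:3000";
const SURVEY_URL = process.env.SURVEY_URL; // optional custom survey link domain

export const getSurveyDomain = (): string => SURVEY_URL || WEBAPP_URL;

// Survey links are then built the way later hunks do:
const exampleLink = `${getSurveyDomain()}/s/someSurveyId`;
```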
1  .github/actions/cache-build-web/action.yml  vendored
@@ -56,6 +56,7 @@ runs:
     - name: Fill ENCRYPTION_KEY, ENTERPRISE_LICENSE_KEY and E2E_TESTING in .env
       run: |
         RANDOM_KEY=$(openssl rand -hex 32)
+        sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
         echo "E2E_TESTING=${{ inputs.e2e_testing_mode }}" >> .env
       shell: bash
 
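For reference, the `openssl rand -hex 32` call above produces a 64-character hex string (32 random bytes). A Node/TypeScript equivalent, shown only as an illustration and not part of the change, looks like this:

```typescript
import { randomBytes } from "node:crypto";

// Same shape of key as `openssl rand -hex 32`: 32 random bytes, hex-encoded to 64 characters.
const encryptionKey = randomBytes(32).toString("hex");
console.log(encryptionKey.length); // 64
```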
84  .github/dependabot.yml  vendored  Normal file
@@ -0,0 +1,84 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
+
+version: 2
+updates:
+  - package-ecosystem: "npm" # For pnpm monorepos, use npm ecosystem
+    directory: "/" # Root package.json
+    schedule:
+      interval: "weekly"
+    versioning-strategy: increase
+
+  # Apps directory packages
+  - package-ecosystem: "npm"
+    directory: "/apps/demo"
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: "npm"
+    directory: "/apps/demo-react-native"
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: "npm"
+    directory: "/apps/storybook"
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: "npm"
+    directory: "/apps/web"
+    schedule:
+      interval: "weekly"
+
+  # Packages directory
+  - package-ecosystem: "npm"
+    directory: "/packages/database"
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: "npm"
+    directory: "/packages/lib"
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: "npm"
+    directory: "/packages/types"
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: "npm"
+    directory: "/packages/config-eslint"
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: "npm"
+    directory: "/packages/config-prettier"
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: "npm"
+    directory: "/packages/config-typescript"
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: "npm"
+    directory: "/packages/js-core"
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: "npm"
+    directory: "/packages/surveys"
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: "npm"
+    directory: "/packages/logger"
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
33  .github/workflows/cron-surveyStatusUpdate.yml  vendored
@@ -1,33 +0,0 @@
-name: Cron - Survey status update
-
-on:
-  workflow_dispatch:
-  # "Scheduled workflows run on the latest commit on the default or base branch."
-  # — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
-  schedule:
-    # Runs "At 00:00." (see https://crontab.guru)
-    - cron: "0 0 * * *"
-
-permissions:
-  contents: read
-
-jobs:
-  cron-weeklySummary:
-    env:
-      APP_URL: ${{ secrets.APP_URL }}
-      CRON_SECRET: ${{ secrets.CRON_SECRET }}
-    runs-on: ubuntu-latest
-    steps:
-      - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
-        with:
-          egress-policy: audit
-
-      - name: cURL request
-        if: ${{ env.APP_URL && env.CRON_SECRET }}
-        run: |
-          curl ${{ env.APP_URL }}/api/cron/survey-status \
-            -X POST \
-            -H 'content-type: application/json' \
-            -H 'x-api-key: ${{ env.CRON_SECRET }}' \
-            --fail
33  .github/workflows/cron-weeklySummary.yml  vendored
@@ -1,33 +0,0 @@
-name: Cron - Weekly summary
-
-on:
-  workflow_dispatch:
-  # "Scheduled workflows run on the latest commit on the default or base branch."
-  # — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
-  schedule:
-    # Runs “At 08:00 on Monday.” (see https://crontab.guru)
-    - cron: "0 8 * * 1"
-permissions:
-  contents: read
-
-jobs:
-  cron-weeklySummary:
-    permissions:
-      contents: read
-    env:
-      APP_URL: ${{ secrets.APP_URL }}
-      CRON_SECRET: ${{ secrets.CRON_SECRET }}
-    runs-on: ubuntu-latest
-    steps:
-      - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481
-        with:
-          egress-policy: audit
-      - name: cURL request
-        if: ${{ env.APP_URL && env.CRON_SECRET }}
-        run: |
-          curl ${{ env.APP_URL }}/api/cron/weekly-summary \
-            -X POST \
-            -H 'content-type: application/json' \
-            -H 'x-api-key: ${{ env.CRON_SECRET }}' \
-            --fail
@@ -15,7 +15,6 @@ env:
   IMAGE_NAME: ${{ github.repository }}-experimental
   TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
   TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
-  DATABASE_URL: "postgresql://postgres:postgres@localhost:5432/formbricks?schema=public"
 
 permissions:
   contents: read
@@ -80,6 +79,9 @@ jobs:
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
+          secrets: |
+            database_url=${{ secrets.DUMMY_DATABASE_URL }}
+            encryption_key=${{ secrets.DUMMY_ENCRYPTION_KEY }}
           cache-from: type=gha
           cache-to: type=gha,mode=max
 
4  .github/workflows/release-docker-github.yml  vendored
@@ -19,7 +19,6 @@ env:
   IMAGE_NAME: ${{ github.repository }}
   TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
   TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
-  DATABASE_URL: "postgresql://postgres:postgres@localhost:5432/formbricks?schema=public"
 
 permissions:
   contents: read
@@ -100,6 +99,9 @@ jobs:
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
+          secrets: |
+            database_url=${{ secrets.DUMMY_DATABASE_URL }}
+            encryption_key=${{ secrets.DUMMY_ENCRYPTION_KEY }}
           cache-from: type=gha
           cache-to: type=gha,mode=max
 
34  .github/workflows/terrafrom-plan-and-apply.yml  vendored
@@ -3,16 +3,21 @@ name: 'Terraform'
 on:
   workflow_dispatch:
   # TODO: enable it back when migration is completed.
-  # push:
-  #   branches:
-  #     - main
-  # pull_request:
-  #   branches:
-  #     - main
+  push:
+    branches:
+      - main
+    paths:
+      - "infra/terraform/**"
+  pull_request:
+    branches:
+      - main
+    paths:
+      - "infra/terraform/**"
 
 permissions:
   id-token: write
   contents: write
+  pull-requests: write
 
 jobs:
   terraform:
@@ -58,18 +63,17 @@
         run: terraform plan -out .planfile
         working-directory: infra/terraform
 
-      # - name: Post PR comment
-      #   uses: borchero/terraform-plan-comment@3399d8dbae8b05185e815e02361ede2949cd99c4 # v2.4.0
-      #   if: always() && github.ref != 'refs/heads/main' && (steps.validate.outcome == 'success' || steps.validate.outcome == 'failure')
-      #   with:
-      #     token: ${{ github.token }}
-      #     planfile: .planfile
-      #     working-directory: "infra/terraform"
-      #     skip-comment: true
+      - name: Post PR comment
+        uses: borchero/terraform-plan-comment@3399d8dbae8b05185e815e02361ede2949cd99c4 # v2.4.0
+        if: always() && github.ref != 'refs/heads/main' && (steps.plan.outcome == 'success' || steps.plan.outcome == 'failure')
+        with:
+          token: ${{ github.token }}
+          planfile: .planfile
+          working-directory: "infra/terraform"
 
       - name: Terraform Apply
         id: apply
-        # if: github.ref == 'refs/heads/main' && github.event_name == 'push'
+        if: github.ref == 'refs/heads/main' && github.event_name == 'push'
        run: terraform apply .planfile
        working-directory: "infra/terraform"
 
@@ -11,30 +11,30 @@
     "clean": "rimraf .turbo node_modules dist storybook-static"
   },
   "dependencies": {
-    "eslint-plugin-react-refresh": "0.4.16",
+    "eslint-plugin-react-refresh": "0.4.19",
-    "react": "19.0.0",
+    "react": "19.1.0",
-    "react-dom": "19.0.0"
+    "react-dom": "19.1.0"
   },
   "devDependencies": {
-    "@chromatic-com/storybook": "3.2.2",
+    "@chromatic-com/storybook": "3.2.6",
     "@formbricks/config-typescript": "workspace:*",
-    "@storybook/addon-a11y": "8.4.7",
+    "@storybook/addon-a11y": "8.6.11",
-    "@storybook/addon-essentials": "8.4.7",
+    "@storybook/addon-essentials": "8.6.11",
-    "@storybook/addon-interactions": "8.4.7",
+    "@storybook/addon-interactions": "8.6.11",
-    "@storybook/addon-links": "8.4.7",
+    "@storybook/addon-links": "8.6.11",
-    "@storybook/addon-onboarding": "8.4.7",
+    "@storybook/addon-onboarding": "8.6.11",
-    "@storybook/blocks": "8.4.7",
+    "@storybook/blocks": "8.6.11",
-    "@storybook/react": "8.4.7",
+    "@storybook/react": "8.6.11",
-    "@storybook/react-vite": "8.4.7",
+    "@storybook/react-vite": "8.6.11",
-    "@storybook/test": "8.4.7",
+    "@storybook/test": "8.6.11",
-    "@typescript-eslint/eslint-plugin": "8.18.0",
+    "@typescript-eslint/eslint-plugin": "8.29.0",
-    "@typescript-eslint/parser": "8.18.0",
+    "@typescript-eslint/parser": "8.29.0",
     "@vitejs/plugin-react": "4.3.4",
-    "esbuild": "0.25.1",
+    "esbuild": "0.25.2",
-    "eslint-plugin-storybook": "0.11.1",
+    "eslint-plugin-storybook": "0.12.0",
     "prop-types": "15.8.1",
-    "storybook": "8.4.7",
+    "storybook": "8.6.11",
-    "tsup": "8.3.5",
+    "tsup": "8.4.0",
-    "vite": "6.0.12"
+    "vite": "6.2.4"
   }
 }
@@ -24,11 +24,27 @@ RUN corepack enable
 # Install necessary build tools and compilers
 RUN apk update && apk add --no-cache g++ cmake make gcc python3 openssl-dev jq
 
-ARG NEXT_PUBLIC_SENTRY_DSN
+# BuildKit secret handling without hardcoded fallback values
+# This approach relies entirely on secrets passed from GitHub Actions
+RUN echo '#!/bin/sh' > /tmp/read-secrets.sh && \
+    echo 'if [ -f "/run/secrets/database_url" ]; then' >> /tmp/read-secrets.sh && \
+    echo ' export DATABASE_URL=$(cat /run/secrets/database_url)' >> /tmp/read-secrets.sh && \
+    echo 'else' >> /tmp/read-secrets.sh && \
+    echo ' echo "DATABASE_URL secret not found. Build may fail if this is required."' >> /tmp/read-secrets.sh && \
+    echo 'fi' >> /tmp/read-secrets.sh && \
+    echo 'if [ -f "/run/secrets/encryption_key" ]; then' >> /tmp/read-secrets.sh && \
+    echo ' export ENCRYPTION_KEY=$(cat /run/secrets/encryption_key)' >> /tmp/read-secrets.sh && \
+    echo 'else' >> /tmp/read-secrets.sh && \
+    echo ' echo "ENCRYPTION_KEY secret not found. Build may fail if this is required."' >> /tmp/read-secrets.sh && \
+    echo 'fi' >> /tmp/read-secrets.sh && \
+    echo 'exec "$@"' >> /tmp/read-secrets.sh && \
+    chmod +x /tmp/read-secrets.sh
 
 ARG SENTRY_AUTH_TOKEN
 
-# Increase Node.js memory limit
-# ENV NODE_OPTIONS="--max_old_space_size=4096"
+# Increase Node.js memory limit as a regular build argument
+ARG NODE_OPTIONS="--max_old_space_size=4096"
+ENV NODE_OPTIONS=${NODE_OPTIONS}
 
 # Set the working directory
 WORKDIR /app
@@ -47,8 +63,11 @@ RUN touch apps/web/.env
 # Install the dependencies
 RUN pnpm install
 
-# Build the project
-RUN NODE_OPTIONS="--max_old_space_size=4096" pnpm build --filter=@formbricks/web...
+# Build the project using our secret reader script
+# This mounts the secrets only during this build step without storing them in layers
+RUN --mount=type=secret,id=database_url \
+    --mount=type=secret,id=encryption_key \
+    /tmp/read-secrets.sh pnpm build --filter=@formbricks/web...
 
 # Extract Prisma version
 RUN jq -r '.devDependencies.prisma' packages/database/package.json > /prisma_version.txt
@@ -39,7 +39,7 @@ export const TopControlButtons = ({
 
       <TooltipRenderer tooltipContent={t("common.share_feedback")}>
         <Button variant="ghost" size="icon" className="h-fit w-fit bg-slate-50 p-1" asChild>
-          <Link href="https://github.com/formbricks/formbricks/issues/new/choose" target="_blank">
+          <Link href="https://github.com/formbricks/formbricks/issues" target="_blank">
             <BugIcon />
           </Link>
         </Button>
@@ -13,6 +13,7 @@ import {
   RESPONSES_PER_PAGE,
   WEBAPP_URL,
 } from "@formbricks/lib/constants";
+import { getSurveyDomain } from "@formbricks/lib/getSurveyUrl";
 import { getResponseCountBySurveyId } from "@formbricks/lib/response/service";
 import { getSurvey } from "@formbricks/lib/survey/service";
 import { getTagsByEnvironmentId } from "@formbricks/lib/tag/service";
@@ -47,6 +48,7 @@ const Page = async (props) => {
   });
   const shouldGenerateInsights = needsInsightsGeneration(survey);
   const locale = await findMatchingLocale();
+  const surveyDomain = getSurveyDomain();
 
   return (
     <PageContentWrapper>
@@ -57,8 +59,8 @@ const Page = async (props) => {
             environment={environment}
             survey={survey}
             isReadOnly={isReadOnly}
-            webAppUrl={WEBAPP_URL}
             user={user}
+            surveyDomain={surveyDomain}
           />
         }>
         {isAIEnabled && shouldGenerateInsights && (
@@ -23,19 +23,19 @@ import { PanelInfoView } from "./shareEmbedModal/PanelInfoView";
 
 interface ShareEmbedSurveyProps {
   survey: TSurvey;
+  surveyDomain: string;
   open: boolean;
   modalView: "start" | "embed" | "panel";
   setOpen: React.Dispatch<React.SetStateAction<boolean>>;
-  webAppUrl: string;
   user: TUser;
 }
 
 export const ShareEmbedSurvey = ({
   survey,
+  surveyDomain,
   open,
   modalView,
   setOpen,
-  webAppUrl,
   user,
 }: ShareEmbedSurveyProps) => {
   const router = useRouter();
@@ -104,8 +104,8 @@ export const ShareEmbedSurvey = ({
             <DialogDescription className="hidden" />
             <ShareSurveyLink
               survey={survey}
-              webAppUrl={webAppUrl}
               surveyUrl={surveyUrl}
+              surveyDomain={surveyDomain}
               setSurveyUrl={setSurveyUrl}
               locale={user.locale}
             />
@@ -159,8 +159,8 @@ export const ShareEmbedSurvey = ({
               survey={survey}
               email={email}
               surveyUrl={surveyUrl}
+              surveyDomain={surveyDomain}
               setSurveyUrl={setSurveyUrl}
-              webAppUrl={webAppUrl}
               locale={user.locale}
             />
           ) : showView === "panel" ? (
@@ -20,8 +20,8 @@ interface SurveyAnalysisCTAProps {
   survey: TSurvey;
   environment: TEnvironment;
   isReadOnly: boolean;
-  webAppUrl: string;
   user: TUser;
+  surveyDomain: string;
 }
 
 interface ModalState {
@@ -35,8 +35,8 @@ export const SurveyAnalysisCTA = ({
   survey,
   environment,
   isReadOnly,
-  webAppUrl,
   user,
+  surveyDomain,
 }: SurveyAnalysisCTAProps) => {
   const { t } = useTranslate();
   const searchParams = useSearchParams();
@@ -50,7 +50,7 @@ export const SurveyAnalysisCTA = ({
     dropdown: false,
   });
 
-  const surveyUrl = useMemo(() => `${webAppUrl}/s/${survey.id}`, [survey.id, webAppUrl]);
+  const surveyUrl = useMemo(() => `${surveyDomain}/s/${survey.id}`, [survey.id, surveyDomain]);
   const { refreshSingleUseId } = useSingleUseId(survey);
 
   const widgetSetupCompleted = survey.type === "app" && environment.appSetupCompleted;
@@ -172,9 +172,9 @@ export const SurveyAnalysisCTA = ({
         <ShareEmbedSurvey
           key={key}
           survey={survey}
+          surveyDomain={surveyDomain}
           open={modalState[key as keyof ModalState]}
           setOpen={setOpen}
-          webAppUrl={webAppUrl}
           user={user}
           modalView={modalView}
         />
@@ -20,8 +20,8 @@ interface EmbedViewProps {
   survey: any;
   email: string;
   surveyUrl: string;
+  surveyDomain: string;
   setSurveyUrl: React.Dispatch<React.SetStateAction<string>>;
-  webAppUrl: string;
   locale: TUserLocale;
 }
 
@@ -35,8 +35,8 @@ export const EmbedView = ({
   survey,
   email,
   surveyUrl,
+  surveyDomain,
   setSurveyUrl,
-  webAppUrl,
   locale,
 }: EmbedViewProps) => {
   const { t } = useTranslate();
@@ -82,8 +82,8 @@ export const EmbedView = ({
       ) : activeId === "link" ? (
         <LinkTab
           survey={survey}
-          webAppUrl={webAppUrl}
           surveyUrl={surveyUrl}
+          surveyDomain={surveyDomain}
           setSurveyUrl={setSurveyUrl}
           locale={locale}
         />
@@ -8,13 +8,13 @@ import { TUserLocale } from "@formbricks/types/user";
 
 interface LinkTabProps {
   survey: TSurvey;
-  webAppUrl: string;
   surveyUrl: string;
+  surveyDomain: string;
   setSurveyUrl: (url: string) => void;
   locale: TUserLocale;
 }
 
-export const LinkTab = ({ survey, webAppUrl, surveyUrl, setSurveyUrl, locale }: LinkTabProps) => {
+export const LinkTab = ({ survey, surveyUrl, surveyDomain, setSurveyUrl, locale }: LinkTabProps) => {
   const { t } = useTranslate();
 
   const docsLinks = [
@@ -43,8 +43,8 @@ export const LinkTab = ({ survey, webAppUrl, surveyUrl, setSurveyUrl, locale }:
       </p>
       <ShareSurveyLink
         survey={survey}
-        webAppUrl={webAppUrl}
         surveyUrl={surveyUrl}
+        surveyDomain={surveyDomain}
         setSurveyUrl={setSurveyUrl}
         locale={locale}
       />
@@ -78,7 +78,7 @@ const dummySurvey = {
 } as unknown as TSurvey;
 const dummyEnvironment = { id: "env123", appSetupCompleted: true } as TEnvironment;
 const dummyUser = { id: "user123", name: "Test User" } as TUser;
-const webAppUrl = "http://example.com";
+const surveyDomain = "https://surveys.test.formbricks.com";
 
 describe("SurveyAnalysisCTA - handleCopyLink", () => {
   afterEach(() => {
@@ -91,7 +91,7 @@ describe("SurveyAnalysisCTA - handleCopyLink", () => {
         survey={dummySurvey}
         environment={dummyEnvironment}
         isReadOnly={false}
-        webAppUrl={webAppUrl}
+        surveyDomain={surveyDomain}
         user={dummyUser}
       />
     );
@@ -101,7 +101,9 @@ describe("SurveyAnalysisCTA - handleCopyLink", () => {
 
     await waitFor(() => {
       expect(refreshSingleUseIdSpy).toHaveBeenCalled();
-      expect(writeTextMock).toHaveBeenCalledWith("http://example.com/s/survey123?id=newSingleUseId");
+      expect(writeTextMock).toHaveBeenCalledWith(
+        "https://surveys.test.formbricks.com/s/survey123?id=newSingleUseId"
+      );
       expect(toast.success).toHaveBeenCalledWith("common.copied_to_clipboard");
     });
   });
@@ -113,7 +115,7 @@ describe("SurveyAnalysisCTA - handleCopyLink", () => {
         survey={dummySurvey}
         environment={dummyEnvironment}
         isReadOnly={false}
-        webAppUrl={webAppUrl}
+        surveyDomain={surveyDomain}
         user={dummyUser}
       />
     );
@@ -1,6 +1,6 @@
 import { getPreviewEmailTemplateHtml } from "@/modules/email/components/preview-email-template";
 import { getTranslate } from "@/tolgee/server";
-import { WEBAPP_URL } from "@formbricks/lib/constants";
+import { getSurveyDomain } from "@formbricks/lib/getSurveyUrl";
 import { getProjectByEnvironmentId } from "@formbricks/lib/project/service";
 import { getSurvey } from "@formbricks/lib/survey/service";
 import { getStyling } from "@formbricks/lib/utils/styling";
@@ -17,7 +17,7 @@ export const getEmailTemplateHtml = async (surveyId: string, locale: string) =>
   }
 
   const styling = getStyling(project, survey);
-  const surveyUrl = WEBAPP_URL + "/s/" + survey.id;
+  const surveyUrl = getSurveyDomain() + "/s/" + survey.id;
   const html = await getPreviewEmailTemplateHtml(survey, surveyUrl, styling, locale, t);
   const doctype =
     '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">';
@@ -16,6 +16,7 @@ import {
   MAX_RESPONSES_FOR_INSIGHT_GENERATION,
   WEBAPP_URL,
 } from "@formbricks/lib/constants";
+import { getSurveyDomain } from "@formbricks/lib/getSurveyUrl";
 import { getResponseCountBySurveyId } from "@formbricks/lib/response/service";
 import { getSurvey } from "@formbricks/lib/survey/service";
 import { getUser } from "@formbricks/lib/user/service";
@@ -54,6 +55,7 @@ const SurveyPage = async (props: { params: Promise<{ environmentId: string; surv
     billing: organization.billing,
   });
   const shouldGenerateInsights = needsInsightsGeneration(survey);
+  const surveyDomain = getSurveyDomain();
 
   return (
     <PageContentWrapper>
@@ -64,8 +66,8 @@ const SurveyPage = async (props: { params: Promise<{ environmentId: string; surv
             environment={environment}
             survey={survey}
             isReadOnly={isReadOnly}
-            webAppUrl={WEBAPP_URL}
             user={user}
+            surveyDomain={surveyDomain}
           />
         }>
         {isAIEnabled && shouldGenerateInsights && (
@@ -47,12 +47,6 @@ vi.mock("@/app/intercom/IntercomClientWrapper", () => ({
 vi.mock("@/modules/ui/components/no-mobile-overlay", () => ({
   NoMobileOverlay: () => <div data-testid="no-mobile-overlay" />,
 }));
-vi.mock("@/modules/ui/components/post-hog-client", () => ({
-  PHProvider: ({ children }: { children: React.ReactNode }) => (
-    <div data-testid="ph-provider">{children}</div>
-  ),
-  PostHogPageview: () => <div data-testid="ph-pageview" />,
-}));
 vi.mock("@/modules/ui/components/toaster-client", () => ({
   ToasterClient: () => <div data-testid="toaster-client" />,
 }));
@@ -74,8 +68,6 @@ describe("(app) AppLayout", () => {
     render(element);
 
     expect(screen.getByTestId("no-mobile-overlay")).toBeInTheDocument();
-    expect(screen.getByTestId("ph-pageview")).toBeInTheDocument();
-    expect(screen.getByTestId("ph-provider")).toBeInTheDocument();
     expect(screen.getByTestId("mock-intercom-wrapper")).toBeInTheDocument();
     expect(screen.getByTestId("toaster-client")).toBeInTheDocument();
     expect(screen.getByTestId("child-content")).toHaveTextContent("Hello from children");
@@ -1,51 +0,0 @@
-import { responses } from "@/app/lib/api/response";
-import { authOptions } from "@/modules/auth/lib/authOptions";
-import { AsyncParser } from "@json2csv/node";
-import { getServerSession } from "next-auth";
-import { NextRequest } from "next/server";
-import { logger } from "@formbricks/logger";
-
-export const POST = async (request: NextRequest) => {
-  const session = await getServerSession(authOptions);
-
-  if (!session) {
-    return responses.unauthorizedResponse();
-  }
-
-  const data = await request.json();
-  let csv: string = "";
-
-  const { json, fields, fileName } = data;
-
-  const fallbackFileName = fileName.replace(/[^A-Za-z0-9_.-]/g, "_");
-  const encodedFileName = encodeURIComponent(fileName)
-    .replace(/['()]/g, (match) => "%" + match.charCodeAt(0).toString(16))
-    .replace(/\*/g, "%2A");
-
-  const parser = new AsyncParser({
-    fields,
-  });
-
-  try {
-    csv = await parser.parse(json).promise();
-  } catch (err) {
-    logger.error({ error: err, url: request.url }, "Failed to convert to CSV");
-    throw new Error("Failed to convert to CSV");
-  }
-
-  const headers = new Headers();
-  headers.set("Content-Type", "text/csv;charset=utf-8;");
-  headers.set(
-    "Content-Disposition",
-    `attachment; filename="${fallbackFileName}"; filename*=UTF-8''${encodedFileName}`
-  );
-
-  return Response.json(
-    {
-      fileResponse: csv,
-    },
-    {
-      headers,
-    }
-  );
-};
@@ -1,46 +0,0 @@
-import { responses } from "@/app/lib/api/response";
-import { authOptions } from "@/modules/auth/lib/authOptions";
-import { getServerSession } from "next-auth";
-import { NextRequest } from "next/server";
-import * as xlsx from "xlsx";
-
-export const POST = async (request: NextRequest) => {
-  const session = await getServerSession(authOptions);
-
-  if (!session) {
-    return responses.unauthorizedResponse();
-  }
-
-  const data = await request.json();
-
-  const { json, fields, fileName } = data;
-
-  const fallbackFileName = fileName.replace(/[^A-Za-z0-9_.-]/g, "_");
-  const encodedFileName = encodeURIComponent(fileName)
-    .replace(/['()]/g, (match) => "%" + match.charCodeAt(0).toString(16))
-    .replace(/\*/g, "%2A");
-
-  const wb = xlsx.utils.book_new();
-  const ws = xlsx.utils.json_to_sheet(json, { header: fields });
-  xlsx.utils.book_append_sheet(wb, ws, "Sheet1");
-
-  const buffer = xlsx.write(wb, { type: "buffer", bookType: "xlsx" }) as Buffer;
-  const base64String = buffer.toString("base64");
-
-  const headers = new Headers();
-
-  headers.set("Content-Type", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet");
-  headers.set(
-    "Content-Disposition",
-    `attachment; filename="${fallbackFileName}"; filename*=UTF-8''${encodedFileName}`
-  );
-
-  return Response.json(
-    {
-      fileResponse: base64String,
-    },
-    {
-      headers,
-    }
-  );
-};
@@ -1,6 +1,6 @@
 import {
   OPTIONS,
   PUT,
-} from "@/modules/ee/contacts/api/client/[environmentId]/contacts/[userId]/attributes/route";
+} from "@/modules/ee/contacts/api/v1/client/[environmentId]/contacts/[userId]/attributes/route";
 
 export { OPTIONS, PUT };
@@ -1,6 +1,6 @@
 import {
   GET,
   OPTIONS,
-} from "@/modules/ee/contacts/api/client/[environmentId]/identify/contacts/[userId]/route";
+} from "@/modules/ee/contacts/api/v1/client/[environmentId]/identify/contacts/[userId]/route";
 
 export { GET, OPTIONS };
@@ -1,3 +1,3 @@
-import { OPTIONS, POST } from "@/modules/ee/contacts/api/client/[environmentId]/user/route";
+import { OPTIONS, POST } from "@/modules/ee/contacts/api/v1/client/[environmentId]/user/route";
 
 export { POST, OPTIONS };
@@ -2,6 +2,6 @@ import {
   DELETE,
   GET,
   PUT,
-} from "@/modules/ee/contacts/api/management/contact-attribute-keys/[contactAttributeKeyId]/route";
+} from "@/modules/ee/contacts/api/v1/management/contact-attribute-keys/[contactAttributeKeyId]/route";
 
 export { DELETE, GET, PUT };
@@ -1,3 +1,3 @@
-import { GET, POST } from "@/modules/ee/contacts/api/management/contact-attribute-keys/route";
+import { GET, POST } from "@/modules/ee/contacts/api/v1/management/contact-attribute-keys/route";
 
 export { GET, POST };
@@ -1,3 +1,3 @@
-import { GET } from "@/modules/ee/contacts/api/management/contact-attributes/route";
+import { GET } from "@/modules/ee/contacts/api/v1/management/contact-attributes/route";
 
 export { GET };
@@ -1,3 +1,3 @@
-import { DELETE, GET } from "@/modules/ee/contacts/api/management/contacts/[contactId]/route";
+import { DELETE, GET } from "@/modules/ee/contacts/api/v1/management/contacts/[contactId]/route";
 
 export { DELETE, GET };
@@ -1,4 +1,4 @@
-import { GET } from "@/modules/ee/contacts/api/management/contacts/route";
+import { GET } from "@/modules/ee/contacts/api/v1/management/contacts/route";
 
 export { GET };
 
@@ -1,6 +1,7 @@
 import { authenticateRequest, handleErrorResponse } from "@/app/api/v1/auth";
 import { responses } from "@/app/lib/api/response";
 import { NextRequest } from "next/server";
+import { getSurveyDomain } from "@formbricks/lib/getSurveyUrl";
 import { getSurvey } from "@formbricks/lib/survey/service";
 import { generateSurveySingleUseIds } from "@formbricks/lib/utils/singleUseSurveys";
 
@@ -36,9 +37,10 @@ export const GET = async (
 
   const singleUseIds = generateSurveySingleUseIds(limit, survey.singleUse.isEncrypted);
 
+  const surveyDomain = getSurveyDomain();
   // map single use ids to survey links
   const surveyLinks = singleUseIds.map(
-    (singleUseId) => `${process.env.WEBAPP_URL}/s/${survey.id}?suId=${singleUseId}`
+    (singleUseId) => `${surveyDomain}/s/${survey.id}?suId=${singleUseId}`
   );
 
   return responses.successResponse(surveyLinks);
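As a worked example of the changed mapping (all values below are made up for illustration): with `surveyDomain` resolved to `https://surveys.example.com` and two generated single-use IDs, the endpoint now returns links on the survey domain instead of `process.env.WEBAPP_URL`:

```typescript
// Illustration only; surveyDomain and the IDs below are assumed values.
const surveyDomain = "https://surveys.example.com";
const singleUseIds = ["suid_abc", "suid_def"];

const surveyLinks = singleUseIds.map((singleUseId) => `${surveyDomain}/s/survey123?suId=${singleUseId}`);
// => [
//   "https://surveys.example.com/s/survey123?suId=suid_abc",
//   "https://surveys.example.com/s/survey123?suId=suid_def",
// ]
```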
@@ -29,7 +29,6 @@ export const GET = async (req: NextRequest) => {
           <h2 tw="flex flex-col text-[8] sm:text-4xl font-bold tracking-tight text-slate-900 text-left mt-15">
             {name}
           </h2>
-          <span tw="text-slate-600 text-xl">Complete in ~ 4 minutes</span>
         </div>
       </div>
       <div tw="flex justify-end mr-10 ">
@@ -1,6 +1,6 @@
 import {
   OPTIONS,
   PUT,
-} from "@/modules/ee/contacts/api/client/[environmentId]/contacts/[userId]/attributes/route";
+} from "@/modules/ee/contacts/api/v1/client/[environmentId]/contacts/[userId]/attributes/route";
 
 export { OPTIONS, PUT };
@@ -1,6 +1,6 @@
 import {
   GET,
   OPTIONS,
-} from "@/modules/ee/contacts/api/client/[environmentId]/identify/contacts/[userId]/route";
+} from "@/modules/ee/contacts/api/v1/client/[environmentId]/identify/contacts/[userId]/route";
 
 export { GET, OPTIONS };
@@ -1,3 +1,3 @@
-import { OPTIONS, POST } from "@/modules/ee/contacts/api/client/[environmentId]/user/route";
+import { OPTIONS, POST } from "@/modules/ee/contacts/api/v1/client/[environmentId]/user/route";
 
 export { POST, OPTIONS };
3  apps/web/app/api/v2/management/contacts/bulk/route.ts  Normal file
@@ -0,0 +1,3 @@
+import { PUT } from "@/modules/ee/contacts/api/v2/management/contacts/bulk/route";
+
+export { PUT };
3  apps/web/app/api/v2/management/roles/route.ts  Normal file
@@ -0,0 +1,3 @@
+import { GET } from "@/modules/api/v2/management/roles/route";
+
+export { GET };
@@ -29,6 +29,7 @@ vi.mock("@formbricks/lib/constants", () => ({
   OIDC_SIGNING_ALGORITHM: "test-oidc-signing-algorithm",
   WEBAPP_URL: "test-webapp-url",
   IS_PRODUCTION: false,
+  SENTRY_DSN: "mock-sentry-dsn",
 }));
 
 vi.mock("@/tolgee/language", () => ({
@@ -69,6 +70,15 @@ vi.mock("@/tolgee/client", () => ({
   ),
 }));
 
+vi.mock("@/app/sentry/SentryProvider", () => ({
+  SentryProvider: ({ children, sentryDsn }: { children: React.ReactNode; sentryDsn?: string }) => (
+    <div data-testid="sentry-provider">
+      SentryProvider: {sentryDsn}
+      {children}
+    </div>
+  ),
+}));
+
 describe("RootLayout", () => {
   beforeEach(() => {
     cleanup();
@@ -95,8 +105,8 @@ describe("RootLayout", () => {
     console.log("vercel", process.env.VERCEL);
 
     expect(screen.getByTestId("speed-insights")).toBeInTheDocument();
-    expect(screen.getByTestId("ph-provider")).toBeInTheDocument();
     expect(screen.getByTestId("tolgee-next-provider")).toBeInTheDocument();
+    expect(screen.getByTestId("sentry-provider")).toBeInTheDocument();
     expect(screen.getByTestId("child")).toHaveTextContent("Child Content");
   });
 });
@@ -1,4 +1,4 @@
-import { PHProvider } from "@/modules/ui/components/post-hog-client";
+import { SentryProvider } from "@/app/sentry/SentryProvider";
 import { TolgeeNextProvider } from "@/tolgee/client";
 import { getLocale } from "@/tolgee/language";
 import { getTolgee } from "@/tolgee/server";
@@ -6,7 +6,7 @@ import { TolgeeStaticData } from "@tolgee/react";
 import { SpeedInsights } from "@vercel/speed-insights/next";
 import { Metadata } from "next";
 import React from "react";
-import { IS_POSTHOG_CONFIGURED } from "@formbricks/lib/constants";
+import { SENTRY_DSN } from "@formbricks/lib/constants";
 import "../modules/ui/globals.css";
 
 export const metadata: Metadata = {
@@ -27,11 +27,11 @@ const RootLayout = async ({ children }: { children: React.ReactNode }) => {
     <html lang={locale} translate="no">
       <body className="flex h-dvh flex-col transition-all ease-in-out">
         {process.env.VERCEL === "1" && <SpeedInsights sampleRate={0.1} />}
-        <PHProvider posthogEnabled={IS_POSTHOG_CONFIGURED}>
+        <SentryProvider sentryDsn={SENTRY_DSN}>
           <TolgeeNextProvider language={locale} staticData={staticData as unknown as TolgeeStaticData}>
             {children}
           </TolgeeNextProvider>
-        </PHProvider>
+        </SentryProvider>
       </body>
     </html>
   );
@@ -1,18 +0,0 @@
-export const fetchFile = async (
-  data: { json: any; fields?: string[]; fileName?: string },
-  filetype: string
-) => {
-  const endpoint = filetype === "csv" ? "csv-conversion" : "excel-conversion";
-
-  const response = await fetch(`/api/${endpoint}`, {
-    method: "POST",
-    headers: {
-      "Content-Type": "application/json",
-    },
-    body: JSON.stringify(data),
-  });
-
-  if (!response.ok) throw new Error("Failed to convert to file");
-
-  return response.json();
-};
101  apps/web/app/sentry/SentryProvider.test.tsx  Normal file
@@ -0,0 +1,101 @@
+import * as Sentry from "@sentry/nextjs";
+import { cleanup, render, screen } from "@testing-library/react";
+import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
+import { SentryProvider } from "./SentryProvider";
+
+vi.mock("@sentry/nextjs", async () => {
+  const actual = await vi.importActual<typeof import("@sentry/nextjs")>("@sentry/nextjs");
+  return {
+    ...actual,
+    replayIntegration: (options: any) => {
+      return {
+        name: "Replay",
+        id: "Replay",
+        options,
+      };
+    },
+  };
+});
+
+describe("SentryProvider", () => {
+  afterEach(() => {
+    cleanup();
+  });
+
+  it("calls Sentry.init when sentryDsn is provided", () => {
+    const sentryDsn = "https://examplePublicKey@o0.ingest.sentry.io/0";
+    const initSpy = vi.spyOn(Sentry, "init").mockImplementation(() => undefined);
+
+    render(
+      <SentryProvider sentryDsn={sentryDsn}>
+        <div data-testid="child">Test Content</div>
+      </SentryProvider>
+    );
+
+    // The useEffect runs after mount, so Sentry.init should have been called.
+    expect(initSpy).toHaveBeenCalled();
+    expect(initSpy).toHaveBeenCalledWith(
+      expect.objectContaining({
+        dsn: sentryDsn,
+        tracesSampleRate: 1,
+        debug: false,
+        replaysOnErrorSampleRate: 1.0,
+        replaysSessionSampleRate: 0.1,
+        integrations: expect.any(Array),
+        beforeSend: expect.any(Function),
+      })
+    );
+  });
+
+  it("does not call Sentry.init when sentryDsn is not provided", () => {
+    const initSpy = vi.spyOn(Sentry, "init").mockImplementation(() => undefined);
+
+    render(
+      <SentryProvider>
+        <div data-testid="child">Test Content</div>
+      </SentryProvider>
+    );
+
+    expect(initSpy).not.toHaveBeenCalled();
+  });
+
+  it("renders children", () => {
+    const sentryDsn = "https://examplePublicKey@o0.ingest.sentry.io/0";
+    render(
+      <SentryProvider sentryDsn={sentryDsn}>
+        <div data-testid="child">Test Content</div>
+      </SentryProvider>
+    );
+    expect(screen.getByTestId("child")).toHaveTextContent("Test Content");
+  });
+
+  it("processes beforeSend correctly", () => {
+    const sentryDsn = "https://examplePublicKey@o0.ingest.sentry.io/0";
+    const initSpy = vi.spyOn(Sentry, "init").mockImplementation(() => undefined);
+
+    render(
+      <SentryProvider sentryDsn={sentryDsn}>
+        <div data-testid="child">Test Content</div>
+      </SentryProvider>
+    );
+
+    const config = initSpy.mock.calls[0][0];
+    expect(config).toHaveProperty("beforeSend");
+    const beforeSend = config.beforeSend;
+
+    if (!beforeSend) {
+      throw new Error("beforeSend is not defined");
+    }
+
+    const dummyEvent = { some: "event" } as unknown as Sentry.ErrorEvent;
+
+    const hintWithNextNotFound = { originalException: { digest: "NEXT_NOT_FOUND" } };
+    expect(beforeSend(dummyEvent, hintWithNextNotFound)).toBeNull();
+
+    const hintWithOtherError = { originalException: { digest: "OTHER_ERROR" } };
+    expect(beforeSend(dummyEvent, hintWithOtherError)).toEqual(dummyEvent);
+
+    const hintWithoutError = { originalException: undefined };
+    expect(beforeSend(dummyEvent, hintWithoutError)).toEqual(dummyEvent);
+  });
+});
53  apps/web/app/sentry/SentryProvider.tsx  Normal file
@@ -0,0 +1,53 @@
+"use client";
+
+import * as Sentry from "@sentry/nextjs";
+import { useEffect } from "react";
+
+interface SentryProviderProps {
+  children: React.ReactNode;
+  sentryDsn?: string;
+}
+
+export const SentryProvider = ({ children, sentryDsn }: SentryProviderProps) => {
+  useEffect(() => {
+    if (sentryDsn) {
+      Sentry.init({
+        dsn: sentryDsn,
+
+        // Adjust this value in production, or use tracesSampler for greater control
+        tracesSampleRate: 1,
+
+        // Setting this option to true will print useful information to the console while you're setting up Sentry.
+        debug: false,
+
+        replaysOnErrorSampleRate: 1.0,
+
+        // This sets the sample rate to be 10%. You may want this to be 100% while
+        // in development and sample at a lower rate in production
+        replaysSessionSampleRate: 0.1,
+
+        // You can remove this option if you're not planning to use the Sentry Session Replay feature:
+        integrations: [
+          Sentry.replayIntegration({
+            // Additional Replay configuration goes in here, for example:
+            maskAllText: true,
+            blockAllMedia: true,
+          }),
+        ],
+
+        beforeSend(event, hint) {
+          const error = hint.originalException as Error;
+
+          // @ts-expect-error
+          if (error && error.digest === "NEXT_NOT_FOUND") {
+            return null;
+          }
+
+          return event;
+        },
+      });
+    }
+  }, []);
+
+  return <>{children}</>;
+};
@@ -19,7 +19,7 @@ export const getFile = async (
       headers: {
         "Content-Type": metaData.contentType,
         "Content-Disposition": "attachment",
-        "Cache-Control": "public, max-age=1200, s-maxage=1200, stale-while-revalidate=300",
+        "Cache-Control": "public, max-age=300, s-maxage=300, stale-while-revalidate=300",
         Vary: "Accept-Encoding",
       },
     });
@@ -35,10 +35,7 @@ export const getFile = async (
       status: 302,
       headers: {
         Location: signedUrl,
-        "Cache-Control":
-          accessType === "public"
-            ? `public, max-age=3600, s-maxage=3600, stale-while-revalidate=300`
-            : `public, max-age=600, s-maxage=3600, stale-while-revalidate=300`,
+        "Cache-Control": "public, max-age=300, s-maxage=300, stale-while-revalidate=300",
       },
     });
   } catch (error: unknown) {
@@ -1,8 +1,14 @@
-import { env } from "@formbricks/lib/env";
+import { PROMETHEUS_ENABLED, SENTRY_DSN } from "@formbricks/lib/constants";
 
 // instrumentation.ts
 export const register = async () => {
-  if (process.env.NEXT_RUNTIME === "nodejs" && env.PROMETHEUS_ENABLED) {
+  if (process.env.NEXT_RUNTIME === "nodejs" && PROMETHEUS_ENABLED) {
     await import("./instrumentation-node");
   }
+  if (process.env.NEXT_RUNTIME === "nodejs" && SENTRY_DSN) {
+    await import("./sentry.server.config");
+  }
+  if (process.env.NEXT_RUNTIME === "edge" && SENTRY_DSN) {
+    await import("./sentry.edge.config");
+  }
 };
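The updated `register()` only pulls in `./sentry.server.config` or `./sentry.edge.config` when `SENTRY_DSN` is set. Those config files are not included in this compare; a minimal sketch of what such a module typically contains with `@sentry/nextjs` (an assumption, not repository code) is:

```typescript
// sentry.server.config.ts — hypothetical sketch, not taken from this diff.
import * as Sentry from "@sentry/nextjs";

Sentry.init({
  dsn: process.env.SENTRY_DSN, // only imported by register() when SENTRY_DSN is set
  tracesSampleRate: 1,
  debug: false,
});
```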
@@ -24,15 +24,27 @@ import { ipAddress } from "@vercel/functions";
 import { getToken } from "next-auth/jwt";
 import { NextRequest, NextResponse } from "next/server";
 import { v4 as uuidv4 } from "uuid";
-import { E2E_TESTING, IS_PRODUCTION, RATE_LIMITING_DISABLED, WEBAPP_URL } from "@formbricks/lib/constants";
+import {
+  E2E_TESTING,
+  IS_PRODUCTION,
+  RATE_LIMITING_DISABLED,
+  SURVEY_URL,
+  WEBAPP_URL,
+} from "@formbricks/lib/constants";
 import { isValidCallbackUrl } from "@formbricks/lib/utils/url";
+import { logger } from "@formbricks/logger";

 const enforceHttps = (request: NextRequest): Response | null => {
   const forwardedProto = request.headers.get("x-forwarded-proto") ?? "http";
   if (IS_PRODUCTION && !E2E_TESTING && forwardedProto !== "https") {
     const apiError: ApiErrorResponseV2 = {
       type: "forbidden",
-      details: [{ field: "", issue: "Only HTTPS connections are allowed on the management endpoint." }],
+      details: [
+        {
+          field: "",
+          issue: "Only HTTPS connections are allowed on the management and contacts bulk endpoints.",
+        },
+      ],
     };
     logApiError(request, apiError);
     return NextResponse.json(apiError, { status: 403 });
@@ -78,7 +90,34 @@ const applyRateLimiting = (request: NextRequest, ip: string) => {
   }
 };

+const handleSurveyDomain = (request: NextRequest): Response | null => {
+  try {
+    if (!SURVEY_URL) return null;
+
+    const host = request.headers.get("host") || "";
+    const surveyDomain = SURVEY_URL ? new URL(SURVEY_URL).host : "";
+    if (host !== surveyDomain) return null;
+
+    return new NextResponse(null, { status: 404 });
+  } catch (error) {
+    logger.error(error, "Error handling survey domain");
+    return new NextResponse(null, { status: 404 });
+  }
+};
+
+const isSurveyRoute = (request: NextRequest) => {
+  return request.nextUrl.pathname.startsWith("/c/") || request.nextUrl.pathname.startsWith("/s/");
+};
+
 export const middleware = async (originalRequest: NextRequest) => {
+  if (isSurveyRoute(originalRequest)) {
+    return NextResponse.next();
+  }
+
+  // Handle survey domain routing.
+  const surveyResponse = handleSurveyDomain(originalRequest);
+  if (surveyResponse) return surveyResponse;
+
   // Create a new Request object to override headers and add a unique request ID header
   const request = new NextRequest(originalRequest, {
     headers: new Headers(originalRequest.headers),
@@ -88,6 +127,7 @@ export const middleware = async (originalRequest: NextRequest) => {
   request.headers.set("x-start-time", Date.now().toString());

   // Create a new NextResponse object to forward the new request with headers
+
   const nextResponseWithCustomHeader = NextResponse.next({
     request: {
       headers: request.headers,
@@ -132,20 +172,6 @@ export const middleware = async (originalRequest: NextRequest) => {

 export const config = {
   matcher: [
-    "/api/auth/callback/credentials",
-    "/api/(.*)/client/:path*",
-    "/api/v1/js/actions",
-    "/api/v1/client/storage",
-    "/share/(.*)/:path",
-    "/environments/:path*",
-    "/setup/organization/:path*",
-    "/api/auth/signout",
-    "/auth/login",
-    "/auth/signup",
-    "/api/packages/:path*",
-    "/auth/verification-requested",
-    "/auth/forgot-password",
-    "/api/v1/management/:path*",
-    "/api/v2/management/:path*",
+    "/((?!_next/static|_next/image|favicon.ico|sitemap.xml|robots.txt|js|css|images|fonts|icons|public|api/v1/og).*)", // Exclude the Open Graph image generation route from middleware
   ],
 };
@@ -15,7 +15,7 @@ import { SurveyLinkDisplay } from "./components/SurveyLinkDisplay";

 interface ShareSurveyLinkProps {
   survey: TSurvey;
-  webAppUrl: string;
+  surveyDomain: string;
   surveyUrl: string;
   setSurveyUrl: (url: string) => void;
   locale: TUserLocale;
@@ -23,8 +23,8 @@ interface ShareSurveyLinkProps {

 export const ShareSurveyLink = ({
   survey,
-  webAppUrl,
   surveyUrl,
+  surveyDomain,
   setSurveyUrl,
   locale,
 }: ShareSurveyLinkProps) => {
@@ -32,7 +32,7 @@ export const ShareSurveyLink = ({
   const [language, setLanguage] = useState("default");

   const getUrl = useCallback(async () => {
-    let url = `${webAppUrl}/s/${survey.id}`;
+    let url = `${surveyDomain}/s/${survey.id}`;
     const queryParams: string[] = [];

     if (survey.singleUse?.enabled) {
@@ -58,7 +58,9 @@ export const ShareSurveyLink = ({
     }

     setSurveyUrl(url);
-  }, [survey, webAppUrl, language]);
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [survey, surveyDomain, language]);

   const generateNewSingleUseLink = () => {
     getUrl();
@@ -257,6 +257,34 @@ const successResponse = ({
   );
 };

+export const multiStatusResponse = ({
+  data,
+  meta,
+  cors = false,
+  cache = "private, no-store",
+}: {
+  data: Object;
+  meta?: Record<string, unknown>;
+  cors?: boolean;
+  cache?: string;
+}) => {
+  const headers = {
+    ...(cors && corsHeaders),
+    "Cache-Control": cache,
+  };
+
+  return Response.json(
+    {
+      data,
+      meta,
+    } as ApiSuccessResponse,
+    {
+      status: 207,
+      headers,
+    }
+  );
+};
+
 export const responses = {
   badRequestResponse,
   unauthorizedResponse,
@@ -267,4 +295,5 @@ export const responses = {
   tooManyRequestsResponse,
   internalServerErrorResponse,
   successResponse,
+  multiStatusResponse,
 };
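Not part of the diff: the new multiStatusResponse helper is meant for partially successful operations (HTTP 207 Multi-Status). A minimal, hypothetical usage sketch in the style of the other response helpers; the function name and payload shape below are illustrative only:

import { responses } from "@/modules/api/v2/lib/response";

// Hypothetical helper: report a bulk run where some items were skipped.
export const buildPartialSuccessResponse = (skippedContacts: { index: number; userId: string }[]) =>
  responses.multiStatusResponse({
    data: {
      status: "partial",
      message: "Some contacts were skipped because their userId already exists.",
      skippedContacts,
    },
  });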
@@ -1,6 +1,6 @@
 import { responses } from "@/modules/api/v2/lib/response";
 import { ApiErrorResponseV2 } from "@/modules/api/v2/types/api-error";
-import { ZodError } from "zod";
+import { ZodCustomIssue, ZodIssue } from "zod";
 import { logger } from "@formbricks/logger";

 export const handleApiError = (request: Request, err: ApiErrorResponseV2): Response => {
@@ -34,11 +34,16 @@ export const handleApiError = (request: Request, err: ApiErrorResponseV2): Respo
   }
 };

-export const formatZodError = (error: ZodError) => {
-  return error.issues.map((issue) => ({
-    field: issue.path.join("."),
-    issue: issue.message,
-  }));
+export const formatZodError = (error: { issues: (ZodIssue | ZodCustomIssue)[] }) => {
+  return error.issues.map((issue) => {
+    const issueParams = issue.code === "custom" ? issue.params : undefined;
+
+    return {
+      field: issue.path.join("."),
+      issue: issue.message ?? "An error occurred while processing your request. Please try again later.",
+      ...(issueParams && { meta: issueParams }),
+    };
+  });
 };

 export const logApiRequest = (request: Request, responseStatus: number): void => {
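Not part of the diff: with this change, custom Zod issues carry their params through as meta. A small illustrative example of assumed usage, not repository code:

import { z } from "zod";
import { formatZodError } from "@/modules/api/v2/lib/utils";

// A custom issue with `params`; the formatter surfaces them under `meta`.
const schema = z.string().superRefine((value, ctx) => {
  if (value.length > 255) {
    ctx.addIssue({ code: "custom", message: "Value too long", params: { maxLength: 255 } });
  }
});

const parsed = schema.safeParse("a".repeat(300));
if (!parsed.success) {
  console.log(formatZodError(parsed.error));
  // => [{ field: "", issue: "Value too long", meta: { maxLength: 255 } }]
}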
@@ -2,7 +2,6 @@ import { checkRateLimitAndThrowError } from "@/modules/api/v2/lib/rate-limit";
 import { formatZodError, handleApiError } from "@/modules/api/v2/lib/utils";
 import { ZodRawShape, z } from "zod";
 import { TAuthenticationApiKey } from "@formbricks/types/auth";
-import { err } from "@formbricks/types/error-handlers";
 import { authenticateRequest } from "./authenticate-request";

 export type HandlerFn<TInput = Record<string, unknown>> = ({
@@ -41,65 +40,63 @@ export const apiWrapper = async <S extends ExtendedSchemas>({
   rateLimit?: boolean;
   handler: HandlerFn<ParsedSchemas<S>>;
 }): Promise<Response> => {
-  try {
-    const authentication = await authenticateRequest(request);
-    if (!authentication.ok) return handleApiError(request, authentication.error);
-
-    let parsedInput: ParsedSchemas<S> = {} as ParsedSchemas<S>;
-
-    if (schemas?.body) {
-      const bodyData = await request.json();
-      const bodyResult = schemas.body.safeParse(bodyData);
-
-      if (!bodyResult.success) {
-        throw err({
-          type: "bad_request",
-          details: formatZodError(bodyResult.error),
-        });
-      }
-      parsedInput.body = bodyResult.data as ParsedSchemas<S>["body"];
-    }
-
-    if (schemas?.query) {
-      const url = new URL(request.url);
-      const queryObject = Object.fromEntries(url.searchParams.entries());
-      const queryResult = schemas.query.safeParse(queryObject);
-      if (!queryResult.success) {
-        throw err({
-          type: "unprocessable_entity",
-          details: formatZodError(queryResult.error),
-        });
-      }
-      parsedInput.query = queryResult.data as ParsedSchemas<S>["query"];
-    }
-
-    if (schemas?.params) {
-      const paramsObject = (await externalParams) || {};
-      const paramsResult = schemas.params.safeParse(paramsObject);
-      if (!paramsResult.success) {
-        throw err({
-          type: "unprocessable_entity",
-          details: formatZodError(paramsResult.error),
-        });
-      }
-      parsedInput.params = paramsResult.data as ParsedSchemas<S>["params"];
-    }
-
-    if (rateLimit) {
-      const rateLimitResponse = await checkRateLimitAndThrowError({
-        identifier: authentication.data.hashedApiKey,
-      });
-      if (!rateLimitResponse.ok) {
-        throw rateLimitResponse.error;
-      }
-    }
-
-    return handler({
-      authentication: authentication.data,
-      parsedInput,
-      request,
-    });
-  } catch (err) {
-    return handleApiError(request, err.error);
-  }
+  const authentication = await authenticateRequest(request);
+  if (!authentication.ok) {
+    return handleApiError(request, authentication.error);
+  }
+
+  let parsedInput: ParsedSchemas<S> = {} as ParsedSchemas<S>;
+
+  if (schemas?.body) {
+    const bodyData = await request.json();
+    const bodyResult = schemas.body.safeParse(bodyData);
+
+    if (!bodyResult.success) {
+      return handleApiError(request, {
+        type: "unprocessable_entity",
+        details: formatZodError(bodyResult.error),
+      });
+    }
+    parsedInput.body = bodyResult.data as ParsedSchemas<S>["body"];
+  }
+
+  if (schemas?.query) {
+    const url = new URL(request.url);
+    const queryObject = Object.fromEntries(url.searchParams.entries());
+    const queryResult = schemas.query.safeParse(queryObject);
+    if (!queryResult.success) {
+      return handleApiError(request, {
+        type: "unprocessable_entity",
+        details: formatZodError(queryResult.error),
+      });
+    }
+    parsedInput.query = queryResult.data as ParsedSchemas<S>["query"];
+  }
+
+  if (schemas?.params) {
+    const paramsObject = (await externalParams) || {};
+    const paramsResult = schemas.params.safeParse(paramsObject);
+    if (!paramsResult.success) {
+      return handleApiError(request, {
+        type: "unprocessable_entity",
+        details: formatZodError(paramsResult.error),
+      });
+    }
+    parsedInput.params = paramsResult.data as ParsedSchemas<S>["params"];
+  }
+
+  if (rateLimit) {
+    const rateLimitResponse = await checkRateLimitAndThrowError({
+      identifier: authentication.data.hashedApiKey,
+    });
+    if (!rateLimitResponse.ok) {
+      return handleApiError(request, rateLimitResponse.error);
+    }
+  }
+
+  return handler({
+    authentication: authentication.data,
+    parsedInput,
+    request,
+  });
 };
@@ -1,4 +1,5 @@
-import { logApiRequest } from "@/modules/api/v2/lib/utils";
+import { handleApiError, logApiRequest } from "@/modules/api/v2/lib/utils";
+import { ApiErrorResponseV2 } from "@/modules/api/v2/types/api-error";
 import { ExtendedSchemas, HandlerFn, ParsedSchemas, apiWrapper } from "./api-wrapper";

 export const authenticatedApiClient = async <S extends ExtendedSchemas>({
@@ -14,16 +15,28 @@ export const authenticatedApiClient = async <S extends ExtendedSchemas>({
   rateLimit?: boolean;
   handler: HandlerFn<ParsedSchemas<S>>;
 }): Promise<Response> => {
-  const response = await apiWrapper({
-    request,
-    schemas,
-    externalParams,
-    rateLimit,
-    handler,
-  });
-  if (response.ok) {
-    logApiRequest(request, response.status);
-  }
-
-  return response;
+  try {
+    const response = await apiWrapper({
+      request,
+      schemas,
+      externalParams,
+      rateLimit,
+      handler,
+    });
+
+    if (response.ok) {
+      logApiRequest(request, response.status);
+    }
+
+    return response;
+  } catch (err) {
+    if ("type" in err) {
+      return handleApiError(request, err as ApiErrorResponseV2);
+    }
+
+    return handleApiError(request, {
+      type: "internal_server_error",
+      details: [{ field: "error", issue: "An error occurred while processing your request." }],
+    });
+  }
 };
@@ -19,6 +19,11 @@ vi.mock("@/modules/api/v2/lib/utils", () => ({
   handleApiError: vi.fn(),
 }));

+vi.mock("@/modules/api/v2/lib/utils", () => ({
+  formatZodError: vi.fn(),
+  handleApiError: vi.fn(),
+}));
+
 describe("apiWrapper", () => {
   it("should handle request and return response", async () => {
     const request = new Request("http://localhost", {
26  apps/web/modules/api/v2/management/roles/lib/openapi.ts  Normal file
@@ -0,0 +1,26 @@
+import { z } from "zod";
+import { ZodOpenApiOperationObject, ZodOpenApiPathsObject } from "zod-openapi";
+
+export const getRolesEndpoint: ZodOpenApiOperationObject = {
+  operationId: "getRoles",
+  summary: "Get roles",
+  description: "Gets roles from the database.",
+  requestParams: {},
+  tags: ["Management API > Roles"],
+  responses: {
+    "200": {
+      description: "Roles retrieved successfully.",
+      content: {
+        "application/json": {
+          schema: z.array(z.string()),
+        },
+      },
+    },
+  },
+};
+
+export const rolePaths: ZodOpenApiPathsObject = {
+  "/roles": {
+    get: getRolesEndpoint,
+  },
+};
26  apps/web/modules/api/v2/management/roles/lib/roles.ts  Normal file
@@ -0,0 +1,26 @@
+import { ApiErrorResponseV2 } from "@/modules/api/v2/types/api-error";
+import { ApiResponse } from "@/modules/api/v2/types/api-success";
+import { prisma } from "@formbricks/database";
+import { Result, err, ok } from "@formbricks/types/error-handlers";
+
+export const getRoles = async (): Promise<Result<ApiResponse<string[]>, ApiErrorResponseV2>> => {
+  try {
+    // We use a raw query to get all the roles because we can't list enum options with prisma
+    const results = await prisma.$queryRaw<{ unnest: string }[]>`
+      SELECT unnest(enum_range(NULL::"OrganizationRole"));
+    `;
+
+    if (!results) {
+      // We set internal_server_error because it's an enum and we should always have the roles
+      return err({ type: "internal_server_error", details: [{ field: "roles", issue: "not found" }] });
+    }
+
+    const roles = results.map((row) => row.unnest);
+
+    return ok({
+      data: roles,
+    });
+  } catch (error) {
+    return err({ type: "internal_server_error", details: [{ field: "roles", issue: error.message }] });
+  }
+};
@@ -0,0 +1,45 @@
+import { describe, expect, it, vi } from "vitest";
+import { prisma } from "@formbricks/database";
+import { getRoles } from "../roles";
+
+// Mock prisma with a $queryRaw function
+vi.mock("@formbricks/database", () => ({
+  prisma: {
+    $queryRaw: vi.fn(),
+  },
+}));
+
+describe("getRoles", () => {
+  it("returns roles on success", async () => {
+    (prisma.$queryRaw as any).mockResolvedValueOnce([{ unnest: "ADMIN" }, { unnest: "MEMBER" }]);
+
+    const result = await getRoles();
+    expect(result.ok).toBe(true);
+
+    if (result.ok) {
+      expect(result.data.data).toEqual(["ADMIN", "MEMBER"]);
+    }
+  });
+
+  it("returns error if no results are found", async () => {
+    (prisma.$queryRaw as any).mockResolvedValueOnce(null);
+
+    const result = await getRoles();
+    expect(result.ok).toBe(false);
+
+    if (!result.ok) {
+      expect(result.error?.type).toBe("internal_server_error");
+    }
+  });
+
+  it("returns error on exception", async () => {
+    vi.mocked(prisma.$queryRaw).mockRejectedValueOnce(new Error("Test DB error"));
+
+    const result = await getRoles();
+    expect(result.ok).toBe(false);
+
+    if (!result.ok) {
+      expect(result.error.type).toBe("internal_server_error");
+    }
+  });
+});
19  apps/web/modules/api/v2/management/roles/route.ts  Normal file
@@ -0,0 +1,19 @@
+import { responses } from "@/modules/api/v2/lib/response";
+import { handleApiError } from "@/modules/api/v2/lib/utils";
+import { authenticatedApiClient } from "@/modules/api/v2/management/auth/authenticated-api-client";
+import { getRoles } from "@/modules/api/v2/management/roles/lib/roles";
+import { NextRequest } from "next/server";
+
+export const GET = async (request: NextRequest) =>
+  authenticatedApiClient({
+    request,
+    handler: async () => {
+      const res = await getRoles();
+
+      if (res.ok) {
+        return responses.successResponse(res.data);
+      }
+
+      return handleApiError(request, res.error);
+    },
+  });
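Not part of the diff: assuming this module is mounted under /api/v2/management (as the OpenAPI "/roles" path suggests) and that the Management API key is sent in an x-api-key header (an assumption; check the Formbricks API docs), the new endpoint could be called roughly like this:

// Hypothetical client sketch for the new roles endpoint.
const fetchRoles = async (apiKey: string): Promise<string[]> => {
  const res = await fetch("https://app.formbricks.com/api/v2/management/roles", {
    headers: { "x-api-key": apiKey },
  });
  if (!res.ok) throw new Error(`Failed to fetch roles: ${res.status}`);
  const body = (await res.json()) as { data: string[] };
  return body.data; // the OrganizationRole enum values, e.g. ["ADMIN", "MEMBER"] as in the test above
};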
@@ -2,8 +2,10 @@ import { contactAttributeKeyPaths } from "@/modules/api/v2/management/contact-at
 import { contactAttributePaths } from "@/modules/api/v2/management/contact-attributes/lib/openapi";
 import { contactPaths } from "@/modules/api/v2/management/contacts/lib/openapi";
 import { responsePaths } from "@/modules/api/v2/management/responses/lib/openapi";
+import { rolePaths } from "@/modules/api/v2/management/roles/lib/openapi";
 import { surveyPaths } from "@/modules/api/v2/management/surveys/lib/openapi";
 import { webhookPaths } from "@/modules/api/v2/management/webhooks/lib/openapi";
+import { bulkContactPaths } from "@/modules/ee/contacts/api/v2/management/contacts/bulk/lib/openapi";
 import * as yaml from "yaml";
 import { z } from "zod";
 import { createDocument, extendZodWithOpenApi } from "zod-openapi";
@@ -25,11 +27,13 @@ const document = createDocument({
   },
   paths: {
     ...responsePaths,
+    ...bulkContactPaths,
     ...contactPaths,
     ...contactAttributePaths,
     ...contactAttributeKeyPaths,
     ...surveyPaths,
     ...webhookPaths,
+    ...rolePaths,
   },
   servers: [
     {
@@ -62,6 +66,10 @@ const document = createDocument({
       name: "Management API > Webhooks",
       description: "Operations for managing webhooks.",
     },
+    {
+      name: "Management API > Roles",
+      description: "Operations for managing roles.",
+    },
   ],
   components: {
     securitySchemes: {
@@ -79,6 +87,7 @@ const document = createDocument({
       contactAttributeKey: ZContactAttributeKey,
       survey: ZSurveyWithoutQuestionType,
       webhook: ZWebhook,
+      role: z.array(z.string()),
     },
   },
   security: [
@@ -1,4 +1,12 @@
-export type ApiErrorDetails = { field: string; issue: string }[];
+// We're naming the "params" field from zod (or otherwise) to "meta" since "params" is a bit confusing
+// We're still using the "params" type from zod though because it allows us to not reference `any` and directly use the zod types
+export type ApiErrorDetails = {
+  field: string;
+  issue: string;
+  meta?: {
+    [k: string]: unknown;
+  };
+}[];

 export type ApiErrorResponseV2 =
   | {
@@ -173,6 +173,9 @@ export const authOptions: NextAuthOptions = {
     // Conditionally add enterprise SSO providers
     ...(ENTERPRISE_LICENSE_KEY ? getSSOProviders() : []),
   ],
+  session: {
+    maxAge: 3600,
+  },
   callbacks: {
     async jwt({ token }) {
       const existingUser = await getUserByEmail(token?.email!);
@@ -215,6 +215,10 @@ export const PricingCard = ({
                 text={t("environments.settings.billing.switch_plan_confirmation_text", {
                   plan: t(plan.name),
                   price: planPeriod === "monthly" ? plan.price.monthly : plan.price.yearly,
+                  period:
+                    planPeriod === "monthly"
+                      ? t("environments.settings.billing.per_month")
+                      : t("environments.settings.billing.per_year"),
                 })}
                 buttonVariant="default"
                 buttonLoading={loading}
@@ -1,6 +1,6 @@
 import { contactCache } from "@/lib/cache/contact";
 import { contactAttributeCache } from "@/lib/cache/contact-attribute";
-import { getContactByUserId } from "@/modules/ee/contacts/api/client/[environmentId]/identify/contacts/[userId]/lib/contact";
+import { getContactByUserId } from "@/modules/ee/contacts/api/v1/client/[environmentId]/identify/contacts/[userId]/lib/contact";
 import { prisma } from "@formbricks/database";
 import { cache } from "@formbricks/lib/cache";
 import { segmentCache } from "@formbricks/lib/cache/segment";
@@ -1,5 +1,5 @@
 import { contactAttributeCache } from "@/lib/cache/contact-attribute";
-import { getContactAttributes } from "@/modules/ee/contacts/api/client/[environmentId]/identify/contacts/[userId]/lib/attributes";
+import { getContactAttributes } from "@/modules/ee/contacts/api/v1/client/[environmentId]/identify/contacts/[userId]/lib/attributes";
 import { evaluateSegment } from "@/modules/ee/contacts/segments/lib/segments";
 import { Prisma } from "@prisma/client";
 import { cache as reactCache } from "react";
@@ -0,0 +1,398 @@
|
|||||||
|
import { contactCache } from "@/lib/cache/contact";
|
||||||
|
import { contactAttributeCache } from "@/lib/cache/contact-attribute";
|
||||||
|
import { contactAttributeKeyCache } from "@/lib/cache/contact-attribute-key";
|
||||||
|
import { ApiErrorResponseV2 } from "@/modules/api/v2/types/api-error";
|
||||||
|
import { TContactBulkUploadContact } from "@/modules/ee/contacts/types/contact";
|
||||||
|
import { createId } from "@paralleldrive/cuid2";
|
||||||
|
import { Prisma } from "@prisma/client";
|
||||||
|
import { prisma } from "@formbricks/database";
|
||||||
|
import { logger } from "@formbricks/logger";
|
||||||
|
import { Result, err, ok } from "@formbricks/types/error-handlers";
|
||||||
|
|
||||||
|
export const upsertBulkContacts = async (
|
||||||
|
contacts: TContactBulkUploadContact[],
|
||||||
|
environmentId: string,
|
||||||
|
parsedEmails: string[]
|
||||||
|
): Promise<
|
||||||
|
Result<
|
||||||
|
{
|
||||||
|
contactIdxWithConflictingUserIds: number[];
|
||||||
|
},
|
||||||
|
ApiErrorResponseV2
|
||||||
|
>
|
||||||
|
> => {
|
||||||
|
const emailAttributeKey = "email";
|
||||||
|
const contactIdxWithConflictingUserIds: number[] = [];
|
||||||
|
|
||||||
|
let userIdsInContacts: string[] = [];
|
||||||
|
let attributeKeysSet: Set<string> = new Set();
|
||||||
|
let attributeKeys: string[] = [];
|
||||||
|
|
||||||
|
// both can be done with a single loop:
|
||||||
|
contacts.forEach((contact) => {
|
||||||
|
contact.attributes.forEach((attr) => {
|
||||||
|
if (attr.attributeKey.key === "userId") {
|
||||||
|
userIdsInContacts.push(attr.value);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!attributeKeysSet.has(attr.attributeKey.key)) {
|
||||||
|
attributeKeys.push(attr.attributeKey.key);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add the attribute key to the set
|
||||||
|
attributeKeysSet.add(attr.attributeKey.key);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
const [existingUserIds, existingContactsByEmail, existingAttributeKeys] = await Promise.all([
|
||||||
|
prisma.contactAttribute.findMany({
|
||||||
|
where: {
|
||||||
|
attributeKey: {
|
||||||
|
environmentId,
|
||||||
|
key: "userId",
|
||||||
|
},
|
||||||
|
value: {
|
||||||
|
in: userIdsInContacts,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
select: {
|
||||||
|
value: true,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
|
||||||
|
prisma.contact.findMany({
|
||||||
|
where: {
|
||||||
|
environmentId,
|
||||||
|
attributes: {
|
||||||
|
some: {
|
||||||
|
attributeKey: { key: emailAttributeKey },
|
||||||
|
value: { in: parsedEmails },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
select: {
|
||||||
|
attributes: {
|
||||||
|
select: {
|
||||||
|
attributeKey: { select: { key: true } },
|
||||||
|
createdAt: true,
|
||||||
|
id: true,
|
||||||
|
value: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
id: true,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
|
||||||
|
prisma.contactAttributeKey.findMany({
|
||||||
|
where: {
|
||||||
|
key: { in: attributeKeys },
|
||||||
|
environmentId,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Build a map from email to contact id (if the email attribute exists)
|
||||||
|
const contactMap = new Map<
|
||||||
|
string,
|
||||||
|
{
|
||||||
|
contactId: string;
|
||||||
|
attributes: { id: string; attributeKey: { key: string }; createdAt: Date; value: string }[];
|
||||||
|
}
|
||||||
|
>();
|
||||||
|
|
||||||
|
existingContactsByEmail.forEach((contact) => {
|
||||||
|
const emailAttr = contact.attributes.find((attr) => attr.attributeKey.key === emailAttributeKey);
|
||||||
|
|
||||||
|
if (emailAttr) {
|
||||||
|
contactMap.set(emailAttr.value, {
|
||||||
|
contactId: contact.id,
|
||||||
|
attributes: contact.attributes.map((attr) => ({
|
||||||
|
id: attr.id,
|
||||||
|
attributeKey: { key: attr.attributeKey.key },
|
||||||
|
createdAt: attr.createdAt,
|
||||||
|
value: attr.value,
|
||||||
|
})),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Split contacts into ones to update and ones to create
|
||||||
|
const contactsToUpdate: {
|
||||||
|
contactId: string;
|
||||||
|
attributes: {
|
||||||
|
id: string;
|
||||||
|
createdAt: Date;
|
||||||
|
value: string;
|
||||||
|
attributeKey: {
|
||||||
|
key: string;
|
||||||
|
};
|
||||||
|
}[];
|
||||||
|
}[] = [];
|
||||||
|
|
||||||
|
const contactsToCreate: {
|
||||||
|
attributes: {
|
||||||
|
value: string;
|
||||||
|
attributeKey: {
|
||||||
|
key: string;
|
||||||
|
};
|
||||||
|
}[];
|
||||||
|
}[] = [];
|
||||||
|
|
||||||
|
let filteredContacts: TContactBulkUploadContact[] = [];
|
||||||
|
|
||||||
|
contacts.forEach((contact, idx) => {
|
||||||
|
const emailAttr = contact.attributes.find((attr) => attr.attributeKey.key === emailAttributeKey);
|
||||||
|
|
||||||
|
if (emailAttr && contactMap.has(emailAttr.value)) {
|
||||||
|
// if all the attributes passed are the same as the existing attributes, skip the update:
|
||||||
|
const existingContact = contactMap.get(emailAttr.value);
|
||||||
|
if (existingContact) {
|
||||||
|
// Create maps of existing attributes by key
|
||||||
|
const existingAttributesByKey = new Map(
|
||||||
|
existingContact.attributes.map((attr) => [attr.attributeKey.key, attr.value])
|
||||||
|
);
|
||||||
|
|
||||||
|
// Determine which attributes need updating by comparing values.
|
||||||
|
const attributesToUpdate = contact.attributes.filter(
|
||||||
|
(attr) => existingAttributesByKey.get(attr.attributeKey.key) !== attr.value
|
||||||
|
);
|
||||||
|
|
||||||
|
// Check if any attributes need updating
|
||||||
|
const needsUpdate = attributesToUpdate.length > 0;
|
||||||
|
|
||||||
|
if (!needsUpdate) {
|
||||||
|
filteredContacts.push(contact);
|
||||||
|
// No attributes need to be updated
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// if the attributes to update have a userId that exists in the db, we need to skip the update
|
||||||
|
const userIdAttr = attributesToUpdate.find((attr) => attr.attributeKey.key === "userId");
|
||||||
|
|
||||||
|
if (userIdAttr) {
|
||||||
|
const existingUserId = existingUserIds.find(
|
||||||
|
(existingUserId) => existingUserId.value === userIdAttr.value
|
||||||
|
);
|
||||||
|
|
||||||
|
if (existingUserId) {
|
||||||
|
contactIdxWithConflictingUserIds.push(idx);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
filteredContacts.push(contact);
|
||||||
|
contactsToUpdate.push({
|
||||||
|
contactId: existingContact.contactId,
|
||||||
|
attributes: attributesToUpdate.map((attr) => {
|
||||||
|
const existingAttr = existingContact.attributes.find(
|
||||||
|
(a) => a.attributeKey.key === attr.attributeKey.key
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!existingAttr) {
|
||||||
|
return {
|
||||||
|
id: createId(),
|
||||||
|
createdAt: new Date(),
|
||||||
|
value: attr.value,
|
||||||
|
attributeKey: attr.attributeKey,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: existingAttr.id,
|
||||||
|
createdAt: existingAttr.createdAt,
|
||||||
|
value: attr.value,
|
||||||
|
attributeKey: attr.attributeKey,
|
||||||
|
};
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// There can't be a case where the emailAttr is not defined since that should be caught by zod.
|
||||||
|
|
||||||
|
// if the contact has a userId that already exists in the db, we need to skip the create
|
||||||
|
const userIdAttr = contact.attributes.find((attr) => attr.attributeKey.key === "userId");
|
||||||
|
if (userIdAttr) {
|
||||||
|
const existingUserId = existingUserIds.find(
|
||||||
|
(existingUserId) => existingUserId.value === userIdAttr.value
|
||||||
|
);
|
||||||
|
|
||||||
|
if (existingUserId) {
|
||||||
|
contactIdxWithConflictingUserIds.push(idx);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
filteredContacts.push(contact);
|
||||||
|
contactsToCreate.push(contact);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Execute everything in ONE transaction
|
||||||
|
await prisma.$transaction(async (tx) => {
|
||||||
|
const attributeKeyMap = existingAttributeKeys.reduce<Record<string, string>>((acc, keyObj) => {
|
||||||
|
acc[keyObj.key] = keyObj.id;
|
||||||
|
return acc;
|
||||||
|
}, {});
|
||||||
|
|
||||||
|
// Check for missing attribute keys and create them if needed.
|
||||||
|
const missingKeysMap = new Map<string, { key: string; name: string }>();
|
||||||
|
const attributeKeyNameUpdates = new Map<string, { key: string; name: string }>();
|
||||||
|
|
||||||
|
for (const contact of filteredContacts) {
|
||||||
|
for (const attr of contact.attributes) {
|
||||||
|
if (!attributeKeyMap[attr.attributeKey.key]) {
|
||||||
|
missingKeysMap.set(attr.attributeKey.key, attr.attributeKey);
|
||||||
|
} else {
|
||||||
|
// Check if the name has changed for existing attribute keys
|
||||||
|
const existingKey = existingAttributeKeys.find((ak) => ak.key === attr.attributeKey.key);
|
||||||
|
if (existingKey && existingKey.name !== attr.attributeKey.name) {
|
||||||
|
attributeKeyNameUpdates.set(attr.attributeKey.key, attr.attributeKey);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle both missing keys and name updates in a single batch operation
|
||||||
|
const keysToUpsert = new Map<string, { key: string; name: string }>();
|
||||||
|
|
||||||
|
// Collect all keys that need to be created or updated
|
||||||
|
for (const [key, value] of missingKeysMap) {
|
||||||
|
keysToUpsert.set(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const [key, value] of attributeKeyNameUpdates) {
|
||||||
|
keysToUpsert.set(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (keysToUpsert.size > 0) {
|
||||||
|
const keysArray = Array.from(keysToUpsert.values());
|
||||||
|
const BATCH_SIZE = 10000;
|
||||||
|
|
||||||
|
for (let i = 0; i < keysArray.length; i += BATCH_SIZE) {
|
||||||
|
const batch = keysArray.slice(i, i + BATCH_SIZE);
|
||||||
|
|
||||||
|
// Use raw query to perform upsert
|
||||||
|
const upsertedKeys = await tx.$queryRaw<{ id: string; key: string }[]>`
|
||||||
|
INSERT INTO "ContactAttributeKey" ("id", "key", "name", "environmentId", "created_at", "updated_at")
|
||||||
|
SELECT
|
||||||
|
unnest(${Prisma.sql`ARRAY[${batch.map(() => createId())}]`}),
|
||||||
|
unnest(${Prisma.sql`ARRAY[${batch.map((k) => k.key)}]`}),
|
||||||
|
unnest(${Prisma.sql`ARRAY[${batch.map((k) => k.name)}]`}),
|
||||||
|
${environmentId},
|
||||||
|
NOW(),
|
||||||
|
NOW()
|
||||||
|
ON CONFLICT ("key", "environmentId")
|
||||||
|
DO UPDATE SET
|
||||||
|
"name" = EXCLUDED."name",
|
||||||
|
"updated_at" = NOW()
|
||||||
|
RETURNING "id", "key"
|
||||||
|
`;
|
||||||
|
|
||||||
|
// Update attribute key map with upserted keys
|
||||||
|
for (const key of upsertedKeys) {
|
||||||
|
attributeKeyMap[key.key] = key.id;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create new contacts -- should be at most 1000, no need to batch
|
||||||
|
const newContacts = contactsToCreate.map(() => ({
|
||||||
|
id: createId(),
|
||||||
|
environmentId,
|
||||||
|
}));
|
||||||
|
|
||||||
|
if (newContacts.length > 0) {
|
||||||
|
await tx.contact.createMany({
|
||||||
|
data: newContacts,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prepare attributes for both new and existing contacts
|
||||||
|
const attributesUpsertForCreatedUsers = contactsToCreate.flatMap((contact, idx) =>
|
||||||
|
contact.attributes.map((attr) => ({
|
||||||
|
id: createId(),
|
||||||
|
contactId: newContacts[idx].id,
|
||||||
|
attributeKeyId: attributeKeyMap[attr.attributeKey.key],
|
||||||
|
value: attr.value,
|
||||||
|
createdAt: new Date(),
|
||||||
|
updatedAt: new Date(),
|
||||||
|
}))
|
||||||
|
);
|
||||||
|
|
||||||
|
const attributesUpsertForExistingUsers = contactsToUpdate.flatMap((contact) =>
|
||||||
|
contact.attributes.map((attr) => ({
|
||||||
|
id: attr.id,
|
||||||
|
contactId: contact.contactId,
|
||||||
|
attributeKeyId: attributeKeyMap[attr.attributeKey.key],
|
||||||
|
value: attr.value,
|
||||||
|
createdAt: attr.createdAt,
|
||||||
|
updatedAt: new Date(),
|
||||||
|
}))
|
||||||
|
);
|
||||||
|
|
||||||
|
const attributesToUpsert = [...attributesUpsertForCreatedUsers, ...attributesUpsertForExistingUsers];
|
||||||
|
|
||||||
|
// Skip the raw query if there are no attributes to upsert
|
||||||
|
if (attributesToUpsert.length > 0) {
|
||||||
|
// Process attributes in batches of 10,000
|
||||||
|
const BATCH_SIZE = 10000;
|
||||||
|
for (let i = 0; i < attributesToUpsert.length; i += BATCH_SIZE) {
|
||||||
|
const batch = attributesToUpsert.slice(i, i + BATCH_SIZE);
|
||||||
|
|
||||||
|
// Use a raw query to perform a bulk insert with an ON CONFLICT clause
|
||||||
|
await tx.$executeRaw`
|
||||||
|
INSERT INTO "ContactAttribute" (
|
||||||
|
"id", "created_at", "updated_at", "contactId", "value", "attributeKeyId"
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
unnest(${Prisma.sql`ARRAY[${batch.map((a) => a.id)}]`}),
|
||||||
|
unnest(${Prisma.sql`ARRAY[${batch.map((a) => a.createdAt)}]`}),
|
||||||
|
unnest(${Prisma.sql`ARRAY[${batch.map((a) => a.updatedAt)}]`}),
|
||||||
|
unnest(${Prisma.sql`ARRAY[${batch.map((a) => a.contactId)}]`}),
|
||||||
|
unnest(${Prisma.sql`ARRAY[${batch.map((a) => a.value)}]`}),
|
||||||
|
unnest(${Prisma.sql`ARRAY[${batch.map((a) => a.attributeKeyId)}]`})
|
||||||
|
ON CONFLICT ("contactId", "attributeKeyId") DO UPDATE SET
|
||||||
|
"value" = EXCLUDED."value",
|
||||||
|
"updated_at" = EXCLUDED."updated_at"
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
contactCache.revalidate({
|
||||||
|
environmentId,
|
||||||
|
});
|
||||||
|
|
||||||
|
// revalidate all the new contacts:
|
||||||
|
for (const newContact of newContacts) {
|
||||||
|
contactCache.revalidate({
|
||||||
|
id: newContact.id,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// revalidate all the existing contacts:
|
||||||
|
for (const existingContact of existingContactsByEmail) {
|
||||||
|
contactCache.revalidate({
|
||||||
|
id: existingContact.id,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
contactAttributeKeyCache.revalidate({
|
||||||
|
environmentId,
|
||||||
|
});
|
||||||
|
|
||||||
|
contactAttributeCache.revalidate({ environmentId });
|
||||||
|
});
|
||||||
|
|
||||||
|
return ok({
|
||||||
|
contactIdxWithConflictingUserIds,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error({ error }, "Failed to upsert contacts");
|
||||||
|
|
||||||
|
return err({
|
||||||
|
type: "internal_server_error",
|
||||||
|
details: [{ field: "error", issue: "Failed to upsert contacts" }],
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
@@ -0,0 +1,59 @@
|
|||||||
|
import { ZContactBulkUploadRequest } from "@/modules/ee/contacts/types/contact";
|
||||||
|
import { z } from "zod";
|
||||||
|
import { ZodOpenApiOperationObject, ZodOpenApiPathsObject } from "zod-openapi";
|
||||||
|
|
||||||
|
const bulkContactEndpoint: ZodOpenApiOperationObject = {
|
||||||
|
operationId: "uploadBulkContacts",
|
||||||
|
summary: "Upload Bulk Contacts",
|
||||||
|
description: "Uploads contacts in bulk",
|
||||||
|
requestBody: {
|
||||||
|
required: true,
|
||||||
|
description: "The contacts to upload",
|
||||||
|
content: {
|
||||||
|
"application/json": {
|
||||||
|
schema: ZContactBulkUploadRequest,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
tags: ["Management API > Contacts"],
|
||||||
|
responses: {
|
||||||
|
"200": {
|
||||||
|
description: "Contacts uploaded successfully.",
|
||||||
|
content: {
|
||||||
|
"application/json": {
|
||||||
|
schema: z.object({
|
||||||
|
data: z.object({
|
||||||
|
status: z.string(),
|
||||||
|
message: z.string(),
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"207": {
|
||||||
|
description: "Contacts uploaded partially successfully.",
|
||||||
|
content: {
|
||||||
|
"application/json": {
|
||||||
|
schema: z.object({
|
||||||
|
data: z.object({
|
||||||
|
status: z.string(),
|
||||||
|
message: z.string(),
|
||||||
|
skippedContacts: z.array(
|
||||||
|
z.object({
|
||||||
|
index: z.number(),
|
||||||
|
userId: z.string(),
|
||||||
|
})
|
||||||
|
),
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
export const bulkContactPaths: ZodOpenApiPathsObject = {
|
||||||
|
"/contacts/bulk": {
|
||||||
|
put: bulkContactEndpoint,
|
||||||
|
},
|
||||||
|
};
|
||||||
@@ -0,0 +1,476 @@
|
|||||||
|
import { contactCache } from "@/lib/cache/contact";
|
||||||
|
import { contactAttributeCache } from "@/lib/cache/contact-attribute";
|
||||||
|
import { contactAttributeKeyCache } from "@/lib/cache/contact-attribute-key";
|
||||||
|
import { upsertBulkContacts } from "@/modules/ee/contacts/api/v2/management/contacts/bulk/lib/contact";
|
||||||
|
import { beforeEach, describe, expect, test, vi } from "vitest";
|
||||||
|
import { prisma } from "@formbricks/database";
|
||||||
|
|
||||||
|
// Ensure that createId always returns "mock-id" for predictability
|
||||||
|
vi.mock("@paralleldrive/cuid2", () => ({
|
||||||
|
createId: vi.fn(() => "mock-id"),
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Mock prisma methods
|
||||||
|
vi.mock("@formbricks/database", () => ({
|
||||||
|
prisma: {
|
||||||
|
contactAttribute: {
|
||||||
|
findMany: vi.fn(),
|
||||||
|
},
|
||||||
|
contactAttributeKey: {
|
||||||
|
findMany: vi.fn(),
|
||||||
|
createManyAndReturn: vi.fn(),
|
||||||
|
},
|
||||||
|
contact: {
|
||||||
|
findMany: vi.fn(),
|
||||||
|
createMany: vi.fn(),
|
||||||
|
},
|
||||||
|
$transaction: vi.fn((callback) => callback(prisma)),
|
||||||
|
$executeRaw: vi.fn(),
|
||||||
|
$queryRaw: vi.fn(),
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Mock cache functions
|
||||||
|
vi.mock("@/lib/cache/contact", () => ({
|
||||||
|
contactCache: {
|
||||||
|
revalidate: vi.fn(),
|
||||||
|
tag: {
|
||||||
|
byId: (id: string) => `contacts-${id}`,
|
||||||
|
byEnvironmentId: (environmentId: string) => `environments-${environmentId}-contacts`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("@/lib/cache/contact-attribute", () => ({
|
||||||
|
contactAttributeCache: {
|
||||||
|
revalidate: vi.fn(),
|
||||||
|
tag: {
|
||||||
|
byEnvironmentId: (environmentId: string) => `contactAttributes-${environmentId}`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("@/lib/cache/contact-attribute-key", () => ({
|
||||||
|
contactAttributeKeyCache: {
|
||||||
|
revalidate: vi.fn(),
|
||||||
|
tag: {
|
||||||
|
byEnvironmentId: (environmentId: string) => `environments-${environmentId}-contactAttributeKeys`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
describe("upsertBulkContacts", () => {
|
||||||
|
const mockEnvironmentId = "env_123";
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
test("should create new contacts when all provided contacts have unique user IDs and emails", async () => {
|
||||||
|
// Mock data: two contacts with unique userId and email
|
||||||
|
const mockContacts = [
|
||||||
|
{
|
||||||
|
attributes: [
|
||||||
|
{ attributeKey: { key: "email", name: "Email" }, value: "john@example.com" },
|
||||||
|
{ attributeKey: { key: "userId", name: "User ID" }, value: "user-123" },
|
||||||
|
{ attributeKey: { key: "name", name: "Name" }, value: "John Doe" },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
attributes: [
|
||||||
|
{ attributeKey: { key: "email", name: "Email" }, value: "jane@example.com" },
|
||||||
|
{ attributeKey: { key: "userId", name: "User ID" }, value: "user-456" },
|
||||||
|
{ attributeKey: { key: "name", name: "Name" }, value: "Jane Smith" },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const mockParsedEmails = ["john@example.com", "jane@example.com"];
|
||||||
|
|
||||||
|
// Mock: no existing userIds in DB
|
||||||
|
vi.mocked(prisma.contactAttribute.findMany).mockResolvedValueOnce([]);
|
||||||
|
// Mock: all attribute keys already exist
|
||||||
|
const mockAttributeKeys = [
|
||||||
|
{ id: "attr-key-email", key: "email", environmentId: mockEnvironmentId, name: "Email" },
|
||||||
|
{ id: "attr-key-userId", key: "userId", environmentId: mockEnvironmentId, name: "User ID" },
|
||||||
|
{ id: "attr-key-name", key: "name", environmentId: mockEnvironmentId, name: "Name" },
|
||||||
|
];
|
||||||
|
vi.mocked(prisma.contactAttributeKey.findMany).mockResolvedValueOnce(mockAttributeKeys);
|
||||||
|
// Mock: no existing contacts by email
|
||||||
|
vi.mocked(prisma.contact.findMany).mockResolvedValueOnce([]);
|
||||||
|
|
||||||
|
// Execute the function
|
||||||
|
const result = await upsertBulkContacts(mockContacts, mockEnvironmentId, mockParsedEmails);
|
||||||
|
|
||||||
|
// Assert that the result is ok and data is as expected
|
||||||
|
if (!result.ok) throw new Error("Expected result.ok to be true");
|
||||||
|
expect(result.data).toEqual({ contactIdxWithConflictingUserIds: [] });
|
||||||
|
|
||||||
|
// Verify that existing user IDs were checked
|
||||||
|
expect(prisma.contactAttribute.findMany).toHaveBeenCalledWith({
|
||||||
|
where: {
|
||||||
|
attributeKey: {
|
||||||
|
environmentId: mockEnvironmentId,
|
||||||
|
key: "userId",
|
||||||
|
},
|
||||||
|
value: {
|
||||||
|
in: ["user-123", "user-456"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
select: { value: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Verify that attribute keys were fetched
|
||||||
|
expect(prisma.contactAttributeKey.findMany).toHaveBeenCalledWith({
|
||||||
|
where: {
|
||||||
|
key: { in: ["email", "userId", "name"] },
|
||||||
|
environmentId: mockEnvironmentId,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Verify that existing contacts were looked up by email
|
||||||
|
expect(prisma.contact.findMany).toHaveBeenCalledWith({
|
||||||
|
where: {
|
||||||
|
environmentId: mockEnvironmentId,
|
||||||
|
attributes: {
|
||||||
|
some: {
|
||||||
|
attributeKey: { key: "email" },
|
||||||
|
value: { in: mockParsedEmails },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
select: {
|
||||||
|
attributes: {
|
||||||
|
select: {
|
||||||
|
attributeKey: { select: { key: true } },
|
||||||
|
createdAt: true,
|
||||||
|
id: true,
|
||||||
|
value: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
id: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Verify that new contacts were created in the transaction
|
||||||
|
expect(prisma.contact.createMany).toHaveBeenCalledWith({
|
||||||
|
data: [
|
||||||
|
{ id: "mock-id", environmentId: mockEnvironmentId },
|
||||||
|
{ id: "mock-id", environmentId: mockEnvironmentId },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
// Verify that the raw SQL query was executed to upsert attributes
|
||||||
|
expect(prisma.$executeRaw).toHaveBeenCalled();
|
||||||
|
|
||||||
|
// Verify that caches were revalidated
|
||||||
|
expect(contactCache.revalidate).toHaveBeenCalledWith({ environmentId: mockEnvironmentId });
|
||||||
|
// Since two new contacts are created with same id "mock-id", expect at least one revalidation with id "mock-id"
|
||||||
|
expect(contactCache.revalidate).toHaveBeenCalledWith({ id: "mock-id" });
|
||||||
|
expect(contactAttributeKeyCache.revalidate).toHaveBeenCalledWith({ environmentId: mockEnvironmentId });
|
||||||
|
expect(contactAttributeCache.revalidate).toHaveBeenCalledWith({ environmentId: mockEnvironmentId });
|
||||||
|
});
|
||||||
|
|
||||||
|
test("should update existing contacts when provided contacts match an existing email", async () => {
|
||||||
|
// Mock data: a contact that exists in the DB
|
||||||
|
const mockContacts = [
|
||||||
|
{
|
||||||
|
attributes: [
|
||||||
|
{ attributeKey: { key: "email", name: "Email" }, value: "john@example.com" },
|
||||||
|
// No userId is provided so it should be treated as update
|
||||||
|
],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const mockParsedEmails = ["john@example.com"];
|
||||||
|
|
||||||
|
      // Mock: no existing userIds conflict
      vi.mocked(prisma.contactAttribute.findMany).mockResolvedValueOnce([]);

      // Mock: attribute keys for email exist
      const mockAttributeKeys = [
        { id: "attr-key-email", key: "email", environmentId: mockEnvironmentId, name: "Email" },
      ];
      vi.mocked(prisma.contactAttributeKey.findMany).mockResolvedValueOnce(mockAttributeKeys);

      // Mock: an existing contact with the same email
      vi.mocked(prisma.contact.findMany).mockResolvedValueOnce([
        {
          id: "existing-contact-id",
          attributes: [
            {
              id: "existing-email-attr",
              attributeKey: { key: "email", name: "Email" },
              value: "john@example.com",
              createdAt: new Date("2023-01-01"),
            },
          ],
        },
      ]);

      // Execute the function
      const result = await upsertBulkContacts(mockContacts, mockEnvironmentId, mockParsedEmails);

      if (!result.ok) throw new Error("Expected result.ok to be true");
      expect(result.data).toEqual({ contactIdxWithConflictingUserIds: [] });
    });

    test("should return the indices of contacts with conflicting user IDs", async () => {
      // Mock data - mix of valid and conflicting contacts
      const mockContacts = [
        {
          // Contact 0: Valid contact with unique userId
          attributes: [
            { attributeKey: { key: "email", name: "Email" }, value: "john@example.com" },
            { attributeKey: { key: "userId", name: "User ID" }, value: "user-123" },
            { attributeKey: { key: "name", name: "Name" }, value: "John Doe" },
          ],
        },
        {
          // Contact 1: Conflicting contact (userId already exists)
          attributes: [
            { attributeKey: { key: "email", name: "Email" }, value: "jane@example.com" },
            { attributeKey: { key: "userId", name: "User ID" }, value: "existing-user-1" },
            { attributeKey: { key: "name", name: "Name" }, value: "Jane Smith" },
          ],
        },
        {
          // Contact 2: Valid contact with no userId
          attributes: [
            { attributeKey: { key: "email", name: "Email" }, value: "bob@example.com" },
            { attributeKey: { key: "name", name: "Name" }, value: "Bob Johnson" },
          ],
        },
        {
          // Contact 3: Conflicting contact (userId already exists)
          attributes: [
            { attributeKey: { key: "email", name: "Email" }, value: "alice@example.com" },
            { attributeKey: { key: "userId", name: "User ID" }, value: "existing-user-2" },
            { attributeKey: { key: "name", name: "Name" }, value: "Alice Brown" },
          ],
        },
      ];

      const mockParsedEmails = ["john@example.com", "jane@example.com", "bob@example.com", "alice@example.com"];

      // Mock existing user IDs - these will conflict with some of our contacts
      const mockExistingUserIds = [{ value: "existing-user-1" }, { value: "existing-user-2" }];
      vi.mocked(prisma.contactAttribute.findMany).mockResolvedValueOnce(mockExistingUserIds);

      // Mock attribute keys
      const mockAttributeKeys = [
        { id: "attr-key-email", key: "email", environmentId: mockEnvironmentId },
        { id: "attr-key-userId", key: "userId", environmentId: mockEnvironmentId },
        { id: "attr-key-name", key: "name", environmentId: mockEnvironmentId },
      ];
      vi.mocked(prisma.contactAttributeKey.findMany).mockResolvedValueOnce(mockAttributeKeys);

      // Mock existing contacts (none for this test case)
      vi.mocked(prisma.contact.findMany).mockResolvedValueOnce([]);

      // Execute the function
      const result = await upsertBulkContacts(mockContacts, mockEnvironmentId, mockParsedEmails);

      if (result.ok) {
        // Assertions - verify that the function correctly identified contacts with conflicting user IDs
        expect(result.data.contactIdxWithConflictingUserIds).toEqual([1, 3]);

        // Verify that the function checked for existing user IDs
        expect(prisma.contactAttribute.findMany).toHaveBeenCalledWith({
          where: {
            attributeKey: {
              environmentId: mockEnvironmentId,
              key: "userId",
            },
            value: {
              in: ["user-123", "existing-user-1", "existing-user-2"],
            },
          },
          select: {
            value: true,
          },
        });

        // Verify that the function fetched attribute keys for the filtered contacts (without conflicting userIds)
        expect(prisma.contactAttributeKey.findMany).toHaveBeenCalled();

        // Verify that the function checked for existing contacts by email
        expect(prisma.contact.findMany).toHaveBeenCalledWith({
          where: {
            environmentId: mockEnvironmentId,
            attributes: {
              some: {
                attributeKey: { key: "email" },
                value: { in: mockParsedEmails },
              },
            },
          },
          select: {
            attributes: {
              select: {
                attributeKey: { select: { key: true } },
                createdAt: true,
                id: true,
                value: true,
              },
            },
            id: true,
          },
        });

        // Verify that only non-conflicting contacts were processed
        expect(prisma.contact.createMany).toHaveBeenCalledWith({
          data: [
            { id: "mock-id", environmentId: mockEnvironmentId },
            { id: "mock-id", environmentId: mockEnvironmentId },
          ],
        });

        // Verify that the transaction was executed
        expect(prisma.$transaction).toHaveBeenCalled();

        // Verify that caches were revalidated
        expect(contactCache.revalidate).toHaveBeenCalledWith({
          environmentId: mockEnvironmentId,
        });
        expect(contactAttributeKeyCache.revalidate).toHaveBeenCalledWith({
          environmentId: mockEnvironmentId,
        });
        expect(contactAttributeCache.revalidate).toHaveBeenCalledWith({
          environmentId: mockEnvironmentId,
        });
      }
    });

    test("should create missing attribute keys when they are not found in the database", async () => {
      // Mock data: contacts with attributes that include missing attribute keys
      const mockContacts = [
        {
          attributes: [
            { attributeKey: { key: "email", name: "Email" }, value: "john@example.com" },
            { attributeKey: { key: "newKey1", name: "New Key 1" }, value: "value1" },
          ],
        },
        {
          attributes: [
            { attributeKey: { key: "email", name: "Email" }, value: "jane@example.com" },
            { attributeKey: { key: "newKey2", name: "New Key 2" }, value: "value2" },
          ],
        },
      ];
      const mockParsedEmails = ["john@example.com", "jane@example.com"];

      // Mock: no existing user IDs
      vi.mocked(prisma.contactAttribute.findMany).mockResolvedValueOnce([]);
      // Mock: only "email" exists; new keys are missing
      const mockAttributeKeys = [
        { id: "attr-key-email", key: "email", environmentId: mockEnvironmentId, name: "Email" },
        { id: "attr-key-newKey1", key: "newKey1", environmentId: mockEnvironmentId, name: "New Key 1" },
        { id: "attr-key-newKey2", key: "newKey2", environmentId: mockEnvironmentId, name: "New Key 2" },
      ];

      vi.mocked(prisma.contactAttributeKey.findMany).mockResolvedValueOnce(mockAttributeKeys);

      // Mock: no existing contacts for update
      vi.mocked(prisma.contact.findMany).mockResolvedValueOnce([]);

      // Execute the function
      const result = await upsertBulkContacts(mockContacts, mockEnvironmentId, mockParsedEmails);

      // creation of new attribute keys now happens with a raw query
      // so we need to mock that
      vi.mocked(prisma.$queryRaw).mockResolvedValue([
        { id: "attr-key-newKey1", key: "newKey1" },
        { id: "attr-key-newKey2", key: "newKey2" },
      ]);

      if (!result.ok) throw new Error("Expected result.ok to be true");
      expect(result.data).toEqual({ contactIdxWithConflictingUserIds: [] });

      // Verify that new contacts were created
      expect(prisma.contact.createMany).toHaveBeenCalledWith({
        data: [
          { id: "mock-id", environmentId: mockEnvironmentId },
          { id: "mock-id", environmentId: mockEnvironmentId },
        ],
      });

      // Verify that the raw SQL query was executed for inserting attributes
      expect(prisma.$executeRaw).toHaveBeenCalled();

      // Verify that caches were revalidated
      expect(contactAttributeKeyCache.revalidate).toHaveBeenCalledWith({
        environmentId: mockEnvironmentId,
      });
    });

    test("should update attribute key names when they change", async () => {
      // Mock data: a contact with an attribute that has a new name for an existing key
      const mockContacts = [
        {
          attributes: [
            { attributeKey: { key: "email", name: "Email" }, value: "john@example.com" },
            { attributeKey: { key: "name", name: "Full Name" }, value: "John Doe" }, // Changed name from "Name" to "Full Name"
          ],
        },
      ];

      const mockParsedEmails = ["john@example.com"];

      // Mock: no existing userIds conflict
      vi.mocked(prisma.contactAttribute.findMany).mockResolvedValueOnce([]);

      // Mock: attribute keys exist but with different names
      const mockAttributeKeys = [
        { id: "attr-key-email", key: "email", environmentId: mockEnvironmentId, name: "Email" },
        { id: "attr-key-name", key: "name", environmentId: mockEnvironmentId, name: "Name" }, // Original name
      ];
      vi.mocked(prisma.contactAttributeKey.findMany).mockResolvedValueOnce(mockAttributeKeys);

      // Mock: an existing contact
      vi.mocked(prisma.contact.findMany).mockResolvedValueOnce([
        {
          id: "existing-contact-id",
          attributes: [
            {
              id: "existing-email-attr",
              attributeKey: { key: "email", name: "Email" },
              value: "john@example.com",
              createdAt: new Date("2023-01-01"),
            },
            {
              id: "existing-name-attr",
              attributeKey: { key: "name", name: "Name" },
              value: "John Doe",
              createdAt: new Date("2023-01-01"),
            },
          ],
        },
      ]);

      // Mock the transaction
      const mockTransaction = {
        contact: {
          createMany: vi.fn().mockResolvedValue({ count: 0 }),
        },
        $executeRaw: vi.fn().mockResolvedValue({ count: 0 }),
        $queryRaw: vi.fn().mockResolvedValue([{ id: "attr-key-name", key: "name", name: "Full Name" }]),
      };

      vi.mocked(prisma.$transaction).mockImplementationOnce((callback) => {
        return callback(mockTransaction as any);
      });

      // Execute the function
      const result = await upsertBulkContacts(mockContacts, mockEnvironmentId, mockParsedEmails);

      if (!result.ok) throw new Error("Expected result.ok to be true");
      expect(result.data).toEqual({ contactIdxWithConflictingUserIds: [] });

      // Verify that the raw SQL query was executed for updating attribute keys
      vi.mocked(prisma.$queryRaw).mockResolvedValue([{ id: "attr-key-name", key: "name", name: "Full Name" }]);

      // Verify that caches were revalidated
      expect(contactAttributeKeyCache.revalidate).toHaveBeenCalledWith({
        environmentId: mockEnvironmentId,
      });
    });
  });
@@ -0,0 +1,61 @@
import { responses } from "@/modules/api/v2/lib/response";
import { handleApiError } from "@/modules/api/v2/lib/utils";
import { authenticatedApiClient } from "@/modules/api/v2/management/auth/authenticated-api-client";
import { upsertBulkContacts } from "@/modules/ee/contacts/api/v2/management/contacts/bulk/lib/contact";
import { ZContactBulkUploadRequest } from "@/modules/ee/contacts/types/contact";
import { getIsContactsEnabled } from "@/modules/ee/license-check/lib/utils";

export const PUT = async (request: Request) =>
  authenticatedApiClient({
    request,
    schemas: {
      body: ZContactBulkUploadRequest,
    },
    handler: async ({ authentication, parsedInput }) => {
      const isContactsEnabled = await getIsContactsEnabled();
      if (!isContactsEnabled) {
        return handleApiError(request, {
          type: "forbidden",
          details: [{ field: "error", issue: "Contacts are not enabled for this environment." }],
        });
      }

      const { contacts } = parsedInput.body ?? { contacts: [] };
      const { environmentId } = authentication;

      const emails = contacts.map(
        (contact) => contact.attributes.find((attr) => attr.attributeKey.key === "email")?.value!
      );

      const upsertBulkContactsResult = await upsertBulkContacts(contacts, environmentId, emails);

      if (!upsertBulkContactsResult.ok) {
        return handleApiError(request, upsertBulkContactsResult.error);
      }

      const { contactIdxWithConflictingUserIds } = upsertBulkContactsResult.data;

      if (contactIdxWithConflictingUserIds.length) {
        return responses.multiStatusResponse({
          data: {
            status: "success",
            message:
              "Contacts bulk upload partially successful. Some contacts were skipped due to conflicting userIds.",
            meta: {
              skippedContacts: contactIdxWithConflictingUserIds.map((idx) => ({
                index: idx,
                userId: contacts[idx].attributes.find((attr) => attr.attributeKey.key === "userId")?.value,
              })),
            },
          },
        });
      }

      return responses.successResponse({
        data: {
          status: "success",
          message: "Contacts bulk upload successful",
        },
      });
    },
  });
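
For orientation, here is a rough sketch of how a client could call the bulk-upload handler added above. The request path and the x-api-key header are assumptions inferred from the module path and the existing management API conventions, not something this diff spells out; the success vs. multi-status behaviour follows the handler code.

// Hypothetical client call against the bulk contacts endpoint added above.
// The URL path and auth header are assumptions; adjust to your deployment.
const uploadContacts = async (apiHost: string, apiKey: string) => {
  const body = {
    contacts: [
      {
        attributes: [
          { attributeKey: { key: "email", name: "Email" }, value: "john@example.com" },
          { attributeKey: { key: "userId", name: "User ID" }, value: "user-123" },
        ],
      },
    ],
  };

  const res = await fetch(`${apiHost}/api/v2/management/contacts/bulk`, {
    method: "PUT",
    headers: { "content-type": "application/json", "x-api-key": apiKey },
    body: JSON.stringify(body),
  });

  // 200 on full success, 207 (multi-status) when some contacts were skipped
  // because their userId already exists in the environment.
  return res.json();
};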
@@ -1,6 +1,6 @@
 import jwt from "jsonwebtoken";
 import { beforeEach, describe, expect, it, vi } from "vitest";
-import { ENCRYPTION_KEY, WEBAPP_URL } from "@formbricks/lib/constants";
+import { ENCRYPTION_KEY, SURVEY_URL } from "@formbricks/lib/constants";
 import * as crypto from "@formbricks/lib/crypto";
 import * as contactSurveyLink from "./contact-survey-link";

@@ -15,7 +15,7 @@ vi.mock("jsonwebtoken", () => ({
 // Mock constants - MUST be a literal object without using variables
 vi.mock("@formbricks/lib/constants", () => ({
   ENCRYPTION_KEY: "test-encryption-key-32-chars-long!",
-  WEBAPP_URL: "https://test.formbricks.com",
+  SURVEY_URL: "https://test.formbricks.com",
 }));

 vi.mock("@formbricks/lib/crypto", () => ({
@@ -73,7 +73,7 @@ describe("Contact Survey Link", () => {
     // Verify the returned URL
     expect(result).toEqual({
       ok: true,
-      data: `${WEBAPP_URL}/c/${mockToken}`,
+      data: `${SURVEY_URL}/c/${mockToken}`,
     });
   });

@@ -98,7 +98,7 @@ describe("Contact Survey Link", () => {
     // Re-mock constants to simulate missing ENCRYPTION_KEY
     vi.doMock("@formbricks/lib/constants", () => ({
       ENCRYPTION_KEY: undefined,
-      WEBAPP_URL: "https://test.formbricks.com",
+      SURVEY_URL: "https://test.formbricks.com",
     }));
     // Re-import the modules so they pick up the new mock
     const { getContactSurveyLink } = await import("./contact-survey-link");
@@ -172,7 +172,7 @@ describe("Contact Survey Link", () => {
     vi.resetModules();
     vi.doMock("@formbricks/lib/constants", () => ({
       ENCRYPTION_KEY: undefined,
-      WEBAPP_URL: "https://test.formbricks.com",
+      SURVEY_URL: "https://test.formbricks.com",
     }));
     const { verifyContactSurveyToken } = await import("./contact-survey-link");
     const result = verifyContactSurveyToken(mockToken);
@@ -1,7 +1,8 @@
 import { ApiErrorResponseV2 } from "@/modules/api/v2/types/api-error";
 import jwt from "jsonwebtoken";
-import { ENCRYPTION_KEY, WEBAPP_URL } from "@formbricks/lib/constants";
+import { ENCRYPTION_KEY } from "@formbricks/lib/constants";
 import { symmetricDecrypt, symmetricEncrypt } from "@formbricks/lib/crypto";
+import { getSurveyDomain } from "@formbricks/lib/getSurveyUrl";
 import { Result, err, ok } from "@formbricks/types/error-handlers";

 // Creates an encrypted personalized survey link for a contact
@@ -41,7 +42,7 @@ export const getContactSurveyLink = (
   const token = jwt.sign(payload, ENCRYPTION_KEY, tokenOptions);

   // Return the personalized URL
-  return ok(`${WEBAPP_URL}/c/${token}`);
+  return ok(`${getSurveyDomain()}/c/${token}`);
 };

 // Validates and decrypts a contact survey JWT token
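
The hunk above swaps WEBAPP_URL for getSurveyDomain() when building the personalized link. @formbricks/lib/getSurveyUrl itself is not part of this compare; given the new SURVEY_URL environment variable (which defaults to WEBAPP_URL), the helper presumably behaves roughly like the sketch below — an assumption, not the actual implementation.

// Hypothetical sketch of getSurveyDomain(); the real implementation lives in
// @formbricks/lib/getSurveyUrl and is not shown in this diff.
import { SURVEY_URL, WEBAPP_URL } from "@formbricks/lib/constants";

export const getSurveyDomain = (): string => {
  // Fall back to the web app URL when no dedicated survey domain is configured.
  return SURVEY_URL || WEBAPP_URL;
};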
@@ -107,3 +107,138 @@ export const ZContactCSVAttributeMap = z.record(z.string(), z.string()).superRef
  }
});
export type TContactCSVAttributeMap = z.infer<typeof ZContactCSVAttributeMap>;

export const ZContactBulkUploadAttribute = z.object({
  attributeKey: z.object({
    key: z.string(),
    name: z.string(),
  }),
  value: z.string(),
});

export const ZContactBulkUploadContact = z.object({
  attributes: z.array(ZContactBulkUploadAttribute),
});

export type TContactBulkUploadContact = z.infer<typeof ZContactBulkUploadContact>;

export const ZContactBulkUploadRequest = z.object({
  contacts: z
    .array(ZContactBulkUploadContact)
    .max(1000, { message: "Maximum 1000 contacts allowed at a time." })
    .superRefine((contacts, ctx) => {
      // Track all data in a single pass
      const seenEmails = new Set<string>();
      const duplicateEmails = new Set<string>();
      const seenUserIds = new Set<string>();
      const duplicateUserIds = new Set<string>();
      const contactsWithDuplicateKeys: { idx: number; duplicateKeys: string[] }[] = [];

      // Process each contact in a single pass
      contacts.forEach((contact, idx) => {
        // 1. Check email existence and validity
        const emailAttr = contact.attributes.find((attr) => attr.attributeKey.key === "email");
        if (!emailAttr?.value) {
          ctx.addIssue({
            code: z.ZodIssueCode.custom,
            message: `Missing email attribute for contact at index ${idx}`,
          });
        } else {
          // Check email format
          const parsedEmail = z.string().email().safeParse(emailAttr.value);
          if (!parsedEmail.success) {
            ctx.addIssue({
              code: z.ZodIssueCode.custom,
              message: `Invalid email for contact at index ${idx}`,
            });
          }

          // Check for duplicate emails
          if (seenEmails.has(emailAttr.value)) {
            duplicateEmails.add(emailAttr.value);
          } else {
            seenEmails.add(emailAttr.value);
          }
        }

        // 2. Check for userId duplicates
        const userIdAttr = contact.attributes.find((attr) => attr.attributeKey.key === "userId");
        if (userIdAttr?.value) {
          if (seenUserIds.has(userIdAttr.value)) {
            duplicateUserIds.add(userIdAttr.value);
          } else {
            seenUserIds.add(userIdAttr.value);
          }
        }

        // 3. Check for duplicate attribute keys within the same contact
        const keyOccurrences = new Map<string, number>();
        const duplicateKeysForContact: string[] = [];

        contact.attributes.forEach((attr) => {
          const key = attr.attributeKey.key;
          const count = (keyOccurrences.get(key) || 0) + 1;
          keyOccurrences.set(key, count);

          // If this is the second occurrence, add to duplicates
          if (count === 2) {
            duplicateKeysForContact.push(key);
          }
        });

        if (duplicateKeysForContact.length > 0) {
          contactsWithDuplicateKeys.push({ idx, duplicateKeys: duplicateKeysForContact });
        }
      });

      // Report all validation issues after the single pass
      if (duplicateEmails.size > 0) {
        ctx.addIssue({
          code: z.ZodIssueCode.custom,
          message: "Duplicate emails found in the records, please ensure each email is unique.",
          params: {
            duplicateEmails: Array.from(duplicateEmails),
          },
        });
      }

      if (duplicateUserIds.size > 0) {
        ctx.addIssue({
          code: z.ZodIssueCode.custom,
          message: "Duplicate userIds found in the records, please ensure each userId is unique.",
          params: {
            duplicateUserIds: Array.from(duplicateUserIds),
          },
        });
      }

      if (contactsWithDuplicateKeys.length > 0) {
        ctx.addIssue({
          code: z.ZodIssueCode.custom,
          message:
            "Duplicate attribute keys found in the records, please ensure each attribute key is unique.",
          params: {
            contactsWithDuplicateKeys,
          },
        });
      }
    }),
});

export type TContactBulkUploadRequest = z.infer<typeof ZContactBulkUploadRequest>;

export type TContactBulkUploadResponseBase = {
  status: "success" | "error";
  message: string;
};

export type TContactBulkUploadResponseError = TContactBulkUploadResponseBase & {
  status: "error";
  message: string;
  errors: Record<string, string>[];
};

export type TContactBulkUploadResponseSuccess = TContactBulkUploadResponseBase & {
  processed: number;
  failed: number;
};
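
To illustrate the validation rules in ZContactBulkUploadRequest above (missing or invalid emails, duplicate emails, duplicate userIds, duplicate attribute keys per contact), here is a minimal safeParse check; the sample payload is invented for the example.

// Minimal usage sketch for the schema defined above; sample data is made up.
import { ZContactBulkUploadRequest } from "@/modules/ee/contacts/types/contact";

const payload = {
  contacts: [
    {
      attributes: [
        { attributeKey: { key: "email", name: "Email" }, value: "john@example.com" },
        { attributeKey: { key: "email", name: "Email" }, value: "john@example.com" }, // duplicate key within one contact
      ],
    },
    {
      attributes: [{ attributeKey: { key: "userId", name: "User ID" }, value: "user-1" }], // no email attribute
    },
  ],
};

const parsed = ZContactBulkUploadRequest.safeParse(payload);
if (!parsed.success) {
  // Expect issues for the duplicate "email" attribute key on the first contact
  // and the missing email attribute on the second contact.
  console.log(parsed.error.issues.map((issue) => issue.message));
}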
@@ -16,6 +16,7 @@ import {
   SMTP_USER,
   WEBAPP_URL,
 } from "@formbricks/lib/constants";
+import { getSurveyDomain } from "@formbricks/lib/getSurveyUrl";
 import { createInviteToken, createToken, createTokenForLinkSurvey } from "@formbricks/lib/jwt";
 import { getOrganizationByEnvironmentId } from "@formbricks/lib/organization/service";
 import { logger } from "@formbricks/logger";
@@ -270,9 +271,9 @@ export const sendLinkSurveyToVerifiedEmail = async (data: TLinkSurveyEmailData):
   const t = await getTranslate();
   const getSurveyLink = (): string => {
     if (singleUseId) {
-      return `${WEBAPP_URL}/s/${surveyId}?verify=${encodeURIComponent(token)}&suId=${singleUseId}`;
+      return `${getSurveyDomain()}/s/${surveyId}?verify=${encodeURIComponent(token)}&suId=${singleUseId}`;
     }
-    return `${WEBAPP_URL}/s/${surveyId}?verify=${encodeURIComponent(token)}`;
+    return `${getSurveyDomain()}/s/${surveyId}?verify=${encodeURIComponent(token)}`;
   };
   const surveyLink = getSurveyLink();
@@ -22,7 +22,7 @@ interface LinkSurveyWrapperProps {
   IMPRINT_URL?: string;
   PRIVACY_URL?: string;
   IS_FORMBRICKS_CLOUD: boolean;
-  webAppUrl: string;
+  surveyDomain: string;
   isBrandingEnabled: boolean;
 }

@@ -39,7 +39,7 @@ export const LinkSurveyWrapper = ({
   IMPRINT_URL,
   PRIVACY_URL,
   IS_FORMBRICKS_CLOUD,
-  webAppUrl,
+  surveyDomain,
   isBrandingEnabled,
 }: LinkSurveyWrapperProps) => {
   //for embedded survey strip away all surrounding css
@@ -96,7 +96,7 @@ export const LinkSurveyWrapper = ({
         IMPRINT_URL={IMPRINT_URL}
         PRIVACY_URL={PRIVACY_URL}
         IS_FORMBRICKS_CLOUD={IS_FORMBRICKS_CLOUD}
-        surveyUrl={webAppUrl + "/s/" + surveyId}
+        surveyUrl={surveyDomain + "/s/" + surveyId}
       />
     </div>
   );
|
|||||||
emailVerificationStatus?: string;
|
emailVerificationStatus?: string;
|
||||||
singleUseId?: string;
|
singleUseId?: string;
|
||||||
singleUseResponse?: Pick<Response, "id" | "finished">;
|
singleUseResponse?: Pick<Response, "id" | "finished">;
|
||||||
|
surveyDomain: string;
|
||||||
webAppUrl: string;
|
webAppUrl: string;
|
||||||
responseCount?: number;
|
responseCount?: number;
|
||||||
verifiedEmail?: string;
|
verifiedEmail?: string;
|
||||||
@@ -39,6 +40,7 @@ export const LinkSurvey = ({
|
|||||||
emailVerificationStatus,
|
emailVerificationStatus,
|
||||||
singleUseId,
|
singleUseId,
|
||||||
singleUseResponse,
|
singleUseResponse,
|
||||||
|
surveyDomain,
|
||||||
webAppUrl,
|
webAppUrl,
|
||||||
responseCount,
|
responseCount,
|
||||||
verifiedEmail,
|
verifiedEmail,
|
||||||
@@ -166,7 +168,7 @@ export const LinkSurvey = ({
|
|||||||
handleResetSurvey={handleResetSurvey}
|
handleResetSurvey={handleResetSurvey}
|
||||||
determineStyling={determineStyling}
|
determineStyling={determineStyling}
|
||||||
isEmbed={isEmbed}
|
isEmbed={isEmbed}
|
||||||
webAppUrl={webAppUrl}
|
surveyDomain={surveyDomain}
|
||||||
IS_FORMBRICKS_CLOUD={IS_FORMBRICKS_CLOUD}
|
IS_FORMBRICKS_CLOUD={IS_FORMBRICKS_CLOUD}
|
||||||
IMPRINT_URL={IMPRINT_URL}
|
IMPRINT_URL={IMPRINT_URL}
|
||||||
PRIVACY_URL={PRIVACY_URL}
|
PRIVACY_URL={PRIVACY_URL}
|
||||||
|
|||||||
@@ -16,6 +16,7 @@ interface PinScreenProps {
   emailVerificationStatus?: string;
   singleUseId?: string;
   singleUseResponse?: Pick<Response, "id" | "finished">;
+  surveyDomain: string;
   webAppUrl: string;
   IMPRINT_URL?: string;
   PRIVACY_URL?: string;
@@ -32,6 +33,7 @@ export const PinScreen = (props: PinScreenProps) => {
   const {
     surveyId,
     project,
+    surveyDomain,
    webAppUrl,
     emailVerificationStatus,
     singleUseId,
@@ -118,6 +120,7 @@ export const PinScreen = (props: PinScreenProps) => {
       emailVerificationStatus={emailVerificationStatus}
       singleUseId={singleUseId}
       singleUseResponse={singleUseResponse}
+      surveyDomain={surveyDomain}
       webAppUrl={webAppUrl}
       verifiedEmail={verifiedEmail}
       languageCode={languageCode}
@@ -10,6 +10,7 @@ import { getProjectByEnvironmentId } from "@/modules/survey/link/lib/project";
 import { type Response } from "@prisma/client";
 import { notFound } from "next/navigation";
 import { IMPRINT_URL, IS_FORMBRICKS_CLOUD, PRIVACY_URL, WEBAPP_URL } from "@formbricks/lib/constants";
+import { getSurveyDomain } from "@formbricks/lib/getSurveyUrl";
 import { findMatchingLocale } from "@formbricks/lib/utils/locale";
 import { TSurvey } from "@formbricks/types/surveys/types";

@@ -98,11 +99,13 @@ export const renderSurvey = async ({
   const languageCode = getLanguageCode();
   const isSurveyPinProtected = Boolean(survey.pin);
   const responseCount = await getResponseCountBySurveyId(survey.id);
+  const surveyDomain = getSurveyDomain();

   if (isSurveyPinProtected) {
     return (
       <PinScreen
         surveyId={survey.id}
+        surveyDomain={surveyDomain}
         project={project}
         emailVerificationStatus={emailVerificationStatus}
         singleUseId={singleUseId}
@@ -125,6 +128,7 @@ export const renderSurvey = async ({
     <LinkSurvey
       survey={survey}
       project={project}
+      surveyDomain={surveyDomain}
      emailVerificationStatus={emailVerificationStatus}
      singleUseId={singleUseId}
      singleUseResponse={singleUseResponse}
@@ -1,7 +1,7 @@
 import { getSurvey } from "@/modules/survey/lib/survey";
 import { getProjectByEnvironmentId } from "@/modules/survey/link/lib/project";
 import { beforeEach, describe, expect, it, vi } from "vitest";
-import { IS_FORMBRICKS_CLOUD, WEBAPP_URL } from "@formbricks/lib/constants";
+import { IS_FORMBRICKS_CLOUD, SURVEY_URL, WEBAPP_URL } from "@formbricks/lib/constants";
 import { COLOR_DEFAULTS } from "@formbricks/lib/styling/constants";
 import { TSurvey, TSurveyWelcomeCard } from "@formbricks/types/surveys/types";
 import {
@@ -24,6 +24,7 @@ vi.mock("@/modules/survey/link/lib/project", () => ({
 vi.mock("@formbricks/lib/constants", () => ({
   IS_FORMBRICKS_CLOUD: vi.fn(() => false),
   WEBAPP_URL: "https://test.formbricks.com",
+  SURVEY_URL: "https://surveys.test.formbricks.com",
 }));

 vi.mock("@formbricks/lib/styling/constants", () => ({
@@ -170,7 +171,7 @@ describe("Metadata Utils", () => {
     const result = getSurveyOpenGraphMetadata(surveyId, surveyName);

     expect(result).toEqual({
-      metadataBase: new URL(WEBAPP_URL),
+      metadataBase: new URL(SURVEY_URL),
       openGraph: {
         title: surveyName,
         description: "Thanks a lot for your time 🙏",
@@ -1,7 +1,8 @@
 import { getSurvey } from "@/modules/survey/lib/survey";
 import { getProjectByEnvironmentId } from "@/modules/survey/link/lib/project";
 import { Metadata } from "next";
-import { IS_FORMBRICKS_CLOUD, WEBAPP_URL } from "@formbricks/lib/constants";
+import { IS_FORMBRICKS_CLOUD } from "@formbricks/lib/constants";
+import { getSurveyDomain } from "@formbricks/lib/getSurveyUrl";
 import { COLOR_DEFAULTS } from "@formbricks/lib/styling/constants";
 import { TSurveyWelcomeCard } from "@formbricks/types/surveys/types";

@@ -72,7 +73,7 @@ export const getSurveyOpenGraphMetadata = (surveyId: string, surveyName: string)
   const ogImgURL = `/api/v1/og?brandColor=${brandColor}&name=${encodedName}`;

   return {
-    metadataBase: new URL(WEBAPP_URL),
+    metadataBase: new URL(getSurveyDomain()),
     openGraph: {
       title: surveyName,
       description: "Thanks a lot for your time 🙏",
@@ -32,7 +32,7 @@ export const getSurveyMetadata = reactCache(async (surveyId: string) =>
     return survey;
   } catch (error) {
     if (error instanceof Prisma.PrismaClientKnownRequestError) {
-      logger.error(error, "Error getting survey metadata");
+      logger.error(error);
       throw new DatabaseError(error.message);
     }
     throw error;
@@ -16,7 +16,7 @@ interface SurveyCardProps {
   survey: TSurvey;
   environmentId: string;
   isReadOnly: boolean;
-  WEBAPP_URL: string;
+  surveyDomain: string;
   duplicateSurvey: (survey: TSurvey) => void;
   deleteSurvey: (surveyId: string) => void;
   locale: TUserLocale;
@@ -25,7 +25,7 @@ export const SurveyCard = ({
   survey,
   environmentId,
   isReadOnly,
-  WEBAPP_URL,
+  surveyDomain,
   deleteSurvey,
   duplicateSurvey,
   locale,
@@ -102,7 +102,7 @@ export const SurveyCard = ({
       survey={survey}
       key={`surveys-${survey.id}`}
       environmentId={environmentId}
-      webAppUrl={WEBAPP_URL}
+      surveyDomain={surveyDomain}
       disabled={isDraftAndReadOnly}
       refreshSingleUseId={refreshSingleUseId}
       isSurveyCreationDeletionDisabled={isSurveyCreationDeletionDisabled}
@@ -36,7 +36,7 @@ import { CopySurveyModal } from "./copy-survey-modal";
 interface SurveyDropDownMenuProps {
   environmentId: string;
   survey: TSurvey;
-  webAppUrl: string;
+  surveyDomain: string;
   refreshSingleUseId: () => Promise<string | undefined>;
   disabled?: boolean;
   isSurveyCreationDeletionDisabled?: boolean;
@@ -47,7 +47,7 @@ interface SurveyDropDownMenuProps {
 export const SurveyDropDownMenu = ({
   environmentId,
   survey,
-  webAppUrl,
+  surveyDomain,
   refreshSingleUseId,
   disabled,
   isSurveyCreationDeletionDisabled,
@@ -61,7 +61,7 @@ export const SurveyDropDownMenu = ({
   const [isCopyFormOpen, setIsCopyFormOpen] = useState(false);
   const router = useRouter();

-  const surveyUrl = useMemo(() => webAppUrl + "/s/" + survey.id, [survey.id, webAppUrl]);
+  const surveyLink = useMemo(() => surveyDomain + "/s/" + survey.id, [survey.id, surveyDomain]);

   const handleDeleteSurvey = async (surveyId: string) => {
     setLoading(true);
@@ -82,7 +82,7 @@ export const SurveyDropDownMenu = ({
       e.preventDefault();
       setIsDropDownOpen(false);
       const newId = await refreshSingleUseId();
-      const copiedLink = copySurveyLink(surveyUrl, newId);
+      const copiedLink = copySurveyLink(surveyLink, newId);
       navigator.clipboard.writeText(copiedLink);
       toast.success(t("common.copied_to_clipboard"));
       router.refresh();
@@ -19,7 +19,7 @@ import { SurveyLoading } from "./survey-loading";
 interface SurveysListProps {
   environmentId: string;
   isReadOnly: boolean;
-  WEBAPP_URL: string;
+  surveyDomain: string;
   userId: string;
   surveysPerPage: number;
   currentProjectChannel: TProjectConfigChannel;
@@ -37,7 +37,7 @@ export const initialFilters: TSurveyFilters = {
 export const SurveysList = ({
   environmentId,
   isReadOnly,
-  WEBAPP_URL,
+  surveyDomain,
   userId,
   surveysPerPage: surveysLimit,
   currentProjectChannel,
@@ -156,7 +156,7 @@ export const SurveysList = ({
       survey={survey}
       environmentId={environmentId}
       isReadOnly={isReadOnly}
-      WEBAPP_URL={WEBAPP_URL}
+      surveyDomain={surveyDomain}
       duplicateSurvey={handleDuplicateSurvey}
       deleteSurvey={handleDeleteSurvey}
       locale={locale}
Some files were not shown because too many files have changed in this diff.