Compare commits

...

9 Commits

Author SHA1 Message Date
Matti Nannt
3eee5bb50f chore(backport): updated release workflow (#6627) 2025-09-30 17:39:44 +02:00
Victor Hugo dos Santos
d341370b0d refactor: enhance JWT handling and improve security tests (#6619) (#6626) 2025-09-30 09:40:29 -03:00
Anshuman Pandey
391b3a3fb0 fix: backports formbricks.sh fix (#6550) 2025-09-15 17:58:45 +02:00
Victor Santos
0bcd85d658 refactor: enhance removeCondition logic and add comprehensive tests
- Updated the removeCondition function to return a boolean indicating success or failure.
- Implemented cleanup logic to remove empty condition groups after a condition is removed.
- Added tests to ensure correct behavior of removeCondition, including scenarios with nested groups and empty groups.
- Enhanced ConditionsEditor to disable the 'Create Group' button when only one condition exists, and ensure it is enabled when multiple conditions are present.
2025-09-01 08:58:11 -03:00
Matthias Nannt
f59df49588 chore: backport ecr build and push action 2025-08-20 14:55:58 +02:00
Matti Nannt
f08fabfb13 fix(backport): github release action fix (#6425) 2025-08-15 13:26:09 +02:00
Dhruwang Jariwala
ee8af9dd74 chore: metadata tweaks backport (#6421) 2025-08-15 13:02:28 +02:00
Dhruwang Jariwala
1091b40bd1 fix(backport): cross button hover (#6416) 2025-08-14 14:30:05 +02:00
Anshuman Pandey
87a2d727ed fix: disables tabs when single use is enabled [Backport] (#6412) 2025-08-14 04:07:35 -07:00
43 changed files with 5460 additions and 756 deletions


@@ -0,0 +1,312 @@
name: Build and Push Docker Image
description: |
Unified Docker build and push action for both ECR and GHCR registries.
Supports:
- ECR builds for Formbricks Cloud deployment
- GHCR builds for community self-hosting
- Automatic version resolution and tagging
- Conditional signing and deployment tags
inputs:
registry_type:
description: "Registry type: 'ecr' or 'ghcr'"
required: true
# Version input
version:
description: "Explicit version (SemVer only, e.g., 1.2.3). If provided, this version is used directly. If empty, version is auto-generated from branch name."
required: false
experimental_mode:
description: "Enable experimental timestamped versions"
required: false
default: "false"
# ECR specific inputs
ecr_registry:
description: "ECR registry URL (required for ECR builds)"
required: false
ecr_repository:
description: "ECR repository name (required for ECR builds)"
required: false
ecr_region:
description: "ECR AWS region (required for ECR builds)"
required: false
aws_role_arn:
description: "AWS role ARN for ECR authentication (required for ECR builds)"
required: false
# GHCR specific inputs
ghcr_image_name:
description: "GHCR image name (required for GHCR builds)"
required: false
# Deployment options
deploy_production:
description: "Tag image for production deployment"
required: false
default: "false"
deploy_staging:
description: "Tag image for staging deployment"
required: false
default: "false"
is_prerelease:
description: "Whether this is a prerelease (auto-tags for staging/production)"
required: false
default: "false"
# Build options
dockerfile:
description: "Path to Dockerfile"
required: false
default: "apps/web/Dockerfile"
context:
description: "Build context"
required: false
default: "."
outputs:
image_tag:
description: "Resolved image tag used for the build"
value: ${{ steps.version.outputs.version }}
registry_tags:
description: "Complete registry tags that were pushed"
value: ${{ steps.build.outputs.tags }}
image_digest:
description: "Image digest from the build"
value: ${{ steps.build.outputs.digest }}
runs:
using: "composite"
steps:
- name: Validate inputs
shell: bash
env:
REGISTRY_TYPE: ${{ inputs.registry_type }}
ECR_REGISTRY: ${{ inputs.ecr_registry }}
ECR_REPOSITORY: ${{ inputs.ecr_repository }}
ECR_REGION: ${{ inputs.ecr_region }}
AWS_ROLE_ARN: ${{ inputs.aws_role_arn }}
GHCR_IMAGE_NAME: ${{ inputs.ghcr_image_name }}
run: |
set -euo pipefail
if [[ "$REGISTRY_TYPE" != "ecr" && "$REGISTRY_TYPE" != "ghcr" ]]; then
echo "ERROR: registry_type must be 'ecr' or 'ghcr', got: $REGISTRY_TYPE"
exit 1
fi
if [[ "$REGISTRY_TYPE" == "ecr" ]]; then
if [[ -z "$ECR_REGISTRY" || -z "$ECR_REPOSITORY" || -z "$ECR_REGION" || -z "$AWS_ROLE_ARN" ]]; then
echo "ERROR: ECR builds require ecr_registry, ecr_repository, ecr_region, and aws_role_arn"
exit 1
fi
fi
if [[ "$REGISTRY_TYPE" == "ghcr" ]]; then
if [[ -z "$GHCR_IMAGE_NAME" ]]; then
echo "ERROR: GHCR builds require ghcr_image_name"
exit 1
fi
fi
echo "SUCCESS: Input validation passed for $REGISTRY_TYPE build"
- name: Resolve Docker version
id: version
uses: ./.github/actions/resolve-docker-version
with:
version: ${{ inputs.version }}
current_branch: ${{ github.ref_name }}
experimental_mode: ${{ inputs.experimental_mode }}
- name: Update package.json version
uses: ./.github/actions/update-package-version
with:
version: ${{ steps.version.outputs.version }}
- name: Configure AWS credentials (ECR only)
if: ${{ inputs.registry_type == 'ecr' }}
uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a # v4.2.0
with:
role-to-assume: ${{ inputs.aws_role_arn }}
aws-region: ${{ inputs.ecr_region }}
- name: Log in to Amazon ECR (ECR only)
if: ${{ inputs.registry_type == 'ecr' }}
uses: aws-actions/amazon-ecr-login@062b18b96a7aff071d4dc91bc00c4c1a7945b076 # v2.0.1
- name: Set up Docker build tools
uses: ./.github/actions/docker-build-setup
with:
registry: ${{ inputs.registry_type == 'ghcr' && 'ghcr.io' || '' }}
setup_cosign: ${{ inputs.registry_type == 'ghcr' && 'true' || 'false' }}
skip_login_on_pr: ${{ inputs.registry_type == 'ghcr' && 'true' || 'false' }}
- name: Build ECR tag list
if: ${{ inputs.registry_type == 'ecr' }}
id: ecr-tags
shell: bash
env:
IMAGE_TAG: ${{ steps.version.outputs.version }}
ECR_REGISTRY: ${{ inputs.ecr_registry }}
ECR_REPOSITORY: ${{ inputs.ecr_repository }}
DEPLOY_PRODUCTION: ${{ inputs.deploy_production }}
DEPLOY_STAGING: ${{ inputs.deploy_staging }}
IS_PRERELEASE: ${{ inputs.is_prerelease }}
run: |
set -euo pipefail
# Start with the base image tag
TAGS="${ECR_REGISTRY}/${ECR_REPOSITORY}:${IMAGE_TAG}"
# Handle automatic tagging based on release type
if [[ "${IS_PRERELEASE}" == "true" ]]; then
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:staging"
echo "Adding staging tag for prerelease"
elif [[ "${IS_PRERELEASE}" == "false" ]]; then
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:production"
echo "Adding production tag for stable release"
fi
# Handle manual deployment overrides
if [[ "${DEPLOY_PRODUCTION}" == "true" ]]; then
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:production"
echo "Adding production tag (manual override)"
fi
if [[ "${DEPLOY_STAGING}" == "true" ]]; then
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:staging"
echo "Adding staging tag (manual override)"
fi
echo "ECR tags generated:"
echo -e "${TAGS}"
{
echo "tags<<EOF"
echo -e "${TAGS}"
echo "EOF"
} >> "${GITHUB_OUTPUT}"
- name: Generate additional GHCR tags for releases
if: ${{ inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'false' && (github.event_name == 'workflow_call' || github.event_name == 'release' || github.event_name == 'workflow_dispatch') }}
id: ghcr-extra-tags
shell: bash
env:
VERSION: ${{ steps.version.outputs.version }}
IMAGE_NAME: ${{ inputs.ghcr_image_name }}
IS_PRERELEASE: ${{ inputs.is_prerelease }}
run: |
set -euo pipefail
# Start with base version tag
TAGS="ghcr.io/${IMAGE_NAME}:${VERSION}"
# For proper SemVer releases, add major.minor and major tags
if [[ "${VERSION}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
# Extract major and minor versions
MAJOR=$(echo "${VERSION}" | cut -d. -f1)
MINOR=$(echo "${VERSION}" | cut -d. -f2)
TAGS="${TAGS}\nghcr.io/${IMAGE_NAME}:${MAJOR}.${MINOR}"
TAGS="${TAGS}\nghcr.io/${IMAGE_NAME}:${MAJOR}"
echo "Added SemVer tags: ${MAJOR}.${MINOR}, ${MAJOR}"
fi
# Add latest tag for stable releases
if [[ "${IS_PRERELEASE}" == "false" ]]; then
TAGS="${TAGS}\nghcr.io/${IMAGE_NAME}:latest"
echo "Added latest tag for stable release"
fi
echo "Generated GHCR tags:"
echo -e "${TAGS}"
# Debug: Show what will be passed to Docker build
echo "DEBUG: Tags for Docker build step:"
echo -e "${TAGS}"
{
echo "tags<<EOF"
echo -e "${TAGS}"
echo "EOF"
} >> "${GITHUB_OUTPUT}"
- name: Build GHCR metadata (experimental)
if: ${{ inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'true' }}
id: ghcr-meta-experimental
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
with:
images: ghcr.io/${{ inputs.ghcr_image_name }}
tags: |
type=ref,event=branch
type=raw,value=${{ steps.version.outputs.version }}
- name: Debug Docker build tags
shell: bash
run: |
echo "=== DEBUG: Docker Build Configuration ==="
echo "Registry Type: ${{ inputs.registry_type }}"
echo "Experimental Mode: ${{ inputs.experimental_mode }}"
echo "Event Name: ${{ github.event_name }}"
echo "Is Prerelease: ${{ inputs.is_prerelease }}"
echo "Version: ${{ steps.version.outputs.version }}"
if [[ "${{ inputs.registry_type }}" == "ecr" ]]; then
echo "ECR Tags: ${{ steps.ecr-tags.outputs.tags }}"
elif [[ "${{ inputs.experimental_mode }}" == "true" ]]; then
echo "GHCR Experimental Tags: ${{ steps.ghcr-meta-experimental.outputs.tags }}"
else
echo "GHCR Extra Tags: ${{ steps.ghcr-extra-tags.outputs.tags }}"
fi
- name: Build and push Docker image
id: build
uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
with:
project: tw0fqmsx3c
token: ${{ env.DEPOT_PROJECT_TOKEN }}
context: ${{ inputs.context }}
file: ${{ inputs.dockerfile }}
platforms: linux/amd64,linux/arm64
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ inputs.registry_type == 'ecr' && steps.ecr-tags.outputs.tags || (inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'true' && steps.ghcr-meta-experimental.outputs.tags) || (inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'false' && steps.ghcr-extra-tags.outputs.tags) || (inputs.registry_type == 'ghcr' && format('ghcr.io/{0}:{1}', inputs.ghcr_image_name, steps.version.outputs.version)) || (inputs.registry_type == 'ecr' && format('{0}/{1}:{2}', inputs.ecr_registry, inputs.ecr_repository, steps.version.outputs.version)) }}
labels: ${{ inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'true' && steps.ghcr-meta-experimental.outputs.labels || '' }}
secrets: |
database_url=${{ env.DUMMY_DATABASE_URL }}
encryption_key=${{ env.DUMMY_ENCRYPTION_KEY }}
redis_url=${{ env.DUMMY_REDIS_URL }}
sentry_auth_token=${{ env.SENTRY_AUTH_TOKEN }}
env:
DEPOT_PROJECT_TOKEN: ${{ env.DEPOT_PROJECT_TOKEN }}
DUMMY_DATABASE_URL: ${{ env.DUMMY_DATABASE_URL }}
DUMMY_ENCRYPTION_KEY: ${{ env.DUMMY_ENCRYPTION_KEY }}
DUMMY_REDIS_URL: ${{ env.DUMMY_REDIS_URL }}
SENTRY_AUTH_TOKEN: ${{ env.SENTRY_AUTH_TOKEN }}
- name: Sign GHCR image (GHCR only)
if: ${{ inputs.registry_type == 'ghcr' && (github.event_name == 'workflow_call' || github.event_name == 'release' || github.event_name == 'workflow_dispatch') }}
shell: bash
env:
TAGS: ${{ inputs.experimental_mode == 'true' && steps.ghcr-meta-experimental.outputs.tags || steps.ghcr-extra-tags.outputs.tags }}
DIGEST: ${{ steps.build.outputs.digest }}
run: |
set -euo pipefail
echo "${TAGS}" | xargs -I {} cosign sign --yes "{}@${DIGEST}"
- name: Output build summary
shell: bash
env:
REGISTRY_TYPE: ${{ inputs.registry_type }}
IMAGE_TAG: ${{ steps.version.outputs.version }}
VERSION_SOURCE: ${{ steps.version.outputs.source }}
run: |
echo "SUCCESS: Built and pushed Docker image to $REGISTRY_TYPE"
echo "Image Tag: $IMAGE_TAG (source: $VERSION_SOURCE)"
if [[ "$REGISTRY_TYPE" == "ecr" ]]; then
echo "ECR Registry: ${{ inputs.ecr_registry }}"
echo "ECR Repository: ${{ inputs.ecr_repository }}"
else
echo "GHCR Image: ghcr.io/${{ inputs.ghcr_image_name }}"
fi
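Note: the ECR tag-list step above strings tags together with literal \n escapes and hands the multiline value to later steps through the heredoc form of GITHUB_OUTPUT. A minimal standalone sketch of that pattern, runnable outside Actions — the registry, repository, and version values are placeholders, not Formbricks settings:

  # Sketch of the ECR tag-list logic above, with placeholder values.
  set -euo pipefail
  ECR_REGISTRY="123456789012.dkr.ecr.eu-central-1.amazonaws.com"  # placeholder
  ECR_REPOSITORY="formbricks-cloud"                               # placeholder
  IMAGE_TAG="1.2.3"
  IS_PRERELEASE="false"
  TAGS="${ECR_REGISTRY}/${ECR_REPOSITORY}:${IMAGE_TAG}"
  if [[ "${IS_PRERELEASE}" == "true" ]]; then
    TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:staging"
  else
    TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:production"
  fi
  # echo -e expands the \n escapes into real newlines; the tags<<EOF ... EOF
  # heredoc lets GITHUB_OUTPUT carry the result as one multiline output value.
  GITHUB_OUTPUT="${GITHUB_OUTPUT:-/tmp/github_output}"
  {
    echo "tags<<EOF"
    echo -e "${TAGS}"
    echo "EOF"
  } >> "${GITHUB_OUTPUT}"
  echo -e "${TAGS}"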


@@ -0,0 +1,106 @@
name: Docker Build Setup
description: |
Sets up common Docker build tools and authentication with security validation.
Security Features:
- Registry URL validation
- Input sanitization
- Conditional setup based on event type
- Post-setup verification
Supports Depot CLI, Cosign signing, and Docker registry authentication.
inputs:
registry:
description: "Docker registry hostname to login to (e.g., ghcr.io, registry.example.com:5000). No paths allowed."
required: false
default: "ghcr.io"
setup_cosign:
description: "Whether to install cosign for image signing"
required: false
default: "true"
skip_login_on_pr:
description: "Whether to skip registry login on pull requests"
required: false
default: "true"
runs:
using: "composite"
steps:
- name: Validate inputs
shell: bash
env:
REGISTRY: ${{ inputs.registry }}
SETUP_COSIGN: ${{ inputs.setup_cosign }}
SKIP_LOGIN_ON_PR: ${{ inputs.skip_login_on_pr }}
run: |
set -euo pipefail
# Security: Validate registry input - must be hostname[:port] only, no paths
# Allow empty registry for cases where login is handled externally (e.g., ECR)
if [[ -n "$REGISTRY" ]]; then
if [[ "$REGISTRY" =~ / ]]; then
echo "ERROR: Invalid registry format: $REGISTRY"
echo "Registry must be host[:port] with no path (e.g., 'ghcr.io' or 'registry.example.com:5000')"
echo "Path components like 'ghcr.io/org' are not allowed as they break docker login"
exit 1
fi
# Validate hostname with optional port format
if [[ ! "$REGISTRY" =~ ^[a-zA-Z0-9.-]+(\:[0-9]+)?$ ]]; then
echo "ERROR: Invalid registry hostname format: $REGISTRY"
echo "Registry must be a valid hostname optionally with port (e.g., 'ghcr.io' or 'registry.example.com:5000')"
exit 1
fi
fi
# Validate boolean inputs
if [[ "$SETUP_COSIGN" != "true" && "$SETUP_COSIGN" != "false" ]]; then
echo "ERROR: setup_cosign must be 'true' or 'false', got: $SETUP_COSIGN"
exit 1
fi
if [[ "$SKIP_LOGIN_ON_PR" != "true" && "$SKIP_LOGIN_ON_PR" != "false" ]]; then
echo "ERROR: skip_login_on_pr must be 'true' or 'false', got: $SKIP_LOGIN_ON_PR"
exit 1
fi
echo "SUCCESS: Input validation passed"
- name: Set up Depot CLI
uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
- name: Install cosign
# Install cosign when requested AND when we might actually sign images
# (i.e., non-PR contexts or when we login on PRs)
if: ${{ inputs.setup_cosign == 'true' && (inputs.skip_login_on_pr == 'false' || github.event_name != 'pull_request') }}
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
- name: Log into registry
if: ${{ inputs.registry != '' && (inputs.skip_login_on_pr == 'false' || github.event_name != 'pull_request') }}
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ${{ inputs.registry }}
username: ${{ github.actor }}
password: ${{ github.token }}
- name: Verify setup completion
shell: bash
run: |
set -euo pipefail
# Verify Depot CLI is available
if ! command -v depot >/dev/null 2>&1; then
echo "ERROR: Depot CLI not found in PATH"
exit 1
fi
# Verify cosign if it should be installed (same conditions as install step)
if [[ "${{ inputs.setup_cosign }}" == "true" ]] && [[ "${{ inputs.skip_login_on_pr }}" == "false" || "${{ github.event_name }}" != "pull_request" ]]; then
if ! command -v cosign >/dev/null 2>&1; then
echo "ERROR: Cosign not found in PATH despite being requested"
exit 1
fi
fi
echo "SUCCESS: Docker build setup completed successfully"


@@ -0,0 +1,192 @@
name: Resolve Docker Version
description: |
Resolves and validates Docker-compatible SemVer versions for container builds with comprehensive security.
Security Features:
- Command injection protection
- Input sanitization and validation
- Docker tag character restrictions
- Length limits and boundary checks
- Safe branch name handling
Supports multiple modes: release, manual override, branch auto-detection, and experimental timestamped versions.
inputs:
version:
description: "Explicit version (SemVer only, e.g., 1.2.3-beta). If provided, this version is used directly. If empty, version is auto-generated from branch name."
required: false
current_branch:
description: "Current branch name for auto-detection"
required: true
experimental_mode:
description: "Enable experimental mode with timestamp-based versions"
required: false
default: "false"
outputs:
version:
description: "Resolved Docker-compatible SemVer version"
value: ${{ steps.resolve.outputs.version }}
source:
description: "Source of version (release|override|branch)"
value: ${{ steps.resolve.outputs.source }}
normalized:
description: "Whether the version was normalized (true/false)"
value: ${{ steps.resolve.outputs.normalized }}
runs:
using: "composite"
steps:
- name: Resolve and validate Docker version
id: resolve
shell: bash
env:
EXPLICIT_VERSION: ${{ inputs.version }}
CURRENT_BRANCH: ${{ inputs.current_branch }}
EXPERIMENTAL_MODE: ${{ inputs.experimental_mode }}
run: |
set -euo pipefail
# Function to validate SemVer format (Docker-compatible, no '+' build metadata)
validate_semver() {
local version="$1"
local context="$2"
if [[ ! "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]]; then
echo "ERROR: Invalid $context format. Must be semver without build metadata (e.g., 1.2.3, 1.2.3-alpha)"
echo "Provided: $version"
echo "Note: Docker tags cannot contain '+' characters. Use prerelease identifiers instead."
exit 1
fi
}
# Function to generate branch-based version
generate_branch_version() {
local branch="$1"
local use_timestamp="${2:-true}"
local timestamp
if [[ "$use_timestamp" == "true" ]]; then
timestamp=$(date +%s)
else
timestamp=""
fi
# Sanitize branch name for Docker compatibility
local sanitized_branch=$(echo "$branch" | sed 's/[^a-zA-Z0-9.-]/-/g' | sed 's/--*/-/g' | sed 's/^-\|-$//g')
# Additional safety: truncate if too long (reserve space for prefix and timestamp)
if (( ${#sanitized_branch} > 80 )); then
sanitized_branch="${sanitized_branch:0:80}"
echo "INFO: Branch name truncated for Docker compatibility" >&2
fi
local version
# Generate version based on branch name (unified approach)
# All branches get alpha versions with sanitized branch name
if [[ -n "$timestamp" ]]; then
version="0.0.0-alpha-$sanitized_branch-$timestamp"
echo "INFO: Branch '$branch' detected - alpha version: $version" >&2
else
version="0.0.0-alpha-$sanitized_branch"
echo "INFO: Branch '$branch' detected - alpha version: $version" >&2
fi
echo "$version"
}
# Input validation and sanitization
if [[ -z "$CURRENT_BRANCH" ]]; then
echo "ERROR: current_branch input is required"
exit 1
fi
# Security: Validate inputs to prevent command injection
# Use grep to check for dangerous characters (more reliable than bash regex)
validate_input() {
local input="$1"
local name="$2"
# Check for dangerous characters using grep
if echo "$input" | grep -q '[;|&`$(){}\\[:space:]]'; then
echo "ERROR: $name contains potentially dangerous characters: $input"
echo "Input should only contain letters, numbers, hyphens, underscores, dots, and forward slashes"
return 1
fi
return 0
}
# Validate current branch
if ! validate_input "$CURRENT_BRANCH" "Branch name"; then
exit 1
fi
# Validate explicit version if provided
if [[ -n "$EXPLICIT_VERSION" ]] && ! validate_input "$EXPLICIT_VERSION" "Explicit version"; then
exit 1
fi
# Main resolution logic (ultra-simplified)
NORMALIZED="false"
if [[ -n "$EXPLICIT_VERSION" ]]; then
# Use provided explicit version (from either workflow_call or manual input)
validate_semver "$EXPLICIT_VERSION" "explicit version"
# Normalize to lowercase for Docker/ECR compatibility
RESOLVED_VERSION="${EXPLICIT_VERSION,,}"
if [[ "$EXPLICIT_VERSION" != "$RESOLVED_VERSION" ]]; then
NORMALIZED="true"
echo "INFO: Original version contained uppercase characters, normalized: $EXPLICIT_VERSION -> $RESOLVED_VERSION"
fi
SOURCE="explicit"
echo "INFO: Using explicit version: $RESOLVED_VERSION"
else
# Auto-generate version from branch name
if [[ "$EXPERIMENTAL_MODE" == "true" ]]; then
# Use timestamped version generation
echo "INFO: Experimental mode: generating timestamped version from branch: $CURRENT_BRANCH"
RESOLVED_VERSION=$(generate_branch_version "$CURRENT_BRANCH" "true")
SOURCE="experimental"
else
# Standard branch version (no timestamp)
echo "INFO: Auto-detecting version from branch: $CURRENT_BRANCH"
RESOLVED_VERSION=$(generate_branch_version "$CURRENT_BRANCH" "false")
SOURCE="branch"
fi
echo "Generated version: $RESOLVED_VERSION"
fi
# Final validation - ensure result is valid Docker tag
if [[ -z "$RESOLVED_VERSION" ]]; then
echo "ERROR: Failed to resolve version"
exit 1
fi
if (( ${#RESOLVED_VERSION} > 128 )); then
echo "ERROR: Version must be at most 128 characters (Docker limitation)"
echo "Generated version: $RESOLVED_VERSION (${#RESOLVED_VERSION} chars)"
exit 1
fi
if [[ ! "$RESOLVED_VERSION" =~ ^[a-z0-9._-]+$ ]]; then
echo "ERROR: Version contains invalid characters for Docker tags"
echo "Version: $RESOLVED_VERSION"
exit 1
fi
if [[ "$RESOLVED_VERSION" =~ ^[.-] || "$RESOLVED_VERSION" =~ [.-]$ ]]; then
echo "ERROR: Version must not start or end with '.' or '-'"
echo "Version: $RESOLVED_VERSION"
exit 1
fi
# Output results
echo "SUCCESS: Resolved Docker version: $RESOLVED_VERSION (source: $SOURCE)"
echo "version=$RESOLVED_VERSION" >> $GITHUB_OUTPUT
echo "source=$SOURCE" >> $GITHUB_OUTPUT
echo "normalized=$NORMALIZED" >> $GITHUB_OUTPUT


@@ -0,0 +1,160 @@
name: Update Package Version
description: |
Safely updates package.json version with comprehensive validation and atomic operations.
Security Features:
- Path traversal protection
- SemVer validation with length limits
- Atomic file operations with backup/recovery
- JSON validation before applying changes
This action is designed to be secure by default and prevent common attack vectors.
inputs:
version:
description: "Version to set in package.json (must be valid SemVer)"
required: true
package_path:
description: "Path to package.json file"
required: false
default: "./apps/web/package.json"
outputs:
updated_version:
description: "The version that was actually set in package.json"
value: ${{ steps.update.outputs.updated_version }}
runs:
using: "composite"
steps:
- name: Update and verify package.json version
id: update
shell: bash
env:
VERSION: ${{ inputs.version }}
PACKAGE_PATH: ${{ inputs.package_path }}
run: |
set -euo pipefail
# Validate inputs
if [[ -z "$VERSION" ]]; then
echo "ERROR: version input is required"
exit 1
fi
# Security: Validate package_path to prevent path traversal attacks
# Only allow paths within the workspace and must end with package.json
if [[ "$PACKAGE_PATH" =~ \.\./|^/|^~ ]]; then
echo "ERROR: Invalid package path - path traversal detected: $PACKAGE_PATH"
echo "Package path must be relative to workspace root and cannot contain '../', start with '/', or '~'"
exit 1
fi
if [[ ! "$PACKAGE_PATH" =~ package\.json$ ]]; then
echo "ERROR: Package path must end with 'package.json': $PACKAGE_PATH"
exit 1
fi
# Resolve to absolute path within workspace for additional security
WORKSPACE_ROOT="${GITHUB_WORKSPACE:-$(pwd)}"
# Use realpath to resolve both paths and handle symlinks properly
WORKSPACE_ROOT=$(realpath "$WORKSPACE_ROOT")
RESOLVED_PATH=$(realpath "${WORKSPACE_ROOT}/${PACKAGE_PATH}")
# Ensure WORKSPACE_ROOT has a trailing slash for proper prefix matching
WORKSPACE_ROOT="${WORKSPACE_ROOT}/"
# Use shell string matching to ensure RESOLVED_PATH is within workspace
# This is more secure than regex and handles edge cases properly
if [[ "$RESOLVED_PATH" != "$WORKSPACE_ROOT"* ]]; then
echo "ERROR: Resolved path is outside workspace: $RESOLVED_PATH"
echo "Workspace root: $WORKSPACE_ROOT"
exit 1
fi
if [[ ! -f "$RESOLVED_PATH" ]]; then
echo "ERROR: package.json not found at: $RESOLVED_PATH"
exit 1
fi
# Use resolved path for operations
PACKAGE_PATH="$RESOLVED_PATH"
# Validate SemVer format with additional security checks
if [[ ${#VERSION} -gt 128 ]]; then
echo "ERROR: Version string too long (${#VERSION} chars, max 128): $VERSION"
exit 1
fi
if [[ ! "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]]; then
echo "ERROR: Invalid SemVer format: $VERSION"
echo "Expected format: MAJOR.MINOR.PATCH[-PRERELEASE]"
echo "Only alphanumeric characters, dots, and hyphens allowed in prerelease"
exit 1
fi
# Additional validation: Check for reasonable version component sizes
# Extract base version (MAJOR.MINOR.PATCH) without prerelease/build metadata
if [[ "$VERSION" =~ ^([0-9]+\.[0-9]+\.[0-9]+) ]]; then
BASE_VERSION="${BASH_REMATCH[1]}"
else
echo "ERROR: Could not extract base version from: $VERSION"
exit 1
fi
# Split version components safely
IFS='.' read -ra VERSION_PARTS <<< "$BASE_VERSION"
# Validate component sizes (should have exactly 3 parts due to regex above)
if (( ${VERSION_PARTS[0]} > 999 || ${VERSION_PARTS[1]} > 999 || ${VERSION_PARTS[2]} > 999 )); then
echo "ERROR: Version components too large (max 999 each): $VERSION"
echo "Components: ${VERSION_PARTS[0]}.${VERSION_PARTS[1]}.${VERSION_PARTS[2]}"
exit 1
fi
echo "Updating package.json version to: $VERSION"
# Create backup for atomic operations
BACKUP_PATH="${PACKAGE_PATH}.backup.$$"
cp "$PACKAGE_PATH" "$BACKUP_PATH"
# Use jq to safely update the version field with error handling
if ! jq --arg version "$VERSION" '.version = $version' "$PACKAGE_PATH" > "${PACKAGE_PATH}.tmp"; then
echo "ERROR: jq failed to process package.json"
rm -f "${PACKAGE_PATH}.tmp" "$BACKUP_PATH"
exit 1
fi
# Validate the generated JSON before applying changes
if ! jq empty "${PACKAGE_PATH}.tmp" 2>/dev/null; then
echo "ERROR: Generated invalid JSON"
rm -f "${PACKAGE_PATH}.tmp" "$BACKUP_PATH"
exit 1
fi
# Atomic move operation
if ! mv "${PACKAGE_PATH}.tmp" "$PACKAGE_PATH"; then
echo "ERROR: Failed to update package.json"
# Restore backup
mv "$BACKUP_PATH" "$PACKAGE_PATH"
exit 1
fi
# Verify the update was successful
UPDATED_VERSION=$(jq -r '.version' "$PACKAGE_PATH" 2>/dev/null)
if [[ "$UPDATED_VERSION" != "$VERSION" ]]; then
echo "ERROR: Version update failed!"
echo "Expected: $VERSION"
echo "Actual: $UPDATED_VERSION"
# Restore backup
mv "$BACKUP_PATH" "$PACKAGE_PATH"
exit 1
fi
# Clean up backup on success
rm -f "$BACKUP_PATH"
echo "SUCCESS: Updated package.json version to: $UPDATED_VERSION"
echo "updated_version=$UPDATED_VERSION" >> $GITHUB_OUTPUT


@@ -1,104 +0,0 @@
name: "Upload Sentry Sourcemaps"
description: "Extract sourcemaps from Docker image and upload to Sentry"
inputs:
docker_image:
description: "Docker image to extract sourcemaps from"
required: true
release_version:
description: "Sentry release version (e.g., v1.2.3)"
required: true
sentry_auth_token:
description: "Sentry authentication token"
required: true
environment:
description: "Sentry environment (e.g., production, staging)"
required: false
default: "staging"
runs:
using: "composite"
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Extract sourcemaps from Docker image
shell: bash
env:
DOCKER_IMAGE: ${{ inputs.docker_image }}
run: |
set -euo pipefail
# Validate docker image format (basic validation)
if [[ ! "$DOCKER_IMAGE" =~ ^[a-zA-Z0-9._/-]+:[a-zA-Z0-9._-]+$ ]] && [[ ! "$DOCKER_IMAGE" =~ ^[a-zA-Z0-9._/-]+@sha256:[A-Fa-f0-9]{64}$ ]]; then
echo "❌ Error: Invalid docker image format. Must be in format 'image:tag' or 'image@sha256:hash'"
echo "Provided: ${DOCKER_IMAGE}"
exit 1
fi
echo "📦 Extracting sourcemaps from Docker image: ${DOCKER_IMAGE}"
# Create temporary container from the image and capture its ID
echo "Creating temporary container..."
CONTAINER_ID=$(docker create "$DOCKER_IMAGE")
echo "Container created with ID: ${CONTAINER_ID}"
# Set up cleanup function to ensure container is removed on script exit
cleanup_container() {
# Capture the current exit code to preserve it
local original_exit_code=$?
echo "🧹 Cleaning up Docker container..."
# Remove the container if it exists (ignore errors if already removed)
if [ -n "$CONTAINER_ID" ]; then
docker rm -f "$CONTAINER_ID" 2>/dev/null || true
echo "Container ${CONTAINER_ID} removed"
fi
# Exit with the original exit code to preserve script success/failure status
exit $original_exit_code
}
# Register cleanup function to run on script exit (success or failure)
trap cleanup_container EXIT
# Extract .next directory containing sourcemaps
docker cp "$CONTAINER_ID:/home/nextjs/apps/web/.next" ./extracted-next
# Verify sourcemaps exist
if [ ! -d "./extracted-next/static/chunks" ]; then
echo "❌ Error: .next/static/chunks directory not found in Docker image"
echo "Expected structure: /home/nextjs/apps/web/.next/static/chunks/"
exit 1
fi
sourcemap_count=$(find ./extracted-next/static/chunks -name "*.map" | wc -l)
echo "✅ Found ${sourcemap_count} sourcemap files"
if [ "$sourcemap_count" -eq 0 ]; then
echo "❌ Error: No sourcemap files found. Check that productionBrowserSourceMaps is enabled."
exit 1
fi
- name: Create Sentry release and upload sourcemaps
uses: getsentry/action-release@v3
env:
SENTRY_AUTH_TOKEN: ${{ inputs.sentry_auth_token }}
SENTRY_ORG: formbricks
SENTRY_PROJECT: formbricks-cloud
with:
environment: ${{ inputs.environment }}
version: ${{ inputs.release_version }}
sourcemaps: "./extracted-next/"
- name: Clean up extracted files
shell: bash
if: always()
run: |
set -euo pipefail
# Clean up extracted files
rm -rf ./extracted-next
echo "🧹 Cleaned up extracted files"


@@ -0,0 +1,88 @@
name: Build Cloud Deployment Images
# This workflow builds Formbricks Docker images for ECR deployment:
# - workflow_call: Used by releases with explicit SemVer versions
# - workflow_dispatch: Auto-detects version from current branch or uses override
on:
workflow_dispatch:
inputs:
version_override:
description: "Override version (SemVer only, e.g., 1.2.3). Leave empty to auto-detect from branch."
required: false
type: string
deploy_production:
description: "Tag image for production deployment"
required: false
default: false
type: boolean
deploy_staging:
description: "Tag image for staging deployment"
required: false
default: false
type: boolean
workflow_call:
inputs:
image_tag:
description: "Image tag to push (required for workflow_call)"
required: true
type: string
IS_PRERELEASE:
description: "Whether this is a prerelease (auto-tags for staging/production)"
required: false
type: boolean
default: false
outputs:
IMAGE_TAG:
description: "Normalized image tag used for the build"
value: ${{ jobs.build-and-push.outputs.IMAGE_TAG }}
TAGS:
description: "Newline-separated list of ECR tags pushed"
value: ${{ jobs.build-and-push.outputs.TAGS }}
permissions:
contents: read
id-token: write
env:
ECR_REGION: ${{ vars.ECR_REGION }}
# ECR settings are sourced from repository/environment variables for portability across envs/forks
ECR_REGISTRY: ${{ vars.ECR_REGISTRY }}
ECR_REPOSITORY: ${{ vars.ECR_REPOSITORY }}
jobs:
build-and-push:
name: Build and Push
runs-on: ubuntu-latest
timeout-minutes: 45
outputs:
IMAGE_TAG: ${{ steps.build.outputs.image_tag }}
TAGS: ${{ steps.build.outputs.registry_tags }}
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Build and push cloud deployment image
id: build
uses: ./.github/actions/build-and-push-docker
with:
registry_type: "ecr"
ecr_registry: ${{ env.ECR_REGISTRY }}
ecr_repository: ${{ env.ECR_REPOSITORY }}
ecr_region: ${{ env.ECR_REGION }}
aws_role_arn: ${{ secrets.AWS_ECR_PUSH_ROLE_ARN }}
version: ${{ inputs.version_override || inputs.image_tag }}
deploy_production: ${{ inputs.deploy_production }}
deploy_staging: ${{ inputs.deploy_staging }}
is_prerelease: ${{ inputs.IS_PRERELEASE }}
env:
DEPOT_PROJECT_TOKEN: ${{ secrets.DEPOT_PROJECT_TOKEN }}
DUMMY_DATABASE_URL: ${{ secrets.DUMMY_DATABASE_URL }}
DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
DUMMY_REDIS_URL: ${{ secrets.DUMMY_REDIS_URL }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
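Note: besides the workflow_call path used by releases, this workflow can be started by hand. A possible manual dispatch with the GitHub CLI, assuming this is the build-and-push-ecr.yml file referenced later in release.yml; the branch name is an assumption and boolean inputs are passed as strings:

  gh workflow run build-and-push-ecr.yml \
    --ref main \
    -f version_override=1.2.3 \
    -f deploy_staging=true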


@@ -4,7 +4,7 @@ on:
workflow_dispatch:
inputs:
VERSION:
description: "The version of the Docker image to release, full image tag if image tag is v0.0.0 enter v0.0.0."
description: "The version of the Docker image to release (clean SemVer, e.g., 1.2.3)"
required: true
type: string
REPOSITORY:
@@ -37,7 +37,7 @@ on:
permissions:
id-token: write
contents: write
contents: read
jobs:
helmfile-deploy:


@@ -21,10 +21,10 @@ jobs:
name: Validate Docker Build
runs-on: ubuntu-latest
# Add PostgreSQL service container
# Add PostgreSQL and Redis service containers
services:
postgres:
image: pgvector/pgvector:pg17
image: pgvector/pgvector@sha256:9ae02a756ba16a2d69dd78058e25915e36e189bb36ddf01ceae86390d7ed786a
env:
POSTGRES_USER: test
POSTGRES_PASSWORD: test
@@ -38,6 +38,11 @@ jobs:
--health-timeout 5s
--health-retries 5
redis:
image: valkey/valkey@sha256:12ba4f45a7c3e1d0f076acd616cb230834e75a77e8516dde382720af32832d6d
ports:
- 6379:6379
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
@@ -67,6 +72,7 @@ jobs:
secrets: |
database_url=${{ secrets.DUMMY_DATABASE_URL }}
encryption_key=${{ secrets.DUMMY_ENCRYPTION_KEY }}
redis_url=redis://localhost:6379
- name: Verify and Initialize PostgreSQL
run: |
@@ -96,6 +102,29 @@ jobs:
echo "Network configuration:"
netstat -tulpn | grep 5432 || echo "No process listening on port 5432"
- name: Verify Redis/Valkey Connection
run: |
echo "Verifying Redis/Valkey connection..."
# Install Redis client to test connection
sudo apt-get update && sudo apt-get install -y redis-tools
# Test connection using redis-cli with timeout and proper error handling
echo "Testing Redis connection with 30 second timeout..."
if timeout 30 bash -c 'until redis-cli -h localhost -p 6379 ping >/dev/null 2>&1; do
echo "Waiting for Redis to be ready..."
sleep 2
done'; then
echo "✅ Redis connection successful"
redis-cli -h localhost -p 6379 info server | head -5
else
echo "❌ Redis connection failed after 30 seconds"
exit 1
fi
# Show network configuration for Redis
echo "Redis network configuration:"
netstat -tulpn | grep 6379 || echo "No process listening on port 6379"
- name: Test Docker Image with Health Check
shell: bash
env:
@@ -113,6 +142,7 @@ jobs:
-p 3000:3000 \
-e DATABASE_URL="postgresql://test:test@host.docker.internal:5432/formbricks" \
-e ENCRYPTION_KEY="$DUMMY_ENCRYPTION_KEY" \
-e REDIS_URL="redis://host.docker.internal:6379" \
-d "formbricks-test:$GITHUB_SHA"
# Start health check polling immediately (every 5 seconds for up to 5 minutes)
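Note: the polling loop referenced by the comment above sits outside this hunk. A generic sketch of such a health poll — the /health endpoint and container name are assumptions, not taken from the repository:

  # Poll every 5 seconds for up to 5 minutes (60 attempts), then dump logs on failure.
  for i in $(seq 1 60); do
    if curl -fsS http://localhost:3000/health >/dev/null 2>&1; then
      echo "App became healthy after $((i * 5)) seconds"
      exit 0
    fi
    sleep 5
  done
  echo "App did not become healthy within 5 minutes"
  docker logs formbricks-test || true  # container name is an assumption
  exit 1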

View File

@@ -17,7 +17,34 @@ jobs:
scan:
name: Vulnerability Scan
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- name: Harden the runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
with:
egress-policy: audit
- name: Checkout (for SARIF fingerprinting only)
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 1
- name: Determine ref and commit for upload
id: gitref
shell: bash
env:
EVENT_NAME: ${{ github.event_name }}
HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
run: |
set -euo pipefail
if [[ "${EVENT_NAME}" == "workflow_run" ]]; then
echo "ref=refs/heads/${HEAD_BRANCH}" >> "$GITHUB_OUTPUT"
echo "sha=${HEAD_SHA}" >> "$GITHUB_OUTPUT"
else
echo "ref=${GITHUB_REF}" >> "$GITHUB_OUTPUT"
echo "sha=${GITHUB_SHA}" >> "$GITHUB_OUTPUT"
fi
- name: Log in to GitHub Container Registry
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
with:
@@ -35,6 +62,9 @@ jobs:
- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@a4e1a019f5e24960714ff6296aee04b736cbc3cf # v3.29.6
if: ${{ always() && hashFiles('trivy-results.sarif') != '' }}
if: ${{ always() }}
with:
sarif_file: "trivy-results.sarif"
ref: ${{ steps.gitref.outputs.ref }}
sha: ${{ steps.gitref.outputs.sha }}
category: "trivy-container-scan"


@@ -182,4 +182,4 @@ jobs:
- name: Output App Logs
if: failure()
run: cat app.log
run: cat app.log


@@ -7,61 +7,75 @@ on:
permissions:
contents: read
env:
ENVIRONMENT: ${{ github.event.release.prerelease && 'staging' || 'production' }}
jobs:
docker-build:
name: Build & release docker image
docker-build-community:
name: Build & release community docker image
permissions:
contents: read
packages: write
id-token: write
uses: ./.github/workflows/release-docker-github.yml
secrets: inherit
with:
IS_PRERELEASE: ${{ github.event.release.prerelease }}
docker-build-cloud:
name: Build & push Formbricks Cloud to ECR
permissions:
contents: read
id-token: write
uses: ./.github/workflows/build-and-push-ecr.yml
secrets: inherit
with:
image_tag: ${{ needs.docker-build-community.outputs.VERSION }}
IS_PRERELEASE: ${{ github.event.release.prerelease }}
needs:
- docker-build-community
helm-chart-release:
name: Release Helm Chart
permissions:
contents: read
packages: write
uses: ./.github/workflows/release-helm-chart.yml
secrets: inherit
needs:
- docker-build
- docker-build-community
with:
VERSION: ${{ needs.docker-build.outputs.VERSION }}
VERSION: ${{ needs.docker-build-community.outputs.VERSION }}
deploy-formbricks-cloud:
name: Deploy Helm Chart to Formbricks Cloud
secrets: inherit
uses: ./.github/workflows/deploy-formbricks-cloud.yml
needs:
- docker-build
- helm-chart-release
with:
VERSION: v${{ needs.docker-build.outputs.VERSION }}
ENVIRONMENT: ${{ env.ENVIRONMENT }}
upload-sentry-sourcemaps:
name: Upload Sentry Sourcemaps
verify-cloud-build:
name: Verify Cloud Build Outputs
runs-on: ubuntu-latest
permissions:
contents: read
timeout-minutes: 5 # Simple verification should be quick
needs:
- docker-build
- deploy-formbricks-cloud
- docker-build-cloud
steps:
- name: Harden the runner (Audit all outbound calls)
- name: Harden the runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
with:
egress-policy: audit
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: Display ECR build outputs
env:
IMAGE_TAG: ${{ needs.docker-build-cloud.outputs.IMAGE_TAG }}
TAGS: ${{ needs.docker-build-cloud.outputs.TAGS }}
run: |
set -euo pipefail
- name: Upload Sentry Sourcemaps
uses: ./.github/actions/upload-sentry-sourcemaps
continue-on-error: true
with:
docker_image: ghcr.io/formbricks/formbricks:v${{ needs.docker-build.outputs.VERSION }}
release_version: v${{ needs.docker-build.outputs.VERSION }}
sentry_auth_token: ${{ secrets.SENTRY_AUTH_TOKEN }}
environment: ${{ env.ENVIRONMENT }}
echo "✅ ECR Build Completed Successfully"
echo "Image Tag: ${IMAGE_TAG}"
echo "ECR Tags:"
printf '%s\n' "${TAGS}"
move-stable-tag:
name: Move stable tag to release
permissions:
contents: write # Required for tag push operations in called workflow
uses: ./.github/workflows/move-stable-tag.yml
needs:
- docker-build-community # Ensure release is successful first
with:
release_tag: ${{ github.event.release.tag_name }}
commit_sha: ${{ github.sha }}
is_prerelease: ${{ github.event.release.prerelease }}

.github/workflows/move-stable-tag.yml

@@ -0,0 +1,96 @@
name: Move Stable Tag
on:
workflow_call:
inputs:
release_tag:
description: "The release tag name (e.g., 1.2.3)"
required: true
type: string
commit_sha:
description: "The commit SHA to point the stable tag to"
required: true
type: string
is_prerelease:
description: "Whether this is a prerelease (stable tag won't be moved for prereleases)"
required: false
type: boolean
default: false
permissions:
contents: read
# Prevent concurrent stable tag operations to avoid race conditions
concurrency:
group: move-stable-tag-${{ github.repository }}
cancel-in-progress: true
jobs:
move-stable-tag:
name: Move stable tag to release
runs-on: ubuntu-latest
timeout-minutes: 10 # Prevent hung git operations
permissions:
contents: write # Required to push tags
# Only move stable tag for non-prerelease versions
if: ${{ !inputs.is_prerelease }}
steps:
- name: Harden the runner
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0 # Full history needed for tag operations
- name: Validate inputs
env:
RELEASE_TAG: ${{ inputs.release_tag }}
COMMIT_SHA: ${{ inputs.commit_sha }}
run: |
set -euo pipefail
# Validate release tag format
if [[ ! "$RELEASE_TAG" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$ ]]; then
echo "❌ Error: Invalid release tag format. Expected format: 1.2.3, 1.2.3-alpha"
echo "Provided: $RELEASE_TAG"
exit 1
fi
# Validate commit SHA format (40 character hex)
if [[ ! "$COMMIT_SHA" =~ ^[a-f0-9]{40}$ ]]; then
echo "❌ Error: Invalid commit SHA format. Expected 40 character hex string"
echo "Provided: $COMMIT_SHA"
exit 1
fi
echo "✅ Input validation passed"
echo "Release tag: $RELEASE_TAG"
echo "Commit SHA: $COMMIT_SHA"
- name: Move stable tag
env:
RELEASE_TAG: ${{ inputs.release_tag }}
COMMIT_SHA: ${{ inputs.commit_sha }}
run: |
set -euo pipefail
# Configure git
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
# Verify the commit exists
if ! git cat-file -e "$COMMIT_SHA"; then
echo "❌ Error: Commit $COMMIT_SHA does not exist in this repository"
exit 1
fi
# Move stable tag to the release commit
echo "📌 Moving stable tag to commit: $COMMIT_SHA (release: $RELEASE_TAG)"
git tag -f stable "$COMMIT_SHA"
git push origin stable --force
echo "✅ Successfully moved stable tag to release $RELEASE_TAG"
echo "🔗 Stable tag now points to: https://github.com/${{ github.repository }}/commit/$COMMIT_SHA"


@@ -1,41 +1,31 @@
name: Docker Release to Github Experimental
name: Build Community Testing Images
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
# This workflow builds experimental/testing versions of Formbricks for self-hosting customers
# to test fixes and features before official releases. Images are pushed to GHCR with
# timestamped experimental versions for easy identification and testing.
on:
workflow_dispatch:
env:
# Use docker.io for Docker Hub if empty
REGISTRY: ghcr.io
# github.repository as <account>/<repo>
IMAGE_NAME: ${{ github.repository }}-experimental
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
inputs:
version_override:
description: "Override version (SemVer only, e.g., 1.2.3-beta). Leave empty for auto-generated experimental version."
required: false
type: string
permissions:
contents: read
packages: write
id-token: write
jobs:
build:
build-community-testing:
name: Build Community Testing Image
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
# This is used to complete the identity challenge
# with sigstore/fulcio when running outside of PRs.
id-token: write
outputs:
DOCKER_IMAGE: ${{ steps.extract_image_info.outputs.DOCKER_IMAGE }}
RELEASE_VERSION: ${{ steps.extract_image_info.outputs.RELEASE_VERSION }}
timeout-minutes: 45
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
with:
egress-policy: audit
@@ -44,151 +34,17 @@ jobs:
with:
fetch-depth: 0
- name: Generate SemVer version from branch or tag
id: generate_version
- name: Build and push community testing image
uses: ./.github/actions/build-and-push-docker
with:
registry_type: "ghcr"
ghcr_image_name: "${{ github.repository }}-experimental"
experimental_mode: "true"
version: ${{ inputs.version_override }}
env:
REF_NAME: ${{ github.ref_name }}
REF_TYPE: ${{ github.ref_type }}
run: |
# Get reference name and type from environment variables
echo "Reference type: $REF_TYPE"
echo "Reference name: $REF_NAME"
if [[ "$REF_TYPE" == "tag" ]]; then
# If running from a tag, use the tag name
if [[ "$REF_NAME" =~ ^v?[0-9]+\.[0-9]+\.[0-9]+.*$ ]]; then
# Tag looks like a SemVer, use it directly (remove 'v' prefix if present)
VERSION=$(echo "$REF_NAME" | sed 's/^v//')
echo "Using SemVer tag: $VERSION"
else
# Tag is not SemVer, treat as prerelease
SANITIZED_TAG=$(echo "$REF_NAME" | sed 's/[^a-zA-Z0-9.-]/-/g' | sed 's/--*/-/g' | sed 's/^-\|-$//g')
VERSION="0.0.0-$SANITIZED_TAG"
echo "Using tag as prerelease: $VERSION"
fi
else
# Running from branch, use branch name as prerelease
SANITIZED_BRANCH=$(echo "$REF_NAME" | sed 's/[^a-zA-Z0-9.-]/-/g' | sed 's/--*/-/g' | sed 's/^-\|-$//g')
VERSION="0.0.0-$SANITIZED_BRANCH"
echo "Using branch as prerelease: $VERSION"
fi
echo "VERSION=$VERSION" >> $GITHUB_ENV
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
echo "Generated SemVer version: $VERSION"
- name: Update package.json version
run: |
sed -i "s/\"version\": \"0.0.0\"/\"version\": \"${{ env.VERSION }}\"/" ./apps/web/package.json
cat ./apps/web/package.json | grep version
- name: Set Sentry environment in .env
run: |
if ! grep -q "^SENTRY_ENVIRONMENT=staging$" .env 2>/dev/null; then
echo "SENTRY_ENVIRONMENT=staging" >> .env
echo "Added SENTRY_ENVIRONMENT=staging to .env file"
else
echo "SENTRY_ENVIRONMENT=staging already exists in .env file"
fi
- name: Set up Depot CLI
uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
# Install the cosign tool except on PR
# https://github.com/sigstore/cosign-installer
- name: Install cosign
if: github.event_name != 'pull_request'
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
# Login against a Docker registry except on PR
# https://github.com/docker/login-action
- name: Log into registry ${{ env.REGISTRY }}
if: github.event_name != 'pull_request'
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
# Extract metadata (tags, labels) for Docker
# https://github.com/docker/metadata-action
- name: Extract Docker metadata
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
# Build and push Docker image with Buildx (don't push on PR)
# https://github.com/docker/build-push-action
- name: Build and push Docker image
id: build-and-push
uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
with:
project: tw0fqmsx3c
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
context: .
file: ./apps/web/Dockerfile
platforms: linux/amd64,linux/arm64
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
secrets: |
database_url=${{ secrets.DUMMY_DATABASE_URL }}
encryption_key=${{ secrets.DUMMY_ENCRYPTION_KEY }}
- name: Extract image info for sourcemap upload
id: extract_image_info
run: |
# Use the first readable tag from metadata action output
DOCKER_IMAGE=$(echo "${{ steps.meta.outputs.tags }}" | head -n1 | xargs)
echo "DOCKER_IMAGE=$DOCKER_IMAGE" >> $GITHUB_OUTPUT
# Use the generated version for Sentry release
RELEASE_VERSION="$VERSION"
echo "RELEASE_VERSION=$RELEASE_VERSION" >> $GITHUB_OUTPUT
echo "Docker image: $DOCKER_IMAGE"
echo "Release version: $RELEASE_VERSION"
echo "Available tags: ${{ steps.meta.outputs.tags }}"
# Sign the resulting Docker image digest except on PRs.
# This will only write to the public Rekor transparency log when the Docker
# repository is public to avoid leaking data. If you would like to publish
# transparency data even for private images, pass --force to cosign below.
# https://github.com/sigstore/cosign
- name: Sign the published Docker image
if: ${{ github.event_name != 'pull_request' }}
env:
# https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable
TAGS: ${{ steps.meta.outputs.tags }}
DIGEST: ${{ steps.build-and-push.outputs.digest }}
# This step uses the identity token to provision an ephemeral certificate
# against the sigstore community Fulcio instance.
run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST}
upload-sentry-sourcemaps:
name: Upload Sentry Sourcemaps
runs-on: ubuntu-latest
permissions:
contents: read
needs:
- build
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
with:
egress-policy: audit
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: Upload Sentry Sourcemaps
uses: ./.github/actions/upload-sentry-sourcemaps
continue-on-error: true
with:
docker_image: ${{ needs.build.outputs.DOCKER_IMAGE }}
release_version: ${{ needs.build.outputs.RELEASE_VERSION }}
sentry_auth_token: ${{ secrets.SENTRY_AUTH_TOKEN }}
environment: staging
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DEPOT_PROJECT_TOKEN: ${{ secrets.DEPOT_PROJECT_TOKEN }}
DUMMY_DATABASE_URL: ${{ secrets.DUMMY_DATABASE_URL }}
DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
DUMMY_REDIS_URL: ${{ secrets.DUMMY_REDIS_URL }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}


@@ -1,4 +1,4 @@
name: Docker Release to Github
name: Release Community Docker Images
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
@@ -23,8 +23,6 @@ env:
REGISTRY: ghcr.io
# github.repository as <account>/<repo>
IMAGE_NAME: ${{ github.repository }}
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
permissions:
contents: read
@@ -32,6 +30,7 @@ permissions:
jobs:
build:
runs-on: ubuntu-latest
timeout-minutes: 45
permissions:
contents: read
packages: write
@@ -44,102 +43,60 @@ jobs:
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Get Release Tag
- name: Extract release version from tag
id: extract_release_tag
run: |
# Extract version from tag (e.g., refs/tags/v1.2.3 -> 1.2.3)
TAG="$GITHUB_REF"
TAG=${TAG#refs/tags/v}
set -euo pipefail
# Validate the extracted tag format
if [[ ! "$TAG" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$ ]]; then
echo "❌ Error: Invalid release tag format after extraction. Must be semver (e.g., 1.2.3, 1.2.3-alpha)"
echo "Original ref: $GITHUB_REF"
echo "Extracted tag: $TAG"
# Extract tag name with fallback logic for different trigger contexts
if [[ -n "${RELEASE_TAG:-}" ]]; then
TAG="$RELEASE_TAG"
echo "Using RELEASE_TAG override: $TAG"
elif [[ "$GITHUB_REF_NAME" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]] || [[ "$GITHUB_REF_NAME" =~ ^v[0-9] ]]; then
TAG="$GITHUB_REF_NAME"
echo "Using GITHUB_REF_NAME (looks like tag): $TAG"
else
# Fallback: extract from GITHUB_REF for direct tag triggers
TAG="${GITHUB_REF#refs/tags/}"
if [[ -z "$TAG" || "$TAG" == "$GITHUB_REF" ]]; then
TAG="$GITHUB_REF_NAME"
echo "Using GITHUB_REF_NAME as final fallback: $TAG"
else
echo "Extracted from GITHUB_REF: $TAG"
fi
fi
# Strip v-prefix if present (normalize to clean SemVer)
TAG=${TAG#[vV]}
# Validate SemVer format (supports prereleases like 4.0.0-rc.1)
if [[ ! "$TAG" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]]; then
echo "ERROR: Invalid tag format '$TAG'. Expected SemVer (e.g., 1.2.3, 4.0.0-rc.1)"
exit 1
fi
# Safely add to environment variables
echo "RELEASE_TAG=$TAG" >> $GITHUB_ENV
echo "VERSION=$TAG" >> $GITHUB_OUTPUT
echo "Using tag-based version: $TAG"
echo "Using version: $TAG"
- name: Update package.json version
run: |
sed -i "s/\"version\": \"0.0.0\"/\"version\": \"${{ env.RELEASE_TAG }}\"/" ./apps/web/package.json
cat ./apps/web/package.json | grep version
- name: Set up Depot CLI
uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
# Install the cosign tool except on PR
# https://github.com/sigstore/cosign-installer
- name: Install cosign
if: github.event_name != 'pull_request'
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
# Login against a Docker registry except on PR
# https://github.com/docker/login-action
- name: Log into registry ${{ env.REGISTRY }}
if: github.event_name != 'pull_request'
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
- name: Build and push community release image
id: build
uses: ./.github/actions/build-and-push-docker
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
# Extract metadata (tags, labels) for Docker
# https://github.com/docker/metadata-action
- name: Extract Docker metadata
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
# Default semver tags (version, major.minor, major)
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
# Only tag as 'latest' for stable releases (not prereleases)
type=raw,value=latest,enable=${{ inputs.IS_PRERELEASE != 'true' }}
# Build and push Docker image with Buildx (don't push on PR)
# https://github.com/docker/build-push-action
- name: Build and push Docker image
id: build-and-push
uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
with:
project: tw0fqmsx3c
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
context: .
file: ./apps/web/Dockerfile
platforms: linux/amd64,linux/arm64
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
secrets: |
database_url=${{ secrets.DUMMY_DATABASE_URL }}
encryption_key=${{ secrets.DUMMY_ENCRYPTION_KEY }}
# Sign the resulting Docker image digest except on PRs.
# This will only write to the public Rekor transparency log when the Docker
# repository is public to avoid leaking data. If you would like to publish
# transparency data even for private images, pass --force to cosign below.
# https://github.com/sigstore/cosign
- name: Sign the published Docker image
if: ${{ github.event_name != 'pull_request' }}
registry_type: "ghcr"
ghcr_image_name: ${{ env.IMAGE_NAME }}
version: ${{ steps.extract_release_tag.outputs.VERSION }}
is_prerelease: ${{ inputs.IS_PRERELEASE }}
env:
# https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable
TAGS: ${{ steps.meta.outputs.tags }}
DIGEST: ${{ steps.build-and-push.outputs.digest }}
# This step uses the identity token to provision an ephemeral certificate
# against the sigstore community Fulcio instance.
run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DEPOT_PROJECT_TOKEN: ${{ secrets.DEPOT_PROJECT_TOKEN }}
DUMMY_DATABASE_URL: ${{ secrets.DUMMY_DATABASE_URL }}
DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
DUMMY_REDIS_URL: ${{ secrets.DUMMY_REDIS_URL }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
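Note: a few worked examples for the tag normalization above (tag values are hypothetical). ${TAG#[vV]} strips at most one leading 'v' or 'V' before the SemVer check:

  #   v1.2.3       -> 1.2.3
  #   V4.0.0-rc.1  -> 4.0.0-rc.1
  #   1.2.3        -> 1.2.3 (already clean)
  #   my-feature   -> unchanged, then rejected by the SemVer validation and the job fails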


@@ -19,7 +19,7 @@ jobs:
contents: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
with:
egress-policy: audit
@@ -59,14 +59,35 @@ jobs:
uses: dcarbone/install-yq-action@4075b4dca348d74bd83f2bf82d30f25d7c54539b # v1.3.1
- name: Update Chart.yaml with new version
env:
VERSION: ${{ env.VERSION }}
run: |
yq -i ".version = \"$VERSION\"" helm-chart/Chart.yaml
yq -i ".appVersion = \"v$VERSION\"" helm-chart/Chart.yaml
set -euo pipefail
echo "Updating Chart.yaml with version: ${VERSION}"
yq -i ".version = \"${VERSION}\"" helm-chart/Chart.yaml
yq -i ".appVersion = \"${VERSION}\"" helm-chart/Chart.yaml
echo "✅ Successfully updated Chart.yaml"
- name: Package Helm chart
env:
VERSION: ${{ env.VERSION }}
run: |
set -euo pipefail
echo "Packaging Helm chart version: ${VERSION}"
helm package ./helm-chart
echo "✅ Successfully packaged formbricks-${VERSION}.tgz"
- name: Push Helm chart to GitHub Container Registry
env:
VERSION: ${{ env.VERSION }}
run: |
helm push "formbricks-$VERSION.tgz" oci://ghcr.io/formbricks/helm-charts
set -euo pipefail
echo "Pushing Helm chart to registry: formbricks-${VERSION}.tgz"
helm push "formbricks-${VERSION}.tgz" oci://ghcr.io/formbricks/helm-charts
echo "✅ Successfully pushed Helm chart to registry"


@@ -1,48 +0,0 @@
name: Upload Sentry Sourcemaps (Manual)
on:
workflow_dispatch:
inputs:
docker_image:
description: "Docker image to extract sourcemaps from"
required: true
type: string
release_version:
description: "Release version (e.g., v1.2.3)"
required: true
type: string
tag_version:
description: "Docker image tag (leave empty to use release_version)"
required: false
type: string
permissions:
contents: read
jobs:
upload-sourcemaps:
name: Upload Sourcemaps to Sentry
runs-on: ubuntu-latest
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
with:
egress-policy: audit
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: Set Docker Image
run: echo "DOCKER_IMAGE=${DOCKER_IMAGE}" >> $GITHUB_ENV
env:
DOCKER_IMAGE: ${{ inputs.docker_image }}:${{ inputs.tag_version != '' && inputs.tag_version || inputs.release_version }}
- name: Upload Sourcemaps to Sentry
uses: ./.github/actions/upload-sentry-sourcemaps
with:
docker_image: ${{ env.DOCKER_IMAGE }}
release_version: ${{ inputs.release_version }}
sentry_auth_token: ${{ secrets.SENTRY_AUTH_TOKEN }}

View File

@@ -29,7 +29,7 @@ import {
SquareStack,
UserIcon,
} from "lucide-react";
import { useEffect, useMemo, useState } from "react";
import { useCallback, useEffect, useMemo, useState } from "react";
import { TSegment } from "@formbricks/types/segment";
import { TSurvey } from "@formbricks/types/surveys/types";
import { TUser } from "@formbricks/types/user";
@@ -77,6 +77,7 @@ export const ShareSurveyModal = ({
description: string;
componentType: React.ComponentType<unknown>;
componentProps: unknown;
disabled?: boolean;
}[] = useMemo(
() => [
{
@@ -111,6 +112,7 @@ export const ShareSurveyModal = ({
isContactsEnabled,
isFormbricksCloud,
},
disabled: survey.singleUse?.enabled,
},
{
id: ShareViaType.WEBSITE_EMBED,
@@ -121,6 +123,7 @@ export const ShareSurveyModal = ({
description: t("environments.surveys.share.embed_on_website.description"),
componentType: WebsiteEmbedTab,
componentProps: { surveyUrl },
disabled: survey.singleUse?.enabled,
},
{
id: ShareViaType.EMAIL,
@@ -131,6 +134,7 @@ export const ShareSurveyModal = ({
description: t("environments.surveys.share.send_email.description"),
componentType: EmailTab,
componentProps: { surveyId: survey.id, email },
disabled: survey.singleUse?.enabled,
},
{
id: ShareViaType.SOCIAL_MEDIA,
@@ -141,6 +145,7 @@ export const ShareSurveyModal = ({
description: t("environments.surveys.share.social_media.description"),
componentType: SocialMediaTab,
componentProps: { surveyUrl, surveyTitle: survey.name },
disabled: survey.singleUse?.enabled,
},
{
id: ShareViaType.QR_CODE,
@@ -151,6 +156,7 @@ export const ShareSurveyModal = ({
description: t("environments.surveys.summary.qr_code_description"),
componentType: QRCodeTab,
componentProps: { surveyUrl },
disabled: survey.singleUse?.enabled,
},
{
id: ShareViaType.DYNAMIC_POPUP,
@@ -177,9 +183,9 @@ export const ShareSurveyModal = ({
t,
survey,
publicDomain,
setSurveyUrl,
user.locale,
surveyUrl,
isReadOnly,
environmentId,
segments,
isContactsEnabled,
@@ -188,9 +194,14 @@ export const ShareSurveyModal = ({
]
);
const [activeId, setActiveId] = useState<ShareViaType | ShareSettingsType>(
survey.type === "link" ? ShareViaType.ANON_LINKS : ShareViaType.APP
);
const getDefaultActiveId = useCallback(() => {
if (survey.type !== "link") {
return ShareViaType.APP;
}
return ShareViaType.ANON_LINKS;
}, [survey.type]);
const [activeId, setActiveId] = useState<ShareViaType | ShareSettingsType>(getDefaultActiveId());
useEffect(() => {
if (open) {
@@ -198,11 +209,19 @@ export const ShareSurveyModal = ({
}
}, [open, modalView]);
// Ensure active tab is not disabled - if it is, switch to default
useEffect(() => {
const activeTab = linkTabs.find((tab) => tab.id === activeId);
if (activeTab?.disabled) {
setActiveId(getDefaultActiveId());
}
}, [activeId, linkTabs, getDefaultActiveId]);
const handleOpenChange = (open: boolean) => {
setOpen(open);
if (!open) {
setShowView("start");
setActiveId(ShareViaType.ANON_LINKS);
setActiveId(getDefaultActiveId());
}
};
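Taken together, the ShareSurveyModal changes above gate every link-sharing tab behind survey.singleUse?.enabled and fall back to the default tab whenever the active one becomes disabled. A minimal sketch of that guard, using simplified stand-in types rather than the app's real ShareViaType/TSurvey types:

import { useCallback, useEffect, useState } from "react";

// Stand-in types for illustration only; the real component has more tabs and props.
type TabId = "anon-links" | "app";
interface Tab {
  id: TabId;
  disabled?: boolean;
}

export const useActiveShareTab = (tabs: Tab[], surveyType: "link" | "app") => {
  // Default tab depends on the survey type, mirroring getDefaultActiveId in the diff.
  const getDefaultActiveId = useCallback(
    (): TabId => (surveyType === "link" ? "anon-links" : "app"),
    [surveyType]
  );

  const [activeId, setActiveId] = useState<TabId>(getDefaultActiveId());

  // If the currently active tab becomes disabled (e.g. single use is enabled),
  // switch back to the default tab instead of leaving a dead selection.
  useEffect(() => {
    const activeTab = tabs.find((tab) => tab.id === activeId);
    if (activeTab?.disabled) {
      setActiveId(getDefaultActiveId());
    }
  }, [activeId, tabs, getDefaultActiveId]);

  return { activeId, setActiveId };
};

The same reset is applied in handleOpenChange above, so a reopened modal never starts on a disabled tab.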

View File

@@ -150,13 +150,13 @@ export const LinkSettingsTab = ({ isReadOnly, locale }: LinkSettingsTabProps) =>
name: "title",
label: t("environments.surveys.share.link_settings.link_title"),
description: t("environments.surveys.share.link_settings.link_title_description"),
placeholder: t("environments.surveys.share.link_settings.link_title_placeholder"),
placeholder: survey.name,
},
{
name: "description",
label: t("environments.surveys.share.link_settings.link_description"),
description: t("environments.surveys.share.link_settings.link_description_description"),
placeholder: t("environments.surveys.share.link_settings.link_description_placeholder"),
placeholder: "Please complete this survey.",
},
];

View File

@@ -34,6 +34,7 @@ interface ShareViewProps {
componentProps: any;
title: string;
description?: string;
disabled?: boolean;
}>;
activeId: ShareViaType | ShareSettingsType;
setActiveId: React.Dispatch<React.SetStateAction<ShareViaType | ShareSettingsType>>;
@@ -109,12 +110,13 @@ export const ShareView = ({ tabs, activeId, setActiveId }: ShareViewProps) => {
onClick={() => setActiveId(tab.id)}
className={cn(
"flex w-full justify-start rounded-md p-2 text-slate-600 hover:bg-slate-100 hover:text-slate-900",
tab.id === activeId
tab.id === activeId && !tab.disabled
? "bg-slate-100 font-medium text-slate-900"
: "text-slate-700"
)}
tooltip={tab.label}
isActive={tab.id === activeId}>
isActive={tab.id === activeId}
disabled={tab.disabled}>
<tab.icon className="h-4 w-4 text-slate-700" />
<span>{tab.label}</span>
</SidebarMenuButton>
@@ -136,9 +138,10 @@ export const ShareView = ({ tabs, activeId, setActiveId }: ShareViewProps) => {
<Button
variant="ghost"
onClick={() => setActiveId(tab.id)}
disabled={tab.disabled}
className={cn(
"rounded-md px-4 py-2",
tab.id === activeId
tab.id === activeId && !tab.disabled
? "bg-white text-slate-900 shadow-sm hover:bg-white"
: "border-transparent text-slate-700 hover:text-slate-900"
)}>

View File

@@ -1,4 +1,5 @@
import { env } from "@/lib/env";
import * as crypto from "@/lib/crypto";
import jwt from "jsonwebtoken";
import { beforeEach, describe, expect, test, vi } from "vitest";
import { prisma } from "@formbricks/database";
import {
@@ -14,12 +15,69 @@ import {
verifyTokenForLinkSurvey,
} from "./jwt";
const TEST_ENCRYPTION_KEY = "0".repeat(32); // 32-byte key for AES-256-GCM
const TEST_NEXTAUTH_SECRET = "test-nextauth-secret";
const DIFFERENT_SECRET = "different-secret";
// Error message constants
const NEXTAUTH_SECRET_ERROR = "NEXTAUTH_SECRET is not set";
const ENCRYPTION_KEY_ERROR = "ENCRYPTION_KEY is not set";
// Helper function to test error cases for missing secrets/keys
const testMissingSecretsError = async (
testFn: (...args: any[]) => any,
args: any[],
options: {
testNextAuthSecret?: boolean;
testEncryptionKey?: boolean;
isAsync?: boolean;
} = {}
) => {
const { testNextAuthSecret = true, testEncryptionKey = true, isAsync = false } = options;
if (testNextAuthSecret) {
const constants = await import("@/lib/constants");
const originalSecret = (constants as any).NEXTAUTH_SECRET;
(constants as any).NEXTAUTH_SECRET = undefined;
if (isAsync) {
await expect(testFn(...args)).rejects.toThrow(NEXTAUTH_SECRET_ERROR);
} else {
expect(() => testFn(...args)).toThrow(NEXTAUTH_SECRET_ERROR);
}
// Restore
(constants as any).NEXTAUTH_SECRET = originalSecret;
}
if (testEncryptionKey) {
const constants = await import("@/lib/constants");
const originalKey = (constants as any).ENCRYPTION_KEY;
(constants as any).ENCRYPTION_KEY = undefined;
if (isAsync) {
await expect(testFn(...args)).rejects.toThrow(ENCRYPTION_KEY_ERROR);
} else {
expect(() => testFn(...args)).toThrow(ENCRYPTION_KEY_ERROR);
}
// Restore
(constants as any).ENCRYPTION_KEY = originalKey;
}
};
// Mock environment variables
vi.mock("@/lib/env", () => ({
env: {
ENCRYPTION_KEY: "0".repeat(32), // 32-byte key for AES-256-GCM
ENCRYPTION_KEY: "0".repeat(32),
NEXTAUTH_SECRET: "test-nextauth-secret",
} as typeof env,
},
}));
// Mock constants
vi.mock("@/lib/constants", () => ({
NEXTAUTH_SECRET: "test-nextauth-secret",
ENCRYPTION_KEY: "0".repeat(32),
}));
// Mock prisma
@@ -31,22 +89,65 @@ vi.mock("@formbricks/database", () => ({
},
}));
describe("JWT Functions", () => {
// Mock logger
vi.mock("@formbricks/logger", () => ({
logger: {
error: vi.fn(),
warn: vi.fn(),
info: vi.fn(),
},
}));
describe("JWT Functions - Comprehensive Security Tests", () => {
const mockUser = {
id: "test-user-id",
email: "test@example.com",
};
let mockSymmetricEncrypt: any;
let mockSymmetricDecrypt: any;
beforeEach(() => {
vi.clearAllMocks();
// Setup default crypto mocks
mockSymmetricEncrypt = vi
.spyOn(crypto, "symmetricEncrypt")
.mockImplementation((text: string) => `encrypted_${text}`);
mockSymmetricDecrypt = vi
.spyOn(crypto, "symmetricDecrypt")
.mockImplementation((encryptedText: string) => encryptedText.replace("encrypted_", ""));
(prisma.user.findUnique as any).mockResolvedValue(mockUser);
});
describe("createToken", () => {
test("should create a valid token", () => {
const token = createToken(mockUser.id, mockUser.email);
test("should create a valid token with encrypted user ID", () => {
const token = createToken(mockUser.id);
expect(token).toBeDefined();
expect(typeof token).toBe("string");
expect(mockSymmetricEncrypt).toHaveBeenCalledWith(mockUser.id, TEST_ENCRYPTION_KEY);
});
test("should accept custom options", () => {
const customOptions = { expiresIn: "1h" };
const token = createToken(mockUser.id, customOptions);
expect(token).toBeDefined();
// Verify the token contains the expected expiration
const decoded = jwt.decode(token) as any;
expect(decoded.exp).toBeDefined();
expect(decoded.iat).toBeDefined();
// Should expire in approximately 1 hour (3600 seconds)
expect(decoded.exp - decoded.iat).toBe(3600);
});
test("should throw error if NEXTAUTH_SECRET is not set", async () => {
await testMissingSecretsError(createToken, [mockUser.id], {
testNextAuthSecret: true,
testEncryptionKey: false,
});
});
});
@@ -56,6 +157,18 @@ describe("JWT Functions", () => {
const token = createTokenForLinkSurvey(surveyId, mockUser.email);
expect(token).toBeDefined();
expect(typeof token).toBe("string");
expect(mockSymmetricEncrypt).toHaveBeenCalledWith(mockUser.email, TEST_ENCRYPTION_KEY);
});
test("should include surveyId in payload", () => {
const surveyId = "test-survey-id";
const token = createTokenForLinkSurvey(surveyId, mockUser.email);
const decoded = jwt.decode(token) as any;
expect(decoded.surveyId).toBe(surveyId);
});
test("should throw error if NEXTAUTH_SECRET or ENCRYPTION_KEY is not set", async () => {
await testMissingSecretsError(createTokenForLinkSurvey, ["survey-id", mockUser.email]);
});
});
@@ -64,24 +177,30 @@ describe("JWT Functions", () => {
const token = createEmailToken(mockUser.email);
expect(token).toBeDefined();
expect(typeof token).toBe("string");
expect(mockSymmetricEncrypt).toHaveBeenCalledWith(mockUser.email, TEST_ENCRYPTION_KEY);
});
test("should throw error if NEXTAUTH_SECRET is not set", () => {
const originalSecret = env.NEXTAUTH_SECRET;
try {
(env as any).NEXTAUTH_SECRET = undefined;
expect(() => createEmailToken(mockUser.email)).toThrow("NEXTAUTH_SECRET is not set");
} finally {
(env as any).NEXTAUTH_SECRET = originalSecret;
}
test("should throw error if NEXTAUTH_SECRET or ENCRYPTION_KEY is not set", async () => {
await testMissingSecretsError(createEmailToken, [mockUser.email]);
});
});
describe("getEmailFromEmailToken", () => {
test("should extract email from valid token", () => {
const token = createEmailToken(mockUser.email);
const extractedEmail = getEmailFromEmailToken(token);
expect(extractedEmail).toBe(mockUser.email);
describe("createEmailChangeToken", () => {
test("should create a valid email change token with 1 day expiration", () => {
const token = createEmailChangeToken(mockUser.id, mockUser.email);
expect(token).toBeDefined();
expect(mockSymmetricEncrypt).toHaveBeenCalledWith(mockUser.id, TEST_ENCRYPTION_KEY);
expect(mockSymmetricEncrypt).toHaveBeenCalledWith(mockUser.email, TEST_ENCRYPTION_KEY);
const decoded = jwt.decode(token) as any;
expect(decoded.exp).toBeDefined();
expect(decoded.iat).toBeDefined();
// Should expire in approximately 1 day (86400 seconds)
expect(decoded.exp - decoded.iat).toBe(86400);
});
test("should throw error if NEXTAUTH_SECRET or ENCRYPTION_KEY is not set", async () => {
await testMissingSecretsError(createEmailChangeToken, [mockUser.id, mockUser.email]);
});
});
@@ -91,6 +210,50 @@ describe("JWT Functions", () => {
const token = createInviteToken(inviteId, mockUser.email);
expect(token).toBeDefined();
expect(typeof token).toBe("string");
expect(mockSymmetricEncrypt).toHaveBeenCalledWith(inviteId, TEST_ENCRYPTION_KEY);
expect(mockSymmetricEncrypt).toHaveBeenCalledWith(mockUser.email, TEST_ENCRYPTION_KEY);
});
test("should accept custom options", () => {
const inviteId = "test-invite-id";
const customOptions = { expiresIn: "24h" };
const token = createInviteToken(inviteId, mockUser.email, customOptions);
expect(token).toBeDefined();
const decoded = jwt.decode(token) as any;
expect(decoded.exp).toBeDefined();
expect(decoded.iat).toBeDefined();
// Should expire in approximately 24 hours (86400 seconds)
expect(decoded.exp - decoded.iat).toBe(86400);
});
test("should throw error if NEXTAUTH_SECRET or ENCRYPTION_KEY is not set", async () => {
await testMissingSecretsError(createInviteToken, ["invite-id", mockUser.email]);
});
});
describe("getEmailFromEmailToken", () => {
test("should extract email from valid token", () => {
const token = createEmailToken(mockUser.email);
const extractedEmail = getEmailFromEmailToken(token);
expect(extractedEmail).toBe(mockUser.email);
expect(mockSymmetricDecrypt).toHaveBeenCalledWith(`encrypted_${mockUser.email}`, TEST_ENCRYPTION_KEY);
});
test("should fall back to original email if decryption fails", () => {
mockSymmetricDecrypt.mockImplementationOnce(() => {
throw new Error("Decryption failed");
});
// Create token manually with unencrypted email for legacy compatibility
const legacyToken = jwt.sign({ email: mockUser.email }, TEST_NEXTAUTH_SECRET);
const extractedEmail = getEmailFromEmailToken(legacyToken);
expect(extractedEmail).toBe(mockUser.email);
});
test("should throw error if NEXTAUTH_SECRET or ENCRYPTION_KEY is not set", async () => {
const token = jwt.sign({ email: "test@example.com" }, TEST_NEXTAUTH_SECRET);
await testMissingSecretsError(getEmailFromEmailToken, [token]);
});
});
@@ -106,23 +269,194 @@ describe("JWT Functions", () => {
const result = verifyTokenForLinkSurvey("invalid-token", "test-survey-id");
expect(result).toBeNull();
});
test("should return null if NEXTAUTH_SECRET is not set", async () => {
const constants = await import("@/lib/constants");
const originalSecret = (constants as any).NEXTAUTH_SECRET;
(constants as any).NEXTAUTH_SECRET = undefined;
const result = verifyTokenForLinkSurvey("any-token", "test-survey-id");
expect(result).toBeNull();
// Restore
(constants as any).NEXTAUTH_SECRET = originalSecret;
});
test("should return null if surveyId doesn't match", () => {
const surveyId = "test-survey-id";
const differentSurveyId = "different-survey-id";
const token = createTokenForLinkSurvey(surveyId, mockUser.email);
const result = verifyTokenForLinkSurvey(token, differentSurveyId);
expect(result).toBeNull();
});
test("should return null if email is missing from payload", () => {
const tokenWithoutEmail = jwt.sign({ surveyId: "test-survey-id" }, TEST_NEXTAUTH_SECRET);
const result = verifyTokenForLinkSurvey(tokenWithoutEmail, "test-survey-id");
expect(result).toBeNull();
});
test("should fall back to original email if decryption fails", () => {
mockSymmetricDecrypt.mockImplementationOnce(() => {
throw new Error("Decryption failed");
});
// Create legacy token with unencrypted email
const legacyToken = jwt.sign(
{
email: mockUser.email,
surveyId: "test-survey-id",
},
TEST_NEXTAUTH_SECRET
);
const result = verifyTokenForLinkSurvey(legacyToken, "test-survey-id");
expect(result).toBe(mockUser.email);
});
test("should fall back to original email if ENCRYPTION_KEY is not set", async () => {
const constants = await import("@/lib/constants");
const originalKey = (constants as any).ENCRYPTION_KEY;
(constants as any).ENCRYPTION_KEY = undefined;
// Create a token with unencrypted email (as it would be if ENCRYPTION_KEY was not set during creation)
const token = jwt.sign(
{
email: mockUser.email,
surveyId: "survey-id",
},
TEST_NEXTAUTH_SECRET
);
const result = verifyTokenForLinkSurvey(token, "survey-id");
expect(result).toBe(mockUser.email);
// Restore
(constants as any).ENCRYPTION_KEY = originalKey;
});
test("should verify legacy survey tokens with surveyId-based secret", async () => {
const surveyId = "test-survey-id";
// Create legacy token with old format (NEXTAUTH_SECRET + surveyId)
const legacyToken = jwt.sign({ email: `encrypted_${mockUser.email}` }, TEST_NEXTAUTH_SECRET + surveyId);
const result = verifyTokenForLinkSurvey(legacyToken, surveyId);
expect(result).toBe(mockUser.email);
});
test("should reject survey tokens that fail both new and legacy verification", async () => {
const surveyId = "test-survey-id";
const invalidToken = jwt.sign({ email: "encrypted_test@example.com" }, "wrong-secret");
const result = verifyTokenForLinkSurvey(invalidToken, surveyId);
expect(result).toBeNull();
// Verify error logging
const { logger } = await import("@formbricks/logger");
expect(logger.error).toHaveBeenCalledWith(expect.any(Error), "Survey link token verification failed");
});
test("should reject legacy survey tokens for wrong survey", () => {
const correctSurveyId = "correct-survey-id";
const wrongSurveyId = "wrong-survey-id";
// Create legacy token for one survey
const legacyToken = jwt.sign(
{ email: `encrypted_${mockUser.email}` },
TEST_NEXTAUTH_SECRET + correctSurveyId
);
// Try to verify with different survey ID
const result = verifyTokenForLinkSurvey(legacyToken, wrongSurveyId);
expect(result).toBeNull();
});
});
describe("verifyToken", () => {
test("should verify valid token", async () => {
const token = createToken(mockUser.id, mockUser.email);
const token = createToken(mockUser.id);
const verified = await verifyToken(token);
expect(verified).toEqual({
id: mockUser.id,
id: mockUser.id, // Returns the decrypted user ID
email: mockUser.email,
});
});
test("should throw error if user not found", async () => {
(prisma.user.findUnique as any).mockResolvedValue(null);
const token = createToken(mockUser.id, mockUser.email);
const token = createToken(mockUser.id);
await expect(verifyToken(token)).rejects.toThrow("User not found");
});
test("should throw error if NEXTAUTH_SECRET is not set", async () => {
await testMissingSecretsError(verifyToken, ["any-token"], {
testNextAuthSecret: true,
testEncryptionKey: false,
isAsync: true,
});
});
test("should throw error for invalid token signature", async () => {
const invalidToken = jwt.sign({ id: "test-id" }, DIFFERENT_SECRET);
await expect(verifyToken(invalidToken)).rejects.toThrow("Invalid token");
});
test("should throw error if token payload is missing id", async () => {
const tokenWithoutId = jwt.sign({ email: mockUser.email }, TEST_NEXTAUTH_SECRET);
await expect(verifyToken(tokenWithoutId)).rejects.toThrow("Invalid token");
});
test("should return raw id from payload", async () => {
// Create token with unencrypted id
const token = jwt.sign({ id: mockUser.id }, TEST_NEXTAUTH_SECRET);
const verified = await verifyToken(token);
expect(verified).toEqual({
id: mockUser.id, // Returns the raw ID from payload
email: mockUser.email,
});
});
test("should verify legacy tokens with email-based secret", async () => {
// Create legacy token with old format (NEXTAUTH_SECRET + userEmail)
const legacyToken = jwt.sign({ id: `encrypted_${mockUser.id}` }, TEST_NEXTAUTH_SECRET + mockUser.email);
const verified = await verifyToken(legacyToken);
expect(verified).toEqual({
id: mockUser.id, // Returns the decrypted user ID
email: mockUser.email,
});
});
test("should prioritize new tokens over legacy tokens", async () => {
// Create both new and legacy tokens for the same user
const newToken = createToken(mockUser.id);
const legacyToken = jwt.sign({ id: `encrypted_${mockUser.id}` }, TEST_NEXTAUTH_SECRET + mockUser.email);
// New token should verify without triggering legacy path
const verifiedNew = await verifyToken(newToken);
expect(verifiedNew.id).toBe(mockUser.id); // Returns decrypted user ID
// Legacy token should trigger legacy path
const verifiedLegacy = await verifyToken(legacyToken);
expect(verifiedLegacy.id).toBe(mockUser.id); // Returns decrypted user ID
});
test("should reject tokens that fail both new and legacy verification", async () => {
const invalidToken = jwt.sign({ id: "encrypted_test-id" }, "wrong-secret");
await expect(verifyToken(invalidToken)).rejects.toThrow("Invalid token");
// Verify both methods were attempted
const { logger } = await import("@formbricks/logger");
expect(logger.error).toHaveBeenCalledWith(
expect.any(Error),
"Token verification failed with new method"
);
expect(logger.error).toHaveBeenCalledWith(
expect.any(Error),
"Token verification failed with legacy method"
);
});
});
describe("verifyInviteToken", () => {
@@ -139,6 +473,53 @@ describe("JWT Functions", () => {
test("should throw error for invalid token", () => {
expect(() => verifyInviteToken("invalid-token")).toThrow("Invalid or expired invite token");
});
test("should throw error if NEXTAUTH_SECRET or ENCRYPTION_KEY is not set", async () => {
await testMissingSecretsError(verifyInviteToken, ["any-token"]);
});
test("should throw error if inviteId is missing", () => {
const tokenWithoutInviteId = jwt.sign({ email: mockUser.email }, TEST_NEXTAUTH_SECRET);
expect(() => verifyInviteToken(tokenWithoutInviteId)).toThrow("Invalid or expired invite token");
});
test("should throw error if email is missing", () => {
const tokenWithoutEmail = jwt.sign({ inviteId: "test-invite-id" }, TEST_NEXTAUTH_SECRET);
expect(() => verifyInviteToken(tokenWithoutEmail)).toThrow("Invalid or expired invite token");
});
test("should fall back to original values if decryption fails", () => {
mockSymmetricDecrypt.mockImplementation(() => {
throw new Error("Decryption failed");
});
const inviteId = "test-invite-id";
const legacyToken = jwt.sign(
{
inviteId,
email: mockUser.email,
},
TEST_NEXTAUTH_SECRET
);
const verified = verifyInviteToken(legacyToken);
expect(verified).toEqual({
inviteId,
email: mockUser.email,
});
});
test("should throw error for token with wrong signature", () => {
const invalidToken = jwt.sign(
{
inviteId: "test-invite-id",
email: mockUser.email,
},
DIFFERENT_SECRET
);
expect(() => verifyInviteToken(invalidToken)).toThrow("Invalid or expired invite token");
});
});
describe("verifyEmailChangeToken", () => {
@@ -150,22 +531,478 @@ describe("JWT Functions", () => {
expect(result).toEqual({ id: userId, email });
});
test("should throw error if NEXTAUTH_SECRET or ENCRYPTION_KEY is not set", async () => {
await testMissingSecretsError(verifyEmailChangeToken, ["any-token"], { isAsync: true });
});
test("should throw error if token is invalid or missing fields", async () => {
// Create a token with missing fields
const jwt = await import("jsonwebtoken");
const token = jwt.sign({ foo: "bar" }, env.NEXTAUTH_SECRET as string);
const token = jwt.sign({ foo: "bar" }, TEST_NEXTAUTH_SECRET);
await expect(verifyEmailChangeToken(token)).rejects.toThrow(
"Token is invalid or missing required fields"
);
});
test("should throw error if id is missing", async () => {
const token = jwt.sign({ email: "test@example.com" }, TEST_NEXTAUTH_SECRET);
await expect(verifyEmailChangeToken(token)).rejects.toThrow(
"Token is invalid or missing required fields"
);
});
test("should throw error if email is missing", async () => {
const token = jwt.sign({ id: "test-id" }, TEST_NEXTAUTH_SECRET);
await expect(verifyEmailChangeToken(token)).rejects.toThrow(
"Token is invalid or missing required fields"
);
});
test("should return original id/email if decryption fails", async () => {
// Create a token with non-encrypted id/email
const jwt = await import("jsonwebtoken");
mockSymmetricDecrypt.mockImplementation(() => {
throw new Error("Decryption failed");
});
const payload = { id: "plain-id", email: "plain@example.com" };
const token = jwt.sign(payload, env.NEXTAUTH_SECRET as string);
const token = jwt.sign(payload, TEST_NEXTAUTH_SECRET);
const result = await verifyEmailChangeToken(token);
expect(result).toEqual(payload);
});
test("should throw error for token with wrong signature", async () => {
const invalidToken = jwt.sign(
{
id: "test-id",
email: "test@example.com",
},
DIFFERENT_SECRET
);
await expect(verifyEmailChangeToken(invalidToken)).rejects.toThrow();
});
});
// SECURITY SCENARIO TESTS
describe("Security Scenarios", () => {
describe("Algorithm Confusion Attack Prevention", () => {
test("should reject 'none' algorithm tokens in verifyToken", async () => {
// Create malicious token with "none" algorithm
const maliciousToken =
Buffer.from(
JSON.stringify({
alg: "none",
typ: "JWT",
})
).toString("base64url") +
"." +
Buffer.from(
JSON.stringify({
id: "encrypted_malicious-id",
})
).toString("base64url") +
".";
await expect(verifyToken(maliciousToken)).rejects.toThrow("Invalid token");
});
test("should reject 'none' algorithm tokens in verifyTokenForLinkSurvey", () => {
const maliciousToken =
Buffer.from(
JSON.stringify({
alg: "none",
typ: "JWT",
})
).toString("base64url") +
"." +
Buffer.from(
JSON.stringify({
email: "encrypted_attacker@evil.com",
surveyId: "test-survey-id",
})
).toString("base64url") +
".";
const result = verifyTokenForLinkSurvey(maliciousToken, "test-survey-id");
expect(result).toBeNull();
});
test("should reject 'none' algorithm tokens in verifyInviteToken", () => {
const maliciousToken =
Buffer.from(
JSON.stringify({
alg: "none",
typ: "JWT",
})
).toString("base64url") +
"." +
Buffer.from(
JSON.stringify({
inviteId: "encrypted_malicious-invite",
email: "encrypted_attacker@evil.com",
})
).toString("base64url") +
".";
expect(() => verifyInviteToken(maliciousToken)).toThrow("Invalid or expired invite token");
});
test("should reject 'none' algorithm tokens in verifyEmailChangeToken", async () => {
const maliciousToken =
Buffer.from(
JSON.stringify({
alg: "none",
typ: "JWT",
})
).toString("base64url") +
"." +
Buffer.from(
JSON.stringify({
id: "encrypted_malicious-id",
email: "encrypted_attacker@evil.com",
})
).toString("base64url") +
".";
await expect(verifyEmailChangeToken(maliciousToken)).rejects.toThrow();
});
test("should reject RS256 algorithm tokens (HS256/RS256 confusion)", async () => {
// Create malicious token with RS256 algorithm header but HS256 signature
const maliciousHeader = Buffer.from(
JSON.stringify({
alg: "RS256",
typ: "JWT",
})
).toString("base64url");
const maliciousPayload = Buffer.from(
JSON.stringify({
id: "encrypted_malicious-id",
})
).toString("base64url");
// Create signature using HMAC (as if it were HS256)
const crypto = require("crypto");
const signature = crypto
.createHmac("sha256", TEST_NEXTAUTH_SECRET)
.update(`${maliciousHeader}.${maliciousPayload}`)
.digest("base64url");
const maliciousToken = `${maliciousHeader}.${maliciousPayload}.${signature}`;
await expect(verifyToken(maliciousToken)).rejects.toThrow("Invalid token");
});
test("should only accept HS256 algorithm", async () => {
// Test that other valid algorithms are rejected
const otherAlgorithms = ["HS384", "HS512", "RS256", "RS384", "RS512", "ES256", "ES384", "ES512"];
for (const alg of otherAlgorithms) {
const maliciousHeader = Buffer.from(
JSON.stringify({
alg,
typ: "JWT",
})
).toString("base64url");
const maliciousPayload = Buffer.from(
JSON.stringify({
id: "encrypted_test-id",
})
).toString("base64url");
const maliciousToken = `${maliciousHeader}.${maliciousPayload}.fake-signature`;
await expect(verifyToken(maliciousToken)).rejects.toThrow("Invalid token");
}
});
});
describe("Token Tampering", () => {
test("should reject tokens with modified payload", async () => {
const token = createToken(mockUser.id);
const [header, payload, signature] = token.split(".");
// Modify the payload
const decodedPayload = JSON.parse(Buffer.from(payload, "base64url").toString());
decodedPayload.id = "malicious-id";
const tamperedPayload = Buffer.from(JSON.stringify(decodedPayload)).toString("base64url");
const tamperedToken = `${header}.${tamperedPayload}.${signature}`;
await expect(verifyToken(tamperedToken)).rejects.toThrow("Invalid token");
});
test("should reject tokens with modified signature", async () => {
const token = createToken(mockUser.id);
const [header, payload] = token.split(".");
const tamperedToken = `${header}.${payload}.tamperedsignature`;
await expect(verifyToken(tamperedToken)).rejects.toThrow("Invalid token");
});
test("should reject malformed tokens", async () => {
const malformedTokens = [
"not.a.jwt",
"only.two.parts",
"too.many.parts.here.invalid",
"",
"invalid-base64",
];
for (const malformedToken of malformedTokens) {
await expect(verifyToken(malformedToken)).rejects.toThrow();
}
});
});
describe("Cross-Survey Token Reuse", () => {
test("should reject survey tokens used for different surveys", () => {
const surveyId1 = "survey-1";
const surveyId2 = "survey-2";
const token = createTokenForLinkSurvey(surveyId1, mockUser.email);
const result = verifyTokenForLinkSurvey(token, surveyId2);
expect(result).toBeNull();
});
});
describe("Expired Tokens", () => {
test("should reject expired tokens", async () => {
const expiredToken = jwt.sign(
{
id: "encrypted_test-id",
exp: Math.floor(Date.now() / 1000) - 3600, // Expired 1 hour ago
},
TEST_NEXTAUTH_SECRET
);
await expect(verifyToken(expiredToken)).rejects.toThrow("Invalid token");
});
test("should reject expired email change tokens", async () => {
const expiredToken = jwt.sign(
{
id: "encrypted_test-id",
email: "encrypted_test@example.com",
exp: Math.floor(Date.now() / 1000) - 3600, // Expired 1 hour ago
},
TEST_NEXTAUTH_SECRET
);
await expect(verifyEmailChangeToken(expiredToken)).rejects.toThrow();
});
});
describe("Encryption Key Attacks", () => {
test("should fail gracefully with wrong encryption key", async () => {
mockSymmetricDecrypt.mockImplementation(() => {
throw new Error("Authentication tag verification failed");
});
// Mock findUnique to only return user for correct decrypted ID, not ciphertext
(prisma.user.findUnique as any).mockImplementation(({ where }: { where: { id: string } }) => {
if (where.id === mockUser.id) {
return Promise.resolve(mockUser);
}
return Promise.resolve(null); // Return null for ciphertext IDs
});
const token = createToken(mockUser.id);
// Should fail because ciphertext passed as userId won't match any user in DB
await expect(verifyToken(token)).rejects.toThrow(/User not found/i);
});
test("should handle encryption key not set gracefully", async () => {
const constants = await import("@/lib/constants");
const originalKey = (constants as any).ENCRYPTION_KEY;
(constants as any).ENCRYPTION_KEY = undefined;
const token = jwt.sign(
{
email: "test@example.com",
surveyId: "test-survey-id",
},
TEST_NEXTAUTH_SECRET
);
const result = verifyTokenForLinkSurvey(token, "test-survey-id");
expect(result).toBe("test@example.com");
// Restore
(constants as any).ENCRYPTION_KEY = originalKey;
});
});
describe("SQL Injection Attempts", () => {
test("should safely handle malicious user IDs", async () => {
const maliciousIds = [
"'; DROP TABLE users; --",
"1' OR '1'='1",
"admin'/*",
"<script>alert('xss')</script>",
"../../etc/passwd",
];
for (const maliciousId of maliciousIds) {
mockSymmetricDecrypt.mockReturnValueOnce(maliciousId);
const token = jwt.sign({ id: "encrypted_malicious" }, TEST_NEXTAUTH_SECRET);
// The function should look up the user safely
await verifyToken(token);
expect(prisma.user.findUnique).toHaveBeenCalledWith({
where: { id: maliciousId },
});
}
});
});
describe("Token Reuse and Replay Attacks", () => {
test("should allow legitimate token reuse within validity period", async () => {
const token = createToken(mockUser.id);
// First use
const result1 = await verifyToken(token);
expect(result1.id).toBe(mockUser.id); // Returns decrypted user ID
// Second use (should still work)
const result2 = await verifyToken(token);
expect(result2.id).toBe(mockUser.id); // Returns decrypted user ID
});
});
describe("Legacy Token Compatibility", () => {
test("should handle legacy unencrypted tokens gracefully", async () => {
// Legacy token with plain text data
const legacyToken = jwt.sign({ id: mockUser.id }, TEST_NEXTAUTH_SECRET);
const result = await verifyToken(legacyToken);
expect(result.id).toBe(mockUser.id); // Returns raw ID from payload
expect(result.email).toBe(mockUser.email);
});
test("should handle mixed encrypted/unencrypted fields", async () => {
mockSymmetricDecrypt
.mockImplementationOnce(() => mockUser.id) // id decrypts successfully
.mockImplementationOnce(() => {
throw new Error("Email not encrypted");
}); // email fails
const token = jwt.sign(
{
id: "encrypted_test-id",
email: "plain-email@example.com",
},
TEST_NEXTAUTH_SECRET
);
const result = await verifyEmailChangeToken(token);
expect(result.id).toBe(mockUser.id);
expect(result.email).toBe("plain-email@example.com");
});
test("should verify old format user tokens with email-based secrets", async () => {
// Simulate old token format with per-user secret
const oldFormatToken = jwt.sign(
{ id: `encrypted_${mockUser.id}` },
TEST_NEXTAUTH_SECRET + mockUser.email
);
const result = await verifyToken(oldFormatToken);
expect(result.id).toBe(mockUser.id); // Returns decrypted user ID
expect(result.email).toBe(mockUser.email);
});
test("should verify old format survey tokens with survey-based secrets", () => {
const surveyId = "legacy-survey-id";
// Simulate old survey token format
const oldFormatSurveyToken = jwt.sign(
{ email: `encrypted_${mockUser.email}` },
TEST_NEXTAUTH_SECRET + surveyId
);
const result = verifyTokenForLinkSurvey(oldFormatSurveyToken, surveyId);
expect(result).toBe(mockUser.email);
});
test("should gracefully handle database errors during legacy verification", async () => {
// Create token that will fail new method
const legacyToken = jwt.sign(
{ id: `encrypted_${mockUser.id}` },
TEST_NEXTAUTH_SECRET + mockUser.email
);
// Make database lookup fail
(prisma.user.findUnique as any).mockRejectedValueOnce(new Error("DB connection lost"));
await expect(verifyToken(legacyToken)).rejects.toThrow("DB connection lost");
});
});
describe("Edge Cases and Error Handling", () => {
test("should handle database connection errors gracefully", async () => {
(prisma.user.findUnique as any).mockRejectedValue(new Error("Database connection failed"));
const token = createToken(mockUser.id);
await expect(verifyToken(token)).rejects.toThrow("Database connection failed");
});
test("should handle crypto module errors", () => {
mockSymmetricEncrypt.mockImplementation(() => {
throw new Error("Crypto module error");
});
expect(() => createToken(mockUser.id)).toThrow("Crypto module error");
});
test("should validate email format in tokens", () => {
const invalidEmails = ["", "not-an-email", "missing@", "@missing-local.com", "spaces in@email.com"];
invalidEmails.forEach((invalidEmail) => {
expect(() => createEmailToken(invalidEmail)).not.toThrow();
// Note: JWT functions don't validate email format, they just encrypt/decrypt
// Email validation should happen at a higher level
});
});
test("should handle extremely long inputs", () => {
const longString = "a".repeat(10000);
expect(() => createToken(longString)).not.toThrow();
expect(() => createEmailToken(longString)).not.toThrow();
});
test("should handle special characters in user data", () => {
const specialChars = "!@#$%^&*()_+-=[]{}|;:'\",.<>?/~`";
expect(() => createToken(specialChars)).not.toThrow();
expect(() => createEmailToken(specialChars)).not.toThrow();
});
});
describe("Performance and Resource Exhaustion", () => {
test("should handle rapid token creation without memory leaks", () => {
const tokens: string[] = [];
for (let i = 0; i < 1000; i++) {
tokens.push(createToken(`user-${i}`));
}
expect(tokens.length).toBe(1000);
expect(tokens.every((token) => typeof token === "string")).toBe(true);
});
test("should handle rapid token verification", async () => {
const token = createToken(mockUser.id);
const verifications: Promise<any>[] = [];
for (let i = 0; i < 100; i++) {
verifications.push(verifyToken(token));
}
const results = await Promise.all(verifications);
expect(results.length).toBe(100);
expect(results.every((result: any) => result.id === mockUser.id)).toBe(true); // Returns decrypted user ID
});
});
});
});
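The algorithm-confusion tests above all exercise the same defence: jwt.verify is called with an explicit algorithms: ["HS256"] allow-list, so a forged token that merely claims alg: "none" (or any other algorithm) is rejected regardless of its payload. A standalone sketch of that check, with a placeholder secret rather than a real key:

import jwt from "jsonwebtoken";

const SECRET = "placeholder-secret"; // illustration only, not a real key

// Forge an unsigned token whose header claims the "none" algorithm.
const header = Buffer.from(JSON.stringify({ alg: "none", typ: "JWT" })).toString("base64url");
const payload = Buffer.from(JSON.stringify({ id: "malicious-id" })).toString("base64url");
const forgedToken = `${header}.${payload}.`;

try {
  // Pinning the allowed algorithms means verification fails even though the
  // token's signature segment is empty and its header says "none".
  jwt.verify(forgedToken, SECRET, { algorithms: ["HS256"] });
} catch (error) {
  console.log("forged token rejected:", (error as Error).message);
}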

View File

@@ -1,43 +1,64 @@
import { ENCRYPTION_KEY, NEXTAUTH_SECRET } from "@/lib/constants";
import { symmetricDecrypt, symmetricEncrypt } from "@/lib/crypto";
import { env } from "@/lib/env";
import jwt, { JwtPayload } from "jsonwebtoken";
import { prisma } from "@formbricks/database";
import { logger } from "@formbricks/logger";
export const createToken = (userId: string, userEmail: string, options = {}): string => {
const encryptedUserId = symmetricEncrypt(userId, env.ENCRYPTION_KEY);
return jwt.sign({ id: encryptedUserId }, env.NEXTAUTH_SECRET + userEmail, options);
};
export const createTokenForLinkSurvey = (surveyId: string, userEmail: string): string => {
const encryptedEmail = symmetricEncrypt(userEmail, env.ENCRYPTION_KEY);
return jwt.sign({ email: encryptedEmail }, env.NEXTAUTH_SECRET + surveyId);
// Helper function to decrypt with fallback to plain text
const decryptWithFallback = (encryptedText: string, key: string): string => {
try {
return symmetricDecrypt(encryptedText, key);
} catch {
return encryptedText; // Return as-is if decryption fails (legacy format)
}
};
export const verifyEmailChangeToken = async (token: string): Promise<{ id: string; email: string }> => {
if (!env.NEXTAUTH_SECRET) {
export const createToken = (userId: string, options = {}): string => {
if (!NEXTAUTH_SECRET) {
throw new Error("NEXTAUTH_SECRET is not set");
}
const payload = jwt.verify(token, env.NEXTAUTH_SECRET) as { id: string; email: string };
if (!ENCRYPTION_KEY) {
throw new Error("ENCRYPTION_KEY is not set");
}
const encryptedUserId = symmetricEncrypt(userId, ENCRYPTION_KEY);
return jwt.sign({ id: encryptedUserId }, NEXTAUTH_SECRET, options);
};
export const createTokenForLinkSurvey = (surveyId: string, userEmail: string): string => {
if (!NEXTAUTH_SECRET) {
throw new Error("NEXTAUTH_SECRET is not set");
}
if (!ENCRYPTION_KEY) {
throw new Error("ENCRYPTION_KEY is not set");
}
const encryptedEmail = symmetricEncrypt(userEmail, ENCRYPTION_KEY);
return jwt.sign({ email: encryptedEmail, surveyId }, NEXTAUTH_SECRET);
};
export const verifyEmailChangeToken = async (token: string): Promise<{ id: string; email: string }> => {
if (!NEXTAUTH_SECRET) {
throw new Error("NEXTAUTH_SECRET is not set");
}
if (!ENCRYPTION_KEY) {
throw new Error("ENCRYPTION_KEY is not set");
}
const payload = jwt.verify(token, NEXTAUTH_SECRET, { algorithms: ["HS256"] }) as {
id: string;
email: string;
};
if (!payload?.id || !payload?.email) {
throw new Error("Token is invalid or missing required fields");
}
let decryptedId: string;
let decryptedEmail: string;
try {
decryptedId = symmetricDecrypt(payload.id, env.ENCRYPTION_KEY);
} catch {
decryptedId = payload.id;
}
try {
decryptedEmail = symmetricDecrypt(payload.email, env.ENCRYPTION_KEY);
} catch {
decryptedEmail = payload.email;
}
// Decrypt both fields with fallback
const decryptedId = decryptWithFallback(payload.id, ENCRYPTION_KEY);
const decryptedEmail = decryptWithFallback(payload.email, ENCRYPTION_KEY);
return {
id: decryptedId,
@@ -46,127 +67,230 @@ export const verifyEmailChangeToken = async (token: string): Promise<{ id: strin
};
export const createEmailChangeToken = (userId: string, email: string): string => {
const encryptedUserId = symmetricEncrypt(userId, env.ENCRYPTION_KEY);
const encryptedEmail = symmetricEncrypt(email, env.ENCRYPTION_KEY);
if (!NEXTAUTH_SECRET) {
throw new Error("NEXTAUTH_SECRET is not set");
}
if (!ENCRYPTION_KEY) {
throw new Error("ENCRYPTION_KEY is not set");
}
const encryptedUserId = symmetricEncrypt(userId, ENCRYPTION_KEY);
const encryptedEmail = symmetricEncrypt(email, ENCRYPTION_KEY);
const payload = {
id: encryptedUserId,
email: encryptedEmail,
};
return jwt.sign(payload, env.NEXTAUTH_SECRET as string, {
return jwt.sign(payload, NEXTAUTH_SECRET, {
expiresIn: "1d",
});
};
export const createEmailToken = (email: string): string => {
if (!env.NEXTAUTH_SECRET) {
if (!NEXTAUTH_SECRET) {
throw new Error("NEXTAUTH_SECRET is not set");
}
const encryptedEmail = symmetricEncrypt(email, env.ENCRYPTION_KEY);
return jwt.sign({ email: encryptedEmail }, env.NEXTAUTH_SECRET);
if (!ENCRYPTION_KEY) {
throw new Error("ENCRYPTION_KEY is not set");
}
const encryptedEmail = symmetricEncrypt(email, ENCRYPTION_KEY);
return jwt.sign({ email: encryptedEmail }, NEXTAUTH_SECRET);
};
export const getEmailFromEmailToken = (token: string): string => {
if (!env.NEXTAUTH_SECRET) {
if (!NEXTAUTH_SECRET) {
throw new Error("NEXTAUTH_SECRET is not set");
}
const payload = jwt.verify(token, env.NEXTAUTH_SECRET) as JwtPayload;
try {
// Try to decrypt first (for newer tokens)
const decryptedEmail = symmetricDecrypt(payload.email, env.ENCRYPTION_KEY);
return decryptedEmail;
} catch {
// If decryption fails, return the original email (for older tokens)
return payload.email;
if (!ENCRYPTION_KEY) {
throw new Error("ENCRYPTION_KEY is not set");
}
const payload = jwt.verify(token, NEXTAUTH_SECRET, { algorithms: ["HS256"] }) as JwtPayload & {
email: string;
};
return decryptWithFallback(payload.email, ENCRYPTION_KEY);
};
export const createInviteToken = (inviteId: string, email: string, options = {}): string => {
if (!env.NEXTAUTH_SECRET) {
if (!NEXTAUTH_SECRET) {
throw new Error("NEXTAUTH_SECRET is not set");
}
const encryptedInviteId = symmetricEncrypt(inviteId, env.ENCRYPTION_KEY);
const encryptedEmail = symmetricEncrypt(email, env.ENCRYPTION_KEY);
return jwt.sign({ inviteId: encryptedInviteId, email: encryptedEmail }, env.NEXTAUTH_SECRET, options);
if (!ENCRYPTION_KEY) {
throw new Error("ENCRYPTION_KEY is not set");
}
const encryptedInviteId = symmetricEncrypt(inviteId, ENCRYPTION_KEY);
const encryptedEmail = symmetricEncrypt(email, ENCRYPTION_KEY);
return jwt.sign({ inviteId: encryptedInviteId, email: encryptedEmail }, NEXTAUTH_SECRET, options);
};
export const verifyTokenForLinkSurvey = (token: string, surveyId: string): string | null => {
if (!NEXTAUTH_SECRET) {
return null;
}
try {
const { email } = jwt.verify(token, env.NEXTAUTH_SECRET + surveyId) as JwtPayload;
let payload: JwtPayload & { email: string; surveyId?: string };
// Try primary method first (consistent secret)
try {
// Try to decrypt first (for newer tokens)
if (!env.ENCRYPTION_KEY) {
throw new Error("ENCRYPTION_KEY is not set");
payload = jwt.verify(token, NEXTAUTH_SECRET, { algorithms: ["HS256"] }) as JwtPayload & {
email: string;
surveyId: string;
};
} catch (primaryError) {
logger.error(primaryError, "Token verification failed with primary method");
// Fallback to legacy method (surveyId-based secret)
try {
payload = jwt.verify(token, NEXTAUTH_SECRET + surveyId, { algorithms: ["HS256"] }) as JwtPayload & {
email: string;
};
} catch (legacyError) {
logger.error(legacyError, "Token verification failed with legacy method");
throw new Error("Invalid token");
}
const decryptedEmail = symmetricDecrypt(email, env.ENCRYPTION_KEY);
return decryptedEmail;
} catch {
// If decryption fails, return the original email (for older tokens)
return email;
}
} catch (err) {
// Verify the surveyId matches if present in payload (new format)
if (payload.surveyId && payload.surveyId !== surveyId) {
return null;
}
const { email } = payload;
if (!email) {
return null;
}
// Decrypt email with fallback to plain text
if (!ENCRYPTION_KEY) {
return email; // Return as-is if encryption key not set
}
return decryptWithFallback(email, ENCRYPTION_KEY);
} catch (error) {
logger.error(error, "Survey link token verification failed");
return null;
}
};
export const verifyToken = async (token: string): Promise<JwtPayload> => {
// First decode to get the ID
const decoded = jwt.decode(token);
const payload: JwtPayload = decoded as JwtPayload;
// Helper function to get user email for legacy verification
const getUserEmailForLegacyVerification = async (
token: string,
userId?: string
): Promise<{ userId: string; userEmail: string }> => {
if (!userId) {
const decoded = jwt.decode(token);
if (!payload) {
throw new Error("Token is invalid");
// Validate decoded token structure before using it
if (
!decoded ||
typeof decoded !== "object" ||
!decoded.id ||
typeof decoded.id !== "string" ||
decoded.id.trim() === ""
) {
logger.error("Invalid token: missing or invalid user ID");
throw new Error("Invalid token");
}
userId = decoded.id;
}
const { id } = payload;
if (!id) {
throw new Error("Token missing required field: id");
const decryptedId = decryptWithFallback(userId, ENCRYPTION_KEY);
// Validate decrypted ID before database query
if (!decryptedId || typeof decryptedId !== "string" || decryptedId.trim() === "") {
logger.error("Invalid token: missing or invalid user ID");
throw new Error("Invalid token");
}
// Try to decrypt the ID (for newer tokens), if it fails use the ID as-is (for older tokens)
let decryptedId: string;
try {
decryptedId = symmetricDecrypt(id, env.ENCRYPTION_KEY);
} catch {
decryptedId = id;
}
// If no email provided, look up the user
const foundUser = await prisma.user.findUnique({
where: { id: decryptedId },
});
if (!foundUser) {
throw new Error("User not found");
const errorMessage = "User not found";
logger.error(errorMessage);
throw new Error(errorMessage);
}
const userEmail = foundUser.email;
return { userId: decryptedId, userEmail: foundUser.email };
};
return { id: decryptedId, email: userEmail };
export const verifyToken = async (token: string): Promise<JwtPayload> => {
if (!NEXTAUTH_SECRET) {
throw new Error("NEXTAUTH_SECRET is not set");
}
let payload: JwtPayload & { id: string };
let userData: { userId: string; userEmail: string } | null = null;
// Try new method first, with smart fallback to legacy
try {
payload = jwt.verify(token, NEXTAUTH_SECRET, { algorithms: ["HS256"] }) as JwtPayload & {
id: string;
};
} catch (newMethodError) {
logger.error(newMethodError, "Token verification failed with new method");
// Get user email for legacy verification
userData = await getUserEmailForLegacyVerification(token);
// Try legacy verification with email-based secret
try {
payload = jwt.verify(token, NEXTAUTH_SECRET + userData.userEmail, {
algorithms: ["HS256"],
}) as JwtPayload & {
id: string;
};
} catch (legacyMethodError) {
logger.error(legacyMethodError, "Token verification failed with legacy method");
throw new Error("Invalid token");
}
}
if (!payload?.id) {
throw new Error("Invalid token");
}
// Get user email if we don't have it yet
userData ??= await getUserEmailForLegacyVerification(token, payload.id);
return { id: userData.userId, email: userData.userEmail };
};
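The verifyToken rewrite above tries the new consistent secret first and only falls back to the legacy email-salted secret when that fails. A condensed sketch of that order, with a placeholder secret and the user's email passed in directly (the real code decrypts the id and looks the email up via Prisma):

import jwt, { type JwtPayload } from "jsonwebtoken";

const NEXTAUTH_SECRET = "placeholder-secret"; // illustration only

const verifySessionToken = (token: string, userEmail: string): JwtPayload => {
  try {
    // New format: one consistent secret, HS256 only.
    return jwt.verify(token, NEXTAUTH_SECRET, { algorithms: ["HS256"] }) as JwtPayload;
  } catch {
    // Legacy format: the secret was salted with the user's email address.
    return jwt.verify(token, NEXTAUTH_SECRET + userEmail, { algorithms: ["HS256"] }) as JwtPayload;
  }
};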
export const verifyInviteToken = (token: string): { inviteId: string; email: string } => {
if (!NEXTAUTH_SECRET) {
throw new Error("NEXTAUTH_SECRET is not set");
}
if (!ENCRYPTION_KEY) {
throw new Error("ENCRYPTION_KEY is not set");
}
try {
const decoded = jwt.decode(token);
const payload: JwtPayload = decoded as JwtPayload;
const payload = jwt.verify(token, NEXTAUTH_SECRET, { algorithms: ["HS256"] }) as JwtPayload & {
inviteId: string;
email: string;
};
const { inviteId, email } = payload;
const { inviteId: encryptedInviteId, email: encryptedEmail } = payload;
let decryptedInviteId: string;
let decryptedEmail: string;
try {
// Try to decrypt first (for newer tokens)
decryptedInviteId = symmetricDecrypt(inviteId, env.ENCRYPTION_KEY);
decryptedEmail = symmetricDecrypt(email, env.ENCRYPTION_KEY);
} catch {
// If decryption fails, use original values (for older tokens)
decryptedInviteId = inviteId;
decryptedEmail = email;
if (!encryptedInviteId || !encryptedEmail) {
throw new Error("Invalid token");
}
// Decrypt both fields with fallback to original values
const decryptedInviteId = decryptWithFallback(encryptedInviteId, ENCRYPTION_KEY);
const decryptedEmail = decryptWithFallback(encryptedEmail, ENCRYPTION_KEY);
return {
inviteId: decryptedInviteId,
email: decryptedEmail,

View File

@@ -1,12 +1,12 @@
import { describe, expect, test, vi } from "vitest";
import { TJsEnvironmentStateSurvey } from "@formbricks/types/js";
import { TResponseData, TResponseVariables } from "@formbricks/types/responses";
import { TSurveyQuestionTypeEnum } from "@formbricks/types/surveys/types";
import {
TConditionGroup,
TSingleCondition,
TSurveyLogic,
TSurveyLogicAction,
TSurveyQuestionTypeEnum,
} from "@formbricks/types/surveys/types";
import {
addConditionBelow,
@@ -109,6 +109,7 @@ describe("surveyLogic", () => {
languages: [],
triggers: [],
segment: null,
recaptcha: null,
};
const simpleGroup = (): TConditionGroup => ({
@@ -175,7 +176,8 @@ describe("surveyLogic", () => {
},
],
};
removeCondition(group, "c");
const result = removeCondition(group, "c");
expect(result).toBe(true);
expect(group.conditions).toHaveLength(0);
});
@@ -433,6 +435,8 @@ describe("surveyLogic", () => {
)
).toBe(true);
expect(evaluateLogic(mockSurvey, { f: "foo" }, vars, group(baseCond("isSet")), "en")).toBe(true);
expect(evaluateLogic(mockSurvey, { f: "foo" }, vars, group(baseCond("isNotEmpty")), "en")).toBe(true);
expect(evaluateLogic(mockSurvey, { f: "" }, vars, group(baseCond("isNotSet")), "en")).toBe(true);
expect(evaluateLogic(mockSurvey, { f: "" }, vars, group(baseCond("isEmpty")), "en")).toBe(true);
expect(
evaluateLogic(mockSurvey, { f: "foo" }, vars, group({ ...baseCond("isAnyOf", ["foo", "bar"]) }), "en")
@@ -510,7 +514,8 @@ describe("surveyLogic", () => {
expect(group.conditions.length).toBe(2);
toggleGroupConnector(group, "notfound");
expect(group.connector).toBe("and");
removeCondition(group, "notfound");
const result = removeCondition(group, "notfound");
expect(result).toBe(false);
expect(group.conditions.length).toBe(2);
duplicateCondition(group, "notfound");
expect(group.conditions.length).toBe(2);
@@ -520,6 +525,192 @@ describe("surveyLogic", () => {
expect(group.conditions.length).toBe(2);
});
test("removeCondition returns false when condition not found in nested groups", () => {
const nestedGroup: TConditionGroup = {
id: "nested",
connector: "and",
conditions: [
{
id: "nestedC1",
leftOperand: { type: "hiddenField", value: "nf1" },
operator: "equals",
rightOperand: { type: "static", value: "nv1" },
},
],
};
const group: TConditionGroup = {
id: "parent",
connector: "and",
conditions: [nestedGroup],
};
const result = removeCondition(group, "nonexistent");
expect(result).toBe(false);
expect(group.conditions).toHaveLength(1);
});
test("removeCondition successfully removes from nested groups and cleans up", () => {
const nestedGroup: TConditionGroup = {
id: "nested",
connector: "and",
conditions: [
{
id: "nestedC1",
leftOperand: { type: "hiddenField", value: "nf1" },
operator: "equals",
rightOperand: { type: "static", value: "nv1" },
},
{
id: "nestedC2",
leftOperand: { type: "hiddenField", value: "nf2" },
operator: "equals",
rightOperand: { type: "static", value: "nv2" },
},
],
};
const otherCondition: TSingleCondition = {
id: "otherCondition",
leftOperand: { type: "hiddenField", value: "other" },
operator: "equals",
rightOperand: { type: "static", value: "value" },
};
const group: TConditionGroup = {
id: "parent",
connector: "and",
conditions: [nestedGroup, otherCondition],
};
const result = removeCondition(group, "nestedC1");
expect(result).toBe(true);
expect(group.conditions).toHaveLength(2);
expect((group.conditions[0] as TConditionGroup).conditions).toHaveLength(1);
expect((group.conditions[0] as TConditionGroup).conditions[0].id).toBe("nestedC2");
expect(group.conditions[1].id).toBe("otherCondition");
});
test("removeCondition flattens group when nested group has only one condition left", () => {
const deeplyNestedGroup: TConditionGroup = {
id: "deepNested",
connector: "or",
conditions: [
{
id: "deepC1",
leftOperand: { type: "hiddenField", value: "df1" },
operator: "equals",
rightOperand: { type: "static", value: "dv1" },
},
],
};
const nestedGroup: TConditionGroup = {
id: "nested",
connector: "and",
conditions: [
{
id: "nestedC1",
leftOperand: { type: "hiddenField", value: "nf1" },
operator: "equals",
rightOperand: { type: "static", value: "nv1" },
},
deeplyNestedGroup,
],
};
const otherCondition: TSingleCondition = {
id: "otherCondition",
leftOperand: { type: "hiddenField", value: "other" },
operator: "equals",
rightOperand: { type: "static", value: "value" },
};
const group: TConditionGroup = {
id: "parent",
connector: "and",
conditions: [nestedGroup, otherCondition],
};
// Remove the regular condition, leaving only the deeply nested group in the nested group
const result = removeCondition(group, "nestedC1");
expect(result).toBe(true);
// The parent group should still have 2 conditions: the nested group and the other condition
expect(group.conditions).toHaveLength(2);
// The nested group should still be there but now contain only the deeply nested group
expect(group.conditions[0].id).toBe("nested");
expect((group.conditions[0] as TConditionGroup).conditions).toHaveLength(1);
// The nested group should contain the flattened content from the deeply nested group
expect((group.conditions[0] as TConditionGroup).conditions[0].id).toBe("deepC1");
expect(group.conditions[1].id).toBe("otherCondition");
});
test("removeCondition removes empty groups after cleanup", () => {
const emptyNestedGroup: TConditionGroup = {
id: "emptyNested",
connector: "and",
conditions: [
{
id: "toBeRemoved",
leftOperand: { type: "hiddenField", value: "f1" },
operator: "equals",
rightOperand: { type: "static", value: "v1" },
},
],
};
const group: TConditionGroup = {
id: "parent",
connector: "and",
conditions: [
emptyNestedGroup,
{
id: "keepThis",
leftOperand: { type: "hiddenField", value: "f2" },
operator: "equals",
rightOperand: { type: "static", value: "v2" },
},
],
};
// Remove the only condition from the nested group
const result = removeCondition(group, "toBeRemoved");
expect(result).toBe(true);
// The empty nested group should be removed, leaving only the other condition
expect(group.conditions).toHaveLength(1);
expect(group.conditions[0].id).toBe("keepThis");
});
test("deleteEmptyGroups with complex nested structure", () => {
const deepEmptyGroup: TConditionGroup = { id: "deepEmpty", connector: "and", conditions: [] };
const middleGroup: TConditionGroup = {
id: "middle",
connector: "or",
conditions: [deepEmptyGroup],
};
const topGroup: TConditionGroup = {
id: "top",
connector: "and",
conditions: [
middleGroup,
{
id: "validCondition",
leftOperand: { type: "hiddenField", value: "f" },
operator: "equals",
rightOperand: { type: "static", value: "v" },
},
],
};
deleteEmptyGroups(topGroup);
// Should remove the nested empty groups and keep only the valid condition
expect(topGroup.conditions).toHaveLength(1);
expect(topGroup.conditions[0].id).toBe("validCondition");
});
// Additional tests for complete coverage
test("addConditionBelow with nested group correctly adds condition", () => {

View File

@@ -94,21 +94,48 @@ export const toggleGroupConnector = (group: TConditionGroup, resourceId: string)
}
};
export const removeCondition = (group: TConditionGroup, resourceId: string) => {
for (let i = 0; i < group.conditions.length; i++) {
export const removeCondition = (group: TConditionGroup, resourceId: string): boolean => {
for (let i = group.conditions.length - 1; i >= 0; i--) {
const item = group.conditions[i];
if (item.id === resourceId) {
group.conditions.splice(i, 1);
return;
cleanupGroup(group);
return true;
}
if (isConditionGroup(item)) {
removeCondition(item, resourceId);
if (isConditionGroup(item) && removeCondition(item, resourceId)) {
cleanupGroup(group);
return true;
}
}
deleteEmptyGroups(group);
return false;
};
const cleanupGroup = (group: TConditionGroup) => {
// Remove empty condition groups first
for (let i = group.conditions.length - 1; i >= 0; i--) {
const condition = group.conditions[i];
if (isConditionGroup(condition)) {
cleanupGroup(condition);
// Remove if empty after cleanup
if (condition.conditions.length === 0) {
group.conditions.splice(i, 1);
}
}
}
// Flatten if group has only one condition and it's a condition group
if (group.conditions.length === 1 && isConditionGroup(group.conditions[0])) {
group.connector = group.conditions[0].connector || "and";
group.conditions = group.conditions[0].conditions;
}
};
export const deleteEmptyGroups = (group: TConditionGroup) => {
cleanupGroup(group);
};
export const duplicateCondition = (group: TConditionGroup, resourceId: string) => {
@@ -130,18 +157,6 @@ export const duplicateCondition = (group: TConditionGroup, resourceId: string) =
}
};
export const deleteEmptyGroups = (group: TConditionGroup) => {
for (let i = 0; i < group.conditions.length; i++) {
const resource = group.conditions[i];
if (isConditionGroup(resource) && resource.conditions.length === 0) {
group.conditions.splice(i, 1);
} else if (isConditionGroup(resource)) {
deleteEmptyGroups(resource);
}
}
};
export const createGroupFromResource = (group: TConditionGroup, resourceId: string) => {
for (let i = 0; i < group.conditions.length; i++) {
const item = group.conditions[i];
@@ -670,8 +685,9 @@ const performCalculation = (
if (typeof val === "number" || typeof val === "string") {
if (variable.type === "number" && !isNaN(Number(val))) {
operandValue = Number(val);
} else {
operandValue = val;
}
operandValue = val;
}
break;
}
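
A minimal usage sketch of the refactored helpers above (import paths are illustrative, the exact module locations may differ): removing the last condition of a nested group now prunes that group from its parent, and the boolean return value reports whether anything was removed.

import type { TConditionGroup } from "@formbricks/types/surveys/logic"; // assumed path
import { removeCondition } from "@/modules/survey/editor/lib/utils"; // assumed path

const nested: TConditionGroup = {
  id: "nested",
  connector: "and",
  conditions: [
    {
      id: "onlyCondition",
      leftOperand: { type: "hiddenField", value: "f1" },
      operator: "equals",
      rightOperand: { type: "static", value: "v1" },
    },
  ],
};

const parent: TConditionGroup = {
  id: "parent",
  connector: "and",
  conditions: [
    nested,
    {
      id: "keepThis",
      leftOperand: { type: "hiddenField", value: "f2" },
      operator: "equals",
      rightOperand: { type: "static", value: "v2" },
    },
  ],
};

// Removing the nested group's only condition succeeds and prunes the now-empty group.
console.log(removeCondition(parent, "onlyCondition")); // true
console.log(parent.conditions.length); // 1
console.log(parent.conditions[0].id); // "keepThis"

// An unknown id leaves the tree untouched and reports failure.
console.log(removeCondition(parent, "does-not-exist")); // false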


@@ -141,7 +141,6 @@
"apply_filters": "Filter anwenden",
"are_you_sure": "Bist Du sicher?",
"attributes": "Attribute",
"avatar": "Avatar",
"back": "Zurück",
"billing": "Abrechnung",
"booked": "Gebucht",
@@ -748,6 +747,7 @@
"api_key_label": "API-Schlüssel Label",
"api_key_security_warning": "Aus Sicherheitsgründen wird der API-Schlüssel nur einmal nach der Erstellung angezeigt. Bitte kopiere ihn sofort an einen sicheren Ort.",
"api_key_updated": "API-Schlüssel aktualisiert",
"delete_permission": "Berechtigung löschen",
"duplicate_access": "Doppelter Projektzugriff nicht erlaubt",
"no_api_keys_yet": "Du hast noch keine API-Schlüssel",
"no_env_permissions_found": "Keine Umgebungsberechtigungen gefunden",
@@ -1111,9 +1111,7 @@
},
"profile": {
"account_deletion_consequences_warning": "Was passiert, wenn Du das Konto löschst",
"avatar_update_failed": "Aktualisierung des Avatars fehlgeschlagen. Bitte versuche es erneut.",
"backup_code": "Backup-Code",
"change_image": "Bild ändern",
"confirm_delete_account": "Lösche dein Konto mit all deinen persönlichen Informationen und Daten",
"confirm_delete_my_account": "Konto löschen",
"confirm_your_current_password_to_get_started": "Bestätige dein aktuelles Passwort, um loszulegen.",
@@ -1124,17 +1122,13 @@
"email_change_initiated": "Deine Anfrage zur Änderung der E-Mail wurde eingeleitet.",
"enable_two_factor_authentication": "Zwei-Faktor-Authentifizierung aktivieren",
"enter_the_code_from_your_authenticator_app_below": "Gib den Code aus deiner Authentifizierungs-App unten ein.",
"file_size_must_be_less_than_10mb": "Dateigröße muss weniger als 10MB sein.",
"invalid_file_type": "Ungültiger Dateityp. Nur JPEG-, PNG- und WEBP-Dateien sind erlaubt.",
"lost_access": "Zugriff verloren",
"or_enter_the_following_code_manually": "Oder gib den folgenden Code manuell ein:",
"organization_identification": "Hilf deiner Organisation, Dich auf Formbricks zu identifizieren",
"organizations_delete_message": "Du bist der einzige Besitzer dieser Organisationen, also werden sie <b>auch gelöscht.</b>",
"permanent_removal_of_all_of_your_personal_information_and_data": "Dauerhafte Entfernung all deiner persönlichen Informationen und Daten",
"personal_information": "Persönliche Informationen",
"please_enter_email_to_confirm_account_deletion": "Bitte gib {email} in das folgende Feld ein, um die endgültige Löschung deines Kontos zu bestätigen:",
"profile_updated_successfully": "Dein Profil wurde erfolgreich aktualisiert",
"remove_image": "Bild entfernen",
"save_the_following_backup_codes_in_a_safe_place": "Speichere die folgenden Backup-Codes an einem sicheren Ort.",
"scan_the_qr_code_below_with_your_authenticator_app": "Scanne den QR-Code unten mit deiner Authentifizierungs-App.",
"security_description": "Verwalte dein Passwort und andere Sicherheitseinstellungen wie Zwei-Faktor-Authentifizierung (2FA).",
@@ -1144,10 +1138,8 @@
"two_factor_code": "Zwei-Faktor-Code",
"unlock_two_factor_authentication": "Zwei-Faktor-Authentifizierung mit einem höheren Plan freischalten",
"update_personal_info": "Persönliche Daten aktualisieren",
"upload_image": "Bild hochladen",
"warning_cannot_delete_account": "Du bist der einzige Besitzer dieser Organisation. Bitte übertrage das Eigentum zuerst an ein anderes Mitglied.",
"warning_cannot_undo": "Das kann nicht rückgängig gemacht werden",
"you_must_select_a_file": "Du musst eine Datei auswählen."
"warning_cannot_undo": "Das kann nicht rückgängig gemacht werden"
},
"teams": {
"add_members_description": "Füge Mitglieder zum Team hinzu und bestimme ihre Rolle.",
@@ -1715,10 +1707,8 @@
"language_help_text": "Die Meta-Daten werden basierend auf dem `lang` Wert in der URL geladen.",
"link_description": "Linkbeschreibung",
"link_description_description": "Beschreibung mit 55-200 Zeichen funktionieren am besten.",
"link_description_placeholder": "Hilf uns, indem du deine Gedanken teilst.",
"link_title": "Linktitel",
"link_title_description": "Kurze Titel funktionieren am besten als Meta-Titel.",
"link_title_placeholder": "Kundenfeedback-Umfrage",
"preview_image": "Vorschaubild",
"preview_image_description": "Querformatige Bilder mit kleiner Dateigröße (<4MB) funktionieren am besten.",
"title": "Link-Einstellungen"


@@ -141,7 +141,6 @@
"apply_filters": "Apply filters",
"are_you_sure": "Are you sure?",
"attributes": "Attributes",
"avatar": "Avatar",
"back": "Back",
"billing": "Billing",
"booked": "Booked",
@@ -748,6 +747,7 @@
"api_key_label": "API Key Label",
"api_key_security_warning": "For security reasons, the API key will only be shown once after creation. Please copy it to your destination right away.",
"api_key_updated": "API Key updated",
"delete_permission": "Delete permission",
"duplicate_access": "Duplicate project access not allowed",
"no_api_keys_yet": "You don't have any API keys yet",
"no_env_permissions_found": "No environment permissions found",
@@ -1111,9 +1111,7 @@
},
"profile": {
"account_deletion_consequences_warning": "Account deletion consequences",
"avatar_update_failed": "Avatar update failed. Please try again.",
"backup_code": "Backup Code",
"change_image": "Change image",
"confirm_delete_account": "Delete your account with all of your personal information and data",
"confirm_delete_my_account": "Delete My Account",
"confirm_your_current_password_to_get_started": "Confirm your current password to get started.",
@@ -1124,17 +1122,13 @@
"email_change_initiated": "Your email change request has been initiated.",
"enable_two_factor_authentication": "Enable two factor authentication",
"enter_the_code_from_your_authenticator_app_below": "Enter the code from your authenticator app below.",
"file_size_must_be_less_than_10mb": "File size must be less than 10MB.",
"invalid_file_type": "Invalid file type. Only JPEG, PNG, and WEBP files are allowed.",
"lost_access": "Lost access",
"or_enter_the_following_code_manually": "Or enter the following code manually:",
"organization_identification": "Assist your organization in identifying you on Formbricks",
"organizations_delete_message": "You are the only owner of these organizations, so they <b>will be deleted as well.</b>",
"permanent_removal_of_all_of_your_personal_information_and_data": "Permanent removal of all of your personal information and data",
"personal_information": "Personal information",
"please_enter_email_to_confirm_account_deletion": "Please enter {email} in the following field to confirm the definitive deletion of your account:",
"profile_updated_successfully": "Your profile was updated successfully",
"remove_image": "Remove image",
"save_the_following_backup_codes_in_a_safe_place": "Save the following backup codes in a safe place.",
"scan_the_qr_code_below_with_your_authenticator_app": "Scan the QR code below with your authenticator app.",
"security_description": "Manage your password and other security settings like two-factor authentication (2FA).",
@@ -1144,10 +1138,8 @@
"two_factor_code": "Two-Factor Code",
"unlock_two_factor_authentication": "Unlock two-factor authentication with a higher plan",
"update_personal_info": "Update your personal information",
"upload_image": "Upload image",
"warning_cannot_delete_account": "You are the only owner of this organization. Please transfer ownership to another member first.",
"warning_cannot_undo": "This cannot be undone",
"you_must_select_a_file": "You must select a file."
"warning_cannot_undo": "This cannot be undone"
},
"teams": {
"add_members_description": "Add members to the team and determine their role.",
@@ -1715,10 +1707,8 @@
"language_help_text": "The meta data is loaded based on the `lang` value in the URL.",
"link_description": "Link description",
"link_description_description": "Descriptions between 55-200 characters perform best.",
"link_description_placeholder": "Help us improve by sharing your thoughts.",
"link_title": "Link title",
"link_title_description": "Short titles perform best as Meta Titles.",
"link_title_placeholder": "Customer Feedback Survey",
"preview_image": "Preview image",
"preview_image_description": "Landscape images with small file sizes (<4MB) perform best.",
"title": "Link settings"


@@ -141,7 +141,6 @@
"apply_filters": "Appliquer des filtres",
"are_you_sure": "Es-tu sûr ?",
"attributes": "Attributs",
"avatar": "Avatar",
"back": "Retour",
"billing": "Facturation",
"booked": "Réservé",
@@ -748,6 +747,7 @@
"api_key_label": "Étiquette de clé API",
"api_key_security_warning": "Pour des raisons de sécurité, la clé API ne sera affichée qu'une seule fois après sa création. Veuillez la copier immédiatement à votre destination.",
"api_key_updated": "Clé API mise à jour",
"delete_permission": "Supprimer une permission",
"duplicate_access": "L'accès en double au projet n'est pas autorisé",
"no_api_keys_yet": "Vous n'avez pas encore de clés API.",
"no_env_permissions_found": "Aucune autorisation d'environnement trouvée",
@@ -1111,9 +1111,7 @@
},
"profile": {
"account_deletion_consequences_warning": "Conséquences de la suppression de compte",
"avatar_update_failed": "La mise à jour de l'avatar a échoué. Veuillez réessayer.",
"backup_code": "Code de sauvegarde",
"change_image": "Changer l'image",
"confirm_delete_account": "Supprimez votre compte avec toutes vos informations personnelles et données.",
"confirm_delete_my_account": "Supprimer mon compte",
"confirm_your_current_password_to_get_started": "Confirmez votre mot de passe actuel pour commencer.",
@@ -1124,17 +1122,13 @@
"email_change_initiated": "Votre demande de changement d'email a été initiée.",
"enable_two_factor_authentication": "Activer l'authentification à deux facteurs",
"enter_the_code_from_your_authenticator_app_below": "Entrez le code de votre application d'authentification ci-dessous.",
"file_size_must_be_less_than_10mb": "La taille du fichier doit être inférieure à 10 Mo.",
"invalid_file_type": "Type de fichier invalide. Seuls les fichiers JPEG, PNG et WEBP sont autorisés.",
"lost_access": "Accès perdu",
"or_enter_the_following_code_manually": "Ou entrez le code suivant manuellement :",
"organization_identification": "Aidez votre organisation à vous identifier sur Formbricks",
"organizations_delete_message": "Tu es le seul propriétaire de ces organisations, elles <b>seront aussi supprimées.</b>",
"permanent_removal_of_all_of_your_personal_information_and_data": "Suppression permanente de toutes vos informations et données personnelles.",
"personal_information": "Informations personnelles",
"please_enter_email_to_confirm_account_deletion": "Veuillez entrer {email} dans le champ suivant pour confirmer la suppression définitive de votre compte :",
"profile_updated_successfully": "Votre profil a été mis à jour avec succès.",
"remove_image": "Supprimer l'image",
"save_the_following_backup_codes_in_a_safe_place": "Enregistrez les codes de sauvegarde suivants dans un endroit sûr.",
"scan_the_qr_code_below_with_your_authenticator_app": "Scannez le code QR ci-dessous avec votre application d'authentification.",
"security_description": "Gérez votre mot de passe et d'autres paramètres de sécurité comme l'authentification à deux facteurs (2FA).",
@@ -1144,10 +1138,8 @@
"two_factor_code": "Code à deux facteurs",
"unlock_two_factor_authentication": "Débloquez l'authentification à deux facteurs avec une offre supérieure",
"update_personal_info": "Mettez à jour vos informations personnelles",
"upload_image": "Télécharger l'image",
"warning_cannot_delete_account": "Tu es le seul propriétaire de cette organisation. Transfère la propriété à un autre membre d'abord.",
"warning_cannot_undo": "Ceci ne peut pas être annulé",
"you_must_select_a_file": "Vous devez sélectionner un fichier."
"warning_cannot_undo": "Ceci ne peut pas être annulé"
},
"teams": {
"add_members_description": "Ajoutez des membres à l'équipe et déterminez leur rôle.",
@@ -1715,10 +1707,8 @@
"language_help_text": "Les métadonnées sont chargées en fonction de la valeur « lang » dans l'URL.",
"link_description": "Description du lien",
"link_description_description": "« Les descriptions entre 55 et 200 caractères donnent les meilleurs résultats. »",
"link_description_placeholder": "Aidez-nous à nous améliorer en partageant vos pensées.",
"link_title": "Titre du lien",
"link_title_description": "Les titres courts fonctionnent mieux comme titres méta.",
"link_title_placeholder": "Sondage de Retour Clients",
"preview_image": "Aperçu de l'image",
"preview_image_description": "Les images en paysage avec de petites tailles de fichier (<4MB) fonctionnent le mieux.",
"title": "Paramètres de lien"

apps/web/locales/ja-JP.json (new file, 2865 lines): file diff suppressed because it is too large.


@@ -141,7 +141,6 @@
"apply_filters": "Aplicar filtros",
"are_you_sure": "Certeza?",
"attributes": "atributos",
"avatar": "Avatar",
"back": "Voltar",
"billing": "Faturamento",
"booked": "Reservado",
@@ -748,6 +747,7 @@
"api_key_label": "Rótulo da Chave API",
"api_key_security_warning": "Por motivos de segurança, a chave da API será mostrada apenas uma vez após a criação. Por favor, copie-a para o seu destino imediatamente.",
"api_key_updated": "Chave de API atualizada",
"delete_permission": "Remover permissão",
"duplicate_access": "Acesso duplicado ao projeto não permitido",
"no_api_keys_yet": "Você ainda não tem nenhuma chave de API",
"no_env_permissions_found": "Nenhuma permissão de ambiente encontrada",
@@ -1111,9 +1111,7 @@
},
"profile": {
"account_deletion_consequences_warning": "Consequências da exclusão da conta",
"avatar_update_failed": "Falha ao atualizar o avatar. Por favor, tente novamente.",
"backup_code": "Código de Backup",
"change_image": "Mudar imagem",
"confirm_delete_account": "Apague sua conta com todas as suas informações pessoais e dados",
"confirm_delete_my_account": "Excluir Minha Conta",
"confirm_your_current_password_to_get_started": "Confirme sua senha atual para começar.",
@@ -1124,17 +1122,13 @@
"email_change_initiated": "Sua solicitação de alteração de e-mail foi iniciada.",
"enable_two_factor_authentication": "Ativar autenticação de dois fatores",
"enter_the_code_from_your_authenticator_app_below": "Digite o código do seu app autenticador abaixo.",
"file_size_must_be_less_than_10mb": "O tamanho do arquivo deve ser menor que 10MB.",
"invalid_file_type": "Tipo de arquivo inválido. Só são permitidos arquivos JPEG, PNG e WEBP.",
"lost_access": "Perdi o acesso",
"or_enter_the_following_code_manually": "Ou insira o seguinte código manualmente:",
"organization_identification": "Ajude sua organização a te identificar no Formbricks",
"organizations_delete_message": "Você é o único dono dessas organizações, então elas <b>também serão apagadas.</b>",
"permanent_removal_of_all_of_your_personal_information_and_data": "Remoção permanente de todas as suas informações e dados pessoais",
"personal_information": "Informações pessoais",
"please_enter_email_to_confirm_account_deletion": "Por favor, insira {email} no campo abaixo para confirmar a exclusão definitiva da sua conta:",
"profile_updated_successfully": "Seu perfil foi atualizado com sucesso",
"remove_image": "Remover imagem",
"save_the_following_backup_codes_in_a_safe_place": "Guarde os seguintes códigos de backup em um lugar seguro.",
"scan_the_qr_code_below_with_your_authenticator_app": "Escaneie o código QR abaixo com seu app autenticador.",
"security_description": "Gerencie sua senha e outras configurações de segurança como a autenticação de dois fatores (2FA).",
@@ -1144,10 +1138,8 @@
"two_factor_code": "Código de Dois Fatores",
"unlock_two_factor_authentication": "Desbloqueia a autenticação de dois fatores com um plano melhor",
"update_personal_info": "Atualize suas informações pessoais",
"upload_image": "Enviar imagem",
"warning_cannot_delete_account": "Você é o único dono desta organização. Transfere a propriedade para outra pessoa primeiro.",
"warning_cannot_undo": "Isso não pode ser desfeito",
"you_must_select_a_file": "Você tem que selecionar um arquivo."
"warning_cannot_undo": "Isso não pode ser desfeito"
},
"teams": {
"add_members_description": "Adicione membros à equipe e determine sua função.",
@@ -1715,10 +1707,8 @@
"language_help_text": "Os metadados são carregados com base no valor `lang` na URL.",
"link_description": "Descrição do link",
"link_description_description": "\"Descrições entre 55-200 caracteres têm um melhor desempenho.\"",
"link_description_placeholder": "Ajude-nos a melhorar compartilhando suas opiniões.",
"link_title": "Título do link",
"link_title_description": "Títulos curtos têm melhor desempenho como Meta Títulos.",
"link_title_placeholder": "Pesquisa de Feedback do Cliente",
"preview_image": "Imagem de prévia",
"preview_image_description": "Imagens em paisagem com tamanhos de arquivo pequenos (<4MB) têm o melhor desempenho.",
"title": "Configurações de link"


@@ -141,7 +141,6 @@
"apply_filters": "Aplicar filtros",
"are_you_sure": "Tem a certeza?",
"attributes": "Atributos",
"avatar": "Avatar",
"back": "Voltar",
"billing": "Faturação",
"booked": "Reservado",
@@ -748,6 +747,7 @@
"api_key_label": "Etiqueta da Chave API",
"api_key_security_warning": "Por razões de segurança, a chave API será mostrada apenas uma vez após a criação. Por favor, copie-a para o seu destino imediatamente.",
"api_key_updated": "Chave API atualizada",
"delete_permission": "Eliminar permissão",
"duplicate_access": "Acesso duplicado ao projeto não permitido",
"no_api_keys_yet": "Ainda não tem nenhuma chave API",
"no_env_permissions_found": "Nenhuma permissão de ambiente encontrada",
@@ -1111,9 +1111,7 @@
},
"profile": {
"account_deletion_consequences_warning": "Consequências da eliminação da conta",
"avatar_update_failed": "Falha na atualização do avatar. Por favor, tente novamente.",
"backup_code": "Código de Backup",
"change_image": "Alterar imagem",
"confirm_delete_account": "Eliminar a sua conta com todas as suas informações e dados pessoais",
"confirm_delete_my_account": "Eliminar a Minha Conta",
"confirm_your_current_password_to_get_started": "Confirme a sua palavra-passe atual para começar.",
@@ -1124,17 +1122,13 @@
"email_change_initiated": "O seu pedido de alteração de email foi iniciado.",
"enable_two_factor_authentication": "Ativar autenticação de dois fatores",
"enter_the_code_from_your_authenticator_app_below": "Introduza o código da sua aplicação de autenticação abaixo.",
"file_size_must_be_less_than_10mb": "O tamanho do ficheiro deve ser inferior a 10MB.",
"invalid_file_type": "Tipo de ficheiro inválido. Apenas são permitidos ficheiros JPEG, PNG e WEBP.",
"lost_access": "Perdeu o acesso",
"or_enter_the_following_code_manually": "Ou insira o seguinte código manualmente:",
"organization_identification": "Ajude a sua organização a identificá-lo no Formbricks",
"organizations_delete_message": "É o único proprietário destas organizações, por isso <b>também serão eliminadas.</b>",
"permanent_removal_of_all_of_your_personal_information_and_data": "Remoção permanente de todas as suas informações e dados pessoais",
"personal_information": "Informações pessoais",
"please_enter_email_to_confirm_account_deletion": "Por favor, insira {email} no campo seguinte para confirmar a eliminação definitiva da sua conta:",
"profile_updated_successfully": "O seu perfil foi atualizado com sucesso",
"remove_image": "Remover imagem",
"save_the_following_backup_codes_in_a_safe_place": "Guarde os seguintes códigos de backup num local seguro.",
"scan_the_qr_code_below_with_your_authenticator_app": "Digitalize o código QR abaixo com a sua aplicação de autenticação.",
"security_description": "Gerir a sua palavra-passe e outras definições de segurança, como a autenticação de dois fatores (2FA).",
@@ -1144,10 +1138,8 @@
"two_factor_code": "Código de Dois Fatores",
"unlock_two_factor_authentication": "Desbloqueie a autenticação de dois fatores com um plano superior",
"update_personal_info": "Atualize as suas informações pessoais",
"upload_image": "Carregar imagem",
"warning_cannot_delete_account": "É o único proprietário desta organização. Transfira a propriedade para outro membro primeiro.",
"warning_cannot_undo": "Isto não pode ser desfeito",
"you_must_select_a_file": "Deve selecionar um ficheiro."
"warning_cannot_undo": "Isto não pode ser desfeito"
},
"teams": {
"add_members_description": "Adicionar membros à equipa e determinar o seu papel.",
@@ -1715,10 +1707,8 @@
"language_help_text": "Os metadados são carregados com base no valor `lang` no URL.",
"link_description": "Descrição do link",
"link_description_description": "Descrições entre 55 a 200 caracteres têm melhor desempenho.",
"link_description_placeholder": "Ajude-nos a melhorar compartilhando suas opiniões.",
"link_title": "Título do Link",
"link_title_description": "Títulos curtos têm melhor desempenho como Meta Titles.",
"link_title_placeholder": "Inquérito de Feedback do Cliente",
"preview_image": "Pré-visualização da imagem",
"preview_image_description": "Imagens de paisagem com tamanhos pequenos (<4MB) apresentam melhor desempenho.",
"title": "Definições de ligação"


@@ -141,7 +141,6 @@
"apply_filters": "Aplică filtre",
"are_you_sure": "Ești sigur?",
"attributes": "Atribute",
"avatar": "Avatar",
"back": "Înapoi",
"billing": "Facturare",
"booked": "Rezervat",
@@ -748,6 +747,7 @@
"api_key_label": "Etichetă Cheie API",
"api_key_security_warning": "Din motive de securitate, cheia API va fi afișată o singură dată după creare. Vă rugăm să o copiați imediat la destinație.",
"api_key_updated": "Cheie API actualizată",
"delete_permission": "Șterge permisiunea",
"duplicate_access": "Accesul dublu la proiect nu este permis",
"no_api_keys_yet": "Nu aveți încă chei API",
"no_env_permissions_found": "Nu s-au găsit permisiuni pentru mediu",
@@ -1111,9 +1111,7 @@
},
"profile": {
"account_deletion_consequences_warning": "Consecințele ștergerii contului",
"avatar_update_failed": "Actualizarea avatarului a eșuat. Vă rugăm să încercați din nou.",
"backup_code": "Cod de rezervă",
"change_image": "Schimbă imaginea",
"confirm_delete_account": "Șterge contul tău cu toate informațiile personale și datele tale",
"confirm_delete_my_account": "Șterge Contul Meu",
"confirm_your_current_password_to_get_started": "Confirmaţi parola curentă pentru a începe.",
@@ -1124,17 +1122,13 @@
"email_change_initiated": "Cererea dvs. de schimbare a e-mailului a fost inițiată.",
"enable_two_factor_authentication": "Activează autentificarea în doi pași",
"enter_the_code_from_your_authenticator_app_below": "Introduceți codul din aplicația dvs. de autentificare mai jos.",
"file_size_must_be_less_than_10mb": "Dimensiunea fișierului trebuie să fie mai mică de 10MB.",
"invalid_file_type": "Tip de fișier invalid. Sunt permise numai fișiere JPEG, PNG și WEBP.",
"lost_access": "Acces pierdut",
"or_enter_the_following_code_manually": "Sau introduceți manual următorul cod:",
"organization_identification": "Ajutați organizația să vă identifice pe Formbricks",
"organizations_delete_message": "Ești singurul proprietar al acestor organizații, deci ele <b>vor fi șterse și ele.</b>",
"permanent_removal_of_all_of_your_personal_information_and_data": "Ștergerea permanentă a tuturor informațiilor și datelor tale personale",
"personal_information": "Informații personale",
"please_enter_email_to_confirm_account_deletion": "Vă rugăm să introduceți {email} în câmpul următor pentru a confirma ștergerea definitivă a contului dumneavoastră:",
"profile_updated_successfully": "Profilul dvs. a fost actualizat cu succes",
"remove_image": "Șterge imaginea",
"save_the_following_backup_codes_in_a_safe_place": "Salvează următoarele coduri de rezervă într-un loc sigur.",
"scan_the_qr_code_below_with_your_authenticator_app": "Scanați codul QR de mai jos cu aplicația dvs. de autentificare.",
"security_description": "Gestionează parola și alte setări de securitate, precum autentificarea în doi pași (2FA).",
@@ -1144,10 +1138,8 @@
"two_factor_code": "Codul cu doi factori",
"unlock_two_factor_authentication": "Deblocați autentificarea în doi pași cu un plan superior",
"update_personal_info": "Actualizează informațiile tale personale",
"upload_image": "Încărcați imagine",
"warning_cannot_delete_account": "Ești singurul proprietar al acestei organizații. Te rugăm să transferi proprietatea către un alt membru mai întâi.",
"warning_cannot_undo": "Aceasta nu poate fi anulată",
"you_must_select_a_file": "Trebuie să selectați un fișier."
"warning_cannot_undo": "Aceasta nu poate fi anulată"
},
"teams": {
"add_members_description": "Adaugă membri în echipă și stabilește rolul lor.",
@@ -1715,10 +1707,8 @@
"language_help_text": "Meta datele sunt încărcate pe baza valorii `lang` din URL.",
"link_description": "Descriere legătură",
"link_description_description": "Descrierile între 55-200 de caractere au cele mai bune performanțe.",
"link_description_placeholder": "Ajutați-ne să ne îmbunătățim împărtășindu-vă gândurile.",
"link_title": "Titlu link",
"link_title_description": "Titlurile scurte funcționează cel mai bine ca Meta Title-uri.",
"link_title_placeholder": "Chestionar de feedback al clienților",
"preview_image": "Previzualizare imagine",
"preview_image_description": "Imaginile panoramice cu dimensiuni de fișier mici (<4MB) au cel mai bun randament.",
"title": "Setări link"


@@ -141,7 +141,6 @@
"apply_filters": "套用篩選器",
"are_you_sure": "您確定嗎?",
"attributes": "屬性",
"avatar": "頭像",
"back": "返回",
"billing": "帳單",
"booked": "已預訂",
@@ -748,6 +747,7 @@
"api_key_label": "API 金鑰標籤",
"api_key_security_warning": "為安全起見API 金鑰僅在建立後顯示一次。請立即將其複製到您的目的地。",
"api_key_updated": "API 金鑰已更新",
"delete_permission": "刪除 權限",
"duplicate_access": "不允許重複的 project 存取",
"no_api_keys_yet": "您還沒有任何 API 金鑰",
"no_env_permissions_found": "找不到環境權限",
@@ -1111,9 +1111,7 @@
},
"profile": {
"account_deletion_consequences_warning": "帳戶刪除後果",
"avatar_update_failed": "頭像更新失敗。請再試一次。",
"backup_code": "備份碼",
"change_image": "變更圖片",
"confirm_delete_account": "刪除您的帳戶以及您的所有個人資訊和資料",
"confirm_delete_my_account": "刪除我的帳戶",
"confirm_your_current_password_to_get_started": "確認您目前的密碼以開始使用。",
@@ -1124,17 +1122,13 @@
"email_change_initiated": "您的 email 更改請求已啟動。",
"enable_two_factor_authentication": "啟用雙重驗證",
"enter_the_code_from_your_authenticator_app_below": "在下方輸入您驗證器應用程式中的程式碼。",
"file_size_must_be_less_than_10mb": "檔案大小必須小於 10MB。",
"invalid_file_type": "無效的檔案類型。僅允許 JPEG、PNG 和 WEBP 檔案。",
"lost_access": "無法存取",
"or_enter_the_following_code_manually": "或手動輸入下列程式碼:",
"organization_identification": "協助您的組織在 Formbricks 上識別您",
"organizations_delete_message": "您是這些組織的唯一擁有者,因此它們也 <b>將被刪除。</b>",
"permanent_removal_of_all_of_your_personal_information_and_data": "永久移除您的所有個人資訊和資料",
"personal_information": "個人資訊",
"please_enter_email_to_confirm_account_deletion": "請在以下欄位中輸入 '{'email'}' 以確認永久刪除您的帳戶:",
"profile_updated_successfully": "您的個人資料已成功更新",
"remove_image": "移除圖片",
"save_the_following_backup_codes_in_a_safe_place": "將下列備份碼儲存在安全的地方。",
"scan_the_qr_code_below_with_your_authenticator_app": "使用您的驗證器應用程式掃描下方的 QR 碼。",
"security_description": "管理您的密碼和其他安全性設定,例如雙重驗證 (2FA)。",
@@ -1144,10 +1138,8 @@
"two_factor_code": "雙重驗證碼",
"unlock_two_factor_authentication": "使用更高等級的方案解鎖雙重驗證",
"update_personal_info": "更新您的個人資訊",
"upload_image": "上傳圖片",
"warning_cannot_delete_account": "您是此組織的唯一擁有者。請先將所有權轉讓給其他成員。",
"warning_cannot_undo": "此操作無法復原",
"you_must_select_a_file": "您必須選取檔案。"
"warning_cannot_undo": "此操作無法復原"
},
"teams": {
"add_members_description": "將成員新增至團隊並確定其角色。",
@@ -1715,10 +1707,8 @@
"language_help_text": "中 繼資料 會 根據 URL 中 的 `lang` 值 載入。",
"link_description": "連結描述",
"link_description_description": "描述在 55 - 200 個字符之間的表現最好。",
"link_description_placeholder": "幫助 我們 改善 , 分享 您 的 想法 。",
"link_title": "連結標題",
"link_title_description": "短 標題 在 Meta Titles 中表現最佳。",
"link_title_placeholder": "顧客 回饋 調查",
"preview_image": "預覽 圖片",
"preview_image_description": "景觀 圖片 檔案 大小 小於 4MB 效果 最佳。",
"title": "連結 設定"


@@ -31,7 +31,7 @@ vi.mock("@/lib/constants", () => ({
SESSION_MAX_AGE: 86400,
NEXTAUTH_SECRET: "test-secret",
WEBAPP_URL: "http://localhost:3000",
ENCRYPTION_KEY: "test-encryption-key-32-chars-long",
ENCRYPTION_KEY: "12345678901234567890123456789012", // 32 bytes for AES-256
REDIS_URL: undefined,
AUDIT_LOG_ENABLED: false,
AUDIT_LOG_GET_USER_IP: false,
@@ -148,7 +148,6 @@ describe("authOptions", () => {
email: mockUser.email,
password: mockHashedPassword,
emailVerified: new Date(),
imageUrl: "http://example.com/avatar.png",
twoFactorEnabled: false,
};
@@ -161,7 +160,6 @@ describe("authOptions", () => {
id: fakeUser.id,
email: fakeUser.email,
emailVerified: fakeUser.emailVerified,
imageUrl: fakeUser.imageUrl,
});
});
@@ -263,7 +261,7 @@ describe("authOptions", () => {
vi.mocked(applyIPRateLimit).mockResolvedValue(); // Rate limiting passes
vi.spyOn(prisma.user, "findUnique").mockResolvedValue(mockUser as any);
const credentials = { token: createToken(mockUser.id, mockUser.email) };
const credentials = { token: createToken(mockUser.id) };
await expect(tokenProvider.options.authorize(credentials, {})).rejects.toThrow(
"Email already verified"
@@ -282,7 +280,7 @@ describe("authOptions", () => {
groupId: null,
} as any);
const credentials = { token: createToken(mockUserId, mockUser.email) };
const credentials = { token: createToken(mockUserId) };
const result = await tokenProvider.options.authorize(credentials, {});
expect(result.email).toBe(mockUser.email);
@@ -305,7 +303,7 @@ describe("authOptions", () => {
groupId: null,
} as any);
const credentials = { token: createToken(mockUserId, mockUser.email) };
const credentials = { token: createToken(mockUserId) };
await tokenProvider.options.authorize(credentials, {});
@@ -317,7 +315,7 @@ describe("authOptions", () => {
new Error("Maximum number of requests reached. Please try again later.")
);
const credentials = { token: createToken(mockUserId, mockUser.email) };
const credentials = { token: createToken(mockUserId) };
await expect(tokenProvider.options.authorize(credentials, {})).rejects.toThrow(
"Maximum number of requests reached. Please try again later."
@@ -341,7 +339,7 @@ describe("authOptions", () => {
groupId: null,
} as any);
const credentials = { token: createToken(mockUserId, mockUser.email) };
const credentials = { token: createToken(mockUserId) };
await tokenProvider.options.authorize(credentials, {});
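
The mocked ENCRYPTION_KEY above is now an explicit 32-character string because AES-256 needs a 256-bit (32-byte) key. A standalone sketch of why the length matters (this is not Formbricks' implementation, and the GCM mode here is only an assumption; the point is the key-length requirement):

import crypto from "node:crypto";

const key = Buffer.from("12345678901234567890123456789012", "utf8"); // 32 bytes, valid for AES-256
const iv = crypto.randomBytes(12); // 96-bit IV, the recommended size for GCM

// createCipheriv throws "Invalid key length" for any key that is not exactly 32 bytes with aes-256-gcm.
const cipher = crypto.createCipheriv("aes-256-gcm", key, iv);
const ciphertext = Buffer.concat([cipher.update("hello", "utf8"), cipher.final()]);
const authTag = cipher.getAuthTag();

console.log(key.length, ciphertext.length, authTag.length); // 32 5 16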


@@ -111,7 +111,7 @@ export const sendVerificationEmail = async ({
}): Promise<boolean> => {
try {
const t = await getTranslate();
const token = createToken(id, email, {
const token = createToken(id, {
expiresIn: "1d",
});
const verifyLink = `${WEBAPP_URL}/auth/verify?token=${encodeURIComponent(token)}`;
@@ -136,7 +136,7 @@ export const sendForgotPasswordEmail = async (user: {
locale: TUserLocale;
}): Promise<boolean> => {
const t = await getTranslate();
const token = createToken(user.id, user.email, {
const token = createToken(user.id, {
expiresIn: "1d",
});
const verifyLink = `${WEBAPP_URL}/auth/forgot-password/reset?token=${encodeURIComponent(token)}`;


@@ -92,7 +92,7 @@ describe("contact-survey page", () => {
params: Promise.resolve({ jwt: "token" }),
searchParams: Promise.resolve({}),
});
expect(meta).toEqual({ title: "Survey", description: "Complete this survey" });
expect(meta).toEqual({ title: "Survey", description: "Please complete this survey." });
});
test("generateMetadata returns default when verify throws", async () => {
@@ -103,7 +103,7 @@ describe("contact-survey page", () => {
params: Promise.resolve({ jwt: "token" }),
searchParams: Promise.resolve({}),
});
expect(meta).toEqual({ title: "Survey", description: "Complete this survey" });
expect(meta).toEqual({ title: "Survey", description: "Please complete this survey." });
});
test("generateMetadata returns basic metadata when token valid", async () => {


@@ -31,7 +31,7 @@ export const generateMetadata = async (props: ContactSurveyPageProps): Promise<M
if (!result.ok) {
return {
title: "Survey",
description: "Complete this survey",
description: "Please complete this survey.",
};
}
const { surveyId } = result.data;
@@ -40,7 +40,7 @@ export const generateMetadata = async (props: ContactSurveyPageProps): Promise<M
// If the token is invalid, we'll return generic metadata
return {
title: "Survey",
description: "Complete this survey",
description: "Please complete this survey.",
};
}
};


@@ -77,7 +77,7 @@ describe("Metadata Utils", () => {
expect(getSurvey).toHaveBeenCalledWith(mockSurveyId);
expect(result).toEqual({
title: "Survey",
description: "Complete this survey",
description: "Please complete this survey.",
survey: null,
ogImage: undefined,
});
@@ -108,10 +108,9 @@ describe("Metadata Utils", () => {
const result = await getBasicSurveyMetadata(mockSurveyId);
expect(getSurvey).toHaveBeenCalledWith(mockSurveyId);
expect(getProjectByEnvironmentId).toHaveBeenCalledWith(mockEnvironmentId);
expect(result).toEqual({
title: "Welcome Headline | Test Project",
description: "Complete this survey",
title: "Welcome Headline",
description: "Please complete this survey.",
survey: mockSurvey,
ogImage: undefined,
});
@@ -129,13 +128,12 @@ describe("Metadata Utils", () => {
} as TSurvey;
vi.mocked(getSurvey).mockResolvedValue(mockSurvey);
vi.mocked(getProjectByEnvironmentId).mockResolvedValue({ name: "Test Project" } as any);
const result = await getBasicSurveyMetadata(mockSurveyId);
expect(result).toEqual({
title: "Test Survey | Test Project",
description: "Complete this survey",
title: "Test Survey",
description: "Please complete this survey.",
survey: mockSurvey,
ogImage: undefined,
});

View File

@@ -1,8 +1,8 @@
import { IS_FORMBRICKS_CLOUD } from "@/lib/constants";
import { getPublicDomain } from "@/lib/getPublicUrl";
import { getLocalizedValue } from "@/lib/i18n/utils";
import { COLOR_DEFAULTS } from "@/lib/styling/constants";
import { getSurvey } from "@/modules/survey/lib/survey";
import { getProjectByEnvironmentId } from "@/modules/survey/link/lib/project";
import { Metadata } from "next";
type TBasicSurveyMetadata = {
@@ -12,22 +12,16 @@ type TBasicSurveyMetadata = {
ogImage?: string;
};
/**
* Utility function to encode name for URL usage
*/
export const getNameForURL = (url: string) => url.replace(/ /g, "%20");
export const getNameForURL = (value: string) => encodeURIComponent(value);
/**
* Utility function to encode brand color for URL usage
*/
export const getBrandColorForURL = (url: string) => url.replace(/#/g, "%23");
export const getBrandColorForURL = (value: string) => encodeURIComponent(value);
/**
* Get basic survey metadata (title and description) based on link metadata, welcome card or survey name
*/
export const getBasicSurveyMetadata = async (
surveyId: string,
languageCode?: string
languageCode = "default"
): Promise<TBasicSurveyMetadata> => {
const survey = await getSurvey(surveyId);
@@ -35,7 +29,7 @@ export const getBasicSurveyMetadata = async (
if (!survey) {
return {
title: "Survey",
description: "Complete this survey",
description: "Please complete this survey.",
survey: null,
ogImage: undefined,
};
@@ -43,38 +37,33 @@ export const getBasicSurveyMetadata = async (
const metadata = survey.metadata;
const welcomeCard = survey.welcomeCard;
const useDefaultLanguageCode =
languageCode === "default" ||
survey.languages.find((lang) => lang.language.code === languageCode)?.default;
// Determine language code to use for metadata
const langCode = languageCode || "default";
const langCode = useDefaultLanguageCode ? "default" : languageCode;
// Set title - priority: custom link metadata > welcome card > survey name
let title = "Survey";
if (metadata.title?.[langCode]) {
title = metadata.title[langCode];
} else if (welcomeCard.enabled && welcomeCard.headline?.default) {
title = welcomeCard.headline.default;
} else {
title = survey.name;
}
const titleFromMetadata = metadata?.title ? getLocalizedValue(metadata.title, langCode) || "" : undefined;
const titleFromWelcome =
welcomeCard?.enabled && welcomeCard.headline
? getLocalizedValue(welcomeCard.headline, langCode) || ""
: undefined;
let title = titleFromMetadata || titleFromWelcome || survey.name;
// Set description - priority: custom link metadata > welcome card > default
let description = "Complete this survey";
if (metadata.description?.[langCode]) {
description = metadata.description[langCode];
}
const descriptionFromMetadata = metadata?.description
? getLocalizedValue(metadata.description, langCode) || ""
: undefined;
let description = descriptionFromMetadata || "Please complete this survey.";
// Get OG image from link metadata if available
const { ogImage } = metadata;
// Add product name in title if it's Formbricks cloud and not using custom metadata
if (!metadata.title?.[langCode]) {
if (!titleFromMetadata) {
if (IS_FORMBRICKS_CLOUD) {
title = `${title} | Formbricks`;
} else {
const project = await getProjectByEnvironmentId(survey.environmentId);
if (project) {
title = `${title} | ${project.name}`;
}
}
}
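
A worked sketch of the new language resolution (data shapes simplified for illustration): the metadata now falls back to the "default" entry whenever no language is requested or the requested language is the survey's default one.

type TSurveyLanguage = { language: { code: string }; default: boolean };

const languages: TSurveyLanguage[] = [
  { language: { code: "en" }, default: true },
  { language: { code: "de" }, default: false },
];

// Mirrors the resolution above.
const resolveLangCode = (languageCode = "default"): string => {
  const useDefaultLanguageCode =
    languageCode === "default" ||
    languages.find((lang) => lang.language.code === languageCode)?.default;
  return useDefaultLanguageCode ? "default" : languageCode;
};

console.log(resolveLangCode());     // "default"
console.log(resolveLangCode("en")); // "default" (en is the survey's default language)
console.log(resolveLangCode("de")); // "de"
console.log(resolveLangCode("fr")); // "fr" (unknown codes fall through unchanged)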
@@ -89,10 +78,13 @@ export const getBasicSurveyMetadata = async (
/**
* Generate Open Graph metadata for survey
*/
export const getSurveyOpenGraphMetadata = (surveyId: string, surveyName: string): Metadata => {
const brandColor = getBrandColorForURL(COLOR_DEFAULTS.brandColor); // Default color
export const getSurveyOpenGraphMetadata = (
surveyId: string,
surveyName: string,
surveyBrandColor?: string
): Metadata => {
const encodedName = getNameForURL(surveyName);
const brandColor = getBrandColorForURL(surveyBrandColor ?? COLOR_DEFAULTS.brandColor);
const ogImgURL = `/api/v1/client/og?brandColor=${brandColor}&name=${encodedName}`;
return {
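
And a quick sketch of how the Open Graph image URL is assembled with the URL-safe helpers (the color and name values here are illustrative, not the actual defaults): encodeURIComponent replaces the old single-character replace() calls, so spaces, "#" and any other reserved characters are escaped.

const getNameForURL = (value: string) => encodeURIComponent(value);
const getBrandColorForURL = (value: string) => encodeURIComponent(value);

const encodedName = getNameForURL("Customer Feedback Survey"); // "Customer%20Feedback%20Survey"
const brandColor = getBrandColorForURL("#64748b"); // "%2364748b"

const ogImgURL = `/api/v1/client/og?brandColor=${brandColor}&name=${encodedName}`;
console.log(ogImgURL);
// "/api/v1/client/og?brandColor=%2364748b&name=Customer%20Feedback%20Survey"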


@@ -20,7 +20,7 @@ vi.mock("./lib/metadata-utils", () => ({
describe("getMetadataForLinkSurvey", () => {
const mockSurveyId = "survey-123";
const mockSurveyName = "Test Survey";
const mockDescription = "Complete this survey";
const mockDescription = "Please complete this survey.";
const mockOgImageUrl = "https://example.com/custom-image.png";
beforeEach(() => {
@@ -60,7 +60,7 @@ describe("getMetadataForLinkSurvey", () => {
expect(getSurveyMetadata).toHaveBeenCalledWith(mockSurveyId);
expect(getBasicSurveyMetadata).toHaveBeenCalledWith(mockSurveyId, undefined);
expect(getSurveyOpenGraphMetadata).toHaveBeenCalledWith(mockSurveyId, mockSurveyName);
expect(getSurveyOpenGraphMetadata).toHaveBeenCalledWith(mockSurveyId, mockSurveyName, undefined);
expect(result).toEqual({
title: mockSurveyName,


@@ -15,9 +15,10 @@ export const getMetadataForLinkSurvey = async (
// Get enhanced metadata that includes custom link metadata
const { title, description, ogImage } = await getBasicSurveyMetadata(surveyId, languageCode);
const surveyBrandColor = survey.styling?.brandColor?.light;
// Use the shared function for creating the base metadata but override with custom data
const baseMetadata = getSurveyOpenGraphMetadata(survey.id, title);
const baseMetadata = getSurveyOpenGraphMetadata(survey.id, title, surveyBrandColor);
// Override with the custom image URL
if (baseMetadata.openGraph) {


@@ -233,12 +233,31 @@ describe("ConditionsEditor", () => {
expect(mockCallbacks.onDuplicateCondition).toHaveBeenCalledWith("cond1");
});
test("calls onCreateGroup from the dropdown menu", async () => {
test("calls onCreateGroup from the dropdown menu when enabled", async () => {
const user = userEvent.setup();
render(
<ConditionsEditor conditions={multipleConditions} config={mockConfig} callbacks={mockCallbacks} />
);
const createGroupButtons = screen.getAllByText("environments.surveys.edit.create_group");
await user.click(createGroupButtons[0]); // Click the first one
expect(mockCallbacks.onCreateGroup).toHaveBeenCalledWith("cond1");
});
test("disables the 'Create Group' button when there's only one condition", () => {
render(<ConditionsEditor conditions={singleCondition} config={mockConfig} callbacks={mockCallbacks} />);
const createGroupButton = screen.getByText("environments.surveys.edit.create_group");
await user.click(createGroupButton);
expect(mockCallbacks.onCreateGroup).toHaveBeenCalledWith("cond1");
expect(createGroupButton).toBeDisabled();
});
test("enables the 'Create Group' button when there are multiple conditions", () => {
render(
<ConditionsEditor conditions={multipleConditions} config={mockConfig} callbacks={mockCallbacks} />
);
const createGroupButtons = screen.getAllByText("environments.surveys.edit.create_group");
// Both buttons should be enabled since the main group has multiple conditions
createGroupButtons.forEach((button) => {
expect(button).not.toBeDisabled();
});
});
test("calls onToggleGroupConnector when the connector is changed", async () => {


@@ -233,7 +233,8 @@ export function ConditionsEditor({ conditions, config, callbacks, depth = 0 }: C
</DropdownMenuItem>
<DropdownMenuItem
onClick={() => callbacks.onCreateGroup(condition.id)}
icon={<WorkflowIcon className="h-4 w-4" />}>
icon={<WorkflowIcon className="h-4 w-4" />}
disabled={conditions.conditions.length <= 1}>
{t("environments.surveys.edit.create_group")}
</DropdownMenuItem>
</DropdownMenuContent>


@@ -256,7 +256,7 @@ EOT
fi
echo "📥 Downloading docker-compose.yml from Formbricks GitHub repository..."
curl -o docker-compose.yml https://raw.githubusercontent.com/formbricks/formbricks/main/docker/docker-compose.yml
curl -o docker-compose.yml https://raw.githubusercontent.com/formbricks/formbricks/stable/docker/docker-compose.yml
echo "🚙 Updating docker-compose.yml with your custom inputs..."
sed -i "/WEBAPP_URL:/s|WEBAPP_URL:.*|WEBAPP_URL: \"https://$domain_name\"|" docker-compose.yml


@@ -765,7 +765,7 @@ export function Survey({
<LanguageSwitch
surveyLanguages={localSurvey.languages}
setSelectedLanguageCode={setselectedLanguage}
hoverColor={styling.inputColor?.light ?? "#000000"}
hoverColor={styling.inputColor?.light ?? "#f8fafc"}
borderRadius={styling.roundness ?? 8}
/>
)}
@@ -776,7 +776,7 @@ export function Survey({
{isCloseButtonVisible && (
<SurveyCloseButton
onClose={onClose}
hoverColor={styling.inputColor?.light ?? "#000000"}
hoverColor={styling.inputColor?.light ?? "#f8fafc"}
borderRadius={styling.roundness ?? 8}
/>
)}