Compare commits
1 commit: poc-featur...stepsecuri

| Author | SHA1 | Date |
|---|---|---|
|  | 93f60db26c |  |

@@ -1,352 +0,0 @@
# Create New Question Element

Use this command to scaffold a new question element component in `packages/survey-ui/src/elements/`.

## Usage

When creating a new question type (e.g., `single-select`, `rating`, `nps`), follow these steps:

1. **Create the component file** `{question-type}.tsx` with this structure:
```typescript
import * as React from "react";
import { ElementHeader } from "../components/element-header";
import { useTextDirection } from "../hooks/use-text-direction";
import { cn } from "../lib/utils";

interface {QuestionType}Props {
  /** Unique identifier for the element container */
  elementId: string;
  /** The main question or prompt text displayed as the headline */
  headline: string;
  /** Optional descriptive text displayed below the headline */
  description?: string;
  /** Unique identifier for the input/control group */
  inputId: string;
  /** Current value */
  value?: {ValueType};
  /** Callback function called when the value changes */
  onChange: (value: {ValueType}) => void;
  /** Whether the field is required (shows asterisk indicator) */
  required?: boolean;
  /** Error message to display */
  errorMessage?: string;
  /** Text direction: 'ltr' (left-to-right), 'rtl' (right-to-left), or 'auto' (auto-detect from content) */
  dir?: "ltr" | "rtl" | "auto";
  /** Whether the controls are disabled */
  disabled?: boolean;
  // Add question-specific props here
}

function {QuestionType}({
  elementId,
  headline,
  description,
  inputId,
  value,
  onChange,
  required = false,
  errorMessage,
  dir = "auto",
  disabled = false,
  // ... question-specific props
}: {QuestionType}Props): React.JSX.Element {
  // Ensure value is always the correct type (handle undefined/null)
  const currentValue = value ?? {defaultValue};

  // Detect text direction from content
  const detectedDir = useTextDirection({
    dir,
    textContent: [headline, description ?? "", /* add other text content from question */],
  });

  return (
    <div className="w-full space-y-4" id={elementId} dir={detectedDir}>
      {/* Headline */}
      <ElementHeader
        headline={headline}
        description={description}
        required={required}
        htmlFor={inputId}
      />

      {/* Question-specific controls */}
      {/* TODO: Add your question-specific UI here */}

      {/* Error message */}
      {errorMessage && (
        <div className="text-destructive flex items-center gap-1 text-sm" dir={detectedDir}>
          <span>{errorMessage}</span>
        </div>
      )}
    </div>
  );
}

export { {QuestionType} };
export type { {QuestionType}Props };
```
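For orientation, here is a minimal sketch of how an element scaffolded from this template might be rendered by a host page. The `Rating` component, its `number` value type, and the surrounding page are hypothetical placeholders, not part of the template above:

```typescript
import * as React from "react";
// Hypothetical element created from the template above (value type assumed to be a number).
import { Rating } from "./rating";

function ExampleSurveyPage(): React.JSX.Element {
  // Keep the element controlled: the page owns the value and passes it back down.
  const [value, setValue] = React.useState<number | undefined>(undefined);

  return (
    <Rating
      elementId="element-rating-1"
      inputId="input-rating-1"
      headline="How satisfied are you?"
      description="1 = not at all, 5 = very satisfied"
      value={value}
      onChange={setValue}
      required
      dir="auto"
    />
  );
}

export { ExampleSurveyPage };
```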
2. **Create the Storybook file** `{question-type}.stories.tsx`:
```typescript
import type { Decorator, Meta, StoryObj } from "@storybook/react";
import React from "react";
import { {QuestionType}, type {QuestionType}Props } from "./{question-type}";

// Styling options for the StylingPlayground story
interface StylingOptions {
  // Question styling
  questionHeadlineFontFamily: string;
  questionHeadlineFontSize: string;
  questionHeadlineFontWeight: string;
  questionHeadlineColor: string;
  questionDescriptionFontFamily: string;
  questionDescriptionFontWeight: string;
  questionDescriptionFontSize: string;
  questionDescriptionColor: string;
  // Add component-specific styling options here
}

type StoryProps = {QuestionType}Props & Partial<StylingOptions>;

const meta: Meta<StoryProps> = {
  title: "UI-package/Elements/{QuestionType}",
  component: {QuestionType},
  parameters: {
    layout: "centered",
    docs: {
      description: {
        component: "A complete {question type} question element...",
      },
    },
  },
  tags: ["autodocs"],
  argTypes: {
    headline: {
      control: "text",
      description: "The main question text",
      table: { category: "Content" },
    },
    description: {
      control: "text",
      description: "Optional description or subheader text",
      table: { category: "Content" },
    },
    value: {
      control: "object",
      description: "Current value",
      table: { category: "State" },
    },
    required: {
      control: "boolean",
      description: "Whether the field is required",
      table: { category: "Validation" },
    },
    errorMessage: {
      control: "text",
      description: "Error message to display",
      table: { category: "Validation" },
    },
    dir: {
      control: { type: "select" },
      options: ["ltr", "rtl", "auto"],
      description: "Text direction for RTL support",
      table: { category: "Layout" },
    },
    disabled: {
      control: "boolean",
      description: "Whether the controls are disabled",
      table: { category: "State" },
    },
    onChange: {
      action: "changed",
      table: { category: "Events" },
    },
    // Add question-specific argTypes here
  },
};

export default meta;
type Story = StoryObj<StoryProps>;

// Decorator to apply CSS variables from story args
const withCSSVariables: Decorator<StoryProps> = (Story, context) => {
  const args = context.args as StoryProps;
  const {
    questionHeadlineFontFamily,
    questionHeadlineFontSize,
    questionHeadlineFontWeight,
    questionHeadlineColor,
    questionDescriptionFontFamily,
    questionDescriptionFontSize,
    questionDescriptionFontWeight,
    questionDescriptionColor,
    // Extract component-specific styling options
  } = args;

  const cssVarStyle: React.CSSProperties & Record<string, string | undefined> = {
    "--fb-question-headline-font-family": questionHeadlineFontFamily,
    "--fb-question-headline-font-size": questionHeadlineFontSize,
    "--fb-question-headline-font-weight": questionHeadlineFontWeight,
    "--fb-question-headline-color": questionHeadlineColor,
    "--fb-question-description-font-family": questionDescriptionFontFamily,
    "--fb-question-description-font-size": questionDescriptionFontSize,
    "--fb-question-description-font-weight": questionDescriptionFontWeight,
    "--fb-question-description-color": questionDescriptionColor,
    // Add component-specific CSS variables
  };

  return (
    <div style={cssVarStyle} className="w-[600px]">
      <Story />
    </div>
  );
};

export const StylingPlayground: Story = {
  args: {
    headline: "Example question?",
    description: "Example description",
    // Default styling values
    questionHeadlineFontFamily: "system-ui, sans-serif",
    questionHeadlineFontSize: "1.125rem",
    questionHeadlineFontWeight: "600",
    questionHeadlineColor: "#1e293b",
    questionDescriptionFontFamily: "system-ui, sans-serif",
    questionDescriptionFontSize: "0.875rem",
    questionDescriptionFontWeight: "400",
    questionDescriptionColor: "#64748b",
    // Add component-specific default values
  },
  argTypes: {
    // Question styling argTypes
    questionHeadlineFontFamily: {
      control: "text",
      table: { category: "Question Styling" },
    },
    questionHeadlineFontSize: {
      control: "text",
      table: { category: "Question Styling" },
    },
    questionHeadlineFontWeight: {
      control: "text",
      table: { category: "Question Styling" },
    },
    questionHeadlineColor: {
      control: "color",
      table: { category: "Question Styling" },
    },
    questionDescriptionFontFamily: {
      control: "text",
      table: { category: "Question Styling" },
    },
    questionDescriptionFontSize: {
      control: "text",
      table: { category: "Question Styling" },
    },
    questionDescriptionFontWeight: {
      control: "text",
      table: { category: "Question Styling" },
    },
    questionDescriptionColor: {
      control: "color",
      table: { category: "Question Styling" },
    },
    // Add component-specific argTypes
  },
  decorators: [withCSSVariables],
};

export const Default: Story = {
  args: {
    headline: "Example question?",
    // Add default props
  },
};

export const WithDescription: Story = {
  args: {
    headline: "Example question?",
    description: "Example description text",
  },
};

export const Required: Story = {
  args: {
    headline: "Example question?",
    required: true,
  },
};

export const WithError: Story = {
  args: {
    headline: "Example question?",
    errorMessage: "This field is required",
    required: true,
  },
};

export const Disabled: Story = {
  args: {
    headline: "Example question?",
    disabled: true,
  },
};

export const RTL: Story = {
  args: {
    headline: "مثال على السؤال؟",
    description: "مثال على الوصف",
    // Add RTL-specific props
  },
};
```
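To illustrate the question-specific placeholders in the stories file, a hypothetical rating element with a configurable scale size might extend the args and argTypes like this; `scaleMax` is an assumed prop used only for illustration:

```typescript
// Hypothetical question-specific additions for a "rating" element's stories.
const ratingArgs = {
  headline: "How satisfied are you?",
  scaleMax: 5, // assumed question-specific prop
};

const ratingArgTypes = {
  scaleMax: {
    control: "number",
    description: "Number of steps in the rating scale (assumed prop)",
    table: { category: "Content" },
  },
};

export { ratingArgs, ratingArgTypes };
```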
3. **Add CSS variables** to `packages/survey-ui/src/styles/globals.css` if needed:

```css
/* Component-specific CSS variables */
--fb-{component}-{property}: {default-value};
```
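As a hedged illustration of the naming convention, a hypothetical `rating` element with a configurable icon color might wire its variable like this; the variable name, default value, and consumption style are assumptions, not existing entries in `globals.css`:

```typescript
import * as React from "react";

// Hypothetical variable for a "rating" element, following the --fb-{component}-{property} pattern.
const cssVarStyle: React.CSSProperties & Record<string, string | undefined> = {
  "--fb-rating-icon-color": "#f59e0b", // assumed name and default, not an existing variable
};

// One possible way for the component to consume it (inline style with a fallback value).
const iconStyle: React.CSSProperties = {
  color: "var(--fb-rating-icon-color, #f59e0b)",
};

export { cssVarStyle, iconStyle };
```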
4. **Export from** `packages/survey-ui/src/index.ts`:

```typescript
export { {QuestionType}, type {QuestionType}Props } from "./elements/{question-type}";
```
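Once exported, consumers can import the element from the package entry point. A minimal sketch, assuming the package is published as `@formbricks/survey-ui` (inferred from the directory name, so the exact name may differ):

```typescript
// Hypothetical consumer-side import for a "Rating" element.
import { Rating, type RatingProps } from "@formbricks/survey-ui";
```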
## Key Requirements

- ✅ Always use `ElementHeader` component for headline/description
- ✅ Always use `useTextDirection` hook for RTL support
- ✅ Always handle undefined/null values safely (e.g., `Array.isArray(value) ? value : []`; see the sketch after this list)
- ✅ Always include error message display if applicable
- ✅ Always support disabled state if applicable
- ✅ Always add JSDoc comments to props interface
- ✅ Always create Storybook stories with styling playground
- ✅ Always export types from component file
- ✅ Always add to index.ts exports
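To make the undefined/null requirement concrete, here is a minimal sketch for an element whose value is a string array (e.g., a multi-select); the helper name and toggle logic are illustrative, not part of the existing components:

```typescript
// Illustrative only: normalize a possibly-missing array value before rendering.
function toSelectedValues(value: string[] | undefined | null): string[] {
  return Array.isArray(value) ? value : [];
}

// A toggle handler can then always work on the normalized array.
function toggleOption(
  current: string[] | undefined,
  option: string,
  onChange: (value: string[]) => void
): void {
  const selected = toSelectedValues(current);
  onChange(
    selected.includes(option) ? selected.filter((o) => o !== option) : [...selected, option]
  );
}

export { toSelectedValues, toggleOption };
```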
## Examples

- `open-text.tsx` - Text input/textarea question (string value)
- `multi-select.tsx` - Multiple checkbox selection (string[] value)

## Checklist

When creating a new question element, verify:

- [ ] Component file created with proper structure
- [ ] Props interface with JSDoc comments for all props
- [ ] Uses `ElementHeader` component (don't duplicate header logic)
- [ ] Uses `useTextDirection` hook for RTL support
- [ ] Handles undefined/null values safely
- [ ] Storybook file created with styling playground
- [ ] Includes common stories: Default, WithDescription, Required, WithError, Disabled, RTL
- [ ] CSS variables added to `globals.css` if component needs custom styling
- [ ] Exported from `index.ts` with types
- [ ] TypeScript types properly exported
- [ ] Error message display included if applicable
- [ ] Disabled state supported if applicable
.env.example (76 changed lines)

@@ -9,12 +9,8 @@
WEBAPP_URL=http://localhost:3000

# Required for next-auth. Should be the same as WEBAPP_URL
# If your pplication uses a custom base path, specify the route to the API endpoint in full, e.g. NEXTAUTH_URL=https://example.com/custom-route/api/auth
NEXTAUTH_URL=http://localhost:3000

# Can be used to deploy the application under a sub-path of a domain. This can only be set at build time
# BASE_PATH=

# Encryption keys
# Please set both for now, we will change this in the future

@@ -29,9 +25,6 @@ NEXTAUTH_SECRET=
# You can use: `openssl rand -hex 32` to generate a secure one
CRON_SECRET=

# Set the minimum log level(debug, info, warn, error, fatal)
LOG_LEVEL=info

##############
# DATABASE #
##############

@@ -46,7 +39,6 @@ DATABASE_URL='postgresql://postgres:postgres@localhost:5432/formbricks?schema=pu
# See optional configurations below if you want to disable these features.

MAIL_FROM=noreply@example.com
MAIL_FROM_NAME=Formbricks
SMTP_HOST=localhost
SMTP_PORT=1025
# Enable SMTP_SECURE_ENABLED for TLS (port 465)

@@ -66,6 +58,9 @@ SMTP_PASSWORD=smtpPassword

# Uncomment the variables you would like to use and customize the values.

# Custom local storage path for file uploads
#UPLOADS_DIR=

##############
# S3 STORAGE #
##############

@@ -81,9 +76,6 @@ S3_ENDPOINT_URL=
# Force path style for S3 compatible storage (0 for disabled, 1 for enabled)
S3_FORCE_PATH_STYLE=0

# Set this URL to add a public domain for all your client facing routes(default is WEBAPP_URL)
# PUBLIC_URL=https://survey.example.com

#####################
# Disable Features #
#####################

@@ -94,13 +86,16 @@ EMAIL_VERIFICATION_DISABLED=1
# Password Reset. If you enable Password Reset functionality you have to setup SMTP-Settings, too.
PASSWORD_RESET_DISABLED=1

# Signup. Disable the ability for new users to create an account.
# Note: This variable is only available to the SaaS setup of Formbricks Cloud. Signup is disable by default for self-hosting.
# SIGNUP_DISABLED=1

# Email login. Disable the ability for users to login with email.
# EMAIL_AUTH_DISABLED=1

# Organization Invite. Disable the ability for invited users to create an account.
# INVITE_DISABLED=1

##########
# Other #
##########

@@ -112,13 +107,9 @@ IMPRINT_URL=
IMPRINT_ADDRESS=

# Configure Turnstile in signup flow
# TURNSTILE_SITE_KEY=
# NEXT_PUBLIC_TURNSTILE_SITE_KEY=
# TURNSTILE_SECRET_KEY=

# Google reCAPTCHA v3 keys
RECAPTCHA_SITE_KEY=
RECAPTCHA_SECRET_KEY=

# Configure Github Login
GITHUB_ID=
GITHUB_SECRET=

@@ -153,6 +144,11 @@ NOTION_OAUTH_CLIENT_SECRET=
STRIPE_SECRET_KEY=
STRIPE_WEBHOOK_SECRET=

# Configure Formbricks usage within Formbricks
NEXT_PUBLIC_FORMBRICKS_API_HOST=
NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID=
NEXT_PUBLIC_FORMBRICKS_ONBOARDING_SURVEY_ID=

# Oauth credentials for Google sheet integration
GOOGLE_SHEETS_CLIENT_ID=
GOOGLE_SHEETS_CLIENT_SECRET=

@@ -168,14 +164,11 @@ SLACK_CLIENT_SECRET=
# Enterprise License Key
ENTERPRISE_LICENSE_KEY=

# Internal Environment (production, staging) - used for internal staging environment
# ENVIRONMENT=production

# Automatically assign new users to a specific organization and role within that organization
# Insert an existing organization id or generate a valid CUID for a new one at https://www.getuniqueid.com/cuid (e.g. cjld2cjxh0000qzrmn831i7rn)
# (Role Management is an Enterprise feature)
# AUTH_SSO_DEFAULT_TEAM_ID=
# AUTH_SKIP_INVITE_FOR_SSO=
# DEFAULT_ORGANIZATION_ID=
# DEFAULT_ORGANIZATION_ROLE=owner

# Send new users to Brevo
# BREVO_API_KEY=

@@ -191,38 +184,19 @@ ENTERPRISE_LICENSE_KEY=
UNSPLASH_ACCESS_KEY=

# The below is used for Next Caching (uses In-Memory from Next Cache if not provided)
REDIS_URL=redis://localhost:6379
# REDIS_URL=redis://localhost:6379

# The below is used for Rate Limiting (uses In-Memory LRU Cache if not provided) (You can use a service like Webdis for this)
# REDIS_HTTP_URL:

# Chatwoot
# CHATWOOT_BASE_URL=
# CHATWOOT_WEBSITE_TOKEN=
# Disable custom cache handler if necessary (e.g. if deployed on Vercel)
# CUSTOM_CACHE_DISABLED=1

# Enable Prometheus metrics
# PROMETHEUS_ENABLED=
# PROMETHEUS_EXPORTER_PORT=
# Azure AI settings
# AI_AZURE_RESSOURCE_NAME=
# AI_AZURE_API_KEY=
# AI_AZURE_EMBEDDINGS_DEPLOYMENT_ID=
# AI_AZURE_LLM_DEPLOYMENT_ID=

# The SENTRY_DSN is used for error tracking and performance monitoring with Sentry.
# SENTRY_DSN=
# The SENTRY_AUTH_TOKEN variable is picked up by the Sentry Build Plugin.
# It's used automatically by Sentry during the build for authentication when uploading source maps.
# SENTRY_AUTH_TOKEN=
# The SENTRY_ENVIRONMENT is the environment which the error will belong to in the Sentry dashboard
# SENTRY_ENVIRONMENT=

# Configure the minimum role for user management from UI(owner, manager, disabled)
# USER_MANAGEMENT_MINIMUM_ROLE="manager"

# Configure the maximum age for the session in seconds. Default is 86400 (24 hours)
# SESSION_MAX_AGE=86400

# Audit logs options. Default 0.
# AUDIT_LOG_ENABLED=0
# If the ip should be added in the log or not. Default 0
# AUDIT_LOG_GET_USER_IP=0

# Lingo.dev API key for translation generation
LINGODOTDEV_API_KEY=your_api_key_here
# NEXT_PUBLIC_INTERCOM_APP_ID=
# INTERCOM_SECRET_KEY=
@@ -1,13 +0,0 @@
module.exports = {
  root: true,
  ignorePatterns: ["node_modules/", "dist/", "coverage/"],
  overrides: [
    {
      files: ["packages/cache/**/*.{ts,js}"],
      extends: ["@formbricks/eslint-config/library.js"],
      parserOptions: {
        project: "./packages/cache/tsconfig.json",
      },
    },
  ],
};
.github/ISSUE_TEMPLATE/bug_report.yml (2 changed lines)

@@ -1,8 +1,6 @@
name: Bug report
description: "Found a bug? Please fill out the sections below. \U0001F44D"
type: bug
projects: "formbricks/8"
labels: ["bug"]
body:
  - type: textarea
    id: issue-summary
.github/ISSUE_TEMPLATE/config.yml (2 changed lines)

@@ -1,4 +1,4 @@
- blank_issues_enabled: true
+ blank_issues_enabled: false
contact_links:
  - name: Questions
    url: https://github.com/formbricks/formbricks/discussions
.github/ISSUE_TEMPLATE/feature_request.yml (1 changed line)

@@ -1,7 +1,6 @@
name: Feature request
description: "Suggest an idea for this project \U0001F680"
type: feature
projects: "formbricks/21"
body:
  - type: textarea
    id: problem-description
.github/ISSUE_TEMPLATE/task.yml (11 changed lines, new file)

@@ -0,0 +1,11 @@
name: Task (internal)
description: "Template for creating a task. Used by the Formbricks Team only \U0001f4e5"
type: task
body:
  - type: textarea
    id: task-summary
    attributes:
      label: Task description
      description: A clear detailed-rich description of the task.
    validations:
      required: true
319
.github/actions/build-and-push-docker/action.yml
vendored
@@ -1,319 +0,0 @@
|
||||
name: Build and Push Docker Image
|
||||
description: |
|
||||
Unified Docker build and push action for both ECR and GHCR registries.
|
||||
|
||||
Supports:
|
||||
- ECR builds for Formbricks Cloud deployment
|
||||
- GHCR builds for community self-hosting
|
||||
- Automatic version resolution and tagging
|
||||
- Conditional signing and deployment tags
|
||||
|
||||
inputs:
|
||||
registry_type:
|
||||
description: "Registry type: 'ecr' or 'ghcr'"
|
||||
required: true
|
||||
|
||||
# Version input
|
||||
version:
|
||||
description: "Explicit version (SemVer only, e.g., 1.2.3). If provided, this version is used directly. If empty, version is auto-generated from branch name."
|
||||
required: false
|
||||
experimental_mode:
|
||||
description: "Enable experimental timestamped versions"
|
||||
required: false
|
||||
default: "false"
|
||||
|
||||
# ECR specific inputs
|
||||
ecr_registry:
|
||||
description: "ECR registry URL (required for ECR builds)"
|
||||
required: false
|
||||
ecr_repository:
|
||||
description: "ECR repository name (required for ECR builds)"
|
||||
required: false
|
||||
ecr_region:
|
||||
description: "ECR AWS region (required for ECR builds)"
|
||||
required: false
|
||||
aws_role_arn:
|
||||
description: "AWS role ARN for ECR authentication (required for ECR builds)"
|
||||
required: false
|
||||
|
||||
# GHCR specific inputs
|
||||
ghcr_image_name:
|
||||
description: "GHCR image name (required for GHCR builds)"
|
||||
required: false
|
||||
|
||||
# Deployment options
|
||||
deploy_production:
|
||||
description: "Tag image for production deployment"
|
||||
required: false
|
||||
default: "false"
|
||||
deploy_staging:
|
||||
description: "Tag image for staging deployment"
|
||||
required: false
|
||||
default: "false"
|
||||
is_prerelease:
|
||||
description: "Whether this is a prerelease (auto-tags for staging/production)"
|
||||
required: false
|
||||
default: "false"
|
||||
make_latest:
|
||||
description: "Whether to tag as latest/production (from GitHub release 'Set as the latest release' option)"
|
||||
required: false
|
||||
default: "false"
|
||||
|
||||
# Build options
|
||||
dockerfile:
|
||||
description: "Path to Dockerfile"
|
||||
required: false
|
||||
default: "apps/web/Dockerfile"
|
||||
context:
|
||||
description: "Build context"
|
||||
required: false
|
||||
default: "."
|
||||
|
||||
outputs:
|
||||
image_tag:
|
||||
description: "Resolved image tag used for the build"
|
||||
value: ${{ steps.version.outputs.version }}
|
||||
registry_tags:
|
||||
description: "Complete registry tags that were pushed"
|
||||
value: ${{ steps.build.outputs.tags }}
|
||||
image_digest:
|
||||
description: "Image digest from the build"
|
||||
value: ${{ steps.build.outputs.digest }}
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Validate inputs
|
||||
shell: bash
|
||||
env:
|
||||
REGISTRY_TYPE: ${{ inputs.registry_type }}
|
||||
ECR_REGISTRY: ${{ inputs.ecr_registry }}
|
||||
ECR_REPOSITORY: ${{ inputs.ecr_repository }}
|
||||
ECR_REGION: ${{ inputs.ecr_region }}
|
||||
AWS_ROLE_ARN: ${{ inputs.aws_role_arn }}
|
||||
GHCR_IMAGE_NAME: ${{ inputs.ghcr_image_name }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
if [[ "$REGISTRY_TYPE" != "ecr" && "$REGISTRY_TYPE" != "ghcr" ]]; then
|
||||
echo "ERROR: registry_type must be 'ecr' or 'ghcr', got: $REGISTRY_TYPE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$REGISTRY_TYPE" == "ecr" ]]; then
|
||||
if [[ -z "$ECR_REGISTRY" || -z "$ECR_REPOSITORY" || -z "$ECR_REGION" || -z "$AWS_ROLE_ARN" ]]; then
|
||||
echo "ERROR: ECR builds require ecr_registry, ecr_repository, ecr_region, and aws_role_arn"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$REGISTRY_TYPE" == "ghcr" ]]; then
|
||||
if [[ -z "$GHCR_IMAGE_NAME" ]]; then
|
||||
echo "ERROR: GHCR builds require ghcr_image_name"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "SUCCESS: Input validation passed for $REGISTRY_TYPE build"
|
||||
|
||||
- name: Resolve Docker version
|
||||
id: version
|
||||
uses: ./.github/actions/resolve-docker-version
|
||||
with:
|
||||
version: ${{ inputs.version }}
|
||||
current_branch: ${{ github.ref_name }}
|
||||
experimental_mode: ${{ inputs.experimental_mode }}
|
||||
|
||||
- name: Update package.json version
|
||||
uses: ./.github/actions/update-package-version
|
||||
with:
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
|
||||
- name: Configure AWS credentials (ECR only)
|
||||
if: ${{ inputs.registry_type == 'ecr' }}
|
||||
uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a # v4.2.0
|
||||
with:
|
||||
role-to-assume: ${{ inputs.aws_role_arn }}
|
||||
aws-region: ${{ inputs.ecr_region }}
|
||||
|
||||
- name: Log in to Amazon ECR (ECR only)
|
||||
if: ${{ inputs.registry_type == 'ecr' }}
|
||||
uses: aws-actions/amazon-ecr-login@062b18b96a7aff071d4dc91bc00c4c1a7945b076 # v2.0.1
|
||||
|
||||
- name: Set up Docker build tools
|
||||
uses: ./.github/actions/docker-build-setup
|
||||
with:
|
||||
registry: ${{ inputs.registry_type == 'ghcr' && 'ghcr.io' || '' }}
|
||||
setup_cosign: ${{ inputs.registry_type == 'ghcr' && 'true' || 'false' }}
|
||||
skip_login_on_pr: ${{ inputs.registry_type == 'ghcr' && 'true' || 'false' }}
|
||||
|
||||
- name: Build ECR tag list
|
||||
if: ${{ inputs.registry_type == 'ecr' }}
|
||||
id: ecr-tags
|
||||
shell: bash
|
||||
env:
|
||||
IMAGE_TAG: ${{ steps.version.outputs.version }}
|
||||
ECR_REGISTRY: ${{ inputs.ecr_registry }}
|
||||
ECR_REPOSITORY: ${{ inputs.ecr_repository }}
|
||||
DEPLOY_PRODUCTION: ${{ inputs.deploy_production }}
|
||||
DEPLOY_STAGING: ${{ inputs.deploy_staging }}
|
||||
IS_PRERELEASE: ${{ inputs.is_prerelease }}
|
||||
MAKE_LATEST: ${{ inputs.make_latest }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Start with the base image tag
|
||||
TAGS="${ECR_REGISTRY}/${ECR_REPOSITORY}:${IMAGE_TAG}"
|
||||
|
||||
# Handle automatic tagging based on release type
|
||||
if [[ "${IS_PRERELEASE}" == "true" ]]; then
|
||||
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:staging"
|
||||
echo "Adding staging tag for prerelease"
|
||||
elif [[ "${IS_PRERELEASE}" == "false" && "${MAKE_LATEST}" == "true" ]]; then
|
||||
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:production"
|
||||
echo "Adding production tag for stable release marked as latest"
|
||||
fi
|
||||
|
||||
# Handle manual deployment overrides
|
||||
if [[ "${DEPLOY_PRODUCTION}" == "true" ]]; then
|
||||
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:production"
|
||||
echo "Adding production tag (manual override)"
|
||||
fi
|
||||
if [[ "${DEPLOY_STAGING}" == "true" ]]; then
|
||||
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:staging"
|
||||
echo "Adding staging tag (manual override)"
|
||||
fi
|
||||
|
||||
echo "ECR tags generated:"
|
||||
echo -e "${TAGS}"
|
||||
|
||||
{
|
||||
echo "tags<<EOF"
|
||||
echo -e "${TAGS}"
|
||||
echo "EOF"
|
||||
} >> "${GITHUB_OUTPUT}"
|
||||
|
||||
- name: Generate additional GHCR tags for releases
|
||||
if: ${{ inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'false' && (github.event_name == 'workflow_call' || github.event_name == 'release' || github.event_name == 'workflow_dispatch') }}
|
||||
id: ghcr-extra-tags
|
||||
shell: bash
|
||||
env:
|
||||
VERSION: ${{ steps.version.outputs.version }}
|
||||
IMAGE_NAME: ${{ inputs.ghcr_image_name }}
|
||||
IS_PRERELEASE: ${{ inputs.is_prerelease }}
|
||||
MAKE_LATEST: ${{ inputs.make_latest }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Start with base version tag
|
||||
TAGS="ghcr.io/${IMAGE_NAME}:${VERSION}"
|
||||
|
||||
# For proper SemVer releases, add major.minor and major tags
|
||||
if [[ "${VERSION}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||
# Extract major and minor versions
|
||||
MAJOR=$(echo "${VERSION}" | cut -d. -f1)
|
||||
MINOR=$(echo "${VERSION}" | cut -d. -f2)
|
||||
|
||||
TAGS="${TAGS}\nghcr.io/${IMAGE_NAME}:${MAJOR}.${MINOR}"
|
||||
TAGS="${TAGS}\nghcr.io/${IMAGE_NAME}:${MAJOR}"
|
||||
|
||||
echo "Added SemVer tags: ${MAJOR}.${MINOR}, ${MAJOR}"
|
||||
fi
|
||||
|
||||
# Add latest tag for stable releases marked as latest
|
||||
if [[ "${IS_PRERELEASE}" == "false" && "${MAKE_LATEST}" == "true" ]]; then
|
||||
TAGS="${TAGS}\nghcr.io/${IMAGE_NAME}:latest"
|
||||
echo "Added latest tag for stable release marked as latest"
|
||||
fi
|
||||
|
||||
echo "Generated GHCR tags:"
|
||||
echo -e "${TAGS}"
|
||||
|
||||
# Debug: Show what will be passed to Docker build
|
||||
echo "DEBUG: Tags for Docker build step:"
|
||||
echo -e "${TAGS}"
|
||||
|
||||
{
|
||||
echo "tags<<EOF"
|
||||
echo -e "${TAGS}"
|
||||
echo "EOF"
|
||||
} >> "${GITHUB_OUTPUT}"
|
||||
|
||||
- name: Build GHCR metadata (experimental)
|
||||
if: ${{ inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'true' }}
|
||||
id: ghcr-meta-experimental
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
with:
|
||||
images: ghcr.io/${{ inputs.ghcr_image_name }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=raw,value=${{ steps.version.outputs.version }}
|
||||
|
||||
- name: Debug Docker build tags
|
||||
shell: bash
|
||||
run: |
|
||||
echo "=== DEBUG: Docker Build Configuration ==="
|
||||
echo "Registry Type: ${{ inputs.registry_type }}"
|
||||
echo "Experimental Mode: ${{ inputs.experimental_mode }}"
|
||||
echo "Event Name: ${{ github.event_name }}"
|
||||
echo "Is Prerelease: ${{ inputs.is_prerelease }}"
|
||||
echo "Make Latest: ${{ inputs.make_latest }}"
|
||||
echo "Version: ${{ steps.version.outputs.version }}"
|
||||
|
||||
if [[ "${{ inputs.registry_type }}" == "ecr" ]]; then
|
||||
echo "ECR Tags: ${{ steps.ecr-tags.outputs.tags }}"
|
||||
elif [[ "${{ inputs.experimental_mode }}" == "true" ]]; then
|
||||
echo "GHCR Experimental Tags: ${{ steps.ghcr-meta-experimental.outputs.tags }}"
|
||||
else
|
||||
echo "GHCR Extra Tags: ${{ steps.ghcr-extra-tags.outputs.tags }}"
|
||||
fi
|
||||
|
||||
- name: Build and push Docker image
|
||||
id: build
|
||||
uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
|
||||
with:
|
||||
project: tw0fqmsx3c
|
||||
token: ${{ env.DEPOT_PROJECT_TOKEN }}
|
||||
context: ${{ inputs.context }}
|
||||
file: ${{ inputs.dockerfile }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ inputs.registry_type == 'ecr' && steps.ecr-tags.outputs.tags || (inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'true' && steps.ghcr-meta-experimental.outputs.tags) || (inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'false' && steps.ghcr-extra-tags.outputs.tags) || (inputs.registry_type == 'ghcr' && format('ghcr.io/{0}:{1}', inputs.ghcr_image_name, steps.version.outputs.version)) || (inputs.registry_type == 'ecr' && format('{0}/{1}:{2}', inputs.ecr_registry, inputs.ecr_repository, steps.version.outputs.version)) }}
|
||||
labels: ${{ inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'true' && steps.ghcr-meta-experimental.outputs.labels || '' }}
|
||||
secrets: |
|
||||
database_url=${{ env.DUMMY_DATABASE_URL }}
|
||||
encryption_key=${{ env.DUMMY_ENCRYPTION_KEY }}
|
||||
redis_url=${{ env.DUMMY_REDIS_URL }}
|
||||
sentry_auth_token=${{ env.SENTRY_AUTH_TOKEN }}
|
||||
env:
|
||||
DEPOT_PROJECT_TOKEN: ${{ env.DEPOT_PROJECT_TOKEN }}
|
||||
DUMMY_DATABASE_URL: ${{ env.DUMMY_DATABASE_URL }}
|
||||
DUMMY_ENCRYPTION_KEY: ${{ env.DUMMY_ENCRYPTION_KEY }}
|
||||
DUMMY_REDIS_URL: ${{ env.DUMMY_REDIS_URL }}
|
||||
SENTRY_AUTH_TOKEN: ${{ env.SENTRY_AUTH_TOKEN }}
|
||||
|
||||
- name: Sign GHCR image (GHCR only)
|
||||
if: ${{ inputs.registry_type == 'ghcr' && (github.event_name == 'workflow_call' || github.event_name == 'release' || github.event_name == 'workflow_dispatch') }}
|
||||
shell: bash
|
||||
env:
|
||||
TAGS: ${{ inputs.experimental_mode == 'true' && steps.ghcr-meta-experimental.outputs.tags || steps.ghcr-extra-tags.outputs.tags }}
|
||||
DIGEST: ${{ steps.build.outputs.digest }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "${TAGS}" | xargs -I {} cosign sign --yes "{}@${DIGEST}"
|
||||
|
||||
- name: Output build summary
|
||||
shell: bash
|
||||
env:
|
||||
REGISTRY_TYPE: ${{ inputs.registry_type }}
|
||||
IMAGE_TAG: ${{ steps.version.outputs.version }}
|
||||
VERSION_SOURCE: ${{ steps.version.outputs.source }}
|
||||
run: |
|
||||
echo "SUCCESS: Built and pushed Docker image to $REGISTRY_TYPE"
|
||||
echo "Image Tag: $IMAGE_TAG (source: $VERSION_SOURCE)"
|
||||
if [[ "$REGISTRY_TYPE" == "ecr" ]]; then
|
||||
echo "ECR Registry: ${{ inputs.ecr_registry }}"
|
||||
echo "ECR Repository: ${{ inputs.ecr_repository }}"
|
||||
else
|
||||
echo "GHCR Image: ghcr.io/${{ inputs.ghcr_image_name }}"
|
||||
fi
|
||||
21
.github/actions/cache-build-web/action.yml
vendored
@@ -8,14 +8,6 @@ on:
|
||||
required: false
|
||||
default: "0"
|
||||
|
||||
inputs:
|
||||
turbo_token:
|
||||
description: "Turborepo token"
|
||||
required: false
|
||||
turbo_team:
|
||||
description: "Turborepo team"
|
||||
required: false
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
@@ -49,7 +41,7 @@ runs:
|
||||
if: steps.cache-build.outputs.cache-hit != 'true'
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
uses: pnpm/action-setup@v4
|
||||
if: steps.cache-build.outputs.cache-hit != 'true'
|
||||
|
||||
- name: Install dependencies
|
||||
@@ -62,18 +54,17 @@ runs:
|
||||
shell: bash
|
||||
|
||||
- name: Fill ENCRYPTION_KEY, ENTERPRISE_LICENSE_KEY and E2E_TESTING in .env
|
||||
env:
|
||||
E2E_TESTING_MODE: ${{ inputs.e2e_testing_mode }}
|
||||
run: |
|
||||
RANDOM_KEY=$(openssl rand -hex 32)
|
||||
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
|
||||
echo "E2E_TESTING=$E2E_TESTING_MODE" >> .env
|
||||
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/ENTERPRISE_LICENSE_KEY=.*/ENTERPRISE_LICENSE_KEY=${RANDOM_KEY}/" .env
|
||||
echo "E2E_TESTING=${{ inputs.e2e_testing_mode }}" >> .env
|
||||
shell: bash
|
||||
|
||||
- run: |
|
||||
pnpm build --filter=@formbricks/web...
|
||||
|
||||
if: steps.cache-build.outputs.cache-hit != 'true'
|
||||
shell: bash
|
||||
env:
|
||||
TURBO_TOKEN: ${{ inputs.turbo_token }}
|
||||
TURBO_TEAM: ${{ inputs.turbo_team }}
|
||||
|
||||
106
.github/actions/docker-build-setup/action.yml
vendored
@@ -1,106 +0,0 @@
|
||||
name: Docker Build Setup
|
||||
description: |
|
||||
Sets up common Docker build tools and authentication with security validation.
|
||||
|
||||
Security Features:
|
||||
- Registry URL validation
|
||||
- Input sanitization
|
||||
- Conditional setup based on event type
|
||||
- Post-setup verification
|
||||
|
||||
Supports Depot CLI, Cosign signing, and Docker registry authentication.
|
||||
|
||||
inputs:
|
||||
registry:
|
||||
description: "Docker registry hostname to login to (e.g., ghcr.io, registry.example.com:5000). No paths allowed."
|
||||
required: false
|
||||
default: "ghcr.io"
|
||||
setup_cosign:
|
||||
description: "Whether to install cosign for image signing"
|
||||
required: false
|
||||
default: "true"
|
||||
skip_login_on_pr:
|
||||
description: "Whether to skip registry login on pull requests"
|
||||
required: false
|
||||
default: "true"
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Validate inputs
|
||||
shell: bash
|
||||
env:
|
||||
REGISTRY: ${{ inputs.registry }}
|
||||
SETUP_COSIGN: ${{ inputs.setup_cosign }}
|
||||
SKIP_LOGIN_ON_PR: ${{ inputs.skip_login_on_pr }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Security: Validate registry input - must be hostname[:port] only, no paths
|
||||
# Allow empty registry for cases where login is handled externally (e.g., ECR)
|
||||
if [[ -n "$REGISTRY" ]]; then
|
||||
if [[ "$REGISTRY" =~ / ]]; then
|
||||
echo "ERROR: Invalid registry format: $REGISTRY"
|
||||
echo "Registry must be host[:port] with no path (e.g., 'ghcr.io' or 'registry.example.com:5000')"
|
||||
echo "Path components like 'ghcr.io/org' are not allowed as they break docker login"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate hostname with optional port format
|
||||
if [[ ! "$REGISTRY" =~ ^[a-zA-Z0-9.-]+(\:[0-9]+)?$ ]]; then
|
||||
echo "ERROR: Invalid registry hostname format: $REGISTRY"
|
||||
echo "Registry must be a valid hostname optionally with port (e.g., 'ghcr.io' or 'registry.example.com:5000')"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Validate boolean inputs
|
||||
if [[ "$SETUP_COSIGN" != "true" && "$SETUP_COSIGN" != "false" ]]; then
|
||||
echo "ERROR: setup_cosign must be 'true' or 'false', got: $SETUP_COSIGN"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$SKIP_LOGIN_ON_PR" != "true" && "$SKIP_LOGIN_ON_PR" != "false" ]]; then
|
||||
echo "ERROR: skip_login_on_pr must be 'true' or 'false', got: $SKIP_LOGIN_ON_PR"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "SUCCESS: Input validation passed"
|
||||
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
|
||||
|
||||
- name: Install cosign
|
||||
# Install cosign when requested AND when we might actually sign images
|
||||
# (i.e., non-PR contexts or when we login on PRs)
|
||||
if: ${{ inputs.setup_cosign == 'true' && (inputs.skip_login_on_pr == 'false' || github.event_name != 'pull_request') }}
|
||||
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
|
||||
|
||||
- name: Log into registry
|
||||
if: ${{ inputs.registry != '' && (inputs.skip_login_on_pr == 'false' || github.event_name != 'pull_request') }}
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ${{ inputs.registry }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ github.token }}
|
||||
|
||||
- name: Verify setup completion
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Verify Depot CLI is available
|
||||
if ! command -v depot >/dev/null 2>&1; then
|
||||
echo "ERROR: Depot CLI not found in PATH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Verify cosign if it should be installed (same conditions as install step)
|
||||
if [[ "${{ inputs.setup_cosign }}" == "true" ]] && [[ "${{ inputs.skip_login_on_pr }}" == "false" || "${{ github.event_name }}" != "pull_request" ]]; then
|
||||
if ! command -v cosign >/dev/null 2>&1; then
|
||||
echo "ERROR: Cosign not found in PATH despite being requested"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "SUCCESS: Docker build setup completed successfully"
|
||||
192
.github/actions/resolve-docker-version/action.yml
vendored
@@ -1,192 +0,0 @@
|
||||
name: Resolve Docker Version
|
||||
description: |
|
||||
Resolves and validates Docker-compatible SemVer versions for container builds with comprehensive security.
|
||||
|
||||
Security Features:
|
||||
- Command injection protection
|
||||
- Input sanitization and validation
|
||||
- Docker tag character restrictions
|
||||
- Length limits and boundary checks
|
||||
- Safe branch name handling
|
||||
|
||||
Supports multiple modes: release, manual override, branch auto-detection, and experimental timestamped versions.
|
||||
|
||||
inputs:
|
||||
version:
|
||||
description: "Explicit version (SemVer only, e.g., 1.2.3-beta). If provided, this version is used directly. If empty, version is auto-generated from branch name."
|
||||
required: false
|
||||
current_branch:
|
||||
description: "Current branch name for auto-detection"
|
||||
required: true
|
||||
experimental_mode:
|
||||
description: "Enable experimental mode with timestamp-based versions"
|
||||
required: false
|
||||
default: "false"
|
||||
|
||||
outputs:
|
||||
version:
|
||||
description: "Resolved Docker-compatible SemVer version"
|
||||
value: ${{ steps.resolve.outputs.version }}
|
||||
source:
|
||||
description: "Source of version (release|override|branch)"
|
||||
value: ${{ steps.resolve.outputs.source }}
|
||||
normalized:
|
||||
description: "Whether the version was normalized (true/false)"
|
||||
value: ${{ steps.resolve.outputs.normalized }}
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Resolve and validate Docker version
|
||||
id: resolve
|
||||
shell: bash
|
||||
env:
|
||||
EXPLICIT_VERSION: ${{ inputs.version }}
|
||||
CURRENT_BRANCH: ${{ inputs.current_branch }}
|
||||
EXPERIMENTAL_MODE: ${{ inputs.experimental_mode }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Function to validate SemVer format (Docker-compatible, no '+' build metadata)
|
||||
validate_semver() {
|
||||
local version="$1"
|
||||
local context="$2"
|
||||
|
||||
if [[ ! "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]]; then
|
||||
echo "ERROR: Invalid $context format. Must be semver without build metadata (e.g., 1.2.3, 1.2.3-alpha)"
|
||||
echo "Provided: $version"
|
||||
echo "Note: Docker tags cannot contain '+' characters. Use prerelease identifiers instead."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to generate branch-based version
|
||||
generate_branch_version() {
|
||||
local branch="$1"
|
||||
local use_timestamp="${2:-true}"
|
||||
local timestamp
|
||||
|
||||
if [[ "$use_timestamp" == "true" ]]; then
|
||||
timestamp=$(date +%s)
|
||||
else
|
||||
timestamp=""
|
||||
fi
|
||||
|
||||
# Sanitize branch name for Docker compatibility
|
||||
local sanitized_branch=$(echo "$branch" | sed 's/[^a-zA-Z0-9.-]/-/g' | sed 's/--*/-/g' | sed 's/^-\|-$//g')
|
||||
|
||||
# Additional safety: truncate if too long (reserve space for prefix and timestamp)
|
||||
if (( ${#sanitized_branch} > 80 )); then
|
||||
sanitized_branch="${sanitized_branch:0:80}"
|
||||
echo "INFO: Branch name truncated for Docker compatibility" >&2
|
||||
fi
|
||||
local version
|
||||
|
||||
# Generate version based on branch name (unified approach)
|
||||
# All branches get alpha versions with sanitized branch name
|
||||
if [[ -n "$timestamp" ]]; then
|
||||
version="0.0.0-alpha-$sanitized_branch-$timestamp"
|
||||
echo "INFO: Branch '$branch' detected - alpha version: $version" >&2
|
||||
else
|
||||
version="0.0.0-alpha-$sanitized_branch"
|
||||
echo "INFO: Branch '$branch' detected - alpha version: $version" >&2
|
||||
fi
|
||||
|
||||
echo "$version"
|
||||
}
|
||||
|
||||
|
||||
# Input validation and sanitization
|
||||
if [[ -z "$CURRENT_BRANCH" ]]; then
|
||||
echo "ERROR: current_branch input is required"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Security: Validate inputs to prevent command injection
|
||||
# Use grep to check for dangerous characters (more reliable than bash regex)
|
||||
validate_input() {
|
||||
local input="$1"
|
||||
local name="$2"
|
||||
|
||||
# Check for dangerous characters using grep
|
||||
if echo "$input" | grep -q '[;|&`$(){}\\[:space:]]'; then
|
||||
echo "ERROR: $name contains potentially dangerous characters: $input"
|
||||
echo "Input should only contain letters, numbers, hyphens, underscores, dots, and forward slashes"
|
||||
return 1
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
# Validate current branch
|
||||
if ! validate_input "$CURRENT_BRANCH" "Branch name"; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate explicit version if provided
|
||||
if [[ -n "$EXPLICIT_VERSION" ]] && ! validate_input "$EXPLICIT_VERSION" "Explicit version"; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Main resolution logic (ultra-simplified)
|
||||
NORMALIZED="false"
|
||||
|
||||
if [[ -n "$EXPLICIT_VERSION" ]]; then
|
||||
# Use provided explicit version (from either workflow_call or manual input)
|
||||
validate_semver "$EXPLICIT_VERSION" "explicit version"
|
||||
|
||||
# Normalize to lowercase for Docker/ECR compatibility
|
||||
RESOLVED_VERSION="${EXPLICIT_VERSION,,}"
|
||||
if [[ "$EXPLICIT_VERSION" != "$RESOLVED_VERSION" ]]; then
|
||||
NORMALIZED="true"
|
||||
echo "INFO: Original version contained uppercase characters, normalized: $EXPLICIT_VERSION -> $RESOLVED_VERSION"
|
||||
fi
|
||||
|
||||
SOURCE="explicit"
|
||||
echo "INFO: Using explicit version: $RESOLVED_VERSION"
|
||||
|
||||
else
|
||||
# Auto-generate version from branch name
|
||||
if [[ "$EXPERIMENTAL_MODE" == "true" ]]; then
|
||||
# Use timestamped version generation
|
||||
echo "INFO: Experimental mode: generating timestamped version from branch: $CURRENT_BRANCH"
|
||||
RESOLVED_VERSION=$(generate_branch_version "$CURRENT_BRANCH" "true")
|
||||
SOURCE="experimental"
|
||||
else
|
||||
# Standard branch version (no timestamp)
|
||||
echo "INFO: Auto-detecting version from branch: $CURRENT_BRANCH"
|
||||
RESOLVED_VERSION=$(generate_branch_version "$CURRENT_BRANCH" "false")
|
||||
SOURCE="branch"
|
||||
fi
|
||||
echo "Generated version: $RESOLVED_VERSION"
|
||||
fi
|
||||
|
||||
# Final validation - ensure result is valid Docker tag
|
||||
if [[ -z "$RESOLVED_VERSION" ]]; then
|
||||
echo "ERROR: Failed to resolve version"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if (( ${#RESOLVED_VERSION} > 128 )); then
|
||||
echo "ERROR: Version must be at most 128 characters (Docker limitation)"
|
||||
echo "Generated version: $RESOLVED_VERSION (${#RESOLVED_VERSION} chars)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ ! "$RESOLVED_VERSION" =~ ^[a-z0-9._-]+$ ]]; then
|
||||
echo "ERROR: Version contains invalid characters for Docker tags"
|
||||
echo "Version: $RESOLVED_VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$RESOLVED_VERSION" =~ ^[.-] || "$RESOLVED_VERSION" =~ [.-]$ ]]; then
|
||||
echo "ERROR: Version must not start or end with '.' or '-'"
|
||||
echo "Version: $RESOLVED_VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Output results
|
||||
echo "SUCCESS: Resolved Docker version: $RESOLVED_VERSION (source: $SOURCE)"
|
||||
echo "version=$RESOLVED_VERSION" >> $GITHUB_OUTPUT
|
||||
echo "source=$SOURCE" >> $GITHUB_OUTPUT
|
||||
echo "normalized=$NORMALIZED" >> $GITHUB_OUTPUT
|
||||
160
.github/actions/update-package-version/action.yml
vendored
@@ -1,160 +0,0 @@
|
||||
name: Update Package Version
|
||||
description: |
|
||||
Safely updates package.json version with comprehensive validation and atomic operations.
|
||||
|
||||
Security Features:
|
||||
- Path traversal protection
|
||||
- SemVer validation with length limits
|
||||
- Atomic file operations with backup/recovery
|
||||
- JSON validation before applying changes
|
||||
|
||||
This action is designed to be secure by default and prevent common attack vectors.
|
||||
|
||||
inputs:
|
||||
version:
|
||||
description: "Version to set in package.json (must be valid SemVer)"
|
||||
required: true
|
||||
package_path:
|
||||
description: "Path to package.json file"
|
||||
required: false
|
||||
default: "./apps/web/package.json"
|
||||
|
||||
outputs:
|
||||
updated_version:
|
||||
description: "The version that was actually set in package.json"
|
||||
value: ${{ steps.update.outputs.updated_version }}
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Update and verify package.json version
|
||||
id: update
|
||||
shell: bash
|
||||
env:
|
||||
VERSION: ${{ inputs.version }}
|
||||
PACKAGE_PATH: ${{ inputs.package_path }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Validate inputs
|
||||
if [[ -z "$VERSION" ]]; then
|
||||
echo "ERROR: version input is required"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Security: Validate package_path to prevent path traversal attacks
|
||||
# Only allow paths within the workspace and must end with package.json
|
||||
if [[ "$PACKAGE_PATH" =~ \.\./|^/|^~ ]]; then
|
||||
echo "ERROR: Invalid package path - path traversal detected: $PACKAGE_PATH"
|
||||
echo "Package path must be relative to workspace root and cannot contain '../', start with '/', or '~'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ ! "$PACKAGE_PATH" =~ package\.json$ ]]; then
|
||||
echo "ERROR: Package path must end with 'package.json': $PACKAGE_PATH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Resolve to absolute path within workspace for additional security
|
||||
WORKSPACE_ROOT="${GITHUB_WORKSPACE:-$(pwd)}"
|
||||
|
||||
# Use realpath to resolve both paths and handle symlinks properly
|
||||
WORKSPACE_ROOT=$(realpath "$WORKSPACE_ROOT")
|
||||
RESOLVED_PATH=$(realpath "${WORKSPACE_ROOT}/${PACKAGE_PATH}")
|
||||
|
||||
# Ensure WORKSPACE_ROOT has a trailing slash for proper prefix matching
|
||||
WORKSPACE_ROOT="${WORKSPACE_ROOT}/"
|
||||
|
||||
# Use shell string matching to ensure RESOLVED_PATH is within workspace
|
||||
# This is more secure than regex and handles edge cases properly
|
||||
if [[ "$RESOLVED_PATH" != "$WORKSPACE_ROOT"* ]]; then
|
||||
echo "ERROR: Resolved path is outside workspace: $RESOLVED_PATH"
|
||||
echo "Workspace root: $WORKSPACE_ROOT"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ ! -f "$RESOLVED_PATH" ]]; then
|
||||
echo "ERROR: package.json not found at: $RESOLVED_PATH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Use resolved path for operations
|
||||
PACKAGE_PATH="$RESOLVED_PATH"
|
||||
|
||||
# Validate SemVer format with additional security checks
|
||||
if [[ ${#VERSION} -gt 128 ]]; then
|
||||
echo "ERROR: Version string too long (${#VERSION} chars, max 128): $VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ ! "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]]; then
|
||||
echo "ERROR: Invalid SemVer format: $VERSION"
|
||||
echo "Expected format: MAJOR.MINOR.PATCH[-PRERELEASE]"
|
||||
echo "Only alphanumeric characters, dots, and hyphens allowed in prerelease"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Additional validation: Check for reasonable version component sizes
|
||||
# Extract base version (MAJOR.MINOR.PATCH) without prerelease/build metadata
|
||||
if [[ "$VERSION" =~ ^([0-9]+\.[0-9]+\.[0-9]+) ]]; then
|
||||
BASE_VERSION="${BASH_REMATCH[1]}"
|
||||
else
|
||||
echo "ERROR: Could not extract base version from: $VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Split version components safely
|
||||
IFS='.' read -ra VERSION_PARTS <<< "$BASE_VERSION"
|
||||
|
||||
# Validate component sizes (should have exactly 3 parts due to regex above)
|
||||
if (( ${VERSION_PARTS[0]} > 999 || ${VERSION_PARTS[1]} > 999 || ${VERSION_PARTS[2]} > 999 )); then
|
||||
echo "ERROR: Version components too large (max 999 each): $VERSION"
|
||||
echo "Components: ${VERSION_PARTS[0]}.${VERSION_PARTS[1]}.${VERSION_PARTS[2]}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Updating package.json version to: $VERSION"
|
||||
|
||||
# Create backup for atomic operations
|
||||
BACKUP_PATH="${PACKAGE_PATH}.backup.$$"
|
||||
cp "$PACKAGE_PATH" "$BACKUP_PATH"
|
||||
|
||||
# Use jq to safely update the version field with error handling
|
||||
if ! jq --arg version "$VERSION" '.version = $version' "$PACKAGE_PATH" > "${PACKAGE_PATH}.tmp"; then
|
||||
echo "ERROR: jq failed to process package.json"
|
||||
rm -f "${PACKAGE_PATH}.tmp" "$BACKUP_PATH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate the generated JSON before applying changes
|
||||
if ! jq empty "${PACKAGE_PATH}.tmp" 2>/dev/null; then
|
||||
echo "ERROR: Generated invalid JSON"
|
||||
rm -f "${PACKAGE_PATH}.tmp" "$BACKUP_PATH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Atomic move operation
|
||||
if ! mv "${PACKAGE_PATH}.tmp" "$PACKAGE_PATH"; then
|
||||
echo "ERROR: Failed to update package.json"
|
||||
# Restore backup
|
||||
mv "$BACKUP_PATH" "$PACKAGE_PATH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Verify the update was successful
|
||||
UPDATED_VERSION=$(jq -r '.version' "$PACKAGE_PATH" 2>/dev/null)
|
||||
|
||||
if [[ "$UPDATED_VERSION" != "$VERSION" ]]; then
|
||||
echo "ERROR: Version update failed!"
|
||||
echo "Expected: $VERSION"
|
||||
echo "Actual: $UPDATED_VERSION"
|
||||
# Restore backup
|
||||
mv "$BACKUP_PATH" "$PACKAGE_PATH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Clean up backup on success
|
||||
rm -f "$BACKUP_PATH"
|
||||
|
||||
echo "SUCCESS: Updated package.json version to: $UPDATED_VERSION"
|
||||
echo "updated_version=$UPDATED_VERSION" >> $GITHUB_OUTPUT
|
||||
96
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,96 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: github-actions
|
||||
directory: /
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /apps/demo-react-native
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /apps/demo
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /apps/storybook
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: docker
|
||||
directory: /apps/web
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /apps/web
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /packages/api
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /packages/config-eslint
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /packages/config-prettier
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /packages/config-typescript
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /packages/database
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /packages/js-core
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /packages/js
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /packages/lib
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /packages/react-native
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /packages/surveys
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /packages/types
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /packages/vite-plugins
|
||||
schedule:
|
||||
interval: daily
|
||||
82
.github/workflows/apply-issue-labels-to-pr.yml
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
name: "Apply issue labels to PR"
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- opened
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
label_on_pr:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: none
|
||||
issues: read
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Apply labels from linked issue to PR
|
||||
uses: actions/github-script@211cb3fefb35a799baa5156f9321bb774fe56294 # v5.2.0
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
async function getLinkedIssues(owner, repo, prNumber) {
|
||||
const query = `query GetLinkedIssues($owner: String!, $repo: String!, $prNumber: Int!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
pullRequest(number: $prNumber) {
|
||||
closingIssuesReferences(first: 10) {
|
||||
nodes {
|
||||
number
|
||||
labels(first: 10) {
|
||||
nodes {
|
||||
name
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}`;
|
||||
|
||||
const variables = {
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
prNumber: prNumber,
|
||||
};
|
||||
|
||||
const result = await github.graphql(query, variables);
|
||||
return result.repository.pullRequest.closingIssuesReferences.nodes;
|
||||
}
|
||||
|
||||
const pr = context.payload.pull_request;
|
||||
const linkedIssues = await getLinkedIssues(
|
||||
context.repo.owner,
|
||||
context.repo.repo,
|
||||
pr.number
|
||||
);
|
||||
|
||||
const labelsToAdd = new Set();
|
||||
for (const issue of linkedIssues) {
|
||||
if (issue.labels && issue.labels.nodes) {
|
||||
for (const label of issue.labels.nodes) {
|
||||
labelsToAdd.add(label.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (labelsToAdd.size) {
|
||||
await github.rest.issues.addLabels({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: pr.number,
|
||||
labels: Array.from(labelsToAdd),
|
||||
});
|
||||
}
|
||||
94
.github/workflows/build-and-push-ecr.yml
vendored
@@ -1,94 +0,0 @@
|
||||
name: Build Cloud Deployment Images
|
||||
|
||||
# This workflow builds Formbricks Docker images for ECR deployment:
|
||||
# - workflow_call: Used by releases with explicit SemVer versions
|
||||
# - workflow_dispatch: Auto-detects version from current branch or uses override
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version_override:
|
||||
description: "Override version (SemVer only, e.g., 1.2.3). Leave empty to auto-detect from branch."
|
||||
required: false
|
||||
type: string
|
||||
deploy_production:
|
||||
description: "Tag image for production deployment"
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
deploy_staging:
|
||||
description: "Tag image for staging deployment"
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
workflow_call:
|
||||
inputs:
|
||||
image_tag:
|
||||
description: "Image tag to push (required for workflow_call)"
|
||||
required: true
|
||||
type: string
|
||||
IS_PRERELEASE:
|
||||
description: "Whether this is a prerelease (auto-tags for staging/production)"
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
MAKE_LATEST:
|
||||
description: "Whether to tag for production (from GitHub release 'Set as the latest release' option)"
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
outputs:
|
||||
IMAGE_TAG:
|
||||
description: "Normalized image tag used for the build"
|
||||
value: ${{ jobs.build-and-push.outputs.IMAGE_TAG }}
|
||||
TAGS:
|
||||
description: "Newline-separated list of ECR tags pushed"
|
||||
value: ${{ jobs.build-and-push.outputs.TAGS }}
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
|
||||
env:
|
||||
ECR_REGION: ${{ vars.ECR_REGION }}
|
||||
# ECR settings are sourced from repository/environment variables for portability across envs/forks
|
||||
ECR_REGISTRY: ${{ vars.ECR_REGISTRY }}
|
||||
ECR_REPOSITORY: ${{ vars.ECR_REPOSITORY }}
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
name: Build and Push
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
outputs:
|
||||
IMAGE_TAG: ${{ steps.build.outputs.image_tag }}
|
||||
TAGS: ${{ steps.build.outputs.registry_tags }}
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Build and push cloud deployment image
|
||||
id: build
|
||||
uses: ./.github/actions/build-and-push-docker
|
||||
with:
|
||||
registry_type: "ecr"
|
||||
ecr_registry: ${{ env.ECR_REGISTRY }}
|
||||
ecr_repository: ${{ env.ECR_REPOSITORY }}
|
||||
ecr_region: ${{ env.ECR_REGION }}
|
||||
aws_role_arn: ${{ secrets.AWS_ECR_PUSH_ROLE_ARN }}
|
||||
version: ${{ inputs.version_override || inputs.image_tag }}
|
||||
deploy_production: ${{ inputs.deploy_production }}
|
||||
deploy_staging: ${{ inputs.deploy_staging }}
|
||||
is_prerelease: ${{ inputs.IS_PRERELEASE }}
|
||||
make_latest: ${{ inputs.MAKE_LATEST }}
|
||||
env:
|
||||
DEPOT_PROJECT_TOKEN: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
DUMMY_DATABASE_URL: ${{ secrets.DUMMY_DATABASE_URL }}
|
||||
DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
DUMMY_REDIS_URL: ${{ secrets.DUMMY_REDIS_URL }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
.github/workflows/build-web.yml (8 lines changed, vendored)
@@ -4,7 +4,7 @@ on:

permissions:
  contents: read

jobs:
  build:
    name: Build Formbricks-web
@@ -13,11 +13,11 @@ jobs:

    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
        with:
          egress-policy: audit

      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
      - uses: ./.github/actions/dangerous-git-checkout

      - name: Build & Cache Web Binaries
@@ -25,5 +25,3 @@ jobs:
        id: cache-build-web
        with:
          e2e_testing_mode: "0"
          turbo_token: ${{ secrets.TURBO_TOKEN }}
          turbo_team: ${{ vars.TURBO_TEAM }}
.github/workflows/chromatic.yml (32 lines changed, vendored)
@@ -6,19 +6,13 @@ on:
|
||||
- main
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
chromatic:
|
||||
name: Run Chromatic
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
@@ -26,34 +20,16 @@ jobs:
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
- uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
|
||||
- name: Get pnpm store directory
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
|
||||
|
||||
- name: Setup pnpm cache
|
||||
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
||||
with:
|
||||
path: ${{ env.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --config.platform=linux --config.architecture=x64
|
||||
|
||||
- name: Run Chromatic
|
||||
uses: chromaui/action@4c20b95e9d3209ecfdf9cd6aace6bbde71ba1694 # v13.3.4
|
||||
uses: chromaui/action@c93e0bc3a63aa176e14a75b61a31847cbfdd341c # latest
|
||||
with:
|
||||
# ⚠️ Make sure to configure a `CHROMATIC_PROJECT_TOKEN` repository secret
|
||||
projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }}
|
||||
workingDir: apps/storybook
|
||||
zip: true
|
||||
|
||||
.github/workflows/codeql.yml (78 lines changed, vendored, Normal file)
@@ -0,0 +1,78 @@
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["main"]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: ["main"]
|
||||
schedule:
|
||||
- cron: "0 0 * * 1"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: ["javascript", "typescript"]
|
||||
# CodeQL supports [ $supported-codeql-languages ]
|
||||
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v3.28.10
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v3.28.10
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
|
||||
# If the Autobuild fails above, remove it and uncomment the following three lines.
|
||||
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
|
||||
|
||||
# - run: |
|
||||
# echo "Run, Build Application using script"
|
||||
# ./location_of_script_within_repo/buildscript.sh
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v3.28.10
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
.github/workflows/cron-surveyStatusUpdate.yml (33 lines changed, vendored, Normal file)
@@ -0,0 +1,33 @@
name: Cron - Survey status update

on:
  workflow_dispatch:
  # "Scheduled workflows run on the latest commit on the default or base branch."
  # — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
  schedule:
    # Runs "At 00:00." (see https://crontab.guru)
    - cron: "0 0 * * *"

permissions:
  contents: read

jobs:
  cron-weeklySummary:
    env:
      APP_URL: ${{ secrets.APP_URL }}
      CRON_SECRET: ${{ secrets.CRON_SECRET }}
    runs-on: ubuntu-latest
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@v2
        with:
          egress-policy: audit

      - name: cURL request
        if: ${{ env.APP_URL && env.CRON_SECRET }}
        run: |
          curl ${{ env.APP_URL }}/api/cron/survey-status \
          -X POST \
          -H 'content-type: application/json' \
          -H 'x-api-key: ${{ env.CRON_SECRET }}' \
          --fail
.github/workflows/cron-weeklySummary.yml (34 lines changed, vendored, Normal file)
@@ -0,0 +1,34 @@
name: Cron - Weekly summary

on:
  workflow_dispatch:
  # "Scheduled workflows run on the latest commit on the default or base branch."
  # — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
  schedule:
    # Runs “At 08:00 on Monday.” (see https://crontab.guru)
    - cron: "0 8 * * 1"
permissions:
  contents: read

jobs:
  cron-weeklySummary:
    permissions:
      contents: read
    env:
      APP_URL: ${{ secrets.APP_URL }}
      CRON_SECRET: ${{ secrets.CRON_SECRET }}
    runs-on: ubuntu-latest
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@v2
        with:
          egress-policy: audit

      - name: cURL request
        if: ${{ env.APP_URL && env.CRON_SECRET }}
        run: |
          curl ${{ env.APP_URL }}/api/cron/weekly-summary \
          -X POST \
          -H 'content-type: application/json' \
          -H 'x-api-key: ${{ env.CRON_SECRET }}' \
          --fail
.github/workflows/dependency-review.yml (27 lines changed, vendored, Normal file)
@@ -0,0 +1,27 @@
# Dependency Review Action
#
# This Action will scan dependency manifest files that change as part of a Pull Request,
# surfacing known-vulnerable versions of the packages declared or updated in the PR.
# Once installed, if the workflow run is marked as required,
# PRs introducing known-vulnerable packages will be blocked from merging.
#
# Source repository: https://github.com/actions/dependency-review-action
name: 'Dependency Review'
on: [pull_request]

permissions:
  contents: read

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
        with:
          egress-policy: audit

      - name: 'Checkout Repository'
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
      - name: 'Dependency Review'
        uses: actions/dependency-review-action@3b139cfc5fae8b618d3eae3675e383bb1769c019 # v4.5.0
.github/workflows/deploy-formbricks-cloud.yml (149 lines changed, vendored)
@@ -1,149 +0,0 @@
|
||||
name: Formbricks Cloud Deployment
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
VERSION:
|
||||
description: "The version of the Docker image to release (clean SemVer, e.g., 1.2.3)"
|
||||
required: true
|
||||
type: string
|
||||
REPOSITORY:
|
||||
description: "The repository to use for the Docker image"
|
||||
required: false
|
||||
type: string
|
||||
default: "ghcr.io/formbricks/formbricks"
|
||||
ENVIRONMENT:
|
||||
description: "The environment to deploy to"
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- staging
|
||||
- production
|
||||
workflow_call:
|
||||
inputs:
|
||||
VERSION:
|
||||
description: "The version of the Docker image to release"
|
||||
required: true
|
||||
type: string
|
||||
REPOSITORY:
|
||||
description: "The repository to use for the Docker image"
|
||||
required: false
|
||||
type: string
|
||||
default: "ghcr.io/formbricks/formbricks"
|
||||
ENVIRONMENT:
|
||||
description: "The environment to deploy to"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
helmfile-deploy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Tailscale
|
||||
uses: tailscale/github-action@84a3f23bb4d843bcf4da6cf824ec1be473daf4de # v3.2.3
|
||||
with:
|
||||
oauth-client-id: ${{ secrets.TS_OAUTH_CLIENT_ID }}
|
||||
oauth-secret: ${{ secrets.TS_OAUTH_SECRET }}
|
||||
tags: tag:github
|
||||
args: --accept-routes
|
||||
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@f24d7193d98baebaeacc7e2227925dd47cc267f5 # v4.2.0
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }}
|
||||
aws-region: "eu-central-1"
|
||||
|
||||
- name: Setup Cluster Access
|
||||
run: |
|
||||
aws eks update-kubeconfig --name formbricks-prod-eks --region eu-central-1
|
||||
env:
|
||||
AWS_REGION: eu-central-1
|
||||
|
||||
- uses: helmfile/helmfile-action@712000e3d4e28c72778ecc53857746082f555ef3 # v2.0.4
|
||||
name: Deploy Formbricks Cloud Production
|
||||
if: inputs.ENVIRONMENT == 'production'
|
||||
env:
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
REPOSITORY: ${{ inputs.REPOSITORY }}
|
||||
FORMBRICKS_S3_BUCKET: ${{ secrets.FORMBRICKS_S3_BUCKET }}
|
||||
FORMBRICKS_INGRESS_CERT_ARN: ${{ secrets.FORMBRICKS_INGRESS_CERT_ARN }}
|
||||
FORMBRICKS_ROLE_ARN: ${{ secrets.FORMBRICKS_ROLE_ARN }}
|
||||
with:
|
||||
helmfile-version: "v1.0.0"
|
||||
helm-plugins: >
|
||||
https://github.com/databus23/helm-diff,
|
||||
https://github.com/jkroepke/helm-secrets
|
||||
helmfile-args: apply -l environment=prod
|
||||
helmfile-auto-init: "false"
|
||||
helmfile-workdirectory: infra/formbricks-cloud-helm
|
||||
|
||||
- uses: helmfile/helmfile-action@712000e3d4e28c72778ecc53857746082f555ef3 # v2.0.4
|
||||
name: Deploy Formbricks Cloud Staging
|
||||
if: inputs.ENVIRONMENT == 'staging'
|
||||
env:
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
REPOSITORY: ${{ inputs.REPOSITORY }}
|
||||
FORMBRICKS_INGRESS_CERT_ARN: ${{ secrets.STAGE_FORMBRICKS_INGRESS_CERT_ARN }}
|
||||
FORMBRICKS_ROLE_ARN: ${{ secrets.STAGE_FORMBRICKS_ROLE_ARN }}
|
||||
with:
|
||||
helmfile-version: "v1.0.0"
|
||||
helm-plugins: >
|
||||
https://github.com/databus23/helm-diff,
|
||||
https://github.com/jkroepke/helm-secrets
|
||||
helmfile-args: apply -l environment=stage
|
||||
helmfile-auto-init: "false"
|
||||
helmfile-workdirectory: infra/formbricks-cloud-helm
|
||||
|
||||
- name: Purge Cloudflare Cache
|
||||
if: ${{ inputs.ENVIRONMENT == 'production' || inputs.ENVIRONMENT == 'staging' }}
|
||||
env:
|
||||
CF_ZONE_ID: ${{ secrets.CLOUDFLARE_ZONE_ID }}
|
||||
CF_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
ENVIRONMENT: ${{ inputs.ENVIRONMENT }}
|
||||
run: |
|
||||
# Set hostname based on environment
|
||||
if [[ "$ENVIRONMENT" == "production" ]]; then
|
||||
PURGE_HOST="app.formbricks.com"
|
||||
else
|
||||
PURGE_HOST="stage.app.formbricks.com"
|
||||
fi
|
||||
|
||||
echo "Purging Cloudflare cache for host: $PURGE_HOST (environment: $ENVIRONMENT, zone: $CF_ZONE_ID)"
|
||||
|
||||
# Prepare JSON payload for selective cache purge
|
||||
json_payload=$(cat << EOF
|
||||
{
|
||||
"hosts": ["$PURGE_HOST"]
|
||||
}
|
||||
EOF
|
||||
)
|
||||
|
||||
# Make API call to Cloudflare
|
||||
response=$(curl -s -X POST \
|
||||
"https://api.cloudflare.com/client/v4/zones/$CF_ZONE_ID/purge_cache" \
|
||||
-H "Authorization: Bearer $CF_API_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
--data "$json_payload")
|
||||
|
||||
echo "Cloudflare API response: $response"
|
||||
|
||||
# Verify the operation was successful
|
||||
if [[ "$(echo "$response" | jq -r .success)" == "true" ]]; then
|
||||
echo "✅ Successfully purged cache for $PURGE_HOST"
|
||||
else
|
||||
echo "❌ Cloudflare cache purge failed"
|
||||
echo "Error details: $(echo "$response" | jq -r .errors)"
|
||||
exit 1
|
||||
fi
|
||||
.github/workflows/docker-build-validation.yml (200 lines changed, vendored)
@@ -1,200 +0,0 @@
|
||||
name: Docker Build Validation
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
merge_group:
|
||||
branches:
|
||||
- main
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ vars.TURBO_TEAM }}
|
||||
|
||||
jobs:
|
||||
validate-docker-build:
|
||||
name: Validate Docker Build
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
# Add PostgreSQL and Redis service containers
|
||||
services:
|
||||
postgres:
|
||||
image: pgvector/pgvector@sha256:9ae02a756ba16a2d69dd78058e25915e36e189bb36ddf01ceae86390d7ed786a
|
||||
env:
|
||||
POSTGRES_USER: test
|
||||
POSTGRES_PASSWORD: test
|
||||
POSTGRES_DB: formbricks
|
||||
ports:
|
||||
- 5432:5432
|
||||
# Health check to ensure PostgreSQL is ready before using it
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
redis:
|
||||
image: valkey/valkey@sha256:12ba4f45a7c3e1d0f076acd616cb230834e75a77e8516dde382720af32832d6d
|
||||
ports:
|
||||
- 6379:6379
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build Docker Image
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
env:
|
||||
GITHUB_SHA: ${{ github.sha }}
|
||||
with:
|
||||
context: .
|
||||
file: ./apps/web/Dockerfile
|
||||
push: false
|
||||
load: true
|
||||
tags: formbricks-test:${{ env.GITHUB_SHA }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
secrets: |
|
||||
database_url=${{ secrets.DUMMY_DATABASE_URL }}
|
||||
encryption_key=${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
redis_url=redis://localhost:6379
|
||||
|
||||
- name: Verify and Initialize PostgreSQL
|
||||
run: |
|
||||
echo "Verifying PostgreSQL connection..."
|
||||
# Install PostgreSQL client to test connection
|
||||
sudo apt-get update && sudo apt-get install -y postgresql-client
|
||||
|
||||
# Test connection using psql with timeout and proper error handling
|
||||
echo "Testing PostgreSQL connection with 30 second timeout..."
|
||||
if timeout 30 bash -c 'until PGPASSWORD=test psql -h localhost -U test -d formbricks -c "\dt" >/dev/null 2>&1; do
|
||||
echo "Waiting for PostgreSQL to be ready..."
|
||||
sleep 2
|
||||
done'; then
|
||||
echo "✅ PostgreSQL connection successful"
|
||||
PGPASSWORD=test psql -h localhost -U test -d formbricks -c "SELECT version();"
|
||||
|
||||
# Enable necessary extensions that might be required by migrations
|
||||
echo "Enabling required PostgreSQL extensions..."
|
||||
PGPASSWORD=test psql -h localhost -U test -d formbricks -c "CREATE EXTENSION IF NOT EXISTS vector;" || echo "Vector extension already exists or not available"
|
||||
|
||||
else
|
||||
echo "❌ PostgreSQL connection failed after 30 seconds"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Show network configuration
|
||||
echo "Network configuration:"
|
||||
netstat -tulpn | grep 5432 || echo "No process listening on port 5432"
|
||||
|
||||
- name: Verify Redis/Valkey Connection
|
||||
run: |
|
||||
echo "Verifying Redis/Valkey connection..."
|
||||
# Install Redis client to test connection
|
||||
sudo apt-get update && sudo apt-get install -y redis-tools
|
||||
|
||||
# Test connection using redis-cli with timeout and proper error handling
|
||||
echo "Testing Redis connection with 30 second timeout..."
|
||||
if timeout 30 bash -c 'until redis-cli -h localhost -p 6379 ping >/dev/null 2>&1; do
|
||||
echo "Waiting for Redis to be ready..."
|
||||
sleep 2
|
||||
done'; then
|
||||
echo "✅ Redis connection successful"
|
||||
redis-cli -h localhost -p 6379 info server | head -5
|
||||
else
|
||||
echo "❌ Redis connection failed after 30 seconds"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Show network configuration for Redis
|
||||
echo "Redis network configuration:"
|
||||
netstat -tulpn | grep 6379 || echo "No process listening on port 6379"
|
||||
|
||||
- name: Test Docker Image with Health Check
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_SHA: ${{ github.sha }}
|
||||
DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
run: |
|
||||
echo "🧪 Testing if the Docker image starts correctly..."
|
||||
|
||||
# Add extra docker run args to support host.docker.internal on Linux
|
||||
DOCKER_RUN_ARGS="--add-host=host.docker.internal:host-gateway"
|
||||
|
||||
# Start the container with host.docker.internal pointing to the host
|
||||
docker run --name formbricks-test \
|
||||
$DOCKER_RUN_ARGS \
|
||||
-p 3000:3000 \
|
||||
-e DATABASE_URL="postgresql://test:test@host.docker.internal:5432/formbricks" \
|
||||
-e ENCRYPTION_KEY="$DUMMY_ENCRYPTION_KEY" \
|
||||
-e REDIS_URL="redis://host.docker.internal:6379" \
|
||||
-d "formbricks-test:$GITHUB_SHA"
|
||||
|
||||
# Start health check polling immediately (every 5 seconds for up to 5 minutes)
|
||||
echo "🏥 Polling /health endpoint every 5 seconds for up to 5 minutes..."
|
||||
MAX_RETRIES=60 # 60 attempts × 5 seconds = 5 minutes
|
||||
RETRY_COUNT=0
|
||||
HEALTH_CHECK_SUCCESS=false
|
||||
|
||||
set +e # Disable exit on error to allow for retries
|
||||
|
||||
while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
|
||||
RETRY_COUNT=$((RETRY_COUNT + 1))
|
||||
|
||||
# Check if container is still running
|
||||
if [ "$(docker inspect -f '{{.State.Running}}' formbricks-test 2>/dev/null)" != "true" ]; then
|
||||
echo "❌ Container stopped running after $((RETRY_COUNT * 5)) seconds!"
|
||||
echo "📋 Container logs:"
|
||||
docker logs formbricks-test
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Show progress and diagnostic info every 12 attempts (1 minute intervals)
|
||||
if [ $((RETRY_COUNT % 12)) -eq 0 ] || [ $RETRY_COUNT -eq 1 ]; then
|
||||
echo "Health check attempt $RETRY_COUNT of $MAX_RETRIES ($(($RETRY_COUNT * 5)) seconds elapsed)..."
|
||||
echo "📋 Recent container logs:"
|
||||
docker logs --tail 10 formbricks-test
|
||||
fi
|
||||
|
||||
# Try health endpoint with shorter timeout for faster polling
|
||||
# Use -f flag to make curl fail on HTTP error status codes (4xx, 5xx)
|
||||
if curl -f -s -m 10 http://localhost:3000/health >/dev/null 2>&1; then
|
||||
echo "✅ Health check successful after $((RETRY_COUNT * 5)) seconds!"
|
||||
HEALTH_CHECK_SUCCESS=true
|
||||
break
|
||||
fi
|
||||
|
||||
# Wait 5 seconds before next attempt
|
||||
sleep 5
|
||||
done
|
||||
|
||||
# Show full container logs for debugging
|
||||
echo "📋 Full container logs:"
|
||||
docker logs formbricks-test
|
||||
|
||||
# Clean up the container
|
||||
echo "🧹 Cleaning up..."
|
||||
docker rm -f formbricks-test
|
||||
|
||||
# Exit with failure if health check did not succeed
|
||||
if [ "$HEALTH_CHECK_SUCCESS" != "true" ]; then
|
||||
echo "❌ Health check failed after $((MAX_RETRIES * 5)) seconds (5 minutes)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✨ Docker validation complete - all checks passed!"
|
||||
.github/workflows/docker-security-scan.yml (70 lines changed, vendored)
@@ -1,70 +0,0 @@
|
||||
name: Docker Security Scan
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 2 * * *" # Daily at 2 AM UTC
|
||||
workflow_dispatch:
|
||||
workflow_run:
|
||||
workflows: ["Docker Release to Github"]
|
||||
types: [completed]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: read
|
||||
security-events: write
|
||||
|
||||
jobs:
|
||||
scan:
|
||||
name: Vulnerability Scan
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- name: Harden the runner
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout (for SARIF fingerprinting only)
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Determine ref and commit for upload
|
||||
id: gitref
|
||||
shell: bash
|
||||
env:
|
||||
EVENT_NAME: ${{ github.event_name }}
|
||||
HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
|
||||
HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [[ "${EVENT_NAME}" == "workflow_run" ]]; then
|
||||
echo "ref=refs/heads/${HEAD_BRANCH}" >> "$GITHUB_OUTPUT"
|
||||
echo "sha=${HEAD_SHA}" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "ref=${GITHUB_REF}" >> "$GITHUB_OUTPUT"
|
||||
echo "sha=${GITHUB_SHA}" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Run Trivy vulnerability scanner
|
||||
uses: aquasecurity/trivy-action@dc5a429b52fcf669ce959baa2c2dd26090d2a6c4 # v0.32.0
|
||||
with:
|
||||
image-ref: "ghcr.io/${{ github.repository }}:latest"
|
||||
format: "sarif"
|
||||
output: "trivy-results.sarif"
|
||||
severity: "CRITICAL,HIGH,MEDIUM,LOW"
|
||||
|
||||
- name: Upload Trivy scan results to GitHub Security tab
|
||||
uses: github/codeql-action/upload-sarif@a4e1a019f5e24960714ff6296aee04b736cbc3cf # v3.29.6
|
||||
if: ${{ always() }}
|
||||
with:
|
||||
sarif_file: "trivy-results.sarif"
|
||||
ref: ${{ steps.gitref.outputs.ref }}
|
||||
sha: ${{ steps.gitref.outputs.sha }}
|
||||
category: "trivy-container-scan"
|
||||
.github/workflows/e2e.yml (174 lines changed, vendored)
@@ -3,22 +3,25 @@ name: E2E Tests
|
||||
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
AZURE_CLIENT_ID:
|
||||
required: false
|
||||
AZURE_TENANT_ID:
|
||||
required: false
|
||||
AZURE_SUBSCRIPTION_ID:
|
||||
required: false
|
||||
PLAYWRIGHT_SERVICE_URL:
|
||||
required: false
|
||||
PLAYWRIGHT_SERVICE_ACCESS_TOKEN:
|
||||
required: false
|
||||
ENTERPRISE_LICENSE_KEY:
|
||||
required: true
|
||||
# Add other secrets if necessary
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ vars.TURBO_TEAM }}
|
||||
TELEMETRY_DISABLED: 1
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
actions: read
|
||||
checks: write
|
||||
|
||||
jobs:
|
||||
build:
|
||||
@@ -27,7 +30,7 @@ jobs:
|
||||
timeout-minutes: 60
|
||||
services:
|
||||
postgres:
|
||||
image: pgvector/pgvector@sha256:9ae02a756ba16a2d69dd78058e25915e36e189bb36ddf01ceae86390d7ed786a
|
||||
image: pgvector/pgvector:pg17
|
||||
env:
|
||||
POSTGRES_DB: postgres
|
||||
POSTGRES_USER: postgres
|
||||
@@ -35,31 +38,23 @@ jobs:
|
||||
ports:
|
||||
- 5432:5432
|
||||
options: >-
|
||||
--health-cmd="pg_isready -U postgres"
|
||||
--health-cmd="pg_isready -U testuser"
|
||||
--health-interval=10s
|
||||
--health-timeout=5s
|
||||
--health-retries=5
|
||||
valkey:
|
||||
image: valkey/valkey@sha256:12ba4f45a7c3e1d0f076acd616cb230834e75a77e8516dde382720af32832d6d
|
||||
ports:
|
||||
- 6379:6379
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
allowed-endpoints: |
|
||||
ee.formbricks.com:443
|
||||
registry-1.docker.io:443
|
||||
docker.io:443
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
|
||||
- uses: ./.github/actions/dangerous-git-checkout
|
||||
|
||||
- name: Setup Node.js 22.x
|
||||
- name: Setup Node.js 20.x
|
||||
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 # v3.8.2
|
||||
with:
|
||||
node-version: 22.x
|
||||
node-version: 20.x
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
@@ -78,73 +73,11 @@ jobs:
|
||||
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
|
||||
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/ENTERPRISE_LICENSE_KEY=.*/ENTERPRISE_LICENSE_KEY=${{ secrets.ENTERPRISE_LICENSE_KEY }}/" .env
|
||||
sed -i "s|REDIS_URL=.*|REDIS_URL=redis://localhost:6379|" .env
|
||||
sed -i "s/ENTERPRISE_LICENSE_KEY=.*/ENTERPRISE_LICENSE_KEY=${RANDOM_KEY}/" .env
|
||||
echo "" >> .env
|
||||
echo "E2E_TESTING=1" >> .env
|
||||
echo "S3_REGION=us-east-1" >> .env
|
||||
echo "S3_BUCKET_NAME=formbricks-e2e" >> .env
|
||||
echo "S3_ENDPOINT_URL=http://localhost:9000" >> .env
|
||||
echo "S3_ACCESS_KEY=devminio" >> .env
|
||||
echo "S3_SECRET_KEY=devminio123" >> .env
|
||||
echo "S3_FORCE_PATH_STYLE=1" >> .env
|
||||
shell: bash
|
||||
|
||||
- name: Install MinIO client (mc)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
MC_VERSION="RELEASE.2025-08-13T08-35-41Z"
|
||||
MC_BASE="https://dl.min.io/client/mc/release/linux-amd64/archive"
|
||||
MC_BIN="mc.${MC_VERSION}"
|
||||
MC_SUM="${MC_BIN}.sha256sum"
|
||||
|
||||
curl -fsSL "${MC_BASE}/${MC_BIN}" -o "${MC_BIN}"
|
||||
curl -fsSL "${MC_BASE}/${MC_SUM}" -o "${MC_SUM}"
|
||||
|
||||
sha256sum -c "${MC_SUM}"
|
||||
|
||||
chmod +x "${MC_BIN}"
|
||||
sudo mv "${MC_BIN}" /usr/local/bin/mc
|
||||
|
||||
- name: Start MinIO Server
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Start MinIO server in background
|
||||
docker run -d \
|
||||
--name minio-server \
|
||||
-p 9000:9000 \
|
||||
-p 9001:9001 \
|
||||
-e MINIO_ROOT_USER=devminio \
|
||||
-e MINIO_ROOT_PASSWORD=devminio123 \
|
||||
minio/minio:RELEASE.2025-09-07T16-13-09Z \
|
||||
server /data --console-address :9001
|
||||
|
||||
echo "MinIO server started"
|
||||
|
||||
- name: Wait for MinIO and create S3 bucket
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo "Waiting for MinIO to be ready..."
|
||||
ready=0
|
||||
for i in {1..60}; do
|
||||
if curl -fsS http://localhost:9000/minio/health/live >/dev/null; then
|
||||
echo "MinIO is up after ${i} seconds"
|
||||
ready=1
|
||||
break
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
|
||||
if [ "$ready" -ne 1 ]; then
|
||||
echo "::error::MinIO did not become ready within 60 seconds"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
mc alias set local http://localhost:9000 devminio devminio123
|
||||
mc mb --ignore-existing local/formbricks-e2e
|
||||
|
||||
- name: Build App
|
||||
run: |
|
||||
pnpm build --filter=@formbricks/web...
|
||||
@@ -154,37 +87,9 @@ jobs:
|
||||
# pnpm prisma migrate deploy
|
||||
pnpm db:migrate:dev
|
||||
|
||||
- name: Run Rate Limiter Load Tests
|
||||
run: |
|
||||
echo "Running rate limiter load tests with Redis/Valkey..."
|
||||
cd apps/web && pnpm vitest run modules/core/rate-limit/rate-limit-load.test.ts
|
||||
shell: bash
|
||||
|
||||
- name: Run Cache Integration Tests
|
||||
run: |
|
||||
echo "Running cache integration tests with Redis/Valkey..."
|
||||
cd packages/cache && pnpm vitest run src/cache-integration.test.ts
|
||||
shell: bash
|
||||
|
||||
- name: Check for Enterprise License
|
||||
run: |
|
||||
LICENSE_KEY=$(grep '^ENTERPRISE_LICENSE_KEY=' .env | cut -d'=' -f2-)
|
||||
if [ -z "$LICENSE_KEY" ]; then
|
||||
echo "::error::ENTERPRISE_LICENSE_KEY in .env is empty. Please check your secret configuration."
|
||||
exit 1
|
||||
fi
|
||||
echo "License key length: ${#LICENSE_KEY}"
|
||||
|
||||
- name: Disable rate limiting for E2E tests
|
||||
run: |
|
||||
echo "RATE_LIMITING_DISABLED=1" >> .env
|
||||
echo "Rate limiting disabled for E2E tests"
|
||||
shell: bash
|
||||
|
||||
- name: Run App
|
||||
run: |
|
||||
echo "Starting app with enterprise license..."
|
||||
NODE_ENV=test pnpm start --filter=@formbricks/web | tee app.log 2>&1 &
|
||||
NODE_ENV=test pnpm start --filter=@formbricks/web &
|
||||
sleep 10 # Optional: gives some buffer for the app to start
|
||||
for attempt in {1..10}; do
|
||||
if [ $(curl -o /dev/null -s -w "%{http_code}" http://localhost:3000/health) -eq 200 ]; then
|
||||
@@ -202,32 +107,31 @@ jobs:
|
||||
- name: Install Playwright
|
||||
run: pnpm exec playwright install --with-deps
|
||||
|
||||
- name: Determine Playwright execution mode
|
||||
shell: bash
|
||||
env:
|
||||
PLAYWRIGHT_SERVICE_URL: ${{ secrets.PLAYWRIGHT_SERVICE_URL }}
|
||||
PLAYWRIGHT_SERVICE_ACCESS_TOKEN: ${{ secrets.PLAYWRIGHT_SERVICE_ACCESS_TOKEN }}
|
||||
- name: Set Azure Secret Variables
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
if [[ -n "${PLAYWRIGHT_SERVICE_URL}" && -n "${PLAYWRIGHT_SERVICE_ACCESS_TOKEN}" ]]; then
|
||||
echo "PW_MODE=service" >> "$GITHUB_ENV"
|
||||
if [[ -n "${{ secrets.AZURE_CLIENT_ID }}" && -n "${{ secrets.AZURE_TENANT_ID }}" && -n "${{ secrets.AZURE_SUBSCRIPTION_ID }}" ]]; then
|
||||
echo "AZURE_ENABLED=true" >> $GITHUB_ENV
|
||||
else
|
||||
echo "PW_MODE=local" >> "$GITHUB_ENV"
|
||||
echo "AZURE_ENABLED=false" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Run E2E Tests (Playwright Service)
|
||||
if: env.PW_MODE == 'service'
|
||||
- name: Azure login
|
||||
if: env.AZURE_ENABLED == 'true'
|
||||
uses: azure/login@a65d910e8af852a8061c627c456678983e180302 # v2.2.0
|
||||
with:
|
||||
client-id: ${{ secrets.AZURE_CLIENT_ID }}
|
||||
tenant-id: ${{ secrets.AZURE_TENANT_ID }}
|
||||
subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
|
||||
|
||||
- name: Run E2E Tests (Azure)
|
||||
if: env.AZURE_ENABLED == 'true'
|
||||
env:
|
||||
PLAYWRIGHT_SERVICE_URL: ${{ secrets.PLAYWRIGHT_SERVICE_URL }}
|
||||
PLAYWRIGHT_SERVICE_ACCESS_TOKEN: ${{ secrets.PLAYWRIGHT_SERVICE_ACCESS_TOKEN }}
|
||||
CI: true
|
||||
run: pnpm test-e2e:azure
|
||||
run: |
|
||||
pnpm test-e2e:azure
|
||||
|
||||
- name: Run E2E Tests (Local)
|
||||
if: env.PW_MODE == 'local'
|
||||
env:
|
||||
CI: true
|
||||
if: env.AZURE_ENABLED == 'false'
|
||||
run: |
|
||||
pnpm test:e2e
|
||||
|
||||
@@ -237,13 +141,3 @@ jobs:
|
||||
name: playwright-report
|
||||
path: playwright-report/
|
||||
retention-days: 30
|
||||
|
||||
- uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
|
||||
if: failure()
|
||||
with:
|
||||
name: app-logs
|
||||
path: app.log
|
||||
|
||||
- name: Output App Logs
|
||||
if: failure()
|
||||
run: cat app.log
|
||||
|
||||
.github/workflows/formbricks-release.yml (157 lines changed, vendored)
@@ -1,157 +0,0 @@
|
||||
name: Build, release & deploy Formbricks images
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
check-latest-release:
|
||||
name: Check if this is the latest release
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
is_latest: ${{ steps.compare_tags.outputs.is_latest }}
|
||||
# This job determines if the current release was marked as "Set as the latest release"
|
||||
# by comparing it with the latest release from GitHub API
|
||||
steps:
|
||||
- name: Harden the runner
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Get latest release tag from API
|
||||
id: get_latest_release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
REPO: ${{ github.repository }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Get the latest release tag from GitHub API with error handling
|
||||
echo "Fetching latest release from GitHub API..."
|
||||
|
||||
# Use curl with error handling - API returns 404 if no releases exist
|
||||
http_code=$(curl -s -w "%{http_code}" -H "Authorization: token ${GITHUB_TOKEN}" \
|
||||
"https://api.github.com/repos/${REPO}/releases/latest" -o /tmp/latest_release.json)
|
||||
|
||||
if [[ "$http_code" == "404" ]]; then
|
||||
echo "⚠️ No previous releases found (404). This appears to be the first release."
|
||||
echo "latest_release=" >> $GITHUB_OUTPUT
|
||||
elif [[ "$http_code" == "200" ]]; then
|
||||
latest_release=$(jq -r .tag_name /tmp/latest_release.json)
|
||||
if [[ "$latest_release" == "null" || -z "$latest_release" ]]; then
|
||||
echo "⚠️ API returned null/empty tag_name. Treating as first release."
|
||||
echo "latest_release=" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "Latest release from API: ${latest_release}"
|
||||
echo "latest_release=${latest_release}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
else
|
||||
echo "❌ GitHub API error (HTTP ${http_code}). Treating as first release."
|
||||
echo "latest_release=" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
echo "Current release tag: ${{ github.event.release.tag_name }}"
|
||||
|
||||
- name: Compare release tags
|
||||
id: compare_tags
|
||||
env:
|
||||
CURRENT_TAG: ${{ github.event.release.tag_name }}
|
||||
LATEST_TAG: ${{ steps.get_latest_release.outputs.latest_release }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Handle first release case (no previous releases)
|
||||
if [[ -z "${LATEST_TAG}" ]]; then
|
||||
echo "🎉 This is the first release (${CURRENT_TAG}) - treating as latest"
|
||||
echo "is_latest=true" >> $GITHUB_OUTPUT
|
||||
elif [[ "${CURRENT_TAG}" == "${LATEST_TAG}" ]]; then
|
||||
echo "✅ This release (${CURRENT_TAG}) is marked as the latest release"
|
||||
echo "is_latest=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "ℹ️ This release (${CURRENT_TAG}) is not the latest release (latest: ${LATEST_TAG})"
|
||||
echo "is_latest=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
docker-build-community:
|
||||
name: Build & release community docker image
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
uses: ./.github/workflows/release-docker-github.yml
|
||||
secrets: inherit
|
||||
needs:
|
||||
- check-latest-release
|
||||
with:
|
||||
IS_PRERELEASE: ${{ github.event.release.prerelease }}
|
||||
MAKE_LATEST: ${{ needs.check-latest-release.outputs.is_latest == 'true' }}
|
||||
|
||||
docker-build-cloud:
|
||||
name: Build & push Formbricks Cloud to ECR
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
uses: ./.github/workflows/build-and-push-ecr.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
image_tag: ${{ needs.docker-build-community.outputs.VERSION }}
|
||||
IS_PRERELEASE: ${{ github.event.release.prerelease }}
|
||||
MAKE_LATEST: ${{ needs.check-latest-release.outputs.is_latest == 'true' }}
|
||||
needs:
|
||||
- check-latest-release
|
||||
- docker-build-community
|
||||
|
||||
helm-chart-release:
|
||||
name: Release Helm Chart
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
uses: ./.github/workflows/release-helm-chart.yml
|
||||
secrets: inherit
|
||||
needs:
|
||||
- docker-build-community
|
||||
with:
|
||||
VERSION: ${{ needs.docker-build-community.outputs.VERSION }}
|
||||
|
||||
verify-cloud-build:
|
||||
name: Verify Cloud Build Outputs
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5 # Simple verification should be quick
|
||||
needs:
|
||||
- docker-build-cloud
|
||||
steps:
|
||||
- name: Harden the runner
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Display ECR build outputs
|
||||
env:
|
||||
IMAGE_TAG: ${{ needs.docker-build-cloud.outputs.IMAGE_TAG }}
|
||||
TAGS: ${{ needs.docker-build-cloud.outputs.TAGS }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo "✅ ECR Build Completed Successfully"
|
||||
echo "Image Tag: ${IMAGE_TAG}"
|
||||
echo "ECR Tags:"
|
||||
printf '%s\n' "${TAGS}"
|
||||
|
||||
move-stable-tag:
|
||||
name: Move stable tag to release
|
||||
permissions:
|
||||
contents: write # Required for tag push operations in called workflow
|
||||
uses: ./.github/workflows/move-stable-tag.yml
|
||||
needs:
|
||||
- check-latest-release
|
||||
- docker-build-community # Ensure release is successful first
|
||||
with:
|
||||
release_tag: ${{ github.event.release.tag_name }}
|
||||
commit_sha: ${{ github.sha }}
|
||||
is_prerelease: ${{ github.event.release.prerelease }}
|
||||
make_latest: ${{ needs.check-latest-release.outputs.is_latest == 'true' }}
|
||||
.github/workflows/labeler.yml (27 lines changed, vendored, Normal file)
@@ -0,0 +1,27 @@
name: "Pull Request Labeler"
on:
  - pull_request_target
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
permissions:
  contents: read

jobs:
  labeler:
    name: Pull Request Labeler
    permissions:
      contents: read
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
        with:
          egress-policy: audit

      - uses: actions/labeler@ac9175f8a1f3625fd0d4fb234536d26811351594 # v4.3.0
        with:
          repo-token: "${{ secrets.GITHUB_TOKEN }}"
          # https://github.com/actions/labeler/issues/442#issuecomment-1297359481
          sync-labels: ""
.github/workflows/lint.yml (4 lines changed, vendored)
@@ -13,7 +13,7 @@ jobs:

    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
        with:
          egress-policy: audit

@@ -26,7 +26,7 @@ jobs:
          node-version: 20.x

      - name: Install pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2

      - name: Install dependencies
        run: pnpm install --config.platform=linux --config.architecture=x64
.github/workflows/move-stable-tag.yml (101 lines changed, vendored)
@@ -1,101 +0,0 @@
|
||||
name: Move Stable Tag
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
release_tag:
|
||||
description: "The release tag name (e.g., 1.2.3)"
|
||||
required: true
|
||||
type: string
|
||||
commit_sha:
|
||||
description: "The commit SHA to point the stable tag to"
|
||||
required: true
|
||||
type: string
|
||||
is_prerelease:
|
||||
description: "Whether this is a prerelease (stable tag won't be moved for prereleases)"
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
make_latest:
|
||||
description: "Whether to move stable tag (from GitHub release 'Set as the latest release' option)"
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
# Prevent concurrent stable tag operations to avoid race conditions
|
||||
concurrency:
|
||||
group: move-stable-tag-${{ github.repository }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
move-stable-tag:
|
||||
name: Move stable tag to release
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10 # Prevent hung git operations
|
||||
permissions:
|
||||
contents: write # Required to push tags
|
||||
# Only move stable tag for non-prerelease versions AND when make_latest is true
|
||||
if: ${{ !inputs.is_prerelease && inputs.make_latest }}
|
||||
steps:
|
||||
- name: Harden the runner
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0 # Full history needed for tag operations
|
||||
|
||||
- name: Validate inputs
|
||||
env:
|
||||
RELEASE_TAG: ${{ inputs.release_tag }}
|
||||
COMMIT_SHA: ${{ inputs.commit_sha }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Validate release tag format
|
||||
if [[ ! "$RELEASE_TAG" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$ ]]; then
|
||||
echo "❌ Error: Invalid release tag format. Expected format: 1.2.3, 1.2.3-alpha"
|
||||
echo "Provided: $RELEASE_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate commit SHA format (40 character hex)
|
||||
if [[ ! "$COMMIT_SHA" =~ ^[a-f0-9]{40}$ ]]; then
|
||||
echo "❌ Error: Invalid commit SHA format. Expected 40 character hex string"
|
||||
echo "Provided: $COMMIT_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Input validation passed"
|
||||
echo "Release tag: $RELEASE_TAG"
|
||||
echo "Commit SHA: $COMMIT_SHA"
|
||||
|
||||
- name: Move stable tag
|
||||
env:
|
||||
RELEASE_TAG: ${{ inputs.release_tag }}
|
||||
COMMIT_SHA: ${{ inputs.commit_sha }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Configure git
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
# Verify the commit exists
|
||||
if ! git cat-file -e "$COMMIT_SHA"; then
|
||||
echo "❌ Error: Commit $COMMIT_SHA does not exist in this repository"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Move stable tag to the release commit
|
||||
echo "📌 Moving stable tag to commit: $COMMIT_SHA (release: $RELEASE_TAG)"
|
||||
git tag -f stable "$COMMIT_SHA"
|
||||
git push origin stable --force
|
||||
|
||||
echo "✅ Successfully moved stable tag to release $RELEASE_TAG"
|
||||
echo "🔗 Stable tag now points to: https://github.com/${{ github.repository }}/commit/$COMMIT_SHA"
|
||||
.github/workflows/pr-size-check.yml (159 lines changed, vendored)
@@ -1,159 +0,0 @@
|
||||
name: PR Size Check
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
check-pr-size:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
|
||||
steps:
|
||||
- name: Harden the runner
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Check PR size
|
||||
id: check-size
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Fetch the base branch
|
||||
git fetch origin "${{ github.base_ref }}"
|
||||
|
||||
# Get diff stats
|
||||
diff_output=$(git diff --numstat "origin/${{ github.base_ref }}"...HEAD)
|
||||
|
||||
# Count lines, excluding:
|
||||
# - Test files (*.test.ts, *.spec.tsx, etc.)
|
||||
# - Locale files (locales/*.json, i18n/*.json)
|
||||
# - Lock files (pnpm-lock.yaml, package-lock.json, yarn.lock)
|
||||
# - Generated files (dist/, coverage/, build/, .next/)
|
||||
# - Storybook stories (*.stories.tsx)
|
||||
|
||||
total_additions=0
|
||||
total_deletions=0
|
||||
counted_files=0
|
||||
excluded_files=0
|
||||
|
||||
while IFS=$'\t' read -r additions deletions file; do
|
||||
# Skip if additions or deletions are "-" (binary files)
|
||||
if [ "$additions" = "-" ] || [ "$deletions" = "-" ]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
# Check if file should be excluded
|
||||
case "$file" in
|
||||
*.test.ts|*.test.tsx|*.spec.ts|*.spec.tsx|*.test.js|*.test.jsx|*.spec.js|*.spec.jsx)
|
||||
excluded_files=$((excluded_files + 1))
|
||||
continue
|
||||
;;
|
||||
*/locales/*.json|*/i18n/*.json)
|
||||
excluded_files=$((excluded_files + 1))
|
||||
continue
|
||||
;;
|
||||
pnpm-lock.yaml|package-lock.json|yarn.lock)
|
||||
excluded_files=$((excluded_files + 1))
|
||||
continue
|
||||
;;
|
||||
dist/*|coverage/*|build/*|node_modules/*|test-results/*|playwright-report/*|.next/*|*.tsbuildinfo)
|
||||
excluded_files=$((excluded_files + 1))
|
||||
continue
|
||||
;;
|
||||
*.stories.ts|*.stories.tsx|*.stories.js|*.stories.jsx)
|
||||
excluded_files=$((excluded_files + 1))
|
||||
continue
|
||||
;;
|
||||
esac
|
||||
|
||||
total_additions=$((total_additions + additions))
|
||||
total_deletions=$((total_deletions + deletions))
|
||||
counted_files=$((counted_files + 1))
|
||||
done <<EOF
|
||||
${diff_output}
|
||||
EOF
|
||||
|
||||
total_changes=$((total_additions + total_deletions))
|
||||
|
||||
echo "counted_files=${counted_files}" >> "${GITHUB_OUTPUT}"
|
||||
echo "excluded_files=${excluded_files}" >> "${GITHUB_OUTPUT}"
|
||||
echo "total_additions=${total_additions}" >> "${GITHUB_OUTPUT}"
|
||||
echo "total_deletions=${total_deletions}" >> "${GITHUB_OUTPUT}"
|
||||
echo "total_changes=${total_changes}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
# Set flag if PR is too large (> 800 lines)
|
||||
if [ ${total_changes} -gt 800 ]; then
|
||||
echo "is_too_large=true" >> "${GITHUB_OUTPUT}"
|
||||
else
|
||||
echo "is_too_large=false" >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
- name: Comment on PR if too large
|
||||
if: steps.check-size.outputs.is_too_large == 'true'
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
const totalChanges = ${{ steps.check-size.outputs.total_changes }};
|
||||
const countedFiles = ${{ steps.check-size.outputs.counted_files }};
|
||||
const excludedFiles = ${{ steps.check-size.outputs.excluded_files }};
|
||||
const additions = ${{ steps.check-size.outputs.total_additions }};
|
||||
const deletions = ${{ steps.check-size.outputs.total_deletions }};
|
||||
|
||||
const body = '## 🚨 PR Size Warning\n\n' +
|
||||
'This PR has approximately **' + totalChanges + ' lines** of changes (' + additions + ' additions, ' + deletions + ' deletions across ' + countedFiles + ' files).\n\n' +
|
||||
'Large PRs (>800 lines) are significantly harder to review and increase the chance of merge conflicts. Consider splitting this into smaller, self-contained PRs.\n\n' +
|
||||
'### 💡 Suggestions:\n' +
|
||||
'- **Split by feature or module** - Break down into logical, independent pieces\n' +
|
||||
'- **Create a sequence of PRs** - Each building on the previous one\n' +
|
||||
'- **Branch off PR branches** - Don\'t wait for reviews to continue dependent work\n\n' +
|
||||
'### 📊 What was counted:\n' +
|
||||
'- ✅ Source files, stylesheets, configuration files\n' +
|
||||
'- ❌ Excluded ' + excludedFiles + ' files (tests, locales, locks, generated files)\n\n' +
|
||||
'### 📚 Guidelines:\n' +
|
||||
'- **Ideal:** 300-500 lines per PR\n' +
|
||||
'- **Warning:** 500-800 lines\n' +
|
||||
'- **Critical:** 800+ lines ⚠️\n\n' +
|
||||
'If this large PR is unavoidable (e.g., migration, dependency update, major refactor), please explain in the PR description why it couldn\'t be split.';
|
||||
|
||||
// Check if we already commented
|
||||
const { data: comments } = await github.rest.issues.listComments({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
});
|
||||
|
||||
const botComment = comments.find(comment =>
|
||||
comment.user.type === 'Bot' &&
|
||||
comment.body.includes('🚨 PR Size Warning')
|
||||
);
|
||||
|
||||
if (botComment) {
|
||||
// Update existing comment
|
||||
await github.rest.issues.updateComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
comment_id: botComment.id,
|
||||
body: body
|
||||
});
|
||||
} else {
|
||||
// Create new comment
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body: body
|
||||
});
|
||||
}
|
||||
|
||||
.github/workflows/pr.yml (5 lines changed, vendored)
@@ -10,6 +10,8 @@ permissions:

on:
  pull_request:
    branches:
      - main
  merge_group:
  workflow_dispatch:

@@ -49,9 +51,10 @@ jobs:
      statuses: write
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0
        uses: step-security/harden-runner@v2
        with:
          egress-policy: audit

      - name: fail if conditional jobs failed
        if: contains(needs.*.result, 'failure') || contains(needs.*.result, 'skipped') || contains(needs.*.result, 'cancelled')
        run: exit 1
.github/workflows/prepare-release.yml (67 lines changed, vendored, Normal file)
@@ -0,0 +1,67 @@
|
||||
name: Prepare release
|
||||
run-name: Prepare release ${{ inputs.next_version }}
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
next_version:
|
||||
required: true
|
||||
type: string
|
||||
description: "Version name"
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
prepare_release:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
|
||||
- uses: ./.github/actions/dangerous-git-checkout
|
||||
|
||||
- name: Configure git
|
||||
run: |
|
||||
git config --local user.email "github-actions@github.com"
|
||||
git config --local user.name "GitHub Actions"
|
||||
|
||||
- name: Setup Node.js 20.x
|
||||
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af
|
||||
with:
|
||||
node-version: 20.x
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --config.platform=linux --config.architecture=x64
|
||||
|
||||
- name: Bump version
|
||||
run: |
|
||||
cd apps/web
|
||||
pnpm version ${{ inputs.next_version }} --no-workspaces-update
|
||||
|
||||
- name: Commit changes and create a branch
|
||||
run: |
|
||||
branch_name="release-v${{ inputs.next_version }}"
|
||||
git checkout -b "$branch_name"
|
||||
git add .
|
||||
git commit -m "chore: release v${{ inputs.next_version }}"
|
||||
git push origin "$branch_name"
|
||||
|
||||
- name: Create pull request
|
||||
run: |
|
||||
gh pr create \
|
||||
--base main \
|
||||
--head "release-v${{ inputs.next_version }}" \
|
||||
--title "chore: bump version to v${{ inputs.next_version }}" \
|
||||
--body "This PR contains the changes for the v${{ inputs.next_version }} release."
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
56
.github/workflows/release-changesets.yml
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
name: Release Changesets
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
#push:
|
||||
# branches:
|
||||
# - main
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
packages: write
|
||||
|
||||
concurrency: ${{ github.workflow }}-${{ github.ref }}
|
||||
|
||||
env:
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
|
||||
jobs:
|
||||
release:
|
||||
name: Release
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
env:
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout Repo
|
||||
uses: actions/checkout@ee0669bd1cc54295c223e0bb666b733df41de1c5 # v2.7.0
|
||||
|
||||
- name: Setup Node.js 18.x
|
||||
uses: actions/setup-node@7c12f8017d5436eb855f1ed4399f037a36fbd9e8 # v2.5.2
|
||||
with:
|
||||
node-version: 18.x
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@c3b53f6a16e57305370b4ae5a540c2077a1d50dd # v2.2.4
|
||||
|
||||
- name: Install Dependencies
|
||||
run: pnpm install --config.platform=linux --config.architecture=x64
|
||||
|
||||
- name: Create Release Pull Request or Publish to npm
|
||||
id: changesets
|
||||
uses: changesets/action@c8bada60c408975afd1a20b3db81d6eee6789308 # v1.4.9
|
||||
with:
|
||||
# This expects you to have a script called release which does a build for your packages and calls changeset publish
|
||||
publish: pnpm release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
@@ -1,50 +1,99 @@
|
||||
name: Build Community Testing Images
|
||||
name: Docker Release to Github Experimental
|
||||
|
||||
# This workflow builds experimental/testing versions of Formbricks for self-hosting customers
|
||||
# to test fixes and features before official releases. Images are pushed to GHCR with
|
||||
# timestamped experimental versions for easy identification and testing.
|
||||
# This workflow uses actions that are not certified by GitHub.
|
||||
# They are provided by a third-party and are governed by
|
||||
# separate terms of service, privacy policy, and support
|
||||
# documentation.
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version_override:
|
||||
description: "Override version (SemVer only, e.g., 1.2.3-beta). Leave empty for auto-generated experimental version."
|
||||
required: false
|
||||
type: string
|
||||
|
||||
env:
|
||||
# Use docker.io for Docker Hub if empty
|
||||
REGISTRY: ghcr.io
|
||||
# github.repository as <account>/<repo>
|
||||
IMAGE_NAME: ${{ github.repository }}-experimental
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
DATABASE_URL: "postgresql://postgres:postgres@localhost:5432/formbricks?schema=public"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
|
||||
jobs:
|
||||
build-community-testing:
|
||||
name: Build Community Testing Image
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
# This is used to complete the identity challenge
|
||||
# with sigstore/fulcio when running outside of PRs.
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
|
||||
|
||||
- name: Build and push community testing image
|
||||
uses: ./.github/actions/build-and-push-docker
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
|
||||
|
||||
# Install the cosign tool except on PR
|
||||
# https://github.com/sigstore/cosign-installer
|
||||
- name: Install cosign
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: sigstore/cosign-installer@59acb6260d9c0ba8f4a2f9d9b48431a222b68e20 # v3.5.0
|
||||
|
||||
# Login against a Docker registry except on PR
|
||||
# https://github.com/docker/login-action
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
|
||||
with:
|
||||
registry_type: "ghcr"
|
||||
ghcr_image_name: "${{ github.repository }}-experimental"
|
||||
experimental_mode: "true"
|
||||
version: ${{ inputs.version_override }}
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Extract metadata (tags, labels) for Docker
|
||||
# https://github.com/docker/metadata-action
|
||||
- name: Extract Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
|
||||
# Build and push Docker image with Buildx (don't push on PR)
|
||||
# https://github.com/docker/build-push-action
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
|
||||
with:
|
||||
project: tw0fqmsx3c
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
context: .
|
||||
file: ./apps/web/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
# Sign the resulting Docker image digest except on PRs.
|
||||
# This will only write to the public Rekor transparency log when the Docker
|
||||
# repository is public to avoid leaking data. If you would like to publish
|
||||
# transparency data even for private images, pass --force to cosign below.
|
||||
# https://github.com/sigstore/cosign
|
||||
- name: Sign the published Docker image
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
DEPOT_PROJECT_TOKEN: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
DUMMY_DATABASE_URL: ${{ secrets.DUMMY_DATABASE_URL }}
|
||||
DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
DUMMY_REDIS_URL: ${{ secrets.DUMMY_REDIS_URL }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
# https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable
|
||||
TAGS: ${{ steps.meta.outputs.tags }}
|
||||
DIGEST: ${{ steps.build-and-push.outputs.digest }}
|
||||
# This step uses the identity token to provision an ephemeral certificate
|
||||
# against the sigstore community Fulcio instance.
|
||||
run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST}
|
||||
|
||||
136
.github/workflows/release-docker-github.yml
vendored
@@ -1,4 +1,4 @@
|
||||
name: Release Community Docker Images
|
||||
name: Docker Release to Github
|
||||
|
||||
# This workflow uses actions that are not certified by GitHub.
|
||||
# They are provided by a third-party and are governed by
|
||||
@@ -6,28 +6,19 @@ name: Release Community Docker Images
|
||||
# documentation.
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
IS_PRERELEASE:
|
||||
description: "Whether this is a prerelease (affects latest tag)"
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
MAKE_LATEST:
|
||||
description: "Whether to tag as latest (from GitHub release 'Set as the latest release' option)"
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
outputs:
|
||||
VERSION:
|
||||
description: release version
|
||||
value: ${{ jobs.build.outputs.VERSION }}
|
||||
workflow_dispatch:
|
||||
push:
|
||||
tags:
|
||||
- "v*"
|
||||
|
||||
env:
|
||||
# Use docker.io for Docker Hub if empty
|
||||
REGISTRY: ghcr.io
|
||||
# github.repository as <account>/<repo>
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
DATABASE_URL: "postgresql://postgres:postgres@localhost:5432/formbricks?schema=public"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
@@ -35,74 +26,77 @@ permissions:
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
# This is used to complete the identity challenge
|
||||
# with sigstore/fulcio when running outside of PRs.
|
||||
|
||||
outputs:
|
||||
VERSION: ${{ steps.extract_release_tag.outputs.VERSION }}
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
|
||||
|
||||
- name: Extract release version from tag
|
||||
id: extract_release_tag
|
||||
run: |
|
||||
set -euo pipefail
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
|
||||
|
||||
# Extract tag name with fallback logic for different trigger contexts
|
||||
if [[ -n "${RELEASE_TAG:-}" ]]; then
|
||||
TAG="$RELEASE_TAG"
|
||||
echo "Using RELEASE_TAG override: $TAG"
|
||||
elif [[ "$GITHUB_REF_NAME" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]] || [[ "$GITHUB_REF_NAME" =~ ^v[0-9] ]]; then
|
||||
TAG="$GITHUB_REF_NAME"
|
||||
echo "Using GITHUB_REF_NAME (looks like tag): $TAG"
|
||||
else
|
||||
# Fallback: extract from GITHUB_REF for direct tag triggers
|
||||
TAG="${GITHUB_REF#refs/tags/}"
|
||||
if [[ -z "$TAG" || "$TAG" == "$GITHUB_REF" ]]; then
|
||||
TAG="$GITHUB_REF_NAME"
|
||||
echo "Using GITHUB_REF_NAME as final fallback: $TAG"
|
||||
else
|
||||
echo "Extracted from GITHUB_REF: $TAG"
|
||||
fi
|
||||
fi
|
||||
# Install the cosign tool except on PR
|
||||
# https://github.com/sigstore/cosign-installer
|
||||
- name: Install cosign
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: sigstore/cosign-installer@59acb6260d9c0ba8f4a2f9d9b48431a222b68e20 # v3.5.0
|
||||
|
||||
# Strip v-prefix if present (normalize to clean SemVer)
|
||||
TAG=${TAG#[vV]}
|
||||
|
||||
# Validate SemVer format (supports prereleases like 4.0.0-rc.1)
|
||||
if [[ ! "$TAG" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]]; then
|
||||
echo "ERROR: Invalid tag format '$TAG'. Expected SemVer (e.g., 1.2.3, 4.0.0-rc.1)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "VERSION=$TAG" >> $GITHUB_OUTPUT
|
||||
echo "Using version: $TAG"
|
||||
|
||||
- name: Build and push community release image
|
||||
id: build
|
||||
uses: ./.github/actions/build-and-push-docker
|
||||
# Login against a Docker registry except on PR
|
||||
# https://github.com/docker/login-action
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
|
||||
with:
|
||||
registry_type: "ghcr"
|
||||
ghcr_image_name: ${{ env.IMAGE_NAME }}
|
||||
version: ${{ steps.extract_release_tag.outputs.VERSION }}
|
||||
is_prerelease: ${{ inputs.IS_PRERELEASE }}
|
||||
make_latest: ${{ inputs.MAKE_LATEST }}
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Extract metadata (tags, labels) for Docker
|
||||
# https://github.com/docker/metadata-action
|
||||
- name: Extract Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
|
||||
# Build and push Docker image with Buildx (don't push on PR)
|
||||
# https://github.com/docker/build-push-action
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
|
||||
with:
|
||||
project: tw0fqmsx3c
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
context: .
|
||||
file: ./apps/web/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
# Sign the resulting Docker image digest except on PRs.
|
||||
# This will only write to the public Rekor transparency log when the Docker
|
||||
# repository is public to avoid leaking data. If you would like to publish
|
||||
# transparency data even for private images, pass --force to cosign below.
|
||||
# https://github.com/sigstore/cosign
|
||||
- name: Sign the published Docker image
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
DEPOT_PROJECT_TOKEN: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
DUMMY_DATABASE_URL: ${{ secrets.DUMMY_DATABASE_URL }}
|
||||
DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
DUMMY_REDIS_URL: ${{ secrets.DUMMY_REDIS_URL }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
# https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable
|
||||
TAGS: ${{ steps.meta.outputs.tags }}
|
||||
DIGEST: ${{ steps.build-and-push.outputs.digest }}
|
||||
# This step uses the identity token to provision an ephemeral certificate
|
||||
# against the sigstore community Fulcio instance.
|
||||
run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST}
|
||||
|
||||
54
.github/workflows/release-docker.yml
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
name: Release on Dockerhub
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v*"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
release-image-on-dockerhub:
|
||||
name: Release on Dockerhub
|
||||
permissions:
|
||||
contents: read
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
DATABASE_URL: "postgresql://postgres:postgres@localhost:5432/formbricks?schema=public"
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout Repo
|
||||
uses: actions/checkout@ee0669bd1cc54295c223e0bb666b733df41de1c5 # v2.7.0
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # v2.2.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@885d1462b80bc1c1c7f0b00334ad271f09369c55 # v2.10.0
|
||||
|
||||
- name: Get Release Tag
|
||||
id: extract_release_tag
|
||||
run: |
|
||||
TAG=${{ github.ref }}
|
||||
TAG=${TAG#refs/tags/v}
|
||||
echo "RELEASE_TAG=$TAG" >> $GITHUB_ENV
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@0a97817b6ade9f46837855d676c4cca3a2471fc9 # v4.2.1
|
||||
with:
|
||||
context: .
|
||||
file: ./apps/web/Dockerfile
|
||||
push: true
|
||||
tags: |
|
||||
${{ secrets.DOCKER_USERNAME }}/formbricks:${{ env.RELEASE_TAG }}
|
||||
${{ secrets.DOCKER_USERNAME }}/formbricks:latest
|
||||
93
.github/workflows/release-helm-chart.yml
vendored
@@ -1,93 +0,0 @@
|
||||
name: Publish Helm Chart
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
VERSION:
|
||||
description: "The version of the Helm chart to release"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
packages: write
|
||||
contents: read
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Validate input version
|
||||
env:
|
||||
INPUT_VERSION: ${{ inputs.VERSION }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Validate input version format (expects clean semver without 'v' prefix)
|
||||
if [[ ! "$INPUT_VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$ ]]; then
|
||||
echo "❌ Error: Invalid version format. Must be clean semver (e.g., 1.2.3, 1.2.3-alpha)"
|
||||
echo "Expected: clean version without 'v' prefix"
|
||||
echo "Provided: $INPUT_VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Store validated version in environment variable
|
||||
echo "VERSION<<EOF" >> $GITHUB_ENV
|
||||
echo "$INPUT_VERSION" >> $GITHUB_ENV
|
||||
echo "EOF" >> $GITHUB_ENV
|
||||
|
||||
- name: Set up Helm
|
||||
uses: azure/setup-helm@5119fcb9089d432beecbf79bb2c7915207344b78 # v3.5
|
||||
with:
|
||||
version: latest
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_ACTOR: ${{ github.actor }}
|
||||
run: printf '%s' "$GITHUB_TOKEN" | helm registry login ghcr.io --username "$GITHUB_ACTOR" --password-stdin
|
||||
|
||||
- name: Install YQ
|
||||
uses: dcarbone/install-yq-action@4075b4dca348d74bd83f2bf82d30f25d7c54539b # v1.3.1
|
||||
|
||||
- name: Update Chart.yaml with new version
|
||||
env:
|
||||
VERSION: ${{ env.VERSION }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo "Updating Chart.yaml with version: ${VERSION}"
|
||||
yq -i ".version = \"${VERSION}\"" helm-chart/Chart.yaml
|
||||
yq -i ".appVersion = \"${VERSION}\"" helm-chart/Chart.yaml
|
||||
|
||||
echo "✅ Successfully updated Chart.yaml"
|
||||
|
||||
- name: Package Helm chart
|
||||
env:
|
||||
VERSION: ${{ env.VERSION }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo "Packaging Helm chart version: ${VERSION}"
|
||||
helm package ./helm-chart
|
||||
|
||||
echo "✅ Successfully packaged formbricks-${VERSION}.tgz"
|
||||
|
||||
- name: Push Helm chart to GitHub Container Registry
|
||||
env:
|
||||
VERSION: ${{ env.VERSION }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo "Pushing Helm chart to registry: formbricks-${VERSION}.tgz"
|
||||
helm push "formbricks-${VERSION}.tgz" oci://ghcr.io/formbricks/helm-charts
|
||||
|
||||
echo "✅ Successfully pushed Helm chart to registry"
|
||||
81
.github/workflows/scorecard.yml
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
# This workflow uses actions that are not certified by GitHub. They are provided
|
||||
# by a third-party and are governed by separate terms of service, privacy
|
||||
# policy, and support documentation.
|
||||
|
||||
name: Scorecard supply-chain security
|
||||
on:
|
||||
# For Branch-Protection check. Only the default branch is supported. See
|
||||
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
|
||||
branch_protection_rule:
|
||||
# To guarantee Maintained check is occasionally updated. See
|
||||
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
|
||||
schedule:
|
||||
- cron: "17 17 * * 6"
|
||||
push:
|
||||
branches: ["main"]
|
||||
workflow_dispatch:
|
||||
|
||||
# Declare default permissions as read only.
|
||||
permissions: read-all
|
||||
|
||||
jobs:
|
||||
analysis:
|
||||
name: Scorecard analysis
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
# Needed to upload the results to code-scanning dashboard.
|
||||
security-events: write
|
||||
# Needed to publish results and get a badge (see publish_results below).
|
||||
id-token: write
|
||||
# Add this permission
|
||||
actions: write # Required for artifact upload
|
||||
# Uncomment the permissions below if installing in a private repository.
|
||||
# contents: read
|
||||
# actions: read
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: "Checkout code"
|
||||
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: "Run analysis"
|
||||
uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
|
||||
with:
|
||||
results_file: results.sarif
|
||||
results_format: sarif
|
||||
# (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
|
||||
# - you want to enable the Branch-Protection check on a *public* repository, or
|
||||
# - you are installing Scorecard on a *private* repository
|
||||
# To create the PAT, follow the steps in https://github.com/ossf/scorecard-action?tab=readme-ov-file#authentication-with-fine-grained-pat-optional.
|
||||
# repo_token: ${{ secrets.SCORECARD_TOKEN }}
|
||||
|
||||
# Public repositories:
|
||||
# - Publish results to OpenSSF REST API for easy access by consumers
|
||||
# - Allows the repository to include the Scorecard badge.
|
||||
# - See https://github.com/ossf/scorecard-action#publishing-results.
|
||||
# For private repositories:
|
||||
# - `publish_results` will always be set to `false`, regardless
|
||||
# of the value entered here.
|
||||
publish_results: true
|
||||
|
||||
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
|
||||
# format to the repository Actions tab.
|
||||
- name: "Upload artifact"
|
||||
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
|
||||
with:
|
||||
name: sarif
|
||||
path: results.sarif
|
||||
retention-days: 5
|
||||
|
||||
# Upload the results to GitHub's code scanning dashboard (optional).
|
||||
# Commenting out will disable upload of results to your repo's Code Scanning dashboard
|
||||
- name: "Upload to code-scanning"
|
||||
uses: github/codeql-action/upload-sarif@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v3.28.10
|
||||
with:
|
||||
sarif_file: results.sarif
|
||||
12
.github/workflows/semantic-pull-requests.yml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
@@ -40,7 +40,7 @@ jobs:
|
||||
revert
|
||||
ossgg
|
||||
|
||||
- uses: marocchino/sticky-pull-request-comment@67d0dec7b07ed060a405f9b2a64b8ab319fdd7db # v2.9.2
|
||||
- uses: marocchino/sticky-pull-request-comment@52423e01640425a022ef5fd42c6fb5f633a02728 # v2.9.1
|
||||
# When the previous steps fails, the workflow would stop. By adding this
|
||||
# condition you can continue the execution with the populated error message.
|
||||
if: always() && (steps.lint_pr_title.outputs.error_message != null)
|
||||
@@ -56,3 +56,11 @@ jobs:
|
||||
```
|
||||
${{ steps.lint_pr_title.outputs.error_message }}
|
||||
```
|
||||
|
||||
# Delete a previous comment when the issue has been resolved
|
||||
- if: ${{ steps.lint_pr_title.outputs.error_message == null }}
|
||||
uses: marocchino/sticky-pull-request-comment@52423e01640425a022ef5fd42c6fb5f633a02728 # v2.9.1
|
||||
with:
|
||||
header: pr-title-lint-error
|
||||
message: |
|
||||
Thank you for following the naming conventions for pull request titles! 🙏
|
||||
|
||||
15
.github/workflows/sonarqube.yml
vendored
@@ -15,7 +15,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
@@ -23,13 +23,13 @@ jobs:
|
||||
with:
|
||||
fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
|
||||
|
||||
- name: Setup Node.js 22.x
|
||||
- name: Setup Node.js 20.x
|
||||
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af
|
||||
with:
|
||||
node-version: 22.x
|
||||
node-version: 20.x
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --config.platform=linux --config.architecture=x64
|
||||
@@ -43,13 +43,16 @@ jobs:
|
||||
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
|
||||
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s|REDIS_URL=.*|REDIS_URL=|" .env
|
||||
|
||||
- name: Run tests with coverage
|
||||
run: |
|
||||
cd apps/web
|
||||
pnpm test:coverage
|
||||
cd ../../
|
||||
# The Vitest coverage config is in your vite.config.mts
|
||||
|
||||
- name: SonarQube Scan
|
||||
uses: SonarSource/sonarqube-scan-action@2500896589ef8f7247069a56136f8dc177c27ccf
|
||||
uses: SonarSource/sonarqube-scan-action@bfd4e558cda28cda6b5defafb9232d191be8c203
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
|
||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
|
||||
5
.github/workflows/test.yml
vendored
@@ -14,11 +14,11 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
|
||||
- uses: ./.github/actions/dangerous-git-checkout
|
||||
|
||||
- name: Setup Node.js 20.x
|
||||
@@ -41,7 +41,6 @@ jobs:
|
||||
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
|
||||
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s|REDIS_URL=.*|REDIS_URL=|" .env
|
||||
|
||||
- name: Test
|
||||
run: pnpm test
|
||||
|
||||
44
.github/workflows/tolgee-missing-key-check.yml
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
name: Check Missing Translations
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
|
||||
jobs:
|
||||
check-missing-translations:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
|
||||
with:
|
||||
node-version: 18
|
||||
|
||||
- name: Install Tolgee CLI
|
||||
run: npm install -g @tolgee/cli
|
||||
|
||||
- name: Compare Tolgee Keys
|
||||
id: compare
|
||||
run: |
|
||||
tolgee compare --api-key ${{ secrets.TOLGEE_API_KEY }} > compare_output.txt
|
||||
cat compare_output.txt
|
||||
|
||||
- name: Check for Missing Translations
|
||||
run: |
|
||||
if grep -q "new key found" compare_output.txt; then
|
||||
echo "New keys found that may require translations:"
|
||||
exit 1
|
||||
else
|
||||
echo "No new keys found."
|
||||
fi
|
||||
89
.github/workflows/tolgee.yml
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
name: Tolgee Tagging on PR Merge
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [closed]
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
tag-production-keys:
|
||||
name: Tag Production Keys
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.pull_request.merged == true
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0 # This ensures we get the full git history
|
||||
|
||||
- name: Get source branch name
|
||||
id: branch-name
|
||||
run: |
|
||||
# For PR merges, use the head ref from the pull request event
|
||||
SOURCE_BRANCH="${{ github.head_ref }}"
|
||||
|
||||
# Only remove username prefix if needed
|
||||
if [[ "$SOURCE_BRANCH" =~ ^[a-zA-Z0-9][a-zA-Z0-9-]+/ ]]; then
|
||||
PREFIX=${SOURCE_BRANCH%%/*}
|
||||
if [[ ! "$PREFIX" =~ ^(feature|fix|bugfix|hotfix|release|chore|docs|test|refactor|style|perf|build|ci|revert)$ ]]; then
|
||||
SOURCE_BRANCH=${SOURCE_BRANCH#*/}
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "SOURCE_BRANCH=$SOURCE_BRANCH" >> $GITHUB_ENV
|
||||
echo "Detected source branch: $SOURCE_BRANCH"
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
|
||||
with:
|
||||
node-version: 18 # Ensure compatibility with your project
|
||||
|
||||
- name: Install Tolgee CLI
|
||||
run: npm install -g @tolgee/cli
|
||||
|
||||
- name: Tag Production Keys
|
||||
run: |
|
||||
npx tolgee tag \
|
||||
--api-key ${{ secrets.TOLGEE_API_KEY }} \
|
||||
--filter-extracted \
|
||||
--filter-tag "draft:${SOURCE_BRANCH}" \
|
||||
--tag production \
|
||||
--untag "draft:${SOURCE_BRANCH}"
|
||||
|
||||
- name: Tag unused production keys as Deprecated
|
||||
run: |
|
||||
npx tolgee tag \
|
||||
--api-key ${{ secrets.TOLGEE_API_KEY }} \
|
||||
--filter-not-extracted --filter-tag production \
|
||||
--tag deprecated --untag production
|
||||
|
||||
- name: Tag unused draft:current-branch keys as Deprecated
|
||||
run: |
|
||||
npx tolgee tag \
|
||||
--api-key ${{ secrets.TOLGEE_API_KEY }} \
|
||||
--filter-not-extracted --filter-tag "draft:${SOURCE_BRANCH}" \
|
||||
--tag deprecated --untag "draft:${SOURCE_BRANCH}"
|
||||
|
||||
- name: Sync with backup
|
||||
run: |
|
||||
npx tolgee sync \
|
||||
--api-key ${{ secrets.TOLGEE_API_KEY }} \
|
||||
--backup ./tolgee-backup \
|
||||
--continue-on-warning \
|
||||
--yes
|
||||
|
||||
- name: Upload backup as artifact
|
||||
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
|
||||
with:
|
||||
name: tolgee-backup-${{ github.sha }}
|
||||
path: ./tolgee-backup
|
||||
retention-days: 90
|
||||
63
.github/workflows/translation-check.yml
vendored
@@ -1,63 +0,0 @@
|
||||
name: Translation Validation
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
paths:
|
||||
- "apps/web/**/*.ts"
|
||||
- "apps/web/**/*.tsx"
|
||||
- "apps/web/locales/**/*.json"
|
||||
- "scan-translations.ts"
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "apps/web/**/*.ts"
|
||||
- "apps/web/**/*.tsx"
|
||||
- "apps/web/locales/**/*.json"
|
||||
- "scan-translations.ts"
|
||||
|
||||
jobs:
|
||||
validate-translations:
|
||||
name: Validate Translation Keys
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
|
||||
with:
|
||||
node-version: 18
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a3252b78c470c02df07e9d59298aecedc3ccdd6d # v3.0.0
|
||||
with:
|
||||
version: 9.15.9
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Validate translation keys
|
||||
run: |
|
||||
echo ""
|
||||
echo "🔍 Validating translation keys..."
|
||||
echo ""
|
||||
pnpm run scan-translations
|
||||
|
||||
- name: Summary
|
||||
if: success()
|
||||
run: |
|
||||
echo ""
|
||||
echo "✅ Translation validation completed successfully!"
|
||||
echo ""
|
||||
.github/workflows/welcome-new-contributors.yml (32 changed lines, vendored, new file)

@@ -0,0 +1,32 @@

name: "Welcome new contributors"

on:
  issues:
    types: opened
  pull_request_target:
    types: opened

permissions:
  pull-requests: write
  issues: write

jobs:
  welcome-message:
    name: Welcoming New Users
    runs-on: ubuntu-latest
    timeout-minutes: 10
    if: github.event.action == 'opened'
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
        with:
          egress-policy: audit

      - uses: actions/first-interaction@3c71ce730280171fd1cfb57c00c774f8998586f7 # v1
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          pr-message: |-
            Thank you so much for making your first Pull Request and taking the time to improve Formbricks! 🚀🙏❤️
            Feel free to join the conversation on [Github Discussions](https://github.com/formbricks/formbricks/discussions) if you need any help or have any questions. 😊
          issue-message: |
            Thank you for opening your first issue! 🙏❤️ One of our team members will review it and get back to you as soon as possible. 😊
.gitignore (11 changed lines, vendored)

@@ -53,13 +53,4 @@ yarn-error.log*

packages/lib/uploads
apps/web/public/js
packages/database/migrations
branch.json
.vercel

# IntelliJ IDEA
/.idea/
/*.iml
packages/ios/FormbricksSDK/FormbricksSDK.xcodeproj/project.xcworkspace/xcuserdata
.cursorrules
i18n.cache
stats.html
branch.json
@@ -1,6 +1,6 @@

#!/bin/bash

images=($(yq eval '.services.*.image' docker-compose.dev.yml))
images=($(yq eval '.services.*.image' packages/database/docker-compose.yml))

pull_image() {
  docker pull "$1"
@@ -1,3 +1,6 @@
|
||||
#!/bin/sh
|
||||
. "$(dirname "$0")/_/husky.sh"
|
||||
|
||||
# Load environment variables from .env files
|
||||
if [ -f .env ]; then
|
||||
set -a
|
||||
@@ -7,34 +10,12 @@ fi
|
||||
|
||||
pnpm lint-staged
|
||||
|
||||
# Run Lingo.dev i18n workflow if LINGODOTDEV_API_KEY is set
|
||||
if [ -n "$LINGODOTDEV_API_KEY" ]; then
|
||||
echo ""
|
||||
echo "🌍 Running Lingo.dev translation workflow..."
|
||||
echo ""
|
||||
|
||||
# Run translation generation and validation
|
||||
if pnpm run i18n; then
|
||||
echo ""
|
||||
echo "✅ Translation validation passed"
|
||||
echo ""
|
||||
# Add updated locale files to git
|
||||
git add apps/web/locales/*.json
|
||||
# Run tolgee-pull if branch.json exists and NEXT_PUBLIC_TOLGEE_API_KEY is not set
|
||||
if [ -f branch.json ]; then
|
||||
if [ -z "$NEXT_PUBLIC_TOLGEE_API_KEY" ]; then
|
||||
echo "Skipping tolgee-pull: NEXT_PUBLIC_TOLGEE_API_KEY is not set"
|
||||
else
|
||||
echo ""
|
||||
echo "❌ Translation validation failed!"
|
||||
echo ""
|
||||
echo "Please fix the translation issues above before committing:"
|
||||
echo " • Add missing translation keys to your locale files"
|
||||
echo " • Remove unused translation keys"
|
||||
echo ""
|
||||
echo "Or run 'pnpm i18n' to see the detailed report"
|
||||
echo ""
|
||||
exit 1
|
||||
pnpm run tolgee-pull
|
||||
git add packages/lib/messages
|
||||
fi
|
||||
else
|
||||
echo ""
|
||||
echo "⚠️ Skipping translation validation: LINGODOTDEV_API_KEY is not set"
|
||||
echo " (This is expected for community contributors)"
|
||||
echo ""
|
||||
fi
|
||||
.pre-commit-config.yaml (18 changed lines, new file)

@@ -0,0 +1,18 @@

repos:
  - repo: https://github.com/gitleaks/gitleaks
    rev: v8.16.3
    hooks:
      - id: gitleaks
  - repo: https://github.com/jumanjihouse/pre-commit-hooks
    rev: 3.0.0
    hooks:
      - id: shellcheck
  - repo: https://github.com/pre-commit/mirrors-eslint
    rev: v8.38.0
    hooks:
      - id: eslint
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.4.0
    hooks:
      - id: end-of-file-fixer
      - id: trailing-whitespace
.tolgeerc.json (35 changed lines, new file)

@@ -0,0 +1,35 @@

{
  "$schema": "https://docs.tolgee.io/cli-schema.json",
  "format": "JSON_TOLGEE",
  "patterns": ["./apps/web/**/*.ts?(x)"],
  "projectId": 10304,
  "pull": {
    "path": "./packages/lib/messages"
  },
  "push": {
    "files": [
      {
        "language": "en-US",
        "path": "./packages/lib/messages/en-US.json"
      },
      {
        "language": "de-DE",
        "path": "./packages/lib/messages/de-DE.json"
      },
      {
        "language": "fr-FR",
        "path": "./packages/lib/messages/fr-FR.json"
      },
      {
        "language": "pt-BR",
        "path": "./packages/lib/messages/pt-BR.json"
      },
      {
        "language": "zh-Hant-TW",
        "path": "./packages/lib/messages/zh-Hant-TW.json"
      }
    ],
    "forceMode": "OVERRIDE"
  },
  "strictNamespace": false
}
.vscode/extensions.json (4 changed lines, vendored)

@@ -6,8 +6,6 @@

    "dbaeumer.vscode-eslint", // eslint plugin
    "esbenp.prettier-vscode", // prettier plugin
    "Prisma.prisma", // syntax|format|completion for prisma
    "yzhang.markdown-all-in-one", // nicer markdown support
    "vitest.explorer", // run tests directly from the code window
    "sonarsource.sonarlint-vscode" // sonarqube linter for vscode
    "yzhang.markdown-all-in-one" // nicer markdown support
  ]
}
.vscode/settings.json (14 changed lines, vendored)

@@ -1,16 +1,4 @@

{
  "eslint.validate": ["javascript", "javascriptreact", "typescript", "typescriptreact"],
  "eslint.workingDirectories": [
    {
      "mode": "auto"
    }
  ],
  "javascript.updateImportsOnFileMove.enabled": "always",
  "sonarlint.connectedMode.project": {
    "connectionId": "formbricks",
    "projectKey": "formbricks_formbricks"
  },
  "typescript.preferences.importModuleSpecifier": "non-relative",
  "typescript.tsdk": "node_modules/typescript/lib",
  "typescript.updateImportsOnFileMove.enabled": "always"
  "typescript.tsdk": "node_modules/typescript/lib"
}
AGENTS.md (82 changed lines)

@@ -1,82 +0,0 @@

# Repository Guidelines

## Project Structure & Module Organization

Formbricks runs as a pnpm/turbo monorepo. `apps/web` is the Next.js product surface, with feature modules under `app/` and `modules/`, assets in `public/` and `images/`, and Playwright specs in `apps/web/playwright/`. `apps/storybook` renders reusable UI pieces for review. Shared logic lives in `packages/*`: `database` (Prisma schemas/migrations), `surveys`, `js-core`, `types`, plus linting and TypeScript presets (`config-*`). Deployment collateral is kept in `docs/`, `docker/`, and `helm-chart/`. Unit tests sit next to their source as `*.test.ts` or inside `__tests__`.

## Build, Test & Development Commands

- `pnpm install` — install workspace dependencies pinned by `pnpm-lock.yaml`.
- `pnpm db:up` / `pnpm db:down` — start/stop the Docker services backing the app.
- `pnpm dev` — run all app and worker dev servers in parallel via Turborepo.
- `pnpm build` — generate production builds for every package and app.
- `pnpm lint` — apply the shared ESLint rules across the workspace.
- `pnpm test` / `pnpm test:coverage` — execute Vitest suites with optional coverage.
- `pnpm test:e2e` — launch the Playwright browser regression suite.
- `pnpm db:migrate:dev` — apply Prisma migrations against the dev database.

## Coding Style & Naming Conventions

TypeScript, React, and Prisma are the primary languages. Use the shared ESLint presets (`@formbricks/eslint-config`) and Prettier preset (110-char width, semicolons, double quotes, sorted import groups). Two-space indentation is standard; prefer `PascalCase` for React components and folders under `modules/`, `camelCase` for functions/variables, and `SCREAMING_SNAKE_CASE` only for constants. When adding mocks, place them inside `__mocks__` so import ordering stays stable.
We use SonarQube to identify code smells and security hotspots.

## Architecture & Patterns

- Next.js app router lives in `apps/web/app` with route groups like `(app)` and `(auth)`. Services live in `apps/web/lib`, feature modules in `apps/web/modules`.
- Server actions wrap service calls and return `{ data }` or `{ error }` consistently (see the sketch below).
- Context providers should guard against missing provider usage and use cleanup patterns that snapshot refs inside `useEffect` to avoid React hooks warnings.
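A minimal sketch of the `{ data }` / `{ error }` convention, for illustration only; the service function, its file placement, and the result type are assumptions, not actual Formbricks code:

```typescript
"use server";

// Hypothetical service-layer function; real services live in apps/web/lib.
const getSurveyName = async (surveyId: string): Promise<string> => {
  if (!surveyId) throw new Error("surveyId is required");
  return `Survey ${surveyId}`;
};

type ActionResult<T> = { data: T } | { error: string };

// The action only wraps the service call and always returns { data } or { error }.
export const getSurveyNameAction = async (surveyId: string): Promise<ActionResult<string>> => {
  try {
    return { data: await getSurveyName(surveyId) };
  } catch (e) {
    return { error: e instanceof Error ? e.message : "Unknown error" };
  }
};
```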
## Caching

- Use React `cache()` for request-level dedupe and `cache.withCache()` or explicit Redis for expensive data.
- Do not use Next.js `unstable_cache()`.
- Always use `createCacheKey.*` utilities for cache keys.
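A minimal sketch of request-level dedupe with React's `cache()`; the fetcher is hypothetical, and the project-specific Redis layer (`cache.withCache`, `createCacheKey.*`) is only referenced in a comment:

```typescript
import { cache } from "react";

// Hypothetical fetcher; the real version would query Prisma or Redis
// (wrapped in cache.withCache with a createCacheKey.* key, per the guideline above).
const fetchEnvironment = async (environmentId: string) => ({ id: environmentId });

// cache() dedupes identical calls made during a single server request/render.
export const getEnvironment = cache(async (environmentId: string) => fetchEnvironment(environmentId));
```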
## i18n (Internationalization)

- All user-facing text must use the `t()` function from `react-i18next`.
- Key naming: use lowercase with dots for nesting (e.g., `common.welcome`).
- Translations are in `apps/web/locales/`. Default is `en-US.json`.
- Lingo.dev automatically translates strings from en-US into other languages on commit. Run `pnpm i18n` to generate missing translations and validate keys.
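A small illustrative component using `t()` with a dot-nested key, following the convention above; the component name and key are placeholders:

```tsx
import { useTranslation } from "react-i18next";

// "common.welcome" would need to exist in apps/web/locales/en-US.json.
export const WelcomeBanner = () => {
  const { t } = useTranslation();
  return <h2>{t("common.welcome")}</h2>;
};
```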
## Database & Prisma Performance

- Multi-tenancy: All data must be scoped by Organization or Environment.
- Soft Deletion: Check for `isActive` or `deletedAt` fields; use proper filtering.
- Never use `skip`/`offset` with `prisma.response.count()`; only use `where`.
- Separate count and data queries and run them in parallel (`Promise.all`).
- Prefer cursor pagination for large datasets.
- When filtering by `createdAt`, include indexed fields (e.g., `surveyId` + `createdAt`), as in the sketch below.
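A minimal sketch of the parallel count/data pattern with cursor pagination; the `prisma` import path and the `response` field names are assumptions, not verified against the actual schema:

```typescript
import { prisma } from "@formbricks/database";

export const getResponsesPage = async (surveyId: string, take = 50, cursor?: string) => {
  // Scope by the indexed surveyId field; the count query takes only `where`.
  const where = { surveyId };

  const [totalCount, responses] = await Promise.all([
    prisma.response.count({ where }),
    prisma.response.findMany({
      where,
      take,
      ...(cursor ? { cursor: { id: cursor }, skip: 1 } : {}), // cursor pagination instead of offset
      orderBy: { createdAt: "desc" },
    }),
  ]);

  return { totalCount, responses, nextCursor: responses.at(-1)?.id };
};
```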
## Testing Guidelines

Prefer Vitest with Testing Library for logic in `.ts` files, keeping specs colocated with the code they exercise (`utility.test.ts`). Do not write tests for `.tsx` files—React components are covered by Playwright E2E tests instead. Mock network and storage boundaries through helpers from `@formbricks/*`. Run `pnpm test` before opening a PR and `pnpm test:coverage` when touching critical flows; keep coverage from regressing. End-to-end scenarios belong in `apps/web/playwright`, using descriptive filenames (`billing.spec.ts`) and tagging slow suites with `@slow` when necessary.

## Documentation (apps/docs)

- Add frontmatter with `title`, `description`, and `icon` at the top of the MDX file.
- Do not start with an H1; use Camel Case headings (only capitalize the feature name).
- Use Mintlify components for steps and callouts.
- If Enterprise-only, add the Enterprise note block described in the docs.

## Storybook

- Stories live in `stories.tsx` in the component folder and import from `"./index"`.
- Use `@storybook/react-vite` and organize argTypes into `Behavior`, `Appearance`, `Content`.
- Include Default, Disabled (if supported), WithIcon (if supported), all variants, and edge cases. A minimal story file is sketched below.
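A minimal story-file sketch following those conventions, assuming a hypothetical `Button` component exported from `./index`; the props and argTypes are illustrative:

```tsx
import type { Meta, StoryObj } from "@storybook/react-vite";
import { Button } from "./index";

const meta: Meta<typeof Button> = {
  component: Button,
  argTypes: {
    disabled: { control: "boolean", table: { category: "Behavior" } },
    variant: { control: "select", options: ["default", "ghost"], table: { category: "Appearance" } },
    children: { control: "text", table: { category: "Content" } },
  },
};

export default meta;
type Story = StoryObj<typeof Button>;

export const Default: Story = { args: { children: "Save" } };
export const Disabled: Story = { args: { children: "Save", disabled: true } };
```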
## GitHub Actions

- Always set minimal `permissions` for `GITHUB_TOKEN`.
- On `ubuntu-latest`, add `step-security/harden-runner` as the first step.
## Quality Checklist

- Keep code DRY and small; remove dead code and unused imports.
- Follow React hooks rules, keep effects focused, and avoid unnecessary `useMemo`/`useCallback`.
- Prefer type inference, avoid `any`, and use shared types from `@formbricks/types`.
- Keep components focused, avoid deep nesting, and ensure basic accessibility.

## Commit & Pull Request Guidelines

Commits follow a lightweight Conventional Commit format (`fix:`, `chore:`, `feat:`) and usually append the PR number, e.g. `fix: update OpenAPI schema (#6617)`. Keep commits scoped and lint-clean. Pull requests should outline the problem, summarize the solution, and link to issues or product specs. Attach screenshots or GIFs for UI-facing work, list any migrations or env changes, and paste the output of relevant commands (`pnpm test`, `pnpm lint`, `pnpm db:migrate:dev`) so reviewers can verify readiness.
@@ -14,7 +14,17 @@ Are you brimming with brilliant ideas? For new features that can elevate Formbri

## 🛠 Crafting Pull Requests

For the time being, we don't have the capacity to properly facilitate community contributions. It's a lot of engineering attention often spent on issues which don't follow our prioritization, so we've decided to only facilitate community code contributions in rare exceptions in the coming months.
Ready to dive into the code and make a real impact? Here's your path:

1. **Read our Best Practices**: [It takes 5 minutes](https://formbricks.com/docs/developer-docs/contributing/get-started) but will help you save hours 🤓

1. **Fork the Repository:** Fork our repository, or use [Gitpod](https://gitpod.io) or [Github Codespaces](https://github.com/features/codespaces) to get started instantly.

1. **Tweak and Transform:** Work your coding magic and apply your changes.

1. **Pull Request Act:** If you're ready to go, craft a new pull request closely following our PR template 🙏

Would you prefer a chat before you dive into a lot of work? [Github Discussions](https://github.com/formbricks/formbricks/discussions) is your harbor. Share your thoughts, and we'll meet you there with open arms. We're responsive and friendly, promise!

## 🚀 Aspiring Features
@@ -1,417 +0,0 @@
|
||||
# Enterprise Feature Access: Status Quo Analysis
|
||||
|
||||
## Executive Summary
|
||||
|
||||
Formbricks currently uses **two completely different mechanisms** to gate enterprise features depending on deployment type:
|
||||
|
||||
| Deployment | Gating Mechanism | Activation | Feature Control |
|
||||
|------------|------------------|------------|-----------------|
|
||||
| **Cloud** (`IS_FORMBRICKS_CLOUD=1`) | Billing Plan (`organization.billing.plan`) | Stripe subscription | Plan-based (FREE/STARTUP/CUSTOM) |
|
||||
| **On-Premise** | License Key (`ENTERPRISE_LICENSE_KEY`) | License API validation | License feature flags |
|
||||
|
||||
This dual approach creates **significant complexity**, **code duplication**, and **inconsistent behavior** across the codebase.
|
||||
|
||||
---
|
||||
|
||||
## 1. Core Architecture
|
||||
|
||||
### 1.1 Cloud (Formbricks Cloud)
|
||||
|
||||
**Source of Truth:** `organization.billing.plan`
|
||||
|
||||
```typescript
|
||||
// packages/database/zod/organizations.ts
|
||||
plan: z.enum(["free", "startup", "scale", "enterprise"]).default("free")
|
||||
```
|
||||
|
||||
**Plans and Limits:**
|
||||
- `FREE`: 3 projects, 1,500 responses/month, 2,000 MIU
|
||||
- `STARTUP`: 3 projects, 5,000 responses/month, 7,500 MIU
|
||||
- `CUSTOM`: Unlimited (negotiated limits)
|
||||
|
||||
**Activation:** Stripe webhook updates `organization.billing` on checkout/subscription events.
|
||||
|
||||
### 1.2 On-Premise (Self-Hosted)
|
||||
|
||||
**Source of Truth:** `ENTERPRISE_LICENSE_KEY` environment variable
|
||||
|
||||
**License Features Schema:**
|
||||
```typescript
|
||||
// apps/web/modules/ee/license-check/types/enterprise-license.ts
|
||||
{
|
||||
isMultiOrgEnabled: boolean,
|
||||
contacts: boolean,
|
||||
projects: number | null,
|
||||
whitelabel: boolean,
|
||||
removeBranding: boolean,
|
||||
twoFactorAuth: boolean,
|
||||
sso: boolean,
|
||||
saml: boolean,
|
||||
spamProtection: boolean,
|
||||
ai: boolean,
|
||||
auditLogs: boolean,
|
||||
multiLanguageSurveys: boolean,
|
||||
accessControl: boolean,
|
||||
quotas: boolean,
|
||||
}
|
||||
```
|
||||
|
||||
**Activation:** License key validated against `https://ee.formbricks.com/api/licenses/check` (cached for 24h, grace period of 3 days).
|
||||
|
||||
---
|
||||
|
||||
## 2. Feature Gating Patterns
|
||||
|
||||
### 2.1 Pattern A: Dual-Path Check (Most Common)
|
||||
|
||||
Features that need **both** Cloud billing **and** on-premise license checks:
|
||||
|
||||
```typescript
|
||||
// apps/web/modules/ee/license-check/lib/utils.ts
|
||||
const getFeaturePermission = async (billingPlan, featureKey) => {
|
||||
const license = await getEnterpriseLicense();
|
||||
|
||||
if (IS_FORMBRICKS_CLOUD) {
|
||||
return license.active && billingPlan !== PROJECT_FEATURE_KEYS.FREE;
|
||||
} else {
|
||||
return license.active && !!license.features?.[featureKey];
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
**Used by:**
|
||||
- `getRemoveBrandingPermission()` - Remove branding
|
||||
- `getWhiteLabelPermission()` - Whitelabel features
|
||||
- `getBiggerUploadFileSizePermission()` - Large file uploads
|
||||
- `getIsSpamProtectionEnabled()` - reCAPTCHA spam protection
|
||||
- `getMultiLanguagePermission()` - Multi-language surveys
|
||||
- `getAccessControlPermission()` - Teams & roles
|
||||
- `getIsQuotasEnabled()` - Quota management
|
||||
- `getOrganizationProjectsLimit()` - Project limits
|
||||
|
||||
### 2.2 Pattern B: License-Only Check
|
||||
|
||||
Features checked **only** against license (works same for cloud and on-premise):
|
||||
|
||||
```typescript
|
||||
// apps/web/modules/ee/license-check/lib/utils.ts
|
||||
const getSpecificFeatureFlag = async (featureKey) => {
|
||||
const licenseFeatures = await getLicenseFeatures();
|
||||
if (!licenseFeatures) return false;
|
||||
return licenseFeatures[featureKey] ?? false;
|
||||
};
|
||||
```
|
||||
|
||||
**Used by:**
|
||||
- `getIsMultiOrgEnabled()` - Multiple organizations
|
||||
- `getIsContactsEnabled()` - Contacts & segments
|
||||
- `getIsTwoFactorAuthEnabled()` - 2FA
|
||||
- `getIsSsoEnabled()` - SSO
|
||||
- `getIsAuditLogsEnabled()` - Audit logs
|
||||
|
||||
### 2.3 Pattern C: Cloud-Only (No License Check)
|
||||
|
||||
Features available only on Cloud, gated purely by billing plan:
|
||||
|
||||
```typescript
|
||||
// apps/web/modules/survey/lib/permission.ts
|
||||
export const getExternalUrlsPermission = async (billingPlan) => {
|
||||
if (IS_FORMBRICKS_CLOUD) return billingPlan !== PROJECT_FEATURE_KEYS.FREE;
|
||||
return true; // Always allowed on self-hosted
|
||||
};
|
||||
```
|
||||
|
||||
**Used by:**
|
||||
- External URLs permission
|
||||
- Survey follow-ups (Custom plan only)
|
||||
|
||||
### 2.4 Pattern D: On-Premise Only (Disabled on Cloud)
|
||||
|
||||
Features explicitly disabled on Cloud:
|
||||
|
||||
```typescript
|
||||
// apps/web/modules/ee/license-check/lib/utils.ts
|
||||
export const getIsSamlSsoEnabled = async () => {
|
||||
if (IS_FORMBRICKS_CLOUD) return false; // Never on Cloud
|
||||
const licenseFeatures = await getLicenseFeatures();
|
||||
return licenseFeatures.sso && licenseFeatures.saml;
|
||||
};
|
||||
```
|
||||
|
||||
**Used by:**
|
||||
- SAML SSO
|
||||
- Pretty URLs (slug feature)
|
||||
- Domain/Organization settings page
|
||||
|
||||
---
|
||||
|
||||
## 3. Files Using Enterprise Features
|
||||
|
||||
### 3.1 Core License/Feature Check Files
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `apps/web/modules/ee/license-check/lib/license.ts` | License fetching & caching |
|
||||
| `apps/web/modules/ee/license-check/lib/utils.ts` | Permission check functions |
|
||||
| `apps/web/modules/ee/license-check/types/enterprise-license.ts` | Type definitions |
|
||||
| `apps/web/lib/constants.ts` | `IS_FORMBRICKS_CLOUD`, `ENTERPRISE_LICENSE_KEY` |
|
||||
|
||||
### 3.2 Feature-Specific Implementation Files
|
||||
|
||||
#### Remove Branding
|
||||
- `apps/web/modules/ee/whitelabel/remove-branding/actions.ts`
|
||||
- `apps/web/modules/ee/whitelabel/remove-branding/components/branding-settings-card.tsx`
|
||||
- `apps/web/modules/projects/settings/look/page.tsx`
|
||||
- `apps/web/modules/projects/settings/actions.ts`
|
||||
|
||||
#### Whitelabel / Email Customization
|
||||
- `apps/web/modules/ee/whitelabel/email-customization/actions.ts`
|
||||
- `apps/web/app/(app)/environments/[environmentId]/settings/(organization)/general/page.tsx`
|
||||
- `apps/web/app/(app)/environments/[environmentId]/settings/(organization)/domain/page.tsx`
|
||||
|
||||
#### Multi-Language Surveys
|
||||
- `apps/web/modules/ee/multi-language-surveys/lib/actions.ts`
|
||||
- `apps/web/modules/ee/multi-language-surveys/components/*.tsx`
|
||||
- `apps/web/modules/ee/languages/page.tsx`
|
||||
|
||||
#### Contacts & Segments
|
||||
- `apps/web/modules/ee/contacts/segments/actions.ts`
|
||||
- `apps/web/modules/ee/contacts/page.tsx`
|
||||
- `apps/web/modules/ee/contacts/api/v1/**/*.ts`
|
||||
- `apps/web/modules/ee/contacts/api/v2/**/*.ts`
|
||||
|
||||
#### Teams & Access Control
|
||||
- `apps/web/modules/ee/teams/team-list/components/teams-view.tsx`
|
||||
- `apps/web/modules/ee/role-management/actions.ts`
|
||||
- `apps/web/modules/organization/settings/teams/page.tsx`
|
||||
- `apps/web/modules/organization/settings/teams/actions.ts`
|
||||
|
||||
#### SSO / SAML
|
||||
- `apps/web/modules/ee/sso/lib/sso-handlers.ts`
|
||||
- `apps/web/modules/ee/auth/saml/api/**/*.ts`
|
||||
- `apps/web/modules/ee/auth/saml/lib/*.ts`
|
||||
- `apps/web/modules/auth/lib/authOptions.ts`
|
||||
|
||||
#### Two-Factor Authentication
|
||||
- `apps/web/modules/ee/two-factor-auth/actions.ts`
|
||||
- `apps/web/modules/ee/two-factor-auth/components/*.tsx`
|
||||
|
||||
#### Quotas
|
||||
- `apps/web/modules/ee/quotas/actions.ts`
|
||||
- `apps/web/modules/ee/quotas/components/*.tsx`
|
||||
- `apps/web/modules/ee/quotas/lib/*.ts`
|
||||
|
||||
#### Audit Logs
|
||||
- `apps/web/modules/ee/audit-logs/lib/handler.ts`
|
||||
- `apps/web/modules/ee/audit-logs/lib/service.ts`
|
||||
|
||||
#### Billing (Cloud Only)
|
||||
- `apps/web/modules/ee/billing/page.tsx`
|
||||
- `apps/web/modules/ee/billing/api/lib/*.ts`
|
||||
- `apps/web/modules/ee/billing/components/*.tsx`
|
||||
|
||||
### 3.3 API Routes Using Feature Checks
|
||||
|
||||
| Route | Feature Check |
|
||||
|-------|---------------|
|
||||
| `apps/web/app/api/v1/client/[environmentId]/responses/route.ts` | Spam protection |
|
||||
| `apps/web/app/api/v2/client/[environmentId]/responses/route.ts` | Spam protection |
|
||||
| `apps/web/app/api/v1/client/[environmentId]/environment/lib/environmentState.ts` | Cloud limits |
|
||||
| `apps/web/modules/ee/contacts/api/v1/client/[environmentId]/user/route.ts` | Contacts |
|
||||
| `apps/web/modules/api/v2/management/responses/lib/response.ts` | Cloud limits |
|
||||
|
||||
### 3.4 UI Pages with Conditional Rendering
|
||||
|
||||
| Page | Condition |
|
||||
|------|-----------|
|
||||
| `apps/web/app/(app)/environments/[environmentId]/settings/(organization)/billing/` | Cloud only |
|
||||
| `apps/web/app/(app)/environments/[environmentId]/settings/(organization)/enterprise/page.tsx` | On-premise only |
|
||||
| `apps/web/app/(app)/environments/[environmentId]/settings/(organization)/domain/page.tsx` | On-premise only |
|
||||
| `apps/web/app/p/[slug]/page.tsx` (Pretty URLs) | On-premise only |
|
||||
|
||||
---
|
||||
|
||||
## 4. Configuration & Environment Variables
|
||||
|
||||
### 4.1 Key Environment Variables
|
||||
|
||||
| Variable | Purpose | Default |
|
||||
|----------|---------|---------|
|
||||
| `IS_FORMBRICKS_CLOUD` | Enables cloud mode | `"0"` |
|
||||
| `ENTERPRISE_LICENSE_KEY` | License key for on-premise | (empty) |
|
||||
| `STRIPE_SECRET_KEY` | Stripe API key (Cloud) | (empty) |
|
||||
| `AUDIT_LOG_ENABLED` | Enable audit logs | `"0"` |
|
||||
| `SAML_DATABASE_URL` | SAML configuration DB | (empty) |
|
||||
|
||||
### 4.2 Database Schema
|
||||
|
||||
```prisma
|
||||
// Organization billing stored in JSON column
|
||||
billing: {
|
||||
stripeCustomerId: string | null,
|
||||
plan: "free" | "startup" | "scale" | "enterprise",
|
||||
period: "monthly" | "yearly",
|
||||
limits: {
|
||||
projects: number | null,
|
||||
monthly: {
|
||||
responses: number | null,
|
||||
miu: number | null,
|
||||
}
|
||||
},
|
||||
periodStart: Date | null
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 5. Problems with Current Approach
|
||||
|
||||
### 5.1 Code Duplication
|
||||
|
||||
Almost every feature check function has this pattern:
|
||||
```typescript
|
||||
if (IS_FORMBRICKS_CLOUD) {
|
||||
// Check billing plan
|
||||
} else {
|
||||
// Check license feature
|
||||
}
|
||||
```
|
||||
|
||||
This is repeated in:
|
||||
- 8+ permission check functions in `utils.ts`
|
||||
- 30+ files that consume these functions
|
||||
- Multiple API routes and pages
|
||||
|
||||
### 5.2 Inconsistent Feature Gating
|
||||
|
||||
| Feature | Cloud Gating | On-Premise Gating |
|
||||
|---------|--------------|-------------------|
|
||||
| Remove Branding | `plan !== FREE` | `license.features.removeBranding` |
|
||||
| Multi-Language | `plan === CUSTOM` OR `license.multiLanguageSurveys` | `license.multiLanguageSurveys` |
|
||||
| Follow-ups | `plan === CUSTOM` | Always allowed |
|
||||
| SAML SSO | Never allowed | `license.sso && license.saml` |
|
||||
| Teams | `plan === CUSTOM` OR `license.accessControl` | `license.accessControl` |
|
||||
|
||||
### 5.3 Confusing License Requirement on Cloud
|
||||
|
||||
Cloud deployments still require `ENTERPRISE_LICENSE_KEY` to be set for enterprise features to work:
|
||||
```typescript
|
||||
// utils.ts - getFeaturePermission
|
||||
if (IS_FORMBRICKS_CLOUD) {
|
||||
return license.active && billingPlan !== PROJECT_FEATURE_KEYS.FREE;
|
||||
// ^^^^^^^^^^^^^^ Still checks license!
|
||||
}
|
||||
```
|
||||
|
||||
This means Cloud needs **both**:
|
||||
1. Active billing plan (Stripe subscription)
|
||||
2. Active enterprise license
|
||||
|
||||
### 5.4 Fallback Logic Complexity
|
||||
|
||||
```typescript
|
||||
const featureFlagFallback = async (billingPlan) => {
|
||||
const license = await getEnterpriseLicense();
|
||||
if (IS_FORMBRICKS_CLOUD) return license.active && billingPlan === PROJECT_FEATURE_KEYS.CUSTOM;
|
||||
else if (!IS_FORMBRICKS_CLOUD) return license.active;
|
||||
return false;
|
||||
};
|
||||
```
|
||||
|
||||
Features have "fallback" behavior for backwards compatibility, adding another layer of complexity.
|
||||
|
||||
### 5.5 Testing Complexity
|
||||
|
||||
Tests must mock all of:

- `IS_FORMBRICKS_CLOUD` constant
- `getEnterpriseLicense()` function
- `organization.billing.plan` in some cases

See: `apps/web/modules/ee/license-check/lib/utils.test.ts` (400+ lines of test mocking)
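
A minimal sketch of the kind of double mocking this forces, assuming Vitest; the import alias, the license shape, and the `getFeaturePermission` call signature are all assumptions for illustration:

```typescript
import { describe, expect, it, vi } from "vitest";
import { getFeaturePermission } from "@/modules/ee/license-check/lib/utils";

// Both the deployment-type constant and the license fetch have to be mocked
// before any permission helper behaves deterministically in a test.
// (vi.mock calls are hoisted above the imports by Vitest.)
vi.mock("@/lib/constants", async (importOriginal) => ({
  ...(await importOriginal<Record<string, unknown>>()),
  IS_FORMBRICKS_CLOUD: false, // duplicate the suite with `true` to cover the Cloud branch
}));

vi.mock("@/modules/ee/license-check/lib/license", () => ({
  getEnterpriseLicense: vi.fn().mockResolvedValue({
    active: true,
    features: { removeBranding: true }, // license shape assumed for illustration
  }),
}));

describe("getFeaturePermission (on-premise)", () => {
  it("grants the feature when the license is active", async () => {
    // Call signature assumed for illustration only.
    await expect(getFeaturePermission("removeBranding" as never)).resolves.toBe(true);
  });
});
```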
|
||||
|
||||
---
|
||||
|
||||
## 6. Feature Availability Matrix
|
||||
|
||||
| Feature | Free (Cloud) | Startup (Cloud) | Custom (Cloud) | No License (On-Prem) | License (On-Prem) |
|
||||
|---------|--------------|-----------------|----------------|---------------------|-------------------|
|
||||
| Remove Branding | ❌ | ✅ | ✅ | ❌ | ✅* |
|
||||
| Whitelabel | ❌ | ✅ | ✅ | ❌ | ✅* |
|
||||
| Multi-Language | ❌ | ❌ | ✅ | ❌ | ✅* |
|
||||
| Teams & Roles | ❌ | ❌ | ✅ | ❌ | ✅* |
|
||||
| Contacts | ❌ | ❌ | ❌ | ❌ | ✅* |
|
||||
| SSO (OIDC) | ❌ | ❌ | ❌ | ❌ | ✅* |
|
||||
| SAML SSO | ❌ | ❌ | ❌ | ❌ | ✅* |
|
||||
| 2FA | ❌ | ❌ | ❌ | ❌ | ✅* |
|
||||
| Audit Logs | ❌ | ❌ | ❌ | ❌ | ✅* |
|
||||
| Quotas | ❌ | ❌ | ✅ | ❌ | ✅* |
|
||||
| Spam Protection | ❌ | ❌ | ✅ | ❌ | ✅* |
|
||||
| Follow-ups | ❌ | ❌ | ✅ | ✅ | ✅ |
|
||||
| Pretty URLs | ❌ | ❌ | ❌ | ✅ | ✅ |
|
||||
| Projects Limit | 3 | 3 | Custom | 3 | Custom* |
|
||||
|
||||
*Depends on specific license feature flags
|
||||
|
||||
---
|
||||
|
||||
## 7. Recommendations for Refactoring
|
||||
|
||||
### 7.1 Unified Feature Access Layer
|
||||
|
||||
Create a single `FeatureAccess` service that abstracts the deployment type:
|
||||
|
||||
```typescript
|
||||
interface FeatureAccessService {
|
||||
canAccessFeature(feature: FeatureKey, context: AccessContext): Promise<boolean>;
|
||||
getLimit(limit: LimitKey, context: LimitContext): Promise<number>;
|
||||
}
|
||||
```
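
A sketch of what a call site could look like once such a service exists; the feature key and context shape are assumptions, since `FeatureKey` and `AccessContext` are not defined yet:

```typescript
// Hypothetical call site: no IS_FORMBRICKS_CLOUD branch, no direct license fetch.
async function assertMultiLanguageAllowed(
  access: FeatureAccessService,
  organizationId: string
): Promise<void> {
  const allowed = await access.canAccessFeature("multiLanguageSurveys", { organizationId });
  if (!allowed) {
    throw new Error("Multi-language surveys are not part of your plan or license");
  }
}
```

Whether the organization is on Cloud or On-Premise is decided once, inside the service implementation, instead of at every call site.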
|
||||
|
||||
### 7.2 Normalize Feature Flags
|
||||
|
||||
Both Cloud and On-Premise should use the same feature flag schema. Cloud billing plans should map to predefined feature sets.
|
||||
|
||||
### 7.3 Remove License Requirement from Cloud
|
||||
|
||||
Cloud should not need `ENTERPRISE_LICENSE_KEY`. The license server should be bypassed entirely, with features controlled by billing plan.
|
||||
|
||||
### 7.4 Consider Feature Entitlements
|
||||
|
||||
Move to an "entitlements" model where:
|
||||
- Cloud: Stripe subscription metadata defines entitlements
|
||||
- On-Premise: License API returns entitlements
|
||||
|
||||
Both resolve to the same `TFeatureEntitlements` type.
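
A minimal sketch of that model; the entitlement fields and the two resolver signatures are assumptions, not existing code:

```typescript
// Single shape that both deployment types resolve to (fields are illustrative).
interface TFeatureEntitlements {
  removeBranding: boolean;
  multiLanguageSurveys: boolean;
  accessControl: boolean;
  contacts: boolean;
  auditLogs: boolean;
  projectsLimit: number | null; // null = unlimited
}

// Cloud: derived from Stripe subscription / entitlement data.
declare function resolveCloudEntitlements(stripeCustomerId: string): Promise<TFeatureEntitlements>;

// On-Premise: returned by the license server for the given license key.
declare function resolveLicenseEntitlements(licenseKey: string): Promise<TFeatureEntitlements>;

export async function getEntitlements(org: {
  stripeCustomerId?: string;
  licenseKey?: string;
}): Promise<TFeatureEntitlements> {
  if (org.stripeCustomerId) return resolveCloudEntitlements(org.stripeCustomerId);
  if (org.licenseKey) return resolveLicenseEntitlements(org.licenseKey);
  // No billing and no license information: everything off, default limits.
  return {
    removeBranding: false,
    multiLanguageSurveys: false,
    accessControl: false,
    contacts: false,
    auditLogs: false,
    projectsLimit: 3,
  };
}
```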
|
||||
|
||||
---
|
||||
|
||||
## 8. Files That Would Need Changes
|
||||
|
||||
### High Priority (Core Logic)
|
||||
1. `apps/web/modules/ee/license-check/lib/license.ts`
|
||||
2. `apps/web/modules/ee/license-check/lib/utils.ts`
|
||||
3. `apps/web/lib/constants.ts`
|
||||
|
||||
### Medium Priority (Feature Implementations)
|
||||
4. All files in `apps/web/modules/ee/*/actions.ts`
|
||||
5. `apps/web/modules/environments/lib/utils.ts`
|
||||
6. `apps/web/modules/survey/lib/permission.ts`
|
||||
7. `apps/web/modules/survey/follow-ups/lib/utils.ts`
|
||||
|
||||
### Lower Priority (UI Conditional Rendering)
|
||||
8. Settings pages with `IS_FORMBRICKS_CLOUD` checks
|
||||
9. `UpgradePrompt` component usages
|
||||
10. Navigation components
|
||||
|
||||
---
|
||||
|
||||
## 9. Summary
|
||||
|
||||
The current implementation has **organic complexity** from evolving independently for Cloud and On-Premise deployments. A refactor should:
|
||||
|
||||
1. **Unify** the feature access mechanism behind a single interface
|
||||
2. **Simplify** by removing the dual-check pattern
|
||||
3. **Normalize** feature definitions across deployment types
|
||||
4. **Test** with a cleaner mocking strategy
|
||||
|
||||
This would reduce the 100+ files touching enterprise features to a single source of truth, making the codebase more maintainable and reducing bugs from inconsistent feature gating.
|
||||
@@ -1,428 +0,0 @@
|
||||
I've spent ~2 days iterating over this, setting up Stripe, building our updated pricing table, etc. So even though the formatting might suggest AI slop, it's hand-crafted and I've read every line to make sure there is no misleading information 😇
|
||||
|
||||
------
|
||||
|
||||
### Unified Billing & Feature Access
|
||||
|
||||
**Document Version:** 2.1
|
||||
**Last Updated:** January 17, 2026
|
||||
**Status:** Ready for development
|
||||
|
||||
---
|
||||
|
||||
## 1. Executive Summary
|
||||
|
||||
Formbricks Cloud needs a unified, Stripe-native approach to billing, feature entitlements, and usage metering. The current implementation has billing logic scattered throughout the codebase, making it difficult to maintain pricing consistency and add new features.
|
||||
|
||||
This PRD outlines the requirements for:
|
||||
1. Using Stripe as the single source of truth for features and billing
|
||||
2. Implementing usage-based billing with graduated pricing
|
||||
3. Giving customers control through spending caps
|
||||
|
||||
**Scope**: This initiative focuses on Formbricks Cloud. On-Premise licensing will be addressed separately.
|
||||
|
||||
---
|
||||
|
||||
## 2. Problem Statement
|
||||
|
||||
### Current Pain Points
|
||||
|
||||
1. **Scattered Billing Logic**: Feature availability is determined by code checks against `organization.billing.plan`, requiring code changes for any pricing adjustment.
|
||||
|
||||
2. **Inconsistent Feature Gating**: Different features use different patterns to check access, making it unclear what's available on each plan.
|
||||
|
||||
3. **No Usage-Based Billing**: Current plans have hard limits. Customers hitting limits must upgrade to a higher tier even if they only need slightly more capacity.
|
||||
|
||||
4. **No Spending Controls**: Customers on usage-based plans have no way to cap their spending.
|
||||
|
||||
5. **Manual Usage Tracking**: Response and user counts are tracked locally without integration to billing.
|
||||
|
||||
---
|
||||
|
||||
## 3. Goals
|
||||
|
||||
1. **Stripe as Source of Truth**: All feature entitlements and pricing come from Stripe, not hardcoded in the application.
|
||||
|
||||
2. **Usage-Based Billing**: Implement graduated pricing where customers pay for what they use beyond included amounts.
|
||||
|
||||
3. **Customer Control**: Allow customers to set spending caps to avoid unexpected charges.
|
||||
|
||||
4. **Proactive Communication**: Notify customers as they approach usage limits.
|
||||
|
||||
---
|
||||
|
||||
## 4. Feature Requirements
|
||||
|
||||
### 4.1 Stripe as Single Source of Truth
|
||||
|
||||
**Requirement**: The Formbricks instance should not contain billing or pricing logic. All feature availability must be determined by querying Stripe.
|
||||
|
||||
**What This Means**:
|
||||
- No hardcoded plan names or feature mappings in the codebase
|
||||
- No `if (plan === 'pro')` style checks
|
||||
- Feature checks query Stripe Entitlements API
|
||||
- Pricing displayed in the UI is fetched from Stripe Products/Prices (see the sketch after this list)
|
||||
- Plan changes take effect immediately via Stripe webhooks
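
A sketch of driving the public pricing table directly from Stripe Prices; the lookup keys are illustrative assumptions and `STRIPE_SECRET_KEY` is the existing env variable:

```typescript
import Stripe from "stripe";

const stripe = new Stripe(process.env.STRIPE_SECRET_KEY as string);

// Fetch the prices that drive the pricing table instead of hardcoding amounts.
// Lookup keys must match whatever is configured in Stripe; these are examples.
export async function getPricingTable() {
  const prices = await stripe.prices.list({
    lookup_keys: ["pro-monthly", "pro-yearly", "scale-monthly", "scale-yearly"],
    expand: ["data.product"],
  });

  return prices.data.map((price) => ({
    product: (price.product as Stripe.Product).name,
    lookupKey: price.lookup_key,
    unitAmount: price.unit_amount, // in cents, e.g. 8900 for $89
    interval: price.recurring?.interval,
  }));
}
```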
|
||||
|
||||
**Benefits**:
|
||||
- Change pricing without code deployment
|
||||
- Add new plans without code changes
|
||||
- A/B test pricing externally
|
||||
- Single source of truth for sales, support, and product
|
||||
|
||||
---
|
||||
|
||||
### 4.2 Stripe Entitlements for Feature Access
|
||||
|
||||
**Requirement**: Use Stripe's Entitlements API to determine which features each customer can access.
|
||||
|
||||
**How It Works**:
|
||||
1. Define Features in Stripe (see inventory below)
|
||||
2. Attach Features to Products via ProductFeature
|
||||
3. When customer subscribes, Stripe creates Active Entitlements
|
||||
4. Application checks entitlements before enabling features (see the sketch below)
5. Stripe is already set up correctly with all Products & Features ✅
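
A sketch of step 4, assuming the Stripe Entitlements API in a recent stripe-node version; the helper names and caching strategy are assumptions:

```typescript
import Stripe from "stripe";

const stripe = new Stripe(process.env.STRIPE_SECRET_KEY as string);

// Returns the set of feature lookup keys the customer is entitled to,
// e.g. { "hide-branding", "follow-ups", "rbac" }.
export async function getEntitledFeatures(stripeCustomerId: string): Promise<Set<string>> {
  const entitlements = await stripe.entitlements.activeEntitlements.list({
    customer: stripeCustomerId,
    limit: 100,
  });
  return new Set(entitlements.data.map((entitlement) => entitlement.lookup_key));
}

// Hypothetical helper used before enabling a gated feature.
export async function hasFeature(stripeCustomerId: string, lookupKey: string): Promise<boolean> {
  const features = await getEntitledFeatures(stripeCustomerId);
  return features.has(lookupKey);
}
```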
|
||||
|
||||
**Multi-Item Subscriptions Simplify Entitlements**:
|
||||
- Each plan subscription includes multiple prices (flat fee + metered usage) on the **same Product**
|
||||
- Since all prices belong to one Product, calling `stripe.entitlements.activeEntitlements.list()` returns all features for that plan automatically
|
||||
- No need to check multiple products or stitch together entitlements from separate subscriptions
|
||||
|
||||
**Feature Inventory (not up-to-date but you get the idea)**:
|
||||
|
||||
| Feature Name | Lookup Key | Description |
|
||||
|--------------|------------|-------------|
|
||||
| Hide Branding | `hide-branding` | Hide "Powered by Formbricks" |
|
||||
| API Access | `api-access` | Gates API key generation & API page access |
|
||||
| Integrations | `integrations` | Gates integrations page & configuration |
|
||||
| Custom Webhooks | `webhooks` | Webhook integrations |
|
||||
| Email Follow-ups | `follow-ups` | Automated email follow-ups |
|
||||
| Custom Links in Surveys | `custom-links-in-surveys` | Custom links within surveys |
|
||||
| Custom Redirect URL | `custom-redirect-url` | Custom thank-you redirects |
|
||||
| Two Factor Auth | `two-fa` | 2FA for user accounts |
|
||||
| Contacts & Segments | `contacts` | Contact management & segmentation |
|
||||
| Teams & Access Roles | `rbac` | Team-based permissions |
|
||||
| Quota Management | `quota-management` | Response quota controls |
|
||||
| Spam Protection | `spam-protection` | reCAPTCHA integration |
|
||||
| Workspace Limit 1 | `workspace-limit-1` | Up to 1 workspace |
|
||||
| Workspace Limit 3 | `workspace-limit-3` | Up to 3 workspaces |
|
||||
| Workspace Limit 5 | `workspace-limit-5` | Up to 5 workspaces |
|
||||
|
||||
<img width="915" height="827" alt="Image" src="https://github.com/user-attachments/assets/1f0e17b5-82c3-475c-9c05-968fdc51e948" />
|
||||
|
||||
---
|
||||
|
||||
### 4.3 Plan Structure
|
||||
|
||||
**Plans & Pricing**:
|
||||
|
||||
| Plan | Monthly Price | Annual Price | Savings |
|
||||
|------|---------------|--------------|---------|
|
||||
| **Hobby** | Free | Free | — |
|
||||
| **Pro** | $89/month | $890/year | 2 months free |
|
||||
| **Scale** | $390/month | $3,900/year | 2 months free |
|
||||
|
||||
**Usage Limits**:
|
||||
|
||||
| Plan | Workspaces | Responses/mo | Contacts/mo | Overage Billing |
|
||||
|------|------------|--------------|-------------|-----------------|
|
||||
| **Hobby** | 1 | 250 | — | No |
|
||||
| **Pro** | 3 | 2,000 | 5,000 | Yes |
|
||||
| **Scale** | 5 | 5,000 | 10,000 | Yes |
|
||||
|
||||
**Note**: Hobby plan does not include Respondent Identification or Contact Management. Overage billing is only available on Pro and Scale plans.
|
||||
|
||||
<img width="1205" height="955" alt="Image" src="https://github.com/user-attachments/assets/047d4097-f7ee-4022-920a-e2cbeb8ceb5d" />
|
||||
|
||||
---
|
||||
|
||||
### 4.4 Restricted Features (Hobby & Trial Exclusions)
|
||||
|
||||
**Requirement**: Certain high-risk features must be excluded from the Free (Hobby) plan AND from Trial users to prevent fraud and abuse. Other features are included in the Trial to maximize conversion.
|
||||
|
||||
**Restricted Features (blocked from Hobby + Trial)**:
|
||||
|
||||
| Feature | Lookup Key | Abuse Risk | Why Restricted |
|
||||
|---------|------------|------------|----------------|
|
||||
| Custom Redirect URL | `custom-redirect-url` | High | Phishing redirects after survey |
|
||||
| Custom Links in Surveys | `custom-links-in-surveys` | High | Malicious link distribution in survey content |
|
||||
|
||||
**Trial-Included Features (to drive conversion)**:
|
||||
|
||||
| Feature | Lookup Key | Why Included in Trial |
|
||||
|---------|------------|----------------------|
|
||||
| Webhooks | `webhooks` | Low abuse risk, high setup effort = conversion driver |
|
||||
| API Access | `api-access` | Low abuse risk, high integration value |
|
||||
| Integrations | `integrations` | Low abuse risk, high integration value |
|
||||
| Email Follow-ups | `follow-ups` | Requires email verification, monitored |
|
||||
| Hide Branding | `hide-branding` | No abuse risk, strong conversion driver |
|
||||
| RBAC | `rbac` | No abuse risk, team adoption driver |
|
||||
| Spam Protection | `spam-protection` | Actually prevents abuse |
|
||||
| Quota Management | `quota-management` | Administrative feature |
|
||||
|
||||
**Implementation**:
|
||||
- Restricted features are NOT attached to Hobby or Trial products in Stripe
|
||||
- Trial includes most Pro/Scale features to maximize value demonstration
|
||||
- Application checks entitlements via the Stripe API; if a feature is not present, show the existing upgrade UI
|
||||
|
||||
---
|
||||
|
||||
### 4.5 Usage-Based Billing with Graduated Pricing
|
||||
|
||||
<img width="1041" height="125" alt="Image" src="https://github.com/user-attachments/assets/f12a56da-89d2-4784-b3c0-6c55dbee85e6" />
|
||||
|
||||
**Requirement**: Implement usage-based billing where customers pay a base fee that includes a usage allowance, with flat overage pricing.
|
||||
|
||||
**Metrics to Meter**:
|
||||
|
||||
| Metric | Event Name | Description |
|
||||
|--------|------------|-------------|
|
||||
| **Responses** | `response_created` | Survey responses submitted |
|
||||
| **Identified Contacts** | `unique_contact_identified` | Unique contacts identified per month |
|
||||
|
||||
**Identified Contacts Definition (ON HOLD)**:
|
||||
An identified contact is one that has been identified in the current billing period via:
|
||||
- SDK call: `formbricks.setUserId(userId)`
|
||||
- Personal Survey Link access
|
||||
- This is OUT OF SCOPE for the first iteration so it does not become a blocker. We can add it once everything else works end-to-end
|
||||
|
||||
**Counting Rules**:
|
||||
- Each contact identification counts (even if same contact identified multiple times via different methods)
|
||||
- Same contact re-accessing their personal link = 1 count (same contact)
|
||||
- Billing period is monthly (even for annual subscribers)
|
||||
- Meter events sent immediately (real-time); see the sketch after this list
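
A sketch of sending such a meter event, assuming Stripe's Billing Meter Events API in a recent stripe-node version; the retry handling and helper name are assumptions:

```typescript
import Stripe from "stripe";

const stripe = new Stripe(process.env.STRIPE_SECRET_KEY as string);

// Report a metered response as soon as it is stored.
// Failures should be queued for retry (at-least-once delivery, see section 6.2).
export async function reportResponseCreated(stripeCustomerId: string): Promise<void> {
  await stripe.billing.meterEvents.create({
    event_name: "response_created",
    payload: {
      stripe_customer_id: stripeCustomerId,
      value: "1", // meter event payload values are strings
    },
  });
}
```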
|
||||
|
||||
**Hard Limits via Stripe Metering**:
|
||||
- Usage is metered through Stripe for billing AND enforcement
|
||||
- When included usage is exhausted, overage rates apply
|
||||
- No separate local limit enforcement needed
|
||||
|
||||
---
|
||||
|
||||
### 4.6 Spending Caps
|
||||
|
||||
**Requirement**: Customers must be able to set a maximum monthly spend for usage-based charges.
|
||||
|
||||
**Behavior**:
|
||||
|
||||
| Cap Setting | Effect |
|
||||
|-------------|--------|
|
||||
| No cap (default) | Usage billed without limit |
|
||||
| Cap with "Warn" | Notifications sent, billing continues |
|
||||
| Cap with "Pause" | Surveys paused when cap reached |
|
||||
|
||||
**Configuration**:
|
||||
- Minimum spending cap: **$10**
|
||||
- No grace period when cap is hit
|
||||
- Immediate pause if "Pause" mode selected
|
||||
- Stripe does not provide spending caps out of the box; this is something we need to build ourselves (see the sketch below)
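
A minimal sketch of the custom cap check, assuming we track (or derive) the month-to-date overage cost ourselves; all names here are hypothetical:

```typescript
type CapMode = "warn" | "pause";

interface SpendingCap {
  amountUsd: number; // minimum 10
  mode: CapMode;
}

// Hypothetical enforcement hook, called after each metered event is recorded.
export async function enforceSpendingCap(
  cap: SpendingCap | null,
  monthToDateOverageUsd: number,
  pauseSurveys: () => Promise<void>,
  notifyBillingContacts: (message: string) => Promise<void>
): Promise<void> {
  if (!cap || monthToDateOverageUsd < cap.amountUsd) return;

  await notifyBillingContacts(
    `Your spending cap of $${cap.amountUsd} has been reached ($${monthToDateOverageUsd.toFixed(2)} month to date).`
  );

  if (cap.mode === "pause") {
    // No grace period: stop collecting responses immediately.
    await pauseSurveys();
  }
}
```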
|
||||
|
||||
**When Cap is Reached (Pause mode)**:
|
||||
- All surveys for the organization stop collecting responses (needs to be implemented)
|
||||
- Existing responses are preserved
|
||||
- In-app banner explains the pause
|
||||
- Email notification sent to billing contacts
|
||||
- Owner can lift pause or increase cap
|
||||
|
||||
<img width="925" height="501" alt="Image" src="https://github.com/user-attachments/assets/511d1ec6-4550-4aec-8f31-ab68e8c9e383" />
|
||||
|
||||
_The Pause vs. Warn mode selection is still missing in this UI._
|
||||
|
||||
---
|
||||
|
||||
### 4.7 Usage Alerts via Stripe Meter Alerts
|
||||
|
||||
**Requirement**: Proactively notify customers as they approach their included usage limits.
|
||||
|
||||
**Alert Thresholds**:
|
||||
|
||||
| Threshold | Notification |
|
||||
|-----------|--------------|
|
||||
| 80% of included | Email notification |
|
||||
| 90% of included | Email + in-app banner |
|
||||
| 100% of included | Email + in-app + (if cap) action |
|
||||
|
||||
**Notification Content**:
|
||||
- Current usage vs included amount
|
||||
- What happens next (overage pricing applies)
|
||||
- Link to upgrade or adjust spending cap
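
A sketch of registering one of the thresholds above as a Stripe Meter Alert; the parameter names follow the Billing Alerts API as currently documented and should be treated as an assumption to verify against the stripe-node types in use, and the meter ID and threshold value are illustrative:

```typescript
import Stripe from "stripe";

const stripe = new Stripe(process.env.STRIPE_SECRET_KEY as string);

// Register a one-time alert at 80% of the included monthly responses (Pro: 2,000 * 0.8).
// Stripe emits a `billing.alert.triggered` event when the threshold is crossed,
// which we turn into the email / in-app notification described above.
export async function createResponseUsageAlert(responsesMeterId: string): Promise<void> {
  await stripe.billing.alerts.create({
    title: "Responses at 80% of included usage",
    alert_type: "usage_threshold",
    usage_threshold: {
      meter: responsesMeterId,
      gte: 1600,
      recurrence: "one_time",
    },
  });
}
```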
|
||||
|
||||
<img width="415" height="348" alt="Image" src="https://github.com/user-attachments/assets/7bd990b4-7150-4357-af84-9c5e98f75140" />
|
||||
|
||||
---
|
||||
|
||||
### 4.8 Annual Billing with Monthly Limits
|
||||
|
||||
**Requirement**: Support annual payment option while keeping all usage limits monthly.
|
||||
|
||||
**Behavior**:
|
||||
- Annual subscribers pay upfront for 12 months
|
||||
- **2 months free** discount (annual = 10x monthly price)
|
||||
- Usage limits reset monthly (same as monthly subscribers)
|
||||
- Overage is billed monthly
|
||||
- Example: Annual Pro pays $890/year, gets 2,000 responses/month every month
|
||||
|
||||
<img width="1033" height="429" alt="Image" src="https://github.com/user-attachments/assets/58df55c7-e20f-448c-953d-e62c57268421" />
|
||||
|
||||
---
|
||||
|
||||
### 4.9 Reverse Trial Experience
|
||||
|
||||
**Requirement**: New users should experience premium features immediately through a Reverse Trial model.
|
||||
|
||||
**Trigger**: We have UI in place that prompts new users to opt into the free trial
|
||||
|
||||
**Trial Terms**:
|
||||
- Duration: 14 days
|
||||
- Features: Enroll in the Trial Product (free)
- Limits: Still to be defined how we enforce these; we need to check what the Stripe API offers us. Probably a dedicated Trial Meter
|
||||
- No payment required to start
|
||||
- Stripe customer created immediately (for metering)
|
||||
|
||||
**Post-Trial (No Conversion)**:
|
||||
- Downgrade to Hobby (Free) tier immediately
|
||||
- Pro features disabled immediately
|
||||
- Data preserved but locked behind upgrade
|
||||
|
||||
---
|
||||
|
||||
### 4.10 Stripe Customer Creation on Signup
|
||||
|
||||
**Requirement**: Create a Stripe customer immediately when a new organization is created.
|
||||
|
||||
**Rationale**:
|
||||
- Enables usage metering from day one
|
||||
- Stripe handles hard limits via metering
|
||||
- Simplifies upgrade flow (customer already exists)
|
||||
|
||||
**What Gets Created**:
|
||||
- Stripe Customer with organization ID in metadata
|
||||
- No subscription (Hobby tier has no subscription)
|
||||
- No payment method (added on first upgrade)
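
A sketch of that signup hook; the organization shape and where this gets called from are assumptions:

```typescript
import Stripe from "stripe";

const stripe = new Stripe(process.env.STRIPE_SECRET_KEY as string);

// Called right after an organization is created.
// No subscription and no payment method yet; the customer exists only so that
// usage can be metered from day one and the later upgrade flow has a target.
export async function createStripeCustomerForOrganization(organization: {
  id: string;
  name: string;
  ownerEmail: string;
}): Promise<string> {
  const customer = await stripe.customers.create({
    name: organization.name,
    email: organization.ownerEmail,
    metadata: { organizationId: organization.id },
  });
  return customer.id; // persist on organization.billing.stripeCustomerId
}
```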
|
||||
|
||||
---
|
||||
|
||||
### 5.1 Subscription Architecture: Multi-Item Subscriptions
|
||||
|
||||
**Key Insight**: Each plan uses a **Subscription with Multiple Items**: one flat-fee price plus metered usage prices, all belonging to the same Product. This lets us charge the flat base fee and meter and bill each usage item on a single subscription.
|
||||
|
||||
**How It Works**:
|
||||
|
||||
```javascript
|
||||
// Creating a Pro subscription with flat fee + usage metering
|
||||
const subscription = await stripe.subscriptions.create({
|
||||
customer: 'cus_12345',
|
||||
items: [
|
||||
{ price: 'price_pro_monthly' }, // $89/mo flat fee
|
||||
{ price: 'price_pro_responses_usage' }, // Metered responses
|
||||
{ price: 'price_pro_contacts_usage' }, // Metered contacts
|
||||
],
|
||||
});
|
||||
```
|
||||
|
||||
**Why This Matters for Entitlements**:
|
||||
- All prices belong to the **same Product** (e.g., `prod_ProPlan`)
|
||||
- Stripe Entitlements API automatically returns all features attached to that Product
|
||||
- No need to check multiple products or subscriptions
|
||||
- Single source of truth for feature access
|
||||
|
||||
**What Customers See** (Single Invoice):
|
||||
|
||||
| Description | Qty | Amount |
|
||||
|-------------|-----|--------|
|
||||
| Pro Plan (Jan 1 - Feb 1) | 1 | $89.00 |
|
||||
| Pro Plan - Responses (Jan 1 - Feb 1) | 2,500 (First 2,000 included) | $40.00 |
|
||||
| Pro Plan - Contacts (Jan 1 - Feb 1) | 2,500 (All included) | $0.00 |
|
||||
| **Total** | | **$129.00** |
|
||||
|
||||
### 5.2 Products & Prices
|
||||
|
||||
Each plan Product contains multiple Prices:
|
||||
|
||||
| Product | Stripe ID | Prices |
|
||||
|---------|-----------|--------|
|
||||
| **Hobby Tier** | `prod_ToYKB5ESOMZZk5` | Free (no subscription required) |
|
||||
| **Pro Tier** | `prod_ToYKQ8WxS3ecgf` | `price_pro_monthly` ($89), `price_pro_yearly` ($890), `price_pro_usage_responses`, `price_pro_usage_contacts` |
|
||||
| **Scale Tier** | `prod_ToYLW5uCQTMa6v` | `price_scale_monthly` ($390), `price_scale_yearly` ($3,900), `price_scale_usage_responses`, `price_scale_usage_contacts` |
|
||||
| **Trial Tier** | `prod_TodVcJiEnK5ABK` | `price_trial_free` ($0), metered prices for tracking |
|
||||
|
||||
**Note**: Response and Contact metered prices use **graduated tiers** where the first N units are included (priced at $0), then overage rates apply.
|
||||
|
||||
|
||||
---
|
||||
|
||||
## 6. Non-Functional Requirements
|
||||
|
||||
### 6.1 Performance
|
||||
|
||||
- Entitlement checks: <100ms (p50), <200ms (p99) with caching
|
||||
- Usage metering: Non-blocking, immediate send
|
||||
- Spending cap checks: <50ms
|
||||
|
||||
### 6.2 Reliability
|
||||
|
||||
- Stripe unavailable: Use cached entitlements (max 5 min stale; see the sketch after this list)
|
||||
- Meter event fails: Queue for retry (at-least-once delivery)
|
||||
- Webhook missed: Entitlements auto-refresh on access
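
A minimal sketch of that cache, assuming an in-memory map keyed by customer (a shared cache such as Redis would follow the same pattern); the fetch function is the hypothetical entitlements helper sketched in section 4.2:

```typescript
const ENTITLEMENT_TTL_MS = 5 * 60 * 1000; // max 5 minutes stale

interface CachedEntitlements {
  features: Set<string>;
  fetchedAt: number;
}

const cache = new Map<string, CachedEntitlements>();

export async function getEntitlementsCached(
  stripeCustomerId: string,
  fetchFromStripe: (customerId: string) => Promise<Set<string>>
): Promise<Set<string>> {
  const cached = cache.get(stripeCustomerId);
  if (cached && Date.now() - cached.fetchedAt < ENTITLEMENT_TTL_MS) {
    return cached.features;
  }

  try {
    const features = await fetchFromStripe(stripeCustomerId);
    cache.set(stripeCustomerId, { features, fetchedAt: Date.now() });
    return features;
  } catch (error) {
    // Stripe unavailable: fall back to the stale cache instead of failing closed.
    if (cached) return cached.features;
    throw error;
  }
}
```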
|
||||
|
||||
### 6.3 Data Consistency
|
||||
|
||||
- Stripe is source of truth
|
||||
- Local `organization.billing` is a cache only
|
||||
- Cache invalidated via webhooks (see the sketch below)
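
A sketch of the webhook-driven invalidation, assuming a Next.js route handler; the handled event types are the ones most relevant here and `invalidateBillingCache` is a hypothetical helper:

```typescript
import Stripe from "stripe";

const stripe = new Stripe(process.env.STRIPE_SECRET_KEY as string);

// Hypothetical invalidation of the cached organization.billing / entitlements.
declare function invalidateBillingCache(stripeCustomerId: string): Promise<void>;

export async function POST(request: Request): Promise<Response> {
  const signature = request.headers.get("stripe-signature") ?? "";
  const payload = await request.text();

  let event: Stripe.Event;
  try {
    event = stripe.webhooks.constructEvent(
      payload,
      signature,
      process.env.STRIPE_WEBHOOK_SECRET as string
    );
  } catch {
    return new Response("Invalid signature", { status: 400 });
  }

  switch (event.type) {
    case "customer.subscription.created":
    case "customer.subscription.updated":
    case "customer.subscription.deleted": {
      const subscription = event.data.object as Stripe.Subscription;
      await invalidateBillingCache(subscription.customer as string);
      break;
    }
    default:
      break; // other events are ignored; local data stays a cache only
  }

  return new Response("ok", { status: 200 });
}
```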
|
||||
|
||||
---
|
||||
|
||||
## 7. Migration Considerations
|
||||
|
||||
### Existing Customers with Custom Limits
|
||||
|
||||
**Problem**: Some existing customers have negotiated custom limits that don't fit the new plan structure.
|
||||
|
||||
**Approach**: Grandfather indefinitely on legacy pricing until they choose to migrate.
|
||||
|
||||
- Existing customers keep their current plans and limits
|
||||
- No forced migration
|
||||
- New billing system only applies to new signups and customers who voluntarily upgrade/change plans
|
||||
- Legacy customers get a simplified view with the new usage meters UI and the "Manage subscription" button. We hide all other UI and prompt them to reach out to Support to change their pricing (Alert component)
|
||||
|
||||
---
|
||||
|
||||
## 8. Key Decisions
|
||||
|
||||
| Topic | Decision |
|
||||
|-------|----------|
|
||||
| Free tier metering | Use Stripe for hard limits (no local enforcement) |
|
||||
| Annual discount | 2 months free |
|
||||
| Minimum spending cap | $10 |
|
||||
| Cap grace period | None (immediate) |
|
||||
| Contact identification counting | Each identification counts |
|
||||
| Personal link re-access | Same contact = 1 count |
|
||||
| Downgrade behavior for restricted features | Immediate disable |
|
||||
| Meter event timing | Immediate (real-time) |
|
||||
| Currency | USD only |
|
||||
| Default spending cap | No cap |
|
||||
| Overage visibility | Billing page |
|
||||
| Migration for custom limits | Grandfather indefinitely |
|
||||
|
||||
---
|
||||
|
||||
## 9. Out of Scope
|
||||
|
||||
1. **On-Premise licensing**: Will be addressed separately
|
||||
2. **Self-serve downgrade**: Handled via Stripe Customer Portal
|
||||
3. **Refunds**: Handled via Stripe Dashboard
|
||||
4. **Tax calculation**: Handled by Stripe Tax
|
||||
5. **Invoice customization**: Handled via Stripe settings
|
||||
|
||||
---
|
||||
|
||||
## 10. Setting up
|
||||
|
||||
**Stripe** Sandbox Cloud: Dev
|
||||
|
||||
<img width="300" height="180" alt="Image" src="https://github.com/user-attachments/assets/3e6a7fb6-8efb-4cb5-acf0-ccc2a5efabd2" />
|
||||
|
||||
In this branch you find:

- All of the dummy UI screenshotted here. Make sure to clean it up once the real implementation has landed (it only contains dummy UI code)
- A comprehensive analysis of our current, inconsistent feature flagging called ENTERPRISE_FEATURE_ANALYSIS
|
||||
2
LICENSE
@@ -3,7 +3,7 @@ Copyright (c) 2024 Formbricks GmbH
|
||||
Portions of this software are licensed as follows:
|
||||
|
||||
- All content that resides under the "apps/web/modules/ee" directory of this repository, if these directories exist, is licensed under the license defined in "apps/web/modules/ee/LICENSE".
|
||||
- All content that resides under the "packages/js/", "packages/android/", "packages/ios/" and "packages/api/" directories of this repository, if that directories exist, is licensed under the "MIT" license as defined in the "LICENSE" files of these packages.
|
||||
- All content that resides under the "packages/js/", "packages/react-native/" and "packages/api/" directories of this repository, if that directories exist, is licensed under the "MIT" license as defined in the "LICENSE" files of these packages.
|
||||
- All third party components incorporated into the Formbricks Software are licensed under the original license provided by the owner of the applicable component.
|
||||
- Content outside of the above mentioned directories or restrictions above is available under the "AGPLv3" license as defined below.
|
||||
|
||||
|
||||
11
README.md
@@ -21,7 +21,6 @@ The Open Source Qualtrics Alternative
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/formbricks/formbricks/blob/main/LICENSE"><img src="https://img.shields.io/badge/License-AGPL-purple" alt="License"></a> <a href="https://github.com/formbricks/formbricks/stargazers"><img src="https://img.shields.io/github/stars/formbricks/formbricks?logo=github" alt="Github Stars"></a>
|
||||
<a href="https://insights.linuxfoundation.org/project/formbricks"><img src="https://insights.linuxfoundation.org/api/badge/health-score?project=formbricks"></a>
|
||||
<a href="https://news.ycombinator.com/item?id=32303986"><img src="https://img.shields.io/badge/Hacker%20News-122-%23FF6600" alt="Hacker News"></a>
|
||||
<a href="[https://www.producthunt.com/products/formbricks](https://www.producthunt.com/posts/formbricks)"><img src="https://img.shields.io/badge/Product%20Hunt-455-orange?logo=producthunt&logoColor=%23fff" alt="Product Hunt"></a>
|
||||
<a href="https://github.blog/2023-04-12-github-accelerator-our-first-cohort-and-whats-next/"><img src="https://img.shields.io/badge/2023-blue?logo=github&label=Github%20Accelerator" alt="Github Accelerator"></a>
|
||||
@@ -193,7 +192,7 @@ Here are a few options:
|
||||
|
||||
- Upvote issues with 👍 reaction so we know what the demand for a particular issue is to prioritize it within the roadmap.
|
||||
|
||||
- Note: For the time being, we can only facilitate code contributions as an exception.
|
||||
Please check out [our contribution guide](https://formbricks.com/docs/developer-docs/contributing/get-started) and our [list of open issues](https://github.com/formbricks/formbricks/issues) for more information.
|
||||
|
||||
## All Thanks To Our Contributors
|
||||
|
||||
@@ -203,14 +202,6 @@ Here are a few options:
|
||||
|
||||
</a>
|
||||
|
||||
## Thanks
|
||||
|
||||
Formbricks is supported by the following companies who provide us with their tools for free as part of their open-source support:
|
||||
|
||||
<a href="https://www.chromatic.com/"><img src="https://user-images.githubusercontent.com/321738/84662277-e3db4f80-af1b-11ea-88f5-91d67a5e59f6.png" width="153" height="30" alt="Chromatic" /></a>
|
||||
|
||||
<a href="https://sentry.io/"><img src="https://github.com/user-attachments/assets/d743ffd4-b575-4802-a29a-10136be9227e" width="150" height="30" alt="Sentry" /></a>
|
||||
|
||||
<a id="contact-us"></a>
|
||||
|
||||
## 📆 Contact us
|
||||
|
||||
2
apps/demo-react-native/.env.example
Normal file
@@ -0,0 +1,2 @@
|
||||
EXPO_PUBLIC_APP_URL=http://192.168.0.197:3000
|
||||
EXPO_PUBLIC_FORMBRICKS_ENVIRONMENT_ID=cm5p0cs7r000819182b32j0a1
|
||||
@@ -1,5 +1,5 @@
|
||||
module.exports = {
|
||||
extends: ["@formbricks/eslint-config/library.js"],
|
||||
extends: ["@formbricks/eslint-config/react.js"],
|
||||
parserOptions: {
|
||||
project: "tsconfig.json",
|
||||
tsconfigRootDir: __dirname,
|
||||
35
apps/demo-react-native/.gitignore
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
# Learn more https://docs.github.com/en/get-started/getting-started-with-git/ignoring-files
|
||||
|
||||
# dependencies
|
||||
node_modules/
|
||||
|
||||
# Expo
|
||||
.expo/
|
||||
dist/
|
||||
web-build/
|
||||
|
||||
# Native
|
||||
*.orig.*
|
||||
*.jks
|
||||
*.p8
|
||||
*.p12
|
||||
*.key
|
||||
*.mobileprovision
|
||||
|
||||
# Metro
|
||||
.metro-health-check*
|
||||
|
||||
# debug
|
||||
npm-debug.*
|
||||
yarn-debug.*
|
||||
yarn-error.*
|
||||
|
||||
# macOS
|
||||
.DS_Store
|
||||
*.pem
|
||||
|
||||
# local env files
|
||||
.env*.local
|
||||
|
||||
# typescript
|
||||
*.tsbuildinfo
|
||||
0
apps/demo-react-native/.npmrc
Normal file
35
apps/demo-react-native/app.json
Normal file
@@ -0,0 +1,35 @@
|
||||
{
|
||||
"expo": {
|
||||
"android": {
|
||||
"adaptiveIcon": {
|
||||
"backgroundColor": "#ffffff",
|
||||
"foregroundImage": "./assets/adaptive-icon.png"
|
||||
}
|
||||
},
|
||||
"assetBundlePatterns": ["**/*"],
|
||||
"icon": "./assets/icon.png",
|
||||
"ios": {
|
||||
"infoPlist": {
|
||||
"NSCameraUsageDescription": "Take pictures for certain activities.",
|
||||
"NSMicrophoneUsageDescription": "Need microphone access for recording videos.",
|
||||
"NSPhotoLibraryUsageDescription": "Select pictures for certain activities."
|
||||
},
|
||||
"supportsTablet": true
|
||||
},
|
||||
"jsEngine": "hermes",
|
||||
"name": "react-native-demo",
|
||||
"newArchEnabled": true,
|
||||
"orientation": "portrait",
|
||||
"slug": "react-native-demo",
|
||||
"splash": {
|
||||
"backgroundColor": "#ffffff",
|
||||
"image": "./assets/splash.png",
|
||||
"resizeMode": "contain"
|
||||
},
|
||||
"userInterfaceStyle": "light",
|
||||
"version": "1.0.0",
|
||||
"web": {
|
||||
"favicon": "./assets/favicon.png"
|
||||
}
|
||||
}
|
||||
}
|
||||
BIN
apps/demo-react-native/assets/adaptive-icon.png
Normal file
|
After Width: | Height: | Size: 17 KiB |
BIN
apps/demo-react-native/assets/favicon.png
Normal file
|
After Width: | Height: | Size: 1.4 KiB |
BIN
apps/demo-react-native/assets/icon.png
Normal file
|
After Width: | Height: | Size: 22 KiB |
BIN
apps/demo-react-native/assets/splash.png
Normal file
|
After Width: | Height: | Size: 46 KiB |
6
apps/demo-react-native/babel.config.js
Normal file
@@ -0,0 +1,6 @@
|
||||
module.exports = function babel(api) {
|
||||
api.cache(true);
|
||||
return {
|
||||
presets: ["babel-preset-expo"],
|
||||
};
|
||||
};
|
||||
7
apps/demo-react-native/index.js
Normal file
@@ -0,0 +1,7 @@
|
||||
import { registerRootComponent } from "expo";
|
||||
import { LogBox } from "react-native";
|
||||
import App from "./src/app";
|
||||
|
||||
registerRootComponent(App);
|
||||
|
||||
LogBox.ignoreAllLogs();
|
||||
21
apps/demo-react-native/metro.config.js
Normal file
@@ -0,0 +1,21 @@
|
||||
// Learn more https://docs.expo.io/guides/customizing-metro
|
||||
const path = require("node:path");
|
||||
const { getDefaultConfig } = require("expo/metro-config");
|
||||
|
||||
// Find the workspace root, this can be replaced with `find-yarn-workspace-root`
|
||||
const workspaceRoot = path.resolve(__dirname, "../..");
|
||||
const projectRoot = __dirname;
|
||||
|
||||
const config = getDefaultConfig(projectRoot);
|
||||
|
||||
// 1. Watch all files within the monorepo
|
||||
config.watchFolders = [workspaceRoot];
|
||||
// 2. Let Metro know where to resolve packages, and in what order
|
||||
config.resolver.nodeModulesPaths = [
|
||||
path.resolve(projectRoot, "node_modules"),
|
||||
path.resolve(workspaceRoot, "node_modules"),
|
||||
];
|
||||
// 3. Force Metro to resolve (sub)dependencies only from the `nodeModulesPaths`
|
||||
config.resolver.disableHierarchicalLookup = true;
|
||||
|
||||
module.exports = config;
|
||||
30
apps/demo-react-native/package.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"name": "@formbricks/demo-react-native",
|
||||
"version": "1.0.0",
|
||||
"main": "./index.js",
|
||||
"scripts": {
|
||||
"dev": "expo start",
|
||||
"android": "expo start --android",
|
||||
"ios": "expo start --ios",
|
||||
"web": "expo start --web",
|
||||
"eject": "expo eject",
|
||||
"clean": "rimraf .turbo node_modules .expo"
|
||||
},
|
||||
"dependencies": {
|
||||
"@formbricks/js": "workspace:*",
|
||||
"@formbricks/react-native": "workspace:*",
|
||||
"@react-native-async-storage/async-storage": "2.1.0",
|
||||
"expo": "52.0.28",
|
||||
"expo-status-bar": "2.0.1",
|
||||
"react": "18.3.1",
|
||||
"react-dom": "18.3.1",
|
||||
"react-native": "0.76.6",
|
||||
"react-native-webview": "13.12.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "7.26.0",
|
||||
"@types/react": "18.3.18",
|
||||
"typescript": "5.7.2"
|
||||
},
|
||||
"private": true
|
||||
}
|
||||
117
apps/demo-react-native/src/app.tsx
Normal file
@@ -0,0 +1,117 @@
|
||||
import { StatusBar } from "expo-status-bar";
|
||||
import React, { type JSX } from "react";
|
||||
import { Button, LogBox, StyleSheet, Text, View } from "react-native";
|
||||
import Formbricks, {
|
||||
logout,
|
||||
setAttribute,
|
||||
setAttributes,
|
||||
setLanguage,
|
||||
setUserId,
|
||||
track,
|
||||
} from "@formbricks/react-native";
|
||||
|
||||
LogBox.ignoreAllLogs();
|
||||
|
||||
export default function App(): JSX.Element {
|
||||
if (!process.env.EXPO_PUBLIC_FORMBRICKS_ENVIRONMENT_ID) {
|
||||
throw new Error("EXPO_PUBLIC_FORMBRICKS_ENVIRONMENT_ID is required");
|
||||
}
|
||||
|
||||
if (!process.env.EXPO_PUBLIC_APP_URL) {
|
||||
throw new Error("EXPO_PUBLIC_APP_URL is required");
|
||||
}
|
||||
|
||||
return (
|
||||
<View style={styles.container}>
|
||||
<Text>Formbricks React Native SDK Demo</Text>
|
||||
|
||||
<View
|
||||
style={{
|
||||
display: "flex",
|
||||
flexDirection: "column",
|
||||
gap: 10,
|
||||
}}>
|
||||
<Button
|
||||
title="Trigger Code Action"
|
||||
onPress={() => {
|
||||
track("code").catch((error: unknown) => {
|
||||
// eslint-disable-next-line no-console -- logging is allowed in demo apps
|
||||
console.error("Error tracking event:", error);
|
||||
});
|
||||
}}
|
||||
/>
|
||||
|
||||
<Button
|
||||
title="Set User Id"
|
||||
onPress={() => {
|
||||
setUserId("random-user-id").catch((error: unknown) => {
|
||||
// eslint-disable-next-line no-console -- logging is allowed in demo apps
|
||||
console.error("Error setting user id:", error);
|
||||
});
|
||||
}}
|
||||
/>
|
||||
|
||||
<Button
|
||||
title="Set User Attributess (multiple)"
|
||||
onPress={() => {
|
||||
setAttributes({
|
||||
testAttr: "attr-test",
|
||||
testAttr2: "attr-test-2",
|
||||
testAttr3: "attr-test-3",
|
||||
testAttr4: "attr-test-4",
|
||||
}).catch((error: unknown) => {
|
||||
// eslint-disable-next-line no-console -- logging is allowed in demo apps
|
||||
console.error("Error setting user attributes:", error);
|
||||
});
|
||||
}}
|
||||
/>
|
||||
|
||||
<Button
|
||||
title="Set User Attributes (single)"
|
||||
onPress={() => {
|
||||
setAttribute("testSingleAttr", "testSingleAttr").catch((error: unknown) => {
|
||||
// eslint-disable-next-line no-console -- logging is allowed in demo apps
|
||||
console.error("Error setting user attributes:", error);
|
||||
});
|
||||
}}
|
||||
/>
|
||||
|
||||
<Button
|
||||
title="Logout"
|
||||
onPress={() => {
|
||||
logout().catch((error: unknown) => {
|
||||
// eslint-disable-next-line no-console -- logging is allowed in demo apps
|
||||
console.error("Error logging out:", error);
|
||||
});
|
||||
}}
|
||||
/>
|
||||
|
||||
<Button
|
||||
title="Set Language (de)"
|
||||
onPress={() => {
|
||||
setLanguage("de").catch((error: unknown) => {
|
||||
// eslint-disable-next-line no-console -- logging is allowed in demo apps
|
||||
console.error("Error setting language:", error);
|
||||
});
|
||||
}}
|
||||
/>
|
||||
</View>
|
||||
|
||||
<StatusBar style="auto" />
|
||||
|
||||
<Formbricks
|
||||
appUrl={process.env.EXPO_PUBLIC_APP_URL as string}
|
||||
environmentId={process.env.EXPO_PUBLIC_FORMBRICKS_ENVIRONMENT_ID as string}
|
||||
/>
|
||||
</View>
|
||||
);
|
||||
}
|
||||
|
||||
const styles = StyleSheet.create({
|
||||
container: {
|
||||
flex: 1,
|
||||
backgroundColor: "#fff",
|
||||
alignItems: "center",
|
||||
justifyContent: "center",
|
||||
},
|
||||
});
|
||||
6
apps/demo-react-native/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"strict": true
|
||||
},
|
||||
"extends": "expo/tsconfig.base"
|
||||
}
|
||||
5
apps/demo/.env.example
Normal file
@@ -0,0 +1,5 @@
|
||||
NEXT_PUBLIC_FORMBRICKS_API_HOST=http://localhost:3000
|
||||
NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID=YOUR_ENVIRONMENT_ID
|
||||
|
||||
# Copy the environment ID for the URL of your Formbricks App and
|
||||
# paste it above to connect your Formbricks App with the Demo App.
|
||||
@@ -1,5 +1,5 @@
|
||||
module.exports = {
|
||||
extends: ["@formbricks/eslint-config/library.js"],
|
||||
extends: ["@formbricks/eslint-config/next.js"],
|
||||
parserOptions: {
|
||||
project: "tsconfig.json",
|
||||
tsconfigRootDir: __dirname,
|
||||
36
apps/demo/.gitignore
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
|
||||
|
||||
# dependencies
|
||||
/node_modules
|
||||
/.pnp
|
||||
.pnp.js
|
||||
|
||||
# testing
|
||||
/coverage
|
||||
|
||||
# next.js
|
||||
/.next/
|
||||
/out/
|
||||
|
||||
# production
|
||||
/build
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
*.pem
|
||||
|
||||
# debug
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
.pnpm-debug.log*
|
||||
|
||||
# local env files
|
||||
.env*.local
|
||||
|
||||
# vercel
|
||||
.vercel
|
||||
|
||||
# typescript
|
||||
*.tsbuildinfo
|
||||
next-env.d.ts
|
||||
13
apps/demo/components/layout-app.tsx
Normal file
@@ -0,0 +1,13 @@
|
||||
import { Sidebar } from "./sidebar";
|
||||
|
||||
export function LayoutApp({ children }: { children: React.ReactNode }): React.JSX.Element {
|
||||
return (
|
||||
<div className="min-h-full">
|
||||
{/* Static sidebar for desktop */}
|
||||
<div className="hidden lg:fixed lg:inset-y-0 lg:flex lg:w-64 lg:flex-col">
|
||||
<Sidebar />
|
||||
</div>
|
||||
<div className="flex flex-1 flex-col lg:pl-64">{children}</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
65
apps/demo/components/sidebar.tsx
Normal file
@@ -0,0 +1,65 @@
|
||||
import {
|
||||
ClockIcon,
|
||||
CogIcon,
|
||||
CreditCardIcon,
|
||||
FileBarChartIcon,
|
||||
HelpCircleIcon,
|
||||
HomeIcon,
|
||||
ScaleIcon,
|
||||
ShieldCheckIcon,
|
||||
UsersIcon,
|
||||
} from "lucide-react";
|
||||
import { classNames } from "../lib/utils";
|
||||
|
||||
const navigation = [
|
||||
{ name: "Home", href: "#", icon: HomeIcon, current: true },
|
||||
{ name: "History", href: "#", icon: ClockIcon, current: false },
|
||||
{ name: "Balances", href: "#", icon: ScaleIcon, current: false },
|
||||
{ name: "Cards", href: "#", icon: CreditCardIcon, current: false },
|
||||
{ name: "Recipients", href: "#", icon: UsersIcon, current: false },
|
||||
{ name: "Reports", href: "#", icon: FileBarChartIcon, current: false },
|
||||
];
|
||||
const secondaryNavigation = [
|
||||
{ name: "Settings", href: "#", icon: CogIcon },
|
||||
{ name: "Help", href: "#", icon: HelpCircleIcon },
|
||||
{ name: "Privacy", href: "#", icon: ShieldCheckIcon },
|
||||
];
|
||||
|
||||
export function Sidebar(): React.JSX.Element {
|
||||
return (
|
||||
<div className="flex flex-grow flex-col overflow-y-auto bg-cyan-700 pb-4 pt-5">
|
||||
<nav
|
||||
className="mt-5 flex flex-1 flex-col divide-y divide-cyan-800 overflow-y-auto"
|
||||
aria-label="Sidebar">
|
||||
<div className="space-y-1 px-2">
|
||||
{navigation.map((item) => (
|
||||
<a
|
||||
key={item.name}
|
||||
href={item.href}
|
||||
className={classNames(
|
||||
item.current ? "bg-cyan-800 text-white" : "text-cyan-100 hover:bg-cyan-600 hover:text-white",
|
||||
"group flex items-center rounded-md px-2 py-2 text-sm font-medium leading-6"
|
||||
)}
|
||||
aria-current={item.current ? "page" : undefined}>
|
||||
<item.icon className="mr-4 h-6 w-6 flex-shrink-0 text-cyan-200" aria-hidden="true" />
|
||||
{item.name}
|
||||
</a>
|
||||
))}
|
||||
</div>
|
||||
<div className="mt-6 pt-6">
|
||||
<div className="space-y-1 px-2">
|
||||
{secondaryNavigation.map((item) => (
|
||||
<a
|
||||
key={item.name}
|
||||
href={item.href}
|
||||
className="group flex items-center rounded-md px-2 py-2 text-sm font-medium leading-6 text-cyan-100 hover:bg-cyan-600 hover:text-white">
|
||||
<item.icon className="mr-4 h-6 w-6 text-cyan-200" aria-hidden="true" />
|
||||
{item.name}
|
||||
</a>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</nav>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
3
apps/demo/globals.css
Normal file
@@ -0,0 +1,3 @@
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
3
apps/demo/lib/utils.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export function classNames(...classes: string[]): string {
|
||||
return classes.filter(Boolean).join(" ");
|
||||
}
|
||||
5
apps/demo/next-env.d.ts
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
/// <reference types="next" />
|
||||
/// <reference types="next/image-types/global" />
|
||||
|
||||
// NOTE: This file should not be edited
|
||||
// see https://nextjs.org/docs/pages/api-reference/config/typescript for more information.
|
||||
17
apps/demo/next.config.mjs
Normal file
@@ -0,0 +1,17 @@
|
||||
/** @type {import('next').NextConfig} */
|
||||
const nextConfig = {
|
||||
images: {
|
||||
remotePatterns: [
|
||||
{
|
||||
protocol: "https",
|
||||
hostname: "tailwindui.com",
|
||||
},
|
||||
{
|
||||
protocol: "https",
|
||||
hostname: "images.unsplash.com",
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
export default nextConfig;
|
||||
24
apps/demo/package.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"name": "@formbricks/demo",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"clean": "rimraf .turbo node_modules .next",
|
||||
"dev": "next dev -p 3002 --turbopack",
|
||||
"go": "next dev -p 3002 --turbopack",
|
||||
"build": "next build",
|
||||
"start": "next start",
|
||||
"lint": "next lint"
|
||||
},
|
||||
"dependencies": {
|
||||
"@formbricks/js": "workspace:*",
|
||||
"lucide-react": "0.468.0",
|
||||
"next": "15.1.2",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@formbricks/config-typescript": "workspace:*",
|
||||
"@formbricks/eslint-config": "workspace:*"
|
||||
}
|
||||
}
|
||||
20
apps/demo/pages/_app.tsx
Normal file
@@ -0,0 +1,20 @@
|
||||
import type { AppProps } from "next/app";
|
||||
import Head from "next/head";
|
||||
import "../globals.css";
|
||||
|
||||
export default function App({ Component, pageProps }: AppProps): React.JSX.Element {
|
||||
return (
|
||||
<>
|
||||
<Head>
|
||||
<title>Demo App</title>
|
||||
</Head>
|
||||
{(!process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID ||
|
||||
!process.env.NEXT_PUBLIC_FORMBRICKS_API_HOST) && (
|
||||
<div className="w-full bg-red-500 p-3 text-center text-sm text-white">
|
||||
Please set Formbricks environment variables in apps/demo/.env
|
||||
</div>
|
||||
)}
|
||||
<Component {...pageProps} />
|
||||
</>
|
||||
);
|
||||
}
|
||||
13
apps/demo/pages/_document.tsx
Normal file
@@ -0,0 +1,13 @@
|
||||
import { Head, Html, Main, NextScript } from "next/document";
|
||||
|
||||
export default function Document(): React.JSX.Element {
|
||||
return (
|
||||
<Html lang="en" className="h-full bg-slate-50">
|
||||
<Head />
|
||||
<body className="h-full">
|
||||
<Main />
|
||||
<NextScript />
|
||||
</body>
|
||||
</Html>
|
||||
);
|
||||
}
|
||||
257
apps/demo/pages/index.tsx
Normal file
@@ -0,0 +1,257 @@
|
||||
import Image from "next/image";
|
||||
import { useRouter } from "next/router";
|
||||
import { useEffect, useState } from "react";
|
||||
import formbricks from "@formbricks/js";
|
||||
import fbsetup from "../public/fb-setup.png";
|
||||
|
||||
declare const window: Window;
|
||||
|
||||
export default function AppPage(): React.JSX.Element {
|
||||
const [darkMode, setDarkMode] = useState(false);
|
||||
const router = useRouter();
|
||||
|
||||
useEffect(() => {
|
||||
if (darkMode) {
|
||||
document.body.classList.add("dark");
|
||||
} else {
|
||||
document.body.classList.remove("dark");
|
||||
}
|
||||
}, [darkMode]);
|
||||
|
||||
useEffect(() => {
|
||||
const initFormbricks = () => {
|
||||
// enable Formbricks debug mode by adding formbricksDebug=true GET parameter
|
||||
const addFormbricksDebugParam = (): void => {
|
||||
const urlParams = new URLSearchParams(window.location.search);
|
||||
if (!urlParams.has("formbricksDebug")) {
|
||||
urlParams.set("formbricksDebug", "true");
|
||||
const newUrl = `${window.location.pathname}?${urlParams.toString()}`;
|
||||
window.history.replaceState({}, "", newUrl);
|
||||
}
|
||||
};
|
||||
|
||||
addFormbricksDebugParam();
|
||||
|
||||
if (process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID && process.env.NEXT_PUBLIC_FORMBRICKS_API_HOST) {
|
||||
const userId = "THIS-IS-A-VERY-LONG-USER-ID-FOR-TESTING";
|
||||
const userInitAttributes = {
|
||||
language: "de",
|
||||
"Init Attribute 1": "eight",
|
||||
"Init Attribute 2": "two",
|
||||
};
|
||||
|
||||
void formbricks.init({
|
||||
environmentId: process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID,
|
||||
apiHost: process.env.NEXT_PUBLIC_FORMBRICKS_API_HOST,
|
||||
userId,
|
||||
attributes: userInitAttributes,
|
||||
});
|
||||
}
|
||||
|
||||
// Connect next.js router to Formbricks
|
||||
if (process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID && process.env.NEXT_PUBLIC_FORMBRICKS_API_HOST) {
|
||||
const handleRouteChange = formbricks.registerRouteChange;
|
||||
|
||||
router.events.on("routeChangeComplete", () => {
|
||||
void handleRouteChange();
|
||||
});
|
||||
|
||||
return () => {
|
||||
router.events.off("routeChangeComplete", () => {
|
||||
void handleRouteChange();
|
||||
});
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
initFormbricks();
|
||||
}, [router.events]);
|
||||
|
||||
return (
|
||||
<div className="min-h-screen bg-white px-12 py-6 dark:bg-slate-800">
|
||||
<div className="flex flex-col justify-between md:flex-row">
|
||||
<div className="flex flex-col items-center gap-2 sm:flex-row">
|
||||
<div>
|
||||
<h1 className="text-2xl font-bold text-slate-900 dark:text-white">
|
||||
Formbricks In-product Survey Demo App
|
||||
</h1>
|
||||
<p className="text-slate-700 dark:text-slate-300">
|
||||
This app helps you test your app surveys. You can create and test user actions, create and
|
||||
update user attributes, etc.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<button
|
||||
type="button"
|
||||
className="mt-2 rounded-lg bg-slate-200 px-6 py-1 dark:bg-slate-700 dark:text-slate-100"
|
||||
onClick={() => {
|
||||
setDarkMode(!darkMode);
|
||||
}}>
|
||||
{darkMode ? "Toggle Light Mode" : "Toggle Dark Mode"}
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="my-4 grid grid-cols-1 gap-6 md:grid-cols-2">
|
||||
<div>
|
||||
<div className="rounded-lg border border-slate-300 bg-slate-100 p-6 dark:border-slate-600 dark:bg-slate-900">
|
||||
<h3 className="text-lg font-semibold text-slate-900 dark:text-white">1. Setup .env</h3>
|
||||
<p className="text-slate-700 dark:text-slate-300">
|
||||
Copy the environment ID of your Formbricks app to the env variable in /apps/demo/.env
|
||||
</p>
|
||||
<Image src={fbsetup} alt="fb setup" className="mt-4 rounded" priority />
|
||||
|
||||
<div className="mt-4 flex-col items-start text-sm text-slate-700 sm:flex sm:items-center sm:text-base dark:text-slate-300">
|
||||
<p className="mb-1 sm:mb-0 sm:mr-2">You're connected with env:</p>
|
||||
<div className="flex items-center">
|
||||
<strong className="w-32 truncate sm:w-auto">
|
||||
{process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID}
|
||||
</strong>
|
||||
<span className="relative ml-2 flex h-3 w-3">
|
||||
<span className="absolute inline-flex h-full w-full animate-ping rounded-full bg-green-500 opacity-75" />
|
||||
<span className="relative inline-flex h-3 w-3 rounded-full bg-green-500" />
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="mt-4 rounded-lg border border-slate-300 bg-slate-100 p-6 dark:border-slate-600 dark:bg-slate-900">
|
||||
<h3 className="text-lg font-semibold text-slate-900 dark:text-white">2. Widget Logs</h3>
|
||||
<p className="text-slate-700 dark:text-slate-300">
|
||||
Look at the logs to understand how the widget works.{" "}
|
||||
<strong className="dark:text-white">Open your browser console</strong> to see the logs.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="md:grid md:grid-cols-3">
|
||||
<div className="col-span-3 self-start rounded-lg border border-slate-300 bg-slate-100 p-6 dark:border-slate-600 dark:bg-slate-900">
|
||||
<h3 className="text-lg font-semibold dark:text-white">
|
||||
Reset person / pull data from Formbricks app
|
||||
</h3>
|
||||
<p className="text-slate-700 dark:text-slate-300">
|
||||
On formbricks.reset() the local state will <strong>be deleted</strong> and formbricks gets{" "}
|
||||
<strong>reinitialized</strong>.
|
||||
</p>
|
||||
<button
|
||||
className="my-4 rounded-lg bg-slate-500 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600"
|
||||
type="button"
|
||||
onClick={() => {
|
||||
void formbricks.reset();
|
||||
}}>
|
||||
Reset
|
||||
</button>
|
||||
<p className="text-xs text-slate-700 dark:text-slate-300">
|
||||
If you made a change in Formbricks app and it does not seem to work, hit 'Reset' and
|
||||
try again.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="p-6">
|
||||
<div>
|
||||
<button
|
||||
type="button"
|
||||
className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600">
|
||||
No-Code Action
|
||||
</button>
|
||||
</div>
|
||||
<div>
|
||||
<p className="text-xs text-slate-700 dark:text-slate-300">
|
||||
This button sends a{" "}
|
||||
<a
|
||||
href="https://formbricks.com/docs/actions/no-code"
|
||||
rel="noopener noreferrer"
|
||||
className="underline dark:text-blue-500"
|
||||
target="_blank">
|
||||
No Code Action
|
||||
</a>{" "}
|
||||
as long as you created it beforehand in the Formbricks App.{" "}
|
||||
<a
|
||||
href="https://formbricks.com/docs/actions/no-code"
|
||||
rel="noopener noreferrer"
|
||||
target="_blank"
|
||||
className="underline dark:text-blue-500">
|
||||
Here are instructions on how to do it.
|
||||
</a>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div className="p-6">
|
||||
<div>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => {
|
||||
void formbricks.setAttribute("Plan", "Free");
|
||||
}}
|
||||
className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600">
|
||||
Set Plan to 'Free'
|
||||
</button>
|
||||
</div>
|
||||
<div>
|
||||
<p className="text-xs text-slate-700 dark:text-slate-300">
|
||||
This button sets the{" "}
|
||||
<a
|
||||
href="https://formbricks.com/docs/attributes/custom-attributes"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="underline dark:text-blue-500">
|
||||
attribute
|
||||
</a>{" "}
|
||||
'Plan' to 'Free'. If the attribute does not exist, it creates it.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div className="p-6">
|
||||
<div>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => {
|
||||
void formbricks.setAttribute("Plan", "Paid");
|
||||
}}
|
||||
className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600">
|
||||
Set Plan to 'Paid'
|
||||
</button>
|
||||
</div>
|
||||
<div>
|
||||
<p className="text-xs text-slate-700 dark:text-slate-300">
|
||||
This button sets the{" "}
|
||||
<a
|
||||
href="https://formbricks.com/docs/attributes/custom-attributes"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="underline dark:text-blue-500">
|
||||
attribute
|
||||
</a>{" "}
|
||||
'Plan' to 'Paid'. If the attribute does not exist, it creates it.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div className="p-6">
|
||||
<div>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => {
|
||||
void formbricks.setEmail("test@web.com");
|
||||
}}
|
||||
className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600">
|
||||
Set Email
|
||||
</button>
|
||||
</div>
|
||||
<div>
|
||||
<p className="text-xs text-slate-700 dark:text-slate-300">
|
||||
This button sets the{" "}
|
||||
<a
|
||||
href="https://formbricks.com/docs/attributes/identify-users"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="underline dark:text-blue-500">
|
||||
user email
|
||||
</a>{" "}
|
||||
'test@web.com'
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
6
apps/demo/postcss.config.js
Normal file
@@ -0,0 +1,6 @@
|
||||
module.exports = {
|
||||
plugins: {
|
||||
tailwindcss: {},
|
||||
autoprefixer: {},
|
||||
},
|
||||
};
|
||||
BIN
apps/demo/public/favicon.ico
Normal file (binary, 15 KiB)
BIN
apps/demo/public/fb-setup.png
Normal file (binary, 6.2 KiB)
1
apps/demo/public/next.svg
Normal file (1.3 KiB)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 394 80"><path fill="#000" d="M262 0h68.5v12.7h-27.2v66.6h-13.6V12.7H262V0ZM149 0v12.7H94v20.4h44.3v12.6H94v21h55v12.6H80.5V0h68.7zm34.3 0h-17.8l63.8 79.4h17.9l-32-39.7 32-39.6h-17.9l-23 28.6-23-28.6zm18.3 56.7-9-11-27.1 33.7h17.8l18.3-22.7z"/><path fill="#000" d="M81 79.3 17 0H0v79.3h13.6V17l50.2 62.3H81Zm252.6-.4c-1 0-1.8-.4-2.5-1s-1.1-1.6-1.1-2.6.3-1.8 1-2.5 1.6-1 2.6-1 1.8.3 2.5 1a3.4 3.4 0 0 1 .6 4.3 3.7 3.7 0 0 1-3 1.8zm23.2-33.5h6v23.3c0 2.1-.4 4-1.3 5.5a9.1 9.1 0 0 1-3.8 3.5c-1.6.8-3.5 1.3-5.7 1.3-2 0-3.7-.4-5.3-1s-2.8-1.8-3.7-3.2c-.9-1.3-1.4-3-1.4-5h6c.1.8.3 1.6.7 2.2s1 1.2 1.6 1.5c.7.4 1.5.5 2.4.5 1 0 1.8-.2 2.4-.6a4 4 0 0 0 1.6-1.8c.3-.8.5-1.8.5-3V45.5zm30.9 9.1a4.4 4.4 0 0 0-2-3.3 7.5 7.5 0 0 0-4.3-1.1c-1.3 0-2.4.2-3.3.5-.9.4-1.6 1-2 1.6a3.5 3.5 0 0 0-.3 4c.3.5.7.9 1.3 1.2l1.8 1 2 .5 3.2.8c1.3.3 2.5.7 3.7 1.2a13 13 0 0 1 3.2 1.8 8.1 8.1 0 0 1 3 6.5c0 2-.5 3.7-1.5 5.1a10 10 0 0 1-4.4 3.5c-1.8.8-4.1 1.2-6.8 1.2-2.6 0-4.9-.4-6.8-1.2-2-.8-3.4-2-4.5-3.5a10 10 0 0 1-1.7-5.6h6a5 5 0 0 0 3.5 4.6c1 .4 2.2.6 3.4.6 1.3 0 2.5-.2 3.5-.6 1-.4 1.8-1 2.4-1.7a4 4 0 0 0 .8-2.4c0-.9-.2-1.6-.7-2.2a11 11 0 0 0-2.1-1.4l-3.2-1-3.8-1c-2.8-.7-5-1.7-6.6-3.2a7.2 7.2 0 0 1-2.4-5.7 8 8 0 0 1 1.7-5 10 10 0 0 1 4.3-3.5c2-.8 4-1.2 6.4-1.2 2.3 0 4.4.4 6.2 1.2 1.8.8 3.2 2 4.3 3.4 1 1.4 1.5 3 1.5 5h-5.8z"/></svg>
1
apps/demo/public/thirteen.svg
Normal file (1.1 KiB)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="40" height="31" fill="none"><g opacity=".9"><path fill="url(#a)" d="M13 .4v29.3H7V6.3h-.2L0 10.5V5L7.2.4H13Z"/><path fill="url(#b)" d="M28.8 30.1c-2.2 0-4-.3-5.7-1-1.7-.8-3-1.8-4-3.1a7.7 7.7 0 0 1-1.4-4.6h6.2c0 .8.3 1.4.7 2 .4.5 1 .9 1.7 1.2.7.3 1.6.4 2.5.4 1 0 1.7-.2 2.5-.5.7-.3 1.3-.8 1.7-1.4.4-.6.6-1.2.6-2s-.2-1.5-.7-2.1c-.4-.6-1-1-1.8-1.4-.8-.4-1.8-.5-2.9-.5h-2.7v-4.6h2.7a6 6 0 0 0 2.5-.5 4 4 0 0 0 1.7-1.3c.4-.6.6-1.3.6-2a3.5 3.5 0 0 0-2-3.3 5.6 5.6 0 0 0-4.5 0 4 4 0 0 0-1.7 1.2c-.4.6-.6 1.2-.6 2h-6c0-1.7.6-3.2 1.5-4.5 1-1.3 2.2-2.3 3.8-3C25 .4 26.8 0 28.8 0s3.8.4 5.3 1.1c1.5.7 2.7 1.7 3.6 3a7.2 7.2 0 0 1 1.2 4.2c0 1.6-.5 3-1.5 4a7 7 0 0 1-4 2.2v.2c2.2.3 3.8 1 5 2.2a6.4 6.4 0 0 1 1.6 4.6c0 1.7-.5 3.1-1.4 4.4a9.7 9.7 0 0 1-4 3.1c-1.7.8-3.7 1.1-5.8 1.1Z"/></g><defs><linearGradient id="a" x1="20" x2="20" y1="0" y2="30.1" gradientUnits="userSpaceOnUse"><stop/><stop offset="1" stop-color="#3D3D3D"/></linearGradient><linearGradient id="b" x1="20" x2="20" y1="0" y2="30.1" gradientUnits="userSpaceOnUse"><stop/><stop offset="1" stop-color="#3D3D3D"/></linearGradient></defs></svg>
1
apps/demo/public/vercel.svg
Normal file (629 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 283 64"><path fill="black" d="M141 16c-11 0-19 7-19 18s9 18 20 18c7 0 13-3 16-7l-7-5c-2 3-6 4-9 4-5 0-9-3-10-7h28v-3c0-11-8-18-19-18zm-9 15c1-4 4-7 9-7s8 3 9 7h-18zm117-15c-11 0-19 7-19 18s9 18 20 18c6 0 12-3 16-7l-8-5c-2 3-5 4-8 4-5 0-9-3-11-7h28l1-3c0-11-8-18-19-18zm-10 15c2-4 5-7 10-7s8 3 9 7h-19zm-39 3c0 6 4 10 10 10 4 0 7-2 9-5l8 5c-3 5-9 8-17 8-11 0-19-7-19-18s8-18 19-18c8 0 14 3 17 8l-8 5c-2-3-5-5-9-5-6 0-10 4-10 10zm83-29v46h-9V5h9zM37 0l37 64H0L37 0zm92 5-27 48L74 5h10l18 30 17-30h10zm59 12v10l-3-1c-6 0-10 4-10 10v15h-9V17h9v9c0-5 6-9 13-9z"/></svg>
13
apps/demo/tailwind.config.js
Normal file
@@ -0,0 +1,13 @@
/** @type {import('tailwindcss').Config} */
module.exports = {
  content: [
    "./app/**/*.{js,ts,jsx,tsx}",
    "./pages/**/*.{js,ts,jsx,tsx}",
    "./components/**/*.{js,ts,jsx,tsx}",
  ],
  darkMode: "class",
  theme: {
    extend: {},
  },
  plugins: [require("@tailwindcss/forms")],
};
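Side note on the `darkMode: "class"` setting above: the `dark:` variants used throughout the demo page only apply when a `dark` class is present on an ancestor element. A minimal, hypothetical toggle (not part of the diff) would look like this:

```ts
// Hypothetical helper — Tailwind's "class" strategy looks for a `dark` class on <html> (or any ancestor).
function toggleDarkMode(): void {
  document.documentElement.classList.toggle("dark");
}
```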
5
apps/demo/tsconfig.json
Normal file
@@ -0,0 +1,5 @@
{
  "exclude": ["node_modules"],
  "extends": "@formbricks/config-typescript/nextjs.json",
  "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"]
}
@@ -1,52 +1,27 @@
import type { StorybookConfig } from "@storybook/react-vite";
import { createRequire } from "module";
import { dirname, join, resolve } from "path";
import { fileURLToPath } from "url";

const require = createRequire(import.meta.url);
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
import { dirname, join } from "path";

/**
 * This function is used to resolve the absolute path of a package.
 * It is needed in projects that use Yarn PnP or are set up within a monorepo.
 */
function getAbsolutePath(value: string): any {
const getAbsolutePath = (value: string) => {
  return dirname(require.resolve(join(value, "package.json")));
}
};

const config: StorybookConfig = {
  stories: ["../src/**/*.mdx", "../../../packages/survey-ui/src/**/*.stories.@(js|jsx|mjs|ts|tsx)"],
  stories: ["../src/**/*.mdx", "../../web/modules/ui/**/stories.@(js|jsx|mjs|ts|tsx)"],
  addons: [
    getAbsolutePath("@storybook/addon-onboarding"),
    getAbsolutePath("@storybook/addon-links"),
    getAbsolutePath("@storybook/addon-essentials"),
    getAbsolutePath("@chromatic-com/storybook"),
    getAbsolutePath("@storybook/addon-interactions"),
    getAbsolutePath("@storybook/addon-a11y"),
    getAbsolutePath("@storybook/addon-docs"),
  ],
  framework: {
    name: getAbsolutePath("@storybook/react-vite"),
    options: {},
  },
  async viteFinal(config) {
    const surveyUiPath = resolve(__dirname, "../../../packages/survey-ui/src");
    const rootPath = resolve(__dirname, "../../../");

    // Configure server to allow files from outside the storybook directory
    config.server = config.server || {};
    config.server.fs = {
      ...config.server.fs,
      allow: [...(config.server.fs?.allow || []), rootPath],
    };

    // Configure simple alias resolution
    config.resolve = config.resolve || {};
    config.resolve.alias = {
      ...config.resolve.alias,
      "@": surveyUiPath,
    };

    return config;
  },
};
export default config;
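For context, the removed `viteFinal` hook mapped the `@` alias to `packages/survey-ui/src`, so stories could import package internals without long relative paths. An illustrative import under that alias (the exact module path is an assumption) would have looked like:

```ts
// Resolved through the "@" alias from the removed viteFinal config ("@" -> packages/survey-ui/src).
// The specific module path is illustrative only.
import { ElementHeader } from "@/components/element-header";
```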
@@ -1,6 +1,5 @@
import type { Preview } from "@storybook/react-vite";
import React from "react";
import "../../../packages/survey-ui/src/styles/globals.css";
import type { Preview } from "@storybook/react";
import "../../web/modules/ui/globals.css";

const preview: Preview = {
  parameters: {
@@ -9,23 +8,8 @@ const preview: Preview = {
        color: /(background|color)$/i,
        date: /Date$/i,
      },
      expanded: true,
    },
    backgrounds: {
      default: "light",
    },
  },
  decorators: [
    (Story) =>
      React.createElement(
        "div",
        {
          id: "fbjs",
          className: "w-full h-full min-h-screen p-4 bg-background font-sans antialiased text-foreground",
        },
        React.createElement(Story)
      ),
  ],
};

export default preview;
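The removed decorator above is written with `React.createElement`, presumably because the preview file is plain TypeScript (`.ts`) rather than TSX. In a `.tsx` file the equivalent JSX (same element, same classes) would read roughly as follows:

```tsx
import type { Decorator } from "@storybook/react";

// JSX-equivalent of the createElement-based decorator shown above.
const withFbjsWrapper: Decorator = (Story) => (
  <div
    id="fbjs"
    className="w-full h-full min-h-screen p-4 bg-background font-sans antialiased text-foreground">
    <Story />
  </div>
);
```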
@@ -11,24 +11,30 @@
    "clean": "rimraf .turbo node_modules dist storybook-static"
  },
  "dependencies": {
    "@formbricks/survey-ui": "workspace:*"
    "eslint-plugin-react-refresh": "0.4.16",
    "react": "19.0.0",
    "react-dom": "19.0.0"
  },
  "devDependencies": {
    "@chromatic-com/storybook": "^5.0.0",
    "@storybook/addon-a11y": "10.1.11",
    "@storybook/addon-links": "10.1.11",
    "@storybook/addon-onboarding": "10.1.11",
    "@storybook/react-vite": "10.1.11",
    "@typescript-eslint/eslint-plugin": "8.53.0",
    "@tailwindcss/vite": "4.1.18",
    "@typescript-eslint/parser": "8.53.0",
    "@vitejs/plugin-react": "5.1.2",
    "esbuild": "0.27.2",
    "eslint-plugin-react-refresh": "0.4.26",
    "eslint-plugin-storybook": "10.1.11",
    "@chromatic-com/storybook": "3.2.2",
    "@formbricks/config-typescript": "workspace:*",
    "@storybook/addon-a11y": "8.4.7",
    "@storybook/addon-essentials": "8.4.7",
    "@storybook/addon-interactions": "8.4.7",
    "@storybook/addon-links": "8.4.7",
    "@storybook/addon-onboarding": "8.4.7",
    "@storybook/blocks": "8.4.7",
    "@storybook/react": "8.4.7",
    "@storybook/react-vite": "8.4.7",
    "@storybook/test": "8.4.7",
    "@typescript-eslint/eslint-plugin": "8.18.0",
    "@typescript-eslint/parser": "8.18.0",
    "@vitejs/plugin-react": "4.3.4",
    "esbuild": "0.25.0",
    "eslint-plugin-storybook": "0.11.1",
    "prop-types": "15.8.1",
    "storybook": "10.1.11",
    "vite": "7.3.1",
    "@storybook/addon-docs": "10.1.11"
    "storybook": "8.4.7",
    "tsup": "8.3.5",
    "vite": "6.0.9"
  }
}
6
apps/storybook/postcss.config.js
Normal file
@@ -0,0 +1,6 @@
export default {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
};
@@ -1,4 +1,4 @@
import { Meta } from "@storybook/addon-docs/blocks";
import { Meta } from "@storybook/blocks";

import Accessibility from "./assets/accessibility.png";
import AddonLibrary from "./assets/addon-library.png";
@@ -1,15 +1,7 @@
/** @type {import('tailwindcss').Config} */
import surveyUi from "../../packages/survey-ui/tailwind.config";
import base from "../web/tailwind.config";

export default {
  content: [
    "./index.html",
    "./src/**/*.{js,ts,jsx,tsx}",
    "../../packages/survey-ui/src/**/*.{js,ts,jsx,tsx}",
  ],
  theme: {
    extend: {
      ...surveyUi.theme?.extend,
    },
  },
  ...base,
  content: ["./index.html", "./src/**/*.{js,ts,jsx,tsx}", "../web/modules/ui/**/*.{js,ts,jsx,tsx}"],
};