feat: storage package with s3 client (#6449)

This commit is contained in:
Anshuman Pandey
2025-08-27 07:21:41 +05:30
committed by GitHub
25 changed files with 6362 additions and 26 deletions

View File

@@ -7,6 +7,7 @@ description: >
globs: []
alwaysApply: agent-requested
---
# Formbricks Database Schema Reference
This rule provides a reference to the Formbricks database structure. For the most up-to-date and complete schema definitions, please refer to the schema.prisma file directly.
@@ -16,6 +17,7 @@ This rule provides a reference to the Formbricks database structure. For the mos
Formbricks uses PostgreSQL with Prisma ORM. The schema is designed for multi-tenancy with strong data isolation between organizations.
### Core Hierarchy
```
Organization
└── Project
@@ -29,6 +31,7 @@ Organization
## Schema Reference
For the complete and up-to-date database schema, please refer to:
- Main schema: `packages/database/schema.prisma`
- JSON type definitions: `packages/database/json-types.ts`
@@ -37,17 +40,22 @@ The schema.prisma file contains all model definitions, relationships, enums, and
## Data Access Patterns
### Multi-tenancy
- All data is scoped by Organization
- Environment-level isolation for surveys and contacts
- Project-level grouping for related surveys
### Soft Deletion
Some models use soft deletion patterns:
- Check `isActive` fields where present
- Use proper filtering in queries
### Cascading Deletes
Configured cascade relationships:
- Organization deletion cascades to all child entities
- Survey deletion removes responses, displays, triggers
- Contact deletion removes attributes and responses
@@ -55,6 +63,7 @@ Configured cascade relationships:
## Common Query Patterns
### Survey with Responses
```typescript
// Include response count and latest responses
const survey = await prisma.survey.findUnique({
@@ -62,40 +71,40 @@ const survey = await prisma.survey.findUnique({
include: {
responses: {
take: 10,
orderBy: { createdAt: 'desc' }
orderBy: { createdAt: "desc" },
},
_count: {
select: { responses: true }
}
}
select: { responses: true },
},
},
});
```
### Environment Scoping
```typescript
// Always scope by environment
const surveys = await prisma.survey.findMany({
where: {
environmentId: environmentId,
// Additional filters...
}
},
});
```
### Contact with Attributes
```typescript
const contact = await prisma.contact.findUnique({
where: { id: contactId },
include: {
attributes: {
include: {
attributeKey: true
}
}
}
attributeKey: true,
},
},
},
});
```
This schema supports Formbricks' core functionality: multi-tenant survey management, user targeting, response collection, and analysis, all while maintaining strict data isolation and security.

View File

@@ -747,6 +747,7 @@
"api_key_label": "API-Schlüssel Label",
"api_key_security_warning": "Aus Sicherheitsgründen wird der API-Schlüssel nur einmal nach der Erstellung angezeigt. Bitte kopiere ihn sofort an einen sicheren Ort.",
"api_key_updated": "API-Schlüssel aktualisiert",
"delete_permission": "Berechtigung löschen",
"duplicate_access": "Doppelter Projektzugriff nicht erlaubt",
"no_api_keys_yet": "Du hast noch keine API-Schlüssel",
"no_env_permissions_found": "Keine Umgebungsberechtigungen gefunden",

View File

@@ -747,6 +747,7 @@
"api_key_label": "API Key Label",
"api_key_security_warning": "For security reasons, the API key will only be shown once after creation. Please copy it to your destination right away.",
"api_key_updated": "API Key updated",
"delete_permission": "Delete permission",
"duplicate_access": "Duplicate project access not allowed",
"no_api_keys_yet": "You don't have any API keys yet",
"no_env_permissions_found": "No environment permissions found",

View File

@@ -747,6 +747,7 @@
"api_key_label": "Étiquette de clé API",
"api_key_security_warning": "Pour des raisons de sécurité, la clé API ne sera affichée qu'une seule fois après sa création. Veuillez la copier immédiatement à votre destination.",
"api_key_updated": "Clé API mise à jour",
"delete_permission": "Supprimer une permission",
"duplicate_access": "L'accès en double au projet n'est pas autorisé",
"no_api_keys_yet": "Vous n'avez pas encore de clés API.",
"no_env_permissions_found": "Aucune autorisation d'environnement trouvée",

2865
apps/web/locales/ja-JP.json Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -747,6 +747,7 @@
"api_key_label": "Rótulo da Chave API",
"api_key_security_warning": "Por motivos de segurança, a chave da API será mostrada apenas uma vez após a criação. Por favor, copie-a para o seu destino imediatamente.",
"api_key_updated": "Chave de API atualizada",
"delete_permission": "Remover permissão",
"duplicate_access": "Acesso duplicado ao projeto não permitido",
"no_api_keys_yet": "Você ainda não tem nenhuma chave de API",
"no_env_permissions_found": "Nenhuma permissão de ambiente encontrada",

View File

@@ -747,6 +747,7 @@
"api_key_label": "Etiqueta da Chave API",
"api_key_security_warning": "Por razões de segurança, a chave API será mostrada apenas uma vez após a criação. Por favor, copie-a para o seu destino imediatamente.",
"api_key_updated": "Chave API atualizada",
"delete_permission": "Eliminar permissão",
"duplicate_access": "Acesso duplicado ao projeto não permitido",
"no_api_keys_yet": "Ainda não tem nenhuma chave API",
"no_env_permissions_found": "Nenhuma permissão de ambiente encontrada",

View File

@@ -747,6 +747,7 @@
"api_key_label": "Etichetă Cheie API",
"api_key_security_warning": "Din motive de securitate, cheia API va fi afișată o singură dată după creare. Vă rugăm să o copiați imediat la destinație.",
"api_key_updated": "Cheie API actualizată",
"delete_permission": "Șterge permisiunea",
"duplicate_access": "Accesul dublu la proiect nu este permis",
"no_api_keys_yet": "Nu aveți încă chei API",
"no_env_permissions_found": "Nu s-au găsit permisiuni pentru mediu",

View File

@@ -747,6 +747,7 @@
"api_key_label": "API 金鑰標籤",
"api_key_security_warning": "為安全起見API 金鑰僅在建立後顯示一次。請立即將其複製到您的目的地。",
"api_key_updated": "API 金鑰已更新",
"delete_permission": "刪除 權限",
"duplicate_access": "不允許重複的 project 存取",
"no_api_keys_yet": "您還沒有任何 API 金鑰",
"no_env_permissions_found": "找不到環境權限",

View File

@@ -0,0 +1,284 @@
# Storage Package Rules for Formbricks
## Package Purpose & Design Philosophy
The `@formbricks/storage` package provides a **type-safe, environment-agnostic S3 storage abstraction** for Formbricks. It's designed as a standalone library that can work with any S3-compatible storage provider (AWS S3, MinIO, LocalStack, etc.).
### Key Design Decisions
1. **Result Type Pattern**: All operations return `Result<T, StorageError>` instead of throwing exceptions, enabling explicit error handling
2. **Environment-based Configuration**: Zero hardcoded values - all configuration comes from environment variables
3. **Graceful Degradation**: When S3 is unavailable, the package fails gracefully without crashing the application
4. **Minimal Dependencies**: Only includes necessary AWS SDK packages, avoiding the bloated umbrella package
5. **Internal Implementation Hiding**: Only exports the public API, keeping client creation and constants internal
## Core Use Cases
### File Upload Flow
```typescript
// Generate presigned URL for secure client-side uploads
const uploadResult = await getSignedUploadUrl(
"user-avatar.jpg",
"image/jpeg",
"users/123/avatars",
5 * 1024 * 1024 // 5MB limit
);
if (uploadResult.ok) {
// Client uploads directly to S3 using signed URL
const { signedUrl, presignedFields } = uploadResult.data;
}
```
### File Download Flow
```typescript
// Generate temporary download links for private files
const downloadResult = await getSignedDownloadUrl("users/123/avatars/user-avatar.jpg");
if (downloadResult.ok) {
// Redirect user to temporary download URL (expires in 30 minutes)
return redirect(downloadResult.data);
}
```
### Cleanup Operations
```typescript
// Single file deletion
await deleteFile("users/123/temp/upload.pdf");
// Bulk cleanup (handles pagination automatically)
await deleteFilesByPrefix("surveys/456/responses/"); // Deletes all response files
```
## Package Architecture
### Module Responsibilities
- **`service.ts`**: Core business logic - the four main operations
- **`client.ts`**: S3 client factory with environment validation
- **`constants.ts`**: Environment variable exports (internal use only)
- **`types/error.ts`**: Result type system and error definitions
- **`index.ts`**: Public API exports (consumers only see this)
### Error Handling Strategy
```typescript
// All functions use consistent error types (see types/error.ts)
type StorageError = {
code: ErrorCode; // e.g., ErrorCode.S3ClientError, ErrorCode.S3CredentialsError
};
// Consumers handle errors explicitly
const result = await deleteFilesByPrefix("path/");
if (!result.ok) {
switch (result.error.code) {
case ErrorCode.S3CredentialsError:
// Handle missing/invalid credentials
case ErrorCode.FileNotFoundError:
// Handle missing files
default:
// Handle unexpected errors
}
}
```
## Environment Configuration
### Required Variables
```bash
S3_ACCESS_KEY=your-access-key
S3_SECRET_KEY=your-secret-key
S3_REGION=us-east-1
S3_BUCKET_NAME=formbricks-storage
```
### Optional Variables (for non-AWS providers)
```bash
S3_ENDPOINT_URL=http://localhost:9000 # MinIO/LocalStack
S3_FORCE_PATH_STYLE=1 # Required for MinIO
```
### Configuration Validation
- Validation happens at **client creation time**, not at startup
- Missing credentials result in `s3_credentials_error`
- Invalid credentials are detected during first operation
## Bulk Operations Design
### Why Pagination + Batching?
S3 has two key limitations:
1. **ListObjects** returns max 1000 objects per request → Use pagination
2. **DeleteObjects** accepts max 1000 objects per request → Use batching
### Implementation Pattern
```typescript
// 1. Paginate through all objects with prefix
const paginator = paginateListObjectsV2(client, { Bucket, Prefix });
for await (const page of paginator) {
// Collect all keys
}
// 2. Batch deletions in groups of 1000
for (let i = 0; i < keys.length; i += 1000) {
const batch = keys.slice(i, i + 1000);
await s3Client.send(new DeleteObjectsCommand({ Delete: { Objects: batch } }));
}
// 3. Handle partial failures gracefully
// Log errors but don't fail the entire operation
```
## Integration Patterns
### In Formbricks Web App
```typescript
// Survey file cleanup when survey is deleted
await deleteFilesByPrefix(`surveys/${surveyId}/`);
// Response file cleanup when response is deleted
await deleteFilesByPrefix(`surveys/${surveyId}/responses/${responseId}/`);
// User avatar upload
const uploadUrl = await getSignedUploadUrl(file.name, file.type, `users/${userId}/avatars`, maxAvatarSize);
```
### Testing Strategy
- **Mock the entire `@aws-sdk/client-s3` module** - don't try to mock individual operations
- **Use `paginateListObjectsV2` mocks** with async generators for bulk operations
- **Test error scenarios** - missing credentials, network failures, partial deletions
- **Mock environment variables** consistently across tests
## Performance Considerations
### Presigned URL Expiration
- **Upload URLs**: 2 minutes (short for security)
- **Download URLs**: 30 minutes (balance between security and UX)
### Bulk Operation Optimization
- **Concurrent batch processing**: Delete batches in parallel using `Promise.all()`
- **Memory efficient pagination**: Process one page at a time, don't load all keys into memory
- **Partial failure handling**: Continue processing even if some batches fail
### Client Reuse
- **Single client instance** created at module level
- **Avoid recreating clients** for each operation
- **Fail fast** if client creation fails due to missing credentials
## Common Pitfalls & Solutions
### ❌ Don't expose internal details
```typescript
// Wrong - exposes implementation
export { S3_BUCKET_NAME, createS3Client } from "./internal";
```
### ✅ Keep implementation internal
```typescript
// Correct - only expose business operations
export { deleteFile, getSignedUploadUrl } from "./service";
```
### ❌ Don't use generic error handling
```typescript
// Wrong - loses error context
catch (error) {
throw new Error("Something went wrong");
}
```
### ✅ Use specific error types
```typescript
// Correct - categorize errors appropriately
catch (error) {
logger.error({ error }, "S3 operation failed");
return err({ code: ErrorCode.S3ClientError });
}
```
### ❌ Don't hardcode configuration
```typescript
// Wrong - not environment-agnostic
const s3Client = new S3Client({
region: "us-east-1",
endpoint: "https://s3.amazonaws.com",
});
```
### ✅ Use environment variables
```typescript
// Correct - works with any S3-compatible provider
const s3Client = new S3Client({
region: S3_REGION,
endpoint: S3_ENDPOINT_URL,
forcePathStyle: S3_FORCE_PATH_STYLE,
});
```
## Dependencies & Versioning
### AWS SDK Strategy
- **Use specific packages** (`@aws-sdk/client-s3`) not umbrella package (`aws-sdk`)
- **Pin exact versions** to avoid breaking changes
- **External dependencies**: All AWS SDK packages are externalized in build
### Package Structure
```json
{
"exports": {
"import": "./dist/index.js",
"require": "./dist/index.cjs"
},
"main": "./dist/index.js",
"types": "./dist/index.d.ts"
}
```
## Function Reference
### `getSignedUploadUrl(fileName, contentType, filePath, maxSize?)`
**Purpose**: Generate presigned POST URL for secure client-side uploads
**Returns**: `Result<{ signedUrl: string; presignedFields: Record<string, string> }, StorageError>`
**Use Case**: File uploads from browser without exposing S3 credentials
### `getSignedDownloadUrl(fileKey)`
**Purpose**: Generate temporary download URL for private files
**Returns**: `Result<string, StorageError>` (temporary URL valid for 30 minutes)
**Use Case**: Serving private files without making S3 bucket public
### `deleteFile(fileKey)`
**Purpose**: Delete a single file from S3
**Returns**: `Result<void, StorageError>`
**Use Case**: Remove uploaded files when user deletes content
### `deleteFilesByPrefix(prefix)`
**Purpose**: Bulk delete all files matching a prefix pattern
**Returns**: `Result<void, StorageError>` (partial failures are logged, not returned)
**Use Case**: Cleanup entire folders when surveys/users are deleted
Remember: This package is designed to be **infrastructure-agnostic** and **error-resilient**. It should work seamlessly whether you're using AWS S3, MinIO for local development, or any other S3-compatible storage provider.

View File

@@ -0,0 +1,7 @@
// ESLint configuration for the storage package: extends the shared
// library preset and enables type-aware linting against this package's
// own tsconfig (tsconfigRootDir anchors the project path resolution).
module.exports = {
  extends: ["@formbricks/eslint-config/library.js"],
  parserOptions: {
    project: "tsconfig.json",
    tsconfigRootDir: __dirname,
  },
};

View File

@@ -0,0 +1,52 @@
{
"name": "@formbricks/storage",
"private": true,
"type": "module",
"version": "0.1.0",
"homepage": "https://formbricks.com",
"description": "Storage Controller for Formbricks",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"repository": {
"type": "git",
"url": "https://github.com/formbricks/formbricks"
},
"keywords": [
"Formbricks",
"storage",
"storage controller"
],
"files": [
"dist"
],
"exports": {
"types": "./dist/index.d.ts",
"import": "./dist/index.js",
"require": "./dist/index.cjs"
},
"scripts": {
"clean": "rimraf .turbo node_modules coverage dist",
"lint": "eslint . --ext .ts,.js",
"lint:fix": "eslint . --ext .ts,.js --fix",
"lint:report": "eslint . --format json --output-file ../../lint-results/app-store.json",
"build": "tsc && vite build",
"test": "vitest run",
"test:coverage": "vitest run --coverage",
"go": "vite build --watch --mode dev"
},
"author": "Formbricks <hola@formbricks.com>",
"dependencies": {
"@formbricks/logger": "workspace:*",
"@aws-sdk/client-s3": "3.864.0",
"@aws-sdk/s3-presigned-post": "3.864.0",
"@aws-sdk/s3-request-presigner": "3.864.0"
},
"devDependencies": {
"@formbricks/config-typescript": "workspace:*",
"@formbricks/eslint-config": "workspace:*",
"vite": "6.3.5",
"vite-plugin-dts": "4.5.3",
"vitest": "3.1.3",
"@vitest/coverage-v8": "3.1.3"
}
}

View File

@@ -0,0 +1,250 @@
import { S3Client, type S3ClientConfig } from "@aws-sdk/client-s3";
import { beforeEach, describe, expect, test, vi } from "vitest";
// Mock the AWS SDK S3Client
vi.mock("@aws-sdk/client-s3", () => ({
S3Client: vi.fn().mockImplementation((config: S3ClientConfig) => ({
config,
send: vi.fn(),
})),
}));
const mockS3Client = vi.mocked(S3Client);
describe("client.ts", () => {
beforeEach(() => {
vi.clearAllMocks();
vi.resetModules();
});
const mockConstants = {
S3_ACCESS_KEY: "test-access-key",
S3_SECRET_KEY: "test-secret-key",
S3_REGION: "us-east-1",
S3_BUCKET_NAME: "test-bucket",
S3_ENDPOINT_URL: undefined,
S3_FORCE_PATH_STYLE: false,
};
describe("createS3ClientFromEnv", () => {
test("should create S3 client with valid credentials", async () => {
// Mock constants with valid credentials
vi.doMock("./constants", () => mockConstants);
// Dynamic import to get fresh module with mocked constants
const { createS3ClientFromEnv } = await import("./client");
const client = createS3ClientFromEnv();
expect(mockS3Client).toHaveBeenCalledWith({
credentials: {
accessKeyId: mockConstants.S3_ACCESS_KEY,
secretAccessKey: mockConstants.S3_SECRET_KEY,
},
region: mockConstants.S3_REGION,
endpoint: mockConstants.S3_ENDPOINT_URL,
forcePathStyle: mockConstants.S3_FORCE_PATH_STYLE,
});
expect(client.ok).toBe(true);
if (client.ok) {
expect(client.data).toBeDefined();
}
});
test("should create S3 client with endpoint URL", async () => {
// Mock constants with endpoint URL
vi.doMock("./constants", () => ({
...mockConstants,
S3_ENDPOINT_URL: "https://custom-endpoint.com",
S3_FORCE_PATH_STYLE: true,
}));
const { createS3ClientFromEnv } = await import("./client");
const client = createS3ClientFromEnv();
expect(mockS3Client).toHaveBeenCalledWith({
credentials: {
accessKeyId: mockConstants.S3_ACCESS_KEY,
secretAccessKey: mockConstants.S3_SECRET_KEY,
},
region: mockConstants.S3_REGION,
endpoint: "https://custom-endpoint.com",
forcePathStyle: true,
});
expect(client.ok).toBe(true);
if (client.ok) {
expect(client.data).toBeDefined();
}
});
test("should return error when access key is missing", async () => {
// Mock constants with missing access key
vi.doMock("./constants", () => ({
...mockConstants,
S3_ACCESS_KEY: undefined,
}));
const { createS3ClientFromEnv } = await import("./client");
const result = createS3ClientFromEnv();
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe("s3_credentials_error");
}
});
test("should return error when secret key is missing", async () => {
// Mock constants with missing secret key
vi.doMock("./constants", () => ({
...mockConstants,
S3_SECRET_KEY: undefined,
}));
const { createS3ClientFromEnv } = await import("./client");
const result = createS3ClientFromEnv();
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe("s3_credentials_error");
}
});
test("should return error when both credentials are missing", async () => {
// Mock constants with no credentials
vi.doMock("./constants", () => ({
...mockConstants,
S3_ACCESS_KEY: undefined,
S3_SECRET_KEY: undefined,
}));
const { createS3ClientFromEnv } = await import("./client");
const result = createS3ClientFromEnv();
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe("s3_credentials_error");
}
});
test("should return error when credentials are empty strings", async () => {
// Mock constants with empty string credentials
vi.doMock("./constants", () => ({
...mockConstants,
S3_ACCESS_KEY: "",
S3_SECRET_KEY: "",
}));
const { createS3ClientFromEnv } = await import("./client");
const result = createS3ClientFromEnv();
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe("s3_credentials_error");
}
});
test("should return error when mixed empty and undefined credentials", async () => {
// Mock constants with mixed empty and undefined
vi.doMock("./constants", () => ({
...mockConstants,
S3_ACCESS_KEY: "",
S3_SECRET_KEY: undefined,
}));
const { createS3ClientFromEnv } = await import("./client");
const result = createS3ClientFromEnv();
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.error.code).toBe("s3_credentials_error");
}
});
test("should handle empty endpoint URL", async () => {
// Mock constants with empty endpoint URL
vi.doMock("./constants", () => ({
...mockConstants,
S3_ENDPOINT_URL: "",
}));
const { createS3ClientFromEnv } = await import("./client");
const result = createS3ClientFromEnv();
expect(mockS3Client).toHaveBeenCalledWith({
credentials: {
accessKeyId: mockConstants.S3_ACCESS_KEY,
secretAccessKey: mockConstants.S3_SECRET_KEY,
},
region: mockConstants.S3_REGION,
endpoint: "",
forcePathStyle: mockConstants.S3_FORCE_PATH_STYLE,
});
expect(result.ok).toBe(true);
if (result.ok) {
expect(result.data).toBeDefined();
}
});
});
describe("createS3Client", () => {
test("should return provided S3 client when passed", async () => {
// Use a fresh import to avoid module cache issues
const { createS3Client } = await import("./client");
const mockClient = new S3Client({});
const result = createS3Client(mockClient);
expect(result).toBe(mockClient);
});
test("should create new client from environment when no client provided", async () => {
// Mock constants for this test
vi.doMock("./constants", () => ({
...mockConstants,
}));
const { createS3Client } = await import("./client");
const result = createS3Client();
expect(mockS3Client).toHaveBeenCalledWith({
credentials: {
accessKeyId: mockConstants.S3_ACCESS_KEY,
secretAccessKey: mockConstants.S3_SECRET_KEY,
},
region: mockConstants.S3_REGION,
endpoint: mockConstants.S3_ENDPOINT_URL,
forcePathStyle: mockConstants.S3_FORCE_PATH_STYLE,
});
expect(result).toBeDefined();
});
test("should return undefined when creating from env fails and no client provided", async () => {
// Mock constants with missing credentials
vi.doMock("./constants", () => ({
...mockConstants,
S3_ACCESS_KEY: undefined,
S3_SECRET_KEY: undefined,
}));
const { createS3Client } = await import("./client");
const result = createS3Client();
expect(result).toBeUndefined();
});
});
});

View File

@@ -0,0 +1,56 @@
import { S3Client } from "@aws-sdk/client-s3";
import { logger } from "@formbricks/logger";
import { ErrorCode, type Result, type StorageError, err, ok } from "../types/error";
import {
S3_ACCESS_KEY,
S3_BUCKET_NAME,
S3_ENDPOINT_URL,
S3_FORCE_PATH_STYLE,
S3_REGION,
S3_SECRET_KEY,
} from "./constants";
/**
 * Create an S3 client from environment variables.
 *
 * Validation happens here (at client-creation time), not at process startup:
 * a missing access key, secret key, region, or bucket name yields an
 * S3CredentialsError result instead of a thrown exception.
 *
 * @returns A Result containing the S3 client or an error: S3CredentialsError | UnknownError
 */
export const createS3ClientFromEnv = (): Result<S3Client, StorageError> => {
  try {
    if (!S3_ACCESS_KEY || !S3_SECRET_KEY || !S3_BUCKET_NAME || !S3_REGION) {
      // Bucket name and region are configuration rather than credentials, but
      // all four share one error code so consumers get a single
      // "storage is not configured" signal.
      logger.error(
        "S3 Client: S3 configuration is incomplete (access key, secret key, region, or bucket name is missing)"
      );
      return err({
        code: ErrorCode.S3CredentialsError,
      });
    }

    const s3ClientInstance = new S3Client({
      credentials: { accessKeyId: S3_ACCESS_KEY, secretAccessKey: S3_SECRET_KEY },
      region: S3_REGION,
      // endpoint/forcePathStyle are only relevant for S3-compatible providers
      // (MinIO, LocalStack); both are undefined/false on plain AWS.
      endpoint: S3_ENDPOINT_URL,
      forcePathStyle: S3_FORCE_PATH_STYLE,
    });

    return ok(s3ClientInstance);
  } catch (error) {
    logger.error({ error }, "Error creating S3 client from environment variables");
    return err({
      code: ErrorCode.Unknown,
    });
  }
};
/**
 * Resolve an S3 client: reuse the caller-supplied instance when one is given,
 * otherwise attempt to build a client from environment variables.
 * @param s3Client - An optional pre-configured S3 client
 * @returns The S3 client, or undefined when none was supplied and the
 *          environment does not contain a usable S3 configuration
 */
export const createS3Client = (s3Client?: S3Client): S3Client | undefined => {
  if (s3Client) return s3Client;
  const fromEnv = createS3ClientFromEnv();
  return fromEnv.ok ? fromEnv.data : undefined;
};

View File

@@ -0,0 +1,126 @@
import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
describe("constants.ts", () => {
const originalEnv = process.env;
beforeEach(() => {
vi.resetModules();
// Reset process.env to a clean state
process.env = { ...originalEnv };
});
afterEach(() => {
// Restore original environment
process.env = originalEnv;
});
describe("environment variable exports", () => {
test("should export S3_ACCESS_KEY from environment", async () => {
process.env.S3_ACCESS_KEY = "test-access-key-123";
const { S3_ACCESS_KEY } = await import("./constants");
expect(S3_ACCESS_KEY).toBe("test-access-key-123");
});
test("should export undefined when S3_ACCESS_KEY is not set", async () => {
delete process.env.S3_ACCESS_KEY;
const { S3_ACCESS_KEY } = await import("./constants");
expect(S3_ACCESS_KEY).toBeUndefined();
});
test("should export S3_SECRET_KEY from environment", async () => {
process.env.S3_SECRET_KEY = "test-secret-key-456";
const { S3_SECRET_KEY } = await import("./constants");
expect(S3_SECRET_KEY).toBe("test-secret-key-456");
});
test("should export undefined when S3_SECRET_KEY is not set", async () => {
delete process.env.S3_SECRET_KEY;
const { S3_SECRET_KEY } = await import("./constants");
expect(S3_SECRET_KEY).toBeUndefined();
});
test("should export S3_REGION from environment", async () => {
process.env.S3_REGION = "eu-west-1";
const { S3_REGION } = await import("./constants");
expect(S3_REGION).toBe("eu-west-1");
});
test("should export undefined when S3_REGION is not set", async () => {
delete process.env.S3_REGION;
const { S3_REGION } = await import("./constants");
expect(S3_REGION).toBeUndefined();
});
test("should export S3_ENDPOINT_URL from environment", async () => {
process.env.S3_ENDPOINT_URL = "https://custom-s3-endpoint.com";
const { S3_ENDPOINT_URL } = await import("./constants");
expect(S3_ENDPOINT_URL).toBe("https://custom-s3-endpoint.com");
});
test("should export undefined when S3_ENDPOINT_URL is not set", async () => {
delete process.env.S3_ENDPOINT_URL;
const { S3_ENDPOINT_URL } = await import("./constants");
expect(S3_ENDPOINT_URL).toBeUndefined();
});
test("should export S3_BUCKET_NAME from environment", async () => {
process.env.S3_BUCKET_NAME = "my-storage-bucket";
const { S3_BUCKET_NAME } = await import("./constants");
expect(S3_BUCKET_NAME).toBe("my-storage-bucket");
});
test("should export undefined when S3_BUCKET_NAME is not set", async () => {
delete process.env.S3_BUCKET_NAME;
const { S3_BUCKET_NAME } = await import("./constants");
expect(S3_BUCKET_NAME).toBeUndefined();
});
});
describe("boolean conversion constants", () => {
describe("S3_FORCE_PATH_STYLE", () => {
test("should be true when S3_FORCE_PATH_STYLE is '1'", async () => {
process.env.S3_FORCE_PATH_STYLE = "1";
const { S3_FORCE_PATH_STYLE } = await import("./constants");
expect(S3_FORCE_PATH_STYLE).toBe(true);
});
test("should be false when S3_FORCE_PATH_STYLE is '0'", async () => {
process.env.S3_FORCE_PATH_STYLE = "0";
const { S3_FORCE_PATH_STYLE } = await import("./constants");
expect(S3_FORCE_PATH_STYLE).toBe(false);
});
test("should be false when S3_FORCE_PATH_STYLE is undefined", async () => {
delete process.env.S3_FORCE_PATH_STYLE;
const { S3_FORCE_PATH_STYLE } = await import("./constants");
expect(S3_FORCE_PATH_STYLE).toBe(false);
});
});
});
});

View File

@@ -0,0 +1,6 @@
// S3 connection settings, read once at module load time.
// All values may be undefined here; validation happens at client creation.
export const S3_ACCESS_KEY = process.env.S3_ACCESS_KEY;
export const S3_SECRET_KEY = process.env.S3_SECRET_KEY;
export const S3_REGION = process.env.S3_REGION;
// Custom endpoint for S3-compatible providers (MinIO, LocalStack); unset on AWS.
export const S3_ENDPOINT_URL = process.env.S3_ENDPOINT_URL;
// Only the exact string "1" enables path-style addressing (required by MinIO).
export const S3_FORCE_PATH_STYLE = process.env.S3_FORCE_PATH_STYLE === "1";
export const S3_BUCKET_NAME = process.env.S3_BUCKET_NAME;

View File

@@ -0,0 +1 @@
// Public API surface: only the four storage operations are exposed;
// the client factory and constants stay internal to the package.
export { deleteFile, deleteFilesByPrefix, getSignedDownloadUrl, getSignedUploadUrl } from "./service";

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,274 @@
import {
DeleteObjectCommand,
DeleteObjectsCommand,
type DeleteObjectsCommandOutput,
GetObjectCommand,
HeadObjectCommand,
paginateListObjectsV2,
} from "@aws-sdk/client-s3";
import {
type PresignedPost,
type PresignedPostOptions,
createPresignedPost,
} from "@aws-sdk/s3-presigned-post";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { logger } from "@formbricks/logger";
import { ErrorCode, type Result, type StorageError, err, ok } from "../types/error";
import { createS3Client } from "./client";
import { S3_BUCKET_NAME } from "./constants";
const s3Client = createS3Client();
/**
 * Generate a presigned POST so a client can upload a file directly to S3
 * without ever seeing the S3 credentials.
 * @param fileName - Name of the file to upload
 * @param contentType - MIME type the upload must carry
 * @param filePath - Key prefix (folder) the file is stored under
 * @param maxSize - Upper bound in bytes enforced by the presigned policy;
 *                  defaults to 10MB
 * @returns A Result with the POST URL and the form fields the client must
 *          submit, or a StorageError
 */
export const getSignedUploadUrl = async (
  fileName: string,
  contentType: string,
  filePath: string,
  maxSize: number = 1024 * 1024 * 10 // 10MB
): Promise<
  Result<
    {
      signedUrl: string;
      presignedFields: PresignedPost["fields"];
    },
    StorageError
  >
> => {
  try {
    if (!s3Client) {
      logger.error("Failed to get signed upload URL: S3 client is not set");
      return err({
        code: ErrorCode.S3ClientError,
      });
    }

    if (!S3_BUCKET_NAME) {
      logger.error("Failed to get signed upload URL: S3 bucket name is not set");
      return err({
        code: ErrorCode.S3CredentialsError,
      });
    }

    // Enforce the size cap through a policy condition when one was requested.
    const uploadConditions: PresignedPostOptions["Conditions"] = maxSize
      ? [["content-length-range", 0, maxSize]]
      : undefined;

    const presignedPost = await createPresignedPost(s3Client, {
      Expires: 2 * 60, // short-lived (2 minutes) for security
      Bucket: S3_BUCKET_NAME,
      Key: `${filePath}/${fileName}`,
      Fields: {
        "Content-Type": contentType,
        "Content-Encoding": "base64",
      },
      Conditions: uploadConditions,
    });

    return ok({
      signedUrl: presignedPost.url,
      presignedFields: presignedPost.fields,
    });
  } catch (error) {
    logger.error({ error }, "Failed to get signed upload URL");
    return err({
      code: ErrorCode.Unknown,
    });
  }
};
/**
 * Get a signed URL for a file in S3
 * @param fileKey - The key of the file in S3
 * @returns A Result containing the signed URL (valid for 30 minutes) or an
 *          error: StorageError (FileNotFoundError when the object is missing)
 */
export const getSignedDownloadUrl = async (fileKey: string): Promise<Result<string, StorageError>> => {
  try {
    if (!s3Client) {
      return err({
        code: ErrorCode.S3ClientError,
      });
    }

    if (!S3_BUCKET_NAME) {
      return err({
        code: ErrorCode.S3CredentialsError,
      });
    }

    // Check if file exists before generating signed URL
    const headObjectCommand = new HeadObjectCommand({
      Bucket: S3_BUCKET_NAME,
      Key: fileKey,
    });

    try {
      await s3Client.send(headObjectCommand);
    } catch (error: unknown) {
      logger.error({ error }, "Failed to check if file exists");

      // A "NotFound" error or HTTP 404 from HeadObject means the object
      // does not exist — surface that as FileNotFoundError.
      if (
        (error as Error).name === "NotFound" ||
        (error as { $metadata?: { httpStatusCode?: number } }).$metadata?.httpStatusCode === 404
      ) {
        return err({
          code: ErrorCode.FileNotFoundError,
        });
      }

      // Any other HeadObject failure (e.g. permissions, transient network
      // error) is treated as non-fatal: still attempt to sign the URL.
      logger.warn({ error, fileKey }, "HeadObject check failed; proceeding to sign download URL");
    }

    const getObjectCommand = new GetObjectCommand({
      Bucket: S3_BUCKET_NAME,
      Key: fileKey,
    });

    // 30-minute expiry balances link longevity for users against exposure.
    return ok(await getSignedUrl(s3Client, getObjectCommand, { expiresIn: 60 * 30 }));
  } catch (error) {
    logger.error({ error }, "Failed to get signed download URL");
    return err({
      code: ErrorCode.Unknown,
    });
  }
};
/**
* Delete a file from S3
* @param fileKey - The key of the file in S3 (e.g. "surveys/123/responses/456/file.pdf")
* @returns A Result containing the void or an error: StorageError
*/
export const deleteFile = async (fileKey: string): Promise<Result<void, StorageError>> => {
try {
if (!s3Client) {
return err({
code: ErrorCode.S3ClientError,
});
}
if (!S3_BUCKET_NAME) {
return err({
code: ErrorCode.S3CredentialsError,
});
}
const deleteObjectCommand = new DeleteObjectCommand({
Bucket: S3_BUCKET_NAME,
Key: fileKey,
});
await s3Client.send(deleteObjectCommand);
return ok(undefined);
} catch (error) {
logger.error({ error }, "Failed to delete file");
return err({
code: ErrorCode.Unknown,
});
}
};
/**
 * Bulk delete every object whose key starts with the given prefix.
 * Handles S3's two 1000-object limits internally: ListObjectsV2 pagination
 * for collection and DeleteObjects batching for removal; batches are sent
 * in parallel. Partial failures are logged but do not fail the operation.
 * @param prefix - The key prefix, e.g. "surveys/123/" (empty or "/" is rejected)
 * @returns A Result containing void or an error: StorageError
 */
export const deleteFilesByPrefix = async (prefix: string): Promise<Result<void, StorageError>> => {
  try {
    if (!s3Client) {
      return err({
        code: ErrorCode.S3ClientError,
      });
    }

    if (!S3_BUCKET_NAME) {
      return err({
        code: ErrorCode.S3CredentialsError,
      });
    }

    // Guard against mass deletion: an empty or root prefix would match the
    // whole bucket.
    const normalizedPrefix = prefix.trim();
    if (!normalizedPrefix || normalizedPrefix === "/") {
      logger.error({ prefix }, "Refusing to delete files with an empty or root prefix");
      return err({
        code: ErrorCode.InvalidInput,
      });
    }

    // Collect every key under the prefix, one page (max 1000 keys) at a time.
    const keys: { Key: string }[] = [];
    const paginator = paginateListObjectsV2(
      { client: s3Client },
      {
        Bucket: S3_BUCKET_NAME,
        Prefix: normalizedPrefix,
      }
    );

    for await (const page of paginator) {
      // Skip entries without a Key (defensive against sparse API responses).
      const pageKeys = page.Contents?.flatMap((obj) => (obj.Key ? [{ Key: obj.Key }] : [])) ?? [];
      keys.push(...pageKeys);
    }

    if (keys.length === 0) {
      return ok(undefined);
    }

    // DeleteObjects accepts at most 1000 keys per request; issue the batches
    // concurrently and await them together.
    const deletionPromises: Promise<DeleteObjectsCommandOutput>[] = [];
    for (let i = 0; i < keys.length; i += 1000) {
      const batch = keys.slice(i, i + 1000);
      const deleteObjectsCommand = new DeleteObjectsCommand({
        Bucket: S3_BUCKET_NAME,
        Delete: {
          Objects: batch,
        },
      });
      deletionPromises.push(s3Client.send(deleteObjectsCommand));
    }

    const results = await Promise.all(deletionPromises);

    // Check for partial failures and log them
    let totalErrors = 0;
    let totalDeleted = 0;

    for (const result of results) {
      if (result.Deleted) {
        totalDeleted += result.Deleted.length;
        logger.debug({ count: result.Deleted.length }, "Successfully deleted objects in batch");
      }

      if (result.Errors && result.Errors.length > 0) {
        totalErrors += result.Errors.length;
        logger.error(
          {
            errors: result.Errors.map((e) => ({
              key: e.Key,
              code: e.Code,
              message: e.Message,
            })),
          },
          "Some objects failed to delete"
        );
      }
    }

    // Log the issues
    if (totalErrors > 0) {
      logger.warn({ totalErrors, totalDeleted }, "Bulk delete completed with some failures");
    }

    return ok(undefined);
  } catch (error) {
    logger.error({ error }, "Failed to delete files by prefix");
    return err({
      code: ErrorCode.Unknown,
    });
  }
};

View File

@@ -0,0 +1,12 @@
{
"compilerOptions": {
"allowImportingTsExtensions": true,
"isolatedModules": true,
"noEmit": true,
"resolveJsonModule": true,
"strict": true
},
"exclude": ["node_modules"],
"extends": "@formbricks/config-typescript/js-library.json",
"include": ["src", "package.json", "types"]
}

View File

@@ -0,0 +1,32 @@
/** Success variant of a {@link Result}, carrying the produced value. */
export interface ResultOk<T> {
  ok: true;
  data: T;
}

/** Error variant of a {@link Result}, carrying the failure value. */
export interface ResultError<T> {
  ok: false;
  error: T;
}

/**
 * Discriminated union for explicit error handling: narrow on the `ok` flag
 * to access either `data` (success) or `error` (failure).
 *
 * Defined in terms of {@link ResultOk} / {@link ResultError} so the union and
 * the named variants cannot drift apart.
 */
export type Result<T, E = Error> = ResultOk<T> | ResultError<E>;

/** Wrap a value in a successful Result. */
export const ok = <T, E>(data: T): Result<T, E> => ({ ok: true, data });

/** Convenience success Result for operations that produce no value. */
export const okVoid = <E>(): Result<void, E> => ({ ok: true, data: undefined });

/** Wrap an error value in a failed Result. */
export const err = <E = Error>(error: E): ResultError<E> => ({
  ok: false,
  error,
});

/** Machine-readable error codes for storage operations. */
export enum ErrorCode {
  Unknown = "unknown",
  S3CredentialsError = "s3_credentials_error",
  S3ClientError = "s3_client_error",
  FileNotFoundError = "file_not_found_error",
  InvalidInput = "invalid_input",
}

/** Error payload carried by storage Results. */
export interface StorageError {
  code: ErrorCode;
}

View File

@@ -0,0 +1,33 @@
/// <reference types="vitest" />
import { resolve } from "path";
import { PluginOption, defineConfig } from "vite";
import dts from "vite-plugin-dts";

// Vite build + Vitest configuration for the @formbricks/storage package.
export default defineConfig({
  build: {
    lib: {
      // Single library entry; emitted as both ESM and CJS bundles named "index".
      entry: resolve(__dirname, "src/index.ts"),
      name: "formbricksStorage",
      fileName: "index",
      formats: ["es", "cjs"],
    },
    rollupOptions: {
      // Keep the AWS SDK packages and the internal logger out of the bundle;
      // consumers resolve them at runtime instead of inlining them here.
      external: [
        "@aws-sdk/client-s3",
        "@aws-sdk/s3-presigned-post",
        "@aws-sdk/s3-request-presigner",
        "@formbricks/logger",
      ],
    },
  },
  test: {
    // S3 client code runs server-side, so tests execute in a Node environment
    // with Vitest globals (describe/it/expect) available without imports.
    environment: "node",
    globals: true,
    coverage: {
      // lcov output is consumed by SonarQube; type-only files are excluded.
      reporter: ["text", "json", "html", "lcov"],
      exclude: ["src/types/**"],
      include: ["src/**/*.ts"],
    },
  },
  // Roll all .d.ts files into a single type declaration for the package.
  plugins: [dts({ rollupTypes: true }) as PluginOption],
});

1212
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -2,16 +2,16 @@ sonar.projectKey=formbricks_formbricks
sonar.organization=formbricks
# Sources
sonar.sources=apps/web,packages/surveys,packages/js-core
sonar.sources=apps/web,packages/surveys,packages/js-core,packages/storage
sonar.exclusions=**/node_modules/**,**/.next/**,**/dist/**,**/build/**,**/*.test.*,**/*.spec.*,**/__mocks__/**
# Tests
sonar.tests=apps/web,packages/surveys,packages/js-core
sonar.tests=apps/web,packages/surveys,packages/js-core,packages/storage
sonar.test.inclusions=**/*.test.*,**/*.spec.*
sonar.javascript.lcov.reportPaths=apps/web/coverage/lcov.info,packages/surveys/coverage/lcov.info,packages/js-core/coverage/lcov.info
sonar.javascript.lcov.reportPaths=apps/web/coverage/lcov.info,packages/surveys/coverage/lcov.info,packages/js-core/coverage/lcov.info,packages/storage/coverage/lcov.info
# TypeScript configuration
sonar.typescript.tsconfigPath=apps/web/tsconfig.json,packages/surveys/tsconfig.json,packages/js-core/tsconfig.json
sonar.typescript.tsconfigPath=apps/web/tsconfig.json,packages/surveys/tsconfig.json,packages/js-core/tsconfig.json,packages/storage/tsconfig.json
# SCM
sonar.scm.provider=git

View File

@@ -42,6 +42,23 @@
"dependsOn": ["@formbricks/database#db:setup"],
"persistent": true
},
"@formbricks/storage#build": {
"dependsOn": ["@formbricks/logger#build"]
},
"@formbricks/storage#go": {
"cache": false,
"dependsOn": ["@formbricks/storage#build"],
"persistent": true
},
"@formbricks/storage#lint": {
"dependsOn": ["@formbricks/logger#build"]
},
"@formbricks/storage#test": {
"dependsOn": ["@formbricks/logger#build"]
},
"@formbricks/storage#test:coverage": {
"dependsOn": ["@formbricks/logger#build"]
},
"@formbricks/surveys#build": {
"dependsOn": ["^build"],
"outputs": ["dist/**"]