Mirror of https://github.com/formbricks/formbricks.git (synced 2025-12-24 15:10:36 -06:00)

Compare commits: 14 commits
fix/race-c
...
feat/test-
| Author | SHA1 | Date |
|---|---|---|
|  | a9db89ecdd |  |
|  | 0155c41593 |  |
|  | df63f2e5d9 |  |
|  | 7dd174ffea |  |
|  | 7154f6fe74 |  |
|  | f25f257f24 |  |
|  | b945900fbf |  |
|  | f8869e7522 |  |
|  | 886eb8598a |  |
|  | fe3c8e010f |  |
|  | a6a76cc3cf |  |
|  | 9e7a4e38cf |  |
|  | 9cff5457d6 |  |
|  | a362455878 |  |
@@ -1,6 +1,11 @@
---
description: It should be used **only when the agent explicitly requests database schema-level, details** to support tasks such as: writing/debugging Prisma queries, designing/reviewing data models, investigating multi-tenancy behavior, creating API endpoints, or understanding data relationships.
alwaysApply: false
description: >
This rule provides comprehensive knowledge about the Formbricks database structure, relationships,
and data patterns. It should be used **only when the agent explicitly requests database schema-level
details** to support tasks such as: writing/debugging Prisma queries, designing/reviewing data models,
investigating multi-tenancy behavior, creating API endpoints, or understanding data relationships.
globs: []
alwaysApply: agent-requested
---

# Formbricks Database Schema Reference

@@ -1,74 +0,0 @@
---
alwaysApply: true
---

### Formbricks Monorepo Overview

- **Project**: Formbricks — open‑source survey and experience management platform. Repo: [formbricks/formbricks](https://github.com/formbricks/formbricks)
- **Monorepo**: Turborepo + pnpm workspaces. Root configs: [package.json](mdc:package.json), [turbo.json](mdc:turbo.json)
- **Core app**: Next.js app in `apps/web` with Prisma, Auth.js, TailwindCSS, Vitest, Playwright. Enterprise modules live in [apps/web/modules/ee](mdc:apps/web/modules/ee)
- **Datastores**: PostgreSQL + Redis. Local dev via [docker-compose.dev.yml](mdc:docker-compose.dev.yml); Prisma schema at [packages/database/schema.prisma](mdc:packages/database/schema.prisma)
- **Docs & Ops**: Docs in `docs/` (Mintlify), Helm in `helm-chart/`, IaC in `infra/`

### Apps

- **apps/web**: Next.js product application (API, UI, SSO, i18n, emails, uploads, integrations)
- **apps/storybook**: Storybook for UI components; a11y addon + Vite builder

### Packages

- **@formbricks/database** (`packages/database`): Prisma schema, DB scripts, migrations, data layer
- **@formbricks/js-core** (`packages/js-core`): Core runtime for web embed / async loader
- **@formbricks/surveys** (`packages/surveys`): Embeddable survey rendering and helpers
- **@formbricks/logger** (`packages/logger`): Shared logging (pino) + Zod types
- **@formbricks/types** (`packages/types`): Shared types (Zod, Prisma clients)
- **@formbricks/i18n-utils** (`packages/i18n-utils`): i18n helpers and build output
- **@formbricks/eslint-config** (`packages/config-eslint`): Central ESLint config (Next, TS, Vitest, Prettier)
- **@formbricks/config-typescript** (`packages/config-typescript`): Central TS config and types
- **@formbricks/vite-plugins** (`packages/vite-plugins`): Internal Vite plugins
- **packages/android, packages/ios**: Native SDKs (built with platform toolchains)

### Enterprise‑ready by design

- **Quality & safety**: Strict TypeScript, repo‑wide ESLint + Prettier, lint‑staged + Husky, CI checks, typed env validation
- **Security‑first**: Auth.js, SSO/SAML/OIDC, session controls, rate limiting, Sentry, structured logging

### Accessible by design

- **UI foundations**: Radix UI, TailwindCSS, Storybook with `@storybook/addon-a11y`, keyboard and screen‑reader‑friendly components

### Root pnpm commands

```bash
pnpm clean:all          # Clean turbo cache, node_modules, lockfile, coverage, out
pnpm clean              # Clean turbo cache, node_modules, coverage, out
pnpm build              # Build all packages/apps (turbo)
pnpm build:dev          # Dev-optimized builds (where supported)
pnpm dev                # Run all dev servers in parallel
pnpm start              # Start built apps/services
pnpm go                 # Start DB (docker compose) and run long-running dev tasks
pnpm generate           # Run generators (e.g., Prisma, API specs)
pnpm lint               # Lint all
pnpm format             # Prettier write across repo
pnpm test               # Unit tests
pnpm test:coverage      # Unit tests with coverage
pnpm test:e2e           # Playwright tests
pnpm test-e2e:azure     # Playwright tests with Azure config
pnpm storybook          # Run Storybook
pnpm db:up              # Start local Postgres/Redis via docker compose
pnpm db:down            # Stop local DB stack
pnpm db:start           # Project-level DB setup choreography
pnpm db:push            # Prisma db push (accept data loss in package script)
pnpm db:migrate:dev     # Apply dev migrations
pnpm db:migrate:deploy  # Apply prod migrations
pnpm fb-migrate-dev     # Create DB migration (database package) and prisma generate
pnpm tolgee-pull        # Pull translation keys for current branch and format
```

### Essentials for every prompt

- **Tech stack**: Next.js, React 19, TypeScript, Prisma, Zod, TailwindCSS, Turborepo, Vitest, Playwright
- **Environments**: See `.env.example`. Many tasks require DB up and env variables set
- **Licensing**: Core under AGPLv3; Enterprise code in `apps/web/modules/ee` (included in Docker, unlocked via Enterprise License Key)

For deeper details, consult per‑package `package.json` and scripts (e.g., [apps/web/package.json](mdc:apps/web/package.json)).
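The overview above lists "typed env validation" among the quality measures but does not show what that looks like in practice. Below is a minimal sketch of the idea using Zod, which the tech-stack section names; the variable names and the module location are illustrative assumptions rather than the repo's actual implementation.

```typescript
// Illustrative sketch only: the variable names and this module's location are assumptions,
// not the actual Formbricks env module. See .env.example for the real variable list.
import { z } from "zod";

const envSchema = z.object({
  DATABASE_URL: z.string().url(),
  REDIS_URL: z.string().url().optional(),
  NODE_ENV: z.enum(["development", "test", "production"]).default("development"),
});

// Parsing once at startup fails fast with a readable error instead of
// surfacing undefined values somewhere deep at runtime.
export const env = envSchema.parse(process.env);
```

This is also why many of the root commands above expect the local DB stack to be running (`pnpm db:up`) and a populated `.env` before they succeed.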
@@ -10,12 +10,8 @@ export const FormWrapper = ({ children }: FormWrapperProps) => {
<div className="mx-auto flex flex-1 flex-col justify-center px-4 py-12 sm:px-6 lg:flex-none lg:px-20 xl:px-24">
<div className="mx-auto w-full max-w-sm rounded-xl bg-white p-8 shadow-xl lg:w-96">
<div className="mb-8 text-center">
<Link
target="_blank"
href="https://formbricks.com?utm_source=ce"
rel="noopener noreferrer"
aria-label="Formbricks website">
<Logo className="mx-auto w-3/4" variant="wordmark" aria-hidden="true" />
<Link target="_blank" href="https://formbricks.com?utm_source=ce" rel="noopener noreferrer">
<Logo className="mx-auto w-3/4" />
</Link>
</div>
{children}

@@ -149,10 +149,10 @@ describe("AddApiKeyModal", () => {
|
||||
|
||||
test("handles label input", async () => {
|
||||
render(<AddApiKeyModal {...defaultProps} />);
|
||||
const labelInput = screen.getByPlaceholderText("e.g. GitHub, PostHog, Slack");
|
||||
const labelInput = screen.getByPlaceholderText("e.g. GitHub, PostHog, Slack") as HTMLInputElement;
|
||||
|
||||
await userEvent.type(labelInput, "Test API Key");
|
||||
expect((labelInput as HTMLInputElement).value).toBe("Test API Key");
|
||||
expect(labelInput.value).toBe("Test API Key");
|
||||
});
|
||||
|
||||
test("handles permission changes", async () => {
|
||||
@@ -184,120 +184,21 @@ describe("AddApiKeyModal", () => {
|
||||
await userEvent.click(addButton);
|
||||
|
||||
// Verify new permission row is added
|
||||
const deleteButtons = await screen.findAllByRole("button", {
|
||||
name: "environments.project.api_keys.delete_permission",
|
||||
});
|
||||
const deleteButtons = screen.getAllByRole("button", { name: "" }); // Trash icons
|
||||
expect(deleteButtons).toHaveLength(2);
|
||||
|
||||
// Remove the new permission
|
||||
await userEvent.click(deleteButtons[1]);
|
||||
|
||||
// Check that only the original permission row remains
|
||||
const remainingDeleteButtons = await screen.findAllByRole("button", {
|
||||
name: "environments.project.api_keys.delete_permission",
|
||||
});
|
||||
expect(remainingDeleteButtons).toHaveLength(1);
|
||||
});
|
||||
|
||||
test("removes permissions from middle of list without breaking indices", async () => {
|
||||
render(<AddApiKeyModal {...defaultProps} />);
|
||||
|
||||
// Add first permission
|
||||
const addButton = screen.getByRole("button", { name: /add_permission/i });
|
||||
await userEvent.click(addButton);
|
||||
|
||||
// Add second permission
|
||||
await userEvent.click(addButton);
|
||||
|
||||
// Add third permission
|
||||
await userEvent.click(addButton);
|
||||
|
||||
// Verify we have 3 permission rows
|
||||
let deleteButtons = await screen.findAllByRole("button", {
|
||||
name: "environments.project.api_keys.delete_permission",
|
||||
});
|
||||
expect(deleteButtons).toHaveLength(3);
|
||||
|
||||
// Remove the middle permission (index 1)
|
||||
await userEvent.click(deleteButtons[1]);
|
||||
|
||||
// Verify we now have 2 permission rows
|
||||
deleteButtons = await screen.findAllByRole("button", {
|
||||
name: "environments.project.api_keys.delete_permission",
|
||||
});
|
||||
expect(deleteButtons).toHaveLength(2);
|
||||
|
||||
// Try to remove the second remaining permission (this was previously index 2, now index 1)
|
||||
await userEvent.click(deleteButtons[1]);
|
||||
|
||||
// Verify we now have 1 permission row
|
||||
deleteButtons = await screen.findAllByRole("button", {
|
||||
name: "environments.project.api_keys.delete_permission",
|
||||
});
|
||||
expect(deleteButtons).toHaveLength(1);
|
||||
|
||||
// Remove the last remaining permission
|
||||
await userEvent.click(deleteButtons[0]);
|
||||
|
||||
// Verify no permission rows remain
|
||||
expect(
|
||||
screen.queryAllByRole("button", { name: "environments.project.api_keys.delete_permission" })
|
||||
).toHaveLength(0);
|
||||
});
|
||||
|
||||
test("can modify permissions after deleting items from list", async () => {
|
||||
render(<AddApiKeyModal {...defaultProps} />);
|
||||
|
||||
// Add multiple permissions
|
||||
const addButton = screen.getByRole("button", { name: /add_permission/i });
|
||||
await userEvent.click(addButton); // First permission
|
||||
await userEvent.click(addButton); // Second permission
|
||||
await userEvent.click(addButton); // Third permission
|
||||
|
||||
// Verify we have 3 permission rows
|
||||
let deleteButtons = await screen.findAllByRole("button", {
|
||||
name: "environments.project.api_keys.delete_permission",
|
||||
});
|
||||
expect(deleteButtons).toHaveLength(3);
|
||||
|
||||
// Remove the first permission (index 0)
|
||||
await userEvent.click(deleteButtons[0]);
|
||||
|
||||
// Verify we now have 2 permission rows
|
||||
deleteButtons = await screen.findAllByRole("button", {
|
||||
name: "environments.project.api_keys.delete_permission",
|
||||
});
|
||||
expect(deleteButtons).toHaveLength(2);
|
||||
|
||||
// Try to modify the first remaining permission (which was originally index 1, now index 0)
|
||||
const projectDropdowns = screen.getAllByRole("button", { name: /Project 1/i });
|
||||
expect(projectDropdowns.length).toBeGreaterThan(0);
|
||||
|
||||
await userEvent.click(projectDropdowns[0]);
|
||||
|
||||
// Wait for dropdown content and select 'Project 2'
|
||||
const project2Option = await screen.findByRole("menuitem", { name: "Project 2" });
|
||||
await userEvent.click(project2Option);
|
||||
|
||||
// Verify project selection by checking the updated button text
|
||||
const updatedButton = await screen.findByRole("button", { name: "Project 2" });
|
||||
expect(updatedButton).toBeInTheDocument();
|
||||
|
||||
// Add another permission to verify the list is still functional
|
||||
await userEvent.click(addButton);
|
||||
|
||||
// Verify we now have 3 permission rows again
|
||||
deleteButtons = await screen.findAllByRole("button", {
|
||||
name: "environments.project.api_keys.delete_permission",
|
||||
});
|
||||
expect(deleteButtons).toHaveLength(3);
|
||||
expect(screen.getAllByRole("button", { name: "" })).toHaveLength(1);
|
||||
});
|
||||
|
||||
test("submits form with correct data", async () => {
|
||||
render(<AddApiKeyModal {...defaultProps} />);
|
||||
|
||||
// Fill in label
|
||||
const labelInput = screen.getByPlaceholderText("e.g. GitHub, PostHog, Slack");
|
||||
const labelInput = screen.getByPlaceholderText("e.g. GitHub, PostHog, Slack") as HTMLInputElement;
|
||||
await userEvent.type(labelInput, "Test API Key");
|
||||
|
||||
const addButton = screen.getByRole("button", { name: /add_permission/i });
|
||||
@@ -377,7 +278,7 @@ describe("AddApiKeyModal", () => {
|
||||
render(<AddApiKeyModal {...defaultProps} />);
|
||||
|
||||
// Type something into the label
|
||||
const labelInput = screen.getByPlaceholderText("e.g. GitHub, PostHog, Slack");
|
||||
const labelInput = screen.getByPlaceholderText("e.g. GitHub, PostHog, Slack") as HTMLInputElement;
|
||||
await userEvent.type(labelInput, "Test API Key");
|
||||
|
||||
// Click the cancel button
|
||||
@@ -386,219 +287,6 @@ describe("AddApiKeyModal", () => {
|
||||
|
||||
// Verify modal is closed and form is reset
|
||||
expect(mockSetOpen).toHaveBeenCalledWith(false);
|
||||
expect((labelInput as HTMLInputElement).value).toBe("");
|
||||
});
|
||||
|
||||
test("updates permission field (non-environmentId)", async () => {
|
||||
render(<AddApiKeyModal {...defaultProps} />);
|
||||
|
||||
// Add a permission first
|
||||
const addButton = screen.getByRole("button", { name: /add_permission/i });
|
||||
await userEvent.click(addButton);
|
||||
|
||||
// Click on permission level dropdown (third dropdown in the row)
|
||||
const permissionDropdowns = screen.getAllByRole("button", { name: /read/i });
|
||||
await userEvent.click(permissionDropdowns[0]);
|
||||
|
||||
// Select 'write' permission
|
||||
const writeOption = await screen.findByRole("menuitem", { name: "write" });
|
||||
await userEvent.click(writeOption);
|
||||
|
||||
// Verify permission selection by checking the updated button text
|
||||
const updatedButton = await screen.findByRole("button", { name: "write" });
|
||||
expect(updatedButton).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("updates environmentId with valid environment", async () => {
|
||||
render(<AddApiKeyModal {...defaultProps} />);
|
||||
|
||||
// Add a permission first
|
||||
const addButton = screen.getByRole("button", { name: /add_permission/i });
|
||||
await userEvent.click(addButton);
|
||||
|
||||
// Click on environment dropdown (second dropdown in the row)
|
||||
const environmentDropdowns = screen.getAllByRole("button", { name: /production/i });
|
||||
await userEvent.click(environmentDropdowns[0]);
|
||||
|
||||
// Select 'development' environment
|
||||
const developmentOption = await screen.findByRole("menuitem", { name: "development" });
|
||||
await userEvent.click(developmentOption);
|
||||
|
||||
// Verify environment selection by checking the updated button text
|
||||
const updatedButton = await screen.findByRole("button", { name: "development" });
|
||||
expect(updatedButton).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("updates project and automatically selects first environment", async () => {
|
||||
render(<AddApiKeyModal {...defaultProps} />);
|
||||
|
||||
// Add a permission first
|
||||
const addButton = screen.getByRole("button", { name: /add_permission/i });
|
||||
await userEvent.click(addButton);
|
||||
|
||||
// Initially should show Project 1 and production environment
|
||||
expect(screen.getByRole("button", { name: "Project 1" })).toBeInTheDocument();
|
||||
expect(screen.getByRole("button", { name: /production/i })).toBeInTheDocument();
|
||||
|
||||
// Click on project dropdown (first dropdown in the row)
|
||||
const projectDropdowns = screen.getAllByRole("button", { name: /Project 1/i });
|
||||
await userEvent.click(projectDropdowns[0]);
|
||||
|
||||
// Select 'Project 2'
|
||||
const project2Option = await screen.findByRole("menuitem", { name: "Project 2" });
|
||||
await userEvent.click(project2Option);
|
||||
|
||||
// Verify project selection and that environment was auto-updated
|
||||
const updatedProjectButton = await screen.findByRole("button", { name: "Project 2" });
|
||||
expect(updatedProjectButton).toBeInTheDocument();
|
||||
|
||||
// Environment should still be production (first environment of Project 2)
|
||||
expect(screen.getByRole("button", { name: /production/i })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("handles edge case when project is not found", async () => {
|
||||
// Create a modified mock with corrupted project reference
|
||||
const corruptedProjects = [
|
||||
{
|
||||
...mockProjects[0],
|
||||
id: "different-id", // This will cause project lookup to fail
|
||||
},
|
||||
];
|
||||
|
||||
render(<AddApiKeyModal {...defaultProps} projects={corruptedProjects} />);
|
||||
|
||||
// Add a permission first
|
||||
const addButton = screen.getByRole("button", { name: /add_permission/i });
|
||||
await userEvent.click(addButton);
|
||||
|
||||
// The component should still render without crashing
|
||||
expect(screen.getByRole("button", { name: /add_permission/i })).toBeInTheDocument();
|
||||
|
||||
// Try to interact with environment dropdown - should not crash
|
||||
const environmentDropdowns = screen.getAllByRole("button", { name: /production/i });
|
||||
await userEvent.click(environmentDropdowns[0]);
|
||||
|
||||
// Should be able to find and click on development option
|
||||
const developmentOption = await screen.findByRole("menuitem", { name: "development" });
|
||||
await userEvent.click(developmentOption);
|
||||
|
||||
// Verify environment selection works even when project lookup fails
|
||||
const updatedButton = await screen.findByRole("button", { name: "development" });
|
||||
expect(updatedButton).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("handles edge case when environment is not found", async () => {
|
||||
// Create a project with no environments
|
||||
const projectWithNoEnvs = [
|
||||
{
|
||||
...mockProjects[0],
|
||||
environments: [], // No environments available
|
||||
},
|
||||
];
|
||||
|
||||
render(<AddApiKeyModal {...defaultProps} projects={projectWithNoEnvs} />);
|
||||
|
||||
// Try to add a permission - this should handle the case gracefully
|
||||
const addButton = screen.getByRole("button", { name: /add_permission/i });
|
||||
|
||||
// This might not add a permission if no environments exist, which is expected behavior
|
||||
await userEvent.click(addButton);
|
||||
|
||||
// Component should still be functional
|
||||
expect(screen.getByRole("button", { name: /add_permission/i })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("validates duplicate permissions detection", async () => {
|
||||
render(<AddApiKeyModal {...defaultProps} />);
|
||||
|
||||
// Fill in a label
|
||||
const labelInput = screen.getByPlaceholderText("e.g. GitHub, PostHog, Slack");
|
||||
await userEvent.type(labelInput, "Test API Key");
|
||||
|
||||
// Add first permission
|
||||
const addButton = screen.getByRole("button", { name: /add_permission/i });
|
||||
await userEvent.click(addButton);
|
||||
|
||||
// Add second permission with same project/environment
|
||||
await userEvent.click(addButton);
|
||||
|
||||
// Both permissions should now have the same project and environment (Project 1, production)
|
||||
// Try to submit the form - it should show duplicate error
|
||||
const submitButton = screen.getByRole("button", {
|
||||
name: "environments.project.api_keys.add_api_key",
|
||||
});
|
||||
await userEvent.click(submitButton);
|
||||
|
||||
// The submit should not have been called due to duplicate detection
|
||||
expect(mockOnSubmit).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test("handles updatePermission with environmentId but environment not found", async () => {
|
||||
// Create a project with limited environments to test the edge case
|
||||
const limitedProjects = [
|
||||
{
|
||||
...mockProjects[0],
|
||||
environments: [
|
||||
{
|
||||
id: "env1",
|
||||
type: "production" as const,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
projectId: "project1",
|
||||
appSetupCompleted: true,
|
||||
},
|
||||
// Only one environment, so we can test when trying to update to non-existent env
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
render(<AddApiKeyModal {...defaultProps} projects={limitedProjects} />);
|
||||
|
||||
// Add a permission first
|
||||
const addButton = screen.getByRole("button", { name: /add_permission/i });
|
||||
await userEvent.click(addButton);
|
||||
|
||||
// Verify permission was added with production environment
|
||||
expect(screen.getByRole("button", { name: /production/i })).toBeInTheDocument();
|
||||
|
||||
// Now test the edge case by manually calling the component's internal logic
|
||||
// Since we can't directly access the updatePermission function in tests,
|
||||
// we test through the UI interactions and verify the component doesn't crash
|
||||
|
||||
// The component should handle gracefully when environment lookup fails
|
||||
// This tests the branch: field === "environmentId" && !environment
|
||||
expect(screen.getByRole("button", { name: /production/i })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("covers all branches of updatePermission function", async () => {
|
||||
render(<AddApiKeyModal {...defaultProps} />);
|
||||
|
||||
// Add a permission to have something to update
|
||||
const addButton = screen.getByRole("button", { name: /add_permission/i });
|
||||
await userEvent.click(addButton);
|
||||
|
||||
// Test Branch 1: Update non-environmentId field (permission level)
|
||||
const permissionDropdowns = screen.getAllByRole("button", { name: /read/i });
|
||||
await userEvent.click(permissionDropdowns[0]);
|
||||
const manageOption = await screen.findByRole("menuitem", { name: "manage" });
|
||||
await userEvent.click(manageOption);
|
||||
expect(await screen.findByRole("button", { name: "manage" })).toBeInTheDocument();
|
||||
|
||||
// Test Branch 2: Update environmentId with valid environment
|
||||
const environmentDropdowns = screen.getAllByRole("button", { name: /production/i });
|
||||
await userEvent.click(environmentDropdowns[0]);
|
||||
const developmentOption = await screen.findByRole("menuitem", { name: "development" });
|
||||
await userEvent.click(developmentOption);
|
||||
expect(await screen.findByRole("button", { name: "development" })).toBeInTheDocument();
|
||||
|
||||
// Test Branch 3: Update project (which calls updateProjectAndEnvironment)
|
||||
const projectDropdowns = screen.getAllByRole("button", { name: /Project 1/i });
|
||||
await userEvent.click(projectDropdowns[0]);
|
||||
const project2Option = await screen.findByRole("menuitem", { name: "Project 2" });
|
||||
await userEvent.click(project2Option);
|
||||
expect(await screen.findByRole("button", { name: "Project 2" })).toBeInTheDocument();
|
||||
|
||||
// Verify all updates worked correctly and component is still functional
|
||||
expect(screen.getByRole("button", { name: /add_permission/i })).toBeInTheDocument();
|
||||
expect(labelInput.value).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -80,22 +80,23 @@ export const AddApiKeyModal = ({
|
||||
const [selectedOrganizationAccess, setSelectedOrganizationAccess] =
|
||||
useState<TOrganizationAccess>(defaultOrganizationAccess);
|
||||
|
||||
const getInitialPermissions = (): PermissionRecord[] => {
|
||||
const getInitialPermissions = () => {
|
||||
if (projects.length > 0 && projects[0].environments.length > 0) {
|
||||
return [
|
||||
{
|
||||
return {
|
||||
"permission-0": {
|
||||
projectId: projects[0].id,
|
||||
environmentId: projects[0].environments[0].id,
|
||||
permission: ApiKeyPermission.read,
|
||||
projectName: projects[0].name,
|
||||
environmentType: projects[0].environments[0].type,
|
||||
},
|
||||
];
|
||||
};
|
||||
}
|
||||
return [];
|
||||
return {} as Record<string, PermissionRecord>;
|
||||
};
|
||||
|
||||
const [selectedPermissions, setSelectedPermissions] = useState<PermissionRecord[]>([]);
|
||||
// Initialize with one permission by default
|
||||
const [selectedPermissions, setSelectedPermissions] = useState<Record<string, PermissionRecord>>({});
|
||||
|
||||
const projectOptions: ProjectOption[] = projects.map((project) => ({
|
||||
id: project.id,
|
||||
@@ -103,54 +104,58 @@ export const AddApiKeyModal = ({
|
||||
}));
|
||||
|
||||
const removePermission = (index: number) => {
|
||||
const updatedPermissions = [...selectedPermissions];
|
||||
updatedPermissions.splice(index, 1);
|
||||
const updatedPermissions = { ...selectedPermissions };
|
||||
delete updatedPermissions[`permission-${index}`];
|
||||
setSelectedPermissions(updatedPermissions);
|
||||
};
|
||||
|
||||
const addPermission = () => {
|
||||
const initialPermissions = getInitialPermissions();
|
||||
if (initialPermissions.length > 0) {
|
||||
setSelectedPermissions([...selectedPermissions, initialPermissions[0]]);
|
||||
const newIndex = Object.keys(selectedPermissions).length;
|
||||
const initialPermission = getInitialPermissions()["permission-0"];
|
||||
if (initialPermission) {
|
||||
setSelectedPermissions({
|
||||
...selectedPermissions,
|
||||
[`permission-${newIndex}`]: initialPermission,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const updatePermission = (index: number, field: string, value: string) => {
|
||||
const updatedPermissions = [...selectedPermissions];
|
||||
const project = projects.find((p) => p.id === updatedPermissions[index].projectId);
|
||||
const updatePermission = (key: string, field: string, value: string) => {
|
||||
const project = projects.find((p) => p.id === selectedPermissions[key].projectId);
|
||||
const environment = project?.environments.find((env) => env.id === value);
|
||||
|
||||
updatedPermissions[index] = {
|
||||
...updatedPermissions[index],
|
||||
[field]: value,
|
||||
...(field === "environmentId" && environment ? { environmentType: environment.type } : {}),
|
||||
};
|
||||
|
||||
setSelectedPermissions(updatedPermissions);
|
||||
setSelectedPermissions({
|
||||
...selectedPermissions,
|
||||
[key]: {
|
||||
...selectedPermissions[key],
|
||||
[field]: value,
|
||||
...(field === "environmentId" && environment ? { environmentType: environment.type } : {}),
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
// Update environment when project changes
|
||||
const updateProjectAndEnvironment = (index: number, projectId: string) => {
|
||||
const updateProjectAndEnvironment = (key: string, projectId: string) => {
|
||||
const project = projects.find((p) => p.id === projectId);
|
||||
if (project && project.environments.length > 0) {
|
||||
const environment = project.environments[0];
|
||||
const updatedPermissions = [...selectedPermissions];
|
||||
|
||||
updatedPermissions[index] = {
|
||||
...updatedPermissions[index],
|
||||
projectId,
|
||||
environmentId: environment.id,
|
||||
projectName: project.name,
|
||||
environmentType: environment.type,
|
||||
};
|
||||
|
||||
setSelectedPermissions(updatedPermissions);
|
||||
setSelectedPermissions({
|
||||
...selectedPermissions,
|
||||
[key]: {
|
||||
...selectedPermissions[key],
|
||||
projectId,
|
||||
environmentId: environment.id,
|
||||
projectName: project.name,
|
||||
environmentType: environment.type,
|
||||
},
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const checkForDuplicatePermissions = () => {
|
||||
const uniquePermissions = new Set(selectedPermissions.map((p) => `${p.projectId}-${p.environmentId}`));
|
||||
return uniquePermissions.size !== selectedPermissions.length;
|
||||
const permissions = Object.values(selectedPermissions);
|
||||
const uniquePermissions = new Set(permissions.map((p) => `${p.projectId}-${p.environmentId}`));
|
||||
return uniquePermissions.size !== permissions.length;
|
||||
};
|
||||
|
||||
const submitAPIKey = async () => {
|
||||
@@ -162,7 +167,7 @@ export const AddApiKeyModal = ({
|
||||
}
|
||||
|
||||
// Convert permissions to the format expected by the API
|
||||
const environmentPermissions = selectedPermissions.map((permission) => ({
|
||||
const environmentPermissions = Object.values(selectedPermissions).map((permission) => ({
|
||||
environmentId: permission.environmentId,
|
||||
permission: permission.permission,
|
||||
}));
|
||||
@@ -174,7 +179,7 @@ export const AddApiKeyModal = ({
|
||||
});
|
||||
|
||||
reset();
|
||||
setSelectedPermissions([]);
|
||||
setSelectedPermissions({});
|
||||
setSelectedOrganizationAccess(defaultOrganizationAccess);
|
||||
};
|
||||
|
||||
@@ -191,7 +196,7 @@ export const AddApiKeyModal = ({
|
||||
}
|
||||
|
||||
// Check if at least one project permission is set or one organization access toggle is ON
|
||||
const hasProjectAccess = selectedPermissions.length > 0;
|
||||
const hasProjectAccess = Object.keys(selectedPermissions).length > 0;
|
||||
|
||||
const hasOrganizationAccess = Object.values(selectedOrganizationAccess).some((accessGroup) =>
|
||||
Object.values(accessGroup).some((value) => value === true)
|
||||
@@ -230,9 +235,13 @@ export const AddApiKeyModal = ({
|
||||
<div className="space-y-2">
|
||||
<Label>{t("environments.project.api_keys.project_access")}</Label>
|
||||
<div className="space-y-2">
|
||||
{selectedPermissions.map((permission, index) => {
|
||||
{/* Permission rows */}
|
||||
{Object.keys(selectedPermissions).map((key) => {
|
||||
const permissionIndex = parseInt(key.split("-")[1]);
|
||||
const permission = selectedPermissions[key];
|
||||
return (
|
||||
<div key={index + permission.projectId} className="flex items-center gap-2">
|
||||
<div key={key} className="flex items-center gap-2">
|
||||
{/* Project dropdown */}
|
||||
<div className="w-1/3">
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
@@ -252,7 +261,7 @@ export const AddApiKeyModal = ({
|
||||
<DropdownMenuItem
|
||||
key={option.id}
|
||||
onClick={() => {
|
||||
updateProjectAndEnvironment(index, option.id);
|
||||
updateProjectAndEnvironment(key, option.id);
|
||||
}}>
|
||||
{option.name}
|
||||
</DropdownMenuItem>
|
||||
@@ -260,6 +269,8 @@ export const AddApiKeyModal = ({
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
</div>
|
||||
|
||||
{/* Environment dropdown */}
|
||||
<div className="w-1/3">
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
@@ -281,7 +292,7 @@ export const AddApiKeyModal = ({
|
||||
<DropdownMenuItem
|
||||
key={env.id}
|
||||
onClick={() => {
|
||||
updatePermission(index, "environmentId", env.id);
|
||||
updatePermission(key, "environmentId", env.id);
|
||||
}}>
|
||||
{env.type}
|
||||
</DropdownMenuItem>
|
||||
@@ -289,6 +300,8 @@ export const AddApiKeyModal = ({
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
</div>
|
||||
|
||||
{/* Permission level dropdown */}
|
||||
<div className="w-1/3">
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
@@ -310,7 +323,7 @@ export const AddApiKeyModal = ({
|
||||
<DropdownMenuItem
|
||||
key={option}
|
||||
onClick={() => {
|
||||
updatePermission(index, "permission", option);
|
||||
updatePermission(key, "permission", option);
|
||||
}}>
|
||||
{option}
|
||||
</DropdownMenuItem>
|
||||
@@ -318,16 +331,16 @@ export const AddApiKeyModal = ({
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
</div>
|
||||
<button
|
||||
type="button"
|
||||
className="p-2"
|
||||
onClick={() => removePermission(index)}
|
||||
aria-label={t("environments.project.api_keys.delete_permission")}>
|
||||
|
||||
{/* Delete button */}
|
||||
<button type="button" className="p-2" onClick={() => removePermission(permissionIndex)}>
|
||||
<Trash2Icon className={"h-5 w-5 text-slate-500 hover:text-red-500"} />
|
||||
</button>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
|
||||
{/* Add permission button */}
|
||||
<Button type="button" variant="outline" onClick={addPermission}>
|
||||
<span className="mr-2">+</span> {t("environments.settings.api_keys.add_permission")}
|
||||
</Button>
|
||||
@@ -384,7 +397,7 @@ export const AddApiKeyModal = ({
|
||||
onClick={() => {
|
||||
setOpen(false);
|
||||
reset();
|
||||
setSelectedPermissions([]);
|
||||
setSelectedPermissions({});
|
||||
}}>
|
||||
{t("common.cancel")}
|
||||
</Button>
|
||||
|
||||
@@ -1,4 +1,4 @@
import { Logo } from "@/modules/ui/components/logo";
import { FormbricksLogo } from "@/modules/ui/components/formbricks-logo";
import { Toaster } from "react-hot-toast";

export const SetupLayout = ({ children }: { children: React.ReactNode }) => {
@@ -10,7 +10,7 @@ export const SetupLayout = ({ children }: { children: React.ReactNode }) => {
style={{ scrollbarGutter: "stable both-edges" }}
className="flex max-h-[90vh] w-[40rem] flex-col items-center space-y-4 overflow-auto rounded-lg border bg-white p-12 text-center shadow-md">
<div className="h-20 w-20 rounded-lg bg-slate-900 p-2">
<Logo className="h-full w-full" variant="image" />
<FormbricksLogo className="h-full w-full" />
</div>
{children}
</div>

@@ -41,8 +41,8 @@ vi.mock("next/link", () => ({
),
}));

vi.mock("@/modules/ui/components/logo", () => ({
Logo: () => <div data-testid="logo">Logo</div>,
vi.mock("@/modules/ui/components/formbricks-logo", () => ({
FormbricksLogo: () => <div data-testid="formbricks-logo">FormbricksLogo</div>,
}));

vi.mock("@/modules/ui/components/button", () => ({

@@ -1,7 +1,7 @@
"use client";

import { Button } from "@/modules/ui/components/button";
import { Logo } from "@/modules/ui/components/logo";
import { FormbricksLogo } from "@/modules/ui/components/formbricks-logo";
import { useTranslate } from "@tolgee/react";
import Image, { StaticImageData } from "next/image";
import Link from "next/link";
@@ -51,7 +51,7 @@ export const ConnectIntegration = ({
<div className="flex w-1/2 flex-col items-center justify-center rounded-lg bg-white p-8 shadow">
<div className="flex w-1/2 justify-center -space-x-4">
<div className="flex h-32 w-32 items-center justify-center rounded-full bg-white p-6 shadow-md">
<Logo variant="image" />
<FormbricksLogo />
</div>
<div className="flex h-32 w-32 items-center justify-center rounded-full bg-white p-4 shadow-md">
<Image className="w-1/2" src={integrationLogoSrc} alt="logo" />

apps/web/modules/ui/components/formbricks-logo/index.tsx (new file, 197 lines)
@@ -0,0 +1,197 @@
|
||||
interface FormbricksLogoProps {
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export const FormbricksLogo = ({ className }: FormbricksLogoProps) => {
|
||||
return (
|
||||
<svg
|
||||
width="220"
|
||||
height="220"
|
||||
viewBox="0 0 220 220"
|
||||
fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
className={className}>
|
||||
<path
|
||||
d="M39.1602 147.334H95.8321V175.67C95.8321 191.32 83.1457 204.006 67.4962 204.006C51.8466 204.006 39.1602 191.32 39.1602 175.67V147.334Z"
|
||||
fill="url(#paint0_linear_415_2)"
|
||||
/>
|
||||
<path
|
||||
d="M39.1602 81.8071H152.504C168.154 81.8071 180.84 94.4936 180.84 110.143C180.84 125.793 168.154 138.479 152.504 138.479H39.1602V81.8071Z"
|
||||
fill="url(#paint1_linear_415_2)"
|
||||
/>
|
||||
<path
|
||||
d="M39.1602 62.7322C39.1602 37.0773 59.9576 16.2798 85.6126 16.2798H152.504C168.154 16.2798 180.84 28.9662 180.84 44.6158C180.84 60.2653 168.154 72.9518 152.504 72.9518H39.1602V62.7322Z"
|
||||
fill="url(#paint2_linear_415_2)"
|
||||
/>
|
||||
<mask
|
||||
id="mask0_415_2"
|
||||
style={{ maskType: "alpha" }}
|
||||
maskUnits="userSpaceOnUse"
|
||||
x="39"
|
||||
y="16"
|
||||
width="142"
|
||||
height="189">
|
||||
<path
|
||||
d="M39.1602 147.335H95.8321V175.671C95.8321 191.32 83.1457 204.007 67.4962 204.007C51.8466 204.007 39.1602 191.32 39.1602 175.671V147.335Z"
|
||||
fill="url(#paint3_linear_415_2)"
|
||||
/>
|
||||
<path
|
||||
d="M39.1602 81.8081H152.504C168.154 81.8081 180.84 94.4946 180.84 110.144C180.84 125.794 168.154 138.48 152.504 138.48H39.1602V81.8081Z"
|
||||
fill="url(#paint4_linear_415_2)"
|
||||
/>
|
||||
<path
|
||||
d="M39.1602 62.7322C39.1602 37.0773 59.9576 16.2798 85.6126 16.2798H152.504C168.154 16.2798 180.84 28.9662 180.84 44.6158C180.84 60.2653 168.154 72.9518 152.504 72.9518H39.1602V62.7322Z"
|
||||
fill="url(#paint5_linear_415_2)"
|
||||
/>
|
||||
</mask>
|
||||
<g mask="url(#mask0_415_2)">
|
||||
<g filter="url(#filter0_d_415_2)">
|
||||
<mask
|
||||
id="mask1_415_2"
|
||||
style={{ maskType: "alpha" }}
|
||||
maskUnits="userSpaceOnUse"
|
||||
x="39"
|
||||
y="16"
|
||||
width="142"
|
||||
height="189">
|
||||
<path
|
||||
d="M39.1602 147.335H95.8321V175.671C95.8321 191.32 83.1457 204.007 67.4962 204.007C51.8466 204.007 39.1602 191.32 39.1602 175.671V147.335Z"
|
||||
fill="black"
|
||||
fillOpacity="0.1"
|
||||
/>
|
||||
<path
|
||||
d="M39.1602 62.7322C39.1602 37.0773 59.9576 16.2798 85.6126 16.2798H152.504C168.154 16.2798 180.84 28.9662 180.84 44.6158C180.84 60.2653 168.154 72.9518 152.504 72.9518H39.1602V62.7322Z"
|
||||
fill="black"
|
||||
fillOpacity="0.1"
|
||||
/>
|
||||
<path
|
||||
d="M39.1602 81.8081H152.504C168.154 81.8081 180.84 94.4946 180.84 110.144C180.84 125.794 168.154 138.48 152.504 138.48H39.1602V81.8081Z"
|
||||
fill="black"
|
||||
fillOpacity="0.1"
|
||||
/>
|
||||
</mask>
|
||||
<g mask="url(#mask1_415_2)">
|
||||
<path
|
||||
d="M42.1331 -32.5321C64.3329 -54.1986 120.626 -32.5321 120.626 -32.5321H42.1331C36.6806 -27.2105 33.2847 -19.2749 33.2847 -7.76218C33.2847 50.6243 96.5317 71.8561 96.5317 112.55C96.5317 152.386 35.9231 176.962 33.3678 231.092H120.626C120.626 231.092 33.2847 291.248 33.2847 234.631C33.2847 233.437 33.3128 232.258 33.3678 231.092H-5.11523L2.41417 -32.5321H42.1331Z"
|
||||
fill="black"
|
||||
fillOpacity="0.1"
|
||||
/>
|
||||
</g>
|
||||
</g>
|
||||
<g filter="url(#filter1_f_415_2)">
|
||||
<circle cx="21.4498" cy="179.212" r="53.13" fill="#00C4B8" />
|
||||
</g>
|
||||
<g filter="url(#filter2_f_415_2)">
|
||||
<circle cx="21.4498" cy="44.6163" r="53.13" fill="#00C4B8" />
|
||||
</g>
|
||||
</g>
|
||||
<defs>
|
||||
<filter
|
||||
id="filter0_d_415_2"
|
||||
x="34.5149"
|
||||
y="-11.5917"
|
||||
width="137.209"
|
||||
height="243.47"
|
||||
filterUnits="userSpaceOnUse"
|
||||
colorInterpolationFilters="sRGB">
|
||||
<feFlood floodOpacity="0" result="BackgroundImageFix" />
|
||||
<feColorMatrix
|
||||
in="SourceAlpha"
|
||||
type="matrix"
|
||||
values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"
|
||||
result="hardAlpha"
|
||||
/>
|
||||
<feOffset dx="23.2262" />
|
||||
<feGaussianBlur stdDeviation="13.9357" />
|
||||
<feComposite in2="hardAlpha" operator="out" />
|
||||
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.25 0" />
|
||||
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_415_2" />
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_415_2" result="shape" />
|
||||
</filter>
|
||||
<filter
|
||||
id="filter1_f_415_2"
|
||||
x="-78.1326"
|
||||
y="79.6296"
|
||||
width="199.165"
|
||||
height="199.165"
|
||||
filterUnits="userSpaceOnUse"
|
||||
colorInterpolationFilters="sRGB">
|
||||
<feFlood floodOpacity="0" result="BackgroundImageFix" />
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape" />
|
||||
<feGaussianBlur stdDeviation="23.2262" result="effect1_foregroundBlur_415_2" />
|
||||
</filter>
|
||||
<filter
|
||||
id="filter2_f_415_2"
|
||||
x="-78.1326"
|
||||
y="-54.9661"
|
||||
width="199.165"
|
||||
height="199.165"
|
||||
filterUnits="userSpaceOnUse"
|
||||
colorInterpolationFilters="sRGB">
|
||||
<feFlood floodOpacity="0" result="BackgroundImageFix" />
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape" />
|
||||
<feGaussianBlur stdDeviation="23.2262" result="effect1_foregroundBlur_415_2" />
|
||||
</filter>
|
||||
<linearGradient
|
||||
id="paint0_linear_415_2"
|
||||
x1="96.0786"
|
||||
y1="174.643"
|
||||
x2="39.1553"
|
||||
y2="174.873"
|
||||
gradientUnits="userSpaceOnUse">
|
||||
<stop offset="1" stopColor="#00C4B8" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="paint1_linear_415_2"
|
||||
x1="181.456"
|
||||
y1="109.116"
|
||||
x2="39.1602"
|
||||
y2="110.554"
|
||||
gradientUnits="userSpaceOnUse">
|
||||
<stop stopColor="#00DDD0" />
|
||||
<stop offset="1" stopColor="#01E0C6" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="paint2_linear_415_2"
|
||||
x1="181.456"
|
||||
y1="43.5891"
|
||||
x2="39.1602"
|
||||
y2="45.0264"
|
||||
gradientUnits="userSpaceOnUse">
|
||||
<stop stopColor="#00DDD0" />
|
||||
<stop offset="1" stopColor="#01E0C6" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="paint3_linear_415_2"
|
||||
x1="96.0786"
|
||||
y1="174.644"
|
||||
x2="39.1553"
|
||||
y2="174.874"
|
||||
gradientUnits="userSpaceOnUse">
|
||||
<stop stopColor="#00FFE1" />
|
||||
<stop offset="1" stopColor="#01E0C6" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="paint4_linear_415_2"
|
||||
x1="181.456"
|
||||
y1="109.117"
|
||||
x2="39.1602"
|
||||
y2="110.555"
|
||||
gradientUnits="userSpaceOnUse">
|
||||
<stop stopColor="#00FFE1" />
|
||||
<stop offset="1" stopColor="#01E0C6" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="paint5_linear_415_2"
|
||||
x1="181.456"
|
||||
y1="43.5891"
|
||||
x2="39.1602"
|
||||
y2="45.0264"
|
||||
gradientUnits="userSpaceOnUse">
|
||||
<stop stopColor="#00FFE1" />
|
||||
<stop offset="1" stopColor="#01E0C6" />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
||||
);
|
||||
};
|
||||
@@ -8,59 +8,33 @@ describe("Logo", () => {
|
||||
cleanup();
|
||||
});
|
||||
|
||||
describe("default variant", () => {
|
||||
test("renders default logo correctly", () => {
|
||||
const { container } = render(<Logo />);
|
||||
const svg = container.querySelector("svg");
|
||||
test("renders correctly", () => {
|
||||
const { container } = render(<Logo />);
|
||||
const svg = container.querySelector("svg");
|
||||
|
||||
expect(svg).toBeInTheDocument();
|
||||
});
|
||||
expect(svg).toBeInTheDocument();
|
||||
expect(svg).toHaveAttribute("viewBox", "0 0 697 150");
|
||||
expect(svg).toHaveAttribute("fill", "none");
|
||||
expect(svg).toHaveAttribute("xmlns", "http://www.w3.org/2000/svg");
|
||||
});
|
||||
|
||||
describe("image variant", () => {
|
||||
test("renders image logo correctly", () => {
|
||||
const { container } = render(<Logo variant="image" />);
|
||||
const svg = container.querySelector("svg");
|
||||
test("accepts and passes through props", () => {
|
||||
const testClassName = "test-class";
|
||||
const { container } = render(<Logo className={testClassName} />);
|
||||
const svg = container.querySelector("svg");
|
||||
|
||||
expect(svg).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("renders image logo with className correctly", () => {
|
||||
const testClassName = "test-class";
|
||||
const { container } = render(<Logo variant="image" className={testClassName} />);
|
||||
const svg = container.querySelector("svg");
|
||||
|
||||
expect(svg).toBeInTheDocument();
|
||||
expect(svg).toHaveAttribute("class", testClassName);
|
||||
});
|
||||
expect(svg).toBeInTheDocument();
|
||||
expect(svg).toHaveAttribute("class", testClassName);
|
||||
});
|
||||
|
||||
describe("wordmark variant", () => {
|
||||
test("renders wordmark logo correctly", () => {
|
||||
const { container } = render(<Logo variant="wordmark" />);
|
||||
const svg = container.querySelector("svg");
|
||||
test("contains expected svg elements", () => {
|
||||
const { container } = render(<Logo />);
|
||||
const svg = container.querySelector("svg");
|
||||
|
||||
expect(svg).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("renders wordmark logo with className correctly", () => {
|
||||
const testClassName = "test-class";
|
||||
const { container } = render(<Logo variant="wordmark" className={testClassName} />);
|
||||
const svg = container.querySelector("svg");
|
||||
|
||||
expect(svg).toBeInTheDocument();
|
||||
expect(svg).toHaveAttribute("class", testClassName);
|
||||
});
|
||||
|
||||
test("contains expected svg elements", () => {
|
||||
const { container } = render(<Logo variant="wordmark" />);
|
||||
const svg = container.querySelector("svg");
|
||||
|
||||
expect(svg?.querySelectorAll("path").length).toBeGreaterThan(0);
|
||||
expect(svg?.querySelector("line")).toBeInTheDocument();
|
||||
expect(svg?.querySelectorAll("mask").length).toBe(2);
|
||||
expect(svg?.querySelectorAll("filter").length).toBe(3);
|
||||
expect(svg?.querySelectorAll("linearGradient").length).toBe(6);
|
||||
});
|
||||
expect(svg?.querySelectorAll("path").length).toBeGreaterThan(0);
|
||||
expect(svg?.querySelector("line")).toBeInTheDocument();
|
||||
expect(svg?.querySelectorAll("mask").length).toBe(2);
|
||||
expect(svg?.querySelectorAll("filter").length).toBe(3);
|
||||
expect(svg?.querySelectorAll("linearGradient").length).toBe(6);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,208 +1,4 @@
|
||||
interface LogoProps extends React.SVGProps<SVGSVGElement> {
|
||||
variant?: "image" | "wordmark";
|
||||
}
|
||||
|
||||
export const Logo = ({ variant = "wordmark", ...props }: LogoProps) => {
|
||||
if (variant === "image") return <ImageLogo {...props} />;
|
||||
|
||||
return <WordmarkLogo {...props} />;
|
||||
};
|
||||
|
||||
const ImageLogo = (props: React.SVGProps<SVGSVGElement>) => {
|
||||
return (
|
||||
<svg
|
||||
width="220"
|
||||
height="220"
|
||||
viewBox="0 0 220 220"
|
||||
fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
{...props}>
|
||||
<path
|
||||
d="M39.1602 147.334H95.8321V175.67C95.8321 191.32 83.1457 204.006 67.4962 204.006C51.8466 204.006 39.1602 191.32 39.1602 175.67V147.334Z"
|
||||
fill="url(#paint0_linear_415_2)"
|
||||
/>
|
||||
<path
|
||||
d="M39.1602 81.8071H152.504C168.154 81.8071 180.84 94.4936 180.84 110.143C180.84 125.793 168.154 138.479 152.504 138.479H39.1602V81.8071Z"
|
||||
fill="url(#paint1_linear_415_2)"
|
||||
/>
|
||||
<path
|
||||
d="M39.1602 62.7322C39.1602 37.0773 59.9576 16.2798 85.6126 16.2798H152.504C168.154 16.2798 180.84 28.9662 180.84 44.6158C180.84 60.2653 168.154 72.9518 152.504 72.9518H39.1602V62.7322Z"
|
||||
fill="url(#paint2_linear_415_2)"
|
||||
/>
|
||||
<mask
|
||||
id="mask0_415_2"
|
||||
style={{ maskType: "alpha" }}
|
||||
maskUnits="userSpaceOnUse"
|
||||
x="39"
|
||||
y="16"
|
||||
width="142"
|
||||
height="189">
|
||||
<path
|
||||
d="M39.1602 147.335H95.8321V175.671C95.8321 191.32 83.1457 204.007 67.4962 204.007C51.8466 204.007 39.1602 191.32 39.1602 175.671V147.335Z"
|
||||
fill="url(#paint3_linear_415_2)"
|
||||
/>
|
||||
<path
|
||||
d="M39.1602 81.8081H152.504C168.154 81.8081 180.84 94.4946 180.84 110.144C180.84 125.794 168.154 138.48 152.504 138.48H39.1602V81.8081Z"
|
||||
fill="url(#paint4_linear_415_2)"
|
||||
/>
|
||||
<path
|
||||
d="M39.1602 62.7322C39.1602 37.0773 59.9576 16.2798 85.6126 16.2798H152.504C168.154 16.2798 180.84 28.9662 180.84 44.6158C180.84 60.2653 168.154 72.9518 152.504 72.9518H39.1602V62.7322Z"
|
||||
fill="url(#paint5_linear_415_2)"
|
||||
/>
|
||||
</mask>
|
||||
<g mask="url(#mask0_415_2)">
|
||||
<g filter="url(#filter0_d_415_2)">
|
||||
<mask
|
||||
id="mask1_415_2"
|
||||
style={{ maskType: "alpha" }}
|
||||
maskUnits="userSpaceOnUse"
|
||||
x="39"
|
||||
y="16"
|
||||
width="142"
|
||||
height="189">
|
||||
<path
|
||||
d="M39.1602 147.335H95.8321V175.671C95.8321 191.32 83.1457 204.007 67.4962 204.007C51.8466 204.007 39.1602 191.32 39.1602 175.671V147.335Z"
|
||||
fill="black"
|
||||
fillOpacity="0.1"
|
||||
/>
|
||||
<path
|
||||
d="M39.1602 62.7322C39.1602 37.0773 59.9576 16.2798 85.6126 16.2798H152.504C168.154 16.2798 180.84 28.9662 180.84 44.6158C180.84 60.2653 168.154 72.9518 152.504 72.9518H39.1602V62.7322Z"
|
||||
fill="black"
|
||||
fillOpacity="0.1"
|
||||
/>
|
||||
<path
|
||||
d="M39.1602 81.8081H152.504C168.154 81.8081 180.84 94.4946 180.84 110.144C180.84 125.794 168.154 138.48 152.504 138.48H39.1602V81.8081Z"
|
||||
fill="black"
|
||||
fillOpacity="0.1"
|
||||
/>
|
||||
</mask>
|
||||
<g mask="url(#mask1_415_2)">
|
||||
<path
|
||||
d="M42.1331 -32.5321C64.3329 -54.1986 120.626 -32.5321 120.626 -32.5321H42.1331C36.6806 -27.2105 33.2847 -19.2749 33.2847 -7.76218C33.2847 50.6243 96.5317 71.8561 96.5317 112.55C96.5317 152.386 35.9231 176.962 33.3678 231.092H120.626C120.626 231.092 33.2847 291.248 33.2847 234.631C33.2847 233.437 33.3128 232.258 33.3678 231.092H-5.11523L2.41417 -32.5321H42.1331Z"
|
||||
fill="black"
|
||||
fillOpacity="0.1"
|
||||
/>
|
||||
</g>
|
||||
</g>
|
||||
<g filter="url(#filter1_f_415_2)">
|
||||
<circle cx="21.4498" cy="179.212" r="53.13" fill="#00C4B8" />
|
||||
</g>
|
||||
<g filter="url(#filter2_f_415_2)">
|
||||
<circle cx="21.4498" cy="44.6163" r="53.13" fill="#00C4B8" />
|
||||
</g>
|
||||
</g>
|
||||
<defs>
|
||||
<filter
|
||||
id="filter0_d_415_2"
|
||||
x="34.5149"
|
||||
y="-11.5917"
|
||||
width="137.209"
|
||||
height="243.47"
|
||||
filterUnits="userSpaceOnUse"
|
||||
colorInterpolationFilters="sRGB">
|
||||
<feFlood floodOpacity="0" result="BackgroundImageFix" />
|
||||
<feColorMatrix
|
||||
in="SourceAlpha"
|
||||
type="matrix"
|
||||
values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"
|
||||
result="hardAlpha"
|
||||
/>
|
||||
<feOffset dx="23.2262" />
|
||||
<feGaussianBlur stdDeviation="13.9357" />
|
||||
<feComposite in2="hardAlpha" operator="out" />
|
||||
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.25 0" />
|
||||
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_415_2" />
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_415_2" result="shape" />
|
||||
</filter>
|
||||
<filter
|
||||
id="filter1_f_415_2"
|
||||
x="-78.1326"
|
||||
y="79.6296"
|
||||
width="199.165"
|
||||
height="199.165"
|
||||
filterUnits="userSpaceOnUse"
|
||||
colorInterpolationFilters="sRGB">
|
||||
<feFlood floodOpacity="0" result="BackgroundImageFix" />
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape" />
|
||||
<feGaussianBlur stdDeviation="23.2262" result="effect1_foregroundBlur_415_2" />
|
||||
</filter>
|
||||
<filter
|
||||
id="filter2_f_415_2"
|
||||
x="-78.1326"
|
||||
y="-54.9661"
|
||||
width="199.165"
|
||||
height="199.165"
|
||||
filterUnits="userSpaceOnUse"
|
||||
colorInterpolationFilters="sRGB">
|
||||
<feFlood floodOpacity="0" result="BackgroundImageFix" />
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape" />
|
||||
<feGaussianBlur stdDeviation="23.2262" result="effect1_foregroundBlur_415_2" />
|
||||
</filter>
|
||||
<linearGradient
|
||||
id="paint0_linear_415_2"
|
||||
x1="96.0786"
|
||||
y1="174.643"
|
||||
x2="39.1553"
|
||||
y2="174.873"
|
||||
gradientUnits="userSpaceOnUse">
|
||||
<stop offset="1" stopColor="#00C4B8" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="paint1_linear_415_2"
|
||||
x1="181.456"
|
||||
y1="109.116"
|
||||
x2="39.1602"
|
||||
y2="110.554"
|
||||
gradientUnits="userSpaceOnUse">
|
||||
<stop stopColor="#00DDD0" />
|
||||
<stop offset="1" stopColor="#01E0C6" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="paint2_linear_415_2"
|
||||
x1="181.456"
|
||||
y1="43.5891"
|
||||
x2="39.1602"
|
||||
y2="45.0264"
|
||||
gradientUnits="userSpaceOnUse">
|
||||
<stop stopColor="#00DDD0" />
|
||||
<stop offset="1" stopColor="#01E0C6" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="paint3_linear_415_2"
|
||||
x1="96.0786"
|
||||
y1="174.644"
|
||||
x2="39.1553"
|
||||
y2="174.874"
|
||||
gradientUnits="userSpaceOnUse">
|
||||
<stop stopColor="#00FFE1" />
|
||||
<stop offset="1" stopColor="#01E0C6" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="paint4_linear_415_2"
|
||||
x1="181.456"
|
||||
y1="109.117"
|
||||
x2="39.1602"
|
||||
y2="110.555"
|
||||
gradientUnits="userSpaceOnUse">
|
||||
<stop stopColor="#00FFE1" />
|
||||
<stop offset="1" stopColor="#01E0C6" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="paint5_linear_415_2"
|
||||
x1="181.456"
|
||||
y1="43.5891"
|
||||
x2="39.1602"
|
||||
y2="45.0264"
|
||||
gradientUnits="userSpaceOnUse">
|
||||
<stop stopColor="#00FFE1" />
|
||||
<stop offset="1" stopColor="#01E0C6" />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
||||
);
|
||||
};
|
||||
|
||||
const WordmarkLogo = (props: React.SVGProps<SVGSVGElement>) => {
|
||||
export const Logo = (props: any) => {
|
||||
return (
|
||||
<svg viewBox="0 0 697 150" fill="none" xmlns="http://www.w3.org/2000/svg" {...props}>
|
||||
<path
|
||||
|
||||
@@ -1,74 +0,0 @@
|
||||
import { Meta, StoryObj } from "@storybook/react-vite";
|
||||
import { Logo } from "./index";
|
||||
|
||||
type StoryProps = React.ComponentProps<typeof Logo>;
|
||||
|
||||
const meta: Meta<StoryProps> = {
|
||||
title: "UI/Logo",
|
||||
component: Logo,
|
||||
tags: ["autodocs"],
|
||||
parameters: {
|
||||
layout: "centered",
|
||||
controls: { sort: "alpha", exclude: [] },
|
||||
docs: {
|
||||
description: {
|
||||
component:
|
||||
"** Logo ** renders the Formbricks brand as scalable SVG.It supports two variants('image' and 'wordmark') and is suitable for headers, navigation, and other branding areas.",
|
||||
},
|
||||
},
|
||||
},
|
||||
argTypes: {
|
||||
variant: {
|
||||
control: "select",
|
||||
options: ["image", "wordmark"],
|
||||
description: "The variant of the logo to display",
|
||||
table: {
|
||||
category: "Appearance",
|
||||
type: { summary: "string" },
|
||||
defaultValue: { summary: "wordmark" },
|
||||
},
|
||||
order: 1,
|
||||
},
|
||||
className: {
|
||||
control: "text",
|
||||
description: "Additional CSS classes for styling",
|
||||
table: {
|
||||
category: "Appearance",
|
||||
type: { summary: "string" },
|
||||
},
|
||||
order: 1,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export default meta;
|
||||
type Story = StoryObj<StoryProps>;
|
||||
|
||||
const renderLogoWithOptions = (args: StoryProps) => {
|
||||
const { ...logoProps } = args;
|
||||
|
||||
return <Logo {...logoProps} />;
|
||||
};
|
||||
|
||||
export const Default: Story = {
|
||||
render: renderLogoWithOptions,
|
||||
args: {
|
||||
className: "h-20",
|
||||
},
|
||||
};
|
||||
|
||||
export const Image: Story = {
|
||||
render: renderLogoWithOptions,
|
||||
args: {
|
||||
className: "h-20",
|
||||
variant: "image",
|
||||
},
|
||||
};
|
||||
|
||||
export const Wordmark: Story = {
|
||||
render: renderLogoWithOptions,
|
||||
args: {
|
||||
className: "h-20",
|
||||
variant: "wordmark",
|
||||
},
|
||||
};
|
||||
@@ -55,7 +55,7 @@ else
fi

echo "🗃️ Running database migrations..."
run_with_timeout 600 "database migration" sh -c '(cd packages/database && npm run db:migrate:deploy)'
run_with_timeout 300 "database migration" sh -c '(cd packages/database && npm run db:migrate:deploy)'

echo "🗃️ Running SAML database setup..."
run_with_timeout 60 "SAML database setup" sh -c '(cd packages/database && npm run db:create-saml-database:deploy)'

packages/storage/.cursor/rules/storage-package.md (new file, 587 lines)
@@ -0,0 +1,587 @@
# Storage Package Rules for Formbricks

## Package Overview

The `@formbricks/storage` package provides S3-compatible cloud storage functionality for Formbricks. It's a standalone TypeScript library that handles file uploads, downloads, and deletions with comprehensive error handling and type safety.

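To make the error-handling claim above concrete, here is a rough sketch of what calling the package from an app could look like. The `uploadFile` name, its signature, and the shape of the failure branch (`result.error`) are hypothetical placeholders, not confirmed exports of `@formbricks/storage`; only the error codes and the Result-style checks mirror the patterns documented below.

```typescript
// Hypothetical consumer sketch: `uploadFile`, its signature, and the success payload
// are assumptions used only to illustrate the Result-based handling described below.
import { uploadFile } from "@formbricks/storage";

export const storeSurveyAttachment = async (key: string, body: Buffer): Promise<void> => {
  const result = await uploadFile(key, body);

  if (!result.ok) {
    // Failures carry a discriminated `code` (assuming the failure branch exposes `error`),
    // so callers can branch without try/catch.
    if (result.error.code === "s3_credentials_error") {
      throw new Error("Storage is not configured: missing S3 credentials");
    }
    throw new Error(`Upload failed: ${result.error.message}`);
  }

  // On success, result.data holds the payload (its exact shape depends on the real API).
};
```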
## Key Files
|
||||
|
||||
### Core Storage Infrastructure
|
||||
|
||||
- [packages/storage/src/service.ts](mdc:packages/storage/src/service.ts) - Main storage service with S3 operations
|
||||
- [packages/storage/src/client.ts](mdc:packages/storage/src/client.ts) - S3 client creation and configuration
|
||||
- [packages/storage/src/constants.ts](mdc:packages/storage/src/constants.ts) - Environment variable exports
|
||||
- [packages/storage/src/types/error.ts](mdc:packages/storage/src/types/error.ts) - Result type system and error definitions
|
||||
- [packages/storage/src/index.ts](mdc:packages/storage/src/index.ts) - Package exports
|
||||
|
||||
### Configuration Files
|
||||
|
||||
- [packages/storage/package.json](mdc:packages/storage/package.json) - Package configuration with AWS SDK dependencies
|
||||
- [packages/storage/vite.config.ts](mdc:packages/storage/vite.config.ts) - Build configuration for library bundling
|
||||
- [packages/storage/tsconfig.json](mdc:packages/storage/tsconfig.json) - TypeScript configuration
|
||||
|
||||
## Architecture Patterns
|
||||
|
||||
### Package Structure
|
||||
|
||||
```
|
||||
packages/storage/
|
||||
├── src/
|
||||
│ ├── client.ts # S3 client creation and configuration
|
||||
│ ├── service.ts # Core storage operations (upload, download, delete)
|
||||
│ ├── constants.ts # Environment variable exports
|
||||
│ ├── index.ts # Package exports
|
||||
│ ├── types/
|
||||
│ │ └── error.ts # Result type system and error definitions
|
||||
│ ├── *.test.ts # Unit tests for each module
|
||||
└── dist/ # Built library output
|
||||
```
|
||||
|
||||
### Result Type System
|
||||
|
||||
All storage operations use a Result type pattern for comprehensive error handling:
|
||||
|
||||
```typescript
|
||||
// ✅ Use Result<T, E> for all async operations
|
||||
export const storageOperation = async (): Promise<
|
||||
Result<SuccessData, UnknownError | S3CredentialsError | S3ClientError>
|
||||
> => {
|
||||
try {
|
||||
// Implementation
|
||||
return ok(data);
|
||||
} catch (error) {
|
||||
logger.error("Operation failed", { error });
|
||||
return err({
|
||||
code: "unknown",
|
||||
message: "Operation failed",
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// ✅ Handle Results properly in calling code
|
||||
const result = await storageOperation();
|
||||
if (!result.ok) {
|
||||
// Handle error
|
||||
return result; // Propagate error
|
||||
}
|
||||
// Use result.data
|
||||
```
|
||||
|
||||
### Error Type Definitions
|
||||
|
||||
Always use the predefined error types:
|
||||
|
||||
```typescript
|
||||
// ✅ Standard error types
|
||||
interface UnknownError {
|
||||
code: "unknown";
|
||||
message: string;
|
||||
}
|
||||
|
||||
interface S3CredentialsError {
|
||||
code: "s3_credentials_error";
|
||||
message: string;
|
||||
}
|
||||
|
||||
interface S3ClientError {
|
||||
code: "s3_client_error";
|
||||
message: string;
|
||||
}
|
||||
|
||||
// ✅ Use ok() and err() utility functions
|
||||
return ok(successData);
|
||||
return err({ code: "s3_client_error", message: "Failed to connect" });
|
||||
```
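
For reference, the `Result` type and the `ok` / `err` helpers used throughout these examples follow a discriminated-union shape along these lines (a minimal sketch for orientation only; the canonical definitions live in `packages/storage/src/types/error.ts` and may differ in detail):

```typescript
// Sketch of the Result shape assumed by the examples in this document
export type Result<T, E> = { ok: true; data: T } | { ok: false; error: E };

export const ok = <T, E>(data: T): Result<T, E> => ({ ok: true, data });
export const err = <T, E>(error: E): Result<T, E> => ({ ok: false, error });
```

Callers narrow on the `ok` discriminant, which is what makes `result.data` and `result.error` type-safe in the snippets above.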
|
||||
|
||||
## S3 Client Patterns
|
||||
|
||||
### Environment Configuration
|
||||
|
||||
All S3 configuration comes from environment variables:
|
||||
|
||||
```typescript
|
||||
// ✅ Export environment variables from constants.ts
|
||||
export const S3_ACCESS_KEY = process.env.S3_ACCESS_KEY;
|
||||
export const S3_SECRET_KEY = process.env.S3_SECRET_KEY;
|
||||
export const S3_REGION = process.env.S3_REGION;
|
||||
export const S3_ENDPOINT_URL = process.env.S3_ENDPOINT_URL;
|
||||
export const S3_FORCE_PATH_STYLE = process.env.S3_FORCE_PATH_STYLE === "1";
|
||||
export const S3_BUCKET_NAME = process.env.S3_BUCKET_NAME;
|
||||
|
||||
// ✅ Validate in a function (e.g., inside createS3ClientFromEnv)
|
||||
if (!S3_ACCESS_KEY || !S3_SECRET_KEY || !S3_BUCKET_NAME || !S3_REGION) {
|
||||
return err({
|
||||
code: "s3_credentials_error",
|
||||
message: "S3 credentials are not set",
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Client Creation Pattern
|
||||
|
||||
Use the factory pattern for S3 client creation:
|
||||
|
||||
```typescript
|
||||
// ✅ Factory function with Result type
|
||||
export const createS3ClientFromEnv = (): Result<S3Client, S3CredentialsError | UnknownError> => {
|
||||
try {
|
||||
// Validation and client creation
|
||||
const s3ClientInstance = new S3Client({
|
||||
credentials: { accessKeyId: S3_ACCESS_KEY, secretAccessKey: S3_SECRET_KEY },
|
||||
region: S3_REGION,
|
||||
endpoint: S3_ENDPOINT_URL,
|
||||
forcePathStyle: S3_FORCE_PATH_STYLE,
|
||||
});
|
||||
|
||||
return ok(s3ClientInstance);
|
||||
} catch (error) {
|
||||
logger.error("Error creating S3 client", { error });
|
||||
return err({ code: "unknown", message: "Error creating S3 client" });
|
||||
}
|
||||
};
|
||||
|
||||
// ✅ Wrapper function for fallback handling
|
||||
export const createS3Client = (): S3Client | undefined => {
|
||||
const result = createS3ClientFromEnv();
|
||||
return result.ok ? result.data : undefined;
|
||||
};
|
||||
```
|
||||
|
||||
## Service Function Patterns
|
||||
|
||||
### Function Signature Standards
|
||||
|
||||
All service functions follow consistent patterns:
|
||||
|
||||
```typescript
|
||||
// ✅ Comprehensive TSDoc comments
|
||||
/**
|
||||
* Get a signed URL for uploading a file to S3
|
||||
* @param fileName - The name of the file to upload
|
||||
* @param contentType - The content type of the file
|
||||
* @param filePath - The path to the file in S3
|
||||
* @param maxSize - Maximum file size allowed (optional)
|
||||
* @returns A Result containing the signed URL and presigned fields or an error
|
||||
*/
|
||||
export const getSignedUploadUrl = async (
|
||||
fileName: string,
|
||||
contentType: string,
|
||||
filePath: string,
|
||||
maxSize?: number
|
||||
): Promise<
|
||||
Result<
|
||||
{
|
||||
signedUrl: string;
|
||||
presignedFields: PresignedPostOptions["Fields"];
|
||||
},
|
||||
UnknownError | S3CredentialsError | S3ClientError
|
||||
>
|
||||
> => {
|
||||
// Implementation
|
||||
};
|
||||
```
|
||||
|
||||
### Error Handling Patterns
|
||||
|
||||
Always validate inputs and handle S3 client errors:
|
||||
|
||||
```typescript
|
||||
// ✅ Standard validation and error handling
|
||||
export const storageFunction = async (param: string): Promise<Result<Data, Errors>> => {
|
||||
try {
|
||||
// Client validation
|
||||
if (!s3Client) {
|
||||
logger.error("S3 client is not available");
|
||||
return err({
|
||||
code: "s3_credentials_error",
|
||||
message: "S3 credentials are not set",
|
||||
});
|
||||
}
|
||||
|
||||
// AWS SDK operations with error handling
|
||||
const command = new SomeS3Command({
|
||||
/* params */
|
||||
});
|
||||
const response = await s3Client.send(command);
|
||||
|
||||
return ok(response);
|
||||
} catch (error) {
|
||||
logger.error("S3 operation failed", { error, param });
|
||||
|
||||
// Categorize errors appropriately
|
||||
if (error.name === "CredentialsError") {
|
||||
return err({
|
||||
code: "s3_credentials_error",
|
||||
message: "Invalid S3 credentials",
|
||||
});
|
||||
}
|
||||
|
||||
return err({
|
||||
code: "s3_client_error",
|
||||
message: `S3 operation failed: ${error.message}`,
|
||||
});
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
## Testing Standards
|
||||
|
||||
### Test File Organization
|
||||
|
||||
Each source file should have a corresponding test file:
|
||||
|
||||
```typescript
|
||||
// ✅ Test file naming: [module].test.ts
|
||||
// packages/storage/src/client.test.ts
|
||||
// packages/storage/src/service.test.ts
|
||||
// packages/storage/src/constants.test.ts
|
||||
|
||||
// ✅ Test structure
|
||||
describe("Storage Client", () => {
|
||||
describe("createS3ClientFromEnv", () => {
|
||||
it("should create S3 client with valid credentials", () => {
|
||||
// Test implementation
|
||||
});
|
||||
|
||||
it("should return error with missing credentials", () => {
|
||||
// Test implementation
|
||||
});
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
### Mock Environment Variables
|
||||
|
||||
Always mock environment variables in tests:
|
||||
|
||||
```typescript
|
||||
// ✅ Mock environment setup
|
||||
beforeEach(() => {
|
||||
vi.stubEnv("S3_ACCESS_KEY", "test-access-key");
|
||||
vi.stubEnv("S3_SECRET_KEY", "test-secret-key");
|
||||
vi.stubEnv("S3_REGION", "us-east-1");
|
||||
vi.stubEnv("S3_BUCKET_NAME", "test-bucket");
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
```
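
Because `constants.ts` reads `process.env` at import time, the package's own tests (see `client.test.ts` and `service.test.ts` later in this diff) additionally mock the constants module and use a dynamic import so the module under test picks up the mocked values. A condensed sketch of that pattern, modeled on those tests:

```typescript
import { describe, expect, test, vi } from "vitest";

describe("createS3ClientFromEnv", () => {
  test("uses mocked constants", async () => {
    vi.resetModules();
    vi.doMock("./constants", () => ({
      S3_ACCESS_KEY: "test-access-key",
      S3_SECRET_KEY: "test-secret-key",
      S3_REGION: "us-east-1",
      S3_BUCKET_NAME: "test-bucket",
      S3_ENDPOINT_URL: undefined,
      S3_FORCE_PATH_STYLE: false,
    }));

    // Dynamic import returns a fresh module that sees the mocked constants
    const { createS3ClientFromEnv } = await import("./client");
    expect(createS3ClientFromEnv().ok).toBe(true);
  });
});
```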
|
||||
|
||||
## Build Configuration
|
||||
|
||||
### Vite Library Setup
|
||||
|
||||
Configure Vite for library bundling with external dependencies:
|
||||
|
||||
```typescript
|
||||
// ✅ vite.config.ts pattern
|
||||
export default defineConfig({
|
||||
build: {
|
||||
lib: {
|
||||
entry: resolve(__dirname, "src/index.ts"),
|
||||
name: "formbricksStorage",
|
||||
fileName: "index",
|
||||
formats: ["es", "cjs"], // Both ESM and CommonJS
|
||||
},
|
||||
rollupOptions: {
|
||||
// Externalize AWS SDK and Formbricks dependencies
|
||||
external: [
|
||||
"@aws-sdk/client-s3",
|
||||
"@aws-sdk/s3-presigned-post",
|
||||
"@aws-sdk/s3-request-presigner",
|
||||
"@formbricks/logger",
|
||||
],
|
||||
},
|
||||
},
|
||||
test: {
|
||||
environment: "node",
|
||||
globals: true,
|
||||
coverage: {
|
||||
reporter: ["text", "json", "html", "lcov"],
|
||||
exclude: ["src/types/**"], // Exclude type definitions
|
||||
include: ["src/**/*.ts"],
|
||||
},
|
||||
},
|
||||
plugins: [dts({ rollupTypes: true })], // Generate type declarations
|
||||
});
|
||||
```
|
||||
|
||||
### Package.json Configuration
|
||||
|
||||
Essential package.json fields for the storage library:
|
||||
|
||||
```json
|
||||
{
|
||||
"exports": {
|
||||
"import": "./dist/index.js",
|
||||
"require": "./dist/index.cjs",
|
||||
"types": "./dist/index.d.ts"
|
||||
},
|
||||
"files": ["dist"],
|
||||
"main": "./dist/index.js",
|
||||
"name": "@formbricks/storage",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"build": "tsc && vite build",
|
||||
"test": "vitest run",
|
||||
"test:coverage": "vitest run --coverage"
|
||||
},
|
||||
"type": "module",
|
||||
"types": "./dist/index.d.ts"
|
||||
}
|
||||
```
|
||||
|
||||
## AWS SDK Integration
|
||||
|
||||
### Dependency Management
|
||||
|
||||
Use specific AWS SDK packages, not the umbrella package:
|
||||
|
||||
```json
|
||||
// ✅ Specific AWS SDK dependencies
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-s3": "3.864.0",
|
||||
"@aws-sdk/s3-presigned-post": "3.864.0",
|
||||
"@aws-sdk/s3-request-presigner": "3.864.0"
|
||||
}
|
||||
|
||||
// ❌ Don't use umbrella package
|
||||
"dependencies": {
|
||||
"aws-sdk": "..." // Too large and unnecessary
|
||||
}
|
||||
```
|
||||
|
||||
### Command Patterns
|
||||
|
||||
Use the AWS SDK v3 command pattern:
|
||||
|
||||
```typescript
|
||||
// ✅ AWS SDK v3 command pattern
|
||||
import { DeleteObjectCommand, GetObjectCommand } from "@aws-sdk/client-s3";
|
||||
import { createPresignedPost } from "@aws-sdk/s3-presigned-post";
|
||||
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
|
||||
|
||||
// Delete operation
|
||||
const deleteCommand = new DeleteObjectCommand({
|
||||
Bucket: S3_BUCKET_NAME,
|
||||
Key: filePath,
|
||||
});
|
||||
await s3Client.send(deleteCommand);
|
||||
|
||||
// Presigned URL for download
|
||||
const getCommand = new GetObjectCommand({
|
||||
Bucket: S3_BUCKET_NAME,
|
||||
Key: filePath,
|
||||
});
|
||||
const signedUrl = await getSignedUrl(s3Client, getCommand, { expiresIn: 3600 });
|
||||
|
||||
// Presigned POST for upload
|
||||
const { url, fields } = await createPresignedPost(s3Client, {
|
||||
Bucket: S3_BUCKET_NAME,
|
||||
Key: filePath,
|
||||
Conditions: [
|
||||
["content-length-range", 0, maxSize || DEFAULT_MAX_SIZE],
|
||||
["eq", "$Content-Type", contentType],
|
||||
],
|
||||
Expires: 3600,
|
||||
});
|
||||
```
|
||||
|
||||
## Export Patterns
|
||||
|
||||
### Selective Exports
|
||||
|
||||
Only export the main service functions:
|
||||
|
||||
```typescript
|
||||
// ✅ packages/storage/src/index.ts
|
||||
export { deleteFile, getSignedDownloadUrl, getSignedUploadUrl } from "./service";
|
||||
|
||||
// ❌ Don't export internal utilities
|
||||
// export { createS3Client } from "./client"; // Internal only
|
||||
// export { S3_BUCKET_NAME } from "./constants"; // Internal only
|
||||
```
|
||||
|
||||
### Type Exports
|
||||
|
||||
Export types that consumers might need:
|
||||
|
||||
```typescript
|
||||
// ✅ Export relevant types if needed by consumers
|
||||
export type { Result, UnknownError, S3CredentialsError, S3ClientError } from "./types/error";
|
||||
```
|
||||
|
||||
## Logging Standards
|
||||
|
||||
### Use Formbricks Logger
|
||||
|
||||
Always use the Formbricks logger for consistency:
|
||||
|
||||
```typescript
|
||||
// ✅ Import and use Formbricks logger
|
||||
import { logger } from "@formbricks/logger";
|
||||
|
||||
// Error logging with context
|
||||
logger.error("S3 operation failed", {
|
||||
operation: "upload",
|
||||
fileName,
|
||||
error: error.message,
|
||||
});
|
||||
|
||||
// Warning for recoverable issues
|
||||
logger.warn("S3 client fallback used", { reason: "credentials_error" });
|
||||
```
|
||||
|
||||
### Logging Levels
|
||||
|
||||
Use appropriate logging levels:
|
||||
|
||||
```typescript
|
||||
// ✅ Error for failures that need attention
|
||||
logger.error("Critical S3 operation failed", { error });
|
||||
|
||||
// ✅ Warn for recoverable issues
|
||||
logger.warn("S3 credentials not set, client unavailable");
|
||||
|
||||
// ✅ Debug for development (avoid in production)
|
||||
logger.debug("S3 operation successful", { operation, duration });
|
||||
|
||||
// ❌ Avoid info logging for routine operations
|
||||
// logger.info("File uploaded successfully"); // Too verbose
|
||||
```
|
||||
|
||||
## Common Pitfalls to Avoid
|
||||
|
||||
1. **Don't expose internal implementation details** - Keep client creation and constants internal
|
||||
2. **Always validate S3 client availability** - Check for undefined client before operations
|
||||
3. **Use specific error types** - Don't use generic Error objects
|
||||
4. **Handle AWS SDK errors appropriately** - Categorize errors by type (see the sketch after this list)
|
||||
5. **Don't hardcode S3 configuration** - Always use environment variables
|
||||
6. **Include comprehensive TSDoc** - Document all parameters and return types
|
||||
7. **Test error scenarios** - Test both success and failure cases
|
||||
8. **Use Result types consistently** - Never throw exceptions in service functions
|
||||
9. **Version pin AWS SDK dependencies** - Avoid breaking changes from updates
|
||||
10. **Keep package.json focused** - Only include necessary dependencies and scripts
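
As a concrete illustration of points 3 and 4, the download path in `service.ts` maps a missing object to a dedicated error code instead of surfacing the raw AWS error (the tests later in this diff exercise exactly this behavior). A condensed, illustrative sketch of that categorization:

```typescript
// Sketch: categorizing an AWS SDK failure into a specific storage error code
// (mirrors the behavior exercised in service.test.ts; the helper name is illustrative)
const toStorageError = (error: unknown): { code: string; message?: string } => {
  const e = error as { name?: string; $metadata?: { httpStatusCode?: number } };
  if (e.name === "NotFound" || e.$metadata?.httpStatusCode === 404) {
    return { code: "file_not_found_error" };
  }
  return { code: "unknown", message: "S3 operation failed" };
};
```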
|
||||
|
||||
## Environment Variables
|
||||
|
||||
### Required Variables
|
||||
|
||||
The storage package requires these environment variables:
|
||||
|
||||
```bash
|
||||
# ✅ Required S3 configuration
|
||||
S3_ACCESS_KEY=your-access-key
|
||||
S3_SECRET_KEY=your-secret-key
|
||||
S3_REGION=us-east-1
|
||||
S3_BUCKET_NAME=your-bucket-name
|
||||
|
||||
# ✅ Optional S3 configuration
|
||||
S3_ENDPOINT_URL=https://s3.amazonaws.com # For custom endpoints
|
||||
S3_FORCE_PATH_STYLE=1 # For MinIO/LocalStack compatibility
|
||||
```
|
||||
|
||||
### Validation Strategy
|
||||
|
||||
Always validate required environment variables at startup:
|
||||
|
||||
```typescript
|
||||
// ✅ Fail fast on missing required variables
|
||||
const requiredVars = [S3_ACCESS_KEY, S3_SECRET_KEY, S3_BUCKET_NAME, S3_REGION];
|
||||
const missingVars = requiredVars.filter(v => !v);
|
||||
|
||||
if (missingVars.length > 0) {
|
||||
return err({
|
||||
code: "s3_credentials_error",
|
||||
message: "Required S3 environment variables are not set",
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
### S3 Client Reuse
|
||||
|
||||
Create S3 client once and reuse:
|
||||
|
||||
```typescript
|
||||
// ✅ Single client instance
|
||||
const s3Client = createS3Client(); // Created once at module level
|
||||
|
||||
// ✅ Reuse in all operations
|
||||
export const uploadFile = async () => {
|
||||
if (!s3Client) return err(/* credentials error */);
|
||||
// Use s3Client
|
||||
};
|
||||
|
||||
// ❌ Don't create new clients for each operation
|
||||
export const uploadFile = async () => {
|
||||
const client = createS3Client(); // Inefficient
|
||||
};
|
||||
```
|
||||
|
||||
### Presigned URL Expiration
|
||||
|
||||
Use appropriate expiration times:
|
||||
|
||||
```typescript
|
||||
// ✅ Reasonable expiration times
|
||||
const UPLOAD_URL_EXPIRY = 3600; // 1 hour for uploads
|
||||
const DOWNLOAD_URL_EXPIRY = 3600; // 1 hour for downloads
|
||||
|
||||
// ❌ Don't use excessively long expiration
|
||||
const LONG_EXPIRY = 86400 * 7; // 7 days - security risk
|
||||
```
|
||||
|
||||
### Error Message Safety
|
||||
|
||||
Don't expose sensitive information in error messages:
|
||||
|
||||
```typescript
|
||||
// ✅ Safe error messages
|
||||
return err({
|
||||
code: "s3_client_error",
|
||||
message: "File operation failed", // Generic message
|
||||
});
|
||||
|
||||
// ❌ Don't expose internal details
|
||||
return err({
|
||||
code: "s3_client_error",
|
||||
message: `AWS Error: ${awsError.message}`, // May contain sensitive info
|
||||
});
|
||||
```
|
||||
|
||||
## Integration Guidelines
|
||||
|
||||
### Usage in Other Packages
|
||||
|
||||
When using the storage package in other Formbricks packages:
|
||||
|
||||
```typescript
|
||||
// ✅ Import specific functions
|
||||
import { deleteFile, getSignedUploadUrl } from "@formbricks/storage";
|
||||
|
||||
// ✅ Handle Result types properly
|
||||
const uploadResult = await getSignedUploadUrl(fileName, contentType, filePath);
|
||||
if (!uploadResult.ok) {
|
||||
// Handle error appropriately
|
||||
throw new Error(uploadResult.error.message);
|
||||
}
|
||||
|
||||
// Use uploadResult.data
|
||||
const { signedUrl, presignedFields } = uploadResult.data;
|
||||
```
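
The same pattern applies to the other exported functions. A hypothetical caller-side helper around `deleteFile`, assuming the Result shape described earlier (the helper name and error handling are illustrative, not from the source):

```typescript
import { deleteFile } from "@formbricks/storage";

// Hypothetical helper; error codes come from the StorageError union used by the package
const removeUpload = async (filePath: string): Promise<void> => {
  const result = await deleteFile(filePath);
  if (!result.ok) {
    throw new Error(`Could not delete ${filePath}: ${result.error.code}`);
  }
};
```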
|
||||
|
||||
### Dependency Declaration
|
||||
|
||||
Add storage package as workspace dependency:
|
||||
|
||||
```json
|
||||
// ✅ In dependent package's package.json
|
||||
"dependencies": {
|
||||
"@formbricks/storage": "workspace:*"
|
||||
}
|
||||
```
|
||||
|
||||
Remember: The storage package is designed to be a self-contained, reusable library that provides type-safe S3 operations with comprehensive error handling. Follow these patterns to maintain consistency and reliability across the Formbricks storage infrastructure.
|
||||
7 packages/storage/.eslintrc.cjs Normal file
@@ -0,0 +1,7 @@
|
||||
module.exports = {
|
||||
extends: ["@formbricks/eslint-config/library.js"],
|
||||
parserOptions: {
|
||||
project: "tsconfig.json",
|
||||
tsconfigRootDir: __dirname,
|
||||
},
|
||||
};
|
||||
52 packages/storage/package.json Normal file
@@ -0,0 +1,52 @@
|
||||
{
|
||||
"name": "@formbricks/storage",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"version": "0.1.0",
|
||||
"homepage": "https://formbricks.com",
|
||||
"description": "Storage Controller for Formbricks",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/formbricks/formbricks"
|
||||
},
|
||||
"keywords": [
|
||||
"Formbricks",
|
||||
"storage",
|
||||
"storage controller"
|
||||
],
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"exports": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"import": "./dist/index.js",
|
||||
"require": "./dist/index.cjs"
|
||||
},
|
||||
"scripts": {
|
||||
"clean": "rimraf .turbo node_modules coverage dist",
|
||||
"lint": "eslint . --ext .ts,.js",
|
||||
"lint:fix": "eslint . --ext .ts,.js --fix",
|
||||
"lint:report": "eslint . --format json --output-file ../../lint-results/app-store.json",
|
||||
"build": "tsc && vite build",
|
||||
"test": "vitest run",
|
||||
"test:coverage": "vitest run --coverage",
|
||||
"go": "vite build --watch --mode dev"
|
||||
},
|
||||
"author": "Formbricks <hola@formbricks.com>",
|
||||
"dependencies": {
|
||||
"@formbricks/logger": "workspace:*",
|
||||
"@aws-sdk/client-s3": "3.864.0",
|
||||
"@aws-sdk/s3-presigned-post": "3.864.0",
|
||||
"@aws-sdk/s3-request-presigner": "3.864.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@formbricks/config-typescript": "workspace:*",
|
||||
"@formbricks/eslint-config": "workspace:*",
|
||||
"vite": "6.3.5",
|
||||
"vite-plugin-dts": "4.5.3",
|
||||
"vitest": "3.1.3",
|
||||
"@vitest/coverage-v8": "3.1.3"
|
||||
}
|
||||
}
|
||||
250 packages/storage/src/client.test.ts Normal file
@@ -0,0 +1,250 @@
|
||||
import { S3Client, type S3ClientConfig } from "@aws-sdk/client-s3";
|
||||
import { beforeEach, describe, expect, test, vi } from "vitest";
|
||||
|
||||
// Mock the AWS SDK S3Client
|
||||
vi.mock("@aws-sdk/client-s3", () => ({
|
||||
S3Client: vi.fn().mockImplementation((config: S3ClientConfig) => ({
|
||||
config,
|
||||
send: vi.fn(),
|
||||
})),
|
||||
}));
|
||||
|
||||
const mockS3Client = vi.mocked(S3Client);
|
||||
|
||||
describe("client.ts", () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
const mockConstants = {
|
||||
S3_ACCESS_KEY: "test-access-key",
|
||||
S3_SECRET_KEY: "test-secret-key",
|
||||
S3_REGION: "us-east-1",
|
||||
S3_BUCKET_NAME: "test-bucket",
|
||||
S3_ENDPOINT_URL: undefined,
|
||||
S3_FORCE_PATH_STYLE: false,
|
||||
};
|
||||
|
||||
describe("createS3ClientFromEnv", () => {
|
||||
test("should create S3 client with valid credentials", async () => {
|
||||
// Mock constants with valid credentials
|
||||
vi.doMock("./constants", () => mockConstants);
|
||||
|
||||
// Dynamic import to get fresh module with mocked constants
|
||||
const { createS3ClientFromEnv } = await import("./client");
|
||||
|
||||
const client = createS3ClientFromEnv();
|
||||
|
||||
expect(mockS3Client).toHaveBeenCalledWith({
|
||||
credentials: {
|
||||
accessKeyId: mockConstants.S3_ACCESS_KEY,
|
||||
secretAccessKey: mockConstants.S3_SECRET_KEY,
|
||||
},
|
||||
region: mockConstants.S3_REGION,
|
||||
endpoint: mockConstants.S3_ENDPOINT_URL,
|
||||
forcePathStyle: mockConstants.S3_FORCE_PATH_STYLE,
|
||||
});
|
||||
|
||||
expect(client.ok).toBe(true);
|
||||
|
||||
if (client.ok) {
|
||||
expect(client.data).toBeDefined();
|
||||
}
|
||||
});
|
||||
|
||||
test("should create S3 client with endpoint URL", async () => {
|
||||
// Mock constants with endpoint URL
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
S3_ENDPOINT_URL: "https://custom-endpoint.com",
|
||||
S3_FORCE_PATH_STYLE: true,
|
||||
}));
|
||||
|
||||
const { createS3ClientFromEnv } = await import("./client");
|
||||
|
||||
const client = createS3ClientFromEnv();
|
||||
|
||||
expect(mockS3Client).toHaveBeenCalledWith({
|
||||
credentials: {
|
||||
accessKeyId: mockConstants.S3_ACCESS_KEY,
|
||||
secretAccessKey: mockConstants.S3_SECRET_KEY,
|
||||
},
|
||||
region: mockConstants.S3_REGION,
|
||||
endpoint: "https://custom-endpoint.com",
|
||||
forcePathStyle: true,
|
||||
});
|
||||
|
||||
expect(client.ok).toBe(true);
|
||||
|
||||
if (client.ok) {
|
||||
expect(client.data).toBeDefined();
|
||||
}
|
||||
});
|
||||
|
||||
test("should return error when access key is missing", async () => {
|
||||
// Mock constants with missing access key
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
S3_ACCESS_KEY: undefined,
|
||||
}));
|
||||
|
||||
const { createS3ClientFromEnv } = await import("./client");
|
||||
|
||||
const result = createS3ClientFromEnv();
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("s3_credentials_error");
|
||||
}
|
||||
});
|
||||
|
||||
test("should return error when secret key is missing", async () => {
|
||||
// Mock constants with missing secret key
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
S3_SECRET_KEY: undefined,
|
||||
}));
|
||||
|
||||
const { createS3ClientFromEnv } = await import("./client");
|
||||
|
||||
const result = createS3ClientFromEnv();
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("s3_credentials_error");
|
||||
}
|
||||
});
|
||||
|
||||
test("should return error when both credentials are missing", async () => {
|
||||
// Mock constants with no credentials
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
S3_ACCESS_KEY: undefined,
|
||||
S3_SECRET_KEY: undefined,
|
||||
}));
|
||||
|
||||
const { createS3ClientFromEnv } = await import("./client");
|
||||
|
||||
const result = createS3ClientFromEnv();
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("s3_credentials_error");
|
||||
}
|
||||
});
|
||||
|
||||
test("should return error when credentials are empty strings", async () => {
|
||||
// Mock constants with empty string credentials
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
S3_ACCESS_KEY: "",
|
||||
S3_SECRET_KEY: "",
|
||||
}));
|
||||
|
||||
const { createS3ClientFromEnv } = await import("./client");
|
||||
|
||||
const result = createS3ClientFromEnv();
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("s3_credentials_error");
|
||||
}
|
||||
});
|
||||
|
||||
test("should return error when mixed empty and undefined credentials", async () => {
|
||||
// Mock constants with mixed empty and undefined
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
S3_ACCESS_KEY: "",
|
||||
S3_SECRET_KEY: undefined,
|
||||
}));
|
||||
|
||||
const { createS3ClientFromEnv } = await import("./client");
|
||||
|
||||
const result = createS3ClientFromEnv();
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("s3_credentials_error");
|
||||
}
|
||||
});
|
||||
|
||||
test("should handle empty endpoint URL", async () => {
|
||||
// Mock constants with empty endpoint URL
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
S3_ENDPOINT_URL: "",
|
||||
}));
|
||||
|
||||
const { createS3ClientFromEnv } = await import("./client");
|
||||
|
||||
const result = createS3ClientFromEnv();
|
||||
|
||||
expect(mockS3Client).toHaveBeenCalledWith({
|
||||
credentials: {
|
||||
accessKeyId: mockConstants.S3_ACCESS_KEY,
|
||||
secretAccessKey: mockConstants.S3_SECRET_KEY,
|
||||
},
|
||||
region: mockConstants.S3_REGION,
|
||||
endpoint: "",
|
||||
forcePathStyle: mockConstants.S3_FORCE_PATH_STYLE,
|
||||
});
|
||||
|
||||
expect(result.ok).toBe(true);
|
||||
if (result.ok) {
|
||||
expect(result.data).toBeDefined();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("createS3Client", () => {
|
||||
test("should return provided S3 client when passed", async () => {
|
||||
// Use a fresh import to avoid module cache issues
|
||||
const { createS3Client } = await import("./client");
|
||||
const mockClient = new S3Client({});
|
||||
|
||||
const result = createS3Client(mockClient);
|
||||
|
||||
expect(result).toBe(mockClient);
|
||||
});
|
||||
|
||||
test("should create new client from environment when no client provided", async () => {
|
||||
// Mock constants for this test
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
}));
|
||||
|
||||
const { createS3Client } = await import("./client");
|
||||
|
||||
const result = createS3Client();
|
||||
|
||||
expect(mockS3Client).toHaveBeenCalledWith({
|
||||
credentials: {
|
||||
accessKeyId: mockConstants.S3_ACCESS_KEY,
|
||||
secretAccessKey: mockConstants.S3_SECRET_KEY,
|
||||
},
|
||||
region: mockConstants.S3_REGION,
|
||||
endpoint: mockConstants.S3_ENDPOINT_URL,
|
||||
forcePathStyle: mockConstants.S3_FORCE_PATH_STYLE,
|
||||
});
|
||||
|
||||
expect(result).toBeDefined();
|
||||
});
|
||||
|
||||
test("should return undefined when creating from env fails and no client provided", async () => {
|
||||
// Mock constants with missing credentials
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
S3_ACCESS_KEY: undefined,
|
||||
S3_SECRET_KEY: undefined,
|
||||
}));
|
||||
|
||||
const { createS3Client } = await import("./client");
|
||||
|
||||
const result = createS3Client();
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
56 packages/storage/src/client.ts Normal file
@@ -0,0 +1,56 @@
|
||||
import { S3Client } from "@aws-sdk/client-s3";
|
||||
import { logger } from "@formbricks/logger";
|
||||
import { ErrorCode, type Result, type StorageError, err, ok } from "../types/error";
|
||||
import {
|
||||
S3_ACCESS_KEY,
|
||||
S3_BUCKET_NAME,
|
||||
S3_ENDPOINT_URL,
|
||||
S3_FORCE_PATH_STYLE,
|
||||
S3_REGION,
|
||||
S3_SECRET_KEY,
|
||||
} from "./constants";
|
||||
|
||||
/**
|
||||
* Create an S3 client from environment variables
|
||||
* @returns A Result containing the S3 client or an error: S3CredentialsError | UnknownError
|
||||
*/
|
||||
export const createS3ClientFromEnv = (): Result<S3Client, StorageError> => {
|
||||
try {
|
||||
if (!S3_ACCESS_KEY || !S3_SECRET_KEY || !S3_BUCKET_NAME || !S3_REGION) {
|
||||
logger.error("S3 Client: S3 credentials are not set");
|
||||
return err({
|
||||
code: ErrorCode.S3CredentialsError,
|
||||
});
|
||||
}
|
||||
|
||||
const s3ClientInstance = new S3Client({
|
||||
credentials: { accessKeyId: S3_ACCESS_KEY, secretAccessKey: S3_SECRET_KEY },
|
||||
region: S3_REGION,
|
||||
endpoint: S3_ENDPOINT_URL,
|
||||
forcePathStyle: S3_FORCE_PATH_STYLE,
|
||||
});
|
||||
|
||||
return ok(s3ClientInstance);
|
||||
} catch (error) {
|
||||
logger.error({ error }, "Error creating S3 client from environment variables");
|
||||
return err({
|
||||
code: ErrorCode.Unknown,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Create an S3 client from an existing client or from environment variables
|
||||
* @param s3Client - An existing S3 client
|
||||
* @returns An S3 client or undefined if the S3 credentials are not set in the environment variables or if there is an error creating the client
|
||||
*/
|
||||
export const createS3Client = (s3Client?: S3Client): S3Client | undefined => {
|
||||
if (!s3Client) {
|
||||
const result = createS3ClientFromEnv();
|
||||
if (result.ok) return result.data;
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return s3Client;
|
||||
};
|
||||
126 packages/storage/src/constants.test.ts Normal file
@@ -0,0 +1,126 @@
|
||||
import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
|
||||
|
||||
describe("constants.ts", () => {
|
||||
const originalEnv = process.env;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetModules();
|
||||
// Reset process.env to a clean state
|
||||
process.env = { ...originalEnv };
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original environment
|
||||
process.env = originalEnv;
|
||||
});
|
||||
|
||||
describe("environment variable exports", () => {
|
||||
test("should export S3_ACCESS_KEY from environment", async () => {
|
||||
process.env.S3_ACCESS_KEY = "test-access-key-123";
|
||||
|
||||
const { S3_ACCESS_KEY } = await import("./constants");
|
||||
|
||||
expect(S3_ACCESS_KEY).toBe("test-access-key-123");
|
||||
});
|
||||
|
||||
test("should export undefined when S3_ACCESS_KEY is not set", async () => {
|
||||
delete process.env.S3_ACCESS_KEY;
|
||||
|
||||
const { S3_ACCESS_KEY } = await import("./constants");
|
||||
|
||||
expect(S3_ACCESS_KEY).toBeUndefined();
|
||||
});
|
||||
|
||||
test("should export S3_SECRET_KEY from environment", async () => {
|
||||
process.env.S3_SECRET_KEY = "test-secret-key-456";
|
||||
|
||||
const { S3_SECRET_KEY } = await import("./constants");
|
||||
|
||||
expect(S3_SECRET_KEY).toBe("test-secret-key-456");
|
||||
});
|
||||
|
||||
test("should export undefined when S3_SECRET_KEY is not set", async () => {
|
||||
delete process.env.S3_SECRET_KEY;
|
||||
|
||||
const { S3_SECRET_KEY } = await import("./constants");
|
||||
|
||||
expect(S3_SECRET_KEY).toBeUndefined();
|
||||
});
|
||||
|
||||
test("should export S3_REGION from environment", async () => {
|
||||
process.env.S3_REGION = "eu-west-1";
|
||||
|
||||
const { S3_REGION } = await import("./constants");
|
||||
|
||||
expect(S3_REGION).toBe("eu-west-1");
|
||||
});
|
||||
|
||||
test("should export undefined when S3_REGION is not set", async () => {
|
||||
delete process.env.S3_REGION;
|
||||
|
||||
const { S3_REGION } = await import("./constants");
|
||||
|
||||
expect(S3_REGION).toBeUndefined();
|
||||
});
|
||||
|
||||
test("should export S3_ENDPOINT_URL from environment", async () => {
|
||||
process.env.S3_ENDPOINT_URL = "https://custom-s3-endpoint.com";
|
||||
|
||||
const { S3_ENDPOINT_URL } = await import("./constants");
|
||||
|
||||
expect(S3_ENDPOINT_URL).toBe("https://custom-s3-endpoint.com");
|
||||
});
|
||||
|
||||
test("should export undefined when S3_ENDPOINT_URL is not set", async () => {
|
||||
delete process.env.S3_ENDPOINT_URL;
|
||||
|
||||
const { S3_ENDPOINT_URL } = await import("./constants");
|
||||
|
||||
expect(S3_ENDPOINT_URL).toBeUndefined();
|
||||
});
|
||||
|
||||
test("should export S3_BUCKET_NAME from environment", async () => {
|
||||
process.env.S3_BUCKET_NAME = "my-storage-bucket";
|
||||
|
||||
const { S3_BUCKET_NAME } = await import("./constants");
|
||||
|
||||
expect(S3_BUCKET_NAME).toBe("my-storage-bucket");
|
||||
});
|
||||
|
||||
test("should export undefined when S3_BUCKET_NAME is not set", async () => {
|
||||
delete process.env.S3_BUCKET_NAME;
|
||||
|
||||
const { S3_BUCKET_NAME } = await import("./constants");
|
||||
|
||||
expect(S3_BUCKET_NAME).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("boolean conversion constants", () => {
|
||||
describe("S3_FORCE_PATH_STYLE", () => {
|
||||
test("should be true when S3_FORCE_PATH_STYLE is '1'", async () => {
|
||||
process.env.S3_FORCE_PATH_STYLE = "1";
|
||||
|
||||
const { S3_FORCE_PATH_STYLE } = await import("./constants");
|
||||
|
||||
expect(S3_FORCE_PATH_STYLE).toBe(true);
|
||||
});
|
||||
|
||||
test("should be false when S3_FORCE_PATH_STYLE is '0'", async () => {
|
||||
process.env.S3_FORCE_PATH_STYLE = "0";
|
||||
|
||||
const { S3_FORCE_PATH_STYLE } = await import("./constants");
|
||||
|
||||
expect(S3_FORCE_PATH_STYLE).toBe(false);
|
||||
});
|
||||
|
||||
test("should be false when S3_FORCE_PATH_STYLE is undefined", async () => {
|
||||
delete process.env.S3_FORCE_PATH_STYLE;
|
||||
|
||||
const { S3_FORCE_PATH_STYLE } = await import("./constants");
|
||||
|
||||
expect(S3_FORCE_PATH_STYLE).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
6 packages/storage/src/constants.ts Normal file
@@ -0,0 +1,6 @@
|
||||
export const S3_ACCESS_KEY = process.env.S3_ACCESS_KEY;
|
||||
export const S3_SECRET_KEY = process.env.S3_SECRET_KEY;
|
||||
export const S3_REGION = process.env.S3_REGION;
|
||||
export const S3_ENDPOINT_URL = process.env.S3_ENDPOINT_URL;
|
||||
export const S3_FORCE_PATH_STYLE = process.env.S3_FORCE_PATH_STYLE === "1";
|
||||
export const S3_BUCKET_NAME = process.env.S3_BUCKET_NAME;
|
||||
1 packages/storage/src/index.ts Normal file
@@ -0,0 +1 @@
|
||||
export { deleteFile, getSignedDownloadUrl, getSignedUploadUrl, deleteFilesByPrefix } from "./service";
|
||||
801 packages/storage/src/service.test.ts Normal file
@@ -0,0 +1,801 @@
|
||||
import {
|
||||
DeleteObjectCommand,
|
||||
DeleteObjectsCommand,
|
||||
GetObjectCommand,
|
||||
HeadObjectCommand,
|
||||
ListObjectsCommand,
|
||||
} from "@aws-sdk/client-s3";
|
||||
import { createPresignedPost } from "@aws-sdk/s3-presigned-post";
|
||||
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
|
||||
import { beforeEach, describe, expect, test, vi } from "vitest";
|
||||
|
||||
// Mock AWS SDK modules
|
||||
vi.mock("@aws-sdk/client-s3", () => ({
|
||||
DeleteObjectCommand: vi.fn(),
|
||||
DeleteObjectsCommand: vi.fn(),
|
||||
GetObjectCommand: vi.fn(),
|
||||
HeadObjectCommand: vi.fn(),
|
||||
ListObjectsCommand: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("@aws-sdk/s3-presigned-post", () => ({
|
||||
createPresignedPost: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("@aws-sdk/s3-request-presigner", () => ({
|
||||
getSignedUrl: vi.fn(),
|
||||
}));
|
||||
|
||||
// Mock client module
|
||||
vi.mock("./client", () => ({
|
||||
createS3Client: vi.fn(() => ({
|
||||
send: vi.fn(),
|
||||
})),
|
||||
}));
|
||||
|
||||
const mockDeleteObjectCommand = vi.mocked(DeleteObjectCommand);
|
||||
const mockDeleteObjectsCommand = vi.mocked(DeleteObjectsCommand);
|
||||
const mockGetObjectCommand = vi.mocked(GetObjectCommand);
|
||||
const mockHeadObjectCommand = vi.mocked(HeadObjectCommand);
|
||||
const mockListObjectsCommand = vi.mocked(ListObjectsCommand);
|
||||
const mockCreatePresignedPost = vi.mocked(createPresignedPost);
|
||||
const mockGetSignedUrl = vi.mocked(getSignedUrl);
|
||||
|
||||
describe("service.ts", () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
const mockConstants = {
|
||||
S3_BUCKET_NAME: "test-bucket",
|
||||
};
|
||||
|
||||
const mockMaxSize = 1024 * 1024 * 10; // 10MB
|
||||
|
||||
describe("getSignedUploadUrl", () => {
|
||||
test("should create presigned upload URL", async () => {
|
||||
// Mock constants for non-cloud environment
|
||||
vi.doMock("./constants", () => mockConstants);
|
||||
|
||||
// Mock createPresignedPost response
|
||||
const mockResponse = {
|
||||
fields: { key: "test-field" },
|
||||
url: "https://example.com",
|
||||
};
|
||||
|
||||
mockCreatePresignedPost.mockResolvedValueOnce(mockResponse);
|
||||
|
||||
const { getSignedUploadUrl } = await import("./service");
|
||||
|
||||
const result = await getSignedUploadUrl("test-file.jpg", "image/jpeg", "uploads/images", mockMaxSize);
|
||||
|
||||
expect(mockCreatePresignedPost).toHaveBeenCalledWith(expect.any(Object), {
|
||||
Expires: 2 * 60,
|
||||
Bucket: mockConstants.S3_BUCKET_NAME,
|
||||
Key: "uploads/images/test-file.jpg",
|
||||
Fields: {
|
||||
"Content-Type": "image/jpeg",
|
||||
"Content-Encoding": "base64",
|
||||
},
|
||||
Conditions: [["content-length-range", 0, mockMaxSize]],
|
||||
});
|
||||
|
||||
expect(result.ok).toBe(true);
|
||||
|
||||
if (result.ok) {
|
||||
expect(result.data).toEqual({
|
||||
signedUrl: mockResponse.url,
|
||||
presignedFields: mockResponse.fields,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
test("should return error if bucket name is not set", async () => {
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
S3_BUCKET_NAME: undefined,
|
||||
}));
|
||||
|
||||
const { getSignedUploadUrl } = await import("./service");
|
||||
|
||||
const result = await getSignedUploadUrl("test.txt", "text/plain", "text", mockMaxSize);
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("s3_credentials_error");
|
||||
}
|
||||
});
|
||||
|
||||
test("should return error if s3Client is null", async () => {
|
||||
vi.doMock("./constants", () => mockConstants);
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => undefined),
|
||||
}));
|
||||
|
||||
const { getSignedUploadUrl } = await import("./service");
|
||||
|
||||
const result = await getSignedUploadUrl("test.txt", "text/plain", "text", mockMaxSize);
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("s3_client_error");
|
||||
}
|
||||
});
|
||||
|
||||
test("should handle createPresignedPost throwing an error", async () => {
|
||||
vi.doMock("./constants", () => mockConstants);
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => ({
|
||||
send: vi.fn(),
|
||||
})),
|
||||
}));
|
||||
|
||||
mockCreatePresignedPost.mockRejectedValueOnce(new Error("AWS Error"));
|
||||
|
||||
const { getSignedUploadUrl } = await import("./service");
|
||||
|
||||
const result = await getSignedUploadUrl("test.txt", "text/plain", "text", mockMaxSize);
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("unknown");
|
||||
}
|
||||
});
|
||||
|
||||
test("should create presigned upload URL without maxSize", async () => {
|
||||
vi.doMock("./constants", () => mockConstants);
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => ({
|
||||
send: vi.fn(),
|
||||
})),
|
||||
}));
|
||||
|
||||
const mockResponse = {
|
||||
fields: { key: "test-field" },
|
||||
url: "https://example.com",
|
||||
};
|
||||
|
||||
mockCreatePresignedPost.mockResolvedValueOnce(mockResponse);
|
||||
|
||||
const { getSignedUploadUrl } = await import("./service");
|
||||
|
||||
const result = await getSignedUploadUrl("test-file.jpg", "image/jpeg", "uploads/images");
|
||||
|
||||
expect(mockCreatePresignedPost).toHaveBeenCalledWith(expect.any(Object), {
|
||||
Expires: 2 * 60,
|
||||
Bucket: mockConstants.S3_BUCKET_NAME,
|
||||
Key: "uploads/images/test-file.jpg",
|
||||
Fields: {
|
||||
"Content-Type": "image/jpeg",
|
||||
"Content-Encoding": "base64",
|
||||
},
|
||||
Conditions: [["content-length-range", 0, mockMaxSize]],
|
||||
});
|
||||
|
||||
expect(result.ok).toBe(true);
|
||||
|
||||
if (result.ok) {
|
||||
expect(result.data).toEqual({
|
||||
signedUrl: mockResponse.url,
|
||||
presignedFields: mockResponse.fields,
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("getSignedDownloadUrl", () => {
|
||||
test("should return error if bucket name is not set", async () => {
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
S3_BUCKET_NAME: undefined,
|
||||
}));
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => ({
|
||||
send: vi.fn(),
|
||||
})),
|
||||
}));
|
||||
|
||||
const { getSignedDownloadUrl } = await import("./service");
|
||||
|
||||
const result = await getSignedDownloadUrl("documents/important-file.pdf");
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("s3_credentials_error");
|
||||
}
|
||||
});
|
||||
|
||||
test("should create signed download URL", async () => {
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
}));
|
||||
|
||||
const mockS3Client = {
|
||||
send: vi
|
||||
.fn()
|
||||
.mockResolvedValueOnce({}) // HeadObjectCommand response (file exists)
|
||||
.mockResolvedValueOnce({}), // Any other send calls
|
||||
};
|
||||
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => mockS3Client),
|
||||
}));
|
||||
|
||||
const mockSignedUrl = "https://example.com/important-file.pdf?signature=abc123";
|
||||
mockGetSignedUrl.mockResolvedValueOnce(mockSignedUrl);
|
||||
|
||||
const { getSignedDownloadUrl } = await import("./service");
|
||||
|
||||
const result = await getSignedDownloadUrl("documents/important-file.pdf");
|
||||
|
||||
expect(mockHeadObjectCommand).toHaveBeenCalledWith({
|
||||
Bucket: mockConstants.S3_BUCKET_NAME,
|
||||
Key: "documents/important-file.pdf",
|
||||
});
|
||||
|
||||
expect(mockGetObjectCommand).toHaveBeenCalledWith({
|
||||
Bucket: mockConstants.S3_BUCKET_NAME,
|
||||
Key: "documents/important-file.pdf",
|
||||
});
|
||||
|
||||
expect(mockGetSignedUrl).toHaveBeenCalledWith(
|
||||
expect.any(Object), // s3Client
|
||||
expect.any(Object), // GetObjectCommand instance
|
||||
{ expiresIn: 60 * 30 } // 30 minutes
|
||||
);
|
||||
|
||||
expect(result.ok).toBe(true);
|
||||
|
||||
if (result.ok) {
|
||||
expect(result.data).toBe(mockSignedUrl);
|
||||
}
|
||||
});
|
||||
|
||||
test("should handle different file keys", async () => {
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
}));
|
||||
|
||||
const mockS3Client = {
|
||||
send: vi
|
||||
.fn()
|
||||
.mockResolvedValueOnce({}) // HeadObjectCommand response (file exists)
|
||||
.mockResolvedValueOnce({}), // Any other send calls
|
||||
};
|
||||
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => mockS3Client),
|
||||
}));
|
||||
|
||||
mockGetSignedUrl.mockResolvedValueOnce("https://example.com/nested/file.jpg");
|
||||
|
||||
const { getSignedDownloadUrl } = await import("./service");
|
||||
|
||||
await getSignedDownloadUrl("path/to/nested/file.jpg");
|
||||
|
||||
expect(mockHeadObjectCommand).toHaveBeenCalledWith({
|
||||
Bucket: mockConstants.S3_BUCKET_NAME,
|
||||
Key: "path/to/nested/file.jpg",
|
||||
});
|
||||
|
||||
expect(mockGetObjectCommand).toHaveBeenCalledWith({
|
||||
Bucket: mockConstants.S3_BUCKET_NAME,
|
||||
Key: "path/to/nested/file.jpg",
|
||||
});
|
||||
});
|
||||
|
||||
test("should return error if s3Client is null", async () => {
|
||||
vi.doMock("./constants", () => mockConstants);
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => undefined),
|
||||
}));
|
||||
|
||||
const { getSignedDownloadUrl } = await import("./service");
|
||||
|
||||
const result = await getSignedDownloadUrl("test-file.pdf");
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("s3_client_error");
|
||||
}
|
||||
});
|
||||
|
||||
test("should handle getSignedUrl throwing an error", async () => {
|
||||
vi.doMock("./constants", () => mockConstants);
|
||||
|
||||
const mockS3Client = {
|
||||
send: vi
|
||||
.fn()
|
||||
.mockResolvedValueOnce({}) // HeadObjectCommand response (file exists)
|
||||
.mockResolvedValueOnce({}), // Any other send calls
|
||||
};
|
||||
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => mockS3Client),
|
||||
}));
|
||||
|
||||
mockGetSignedUrl.mockRejectedValueOnce(new Error("AWS Error"));
|
||||
|
||||
const { getSignedDownloadUrl } = await import("./service");
|
||||
|
||||
const result = await getSignedDownloadUrl("test-file.pdf");
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("unknown");
|
||||
}
|
||||
});
|
||||
|
||||
test("should return file not found error when file does not exist", async () => {
|
||||
vi.doMock("./constants", () => mockConstants);
|
||||
|
||||
const notFoundError = new Error("Not Found");
|
||||
notFoundError.name = "NotFound";
|
||||
|
||||
const mockS3Client = {
|
||||
send: vi.fn().mockRejectedValueOnce(notFoundError),
|
||||
};
|
||||
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => mockS3Client),
|
||||
}));
|
||||
|
||||
const { getSignedDownloadUrl } = await import("./service");
|
||||
|
||||
const result = await getSignedDownloadUrl("non-existent-file.pdf");
|
||||
|
||||
expect(mockHeadObjectCommand).toHaveBeenCalledWith({
|
||||
Bucket: mockConstants.S3_BUCKET_NAME,
|
||||
Key: "non-existent-file.pdf",
|
||||
});
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("file_not_found_error");
|
||||
}
|
||||
});
|
||||
|
||||
test("should return file not found error when S3 returns 404", async () => {
|
||||
vi.doMock("./constants", () => mockConstants);
|
||||
|
||||
const notFoundError = {
|
||||
$metadata: { httpStatusCode: 404 },
|
||||
};
|
||||
|
||||
const mockS3Client = {
|
||||
send: vi.fn().mockRejectedValueOnce(notFoundError),
|
||||
};
|
||||
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => mockS3Client),
|
||||
}));
|
||||
|
||||
const { getSignedDownloadUrl } = await import("./service");
|
||||
|
||||
const result = await getSignedDownloadUrl("another-non-existent-file.pdf");
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("file_not_found_error");
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("deleteFile", () => {
|
||||
test("should return error if bucket name is not set", async () => {
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
S3_BUCKET_NAME: undefined,
|
||||
}));
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => ({
|
||||
send: vi.fn(),
|
||||
})),
|
||||
}));
|
||||
|
||||
const { deleteFile } = await import("./service");
|
||||
|
||||
const result = await deleteFile("test-file.txt");
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("s3_credentials_error");
|
||||
}
|
||||
});
|
||||
|
||||
test("should delete file from S3", async () => {
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
}));
|
||||
|
||||
const mockS3Client = {
|
||||
send: vi.fn().mockResolvedValueOnce({}),
|
||||
};
|
||||
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => mockS3Client),
|
||||
}));
|
||||
|
||||
const { deleteFile } = await import("./service");
|
||||
|
||||
const result = await deleteFile("files/to-delete.txt");
|
||||
|
||||
expect(mockDeleteObjectCommand).toHaveBeenCalledWith({
|
||||
Bucket: mockConstants.S3_BUCKET_NAME,
|
||||
Key: "files/to-delete.txt",
|
||||
});
|
||||
|
||||
expect(mockS3Client.send).toHaveBeenCalledWith(expect.any(Object));
|
||||
|
||||
expect(result.ok).toBe(true);
|
||||
|
||||
if (result.ok) {
|
||||
expect(result.data).toBeUndefined();
|
||||
}
|
||||
});
|
||||
|
||||
test("should handle different file keys for deletion", async () => {
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
}));
|
||||
|
||||
const mockS3Client = {
|
||||
send: vi.fn().mockResolvedValueOnce({}),
|
||||
};
|
||||
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => mockS3Client),
|
||||
}));
|
||||
|
||||
const { deleteFile } = await import("./service");
|
||||
|
||||
const result = await deleteFile("deep/nested/path/file.zip");
|
||||
|
||||
expect(mockDeleteObjectCommand).toHaveBeenCalledWith({
|
||||
Bucket: mockConstants.S3_BUCKET_NAME,
|
||||
Key: "deep/nested/path/file.zip",
|
||||
});
|
||||
|
||||
expect(result.ok).toBe(true);
|
||||
|
||||
if (result.ok) {
|
||||
expect(result.data).toBeUndefined();
|
||||
}
|
||||
});
|
||||
|
||||
test("should not return anything", async () => {
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
}));
|
||||
|
||||
const mockS3Client = {
|
||||
send: vi.fn().mockResolvedValueOnce({}),
|
||||
};
|
||||
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => mockS3Client),
|
||||
}));
|
||||
|
||||
const { deleteFile } = await import("./service");
|
||||
|
||||
const result = await deleteFile("test-file.txt");
|
||||
|
||||
expect(mockS3Client.send).toHaveBeenCalledWith(expect.any(Object));
|
||||
|
||||
expect(result.ok).toBe(true);
|
||||
|
||||
if (result.ok) {
|
||||
expect(result.data).toBeUndefined();
|
||||
}
|
||||
});
|
||||
|
||||
test("should return error if s3Client is null", async () => {
|
||||
vi.doMock("./constants", () => mockConstants);
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => undefined),
|
||||
}));
|
||||
|
||||
const { deleteFile } = await import("./service");
|
||||
|
||||
const result = await deleteFile("test-file.txt");
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("s3_client_error");
|
||||
}
|
||||
});
|
||||
|
||||
test("should handle s3Client.send throwing an error", async () => {
|
||||
vi.doMock("./constants", () => mockConstants);
|
||||
|
||||
const mockS3Client = {
|
||||
send: vi.fn().mockRejectedValueOnce(new Error("AWS Error")),
|
||||
};
|
||||
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => mockS3Client),
|
||||
}));
|
||||
|
||||
const { deleteFile } = await import("./service");
|
||||
|
||||
const result = await deleteFile("test-file.txt");
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("unknown");
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("deleteFilesByPrefix", () => {
|
||||
test("should return error if bucket name is not set", async () => {
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
S3_BUCKET_NAME: undefined,
|
||||
}));
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => ({
|
||||
send: vi.fn(),
|
||||
})),
|
||||
}));
|
||||
|
||||
const { deleteFilesByPrefix } = await import("./service");
|
||||
|
||||
const result = await deleteFilesByPrefix("uploads/images/");
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("s3_credentials_error");
|
||||
}
|
||||
});
|
||||
|
||||
test("should return error if s3Client is null", async () => {
|
||||
vi.doMock("./constants", () => mockConstants);
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => undefined),
|
||||
}));
|
||||
|
||||
const { deleteFilesByPrefix } = await import("./service");
|
||||
|
||||
const result = await deleteFilesByPrefix("uploads/images/");
|
||||
|
||||
expect(result.ok).toBe(false);
|
||||
|
||||
if (!result.ok) {
|
||||
expect(result.error.code).toBe("s3_client_error");
|
||||
}
|
||||
});
|
||||
|
||||
test("should delete multiple files with given prefix", async () => {
|
||||
vi.doMock("./constants", () => ({
|
||||
...mockConstants,
|
||||
}));
|
||||
|
||||
const mockS3Client = {
|
||||
send: vi
|
||||
.fn()
|
||||
.mockResolvedValueOnce({
|
||||
Contents: [
|
||||
{ Key: "uploads/images/file1.jpg" },
|
||||
{ Key: "uploads/images/file2.png" },
|
||||
{ Key: "uploads/images/subfolder/file3.gif" },
|
||||
],
|
||||
})
|
||||
.mockResolvedValueOnce({}), // DeleteObjectsCommand response
|
||||
};
|
||||
|
||||
vi.doMock("./client", () => ({
|
||||
createS3Client: vi.fn(() => mockS3Client),
|
||||
}));
|
||||
|
||||
const { deleteFilesByPrefix } = await import("./service");
|
||||
|
||||
      const result = await deleteFilesByPrefix("uploads/images/");

      expect(mockListObjectsCommand).toHaveBeenCalledWith({
        Bucket: mockConstants.S3_BUCKET_NAME,
        Prefix: "uploads/images/",
      });

      expect(mockDeleteObjectsCommand).toHaveBeenCalledWith({
        Bucket: mockConstants.S3_BUCKET_NAME,
        Delete: {
          Objects: [
            { Key: "uploads/images/file1.jpg" },
            { Key: "uploads/images/file2.png" },
            { Key: "uploads/images/subfolder/file3.gif" },
          ],
        },
      });

      expect(mockS3Client.send).toHaveBeenCalledTimes(2);

      expect(result.ok).toBe(true);

      if (result.ok) {
        expect(result.data).toBeUndefined();
      }
    });

    test("should handle empty result list (no files found)", async () => {
      vi.doMock("./constants", () => ({
        ...mockConstants,
      }));

      const mockS3Client = {
        send: vi.fn().mockResolvedValueOnce({
          Contents: undefined, // No files found
        }),
      };

      vi.doMock("./client", () => ({
        createS3Client: vi.fn(() => mockS3Client),
      }));

      const { deleteFilesByPrefix } = await import("./service");

      const result = await deleteFilesByPrefix("uploads/non-existent/");

      expect(mockListObjectsCommand).toHaveBeenCalledWith({
        Bucket: mockConstants.S3_BUCKET_NAME,
        Prefix: "uploads/non-existent/",
      });

      // Should not call DeleteObjectsCommand when no files found
      expect(mockDeleteObjectsCommand).not.toHaveBeenCalled();
      expect(mockS3Client.send).toHaveBeenCalledTimes(1);

      expect(result.ok).toBe(true);

      if (result.ok) {
        expect(result.data).toBeUndefined();
      }
    });

    test("should handle empty Contents array", async () => {
      vi.doMock("./constants", () => ({
        ...mockConstants,
      }));

      const mockS3Client = {
        send: vi.fn().mockResolvedValueOnce({
          Contents: [], // Empty array
        }),
      };

      vi.doMock("./client", () => ({
        createS3Client: vi.fn(() => mockS3Client),
      }));

      const { deleteFilesByPrefix } = await import("./service");

      const result = await deleteFilesByPrefix("uploads/empty/");

      expect(mockListObjectsCommand).toHaveBeenCalledWith({
        Bucket: mockConstants.S3_BUCKET_NAME,
        Prefix: "uploads/empty/",
      });

      // Should not call DeleteObjectsCommand when Contents is empty
      expect(mockDeleteObjectsCommand).not.toHaveBeenCalled();
      expect(mockS3Client.send).toHaveBeenCalledTimes(1);

      expect(result.ok).toBe(true);

      if (result.ok) {
        expect(result.data).toBeUndefined();
      }
    });

    test("should handle different prefix patterns", async () => {
      vi.doMock("./constants", () => ({
        ...mockConstants,
      }));

      const mockS3Client = {
        send: vi
          .fn()
          .mockResolvedValueOnce({
            Contents: [{ Key: "surveys/123/responses/response1.json" }],
          })
          .mockResolvedValueOnce({}),
      };

      vi.doMock("./client", () => ({
        createS3Client: vi.fn(() => mockS3Client),
      }));

      const { deleteFilesByPrefix } = await import("./service");

      const result = await deleteFilesByPrefix("surveys/123/responses/");

      expect(mockListObjectsCommand).toHaveBeenCalledWith({
        Bucket: mockConstants.S3_BUCKET_NAME,
        Prefix: "surveys/123/responses/",
      });

      expect(mockDeleteObjectsCommand).toHaveBeenCalledWith({
        Bucket: mockConstants.S3_BUCKET_NAME,
        Delete: {
          Objects: [{ Key: "surveys/123/responses/response1.json" }],
        },
      });

      expect(result.ok).toBe(true);
    });

    test("should handle ListObjectsCommand throwing an error", async () => {
      vi.doMock("./constants", () => mockConstants);

      const mockS3Client = {
        send: vi.fn().mockRejectedValueOnce(new Error("AWS ListObjects Error")),
      };

      vi.doMock("./client", () => ({
        createS3Client: vi.fn(() => mockS3Client),
      }));

      const { deleteFilesByPrefix } = await import("./service");

      const result = await deleteFilesByPrefix("uploads/test/");

      expect(result.ok).toBe(false);

      if (!result.ok) {
        expect(result.error.code).toBe("unknown");
      }
    });

    test("should handle DeleteObjectsCommand throwing an error", async () => {
      vi.doMock("./constants", () => mockConstants);

      const mockS3Client = {
        send: vi
          .fn()
          .mockResolvedValueOnce({
            Contents: [{ Key: "test-file.txt" }],
          })
          .mockRejectedValueOnce(new Error("AWS Delete Error")), // DeleteObjectsCommand fails
      };

      vi.doMock("./client", () => ({
        createS3Client: vi.fn(() => mockS3Client),
      }));

      const { deleteFilesByPrefix } = await import("./service");

      const result = await deleteFilesByPrefix("uploads/test/");

      expect(mockListObjectsCommand).toHaveBeenCalledWith({
        Bucket: mockConstants.S3_BUCKET_NAME,
        Prefix: "uploads/test/",
      });

      expect(mockDeleteObjectsCommand).toHaveBeenCalledWith({
        Bucket: mockConstants.S3_BUCKET_NAME,
        Delete: {
          Objects: [{ Key: "test-file.txt" }],
        },
      });

      expect(result.ok).toBe(false);

      if (!result.ok) {
        expect(result.error.code).toBe("unknown");
      }
    });
  });
});
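These tests lean on Vitest's vi.doMock plus a dynamic import so that each case sees a fresh ./service module wired to its own mocks. A minimal sketch of that pattern, assuming vi.resetModules() runs in a beforeEach hook and using an illustrative "test-bucket" value (neither is taken from the diff above):

import { beforeEach, expect, test, vi } from "vitest";

beforeEach(() => {
  vi.resetModules(); // drop the cached ./service instance between tests (assumed setup)
});

test("sketch: register mocks, then import the module under test", async () => {
  // Both factories are illustrative stand-ins for the mocks used in the real suite.
  vi.doMock("./constants", () => ({ S3_BUCKET_NAME: "test-bucket" }));
  vi.doMock("./client", () => ({
    createS3Client: vi.fn(() => ({ send: vi.fn().mockResolvedValue({}) })),
  }));

  // Importing after vi.doMock ensures ./service picks up the mocked modules.
  const { deleteFilesByPrefix } = await import("./service");
  const result = await deleteFilesByPrefix("uploads/sketch/");

  expect(result.ok).toBe(true); // an empty ListObjects response short-circuits to ok(undefined)
});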
packages/storage/src/service.ts (new file, 227 lines)
@@ -0,0 +1,227 @@

import {
  DeleteObjectCommand,
  DeleteObjectsCommand,
  GetObjectCommand,
  HeadObjectCommand,
  ListObjectsCommand,
} from "@aws-sdk/client-s3";
import {
  type PresignedPost,
  type PresignedPostOptions,
  createPresignedPost,
} from "@aws-sdk/s3-presigned-post";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { logger } from "@formbricks/logger";
import { ErrorCode, type Result, type StorageError, err, ok } from "../types/error";
import { createS3Client } from "./client";
import { S3_BUCKET_NAME } from "./constants";

const s3Client = createS3Client();

/**
 * Get a signed URL for uploading a file to S3
 * @param fileName - The name of the file to upload
 * @param contentType - The content type of the file
 * @param filePath - The path to the file in S3
 * @param maxSize - The maximum size of the file to upload in bytes (defaults to 10MB)
 * @returns A Result containing the signed URL and presigned fields or an error: StorageError
 */
export const getSignedUploadUrl = async (
  fileName: string,
  contentType: string,
  filePath: string,
  maxSize: number = 1024 * 1024 * 10 // 10MB
): Promise<
  Result<
    {
      signedUrl: string;
      presignedFields: PresignedPost["fields"];
    },
    StorageError
  >
> => {
  try {
    if (!s3Client) {
      logger.error("Failed to get signed upload URL: S3 client is not set");
      return err({
        code: ErrorCode.S3ClientError,
      });
    }

    const postConditions: PresignedPostOptions["Conditions"] = maxSize
      ? [["content-length-range", 0, maxSize]]
      : undefined;

    if (!S3_BUCKET_NAME) {
      logger.error("Failed to get signed upload URL: S3 bucket name is not set");
      return err({
        code: ErrorCode.S3CredentialsError,
      });
    }

    const { fields, url } = await createPresignedPost(s3Client, {
      Expires: 2 * 60, // 2 minutes
      Bucket: S3_BUCKET_NAME,
      Key: `${filePath}/${fileName}`,
      Fields: {
        "Content-Type": contentType,
        "Content-Encoding": "base64",
      },
      Conditions: postConditions,
    });

    return ok({
      signedUrl: url,
      presignedFields: fields,
    });
  } catch (error) {
    logger.error({ error }, "Failed to get signed upload URL");

    return err({
      code: ErrorCode.Unknown,
    });
  }
};

/**
 * Get a signed URL for a file in S3
 * @param fileKey - The key of the file in S3
 * @returns A Result containing the signed URL or an error: StorageError
 */
export const getSignedDownloadUrl = async (fileKey: string): Promise<Result<string, StorageError>> => {
  try {
    if (!s3Client) {
      return err({
        code: ErrorCode.S3ClientError,
      });
    }

    if (!S3_BUCKET_NAME) {
      return err({
        code: ErrorCode.S3CredentialsError,
      });
    }

    // Check if file exists before generating signed URL
    const headObjectCommand = new HeadObjectCommand({
      Bucket: S3_BUCKET_NAME,
      Key: fileKey,
    });

    try {
      await s3Client.send(headObjectCommand);
    } catch (error: unknown) {
      logger.error({ error }, "Failed to check if file exists");
      if (
        (error as Error).name === "NotFound" ||
        (error as { $metadata?: { httpStatusCode?: number } }).$metadata?.httpStatusCode === 404
      ) {
        return err({
          code: ErrorCode.FileNotFoundError,
        });
      }

      logger.warn({ error, fileKey }, "HeadObject check failed; proceeding to sign download URL");
    }

    const getObjectCommand = new GetObjectCommand({
      Bucket: S3_BUCKET_NAME,
      Key: fileKey,
    });

    return ok(await getSignedUrl(s3Client, getObjectCommand, { expiresIn: 60 * 30 }));
  } catch (error) {
    logger.error({ error }, "Failed to get signed download URL");
    return err({
      code: ErrorCode.Unknown,
    });
  }
};

/**
 * Delete a file from S3
 * @param fileKey - The key of the file in S3 (e.g. "surveys/123/responses/456/file.pdf")
 * @returns A Result containing void or an error: StorageError
 */
export const deleteFile = async (fileKey: string): Promise<Result<void, StorageError>> => {
  try {
    if (!s3Client) {
      return err({
        code: ErrorCode.S3ClientError,
      });
    }

    if (!S3_BUCKET_NAME) {
      return err({
        code: ErrorCode.S3CredentialsError,
      });
    }

    const deleteObjectCommand = new DeleteObjectCommand({
      Bucket: S3_BUCKET_NAME,
      Key: fileKey,
    });

    await s3Client.send(deleteObjectCommand);

    return ok(undefined);
  } catch (error) {
    logger.error({ error }, "Failed to delete file");

    return err({
      code: ErrorCode.Unknown,
    });
  }
};

export const deleteFilesByPrefix = async (prefix: string): Promise<Result<void, StorageError>> => {
  try {
    if (!s3Client) {
      return err({
        code: ErrorCode.S3ClientError,
      });
    }

    if (!S3_BUCKET_NAME) {
      return err({
        code: ErrorCode.S3CredentialsError,
      });
    }

    const listObjectsCommand = new ListObjectsCommand({
      Bucket: S3_BUCKET_NAME,
      Prefix: prefix,
    });

    const listObjectsOutput = await s3Client.send(listObjectsCommand);

    if (!listObjectsOutput.Contents) {
      return ok(undefined);
    }

    const objectsToDelete = listObjectsOutput.Contents.map((obj) => {
      return { Key: obj.Key };
    });

    if (!objectsToDelete.length) {
      return ok(undefined);
    }

    const deleteObjectsCommand = new DeleteObjectsCommand({
      Bucket: S3_BUCKET_NAME,
      Delete: {
        Objects: objectsToDelete,
      },
    });

    await s3Client.send(deleteObjectsCommand);

    return ok(undefined);
  } catch (error) {
    logger.error({ error }, "Failed to delete files by prefix");

    return err({
      code: ErrorCode.Unknown,
    });
  }
};
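Since every helper returns a Result instead of throwing, callers branch on result.ok and read a typed error code on failure. A rough usage sketch, assuming the package's src/index.ts re-exports these helpers (the file names and paths below are illustrative, not taken from the diff):

// Hypothetical caller; key names and paths are made up for illustration.
import { deleteFilesByPrefix, getSignedDownloadUrl, getSignedUploadUrl } from "@formbricks/storage";

const handleAvatarUpload = async (): Promise<void> => {
  const upload = await getSignedUploadUrl("avatar.png", "image/png", "environments/env_123/avatars");
  if (!upload.ok) {
    // error.code is one of: "unknown", "s3_credentials_error", "s3_client_error", "file_not_found_error"
    throw new Error(`Could not create upload URL: ${upload.error.code}`);
  }
  // POST the base64-encoded file to upload.data.signedUrl, sending upload.data.presignedFields as form fields.

  const download = await getSignedDownloadUrl("environments/env_123/avatars/avatar.png");
  if (download.ok) {
    console.log("Temporary download URL:", download.data);
  }

  const cleanup = await deleteFilesByPrefix("environments/env_123/avatars/");
  if (!cleanup.ok) {
    console.error("Cleanup failed:", cleanup.error.code);
  }
};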
packages/storage/tsconfig.json (new file, 12 lines)
@@ -0,0 +1,12 @@

{
  "compilerOptions": {
    "allowImportingTsExtensions": true,
    "isolatedModules": true,
    "noEmit": true,
    "resolveJsonModule": true,
    "strict": true
  },
  "exclude": ["node_modules"],
  "extends": "@formbricks/config-typescript/js-library.json",
  "include": ["src", "package.json", "types"]
}
packages/storage/types/error.ts (new file, 31 lines)
@@ -0,0 +1,31 @@

export type Result<T, E = Error> = { ok: true; data: T } | { ok: false; error: E };

export interface ResultError<T> {
  ok: false;
  error: T;
}

export interface ResultOk<T> {
  ok: true;
  data: T;
}

export const ok = <T, E>(data: T): Result<T, E> => ({ ok: true, data });

export const okVoid = <E>(): Result<void, E> => ({ ok: true, data: undefined });

export const err = <E = Error>(error: E): ResultError<E> => ({
  ok: false,
  error,
});

export enum ErrorCode {
  Unknown = "unknown",
  S3CredentialsError = "s3_credentials_error",
  S3ClientError = "s3_client_error",
  FileNotFoundError = "file_not_found_error",
}

export interface StorageError {
  code: ErrorCode;
}
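The Result union is the backbone of the error handling above: TypeScript narrows on the ok discriminant, so data is only reachable on success and error only on failure. A small sketch using an illustrative parseJson helper (not part of the diff):

import { ErrorCode, type Result, type StorageError, err, ok } from "./error";

// Illustrative helper: wraps JSON.parse in a Result instead of letting it throw.
const parseJson = (raw: string): Result<unknown, StorageError> => {
  try {
    return ok(JSON.parse(raw));
  } catch {
    return err({ code: ErrorCode.Unknown });
  }
};

const result = parseJson('{"hello":"world"}');
if (result.ok) {
  console.log(result.data); // narrowed to the success branch
} else {
  console.error(result.error.code); // narrowed to StorageError
}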
packages/storage/vite.config.ts (new file, 33 lines)
@@ -0,0 +1,33 @@

/// <reference types="vitest" />
import { resolve } from "path";
import { PluginOption, defineConfig } from "vite";
import dts from "vite-plugin-dts";

export default defineConfig({
  build: {
    lib: {
      entry: resolve(__dirname, "src/index.ts"),
      name: "formbricksStorage",
      fileName: "index",
      formats: ["es", "cjs"],
    },
    rollupOptions: {
      external: [
        "@aws-sdk/client-s3",
        "@aws-sdk/s3-presigned-post",
        "@aws-sdk/s3-request-presigner",
        "@formbricks/logger",
      ],
    },
  },
  test: {
    environment: "node",
    globals: true,
    coverage: {
      reporter: ["text", "json", "html", "lcov"],
      exclude: ["src/types/**"],
      include: ["src/**/*.ts"],
    },
  },
  plugins: [dts({ rollupTypes: true }) as PluginOption],
});
pnpm-lock.yaml (generated, 1212 lines changed)
File diff suppressed because it is too large
@@ -2,16 +2,16 @@ sonar.projectKey=formbricks_formbricks
 sonar.organization=formbricks

 # Sources
-sonar.sources=apps/web,packages/surveys,packages/js-core
+sonar.sources=apps/web,packages/surveys,packages/js-core,packages/storage
 sonar.exclusions=**/node_modules/**,**/.next/**,**/dist/**,**/build/**,**/*.test.*,**/*.spec.*,**/__mocks__/**

 # Tests
-sonar.tests=apps/web,packages/surveys,packages/js-core
+sonar.tests=apps/web,packages/surveys,packages/js-core,packages/storage
 sonar.test.inclusions=**/*.test.*,**/*.spec.*
-sonar.javascript.lcov.reportPaths=apps/web/coverage/lcov.info,packages/surveys/coverage/lcov.info,packages/js-core/coverage/lcov.info
+sonar.javascript.lcov.reportPaths=apps/web/coverage/lcov.info,packages/surveys/coverage/lcov.info,packages/js-core/coverage/lcov.info,packages/storage/coverage/lcov.info

 # TypeScript configuration
-sonar.typescript.tsconfigPath=apps/web/tsconfig.json,packages/surveys/tsconfig.json,packages/js-core/tsconfig.json
+sonar.typescript.tsconfigPath=apps/web/tsconfig.json,packages/surveys/tsconfig.json,packages/js-core/tsconfig.json,packages/storage/tsconfig.json

 # SCM
 sonar.scm.provider=git
turbo.json (17 lines changed)
@@ -42,6 +42,23 @@
       "dependsOn": ["@formbricks/database#db:setup"],
       "persistent": true
     },
+    "@formbricks/storage#build": {
+      "dependsOn": ["@formbricks/logger#build"]
+    },
+    "@formbricks/storage#go": {
+      "cache": false,
+      "dependsOn": ["@formbricks/storage#build"],
+      "persistent": true
+    },
+    "@formbricks/storage#lint": {
+      "dependsOn": ["@formbricks/logger#build"]
+    },
+    "@formbricks/storage#test": {
+      "dependsOn": ["@formbricks/logger#build"]
+    },
+    "@formbricks/storage#test:coverage": {
+      "dependsOn": ["@formbricks/logger#build"]
+    },
     "@formbricks/surveys#build": {
       "dependsOn": ["^build"],
       "outputs": ["dist/**"]