mirror of
https://github.com/formbricks/formbricks.git
synced 2025-12-26 16:30:21 -06:00
Compare commits
3 Commits
feat/creat
...
fix/cross-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
41d60c8a02 | ||
|
|
a6269f0fd3 | ||
|
|
9c0d0a16a7 |
@@ -45,7 +45,7 @@ afterEach(() => {
|
||||
});
|
||||
|
||||
describe("LandingSidebar component", () => {
|
||||
const user = { id: "u1", name: "Alice", email: "alice@example.com", imageUrl: "" } as any;
|
||||
const user = { id: "u1", name: "Alice", email: "alice@example.com" } as any;
|
||||
const organization = { id: "o1", name: "orgOne" } as any;
|
||||
const organizations = [
|
||||
{ id: "o2", name: "betaOrg" },
|
||||
|
||||
@@ -82,7 +82,7 @@ export const LandingSidebar = ({
|
||||
id="userDropdownTrigger"
|
||||
className="w-full rounded-br-xl border-t p-4 transition-colors duration-200 hover:bg-slate-50 focus:outline-none">
|
||||
<div tabIndex={0} className={cn("flex cursor-pointer flex-row items-center gap-3")}>
|
||||
<ProfileAvatar userId={user.id} imageUrl={user.imageUrl} />
|
||||
<ProfileAvatar userId={user.id} />
|
||||
<>
|
||||
<div className="grow overflow-hidden">
|
||||
<p
|
||||
|
||||
@@ -113,7 +113,6 @@ const mockUser = {
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: new Date(),
|
||||
imageUrl: "",
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
createdAt: new Date(),
|
||||
|
||||
@@ -111,7 +111,6 @@ const mockUser = {
|
||||
id: "user1",
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
imageUrl: "http://example.com/avatar.png",
|
||||
emailVerified: new Date(),
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
|
||||
@@ -342,7 +342,7 @@ export const MainNavigation = ({
|
||||
"flex cursor-pointer flex-row items-center gap-3",
|
||||
isCollapsed ? "justify-center px-2" : "px-4"
|
||||
)}>
|
||||
<ProfileAvatar userId={user.id} imageUrl={user.imageUrl} />
|
||||
<ProfileAvatar userId={user.id} />
|
||||
{!isCollapsed && !isTextVisible && (
|
||||
<>
|
||||
<div
|
||||
|
||||
@@ -37,7 +37,6 @@ describe("EnvironmentPage", () => {
|
||||
id: mockUserId,
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
imageUrl: "",
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
createdAt: new Date(),
|
||||
|
||||
@@ -5,8 +5,6 @@ import {
|
||||
verifyUserPassword,
|
||||
} from "@/app/(app)/environments/[environmentId]/settings/(account)/profile/lib/user";
|
||||
import { EMAIL_VERIFICATION_DISABLED } from "@/lib/constants";
|
||||
import { deleteFile } from "@/lib/storage/service";
|
||||
import { getFileNameWithIdFromUrl } from "@/lib/storage/utils";
|
||||
import { getUser, updateUser } from "@/lib/user/service";
|
||||
import { authenticatedActionClient } from "@/lib/utils/action-client";
|
||||
import { AuthenticatedActionClientCtx } from "@/lib/utils/action-client/types/context";
|
||||
@@ -15,8 +13,6 @@ import { applyRateLimit } from "@/modules/core/rate-limit/helpers";
|
||||
import { rateLimitConfigs } from "@/modules/core/rate-limit/rate-limit-configs";
|
||||
import { withAuditLogging } from "@/modules/ee/audit-logs/lib/handler";
|
||||
import { sendForgotPasswordEmail, sendVerificationNewEmail } from "@/modules/email";
|
||||
import { z } from "zod";
|
||||
import { ZId } from "@formbricks/types/common";
|
||||
import { AuthenticationError, AuthorizationError, OperationNotAllowedError } from "@formbricks/types/errors";
|
||||
import {
|
||||
TUserPersonalInfoUpdateInput,
|
||||
@@ -97,58 +93,6 @@ export const updateUserAction = authenticatedActionClient.schema(ZUserPersonalIn
|
||||
)
|
||||
);
|
||||
|
||||
const ZUpdateAvatarAction = z.object({
|
||||
avatarUrl: z.string(),
|
||||
});
|
||||
|
||||
export const updateAvatarAction = authenticatedActionClient.schema(ZUpdateAvatarAction).action(
|
||||
withAuditLogging(
|
||||
"updated",
|
||||
"user",
|
||||
async ({ ctx, parsedInput }: { ctx: AuthenticatedActionClientCtx; parsedInput: Record<string, any> }) => {
|
||||
const oldObject = await getUser(ctx.user.id);
|
||||
const result = await updateUser(ctx.user.id, { imageUrl: parsedInput.avatarUrl });
|
||||
ctx.auditLoggingCtx.userId = ctx.user.id;
|
||||
ctx.auditLoggingCtx.oldObject = oldObject;
|
||||
ctx.auditLoggingCtx.newObject = result;
|
||||
return result;
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
const ZRemoveAvatarAction = z.object({
|
||||
environmentId: ZId,
|
||||
});
|
||||
|
||||
export const removeAvatarAction = authenticatedActionClient.schema(ZRemoveAvatarAction).action(
|
||||
withAuditLogging(
|
||||
"updated",
|
||||
"user",
|
||||
async ({ ctx, parsedInput }: { ctx: AuthenticatedActionClientCtx; parsedInput: Record<string, any> }) => {
|
||||
const oldObject = await getUser(ctx.user.id);
|
||||
const imageUrl = ctx.user.imageUrl;
|
||||
if (!imageUrl) {
|
||||
throw new Error("Image not found");
|
||||
}
|
||||
|
||||
const fileName = getFileNameWithIdFromUrl(imageUrl);
|
||||
if (!fileName) {
|
||||
throw new Error("Invalid filename");
|
||||
}
|
||||
|
||||
const deletionResult = await deleteFile(parsedInput.environmentId, "public", fileName);
|
||||
if (!deletionResult.success) {
|
||||
throw new Error("Deletion failed");
|
||||
}
|
||||
const result = await updateUser(ctx.user.id, { imageUrl: null });
|
||||
ctx.auditLoggingCtx.userId = ctx.user.id;
|
||||
ctx.auditLoggingCtx.oldObject = oldObject;
|
||||
ctx.auditLoggingCtx.newObject = result;
|
||||
return result;
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
export const resetPasswordAction = authenticatedActionClient.action(
|
||||
withAuditLogging(
|
||||
"passwordReset",
|
||||
|
||||
@@ -1,104 +0,0 @@
|
||||
import * as profileActions from "@/app/(app)/environments/[environmentId]/settings/(account)/profile/actions";
|
||||
import * as fileUploadHooks from "@/app/lib/fileUpload";
|
||||
import { cleanup, render, screen, waitFor } from "@testing-library/react";
|
||||
import userEvent from "@testing-library/user-event";
|
||||
import { Session } from "next-auth";
|
||||
import toast from "react-hot-toast";
|
||||
import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
|
||||
import { EditProfileAvatarForm } from "./EditProfileAvatarForm";
|
||||
|
||||
vi.mock("@/modules/ui/components/avatars", () => ({
|
||||
ProfileAvatar: ({ imageUrl }) => <div data-testid="profile-avatar">{imageUrl || "No Avatar"}</div>,
|
||||
}));
|
||||
|
||||
vi.mock("next/navigation", () => ({
|
||||
useRouter: () => ({
|
||||
refresh: vi.fn(),
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.mock("@/app/(app)/environments/[environmentId]/settings/(account)/profile/actions", () => ({
|
||||
updateAvatarAction: vi.fn(),
|
||||
removeAvatarAction: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("@/app/lib/fileUpload", () => ({
|
||||
handleFileUpload: vi.fn(),
|
||||
}));
|
||||
|
||||
const mockSession: Session = {
|
||||
user: { id: "user-id" },
|
||||
expires: "session-expires-at",
|
||||
};
|
||||
const environmentId = "test-env-id";
|
||||
|
||||
describe("EditProfileAvatarForm", () => {
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.mocked(profileActions.updateAvatarAction).mockResolvedValue({});
|
||||
vi.mocked(profileActions.removeAvatarAction).mockResolvedValue({});
|
||||
vi.mocked(fileUploadHooks.handleFileUpload).mockResolvedValue({
|
||||
url: "new-avatar.jpg",
|
||||
error: undefined,
|
||||
});
|
||||
});
|
||||
|
||||
test("renders correctly without an existing image", () => {
|
||||
render(<EditProfileAvatarForm session={mockSession} environmentId={environmentId} imageUrl={null} />);
|
||||
expect(screen.getByTestId("profile-avatar")).toHaveTextContent("No Avatar");
|
||||
expect(screen.getByText("environments.settings.profile.upload_image")).toBeInTheDocument();
|
||||
expect(screen.queryByText("environments.settings.profile.remove_image")).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("renders correctly with an existing image", () => {
|
||||
render(
|
||||
<EditProfileAvatarForm
|
||||
session={mockSession}
|
||||
environmentId={environmentId}
|
||||
imageUrl="existing-avatar.jpg"
|
||||
/>
|
||||
);
|
||||
expect(screen.getByTestId("profile-avatar")).toHaveTextContent("existing-avatar.jpg");
|
||||
expect(screen.getByText("environments.settings.profile.change_image")).toBeInTheDocument();
|
||||
expect(screen.getByText("environments.settings.profile.remove_image")).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("handles image removal successfully", async () => {
|
||||
render(
|
||||
<EditProfileAvatarForm
|
||||
session={mockSession}
|
||||
environmentId={environmentId}
|
||||
imageUrl="existing-avatar.jpg"
|
||||
/>
|
||||
);
|
||||
const removeButton = screen.getByText("environments.settings.profile.remove_image");
|
||||
await userEvent.click(removeButton);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(profileActions.removeAvatarAction).toHaveBeenCalledWith({ environmentId });
|
||||
});
|
||||
});
|
||||
|
||||
test("shows error if removeAvatarAction fails", async () => {
|
||||
vi.mocked(profileActions.removeAvatarAction).mockRejectedValue(new Error("API error"));
|
||||
render(
|
||||
<EditProfileAvatarForm
|
||||
session={mockSession}
|
||||
environmentId={environmentId}
|
||||
imageUrl="existing-avatar.jpg"
|
||||
/>
|
||||
);
|
||||
const removeButton = screen.getByText("environments.settings.profile.remove_image");
|
||||
await userEvent.click(removeButton);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(vi.mocked(toast.error)).toHaveBeenCalledWith(
|
||||
"environments.settings.profile.avatar_update_failed"
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,178 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import {
|
||||
removeAvatarAction,
|
||||
updateAvatarAction,
|
||||
} from "@/app/(app)/environments/[environmentId]/settings/(account)/profile/actions";
|
||||
import { handleFileUpload } from "@/app/lib/fileUpload";
|
||||
import { ProfileAvatar } from "@/modules/ui/components/avatars";
|
||||
import { Button } from "@/modules/ui/components/button";
|
||||
import { FormError, FormField, FormItem, FormProvider } from "@/modules/ui/components/form";
|
||||
import { zodResolver } from "@hookform/resolvers/zod";
|
||||
import { useTranslate } from "@tolgee/react";
|
||||
import { Session } from "next-auth";
|
||||
import { useRouter } from "next/navigation";
|
||||
import { useRef, useState } from "react";
|
||||
import { useForm } from "react-hook-form";
|
||||
import toast from "react-hot-toast";
|
||||
import { z } from "zod";
|
||||
|
||||
interface EditProfileAvatarFormProps {
|
||||
session: Session;
|
||||
environmentId: string;
|
||||
imageUrl: string | null;
|
||||
}
|
||||
|
||||
export const EditProfileAvatarForm = ({ session, environmentId, imageUrl }: EditProfileAvatarFormProps) => {
|
||||
const inputRef = useRef<HTMLInputElement>(null);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const router = useRouter();
|
||||
const { t } = useTranslate();
|
||||
const fileSchema =
|
||||
typeof window !== "undefined"
|
||||
? z
|
||||
.instanceof(FileList)
|
||||
.refine((files) => files.length === 1, t("environments.settings.profile.you_must_select_a_file"))
|
||||
.refine((files) => {
|
||||
const file = files[0];
|
||||
const allowedTypes = ["image/jpeg", "image/png", "image/webp"];
|
||||
return allowedTypes.includes(file.type);
|
||||
}, t("environments.settings.profile.invalid_file_type"))
|
||||
.refine((files) => {
|
||||
const file = files[0];
|
||||
const maxSize = 10 * 1024 * 1024;
|
||||
return file.size <= maxSize;
|
||||
}, t("environments.settings.profile.file_size_must_be_less_than_10mb"))
|
||||
: z.any();
|
||||
|
||||
const formSchema = z.object({
|
||||
file: fileSchema,
|
||||
});
|
||||
|
||||
type FormValues = z.infer<typeof formSchema>;
|
||||
|
||||
const form = useForm<FormValues>({
|
||||
mode: "onChange",
|
||||
resolver: zodResolver(formSchema),
|
||||
});
|
||||
|
||||
const handleUpload = async (file: File, environmentId: string) => {
|
||||
setIsLoading(true);
|
||||
try {
|
||||
if (imageUrl) {
|
||||
// If avatar image already exists, then remove it before update action
|
||||
await removeAvatarAction({ environmentId });
|
||||
}
|
||||
const { url, error } = await handleFileUpload(file, environmentId);
|
||||
|
||||
if (error) {
|
||||
toast.error(error);
|
||||
setIsLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
await updateAvatarAction({ avatarUrl: url });
|
||||
router.refresh();
|
||||
} catch (err) {
|
||||
toast.error(t("environments.settings.profile.avatar_update_failed"));
|
||||
setIsLoading(false);
|
||||
}
|
||||
|
||||
setIsLoading(false);
|
||||
};
|
||||
|
||||
const handleRemove = async () => {
|
||||
setIsLoading(true);
|
||||
|
||||
try {
|
||||
await removeAvatarAction({ environmentId });
|
||||
} catch (err) {
|
||||
toast.error(t("environments.settings.profile.avatar_update_failed"));
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
form.reset();
|
||||
}
|
||||
};
|
||||
|
||||
const onSubmit = async (data: FormValues) => {
|
||||
const file = data.file[0];
|
||||
if (file) {
|
||||
await handleUpload(file, environmentId);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div>
|
||||
<div className="relative h-10 w-10 overflow-hidden rounded-full">
|
||||
{isLoading && (
|
||||
<div className="absolute inset-0 flex items-center justify-center bg-black bg-opacity-30">
|
||||
<svg className="h-7 w-7 animate-spin text-slate-200" viewBox="0 0 24 24">
|
||||
<circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4" />
|
||||
<path
|
||||
className="opacity-75"
|
||||
fill="currentColor"
|
||||
d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<ProfileAvatar userId={session.user.id} imageUrl={imageUrl} />
|
||||
</div>
|
||||
|
||||
<FormProvider {...form}>
|
||||
<form onSubmit={form.handleSubmit(onSubmit)} className="mt-4">
|
||||
<FormField
|
||||
name="file"
|
||||
control={form.control}
|
||||
render={({ field, fieldState }) => (
|
||||
<FormItem>
|
||||
<div className="flex">
|
||||
<Button
|
||||
type="button"
|
||||
size="sm"
|
||||
className="mr-2"
|
||||
variant={!!fieldState.error?.message ? "destructive" : "secondary"}
|
||||
onClick={() => {
|
||||
inputRef.current?.click();
|
||||
}}>
|
||||
{imageUrl
|
||||
? t("environments.settings.profile.change_image")
|
||||
: t("environments.settings.profile.upload_image")}
|
||||
<input
|
||||
type="file"
|
||||
id="hiddenFileInput"
|
||||
ref={(e) => {
|
||||
field.ref(e);
|
||||
inputRef.current = e;
|
||||
}}
|
||||
className="hidden"
|
||||
accept="image/jpeg, image/png, image/webp"
|
||||
onChange={(e) => {
|
||||
field.onChange(e.target.files);
|
||||
form.handleSubmit(onSubmit)();
|
||||
}}
|
||||
/>
|
||||
</Button>
|
||||
|
||||
{imageUrl && (
|
||||
<Button
|
||||
type="button"
|
||||
className="mr-2"
|
||||
variant="destructive"
|
||||
size="sm"
|
||||
onClick={handleRemove}>
|
||||
{t("environments.settings.profile.remove_image")}
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<FormError />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
</form>
|
||||
</FormProvider>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -49,15 +49,12 @@ describe("Loading", () => {
|
||||
);
|
||||
|
||||
const loadingCards = screen.getAllByTestId("loading-card");
|
||||
expect(loadingCards).toHaveLength(3);
|
||||
expect(loadingCards).toHaveLength(2);
|
||||
|
||||
expect(loadingCards[0]).toHaveTextContent("environments.settings.profile.personal_information");
|
||||
expect(loadingCards[0]).toHaveTextContent("environments.settings.profile.update_personal_info");
|
||||
|
||||
expect(loadingCards[1]).toHaveTextContent("common.avatar");
|
||||
expect(loadingCards[1]).toHaveTextContent("environments.settings.profile.organization_identification");
|
||||
|
||||
expect(loadingCards[2]).toHaveTextContent("environments.settings.profile.delete_account");
|
||||
expect(loadingCards[2]).toHaveTextContent("environments.settings.profile.confirm_delete_account");
|
||||
expect(loadingCards[1]).toHaveTextContent("environments.settings.profile.delete_account");
|
||||
expect(loadingCards[1]).toHaveTextContent("environments.settings.profile.confirm_delete_account");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -19,11 +19,6 @@ const Loading = () => {
|
||||
{ classes: "h-6 w-64" },
|
||||
],
|
||||
},
|
||||
{
|
||||
title: t("common.avatar"),
|
||||
description: t("environments.settings.profile.organization_identification"),
|
||||
skeletonLines: [{ classes: "h-10 w-10" }, { classes: "h-8 w-24" }],
|
||||
},
|
||||
{
|
||||
title: t("environments.settings.profile.delete_account"),
|
||||
description: t("environments.settings.profile.confirm_delete_account"),
|
||||
|
||||
@@ -55,11 +55,6 @@ vi.mock(
|
||||
vi.mock("./components/DeleteAccount", () => ({
|
||||
DeleteAccount: ({ user }) => <div data-testid="delete-account">DeleteAccount: {user.id}</div>,
|
||||
}));
|
||||
vi.mock("./components/EditProfileAvatarForm", () => ({
|
||||
EditProfileAvatarForm: ({ _, environmentId }) => (
|
||||
<div data-testid="edit-profile-avatar-form">EditProfileAvatarForm: {environmentId}</div>
|
||||
),
|
||||
}));
|
||||
vi.mock("./components/EditProfileDetailsForm", () => ({
|
||||
EditProfileDetailsForm: ({ user }) => (
|
||||
<div data-testid="edit-profile-details-form">EditProfileDetailsForm: {user.id}</div>
|
||||
@@ -73,7 +68,6 @@ const mockUser = {
|
||||
id: "user-123",
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
imageUrl: "http://example.com/avatar.png",
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
notificationSettings: { alert: {}, unsubscribedOrganizationIds: [] },
|
||||
@@ -117,7 +111,6 @@ describe("ProfilePage", () => {
|
||||
"AccountSettingsNavbar: env-123 profile"
|
||||
);
|
||||
expect(screen.getByTestId("edit-profile-details-form")).toBeInTheDocument();
|
||||
expect(screen.getByTestId("edit-profile-avatar-form")).toBeInTheDocument();
|
||||
expect(screen.getByTestId("account-security")).toBeInTheDocument(); // Shown because 2FA license is enabled
|
||||
expect(screen.queryByTestId("upgrade-prompt")).not.toBeInTheDocument();
|
||||
expect(screen.getByTestId("delete-account")).toBeInTheDocument();
|
||||
|
||||
@@ -12,7 +12,6 @@ import { UpgradePrompt } from "@/modules/ui/components/upgrade-prompt";
|
||||
import { getTranslate } from "@/tolgee/server";
|
||||
import { SettingsCard } from "../../components/SettingsCard";
|
||||
import { DeleteAccount } from "./components/DeleteAccount";
|
||||
import { EditProfileAvatarForm } from "./components/EditProfileAvatarForm";
|
||||
import { EditProfileDetailsForm } from "./components/EditProfileDetailsForm";
|
||||
|
||||
const Page = async (props: { params: Promise<{ environmentId: string }> }) => {
|
||||
@@ -50,17 +49,6 @@ const Page = async (props: { params: Promise<{ environmentId: string }> }) => {
|
||||
isPasswordResetEnabled={isPasswordResetEnabled}
|
||||
/>
|
||||
</SettingsCard>
|
||||
<SettingsCard
|
||||
title={t("common.avatar")}
|
||||
description={t("environments.settings.profile.organization_identification")}>
|
||||
{user && (
|
||||
<EditProfileAvatarForm
|
||||
session={session}
|
||||
environmentId={environmentId}
|
||||
imageUrl={user.imageUrl}
|
||||
/>
|
||||
)}
|
||||
</SettingsCard>
|
||||
{user.identityProvider === "email" && (
|
||||
<SettingsCard
|
||||
title={t("common.security")}
|
||||
|
||||
@@ -126,7 +126,6 @@ const mockUser = {
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
emailVerified: new Date(),
|
||||
imageUrl: "",
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
notificationSettings: { alert: {} },
|
||||
|
||||
@@ -128,7 +128,6 @@ const mockUser = {
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: new Date(),
|
||||
imageUrl: "",
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
createdAt: new Date(),
|
||||
|
||||
@@ -145,7 +145,6 @@ const mockUser = {
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: new Date(),
|
||||
imageUrl: "",
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
createdAt: new Date(),
|
||||
|
||||
@@ -291,7 +291,6 @@ const mockUser: TUser = {
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: new Date(),
|
||||
imageUrl: "https://example.com/avatar.jpg",
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
createdAt: new Date(),
|
||||
|
||||
@@ -250,7 +250,6 @@ const mockUser: TUser = {
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: new Date(),
|
||||
imageUrl: "https://example.com/avatar.jpg",
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
createdAt: new Date(),
|
||||
|
||||
@@ -29,7 +29,7 @@ import {
|
||||
SquareStack,
|
||||
UserIcon,
|
||||
} from "lucide-react";
|
||||
import { useEffect, useMemo, useState } from "react";
|
||||
import { useCallback, useEffect, useMemo, useState } from "react";
|
||||
import { TSegment } from "@formbricks/types/segment";
|
||||
import { TSurvey } from "@formbricks/types/surveys/types";
|
||||
import { TUser } from "@formbricks/types/user";
|
||||
@@ -77,6 +77,7 @@ export const ShareSurveyModal = ({
|
||||
description: string;
|
||||
componentType: React.ComponentType<unknown>;
|
||||
componentProps: unknown;
|
||||
disabled?: boolean;
|
||||
}[] = useMemo(
|
||||
() => [
|
||||
{
|
||||
@@ -111,6 +112,7 @@ export const ShareSurveyModal = ({
|
||||
isContactsEnabled,
|
||||
isFormbricksCloud,
|
||||
},
|
||||
disabled: survey.singleUse?.enabled,
|
||||
},
|
||||
{
|
||||
id: ShareViaType.WEBSITE_EMBED,
|
||||
@@ -121,6 +123,7 @@ export const ShareSurveyModal = ({
|
||||
description: t("environments.surveys.share.embed_on_website.description"),
|
||||
componentType: WebsiteEmbedTab,
|
||||
componentProps: { surveyUrl },
|
||||
disabled: survey.singleUse?.enabled,
|
||||
},
|
||||
{
|
||||
id: ShareViaType.EMAIL,
|
||||
@@ -131,6 +134,7 @@ export const ShareSurveyModal = ({
|
||||
description: t("environments.surveys.share.send_email.description"),
|
||||
componentType: EmailTab,
|
||||
componentProps: { surveyId: survey.id, email },
|
||||
disabled: survey.singleUse?.enabled,
|
||||
},
|
||||
{
|
||||
id: ShareViaType.SOCIAL_MEDIA,
|
||||
@@ -141,6 +145,7 @@ export const ShareSurveyModal = ({
|
||||
description: t("environments.surveys.share.social_media.description"),
|
||||
componentType: SocialMediaTab,
|
||||
componentProps: { surveyUrl, surveyTitle: survey.name },
|
||||
disabled: survey.singleUse?.enabled,
|
||||
},
|
||||
{
|
||||
id: ShareViaType.QR_CODE,
|
||||
@@ -151,6 +156,7 @@ export const ShareSurveyModal = ({
|
||||
description: t("environments.surveys.summary.qr_code_description"),
|
||||
componentType: QRCodeTab,
|
||||
componentProps: { surveyUrl },
|
||||
disabled: survey.singleUse?.enabled,
|
||||
},
|
||||
{
|
||||
id: ShareViaType.DYNAMIC_POPUP,
|
||||
@@ -177,9 +183,9 @@ export const ShareSurveyModal = ({
|
||||
t,
|
||||
survey,
|
||||
publicDomain,
|
||||
setSurveyUrl,
|
||||
user.locale,
|
||||
surveyUrl,
|
||||
isReadOnly,
|
||||
environmentId,
|
||||
segments,
|
||||
isContactsEnabled,
|
||||
@@ -188,9 +194,15 @@ export const ShareSurveyModal = ({
|
||||
]
|
||||
);
|
||||
|
||||
const [activeId, setActiveId] = useState<ShareViaType | ShareSettingsType>(
|
||||
survey.type === "link" ? ShareViaType.ANON_LINKS : ShareViaType.APP
|
||||
);
|
||||
const getDefaultActiveId = useCallback(() => {
|
||||
if (survey.type !== "link") {
|
||||
return ShareViaType.APP;
|
||||
}
|
||||
|
||||
return ShareViaType.ANON_LINKS;
|
||||
}, [survey.type]);
|
||||
|
||||
const [activeId, setActiveId] = useState<ShareViaType | ShareSettingsType>(getDefaultActiveId());
|
||||
|
||||
useEffect(() => {
|
||||
if (open) {
|
||||
@@ -198,11 +210,19 @@ export const ShareSurveyModal = ({
|
||||
}
|
||||
}, [open, modalView]);
|
||||
|
||||
// Ensure active tab is not disabled - if it is, switch to default
|
||||
useEffect(() => {
|
||||
const activeTab = linkTabs.find((tab) => tab.id === activeId);
|
||||
if (activeTab?.disabled) {
|
||||
setActiveId(getDefaultActiveId());
|
||||
}
|
||||
}, [activeId, linkTabs, getDefaultActiveId]);
|
||||
|
||||
const handleOpenChange = (open: boolean) => {
|
||||
setOpen(open);
|
||||
if (!open) {
|
||||
setShowView("start");
|
||||
setActiveId(ShareViaType.ANON_LINKS);
|
||||
setActiveId(getDefaultActiveId());
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -34,6 +34,7 @@ interface ShareViewProps {
|
||||
componentProps: any;
|
||||
title: string;
|
||||
description?: string;
|
||||
disabled?: boolean;
|
||||
}>;
|
||||
activeId: ShareViaType | ShareSettingsType;
|
||||
setActiveId: React.Dispatch<React.SetStateAction<ShareViaType | ShareSettingsType>>;
|
||||
@@ -109,12 +110,13 @@ export const ShareView = ({ tabs, activeId, setActiveId }: ShareViewProps) => {
|
||||
onClick={() => setActiveId(tab.id)}
|
||||
className={cn(
|
||||
"flex w-full justify-start rounded-md p-2 text-slate-600 hover:bg-slate-100 hover:text-slate-900",
|
||||
tab.id === activeId
|
||||
tab.id === activeId && !tab.disabled
|
||||
? "bg-slate-100 font-medium text-slate-900"
|
||||
: "text-slate-700"
|
||||
)}
|
||||
tooltip={tab.label}
|
||||
isActive={tab.id === activeId}>
|
||||
isActive={tab.id === activeId}
|
||||
disabled={tab.disabled}>
|
||||
<tab.icon className="h-4 w-4 text-slate-700" />
|
||||
<span>{tab.label}</span>
|
||||
</SidebarMenuButton>
|
||||
@@ -136,9 +138,10 @@ export const ShareView = ({ tabs, activeId, setActiveId }: ShareViewProps) => {
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={() => setActiveId(tab.id)}
|
||||
disabled={tab.disabled}
|
||||
className={cn(
|
||||
"rounded-md px-4 py-2",
|
||||
tab.id === activeId
|
||||
tab.id === activeId && !tab.disabled
|
||||
? "bg-white text-slate-900 shadow-sm hover:bg-white"
|
||||
: "border-transparent text-slate-700 hover:text-slate-900"
|
||||
)}>
|
||||
|
||||
@@ -158,7 +158,6 @@ const mockUser = {
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: new Date(),
|
||||
imageUrl: "",
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
createdAt: new Date(),
|
||||
@@ -174,7 +173,6 @@ const mockSession = {
|
||||
id: mockUserId,
|
||||
name: mockUser.name,
|
||||
email: mockUser.email,
|
||||
image: mockUser.imageUrl,
|
||||
role: mockUser.role,
|
||||
plan: "free",
|
||||
status: "active",
|
||||
|
||||
@@ -118,7 +118,6 @@ describe("Page", () => {
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: null,
|
||||
imageUrl: null,
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
createdAt: new Date(),
|
||||
@@ -161,7 +160,6 @@ describe("Page", () => {
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: null,
|
||||
imageUrl: null,
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
createdAt: new Date(),
|
||||
@@ -250,7 +248,6 @@ describe("Page", () => {
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: null,
|
||||
imageUrl: null,
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
createdAt: new Date(),
|
||||
@@ -339,7 +336,6 @@ describe("Page", () => {
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: null,
|
||||
imageUrl: null,
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
createdAt: new Date(),
|
||||
|
||||
@@ -122,7 +122,6 @@ export const mockUser: TUser = {
|
||||
name: "mock User",
|
||||
email: "test@unit.com",
|
||||
emailVerified: currentDate,
|
||||
imageUrl: "https://www.google.com",
|
||||
createdAt: currentDate,
|
||||
updatedAt: currentDate,
|
||||
twoFactorEnabled: false,
|
||||
|
||||
@@ -3,7 +3,7 @@ import { IdentityProvider, Objective, Prisma, Role } from "@prisma/client";
|
||||
import { afterEach, describe, expect, test, vi } from "vitest";
|
||||
import { prisma } from "@formbricks/database";
|
||||
import { PrismaErrorType } from "@formbricks/database/types/error";
|
||||
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
|
||||
import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
|
||||
import { TOrganization } from "@formbricks/types/organizations";
|
||||
import { TUserLocale, TUserUpdateInput } from "@formbricks/types/user";
|
||||
import { deleteUser, getUser, getUserByEmail, getUsersWithOrganization, updateUser } from "./service";
|
||||
@@ -20,10 +20,6 @@ vi.mock("@formbricks/database", () => ({
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock("@/lib/fileValidation", () => ({
|
||||
isValidImageFile: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("@/lib/organization/service", () => ({
|
||||
getOrganizationsWhereUserIsSingleOwner: vi.fn(),
|
||||
deleteOrganization: vi.fn(),
|
||||
@@ -39,7 +35,6 @@ describe("User Service", () => {
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: new Date(),
|
||||
imageUrl: null,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
role: Role.project_manager,
|
||||
@@ -200,13 +195,6 @@ describe("User Service", () => {
|
||||
|
||||
await expect(updateUser("nonexistent", { name: "New Name" })).rejects.toThrow(ResourceNotFoundError);
|
||||
});
|
||||
|
||||
test("should throw InvalidInputError when invalid image URL is provided", async () => {
|
||||
const { isValidImageFile } = await import("@/lib/fileValidation");
|
||||
vi.mocked(isValidImageFile).mockReturnValue(false);
|
||||
|
||||
await expect(updateUser("user1", { imageUrl: "invalid-image-url" })).rejects.toThrow(InvalidInputError);
|
||||
});
|
||||
});
|
||||
|
||||
describe("deleteUser", () => {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import "server-only";
|
||||
import { isValidImageFile } from "@/lib/fileValidation";
|
||||
import { deleteOrganization, getOrganizationsWhereUserIsSingleOwner } from "@/lib/organization/service";
|
||||
import { deleteBrevoCustomerByEmail } from "@/modules/auth/lib/brevo";
|
||||
import { Prisma } from "@prisma/client";
|
||||
@@ -8,7 +7,7 @@ import { z } from "zod";
|
||||
import { prisma } from "@formbricks/database";
|
||||
import { PrismaErrorType } from "@formbricks/database/types/error";
|
||||
import { ZId } from "@formbricks/types/common";
|
||||
import { DatabaseError, InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
|
||||
import { DatabaseError, ResourceNotFoundError } from "@formbricks/types/errors";
|
||||
import { TUser, TUserLocale, TUserUpdateInput, ZUserUpdateInput } from "@formbricks/types/user";
|
||||
import { validateInputs } from "../utils/validate";
|
||||
|
||||
@@ -17,7 +16,6 @@ const responseSelection = {
|
||||
name: true,
|
||||
email: true,
|
||||
emailVerified: true,
|
||||
imageUrl: true,
|
||||
createdAt: true,
|
||||
updatedAt: true,
|
||||
role: true,
|
||||
@@ -79,7 +77,6 @@ export const getUserByEmail = reactCache(async (email: string): Promise<TUser |
|
||||
// function to update a user's user
|
||||
export const updateUser = async (personId: string, data: TUserUpdateInput): Promise<TUser> => {
|
||||
validateInputs([personId, ZId], [data, ZUserUpdateInput.partial()]);
|
||||
if (data.imageUrl && !isValidImageFile(data.imageUrl)) throw new InvalidInputError("Invalid image file");
|
||||
|
||||
try {
|
||||
const updatedUser = await prisma.user.update({
|
||||
|
||||
@@ -112,7 +112,6 @@ describe("withAuditLogging", () => {
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: null,
|
||||
imageUrl: null,
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email" as const,
|
||||
createdAt: new Date(),
|
||||
@@ -151,7 +150,6 @@ describe("withAuditLogging", () => {
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: null,
|
||||
imageUrl: null,
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email" as const,
|
||||
createdAt: new Date(),
|
||||
|
||||
@@ -148,7 +148,6 @@ describe("authOptions", () => {
|
||||
email: mockUser.email,
|
||||
password: mockHashedPassword,
|
||||
emailVerified: new Date(),
|
||||
imageUrl: "http://example.com/avatar.png",
|
||||
twoFactorEnabled: false,
|
||||
};
|
||||
|
||||
@@ -161,7 +160,6 @@ describe("authOptions", () => {
|
||||
id: fakeUser.id,
|
||||
email: fakeUser.email,
|
||||
emailVerified: fakeUser.emailVerified,
|
||||
imageUrl: fakeUser.imageUrl,
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -206,7 +206,6 @@ export const authOptions: NextAuthOptions = {
|
||||
id: user.id,
|
||||
email: user.email,
|
||||
emailVerified: user.emailVerified,
|
||||
imageUrl: user.imageUrl,
|
||||
};
|
||||
},
|
||||
}),
|
||||
|
||||
@@ -5,7 +5,6 @@ export const mockUser: TUser = {
|
||||
name: "mock User",
|
||||
email: "john.doe@example.com",
|
||||
emailVerified: new Date("2024-01-01T00:00:00.000Z"),
|
||||
imageUrl: "https://www.google.com",
|
||||
createdAt: new Date("2024-01-01T00:00:00.000Z"),
|
||||
updatedAt: new Date("2024-01-01T00:00:00.000Z"),
|
||||
twoFactorEnabled: false,
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { isValidImageFile } from "@/lib/fileValidation";
|
||||
import { validateInputs } from "@/lib/utils/validate";
|
||||
import { Prisma } from "@prisma/client";
|
||||
import { cache as reactCache } from "react";
|
||||
@@ -11,10 +10,6 @@ import { TUserCreateInput, TUserUpdateInput, ZUserEmail, ZUserUpdateInput } from
|
||||
export const updateUser = async (id: string, data: TUserUpdateInput) => {
|
||||
validateInputs([id, ZId], [data, ZUserUpdateInput.partial()]);
|
||||
|
||||
if (data.imageUrl && !isValidImageFile(data.imageUrl)) {
|
||||
throw new InvalidInputError("Invalid image file");
|
||||
}
|
||||
|
||||
try {
|
||||
const updatedUser = await prisma.user.update({
|
||||
where: {
|
||||
|
||||
@@ -94,7 +94,6 @@ const fullUser = {
|
||||
updatedAt: new Date(),
|
||||
email: "test@example.com",
|
||||
emailVerified: null,
|
||||
imageUrl: null,
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
organizationId: "org1",
|
||||
|
||||
@@ -28,7 +28,6 @@ describe("ResponseTimeline", () => {
|
||||
name: "Test User",
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
imageUrl: null,
|
||||
objective: null,
|
||||
role: "founder",
|
||||
email: "test@example.com",
|
||||
|
||||
@@ -51,7 +51,6 @@ export const getSSOProviders = () => [
|
||||
id: profile.sub,
|
||||
name: profile.name,
|
||||
email: profile.email,
|
||||
image: profile.picture,
|
||||
};
|
||||
},
|
||||
},
|
||||
@@ -76,7 +75,6 @@ export const getSSOProviders = () => [
|
||||
id: profile.id,
|
||||
email: profile.email,
|
||||
name: [profile.firstName, profile.lastName].filter(Boolean).join(" "),
|
||||
image: null,
|
||||
};
|
||||
},
|
||||
options: {
|
||||
|
||||
@@ -13,7 +13,6 @@ export const mockUser: TUser = {
|
||||
unsubscribedOrganizationIds: [],
|
||||
},
|
||||
emailVerified: new Date(),
|
||||
imageUrl: "https://example.com/image.png",
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "google",
|
||||
locale: "en-US",
|
||||
|
||||
@@ -56,7 +56,6 @@ const mockUser = {
|
||||
id: "user-123",
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
imageUrl: null,
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email",
|
||||
createdAt: new Date(),
|
||||
|
||||
@@ -131,7 +131,6 @@ describe("CreateOrganizationPage", () => {
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: null,
|
||||
imageUrl: null,
|
||||
twoFactorEnabled: false,
|
||||
identityProvider: "email" as const,
|
||||
createdAt: new Date(),
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import { isValidImageFile } from "@/lib/fileValidation";
|
||||
import { Prisma } from "@prisma/client";
|
||||
import { beforeEach, describe, expect, test, vi } from "vitest";
|
||||
import { prisma } from "@formbricks/database";
|
||||
import { PrismaErrorType } from "@formbricks/database/types/error";
|
||||
import { InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
|
||||
import { ResourceNotFoundError } from "@formbricks/types/errors";
|
||||
import { TUser } from "@formbricks/types/user";
|
||||
import { updateUser } from "./user";
|
||||
|
||||
@@ -24,7 +23,6 @@ describe("updateUser", () => {
|
||||
id: "user-123",
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
imageUrl: "https://example.com/image.png",
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
role: "project_manager",
|
||||
@@ -41,7 +39,6 @@ describe("updateUser", () => {
|
||||
});
|
||||
|
||||
test("successfully updates a user", async () => {
|
||||
vi.mocked(isValidImageFile).mockReturnValue(true);
|
||||
vi.mocked(prisma.user.update).mockResolvedValue(mockUser as any);
|
||||
|
||||
const updateData = { name: "Updated Name" };
|
||||
@@ -55,7 +52,6 @@ describe("updateUser", () => {
|
||||
name: true,
|
||||
email: true,
|
||||
emailVerified: true,
|
||||
imageUrl: true,
|
||||
createdAt: true,
|
||||
updatedAt: true,
|
||||
role: true,
|
||||
@@ -72,17 +68,7 @@ describe("updateUser", () => {
|
||||
expect(result).toEqual(mockUser);
|
||||
});
|
||||
|
||||
test("throws InvalidInputError when image file is invalid", async () => {
|
||||
vi.mocked(isValidImageFile).mockReturnValue(false);
|
||||
|
||||
const updateData = { imageUrl: "invalid-image.xyz" };
|
||||
await expect(updateUser("user-123", updateData)).rejects.toThrow(InvalidInputError);
|
||||
expect(prisma.user.update).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test("throws ResourceNotFoundError when user does not exist", async () => {
|
||||
vi.mocked(isValidImageFile).mockReturnValue(true);
|
||||
|
||||
const prismaError = new Prisma.PrismaClientKnownRequestError("Record not found", {
|
||||
code: PrismaErrorType.RecordDoesNotExist,
|
||||
clientVersion: "1.0.0",
|
||||
@@ -96,8 +82,6 @@ describe("updateUser", () => {
|
||||
});
|
||||
|
||||
test("re-throws other errors", async () => {
|
||||
vi.mocked(isValidImageFile).mockReturnValue(true);
|
||||
|
||||
const otherError = new Error("Some other error");
|
||||
vi.mocked(prisma.user.update).mockRejectedValue(otherError);
|
||||
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
import { isValidImageFile } from "@/lib/fileValidation";
|
||||
import { Prisma } from "@prisma/client";
|
||||
import { prisma } from "@formbricks/database";
|
||||
import { PrismaErrorType } from "@formbricks/database/types/error";
|
||||
import { InvalidInputError, ResourceNotFoundError } from "@formbricks/types/errors";
|
||||
import { ResourceNotFoundError } from "@formbricks/types/errors";
|
||||
import { TUser, TUserUpdateInput } from "@formbricks/types/user";
|
||||
|
||||
// function to update a user's user
|
||||
export const updateUser = async (personId: string, data: TUserUpdateInput): Promise<TUser> => {
|
||||
if (data.imageUrl && !isValidImageFile(data.imageUrl)) throw new InvalidInputError("Invalid image file");
|
||||
|
||||
try {
|
||||
const updatedUser = await prisma.user.update({
|
||||
where: {
|
||||
@@ -20,7 +17,6 @@ export const updateUser = async (personId: string, data: TUserUpdateInput): Prom
|
||||
name: true,
|
||||
email: true,
|
||||
emailVerified: true,
|
||||
imageUrl: true,
|
||||
createdAt: true,
|
||||
updatedAt: true,
|
||||
role: true,
|
||||
|
||||
@@ -12,13 +12,6 @@ vi.mock("boring-avatars", () => ({
|
||||
),
|
||||
}));
|
||||
|
||||
// Mock next/image
|
||||
vi.mock("next/image", () => ({
|
||||
default: ({ src, width, height, className, alt }: any) => (
|
||||
<img src={src} width={width} height={height} className={className} alt={alt} data-testid="next-image" />
|
||||
),
|
||||
}));
|
||||
|
||||
describe("Avatar Components", () => {
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
@@ -44,7 +37,7 @@ describe("Avatar Components", () => {
|
||||
});
|
||||
|
||||
describe("ProfileAvatar", () => {
|
||||
test("renders Boring Avatar when imageUrl is not provided", () => {
|
||||
test("renders Boring Avatar", () => {
|
||||
render(<ProfileAvatar userId="user-123" />);
|
||||
|
||||
const avatar = screen.getByTestId("boring-avatar-bauhaus");
|
||||
@@ -52,32 +45,5 @@ describe("Avatar Components", () => {
|
||||
expect(avatar).toHaveAttribute("data-size", "40");
|
||||
expect(avatar).toHaveAttribute("data-name", "user-123");
|
||||
});
|
||||
|
||||
test("renders Boring Avatar when imageUrl is null", () => {
|
||||
render(<ProfileAvatar userId="user-123" imageUrl={null} />);
|
||||
|
||||
const avatar = screen.getByTestId("boring-avatar-bauhaus");
|
||||
expect(avatar).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("renders Image component when imageUrl is provided", () => {
|
||||
render(<ProfileAvatar userId="user-123" imageUrl="https://example.com/avatar.jpg" />);
|
||||
|
||||
const image = screen.getByTestId("next-image");
|
||||
expect(image).toBeInTheDocument();
|
||||
expect(image).toHaveAttribute("src", "https://example.com/avatar.jpg");
|
||||
expect(image).toHaveAttribute("width", "40");
|
||||
expect(image).toHaveAttribute("height", "40");
|
||||
expect(image).toHaveAttribute("alt", "Avatar placeholder");
|
||||
expect(image).toHaveClass("h-10", "w-10", "rounded-full", "object-cover");
|
||||
});
|
||||
|
||||
test("renders Image component with different imageUrl", () => {
|
||||
render(<ProfileAvatar userId="user-123" imageUrl="https://example.com/different-avatar.png" />);
|
||||
|
||||
const image = screen.getByTestId("next-image");
|
||||
expect(image).toBeInTheDocument();
|
||||
expect(image).toHaveAttribute("src", "https://example.com/different-avatar.png");
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import Avatar from "boring-avatars";
|
||||
import Image from "next/image";
|
||||
|
||||
const colors = ["#00C4B8", "#ccfbf1", "#334155"];
|
||||
|
||||
@@ -13,20 +12,8 @@ export const PersonAvatar: React.FC<PersonAvatarProps> = ({ personId }) => {
|
||||
|
||||
interface ProfileAvatar {
|
||||
userId: string;
|
||||
imageUrl?: string | null;
|
||||
}
|
||||
|
||||
export const ProfileAvatar: React.FC<ProfileAvatar> = ({ userId, imageUrl }) => {
|
||||
if (imageUrl) {
|
||||
return (
|
||||
<Image
|
||||
src={imageUrl}
|
||||
width="40"
|
||||
height="40"
|
||||
className="h-10 w-10 rounded-full object-cover"
|
||||
alt="Avatar placeholder"
|
||||
/>
|
||||
);
|
||||
}
|
||||
export const ProfileAvatar: React.FC<ProfileAvatar> = ({ userId }) => {
|
||||
return <Avatar size={40} name={userId} variant="bauhaus" colors={colors} />;
|
||||
};
|
||||
|
||||
@@ -82,7 +82,7 @@
|
||||
"@vercel/functions": "2.2.8",
|
||||
"@vercel/og": "0.8.5",
|
||||
"bcryptjs": "3.0.2",
|
||||
"boring-avatars": "1.11.2",
|
||||
"boring-avatars": "2.0.1",
|
||||
"cache-manager": "6.4.3",
|
||||
"class-variance-authority": "0.7.1",
|
||||
"clsx": "2.1.1",
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
### Nix Flakes
|
||||
|
||||
This project uses Nix Flakes via direnv.
|
||||
|
||||
Ensure your `~/.config/nix/nix.conf` (or `/etc/nix/nix.conf`) contains:
|
||||
|
||||
```bash
|
||||
experimental-features = nix-command flakes
|
||||
```
|
||||
|
||||
If your environment does not support flakes, you can still enter the development shell with:
|
||||
|
||||
```bash
|
||||
nix develop
|
||||
```
|
||||
@@ -16,28 +16,12 @@
|
||||
inherit system;
|
||||
config.allowUnfree = true;
|
||||
};
|
||||
helm-with-plugins = (
|
||||
pkgs.wrapHelm pkgs.kubernetes-helm {
|
||||
plugins = with pkgs.kubernetes-helmPlugins; [
|
||||
helm-secrets
|
||||
helm-diff
|
||||
helm-s3
|
||||
helm-git
|
||||
];
|
||||
}
|
||||
);
|
||||
helmfile-with-plugins = pkgs.helmfile-wrapped.override {
|
||||
inherit (helm-with-plugins) pluginsDir;
|
||||
};
|
||||
in
|
||||
with pkgs;
|
||||
{
|
||||
devShells.default = mkShell {
|
||||
buildInputs = [
|
||||
awscli
|
||||
kubectl
|
||||
helm-with-plugins
|
||||
helmfile-with-plugins
|
||||
terraform
|
||||
];
|
||||
};
|
||||
|
||||
@@ -70,9 +70,6 @@ deployment:
|
||||
app-env:
|
||||
nameSuffix: app-env
|
||||
type: secret
|
||||
db-secrets:
|
||||
nameSuffix: db-secrets
|
||||
type: secret
|
||||
nodeSelector:
|
||||
karpenter.sh/capacity-type: spot
|
||||
reloadOnChange: true
|
||||
@@ -106,9 +103,6 @@ externalSecret:
|
||||
app-secrets:
|
||||
dataFrom:
|
||||
key: stage/formbricks/secrets
|
||||
db-secrets:
|
||||
dataFrom:
|
||||
key: stage/formbricks/terraform/rds/credentials
|
||||
refreshInterval: 1m
|
||||
secretStore:
|
||||
kind: ClusterSecretStore
|
||||
|
||||
@@ -1,121 +0,0 @@
|
||||
resource "aws_sns_topic" "this" {
|
||||
name = "lambda-metrics-alarm"
|
||||
}
|
||||
|
||||
module "alarm" {
|
||||
source = "terraform-aws-modules/cloudwatch/aws//modules/metric-alarm"
|
||||
version = "~> 3.0"
|
||||
|
||||
alarm_name = "lambda-duration-lbda-rotate-db-secret"
|
||||
alarm_description = "Lambda duration is too high"
|
||||
comparison_operator = "GreaterThanOrEqualToThreshold"
|
||||
evaluation_periods = 1
|
||||
threshold = 10
|
||||
period = 60
|
||||
unit = "Milliseconds"
|
||||
|
||||
namespace = "AWS/Lambda"
|
||||
metric_name = "Duration"
|
||||
statistic = "Maximum"
|
||||
|
||||
dimensions = {
|
||||
FunctionName = module.lambda_rotate_db_secret.lambda_function_name
|
||||
}
|
||||
|
||||
alarm_actions = [aws_sns_topic.this.arn]
|
||||
}
|
||||
|
||||
module "alarm_metric_query" {
|
||||
source = "terraform-aws-modules/cloudwatch/aws//modules/metric-alarm"
|
||||
version = "~> 3.0"
|
||||
|
||||
alarm_name = "mq-lambda-duration-lbda-rotate-db-secret"
|
||||
alarm_description = "Lambda error rate is too high"
|
||||
comparison_operator = "GreaterThanOrEqualToThreshold"
|
||||
evaluation_periods = 1
|
||||
threshold = 10
|
||||
|
||||
metric_query = [{
|
||||
id = "e1"
|
||||
|
||||
return_data = true
|
||||
expression = "m2/m1*100"
|
||||
label = "Error Rate"
|
||||
}, {
|
||||
id = "m1"
|
||||
|
||||
metric = [{
|
||||
namespace = "AWS/Lambda"
|
||||
metric_name = "Invocations"
|
||||
period = 60
|
||||
stat = "Sum"
|
||||
unit = "Count"
|
||||
|
||||
dimensions = {
|
||||
FunctionName = module.lambda_rotate_db_secret.lambda_function_name
|
||||
}
|
||||
}]
|
||||
}, {
|
||||
id = "m2"
|
||||
|
||||
metric = [{
|
||||
namespace = "AWS/Lambda"
|
||||
metric_name = "Errors"
|
||||
period = 60
|
||||
stat = "Sum"
|
||||
unit = "Count"
|
||||
|
||||
dimensions = {
|
||||
FunctionName = module.lambda_rotate_db_secret.lambda_function_name
|
||||
}
|
||||
}]
|
||||
}]
|
||||
|
||||
alarm_actions = [aws_sns_topic.this.arn]
|
||||
|
||||
tags = {
|
||||
Secure = "maybe"
|
||||
}
|
||||
}
|
||||
|
||||
module "alarm_anomaly" {
|
||||
source = "terraform-aws-modules/cloudwatch/aws//modules/metric-alarm"
|
||||
version = "~> 3.0"
|
||||
|
||||
alarm_name = "lambda-invocations-anomaly-lbda-rotate-db-secret"
|
||||
alarm_description = "Lambda invocations anomaly"
|
||||
comparison_operator = "LessThanLowerOrGreaterThanUpperThreshold"
|
||||
evaluation_periods = 1
|
||||
threshold_metric_id = "ad1"
|
||||
|
||||
metric_query = [{
|
||||
id = "ad1"
|
||||
|
||||
return_data = true
|
||||
expression = "ANOMALY_DETECTION_BAND(m1, 2)"
|
||||
label = "Invocations (expected)"
|
||||
return_data = "true"
|
||||
},
|
||||
{
|
||||
id = "m1"
|
||||
|
||||
metric = [{
|
||||
namespace = "AWS/Lambda"
|
||||
metric_name = "Invocations"
|
||||
period = 60
|
||||
stat = "Sum"
|
||||
unit = "Count"
|
||||
|
||||
dimensions = {
|
||||
FunctionName = module.lambda_rotate_db_secret.lambda_function_name
|
||||
}
|
||||
}]
|
||||
return_data = "true"
|
||||
}]
|
||||
|
||||
alarm_actions = [aws_sns_topic.this.arn]
|
||||
|
||||
tags = {
|
||||
Secure = "maybe"
|
||||
}
|
||||
}
|
||||
@@ -1,20 +0,0 @@
|
||||
data "aws_region" "selected" {}
|
||||
|
||||
data "aws_secretsmanager_secret" "rds_credentials" {
|
||||
arn = data.terraform_remote_state.main.outputs.rds_secret_staging_arn
|
||||
}
|
||||
|
||||
# Default KMS key for Secrets Manager
|
||||
data "aws_kms_key" "secretsmanager" {
|
||||
key_id = "alias/aws/secretsmanager"
|
||||
}
|
||||
|
||||
data "terraform_remote_state" "main" {
|
||||
backend = "s3"
|
||||
|
||||
config = {
|
||||
bucket = "715841356175-terraform"
|
||||
key = "terraform.tfstate"
|
||||
region = "eu-central-1"
|
||||
}
|
||||
}
|
||||
@@ -1,71 +0,0 @@
|
||||
resource "aws_lambda_layer_version" "psycopg2_layer" {
|
||||
layer_name = "psycopg2-layer"
|
||||
description = "Psycopg2 PostgreSQL driver for AWS Lambda"
|
||||
compatible_runtimes = ["python3.9"]
|
||||
filename = "./lambda/deps/psycopg2-layer.zip"
|
||||
}
|
||||
|
||||
module "lambda_rotate_db_secret" {
|
||||
source = "terraform-aws-modules/lambda/aws"
|
||||
version = "7.20.1"
|
||||
|
||||
function_name = "lbda-rotate-db-secret"
|
||||
description = "Rotate Aurora Serverless PostgreSQL DB secret"
|
||||
handler = "lambda_function.lambda_handler"
|
||||
source_path = "./lambda/src/lambda_function.py"
|
||||
create_package = true
|
||||
package_type = "Zip"
|
||||
runtime = "python3.9"
|
||||
timeout = 30
|
||||
memory_size = 128
|
||||
layers = [aws_lambda_layer_version.psycopg2_layer.arn]
|
||||
create_role = true
|
||||
role_name = "iamr-lbda-rotate-db-secret-role"
|
||||
policy_name = "iamp-lbda-rotate-db-secret-policy"
|
||||
attach_policy_json = true
|
||||
policy_json = jsonencode({
|
||||
Version = "2012-10-17"
|
||||
Statement = [
|
||||
{
|
||||
Action = [
|
||||
"kms:GenerateDataKey",
|
||||
"kms:Encrypt",
|
||||
"kms:DescribeKey",
|
||||
"kms:Decrypt"
|
||||
]
|
||||
Effect = "Allow"
|
||||
Resource = "*"
|
||||
Sid = "AllowKMS"
|
||||
},
|
||||
{
|
||||
Action = [
|
||||
"secretsmanager:UpdateSecretVersionStage",
|
||||
"secretsmanager:PutSecretValue",
|
||||
"secretsmanager:GetSecretValue",
|
||||
"secretsmanager:DescribeSecret"
|
||||
]
|
||||
Effect = "Allow"
|
||||
Resource = "*"
|
||||
Sid = "AllowSecretsManager"
|
||||
},
|
||||
{
|
||||
Action = "secretsmanager:GetRandomPassword"
|
||||
Effect = "Allow"
|
||||
Resource = "*"
|
||||
Sid = "AllowSecretsManagerRandomPassword"
|
||||
}
|
||||
]
|
||||
})
|
||||
tags = {
|
||||
Environment = "dev"
|
||||
Project = "aurora-serverless"
|
||||
Zone = "db-zone"
|
||||
}
|
||||
}
|
||||
|
||||
resource "aws_lambda_permission" "AllowSecretsManager" {
|
||||
statement_id = "AllowSecretsManager"
|
||||
action = "lambda:InvokeFunction"
|
||||
function_name = module.lambda_rotate_db_secret.lambda_function_name
|
||||
principal = "secretsmanager.amazonaws.com"
|
||||
}
|
||||
@@ -1,589 +0,0 @@
|
||||
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
# SPDX-License-Identifier: MIT-0
|
||||
# https://github.com/aws-samples/aws-secrets-manager-rotation-lambdas/blob/master/SecretsManagerRDSPostgreSQLRotationSingleUser/lambda_function.py
|
||||
# Updated this function library from pg, pgdb to psycopg2 to support python3.9
|
||||
|
||||
import re
|
||||
import boto3
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import psycopg2
|
||||
from psycopg2 import sql
|
||||
|
||||
logger = logging.getLogger()
|
||||
logger.setLevel(logging.INFO)
|
||||
|
||||
|
||||
def lambda_handler(event, context):
|
||||
"""Secrets Manager RDS PostgreSQL Handler
|
||||
|
||||
This handler uses the single-user rotation scheme to rotate an RDS PostgreSQL user credential. This rotation
|
||||
scheme logs into the database as the user and rotates the user's own password, immediately invalidating the
|
||||
user's previous password.
|
||||
|
||||
The Secret SecretString is expected to be a JSON string with the following format:
|
||||
{
|
||||
'engine': <required: must be set to 'postgres'>,
|
||||
'host': <required: instance host name>,
|
||||
'username': <required: username>,
|
||||
'password': <required: password>,
|
||||
'dbname': <optional: database name, default to 'postgres'>,
|
||||
'port': <optional: if not specified, default port 5432 will be used>
|
||||
}
|
||||
|
||||
Args:
|
||||
event (dict): Lambda dictionary of event parameters. These keys must include the following:
|
||||
- SecretId: The secret ARN or identifier
|
||||
- ClientRequestToken: The ClientRequestToken of the secret version
|
||||
- Step: The rotation step (one of createSecret, setSecret, testSecret, or finishSecret)
|
||||
|
||||
context (LambdaContext): The Lambda runtime information
|
||||
|
||||
Raises:
|
||||
ResourceNotFoundException: If the secret with the specified arn and stage does not exist
|
||||
|
||||
ValueError: If the secret is not properly configured for rotation
|
||||
|
||||
KeyError: If the secret json does not contain the expected keys
|
||||
|
||||
"""
|
||||
arn = event["SecretId"]
|
||||
token = event["ClientRequestToken"]
|
||||
step = event["Step"]
|
||||
|
||||
# Setup the client
|
||||
service_client = boto3.client(
|
||||
"secretsmanager", endpoint_url=os.environ["SECRETS_MANAGER_ENDPOINT"]
|
||||
)
|
||||
|
||||
# Make sure the version is staged correctly
|
||||
metadata = service_client.describe_secret(SecretId=arn)
|
||||
if "RotationEnabled" in metadata and not metadata["RotationEnabled"]:
|
||||
logger.error("Secret %s is not enabled for rotation" % arn)
|
||||
raise ValueError("Secret %s is not enabled for rotation" % arn)
|
||||
versions = metadata["VersionIdsToStages"]
|
||||
if token not in versions:
|
||||
logger.error(
|
||||
"Secret version %s has no stage for rotation of secret %s." % (token, arn)
|
||||
)
|
||||
raise ValueError(
|
||||
"Secret version %s has no stage for rotation of secret %s." % (token, arn)
|
||||
)
|
||||
if "AWSCURRENT" in versions[token]:
|
||||
logger.info(
|
||||
"Secret version %s already set as AWSCURRENT for secret %s." % (token, arn)
|
||||
)
|
||||
return
|
||||
elif "AWSPENDING" not in versions[token]:
|
||||
logger.error(
|
||||
"Secret version %s not set as AWSPENDING for rotation of secret %s."
|
||||
% (token, arn)
|
||||
)
|
||||
raise ValueError(
|
||||
"Secret version %s not set as AWSPENDING for rotation of secret %s."
|
||||
% (token, arn)
|
||||
)
|
||||
|
||||
# Call the appropriate step
|
||||
if step == "createSecret":
|
||||
create_secret(service_client, arn, token)
|
||||
|
||||
elif step == "setSecret":
|
||||
set_secret(service_client, arn, token)
|
||||
|
||||
elif step == "testSecret":
|
||||
test_secret(service_client, arn, token)
|
||||
|
||||
elif step == "finishSecret":
|
||||
finish_secret(service_client, arn, token)
|
||||
|
||||
else:
|
||||
logger.error(
|
||||
"lambda_handler: Invalid step parameter %s for secret %s" % (step, arn)
|
||||
)
|
||||
raise ValueError("Invalid step parameter %s for secret %s" % (step, arn))
|
||||
|
||||
|
||||
def create_secret(service_client, arn, token):
|
||||
"""Generate a new secret
|
||||
|
||||
This method first checks for the existence of a secret for the passed in token. If one does not exist, it will generate a
|
||||
new secret and put it with the passed in token.
|
||||
|
||||
Args:
|
||||
service_client (client): The secrets manager service client
|
||||
|
||||
arn (string): The secret ARN or other identifier
|
||||
|
||||
token (string): The ClientRequestToken associated with the secret version
|
||||
|
||||
Raises:
|
||||
ValueError: If the current secret is not valid JSON
|
||||
|
||||
KeyError: If the secret json does not contain the expected keys
|
||||
|
||||
"""
|
||||
# Make sure the current secret exists
|
||||
current_dict = get_secret_dict(service_client, arn, "AWSCURRENT")
|
||||
|
||||
# Now try to get the secret version, if that fails, put a new secret
|
||||
try:
|
||||
get_secret_dict(service_client, arn, "AWSPENDING", token)
|
||||
logger.info("createSecret: Successfully retrieved secret for %s." % arn)
|
||||
except service_client.exceptions.ResourceNotFoundException:
|
||||
# Generate a random password
|
||||
current_dict["password"] = get_random_password(service_client)
|
||||
# Put the secret
|
||||
service_client.put_secret_value(
|
||||
SecretId=arn,
|
||||
ClientRequestToken=token,
|
||||
SecretString=json.dumps(current_dict),
|
||||
VersionStages=["AWSPENDING"],
|
||||
)
|
||||
logger.info(
|
||||
"createSecret: Successfully put secret for ARN %s and version %s."
|
||||
% (arn, token)
|
||||
)
|
||||
|
||||
|
||||
def set_secret(service_client, arn, token):
|
||||
"""Set the pending secret in the database
|
||||
|
||||
This method tries to login to the database with the AWSPENDING secret and returns on success. If that fails, it
|
||||
tries to login with the AWSCURRENT and AWSPREVIOUS secrets. If either one succeeds, it sets the AWSPENDING password
|
||||
as the user password in the database. Else, it throws a ValueError.
|
||||
|
||||
Args:
|
||||
service_client (client): The secrets manager service client
|
||||
|
||||
arn (string): The secret ARN or other identifier
|
||||
|
||||
token (string): The ClientRequestToken associated with the secret version
|
||||
|
||||
Raises:
|
||||
ResourceNotFoundException: If the secret with the specified arn and stage does not exist
|
||||
|
||||
ValueError: If the secret is not valid JSON or valid credentials are found to login to the database
|
||||
|
||||
KeyError: If the secret json does not contain the expected keys
|
||||
|
||||
"""
|
||||
try:
|
||||
previous_dict = get_secret_dict(service_client, arn, "AWSPREVIOUS")
|
||||
except (service_client.exceptions.ResourceNotFoundException, KeyError):
|
||||
previous_dict = None
|
||||
current_dict = get_secret_dict(service_client, arn, "AWSCURRENT")
|
||||
pending_dict = get_secret_dict(service_client, arn, "AWSPENDING", token)
|
||||
|
||||
# First try to login with the pending secret, if it succeeds, return
|
||||
conn = get_connection(pending_dict)
|
||||
if conn:
|
||||
conn.close()
|
||||
logger.info(
|
||||
"setSecret: AWSPENDING secret is already set as password in PostgreSQL DB for secret arn %s."
|
||||
% arn
|
||||
)
|
||||
return
|
||||
|
||||
# Make sure the user from current and pending match
|
||||
if current_dict["username"] != pending_dict["username"]:
|
||||
logger.error(
|
||||
"setSecret: Attempting to modify user %s other than current user %s"
|
||||
% (pending_dict["username"], current_dict["username"])
|
||||
)
|
||||
raise ValueError(
|
||||
"Attempting to modify user %s other than current user %s"
|
||||
% (pending_dict["username"], current_dict["username"])
|
||||
)
|
||||
|
||||
# Make sure the host from current and pending match
|
||||
if current_dict["host"] != pending_dict["host"]:
|
||||
logger.error(
|
||||
"setSecret: Attempting to modify user for host %s other than current host %s"
|
||||
% (pending_dict["host"], current_dict["host"])
|
||||
)
|
||||
raise ValueError(
|
||||
"Attempting to modify user for host %s other than current host %s"
|
||||
% (pending_dict["host"], current_dict["host"])
|
||||
)
|
||||
|
||||
# Now try the current password
|
||||
conn = get_connection(current_dict)
|
||||
|
||||
# If both current and pending do not work, try previous
|
||||
if not conn and previous_dict:
|
||||
# Update previous_dict to leverage current SSL settings
|
||||
previous_dict.pop("ssl", None)
|
||||
if "ssl" in current_dict:
|
||||
previous_dict["ssl"] = current_dict["ssl"]
|
||||
|
||||
conn = get_connection(previous_dict)
|
||||
|
||||
# Make sure the user/host from previous and pending match
|
||||
if previous_dict["username"] != pending_dict["username"]:
|
||||
logger.error(
|
||||
"setSecret: Attempting to modify user %s other than previous valid user %s"
|
||||
% (pending_dict["username"], previous_dict["username"])
|
||||
)
|
||||
raise ValueError(
|
||||
"Attempting to modify user %s other than previous valid user %s"
|
||||
% (pending_dict["username"], previous_dict["username"])
|
||||
)
|
||||
if previous_dict["host"] != pending_dict["host"]:
|
||||
logger.error(
|
||||
"setSecret: Attempting to modify user for host %s other than previous valid host %s"
|
||||
% (pending_dict["host"], previous_dict["host"])
|
||||
)
|
||||
raise ValueError(
|
||||
"Attempting to modify user for host %s other than current previous valid %s"
|
||||
% (pending_dict["host"], previous_dict["host"])
|
||||
)
|
||||
|
||||
# If we still don't have a connection, raise a ValueError
|
||||
if not conn:
|
||||
logger.error(
|
||||
"setSecret: Unable to log into database with previous, current, or pending secret of secret arn %s"
|
||||
% arn
|
||||
)
|
||||
raise ValueError(
|
||||
"Unable to log into database with previous, current, or pending secret of secret arn %s"
|
||||
% arn
|
||||
)
|
||||
|
||||
# Now set the password to the pending password
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
# Get escaped username via quote_ident
|
||||
cur.execute("SELECT quote_ident(%s)", (pending_dict["username"],))
|
||||
escaped_username = cur.fetchone()[0]
|
||||
|
||||
alter_role = "ALTER USER %s" % escaped_username
|
||||
cur.execute(alter_role + " WITH PASSWORD %s", (pending_dict["password"],))
|
||||
conn.commit()
|
||||
logger.info(
|
||||
"setSecret: Successfully set password for user %s in PostgreSQL DB for secret arn %s."
|
||||
% (pending_dict["username"], arn)
|
||||
)
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def test_secret(service_client, arn, token):
|
||||
"""Test the pending secret against the database
|
||||
|
||||
This method tries to log into the database with the secrets staged with AWSPENDING and runs
|
||||
a permissions check to ensure the user has the corrrect permissions.
|
||||
|
||||
Args:
|
||||
service_client (client): The secrets manager service client
|
||||
|
||||
arn (string): The secret ARN or other identifier
|
||||
|
||||
token (string): The ClientRequestToken associated with the secret version
|
||||
|
||||
Raises:
|
||||
ResourceNotFoundException: If the secret with the specified arn and stage does not exist
|
||||
|
||||
ValueError: If the secret is not valid JSON or valid credentials are found to login to the database
|
||||
|
||||
KeyError: If the secret json does not contain the expected keys
|
||||
|
||||
"""
|
||||
# Try to login with the pending secret, if it succeeds, return
|
||||
conn = get_connection(get_secret_dict(service_client, arn, "AWSPENDING", token))
|
||||
if conn:
|
||||
# This is where the lambda will validate the user's permissions. Uncomment/modify the below lines to
|
||||
# tailor these validations to your needs
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute("SELECT NOW()")
|
||||
conn.commit()
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
logger.info(
|
||||
"testSecret: Successfully signed into PostgreSQL DB with AWSPENDING secret in %s."
|
||||
% arn
|
||||
)
|
||||
return
|
||||
else:
|
||||
logger.error(
|
||||
"testSecret: Unable to log into database with pending secret of secret ARN %s"
|
||||
% arn
|
||||
)
|
||||
raise ValueError(
|
||||
"Unable to log into database with pending secret of secret ARN %s" % arn
|
||||
)
|
||||
|
||||
|
||||
def finish_secret(service_client, arn, token):
    """Promote the AWSPENDING secret version to AWSCURRENT.

    Finishes the rotation by moving the AWSCURRENT staging label onto the
    version identified by ``token`` and removing it from the previously
    current version. If ``token`` is already staged AWSCURRENT, this is a
    no-op.

    Args:
        service_client (client): The secrets manager service client

        arn (string): The secret ARN or other identifier

        token (string): The ClientRequestToken associated with the secret version

    """
    # Look up which version currently carries the AWSCURRENT stage
    metadata = service_client.describe_secret(SecretId=arn)
    current_version = None
    for version, stages in metadata["VersionIdsToStages"].items():
        if "AWSCURRENT" in stages:
            if version == token:
                # Nothing to do -- the target version is already current
                logger.info(
                    "finishSecret: Version %s already marked as AWSCURRENT for %s"
                    % (version, arn)
                )
                return
            current_version = version
            break

    # Move the AWSCURRENT stage from the old version onto the pending one
    service_client.update_secret_version_stage(
        SecretId=arn,
        VersionStage="AWSCURRENT",
        MoveToVersionId=token,
        RemoveFromVersionId=current_version,
    )
    logger.info(
        "finishSecret: Successfully set AWSCURRENT stage to version %s for secret %s."
        % (token, arn)
    )
|
||||
|
||||
|
||||
def get_connection(secret_dict):
    """Gets a connection to PostgreSQL DB from a secret dictionary

    Uses the connectivity information in the secret dictionary to attempt a
    database connection. When the initial SSL attempt fails and fall-back is
    enabled, a second, non-SSL attempt is made.

    Args:
        secret_dict (dict): The Secret Dictionary

    Returns:
        Connection: The psycopg2 connection object if successful. None otherwise

    Raises:
        KeyError: If the secret json does not contain the expected keys

    """
    # Connection coordinates, with conventional PostgreSQL defaults
    port = int(secret_dict.get("port", 5432))
    dbname = secret_dict.get("dbname", "postgres")

    # Resolve the desired SSL behaviour from the secret
    use_ssl, fall_back = get_ssl_config(secret_dict)

    # First attempt, honouring the SSL preference
    conn = connect_and_authenticate(secret_dict, port, dbname, use_ssl)
    if conn is None and fall_back:
        # Retry once without SSL when permitted
        conn = connect_and_authenticate(secret_dict, port, dbname, False)
    return conn
|
||||
|
||||
|
||||
def get_ssl_config(secret_dict):
    """Derive the SSL connection settings from the secret dictionary.

    The optional 'ssl' key is interpreted as follows:
      - key missing or unrecognised type/value -> (True, True)
      - boolean value                          -> (value, False)
      - string "true" (case-insensitive)       -> (True, False)
      - string "false" (case-insensitive)      -> (False, False)

    Args:
        secret_dict (dict): The Secret Dictionary

    Returns:
        Tuple(use_ssl, fall_back): SSL configuration
        - use_ssl (bool): Flag indicating if an SSL connection should be attempted
        - fall_back (bool): Flag indicating if non-SSL connection should be attempted if SSL connection fails

    """
    # Permissive default when the key is absent: try SSL, allow fallback
    if "ssl" not in secret_dict:
        return True, True

    value = secret_dict["ssl"]

    # A boolean is taken at face value and disables the non-SSL fallback
    if isinstance(value, bool):
        return value, False

    # Strings "true"/"false" (any case) are honoured; anything else falls
    # through to the permissive default below
    if isinstance(value, str):
        lowered = value.lower()
        if lowered == "true":
            return True, False
        if lowered == "false":
            return False, False

    # Unknown type or value: default to SSL with fallback enabled
    return True, True
|
||||
|
||||
|
||||
def connect_and_authenticate(secret_dict, port, dbname, use_ssl):
    """Attempt to connect and authenticate to a PostgreSQL instance using psycopg2

    Args:
        secret_dict (dict): The Secret Dictionary

        port (int): The database port to connect to

        dbname (str): Name of the database

        use_ssl (bool): Flag indicating whether connection should use SSL/TLS

    Returns:
        Connection: The psycopg2 connection object if successful. None otherwise
    """
    try:
        # Base connection parameters shared by SSL and non-SSL attempts
        conn_params = {
            "host": secret_dict["host"],
            "user": secret_dict["username"],
            "password": secret_dict["password"],
            "dbname": dbname,
            "port": port,
            "connect_timeout": 5,
        }

        if use_ssl:
            # Full certificate verification against the platform trust store
            conn_params["sslmode"] = "verify-full"
            conn_params["sslrootcert"] = "/etc/pki/tls/cert.pem"
        else:
            conn_params["sslmode"] = "disable"

        conn = psycopg2.connect(**conn_params)
        logging.info(
            "Successfully established %s connection as user '%s' with host: '%s'",
            "SSL/TLS" if use_ssl else "non SSL/TLS",
            secret_dict["username"],
            secret_dict["host"],
        )
        return conn
    except psycopg2.OperationalError as e:
        # Map the most common TLS failure modes to actionable log messages;
        # any other operational error is silently reported as a failed attempt
        # (caller may retry without SSL).
        error_message = str(e)
        if "server does not support SSL, but SSL was required" in error_message:
            logging.error(
                "Unable to establish SSL/TLS handshake, SSL/TLS is not enabled on the host: %s",
                secret_dict["host"],
            )
        elif re.search(
            r'server common name ".+" does not match host name ".+"', error_message
        ):
            logging.error(
                "Hostname verification failed when establishing SSL/TLS Handshake with host: %s",
                secret_dict["host"],
            )
        elif re.search(r'no pg_hba.conf entry for host ".+", SSL off', error_message):
            logging.error(
                "Unable to establish SSL/TLS handshake, SSL/TLS is enforced on the host: %s",
                secret_dict["host"],
            )
        return None
|
||||
|
||||
|
||||
def get_secret_dict(service_client, arn, stage, token=None):
    """Gets the secret dictionary corresponding for the secret arn, stage, and token

    This helper function gets credentials for the arn and stage passed in and returns the dictionary by parsing the JSON string

    Args:
        service_client (client): The secrets manager service client

        arn (string): The secret ARN or other identifier

        stage (string): The stage identifying the secret version

        token (string): The ClientRequestToken associated with the secret version, or None if no validation is desired

    Returns:
        SecretDictionary: Secret dictionary

    Raises:
        ResourceNotFoundException: If the secret with the specified arn and stage does not exist

        ValueError: If the secret is not valid JSON

        KeyError: If the secret json does not contain the expected keys or names an unsupported engine

    """
    required_fields = ["host", "username", "password"]

    # Only do VersionId validation against the stage if a token is passed in
    if token:
        secret = service_client.get_secret_value(
            SecretId=arn, VersionId=token, VersionStage=stage
        )
    else:
        secret = service_client.get_secret_value(SecretId=arn, VersionStage=stage)
    plaintext = secret["SecretString"]
    secret_dict = json.loads(plaintext)

    # Run validations against the secret
    supported_engines = ["postgres", "aurora-postgresql"]
    if "engine" not in secret_dict or secret_dict["engine"] not in supported_engines:
        # BUGFIX: the previous message claimed only 'postgres' was supported,
        # even though 'aurora-postgresql' is also accepted above.
        raise KeyError(
            "Database engine must be set to 'postgres' or 'aurora-postgresql' in order to use this rotation lambda"
        )
    for field in required_fields:
        if field not in secret_dict:
            raise KeyError("%s key is missing from secret JSON" % field)

    # Parse and return the secret JSON string
    return secret_dict
|
||||
|
||||
|
||||
def get_environment_bool(variable_name, default_value):
    """Read a boolean flag from an environment variable.

    Falls back to ``default_value`` when the variable is unset. The result is
    True when the effective value equals (case-insensitively) one of:
    'true', '1', 'y', 'yes'.

    Args:
        variable_name (string): Name of environment variable

        default_value (bool): The result will fallback to the default_value when the environment variable with the given name doesn't exist.

    Returns:
        bool: True when the content of environment variable contains either 'true', '1', 'y' or 'yes'
    """
    raw = os.environ.get(variable_name, str(default_value))
    return raw.lower() in ("true", "1", "y", "yes")
|
||||
|
||||
|
||||
def get_random_password(service_client):
    """Generate a new random password via the Secrets Manager API.

    The shape of the generated password is controlled by optional environment
    variables; sensible defaults are used when a variable is missing.

    Supported environment variables:
    - EXCLUDE_CHARACTERS
    - PASSWORD_LENGTH
    - EXCLUDE_NUMBERS
    - EXCLUDE_PUNCTUATION
    - EXCLUDE_UPPERCASE
    - EXCLUDE_LOWERCASE
    - REQUIRE_EACH_INCLUDED_TYPE

    Args:
        service_client (client): The secrets manager service client

    Returns:
        string: The randomly generated password.
    """
    # Characters excluded by default are those likely to break connection
    # strings or shell quoting.
    response = service_client.get_random_password(
        ExcludeCharacters=os.environ.get("EXCLUDE_CHARACTERS", ":/@\"'\\"),
        PasswordLength=int(os.environ.get("PASSWORD_LENGTH", 32)),
        ExcludeNumbers=get_environment_bool("EXCLUDE_NUMBERS", False),
        ExcludePunctuation=get_environment_bool("EXCLUDE_PUNCTUATION", True),
        ExcludeUppercase=get_environment_bool("EXCLUDE_UPPERCASE", False),
        ExcludeLowercase=get_environment_bool("EXCLUDE_LOWERCASE", False),
        RequireEachIncludedType=get_environment_bool(
            "REQUIRE_EACH_INCLUDED_TYPE", True
        ),
    )
    return response["RandomPassword"]
|
||||
@@ -1,173 +0,0 @@
|
||||
locals {
|
||||
env_roles = {
|
||||
staging = { dev_users = "ro", ops_users = "rw", sa_rw_users = "rw", sa_ro_users = "ro", admin_users = "admin" }
|
||||
production = { dev_users = "ro", ops_users = "ro", sa_rw_users = "rw", sa_ro_users = "ro", admin_users = "admin" }
|
||||
}
|
||||
|
||||
# List of application user identities
|
||||
app_users = {
|
||||
dev_users = [
|
||||
"harsh",
|
||||
]
|
||||
ops_users = [
|
||||
"piotr",
|
||||
]
|
||||
admin_users = [
|
||||
"johannes",
|
||||
"matti",
|
||||
]
|
||||
sa_rw_users = [
|
||||
"formbricks-app",
|
||||
]
|
||||
}
|
||||
|
||||
# Flatten users across all teams, creating a map of username => role
|
||||
db_users = merge([
|
||||
for team, users in local.app_users : {
|
||||
for user in users : user => {
|
||||
role = local.env_roles[var.env_name][team]
|
||||
}
|
||||
}
|
||||
]...)
|
||||
|
||||
# FIXME: this shouldn't be hardcoded here
|
||||
rds_database_name = "formbricks-cloud"
|
||||
|
||||
role_prefix = replace(local.rds_database_name, "-", "_")
|
||||
|
||||
# Map of username => role
|
||||
sql_users_map = merge([
|
||||
for team, users in local.app_users : {
|
||||
for user in users : user => {
|
||||
role = "${local.role_prefix}_user_${local.env_roles[var.env_name][team]}"
|
||||
}
|
||||
}
|
||||
]...)
|
||||
|
||||
# SQL to create read-only role
|
||||
sql_create_read_only_role = {
|
||||
sql = <<EOF
|
||||
DO
|
||||
\$\$
|
||||
DECLARE
|
||||
schema_name TEXT;
|
||||
BEGIN
|
||||
-- Create the read-only role if it doesn't exist
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_roles WHERE rolname = '${local.role_prefix}_user_ro') THEN
|
||||
CREATE ROLE ${local.role_prefix}_user_ro;
|
||||
END IF;
|
||||
|
||||
-- Loop through all schemas in the database, excluding system schemas
|
||||
FOR schema_name IN
|
||||
SELECT schemata.schema_name
|
||||
FROM information_schema.schemata AS schemata
|
||||
WHERE schemata.catalog_name = '${local.rds_database_name}'
|
||||
AND schemata.schema_name NOT IN ('pg_catalog', 'information_schema')
|
||||
LOOP
|
||||
-- Grant USAGE on the schema
|
||||
EXECUTE format('GRANT USAGE ON SCHEMA %I TO ${local.role_prefix}_user_ro;', schema_name);
|
||||
|
||||
-- Grant SELECT on all tables in the schema
|
||||
EXECUTE format('GRANT SELECT ON ALL TABLES IN SCHEMA %I TO ${local.role_prefix}_user_ro;', schema_name);
|
||||
END LOOP;
|
||||
END
|
||||
\$\$;
|
||||
EOF
|
||||
}
|
||||
|
||||
# SQL to create read-write role
|
||||
sql_create_read_write_role = {
|
||||
sql = <<EOF
|
||||
DO
|
||||
\$\$
|
||||
DECLARE
|
||||
schema_name TEXT;
|
||||
BEGIN
|
||||
-- Create the read-write role if it doesn't exist
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_roles WHERE rolname = '${local.role_prefix}_user_rw') THEN
|
||||
CREATE ROLE ${local.role_prefix}_user_rw;
|
||||
END IF;
|
||||
|
||||
-- Loop through all schemas in the database, excluding system schemas
|
||||
FOR schema_name IN
|
||||
SELECT schemata.schema_name
|
||||
FROM information_schema.schemata AS schemata
|
||||
WHERE schemata.catalog_name = '${local.rds_database_name}'
|
||||
AND schemata.schema_name NOT IN ('pg_catalog', 'information_schema')
|
||||
LOOP
|
||||
-- Grant USAGE and CREATE on the schema
|
||||
EXECUTE format('GRANT USAGE, CREATE ON SCHEMA %I TO ${local.role_prefix}_user_rw;', schema_name);
|
||||
|
||||
-- Grant CRUD permissions on all existing tables
|
||||
EXECUTE format('GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA %I TO ${local.role_prefix}_user_rw;', schema_name);
|
||||
END LOOP;
|
||||
END
|
||||
\$\$;
|
||||
EOF
|
||||
}
|
||||
|
||||
# SQL to create admin role
|
||||
sql_create_admin_role = {
|
||||
sql = <<EOF
|
||||
DO
|
||||
\$\$
|
||||
DECLARE
|
||||
schema_name TEXT;
|
||||
BEGIN
|
||||
-- Create the admin role if it doesn't exist
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_roles WHERE rolname = '${local.role_prefix}_user_admin') THEN
|
||||
CREATE ROLE ${local.role_prefix}_user_admin;
|
||||
END IF;
|
||||
|
||||
-- Loop through all schemas in the database, excluding system schemas
|
||||
FOR schema_name IN
|
||||
SELECT schemata.schema_name
|
||||
FROM information_schema.schemata AS schemata
|
||||
WHERE schemata.catalog_name = '${local.rds_database_name}'
|
||||
AND schemata.schema_name NOT IN ('pg_catalog', 'information_schema')
|
||||
LOOP
|
||||
-- Grant USAGE and CREATE on the schema (allowing schema usage and object creation)
|
||||
EXECUTE format('GRANT USAGE, CREATE ON SCHEMA %I TO ${local.role_prefix}_user_admin;', schema_name);
|
||||
|
||||
-- Grant INSERT, UPDATE, DELETE on existing tables in the schema
|
||||
EXECUTE format('GRANT INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA %I TO ${local.role_prefix}_user_admin;', schema_name);
|
||||
|
||||
-- Grant full privileges on schema (implicitly includes ability to alter the schema)
|
||||
EXECUTE format('GRANT ALL PRIVILEGES ON SCHEMA %I TO ${local.role_prefix}_user_admin;', schema_name);
|
||||
|
||||
-- Grant the ability to drop tables (delete tables) by owning the tables
|
||||
EXECUTE format('GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA %I TO ${local.role_prefix}_user_admin;', schema_name);
|
||||
END LOOP;
|
||||
END
|
||||
\$\$;
|
||||
EOF
|
||||
}
|
||||
|
||||
# Generate SQL statements to create users and set passwords
|
||||
sql_create_user = {
|
||||
for user, user_info in local.sql_users_map : user => {
|
||||
sql = <<EOF
|
||||
DO
|
||||
\$\$
|
||||
BEGIN
|
||||
-- Create user if it does not exist
|
||||
IF NOT EXISTS (SELECT FROM pg_user WHERE usename = '${user}') THEN
|
||||
EXECUTE format('CREATE USER %I WITH PASSWORD %L;', '${user}', '${random_password.db_user_secrets[user].result}');
|
||||
ELSE
|
||||
-- Update password if the user already exists
|
||||
EXECUTE format('ALTER USER %I WITH PASSWORD %L;', '${user}', '${random_password.db_user_secrets[user].result}');
|
||||
END IF;
|
||||
|
||||
-- Ensure role exists
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_roles WHERE rolname = '${user_info.role}') THEN
|
||||
RAISE EXCEPTION 'Role ${user_info.role} does not exist';
|
||||
END IF;
|
||||
|
||||
-- Assign role to the user
|
||||
EXECUTE format('GRANT %I TO %I;', '${user_info.role}', '${user}');
|
||||
END
|
||||
\$\$;
|
||||
EOF
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
provider "aws" {
|
||||
region = "eu-central-1"
|
||||
}
|
||||
|
||||
terraform {
|
||||
backend "s3" {
|
||||
bucket = "715841356175-terraform"
|
||||
key = "formbricks/db_users/terraform.tfstate"
|
||||
region = "eu-central-1"
|
||||
dynamodb_table = "terraform-lock"
|
||||
}
|
||||
}
|
||||
@@ -1,77 +0,0 @@
|
||||
module "create_postgres_user_read_only_role" {
|
||||
|
||||
source = "digitickets/cli/aws"
|
||||
version = "7.0.0"
|
||||
|
||||
role_session_name = "CreatePostgresUserRoles"
|
||||
aws_cli_commands = [
|
||||
"rds-data", "execute-statement",
|
||||
format("--resource-arn=%s", data.terraform_remote_state.main.outputs.rds["stage"].cluster_arn),
|
||||
format("--secret-arn=%s", data.aws_secretsmanager_secret.rds_credentials.arn),
|
||||
format("--region=%s", data.aws_region.selected.name),
|
||||
format("--database=%s", local.rds_database_name),
|
||||
format("--sql=\"%s\"", local.sql_create_read_only_role.sql)
|
||||
]
|
||||
}
|
||||
|
||||
module "create_postgres_user_read_write_role" {
|
||||
|
||||
source = "digitickets/cli/aws"
|
||||
version = "7.0.0"
|
||||
|
||||
role_session_name = "CreatePostgresUserRoles"
|
||||
aws_cli_commands = [
|
||||
"rds-data", "execute-statement",
|
||||
format("--resource-arn=%s", data.terraform_remote_state.main.outputs.rds["stage"].cluster_arn),
|
||||
format("--secret-arn=%s", data.aws_secretsmanager_secret.rds_credentials.arn),
|
||||
format("--region=%s", data.aws_region.selected.name),
|
||||
format("--database=%s", local.rds_database_name),
|
||||
format("--sql=\"%s\"", local.sql_create_read_write_role.sql)
|
||||
]
|
||||
|
||||
depends_on = [
|
||||
module.create_postgres_user_read_only_role
|
||||
]
|
||||
}
|
||||
|
||||
module "create_postgres_user_admin_role" {
|
||||
|
||||
source = "digitickets/cli/aws"
|
||||
version = "7.0.0"
|
||||
|
||||
role_session_name = "CreatePostgresUserRoles"
|
||||
aws_cli_commands = [
|
||||
"rds-data", "execute-statement",
|
||||
format("--resource-arn=%s", data.terraform_remote_state.main.outputs.rds["stage"].cluster_arn),
|
||||
format("--secret-arn=%s", data.aws_secretsmanager_secret.rds_credentials.arn),
|
||||
format("--region=%s", data.aws_region.selected.name),
|
||||
format("--database=%s", local.rds_database_name),
|
||||
format("--sql=\"%s\"", local.sql_create_admin_role.sql)
|
||||
]
|
||||
|
||||
depends_on = [
|
||||
module.create_postgres_user_read_write_role
|
||||
]
|
||||
}
|
||||
|
||||
# Create SQL users
|
||||
module "create_postgres_user" {
|
||||
for_each = {
|
||||
for user, user_info in local.sql_users_map :
|
||||
user => user_info
|
||||
if var.env_name != "localstack"
|
||||
}
|
||||
|
||||
source = "digitickets/cli/aws"
|
||||
version = "7.0.0"
|
||||
|
||||
role_session_name = "CreatePostgresUser"
|
||||
aws_cli_commands = [
|
||||
"rds-data", "execute-statement",
|
||||
format("--resource-arn=%s", data.terraform_remote_state.main.outputs.rds["stage"].cluster_arn),
|
||||
format("--secret-arn=%s", data.aws_secretsmanager_secret.rds_credentials.arn),
|
||||
format("--region=%s", data.aws_region.selected.name),
|
||||
format("--database=%s", local.rds_database_name),
|
||||
format("--sql=\"%s\"", local.sql_create_user[each.key].sql)
|
||||
]
|
||||
}
|
||||
@@ -1,63 +0,0 @@
|
||||
resource "random_password" "db_user_secrets" {
|
||||
for_each = local.db_users
|
||||
length = 32
|
||||
numeric = true
|
||||
upper = true
|
||||
special = false
|
||||
}
|
||||
|
||||
resource "aws_secretsmanager_secret" "db_user_secrets" {
|
||||
for_each = local.db_users
|
||||
name = "rds-db-credentials/${data.terraform_remote_state.main.outputs.rds["stage"].cluster_resource_id}/${each.key}"
|
||||
description = "RDS database ${data.terraform_remote_state.main.outputs.rds["stage"].cluster_id} credentials for ${each.key}"
|
||||
kms_key_id = data.aws_kms_key.secretsmanager.id
|
||||
}
|
||||
|
||||
resource "aws_secretsmanager_secret_version" "db_user_secrets" {
|
||||
for_each = aws_secretsmanager_secret.db_user_secrets
|
||||
secret_id = each.value.id
|
||||
secret_string = jsonencode({
|
||||
engine = "postgres"
|
||||
host = data.terraform_remote_state.main.outputs.rds["stage"].cluster_endpoint
|
||||
username = each.key
|
||||
password = random_password.db_user_secrets[each.key].result
|
||||
dbname = local.rds_database_name
|
||||
port = data.terraform_remote_state.main.outputs.rds["stage"].cluster_port
|
||||
})
|
||||
}
|
||||
|
||||
resource "aws_secretsmanager_secret_policy" "db_user_secrets" {
|
||||
for_each = aws_secretsmanager_secret.db_user_secrets
|
||||
secret_arn = each.value.arn
|
||||
policy = jsonencode({
|
||||
Version = "2012-10-17",
|
||||
Statement = [
|
||||
{
|
||||
Effect = "Deny",
|
||||
Principal = "*",
|
||||
Action = ["secretsmanager:GetSecretValue"],
|
||||
Resource = each.value.arn,
|
||||
Condition = {
|
||||
StringNotLike = {
|
||||
"aws:userId" = flatten(concat([
|
||||
"*:${each.key}@formbricks.com", "*:piotr@formbricks.com"
|
||||
]))
|
||||
|
||||
},
|
||||
ArnNotEquals = {
|
||||
"aws:PrincipalArn" = module.lambda_rotate_db_secret.lambda_function_arn
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
})
|
||||
}
|
||||
|
||||
resource "aws_secretsmanager_secret_rotation" "db_user_secrets" {
|
||||
for_each = aws_secretsmanager_secret.db_user_secrets
|
||||
secret_id = each.value.id
|
||||
rotation_lambda_arn = module.lambda_rotate_db_secret.lambda_function_arn
|
||||
rotation_rules {
|
||||
automatically_after_days = 1
|
||||
}
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
#
|
||||
variable "env_name" {
|
||||
description = "env_name"
|
||||
type = string
|
||||
default = "staging"
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
terraform {
|
||||
required_version = ">= 1.0"
|
||||
|
||||
required_providers {
|
||||
aws = {
|
||||
source = "hashicorp/aws"
|
||||
version = ">= 5.46"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,34 +0,0 @@
|
||||
locals {
|
||||
project = "formbricks"
|
||||
environment = "prod"
|
||||
name = "${local.project}-${local.environment}"
|
||||
envs = {
|
||||
prod = "${local.project}-prod"
|
||||
stage = "${local.project}-stage"
|
||||
}
|
||||
vpc_cidr = "10.0.0.0/16"
|
||||
azs = slice(data.aws_availability_zones.available.names, 0, 3)
|
||||
tags = {
|
||||
Project = local.project
|
||||
Environment = local.environment
|
||||
ManagedBy = "Terraform"
|
||||
Blueprint = local.name
|
||||
}
|
||||
tags_map = {
|
||||
prod = {
|
||||
Project = local.project
|
||||
Environment = "prod"
|
||||
ManagedBy = "Terraform"
|
||||
Blueprint = "${local.project}-prod"
|
||||
}
|
||||
stage = {
|
||||
Project = local.project
|
||||
Environment = "stage"
|
||||
ManagedBy = "Terraform"
|
||||
Blueprint = "${local.project}-stage"
|
||||
}
|
||||
}
|
||||
domain = "k8s.formbricks.com"
|
||||
karpetner_helm_version = "1.3.1"
|
||||
karpenter_namespace = "karpenter"
|
||||
}
|
||||
@@ -1,3 +1,38 @@
|
||||
locals {
|
||||
project = "formbricks"
|
||||
environment = "prod"
|
||||
name = "${local.project}-${local.environment}"
|
||||
envs = {
|
||||
prod = "${local.project}-prod"
|
||||
stage = "${local.project}-stage"
|
||||
}
|
||||
vpc_cidr = "10.0.0.0/16"
|
||||
azs = slice(data.aws_availability_zones.available.names, 0, 3)
|
||||
tags = {
|
||||
Project = local.project
|
||||
Environment = local.environment
|
||||
ManagedBy = "Terraform"
|
||||
Blueprint = local.name
|
||||
}
|
||||
tags_map = {
|
||||
prod = {
|
||||
Project = local.project
|
||||
Environment = "prod"
|
||||
ManagedBy = "Terraform"
|
||||
Blueprint = "${local.project}-prod"
|
||||
}
|
||||
stage = {
|
||||
Project = local.project
|
||||
Environment = "stage"
|
||||
ManagedBy = "Terraform"
|
||||
Blueprint = "${local.project}-stage"
|
||||
}
|
||||
}
|
||||
domain = "k8s.formbricks.com"
|
||||
karpetner_helm_version = "1.3.1"
|
||||
karpenter_namespace = "karpenter"
|
||||
}
|
||||
|
||||
################################################################################
|
||||
# Route53 Hosted Zone
|
||||
################################################################################
|
||||
@@ -121,7 +156,7 @@ module "eks" {
|
||||
most_recent = true
|
||||
}
|
||||
vpc-cni = {
|
||||
addon_version = "v1.20.0-eksbuild.1"
|
||||
most_recent = true
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
output "rds" {
|
||||
description = "RDS created for cluster"
|
||||
value = module.rds-aurora
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
output "rds_secret_staging_arn" {
|
||||
description = "RDS secret created for cluster"
|
||||
value = aws_secretsmanager_secret.rds_credentials["stage"].arn
|
||||
}
|
||||
@@ -75,4 +75,5 @@ module "rds-aurora" {
|
||||
}
|
||||
|
||||
tags = local.tags_map[each.key]
|
||||
|
||||
}
|
||||
|
||||
@@ -22,23 +22,3 @@ resource "aws_secretsmanager_secret_version" "formbricks_app_secrets" {
|
||||
})
|
||||
}
|
||||
|
||||
resource "aws_secretsmanager_secret" "rds_credentials" {
|
||||
for_each = local.envs
|
||||
name = "${each.key}/formbricks/terraform/rds/credentials"
|
||||
}
|
||||
|
||||
resource "aws_secretsmanager_secret_version" "rds_credentials" {
|
||||
for_each = local.envs
|
||||
secret_id = aws_secretsmanager_secret.rds_credentials[each.key].id
|
||||
secret_string = <<EOF
|
||||
{
|
||||
"username": "${module.rds-aurora[each.key].cluster_master_username}",
|
||||
"password": "${random_password.postgres[each.key].result}",
|
||||
"engine": data.aws_rds_engine_version.postgresql.engine,
|
||||
"host": "${module.rds-aurora[each.key].cluster_endpoint}",
|
||||
"port": ${module.rds-aurora[each.key].cluster_port},
|
||||
"dbClusterIdentifier": "${module.rds-aurora[each.key].cluster_id}"
|
||||
}
|
||||
EOF
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- You are about to drop the column `imageUrl` on the `User` table. All the data in the column will be lost.
|
||||
|
||||
*/
|
||||
-- AlterTable
|
||||
ALTER TABLE "User" DROP COLUMN "imageUrl";
|
||||
@@ -824,7 +824,6 @@ model User {
|
||||
email String @unique
|
||||
emailVerified DateTime? @map(name: "email_verified")
|
||||
|
||||
imageUrl String?
|
||||
twoFactorSecret String?
|
||||
twoFactorEnabled Boolean @default(false)
|
||||
backupCodes String?
|
||||
|
||||
@@ -57,7 +57,6 @@ export const ZUser = z.object({
|
||||
Omit<
|
||||
User,
|
||||
| "emailVerified"
|
||||
| "imageUrl"
|
||||
| "twoFactorSecret"
|
||||
| "twoFactorEnabled"
|
||||
| "backupCodes"
|
||||
|
||||
@@ -765,7 +765,7 @@ export function Survey({
|
||||
<LanguageSwitch
|
||||
surveyLanguages={localSurvey.languages}
|
||||
setSelectedLanguageCode={setselectedLanguage}
|
||||
hoverColor={styling.inputColor?.light ?? "#000000"}
|
||||
hoverColor={styling.inputColor?.light ?? "#f8fafc"}
|
||||
borderRadius={styling.roundness ?? 8}
|
||||
/>
|
||||
)}
|
||||
@@ -776,7 +776,7 @@ export function Survey({
|
||||
{isCloseButtonVisible && (
|
||||
<SurveyCloseButton
|
||||
onClose={onClose}
|
||||
hoverColor={styling.inputColor?.light ?? "#000000"}
|
||||
hoverColor={styling.inputColor?.light ?? "#f8fafc"}
|
||||
borderRadius={styling.roundness ?? 8}
|
||||
/>
|
||||
)}
|
||||
|
||||
@@ -48,7 +48,6 @@ export const ZUser = z.object({
|
||||
name: ZUserName,
|
||||
email: ZUserEmail,
|
||||
emailVerified: z.date().nullable(),
|
||||
imageUrl: z.string().url().nullable(),
|
||||
twoFactorEnabled: z.boolean(),
|
||||
identityProvider: ZUserIdentityProvider,
|
||||
createdAt: z.date(),
|
||||
@@ -70,7 +69,6 @@ export const ZUserUpdateInput = z.object({
|
||||
password: ZUserPassword.optional(),
|
||||
role: ZRole.optional(),
|
||||
objective: ZUserObjective.nullish(),
|
||||
imageUrl: z.string().nullish(),
|
||||
notificationSettings: ZUserNotificationSettings.optional(),
|
||||
locale: ZUserLocale.optional(),
|
||||
lastLoginAt: z.date().nullish(),
|
||||
|
||||
16
pnpm-lock.yaml
generated
16
pnpm-lock.yaml
generated
@@ -292,8 +292,8 @@ importers:
|
||||
specifier: 3.0.2
|
||||
version: 3.0.2
|
||||
boring-avatars:
|
||||
specifier: 1.11.2
|
||||
version: 1.11.2
|
||||
specifier: 2.0.1
|
||||
version: 2.0.1(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
|
||||
cache-manager:
|
||||
specifier: 6.4.3
|
||||
version: 6.4.3
|
||||
@@ -5069,8 +5069,11 @@ packages:
|
||||
boolbase@1.0.0:
|
||||
resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==}
|
||||
|
||||
boring-avatars@1.11.2:
|
||||
resolution: {integrity: sha512-3+wkwPeObwS4R37FGXMYViqc4iTrIRj5yzfX9Qy4mnpZ26sX41dGMhsAgmKks1r/uufY1pl4vpgzMWHYfJRb2A==}
|
||||
boring-avatars@2.0.1:
|
||||
resolution: {integrity: sha512-TeBnZrp7WxHcQPuLhGQamklgNqaL7eUAUh3E11kFj9rTn0Hari2ZKVTchqNrp62UOHN/XOe5bZGcbzVGwHjHwg==}
|
||||
peerDependencies:
|
||||
react: '>=18.0.0'
|
||||
react-dom: '>=18.0.0'
|
||||
|
||||
bowser@2.11.0:
|
||||
resolution: {integrity: sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==}
|
||||
@@ -15116,7 +15119,10 @@ snapshots:
|
||||
|
||||
boolbase@1.0.0: {}
|
||||
|
||||
boring-avatars@1.11.2: {}
|
||||
boring-avatars@2.0.1(react-dom@19.1.0(react@19.1.0))(react@19.1.0):
|
||||
dependencies:
|
||||
react: 19.1.0
|
||||
react-dom: 19.1.0(react@19.1.0)
|
||||
|
||||
bowser@2.11.0: {}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user