Mirror of https://github.com/formbricks/formbricks.git
Synced 2026-02-01 04:49:11 -06:00
Compare commits (539 commits)
release/4.
.cursor/commands/create-question.md (new file, 352 lines)
@@ -0,0 +1,352 @@
|
||||
# Create New Question Element
|
||||
|
||||
Use this command to scaffold a new question element component in `packages/survey-ui/src/elements/`.
|
||||
|
||||
## Usage
|
||||
|
||||
When creating a new question type (e.g., `single-select`, `rating`, `nps`), follow these steps:
|
||||
|
||||
1. **Create the component file** `{question-type}.tsx` with this structure:
|
||||
|
||||
```typescript
|
||||
import * as React from "react";
|
||||
import { ElementHeader } from "../components/element-header";
|
||||
import { useTextDirection } from "../hooks/use-text-direction";
|
||||
import { cn } from "../lib/utils";
|
||||
|
||||
interface {QuestionType}Props {
|
||||
/** Unique identifier for the element container */
|
||||
elementId: string;
|
||||
/** The main question or prompt text displayed as the headline */
|
||||
headline: string;
|
||||
/** Optional descriptive text displayed below the headline */
|
||||
description?: string;
|
||||
/** Unique identifier for the input/control group */
|
||||
inputId: string;
|
||||
/** Current value */
|
||||
value?: {ValueType};
|
||||
/** Callback function called when the value changes */
|
||||
onChange: (value: {ValueType}) => void;
|
||||
/** Whether the field is required (shows asterisk indicator) */
|
||||
required?: boolean;
|
||||
/** Error message to display */
|
||||
errorMessage?: string;
|
||||
/** Text direction: 'ltr' (left-to-right), 'rtl' (right-to-left), or 'auto' (auto-detect from content) */
|
||||
dir?: "ltr" | "rtl" | "auto";
|
||||
/** Whether the controls are disabled */
|
||||
disabled?: boolean;
|
||||
// Add question-specific props here
|
||||
}
|
||||
|
||||
function {QuestionType}({
|
||||
elementId,
|
||||
headline,
|
||||
description,
|
||||
inputId,
|
||||
value,
|
||||
onChange,
|
||||
required = false,
|
||||
errorMessage,
|
||||
dir = "auto",
|
||||
disabled = false,
|
||||
// ... question-specific props
|
||||
}: {QuestionType}Props): React.JSX.Element {
|
||||
// Ensure value is always the correct type (handle undefined/null)
|
||||
const currentValue = value ?? {defaultValue};
|
||||
|
||||
// Detect text direction from content
|
||||
const detectedDir = useTextDirection({
|
||||
dir,
|
||||
textContent: [headline, description ?? "", /* add other text content from question */],
|
||||
});
|
||||
|
||||
return (
|
||||
<div className="w-full space-y-4" id={elementId} dir={detectedDir}>
|
||||
{/* Headline */}
|
||||
<ElementHeader
|
||||
headline={headline}
|
||||
description={description}
|
||||
required={required}
|
||||
htmlFor={inputId}
|
||||
/>
|
||||
|
||||
{/* Question-specific controls */}
|
||||
{/* TODO: Add your question-specific UI here */}
|
||||
|
||||
{/* Error message */}
|
||||
{errorMessage && (
|
||||
<div className="text-destructive flex items-center gap-1 text-sm" dir={detectedDir}>
|
||||
<span>{errorMessage}</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export { {QuestionType} };
|
||||
export type { {QuestionType}Props };
|
||||
```
|
||||
|
||||
2. **Create the Storybook file** `{question-type}.stories.tsx`:
|
||||
|
||||
```typescript
|
||||
import type { Decorator, Meta, StoryObj } from "@storybook/react";
|
||||
import React from "react";
|
||||
import { {QuestionType}, type {QuestionType}Props } from "./{question-type}";
|
||||
|
||||
// Styling options for the StylingPlayground story
|
||||
interface StylingOptions {
|
||||
// Question styling
|
||||
questionHeadlineFontFamily: string;
|
||||
questionHeadlineFontSize: string;
|
||||
questionHeadlineFontWeight: string;
|
||||
questionHeadlineColor: string;
|
||||
questionDescriptionFontFamily: string;
|
||||
questionDescriptionFontWeight: string;
|
||||
questionDescriptionFontSize: string;
|
||||
questionDescriptionColor: string;
|
||||
// Add component-specific styling options here
|
||||
}
|
||||
|
||||
type StoryProps = {QuestionType}Props & Partial<StylingOptions>;
|
||||
|
||||
const meta: Meta<StoryProps> = {
|
||||
title: "UI-package/Elements/{QuestionType}",
|
||||
component: {QuestionType},
|
||||
parameters: {
|
||||
layout: "centered",
|
||||
docs: {
|
||||
description: {
|
||||
component: "A complete {question type} question element...",
|
||||
},
|
||||
},
|
||||
},
|
||||
tags: ["autodocs"],
|
||||
argTypes: {
|
||||
headline: {
|
||||
control: "text",
|
||||
description: "The main question text",
|
||||
table: { category: "Content" },
|
||||
},
|
||||
description: {
|
||||
control: "text",
|
||||
description: "Optional description or subheader text",
|
||||
table: { category: "Content" },
|
||||
},
|
||||
value: {
|
||||
control: "object",
|
||||
description: "Current value",
|
||||
table: { category: "State" },
|
||||
},
|
||||
required: {
|
||||
control: "boolean",
|
||||
description: "Whether the field is required",
|
||||
table: { category: "Validation" },
|
||||
},
|
||||
errorMessage: {
|
||||
control: "text",
|
||||
description: "Error message to display",
|
||||
table: { category: "Validation" },
|
||||
},
|
||||
dir: {
|
||||
control: { type: "select" },
|
||||
options: ["ltr", "rtl", "auto"],
|
||||
description: "Text direction for RTL support",
|
||||
table: { category: "Layout" },
|
||||
},
|
||||
disabled: {
|
||||
control: "boolean",
|
||||
description: "Whether the controls are disabled",
|
||||
table: { category: "State" },
|
||||
},
|
||||
onChange: {
|
||||
action: "changed",
|
||||
table: { category: "Events" },
|
||||
},
|
||||
// Add question-specific argTypes here
|
||||
},
|
||||
};
|
||||
|
||||
export default meta;
|
||||
type Story = StoryObj<StoryProps>;
|
||||
|
||||
// Decorator to apply CSS variables from story args
|
||||
const withCSSVariables: Decorator<StoryProps> = (Story, context) => {
|
||||
const args = context.args as StoryProps;
|
||||
const {
|
||||
questionHeadlineFontFamily,
|
||||
questionHeadlineFontSize,
|
||||
questionHeadlineFontWeight,
|
||||
questionHeadlineColor,
|
||||
questionDescriptionFontFamily,
|
||||
questionDescriptionFontSize,
|
||||
questionDescriptionFontWeight,
|
||||
questionDescriptionColor,
|
||||
// Extract component-specific styling options
|
||||
} = args;
|
||||
|
||||
const cssVarStyle: React.CSSProperties & Record<string, string | undefined> = {
|
||||
"--fb-question-headline-font-family": questionHeadlineFontFamily,
|
||||
"--fb-question-headline-font-size": questionHeadlineFontSize,
|
||||
"--fb-question-headline-font-weight": questionHeadlineFontWeight,
|
||||
"--fb-question-headline-color": questionHeadlineColor,
|
||||
"--fb-question-description-font-family": questionDescriptionFontFamily,
|
||||
"--fb-question-description-font-size": questionDescriptionFontSize,
|
||||
"--fb-question-description-font-weight": questionDescriptionFontWeight,
|
||||
"--fb-question-description-color": questionDescriptionColor,
|
||||
// Add component-specific CSS variables
|
||||
};
|
||||
|
||||
return (
|
||||
<div style={cssVarStyle} className="w-[600px]">
|
||||
<Story />
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export const StylingPlayground: Story = {
|
||||
args: {
|
||||
headline: "Example question?",
|
||||
description: "Example description",
|
||||
// Default styling values
|
||||
questionHeadlineFontFamily: "system-ui, sans-serif",
|
||||
questionHeadlineFontSize: "1.125rem",
|
||||
questionHeadlineFontWeight: "600",
|
||||
questionHeadlineColor: "#1e293b",
|
||||
questionDescriptionFontFamily: "system-ui, sans-serif",
|
||||
questionDescriptionFontSize: "0.875rem",
|
||||
questionDescriptionFontWeight: "400",
|
||||
questionDescriptionColor: "#64748b",
|
||||
// Add component-specific default values
|
||||
},
|
||||
argTypes: {
|
||||
// Question styling argTypes
|
||||
questionHeadlineFontFamily: {
|
||||
control: "text",
|
||||
table: { category: "Question Styling" },
|
||||
},
|
||||
questionHeadlineFontSize: {
|
||||
control: "text",
|
||||
table: { category: "Question Styling" },
|
||||
},
|
||||
questionHeadlineFontWeight: {
|
||||
control: "text",
|
||||
table: { category: "Question Styling" },
|
||||
},
|
||||
questionHeadlineColor: {
|
||||
control: "color",
|
||||
table: { category: "Question Styling" },
|
||||
},
|
||||
questionDescriptionFontFamily: {
|
||||
control: "text",
|
||||
table: { category: "Question Styling" },
|
||||
},
|
||||
questionDescriptionFontSize: {
|
||||
control: "text",
|
||||
table: { category: "Question Styling" },
|
||||
},
|
||||
questionDescriptionFontWeight: {
|
||||
control: "text",
|
||||
table: { category: "Question Styling" },
|
||||
},
|
||||
questionDescriptionColor: {
|
||||
control: "color",
|
||||
table: { category: "Question Styling" },
|
||||
},
|
||||
// Add component-specific argTypes
|
||||
},
|
||||
decorators: [withCSSVariables],
|
||||
};
|
||||
|
||||
export const Default: Story = {
|
||||
args: {
|
||||
headline: "Example question?",
|
||||
// Add default props
|
||||
},
|
||||
};
|
||||
|
||||
export const WithDescription: Story = {
|
||||
args: {
|
||||
headline: "Example question?",
|
||||
description: "Example description text",
|
||||
},
|
||||
};
|
||||
|
||||
export const Required: Story = {
|
||||
args: {
|
||||
headline: "Example question?",
|
||||
required: true,
|
||||
},
|
||||
};
|
||||
|
||||
export const WithError: Story = {
|
||||
args: {
|
||||
headline: "Example question?",
|
||||
errorMessage: "This field is required",
|
||||
required: true,
|
||||
},
|
||||
};
|
||||
|
||||
export const Disabled: Story = {
|
||||
args: {
|
||||
headline: "Example question?",
|
||||
disabled: true,
|
||||
},
|
||||
};
|
||||
|
||||
export const RTL: Story = {
|
||||
args: {
|
||||
headline: "مثال على السؤال؟",
|
||||
description: "مثال على الوصف",
|
||||
// Add RTL-specific props
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
3. **Add CSS variables** to `packages/survey-ui/src/styles/globals.css` if needed:
|
||||
|
||||
```css
|
||||
/* Component-specific CSS variables */
|
||||
--fb-{component}-{property}: {default-value};
|
||||
```
|
||||
|
||||
4. **Export from** `packages/survey-ui/src/index.ts`:
|
||||
|
||||
```typescript
|
||||
export { {QuestionType}, type {QuestionType}Props } from "./elements/{question-type}";
|
||||
```
|
||||
|
||||
## Key Requirements
|
||||
|
||||
- ✅ Always use `ElementHeader` component for headline/description
|
||||
- ✅ Always use `useTextDirection` hook for RTL support
|
||||
- ✅ Always handle undefined/null values safely (e.g., `Array.isArray(value) ? value : []`; see the sketch after this list)
|
||||
- ✅ Always include error message display if applicable
|
||||
- ✅ Always support disabled state if applicable
|
||||
- ✅ Always add JSDoc comments to props interface
|
||||
- ✅ Always create Storybook stories with styling playground
|
||||
- ✅ Always export types from component file
|
||||
- ✅ Always add to index.ts exports
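The value-handling sketch referenced above: a minimal example for a hypothetical `string[]`-valued question. The `toggleSelection` helper is illustrative only and is not part of the template.

```typescript
// Hypothetical helper showing the "handle undefined/null safely" requirement for a
// string[]-valued question: normalize the incoming value, then toggle one option id.
export function toggleSelection(value: unknown, optionId: string): string[] {
  const current = Array.isArray(value) ? (value as string[]) : [];
  return current.includes(optionId)
    ? current.filter((id) => id !== optionId)
    : [...current, optionId];
}

// Usage inside a question component (sketch): onChange(toggleSelection(value, option.id));
```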
|
||||
|
||||
## Examples
|
||||
|
||||
- `open-text.tsx` - Text input/textarea question (string value)
|
||||
- `multi-select.tsx` - Multiple checkbox selection (string[] value)
|
||||
|
||||
## Checklist
|
||||
|
||||
When creating a new question element, verify:
|
||||
|
||||
- [ ] Component file created with proper structure
|
||||
- [ ] Props interface with JSDoc comments for all props
|
||||
- [ ] Uses `ElementHeader` component (don't duplicate header logic)
|
||||
- [ ] Uses `useTextDirection` hook for RTL support
|
||||
- [ ] Handles undefined/null values safely
|
||||
- [ ] Storybook file created with styling playground
|
||||
- [ ] Includes common stories: Default, WithDescription, Required, WithError, Disabled, RTL
|
||||
- [ ] CSS variables added to `globals.css` if component needs custom styling
|
||||
- [ ] Exported from `index.ts` with types
|
||||
- [ ] TypeScript types properly exported
|
||||
- [ ] Error message display included if applicable
|
||||
- [ ] Disabled state supported if applicable
|
||||
|
||||
@@ -1,61 +0,0 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
# Build & Deployment Best Practices
|
||||
|
||||
## Build Process
|
||||
|
||||
### Running Builds
|
||||
- Use `pnpm build` from project root for full build
|
||||
- Monitor for React hooks warnings and fix them immediately
|
||||
- Ensure all TypeScript errors are resolved before deployment
|
||||
|
||||
### Common Build Issues & Fixes
|
||||
|
||||
#### React Hooks Warnings
|
||||
- Capture ref values in variables within useEffect cleanup
|
||||
- Avoid accessing `.current` directly in cleanup functions
|
||||
- Pattern for fixing ref cleanup warnings:
|
||||
```typescript
|
||||
useEffect(() => {
|
||||
const currentRef = myRef.current;
|
||||
return () => {
|
||||
if (currentRef) {
|
||||
currentRef.cleanup();
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
```
|
||||
|
||||
#### Test Failures During Build
|
||||
- Ensure all test mocks include required constants like `SESSION_MAX_AGE`
|
||||
- Mock Next.js navigation hooks properly: `useParams`, `useRouter`, `useSearchParams` (see the sketch after this list)
|
||||
- Remove unused imports and constants from test files
|
||||
- Use literal values instead of imported constants when the constant isn't actually needed
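A minimal sketch of the mocks referenced above, assuming the test runner is Vitest (which `pnpm test` invokes) and that the constants module lives at `@/lib/constants`; both the module path and the mocked values are placeholders.

```typescript
import { vi } from "vitest";

// Mock Next.js navigation hooks so components under test never touch a real router.
vi.mock("next/navigation", () => ({
  useParams: () => ({ environmentId: "env_123", surveyId: "survey_123" }),
  useRouter: () => ({ push: vi.fn(), replace: vi.fn(), refresh: vi.fn() }),
  useSearchParams: () => new URLSearchParams(),
}));

// Provide required constants (e.g. SESSION_MAX_AGE) that the module under test imports.
vi.mock("@/lib/constants", () => ({
  SESSION_MAX_AGE: 86400, // placeholder value in seconds
}));
```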
|
||||
|
||||
### Test Execution
|
||||
- Run `pnpm test` to execute all tests
|
||||
- Use `pnpm test -- --run filename.test.tsx` for specific test files
|
||||
- Fix test failures before merging code
|
||||
- Ensure 100% test coverage for new components
|
||||
|
||||
### Performance Monitoring
|
||||
- Monitor build times and optimize if necessary
|
||||
- Watch for memory usage during builds
|
||||
- Use proper caching strategies for faster rebuilds
|
||||
|
||||
### Deployment Checklist
|
||||
1. All tests passing
|
||||
2. Build completes without warnings
|
||||
3. TypeScript compilation successful
|
||||
4. No linter errors
|
||||
5. Database migrations applied (if any)
|
||||
6. Environment variables configured
|
||||
|
||||
### EKS Deployment Considerations
|
||||
- Ensure latest code is deployed to all pods
|
||||
- Monitor AWS RDS Performance Insights for database issues
|
||||
- Verify environment-specific configurations
|
||||
- Check pod health and resource usage
|
||||
@@ -1,414 +0,0 @@
|
||||
---
|
||||
description: Caching rules for performance improvements
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
# Cache Optimization Patterns for Formbricks
|
||||
|
||||
## Cache Strategy Overview
|
||||
|
||||
Formbricks uses a **hybrid caching approach** optimized for enterprise scale:
|
||||
|
||||
- **Redis** for persistent cross-request caching
|
||||
- **React `cache()`** for request-level deduplication
|
||||
- **NO Next.js `unstable_cache()`** - avoid for reliability
|
||||
|
||||
## Key Files
|
||||
|
||||
### Core Cache Infrastructure
|
||||
- [apps/web/modules/cache/lib/service.ts](mdc:apps/web/modules/cache/lib/service.ts) - Redis cache service
|
||||
- [apps/web/modules/cache/lib/withCache.ts](mdc:apps/web/modules/cache/lib/withCache.ts) - Cache wrapper utilities
|
||||
- [apps/web/modules/cache/lib/cacheKeys.ts](mdc:apps/web/modules/cache/lib/cacheKeys.ts) - Enterprise cache key patterns and utilities
|
||||
|
||||
### Environment State Caching (Critical Endpoint)
|
||||
- [apps/web/app/api/v1/client/[environmentId]/environment/route.ts](mdc:apps/web/app/api/v1/client/[environmentId]/environment/route.ts) - Main endpoint serving hundreds of thousands of SDK clients
|
||||
- [apps/web/app/api/v1/client/[environmentId]/environment/lib/data.ts](mdc:apps/web/app/api/v1/client/[environmentId]/environment/lib/data.ts) - Optimized data layer with caching
|
||||
|
||||
## Enterprise-Grade Cache Key Patterns
|
||||
|
||||
**Always use** the `createCacheKey` utilities from [cacheKeys.ts](mdc:apps/web/modules/cache/lib/cacheKeys.ts):
|
||||
|
||||
```typescript
|
||||
// ✅ Correct patterns
|
||||
createCacheKey.environment.state(environmentId) // "fb:env:abc123:state"
|
||||
createCacheKey.organization.billing(organizationId) // "fb:org:xyz789:billing"
|
||||
createCacheKey.license.status(organizationId) // "fb:license:org123:status"
|
||||
createCacheKey.user.permissions(userId, orgId) // "fb:user:456:org:123:permissions"
|
||||
|
||||
// ❌ Never use flat keys - collision-prone
|
||||
"environment_abc123"
|
||||
"user_data_456"
|
||||
```
|
||||
|
||||
## When to Use Each Cache Type
|
||||
|
||||
### Use React `cache()` for Request Deduplication
|
||||
```typescript
|
||||
// ✅ Prevents multiple calls within same request
|
||||
export const getEnterpriseLicense = reactCache(async () => {
|
||||
// Complex license validation logic
|
||||
});
|
||||
```
|
||||
|
||||
### Use `withCache()` for Simple Database Queries
|
||||
```typescript
|
||||
// ✅ Simple caching with automatic fallback (TTL in milliseconds)
|
||||
export const getActionClasses = (environmentId: string) => {
|
||||
return withCache(() => fetchActionClassesFromDB(environmentId), {
|
||||
key: createCacheKey.environment.actionClasses(environmentId),
|
||||
ttl: 60 * 30 * 1000, // 30 minutes in milliseconds
|
||||
})();
|
||||
};
|
||||
```
|
||||
|
||||
### Use Explicit Redis Cache for Complex Business Logic
|
||||
```typescript
|
||||
// ✅ Full control for high-stakes endpoints
|
||||
export const getEnvironmentState = async (environmentId: string) => {
|
||||
const cached = await environmentStateCache.getEnvironmentState(environmentId);
|
||||
if (cached) return cached;
|
||||
|
||||
const fresh = await buildComplexState(environmentId);
|
||||
await environmentStateCache.setEnvironmentState(environmentId, fresh);
|
||||
return fresh;
|
||||
};
|
||||
```
|
||||
|
||||
## Caching Decision Framework
|
||||
|
||||
### When TO Add Caching
|
||||
|
||||
```typescript
|
||||
// ✅ Expensive operations that benefit from caching
|
||||
- Database queries (>10ms typical)
|
||||
- External API calls (>50ms typical)
|
||||
- Complex computations (>5ms)
|
||||
- File system operations
|
||||
- Heavy data transformations
|
||||
|
||||
// Example: Database query with complex joins (TTL in milliseconds)
|
||||
export const getEnvironmentWithDetails = (environmentId: string) =>
  withCache(
    async () => {
      return prisma.environment.findUnique({
        where: { id: environmentId },
        include: { /* complex joins */ }
      });
    },
    { key: createCacheKey.environment.details(environmentId), ttl: 60 * 30 * 1000 } // 30 minutes
  )();
|
||||
```
|
||||
|
||||
### When NOT to Add Caching
|
||||
|
||||
```typescript
|
||||
// ❌ Don't cache these operations - minimal overhead
|
||||
- Simple property access (<0.1ms)
|
||||
- Basic transformations (<1ms)
|
||||
- Functions that just call already-cached functions
|
||||
- Pure computation without I/O
|
||||
|
||||
// ❌ Bad example: Redundant caching
|
||||
const getCachedLicenseFeatures = withCache(
|
||||
async () => {
|
||||
const license = await getEnterpriseLicense(); // Already cached!
|
||||
return license.active ? license.features : null; // Just property access
|
||||
},
|
||||
{ key: "license-features", ttl: 1800 * 1000 } // 30 minutes in milliseconds
|
||||
);
|
||||
|
||||
// ✅ Good example: Simple and efficient
|
||||
const getLicenseFeatures = async () => {
|
||||
const license = await getEnterpriseLicense(); // Already cached
|
||||
return license.active ? license.features : null; // 0.1ms overhead
|
||||
};
|
||||
```
|
||||
|
||||
### Computational Overhead Analysis
|
||||
|
||||
Before adding caching, analyze the overhead:
|
||||
|
||||
```typescript
|
||||
// ✅ High overhead - CACHE IT
|
||||
- Database queries: ~10-100ms
|
||||
- External APIs: ~50-500ms
|
||||
- File I/O: ~5-50ms
|
||||
- Complex algorithms: >5ms
|
||||
|
||||
// ❌ Low overhead - DON'T CACHE
|
||||
- Property access: ~0.001ms
|
||||
- Simple lookups: ~0.1ms
|
||||
- Basic validation: ~1ms
|
||||
- Type checks: ~0.01ms
|
||||
|
||||
// Example decision tree:
|
||||
const expensiveOperation = async () => {
|
||||
return prisma.query(); // 50ms - CACHE IT
|
||||
};
|
||||
|
||||
const cheapOperation = (data: any) => {
|
||||
return data.property; // 0.001ms - DON'T CACHE
|
||||
};
|
||||
```
|
||||
|
||||
### Avoid Cache Wrapper Anti-Pattern
|
||||
|
||||
```typescript
|
||||
// ❌ Don't create wrapper functions just for caching
|
||||
const getCachedUserPermissions = withCache(
|
||||
async (userId: string) => getUserPermissions(userId),
|
||||
{ key: createCacheKey.user.permissions(userId), ttl: 3600 * 1000 } // 1 hour in milliseconds
|
||||
);
|
||||
|
||||
// ✅ Add caching directly to the original function
|
||||
export const getUserPermissions = (userId: string) =>
  withCache(
    async () => {
      return prisma.user.findUnique({
        where: { id: userId },
        include: { permissions: true }
      });
    },
    { key: createCacheKey.user.permissions(userId), ttl: 3600 * 1000 } // 1 hour in milliseconds
  )();
|
||||
```
|
||||
|
||||
## TTL Coordination Strategy
|
||||
|
||||
### Multi-Layer Cache Coordination
|
||||
For endpoints serving client SDKs, coordinate TTLs across layers:
|
||||
|
||||
```typescript
|
||||
// Client SDK cache (expiresAt) - longest TTL for fewer requests
|
||||
const CLIENT_TTL = 60 * 60; // 1 hour (seconds for client)
|
||||
|
||||
// Server Redis cache - shorter TTL ensures fresh data for clients
|
||||
const SERVER_TTL = 60 * 30 * 1000; // 30 minutes in milliseconds
|
||||
|
||||
// HTTP cache headers (seconds)
|
||||
const BROWSER_TTL = 60 * 60; // 1 hour (max-age)
|
||||
const CDN_TTL = 60 * 30; // 30 minutes (s-maxage)
|
||||
const CORS_TTL = 60 * 60; // 1 hour (balanced approach)
|
||||
```
|
||||
|
||||
### Standard TTL Guidelines (in milliseconds for cache-manager + Keyv)
|
||||
```typescript
|
||||
// Configuration data - rarely changes
|
||||
const CONFIG_TTL = 60 * 60 * 24 * 1000; // 24 hours
|
||||
|
||||
// User data - moderate frequency
|
||||
const USER_TTL = 60 * 60 * 2 * 1000; // 2 hours
|
||||
|
||||
// Survey data - changes moderately
|
||||
const SURVEY_TTL = 60 * 15 * 1000; // 15 minutes
|
||||
|
||||
// Billing data - expensive to compute
|
||||
const BILLING_TTL = 60 * 30 * 1000; // 30 minutes
|
||||
|
||||
// Action classes - infrequent changes
|
||||
const ACTION_CLASS_TTL = 60 * 30 * 1000; // 30 minutes
|
||||
```
|
||||
|
||||
## High-Frequency Endpoint Optimization
|
||||
|
||||
### Performance Patterns for High-Volume Endpoints
|
||||
|
||||
```typescript
|
||||
// ✅ Optimized high-frequency endpoint pattern
|
||||
export const GET = async (request: NextRequest, props: { params: Promise<{ id: string }> }) => {
|
||||
const params = await props.params;
|
||||
|
||||
try {
|
||||
// Simple validation (avoid Zod for high-frequency)
|
||||
if (!params.id || typeof params.id !== 'string') {
|
||||
return responses.badRequestResponse("ID is required", undefined, true);
|
||||
}
|
||||
|
||||
// Single optimized query with caching
|
||||
const data = await getOptimizedData(params.id);
|
||||
|
||||
return responses.successResponse(
|
||||
{
|
||||
data,
|
||||
expiresAt: new Date(Date.now() + CLIENT_TTL * 1000), // SDK cache duration
|
||||
},
|
||||
true,
|
||||
"public, s-maxage=1800, max-age=3600, stale-while-revalidate=1800, stale-if-error=3600"
|
||||
);
|
||||
} catch (err) {
|
||||
// Simplified error handling for performance
|
||||
if (err instanceof ResourceNotFoundError) {
|
||||
return responses.notFoundResponse(err.resourceType, err.resourceId);
|
||||
}
|
||||
logger.error({ error: err, url: request.url }, "Error in high-frequency endpoint");
|
||||
return responses.internalServerErrorResponse(err.message, true);
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
### Avoid These Performance Anti-Patterns
|
||||
|
||||
```typescript
|
||||
// ❌ Avoid for high-frequency endpoints
|
||||
const inputValidation = ZodSchema.safeParse(input); // Too slow
|
||||
const startTime = Date.now(); logger.debug(...); // Logging overhead
|
||||
const { data, revalidateEnvironment } = await get(); // Complex return types
|
||||
```
|
||||
|
||||
### CORS Optimization
|
||||
```typescript
|
||||
// ✅ Balanced CORS caching (not too aggressive)
|
||||
export const OPTIONS = async (): Promise<Response> => {
|
||||
return responses.successResponse(
|
||||
{},
|
||||
true,
|
||||
"public, s-maxage=3600, max-age=3600" // 1 hour balanced approach
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## Redis Cache Migration from Next.js
|
||||
|
||||
### Avoid Legacy Next.js Patterns
|
||||
```typescript
|
||||
// ❌ Old Next.js unstable_cache pattern (avoid)
|
||||
const getCachedData = unstable_cache(
|
||||
async (id) => fetchData(id),
|
||||
['cache-key'],
|
||||
{ tags: ['environment'], revalidate: 900 }
|
||||
);
|
||||
|
||||
// ❌ Don't use revalidateEnvironment flags with Redis
|
||||
return { data, revalidateEnvironment: true }; // This gets cached incorrectly!
|
||||
|
||||
// ✅ New Redis pattern with withCache (TTL in milliseconds)
|
||||
export const getCachedData = (id: string) =>
|
||||
withCache(
|
||||
() => fetchData(id),
|
||||
{
|
||||
key: createCacheKey.environment.data(id),
|
||||
ttl: 60 * 15 * 1000, // 15 minutes in milliseconds
|
||||
}
|
||||
)();
|
||||
```
|
||||
|
||||
### Remove Revalidation Logic
|
||||
When migrating from Next.js `unstable_cache`:
|
||||
- Remove `revalidateEnvironment` or similar flags
|
||||
- Remove tag-based invalidation logic
|
||||
- Use TTL-based expiration instead
|
||||
- Handle one-time updates (like `appSetupCompleted`) directly in cache
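A sketch of the last point, assuming `appSetupCompleted` lives on the Environment model and using the `invalidateCache`/`createCacheKey` utilities referenced in this rule (the function name is illustrative): update the database, then drop the cached entry so the next read rebuilds it, instead of returning a revalidation flag.

```typescript
export const markAppSetupCompleted = async (environmentId: string): Promise<void> => {
  // Persist the one-time flag.
  await prisma.environment.update({
    where: { id: environmentId },
    data: { appSetupCompleted: true },
  });

  // Explicit key-based invalidation; the next request repopulates the cache via withCache().
  await invalidateCache(createCacheKey.environment.state(environmentId));
};
```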
|
||||
|
||||
## Data Layer Optimization
|
||||
|
||||
### Single Query Pattern
|
||||
```typescript
|
||||
// ✅ Optimize with single database query
|
||||
export const getOptimizedEnvironmentData = async (environmentId: string) => {
|
||||
return prisma.environment.findUniqueOrThrow({
|
||||
where: { id: environmentId },
|
||||
include: {
|
||||
project: {
|
||||
select: { id: true, recontactDays: true, /* ... */ }
|
||||
},
|
||||
organization: {
|
||||
select: { id: true, billing: true }
|
||||
},
|
||||
surveys: {
|
||||
where: { status: "inProgress" },
|
||||
select: { id: true, name: true, /* ... */ }
|
||||
},
|
||||
actionClasses: {
|
||||
select: { id: true, name: true, /* ... */ }
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
// ❌ Avoid multiple separate queries
|
||||
const environment = await getEnvironment(id);
|
||||
const organization = await getOrganization(environment.organizationId);
|
||||
const surveys = await getSurveys(id);
|
||||
const actionClasses = await getActionClasses(id);
|
||||
```
|
||||
|
||||
## Invalidation Best Practices
|
||||
|
||||
**Always use explicit key-based invalidation:**
|
||||
|
||||
```typescript
|
||||
// ✅ Clear and debuggable
|
||||
await invalidateCache(createCacheKey.environment.state(environmentId));
|
||||
await invalidateCache([
|
||||
createCacheKey.environment.surveys(environmentId),
|
||||
createCacheKey.environment.actionClasses(environmentId)
|
||||
]);
|
||||
|
||||
// ❌ Avoid complex tag systems
|
||||
await invalidateByTags(["environment", "survey"]); // Don't do this
|
||||
```
|
||||
|
||||
## Critical Performance Targets
|
||||
|
||||
### High-Frequency Endpoint Goals
|
||||
- **Cache hit ratio**: >85%
|
||||
- **Response time P95**: <200ms
|
||||
- **Database load reduction**: >60%
|
||||
- **HTTP cache duration**: 1hr browser, 30min Cloudflare
|
||||
- **SDK refresh interval**: 1 hour with 30min server cache
|
||||
|
||||
### Performance Monitoring
|
||||
- Use **existing elastic cache analytics** for metrics
|
||||
- Log cache errors and warnings (not debug info)
|
||||
- Track database query reduction
|
||||
- Monitor response times for cached endpoints
|
||||
- **Avoid performance logging** in high-frequency endpoints
|
||||
|
||||
## Error Handling Pattern
|
||||
|
||||
Always provide fallback to fresh data on cache errors:
|
||||
|
||||
```typescript
|
||||
try {
|
||||
const cached = await cache.get(key);
|
||||
if (cached) return cached;
|
||||
|
||||
const fresh = await fetchFresh();
|
||||
await cache.set(key, fresh, ttl); // ttl in milliseconds
|
||||
return fresh;
|
||||
} catch (error) {
|
||||
// ✅ Always fallback to fresh data
|
||||
logger.warn("Cache error, fetching fresh", { key, error });
|
||||
return fetchFresh();
|
||||
}
|
||||
```
|
||||
|
||||
## Common Pitfalls to Avoid
|
||||
|
||||
1. **Never use Next.js `unstable_cache()`** - unreliable in production
|
||||
2. **Don't use revalidation flags with Redis** - they get cached incorrectly
|
||||
3. **Avoid Zod validation** for simple parameters in high-frequency endpoints
|
||||
4. **Don't add performance logging** to high-frequency endpoints
|
||||
5. **Coordinate TTLs** between client and server caches
|
||||
6. **Don't over-engineer** with complex tag systems
|
||||
7. **Avoid caching rapidly changing data** (real-time metrics)
|
||||
8. **Always validate cache keys** to prevent collisions
|
||||
9. **Don't add redundant caching layers** - analyze computational overhead first
|
||||
10. **Avoid cache wrapper functions** - add caching directly to expensive operations
|
||||
11. **Don't cache property access or simple transformations** - overhead is negligible
|
||||
12. **Analyze the full call chain** before adding caching to avoid double-caching
|
||||
13. **Remember TTL is in milliseconds** for cache-manager + Keyv stack (not seconds)
|
||||
|
||||
## Monitoring Strategy
|
||||
|
||||
- Use **existing elastic cache analytics** for metrics
|
||||
- Log cache errors and warnings
|
||||
- Track database query reduction
|
||||
- Monitor response times for cached endpoints
|
||||
- **Don't add custom metrics** that duplicate existing monitoring
|
||||
|
||||
## Important Notes
|
||||
|
||||
### TTL Units
|
||||
- **cache-manager + Keyv**: TTL in **milliseconds**
|
||||
- **Direct Redis commands**: TTL in **seconds** (EXPIRE, SETEX) or **milliseconds** (PEXPIRE, PSETEX)
|
||||
- **HTTP cache headers**: TTL in **seconds** (max-age, s-maxage)
|
||||
- **Client SDK**: TTL in **seconds** (expiresAt calculation)
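To keep these units straight, one option (a sketch, not an existing utility) is to define each duration once in seconds and derive the millisecond value where the cache-manager stack needs it:

```typescript
// Single source of truth in seconds, converted where milliseconds are required.
export const ENVIRONMENT_STATE_TTL_S = 60 * 30; // 30 minutes

export const ttl = {
  redisMs: ENVIRONMENT_STATE_TTL_S * 1000, // cache-manager + Keyv expects milliseconds
  httpHeaderS: ENVIRONMENT_STATE_TTL_S, // max-age / s-maxage expect seconds
  clientExpiresAt: new Date(Date.now() + ENVIRONMENT_STATE_TTL_S * 1000), // SDK expiresAt
};
```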
|
||||
@@ -1,41 +0,0 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
# Database Performance & Prisma Best Practices
|
||||
|
||||
## Critical Performance Rules
|
||||
|
||||
### Response Count Queries
|
||||
- **NEVER** use `skip`/`offset` with `prisma.response.count()` - this causes expensive subqueries with OFFSET
|
||||
- Always use only `where` clauses for count operations: `prisma.response.count({ where: { ... } })`
|
||||
- For pagination, separate count queries from data queries
|
||||
- Reference: [apps/web/lib/response/service.ts](mdc:apps/web/lib/response/service.ts) line 654-686
|
||||
|
||||
### Prisma Query Optimization
|
||||
- Use proper indexes defined in [packages/database/schema.prisma](mdc:packages/database/schema.prisma)
|
||||
- Leverage existing indexes: `@@index([surveyId, createdAt])`, `@@index([createdAt])`
|
||||
- Use cursor-based pagination for large datasets instead of offset-based (see the sketch after this list)
|
||||
- Cache frequently accessed data using React Cache and custom cache tags
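The cursor-based pagination mentioned above, as a sketch (the function name and page size are illustrative); it leans on the `(surveyId, createdAt)` index and avoids OFFSET entirely:

```typescript
export const getResponsesPage = async (surveyId: string, cursor?: string, pageSize = 50) => {
  const responses = await prisma.response.findMany({
    where: { surveyId },
    orderBy: { createdAt: "desc" },
    take: pageSize,
    // Resume after the last row of the previous page instead of using an OFFSET.
    ...(cursor ? { cursor: { id: cursor }, skip: 1 } : {}),
  });

  return {
    responses,
    nextCursor: responses.length === pageSize ? responses[responses.length - 1].id : null,
  };
};
```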
|
||||
|
||||
### Date Range Filtering
|
||||
- When filtering by `createdAt`, always use indexed queries
|
||||
- Combine with `surveyId` for optimal performance: `{ surveyId, createdAt: { gte: start, lt: end } }`
|
||||
- Avoid complex WHERE clauses that can't utilize indexes
|
||||
|
||||
### Count vs Data Separation
|
||||
- Always separate count queries from data fetching queries
|
||||
- Use `Promise.all()` to run count and data queries in parallel
|
||||
- Example pattern from [apps/web/modules/api/v2/management/responses/lib/response.ts](mdc:apps/web/modules/api/v2/management/responses/lib/response.ts):
|
||||
```typescript
|
||||
const [responses, totalCount] = await Promise.all([
|
||||
prisma.response.findMany(query),
|
||||
prisma.response.count({ where: whereClause }),
|
||||
]);
|
||||
```
|
||||
|
||||
### Monitoring & Debugging
|
||||
- Monitor AWS RDS Performance Insights for problematic queries
|
||||
- Look for queries with OFFSET in count operations - these indicate performance issues
|
||||
- Use proper error handling with `DatabaseError` for Prisma exceptions
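A sketch of the `DatabaseError` wrapping mentioned above; the import path and function name are assumptions, and `prisma` is referenced the same way as in the other snippets in this rule.

```typescript
import { Prisma } from "@prisma/client";

import { DatabaseError } from "@formbricks/types/errors"; // assumed import path

export const getResponseCountSafe = async (surveyId: string): Promise<number> => {
  try {
    return await prisma.response.count({ where: { surveyId } });
  } catch (error) {
    // Translate known Prisma failures into the app's DatabaseError so callers handle them uniformly.
    if (error instanceof Prisma.PrismaClientKnownRequestError) {
      throw new DatabaseError(error.message);
    }
    throw error;
  }
};
```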
|
||||
@@ -1,101 +0,0 @@
|
||||
---
|
||||
description: >
|
||||
This rule provides comprehensive knowledge about the Formbricks database structure, relationships,
|
||||
and data patterns. It should be used **only when the agent explicitly requests database schema-level
|
||||
details** to support tasks such as: writing/debugging Prisma queries, designing/reviewing data models,
|
||||
investigating multi-tenancy behavior, creating API endpoints, or understanding data relationships.
|
||||
globs: []
|
||||
alwaysApply: agent-requested
|
||||
---
|
||||
# Formbricks Database Schema Reference
|
||||
|
||||
This rule provides a reference to the Formbricks database structure. For the most up-to-date and complete schema definitions, please refer to the schema.prisma file directly.
|
||||
|
||||
## Database Overview
|
||||
|
||||
Formbricks uses PostgreSQL with Prisma ORM. The schema is designed for multi-tenancy with strong data isolation between organizations.
|
||||
|
||||
### Core Hierarchy
|
||||
```
|
||||
Organization
|
||||
└── Project
|
||||
└── Environment (production/development)
|
||||
├── Survey
|
||||
├── Contact
|
||||
├── ActionClass
|
||||
└── Integration
|
||||
```
|
||||
|
||||
## Schema Reference
|
||||
|
||||
For the complete and up-to-date database schema, please refer to:
|
||||
- Main schema: `packages/database/schema.prisma`
|
||||
- JSON type definitions: `packages/database/json-types.ts`
|
||||
|
||||
The schema.prisma file contains all model definitions, relationships, enums, and field types. The json-types.ts file contains TypeScript type definitions for JSON fields.
|
||||
|
||||
## Data Access Patterns
|
||||
|
||||
### Multi-tenancy
|
||||
- All data is scoped by Organization
|
||||
- Environment-level isolation for surveys and contacts
|
||||
- Project-level grouping for related surveys
|
||||
|
||||
### Soft Deletion
|
||||
Some models use soft deletion patterns:
|
||||
- Check `isActive` fields where present
|
||||
- Use proper filtering in queries
|
||||
|
||||
### Cascading Deletes
|
||||
Configured cascade relationships:
|
||||
- Organization deletion cascades to all child entities
|
||||
- Survey deletion removes responses, displays, triggers
|
||||
- Contact deletion removes attributes and responses
|
||||
|
||||
## Common Query Patterns
|
||||
|
||||
### Survey with Responses
|
||||
```typescript
|
||||
// Include response count and latest responses
|
||||
const survey = await prisma.survey.findUnique({
|
||||
where: { id: surveyId },
|
||||
include: {
|
||||
responses: {
|
||||
take: 10,
|
||||
orderBy: { createdAt: 'desc' }
|
||||
},
|
||||
_count: {
|
||||
select: { responses: true }
|
||||
}
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
### Environment Scoping
|
||||
```typescript
|
||||
// Always scope by environment
|
||||
const surveys = await prisma.survey.findMany({
|
||||
where: {
|
||||
environmentId: environmentId,
|
||||
// Additional filters...
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
### Contact with Attributes
|
||||
```typescript
|
||||
const contact = await prisma.contact.findUnique({
|
||||
where: { id: contactId },
|
||||
include: {
|
||||
attributes: {
|
||||
include: {
|
||||
attributeKey: true
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
This schema supports Formbricks' core functionality: multi-tenant survey management, user targeting, response collection, and analysis, all while maintaining strict data isolation and security.
|
||||
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
---
|
||||
description: Guideline for writing end-user facing documentation in the apps/docs folder
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
Follow these instructions and guidelines when asked to write documentation in the `apps/docs` folder.
|
||||
|
||||
Use this structure to write the title and description, pick a matching icon, and insert it at the top of the MDX file:
|
||||
|
||||
---
|
||||
title: "FEATURE NAME"
|
||||
description: "1 concise sentence to describe WHEN the feature is being used and FOR WHAT BENEFIT."
|
||||
icon: "link"
|
||||
---
|
||||
|
||||
- Description: 1 concise sentence to describe WHEN the feature is being used and FOR WHAT BENEFIT.
|
||||
- Make ample use of the Mintlify components you can find here https://mintlify.com/docs/llms.txt
|
||||
- In all headlines, capitalize only the current feature name (in Camel Case) and keep everything else lowercase
|
||||
- If a feature is part of the Enterprise Edition, use this note:
|
||||
|
||||
<Note>
|
||||
FEATURE NAME is part of the @Enterprise Edition.
|
||||
</Note>
|
||||
@@ -1,152 +0,0 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
# EKS & ALB Optimization Guide for Error Reduction
|
||||
|
||||
## Infrastructure Overview
|
||||
|
||||
This project uses AWS EKS with Application Load Balancer (ALB) for the Formbricks application. The infrastructure has been optimized to minimize ELB 502/504 errors through careful configuration of connection handling, health checks, and pod lifecycle management.
|
||||
|
||||
## Key Infrastructure Files
|
||||
|
||||
### Terraform Configuration
|
||||
- **Main Infrastructure**: [infra/terraform/main.tf](mdc:infra/terraform/main.tf) - EKS cluster, VPC, Karpenter, and core AWS resources
|
||||
- **Monitoring**: [infra/terraform/cloudwatch.tf](mdc:infra/terraform/cloudwatch.tf) - CloudWatch alarms for 502/504 error tracking and alerting
|
||||
- **Database**: [infra/terraform/rds.tf](mdc:infra/terraform/rds.tf) - Aurora PostgreSQL configuration
|
||||
|
||||
### Helm Configuration
|
||||
- **Production**: [infra/formbricks-cloud-helm/values.yaml.gotmpl](mdc:infra/formbricks-cloud-helm/values.yaml.gotmpl) - Optimized ALB and pod configurations
|
||||
- **Staging**: [infra/formbricks-cloud-helm/values-staging.yaml.gotmpl](mdc:infra/formbricks-cloud-helm/values-staging.yaml.gotmpl) - Staging environment with spot instances
|
||||
- **Deployment**: [infra/formbricks-cloud-helm/helmfile.yaml.gotmpl](mdc:infra/formbricks-cloud-helm/helmfile.yaml.gotmpl) - Multi-environment Helm releases
|
||||
|
||||
## ALB Optimization Patterns
|
||||
|
||||
### Connection Handling Optimizations
|
||||
```yaml
|
||||
# Key ALB annotations for reducing 502/504 errors
|
||||
alb.ingress.kubernetes.io/load-balancer-attributes: |
|
||||
idle_timeout.timeout_seconds=120,
|
||||
connection_logs.s3.enabled=false,
|
||||
access_logs.s3.enabled=false
|
||||
|
||||
alb.ingress.kubernetes.io/target-group-attributes: |
|
||||
deregistration_delay.timeout_seconds=30,
|
||||
stickiness.enabled=false,
|
||||
load_balancing.algorithm.type=least_outstanding_requests,
|
||||
target_group_health.dns_failover.minimum_healthy_targets.count=1
|
||||
```
|
||||
|
||||
### Health Check Configuration
|
||||
- **Interval**: 15 seconds for faster detection of unhealthy targets
|
||||
- **Timeout**: 5 seconds to prevent false positives
|
||||
- **Thresholds**: 2 healthy, 3 unhealthy for balanced responsiveness
|
||||
- **Path**: `/health` endpoint optimized for < 100ms response time
|
||||
|
||||
## Pod Lifecycle Management
|
||||
|
||||
### Graceful Shutdown Pattern
|
||||
```yaml
|
||||
# PreStop hook to allow connection draining
|
||||
lifecycle:
|
||||
preStop:
|
||||
exec:
|
||||
command: ["/bin/sh", "-c", "sleep 15"]
|
||||
|
||||
# Termination grace period for complete cleanup
|
||||
terminationGracePeriodSeconds: 45
|
||||
```
|
||||
|
||||
### Health Probe Strategy
|
||||
- **Startup Probe**: 5s initial delay, 5s interval, max 60s startup time
|
||||
- **Readiness Probe**: 10s delay, 10s interval for traffic readiness
|
||||
- **Liveness Probe**: 30s delay, 30s interval for container health
|
||||
|
||||
### Rolling Update Configuration
|
||||
```yaml
|
||||
strategy:
|
||||
type: RollingUpdate
|
||||
rollingUpdate:
|
||||
maxUnavailable: 25% # Maintain capacity during updates
|
||||
maxSurge: 50% # Allow faster rollouts
|
||||
```
|
||||
|
||||
## Karpenter Node Management
|
||||
|
||||
### Node Lifecycle Optimization
|
||||
- **Startup Taints**: Prevent traffic during node initialization
|
||||
- **Graceful Shutdown**: 30s grace period for pod eviction
|
||||
- **Consolidation Delay**: 60s to reduce unnecessary churn
|
||||
- **Eviction Policies**: Configured for smooth pod migrations
|
||||
|
||||
### Instance Selection
|
||||
- **Families**: c8g, c7g, m8g, m7g, r8g, r7g (ARM64 Graviton)
|
||||
- **Sizes**: 2, 4, 8 vCPUs for cost optimization
|
||||
- **Bottlerocket AMI**: Enhanced security and performance
|
||||
|
||||
## Monitoring & Alerting
|
||||
|
||||
### Critical ALB Metrics
|
||||
1. **ELB 502 Errors**: Threshold 20 over 5 minutes
|
||||
2. **ELB 504 Errors**: Threshold 15 over 5 minutes
|
||||
3. **Target Connection Errors**: Threshold 50 over 5 minutes
|
||||
4. **4XX Errors**: Threshold 100 over 10 minutes (client issues)
|
||||
|
||||
### Expected Improvements
|
||||
- **60-80% reduction** in ELB 502 errors
|
||||
- **Faster recovery** during pod restarts
|
||||
- **Better connection reuse** efficiency
|
||||
- **Improved autoscaling** responsiveness
|
||||
|
||||
## Deployment Patterns
|
||||
|
||||
### Infrastructure Updates
|
||||
1. **Terraform First**: Apply infrastructure changes via [infra/deploy-improvements.sh](mdc:infra/deploy-improvements.sh)
|
||||
2. **Helm Second**: Deploy application configurations
|
||||
3. **Verification**: Check pod status, endpoints, and ALB health
|
||||
4. **Monitoring**: Watch CloudWatch metrics for 24-48 hours
|
||||
|
||||
### Environment-Specific Configurations
|
||||
- **Production**: On-demand instances, stricter resource limits
|
||||
- **Staging**: Spot instances, rate limiting disabled, relaxed resources
|
||||
|
||||
## Troubleshooting Patterns
|
||||
|
||||
### 502 Error Investigation
|
||||
1. Check pod readiness and health probe status
|
||||
2. Verify ALB target group health
|
||||
3. Review deregistration timing during deployments
|
||||
4. Monitor connection pool utilization
|
||||
|
||||
### 504 Error Analysis
|
||||
1. Check application response times
|
||||
2. Verify timeout configurations (ALB: 120s, App: aligned)
|
||||
3. Review database query performance
|
||||
4. Monitor resource utilization during traffic spikes
|
||||
|
||||
### Connection Error Patterns
|
||||
1. Verify Karpenter node lifecycle timing
|
||||
2. Check pod termination grace periods
|
||||
3. Review ALB connection draining settings
|
||||
4. Monitor cluster autoscaling events
|
||||
|
||||
## Best Practices
|
||||
|
||||
### When Making Changes
|
||||
- **Test in staging first** with same configurations
|
||||
- **Monitor metrics** for 24-48 hours after changes
|
||||
- **Use gradual rollouts** with proper health checks
|
||||
- **Maintain ALB timeout alignment** across all layers
|
||||
|
||||
### Performance Optimization
|
||||
- **Health endpoint** should respond < 100ms consistently
|
||||
- **Connection pooling** aligned with ALB idle timeouts
|
||||
- **Resource requests/limits** tuned for consistent performance
|
||||
- **Graceful shutdown** implemented in application code
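A minimal Node.js sketch of the graceful-shutdown point above, assuming a custom server entrypoint (the real app may wire this differently): stop accepting new connections on SIGTERM, let in-flight requests finish, and exit well before the 45s termination grace period.

```typescript
import { createServer } from "node:http";

const server = createServer((req, res) => {
  // Hand the request off to the application here.
  res.end("ok");
});

server.listen(3000);

// Kubernetes sends SIGTERM after the preStop sleep; drain instead of dying mid-request.
process.on("SIGTERM", () => {
  server.close(() => process.exit(0)); // stop accepting new connections, finish in-flight ones

  // Safety net: force exit before terminationGracePeriodSeconds (45s) elapses.
  setTimeout(() => process.exit(1), 30_000).unref();
});
```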
|
||||
|
||||
### Monitoring Strategy
|
||||
- **Real-time alerts** for error rate spikes
|
||||
- **Trend analysis** for connection patterns
|
||||
- **Capacity planning** based on LCU usage
|
||||
- **4XX pattern analysis** for client behavior insights
|
||||
@@ -1,334 +0,0 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
# Formbricks Architecture & Patterns
|
||||
|
||||
## Monorepo Structure
|
||||
|
||||
### Apps Directory
|
||||
- `apps/web/` - Main Next.js web application
|
||||
- `packages/` - Shared packages and utilities
|
||||
|
||||
### Key Directories in Web App
|
||||
```
|
||||
apps/web/
|
||||
├── app/ # Next.js 13+ app directory
|
||||
│ ├── (app)/ # Main application routes
|
||||
│ ├── (auth)/ # Authentication routes
|
||||
│ ├── api/ # API routes
|
||||
│ └── share/ # Public sharing routes
|
||||
├── components/ # Shared components
|
||||
├── lib/ # Utility functions and services
|
||||
└── modules/ # Feature-specific modules
|
||||
```
|
||||
|
||||
## Routing Patterns
|
||||
|
||||
### App Router Structure
|
||||
The application uses Next.js 13+ app router with route groups:
|
||||
|
||||
```
|
||||
(app)/environments/[environmentId]/
|
||||
├── surveys/[surveyId]/
|
||||
│ ├── (analysis)/ # Analysis views
|
||||
│ │ ├── responses/ # Response management
|
||||
│ │ ├── summary/ # Survey summary
|
||||
│ │ └── hooks/ # Analysis-specific hooks
|
||||
│ ├── edit/ # Survey editing
|
||||
│ └── settings/ # Survey settings
|
||||
```
|
||||
|
||||
### Dynamic Routes
|
||||
- `[environmentId]` - Environment-specific routes
|
||||
- `[surveyId]` - Survey-specific routes
|
||||
- `[sharingKey]` - Public sharing routes
|
||||
|
||||
## Service Layer Pattern
|
||||
|
||||
### Service Organization
|
||||
Services are organized by domain in `apps/web/lib/`:
|
||||
|
||||
```typescript
|
||||
// Example: Response service
|
||||
// apps/web/lib/response/service.ts
|
||||
export const getResponseCountAction = async ({
|
||||
surveyId,
|
||||
filterCriteria,
|
||||
}: {
|
||||
surveyId: string;
|
||||
filterCriteria: any;
|
||||
}) => {
|
||||
// Service implementation
|
||||
};
|
||||
```
|
||||
|
||||
### Action Pattern
|
||||
Server actions follow a consistent pattern:
|
||||
|
||||
```typescript
|
||||
// Action wrapper for service calls
|
||||
export const getResponseCountAction = async (params) => {
|
||||
try {
|
||||
const result = await responseService.getCount(params);
|
||||
return { data: result };
|
||||
} catch (error) {
|
||||
return { error: error.message };
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
## Context Patterns

### Provider Structure
Context providers follow a consistent pattern:

```typescript
// Provider component
export const ResponseFilterProvider = ({ children }: { children: React.ReactNode }) => {
  const [selectedFilter, setSelectedFilter] = useState(defaultFilter);

  const value = {
    selectedFilter,
    setSelectedFilter,
    // ... other state and methods
  };

  return (
    <ResponseFilterContext.Provider value={value}>
      {children}
    </ResponseFilterContext.Provider>
  );
};

// Hook for consuming context
export const useResponseFilter = () => {
  const context = useContext(ResponseFilterContext);
  if (!context) {
    throw new Error("useResponseFilter must be used within ResponseFilterProvider");
  }
  return context;
};
```

### Context Composition
Multiple contexts are often composed together:

```typescript
// Layout component with multiple providers
export default function AnalysisLayout({ children }: { children: React.ReactNode }) {
  return (
    <ResponseFilterProvider>
      <ResponseCountProvider>
        {children}
      </ResponseCountProvider>
    </ResponseFilterProvider>
  );
}
```

## Component Patterns

### Page Components
Page components are located in the app directory and follow this pattern:

```typescript
// apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/responses/page.tsx
export default function ResponsesPage() {
  return (
    <div>
      <ResponsesTable />
      <ResponsesPagination />
    </div>
  );
}
```

### Component Organization
- **Pages** - Route components in app directory
- **Components** - Reusable UI components
- **Modules** - Feature-specific components and logic

### Shared Components
Common components are in `apps/web/components/`:
- UI components (buttons, inputs, modals)
- Layout components (headers, sidebars)
- Data display components (tables, charts)

## Hook Patterns

### Custom Hook Structure
Custom hooks follow consistent patterns:

```typescript
export const useResponseCount = ({
  survey,
  initialCount,
}: {
  survey: TSurvey;
  initialCount?: number;
}) => {
  const [responseCount, setResponseCount] = useState(initialCount ?? 0);
  const [isLoading, setIsLoading] = useState(false);

  // Hook logic...

  return {
    responseCount,
    isLoading,
    refetch,
  };
};
```

### Hook Dependencies
- Use context hooks for shared state
- Implement proper cleanup with AbortController (see the sketch after this list)
- Optimize dependency arrays to prevent unnecessary re-renders

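A sketch of how the elided hook logic above could implement that cleanup is shown below; the `signal` option on `getResponseCountAction` and the `{ data }` result shape are assumptions made for illustration:

```typescript
// Hypothetical fetch effect with AbortController cleanup inside useResponseCount.
useEffect(() => {
  const controller = new AbortController();
  setIsLoading(true);

  getResponseCountAction({ surveyId: survey.id, signal: controller.signal })
    .then((result) => {
      // Ignore responses that arrive after the request was aborted.
      if (!controller.signal.aborted && result?.data !== undefined) {
        setResponseCount(result.data);
      }
    })
    .finally(() => {
      if (!controller.signal.aborted) setIsLoading(false);
    });

  // Abort the in-flight request when dependencies change or the component unmounts.
  return () => controller.abort();
}, [survey.id]);
```
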
## Data Fetching Patterns

### Server Actions
The app uses Next.js server actions for data fetching:

```typescript
// Server action
export async function getResponsesAction(params: GetResponsesParams) {
  const responses = await getResponses(params);
  return { data: responses };
}

// Client usage
const { data } = await getResponsesAction(params);
```

### Error Handling
Consistent error handling across the application:

```typescript
try {
  const result = await apiCall();
  return { data: result };
} catch (error) {
  console.error("Operation failed:", error);
  return { error: error.message };
}
```

## Type Safety

### Type Organization
Types are organized in packages:
- `@formbricks/types` - Shared type definitions
- Local types in component/hook files

### Common Types
```typescript
import { TSurvey } from "@formbricks/types/surveys/types";
import { TResponse } from "@formbricks/types/responses";
import { TEnvironment } from "@formbricks/types/environment";
```

## State Management

### Local State
- Use `useState` for component-specific state
- Use `useReducer` for complex state logic
- Use refs for mutable values that don't trigger re-renders

### Global State
- React Context for feature-specific shared state
- URL state for filters and pagination (see the sketch after this list)
- Server state through server actions

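As an example of URL state, a small client hook could keep a filter in the query string using the app-router navigation hooks; the `status` parameter and the hook name are hypothetical:

```typescript
"use client";

import { usePathname, useRouter, useSearchParams } from "next/navigation";

// Hypothetical helper: read a "status" filter from the URL and update it in place.
export const useStatusFilter = () => {
  const router = useRouter();
  const pathname = usePathname();
  const searchParams = useSearchParams();

  const status = searchParams.get("status") ?? "all";

  const setStatus = (next: string) => {
    const params = new URLSearchParams(searchParams.toString());
    params.set("status", next);
    router.replace(`${pathname}?${params.toString()}`);
  };

  return { status, setStatus };
};
```
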
## Performance Considerations

### Code Splitting
- Dynamic imports for heavy components (see the sketch after this list)
- Route-based code splitting with app router
- Lazy loading for non-critical features

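For example, a heavy client-only component could be deferred with `next/dynamic`; the `ResponsesChart` component and its import path are hypothetical:

```typescript
import dynamic from "next/dynamic";

// Hypothetical heavy component, excluded from the initial bundle and rendered client-side only.
const ResponsesChart = dynamic(() => import("@/components/ResponsesChart"), {
  ssr: false,
  loading: () => <p>Loading chart…</p>,
});

export const ResponsesOverview = () => {
  return <ResponsesChart />;
};
```
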
### Caching Strategy
- Server-side caching for database queries (see the sketch after this list)
- Client-side caching with React Query (where applicable)
- Static generation for public pages

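A minimal sketch of server-side query caching with Next.js' `unstable_cache` follows; the query function, cache key, tag, and revalidation window are all assumptions rather than the project's actual setup:

```typescript
import { unstable_cache } from "next/cache";

// Stand-in for the real database query (hypothetical).
declare function fetchSurveyFromDb(surveyId: string): Promise<unknown>;

// Results are reused for 60 seconds and can be invalidated via the "surveys" tag.
export const getCachedSurvey = unstable_cache(
  async (surveyId: string) => fetchSurveyFromDb(surveyId),
  ["getCachedSurvey"],
  { revalidate: 60, tags: ["surveys"] }
);
```
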
## Testing Strategy

### Test Organization
```
component/
├── Component.tsx
├── Component.test.tsx
└── hooks/
    ├── useHook.ts
    └── useHook.test.tsx
```

### Test Patterns
- Unit tests for utilities and services
- Integration tests for components with context
- Hook tests with proper mocking

## Build & Deployment

### Build Process
- TypeScript compilation
- Next.js build optimization
- Asset optimization and bundling

### Environment Configuration
- Environment-specific configurations
- Feature flags for gradual rollouts
- Database connection management

## Security Patterns

### Authentication
- Session-based authentication
- Environment-based access control
- API route protection (see the sketch after this list)

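A sketch of API route protection with a next-auth session check is shown below; the `authOptions` import path and the response shapes are assumptions for illustration:

```typescript
import { getServerSession } from "next-auth";
import { NextResponse } from "next/server";

// Hypothetical path — adjust to where the NextAuth options actually live.
import { authOptions } from "@/modules/auth/lib/authOptions";

export async function GET() {
  const session = await getServerSession(authOptions);
  if (!session) {
    // Reject unauthenticated callers before touching any data.
    return NextResponse.json({ error: "Not authenticated" }, { status: 401 });
  }
  return NextResponse.json({ data: { user: session.user?.email } });
}
```
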
### Data Validation
- Input validation on both client and server (see the sketch after this list)
- Type-safe API contracts
- Sanitization of user inputs

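As an illustration, input validation with a Zod schema might look like the sketch below; the schema itself is hypothetical and not taken from `@formbricks/types`:

```typescript
import { z } from "zod";

// Hypothetical input schema for creating a response.
const ZCreateResponseInput = z.object({
  surveyId: z.string().min(1),
  finished: z.boolean().default(false),
  data: z.record(z.string(), z.unknown()),
});

export const parseCreateResponseInput = (input: unknown) => {
  const result = ZCreateResponseInput.safeParse(input);
  if (!result.success) {
    // Return readable issues instead of throwing raw Zod errors.
    return { error: result.error.flatten() };
  }
  return { data: result.data };
};
```
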
## Monitoring & Observability

### Error Tracking
- Client-side error boundaries (see the sketch after this list)
- Server-side error logging
- Performance monitoring

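In the app router, a client-side error boundary can be expressed as a route-level `error.tsx`; the copy and placement below are placeholders:

```typescript
"use client";

// Hypothetical route-level error boundary (e.g., app/(app)/environments/[environmentId]/error.tsx).
export default function RouteError({
  error,
  reset,
}: {
  error: Error & { digest?: string };
  reset: () => void;
}) {
  return (
    <div>
      <p>Something went wrong: {error.message}</p>
      {/* reset() re-renders the segment and retries the failed render */}
      <button onClick={() => reset()}>Try again</button>
    </div>
  );
}
```
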
### Analytics
- User interaction tracking
- Performance metrics
- Database query monitoring

## Best Practices Summary

### Code Organization
- ✅ Follow the established directory structure
- ✅ Use consistent naming conventions
- ✅ Separate concerns (UI, logic, data)
- ✅ Keep components focused and small

### Performance
- ✅ Implement proper loading states
- ✅ Use AbortController for async operations
- ✅ Optimize database queries
- ✅ Implement proper caching strategies

### Type Safety
- ✅ Use TypeScript throughout
- ✅ Define proper interfaces for props
- ✅ Use type guards for runtime validation
- ✅ Leverage shared type packages

### Testing
- ✅ Write tests for critical functionality
- ✅ Mock external dependencies properly
- ✅ Test error scenarios and edge cases
- ✅ Maintain good test coverage

@@ -1,5 +0,0 @@
---
description:
globs:
alwaysApply: false
---

@@ -1,52 +0,0 @@
---
description:
globs:
alwaysApply: false
---
# React Context & Provider Patterns

## Context Provider Best Practices

### Provider Implementation
- Use TypeScript interfaces for provider props with optional `initialCount` for testing
- Implement proper cleanup in `useEffect` to avoid React hooks warnings
- Reference: [apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/components/ResponseCountProvider.tsx](mdc:apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/components/ResponseCountProvider.tsx)

### Cleanup Pattern for Refs
```typescript
useEffect(() => {
  const currentPendingRequests = pendingRequests.current;
  const currentAbortController = abortController.current;

  return () => {
    if (currentAbortController) {
      currentAbortController.abort();
    }
    currentPendingRequests.clear();
  };
}, []);
```

### Testing Context Providers
- Always wrap components using context in the provider during tests
- Use `initialCount` prop for predictable test scenarios
- Mock context dependencies like `useParams`, `useResponseFilter`
- Example from [apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/summary/components/SurveyAnalysisCTA.test.tsx](mdc:apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/summary/components/SurveyAnalysisCTA.test.tsx):

```typescript
render(
  <ResponseCountProvider survey={dummySurvey} initialCount={5}>
    <ComponentUnderTest />
  </ResponseCountProvider>
);
```

### Required Mocks for Context Testing
- Mock `next/navigation` with `useParams` returning environment and survey IDs (see the sketch after this list)
- Mock response filter context and actions
- Mock API actions that the provider depends on

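A minimal sketch of those mocks with Vitest is shown below; the IDs and the response filter module path are placeholders, not the real import paths:

```typescript
import { vi } from "vitest";

// Mock next/navigation so the provider sees stable route params.
vi.mock("next/navigation", () => ({
  useParams: vi.fn(() => ({
    environmentId: "env-123", // placeholder IDs
    surveyId: "survey-123",
  })),
}));

// Hypothetical module path — point this at wherever useResponseFilter actually lives.
vi.mock("@/path/to/responseFilterContext", () => ({
  useResponseFilter: vi.fn(() => ({
    selectedFilter: { filter: [], onlyComplete: false },
    setSelectedFilter: vi.fn(),
  })),
}));
```
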
### Context Hook Usage
- Create custom hooks like `useResponseCountContext()` for consuming context
- Provide meaningful error messages when context is used outside the provider
- Use context for shared state that multiple components need to access

@@ -1,5 +0,0 @@
---
description:
globs:
alwaysApply: false
---

@@ -1,327 +0,0 @@
---
description:
globs:
alwaysApply: false
---
# Testing Patterns & Best Practices

## Running Tests

### Test Commands
From the **root directory** (formbricks/):
- `npm test` - Run all tests across all packages (recommended for CI/full testing)
- `npm run test:coverage` - Run all tests with coverage reports
- `npm run test:e2e` - Run end-to-end tests with Playwright

From the **apps/web directory** (apps/web/):
- `npm run test` - Run only web app tests (fastest for development)
- `npm run test:coverage` - Run web app tests with coverage
- `npm run test -- <file-pattern>` - Run specific test files

### Examples
```bash
# Run all tests from root (takes ~3 minutes, runs 790 test files with 5334+ tests)
npm test

# Run specific test file from apps/web (fastest for development)
npm run test -- modules/cache/lib/service.test.ts

# Run tests matching pattern from apps/web
npm run test -- modules/ee/license-check/lib/license.test.ts

# Run with coverage from root
npm run test:coverage

# Run specific test with watch mode from apps/web (for development)
npm run test -- --watch modules/cache/lib/service.test.ts

# Run tests for a specific directory from apps/web
npm run test -- modules/cache/
```

### Performance Tips
- **For development**: Use `apps/web` directory commands to run only web app tests
- **For CI/validation**: Use root directory commands to run all packages
- **For specific features**: Use file patterns to target specific test files
- **For debugging**: Use `--watch` mode for continuous testing during development

### Test File Organization
- Place test files in the **same directory** as the source file
- Use `.test.ts` for utility/service tests (Node environment)
- Use `.test.tsx` for React component tests (jsdom environment)

## Test File Naming & Environment

### File Extensions
- Use `.test.tsx` for React component/hook tests (runs in jsdom environment)
- Use `.test.ts` for utility/service tests (runs in Node environment)
- The vitest config uses `environmentMatchGlobs` to automatically set jsdom for `.tsx` files (see the sketch after this list)

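A sketch of what that Vitest setting could look like; the globs are assumptions, not the project's actual configuration:

```typescript
import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    environment: "node",
    // Component/hook tests (.test.tsx) run in jsdom; everything else stays in Node.
    environmentMatchGlobs: [
      ["**/*.test.tsx", "jsdom"],
      ["**/*.test.ts", "node"],
    ],
  },
});
```
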
### Test Structure
```typescript
// Import the mocked functions first
import { useHook } from "@/path/to/hook";
import { serviceFunction } from "@/path/to/service";
import { renderHook, waitFor } from "@testing-library/react";
import { beforeEach, describe, expect, test, vi } from "vitest";

// Mock dependencies
vi.mock("@/path/to/hook", () => ({
  useHook: vi.fn(),
}));

describe("ComponentName", () => {
  beforeEach(() => {
    vi.clearAllMocks();
    // Setup default mocks
  });

  test("descriptive test name", async () => {
    // Test implementation
  });
});
```

## React Hook Testing

### Context Mocking
When testing hooks that use React Context:
```typescript
vi.mocked(useResponseFilter).mockReturnValue({
  selectedFilter: {
    filter: [],
    onlyComplete: false,
  },
  setSelectedFilter: vi.fn(),
  selectedOptions: {
    questionOptions: [],
    questionFilterOptions: [],
  },
  setSelectedOptions: vi.fn(),
  dateRange: { from: new Date(), to: new Date() },
  setDateRange: vi.fn(),
  resetState: vi.fn(),
});
```

### Testing Async Hooks
- Always use `waitFor` for async operations
- Test both loading and completed states
- Verify API calls with correct parameters

```typescript
test("fetches data on mount", async () => {
  const { result } = renderHook(() => useHook());

  expect(result.current.isLoading).toBe(true);

  await waitFor(() => {
    expect(result.current.isLoading).toBe(false);
  });

  expect(result.current.data).toBe(expectedData);
  expect(vi.mocked(apiCall)).toHaveBeenCalledWith(expectedParams);
});
```

### Testing Hook Dependencies
To test useEffect dependencies, ensure mocks return different values:
```typescript
// First render
mockGetFormattedFilters.mockReturnValue(mockFilters);

// Change dependency and trigger re-render
const newMockFilters = { ...mockFilters, finished: true };
mockGetFormattedFilters.mockReturnValue(newMockFilters);
rerender();
```

## Performance Testing

### Race Condition Testing
Test AbortController implementation:
```typescript
test("cancels previous request when new request is made", async () => {
  let resolveFirst: (value: any) => void;
  let resolveSecond: (value: any) => void;

  const firstPromise = new Promise((resolve) => {
    resolveFirst = resolve;
  });
  const secondPromise = new Promise((resolve) => {
    resolveSecond = resolve;
  });

  vi.mocked(apiCall)
    .mockReturnValueOnce(firstPromise as any)
    .mockReturnValueOnce(secondPromise as any);

  const { result } = renderHook(() => useHook());

  // Trigger second request
  result.current.refetch();

  // Resolve in order - first should be cancelled
  resolveFirst!({ data: 100 });
  resolveSecond!({ data: 200 });

  await waitFor(() => {
    expect(result.current.isLoading).toBe(false);
  });

  // Should have result from second request
  expect(result.current.data).toBe(200);
});
```

### Cleanup Testing
```typescript
test("cleans up on unmount", () => {
  const abortSpy = vi.spyOn(AbortController.prototype, "abort");

  const { unmount } = renderHook(() => useHook());
  unmount();

  expect(abortSpy).toHaveBeenCalled();
  abortSpy.mockRestore();
});
```

## Error Handling Testing

### API Error Testing
```typescript
test("handles API errors gracefully", async () => {
  const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});
  vi.mocked(apiCall).mockRejectedValue(new Error("API Error"));

  const { result } = renderHook(() => useHook());

  await waitFor(() => {
    expect(result.current.isLoading).toBe(false);
  });

  expect(consoleSpy).toHaveBeenCalledWith("Error message:", expect.any(Error));
  expect(result.current.data).toBe(fallbackValue);

  consoleSpy.mockRestore();
});
```

### Cancelled Request Testing
```typescript
test("does not update state for cancelled requests", async () => {
  const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});

  let rejectFirst: (error: any) => void;
  const firstPromise = new Promise((_, reject) => {
    rejectFirst = reject;
  });

  vi.mocked(apiCall)
    .mockReturnValueOnce(firstPromise as any)
    .mockResolvedValueOnce({ data: 42 });

  const { result } = renderHook(() => useHook());
  result.current.refetch();

  const abortError = new Error("Request cancelled");
  rejectFirst!(abortError);

  await waitFor(() => {
    expect(result.current.isLoading).toBe(false);
  });

  // Should not log error for cancelled request
  expect(consoleSpy).not.toHaveBeenCalled();
  consoleSpy.mockRestore();
});
```

## Type Safety in Tests

### Mock Type Assertions
Use type assertions for edge cases:
```typescript
vi.mocked(apiCall).mockResolvedValue({
  data: null as any, // For testing null handling
});

vi.mocked(apiCall).mockResolvedValue({
  data: undefined as any, // For testing undefined handling
});
```

### Proper Mock Typing
Ensure mocks match the actual interface:
```typescript
const mockSurvey: TSurvey = {
  id: "survey-123",
  name: "Test Survey",
  // ... other required properties
} as unknown as TSurvey; // Use when partial mocking is needed
```

## Common Test Patterns

### Testing State Changes
```typescript
test("updates state correctly", async () => {
  const { result } = renderHook(() => useHook());

  // Initial state
  expect(result.current.value).toBe(initialValue);

  // Trigger change
  result.current.updateValue(newValue);

  // Verify change
  expect(result.current.value).toBe(newValue);
});
```

### Testing Multiple Scenarios
```typescript
test("handles different modes", async () => {
  // Test regular mode
  vi.mocked(useParams).mockReturnValue({ surveyId: "123" });
  const { rerender } = renderHook(() => useHook());

  await waitFor(() => {
    expect(vi.mocked(regularApi)).toHaveBeenCalled();
  });

  // Test sharing mode
  vi.mocked(useParams).mockReturnValue({
    surveyId: "123",
    sharingKey: "share-123"
  });
  rerender();

  await waitFor(() => {
    expect(vi.mocked(sharingApi)).toHaveBeenCalled();
  });
});
```

## Test Organization

### Comprehensive Test Coverage
For hooks, ensure you test:
- ✅ Initialization (with/without initial values)
- ✅ Data fetching (success/error cases)
- ✅ State updates and refetching
- ✅ Dependency changes triggering effects
- ✅ Manual actions (refetch, reset)
- ✅ Race condition prevention
- ✅ Cleanup on unmount
- ✅ Mode switching (if applicable)
- ✅ Edge cases (null/undefined data)

### Test Naming
Use descriptive test names that explain the scenario:
- ✅ "initializes with initial count"
- ✅ "fetches response count on mount for regular survey"
- ✅ "cancels previous request when new request is made"
- ❌ "test hook"
- ❌ "it works"

@@ -1,7 +0,0 @@
---
description: Whenever the user asks to write or update a test file for .tsx or .ts files.
globs:
alwaysApply: false
---
When writing tests, use the rules in [copilot-instructions.md](mdc:.github/copilot-instructions.md).
After writing the tests, run them and verify that they all pass. Fix any issues and rerun the tests until they do.

29
.env.example
@@ -9,8 +9,12 @@
|
||||
WEBAPP_URL=http://localhost:3000
|
||||
|
||||
# Required for next-auth. Should be the same as WEBAPP_URL
|
||||
# If your pplication uses a custom base path, specify the route to the API endpoint in full, e.g. NEXTAUTH_URL=https://example.com/custom-route/api/auth
|
||||
NEXTAUTH_URL=http://localhost:3000
|
||||
|
||||
# Can be used to deploy the application under a sub-path of a domain. This can only be set at build time
|
||||
# BASE_PATH=
|
||||
|
||||
# Encryption keys
|
||||
# Please set both for now, we will change this in the future
|
||||
|
||||
@@ -62,9 +66,6 @@ SMTP_PASSWORD=smtpPassword
|
||||
|
||||
# Uncomment the variables you would like to use and customize the values.
|
||||
|
||||
# Custom local storage path for file uploads
|
||||
#UPLOADS_DIR=
|
||||
|
||||
##############
|
||||
# S3 STORAGE #
|
||||
##############
|
||||
@@ -99,8 +100,6 @@ PASSWORD_RESET_DISABLED=1
|
||||
# Organization Invite. Disable the ability for invited users to create an account.
|
||||
# INVITE_DISABLED=1
|
||||
|
||||
# Docker cron jobs. Disable the supercronic cron jobs in the Docker image (useful for cluster setups).
|
||||
# DOCKER_CRON_ENABLED=1
|
||||
|
||||
##########
|
||||
# Other #
|
||||
@@ -169,6 +168,9 @@ SLACK_CLIENT_SECRET=
|
||||
# Enterprise License Key
|
||||
ENTERPRISE_LICENSE_KEY=
|
||||
|
||||
# Internal Environment (production, staging) - used for internal staging environment
|
||||
# ENVIRONMENT=production
|
||||
|
||||
# Automatically assign new users to a specific organization and role within that organization
|
||||
# Insert an existing organization id or generate a valid CUID for a new one at https://www.getuniqueid.com/cuid (e.g. cjld2cjxh0000qzrmn831i7rn)
|
||||
# (Role Management is an Enterprise feature)
|
||||
@@ -189,17 +191,14 @@ ENTERPRISE_LICENSE_KEY=
|
||||
UNSPLASH_ACCESS_KEY=
|
||||
|
||||
# The below is used for Next Caching (uses In-Memory from Next Cache if not provided)
|
||||
# You can also add more configuration to Redis using the redis.conf file in the root directory
|
||||
REDIS_URL=redis://localhost:6379
|
||||
|
||||
# The below is used for Rate Limiting (uses In-Memory LRU Cache if not provided) (You can use a service like Webdis for this)
|
||||
# REDIS_HTTP_URL:
|
||||
|
||||
# The below is used for Rate Limiting for management API
|
||||
UNKEY_ROOT_KEY=
|
||||
|
||||
# INTERCOM_APP_ID=
|
||||
# INTERCOM_SECRET_KEY=
|
||||
# Chatwoot
|
||||
# CHATWOOT_BASE_URL=
|
||||
# CHATWOOT_WEBSITE_TOKEN=
|
||||
|
||||
# Enable Prometheus metrics
|
||||
# PROMETHEUS_ENABLED=
|
||||
@@ -210,6 +209,8 @@ UNKEY_ROOT_KEY=
|
||||
# The SENTRY_AUTH_TOKEN variable is picked up by the Sentry Build Plugin.
|
||||
# It's used automatically by Sentry during the build for authentication when uploading source maps.
|
||||
# SENTRY_AUTH_TOKEN=
|
||||
# The SENTRY_ENVIRONMENT is the environment which the error will belong to in the Sentry dashboard
|
||||
# SENTRY_ENVIRONMENT=
|
||||
|
||||
# Configure the minimum role for user management from UI(owner, manager, disabled)
|
||||
# USER_MANAGEMENT_MINIMUM_ROLE="manager"
|
||||
@@ -217,7 +218,11 @@ UNKEY_ROOT_KEY=
|
||||
# Configure the maximum age for the session in seconds. Default is 86400 (24 hours)
|
||||
# SESSION_MAX_AGE=86400
|
||||
|
||||
# Audit logs options. Requires REDIS_URL env varibale. Default 0.
|
||||
# Audit logs options. Default 0.
|
||||
# AUDIT_LOG_ENABLED=0
|
||||
# If the ip should be added in the log or not. Default 0
|
||||
# AUDIT_LOG_GET_USER_IP=0
|
||||
|
||||
|
||||
# Lingo.dev API key for translation generation
|
||||
LINGODOTDEV_API_KEY=your_api_key_here
|
||||
13
.eslintrc.cjs
Normal file
@@ -0,0 +1,13 @@
|
||||
module.exports = {
|
||||
root: true,
|
||||
ignorePatterns: ["node_modules/", "dist/", "coverage/"],
|
||||
overrides: [
|
||||
{
|
||||
files: ["packages/cache/**/*.{ts,js}"],
|
||||
extends: ["@formbricks/eslint-config/library.js"],
|
||||
parserOptions: {
|
||||
project: "./packages/cache/tsconfig.json",
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
1
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
@@ -1,6 +1,7 @@
|
||||
name: Bug report
|
||||
description: "Found a bug? Please fill out the sections below. \U0001F44D"
|
||||
type: bug
|
||||
projects: "formbricks/8"
|
||||
labels: ["bug"]
|
||||
body:
|
||||
- type: textarea
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,4 +1,4 @@
|
||||
blank_issues_enabled: false
|
||||
blank_issues_enabled: true
|
||||
contact_links:
|
||||
- name: Questions
|
||||
url: https://github.com/formbricks/formbricks/discussions
|
||||
|
||||
1
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
@@ -1,6 +1,7 @@
|
||||
name: Feature request
|
||||
description: "Suggest an idea for this project \U0001F680"
|
||||
type: feature
|
||||
projects: "formbricks/21"
|
||||
body:
|
||||
- type: textarea
|
||||
id: problem-description
|
||||
|
||||
11
.github/ISSUE_TEMPLATE/task.yml
vendored
@@ -1,11 +0,0 @@
|
||||
name: Task (internal)
|
||||
description: "Template for creating a task. Used by the Formbricks Team only \U0001f4e5"
|
||||
type: task
|
||||
body:
|
||||
- type: textarea
|
||||
id: task-summary
|
||||
attributes:
|
||||
label: Task description
|
||||
description: A clear detailed-rich description of the task.
|
||||
validations:
|
||||
required: true
|
||||
319
.github/actions/build-and-push-docker/action.yml
vendored
Normal file
@@ -0,0 +1,319 @@
|
||||
name: Build and Push Docker Image
|
||||
description: |
|
||||
Unified Docker build and push action for both ECR and GHCR registries.
|
||||
|
||||
Supports:
|
||||
- ECR builds for Formbricks Cloud deployment
|
||||
- GHCR builds for community self-hosting
|
||||
- Automatic version resolution and tagging
|
||||
- Conditional signing and deployment tags
|
||||
|
||||
inputs:
|
||||
registry_type:
|
||||
description: "Registry type: 'ecr' or 'ghcr'"
|
||||
required: true
|
||||
|
||||
# Version input
|
||||
version:
|
||||
description: "Explicit version (SemVer only, e.g., 1.2.3). If provided, this version is used directly. If empty, version is auto-generated from branch name."
|
||||
required: false
|
||||
experimental_mode:
|
||||
description: "Enable experimental timestamped versions"
|
||||
required: false
|
||||
default: "false"
|
||||
|
||||
# ECR specific inputs
|
||||
ecr_registry:
|
||||
description: "ECR registry URL (required for ECR builds)"
|
||||
required: false
|
||||
ecr_repository:
|
||||
description: "ECR repository name (required for ECR builds)"
|
||||
required: false
|
||||
ecr_region:
|
||||
description: "ECR AWS region (required for ECR builds)"
|
||||
required: false
|
||||
aws_role_arn:
|
||||
description: "AWS role ARN for ECR authentication (required for ECR builds)"
|
||||
required: false
|
||||
|
||||
# GHCR specific inputs
|
||||
ghcr_image_name:
|
||||
description: "GHCR image name (required for GHCR builds)"
|
||||
required: false
|
||||
|
||||
# Deployment options
|
||||
deploy_production:
|
||||
description: "Tag image for production deployment"
|
||||
required: false
|
||||
default: "false"
|
||||
deploy_staging:
|
||||
description: "Tag image for staging deployment"
|
||||
required: false
|
||||
default: "false"
|
||||
is_prerelease:
|
||||
description: "Whether this is a prerelease (auto-tags for staging/production)"
|
||||
required: false
|
||||
default: "false"
|
||||
make_latest:
|
||||
description: "Whether to tag as latest/production (from GitHub release 'Set as the latest release' option)"
|
||||
required: false
|
||||
default: "false"
|
||||
|
||||
# Build options
|
||||
dockerfile:
|
||||
description: "Path to Dockerfile"
|
||||
required: false
|
||||
default: "apps/web/Dockerfile"
|
||||
context:
|
||||
description: "Build context"
|
||||
required: false
|
||||
default: "."
|
||||
|
||||
outputs:
|
||||
image_tag:
|
||||
description: "Resolved image tag used for the build"
|
||||
value: ${{ steps.version.outputs.version }}
|
||||
registry_tags:
|
||||
description: "Complete registry tags that were pushed"
|
||||
value: ${{ steps.build.outputs.tags }}
|
||||
image_digest:
|
||||
description: "Image digest from the build"
|
||||
value: ${{ steps.build.outputs.digest }}
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Validate inputs
|
||||
shell: bash
|
||||
env:
|
||||
REGISTRY_TYPE: ${{ inputs.registry_type }}
|
||||
ECR_REGISTRY: ${{ inputs.ecr_registry }}
|
||||
ECR_REPOSITORY: ${{ inputs.ecr_repository }}
|
||||
ECR_REGION: ${{ inputs.ecr_region }}
|
||||
AWS_ROLE_ARN: ${{ inputs.aws_role_arn }}
|
||||
GHCR_IMAGE_NAME: ${{ inputs.ghcr_image_name }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
if [[ "$REGISTRY_TYPE" != "ecr" && "$REGISTRY_TYPE" != "ghcr" ]]; then
|
||||
echo "ERROR: registry_type must be 'ecr' or 'ghcr', got: $REGISTRY_TYPE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$REGISTRY_TYPE" == "ecr" ]]; then
|
||||
if [[ -z "$ECR_REGISTRY" || -z "$ECR_REPOSITORY" || -z "$ECR_REGION" || -z "$AWS_ROLE_ARN" ]]; then
|
||||
echo "ERROR: ECR builds require ecr_registry, ecr_repository, ecr_region, and aws_role_arn"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$REGISTRY_TYPE" == "ghcr" ]]; then
|
||||
if [[ -z "$GHCR_IMAGE_NAME" ]]; then
|
||||
echo "ERROR: GHCR builds require ghcr_image_name"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "SUCCESS: Input validation passed for $REGISTRY_TYPE build"
|
||||
|
||||
- name: Resolve Docker version
|
||||
id: version
|
||||
uses: ./.github/actions/resolve-docker-version
|
||||
with:
|
||||
version: ${{ inputs.version }}
|
||||
current_branch: ${{ github.ref_name }}
|
||||
experimental_mode: ${{ inputs.experimental_mode }}
|
||||
|
||||
- name: Update package.json version
|
||||
uses: ./.github/actions/update-package-version
|
||||
with:
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
|
||||
- name: Configure AWS credentials (ECR only)
|
||||
if: ${{ inputs.registry_type == 'ecr' }}
|
||||
uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a # v4.2.0
|
||||
with:
|
||||
role-to-assume: ${{ inputs.aws_role_arn }}
|
||||
aws-region: ${{ inputs.ecr_region }}
|
||||
|
||||
- name: Log in to Amazon ECR (ECR only)
|
||||
if: ${{ inputs.registry_type == 'ecr' }}
|
||||
uses: aws-actions/amazon-ecr-login@062b18b96a7aff071d4dc91bc00c4c1a7945b076 # v2.0.1
|
||||
|
||||
- name: Set up Docker build tools
|
||||
uses: ./.github/actions/docker-build-setup
|
||||
with:
|
||||
registry: ${{ inputs.registry_type == 'ghcr' && 'ghcr.io' || '' }}
|
||||
setup_cosign: ${{ inputs.registry_type == 'ghcr' && 'true' || 'false' }}
|
||||
skip_login_on_pr: ${{ inputs.registry_type == 'ghcr' && 'true' || 'false' }}
|
||||
|
||||
- name: Build ECR tag list
|
||||
if: ${{ inputs.registry_type == 'ecr' }}
|
||||
id: ecr-tags
|
||||
shell: bash
|
||||
env:
|
||||
IMAGE_TAG: ${{ steps.version.outputs.version }}
|
||||
ECR_REGISTRY: ${{ inputs.ecr_registry }}
|
||||
ECR_REPOSITORY: ${{ inputs.ecr_repository }}
|
||||
DEPLOY_PRODUCTION: ${{ inputs.deploy_production }}
|
||||
DEPLOY_STAGING: ${{ inputs.deploy_staging }}
|
||||
IS_PRERELEASE: ${{ inputs.is_prerelease }}
|
||||
MAKE_LATEST: ${{ inputs.make_latest }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Start with the base image tag
|
||||
TAGS="${ECR_REGISTRY}/${ECR_REPOSITORY}:${IMAGE_TAG}"
|
||||
|
||||
# Handle automatic tagging based on release type
|
||||
if [[ "${IS_PRERELEASE}" == "true" ]]; then
|
||||
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:staging"
|
||||
echo "Adding staging tag for prerelease"
|
||||
elif [[ "${IS_PRERELEASE}" == "false" && "${MAKE_LATEST}" == "true" ]]; then
|
||||
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:production"
|
||||
echo "Adding production tag for stable release marked as latest"
|
||||
fi
|
||||
|
||||
# Handle manual deployment overrides
|
||||
if [[ "${DEPLOY_PRODUCTION}" == "true" ]]; then
|
||||
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:production"
|
||||
echo "Adding production tag (manual override)"
|
||||
fi
|
||||
if [[ "${DEPLOY_STAGING}" == "true" ]]; then
|
||||
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:staging"
|
||||
echo "Adding staging tag (manual override)"
|
||||
fi
|
||||
|
||||
echo "ECR tags generated:"
|
||||
echo -e "${TAGS}"
|
||||
|
||||
{
|
||||
echo "tags<<EOF"
|
||||
echo -e "${TAGS}"
|
||||
echo "EOF"
|
||||
} >> "${GITHUB_OUTPUT}"
|
||||
|
||||
- name: Generate additional GHCR tags for releases
|
||||
if: ${{ inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'false' && (github.event_name == 'workflow_call' || github.event_name == 'release' || github.event_name == 'workflow_dispatch') }}
|
||||
id: ghcr-extra-tags
|
||||
shell: bash
|
||||
env:
|
||||
VERSION: ${{ steps.version.outputs.version }}
|
||||
IMAGE_NAME: ${{ inputs.ghcr_image_name }}
|
||||
IS_PRERELEASE: ${{ inputs.is_prerelease }}
|
||||
MAKE_LATEST: ${{ inputs.make_latest }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Start with base version tag
|
||||
TAGS="ghcr.io/${IMAGE_NAME}:${VERSION}"
|
||||
|
||||
# For proper SemVer releases, add major.minor and major tags
|
||||
if [[ "${VERSION}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||
# Extract major and minor versions
|
||||
MAJOR=$(echo "${VERSION}" | cut -d. -f1)
|
||||
MINOR=$(echo "${VERSION}" | cut -d. -f2)
|
||||
|
||||
TAGS="${TAGS}\nghcr.io/${IMAGE_NAME}:${MAJOR}.${MINOR}"
|
||||
TAGS="${TAGS}\nghcr.io/${IMAGE_NAME}:${MAJOR}"
|
||||
|
||||
echo "Added SemVer tags: ${MAJOR}.${MINOR}, ${MAJOR}"
|
||||
fi
|
||||
|
||||
# Add latest tag for stable releases marked as latest
|
||||
if [[ "${IS_PRERELEASE}" == "false" && "${MAKE_LATEST}" == "true" ]]; then
|
||||
TAGS="${TAGS}\nghcr.io/${IMAGE_NAME}:latest"
|
||||
echo "Added latest tag for stable release marked as latest"
|
||||
fi
|
||||
|
||||
echo "Generated GHCR tags:"
|
||||
echo -e "${TAGS}"
|
||||
|
||||
# Debug: Show what will be passed to Docker build
|
||||
echo "DEBUG: Tags for Docker build step:"
|
||||
echo -e "${TAGS}"
|
||||
|
||||
{
|
||||
echo "tags<<EOF"
|
||||
echo -e "${TAGS}"
|
||||
echo "EOF"
|
||||
} >> "${GITHUB_OUTPUT}"
|
||||
|
||||
- name: Build GHCR metadata (experimental)
|
||||
if: ${{ inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'true' }}
|
||||
id: ghcr-meta-experimental
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
with:
|
||||
images: ghcr.io/${{ inputs.ghcr_image_name }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=raw,value=${{ steps.version.outputs.version }}
|
||||
|
||||
- name: Debug Docker build tags
|
||||
shell: bash
|
||||
run: |
|
||||
echo "=== DEBUG: Docker Build Configuration ==="
|
||||
echo "Registry Type: ${{ inputs.registry_type }}"
|
||||
echo "Experimental Mode: ${{ inputs.experimental_mode }}"
|
||||
echo "Event Name: ${{ github.event_name }}"
|
||||
echo "Is Prerelease: ${{ inputs.is_prerelease }}"
|
||||
echo "Make Latest: ${{ inputs.make_latest }}"
|
||||
echo "Version: ${{ steps.version.outputs.version }}"
|
||||
|
||||
if [[ "${{ inputs.registry_type }}" == "ecr" ]]; then
|
||||
echo "ECR Tags: ${{ steps.ecr-tags.outputs.tags }}"
|
||||
elif [[ "${{ inputs.experimental_mode }}" == "true" ]]; then
|
||||
echo "GHCR Experimental Tags: ${{ steps.ghcr-meta-experimental.outputs.tags }}"
|
||||
else
|
||||
echo "GHCR Extra Tags: ${{ steps.ghcr-extra-tags.outputs.tags }}"
|
||||
fi
|
||||
|
||||
- name: Build and push Docker image
|
||||
id: build
|
||||
uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
|
||||
with:
|
||||
project: tw0fqmsx3c
|
||||
token: ${{ env.DEPOT_PROJECT_TOKEN }}
|
||||
context: ${{ inputs.context }}
|
||||
file: ${{ inputs.dockerfile }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ inputs.registry_type == 'ecr' && steps.ecr-tags.outputs.tags || (inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'true' && steps.ghcr-meta-experimental.outputs.tags) || (inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'false' && steps.ghcr-extra-tags.outputs.tags) || (inputs.registry_type == 'ghcr' && format('ghcr.io/{0}:{1}', inputs.ghcr_image_name, steps.version.outputs.version)) || (inputs.registry_type == 'ecr' && format('{0}/{1}:{2}', inputs.ecr_registry, inputs.ecr_repository, steps.version.outputs.version)) }}
|
||||
labels: ${{ inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'true' && steps.ghcr-meta-experimental.outputs.labels || '' }}
|
||||
secrets: |
|
||||
database_url=${{ env.DUMMY_DATABASE_URL }}
|
||||
encryption_key=${{ env.DUMMY_ENCRYPTION_KEY }}
|
||||
redis_url=${{ env.DUMMY_REDIS_URL }}
|
||||
sentry_auth_token=${{ env.SENTRY_AUTH_TOKEN }}
|
||||
env:
|
||||
DEPOT_PROJECT_TOKEN: ${{ env.DEPOT_PROJECT_TOKEN }}
|
||||
DUMMY_DATABASE_URL: ${{ env.DUMMY_DATABASE_URL }}
|
||||
DUMMY_ENCRYPTION_KEY: ${{ env.DUMMY_ENCRYPTION_KEY }}
|
||||
DUMMY_REDIS_URL: ${{ env.DUMMY_REDIS_URL }}
|
||||
SENTRY_AUTH_TOKEN: ${{ env.SENTRY_AUTH_TOKEN }}
|
||||
|
||||
- name: Sign GHCR image (GHCR only)
|
||||
if: ${{ inputs.registry_type == 'ghcr' && (github.event_name == 'workflow_call' || github.event_name == 'release' || github.event_name == 'workflow_dispatch') }}
|
||||
shell: bash
|
||||
env:
|
||||
TAGS: ${{ inputs.experimental_mode == 'true' && steps.ghcr-meta-experimental.outputs.tags || steps.ghcr-extra-tags.outputs.tags }}
|
||||
DIGEST: ${{ steps.build.outputs.digest }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "${TAGS}" | xargs -I {} cosign sign --yes "{}@${DIGEST}"
|
||||
|
||||
- name: Output build summary
|
||||
shell: bash
|
||||
env:
|
||||
REGISTRY_TYPE: ${{ inputs.registry_type }}
|
||||
IMAGE_TAG: ${{ steps.version.outputs.version }}
|
||||
VERSION_SOURCE: ${{ steps.version.outputs.source }}
|
||||
run: |
|
||||
echo "SUCCESS: Built and pushed Docker image to $REGISTRY_TYPE"
|
||||
echo "Image Tag: $IMAGE_TAG (source: $VERSION_SOURCE)"
|
||||
if [[ "$REGISTRY_TYPE" == "ecr" ]]; then
|
||||
echo "ECR Registry: ${{ inputs.ecr_registry }}"
|
||||
echo "ECR Repository: ${{ inputs.ecr_repository }}"
|
||||
else
|
||||
echo "GHCR Image: ghcr.io/${{ inputs.ghcr_image_name }}"
|
||||
fi
|
||||
4
.github/actions/cache-build-web/action.yml
vendored
@@ -62,10 +62,12 @@ runs:
|
||||
shell: bash
|
||||
|
||||
- name: Fill ENCRYPTION_KEY, ENTERPRISE_LICENSE_KEY and E2E_TESTING in .env
|
||||
env:
|
||||
E2E_TESTING_MODE: ${{ inputs.e2e_testing_mode }}
|
||||
run: |
|
||||
RANDOM_KEY=$(openssl rand -hex 32)
|
||||
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
|
||||
echo "E2E_TESTING=${{ inputs.e2e_testing_mode }}" >> .env
|
||||
echo "E2E_TESTING=$E2E_TESTING_MODE" >> .env
|
||||
shell: bash
|
||||
|
||||
- run: |
|
||||
|
||||
106
.github/actions/docker-build-setup/action.yml
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
name: Docker Build Setup
|
||||
description: |
|
||||
Sets up common Docker build tools and authentication with security validation.
|
||||
|
||||
Security Features:
|
||||
- Registry URL validation
|
||||
- Input sanitization
|
||||
- Conditional setup based on event type
|
||||
- Post-setup verification
|
||||
|
||||
Supports Depot CLI, Cosign signing, and Docker registry authentication.
|
||||
|
||||
inputs:
|
||||
registry:
|
||||
description: "Docker registry hostname to login to (e.g., ghcr.io, registry.example.com:5000). No paths allowed."
|
||||
required: false
|
||||
default: "ghcr.io"
|
||||
setup_cosign:
|
||||
description: "Whether to install cosign for image signing"
|
||||
required: false
|
||||
default: "true"
|
||||
skip_login_on_pr:
|
||||
description: "Whether to skip registry login on pull requests"
|
||||
required: false
|
||||
default: "true"
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Validate inputs
|
||||
shell: bash
|
||||
env:
|
||||
REGISTRY: ${{ inputs.registry }}
|
||||
SETUP_COSIGN: ${{ inputs.setup_cosign }}
|
||||
SKIP_LOGIN_ON_PR: ${{ inputs.skip_login_on_pr }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Security: Validate registry input - must be hostname[:port] only, no paths
|
||||
# Allow empty registry for cases where login is handled externally (e.g., ECR)
|
||||
if [[ -n "$REGISTRY" ]]; then
|
||||
if [[ "$REGISTRY" =~ / ]]; then
|
||||
echo "ERROR: Invalid registry format: $REGISTRY"
|
||||
echo "Registry must be host[:port] with no path (e.g., 'ghcr.io' or 'registry.example.com:5000')"
|
||||
echo "Path components like 'ghcr.io/org' are not allowed as they break docker login"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate hostname with optional port format
|
||||
if [[ ! "$REGISTRY" =~ ^[a-zA-Z0-9.-]+(\:[0-9]+)?$ ]]; then
|
||||
echo "ERROR: Invalid registry hostname format: $REGISTRY"
|
||||
echo "Registry must be a valid hostname optionally with port (e.g., 'ghcr.io' or 'registry.example.com:5000')"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Validate boolean inputs
|
||||
if [[ "$SETUP_COSIGN" != "true" && "$SETUP_COSIGN" != "false" ]]; then
|
||||
echo "ERROR: setup_cosign must be 'true' or 'false', got: $SETUP_COSIGN"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$SKIP_LOGIN_ON_PR" != "true" && "$SKIP_LOGIN_ON_PR" != "false" ]]; then
|
||||
echo "ERROR: skip_login_on_pr must be 'true' or 'false', got: $SKIP_LOGIN_ON_PR"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "SUCCESS: Input validation passed"
|
||||
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
|
||||
|
||||
- name: Install cosign
|
||||
# Install cosign when requested AND when we might actually sign images
|
||||
# (i.e., non-PR contexts or when we login on PRs)
|
||||
if: ${{ inputs.setup_cosign == 'true' && (inputs.skip_login_on_pr == 'false' || github.event_name != 'pull_request') }}
|
||||
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
|
||||
|
||||
- name: Log into registry
|
||||
if: ${{ inputs.registry != '' && (inputs.skip_login_on_pr == 'false' || github.event_name != 'pull_request') }}
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ${{ inputs.registry }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ github.token }}
|
||||
|
||||
- name: Verify setup completion
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Verify Depot CLI is available
|
||||
if ! command -v depot >/dev/null 2>&1; then
|
||||
echo "ERROR: Depot CLI not found in PATH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Verify cosign if it should be installed (same conditions as install step)
|
||||
if [[ "${{ inputs.setup_cosign }}" == "true" ]] && [[ "${{ inputs.skip_login_on_pr }}" == "false" || "${{ github.event_name }}" != "pull_request" ]]; then
|
||||
if ! command -v cosign >/dev/null 2>&1; then
|
||||
echo "ERROR: Cosign not found in PATH despite being requested"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "SUCCESS: Docker build setup completed successfully"
|
||||
192
.github/actions/resolve-docker-version/action.yml
vendored
Normal file
@@ -0,0 +1,192 @@
|
||||
name: Resolve Docker Version
|
||||
description: |
|
||||
Resolves and validates Docker-compatible SemVer versions for container builds with comprehensive security.
|
||||
|
||||
Security Features:
|
||||
- Command injection protection
|
||||
- Input sanitization and validation
|
||||
- Docker tag character restrictions
|
||||
- Length limits and boundary checks
|
||||
- Safe branch name handling
|
||||
|
||||
Supports multiple modes: release, manual override, branch auto-detection, and experimental timestamped versions.
|
||||
|
||||
inputs:
|
||||
version:
|
||||
description: "Explicit version (SemVer only, e.g., 1.2.3-beta). If provided, this version is used directly. If empty, version is auto-generated from branch name."
|
||||
required: false
|
||||
current_branch:
|
||||
description: "Current branch name for auto-detection"
|
||||
required: true
|
||||
experimental_mode:
|
||||
description: "Enable experimental mode with timestamp-based versions"
|
||||
required: false
|
||||
default: "false"
|
||||
|
||||
outputs:
|
||||
version:
|
||||
description: "Resolved Docker-compatible SemVer version"
|
||||
value: ${{ steps.resolve.outputs.version }}
|
||||
source:
|
||||
description: "Source of version (release|override|branch)"
|
||||
value: ${{ steps.resolve.outputs.source }}
|
||||
normalized:
|
||||
description: "Whether the version was normalized (true/false)"
|
||||
value: ${{ steps.resolve.outputs.normalized }}
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Resolve and validate Docker version
|
||||
id: resolve
|
||||
shell: bash
|
||||
env:
|
||||
EXPLICIT_VERSION: ${{ inputs.version }}
|
||||
CURRENT_BRANCH: ${{ inputs.current_branch }}
|
||||
EXPERIMENTAL_MODE: ${{ inputs.experimental_mode }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Function to validate SemVer format (Docker-compatible, no '+' build metadata)
|
||||
validate_semver() {
|
||||
local version="$1"
|
||||
local context="$2"
|
||||
|
||||
if [[ ! "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]]; then
|
||||
echo "ERROR: Invalid $context format. Must be semver without build metadata (e.g., 1.2.3, 1.2.3-alpha)"
|
||||
echo "Provided: $version"
|
||||
echo "Note: Docker tags cannot contain '+' characters. Use prerelease identifiers instead."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to generate branch-based version
|
||||
generate_branch_version() {
|
||||
local branch="$1"
|
||||
local use_timestamp="${2:-true}"
|
||||
local timestamp
|
||||
|
||||
if [[ "$use_timestamp" == "true" ]]; then
|
||||
timestamp=$(date +%s)
|
||||
else
|
||||
timestamp=""
|
||||
fi
|
||||
|
||||
# Sanitize branch name for Docker compatibility
|
||||
local sanitized_branch=$(echo "$branch" | sed 's/[^a-zA-Z0-9.-]/-/g' | sed 's/--*/-/g' | sed 's/^-\|-$//g')
|
||||
|
||||
# Additional safety: truncate if too long (reserve space for prefix and timestamp)
|
||||
if (( ${#sanitized_branch} > 80 )); then
|
||||
sanitized_branch="${sanitized_branch:0:80}"
|
||||
echo "INFO: Branch name truncated for Docker compatibility" >&2
|
||||
fi
|
||||
local version
|
||||
|
||||
# Generate version based on branch name (unified approach)
|
||||
# All branches get alpha versions with sanitized branch name
|
||||
if [[ -n "$timestamp" ]]; then
|
||||
version="0.0.0-alpha-$sanitized_branch-$timestamp"
|
||||
echo "INFO: Branch '$branch' detected - alpha version: $version" >&2
|
||||
else
|
||||
version="0.0.0-alpha-$sanitized_branch"
|
||||
echo "INFO: Branch '$branch' detected - alpha version: $version" >&2
|
||||
fi
|
||||
|
||||
echo "$version"
|
||||
}
|
||||
|
||||
|
||||
# Input validation and sanitization
|
||||
if [[ -z "$CURRENT_BRANCH" ]]; then
|
||||
echo "ERROR: current_branch input is required"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Security: Validate inputs to prevent command injection
|
||||
# Use grep to check for dangerous characters (more reliable than bash regex)
|
||||
validate_input() {
|
||||
local input="$1"
|
||||
local name="$2"
|
||||
|
||||
# Check for dangerous characters using grep
|
||||
if echo "$input" | grep -q '[;|&`$(){}\\[:space:]]'; then
|
||||
echo "ERROR: $name contains potentially dangerous characters: $input"
|
||||
echo "Input should only contain letters, numbers, hyphens, underscores, dots, and forward slashes"
|
||||
return 1
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
# Validate current branch
|
||||
if ! validate_input "$CURRENT_BRANCH" "Branch name"; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate explicit version if provided
|
||||
if [[ -n "$EXPLICIT_VERSION" ]] && ! validate_input "$EXPLICIT_VERSION" "Explicit version"; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Main resolution logic (ultra-simplified)
|
||||
NORMALIZED="false"
|
||||
|
||||
if [[ -n "$EXPLICIT_VERSION" ]]; then
|
||||
# Use provided explicit version (from either workflow_call or manual input)
|
||||
validate_semver "$EXPLICIT_VERSION" "explicit version"
|
||||
|
||||
# Normalize to lowercase for Docker/ECR compatibility
|
||||
RESOLVED_VERSION="${EXPLICIT_VERSION,,}"
|
||||
if [[ "$EXPLICIT_VERSION" != "$RESOLVED_VERSION" ]]; then
|
||||
NORMALIZED="true"
|
||||
echo "INFO: Original version contained uppercase characters, normalized: $EXPLICIT_VERSION -> $RESOLVED_VERSION"
|
||||
fi
|
||||
|
||||
SOURCE="explicit"
|
||||
echo "INFO: Using explicit version: $RESOLVED_VERSION"
|
||||
|
||||
else
|
||||
# Auto-generate version from branch name
|
||||
if [[ "$EXPERIMENTAL_MODE" == "true" ]]; then
|
||||
# Use timestamped version generation
|
||||
echo "INFO: Experimental mode: generating timestamped version from branch: $CURRENT_BRANCH"
|
||||
RESOLVED_VERSION=$(generate_branch_version "$CURRENT_BRANCH" "true")
|
||||
SOURCE="experimental"
|
||||
else
|
||||
# Standard branch version (no timestamp)
|
||||
echo "INFO: Auto-detecting version from branch: $CURRENT_BRANCH"
|
||||
RESOLVED_VERSION=$(generate_branch_version "$CURRENT_BRANCH" "false")
|
||||
SOURCE="branch"
|
||||
fi
|
||||
echo "Generated version: $RESOLVED_VERSION"
|
||||
fi
|
||||
|
||||
# Final validation - ensure result is valid Docker tag
|
||||
if [[ -z "$RESOLVED_VERSION" ]]; then
|
||||
echo "ERROR: Failed to resolve version"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if (( ${#RESOLVED_VERSION} > 128 )); then
|
||||
echo "ERROR: Version must be at most 128 characters (Docker limitation)"
|
||||
echo "Generated version: $RESOLVED_VERSION (${#RESOLVED_VERSION} chars)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ ! "$RESOLVED_VERSION" =~ ^[a-z0-9._-]+$ ]]; then
|
||||
echo "ERROR: Version contains invalid characters for Docker tags"
|
||||
echo "Version: $RESOLVED_VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$RESOLVED_VERSION" =~ ^[.-] || "$RESOLVED_VERSION" =~ [.-]$ ]]; then
|
||||
echo "ERROR: Version must not start or end with '.' or '-'"
|
||||
echo "Version: $RESOLVED_VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Output results
|
||||
echo "SUCCESS: Resolved Docker version: $RESOLVED_VERSION (source: $SOURCE)"
|
||||
echo "version=$RESOLVED_VERSION" >> $GITHUB_OUTPUT
|
||||
echo "source=$SOURCE" >> $GITHUB_OUTPUT
|
||||
echo "normalized=$NORMALIZED" >> $GITHUB_OUTPUT
|
||||
160
.github/actions/update-package-version/action.yml
vendored
Normal file
@@ -0,0 +1,160 @@
|
||||
name: Update Package Version
|
||||
description: |
|
||||
Safely updates package.json version with comprehensive validation and atomic operations.
|
||||
|
||||
Security Features:
|
||||
- Path traversal protection
|
||||
- SemVer validation with length limits
|
||||
- Atomic file operations with backup/recovery
|
||||
- JSON validation before applying changes
|
||||
|
||||
This action is designed to be secure by default and prevent common attack vectors.
|
||||
|
||||
inputs:
|
||||
version:
|
||||
description: "Version to set in package.json (must be valid SemVer)"
|
||||
required: true
|
||||
package_path:
|
||||
description: "Path to package.json file"
|
||||
required: false
|
||||
default: "./apps/web/package.json"
|
||||
|
||||
outputs:
|
||||
updated_version:
|
||||
description: "The version that was actually set in package.json"
|
||||
value: ${{ steps.update.outputs.updated_version }}
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Update and verify package.json version
|
||||
id: update
|
||||
shell: bash
|
||||
env:
|
||||
VERSION: ${{ inputs.version }}
|
||||
PACKAGE_PATH: ${{ inputs.package_path }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Validate inputs
if [[ -z "$VERSION" ]]; then
  echo "ERROR: version input is required"
  exit 1
fi

# Security: Validate package_path to prevent path traversal attacks
# Only allow paths within the workspace and must end with package.json
if [[ "$PACKAGE_PATH" =~ \.\./|^/|^~ ]]; then
  echo "ERROR: Invalid package path - path traversal detected: $PACKAGE_PATH"
  echo "Package path must be relative to workspace root and cannot contain '../', start with '/', or '~'"
  exit 1
fi

if [[ ! "$PACKAGE_PATH" =~ package\.json$ ]]; then
  echo "ERROR: Package path must end with 'package.json': $PACKAGE_PATH"
  exit 1
fi

# Resolve to absolute path within workspace for additional security
WORKSPACE_ROOT="${GITHUB_WORKSPACE:-$(pwd)}"

# Use realpath to resolve both paths and handle symlinks properly
WORKSPACE_ROOT=$(realpath "$WORKSPACE_ROOT")
RESOLVED_PATH=$(realpath "${WORKSPACE_ROOT}/${PACKAGE_PATH}")

# Ensure WORKSPACE_ROOT has a trailing slash for proper prefix matching
WORKSPACE_ROOT="${WORKSPACE_ROOT}/"

# Use shell string matching to ensure RESOLVED_PATH is within workspace
# This is more secure than regex and handles edge cases properly
if [[ "$RESOLVED_PATH" != "$WORKSPACE_ROOT"* ]]; then
  echo "ERROR: Resolved path is outside workspace: $RESOLVED_PATH"
  echo "Workspace root: $WORKSPACE_ROOT"
  exit 1
fi

if [[ ! -f "$RESOLVED_PATH" ]]; then
  echo "ERROR: package.json not found at: $RESOLVED_PATH"
  exit 1
fi

# Use resolved path for operations
PACKAGE_PATH="$RESOLVED_PATH"

# Validate SemVer format with additional security checks
if [[ ${#VERSION} -gt 128 ]]; then
  echo "ERROR: Version string too long (${#VERSION} chars, max 128): $VERSION"
  exit 1
fi

if [[ ! "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]]; then
  echo "ERROR: Invalid SemVer format: $VERSION"
  echo "Expected format: MAJOR.MINOR.PATCH[-PRERELEASE]"
  echo "Only alphanumeric characters, dots, and hyphens allowed in prerelease"
  exit 1
fi

# Additional validation: Check for reasonable version component sizes
# Extract base version (MAJOR.MINOR.PATCH) without prerelease/build metadata
if [[ "$VERSION" =~ ^([0-9]+\.[0-9]+\.[0-9]+) ]]; then
  BASE_VERSION="${BASH_REMATCH[1]}"
else
  echo "ERROR: Could not extract base version from: $VERSION"
  exit 1
fi

# Split version components safely
IFS='.' read -ra VERSION_PARTS <<< "$BASE_VERSION"

# Validate component sizes (should have exactly 3 parts due to regex above)
if (( ${VERSION_PARTS[0]} > 999 || ${VERSION_PARTS[1]} > 999 || ${VERSION_PARTS[2]} > 999 )); then
  echo "ERROR: Version components too large (max 999 each): $VERSION"
  echo "Components: ${VERSION_PARTS[0]}.${VERSION_PARTS[1]}.${VERSION_PARTS[2]}"
  exit 1
fi

echo "Updating package.json version to: $VERSION"

# Create backup for atomic operations
BACKUP_PATH="${PACKAGE_PATH}.backup.$$"
cp "$PACKAGE_PATH" "$BACKUP_PATH"

# Use jq to safely update the version field with error handling
if ! jq --arg version "$VERSION" '.version = $version' "$PACKAGE_PATH" > "${PACKAGE_PATH}.tmp"; then
  echo "ERROR: jq failed to process package.json"
  rm -f "${PACKAGE_PATH}.tmp" "$BACKUP_PATH"
  exit 1
fi

# Validate the generated JSON before applying changes
if ! jq empty "${PACKAGE_PATH}.tmp" 2>/dev/null; then
  echo "ERROR: Generated invalid JSON"
  rm -f "${PACKAGE_PATH}.tmp" "$BACKUP_PATH"
  exit 1
fi

# Atomic move operation
if ! mv "${PACKAGE_PATH}.tmp" "$PACKAGE_PATH"; then
  echo "ERROR: Failed to update package.json"
  # Restore backup
  mv "$BACKUP_PATH" "$PACKAGE_PATH"
  exit 1
fi

# Verify the update was successful
UPDATED_VERSION=$(jq -r '.version' "$PACKAGE_PATH" 2>/dev/null)

if [[ "$UPDATED_VERSION" != "$VERSION" ]]; then
  echo "ERROR: Version update failed!"
  echo "Expected: $VERSION"
  echo "Actual: $UPDATED_VERSION"
  # Restore backup
  mv "$BACKUP_PATH" "$PACKAGE_PATH"
  exit 1
fi

# Clean up backup on success
rm -f "$BACKUP_PATH"

echo "SUCCESS: Updated package.json version to: $UPDATED_VERSION"
echo "updated_version=$UPDATED_VERSION" >> $GITHUB_OUTPUT
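For reference, the core jq edit in the step above can be exercised by hand before relying on the action (a minimal sketch with a placeholder version and a package.json in the current directory; none of these values come from the workflow itself):

  VERSION="1.2.3"
  jq --arg version "$VERSION" '.version = $version' package.json > package.json.tmp
  jq -r '.version' package.json.tmp   # should print 1.2.3 if the update worked
  mv package.json.tmp package.json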
32 .github/copilot-instructions.md (vendored)
@@ -1,32 +0,0 @@
# Testing Instructions

When generating test files inside the "/app/web" path, follow these rules:

- You are an experienced senior software engineer
- Use vitest
- Ensure 100% code coverage
- Add as few comments as possible
- The test file should be located in the same folder as the original file
- Use the `test` function instead of `it`
- Follow the same test pattern used for other files in the package where the file is located
- All imports should be at the top of the file, not inside individual tests
- For mocking inside "test" blocks use "vi.mocked"
- If the file is located in the "packages/survey" path, use "@testing-library/preact" instead of "@testing-library/react"
- Don't mock functions that are already mocked in the "apps/web/vitestSetup.ts" file
- When using "screen.getByText", check for the tolgee string if it is being used in the file.
- The types for mocked variables can be found in the "packages/types" path. Be sure that every imported type exists before using it. Don't create types that are not already in the codebase.
- When mocking data, check that the properties you add are part of the type of the object being mocked. Only specify known properties; don't use properties that are not part of the type.

If it's a test for a ".tsx" file, follow these extra instructions:

- Add this code inside the "describe" block and before any test:

  afterEach(() => {
    cleanup();
  });

- The "afterEach" function should only have the "cleanup()" line inside it, and "afterEach" should be added to the "vitest" imports.
- For click events, import userEvent from "@testing-library/user-event"
- Mock other components that would make the test more complex, as long as mocking them wouldn't make the test flaky. It's ok to leave basic and simple components unmocked.
- You don't need to mock @tolgee/react
- Use "import "@testing-library/jest-dom/vitest";"
82 .github/workflows/apply-issue-labels-to-pr.yml (vendored)
@@ -1,82 +0,0 @@
name: "Apply issue labels to PR"
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- opened
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
label_on_pr:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: none
|
||||
issues: read
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Apply labels from linked issue to PR
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
async function getLinkedIssues(owner, repo, prNumber) {
|
||||
const query = `query GetLinkedIssues($owner: String!, $repo: String!, $prNumber: Int!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
pullRequest(number: $prNumber) {
|
||||
closingIssuesReferences(first: 10) {
|
||||
nodes {
|
||||
number
|
||||
labels(first: 10) {
|
||||
nodes {
|
||||
name
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}`;
|
||||
|
||||
const variables = {
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
prNumber: prNumber,
|
||||
};
|
||||
|
||||
const result = await github.graphql(query, variables);
|
||||
return result.repository.pullRequest.closingIssuesReferences.nodes;
|
||||
}
|
||||
|
||||
const pr = context.payload.pull_request;
|
||||
const linkedIssues = await getLinkedIssues(
|
||||
context.repo.owner,
|
||||
context.repo.repo,
|
||||
pr.number
|
||||
);
|
||||
|
||||
const labelsToAdd = new Set();
|
||||
for (const issue of linkedIssues) {
|
||||
if (issue.labels && issue.labels.nodes) {
|
||||
for (const label of issue.labels.nodes) {
|
||||
labelsToAdd.add(label.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (labelsToAdd.size) {
|
||||
await github.rest.issues.addLabels({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: pr.number,
|
||||
labels: Array.from(labelsToAdd),
|
||||
});
|
||||
}
|
||||
94 .github/workflows/build-and-push-ecr.yml (vendored, new file)
@@ -0,0 +1,94 @@
name: Build Cloud Deployment Images
|
||||
|
||||
# This workflow builds Formbricks Docker images for ECR deployment:
|
||||
# - workflow_call: Used by releases with explicit SemVer versions
|
||||
# - workflow_dispatch: Auto-detects version from current branch or uses override
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version_override:
|
||||
description: "Override version (SemVer only, e.g., 1.2.3). Leave empty to auto-detect from branch."
|
||||
required: false
|
||||
type: string
|
||||
deploy_production:
|
||||
description: "Tag image for production deployment"
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
deploy_staging:
|
||||
description: "Tag image for staging deployment"
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
workflow_call:
|
||||
inputs:
|
||||
image_tag:
|
||||
description: "Image tag to push (required for workflow_call)"
|
||||
required: true
|
||||
type: string
|
||||
IS_PRERELEASE:
|
||||
description: "Whether this is a prerelease (auto-tags for staging/production)"
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
MAKE_LATEST:
|
||||
description: "Whether to tag for production (from GitHub release 'Set as the latest release' option)"
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
outputs:
|
||||
IMAGE_TAG:
|
||||
description: "Normalized image tag used for the build"
|
||||
value: ${{ jobs.build-and-push.outputs.IMAGE_TAG }}
|
||||
TAGS:
|
||||
description: "Newline-separated list of ECR tags pushed"
|
||||
value: ${{ jobs.build-and-push.outputs.TAGS }}
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
|
||||
env:
|
||||
ECR_REGION: ${{ vars.ECR_REGION }}
|
||||
# ECR settings are sourced from repository/environment variables for portability across envs/forks
|
||||
ECR_REGISTRY: ${{ vars.ECR_REGISTRY }}
|
||||
ECR_REPOSITORY: ${{ vars.ECR_REPOSITORY }}
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
name: Build and Push
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
outputs:
|
||||
IMAGE_TAG: ${{ steps.build.outputs.image_tag }}
|
||||
TAGS: ${{ steps.build.outputs.registry_tags }}
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Build and push cloud deployment image
|
||||
id: build
|
||||
uses: ./.github/actions/build-and-push-docker
|
||||
with:
|
||||
registry_type: "ecr"
|
||||
ecr_registry: ${{ env.ECR_REGISTRY }}
|
||||
ecr_repository: ${{ env.ECR_REPOSITORY }}
|
||||
ecr_region: ${{ env.ECR_REGION }}
|
||||
aws_role_arn: ${{ secrets.AWS_ECR_PUSH_ROLE_ARN }}
|
||||
version: ${{ inputs.version_override || inputs.image_tag }}
|
||||
deploy_production: ${{ inputs.deploy_production }}
|
||||
deploy_staging: ${{ inputs.deploy_staging }}
|
||||
is_prerelease: ${{ inputs.IS_PRERELEASE }}
|
||||
make_latest: ${{ inputs.MAKE_LATEST }}
|
||||
env:
|
||||
DEPOT_PROJECT_TOKEN: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
DUMMY_DATABASE_URL: ${{ secrets.DUMMY_DATABASE_URL }}
|
||||
DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
DUMMY_REDIS_URL: ${{ secrets.DUMMY_REDIS_URL }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
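The workflow_dispatch inputs above can also be supplied from the command line (a sketch assuming an authenticated GitHub CLI and that the workflow file keeps the name build-and-push-ecr.yml; the version value is a placeholder):

  gh workflow run build-and-push-ecr.yml \
    -f version_override=1.2.3 \
    -f deploy_staging=true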
33 .github/workflows/chromatic.yml (vendored)
@@ -6,18 +6,19 @@ on:
- main
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
chromatic:
|
||||
name: Run Chromatic
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
actions: read
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
@@ -25,16 +26,34 @@ jobs:
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
|
||||
- name: Get pnpm store directory
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
|
||||
|
||||
- name: Setup pnpm cache
|
||||
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
||||
with:
|
||||
path: ${{ env.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --config.platform=linux --config.architecture=x64
|
||||
|
||||
- name: Run Chromatic
|
||||
uses: chromaui/action@c93e0bc3a63aa176e14a75b61a31847cbfdd341c # latest
|
||||
uses: chromaui/action@4c20b95e9d3209ecfdf9cd6aace6bbde71ba1694 # v13.3.4
|
||||
with:
|
||||
# ⚠️ Make sure to configure a `CHROMATIC_PROJECT_TOKEN` repository secret
|
||||
projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }}
|
||||
workingDir: apps/storybook
|
||||
zip: true
|
||||
|
||||
27 .github/workflows/dependency-review.yml (vendored)
@@ -1,27 +0,0 @@
# Dependency Review Action
#
# This Action will scan dependency manifest files that change as part of a Pull Request,
# surfacing known-vulnerable versions of the packages declared or updated in the PR.
# Once installed, if the workflow run is marked as required,
# PRs introducing known-vulnerable packages will be blocked from merging.
#
# Source repository: https://github.com/actions/dependency-review-action
name: 'Dependency Review'
on: [pull_request]

permissions:
  contents: read

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
        with:
          egress-policy: audit

      - name: 'Checkout Repository'
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
      - name: 'Dependency Review'
        uses: actions/dependency-review-action@38ecb5b593bf0eb19e335c03f97670f792489a8b # v4.7.0
37 .github/workflows/deploy-formbricks-cloud.yml (vendored)
@@ -4,7 +4,7 @@ on:
workflow_dispatch:
|
||||
inputs:
|
||||
VERSION:
|
||||
description: "The version of the Docker image to release, full image tag if image tag is v0.0.0 enter v0.0.0."
|
||||
description: "The version of the Docker image to release (clean SemVer, e.g., 1.2.3)"
|
||||
required: true
|
||||
type: string
|
||||
REPOSITORY:
|
||||
@@ -17,8 +17,8 @@ on:
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- stage
|
||||
- prod
|
||||
- staging
|
||||
- production
|
||||
workflow_call:
|
||||
inputs:
|
||||
VERSION:
|
||||
@@ -37,21 +37,27 @@ on:
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
helmfile-deploy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Tailscale
|
||||
uses: tailscale/github-action@v3
|
||||
uses: tailscale/github-action@84a3f23bb4d843bcf4da6cf824ec1be473daf4de # v3.2.3
|
||||
with:
|
||||
oauth-client-id: ${{ secrets.TS_OAUTH_CLIENT_ID }}
|
||||
oauth-secret: ${{ secrets.TS_OAUTH_SECRET }}
|
||||
tags: tag:github
|
||||
args: --accept-routes
|
||||
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@f24d7193d98baebaeacc7e2227925dd47cc267f5 # v4.2.0
|
||||
@@ -65,9 +71,9 @@ jobs:
|
||||
env:
|
||||
AWS_REGION: eu-central-1
|
||||
|
||||
- uses: helmfile/helmfile-action@v2
|
||||
name: Deploy Formbricks Cloud Prod
|
||||
if: inputs.ENVIRONMENT == 'prod'
|
||||
- uses: helmfile/helmfile-action@712000e3d4e28c72778ecc53857746082f555ef3 # v2.0.4
|
||||
name: Deploy Formbricks Cloud Production
|
||||
if: inputs.ENVIRONMENT == 'production'
|
||||
env:
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
REPOSITORY: ${{ inputs.REPOSITORY }}
|
||||
@@ -83,9 +89,9 @@ jobs:
|
||||
helmfile-auto-init: "false"
|
||||
helmfile-workdirectory: infra/formbricks-cloud-helm
|
||||
|
||||
- uses: helmfile/helmfile-action@v2
|
||||
name: Deploy Formbricks Cloud Stage
|
||||
if: inputs.ENVIRONMENT == 'stage'
|
||||
- uses: helmfile/helmfile-action@712000e3d4e28c72778ecc53857746082f555ef3 # v2.0.4
|
||||
name: Deploy Formbricks Cloud Staging
|
||||
if: inputs.ENVIRONMENT == 'staging'
|
||||
env:
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
REPOSITORY: ${{ inputs.REPOSITORY }}
|
||||
@@ -101,19 +107,20 @@ jobs:
|
||||
helmfile-workdirectory: infra/formbricks-cloud-helm
|
||||
|
||||
- name: Purge Cloudflare Cache
|
||||
if: ${{ inputs.ENVIRONMENT == 'prod' || inputs.ENVIRONMENT == 'stage' }}
|
||||
if: ${{ inputs.ENVIRONMENT == 'production' || inputs.ENVIRONMENT == 'staging' }}
|
||||
env:
|
||||
CF_ZONE_ID: ${{ secrets.CLOUDFLARE_ZONE_ID }}
|
||||
CF_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
ENVIRONMENT: ${{ inputs.ENVIRONMENT }}
|
||||
run: |
|
||||
# Set hostname based on environment
|
||||
if [[ "${{ inputs.ENVIRONMENT }}" == "prod" ]]; then
|
||||
if [[ "$ENVIRONMENT" == "production" ]]; then
|
||||
PURGE_HOST="app.formbricks.com"
|
||||
else
|
||||
PURGE_HOST="stage.app.formbricks.com"
|
||||
fi
|
||||
|
||||
echo "Purging Cloudflare cache for host: $PURGE_HOST (environment: ${{ inputs.ENVIRONMENT }}, zone: $CF_ZONE_ID)"
|
||||
echo "Purging Cloudflare cache for host: $PURGE_HOST (environment: $ENVIRONMENT, zone: $CF_ZONE_ID)"
|
||||
|
||||
# Prepare JSON payload for selective cache purge
|
||||
json_payload=$(cat << EOF
|
||||
|
||||
159 .github/workflows/docker-build-validation.yml (vendored)
@@ -21,10 +21,10 @@ jobs:
name: Validate Docker Build
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
# Add PostgreSQL service container
|
||||
# Add PostgreSQL and Redis service containers
|
||||
services:
|
||||
postgres:
|
||||
image: pgvector/pgvector:pg17
|
||||
image: pgvector/pgvector@sha256:9ae02a756ba16a2d69dd78058e25915e36e189bb36ddf01ceae86390d7ed786a
|
||||
env:
|
||||
POSTGRES_USER: test
|
||||
POSTGRES_PASSWORD: test
|
||||
@@ -38,43 +38,98 @@ jobs:
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
redis:
|
||||
image: valkey/valkey@sha256:12ba4f45a7c3e1d0f076acd616cb230834e75a77e8516dde382720af32832d6d
|
||||
ports:
|
||||
- 6379:6379
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build Docker Image
|
||||
uses: docker/build-push-action@v6
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
env:
|
||||
GITHUB_SHA: ${{ github.sha }}
|
||||
with:
|
||||
context: .
|
||||
file: ./apps/web/Dockerfile
|
||||
push: false
|
||||
load: true
|
||||
tags: formbricks-test:${{ github.sha }}
|
||||
tags: formbricks-test:${{ env.GITHUB_SHA }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
secrets: |
|
||||
database_url=${{ secrets.DUMMY_DATABASE_URL }}
|
||||
encryption_key=${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
redis_url=redis://localhost:6379
|
||||
|
||||
- name: Verify PostgreSQL Connection
|
||||
- name: Verify and Initialize PostgreSQL
|
||||
run: |
|
||||
echo "Verifying PostgreSQL connection..."
|
||||
# Install PostgreSQL client to test connection
|
||||
sudo apt-get update && sudo apt-get install -y postgresql-client
|
||||
|
||||
# Test connection using psql
|
||||
PGPASSWORD=test psql -h localhost -U test -d formbricks -c "\dt" || echo "Failed to connect to PostgreSQL"
|
||||
# Test connection using psql with timeout and proper error handling
|
||||
echo "Testing PostgreSQL connection with 30 second timeout..."
|
||||
if timeout 30 bash -c 'until PGPASSWORD=test psql -h localhost -U test -d formbricks -c "\dt" >/dev/null 2>&1; do
|
||||
echo "Waiting for PostgreSQL to be ready..."
|
||||
sleep 2
|
||||
done'; then
|
||||
echo "✅ PostgreSQL connection successful"
|
||||
PGPASSWORD=test psql -h localhost -U test -d formbricks -c "SELECT version();"
|
||||
|
||||
# Enable necessary extensions that might be required by migrations
|
||||
echo "Enabling required PostgreSQL extensions..."
|
||||
PGPASSWORD=test psql -h localhost -U test -d formbricks -c "CREATE EXTENSION IF NOT EXISTS vector;" || echo "Vector extension already exists or not available"
|
||||
|
||||
else
|
||||
echo "❌ PostgreSQL connection failed after 30 seconds"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Show network configuration
|
||||
echo "Network configuration:"
|
||||
ip addr show
|
||||
netstat -tulpn | grep 5432 || echo "No process listening on port 5432"
|
||||
|
||||
- name: Verify Redis/Valkey Connection
|
||||
run: |
|
||||
echo "Verifying Redis/Valkey connection..."
|
||||
# Install Redis client to test connection
|
||||
sudo apt-get update && sudo apt-get install -y redis-tools
|
||||
|
||||
# Test connection using redis-cli with timeout and proper error handling
|
||||
echo "Testing Redis connection with 30 second timeout..."
|
||||
if timeout 30 bash -c 'until redis-cli -h localhost -p 6379 ping >/dev/null 2>&1; do
|
||||
echo "Waiting for Redis to be ready..."
|
||||
sleep 2
|
||||
done'; then
|
||||
echo "✅ Redis connection successful"
|
||||
redis-cli -h localhost -p 6379 info server | head -5
|
||||
else
|
||||
echo "❌ Redis connection failed after 30 seconds"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Show network configuration for Redis
|
||||
echo "Redis network configuration:"
|
||||
netstat -tulpn | grep 6379 || echo "No process listening on port 6379"
|
||||
|
||||
- name: Test Docker Image with Health Check
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_SHA: ${{ github.sha }}
|
||||
DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
run: |
|
||||
echo "🧪 Testing if the Docker image starts correctly..."
|
||||
|
||||
@@ -86,29 +141,13 @@ jobs:
|
||||
$DOCKER_RUN_ARGS \
|
||||
-p 3000:3000 \
|
||||
-e DATABASE_URL="postgresql://test:test@host.docker.internal:5432/formbricks" \
|
||||
-e ENCRYPTION_KEY="${{ secrets.DUMMY_ENCRYPTION_KEY }}" \
|
||||
-d formbricks-test:${{ github.sha }}
|
||||
-e ENCRYPTION_KEY="$DUMMY_ENCRYPTION_KEY" \
|
||||
-e REDIS_URL="redis://host.docker.internal:6379" \
|
||||
-d "formbricks-test:$GITHUB_SHA"
|
||||
|
||||
# Give it more time to start up
|
||||
echo "Waiting 45 seconds for application to start..."
|
||||
sleep 45
|
||||
|
||||
# Check if the container is running
|
||||
if [ "$(docker inspect -f '{{.State.Running}}' formbricks-test)" != "true" ]; then
|
||||
echo "❌ Container failed to start properly!"
|
||||
docker logs formbricks-test
|
||||
exit 1
|
||||
else
|
||||
echo "✅ Container started successfully!"
|
||||
fi
|
||||
|
||||
# Try connecting to PostgreSQL from inside the container
|
||||
echo "Testing PostgreSQL connection from inside container..."
|
||||
docker exec formbricks-test sh -c 'apt-get update && apt-get install -y postgresql-client && PGPASSWORD=test psql -h host.docker.internal -U test -d formbricks -c "\dt" || echo "Failed to connect to PostgreSQL from container"'
|
||||
|
||||
# Try to access the health endpoint
|
||||
echo "🏥 Testing /health endpoint..."
|
||||
MAX_RETRIES=10
|
||||
# Start health check polling immediately (every 5 seconds for up to 5 minutes)
|
||||
echo "🏥 Polling /health endpoint every 5 seconds for up to 5 minutes..."
|
||||
MAX_RETRIES=60 # 60 attempts × 5 seconds = 5 minutes
|
||||
RETRY_COUNT=0
|
||||
HEALTH_CHECK_SUCCESS=false
|
||||
|
||||
@@ -116,38 +155,32 @@ jobs:
|
||||
|
||||
while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
|
||||
RETRY_COUNT=$((RETRY_COUNT + 1))
|
||||
echo "Attempt $RETRY_COUNT of $MAX_RETRIES..."
|
||||
|
||||
# Show container logs before each attempt to help debugging
|
||||
if [ $RETRY_COUNT -gt 1 ]; then
|
||||
echo "📋 Current container logs:"
|
||||
docker logs --tail 20 formbricks-test
|
||||
|
||||
# Check if container is still running
|
||||
if [ "$(docker inspect -f '{{.State.Running}}' formbricks-test 2>/dev/null)" != "true" ]; then
|
||||
echo "❌ Container stopped running after $((RETRY_COUNT * 5)) seconds!"
|
||||
echo "📋 Container logs:"
|
||||
docker logs formbricks-test
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get detailed curl output for debugging
|
||||
HTTP_OUTPUT=$(curl -v -s -m 30 http://localhost:3000/health 2>&1)
|
||||
CURL_EXIT_CODE=$?
|
||||
|
||||
echo "Curl exit code: $CURL_EXIT_CODE"
|
||||
echo "Curl output: $HTTP_OUTPUT"
|
||||
|
||||
if [ $CURL_EXIT_CODE -eq 0 ]; then
|
||||
STATUS_CODE=$(echo "$HTTP_OUTPUT" | grep -oP "HTTP/\d(\.\d)? \K\d+")
|
||||
echo "Status code detected: $STATUS_CODE"
|
||||
|
||||
if [ "$STATUS_CODE" = "200" ]; then
|
||||
echo "✅ Health check successful!"
|
||||
HEALTH_CHECK_SUCCESS=true
|
||||
break
|
||||
else
|
||||
echo "❌ Health check returned non-200 status code: $STATUS_CODE"
|
||||
fi
|
||||
else
|
||||
echo "❌ Curl command failed with exit code: $CURL_EXIT_CODE"
|
||||
|
||||
# Show progress and diagnostic info every 12 attempts (1 minute intervals)
|
||||
if [ $((RETRY_COUNT % 12)) -eq 0 ] || [ $RETRY_COUNT -eq 1 ]; then
|
||||
echo "Health check attempt $RETRY_COUNT of $MAX_RETRIES ($(($RETRY_COUNT * 5)) seconds elapsed)..."
|
||||
echo "📋 Recent container logs:"
|
||||
docker logs --tail 10 formbricks-test
|
||||
fi
|
||||
|
||||
echo "Waiting 15 seconds before next attempt..."
|
||||
sleep 15
|
||||
|
||||
# Try health endpoint with shorter timeout for faster polling
|
||||
# Use -f flag to make curl fail on HTTP error status codes (4xx, 5xx)
|
||||
if curl -f -s -m 10 http://localhost:3000/health >/dev/null 2>&1; then
|
||||
echo "✅ Health check successful after $((RETRY_COUNT * 5)) seconds!"
|
||||
HEALTH_CHECK_SUCCESS=true
|
||||
break
|
||||
fi
|
||||
|
||||
# Wait 5 seconds before next attempt
|
||||
sleep 5
|
||||
done
|
||||
|
||||
# Show full container logs for debugging
|
||||
@@ -160,7 +193,7 @@ jobs:
|
||||
|
||||
# Exit with failure if health check did not succeed
|
||||
if [ "$HEALTH_CHECK_SUCCESS" != "true" ]; then
|
||||
echo "❌ Health check failed after $MAX_RETRIES attempts"
|
||||
echo "❌ Health check failed after $((MAX_RETRIES * 5)) seconds (5 minutes)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
70 .github/workflows/docker-security-scan.yml (vendored, new file)
@@ -0,0 +1,70 @@
name: Docker Security Scan
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 2 * * *" # Daily at 2 AM UTC
|
||||
workflow_dispatch:
|
||||
workflow_run:
|
||||
workflows: ["Docker Release to Github"]
|
||||
types: [completed]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: read
|
||||
security-events: write
|
||||
|
||||
jobs:
|
||||
scan:
|
||||
name: Vulnerability Scan
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- name: Harden the runner
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout (for SARIF fingerprinting only)
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Determine ref and commit for upload
|
||||
id: gitref
|
||||
shell: bash
|
||||
env:
|
||||
EVENT_NAME: ${{ github.event_name }}
|
||||
HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
|
||||
HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [[ "${EVENT_NAME}" == "workflow_run" ]]; then
|
||||
echo "ref=refs/heads/${HEAD_BRANCH}" >> "$GITHUB_OUTPUT"
|
||||
echo "sha=${HEAD_SHA}" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "ref=${GITHUB_REF}" >> "$GITHUB_OUTPUT"
|
||||
echo "sha=${GITHUB_SHA}" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Run Trivy vulnerability scanner
|
||||
uses: aquasecurity/trivy-action@dc5a429b52fcf669ce959baa2c2dd26090d2a6c4 # v0.32.0
|
||||
with:
|
||||
image-ref: "ghcr.io/${{ github.repository }}:latest"
|
||||
format: "sarif"
|
||||
output: "trivy-results.sarif"
|
||||
severity: "CRITICAL,HIGH,MEDIUM,LOW"
|
||||
|
||||
- name: Upload Trivy scan results to GitHub Security tab
|
||||
uses: github/codeql-action/upload-sarif@a4e1a019f5e24960714ff6296aee04b736cbc3cf # v3.29.6
|
||||
if: ${{ always() }}
|
||||
with:
|
||||
sarif_file: "trivy-results.sarif"
|
||||
ref: ${{ steps.gitref.outputs.ref }}
|
||||
sha: ${{ steps.gitref.outputs.sha }}
|
||||
category: "trivy-container-scan"
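The scheduled scan above can be approximated locally with the Trivy CLI (a sketch; the image tag and severity filter mirror the workflow but are assumptions about what you want to check):

  trivy image --severity CRITICAL,HIGH,MEDIUM,LOW \
    ghcr.io/formbricks/formbricks:latest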
149 .github/workflows/e2e.yml (vendored)
@@ -3,26 +3,20 @@ name: E2E Tests
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
AZURE_CLIENT_ID:
|
||||
required: false
|
||||
AZURE_TENANT_ID:
|
||||
required: false
|
||||
AZURE_SUBSCRIPTION_ID:
|
||||
required: false
|
||||
PLAYWRIGHT_SERVICE_URL:
|
||||
required: false
|
||||
PLAYWRIGHT_SERVICE_ACCESS_TOKEN:
|
||||
required: false
|
||||
ENTERPRISE_LICENSE_KEY:
|
||||
required: true
|
||||
# Add other secrets if necessary
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
TELEMETRY_DISABLED: 1
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ vars.TURBO_TEAM }}
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
actions: read
|
||||
|
||||
@@ -33,7 +27,7 @@ jobs:
|
||||
timeout-minutes: 60
|
||||
services:
|
||||
postgres:
|
||||
image: pgvector/pgvector:pg17
|
||||
image: pgvector/pgvector@sha256:9ae02a756ba16a2d69dd78058e25915e36e189bb36ddf01ceae86390d7ed786a
|
||||
env:
|
||||
POSTGRES_DB: postgres
|
||||
POSTGRES_USER: postgres
|
||||
@@ -41,27 +35,23 @@ jobs:
|
||||
ports:
|
||||
- 5432:5432
|
||||
options: >-
|
||||
--health-cmd="pg_isready -U testuser"
|
||||
--health-cmd="pg_isready -U postgres"
|
||||
--health-interval=10s
|
||||
--health-timeout=5s
|
||||
--health-retries=5
|
||||
valkey:
|
||||
image: valkey/valkey:8.1.1
|
||||
image: valkey/valkey@sha256:12ba4f45a7c3e1d0f076acd616cb230834e75a77e8516dde382720af32832d6d
|
||||
ports:
|
||||
- 6379:6379
|
||||
options: >-
|
||||
--entrypoint "valkey-server"
|
||||
--health-cmd="valkey-cli ping"
|
||||
--health-interval=10s
|
||||
--health-timeout=5s
|
||||
--health-retries=5
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: allow
|
||||
egress-policy: audit
|
||||
allowed-endpoints: |
|
||||
ee.formbricks.com:443
|
||||
registry-1.docker.io:443
|
||||
docker.io:443
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: ./.github/actions/dangerous-git-checkout
|
||||
@@ -89,10 +79,72 @@ jobs:
|
||||
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/ENTERPRISE_LICENSE_KEY=.*/ENTERPRISE_LICENSE_KEY=${{ secrets.ENTERPRISE_LICENSE_KEY }}/" .env
|
||||
sed -i "s|REDIS_URL=.*|REDIS_URL=redis://localhost:6379|" .env
|
||||
echo "" >> .env
|
||||
echo "E2E_TESTING=1" >> .env
|
||||
echo "S3_REGION=us-east-1" >> .env
|
||||
echo "S3_BUCKET_NAME=formbricks-e2e" >> .env
|
||||
echo "S3_ENDPOINT_URL=http://localhost:9000" >> .env
|
||||
echo "S3_ACCESS_KEY=devminio" >> .env
|
||||
echo "S3_SECRET_KEY=devminio123" >> .env
|
||||
echo "S3_FORCE_PATH_STYLE=1" >> .env
|
||||
shell: bash
|
||||
|
||||
- name: Install MinIO client (mc)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
MC_VERSION="RELEASE.2025-08-13T08-35-41Z"
|
||||
MC_BASE="https://dl.min.io/client/mc/release/linux-amd64/archive"
|
||||
MC_BIN="mc.${MC_VERSION}"
|
||||
MC_SUM="${MC_BIN}.sha256sum"
|
||||
|
||||
curl -fsSL "${MC_BASE}/${MC_BIN}" -o "${MC_BIN}"
|
||||
curl -fsSL "${MC_BASE}/${MC_SUM}" -o "${MC_SUM}"
|
||||
|
||||
sha256sum -c "${MC_SUM}"
|
||||
|
||||
chmod +x "${MC_BIN}"
|
||||
sudo mv "${MC_BIN}" /usr/local/bin/mc
|
||||
|
||||
- name: Start MinIO Server
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Start MinIO server in background
|
||||
docker run -d \
|
||||
--name minio-server \
|
||||
-p 9000:9000 \
|
||||
-p 9001:9001 \
|
||||
-e MINIO_ROOT_USER=devminio \
|
||||
-e MINIO_ROOT_PASSWORD=devminio123 \
|
||||
minio/minio:RELEASE.2025-09-07T16-13-09Z \
|
||||
server /data --console-address :9001
|
||||
|
||||
echo "MinIO server started"
|
||||
|
||||
- name: Wait for MinIO and create S3 bucket
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo "Waiting for MinIO to be ready..."
|
||||
ready=0
|
||||
for i in {1..60}; do
|
||||
if curl -fsS http://localhost:9000/minio/health/live >/dev/null; then
|
||||
echo "MinIO is up after ${i} seconds"
|
||||
ready=1
|
||||
break
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
|
||||
if [ "$ready" -ne 1 ]; then
|
||||
echo "::error::MinIO did not become ready within 60 seconds"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
mc alias set local http://localhost:9000 devminio devminio123
|
||||
mc mb --ignore-existing local/formbricks-e2e
|
||||
|
||||
- name: Build App
|
||||
run: |
|
||||
pnpm build --filter=@formbricks/web...
|
||||
@@ -102,6 +154,18 @@ jobs:
|
||||
# pnpm prisma migrate deploy
|
||||
pnpm db:migrate:dev
|
||||
|
||||
- name: Run Rate Limiter Load Tests
|
||||
run: |
|
||||
echo "Running rate limiter load tests with Redis/Valkey..."
|
||||
cd apps/web && pnpm vitest run modules/core/rate-limit/rate-limit-load.test.ts
|
||||
shell: bash
|
||||
|
||||
- name: Run Cache Integration Tests
|
||||
run: |
|
||||
echo "Running cache integration tests with Redis/Valkey..."
|
||||
cd packages/cache && pnpm vitest run src/cache-integration.test.ts
|
||||
shell: bash
|
||||
|
||||
- name: Check for Enterprise License
|
||||
run: |
|
||||
LICENSE_KEY=$(grep '^ENTERPRISE_LICENSE_KEY=' .env | cut -d'=' -f2-)
|
||||
@@ -111,6 +175,12 @@ jobs:
|
||||
fi
|
||||
echo "License key length: ${#LICENSE_KEY}"
|
||||
|
||||
- name: Disable rate limiting for E2E tests
|
||||
run: |
|
||||
echo "RATE_LIMITING_DISABLED=1" >> .env
|
||||
echo "Rate limiting disabled for E2E tests"
|
||||
shell: bash
|
||||
|
||||
- name: Run App
|
||||
run: |
|
||||
echo "Starting app with enterprise license..."
|
||||
@@ -132,31 +202,32 @@ jobs:
|
||||
- name: Install Playwright
|
||||
run: pnpm exec playwright install --with-deps
|
||||
|
||||
- name: Set Azure Secret Variables
|
||||
run: |
|
||||
if [[ -n "${{ secrets.AZURE_CLIENT_ID }}" && -n "${{ secrets.AZURE_TENANT_ID }}" && -n "${{ secrets.AZURE_SUBSCRIPTION_ID }}" ]]; then
|
||||
echo "AZURE_ENABLED=true" >> $GITHUB_ENV
|
||||
else
|
||||
echo "AZURE_ENABLED=false" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Azure login
|
||||
if: env.AZURE_ENABLED == 'true'
|
||||
uses: azure/login@a65d910e8af852a8061c627c456678983e180302 # v2.2.0
|
||||
with:
|
||||
client-id: ${{ secrets.AZURE_CLIENT_ID }}
|
||||
tenant-id: ${{ secrets.AZURE_TENANT_ID }}
|
||||
subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
|
||||
|
||||
- name: Run E2E Tests (Azure)
|
||||
if: env.AZURE_ENABLED == 'true'
|
||||
- name: Determine Playwright execution mode
|
||||
shell: bash
|
||||
env:
|
||||
PLAYWRIGHT_SERVICE_URL: ${{ secrets.PLAYWRIGHT_SERVICE_URL }}
|
||||
PLAYWRIGHT_SERVICE_ACCESS_TOKEN: ${{ secrets.PLAYWRIGHT_SERVICE_ACCESS_TOKEN }}
|
||||
run: |
|
||||
pnpm test-e2e:azure
|
||||
set -euo pipefail
|
||||
|
||||
if [[ -n "${PLAYWRIGHT_SERVICE_URL}" && -n "${PLAYWRIGHT_SERVICE_ACCESS_TOKEN}" ]]; then
|
||||
echo "PW_MODE=service" >> "$GITHUB_ENV"
|
||||
else
|
||||
echo "PW_MODE=local" >> "$GITHUB_ENV"
|
||||
fi
|
||||
|
||||
- name: Run E2E Tests (Playwright Service)
|
||||
if: env.PW_MODE == 'service'
|
||||
env:
|
||||
PLAYWRIGHT_SERVICE_URL: ${{ secrets.PLAYWRIGHT_SERVICE_URL }}
|
||||
PLAYWRIGHT_SERVICE_ACCESS_TOKEN: ${{ secrets.PLAYWRIGHT_SERVICE_ACCESS_TOKEN }}
|
||||
CI: true
|
||||
run: pnpm test-e2e:azure
|
||||
|
||||
- name: Run E2E Tests (Local)
|
||||
if: env.AZURE_ENABLED == 'false'
|
||||
if: env.PW_MODE == 'local'
|
||||
env:
|
||||
CI: true
|
||||
run: |
|
||||
pnpm test:e2e
|
||||
|
||||
|
||||
157 .github/workflows/formbricks-release.yml (vendored)
@@ -1,34 +1,157 @@
name: Build, release & deploy Formbricks images
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
tags:
|
||||
- "v*"
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
docker-build:
|
||||
name: Build & release stable docker image
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
check-latest-release:
|
||||
name: Check if this is the latest release
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
is_latest: ${{ steps.compare_tags.outputs.is_latest }}
|
||||
# This job determines if the current release was marked as "Set as the latest release"
|
||||
# by comparing it with the latest release from GitHub API
|
||||
steps:
|
||||
- name: Harden the runner
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Get latest release tag from API
|
||||
id: get_latest_release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
REPO: ${{ github.repository }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Get the latest release tag from GitHub API with error handling
|
||||
echo "Fetching latest release from GitHub API..."
|
||||
|
||||
# Use curl with error handling - API returns 404 if no releases exist
|
||||
http_code=$(curl -s -w "%{http_code}" -H "Authorization: token ${GITHUB_TOKEN}" \
|
||||
"https://api.github.com/repos/${REPO}/releases/latest" -o /tmp/latest_release.json)
|
||||
|
||||
if [[ "$http_code" == "404" ]]; then
|
||||
echo "⚠️ No previous releases found (404). This appears to be the first release."
|
||||
echo "latest_release=" >> $GITHUB_OUTPUT
|
||||
elif [[ "$http_code" == "200" ]]; then
|
||||
latest_release=$(jq -r .tag_name /tmp/latest_release.json)
|
||||
if [[ "$latest_release" == "null" || -z "$latest_release" ]]; then
|
||||
echo "⚠️ API returned null/empty tag_name. Treating as first release."
|
||||
echo "latest_release=" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "Latest release from API: ${latest_release}"
|
||||
echo "latest_release=${latest_release}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
else
|
||||
echo "❌ GitHub API error (HTTP ${http_code}). Treating as first release."
|
||||
echo "latest_release=" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
echo "Current release tag: ${{ github.event.release.tag_name }}"
|
||||
|
||||
- name: Compare release tags
|
||||
id: compare_tags
|
||||
env:
|
||||
CURRENT_TAG: ${{ github.event.release.tag_name }}
|
||||
LATEST_TAG: ${{ steps.get_latest_release.outputs.latest_release }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Handle first release case (no previous releases)
|
||||
if [[ -z "${LATEST_TAG}" ]]; then
|
||||
echo "🎉 This is the first release (${CURRENT_TAG}) - treating as latest"
|
||||
echo "is_latest=true" >> $GITHUB_OUTPUT
|
||||
elif [[ "${CURRENT_TAG}" == "${LATEST_TAG}" ]]; then
|
||||
echo "✅ This release (${CURRENT_TAG}) is marked as the latest release"
|
||||
echo "is_latest=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "ℹ️ This release (${CURRENT_TAG}) is not the latest release (latest: ${LATEST_TAG})"
|
||||
echo "is_latest=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
docker-build-community:
|
||||
name: Build & release community docker image
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
uses: ./.github/workflows/release-docker-github.yml
|
||||
secrets: inherit
|
||||
needs:
|
||||
- check-latest-release
|
||||
with:
|
||||
IS_PRERELEASE: ${{ github.event.release.prerelease }}
|
||||
MAKE_LATEST: ${{ needs.check-latest-release.outputs.is_latest == 'true' }}
|
||||
|
||||
docker-build-cloud:
|
||||
name: Build & push Formbricks Cloud to ECR
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
uses: ./.github/workflows/build-and-push-ecr.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
image_tag: ${{ needs.docker-build-community.outputs.VERSION }}
|
||||
IS_PRERELEASE: ${{ github.event.release.prerelease }}
|
||||
MAKE_LATEST: ${{ needs.check-latest-release.outputs.is_latest == 'true' }}
|
||||
needs:
|
||||
- check-latest-release
|
||||
- docker-build-community
|
||||
|
||||
helm-chart-release:
|
||||
name: Release Helm Chart
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
uses: ./.github/workflows/release-helm-chart.yml
|
||||
secrets: inherit
|
||||
needs:
|
||||
- docker-build
|
||||
- docker-build-community
|
||||
with:
|
||||
VERSION: ${{ needs.docker-build.outputs.VERSION }}
|
||||
VERSION: ${{ needs.docker-build-community.outputs.VERSION }}
|
||||
|
||||
deploy-formbricks-cloud:
|
||||
name: Deploy Helm Chart to Formbricks Cloud
|
||||
secrets: inherit
|
||||
uses: ./.github/workflows/deploy-formbricks-cloud.yml
|
||||
verify-cloud-build:
|
||||
name: Verify Cloud Build Outputs
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5 # Simple verification should be quick
|
||||
needs:
|
||||
- docker-build
|
||||
- helm-chart-release
|
||||
- docker-build-cloud
|
||||
steps:
|
||||
- name: Harden the runner
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Display ECR build outputs
|
||||
env:
|
||||
IMAGE_TAG: ${{ needs.docker-build-cloud.outputs.IMAGE_TAG }}
|
||||
TAGS: ${{ needs.docker-build-cloud.outputs.TAGS }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo "✅ ECR Build Completed Successfully"
|
||||
echo "Image Tag: ${IMAGE_TAG}"
|
||||
echo "ECR Tags:"
|
||||
printf '%s\n' "${TAGS}"
|
||||
|
||||
move-stable-tag:
|
||||
name: Move stable tag to release
|
||||
permissions:
|
||||
contents: write # Required for tag push operations in called workflow
|
||||
uses: ./.github/workflows/move-stable-tag.yml
|
||||
needs:
|
||||
- check-latest-release
|
||||
- docker-build-community # Ensure release is successful first
|
||||
with:
|
||||
VERSION: v${{ needs.docker-build.outputs.VERSION }}
|
||||
ENVIRONMENT: "prod"
|
||||
release_tag: ${{ github.event.release.tag_name }}
|
||||
commit_sha: ${{ github.sha }}
|
||||
is_prerelease: ${{ github.event.release.prerelease }}
|
||||
make_latest: ${{ needs.check-latest-release.outputs.is_latest == 'true' }}
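The latest-release comparison in the check-latest-release job above reduces to a single API call, which can be replayed by hand (a sketch; GITHUB_TOKEN is assumed to be exported in your shell):

  curl -s -H "Authorization: token ${GITHUB_TOKEN}" \
    "https://api.github.com/repos/formbricks/formbricks/releases/latest" | jq -r .tag_name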
101 .github/workflows/move-stable-tag.yml (vendored, new file)
@@ -0,0 +1,101 @@
name: Move Stable Tag
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
release_tag:
|
||||
description: "The release tag name (e.g., 1.2.3)"
|
||||
required: true
|
||||
type: string
|
||||
commit_sha:
|
||||
description: "The commit SHA to point the stable tag to"
|
||||
required: true
|
||||
type: string
|
||||
is_prerelease:
|
||||
description: "Whether this is a prerelease (stable tag won't be moved for prereleases)"
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
make_latest:
|
||||
description: "Whether to move stable tag (from GitHub release 'Set as the latest release' option)"
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
# Prevent concurrent stable tag operations to avoid race conditions
|
||||
concurrency:
|
||||
group: move-stable-tag-${{ github.repository }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
move-stable-tag:
|
||||
name: Move stable tag to release
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10 # Prevent hung git operations
|
||||
permissions:
|
||||
contents: write # Required to push tags
|
||||
# Only move stable tag for non-prerelease versions AND when make_latest is true
|
||||
if: ${{ !inputs.is_prerelease && inputs.make_latest }}
|
||||
steps:
|
||||
- name: Harden the runner
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0 # Full history needed for tag operations
|
||||
|
||||
- name: Validate inputs
|
||||
env:
|
||||
RELEASE_TAG: ${{ inputs.release_tag }}
|
||||
COMMIT_SHA: ${{ inputs.commit_sha }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Validate release tag format
|
||||
if [[ ! "$RELEASE_TAG" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$ ]]; then
|
||||
echo "❌ Error: Invalid release tag format. Expected format: 1.2.3, 1.2.3-alpha"
|
||||
echo "Provided: $RELEASE_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate commit SHA format (40 character hex)
|
||||
if [[ ! "$COMMIT_SHA" =~ ^[a-f0-9]{40}$ ]]; then
|
||||
echo "❌ Error: Invalid commit SHA format. Expected 40 character hex string"
|
||||
echo "Provided: $COMMIT_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Input validation passed"
|
||||
echo "Release tag: $RELEASE_TAG"
|
||||
echo "Commit SHA: $COMMIT_SHA"
|
||||
|
||||
- name: Move stable tag
|
||||
env:
|
||||
RELEASE_TAG: ${{ inputs.release_tag }}
|
||||
COMMIT_SHA: ${{ inputs.commit_sha }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Configure git
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
# Verify the commit exists
|
||||
if ! git cat-file -e "$COMMIT_SHA"; then
|
||||
echo "❌ Error: Commit $COMMIT_SHA does not exist in this repository"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Move stable tag to the release commit
|
||||
echo "📌 Moving stable tag to commit: $COMMIT_SHA (release: $RELEASE_TAG)"
|
||||
git tag -f stable "$COMMIT_SHA"
|
||||
git push origin stable --force
|
||||
|
||||
echo "✅ Successfully moved stable tag to release $RELEASE_TAG"
|
||||
echo "🔗 Stable tag now points to: https://github.com/${{ github.repository }}/commit/$COMMIT_SHA"
|
||||
159 .github/workflows/pr-size-check.yml (vendored, new file)
@@ -0,0 +1,159 @@
name: PR Size Check
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
check-pr-size:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
|
||||
steps:
|
||||
- name: Harden the runner
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Check PR size
|
||||
id: check-size
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Fetch the base branch
|
||||
git fetch origin "${{ github.base_ref }}"
|
||||
|
||||
# Get diff stats
|
||||
diff_output=$(git diff --numstat "origin/${{ github.base_ref }}"...HEAD)
|
||||
|
||||
# Count lines, excluding:
|
||||
# - Test files (*.test.ts, *.spec.tsx, etc.)
|
||||
# - Locale files (locales/*.json, i18n/*.json)
|
||||
# - Lock files (pnpm-lock.yaml, package-lock.json, yarn.lock)
|
||||
# - Generated files (dist/, coverage/, build/, .next/)
|
||||
# - Storybook stories (*.stories.tsx)
|
||||
|
||||
total_additions=0
|
||||
total_deletions=0
|
||||
counted_files=0
|
||||
excluded_files=0
|
||||
|
||||
while IFS=$'\t' read -r additions deletions file; do
|
||||
# Skip if additions or deletions are "-" (binary files)
|
||||
if [ "$additions" = "-" ] || [ "$deletions" = "-" ]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
# Check if file should be excluded
|
||||
case "$file" in
|
||||
*.test.ts|*.test.tsx|*.spec.ts|*.spec.tsx|*.test.js|*.test.jsx|*.spec.js|*.spec.jsx)
|
||||
excluded_files=$((excluded_files + 1))
|
||||
continue
|
||||
;;
|
||||
*/locales/*.json|*/i18n/*.json)
|
||||
excluded_files=$((excluded_files + 1))
|
||||
continue
|
||||
;;
|
||||
pnpm-lock.yaml|package-lock.json|yarn.lock)
|
||||
excluded_files=$((excluded_files + 1))
|
||||
continue
|
||||
;;
|
||||
dist/*|coverage/*|build/*|node_modules/*|test-results/*|playwright-report/*|.next/*|*.tsbuildinfo)
|
||||
excluded_files=$((excluded_files + 1))
|
||||
continue
|
||||
;;
|
||||
*.stories.ts|*.stories.tsx|*.stories.js|*.stories.jsx)
|
||||
excluded_files=$((excluded_files + 1))
|
||||
continue
|
||||
;;
|
||||
esac
|
||||
|
||||
total_additions=$((total_additions + additions))
|
||||
total_deletions=$((total_deletions + deletions))
|
||||
counted_files=$((counted_files + 1))
|
||||
done <<EOF
|
||||
${diff_output}
|
||||
EOF
|
||||
|
||||
total_changes=$((total_additions + total_deletions))
|
||||
|
||||
echo "counted_files=${counted_files}" >> "${GITHUB_OUTPUT}"
|
||||
echo "excluded_files=${excluded_files}" >> "${GITHUB_OUTPUT}"
|
||||
echo "total_additions=${total_additions}" >> "${GITHUB_OUTPUT}"
|
||||
echo "total_deletions=${total_deletions}" >> "${GITHUB_OUTPUT}"
|
||||
echo "total_changes=${total_changes}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
# Set flag if PR is too large (> 800 lines)
|
||||
if [ ${total_changes} -gt 800 ]; then
|
||||
echo "is_too_large=true" >> "${GITHUB_OUTPUT}"
|
||||
else
|
||||
echo "is_too_large=false" >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
- name: Comment on PR if too large
|
||||
if: steps.check-size.outputs.is_too_large == 'true'
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
const totalChanges = ${{ steps.check-size.outputs.total_changes }};
|
||||
const countedFiles = ${{ steps.check-size.outputs.counted_files }};
|
||||
const excludedFiles = ${{ steps.check-size.outputs.excluded_files }};
|
||||
const additions = ${{ steps.check-size.outputs.total_additions }};
|
||||
const deletions = ${{ steps.check-size.outputs.total_deletions }};
|
||||
|
||||
const body = '## 🚨 PR Size Warning\n\n' +
|
||||
'This PR has approximately **' + totalChanges + ' lines** of changes (' + additions + ' additions, ' + deletions + ' deletions across ' + countedFiles + ' files).\n\n' +
|
||||
'Large PRs (>800 lines) are significantly harder to review and increase the chance of merge conflicts. Consider splitting this into smaller, self-contained PRs.\n\n' +
|
||||
'### 💡 Suggestions:\n' +
|
||||
'- **Split by feature or module** - Break down into logical, independent pieces\n' +
|
||||
'- **Create a sequence of PRs** - Each building on the previous one\n' +
|
||||
'- **Branch off PR branches** - Don\'t wait for reviews to continue dependent work\n\n' +
|
||||
'### 📊 What was counted:\n' +
|
||||
'- ✅ Source files, stylesheets, configuration files\n' +
|
||||
'- ❌ Excluded ' + excludedFiles + ' files (tests, locales, locks, generated files)\n\n' +
|
||||
'### 📚 Guidelines:\n' +
|
||||
'- **Ideal:** 300-500 lines per PR\n' +
|
||||
'- **Warning:** 500-800 lines\n' +
|
||||
'- **Critical:** 800+ lines ⚠️\n\n' +
|
||||
'If this large PR is unavoidable (e.g., migration, dependency update, major refactor), please explain in the PR description why it couldn\'t be split.';
|
||||
|
||||
// Check if we already commented
|
||||
const { data: comments } = await github.rest.issues.listComments({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
});
|
||||
|
||||
const botComment = comments.find(comment =>
|
||||
comment.user.type === 'Bot' &&
|
||||
comment.body.includes('🚨 PR Size Warning')
|
||||
);
|
||||
|
||||
if (botComment) {
|
||||
// Update existing comment
|
||||
await github.rest.issues.updateComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
comment_id: botComment.id,
|
||||
body: body
|
||||
});
|
||||
} else {
|
||||
// Create new comment
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body: body
|
||||
});
|
||||
}
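The line count computed by the check-pr-size job above can be estimated locally before pushing (a rough sketch that skips the workflow's file-type exclusions and assumes origin/main is the base branch):

  git fetch origin main
  git diff --numstat origin/main...HEAD \
    | awk '$1 != "-" { add += $1; del += $2 } END { print add + del, "changed lines" }'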
2 .github/workflows/pr.yml (vendored)
@@ -10,8 +10,6 @@ permissions:

on:
  pull_request:
    branches:
      - main
  merge_group:
  workflow_dispatch:

|
||||
|
||||
@@ -1,99 +1,50 @@
|
||||
name: Docker Release to Github Experimental
|
||||
name: Build Community Testing Images
|
||||
|
||||
# This workflow uses actions that are not certified by GitHub.
|
||||
# They are provided by a third-party and are governed by
|
||||
# separate terms of service, privacy policy, and support
|
||||
# documentation.
|
||||
# This workflow builds experimental/testing versions of Formbricks for self-hosting customers
|
||||
# to test fixes and features before official releases. Images are pushed to GHCR with
|
||||
# timestamped experimental versions for easy identification and testing.
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
# Use docker.io for Docker Hub if empty
|
||||
REGISTRY: ghcr.io
|
||||
# github.repository as <account>/<repo>
|
||||
IMAGE_NAME: ${{ github.repository }}-experimental
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
inputs:
|
||||
version_override:
|
||||
description: "Override version (SemVer only, e.g., 1.2.3-beta). Leave empty for auto-generated experimental version."
|
||||
required: false
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
|
||||
jobs:
|
||||
build:
|
||||
build-community-testing:
|
||||
name: Build Community Testing Image
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
# This is used to complete the identity challenge
|
||||
# with sigstore/fulcio when running outside of PRs.
|
||||
id-token: write
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
|
||||
|
||||
# Install the cosign tool except on PR
|
||||
# https://github.com/sigstore/cosign-installer
|
||||
- name: Install cosign
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
|
||||
|
||||
# Login against a Docker registry except on PR
|
||||
# https://github.com/docker/login-action
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
fetch-depth: 0
|
||||
|
||||
# Extract metadata (tags, labels) for Docker
|
||||
# https://github.com/docker/metadata-action
|
||||
- name: Extract Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
- name: Build and push community testing image
|
||||
uses: ./.github/actions/build-and-push-docker
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
|
||||
# Build and push Docker image with Buildx (don't push on PR)
|
||||
# https://github.com/docker/build-push-action
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
|
||||
with:
|
||||
project: tw0fqmsx3c
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
context: .
|
||||
file: ./apps/web/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
secrets: |
|
||||
database_url=${{ secrets.DUMMY_DATABASE_URL }}
|
||||
encryption_key=${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
|
||||
# Sign the resulting Docker image digest except on PRs.
|
||||
# This will only write to the public Rekor transparency log when the Docker
|
||||
# repository is public to avoid leaking data. If you would like to publish
|
||||
# transparency data even for private images, pass --force to cosign below.
|
||||
# https://github.com/sigstore/cosign
|
||||
- name: Sign the published Docker image
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
registry_type: "ghcr"
|
||||
ghcr_image_name: "${{ github.repository }}-experimental"
|
||||
experimental_mode: "true"
|
||||
version: ${{ inputs.version_override }}
|
||||
env:
|
||||
# https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable
|
||||
TAGS: ${{ steps.meta.outputs.tags }}
|
||||
DIGEST: ${{ steps.build-and-push.outputs.digest }}
|
||||
# This step uses the identity token to provision an ephemeral certificate
|
||||
# against the sigstore community Fulcio instance.
|
||||
run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
DEPOT_PROJECT_TOKEN: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
DUMMY_DATABASE_URL: ${{ secrets.DUMMY_DATABASE_URL }}
|
||||
DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
DUMMY_REDIS_URL: ${{ secrets.DUMMY_REDIS_URL }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
|
||||
132  .github/workflows/release-docker-github.yml  vendored
@@ -1,4 +1,4 @@
|
||||
name: Docker Release to Github
|
||||
name: Release Community Docker Images
|
||||
|
||||
# This workflow uses actions that are not certified by GitHub.
|
||||
# They are provided by a third-party and are governed by
|
||||
@@ -7,6 +7,17 @@ name: Docker Release to Github
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
IS_PRERELEASE:
|
||||
description: "Whether this is a prerelease (affects latest tag)"
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
MAKE_LATEST:
|
||||
description: "Whether to tag as latest (from GitHub release 'Set as the latest release' option)"
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
outputs:
|
||||
VERSION:
|
||||
description: release version
|
||||
@@ -17,12 +28,14 @@ env:
|
||||
REGISTRY: ghcr.io
|
||||
# github.repository as <account>/<repo>
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
@@ -35,82 +48,61 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Get Release Tag
|
||||
- name: Extract release version from tag
|
||||
id: extract_release_tag
|
||||
run: |
|
||||
TAG=${{ github.ref }}
|
||||
TAG=${TAG#refs/tags/v}
|
||||
echo "RELEASE_TAG=$TAG" >> $GITHUB_ENV
|
||||
set -euo pipefail
|
||||
|
||||
# Extract tag name with fallback logic for different trigger contexts
|
||||
if [[ -n "${RELEASE_TAG:-}" ]]; then
|
||||
TAG="$RELEASE_TAG"
|
||||
echo "Using RELEASE_TAG override: $TAG"
|
||||
elif [[ "$GITHUB_REF_NAME" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]] || [[ "$GITHUB_REF_NAME" =~ ^v[0-9] ]]; then
|
||||
TAG="$GITHUB_REF_NAME"
|
||||
echo "Using GITHUB_REF_NAME (looks like tag): $TAG"
|
||||
else
|
||||
# Fallback: extract from GITHUB_REF for direct tag triggers
|
||||
TAG="${GITHUB_REF#refs/tags/}"
|
||||
if [[ -z "$TAG" || "$TAG" == "$GITHUB_REF" ]]; then
|
||||
TAG="$GITHUB_REF_NAME"
|
||||
echo "Using GITHUB_REF_NAME as final fallback: $TAG"
|
||||
else
|
||||
echo "Extracted from GITHUB_REF: $TAG"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Strip v-prefix if present (normalize to clean SemVer)
|
||||
TAG=${TAG#[vV]}
|
||||
|
||||
# Validate SemVer format (supports prereleases like 4.0.0-rc.1)
|
||||
if [[ ! "$TAG" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]]; then
|
||||
echo "ERROR: Invalid tag format '$TAG'. Expected SemVer (e.g., 1.2.3, 4.0.0-rc.1)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "VERSION=$TAG" >> $GITHUB_OUTPUT
|
||||
echo "Using version: $TAG"
|
||||
|
||||
- name: Update package.json version
|
||||
run: |
|
||||
sed -i "s/\"version\": \"0.0.0\"/\"version\": \"${{ env.RELEASE_TAG }}\"/" ./apps/web/package.json
|
||||
cat ./apps/web/package.json | grep version
|
||||
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
|
||||
|
||||
# Install the cosign tool except on PR
|
||||
# https://github.com/sigstore/cosign-installer
|
||||
- name: Install cosign
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
|
||||
|
||||
# Login against a Docker registry except on PR
|
||||
# https://github.com/docker/login-action
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
- name: Build and push community release image
|
||||
id: build
|
||||
uses: ./.github/actions/build-and-push-docker
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Extract metadata (tags, labels) for Docker
|
||||
# https://github.com/docker/metadata-action
|
||||
- name: Extract Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
|
||||
# Build and push Docker image with Buildx (don't push on PR)
|
||||
# https://github.com/docker/build-push-action
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
|
||||
with:
|
||||
project: tw0fqmsx3c
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
context: .
|
||||
file: ./apps/web/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
secrets: |
|
||||
database_url=${{ secrets.DUMMY_DATABASE_URL }}
|
||||
encryption_key=${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
|
||||
# Sign the resulting Docker image digest except on PRs.
|
||||
# This will only write to the public Rekor transparency log when the Docker
|
||||
# repository is public to avoid leaking data. If you would like to publish
|
||||
# transparency data even for private images, pass --force to cosign below.
|
||||
# https://github.com/sigstore/cosign
|
||||
- name: Sign the published Docker image
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
registry_type: "ghcr"
|
||||
ghcr_image_name: ${{ env.IMAGE_NAME }}
|
||||
version: ${{ steps.extract_release_tag.outputs.VERSION }}
|
||||
is_prerelease: ${{ inputs.IS_PRERELEASE }}
|
||||
make_latest: ${{ inputs.MAKE_LATEST }}
|
||||
env:
|
||||
# https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable
|
||||
TAGS: ${{ steps.meta.outputs.tags }}
|
||||
DIGEST: ${{ steps.build-and-push.outputs.digest }}
|
||||
# This step uses the identity token to provision an ephemeral certificate
|
||||
# against the sigstore community Fulcio instance.
|
||||
run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
DEPOT_PROJECT_TOKEN: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
DUMMY_DATABASE_URL: ${{ secrets.DUMMY_DATABASE_URL }}
|
||||
DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
DUMMY_REDIS_URL: ${{ secrets.DUMMY_REDIS_URL }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
|
||||
53  .github/workflows/release-helm-chart.yml  vendored
@@ -19,15 +19,30 @@ jobs:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Extract release version
|
||||
run: echo "VERSION=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
|
||||
- name: Validate input version
|
||||
env:
|
||||
INPUT_VERSION: ${{ inputs.VERSION }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Validate input version format (expects clean semver without 'v' prefix)
|
||||
if [[ ! "$INPUT_VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$ ]]; then
|
||||
echo "❌ Error: Invalid version format. Must be clean semver (e.g., 1.2.3, 1.2.3-alpha)"
|
||||
echo "Expected: clean version without 'v' prefix"
|
||||
echo "Provided: $INPUT_VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Store validated version in environment variable
|
||||
echo "VERSION<<EOF" >> $GITHUB_ENV
|
||||
echo "$INPUT_VERSION" >> $GITHUB_ENV
|
||||
echo "EOF" >> $GITHUB_ENV
|
||||
|
||||
- name: Set up Helm
|
||||
uses: azure/setup-helm@5119fcb9089d432beecbf79bb2c7915207344b78 # v3.5
|
||||
@@ -35,20 +50,44 @@ jobs:
|
||||
version: latest
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
run: echo "${{ secrets.GITHUB_TOKEN }}" | helm registry login ghcr.io --username ${{ github.actor }} --password-stdin
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_ACTOR: ${{ github.actor }}
|
||||
run: printf '%s' "$GITHUB_TOKEN" | helm registry login ghcr.io --username "$GITHUB_ACTOR" --password-stdin
|
||||
|
||||
- name: Install YQ
|
||||
uses: dcarbone/install-yq-action@4075b4dca348d74bd83f2bf82d30f25d7c54539b # v1.3.1
|
||||
|
||||
- name: Update Chart.yaml with new version
|
||||
env:
|
||||
VERSION: ${{ env.VERSION }}
|
||||
run: |
|
||||
yq -i ".version = \"${{ inputs.VERSION }}\"" helm-chart/Chart.yaml
|
||||
yq -i ".appVersion = \"v${{ inputs.VERSION }}\"" helm-chart/Chart.yaml
|
||||
set -euo pipefail
|
||||
|
||||
echo "Updating Chart.yaml with version: ${VERSION}"
|
||||
yq -i ".version = \"${VERSION}\"" helm-chart/Chart.yaml
|
||||
yq -i ".appVersion = \"${VERSION}\"" helm-chart/Chart.yaml
|
||||
|
||||
echo "✅ Successfully updated Chart.yaml"
|
||||
|
||||
- name: Package Helm chart
|
||||
env:
|
||||
VERSION: ${{ env.VERSION }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo "Packaging Helm chart version: ${VERSION}"
|
||||
helm package ./helm-chart
|
||||
|
||||
echo "✅ Successfully packaged formbricks-${VERSION}.tgz"
|
||||
|
||||
- name: Push Helm chart to GitHub Container Registry
|
||||
env:
|
||||
VERSION: ${{ env.VERSION }}
|
||||
run: |
|
||||
helm push formbricks-${{ inputs.VERSION }}.tgz oci://ghcr.io/formbricks/helm-charts
|
||||
set -euo pipefail
|
||||
|
||||
echo "Pushing Helm chart to registry: formbricks-${VERSION}.tgz"
|
||||
helm push "formbricks-${VERSION}.tgz" oci://ghcr.io/formbricks/helm-charts
|
||||
|
||||
echo "✅ Successfully pushed Helm chart to registry"
|
||||
|
||||
81  .github/workflows/scorecard.yml  vendored
@@ -1,81 +0,0 @@
|
||||
# This workflow uses actions that are not certified by GitHub. They are provided
|
||||
# by a third-party and are governed by separate terms of service, privacy
|
||||
# policy, and support documentation.
|
||||
|
||||
name: Scorecard supply-chain security
|
||||
on:
|
||||
# For Branch-Protection check. Only the default branch is supported. See
|
||||
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
|
||||
branch_protection_rule:
|
||||
# To guarantee Maintained check is occasionally updated. See
|
||||
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
|
||||
schedule:
|
||||
- cron: "17 17 * * 6"
|
||||
push:
|
||||
branches: ["main"]
|
||||
workflow_dispatch:
|
||||
|
||||
# Declare default permissions as read only.
|
||||
permissions: read-all
|
||||
|
||||
jobs:
|
||||
analysis:
|
||||
name: Scorecard analysis
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
# Needed to upload the results to code-scanning dashboard.
|
||||
security-events: write
|
||||
# Needed to publish results and get a badge (see publish_results below).
|
||||
id-token: write
|
||||
# Add this permission
|
||||
actions: write # Required for artifact upload
|
||||
# Uncomment the permissions below if installing in a private repository.
|
||||
# contents: read
|
||||
# actions: read
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: "Checkout code"
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: "Run analysis"
|
||||
uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
|
||||
with:
|
||||
results_file: results.sarif
|
||||
results_format: sarif
|
||||
# (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
|
||||
# - you want to enable the Branch-Protection check on a *public* repository, or
|
||||
# - you are installing Scorecard on a *private* repository
|
||||
# To create the PAT, follow the steps in https://github.com/ossf/scorecard-action?tab=readme-ov-file#authentication-with-fine-grained-pat-optional.
|
||||
# repo_token: ${{ secrets.SCORECARD_TOKEN }}
|
||||
|
||||
# Public repositories:
|
||||
# - Publish results to OpenSSF REST API for easy access by consumers
|
||||
# - Allows the repository to include the Scorecard badge.
|
||||
# - See https://github.com/ossf/scorecard-action#publishing-results.
|
||||
# For private repositories:
|
||||
# - `publish_results` will always be set to `false`, regardless
|
||||
# of the value entered here.
|
||||
publish_results: true
|
||||
|
||||
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
|
||||
# format to the repository Actions tab.
|
||||
- name: "Upload artifact"
|
||||
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
|
||||
with:
|
||||
name: sarif
|
||||
path: results.sarif
|
||||
retention-days: 5
|
||||
|
||||
# Upload the results to GitHub's code scanning dashboard (optional).
|
||||
# Commenting out will disable upload of results to your repo's Code Scanning dashboard
|
||||
- name: "Upload to code-scanning"
|
||||
uses: github/codeql-action/upload-sarif@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v3.28.10
|
||||
with:
|
||||
sarif_file: results.sarif
|
||||
8  .github/workflows/semantic-pull-requests.yml  vendored
@@ -56,11 +56,3 @@ jobs:
|
||||
```
|
||||
${{ steps.lint_pr_title.outputs.error_message }}
|
||||
```
|
||||
|
||||
# Delete a previous comment when the issue has been resolved
|
||||
- if: ${{ steps.lint_pr_title.outputs.error_message == null }}
|
||||
uses: marocchino/sticky-pull-request-comment@67d0dec7b07ed060a405f9b2a64b8ab319fdd7db # v2.9.2
|
||||
with:
|
||||
header: pr-title-lint-error
|
||||
message: |
|
||||
Thank you for following the naming conventions for pull request titles! 🙏
|
||||
|
||||
1  .github/workflows/sonarqube.yml  vendored
@@ -43,6 +43,7 @@ jobs:
|
||||
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
|
||||
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s|REDIS_URL=.*|REDIS_URL=|" .env
|
||||
|
||||
- name: Run tests with coverage
|
||||
run: |
|
||||
|
||||
84  .github/workflows/terraform-plan-and-apply.yml  vendored
@@ -1,84 +0,0 @@
|
||||
name: "Terraform"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
# TODO: enable it back when migration is completed.
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "infra/terraform/**"
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "infra/terraform/**"
|
||||
|
||||
jobs:
|
||||
terraform:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
pull-requests: write
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Tailscale
|
||||
uses: tailscale/github-action@v3
|
||||
with:
|
||||
oauth-client-id: ${{ secrets.TS_OAUTH_CLIENT_ID }}
|
||||
oauth-secret: ${{ secrets.TS_OAUTH_SECRET }}
|
||||
tags: tag:github
|
||||
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@f24d7193d98baebaeacc7e2227925dd47cc267f5 # v4.2.0
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }}
|
||||
aws-region: "eu-central-1"
|
||||
|
||||
- name: Setup Terraform
|
||||
uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3.1.2
|
||||
|
||||
- name: Terraform Format
|
||||
id: fmt
|
||||
run: terraform fmt -check -recursive
|
||||
continue-on-error: true
|
||||
working-directory: infra/terraform
|
||||
|
||||
- name: Terraform Init
|
||||
id: init
|
||||
run: terraform init
|
||||
working-directory: infra/terraform
|
||||
|
||||
- name: Terraform Validate
|
||||
id: validate
|
||||
run: terraform validate
|
||||
working-directory: infra/terraform
|
||||
|
||||
- name: Terraform Plan
|
||||
id: plan
|
||||
run: terraform plan -out .planfile
|
||||
working-directory: infra/terraform
|
||||
|
||||
- name: Post PR comment
|
||||
uses: borchero/terraform-plan-comment@434458316f8f24dd073cd2561c436cce41dc8f34 # v2.4.1
|
||||
if: always() && github.ref != 'refs/heads/main' && (steps.plan.outcome == 'success' || steps.plan.outcome == 'failure')
|
||||
with:
|
||||
token: ${{ github.token }}
|
||||
planfile: .planfile
|
||||
working-directory: "infra/terraform"
|
||||
|
||||
- name: Terraform Apply
|
||||
id: apply
|
||||
if: github.ref == 'refs/heads/main' && github.event_name == 'push'
|
||||
run: terraform apply .planfile
|
||||
working-directory: "infra/terraform"
|
||||
1  .github/workflows/test.yml  vendored
@@ -41,6 +41,7 @@ jobs:
|
||||
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
|
||||
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s|REDIS_URL=.*|REDIS_URL=|" .env
|
||||
|
||||
- name: Test
|
||||
run: pnpm test
|
||||
|
||||
51  .github/workflows/tolgee-missing-key-check.yml  vendored
@@ -1,51 +0,0 @@
|
||||
name: Check Missing Translations
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
pull_request_target:
|
||||
types: [opened, synchronize, reopened]
|
||||
|
||||
jobs:
|
||||
check-missing-translations:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.base.ref }}
|
||||
|
||||
- name: Checkout PR
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
|
||||
with:
|
||||
node-version: 18
|
||||
|
||||
- name: Install Tolgee CLI
|
||||
run: npm install -g @tolgee/cli
|
||||
|
||||
- name: Compare Tolgee Keys
|
||||
id: compare
|
||||
run: |
|
||||
tolgee compare --api-key ${{ secrets.TOLGEE_API_KEY }} > compare_output.txt
|
||||
cat compare_output.txt
|
||||
|
||||
- name: Check for Missing Translations
|
||||
run: |
|
||||
if grep -q "new key found" compare_output.txt; then
|
||||
echo "New keys found that may require translations:"
|
||||
exit 1
|
||||
else
|
||||
echo "No new keys found."
|
||||
fi
|
||||
87  .github/workflows/tolgee.yml  vendored
@@ -1,87 +0,0 @@
|
||||
name: Tolgee Tagging on PR Merge
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [closed]
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
tag-production-keys:
|
||||
name: Tag Production Keys
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.pull_request.merged == true
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0 # This ensures we get the full git history
|
||||
|
||||
- name: Get source branch name
|
||||
id: branch-name
|
||||
run: |
|
||||
RAW_BRANCH="${{ github.head_ref }}"
|
||||
SOURCE_BRANCH=$(echo "$RAW_BRANCH" | sed 's/[^a-zA-Z0-9._\/-]//g')
|
||||
|
||||
|
||||
# Safely add to environment variables using GitHub's recommended method
|
||||
# This prevents environment variable injection attacks
|
||||
echo "SOURCE_BRANCH<<EOF" >> $GITHUB_ENV
|
||||
echo "$SOURCE_BRANCH" >> $GITHUB_ENV
|
||||
echo "EOF" >> $GITHUB_ENV
|
||||
|
||||
echo "Detected source branch: $SOURCE_BRANCH"
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
|
||||
with:
|
||||
node-version: 18 # Ensure compatibility with your project
|
||||
|
||||
- name: Install Tolgee CLI
|
||||
run: npm install -g @tolgee/cli
|
||||
|
||||
- name: Tag Production Keys
|
||||
run: |
|
||||
npx tolgee tag \
|
||||
--api-key ${{ secrets.TOLGEE_API_KEY }} \
|
||||
--filter-extracted \
|
||||
--filter-tag "draft:${SOURCE_BRANCH}" \
|
||||
--tag production \
|
||||
--untag "draft:${SOURCE_BRANCH}"
|
||||
|
||||
- name: Tag unused production keys as Deprecated
|
||||
run: |
|
||||
npx tolgee tag \
|
||||
--api-key ${{ secrets.TOLGEE_API_KEY }} \
|
||||
--filter-not-extracted --filter-tag production \
|
||||
--tag deprecated --untag production
|
||||
|
||||
- name: Tag unused draft:current-branch keys as Deprecated
|
||||
run: |
|
||||
npx tolgee tag \
|
||||
--api-key ${{ secrets.TOLGEE_API_KEY }} \
|
||||
--filter-not-extracted --filter-tag "draft:${SOURCE_BRANCH}" \
|
||||
--tag deprecated --untag "draft:${SOURCE_BRANCH}"
|
||||
|
||||
- name: Sync with backup
|
||||
run: |
|
||||
npx tolgee sync \
|
||||
--api-key ${{ secrets.TOLGEE_API_KEY }} \
|
||||
--backup ./tolgee-backup \
|
||||
--continue-on-warning \
|
||||
--yes
|
||||
|
||||
- name: Upload backup as artifact
|
||||
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
|
||||
with:
|
||||
name: tolgee-backup-${{ github.sha }}
|
||||
path: ./tolgee-backup
|
||||
retention-days: 90
|
||||
63  .github/workflows/translation-check.yml  vendored  Normal file
@@ -0,0 +1,63 @@
|
||||
name: Translation Validation
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
paths:
|
||||
- "apps/web/**/*.ts"
|
||||
- "apps/web/**/*.tsx"
|
||||
- "apps/web/locales/**/*.json"
|
||||
- "scan-translations.ts"
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "apps/web/**/*.ts"
|
||||
- "apps/web/**/*.tsx"
|
||||
- "apps/web/locales/**/*.json"
|
||||
- "scan-translations.ts"
|
||||
|
||||
jobs:
|
||||
validate-translations:
|
||||
name: Validate Translation Keys
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
|
||||
with:
|
||||
node-version: 18
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a3252b78c470c02df07e9d59298aecedc3ccdd6d # v3.0.0
|
||||
with:
|
||||
version: 9.15.9
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Validate translation keys
|
||||
run: |
|
||||
echo ""
|
||||
echo "🔍 Validating translation keys..."
|
||||
echo ""
|
||||
pnpm run scan-translations
|
||||
|
||||
- name: Summary
|
||||
if: success()
|
||||
run: |
|
||||
echo ""
|
||||
echo "✅ Translation validation completed successfully!"
|
||||
echo ""
|
||||
32  .github/workflows/welcome-new-contributors.yml  vendored
@@ -1,32 +0,0 @@
|
||||
name: "Welcome new contributors"
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: opened
|
||||
pull_request_target:
|
||||
types: opened
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
issues: write
|
||||
|
||||
jobs:
|
||||
welcome-message:
|
||||
name: Welcoming New Users
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
if: github.event.action == 'opened'
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- uses: actions/first-interaction@3c71ce730280171fd1cfb57c00c774f8998586f7 # v1
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
pr-message: |-
|
||||
Thank you so much for making your first Pull Request and taking the time to improve Formbricks! 🚀🙏❤️
|
||||
Feel free to join the conversation on [Github Discussions](https://github.com/formbricks/formbricks/discussions) if you need any help or have any questions. 😊
|
||||
issue-message: |
|
||||
Thank you for opening your first issue! 🙏❤️ One of our team members will review it and get back to you as soon as possible. 😊
|
||||
15  .gitignore  vendored
@@ -56,21 +56,10 @@ packages/database/migrations
branch.json
.vercel

# Terraform
infra/terraform/.terraform/
**/.terraform.lock.hcl
**/terraform.tfstate
**/terraform.tfstate.*
**/crash.log
**/override.tf
**/override.tf.json
**/*.tfvars
**/*.tfvars.json
**/.terraformrc
**/terraform.rc

# IntelliJ IDEA
/.idea/
/*.iml
packages/ios/FormbricksSDK/FormbricksSDK.xcodeproj/project.xcworkspace/xcuserdata
.cursorrules
i18n.cache
stats.html

@@ -1,6 +1,3 @@
|
||||
#!/bin/sh
|
||||
. "$(dirname "$0")/_/husky.sh"
|
||||
|
||||
# Load environment variables from .env files
|
||||
if [ -f .env ]; then
|
||||
set -a
|
||||
@@ -10,12 +7,34 @@ fi
|
||||
|
||||
pnpm lint-staged
|
||||
|
||||
# Run tolgee-pull if branch.json exists and NEXT_PUBLIC_TOLGEE_API_KEY is not set
|
||||
if [ -f branch.json ]; then
|
||||
if [ -z "$NEXT_PUBLIC_TOLGEE_API_KEY" ]; then
|
||||
echo "Skipping tolgee-pull: NEXT_PUBLIC_TOLGEE_API_KEY is not set"
|
||||
# Run Lingo.dev i18n workflow if LINGODOTDEV_API_KEY is set
|
||||
if [ -n "$LINGODOTDEV_API_KEY" ]; then
|
||||
echo ""
|
||||
echo "🌍 Running Lingo.dev translation workflow..."
|
||||
echo ""
|
||||
|
||||
# Run translation generation and validation
|
||||
if pnpm run i18n; then
|
||||
echo ""
|
||||
echo "✅ Translation validation passed"
|
||||
echo ""
|
||||
# Add updated locale files to git
|
||||
git add apps/web/locales/*.json
|
||||
else
|
||||
pnpm run tolgee-pull
|
||||
git add apps/web/locales
|
||||
echo ""
|
||||
echo "❌ Translation validation failed!"
|
||||
echo ""
|
||||
echo "Please fix the translation issues above before committing:"
|
||||
echo " • Add missing translation keys to your locale files"
|
||||
echo " • Remove unused translation keys"
|
||||
echo ""
|
||||
echo "Or run 'pnpm i18n' to see the detailed report"
|
||||
echo ""
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo ""
|
||||
echo "⚠️ Skipping translation validation: LINGODOTDEV_API_KEY is not set"
|
||||
echo " (This is expected for community contributors)"
|
||||
echo ""
|
||||
fi
|
||||
@@ -1,39 +0,0 @@
|
||||
{
|
||||
"$schema": "https://docs.tolgee.io/cli-schema.json",
|
||||
"format": "JSON_TOLGEE",
|
||||
"patterns": ["./apps/web/**/*.ts?(x)"],
|
||||
"projectId": 10304,
|
||||
"pull": {
|
||||
"path": "./apps/web/locales"
|
||||
},
|
||||
"push": {
|
||||
"files": [
|
||||
{
|
||||
"language": "en-US",
|
||||
"path": "./apps/web/locales/en-US.json"
|
||||
},
|
||||
{
|
||||
"language": "de-DE",
|
||||
"path": "./apps/web/locales/de-DE.json"
|
||||
},
|
||||
{
|
||||
"language": "fr-FR",
|
||||
"path": "./apps/web/locales/fr-FR.json"
|
||||
},
|
||||
{
|
||||
"language": "pt-BR",
|
||||
"path": "./apps/web/locales/pt-BR.json"
|
||||
},
|
||||
{
|
||||
"language": "zh-Hant-TW",
|
||||
"path": "./apps/web/locales/zh-Hant-TW.json"
|
||||
},
|
||||
{
|
||||
"language": "pt-PT",
|
||||
"path": "./apps/web/locales/pt-PT.json"
|
||||
}
|
||||
],
|
||||
"forceMode": "OVERRIDE"
|
||||
},
|
||||
"strictNamespace": false
|
||||
}
|
||||
6  .vscode/settings.json  vendored
@@ -1,4 +1,10 @@
|
||||
{
|
||||
"eslint.validate": ["javascript", "javascriptreact", "typescript", "typescriptreact"],
|
||||
"eslint.workingDirectories": [
|
||||
{
|
||||
"mode": "auto"
|
||||
}
|
||||
],
|
||||
"javascript.updateImportsOnFileMove.enabled": "always",
|
||||
"sonarlint.connectedMode.project": {
|
||||
"connectionId": "formbricks",
|
||||
|
||||
82  AGENTS.md  Normal file
@@ -0,0 +1,82 @@
# Repository Guidelines

## Project Structure & Module Organization

Formbricks runs as a pnpm/turbo monorepo. `apps/web` is the Next.js product surface, with feature modules under `app/` and `modules/`, assets in `public/` and `images/`, and Playwright specs in `apps/web/playwright/`. `apps/storybook` renders reusable UI pieces for review. Shared logic lives in `packages/*`: `database` (Prisma schemas/migrations), `surveys`, `js-core`, `types`, plus linting and TypeScript presets (`config-*`). Deployment collateral is kept in `docs/`, `docker/`, and `helm-chart/`. Unit tests sit next to their source as `*.test.ts` or inside `__tests__`.

## Build, Test & Development Commands

- `pnpm install` — install workspace dependencies pinned by `pnpm-lock.yaml`.
- `pnpm db:up` / `pnpm db:down` — start/stop the Docker services backing the app.
- `pnpm dev` — run all app and worker dev servers in parallel via Turborepo.
- `pnpm build` — generate production builds for every package and app.
- `pnpm lint` — apply the shared ESLint rules across the workspace.
- `pnpm test` / `pnpm test:coverage` — execute Vitest suites with optional coverage.
- `pnpm test:e2e` — launch the Playwright browser regression suite.
- `pnpm db:migrate:dev` — apply Prisma migrations against the dev database.

## Coding Style & Naming Conventions

TypeScript, React, and Prisma are the primary languages. Use the shared ESLint presets (`@formbricks/eslint-config`) and Prettier preset (110-char width, semicolons, double quotes, sorted import groups). Two-space indentation is standard; prefer `PascalCase` for React components and folders under `modules/`, `camelCase` for functions/variables, and `SCREAMING_SNAKE_CASE` only for constants. When adding mocks, place them inside `__mocks__` so import ordering stays stable.
We use SonarQube to identify code smells and security hotspots.

## Architecture & Patterns

- Next.js app router lives in `apps/web/app` with route groups like `(app)` and `(auth)`. Services live in `apps/web/lib`, feature modules in `apps/web/modules`.
- Server actions wrap service calls and return `{ data }` or `{ error }` consistently (see the sketch after this list).
- Context providers should guard against missing provider usage and use cleanup patterns that snapshot refs inside `useEffect` to avoid React hooks warnings.

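To make the `{ data }` / `{ error }` convention above concrete, here is a minimal sketch of a server action wrapping a service call. It is illustrative only: `getSurveyAction`, `getSurveyById`, and the `TSurvey` shape are placeholder names, not actual Formbricks exports.

```ts
"use server";

// Minimal sketch of the "{ data } or { error }" action convention.
// getSurveyById stands in for a real service in apps/web/lib.
type TSurvey = { id: string; name: string };
type ActionResult<T> = { data: T } | { error: string };

const getSurveyById = async (surveyId: string): Promise<TSurvey | null> =>
  surveyId === "demo" ? { id: "demo", name: "Demo survey" } : null;

export const getSurveyAction = async (surveyId: string): Promise<ActionResult<TSurvey>> => {
  try {
    const survey = await getSurveyById(surveyId);
    if (!survey) return { error: "Survey not found" };
    return { data: survey };
  } catch (err) {
    // Return a consistent error shape instead of throwing across the server boundary.
    return { error: err instanceof Error ? err.message : "Unexpected error" };
  }
};
```
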
## Caching

- Use React `cache()` for request-level dedupe and `cache.withCache()` or explicit Redis for expensive data (see the sketch after this list).
- Do not use Next.js `unstable_cache()`.
- Always use `createCacheKey.*` utilities for cache keys.

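As a rough illustration of request-level dedupe, the sketch below wraps a lookup in React's `cache()`. The repo's own `cache.withCache()` and `createCacheKey.*` helpers are not shown because their exact signatures are repo-specific; `findEnvironmentById` and `TEnvironment` are placeholders for a real service or Prisma call.

```ts
import { cache } from "react";

type TEnvironment = { id: string; type: string };

// Placeholder for a real lookup, e.g. prisma.environment.findUnique({ where: { id } }).
const findEnvironmentById = async (id: string): Promise<TEnvironment | null> => ({
  id,
  type: "production",
});

// Repeated calls with the same id during a single server render are deduped,
// so the underlying lookup runs only once per request.
export const getEnvironment = cache(async (environmentId: string) => findEnvironmentById(environmentId));
```
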
## i18n (Internationalization)

- All user-facing text must use the `t()` function from `react-i18next` (see the sketch after this list).
- Key naming: use lowercase with dots for nesting (e.g., `common.welcome`).
- Translations are in `apps/web/locales/`. Default is `en-US.json`.
- Lingo.dev automatically translates strings from en-US into other languages on commit. Run `pnpm i18n` to generate missing translations and validate keys.

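A minimal sketch of the `t()` rule, assuming a client component and the standard `react-i18next` hook; `WelcomeHeading` is illustrative, and `common.welcome` is the example key from the list above.

```tsx
"use client";

import { useTranslation } from "react-i18next";

// The string behind "common.welcome" lives in apps/web/locales/en-US.json;
// other locales are generated from it on commit.
export const WelcomeHeading = () => {
  const { t } = useTranslation();
  return <h1>{t("common.welcome")}</h1>;
};
```
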
## Database & Prisma Performance

- Multi-tenancy: All data must be scoped by Organization or Environment.
- Soft Deletion: Check for `isActive` or `deletedAt` fields; use proper filtering.
- Never use `skip`/`offset` with `prisma.response.count()`; only use `where`.
- Separate count and data queries and run in parallel (`Promise.all`); see the sketch after this list.
- Prefer cursor pagination for large datasets.
- When filtering by `createdAt`, include indexed fields (e.g., `surveyId` + `createdAt`).

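The sketch below shows how the count/data guidance above might look in practice: one `where` clause scoped to a survey, a count query without `skip`/`offset`, both queries run in parallel, and cursor pagination ordered by `createdAt` alongside the indexed `surveyId`. Model and field names follow the guideline rather than the exact Formbricks schema, so treat them as assumptions.

```ts
import { PrismaClient, type Prisma } from "@prisma/client";

const prisma = new PrismaClient();

export const getResponsePage = async (surveyId: string, cursor?: string) => {
  // Tenant boundary: every query is scoped to the survey.
  const where: Prisma.ResponseWhereInput = { surveyId };

  const [totalCount, responses] = await Promise.all([
    prisma.response.count({ where }), // count: where only, no skip/offset
    prisma.response.findMany({
      where,
      orderBy: { createdAt: "desc" }, // pairs with an index on surveyId + createdAt
      take: 50,
      ...(cursor ? { cursor: { id: cursor }, skip: 1 } : {}), // cursor pagination
    }),
  ]);

  return { totalCount, responses, nextCursor: responses.at(-1)?.id ?? null };
};
```
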
## Testing Guidelines

Prefer Vitest with Testing Library for logic in `.ts` files, keeping specs colocated with the code they exercise (`utility.test.ts`). Do not write tests for `.tsx` files—React components are covered by Playwright E2E tests instead. Mock network and storage boundaries through helpers from `@formbricks/*`. Run `pnpm test` before opening a PR and `pnpm test:coverage` when touching critical flows; keep coverage from regressing. End-to-end scenarios belong in `apps/web/playwright`, using descriptive filenames (`billing.spec.ts`) and tagging slow suites with `@slow` when necessary.

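A small example of a colocated Vitest spec for a `.ts` utility; the `truncateText` helper is hypothetical and only shows the layout and naming the guideline describes.

```ts
// utility.test.ts: lives next to utility.ts
import { describe, expect, test } from "vitest";

// Hypothetical helper under test (would normally be imported from "./utility").
const truncateText = (text: string, maxLength: number): string =>
  text.length > maxLength ? `${text.slice(0, maxLength)}…` : text;

describe("truncateText", () => {
  test("returns short strings unchanged", () => {
    expect(truncateText("hello", 10)).toBe("hello");
  });

  test("truncates long strings and appends an ellipsis", () => {
    expect(truncateText("hello world", 5)).toBe("hello…");
  });
});
```
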
## Documentation (apps/docs)

- Add frontmatter with `title`, `description`, and `icon` at the top of the MDX file.
- Do not start with an H1; use Camel Case headings (only capitalize the feature name).
- Use Mintlify components for steps and callouts.
- If Enterprise-only, add the Enterprise note block described in docs.

## Storybook

- Stories live in `stories.tsx` in the component folder and import from `"./index"`.
- Use `@storybook/react-vite` and organize argTypes into `Behavior`, `Appearance`, `Content`.
- Include Default, Disabled (if supported), WithIcon (if supported), all variants, and edge cases.

## GitHub Actions

- Always set minimal `permissions` for `GITHUB_TOKEN`.
- On `ubuntu-latest`, add `step-security/harden-runner` as the first step.

## Quality Checklist

- Keep code DRY and small; remove dead code and unused imports.
- Follow React hooks rules, keep effects focused, and avoid unnecessary `useMemo`/`useCallback`.
- Prefer type inference, avoid `any`, and use shared types from `@formbricks/types`.
- Keep components focused, avoid deep nesting, and ensure basic accessibility.

## Commit & Pull Request Guidelines

Commits follow a lightweight Conventional Commit format (`fix:`, `chore:`, `feat:`) and usually append the PR number, e.g. `fix: update OpenAPI schema (#6617)`. Keep commits scoped and lint-clean. Pull requests should outline the problem, summarize the solution, and link to issues or product specs. Attach screenshots or gifs for UI-facing work, list any migrations or env changes, and paste the output of relevant commands (`pnpm test`, `pnpm lint`, `pnpm db:migrate:dev`) so reviewers can verify readiness.

@@ -14,17 +14,7 @@ Are you brimming with brilliant ideas? For new features that can elevate Formbri
|
||||
|
||||
## 🛠 Crafting Pull Requests
|
||||
|
||||
Ready to dive into the code and make a real impact? Here's your path:
|
||||
|
||||
1. **Read our Best Practices**: [It takes 5 minutes](https://formbricks.com/docs/developer-docs/contributing/get-started) but will help you save hours 🤓
|
||||
|
||||
1. **Fork the Repository:** Fork our repository or use [Gitpod](https://gitpod.io) or use [Github Codespaces](https://github.com/features/codespaces) to get started instantly.
|
||||
|
||||
1. **Tweak and Transform:** Work your coding magic and apply your changes.
|
||||
|
||||
1. **Pull Request Act:** If you're ready to go, craft a new pull request closely following our PR template 🙏
|
||||
|
||||
Would you prefer a chat before you dive into a lot of work? [Github Discussions](https://github.com/formbricks/formbricks/discussions) is your harbor. Share your thoughts, and we'll meet you there with open arms. We're responsive and friendly, promise!
|
||||
For the time being, we don't have the capacity to properly facilitate community contributions. It's a lot of engineering attention often spent on issues which don't follow our prioritization, so we've decided to only facilitate community code contributions in rare exceptions in the coming months.
|
||||
|
||||
## 🚀 Aspiring Features
|
||||
|
||||
|
||||
11  README.md
@@ -21,6 +21,7 @@ The Open Source Qualtrics Alternative
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/formbricks/formbricks/blob/main/LICENSE"><img src="https://img.shields.io/badge/License-AGPL-purple" alt="License"></a> <a href="https://github.com/formbricks/formbricks/stargazers"><img src="https://img.shields.io/github/stars/formbricks/formbricks?logo=github" alt="Github Stars"></a>
|
||||
<a href="https://insights.linuxfoundation.org/project/formbricks"><img src="https://insights.linuxfoundation.org/api/badge/health-score?project=formbricks"></a>
|
||||
<a href="https://news.ycombinator.com/item?id=32303986"><img src="https://img.shields.io/badge/Hacker%20News-122-%23FF6600" alt="Hacker News"></a>
|
||||
<a href="[https://www.producthunt.com/products/formbricks](https://www.producthunt.com/posts/formbricks)"><img src="https://img.shields.io/badge/Product%20Hunt-455-orange?logo=producthunt&logoColor=%23fff" alt="Product Hunt"></a>
|
||||
<a href="https://github.blog/2023-04-12-github-accelerator-our-first-cohort-and-whats-next/"><img src="https://img.shields.io/badge/2023-blue?logo=github&label=Github%20Accelerator" alt="Github Accelerator"></a>
|
||||
@@ -192,7 +193,7 @@ Here are a few options:
|
||||
|
||||
- Upvote issues with 👍 reaction so we know what the demand for a particular issue is to prioritize it within the roadmap.
|
||||
|
||||
Please check out [our contribution guide](https://formbricks.com/docs/developer-docs/contributing/get-started) and our [list of open issues](https://github.com/formbricks/formbricks/issues) for more information.
|
||||
- Note: For the time being, we can only facilitate code contributions as an exception.
|
||||
|
||||
## All Thanks To Our Contributors
|
||||
|
||||
@@ -202,6 +203,14 @@ Please check out [our contribution guide](https://formbricks.com/docs/developer-
|
||||
|
||||
</a>
|
||||
|
||||
## Thanks
|
||||
|
||||
Formbricks is supported by the following companies who provide us with their tools for free as part of their open-source support:
|
||||
|
||||
<a href="https://www.chromatic.com/"><img src="https://user-images.githubusercontent.com/321738/84662277-e3db4f80-af1b-11ea-88f5-91d67a5e59f6.png" width="153" height="30" alt="Chromatic" /></a>
|
||||
|
||||
<a href="https://sentry.io/"><img src="https://github.com/user-attachments/assets/d743ffd4-b575-4802-a29a-10136be9227e" width="150" height="30" alt="Sentry" /></a>
|
||||
|
||||
<a id="contact-us"></a>
|
||||
|
||||
## 📆 Contact us
|
||||
|
||||
@@ -1,27 +1,52 @@
|
||||
import type { StorybookConfig } from "@storybook/react-vite";
|
||||
import { dirname, join } from "path";
|
||||
import { createRequire } from "module";
|
||||
import { dirname, join, resolve } from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
const require = createRequire(import.meta.url);
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
|
||||
/**
|
||||
* This function is used to resolve the absolute path of a package.
|
||||
* It is needed in projects that use Yarn PnP or are set up within a monorepo.
|
||||
*/
|
||||
const getAbsolutePath = (value: string) => {
|
||||
function getAbsolutePath(value: string): any {
|
||||
return dirname(require.resolve(join(value, "package.json")));
|
||||
};
|
||||
}
|
||||
|
||||
const config: StorybookConfig = {
|
||||
stories: ["../src/**/*.mdx", "../../web/modules/ui/**/stories.@(js|jsx|mjs|ts|tsx)"],
|
||||
stories: ["../src/**/*.mdx", "../../../packages/survey-ui/src/**/*.stories.@(js|jsx|mjs|ts|tsx)"],
|
||||
addons: [
|
||||
getAbsolutePath("@storybook/addon-onboarding"),
|
||||
getAbsolutePath("@storybook/addon-links"),
|
||||
getAbsolutePath("@storybook/addon-essentials"),
|
||||
getAbsolutePath("@chromatic-com/storybook"),
|
||||
getAbsolutePath("@storybook/addon-interactions"),
|
||||
getAbsolutePath("@storybook/addon-a11y"),
|
||||
getAbsolutePath("@storybook/addon-docs"),
|
||||
],
|
||||
framework: {
|
||||
name: getAbsolutePath("@storybook/react-vite"),
|
||||
options: {},
|
||||
},
|
||||
async viteFinal(config) {
|
||||
const surveyUiPath = resolve(__dirname, "../../../packages/survey-ui/src");
|
||||
const rootPath = resolve(__dirname, "../../../");
|
||||
|
||||
// Configure server to allow files from outside the storybook directory
|
||||
config.server = config.server || {};
|
||||
config.server.fs = {
|
||||
...config.server.fs,
|
||||
allow: [...(config.server.fs?.allow || []), rootPath],
|
||||
};
|
||||
|
||||
// Configure simple alias resolution
|
||||
config.resolve = config.resolve || {};
|
||||
config.resolve.alias = {
|
||||
...config.resolve.alias,
|
||||
"@": surveyUiPath,
|
||||
};
|
||||
|
||||
return config;
|
||||
},
|
||||
};
|
||||
export default config;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import type { Preview } from "@storybook/react";
|
||||
import "../../web/modules/ui/globals.css";
|
||||
import type { Preview } from "@storybook/react-vite";
|
||||
import React from "react";
|
||||
import "../../../packages/survey-ui/src/styles/globals.css";
|
||||
|
||||
const preview: Preview = {
|
||||
parameters: {
|
||||
@@ -8,8 +9,23 @@ const preview: Preview = {
|
||||
color: /(background|color)$/i,
|
||||
date: /Date$/i,
|
||||
},
|
||||
expanded: true,
|
||||
},
|
||||
backgrounds: {
|
||||
default: "light",
|
||||
},
|
||||
},
|
||||
decorators: [
|
||||
(Story) =>
|
||||
React.createElement(
|
||||
"div",
|
||||
{
|
||||
id: "fbjs",
|
||||
className: "w-full h-full min-h-screen p-4 bg-background font-sans antialiased text-foreground",
|
||||
},
|
||||
React.createElement(Story)
|
||||
),
|
||||
],
|
||||
};
|
||||
|
||||
export default preview;
|
||||
|
||||
@@ -11,26 +11,24 @@
|
||||
"clean": "rimraf .turbo node_modules dist storybook-static"
|
||||
},
|
||||
"dependencies": {
|
||||
"eslint-plugin-react-refresh": "0.4.20"
|
||||
"@formbricks/survey-ui": "workspace:*"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@chromatic-com/storybook": "3.2.6",
|
||||
"@storybook/addon-a11y": "8.6.12",
|
||||
"@storybook/addon-essentials": "8.6.12",
|
||||
"@storybook/addon-interactions": "8.6.12",
|
||||
"@storybook/addon-links": "8.6.12",
|
||||
"@storybook/addon-onboarding": "8.6.12",
|
||||
"@storybook/blocks": "8.6.12",
|
||||
"@storybook/react": "8.6.12",
|
||||
"@storybook/react-vite": "8.6.12",
|
||||
"@storybook/test": "8.6.12",
|
||||
"@typescript-eslint/eslint-plugin": "8.32.0",
|
||||
"@typescript-eslint/parser": "8.32.0",
|
||||
"@vitejs/plugin-react": "4.4.1",
|
||||
"esbuild": "0.25.4",
|
||||
"eslint-plugin-storybook": "0.12.0",
|
||||
"@chromatic-com/storybook": "^5.0.0",
|
||||
"@storybook/addon-a11y": "10.1.11",
|
||||
"@storybook/addon-links": "10.1.11",
|
||||
"@storybook/addon-onboarding": "10.1.11",
|
||||
"@storybook/react-vite": "10.1.11",
|
||||
"@typescript-eslint/eslint-plugin": "8.53.0",
|
||||
"@tailwindcss/vite": "4.1.18",
|
||||
"@typescript-eslint/parser": "8.53.0",
|
||||
"@vitejs/plugin-react": "5.1.2",
|
||||
"esbuild": "0.27.2",
|
||||
"eslint-plugin-react-refresh": "0.4.26",
|
||||
"eslint-plugin-storybook": "10.1.11",
|
||||
"prop-types": "15.8.1",
|
||||
"storybook": "8.6.12",
|
||||
"vite": "6.3.5"
|
||||
"storybook": "10.1.11",
|
||||
"vite": "7.3.1",
|
||||
"@storybook/addon-docs": "10.1.11"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
export default {
|
||||
plugins: {
|
||||
tailwindcss: {},
|
||||
autoprefixer: {},
|
||||
},
|
||||
};
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Meta } from "@storybook/blocks";
|
||||
import { Meta } from "@storybook/addon-docs/blocks";
|
||||
|
||||
import Accessibility from "./assets/accessibility.png";
|
||||
import AddonLibrary from "./assets/addon-library.png";
|
||||
|
||||
@@ -1,7 +1,15 @@
|
||||
/** @type {import('tailwindcss').Config} */
|
||||
import base from "../web/tailwind.config";
|
||||
import surveyUi from "../../packages/survey-ui/tailwind.config";
|
||||
|
||||
export default {
|
||||
...base,
|
||||
content: ["./index.html", "./src/**/*.{js,ts,jsx,tsx}", "../web/modules/ui/**/*.{js,ts,jsx,tsx}"],
|
||||
content: [
|
||||
"./index.html",
|
||||
"./src/**/*.{js,ts,jsx,tsx}",
|
||||
"../../packages/survey-ui/src/**/*.{js,ts,jsx,tsx}",
|
||||
],
|
||||
theme: {
|
||||
extend: {
|
||||
...surveyUi.theme?.extend,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,16 +1,17 @@
|
||||
import tailwindcss from "@tailwindcss/vite";
|
||||
import react from "@vitejs/plugin-react";
|
||||
import path from "path";
|
||||
import { defineConfig } from "vite";
|
||||
|
||||
// https://vitejs.dev/config/
|
||||
export default defineConfig({
|
||||
plugins: [react()],
|
||||
plugins: [react(), tailwindcss()],
|
||||
define: {
|
||||
"process.env": {},
|
||||
},
|
||||
resolve: {
|
||||
alias: {
|
||||
"@": path.resolve(__dirname, "../web"),
|
||||
"@formbricks/survey-ui": path.resolve(__dirname, "../../packages/survey-ui/src"),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
7  apps/web/.eslintignore  Normal file
@@ -0,0 +1,7 @@
node_modules/
.next/
public/
playwright/
dist/
coverage/
vendor/
@@ -1,4 +1,4 @@
|
||||
FROM node:22-alpine3.21 AS base
|
||||
FROM node:22-alpine3.22 AS base
|
||||
|
||||
#
|
||||
## step 1: Prune monorepo
|
||||
@@ -25,26 +25,22 @@ RUN corepack prepare pnpm@9.15.9 --activate
|
||||
# Install necessary build tools and compilers
|
||||
RUN apk update && apk add --no-cache cmake g++ gcc jq make openssl-dev python3
|
||||
|
||||
# BuildKit secret handling without hardcoded fallback values
|
||||
# This approach relies entirely on secrets passed from GitHub Actions
|
||||
RUN echo '#!/bin/sh' > /tmp/read-secrets.sh && \
|
||||
echo 'if [ -f "/run/secrets/database_url" ]; then' >> /tmp/read-secrets.sh && \
|
||||
echo ' export DATABASE_URL=$(cat /run/secrets/database_url)' >> /tmp/read-secrets.sh && \
|
||||
echo 'else' >> /tmp/read-secrets.sh && \
|
||||
echo ' echo "DATABASE_URL secret not found. Build may fail if this is required."' >> /tmp/read-secrets.sh && \
|
||||
echo 'fi' >> /tmp/read-secrets.sh && \
|
||||
echo 'if [ -f "/run/secrets/encryption_key" ]; then' >> /tmp/read-secrets.sh && \
|
||||
echo ' export ENCRYPTION_KEY=$(cat /run/secrets/encryption_key)' >> /tmp/read-secrets.sh && \
|
||||
echo 'else' >> /tmp/read-secrets.sh && \
|
||||
echo ' echo "ENCRYPTION_KEY secret not found. Build may fail if this is required."' >> /tmp/read-secrets.sh && \
|
||||
echo 'fi' >> /tmp/read-secrets.sh && \
|
||||
echo 'exec "$@"' >> /tmp/read-secrets.sh && \
|
||||
chmod +x /tmp/read-secrets.sh
|
||||
# Copy the secrets handling script
|
||||
COPY apps/web/scripts/docker/read-secrets.sh /tmp/read-secrets.sh
|
||||
RUN chmod +x /tmp/read-secrets.sh
|
||||
|
||||
# Increase Node.js memory limit as a regular build argument
|
||||
ARG NODE_OPTIONS="--max_old_space_size=4096"
|
||||
ARG NODE_OPTIONS="--max_old_space_size=8192"
|
||||
ENV NODE_OPTIONS=${NODE_OPTIONS}
|
||||
|
||||
# Target architecture - automatically provided by Docker in multi-platform builds
|
||||
# but needs explicit declaration for some build systems (like Depot)
|
||||
ARG TARGETARCH
|
||||
|
||||
# Base path for the application (optional)
|
||||
ARG BASE_PATH=""
|
||||
ENV BASE_PATH=${BASE_PATH}
|
||||
|
||||
# Set the working directory
|
||||
WORKDIR /app
|
||||
|
||||
@@ -62,10 +58,15 @@ RUN touch apps/web/.env
|
||||
# Install the dependencies
|
||||
RUN pnpm install --ignore-scripts
|
||||
|
||||
# Build the database package first
|
||||
RUN pnpm build --filter=@formbricks/database
|
||||
|
||||
# Build the project using our secret reader script
|
||||
# This mounts the secrets only during this build step without storing them in layers
|
||||
RUN --mount=type=secret,id=database_url \
|
||||
--mount=type=secret,id=encryption_key \
|
||||
--mount=type=secret,id=redis_url \
|
||||
--mount=type=secret,id=sentry_auth_token \
|
||||
/tmp/read-secrets.sh pnpm build --filter=@formbricks/web...
|
||||
|
||||
# Extract Prisma version
|
||||
@@ -76,8 +77,8 @@ RUN jq -r '.devDependencies.prisma' packages/database/package.json > /prisma_ver
|
||||
#
|
||||
FROM base AS runner
|
||||
|
||||
RUN npm install --ignore-scripts -g corepack@latest
|
||||
RUN corepack enable
|
||||
RUN npm install --ignore-scripts -g corepack@latest && \
|
||||
corepack enable
|
||||
|
||||
RUN apk add --no-cache curl \
|
||||
&& apk add --no-cache supercronic \
|
||||
@@ -103,23 +104,14 @@ RUN chown -R nextjs:nextjs ./apps/web/.next/static && chmod -R 755 ./apps/web/.n
|
||||
COPY --from=installer /app/apps/web/public ./apps/web/public
|
||||
RUN chown -R nextjs:nextjs ./apps/web/public && chmod -R 755 ./apps/web/public
|
||||
|
||||
# Create packages/database directory structure with proper ownership for runtime migrations
|
||||
RUN mkdir -p ./packages/database/migrations && chown -R nextjs:nextjs ./packages/database
|
||||
|
||||
COPY --from=installer /app/packages/database/schema.prisma ./packages/database/schema.prisma
|
||||
RUN chown nextjs:nextjs ./packages/database/schema.prisma && chmod 644 ./packages/database/schema.prisma
|
||||
|
||||
COPY --from=installer /app/packages/database/package.json ./packages/database/package.json
|
||||
RUN chown nextjs:nextjs ./packages/database/package.json && chmod 644 ./packages/database/package.json
|
||||
|
||||
COPY --from=installer /app/packages/database/migration ./packages/database/migration
|
||||
RUN chown -R nextjs:nextjs ./packages/database/migration && chmod -R 755 ./packages/database/migration
|
||||
|
||||
COPY --from=installer /app/packages/database/src ./packages/database/src
|
||||
RUN chown -R nextjs:nextjs ./packages/database/src && chmod -R 755 ./packages/database/src
|
||||
|
||||
COPY --from=installer /app/packages/database/node_modules ./packages/database/node_modules
|
||||
RUN chown -R nextjs:nextjs ./packages/database/node_modules && chmod -R 755 ./packages/database/node_modules
|
||||
|
||||
COPY --from=installer /app/packages/logger/dist ./packages/database/node_modules/@formbricks/logger/dist
|
||||
RUN chown -R nextjs:nextjs ./packages/database/node_modules/@formbricks/logger/dist && chmod -R 755 ./packages/database/node_modules/@formbricks/logger/dist
|
||||
COPY --from=installer /app/packages/database/dist ./packages/database/dist
|
||||
RUN chown -R nextjs:nextjs ./packages/database/dist && chmod -R 755 ./packages/database/dist
|
||||
|
||||
COPY --from=installer /app/node_modules/@prisma/client ./node_modules/@prisma/client
|
||||
RUN chown -R nextjs:nextjs ./node_modules/@prisma/client && chmod -R 755 ./node_modules/@prisma/client
|
||||
@@ -130,40 +122,35 @@ RUN chown -R nextjs:nextjs ./node_modules/.prisma && chmod -R 755 ./node_modules
|
||||
COPY --from=installer /prisma_version.txt .
|
||||
RUN chown nextjs:nextjs ./prisma_version.txt && chmod 644 ./prisma_version.txt
|
||||
|
||||
COPY /docker/cronjobs /app/docker/cronjobs
|
||||
RUN chmod -R 755 /app/docker/cronjobs
|
||||
|
||||
COPY --from=installer /app/node_modules/@paralleldrive/cuid2 ./node_modules/@paralleldrive/cuid2
|
||||
RUN chmod -R 755 ./node_modules/@paralleldrive/cuid2
|
||||
|
||||
COPY --from=installer /app/node_modules/uuid ./node_modules/uuid
|
||||
RUN chmod -R 755 ./node_modules/uuid
|
||||
|
||||
COPY --from=installer /app/node_modules/@noble/hashes ./node_modules/@noble/hashes
|
||||
RUN chmod -R 755 ./node_modules/@noble/hashes
|
||||
|
||||
COPY --from=installer /app/node_modules/zod ./node_modules/zod
|
||||
RUN chmod -R 755 ./node_modules/zod
|
||||
|
||||
RUN npm install --ignore-scripts -g tsx typescript pino-pretty
|
||||
RUN npm install -g prisma
|
||||
RUN npm install -g prisma@6
|
||||
|
||||
# Create a startup script to handle the conditional logic
|
||||
COPY --from=installer /app/apps/web/scripts/docker/next-start.sh /home/nextjs/start.sh
|
||||
RUN chown nextjs:nextjs /home/nextjs/start.sh && chmod +x /home/nextjs/start.sh
|
||||
|
||||
EXPOSE 3000
|
||||
ENV HOSTNAME "0.0.0.0"
|
||||
ENV NODE_ENV="production"
|
||||
ENV HOSTNAME="0.0.0.0"
|
||||
USER nextjs
|
||||
|
||||
# Prepare volume for uploads
|
||||
RUN mkdir -p /home/nextjs/apps/web/uploads/
|
||||
VOLUME /home/nextjs/apps/web/uploads/
|
||||
# Prepare pnpm as the nextjs user to ensure it's available at runtime
|
||||
# Prepare volumes for uploads and SAML connections
|
||||
RUN corepack prepare pnpm@9.15.9 --activate && \
|
||||
mkdir -p /home/nextjs/apps/web/uploads/ && \
|
||||
mkdir -p /home/nextjs/apps/web/saml-connection
|
||||
|
||||
# Prepare volume for SAML preloaded connection
|
||||
RUN mkdir -p /home/nextjs/apps/web/saml-connection
|
||||
VOLUME /home/nextjs/apps/web/uploads/
|
||||
VOLUME /home/nextjs/apps/web/saml-connection
|
||||
|
||||
CMD if [ "${DOCKER_CRON_ENABLED:-1}" = "1" ]; then \
|
||||
echo "Starting cron jobs..."; \
|
||||
supercronic -quiet /app/docker/cronjobs & \
|
||||
else \
|
||||
echo "Docker cron jobs are disabled via DOCKER_CRON_ENABLED=0"; \
|
||||
fi; \
|
||||
(cd packages/database && npm run db:migrate:deploy) && \
|
||||
(cd packages/database && npm run db:create-saml-database:deploy) && \
|
||||
exec node apps/web/server.js
|
||||
CMD ["/home/nextjs/start.sh"]
|
||||
@@ -1,79 +0,0 @@
|
||||
import "@testing-library/jest-dom/vitest";
|
||||
import { cleanup, render, screen } from "@testing-library/react";
|
||||
import userEvent from "@testing-library/user-event";
|
||||
import { afterEach, describe, expect, test, vi } from "vitest";
|
||||
import { ConnectWithFormbricks } from "./ConnectWithFormbricks";
|
||||
|
||||
// Mocks before import
|
||||
const pushMock = vi.fn();
|
||||
const refreshMock = vi.fn();
|
||||
vi.mock("@tolgee/react", () => ({ useTranslate: () => ({ t: (key: string) => key }) }));
|
||||
vi.mock("next/navigation", () => ({ useRouter: vi.fn(() => ({ push: pushMock, refresh: refreshMock })) }));
|
||||
vi.mock("./OnboardingSetupInstructions", () => ({
|
||||
OnboardingSetupInstructions: () => <div data-testid="instructions" />,
|
||||
}));
|
||||
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe("ConnectWithFormbricks", () => {
|
||||
const environment = { id: "env1" } as any;
|
||||
const webAppUrl = "http://app";
|
||||
const channel = {} as any;
|
||||
|
||||
test("renders waiting state when widgetSetupCompleted is false", () => {
|
||||
render(
|
||||
<ConnectWithFormbricks
|
||||
environment={environment}
|
||||
publicDomain={webAppUrl}
|
||||
widgetSetupCompleted={false}
|
||||
channel={channel}
|
||||
/>
|
||||
);
|
||||
expect(screen.getByTestId("instructions")).toBeInTheDocument();
|
||||
expect(screen.getByText("environments.connect.waiting_for_your_signal")).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("renders success state when widgetSetupCompleted is true", () => {
|
||||
render(
|
||||
<ConnectWithFormbricks
|
||||
environment={environment}
|
||||
publicDomain={webAppUrl}
|
||||
widgetSetupCompleted={true}
|
||||
channel={channel}
|
||||
/>
|
||||
);
|
||||
expect(screen.getByText("environments.connect.congrats")).toBeInTheDocument();
|
||||
expect(screen.getByText("environments.connect.connection_successful_message")).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("clicking finish button navigates to surveys", async () => {
|
||||
render(
|
||||
<ConnectWithFormbricks
|
||||
environment={environment}
|
||||
publicDomain={webAppUrl}
|
||||
widgetSetupCompleted={true}
|
||||
channel={channel}
|
||||
/>
|
||||
);
|
||||
const button = screen.getByRole("button", { name: "environments.connect.finish_onboarding" });
|
||||
await userEvent.click(button);
|
||||
expect(pushMock).toHaveBeenCalledWith(`/environments/${environment.id}/surveys`);
|
||||
});
|
||||
|
||||
test("refresh is called on visibilitychange to visible", () => {
|
||||
render(
|
||||
<ConnectWithFormbricks
|
||||
environment={environment}
|
||||
publicDomain={webAppUrl}
|
||||
widgetSetupCompleted={false}
|
||||
channel={channel}
|
||||
/>
|
||||
);
|
||||
Object.defineProperty(document, "visibilityState", { value: "visible", configurable: true });
|
||||
document.dispatchEvent(new Event("visibilitychange"));
|
||||
expect(refreshMock).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -1,29 +1,29 @@
"use client";

import { cn } from "@/lib/cn";
import { Button } from "@/modules/ui/components/button";
import { useTranslate } from "@tolgee/react";
import { ArrowRight } from "lucide-react";
import { useRouter } from "next/navigation";
import { useEffect } from "react";
import { useTranslation } from "react-i18next";
import { TEnvironment } from "@formbricks/types/environment";
import { TProjectConfigChannel } from "@formbricks/types/project";
import { cn } from "@/lib/cn";
import { Button } from "@/modules/ui/components/button";
import { OnboardingSetupInstructions } from "./OnboardingSetupInstructions";

interface ConnectWithFormbricksProps {
  environment: TEnvironment;
  publicDomain: string;
  widgetSetupCompleted: boolean;
  appSetupCompleted: boolean;
  channel: TProjectConfigChannel;
}

export const ConnectWithFormbricks = ({
  environment,
  publicDomain,
  widgetSetupCompleted,
  appSetupCompleted,
  channel,
}: ConnectWithFormbricksProps) => {
  const { t } = useTranslate();
  const { t } = useTranslation();
  const router = useRouter();
  const handleFinishOnboarding = async () => {
    router.push(`/environments/${environment.id}/surveys`);
@@ -51,15 +51,15 @@ export const ConnectWithFormbricks = ({
  environmentId={environment.id}
  publicDomain={publicDomain}
  channel={channel}
  widgetSetupCompleted={widgetSetupCompleted}
  appSetupCompleted={appSetupCompleted}
/>
</div>
<div
  className={cn(
    "flex h-[30rem] w-1/2 flex-col items-center justify-center rounded-lg border text-center",
    widgetSetupCompleted ? "border-green-500 bg-green-100" : "border-slate-300 bg-slate-200"
    appSetupCompleted ? "border-green-500 bg-green-100" : "border-slate-300 bg-slate-200"
  )}>
  {widgetSetupCompleted ? (
  {appSetupCompleted ? (
    <div>
      <p className="text-3xl">{t("environments.connect.congrats")}</p>
      <p className="pt-4 text-sm font-medium text-slate-600">
@@ -81,9 +81,9 @@ export const ConnectWithFormbricks = ({
</div>
<Button
  id="finishOnboarding"
  variant={widgetSetupCompleted ? "default" : "ghost"}
  variant={appSetupCompleted ? "default" : "ghost"}
  onClick={handleFinishOnboarding}>
  {widgetSetupCompleted
  {appSetupCompleted
    ? t("environments.connect.finish_onboarding")
    : t("environments.connect.do_it_later")}
  <ArrowRight />

@@ -1,103 +0,0 @@
|
||||
import { cleanup, render, screen } from "@testing-library/react";
|
||||
import userEvent from "@testing-library/user-event";
|
||||
import toast from "react-hot-toast";
|
||||
import { afterEach, beforeAll, describe, expect, test, vi } from "vitest";
|
||||
import { OnboardingSetupInstructions } from "./OnboardingSetupInstructions";
|
||||
|
||||
// Mock react-hot-toast so we can assert that a success message is shown
|
||||
vi.mock("react-hot-toast", () => ({
|
||||
__esModule: true,
|
||||
default: {
|
||||
success: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Set up a spy for navigator.clipboard.writeText so it becomes a ViTest spy.
|
||||
beforeAll(() => {
|
||||
Object.defineProperty(navigator, "clipboard", {
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: {
|
||||
// Using a mockResolvedValue resolves the promise as writeText is async.
|
||||
writeText: vi.fn().mockResolvedValue(undefined),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
describe("OnboardingSetupInstructions", () => {
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
// Provide some default props for testing
|
||||
const defaultProps = {
|
||||
environmentId: "env-123",
|
||||
publicDomain: "https://example.com",
|
||||
channel: "app" as const, // Assuming channel is either "app" or "website"
|
||||
widgetSetupCompleted: false,
|
||||
};
|
||||
|
||||
test("renders HTML tab content by default", () => {
|
||||
render(<OnboardingSetupInstructions {...defaultProps} />);
|
||||
|
||||
// Since the default active tab is "html", we check for a unique text
|
||||
expect(
|
||||
screen.getByText(/environments.connect.insert_this_code_into_the_head_tag_of_your_website/i)
|
||||
).toBeInTheDocument();
|
||||
|
||||
// The HTML snippet contains a marker comment
|
||||
expect(screen.getByText("START")).toBeInTheDocument();
|
||||
|
||||
// Verify the "Copy Code" button is present
|
||||
expect(screen.getByRole("button", { name: /common.copy_code/i })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("renders NPM tab content when selected", async () => {
|
||||
render(<OnboardingSetupInstructions {...defaultProps} />);
|
||||
const user = userEvent.setup();
|
||||
|
||||
// Click on the "NPM" tab to switch views.
|
||||
const npmTab = screen.getByText("NPM");
|
||||
await user.click(npmTab);
|
||||
|
||||
// Check that the install commands are present
|
||||
expect(screen.getByText(/npm install @formbricks\/js/)).toBeInTheDocument();
|
||||
expect(screen.getByText(/yarn add @formbricks\/js/)).toBeInTheDocument();
|
||||
|
||||
// Verify the "Read Docs" link has the correct URL (based on channel prop)
|
||||
const readDocsLink = screen.getByRole("link", { name: /common.read_docs/i });
|
||||
expect(readDocsLink).toHaveAttribute("href", "https://formbricks.com/docs/app-surveys/framework-guides");
|
||||
});
|
||||
|
||||
test("copies HTML snippet to clipboard and shows success toast when Copy Code button is clicked", async () => {
|
||||
render(<OnboardingSetupInstructions {...defaultProps} />);
|
||||
const user = userEvent.setup();
|
||||
|
||||
const writeTextSpy = vi.spyOn(navigator.clipboard, "writeText");
|
||||
|
||||
// Click the "Copy Code" button
|
||||
const copyButton = screen.getByRole("button", { name: /common.copy_code/i });
|
||||
await user.click(copyButton);
|
||||
|
||||
// Ensure navigator.clipboard.writeText was called.
|
||||
expect(writeTextSpy).toHaveBeenCalled();
|
||||
const writtenText = (navigator.clipboard.writeText as any).mock.calls[0][0] as string;
|
||||
|
||||
// Check that the pasted snippet contains the expected environment values
|
||||
expect(writtenText).toContain('var appUrl = "https://example.com"');
|
||||
expect(writtenText).toContain('var environmentId = "env-123"');
|
||||
|
||||
// Verify that a success toast was shown
|
||||
expect(toast.success).toHaveBeenCalledWith("common.copied_to_clipboard");
|
||||
});
|
||||
|
||||
test("renders step-by-step manual link with correct URL in HTML tab", () => {
|
||||
render(<OnboardingSetupInstructions {...defaultProps} />);
|
||||
const manualLink = screen.getByRole("link", { name: /common.step_by_step_manual/i });
|
||||
expect(manualLink).toHaveAttribute(
|
||||
"href",
|
||||
"https://formbricks.com/docs/app-surveys/framework-guides#html"
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -1,15 +1,15 @@
"use client";

import { Button } from "@/modules/ui/components/button";
import { CodeBlock } from "@/modules/ui/components/code-block";
import { Html5Icon, NpmIcon } from "@/modules/ui/components/icons";
import { TabBar } from "@/modules/ui/components/tab-bar";
import { useTranslate } from "@tolgee/react";
import Link from "next/link";
import "prismjs/themes/prism.css";
import { useState } from "react";
import toast from "react-hot-toast";
import { useTranslation } from "react-i18next";
import { TProjectConfigChannel } from "@formbricks/types/project";
import { Button } from "@/modules/ui/components/button";
import { CodeBlock } from "@/modules/ui/components/code-block";
import { Html5Icon, NpmIcon } from "@/modules/ui/components/icons";
import { TabBar } from "@/modules/ui/components/tab-bar";

const tabs = [
  { id: "html", label: "HTML", icon: <Html5Icon /> },
@@ -20,16 +20,16 @@ interface OnboardingSetupInstructionsProps {
  environmentId: string;
  publicDomain: string;
  channel: TProjectConfigChannel;
  widgetSetupCompleted: boolean;
  appSetupCompleted: boolean;
}

export const OnboardingSetupInstructions = ({
  environmentId,
  publicDomain,
  channel,
  widgetSetupCompleted,
  appSetupCompleted,
}: OnboardingSetupInstructionsProps) => {
  const { t } = useTranslate();
  const { t } = useTranslation();
  const [activeTab, setActiveTab] = useState(tabs[0].id);
  const htmlSnippetForAppSurveys = `<!-- START Formbricks Surveys -->
<script type="text/javascript">
@@ -137,7 +137,7 @@ export const OnboardingSetupInstructions = ({
<div className="mt-4 flex justify-between space-x-2">
  <Button
    id="onboarding-inapp-connect-copy-code"
    variant={widgetSetupCompleted ? "secondary" : "default"}
    variant={appSetupCompleted ? "secondary" : "default"}
    onClick={() => {
      navigator.clipboard.writeText(
        channel === "app" ? htmlSnippetForAppSurveys : htmlSnippetForWebsiteSurveys

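For context on what the "Copy Code" button actually puts on the clipboard, here is a minimal sketch of how such a snippet could be assembled from publicDomain and environmentId. The exact markup is an assumption; the tests earlier in this diff only confirm the START marker and that `var appUrl` and `var environmentId` are interpolated into the copied text.

```ts
// Hypothetical helper; the real htmlSnippetForAppSurveys lives inline in the component.
const buildHtmlSnippet = (publicDomain: string, environmentId: string): string =>
  `<!-- START Formbricks Surveys -->
<script type="text/javascript">
  var appUrl = "${publicDomain}";
  var environmentId = "${environmentId}";
  // ...loader script that initializes the Formbricks widget (assumed)...
</script>
<!-- END Formbricks Surveys -->`;

// Example: buildHtmlSnippet("https://example.com", "env-123") contains
// 'var appUrl = "https://example.com"' and 'var environmentId = "env-123"',
// matching the assertions in OnboardingSetupInstructions.test.tsx above.
```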
@@ -1,12 +1,12 @@
import { XIcon } from "lucide-react";
import Link from "next/link";
import { ConnectWithFormbricks } from "@/app/(app)/(onboarding)/environments/[environmentId]/connect/components/ConnectWithFormbricks";
import { getEnvironment } from "@/lib/environment/service";
import { getPublicDomain } from "@/lib/getPublicUrl";
import { getProjectByEnvironmentId } from "@/lib/project/service";
import { getTranslate } from "@/lingodotdev/server";
import { Button } from "@/modules/ui/components/button";
import { Header } from "@/modules/ui/components/header";
import { getTranslate } from "@/tolgee/server";
import { XIcon } from "lucide-react";
import Link from "next/link";

interface ConnectPageProps {
  params: Promise<{
@@ -25,7 +25,7 @@ const Page = async (props: ConnectPageProps) => {

  const project = await getProjectByEnvironmentId(environment.id);
  if (!project) {
    throw new Error(t("common.project_not_found"));
    throw new Error(t("common.workspace_not_found"));
  }

  const channel = project.config.channel || null;
@@ -42,11 +42,11 @@ const Page = async (props: ConnectPageProps) => {
  <ConnectWithFormbricks
    environment={environment}
    publicDomain={publicDomain}
    widgetSetupCompleted={environment.appSetupCompleted}
    appSetupCompleted={environment.appSetupCompleted}
    channel={channel}
  />
  <Button
    className="absolute right-5 top-5 !mt-0 text-slate-500 hover:text-slate-700"
    className="absolute top-5 right-5 !mt-0 text-slate-500 hover:text-slate-700"
    variant="ghost"
    asChild>
    <Link href={`/environments/${environment.id}`}>

@@ -1,147 +0,0 @@
|
||||
import { hasUserEnvironmentAccess } from "@/lib/environment/auth";
|
||||
import { cleanup, render, screen } from "@testing-library/react";
|
||||
import { getServerSession } from "next-auth";
|
||||
import { redirect } from "next/navigation";
|
||||
import { afterEach, describe, expect, test, vi } from "vitest";
|
||||
import OnboardingLayout from "./layout";
|
||||
|
||||
vi.mock("@/lib/constants", () => ({
|
||||
IS_FORMBRICKS_CLOUD: false,
|
||||
IS_PRODUCTION: false,
|
||||
IS_DEVELOPMENT: true,
|
||||
E2E_TESTING: false,
|
||||
WEBAPP_URL: "http://localhost:3000",
|
||||
PUBLIC_URL: "http://localhost:3000/survey",
|
||||
ENCRYPTION_KEY: "mock-encryption-key",
|
||||
CRON_SECRET: "mock-cron-secret",
|
||||
DEFAULT_BRAND_COLOR: "#64748b",
|
||||
FB_LOGO_URL: "https://mock-logo-url.com/logo.png",
|
||||
PRIVACY_URL: "http://localhost:3000/privacy",
|
||||
TERMS_URL: "http://localhost:3000/terms",
|
||||
IMPRINT_URL: "http://localhost:3000/imprint",
|
||||
IMPRINT_ADDRESS: "Mock Address",
|
||||
PASSWORD_RESET_DISABLED: false,
|
||||
EMAIL_VERIFICATION_DISABLED: false,
|
||||
GOOGLE_OAUTH_ENABLED: false,
|
||||
GITHUB_OAUTH_ENABLED: false,
|
||||
AZURE_OAUTH_ENABLED: false,
|
||||
OIDC_OAUTH_ENABLED: false,
|
||||
SAML_OAUTH_ENABLED: false,
|
||||
SAML_XML_DIR: "./mock-saml-connection",
|
||||
SIGNUP_ENABLED: true,
|
||||
EMAIL_AUTH_ENABLED: true,
|
||||
INVITE_DISABLED: false,
|
||||
SLACK_CLIENT_SECRET: "mock-slack-secret",
|
||||
SLACK_CLIENT_ID: "mock-slack-id",
|
||||
SLACK_AUTH_URL: "https://mock-slack-auth-url.com",
|
||||
GOOGLE_SHEETS_CLIENT_ID: "mock-google-sheets-id",
|
||||
GOOGLE_SHEETS_CLIENT_SECRET: "mock-google-sheets-secret",
|
||||
GOOGLE_SHEETS_REDIRECT_URL: "http://localhost:3000/google-sheets-redirect",
|
||||
NOTION_OAUTH_CLIENT_ID: "mock-notion-id",
|
||||
NOTION_OAUTH_CLIENT_SECRET: "mock-notion-secret",
|
||||
NOTION_REDIRECT_URI: "http://localhost:3000/notion-redirect",
|
||||
NOTION_AUTH_URL: "https://mock-notion-auth-url.com",
|
||||
AIRTABLE_CLIENT_ID: "mock-airtable-id",
|
||||
SMTP_HOST: "mock-smtp-host",
|
||||
SMTP_PORT: "587",
|
||||
SMTP_SECURE_ENABLED: false,
|
||||
SMTP_USER: "mock-smtp-user",
|
||||
SMTP_PASSWORD: "mock-smtp-password",
|
||||
SMTP_AUTHENTICATED: true,
|
||||
SMTP_REJECT_UNAUTHORIZED_TLS: true,
|
||||
MAIL_FROM: "mock@mail.com",
|
||||
MAIL_FROM_NAME: "Mock Mail",
|
||||
NEXTAUTH_SECRET: "mock-nextauth-secret",
|
||||
ITEMS_PER_PAGE: 30,
|
||||
SURVEYS_PER_PAGE: 12,
|
||||
RESPONSES_PER_PAGE: 25,
|
||||
TEXT_RESPONSES_PER_PAGE: 5,
|
||||
INSIGHTS_PER_PAGE: 10,
|
||||
DOCUMENTS_PER_PAGE: 10,
|
||||
MAX_RESPONSES_FOR_INSIGHT_GENERATION: 500,
|
||||
MAX_OTHER_OPTION_LENGTH: 250,
|
||||
ENTERPRISE_LICENSE_KEY: "ABC",
|
||||
GITHUB_ID: "mock-github-id",
|
||||
GITHUB_SECRET: "mock-github-secret",
|
||||
GITHUB_OAUTH_URL: "https://mock-github-auth-url.com",
|
||||
AZURE_ID: "mock-azure-id",
|
||||
AZUREAD_CLIENT_ID: "mock-azure-client-id",
|
||||
AZUREAD_CLIENT_SECRET: "mock-azure-client-secret",
|
||||
GOOGLE_CLIENT_ID: "mock-google-client-id",
|
||||
GOOGLE_CLIENT_SECRET: "mock-google-client-secret",
|
||||
GOOGLE_OAUTH_URL: "https://mock-google-auth-url.com",
|
||||
AZURE_OAUTH_URL: "https://mock-azure-auth-url.com",
|
||||
OIDC_ID: "mock-oidc-id",
|
||||
OIDC_OAUTH_URL: "https://mock-oidc-auth-url.com",
|
||||
SAML_ID: "mock-saml-id",
|
||||
SAML_OAUTH_URL: "https://mock-saml-auth-url.com",
|
||||
SAML_METADATA_URL: "https://mock-saml-metadata-url.com",
|
||||
AZUREAD_TENANT_ID: "mock-azure-tenant-id",
|
||||
AZUREAD_OAUTH_URL: "https://mock-azuread-auth-url.com",
|
||||
OIDC_DISPLAY_NAME: "Mock OIDC",
|
||||
OIDC_CLIENT_ID: "mock-oidc-client-id",
|
||||
OIDC_CLIENT_SECRET: "mock-oidc-client-secret",
|
||||
OIDC_REDIRECT_URL: "http://localhost:3000/oidc-redirect",
|
||||
OIDC_AUTH_URL: "https://mock-oidc-auth-url.com",
|
||||
OIDC_ISSUER: "https://mock-oidc-issuer.com",
|
||||
OIDC_SIGNING_ALGORITHM: "RS256",
|
||||
SESSION_MAX_AGE: 1000,
|
||||
REDIS_URL: "test-redis-url",
|
||||
AUDIT_LOG_ENABLED: true,
|
||||
}));
|
||||
|
||||
vi.mock("next/navigation", () => ({
|
||||
redirect: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("next-auth", () => ({
|
||||
getServerSession: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("@/lib/environment/auth", () => ({
|
||||
hasUserEnvironmentAccess: vi.fn(),
|
||||
}));
|
||||
|
||||
describe("OnboardingLayout", () => {
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
test("redirects to login if session is missing", async () => {
|
||||
vi.mocked(getServerSession).mockResolvedValueOnce(null);
|
||||
|
||||
await OnboardingLayout({
|
||||
params: { environmentId: "env1" },
|
||||
children: <div>Test Content</div>,
|
||||
});
|
||||
|
||||
expect(redirect).toHaveBeenCalledWith("/auth/login");
|
||||
});
|
||||
|
||||
test("throws AuthorizationError if user lacks access", async () => {
|
||||
vi.mocked(getServerSession).mockResolvedValueOnce({ user: { id: "user1" } });
|
||||
vi.mocked(hasUserEnvironmentAccess).mockResolvedValueOnce(false);
|
||||
|
||||
await expect(
|
||||
OnboardingLayout({
|
||||
params: { environmentId: "env1" },
|
||||
children: <div>Test Content</div>,
|
||||
})
|
||||
).rejects.toThrow("User is not authorized to access this environment");
|
||||
});
|
||||
|
||||
test("renders children if user has access", async () => {
|
||||
vi.mocked(getServerSession).mockResolvedValueOnce({ user: { id: "user1" } });
|
||||
vi.mocked(hasUserEnvironmentAccess).mockResolvedValueOnce(true);
|
||||
|
||||
const result = await OnboardingLayout({
|
||||
params: { environmentId: "env1" },
|
||||
children: <div data-testid="child">Test Content</div>,
|
||||
});
|
||||
|
||||
render(result);
|
||||
|
||||
expect(screen.getByTestId("child")).toHaveTextContent("Test Content");
|
||||
});
|
||||
});
|
||||
@@ -1,8 +1,8 @@
import { hasUserEnvironmentAccess } from "@/lib/environment/auth";
import { authOptions } from "@/modules/auth/lib/authOptions";
import { getServerSession } from "next-auth";
import { redirect } from "next/navigation";
import { AuthorizationError } from "@formbricks/types/errors";
import { hasUserEnvironmentAccess } from "@/lib/environment/auth";
import { authOptions } from "@/modules/auth/lib/authOptions";

const OnboardingLayout = async (props) => {
  const params = await props.params;

@@ -1,76 +0,0 @@
|
||||
import { createSurveyAction } from "@/modules/survey/components/template-list/actions";
|
||||
import "@testing-library/jest-dom/vitest";
|
||||
import { cleanup, render, screen } from "@testing-library/react";
|
||||
import userEvent from "@testing-library/user-event";
|
||||
import toast from "react-hot-toast";
|
||||
import { afterEach, describe, expect, test, vi } from "vitest";
|
||||
import { XMTemplateList } from "./XMTemplateList";
|
||||
|
||||
// Prepare push mock and module mocks before importing component
|
||||
const pushMock = vi.fn();
|
||||
vi.mock("@tolgee/react", () => ({ useTranslate: () => ({ t: (key: string) => key }) }));
|
||||
vi.mock("next/navigation", () => ({ useRouter: vi.fn(() => ({ push: pushMock })) }));
|
||||
vi.mock("react-hot-toast", () => ({ default: { error: vi.fn() } }));
|
||||
vi.mock("@/app/(app)/(onboarding)/environments/[environmentId]/xm-templates/lib/xm-templates", () => ({
|
||||
getXMTemplates: (t: any) => [
|
||||
{ id: 1, name: "tmpl1" },
|
||||
{ id: 2, name: "tmpl2" },
|
||||
],
|
||||
}));
|
||||
vi.mock("@/app/(app)/(onboarding)/environments/[environmentId]/xm-templates/lib/utils", () => ({
|
||||
replacePresetPlaceholders: (template: any, project: any) => ({ ...template, projectId: project.id }),
|
||||
}));
|
||||
vi.mock("@/modules/survey/components/template-list/actions", () => ({ createSurveyAction: vi.fn() }));
|
||||
vi.mock("@/lib/utils/helper", () => ({ getFormattedErrorMessage: () => "formatted-error" }));
|
||||
vi.mock("@/app/(app)/(onboarding)/organizations/components/OnboardingOptionsContainer", () => ({
|
||||
OnboardingOptionsContainer: ({ options }: { options: any[] }) => (
|
||||
<div>
|
||||
{options.map((opt, idx) => (
|
||||
<button key={idx} data-testid={`option-${idx}`} onClick={opt.onClick}>
|
||||
{opt.title}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
),
|
||||
}));
|
||||
|
||||
// Reset mocks between tests
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe("XMTemplateList component", () => {
|
||||
const project = { id: "proj1" } as any;
|
||||
const user = { id: "user1" } as any;
|
||||
const environmentId = "env1";
|
||||
|
||||
test("creates survey and navigates on success", async () => {
|
||||
// Mock successful survey creation
|
||||
vi.mocked(createSurveyAction).mockResolvedValue({ data: { id: "survey1" } } as any);
|
||||
|
||||
render(<XMTemplateList project={project} user={user} environmentId={environmentId} />);
|
||||
|
||||
const option0 = screen.getByTestId("option-0");
|
||||
await userEvent.click(option0);
|
||||
|
||||
expect(createSurveyAction).toHaveBeenCalledWith({
|
||||
environmentId,
|
||||
surveyBody: expect.objectContaining({ id: 1, projectId: "proj1", type: "link", createdBy: "user1" }),
|
||||
});
|
||||
expect(pushMock).toHaveBeenCalledWith(`/environments/${environmentId}/surveys/survey1/edit?mode=cx`);
|
||||
});
|
||||
|
||||
test("shows error toast on failure", async () => {
|
||||
// Mock failed survey creation
|
||||
vi.mocked(createSurveyAction).mockResolvedValue({ error: "err" } as any);
|
||||
|
||||
render(<XMTemplateList project={project} user={user} environmentId={environmentId} />);
|
||||
|
||||
const option1 = screen.getByTestId("option-1");
|
||||
await userEvent.click(option1);
|
||||
|
||||
expect(createSurveyAction).toHaveBeenCalled();
|
||||
expect(toast.error).toHaveBeenCalledWith("formatted-error");
|
||||
});
|
||||
});
|
||||
@@ -1,19 +1,19 @@
"use client";

import { ActivityIcon, ShoppingCartIcon, SmileIcon, StarIcon, ThumbsUpIcon, UsersIcon } from "lucide-react";
import { useRouter } from "next/navigation";
import { useState } from "react";
import toast from "react-hot-toast";
import { useTranslation } from "react-i18next";
import { TProject } from "@formbricks/types/project";
import { TSurveyCreateInput } from "@formbricks/types/surveys/types";
import { TXMTemplate } from "@formbricks/types/templates";
import { TUser } from "@formbricks/types/user";
import { replacePresetPlaceholders } from "@/app/(app)/(onboarding)/environments/[environmentId]/xm-templates/lib/utils";
import { getXMTemplates } from "@/app/(app)/(onboarding)/environments/[environmentId]/xm-templates/lib/xm-templates";
import { OnboardingOptionsContainer } from "@/app/(app)/(onboarding)/organizations/components/OnboardingOptionsContainer";
import { getFormattedErrorMessage } from "@/lib/utils/helper";
import { createSurveyAction } from "@/modules/survey/components/template-list/actions";
import { useTranslate } from "@tolgee/react";
import { ActivityIcon, ShoppingCartIcon, SmileIcon, StarIcon, ThumbsUpIcon, UsersIcon } from "lucide-react";
import { useRouter } from "next/navigation";
import { useState } from "react";
import toast from "react-hot-toast";
import { TProject } from "@formbricks/types/project";
import { TSurveyCreateInput } from "@formbricks/types/surveys/types";
import { TXMTemplate } from "@formbricks/types/templates";
import { TUser } from "@formbricks/types/user";

interface XMTemplateListProps {
  project: TProject;
@@ -23,7 +23,7 @@ interface XMTemplateListProps {

export const XMTemplateList = ({ project, user, environmentId }: XMTemplateListProps) => {
  const [activeTemplateId, setActiveTemplateId] = useState<number | null>(null);
  const { t } = useTranslate();
  const { t } = useTranslation();
  const router = useRouter();

  const createSurvey = async (activeTemplate: TXMTemplate) => {

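The hunk above truncates the body of createSurvey. A hedged sketch of how that handler plausibly continues, inferred only from the assertions in XMTemplateList.test.tsx earlier in this diff (the exact shape of the action result is an assumption):

```ts
// Sketch, not the committed implementation.
const createSurvey = async (activeTemplate: TXMTemplate) => {
  const createSurveyResponse = await createSurveyAction({
    environmentId,
    surveyBody: { ...activeTemplate, type: "link", createdBy: user.id },
  });
  if (createSurveyResponse?.data) {
    // Test asserts navigation to the CX editor for the new survey.
    router.push(`/environments/${environmentId}/surveys/${createSurveyResponse.data.id}/edit?mode=cx`);
  } else {
    // Test asserts the formatted error message is shown via toast.
    toast.error(getFormattedErrorMessage(createSurveyResponse));
  }
};
```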
@@ -32,14 +32,22 @@ const mockProject: TProject = {
};
const mockTemplate: TXMTemplate = {
  name: "$[projectName] Survey",
  questions: [
  blocks: [
    {
      id: "q1",
      inputType: "text",
      type: "email" as any,
      headline: { default: "$[projectName] Question" },
      required: false,
      charLimit: { enabled: true, min: 400, max: 1000 },
      id: "block1",
      name: "Block 1",
      elements: [
        {
          id: "q1",
          type: "openText" as const,
          inputType: "text" as const,
          headline: { default: "$[projectName] Question" },
          subheader: { default: "" },
          required: false,
          placeholder: { default: "" },
          charLimit: 1000,
        },
      ],
    },
  ],
  endings: [
@@ -66,9 +74,9 @@ describe("replacePresetPlaceholders", () => {
  expect(result.name).toBe("Test Project Survey");
});

test("replaces projectName placeholder in question headline", () => {
test("replaces projectName placeholder in element headline", () => {
  const result = replacePresetPlaceholders(mockTemplate, mockProject);
  expect(result.questions[0].headline.default).toBe("Test Project Question");
  expect(result.blocks[0].elements[0].headline.default).toBe("Test Project Question");
});

test("returns a new object without mutating the original template", () => {

@@ -1,13 +1,16 @@
import { replaceQuestionPresetPlaceholders } from "@/lib/utils/templates";
import { TProject } from "@formbricks/types/project";
import { TSurveyBlock } from "@formbricks/types/surveys/blocks";
import { TXMTemplate } from "@formbricks/types/templates";
import { replaceElementPresetPlaceholders } from "@/lib/utils/templates";

// replace all occurrences of projectName with the actual project name in the current template
export const replacePresetPlaceholders = (template: TXMTemplate, project: TProject) => {
export const replacePresetPlaceholders = (template: TXMTemplate, project: TProject): TXMTemplate => {
  const survey = structuredClone(template);
  survey.name = survey.name.replace("$[projectName]", project.name);
  survey.questions = survey.questions.map((question) => {
    return replaceQuestionPresetPlaceholders(question, project);
  });
  return { ...template, ...survey };

  const modifiedBlocks = survey.blocks.map((block: TSurveyBlock) => ({
    ...block,
    elements: block.elements.map((element) => replaceElementPresetPlaceholders(element, project)),
  }));

  return { ...survey, name: survey.name.replace("$[projectName]", project.name), blocks: modifiedBlocks };
};

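To make the block/element traversal above concrete, here is a self-contained sketch with simplified local types (not the real TXMTemplate or TSurveyBlock definitions) that performs the same placeholder substitution on a cloned template:

```ts
// Simplified stand-in types; the actual Formbricks types carry many more fields.
type Localized = { default: string };
type Element = { id: string; headline: Localized };
type Block = { id: string; name: string; elements: Element[] };
type Template = { name: string; blocks: Block[] };

const replacePlaceholdersSketch = (template: Template, projectName: string): Template => {
  // Clone first so the original template object is never mutated.
  const survey = structuredClone(template);
  return {
    ...survey,
    name: survey.name.replace("$[projectName]", projectName),
    blocks: survey.blocks.map((block) => ({
      ...block,
      elements: block.elements.map((element) => ({
        ...element,
        headline: { default: element.headline.default.replace("$[projectName]", projectName) },
      })),
    })),
  };
};
```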
@@ -1,6 +1,6 @@
import "@testing-library/jest-dom/vitest";
import { cleanup } from "@testing-library/preact";
import { TFnType } from "@tolgee/react";
import { TFunction } from "i18next";
import { afterEach, describe, expect, test, vi } from "vitest";
import { getXMSurveyDefault, getXMTemplates } from "./xm-templates";

@@ -14,13 +14,13 @@ describe("xm-templates", () => {
});

test("getXMSurveyDefault returns default survey template", () => {
  const tMock = vi.fn((key) => key) as TFnType;
  const tMock = vi.fn((key) => key) as TFunction;
  const result = getXMSurveyDefault(tMock);

  expect(result).toEqual({
    name: "",
    endings: expect.any(Array),
    questions: [],
    blocks: [],
    styling: {
      overwriteThemeStyling: true,
    },
@@ -29,7 +29,7 @@ describe("xm-templates", () => {
});

test("getXMTemplates returns all templates", () => {
  const tMock = vi.fn((key) => key) as TFnType;
  const tMock = vi.fn((key) => key) as TFunction;
  const result = getXMTemplates(tMock);

  expect(result).toHaveLength(6);
@@ -44,7 +44,7 @@ describe("xm-templates", () => {
test("getXMTemplates handles errors gracefully", async () => {
  const tMock = vi.fn(() => {
    throw new Error("Test error");
  }) as TFnType;
  }) as TFunction;

  const result = getXMTemplates(tMock);

@@ -1,21 +1,23 @@
|
||||
import {
|
||||
buildCTAQuestion,
|
||||
buildNPSQuestion,
|
||||
buildOpenTextQuestion,
|
||||
buildRatingQuestion,
|
||||
getDefaultEndingCard,
|
||||
} from "@/app/lib/survey-builder";
|
||||
import { createId } from "@paralleldrive/cuid2";
|
||||
import { TFnType } from "@tolgee/react";
|
||||
import { TFunction } from "i18next";
|
||||
import { logger } from "@formbricks/logger";
|
||||
import { TXMTemplate } from "@formbricks/types/templates";
|
||||
import {
|
||||
buildBlock,
|
||||
buildCTAElement,
|
||||
buildNPSElement,
|
||||
buildOpenTextElement,
|
||||
buildRatingElement,
|
||||
createBlockJumpLogic,
|
||||
} from "@/app/lib/survey-block-builder";
|
||||
import { getDefaultEndingCard } from "@/app/lib/survey-builder";
|
||||
|
||||
export const getXMSurveyDefault = (t: TFnType): TXMTemplate => {
|
||||
export const getXMSurveyDefault = (t: TFunction): TXMTemplate => {
|
||||
try {
|
||||
return {
|
||||
name: "",
|
||||
endings: [getDefaultEndingCard([], t)],
|
||||
questions: [],
|
||||
blocks: [],
|
||||
styling: {
|
||||
overwriteThemeStyling: true,
|
||||
},
|
||||
@@ -26,45 +28,72 @@ export const getXMSurveyDefault = (t: TFnType): TXMTemplate => {
|
||||
}
|
||||
};
|
||||
|
||||
const npsSurvey = (t: TFnType): TXMTemplate => {
|
||||
const npsSurvey = (t: TFunction): TXMTemplate => {
|
||||
return {
|
||||
...getXMSurveyDefault(t),
|
||||
name: t("templates.nps_survey_name"),
|
||||
questions: [
|
||||
buildNPSQuestion({
|
||||
headline: t("templates.nps_survey_question_1_headline"),
|
||||
required: true,
|
||||
lowerLabel: t("templates.nps_survey_question_1_lower_label"),
|
||||
upperLabel: t("templates.nps_survey_question_1_upper_label"),
|
||||
isColorCodingEnabled: true,
|
||||
blocks: [
|
||||
buildBlock({
|
||||
name: "Block 1",
|
||||
elements: [
|
||||
buildNPSElement({
|
||||
headline: t("templates.nps_survey_question_1_headline"),
|
||||
required: true,
|
||||
lowerLabel: t("templates.nps_survey_question_1_lower_label"),
|
||||
upperLabel: t("templates.nps_survey_question_1_upper_label"),
|
||||
isColorCodingEnabled: true,
|
||||
}),
|
||||
],
|
||||
t,
|
||||
}),
|
||||
buildOpenTextQuestion({
|
||||
headline: t("templates.nps_survey_question_2_headline"),
|
||||
required: false,
|
||||
inputType: "text",
|
||||
buildBlock({
|
||||
name: "Block 2",
|
||||
elements: [
|
||||
buildOpenTextElement({
|
||||
headline: t("templates.nps_survey_question_2_headline"),
|
||||
required: false,
|
||||
inputType: "text",
|
||||
}),
|
||||
],
|
||||
t,
|
||||
}),
|
||||
buildOpenTextQuestion({
|
||||
headline: t("templates.nps_survey_question_3_headline"),
|
||||
required: false,
|
||||
inputType: "text",
|
||||
buildBlock({
|
||||
name: "Block 3",
|
||||
elements: [
|
||||
buildOpenTextElement({
|
||||
headline: t("templates.nps_survey_question_3_headline"),
|
||||
required: false,
|
||||
inputType: "text",
|
||||
}),
|
||||
],
|
||||
t,
|
||||
}),
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
const starRatingSurvey = (t: TFnType): TXMTemplate => {
|
||||
const reusableQuestionIds = [createId(), createId(), createId()];
|
||||
const starRatingSurvey = (t: TFunction): TXMTemplate => {
|
||||
const reusableElementIds = [createId(), createId(), createId()];
|
||||
const block3Id = createId(); // Pre-generate Block 3 ID for logic reference
|
||||
const defaultSurvey = getXMSurveyDefault(t);
|
||||
|
||||
return {
|
||||
...defaultSurvey,
|
||||
name: t("templates.star_rating_survey_name"),
|
||||
questions: [
|
||||
buildRatingQuestion({
|
||||
id: reusableQuestionIds[0],
|
||||
blocks: [
|
||||
buildBlock({
|
||||
name: "Block 1",
|
||||
elements: [
|
||||
buildRatingElement({
|
||||
id: reusableElementIds[0],
|
||||
range: 5,
|
||||
scale: "number",
|
||||
headline: t("templates.star_rating_survey_question_1_headline"),
|
||||
required: true,
|
||||
lowerLabel: t("templates.star_rating_survey_question_1_lower_label"),
|
||||
upperLabel: t("templates.star_rating_survey_question_1_upper_label"),
|
||||
}),
|
||||
],
|
||||
logic: [
|
||||
{
|
||||
id: createId(),
|
||||
@@ -75,8 +104,8 @@ const starRatingSurvey = (t: TFnType): TXMTemplate => {
|
||||
{
|
||||
id: createId(),
|
||||
leftOperand: {
|
||||
value: reusableQuestionIds[0],
|
||||
type: "question",
|
||||
value: reusableElementIds[0],
|
||||
type: "element",
|
||||
},
|
||||
operator: "isLessThanOrEqual",
|
||||
rightOperand: {
|
||||
@@ -89,80 +118,72 @@ const starRatingSurvey = (t: TFnType): TXMTemplate => {
|
||||
actions: [
|
||||
{
|
||||
id: createId(),
|
||||
objective: "jumpToQuestion",
|
||||
target: reusableQuestionIds[2],
|
||||
objective: "jumpToBlock",
|
||||
target: block3Id,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
range: 5,
|
||||
scale: "number",
|
||||
headline: t("templates.star_rating_survey_question_1_headline"),
|
||||
required: true,
|
||||
lowerLabel: t("templates.star_rating_survey_question_1_lower_label"),
|
||||
upperLabel: t("templates.star_rating_survey_question_1_upper_label"),
|
||||
t,
|
||||
}),
|
||||
buildCTAQuestion({
|
||||
id: reusableQuestionIds[1],
|
||||
html: t("templates.star_rating_survey_question_2_html"),
|
||||
logic: [
|
||||
{
|
||||
id: createId(),
|
||||
conditions: {
|
||||
id: createId(),
|
||||
connector: "and",
|
||||
conditions: [
|
||||
{
|
||||
id: createId(),
|
||||
leftOperand: {
|
||||
value: reusableQuestionIds[1],
|
||||
type: "question",
|
||||
},
|
||||
operator: "isClicked",
|
||||
},
|
||||
],
|
||||
},
|
||||
actions: [
|
||||
{
|
||||
id: createId(),
|
||||
objective: "jumpToQuestion",
|
||||
target: defaultSurvey.endings[0].id,
|
||||
},
|
||||
],
|
||||
},
|
||||
buildBlock({
|
||||
name: "Block 2",
|
||||
elements: [
|
||||
buildCTAElement({
|
||||
id: reusableElementIds[1],
|
||||
subheader: t("templates.star_rating_survey_question_2_html"),
|
||||
headline: t("templates.star_rating_survey_question_2_headline"),
|
||||
required: false,
|
||||
buttonUrl: "https://formbricks.com/github",
|
||||
buttonExternal: true,
|
||||
ctaButtonLabel: t("templates.star_rating_survey_question_2_button_label"),
|
||||
}),
|
||||
],
|
||||
headline: t("templates.star_rating_survey_question_2_headline"),
|
||||
required: true,
|
||||
buttonUrl: "https://formbricks.com/github",
|
||||
buttonLabel: t("templates.star_rating_survey_question_2_button_label"),
|
||||
buttonExternal: true,
|
||||
logic: [createBlockJumpLogic(reusableElementIds[1], defaultSurvey.endings[0].id, "isClicked")],
|
||||
t,
|
||||
}),
|
||||
buildOpenTextQuestion({
|
||||
id: reusableQuestionIds[2],
|
||||
headline: t("templates.star_rating_survey_question_3_headline"),
|
||||
required: true,
|
||||
subheader: t("templates.star_rating_survey_question_3_subheader"),
|
||||
buildBlock({
|
||||
id: block3Id,
|
||||
name: "Block 3",
|
||||
elements: [
|
||||
buildOpenTextElement({
|
||||
id: reusableElementIds[2],
|
||||
headline: t("templates.star_rating_survey_question_3_headline"),
|
||||
required: true,
|
||||
subheader: t("templates.star_rating_survey_question_3_subheader"),
|
||||
placeholder: t("templates.star_rating_survey_question_3_placeholder"),
|
||||
inputType: "text",
|
||||
}),
|
||||
],
|
||||
buttonLabel: t("templates.star_rating_survey_question_3_button_label"),
|
||||
placeholder: t("templates.star_rating_survey_question_3_placeholder"),
|
||||
inputType: "text",
|
||||
t,
|
||||
}),
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
const csatSurvey = (t: TFnType): TXMTemplate => {
|
||||
const reusableQuestionIds = [createId(), createId(), createId()];
|
||||
const csatSurvey = (t: TFunction): TXMTemplate => {
|
||||
const reusableElementIds = [createId(), createId(), createId()];
|
||||
const block3Id = createId(); // Pre-generate Block 3 ID for logic reference
|
||||
const defaultSurvey = getXMSurveyDefault(t);
|
||||
|
||||
return {
|
||||
...defaultSurvey,
|
||||
name: t("templates.csat_survey_name"),
|
||||
questions: [
|
||||
buildRatingQuestion({
|
||||
id: reusableQuestionIds[0],
|
||||
blocks: [
|
||||
buildBlock({
|
||||
name: "Block 1",
|
||||
elements: [
|
||||
buildRatingElement({
|
||||
id: reusableElementIds[0],
|
||||
range: 5,
|
||||
scale: "smiley",
|
||||
headline: t("templates.csat_survey_question_1_headline"),
|
||||
required: true,
|
||||
lowerLabel: t("templates.csat_survey_question_1_lower_label"),
|
||||
upperLabel: t("templates.csat_survey_question_1_upper_label"),
|
||||
}),
|
||||
],
|
||||
logic: [
|
||||
{
|
||||
id: createId(),
|
||||
@@ -173,8 +194,8 @@ const csatSurvey = (t: TFnType): TXMTemplate => {
|
||||
{
|
||||
id: createId(),
|
||||
leftOperand: {
|
||||
value: reusableQuestionIds[0],
|
||||
type: "question",
|
||||
value: reusableElementIds[0],
|
||||
type: "element",
|
||||
},
|
||||
operator: "isLessThanOrEqual",
|
||||
rightOperand: {
|
||||
@@ -187,101 +208,103 @@ const csatSurvey = (t: TFnType): TXMTemplate => {
|
||||
actions: [
|
||||
{
|
||||
id: createId(),
|
||||
objective: "jumpToQuestion",
|
||||
target: reusableQuestionIds[2],
|
||||
objective: "jumpToBlock",
|
||||
target: block3Id,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
range: 5,
|
||||
scale: "smiley",
|
||||
headline: t("templates.csat_survey_question_1_headline"),
|
||||
required: true,
|
||||
lowerLabel: t("templates.csat_survey_question_1_lower_label"),
|
||||
upperLabel: t("templates.csat_survey_question_1_upper_label"),
|
||||
t,
|
||||
}),
|
||||
buildOpenTextQuestion({
|
||||
id: reusableQuestionIds[1],
|
||||
logic: [
|
||||
{
|
||||
id: createId(),
|
||||
conditions: {
|
||||
id: createId(),
|
||||
connector: "and",
|
||||
conditions: [
|
||||
{
|
||||
id: createId(),
|
||||
leftOperand: {
|
||||
value: reusableQuestionIds[1],
|
||||
type: "question",
|
||||
},
|
||||
operator: "isSubmitted",
|
||||
},
|
||||
],
|
||||
},
|
||||
actions: [
|
||||
{
|
||||
id: createId(),
|
||||
objective: "jumpToQuestion",
|
||||
target: defaultSurvey.endings[0].id,
|
||||
},
|
||||
],
|
||||
},
|
||||
buildBlock({
|
||||
name: "Block 2",
|
||||
elements: [
|
||||
buildOpenTextElement({
|
||||
id: reusableElementIds[1],
|
||||
headline: t("templates.csat_survey_question_2_headline"),
|
||||
required: false,
|
||||
placeholder: t("templates.csat_survey_question_2_placeholder"),
|
||||
inputType: "text",
|
||||
}),
|
||||
],
|
||||
headline: t("templates.csat_survey_question_2_headline"),
|
||||
required: false,
|
||||
placeholder: t("templates.csat_survey_question_2_placeholder"),
|
||||
inputType: "text",
|
||||
logic: [createBlockJumpLogic(reusableElementIds[1], defaultSurvey.endings[0].id, "isSubmitted")],
|
||||
t,
|
||||
}),
|
||||
buildOpenTextQuestion({
|
||||
id: reusableQuestionIds[2],
|
||||
headline: t("templates.csat_survey_question_3_headline"),
|
||||
required: false,
|
||||
placeholder: t("templates.csat_survey_question_3_placeholder"),
|
||||
inputType: "text",
|
||||
buildBlock({
|
||||
id: block3Id,
|
||||
name: "Block 3",
|
||||
elements: [
|
||||
buildOpenTextElement({
|
||||
id: reusableElementIds[2],
|
||||
headline: t("templates.csat_survey_question_3_headline"),
|
||||
required: false,
|
||||
placeholder: t("templates.csat_survey_question_3_placeholder"),
|
||||
inputType: "text",
|
||||
}),
|
||||
],
|
||||
t,
|
||||
}),
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
const cessSurvey = (t: TFnType): TXMTemplate => {
|
||||
const cessSurvey = (t: TFunction): TXMTemplate => {
|
||||
return {
|
||||
...getXMSurveyDefault(t),
|
||||
name: t("templates.cess_survey_name"),
|
||||
questions: [
|
||||
buildRatingQuestion({
|
||||
range: 5,
|
||||
scale: "number",
|
||||
headline: t("templates.cess_survey_question_1_headline"),
|
||||
required: true,
|
||||
lowerLabel: t("templates.cess_survey_question_1_lower_label"),
|
||||
upperLabel: t("templates.cess_survey_question_1_upper_label"),
|
||||
blocks: [
|
||||
buildBlock({
|
||||
name: "Block 1",
|
||||
elements: [
|
||||
buildRatingElement({
|
||||
range: 5,
|
||||
scale: "number",
|
||||
headline: t("templates.cess_survey_question_1_headline"),
|
||||
required: true,
|
||||
lowerLabel: t("templates.cess_survey_question_1_lower_label"),
|
||||
upperLabel: t("templates.cess_survey_question_1_upper_label"),
|
||||
}),
|
||||
],
|
||||
t,
|
||||
}),
|
||||
buildOpenTextQuestion({
|
||||
headline: t("templates.cess_survey_question_2_headline"),
|
||||
required: true,
|
||||
placeholder: t("templates.cess_survey_question_2_placeholder"),
|
||||
inputType: "text",
|
||||
buildBlock({
|
||||
name: "Block 2",
|
||||
elements: [
|
||||
buildOpenTextElement({
|
||||
headline: t("templates.cess_survey_question_2_headline"),
|
||||
required: true,
|
||||
placeholder: t("templates.cess_survey_question_2_placeholder"),
|
||||
inputType: "text",
|
||||
}),
|
||||
],
|
||||
t,
|
||||
}),
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
const smileysRatingSurvey = (t: TFnType): TXMTemplate => {
|
||||
const reusableQuestionIds = [createId(), createId(), createId()];
|
||||
const smileysRatingSurvey = (t: TFunction): TXMTemplate => {
|
||||
const reusableElementIds = [createId(), createId(), createId()];
|
||||
const block3Id = createId(); // Pre-generate Block 3 ID for logic reference
|
||||
const defaultSurvey = getXMSurveyDefault(t);
|
||||
|
||||
return {
|
||||
...defaultSurvey,
|
||||
name: t("templates.smileys_survey_name"),
|
||||
questions: [
|
||||
buildRatingQuestion({
|
||||
id: reusableQuestionIds[0],
|
||||
blocks: [
|
||||
buildBlock({
|
||||
name: "Block 1",
|
||||
elements: [
|
||||
buildRatingElement({
|
||||
id: reusableElementIds[0],
|
||||
range: 5,
|
||||
scale: "smiley",
|
||||
headline: t("templates.smileys_survey_question_1_headline"),
|
||||
required: true,
|
||||
lowerLabel: t("templates.smileys_survey_question_1_lower_label"),
|
||||
upperLabel: t("templates.smileys_survey_question_1_upper_label"),
|
||||
}),
|
||||
],
|
||||
logic: [
|
||||
{
|
||||
id: createId(),
|
||||
@@ -292,8 +315,8 @@ const smileysRatingSurvey = (t: TFnType): TXMTemplate => {
|
||||
{
|
||||
id: createId(),
|
||||
leftOperand: {
|
||||
value: reusableQuestionIds[0],
|
||||
type: "question",
|
||||
value: reusableElementIds[0],
|
||||
type: "element",
|
||||
},
|
||||
operator: "isLessThanOrEqual",
|
||||
rightOperand: {
|
||||
@@ -306,100 +329,95 @@ const smileysRatingSurvey = (t: TFnType): TXMTemplate => {
|
||||
actions: [
|
||||
{
|
||||
id: createId(),
|
||||
objective: "jumpToQuestion",
|
||||
target: reusableQuestionIds[2],
|
||||
objective: "jumpToBlock",
|
||||
target: block3Id,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
range: 5,
|
||||
scale: "smiley",
|
||||
headline: t("templates.smileys_survey_question_1_headline"),
|
||||
required: true,
|
||||
lowerLabel: t("templates.smileys_survey_question_1_lower_label"),
|
||||
upperLabel: t("templates.smileys_survey_question_1_upper_label"),
|
||||
t,
|
||||
}),
|
||||
buildCTAQuestion({
|
||||
id: reusableQuestionIds[1],
|
||||
html: t("templates.smileys_survey_question_2_html"),
|
||||
logic: [
|
||||
{
|
||||
id: createId(),
|
||||
conditions: {
|
||||
id: createId(),
|
||||
connector: "and",
|
||||
conditions: [
|
||||
{
|
||||
id: createId(),
|
||||
leftOperand: {
|
||||
value: reusableQuestionIds[1],
|
||||
type: "question",
|
||||
},
|
||||
operator: "isClicked",
|
||||
},
|
||||
],
|
||||
},
|
||||
actions: [
|
||||
{
|
||||
id: createId(),
|
||||
objective: "jumpToQuestion",
|
||||
target: defaultSurvey.endings[0].id,
|
||||
},
|
||||
],
|
||||
},
|
||||
buildBlock({
|
||||
name: "Block 2",
|
||||
elements: [
|
||||
buildCTAElement({
|
||||
id: reusableElementIds[1],
|
||||
subheader: t("templates.smileys_survey_question_2_html"),
|
||||
headline: t("templates.smileys_survey_question_2_headline"),
|
||||
required: false,
|
||||
buttonUrl: "https://formbricks.com/github",
|
||||
buttonExternal: true,
|
||||
ctaButtonLabel: t("templates.smileys_survey_question_2_button_label"),
|
||||
}),
|
||||
],
|
||||
headline: t("templates.smileys_survey_question_2_headline"),
|
||||
required: true,
|
||||
buttonUrl: "https://formbricks.com/github",
|
||||
buttonLabel: t("templates.smileys_survey_question_2_button_label"),
|
||||
buttonExternal: true,
|
||||
logic: [createBlockJumpLogic(reusableElementIds[1], defaultSurvey.endings[0].id, "isClicked")],
|
||||
t,
|
||||
}),
|
||||
buildOpenTextQuestion({
|
||||
id: reusableQuestionIds[2],
|
||||
headline: t("templates.smileys_survey_question_3_headline"),
|
||||
required: true,
|
||||
subheader: t("templates.smileys_survey_question_3_subheader"),
|
||||
buildBlock({
|
||||
id: block3Id,
|
||||
name: "Block 3",
|
||||
elements: [
|
||||
buildOpenTextElement({
|
||||
id: reusableElementIds[2],
|
||||
headline: t("templates.smileys_survey_question_3_headline"),
|
||||
required: true,
|
||||
subheader: t("templates.smileys_survey_question_3_subheader"),
|
||||
placeholder: t("templates.smileys_survey_question_3_placeholder"),
|
||||
inputType: "text",
|
||||
}),
|
||||
],
|
||||
buttonLabel: t("templates.smileys_survey_question_3_button_label"),
|
||||
placeholder: t("templates.smileys_survey_question_3_placeholder"),
|
||||
inputType: "text",
|
||||
t,
|
||||
}),
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
const enpsSurvey = (t: TFnType): TXMTemplate => {
|
||||
const enpsSurvey = (t: TFunction): TXMTemplate => {
|
||||
return {
|
||||
...getXMSurveyDefault(t),
|
||||
name: t("templates.enps_survey_name"),
|
||||
questions: [
|
||||
buildNPSQuestion({
|
||||
headline: t("templates.enps_survey_question_1_headline"),
|
||||
required: false,
|
||||
lowerLabel: t("templates.enps_survey_question_1_lower_label"),
|
||||
upperLabel: t("templates.enps_survey_question_1_upper_label"),
|
||||
isColorCodingEnabled: true,
|
||||
blocks: [
|
||||
buildBlock({
|
||||
name: "Block 1",
|
||||
elements: [
|
||||
buildNPSElement({
|
||||
headline: t("templates.enps_survey_question_1_headline"),
|
||||
required: false,
|
||||
lowerLabel: t("templates.enps_survey_question_1_lower_label"),
|
||||
upperLabel: t("templates.enps_survey_question_1_upper_label"),
|
||||
isColorCodingEnabled: true,
|
||||
}),
|
||||
],
|
||||
t,
|
||||
}),
|
||||
buildOpenTextQuestion({
|
||||
headline: t("templates.enps_survey_question_2_headline"),
|
||||
required: false,
|
||||
inputType: "text",
|
||||
buildBlock({
|
||||
name: "Block 2",
|
||||
elements: [
|
||||
buildOpenTextElement({
|
||||
headline: t("templates.enps_survey_question_2_headline"),
|
||||
required: false,
|
||||
inputType: "text",
|
||||
}),
|
||||
],
|
||||
t,
|
||||
}),
|
||||
buildOpenTextQuestion({
|
||||
headline: t("templates.enps_survey_question_3_headline"),
|
||||
required: false,
|
||||
inputType: "text",
|
||||
buildBlock({
|
||||
name: "Block 3",
|
||||
elements: [
|
||||
buildOpenTextElement({
|
||||
headline: t("templates.enps_survey_question_3_headline"),
|
||||
required: false,
|
||||
inputType: "text",
|
||||
}),
|
||||
],
|
||||
t,
|
||||
}),
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
export const getXMTemplates = (t: TFnType): TXMTemplate[] => {
|
||||
export const getXMTemplates = (t: TFunction): TXMTemplate[] => {
|
||||
try {
|
||||
return [
|
||||
npsSurvey(t),
|
||||
|
||||
@@ -1,15 +1,15 @@
import { XIcon } from "lucide-react";
import { getServerSession } from "next-auth";
import Link from "next/link";
import { XMTemplateList } from "@/app/(app)/(onboarding)/environments/[environmentId]/xm-templates/components/XMTemplateList";
import { getEnvironment } from "@/lib/environment/service";
import { getProjectByEnvironmentId, getUserProjects } from "@/lib/project/service";
import { getUser } from "@/lib/user/service";
import { getOrganizationIdFromEnvironmentId } from "@/lib/utils/helper";
import { getTranslate } from "@/lingodotdev/server";
import { authOptions } from "@/modules/auth/lib/authOptions";
import { Button } from "@/modules/ui/components/button";
import { Header } from "@/modules/ui/components/header";
import { getTranslate } from "@/tolgee/server";
import { XIcon } from "lucide-react";
import { getServerSession } from "next-auth";
import Link from "next/link";

interface XMTemplatePageProps {
  params: Promise<{
@@ -38,7 +38,7 @@ const Page = async (props: XMTemplatePageProps) => {

  const project = await getProjectByEnvironmentId(environment.id);
  if (!project) {
    throw new Error(t("common.project_not_found"));
    throw new Error(t("common.workspace_not_found"));
  }

  const projects = await getUserProjects(session.user.id, organizationId);
@@ -49,7 +49,7 @@ const Page = async (props: XMTemplatePageProps) => {
  <XMTemplateList project={project} user={user} environmentId={environment.id} />
  {projects.length >= 2 && (
    <Button
      className="absolute right-5 top-5 !mt-0 text-slate-500 hover:text-slate-700"
      className="absolute top-5 right-5 !mt-0 text-slate-500 hover:text-slate-700"
      variant="ghost"
      asChild>
      <Link href={`/environments/${environment.id}/surveys`}>

@@ -1,12 +1,12 @@
"use server";

import { TOrganizationTeam } from "@/app/(app)/(onboarding)/types/onboarding";
import { validateInputs } from "@/lib/utils/validate";
import { Prisma } from "@prisma/client";
import { cache as reactCache } from "react";
import { prisma } from "@formbricks/database";
import { ZId } from "@formbricks/types/common";
import { DatabaseError } from "@formbricks/types/errors";
import { TOrganizationTeam } from "@/app/(app)/(onboarding)/types/onboarding";
import { validateInputs } from "@/lib/utils/validate";

export const getTeamsByOrganizationId = reactCache(
  async (organizationId: string): Promise<TOrganizationTeam[] | null> => {

@@ -1,99 +0,0 @@
|
||||
import "@testing-library/jest-dom/vitest";
|
||||
import { cleanup, render, screen } from "@testing-library/react";
|
||||
import userEvent from "@testing-library/user-event";
|
||||
import { afterEach, describe, expect, test, vi } from "vitest";
|
||||
import { LandingSidebar } from "./landing-sidebar";
|
||||
|
||||
// Mock constants that this test needs
|
||||
vi.mock("@/lib/constants", () => ({
|
||||
IS_FORMBRICKS_CLOUD: false,
|
||||
WEBAPP_URL: "http://localhost:3000",
|
||||
}));
|
||||
|
||||
// Mock server actions that this test needs
|
||||
vi.mock("@/modules/auth/actions/sign-out", () => ({
|
||||
logSignOutAction: vi.fn().mockResolvedValue(undefined),
|
||||
}));
|
||||
|
||||
// Module mocks must be declared before importing the component
|
||||
vi.mock("@tolgee/react", () => ({
|
||||
useTranslate: () => ({ t: (key: string) => key, isLoading: false }),
|
||||
}));
|
||||
|
||||
// Mock our useSignOut hook
|
||||
const mockSignOut = vi.fn();
|
||||
vi.mock("@/modules/auth/hooks/use-sign-out", () => ({
|
||||
useSignOut: () => ({
|
||||
signOut: mockSignOut,
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.mock("next/navigation", () => ({ useRouter: () => ({ push: vi.fn() }) }));
|
||||
vi.mock("@/modules/organization/components/CreateOrganizationModal", () => ({
|
||||
CreateOrganizationModal: ({ open }: { open: boolean }) => (
|
||||
<div data-testid={open ? "modal-open" : "modal-closed"} />
|
||||
),
|
||||
}));
|
||||
vi.mock("@/modules/ui/components/avatars", () => ({
|
||||
ProfileAvatar: ({ userId }: { userId: string }) => <div data-testid="avatar">{userId}</div>,
|
||||
}));
|
||||
|
||||
// Ensure mocks are reset between tests
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe("LandingSidebar component", () => {
|
||||
const user = { id: "u1", name: "Alice", email: "alice@example.com", imageUrl: "" } as any;
|
||||
const organization = { id: "o1", name: "orgOne" } as any;
|
||||
const organizations = [
|
||||
{ id: "o2", name: "betaOrg" },
|
||||
{ id: "o1", name: "alphaOrg" },
|
||||
] as any;
|
||||
|
||||
test("renders logo, avatar, and initial modal closed", () => {
|
||||
render(
|
||||
<LandingSidebar
|
||||
isMultiOrgEnabled={false}
|
||||
user={user}
|
||||
organization={organization}
|
||||
organizations={organizations}
|
||||
/>
|
||||
);
|
||||
|
||||
// Formbricks logo
|
||||
expect(screen.getByAltText("environments.formbricks_logo")).toBeInTheDocument();
|
||||
// Profile avatar
|
||||
expect(screen.getByTestId("avatar")).toHaveTextContent("u1");
|
||||
// CreateOrganizationModal should be closed initially
|
||||
expect(screen.getByTestId("modal-closed")).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test("clicking logout triggers signOut", async () => {
|
||||
render(
|
||||
<LandingSidebar
|
||||
isMultiOrgEnabled={false}
|
||||
user={user}
|
||||
organization={organization}
|
||||
organizations={organizations}
|
||||
/>
|
||||
);
|
||||
|
||||
// Open user dropdown by clicking on avatar trigger
|
||||
const trigger = screen.getByTestId("avatar").parentElement;
|
||||
if (trigger) await userEvent.click(trigger);
|
||||
|
||||
// Click logout menu item
|
||||
const logoutItem = await screen.findByText("common.logout");
|
||||
await userEvent.click(logoutItem);
|
||||
|
||||
expect(mockSignOut).toHaveBeenCalledWith({
|
||||
reason: "user_initiated",
|
||||
redirectUrl: "/auth/login",
|
||||
organizationId: "o1",
|
||||
redirect: true,
|
||||
callbackUrl: "/auth/login",
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,8 +1,14 @@
"use client";

import { ArrowUpRightIcon, ChevronRightIcon, LogOutIcon } from "lucide-react";
import Image from "next/image";
import Link from "next/link";
import { useState } from "react";
import { useTranslation } from "react-i18next";
import { TOrganization } from "@formbricks/types/organizations";
import { TUser } from "@formbricks/types/user";
import FBLogo from "@/images/formbricks-wordmark.svg";
import { cn } from "@/lib/cn";
import { capitalizeFirstLetter } from "@/lib/utils/strings";
import { useSignOut } from "@/modules/auth/hooks/use-sign-out";
import { CreateOrganizationModal } from "@/modules/organization/components/CreateOrganizationModal";
import { ProfileAvatar } from "@/modules/ui/components/avatars";
@@ -10,48 +16,20 @@ import {
  DropdownMenu,
  DropdownMenuContent,
  DropdownMenuItem,
  DropdownMenuPortal,
  DropdownMenuRadioGroup,
  DropdownMenuRadioItem,
  DropdownMenuSeparator,
  DropdownMenuSub,
  DropdownMenuSubContent,
  DropdownMenuSubTrigger,
  DropdownMenuTrigger,
} from "@/modules/ui/components/dropdown-menu";
import { useTranslate } from "@tolgee/react";
import { ArrowUpRightIcon, ChevronRightIcon, LogOutIcon, PlusIcon } from "lucide-react";
import Image from "next/image";
import Link from "next/link";
import { useRouter } from "next/navigation";
import { useMemo, useState } from "react";
import { TOrganization } from "@formbricks/types/organizations";
import { TUser } from "@formbricks/types/user";

interface LandingSidebarProps {
  isMultiOrgEnabled: boolean;
  user: TUser;
  organization: TOrganization;
  organizations: TOrganization[];
}

export const LandingSidebar = ({
  isMultiOrgEnabled,
  user,
  organization,
  organizations,
}: LandingSidebarProps) => {
export const LandingSidebar = ({ user, organization }: LandingSidebarProps) => {
  const [openCreateOrganizationModal, setOpenCreateOrganizationModal] = useState<boolean>(false);

  const { t } = useTranslate();
  const { t } = useTranslation();
  const { signOut: signOutWithAudit } = useSignOut({ id: user.id, email: user.email });

  const router = useRouter();

  const handleEnvironmentChangeByOrganization = (organizationId: string) => {
    router.push(`/organizations/${organizationId}/`);
  };

  const dropdownNavigation = [
    {
      label: t("common.documentation"),
@@ -61,13 +39,6 @@ export const LandingSidebar = ({
    },
  ];

  const currentOrganizationId = organization?.id;
  const currentOrganizationName = capitalizeFirstLetter(organization?.name);

  const sortedOrganizations = useMemo(() => {
    return [...organizations].sort((a, b) => a.name.localeCompare(b.name));
  }, [organizations]);

  return (
    <aside
      className={cn(
@@ -80,27 +51,26 @@ export const LandingSidebar = ({
          <DropdownMenuTrigger
            asChild
            id="userDropdownTrigger"
            className="w-full rounded-br-xl border-t py-4 pl-4 transition-colors duration-200 hover:bg-slate-50 focus:outline-none">
            <div tabIndex={0} className={cn("flex cursor-pointer flex-row items-center space-x-3")}>
              <ProfileAvatar userId={user.id} imageUrl={user.imageUrl} />
              <>
                <div>
                  <p
                    title={user?.email}
                    className={cn(
                      "ph-no-capture ph-no-capture -mb-0.5 max-w-28 truncate text-sm font-bold text-slate-700"
                    )}>
                    {user?.name ? <span>{user?.name}</span> : <span>{user?.email}</span>}
                  </p>
                  <p
                    title={capitalizeFirstLetter(organization?.name)}
                    className="max-w-28 truncate text-sm text-slate-500">
                    {capitalizeFirstLetter(organization?.name)}
                  </p>
                </div>
                <ChevronRightIcon className={cn("h-5 w-5 text-slate-700 hover:text-slate-500")} />
              </>
            </div>
            className="w-full rounded-br-xl border-t p-4 transition-colors duration-200 hover:bg-slate-50 focus:outline-none">
            <button
              type="button"
              className={cn("flex w-full cursor-pointer flex-row items-center gap-3 text-left")}
              aria-haspopup="menu">
              <ProfileAvatar userId={user.id} />
              <div className="grow overflow-hidden">
                <p
                  title={user?.email}
                  className={cn(
                    "ph-no-capture ph-no-capture -mb-0.5 truncate text-sm font-bold text-slate-700"
                  )}>
                  {user?.name ? <span>{user?.name}</span> : <span>{user?.email}</span>}
                </p>
                <p title={organization?.name} className="truncate text-sm text-slate-500">
                  {organization?.name}
                </p>
              </div>
              <ChevronRightIcon className={cn("h-5 w-5 shrink-0 text-slate-700 hover:text-slate-500")} />
            </button>
          </DropdownMenuTrigger>

          <DropdownMenuContent
@@ -112,7 +82,13 @@ export const LandingSidebar = ({
            {/* Dropdown Items */}

            {dropdownNavigation.map((link) => (
              <Link id={link.href} href={link.href} target={link.target} className="flex w-full items-center">
              <Link
                key={link.href}
                id={link.href}
                href={link.href}
                target={link.target}
                rel={link.target === "_blank" ? "noopener noreferrer" : undefined}
                className="flex w-full items-center">
                <DropdownMenuItem>
                  <link.icon className="mr-2 h-4 w-4" strokeWidth={1.5} />
                  {link.label}
@@ -121,7 +97,6 @@ export const LandingSidebar = ({
            ))}

            {/* Logout */}

            <DropdownMenuItem
              onClick={async () => {
                await signOutWithAudit({
@@ -130,50 +105,12 @@ export const LandingSidebar = ({
                  organizationId: organization.id,
                  redirect: true,
                  callbackUrl: "/auth/login",
                  clearEnvironmentId: true,
                });
              }}
              icon={<LogOutIcon className="mr-2 h-4 w-4" strokeWidth={1.5} />}>
              {t("common.logout")}
            </DropdownMenuItem>

            {/* Organization Switch */}

            {(isMultiOrgEnabled || organizations.length > 1) && (
              <DropdownMenuSub>
                <DropdownMenuSubTrigger className="rounded-lg">
                  <div>
                    <p>{currentOrganizationName}</p>
                    <p className="block text-xs text-slate-500">{t("common.switch_organization")}</p>
                  </div>
                </DropdownMenuSubTrigger>
                <DropdownMenuPortal>
                  <DropdownMenuSubContent sideOffset={10} alignOffset={5}>
                    <DropdownMenuRadioGroup
                      value={currentOrganizationId}
                      onValueChange={(organizationId) =>
                        handleEnvironmentChangeByOrganization(organizationId)
                      }>
                      {sortedOrganizations.map((organization) => (
                        <DropdownMenuRadioItem
                          value={organization.id}
                          className="cursor-pointer rounded-lg"
                          key={organization.id}>
                          {organization.name}
                        </DropdownMenuRadioItem>
                      ))}
                    </DropdownMenuRadioGroup>
                    <DropdownMenuSeparator />
                    {isMultiOrgEnabled && (
                      <DropdownMenuItem
                        onClick={() => setOpenCreateOrganizationModal(true)}
                        icon={<PlusIcon className="mr-2 h-4 w-4" />}>
                        <span>{t("common.create_new_organization")}</span>
                      </DropdownMenuItem>
                    )}
                  </DropdownMenuSubContent>
                </DropdownMenuPortal>
              </DropdownMenuSub>
            )}
          </DropdownMenuContent>
        </DropdownMenu>
      </div>

@@ -1,187 +0,0 @@
|
||||
import { getEnvironments } from "@/lib/environment/service";
|
||||
import { getMembershipByUserIdOrganizationId } from "@/lib/membership/service";
|
||||
import { getUserProjects } from "@/lib/project/service";
|
||||
import "@testing-library/jest-dom/vitest";
|
||||
import { cleanup } from "@testing-library/preact";
|
||||
import { getServerSession } from "next-auth";
|
||||
import { notFound, redirect } from "next/navigation";
|
||||
import { afterEach, describe, expect, test, vi } from "vitest";
|
||||
import LandingLayout from "./layout";
|
||||
|
||||
vi.mock("@/lib/constants", () => ({
|
||||
IS_FORMBRICKS_CLOUD: false,
|
||||
IS_PRODUCTION: false,
|
||||
IS_DEVELOPMENT: true,
|
||||
E2E_TESTING: false,
|
||||
WEBAPP_URL: "http://localhost:3000",
|
||||
PUBLIC_URL: "http://localhost:3000/survey",
|
||||
ENCRYPTION_KEY: "mock-encryption-key",
|
||||
CRON_SECRET: "mock-cron-secret",
|
||||
DEFAULT_BRAND_COLOR: "#64748b",
|
||||
FB_LOGO_URL: "https://mock-logo-url.com/logo.png",
|
||||
PRIVACY_URL: "http://localhost:3000/privacy",
|
||||
TERMS_URL: "http://localhost:3000/terms",
|
||||
IMPRINT_URL: "http://localhost:3000/imprint",
|
||||
IMPRINT_ADDRESS: "Mock Address",
|
||||
PASSWORD_RESET_DISABLED: false,
|
||||
EMAIL_VERIFICATION_DISABLED: false,
|
||||
GOOGLE_OAUTH_ENABLED: false,
|
||||
GITHUB_OAUTH_ENABLED: false,
|
||||
AZURE_OAUTH_ENABLED: false,
|
||||
OIDC_OAUTH_ENABLED: false,
|
||||
SAML_OAUTH_ENABLED: false,
|
||||
SAML_XML_DIR: "./mock-saml-connection",
|
||||
SIGNUP_ENABLED: true,
|
||||
EMAIL_AUTH_ENABLED: true,
|
||||
INVITE_DISABLED: false,
|
||||
SLACK_CLIENT_SECRET: "mock-slack-secret",
|
||||
SLACK_CLIENT_ID: "mock-slack-id",
|
||||
SLACK_AUTH_URL: "https://mock-slack-auth-url.com",
|
||||
GOOGLE_SHEETS_CLIENT_ID: "mock-google-sheets-id",
|
||||
GOOGLE_SHEETS_CLIENT_SECRET: "mock-google-sheets-secret",
|
||||
GOOGLE_SHEETS_REDIRECT_URL: "http://localhost:3000/google-sheets-redirect",
|
||||
NOTION_OAUTH_CLIENT_ID: "mock-notion-id",
|
||||
NOTION_OAUTH_CLIENT_SECRET: "mock-notion-secret",
|
||||
NOTION_REDIRECT_URI: "http://localhost:3000/notion-redirect",
|
||||
NOTION_AUTH_URL: "https://mock-notion-auth-url.com",
|
||||
AIRTABLE_CLIENT_ID: "mock-airtable-id",
|
||||
SMTP_HOST: "mock-smtp-host",
|
||||
SMTP_PORT: "587",
|
||||
SMTP_SECURE_ENABLED: false,
|
||||
SMTP_USER: "mock-smtp-user",
|
||||
SMTP_PASSWORD: "mock-smtp-password",
|
||||
SMTP_AUTHENTICATED: true,
|
||||
SMTP_REJECT_UNAUTHORIZED_TLS: true,
|
||||
MAIL_FROM: "mock@mail.com",
|
||||
MAIL_FROM_NAME: "Mock Mail",
|
||||
NEXTAUTH_SECRET: "mock-nextauth-secret",
|
||||
ITEMS_PER_PAGE: 30,
|
||||
SURVEYS_PER_PAGE: 12,
|
||||
RESPONSES_PER_PAGE: 25,
|
||||
TEXT_RESPONSES_PER_PAGE: 5,
|
||||
INSIGHTS_PER_PAGE: 10,
|
||||
DOCUMENTS_PER_PAGE: 10,
|
||||
MAX_RESPONSES_FOR_INSIGHT_GENERATION: 500,
|
||||
MAX_OTHER_OPTION_LENGTH: 250,
|
||||
ENTERPRISE_LICENSE_KEY: "ABC",
|
||||
GITHUB_ID: "mock-github-id",
|
||||
GITHUB_SECRET: "mock-github-secret",
|
||||
GITHUB_OAUTH_URL: "https://mock-github-auth-url.com",
|
||||
AZURE_ID: "mock-azure-id",
|
||||
AZUREAD_CLIENT_ID: "mock-azure-client-id",
|
||||
AZUREAD_CLIENT_SECRET: "mock-azure-client-secret",
|
||||
GOOGLE_CLIENT_ID: "mock-google-client-id",
|
||||
GOOGLE_CLIENT_SECRET: "mock-google-client-secret",
|
||||
GOOGLE_OAUTH_URL: "https://mock-google-auth-url.com",
|
||||
AZURE_OAUTH_URL: "https://mock-azure-auth-url.com",
|
||||
OIDC_ID: "mock-oidc-id",
|
||||
OIDC_OAUTH_URL: "https://mock-oidc-auth-url.com",
|
||||
SAML_ID: "mock-saml-id",
|
||||
SAML_OAUTH_URL: "https://mock-saml-auth-url.com",
|
||||
SAML_METADATA_URL: "https://mock-saml-metadata-url.com",
|
||||
AZUREAD_TENANT_ID: "mock-azure-tenant-id",
|
||||
AZUREAD_OAUTH_URL: "https://mock-azuread-auth-url.com",
|
||||
OIDC_DISPLAY_NAME: "Mock OIDC",
|
||||
OIDC_CLIENT_ID: "mock-oidc-client-id",
|
||||
OIDC_CLIENT_SECRET: "mock-oidc-client-secret",
|
||||
OIDC_REDIRECT_URL: "http://localhost:3000/oidc-redirect",
|
||||
OIDC_AUTH_URL: "https://mock-oidc-auth-url.com",
|
||||
OIDC_ISSUER: "https://mock-oidc-issuer.com",
|
||||
OIDC_SIGNING_ALGORITHM: "RS256",
|
||||
SESSION_MAX_AGE: 1000,
|
||||
REDIS_URL: "test-redis-url",
|
||||
AUDIT_LOG_ENABLED: true,
|
||||
}));
|
||||
|
||||
vi.mock("@/lib/environment/service");
|
||||
vi.mock("@/lib/membership/service");
|
||||
vi.mock("@/lib/project/service");
|
||||
vi.mock("next-auth");
|
||||
vi.mock("next/navigation");
|
||||
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
});
|
||||
|
||||
describe("LandingLayout", () => {
|
||||
test("redirects to login if no session exists", async () => {
|
||||
vi.mocked(getServerSession).mockResolvedValue(null);
|
||||
|
||||
const props = { params: { organizationId: "org-123" }, children: <div>Child Content</div> };
|
||||
|
||||
await LandingLayout(props);
|
||||
|
||||
expect(vi.mocked(redirect)).toHaveBeenCalledWith("/auth/login");
|
||||
});
|
||||
|
||||
test("returns notFound if no membership is found", async () => {
|
||||
vi.mocked(getServerSession).mockResolvedValue({ user: { id: "user-123" } });
|
||||
vi.mocked(getMembershipByUserIdOrganizationId).mockResolvedValue(null);
|
||||
|
||||
const props = { params: { organizationId: "org-123" }, children: <div>Child Content</div> };
|
||||
|
||||
await LandingLayout(props);
|
||||
|
||||
expect(vi.mocked(notFound)).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test("redirects to production environment if available", async () => {
|
||||
vi.mocked(getServerSession).mockResolvedValue({ user: { id: "user-123" } });
|
||||
vi.mocked(getMembershipByUserIdOrganizationId).mockResolvedValue({
|
||||
organizationId: "org-123",
|
||||
userId: "user-123",
|
||||
accepted: true,
|
||||
role: "owner",
|
||||
});
|
||||
vi.mocked(getUserProjects).mockResolvedValue([
|
||||
{
|
||||
id: "proj-123",
|
||||
organizationId: "org-123",
|
||||
createdAt: new Date("2023-01-01"),
|
||||
updatedAt: new Date("2023-01-02"),
|
||||
name: "Project 1",
|
||||
styling: { allowStyleOverwrite: true },
|
||||
recontactDays: 30,
|
||||
inAppSurveyBranding: true,
|
||||
linkSurveyBranding: true,
|
||||
} as any,
|
||||
]);
|
||||
vi.mocked(getEnvironments).mockResolvedValue([
|
||||
{
|
||||
id: "env-123",
|
||||
type: "production",
|
||||
projectId: "proj-123",
|
||||
createdAt: new Date("2023-01-01"),
|
||||
updatedAt: new Date("2023-01-02"),
|
||||
appSetupCompleted: true,
|
||||
},
|
||||
]);
|
||||
|
||||
const props = { params: { organizationId: "org-123" }, children: <div>Child Content</div> };
|
||||
|
||||
await LandingLayout(props);
|
||||
|
||||
expect(vi.mocked(redirect)).toHaveBeenCalledWith("/environments/env-123/");
|
||||
});
|
||||
|
||||
test("renders children if no projects or production environment exist", async () => {
|
||||
vi.mocked(getServerSession).mockResolvedValue({ user: { id: "user-123" } });
|
||||
vi.mocked(getMembershipByUserIdOrganizationId).mockResolvedValue({
|
||||
organizationId: "org-123",
|
||||
userId: "user-123",
|
||||
accepted: true,
|
||||
role: "owner",
|
||||
});
|
||||
vi.mocked(getUserProjects).mockResolvedValue([]);
|
||||
|
||||
const props = { params: { organizationId: "org-123" }, children: <div>Child Content</div> };
|
||||
|
||||
const result = await LandingLayout(props);
|
||||
|
||||
expect(result).toEqual(
|
||||
<>
|
||||
<div>Child Content</div>
|
||||
</>
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -1,9 +1,9 @@
import { getServerSession } from "next-auth";
import { notFound, redirect } from "next/navigation";
import { getEnvironments } from "@/lib/environment/service";
import { getMembershipByUserIdOrganizationId } from "@/lib/membership/service";
import { getUserProjects } from "@/lib/project/service";
import { authOptions } from "@/modules/auth/lib/authOptions";
import { getServerSession } from "next-auth";
import { notFound, redirect } from "next/navigation";

const LandingLayout = async (props) => {
  const params = await props.params;

@@ -1,199 +0,0 @@
|
||||
import { getOrganizationsByUserId } from "@/lib/organization/service";
|
||||
import { getUser } from "@/lib/user/service";
|
||||
import { getOrganizationAuth } from "@/modules/organization/lib/utils";
|
||||
import { getTranslate } from "@/tolgee/server";
|
||||
import "@testing-library/jest-dom/vitest";
|
||||
import { cleanup, render, screen } from "@testing-library/react";
|
||||
import { notFound, redirect } from "next/navigation";
|
||||
import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
|
||||
|
||||
vi.mock("@/modules/ee/license-check/lib/license", () => ({
|
||||
getEnterpriseLicense: vi.fn().mockResolvedValue({
|
||||
active: true,
|
||||
features: { isMultiOrgEnabled: true },
|
||||
lastChecked: new Date(),
|
||||
isPendingDowngrade: false,
|
||||
fallbackLevel: "live",
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.mock("@/lib/constants", () => ({
|
||||
IS_FORMBRICKS_CLOUD: false,
|
||||
IS_PRODUCTION: false,
|
||||
IS_DEVELOPMENT: true,
|
||||
E2E_TESTING: false,
|
||||
WEBAPP_URL: "http://localhost:3000",
|
||||
ENCRYPTION_KEY: "mock-encryption-key",
|
||||
CRON_SECRET: "mock-cron-secret",
|
||||
DEFAULT_BRAND_COLOR: "#64748b",
|
||||
FB_LOGO_URL: "https://mock-logo-url.com/logo.png",
|
||||
PRIVACY_URL: "http://localhost:3000/privacy",
|
||||
TERMS_URL: "http://localhost:3000/terms",
|
||||
IMPRINT_URL: "http://localhost:3000/imprint",
|
||||
IMPRINT_ADDRESS: "Mock Address",
|
||||
PASSWORD_RESET_DISABLED: false,
|
||||
EMAIL_VERIFICATION_DISABLED: false,
|
||||
GOOGLE_OAUTH_ENABLED: false,
|
||||
GITHUB_OAUTH_ENABLED: false,
|
||||
AZURE_OAUTH_ENABLED: false,
|
||||
OIDC_OAUTH_ENABLED: false,
|
||||
SAML_OAUTH_ENABLED: false,
|
||||
SAML_XML_DIR: "./mock-saml-connection",
|
||||
SIGNUP_ENABLED: true,
|
||||
EMAIL_AUTH_ENABLED: true,
|
||||
INVITE_DISABLED: false,
|
||||
SLACK_CLIENT_SECRET: "mock-slack-secret",
|
||||
SLACK_CLIENT_ID: "mock-slack-id",
|
||||
SLACK_AUTH_URL: "https://mock-slack-auth-url.com",
|
||||
GOOGLE_SHEETS_CLIENT_ID: "mock-google-sheets-id",
|
||||
GOOGLE_SHEETS_CLIENT_SECRET: "mock-google-sheets-secret",
|
||||
GOOGLE_SHEETS_REDIRECT_URL: "http://localhost:3000/google-sheets-redirect",
|
||||
NOTION_OAUTH_CLIENT_ID: "mock-notion-id",
|
||||
NOTION_OAUTH_CLIENT_SECRET: "mock-notion-secret",
|
||||
NOTION_REDIRECT_URI: "http://localhost:3000/notion-redirect",
|
||||
NOTION_AUTH_URL: "https://mock-notion-auth-url.com",
|
||||
AIRTABLE_CLIENT_ID: "mock-airtable-id",
|
||||
SMTP_HOST: "mock-smtp-host",
|
||||
SMTP_PORT: "587",
|
||||
SMTP_SECURE_ENABLED: false,
|
||||
SMTP_USER: "mock-smtp-user",
|
||||
SMTP_PASSWORD: "mock-smtp-password",
|
||||
SMTP_AUTHENTICATED: true,
|
||||
SMTP_REJECT_UNAUTHORIZED_TLS: true,
|
||||
MAIL_FROM: "mock@mail.com",
|
||||
MAIL_FROM_NAME: "Mock Mail",
|
||||
NEXTAUTH_SECRET: "mock-nextauth-secret",
|
||||
ITEMS_PER_PAGE: 30,
|
||||
SURVEYS_PER_PAGE: 12,
|
||||
RESPONSES_PER_PAGE: 25,
|
||||
TEXT_RESPONSES_PER_PAGE: 5,
|
||||
INSIGHTS_PER_PAGE: 10,
|
||||
DOCUMENTS_PER_PAGE: 10,
|
||||
MAX_RESPONSES_FOR_INSIGHT_GENERATION: 500,
|
||||
MAX_OTHER_OPTION_LENGTH: 250,
|
||||
ENTERPRISE_LICENSE_KEY: "ABC",
|
||||
GITHUB_ID: "mock-github-id",
|
||||
GITHUB_SECRET: "mock-github-secret",
|
||||
GITHUB_OAUTH_URL: "https://mock-github-auth-url.com",
|
||||
AZURE_ID: "mock-azure-id",
|
||||
AZUREAD_CLIENT_ID: "mock-azure-client-id",
|
||||
AZUREAD_CLIENT_SECRET: "mock-azure-client-secret",
|
||||
GOOGLE_CLIENT_ID: "mock-google-client-id",
|
||||
GOOGLE_CLIENT_SECRET: "mock-google-client-secret",
|
||||
GOOGLE_OAUTH_URL: "https://mock-google-auth-url.com",
|
||||
AZURE_OAUTH_URL: "https://mock-azure-auth-url.com",
|
||||
OIDC_ID: "mock-oidc-id",
|
||||
OIDC_OAUTH_URL: "https://mock-oidc-auth-url.com",
|
||||
SAML_ID: "mock-saml-id",
|
||||
SAML_OAUTH_URL: "https://mock-saml-auth-url.com",
|
||||
SAML_METADATA_URL: "https://mock-saml-metadata-url.com",
|
||||
AZUREAD_TENANT_ID: "mock-azure-tenant-id",
|
||||
AZUREAD_OAUTH_URL: "https://mock-azuread-auth-url.com",
|
||||
OIDC_DISPLAY_NAME: "Mock OIDC",
|
||||
OIDC_CLIENT_ID: "mock-oidc-client-id",
|
||||
OIDC_CLIENT_SECRET: "mock-oidc-client-secret",
|
||||
OIDC_REDIRECT_URL: "http://localhost:3000/oidc-redirect",
|
||||
OIDC_AUTH_URL: "https://mock-oidc-auth-url.com",
|
||||
OIDC_ISSUER: "https://mock-oidc-issuer.com",
|
||||
OIDC_SIGNING_ALGORITHM: "RS256",
|
||||
SESSION_MAX_AGE: 1000,
|
||||
REDIS_URL: "test-redis-url",
|
||||
AUDIT_LOG_ENABLED: true,
|
||||
}));
|
||||
|
||||
vi.mock("@/app/(app)/(onboarding)/organizations/[organizationId]/landing/components/landing-sidebar", () => ({
|
||||
LandingSidebar: () => <div data-testid="landing-sidebar" />,
|
||||
}));
|
||||
vi.mock("@/modules/organization/lib/utils");
|
||||
vi.mock("@/lib/user/service");
|
||||
vi.mock("@/lib/organization/service");
|
||||
vi.mock("@/tolgee/server");
|
||||
vi.mock("next/navigation", () => ({
|
||||
redirect: vi.fn(() => "REDIRECT_STUB"),
|
||||
notFound: vi.fn(() => "NOT_FOUND_STUB"),
|
||||
}));
|
||||
|
||||
// Mock the React cache function
|
||||
vi.mock("react", async () => {
|
||||
const actual = await vi.importActual("react");
|
||||
return {
|
||||
...actual,
|
||||
cache: (fn: any) => fn,
|
||||
};
|
||||
});
|
||||
|
||||
describe("Page component", () => {
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
test("redirects to login if no user session", async () => {
|
||||
vi.mocked(getOrganizationAuth).mockResolvedValue({ session: {}, organization: {} } as any);
|
||||
await vi.doMock("@/modules/ee/license-check/lib/license", () => ({
|
||||
getEnterpriseLicense: vi.fn().mockResolvedValue({
|
||||
active: true,
|
||||
features: { isMultiOrgEnabled: true },
|
||||
lastChecked: new Date(),
|
||||
isPendingDowngrade: false,
|
||||
fallbackLevel: "live",
|
||||
}),
|
||||
}));
|
||||
const { default: Page } = await import("./page");
|
||||
const result = await Page({ params: { organizationId: "org1" } });
|
||||
expect(redirect).toHaveBeenCalledWith("/auth/login");
|
||||
expect(result).toBe("REDIRECT_STUB");
|
||||
});
|
||||
|
||||
test("returns notFound if user does not exist", async () => {
|
||||
vi.mocked(getOrganizationAuth).mockResolvedValue({
|
||||
session: { user: { id: "user1" } },
|
||||
organization: {},
|
||||
} as any);
|
||||
vi.mocked(getUser).mockResolvedValue(null);
|
||||
await vi.doMock("@/modules/ee/license-check/lib/license", () => ({
|
||||
getEnterpriseLicense: vi.fn().mockResolvedValue({
|
||||
active: true,
|
||||
features: { isMultiOrgEnabled: true },
|
||||
lastChecked: new Date(),
|
||||
isPendingDowngrade: false,
|
||||
fallbackLevel: "live",
|
||||
}),
|
||||
}));
|
||||
const { default: Page } = await import("./page");
|
||||
const result = await Page({ params: { organizationId: "org1" } });
|
||||
expect(notFound).toHaveBeenCalled();
|
||||
expect(result).toBe("NOT_FOUND_STUB");
|
||||
});
|
||||
|
||||
test("renders header and sidebar for authenticated user", async () => {
|
||||
vi.mocked(getOrganizationAuth).mockResolvedValue({
|
||||
session: { user: { id: "user1" } },
|
||||
organization: { id: "org1" },
|
||||
} as any);
|
||||
vi.mocked(getUser).mockResolvedValue({ id: "user1", name: "Test User" } as any);
|
||||
vi.mocked(getOrganizationsByUserId).mockResolvedValue([{ id: "org1", name: "Org One" } as any]);
|
||||
vi.mocked(getTranslate).mockResolvedValue((props: any) =>
|
||||
typeof props === "string" ? props : props.key || ""
|
||||
);
|
||||
await vi.doMock("@/modules/ee/license-check/lib/license", () => ({
|
||||
getEnterpriseLicense: vi.fn().mockResolvedValue({
|
||||
active: true,
|
||||
features: { isMultiOrgEnabled: true },
|
||||
lastChecked: new Date(),
|
||||
isPendingDowngrade: false,
|
||||
fallbackLevel: "live",
|
||||
}),
|
||||
}));
|
||||
const { default: Page } = await import("./page");
|
||||
const element = await Page({ params: { organizationId: "org1" } });
|
||||
render(element as React.ReactElement);
|
||||
expect(screen.getByTestId("landing-sidebar")).toBeInTheDocument();
|
||||
expect(screen.getByText("organizations.landing.no_projects_warning_title")).toBeInTheDocument();
|
||||
expect(screen.getByText("organizations.landing.no_projects_warning_subtitle")).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -1,11 +1,14 @@
import { notFound, redirect } from "next/navigation";
import { LandingSidebar } from "@/app/(app)/(onboarding)/organizations/[organizationId]/landing/components/landing-sidebar";
import { getOrganizationsByUserId } from "@/lib/organization/service";
import { ProjectAndOrgSwitch } from "@/app/(app)/environments/[environmentId]/components/project-and-org-switch";
import { IS_FORMBRICKS_CLOUD } from "@/lib/constants";
import { getMembershipByUserIdOrganizationId } from "@/lib/membership/service";
import { getAccessFlags } from "@/lib/membership/utils";
import { getUser } from "@/lib/user/service";
import { getEnterpriseLicense } from "@/modules/ee/license-check/lib/license";
import { getTranslate } from "@/lingodotdev/server";
import { getIsMultiOrgEnabled } from "@/modules/ee/license-check/lib/utils";
import { getOrganizationAuth } from "@/modules/organization/lib/utils";
import { Header } from "@/modules/ui/components/header";
import { getTranslate } from "@/tolgee/server";
import { notFound, redirect } from "next/navigation";

const Page = async (props) => {
  const params = await props.params;
@@ -20,26 +23,37 @@ const Page = async (props) => {
  const user = await getUser(session.user.id);
  if (!user) return notFound();

  const organizations = await getOrganizationsByUserId(session.user.id);
  const isMultiOrgEnabled = await getIsMultiOrgEnabled();

  const { features } = await getEnterpriseLicense();

  const isMultiOrgEnabled = features?.isMultiOrgEnabled ?? false;
  const membership = await getMembershipByUserIdOrganizationId(session.user.id, organization.id);
  const { isMember } = getAccessFlags(membership?.role);

  return (
    <div className="flex min-h-full min-w-full flex-row">
      <LandingSidebar
        user={user}
        organization={organization}
        isMultiOrgEnabled={isMultiOrgEnabled}
        organizations={organizations}
      />
      <LandingSidebar user={user} organization={organization} />
      <div className="flex-1">
        <div className="flex h-full flex-col items-center justify-center space-y-12">
          <Header
            title={t("organizations.landing.no_projects_warning_title")}
            subtitle={t("organizations.landing.no_projects_warning_subtitle")}
          />
        <div className="flex h-full flex-col">
          <div className="p-6">
            {/* we only need to render organization breadcrumb on this page, organizations/projects are lazy-loaded */}
            <ProjectAndOrgSwitch
              currentOrganizationId={organization.id}
              currentOrganizationName={organization.name}
              isMultiOrgEnabled={isMultiOrgEnabled}
              organizationProjectsLimit={0}
              isFormbricksCloud={IS_FORMBRICKS_CLOUD}
              isLicenseActive={false}
              isOwnerOrManager={false}
              isAccessControlAllowed={false}
              isMember={isMember}
              environments={[]}
            />
          </div>
          <div className="flex h-full flex-col items-center justify-center space-y-12">
            <Header
              title={t("organizations.landing.no_workspaces_warning_title")}
              subtitle={t("organizations.landing.no_workspaces_warning_subtitle")}
            />
          </div>
        </div>
      </div>
    </div>

@@ -1,159 +0,0 @@
|
||||
import { canUserAccessOrganization } from "@/lib/organization/auth";
|
||||
import { getOrganization } from "@/lib/organization/service";
|
||||
import { getUser } from "@/lib/user/service";
|
||||
import "@testing-library/jest-dom/vitest";
|
||||
import { act, cleanup, render, screen } from "@testing-library/react";
|
||||
import { getServerSession } from "next-auth";
|
||||
import { redirect } from "next/navigation";
|
||||
import React from "react";
|
||||
import { beforeEach, describe, expect, test, vi } from "vitest";
|
||||
import { TOrganization } from "@formbricks/types/organizations";
|
||||
import { TUser } from "@formbricks/types/user";
|
||||
import ProjectOnboardingLayout from "./layout";
|
||||
|
||||
// Mock all the modules and functions that this layout uses:
|
||||
|
||||
vi.mock("@/lib/constants", () => ({
|
||||
IS_FORMBRICKS_CLOUD: false,
|
||||
POSTHOG_API_KEY: "mock-posthog-api-key",
|
||||
POSTHOG_HOST: "mock-posthog-host",
|
||||
IS_POSTHOG_CONFIGURED: true,
|
||||
ENCRYPTION_KEY: "mock-encryption-key",
|
||||
ENTERPRISE_LICENSE_KEY: "mock-enterprise-license-key",
|
||||
GITHUB_ID: "mock-github-id",
|
||||
GITHUB_SECRET: "test-githubID",
|
||||
GOOGLE_CLIENT_ID: "test-google-client-id",
|
||||
GOOGLE_CLIENT_SECRET: "test-google-client-secret",
|
||||
AZUREAD_CLIENT_ID: "test-azuread-client-id",
|
||||
AZUREAD_CLIENT_SECRET: "test-azure",
|
||||
AZUREAD_TENANT_ID: "test-azuread-tenant-id",
|
||||
OIDC_DISPLAY_NAME: "test-oidc-display-name",
|
||||
OIDC_CLIENT_ID: "test-oidc-client-id",
|
||||
OIDC_ISSUER: "test-oidc-issuer",
|
||||
OIDC_CLIENT_SECRET: "test-oidc-client-secret",
|
||||
OIDC_SIGNING_ALGORITHM: "test-oidc-signing-algorithm",
|
||||
WEBAPP_URL: "test-webapp-url",
|
||||
IS_PRODUCTION: false,
|
||||
SESSION_MAX_AGE: 1000,
|
||||
REDIS_URL: "test-redis-url",
|
||||
AUDIT_LOG_ENABLED: true,
|
||||
}));
|
||||
|
||||
vi.mock("next-auth", () => ({
|
||||
getServerSession: vi.fn(),
|
||||
}));
|
||||
vi.mock("next/navigation", () => ({
|
||||
redirect: vi.fn(),
|
||||
}));
|
||||
vi.mock("@/lib/organization/auth", () => ({
|
||||
canUserAccessOrganization: vi.fn(),
|
||||
}));
|
||||
vi.mock("@/lib/organization/service", () => ({
|
||||
getOrganization: vi.fn(),
|
||||
}));
|
||||
vi.mock("@/lib/user/service", () => ({
|
||||
getUser: vi.fn(),
|
||||
}));
|
||||
vi.mock("@/tolgee/server", () => ({
|
||||
getTranslate: vi.fn(() => {
|
||||
// Return a mock translator that just returns the key
|
||||
return (key: string) => key;
|
||||
}),
|
||||
}));
|
||||
|
||||
// mock the child components
|
||||
vi.mock("@/app/(app)/environments/[environmentId]/components/PosthogIdentify", () => ({
|
||||
PosthogIdentify: () => <div data-testid="posthog-identify" />,
|
||||
}));
|
||||
vi.mock("@/modules/ui/components/toaster-client", () => ({
|
||||
ToasterClient: () => <div data-testid="toaster-client" />,
|
||||
}));
|
||||
|
||||
describe("ProjectOnboardingLayout", () => {
|
||||
beforeEach(() => {
|
||||
cleanup();
|
||||
});
|
||||
|
||||
test("redirects to /auth/login if there is no session", async () => {
|
||||
// Mock no session
|
||||
vi.mocked(getServerSession).mockResolvedValueOnce(null);
|
||||
|
||||
const layoutElement = await ProjectOnboardingLayout({
|
||||
params: { organizationId: "org-123" },
|
||||
children: <div data-testid="child-content">Hello!</div>,
|
||||
});
|
||||
|
||||
expect(redirect).toHaveBeenCalledWith("/auth/login");
|
||||
// Layout returns nothing after redirect
|
||||
expect(layoutElement).toBeUndefined();
|
||||
});
|
||||
|
||||
test("throws an error if user does not exist", async () => {
|
||||
vi.mocked(getServerSession).mockResolvedValueOnce({
|
||||
user: { id: "user-123" },
|
||||
});
|
||||
vi.mocked(getUser).mockResolvedValueOnce(null); // no user in DB
|
||||
|
||||
await expect(
|
||||
ProjectOnboardingLayout({
|
||||
params: { organizationId: "org-123" },
|
||||
children: <div data-testid="child-content">Hello!</div>,
|
||||
})
|
||||
).rejects.toThrow("common.user_not_found");
|
||||
});
|
||||
|
||||
test("throws AuthorizationError if user cannot access organization", async () => {
|
||||
vi.mocked(getServerSession).mockResolvedValueOnce({ user: { id: "user-123" } });
|
||||
vi.mocked(getUser).mockResolvedValueOnce({ id: "user-123" } as TUser);
|
||||
vi.mocked(canUserAccessOrganization).mockResolvedValueOnce(false);
|
||||
|
||||
await expect(
|
||||
ProjectOnboardingLayout({
|
||||
params: { organizationId: "org-123" },
|
||||
children: <div data-testid="child-content">Child</div>,
|
||||
})
|
||||
).rejects.toThrow("common.not_authorized");
|
||||
});
|
||||
|
||||
test("throws an error if organization does not exist", async () => {
|
||||
vi.mocked(getServerSession).mockResolvedValueOnce({ user: { id: "user-123" } });
|
||||
vi.mocked(getUser).mockResolvedValueOnce({ id: "user-123" } as TUser);
|
||||
vi.mocked(canUserAccessOrganization).mockResolvedValueOnce(true);
|
||||
vi.mocked(getOrganization).mockResolvedValueOnce(null);
|
||||
|
||||
await expect(
|
||||
ProjectOnboardingLayout({
|
||||
params: { organizationId: "org-123" },
|
||||
children: <div data-testid="child-content">Hello!</div>,
|
||||
})
|
||||
).rejects.toThrow("common.organization_not_found");
|
||||
});
|
||||
|
||||
test("renders child content plus PosthogIdentify & ToasterClient if everything is valid", async () => {
|
||||
// Provide valid data
|
||||
vi.mocked(getServerSession).mockResolvedValueOnce({ user: { id: "user-123" } });
|
||||
vi.mocked(getUser).mockResolvedValueOnce({ id: "user-123", name: "Test User" } as TUser);
|
||||
vi.mocked(canUserAccessOrganization).mockResolvedValueOnce(true);
|
||||
vi.mocked(getOrganization).mockResolvedValueOnce({
|
||||
id: "org-123",
|
||||
name: "Test Org",
|
||||
billing: {
|
||||
plan: "enterprise",
|
||||
},
|
||||
} as TOrganization);
|
||||
|
||||
let layoutElement: React.ReactNode;
|
||||
// Because it's an async server component, do it in an act
|
||||
await act(async () => {
|
||||
layoutElement = await ProjectOnboardingLayout({
|
||||
params: { organizationId: "org-123" },
|
||||
children: <div data-testid="child-content">Hello!</div>,
|
||||
});
|
||||
render(layoutElement);
|
||||
});
|
||||
|
||||
expect(screen.getByTestId("child-content")).toHaveTextContent("Hello!");
|
||||
expect(screen.getByTestId("posthog-identify")).toBeInTheDocument();
|
||||
expect(screen.getByTestId("toaster-client")).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -1,14 +1,12 @@
import { PosthogIdentify } from "@/app/(app)/environments/[environmentId]/components/PosthogIdentify";
import { IS_POSTHOG_CONFIGURED } from "@/lib/constants";
import { canUserAccessOrganization } from "@/lib/organization/auth";
import { getOrganization } from "@/lib/organization/service";
import { getUser } from "@/lib/user/service";
import { authOptions } from "@/modules/auth/lib/authOptions";
import { ToasterClient } from "@/modules/ui/components/toaster-client";
import { getTranslate } from "@/tolgee/server";
import { getServerSession } from "next-auth";
import { redirect } from "next/navigation";
import { AuthorizationError } from "@formbricks/types/errors";
import { canUserAccessOrganization } from "@/lib/organization/auth";
import { getOrganization } from "@/lib/organization/service";
import { getUser } from "@/lib/user/service";
import { getTranslate } from "@/lingodotdev/server";
import { authOptions } from "@/modules/auth/lib/authOptions";
import { ToasterClient } from "@/modules/ui/components/toaster-client";

const ProjectOnboardingLayout = async (props) => {
  const params = await props.params;
@@ -40,14 +38,6 @@ const ProjectOnboardingLayout = async (props) => {

  return (
    <div className="flex-1 bg-slate-50">
      <PosthogIdentify
        session={session}
        user={user}
        organizationId={organization.id}
        organizationName={organization.name}
        organizationBilling={organization.billing}
        isPosthogEnabled={IS_POSTHOG_CONFIGURED}
      />
      <ToasterClient />
      {children}
    </div>

@@ -1,88 +0,0 @@
import { getUserProjects } from "@/lib/project/service";
import { getOrganizationAuth } from "@/modules/organization/lib/utils";
import { getTranslate } from "@/tolgee/server";
import "@testing-library/jest-dom/vitest";
import { cleanup, render, screen } from "@testing-library/react";
import { redirect } from "next/navigation";
import { afterEach, describe, expect, test, vi } from "vitest";
import Page from "./page";

const mockTranslate = vi.fn((key) => key);

// Module mocks must be declared before importing the component
vi.mock("@/lib/project/service", () => ({ getUserProjects: vi.fn() }));
vi.mock("@/modules/organization/lib/utils", () => ({ getOrganizationAuth: vi.fn() }));
vi.mock("@/tolgee/server", () => ({ getTranslate: vi.fn() }));
vi.mock("next/navigation", () => ({ redirect: vi.fn(() => "REDIRECT_STUB") }));
vi.mock("@/modules/ui/components/header", () => ({
  Header: ({ title, subtitle }: { title: string; subtitle: string }) => (
    <div>
      <h1>{title}</h1>
      <p>{subtitle}</p>
    </div>
  ),
}));
vi.mock("@/app/(app)/(onboarding)/organizations/components/OnboardingOptionsContainer", () => ({
  OnboardingOptionsContainer: ({ options }: { options: any[] }) => (
    <div data-testid="options">{options.map((o) => o.title).join(",")}</div>
  ),
}));
vi.mock("next/link", () => ({
  default: ({ href, children }: { href: string; children: React.ReactNode }) => <a href={href}>{children}</a>,
}));

describe("Page component", () => {
  afterEach(() => {
    cleanup();
    vi.clearAllMocks();
  });

  const params = Promise.resolve({ organizationId: "org1" });

  test("redirects to login if no user session", async () => {
    vi.mocked(getOrganizationAuth).mockResolvedValue({ session: {} } as any);

    const result = await Page({ params });

    expect(redirect).toHaveBeenCalledWith("/auth/login");
    expect(result).toBe("REDIRECT_STUB");
  });

  test("renders header, options, and close button when projects exist", async () => {
    vi.mocked(getOrganizationAuth).mockResolvedValue({ session: { user: { id: "user1" } } } as any);
    vi.mocked(getTranslate).mockResolvedValue(mockTranslate);
    vi.mocked(getUserProjects).mockResolvedValue([{ id: 1 }] as any);

    const element = await Page({ params });
    render(element as React.ReactElement);

    // Header title and subtitle
    expect(screen.getByRole("heading", { level: 1 })).toHaveTextContent(
      "organizations.projects.new.channel.channel_select_title"
    );
    expect(
      screen.getByText("organizations.projects.new.channel.channel_select_subtitle")
    ).toBeInTheDocument();

    // Options container with correct titles
    expect(screen.getByTestId("options")).toHaveTextContent(
      "organizations.projects.new.channel.link_and_email_surveys," +
        "organizations.projects.new.channel.in_product_surveys"
    );

    // Close button link rendered when projects >=1
    const closeLink = screen.getByRole("link");
    expect(closeLink).toHaveAttribute("href", "/");
  });

  test("does not render close button when no projects", async () => {
    vi.mocked(getOrganizationAuth).mockResolvedValue({ session: { user: { id: "user1" } } } as any);
    vi.mocked(getTranslate).mockResolvedValue(mockTranslate);
    vi.mocked(getUserProjects).mockResolvedValue([]);

    const element = await Page({ params });
    render(element as React.ReactElement);

    expect(screen.queryByRole("link")).toBeNull();
  });
});
@@ -1,223 +0,0 @@
|
||||
import { getMembershipByUserIdOrganizationId } from "@/lib/membership/service";
|
||||
import { getOrganization } from "@/lib/organization/service";
|
||||
import { getOrganizationProjectsCount } from "@/lib/project/service";
|
||||
import { getOrganizationProjectsLimit } from "@/modules/ee/license-check/lib/utils";
|
||||
import "@testing-library/jest-dom/vitest";
|
||||
import { cleanup } from "@testing-library/react";
|
||||
import { getServerSession } from "next-auth";
|
||||
import { notFound, redirect } from "next/navigation";
|
||||
import { afterEach, describe, expect, test, vi } from "vitest";
|
||||
import { TMembership } from "@formbricks/types/memberships";
|
||||
import { TOrganization } from "@formbricks/types/organizations";
|
||||
import OnboardingLayout from "./layout";
|
||||
|
||||
// Mock environment variables
|
||||
vi.mock("@/lib/constants", () => ({
|
||||
IS_FORMBRICKS_CLOUD: false,
|
||||
POSTHOG_API_KEY: "mock-posthog-api-key",
|
||||
POSTHOG_HOST: "mock-posthog-host",
|
||||
IS_POSTHOG_CONFIGURED: true,
|
||||
ENCRYPTION_KEY: "mock-encryption-key",
|
||||
ENTERPRISE_LICENSE_KEY: "mock-enterprise-license-key",
|
||||
GITHUB_ID: "mock-github-id",
|
||||
GITHUB_SECRET: "test-githubID",
|
||||
GOOGLE_CLIENT_ID: "test-google-client-id",
|
||||
GOOGLE_CLIENT_SECRET: "test-google-client-secret",
|
||||
AZUREAD_CLIENT_ID: "test-azuread-client-id",
|
||||
AZUREAD_CLIENT_SECRET: "test-azure",
|
||||
AZUREAD_TENANT_ID: "test-azuread-tenant-id",
|
||||
OIDC_DISPLAY_NAME: "test-oidc-display-name",
|
||||
OIDC_CLIENT_ID: "test-oidc-client-id",
|
||||
OIDC_ISSUER: "test-oidc-issuer",
|
||||
OIDC_CLIENT_SECRET: "test-oidc-client-secret",
|
||||
OIDC_SIGNING_ALGORITHM: "test-oidc-signing-algorithm",
|
||||
WEBAPP_URL: "test-webapp-url",
|
||||
IS_PRODUCTION: false,
|
||||
SESSION_MAX_AGE: 1000,
|
||||
REDIS_URL: "test-redis-url",
|
||||
AUDIT_LOG_ENABLED: true,
|
||||
}));
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock("next-auth", () => ({
|
||||
getServerSession: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("@/lib/membership/service", () => ({
|
||||
getMembershipByUserIdOrganizationId: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("@/lib/organization/service", () => ({
|
||||
getOrganization: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("@/lib/project/service", () => ({
|
||||
getOrganizationProjectsCount: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("@/modules/ee/license-check/lib/utils", () => ({
|
||||
getOrganizationProjectsLimit: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("@/tolgee/server", () => ({
|
||||
getTranslate: async () => (key: string) => key,
|
||||
}));
|
||||
|
||||
describe("OnboardingLayout", () => {
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
test("redirects to login if no session", async () => {
|
||||
vi.mocked(getServerSession).mockResolvedValue(null);
|
||||
|
||||
const props = {
|
||||
params: { organizationId: "test-org-id" },
|
||||
children: <div>Test Child</div>,
|
||||
};
|
||||
|
||||
await OnboardingLayout(props);
|
||||
expect(redirect).toHaveBeenCalledWith("/auth/login");
|
||||
});
|
||||
|
||||
test("returns not found if user is member or billing", async () => {
|
||||
const mockSession = {
|
||||
user: { id: "test-user-id" },
|
||||
};
|
||||
vi.mocked(getServerSession).mockResolvedValue(mockSession as any);
|
||||
|
||||
const mockMembership: TMembership = {
|
||||
organizationId: "test-org-id",
|
||||
userId: "test-user-id",
|
||||
accepted: true,
|
||||
role: "member",
|
||||
};
|
||||
vi.mocked(getMembershipByUserIdOrganizationId).mockResolvedValue(mockMembership);
|
||||
|
||||
const props = {
|
||||
params: { organizationId: "test-org-id" },
|
||||
children: <div>Test Child</div>,
|
||||
};
|
||||
|
||||
await OnboardingLayout(props);
|
||||
expect(notFound).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test("throws error if organization is not found", async () => {
|
||||
const mockSession = {
|
||||
user: { id: "test-user-id" },
|
||||
};
|
||||
vi.mocked(getServerSession).mockResolvedValue(mockSession as any);
|
||||
|
||||
const mockMembership: TMembership = {
|
||||
organizationId: "test-org-id",
|
||||
userId: "test-user-id",
|
||||
accepted: true,
|
||||
role: "owner",
|
||||
};
|
||||
vi.mocked(getMembershipByUserIdOrganizationId).mockResolvedValue(mockMembership);
|
||||
vi.mocked(getOrganization).mockResolvedValue(null);
|
||||
|
||||
const props = {
|
||||
params: { organizationId: "test-org-id" },
|
||||
children: <div>Test Child</div>,
|
||||
};
|
||||
|
||||
await expect(OnboardingLayout(props)).rejects.toThrow("common.organization_not_found");
|
||||
});
|
||||
|
||||
test("redirects to home if project limit is reached", async () => {
|
||||
const mockSession = {
|
||||
user: { id: "test-user-id" },
|
||||
};
|
||||
vi.mocked(getServerSession).mockResolvedValue(mockSession as any);
|
||||
|
||||
const mockMembership: TMembership = {
|
||||
organizationId: "test-org-id",
|
||||
userId: "test-user-id",
|
||||
accepted: true,
|
||||
role: "owner",
|
||||
};
|
||||
vi.mocked(getMembershipByUserIdOrganizationId).mockResolvedValue(mockMembership);
|
||||
|
||||
const mockOrganization: TOrganization = {
|
||||
id: "test-org-id",
|
||||
name: "Test Org",
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
isAIEnabled: false,
|
||||
billing: {
|
||||
stripeCustomerId: null,
|
||||
plan: "free",
|
||||
period: "monthly",
|
||||
limits: {
|
||||
projects: 3,
|
||||
monthly: {
|
||||
responses: 1500,
|
||||
miu: 2000,
|
||||
},
|
||||
},
|
||||
periodStart: new Date(),
|
||||
},
|
||||
};
|
||||
vi.mocked(getOrganization).mockResolvedValue(mockOrganization);
|
||||
vi.mocked(getOrganizationProjectsLimit).mockResolvedValue(3);
|
||||
vi.mocked(getOrganizationProjectsCount).mockResolvedValue(3);
|
||||
|
||||
const props = {
|
||||
params: { organizationId: "test-org-id" },
|
||||
children: <div>Test Child</div>,
|
||||
};
|
||||
|
||||
await OnboardingLayout(props);
|
||||
expect(redirect).toHaveBeenCalledWith("/");
|
||||
});
|
||||
|
||||
test("renders children when all conditions are met", async () => {
|
||||
const mockSession = {
|
||||
user: { id: "test-user-id" },
|
||||
};
|
||||
vi.mocked(getServerSession).mockResolvedValue(mockSession as any);
|
||||
|
||||
const mockMembership: TMembership = {
|
||||
organizationId: "test-org-id",
|
||||
userId: "test-user-id",
|
||||
accepted: true,
|
||||
role: "owner",
|
||||
};
|
||||
vi.mocked(getMembershipByUserIdOrganizationId).mockResolvedValue(mockMembership);
|
||||
|
||||
const mockOrganization: TOrganization = {
|
||||
id: "test-org-id",
|
||||
name: "Test Org",
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
isAIEnabled: false,
|
||||
billing: {
|
||||
stripeCustomerId: null,
|
||||
plan: "free",
|
||||
period: "monthly",
|
||||
limits: {
|
||||
projects: 3,
|
||||
monthly: {
|
||||
responses: 1500,
|
||||
miu: 2000,
|
||||
},
|
||||
},
|
||||
periodStart: new Date(),
|
||||
},
|
||||
};
|
||||
vi.mocked(getOrganization).mockResolvedValue(mockOrganization);
|
||||
vi.mocked(getOrganizationProjectsLimit).mockResolvedValue(3);
|
||||
vi.mocked(getOrganizationProjectsCount).mockResolvedValue(2);
|
||||
|
||||
const props = {
|
||||
params: { organizationId: "test-org-id" },
|
||||
children: <div>Test Child</div>,
|
||||
};
|
||||
|
||||
const result = await OnboardingLayout(props);
|
||||
expect(result).toEqual(<>{props.children}</>);
|
||||
});
|
||||
});
|
||||
@@ -1,72 +0,0 @@
import { getUserProjects } from "@/lib/project/service";
import { getOrganizationAuth } from "@/modules/organization/lib/utils";
import { getTranslate } from "@/tolgee/server";
import "@testing-library/jest-dom/vitest";
import { cleanup, render, screen } from "@testing-library/react";
import { redirect } from "next/navigation";
import { afterEach, describe, expect, test, vi } from "vitest";
import Page from "./page";

const mockTranslate = vi.fn((key) => key);

vi.mock("@/modules/organization/lib/utils", () => ({ getOrganizationAuth: vi.fn() }));
vi.mock("@/lib/project/service", () => ({ getUserProjects: vi.fn() }));
vi.mock("@/tolgee/server", () => ({ getTranslate: vi.fn() }));
vi.mock("next/navigation", () => ({ redirect: vi.fn() }));
vi.mock("next/link", () => ({
  __esModule: true,
  default: ({ href, children }: any) => <a href={href}>{children}</a>,
}));
vi.mock("@/app/(app)/(onboarding)/organizations/components/OnboardingOptionsContainer", () => ({
  OnboardingOptionsContainer: ({ options }: any) => (
    <div data-testid="options">{options.map((o: any) => o.title).join(",")}</div>
  ),
}));
vi.mock("@/modules/ui/components/header", () => ({ Header: ({ title }: any) => <h1>{title}</h1> }));
vi.mock("@/modules/ui/components/button", () => ({
  Button: ({ children, ...props }: any) => <button {...props}>{children}</button>,
}));

describe("Mode Page", () => {
  afterEach(() => {
    cleanup();
    vi.clearAllMocks();
  });

  const params = Promise.resolve({ organizationId: "org1" });

  test("redirects to login if no session user", async () => {
    vi.mocked(getOrganizationAuth).mockResolvedValueOnce({ session: {} } as any);
    await Page({ params });
    expect(redirect).toHaveBeenCalledWith("/auth/login");
  });

  test("renders header and options without close link when no projects", async () => {
    vi.mocked(getOrganizationAuth).mockResolvedValueOnce({ session: { user: { id: "u1" } } } as any);
    vi.mocked(getTranslate).mockResolvedValue(mockTranslate);
    vi.mocked(getUserProjects).mockResolvedValueOnce([] as any);

    const element = await Page({ params });
    render(element as React.ReactElement);

    expect(screen.getByRole("heading", { level: 1 })).toHaveTextContent(
      "organizations.projects.new.mode.what_are_you_here_for"
    );
    expect(screen.getByTestId("options")).toHaveTextContent(
      "organizations.projects.new.mode.formbricks_surveys," + "organizations.projects.new.mode.formbricks_cx"
    );
    expect(screen.queryByRole("link")).toBeNull();
  });

  test("renders close link when projects exist", async () => {
    vi.mocked(getOrganizationAuth).mockResolvedValueOnce({ session: { user: { id: "u1" } } } as any);
    vi.mocked(getTranslate).mockResolvedValue(mockTranslate);
    vi.mocked(getUserProjects).mockResolvedValueOnce([{ id: "p1" } as any]);

    const element = await Page({ params });
    render(element as React.ReactElement);

    const link = screen.getByRole("link");
    expect(link).toHaveAttribute("href", "/");
  });
});
@@ -1,124 +0,0 @@
|
||||
import { createProjectAction } from "@/app/(app)/environments/[environmentId]/actions";
|
||||
import "@testing-library/jest-dom/vitest";
|
||||
import { cleanup, render, screen } from "@testing-library/react";
|
||||
import userEvent from "@testing-library/user-event";
|
||||
import { toast } from "react-hot-toast";
|
||||
import { afterEach, describe, expect, test, vi } from "vitest";
|
||||
import { ProjectSettings } from "./ProjectSettings";
|
||||
|
||||
// Mocks before imports
|
||||
const pushMock = vi.fn();
|
||||
vi.mock("next/navigation", () => ({ useRouter: () => ({ push: pushMock }) }));
|
||||
vi.mock("@tolgee/react", () => ({ useTranslate: () => ({ t: (key: string) => key }) }));
|
||||
vi.mock("react-hot-toast", () => ({ toast: { error: vi.fn() } }));
|
||||
vi.mock("@/app/(app)/environments/[environmentId]/actions", () => ({ createProjectAction: vi.fn() }));
|
||||
vi.mock("@/lib/utils/helper", () => ({ getFormattedErrorMessage: () => "formatted-error" }));
|
||||
vi.mock("@/modules/ui/components/color-picker", () => ({
|
||||
ColorPicker: ({ color, onChange }: any) => (
|
||||
<button data-testid="color-picker" onClick={() => onChange("#000")}>
|
||||
{color}
|
||||
</button>
|
||||
),
|
||||
}));
|
||||
vi.mock("@/modules/ui/components/input", () => ({
|
||||
Input: ({ value, onChange, placeholder }: any) => (
|
||||
<input placeholder={placeholder} value={value} onChange={(e) => onChange((e.target as any).value)} />
|
||||
),
|
||||
}));
|
||||
vi.mock("@/modules/ui/components/multi-select", () => ({
|
||||
MultiSelect: ({ value, options, onChange }: any) => (
|
||||
<select
|
||||
data-testid="multi-select"
|
||||
multiple
|
||||
value={value}
|
||||
onChange={(e) => onChange(Array.from((e.target as any).selectedOptions).map((o: any) => o.value))}>
|
||||
{options.map((o: any) => (
|
||||
<option key={o.value} value={o.value}>
|
||||
{o.label}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
),
|
||||
}));
|
||||
vi.mock("@/modules/ui/components/survey", () => ({
|
||||
SurveyInline: () => <div data-testid="survey-inline" />,
|
||||
}));
|
||||
vi.mock("@/lib/templates", () => ({ previewSurvey: () => ({}) }));
|
||||
vi.mock("@/modules/ee/teams/team-list/components/create-team-modal", () => ({
|
||||
CreateTeamModal: ({ open }: any) => <div data-testid={open ? "team-modal-open" : "team-modal-closed"} />,
|
||||
}));
|
||||
|
||||
// Clean up after each test
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
vi.clearAllMocks();
|
||||
localStorage.clear();
|
||||
});
|
||||
|
||||
describe("ProjectSettings component", () => {
|
||||
const baseProps = {
|
||||
organizationId: "org1",
|
||||
projectMode: "cx",
|
||||
industry: "ind",
|
||||
defaultBrandColor: "#fff",
|
||||
organizationTeams: [],
|
||||
canDoRoleManagement: false,
|
||||
userProjectsCount: 0,
|
||||
} as any;
|
||||
|
||||
const fillAndSubmit = async () => {
|
||||
const nameInput = screen.getByPlaceholderText("e.g. Formbricks");
|
||||
await userEvent.clear(nameInput);
|
||||
await userEvent.type(nameInput, "TestProject");
|
||||
const nextButton = screen.getByRole("button", { name: "common.next" });
|
||||
await userEvent.click(nextButton);
|
||||
};
|
||||
|
||||
test("successful createProject for link channel navigates to surveys and clears localStorage", async () => {
|
||||
(createProjectAction as any).mockResolvedValue({
|
||||
data: { environments: [{ id: "env123", type: "production" }] },
|
||||
});
|
||||
render(<ProjectSettings {...baseProps} channel="link" projectMode="cx" />);
|
||||
await fillAndSubmit();
|
||||
expect(createProjectAction).toHaveBeenCalledWith({
|
||||
organizationId: "org1",
|
||||
data: expect.objectContaining({ teamIds: [] }),
|
||||
});
|
||||
expect(pushMock).toHaveBeenCalledWith("/environments/env123/surveys");
|
||||
expect(localStorage.getItem("FORMBRICKS_SURVEYS_FILTERS_KEY_LS")).toBeNull();
|
||||
});
|
||||
|
||||
test("successful createProject for app channel navigates to connect", async () => {
|
||||
(createProjectAction as any).mockResolvedValue({
|
||||
data: { environments: [{ id: "env456", type: "production" }] },
|
||||
});
|
||||
render(<ProjectSettings {...baseProps} channel="app" projectMode="cx" />);
|
||||
await fillAndSubmit();
|
||||
expect(pushMock).toHaveBeenCalledWith("/environments/env456/connect");
|
||||
});
|
||||
|
||||
test("successful createProject for cx mode navigates to xm-templates when channel is neither link nor app", async () => {
|
||||
(createProjectAction as any).mockResolvedValue({
|
||||
data: { environments: [{ id: "env789", type: "production" }] },
|
||||
});
|
||||
render(<ProjectSettings {...baseProps} channel="unknown" projectMode="cx" />);
|
||||
await fillAndSubmit();
|
||||
expect(pushMock).toHaveBeenCalledWith("/environments/env789/xm-templates");
|
||||
});
|
||||
|
||||
test("shows error toast on createProject error response", async () => {
|
||||
(createProjectAction as any).mockResolvedValue({ error: "err" });
|
||||
render(<ProjectSettings {...baseProps} channel="link" projectMode="cx" />);
|
||||
await fillAndSubmit();
|
||||
expect(toast.error).toHaveBeenCalledWith("formatted-error");
|
||||
});
|
||||
|
||||
test("shows error toast on exception", async () => {
|
||||
(createProjectAction as any).mockImplementation(() => {
|
||||
throw new Error("fail");
|
||||
});
|
||||
render(<ProjectSettings {...baseProps} channel="link" projectMode="cx" />);
|
||||
await fillAndSubmit();
|
||||
expect(toast.error).toHaveBeenCalledWith("organizations.projects.new.settings.project_creation_failed");
|
||||
});
|
||||
});
|
||||
@@ -1,106 +0,0 @@
import { getTeamsByOrganizationId } from "@/app/(app)/(onboarding)/lib/onboarding";
import { getUserProjects } from "@/lib/project/service";
import { getRoleManagementPermission } from "@/modules/ee/license-check/lib/utils";
import { getOrganizationAuth } from "@/modules/organization/lib/utils";
import "@testing-library/jest-dom/vitest";
import { cleanup, render, screen } from "@testing-library/react";
import { redirect } from "next/navigation";
import { afterEach, describe, expect, test, vi } from "vitest";
import Page from "./page";

vi.mock("@/lib/constants", () => ({ DEFAULT_BRAND_COLOR: "#fff" }));
// Mocks before component import
vi.mock("@/app/(app)/(onboarding)/lib/onboarding", () => ({ getTeamsByOrganizationId: vi.fn() }));
vi.mock("@/lib/project/service", () => ({ getUserProjects: vi.fn() }));
vi.mock("@/modules/ee/license-check/lib/utils", () => ({ getRoleManagementPermission: vi.fn() }));
vi.mock("@/modules/organization/lib/utils", () => ({ getOrganizationAuth: vi.fn() }));
vi.mock("@/tolgee/server", () => ({ getTranslate: () => Promise.resolve((key: string) => key) }));
vi.mock("next/navigation", () => ({ redirect: vi.fn() }));
vi.mock("next/link", () => ({
__esModule: true,
default: ({ href, children }: any) => <a href={href}>{children}</a>,
}));
vi.mock("@/modules/ui/components/header", () => ({
Header: ({ title, subtitle }: any) => (
<div>
<h1>{title}</h1>
<p>{subtitle}</p>
</div>
),
}));
vi.mock("@/modules/ui/components/button", () => ({
Button: ({ children, ...props }: any) => <button {...props}>{children}</button>,
}));
vi.mock(
"@/app/(app)/(onboarding)/organizations/[organizationId]/projects/new/settings/components/ProjectSettings",
() => ({
ProjectSettings: (props: any) => <div data-testid="project-settings" data-mode={props.projectMode} />,
})
);

// Cleanup after each test
afterEach(() => {
cleanup();
vi.clearAllMocks();
});

describe("ProjectSettingsPage", () => {
const params = Promise.resolve({ organizationId: "org1" });
const searchParams = Promise.resolve({ channel: "link", industry: "other", mode: "cx" } as any);

test("redirects to login when no session user", async () => {
vi.mocked(getOrganizationAuth).mockResolvedValueOnce({ session: {} } as any);
await Page({ params, searchParams });
expect(redirect).toHaveBeenCalledWith("/auth/login");
});

test("throws when teams not found", async () => {
vi.mocked(getOrganizationAuth).mockResolvedValueOnce({
session: { user: { id: "u1" } },
organization: { billing: { plan: "basic" } },
} as any);
vi.mocked(getUserProjects).mockResolvedValueOnce([] as any);
vi.mocked(getTeamsByOrganizationId).mockResolvedValueOnce(null as any);
vi.mocked(getRoleManagementPermission).mockResolvedValueOnce(false as any);

await expect(Page({ params, searchParams })).rejects.toThrow("common.organization_teams_not_found");
});

test("renders header, settings and close link when projects exist", async () => {
vi.mocked(getOrganizationAuth).mockResolvedValueOnce({
session: { user: { id: "u1" } },
organization: { billing: { plan: "basic" } },
} as any);
vi.mocked(getUserProjects).mockResolvedValueOnce([{ id: "p1" }] as any);
vi.mocked(getTeamsByOrganizationId).mockResolvedValueOnce([{ id: "t1", name: "Team1" }] as any);
vi.mocked(getRoleManagementPermission).mockResolvedValueOnce(true as any);

const element = await Page({ params, searchParams });
render(element as React.ReactElement);

// Header
expect(screen.getByRole("heading", { level: 1 })).toHaveTextContent(
"organizations.projects.new.settings.project_settings_title"
);
// ProjectSettings stub receives mode prop
expect(screen.getByTestId("project-settings")).toHaveAttribute("data-mode", "cx");
// Close link for existing projects
const link = screen.getByRole("link");
expect(link).toHaveAttribute("href", "/");
});

test("renders without close link when no projects", async () => {
vi.mocked(getOrganizationAuth).mockResolvedValueOnce({
session: { user: { id: "u1" } },
organization: { billing: { plan: "basic" } },
} as any);
vi.mocked(getUserProjects).mockResolvedValueOnce([] as any);
vi.mocked(getTeamsByOrganizationId).mockResolvedValueOnce([{ id: "t1", name: "Team1" }] as any);
vi.mocked(getRoleManagementPermission).mockResolvedValueOnce(true as any);

const element = await Page({ params, searchParams });
render(element as React.ReactElement);

expect(screen.queryByRole("link")).toBeNull();
});
});
@@ -1,12 +1,12 @@
import { OnboardingOptionsContainer } from "@/app/(app)/(onboarding)/organizations/components/OnboardingOptionsContainer";
import { getUserProjects } from "@/lib/project/service";
import { getOrganizationAuth } from "@/modules/organization/lib/utils";
import { Button } from "@/modules/ui/components/button";
import { Header } from "@/modules/ui/components/header";
import { getTranslate } from "@/tolgee/server";
import { PictureInPicture2Icon, SendIcon, XIcon } from "lucide-react";
import Link from "next/link";
import { redirect } from "next/navigation";
import { OnboardingOptionsContainer } from "@/app/(app)/(onboarding)/organizations/components/OnboardingOptionsContainer";
import { getUserProjects } from "@/lib/project/service";
import { getTranslate } from "@/lingodotdev/server";
import { getOrganizationAuth } from "@/modules/organization/lib/utils";
import { Button } from "@/modules/ui/components/button";
import { Header } from "@/modules/ui/components/header";

interface ChannelPageProps {
params: Promise<{
@@ -26,16 +26,16 @@ const Page = async (props: ChannelPageProps) => {
const t = await getTranslate();
const channelOptions = [
{
title: t("organizations.projects.new.channel.link_and_email_surveys"),
description: t("organizations.projects.new.channel.link_and_email_surveys_description"),
title: t("organizations.workspaces.new.channel.link_and_email_surveys"),
description: t("organizations.workspaces.new.channel.link_and_email_surveys_description"),
icon: SendIcon,
href: `/organizations/${params.organizationId}/projects/new/settings?channel=link`,
href: `/organizations/${params.organizationId}/workspaces/new/settings?channel=link`,
},
{
title: t("organizations.projects.new.channel.in_product_surveys"),
description: t("organizations.projects.new.channel.in_product_surveys_description"),
title: t("organizations.workspaces.new.channel.in_product_surveys"),
description: t("organizations.workspaces.new.channel.in_product_surveys_description"),
icon: PictureInPicture2Icon,
href: `/organizations/${params.organizationId}/projects/new/settings?channel=app`,
href: `/organizations/${params.organizationId}/workspaces/new/settings?channel=app`,
},
];

@@ -44,13 +44,13 @@ const Page = async (props: ChannelPageProps) => {
return (
<div className="flex min-h-full min-w-full flex-col items-center justify-center space-y-12">
<Header
title={t("organizations.projects.new.channel.channel_select_title")}
subtitle={t("organizations.projects.new.channel.channel_select_subtitle")}
title={t("organizations.workspaces.new.channel.channel_select_title")}
subtitle={t("organizations.workspaces.new.channel.channel_select_subtitle")}
/>
<OnboardingOptionsContainer options={channelOptions} />
{projects.length >= 1 && (
<Button
className="absolute right-5 top-5 !mt-0 text-slate-500 hover:text-slate-700"
className="absolute top-5 right-5 !mt-0 text-slate-500 hover:text-slate-700"
variant="ghost"
asChild>
<Link href={"/"}>
@@ -1,12 +1,12 @@
import { getServerSession } from "next-auth";
import { notFound, redirect } from "next/navigation";
import { getMembershipByUserIdOrganizationId } from "@/lib/membership/service";
import { getAccessFlags } from "@/lib/membership/utils";
import { getOrganization } from "@/lib/organization/service";
import { getOrganizationProjectsCount } from "@/lib/project/service";
import { getTranslate } from "@/lingodotdev/server";
import { authOptions } from "@/modules/auth/lib/authOptions";
import { getOrganizationProjectsLimit } from "@/modules/ee/license-check/lib/utils";
import { getTranslate } from "@/tolgee/server";
import { getServerSession } from "next-auth";
import { notFound, redirect } from "next/navigation";

const OnboardingLayout = async (props) => {
const params = await props.params;
@@ -15,7 +15,7 @@ const OnboardingLayout = async (props) => {
const t = await getTranslate();

const session = await getServerSession(authOptions);
if (!session || !session.user) {
if (!session?.user) {
return redirect(`/auth/login`);
}

@@ -1,12 +1,12 @@
import { OnboardingOptionsContainer } from "@/app/(app)/(onboarding)/organizations/components/OnboardingOptionsContainer";
import { getUserProjects } from "@/lib/project/service";
import { getOrganizationAuth } from "@/modules/organization/lib/utils";
import { Button } from "@/modules/ui/components/button";
import { Header } from "@/modules/ui/components/header";
import { getTranslate } from "@/tolgee/server";
import { HeartIcon, ListTodoIcon, XIcon } from "lucide-react";
import Link from "next/link";
import { redirect } from "next/navigation";
import { OnboardingOptionsContainer } from "@/app/(app)/(onboarding)/organizations/components/OnboardingOptionsContainer";
import { getUserProjects } from "@/lib/project/service";
import { getTranslate } from "@/lingodotdev/server";
import { getOrganizationAuth } from "@/modules/organization/lib/utils";
import { Button } from "@/modules/ui/components/button";
import { Header } from "@/modules/ui/components/header";

interface ModePageProps {
params: Promise<{
@@ -26,16 +26,16 @@ const Page = async (props: ModePageProps) => {
const t = await getTranslate();
const channelOptions = [
{
title: t("organizations.projects.new.mode.formbricks_surveys"),
description: t("organizations.projects.new.mode.formbricks_surveys_description"),
title: t("organizations.workspaces.new.mode.formbricks_surveys"),
description: t("organizations.workspaces.new.mode.formbricks_surveys_description"),
icon: ListTodoIcon,
href: `/organizations/${params.organizationId}/projects/new/channel`,
href: `/organizations/${params.organizationId}/workspaces/new/channel`,
},
{
title: t("organizations.projects.new.mode.formbricks_cx"),
description: t("organizations.projects.new.mode.formbricks_cx_description"),
title: t("organizations.workspaces.new.mode.formbricks_cx"),
description: t("organizations.workspaces.new.mode.formbricks_cx_description"),
icon: HeartIcon,
href: `/organizations/${params.organizationId}/projects/new/settings?mode=cx`,
href: `/organizations/${params.organizationId}/workspaces/new/settings?mode=cx`,
},
];

@@ -43,11 +43,11 @@ const Page = async (props: ModePageProps) => {

return (
<div className="flex min-h-full min-w-full flex-col items-center justify-center space-y-12">
<Header title={t("organizations.projects.new.mode.what_are_you_here_for")} />
<Header title={t("organizations.workspaces.new.mode.what_are_you_here_for")} />
<OnboardingOptionsContainer options={channelOptions} />
{projects.length >= 1 && (
<Button
className="absolute right-5 top-5 !mt-0 text-slate-500 hover:text-slate-700"
className="absolute top-5 right-5 !mt-0 text-slate-500 hover:text-slate-700"
variant="ghost"
asChild>
<Link href={"/"}>
@@ -1,5 +1,19 @@
"use client";

import { zodResolver } from "@hookform/resolvers/zod";
import Image from "next/image";
import { useRouter } from "next/navigation";
import { useState } from "react";
import { useForm } from "react-hook-form";
import { toast } from "react-hot-toast";
import { useTranslation } from "react-i18next";
import {
TProjectConfigChannel,
TProjectConfigIndustry,
TProjectMode,
TProjectUpdateInput,
ZProjectUpdateInput,
} from "@formbricks/types/project";
import { createProjectAction } from "@/app/(app)/environments/[environmentId]/actions";
import { previewSurvey } from "@/app/lib/templates";
import { FORMBRICKS_SURVEYS_FILTERS_KEY_LS } from "@/lib/localStorage";
@@ -20,20 +34,6 @@ import {
import { Input } from "@/modules/ui/components/input";
import { MultiSelect } from "@/modules/ui/components/multi-select";
import { SurveyInline } from "@/modules/ui/components/survey";
import { zodResolver } from "@hookform/resolvers/zod";
import { useTranslate } from "@tolgee/react";
import Image from "next/image";
import { useRouter } from "next/navigation";
import { useState } from "react";
import { useForm } from "react-hook-form";
import { toast } from "react-hot-toast";
import {
TProjectConfigChannel,
TProjectConfigIndustry,
TProjectMode,
TProjectUpdateInput,
ZProjectUpdateInput,
} from "@formbricks/types/project";

interface ProjectSettingsProps {
organizationId: string;
@@ -42,8 +42,9 @@ interface ProjectSettingsProps {
industry: TProjectConfigIndustry;
defaultBrandColor: string;
organizationTeams: TOrganizationTeam[];
canDoRoleManagement: boolean;
isAccessControlAllowed: boolean;
userProjectsCount: number;
publicDomain: string;
}

export const ProjectSettings = ({
@@ -53,13 +54,14 @@ export const ProjectSettings = ({
industry,
defaultBrandColor,
organizationTeams,
canDoRoleManagement = false,
isAccessControlAllowed = false,
userProjectsCount,
publicDomain,
}: ProjectSettingsProps) => {
const [createTeamModalOpen, setCreateTeamModalOpen] = useState(false);

const router = useRouter();
const { t } = useTranslate();
const { t } = useTranslation();
const addProject = async (data: TProjectUpdateInput) => {
try {
const createProjectResponse = await createProjectAction({
@@ -77,7 +79,7 @@ export const ProjectSettings = ({
(environment) => environment.type === "production"
);
if (productionEnvironment) {
if (typeof window !== "undefined") {
if (globalThis.window !== undefined) {
// Rmove filters when creating a new project
localStorage.removeItem(FORMBRICKS_SURVEYS_FILTERS_KEY_LS);
}
@@ -94,7 +96,7 @@ export const ProjectSettings = ({
toast.error(errorMessage);
}
} catch (error) {
toast.error(t("organizations.projects.new.settings.project_creation_failed"));
toast.error(t("organizations.workspaces.new.settings.workspace_creation_failed"));
console.error(error);
}
};
@@ -105,7 +107,6 @@ export const ProjectSettings = ({
styling: { allowStyleOverwrite: true, brandColor: { light: defaultBrandColor } },
teamIds: [],
},

resolver: zodResolver(ZProjectUpdateInput),
});
const projectName = form.watch("name");
@@ -129,9 +130,9 @@ export const ProjectSettings = ({
render={({ field, fieldState: { error } }) => (
<FormItem className="w-full space-y-4">
<div>
<FormLabel>{t("organizations.projects.new.settings.brand_color")}</FormLabel>
<FormLabel>{t("organizations.workspaces.new.settings.brand_color")}</FormLabel>
<FormDescription>
{t("organizations.projects.new.settings.brand_color_description")}
{t("organizations.workspaces.new.settings.brand_color_description")}
</FormDescription>
</div>
<FormControl>
@@ -153,9 +154,9 @@ export const ProjectSettings = ({
render={({ field, fieldState: { error } }) => (
<FormItem className="w-full space-y-4">
<div>
<FormLabel>{t("organizations.projects.new.settings.project_name")}</FormLabel>
<FormLabel>{t("organizations.workspaces.new.settings.workspace_name")}</FormLabel>
<FormDescription>
{t("organizations.projects.new.settings.project_name_description")}
{t("organizations.workspaces.new.settings.workspace_name_description")}
</FormDescription>
</div>
<FormControl>
@@ -174,7 +175,7 @@ export const ProjectSettings = ({
)}
/>

{canDoRoleManagement && userProjectsCount > 0 && (
{isAccessControlAllowed && userProjectsCount > 0 && (
<FormField
control={form.control}
name="teamIds"
@@ -184,7 +185,7 @@ export const ProjectSettings = ({
<div>
<FormLabel>{t("common.teams")}</FormLabel>
<FormDescription>
{t("organizations.projects.new.settings.team_description")}
{t("organizations.workspaces.new.settings.team_description")}
</FormDescription>
</div>
<Button
@@ -192,7 +193,7 @@ export const ProjectSettings = ({
size="sm"
type="button"
onClick={() => setCreateTeamModalOpen(true)}>
{t("organizations.projects.new.settings.create_new_team")}
{t("organizations.workspaces.new.settings.create_new_team")}
</Button>
</div>
<FormControl>
@@ -225,12 +226,13 @@ export const ProjectSettings = ({
alt="Logo"
width={256}
height={56}
className="absolute left-2 top-2 -mb-6 h-20 w-auto max-w-64 rounded-lg border object-contain p-1"
className="absolute top-2 left-2 -mb-6 h-20 w-auto max-w-64 rounded-lg border object-contain p-1"
/>
)}
<p className="text-sm text-slate-400">{t("common.preview")}</p>
<div className="z-0 h-3/4 w-3/4">
<SurveyInline
appUrl={publicDomain}
isPreviewMode={true}
survey={previewSurvey(projectName || "my Product", t)}
styling={{ brandColor: { light: brandColor } }}
@@ -1,16 +1,17 @@
import { getTeamsByOrganizationId } from "@/app/(app)/(onboarding)/lib/onboarding";
import { ProjectSettings } from "@/app/(app)/(onboarding)/organizations/[organizationId]/projects/new/settings/components/ProjectSettings";
import { DEFAULT_BRAND_COLOR } from "@/lib/constants";
import { getUserProjects } from "@/lib/project/service";
import { getRoleManagementPermission } from "@/modules/ee/license-check/lib/utils";
import { getOrganizationAuth } from "@/modules/organization/lib/utils";
import { Button } from "@/modules/ui/components/button";
import { Header } from "@/modules/ui/components/header";
import { getTranslate } from "@/tolgee/server";
import { XIcon } from "lucide-react";
import Link from "next/link";
import { redirect } from "next/navigation";
import { TProjectConfigChannel, TProjectConfigIndustry, TProjectMode } from "@formbricks/types/project";
import { getTeamsByOrganizationId } from "@/app/(app)/(onboarding)/lib/onboarding";
import { ProjectSettings } from "@/app/(app)/(onboarding)/organizations/[organizationId]/workspaces/new/settings/components/ProjectSettings";
import { DEFAULT_BRAND_COLOR } from "@/lib/constants";
import { getPublicDomain } from "@/lib/getPublicUrl";
import { getUserProjects } from "@/lib/project/service";
import { getTranslate } from "@/lingodotdev/server";
import { getAccessControlPermission } from "@/modules/ee/license-check/lib/utils";
import { getOrganizationAuth } from "@/modules/organization/lib/utils";
import { Button } from "@/modules/ui/components/button";
import { Header } from "@/modules/ui/components/header";

interface ProjectSettingsPageProps {
params: Promise<{
@@ -41,17 +42,19 @@ const Page = async (props: ProjectSettingsPageProps) => {

const organizationTeams = await getTeamsByOrganizationId(params.organizationId);

const canDoRoleManagement = await getRoleManagementPermission(organization.billing.plan);
const isAccessControlAllowed = await getAccessControlPermission(organization.billing.plan);

if (!organizationTeams) {
throw new Error(t("common.organization_teams_not_found"));
}

const publicDomain = getPublicDomain();

return (
<div className="flex min-h-full min-w-full flex-col items-center justify-center space-y-12">
<Header
title={t("organizations.projects.new.settings.project_settings_title")}
subtitle={t("organizations.projects.new.settings.project_settings_subtitle")}
title={t("organizations.workspaces.new.settings.workspace_settings_title")}
subtitle={t("organizations.workspaces.new.settings.workspace_settings_subtitle")}
/>
<ProjectSettings
organizationId={params.organizationId}
@@ -60,12 +63,13 @@ const Page = async (props: ProjectSettingsPageProps) => {
industry={industry}
defaultBrandColor={DEFAULT_BRAND_COLOR}
organizationTeams={organizationTeams}
canDoRoleManagement={canDoRoleManagement}
isAccessControlAllowed={isAccessControlAllowed}
userProjectsCount={projects.length}
publicDomain={publicDomain}
/>
{projects.length >= 1 && (
<Button
className="absolute right-5 top-5 !mt-0 text-slate-500 hover:text-slate-700"
className="absolute top-5 right-5 !mt-0 text-slate-500 hover:text-slate-700"
variant="ghost"
asChild>
<Link href={"/"}>
@@ -1,106 +0,0 @@
import "@testing-library/jest-dom/vitest";
import { cleanup, render, screen } from "@testing-library/react";
import userEvent from "@testing-library/user-event";
import { Home, Settings } from "lucide-react";
import { afterEach, describe, expect, test, vi } from "vitest";
import { OnboardingOptionsContainer } from "./OnboardingOptionsContainer";

describe("OnboardingOptionsContainer", () => {
afterEach(() => {
cleanup();
});

test("renders options with links", () => {
const options = [
{
title: "Test Option",
description: "Test Description",
icon: Home,
href: "/test",
},
];

render(<OnboardingOptionsContainer options={options} />);
expect(screen.getByText("Test Option")).toBeInTheDocument();
expect(screen.getByText("Test Description")).toBeInTheDocument();
});

test("renders options with onClick handler", () => {
const onClickMock = vi.fn();
const options = [
{
title: "Click Option",
description: "Click Description",
icon: Home,
onClick: onClickMock,
},
];

render(<OnboardingOptionsContainer options={options} />);
expect(screen.getByText("Click Option")).toBeInTheDocument();
expect(screen.getByText("Click Description")).toBeInTheDocument();
});

test("renders options with iconText", () => {
const options = [
{
title: "Icon Text Option",
description: "Icon Text Description",
icon: Home,
iconText: "Custom Icon Text",
},
];

render(<OnboardingOptionsContainer options={options} />);
expect(screen.getByText("Custom Icon Text")).toBeInTheDocument();
});

test("renders options with loading state", () => {
const options = [
{
title: "Loading Option",
description: "Loading Description",
icon: Home,
isLoading: true,
},
];

render(<OnboardingOptionsContainer options={options} />);
expect(screen.getByText("Loading Option")).toBeInTheDocument();
});

test("renders multiple options", () => {
const options = [
{
title: "First Option",
description: "First Description",
icon: Home,
},
{
title: "Second Option",
description: "Second Description",
icon: Settings,
},
];

render(<OnboardingOptionsContainer options={options} />);
expect(screen.getByText("First Option")).toBeInTheDocument();
expect(screen.getByText("Second Option")).toBeInTheDocument();
});

test("calls onClick handler when clicking an option", async () => {
const onClickMock = vi.fn();
const options = [
{
title: "Click Option",
description: "Click Description",
icon: Home,
onClick: onClickMock,
},
];

render(<OnboardingOptionsContainer options={options} />);
await userEvent.click(screen.getByText("Click Option"));
expect(onClickMock).toHaveBeenCalledTimes(1);
});
});
@@ -1,7 +1,7 @@
import { OptionCard } from "@/modules/ui/components/option-card";
import { LucideProps } from "lucide-react";
import Link from "next/link";
import { ForwardRefExoticComponent, RefAttributes } from "react";
import { OptionCard } from "@/modules/ui/components/option-card";

interface OnboardingOptionsContainerProps {
options: {

Some files were not shown because too many files have changed in this diff.