Mirror of https://github.com/formbricks/formbricks.git (synced 2025-12-21 13:40:31 -06:00)

Compare commits: codeql2...fix/upload (1046 commits)

.cursor/rules/build-and-deployment.mdc (new file, 61 lines)
@@ -0,0 +1,61 @@
---
description:
globs:
alwaysApply: false
---
# Build & Deployment Best Practices

## Build Process

### Running Builds
- Use `pnpm build` from the project root for a full build
- Monitor for React hooks warnings and fix them immediately
- Ensure all TypeScript errors are resolved before deployment

### Common Build Issues & Fixes

#### React Hooks Warnings
- Capture ref values in variables within `useEffect` cleanup
- Avoid accessing `.current` directly in cleanup functions
- Pattern for fixing ref cleanup warnings:
```typescript
useEffect(() => {
  const currentRef = myRef.current;
  return () => {
    if (currentRef) {
      currentRef.cleanup();
    }
  };
}, []);
```

#### Test Failures During Build
- Ensure all test mocks include required constants like `SESSION_MAX_AGE`
- Mock Next.js navigation hooks properly: `useParams`, `useRouter`, `useSearchParams` (see the sketch below)
- Remove unused imports and constants from test files
- Use literal values instead of imported constants when the constant isn't actually needed

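
A minimal sketch of such a mock, assuming the project's tests run on Vitest; the returned route params and router methods are illustrative:

```typescript
import { vi } from "vitest";

// Minimal sketch: stub the Next.js navigation hooks a component under test relies on.
// The param values and router methods below are illustrative; match them to the route being rendered.
vi.mock("next/navigation", () => ({
  useParams: () => ({ environmentId: "env_123", surveyId: "survey_456" }),
  useRouter: () => ({ push: vi.fn(), replace: vi.fn(), refresh: vi.fn() }),
  useSearchParams: () => new URLSearchParams(),
}));
```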

### Test Execution
- Run `pnpm test` to execute all tests
- Use `pnpm test -- --run filename.test.tsx` for specific test files
- Fix test failures before merging code
- Ensure 100% test coverage for new components

### Performance Monitoring
- Monitor build times and optimize if necessary
- Watch for memory usage during builds
- Use proper caching strategies for faster rebuilds

### Deployment Checklist
1. All tests passing
2. Build completes without warnings
3. TypeScript compilation successful
4. No linter errors
5. Database migrations applied (if any)
6. Environment variables configured

### EKS Deployment Considerations
- Ensure latest code is deployed to all pods
- Monitor AWS RDS Performance Insights for database issues
- Verify environment-specific configurations
- Check pod health and resource usage


.cursor/rules/cache-optimization.mdc (new file, 415 lines)
@@ -0,0 +1,415 @@
---
description: Caching rules for performance improvements
globs:
alwaysApply: false
---
# Cache Optimization Patterns for Formbricks

## Cache Strategy Overview

Formbricks uses a **hybrid caching approach** optimized for enterprise scale:

- **Redis** for persistent cross-request caching
- **React `cache()`** for request-level deduplication
- **NO Next.js `unstable_cache()`** - avoid for reliability

## Key Files

### Core Cache Infrastructure
- [packages/cache/src/service.ts](mdc:packages/cache/src/service.ts) - Redis cache service
- [packages/cache/src/client.ts](mdc:packages/cache/src/client.ts) - Cache client initialization and singleton management
- [apps/web/lib/cache/index.ts](mdc:apps/web/lib/cache/index.ts) - Cache service proxy for web app
- [packages/cache/src/index.ts](mdc:packages/cache/src/index.ts) - Cache package exports and utilities

### Environment State Caching (Critical Endpoint)
- [apps/web/app/api/v1/client/[environmentId]/environment/route.ts](mdc:apps/web/app/api/v1/client/[environmentId]/environment/route.ts) - Main endpoint serving hundreds of thousands of SDK clients
- [apps/web/app/api/v1/client/[environmentId]/environment/lib/data.ts](mdc:apps/web/app/api/v1/client/[environmentId]/environment/lib/data.ts) - Optimized data layer with caching

## Enterprise-Grade Cache Key Patterns

**Always use** the `createCacheKey` utilities from the cache package:

```typescript
// ✅ Correct patterns
createCacheKey.environment.state(environmentId) // "fb:env:abc123:state"
createCacheKey.organization.billing(organizationId) // "fb:org:xyz789:billing"
createCacheKey.license.status(organizationId) // "fb:license:org123:status"
createCacheKey.user.permissions(userId, orgId) // "fb:user:456:org:123:permissions"

// ❌ Never use flat keys - collision-prone
"environment_abc123"
"user_data_456"
```

## When to Use Each Cache Type

### Use React `cache()` for Request Deduplication
```typescript
// ✅ Prevents multiple calls within same request
export const getEnterpriseLicense = reactCache(async () => {
  // Complex license validation logic
});
```

### Use `cache.withCache()` for Simple Database Queries
```typescript
// ✅ Simple caching with automatic fallback (TTL in milliseconds)
export const getActionClasses = (environmentId: string) => {
  return cache.withCache(() => fetchActionClassesFromDB(environmentId),
    createCacheKey.environment.actionClasses(environmentId),
    60 * 30 * 1000 // 30 minutes in milliseconds
  );
};
```

### Use Explicit Redis Cache for Complex Business Logic
```typescript
// ✅ Full control for high-stakes endpoints
export const getEnvironmentState = async (environmentId: string) => {
  const cached = await environmentStateCache.getEnvironmentState(environmentId);
  if (cached) return cached;

  const fresh = await buildComplexState(environmentId);
  await environmentStateCache.setEnvironmentState(environmentId, fresh);
  return fresh;
};
```

## Caching Decision Framework

### When TO Add Caching

```typescript
// ✅ Expensive operations that benefit from caching
- Database queries (>10ms typical)
- External API calls (>50ms typical)
- Complex computations (>5ms)
- File system operations
- Heavy data transformations

// Example: Database query with complex joins (TTL in milliseconds)
export const getEnvironmentWithDetails = withCache(
  async (environmentId: string) => {
    return prisma.environment.findUnique({
      where: { id: environmentId },
      include: { /* complex joins */ }
    });
  },
  { key: createCacheKey.environment.details(environmentId), ttl: 60 * 30 * 1000 } // 30 minutes
)();
```

### When NOT to Add Caching

```typescript
// ❌ Don't cache these operations - minimal overhead
- Simple property access (<0.1ms)
- Basic transformations (<1ms)
- Functions that just call already-cached functions
- Pure computation without I/O

// ❌ Bad example: Redundant caching
const getCachedLicenseFeatures = withCache(
  async () => {
    const license = await getEnterpriseLicense(); // Already cached!
    return license.active ? license.features : null; // Just property access
  },
  { key: "license-features", ttl: 1800 * 1000 } // 30 minutes in milliseconds
);

// ✅ Good example: Simple and efficient
const getLicenseFeatures = async () => {
  const license = await getEnterpriseLicense(); // Already cached
  return license.active ? license.features : null; // 0.1ms overhead
};
```

### Computational Overhead Analysis

Before adding caching, analyze the overhead:

```typescript
// ✅ High overhead - CACHE IT
- Database queries: ~10-100ms
- External APIs: ~50-500ms
- File I/O: ~5-50ms
- Complex algorithms: >5ms

// ❌ Low overhead - DON'T CACHE
- Property access: ~0.001ms
- Simple lookups: ~0.1ms
- Basic validation: ~1ms
- Type checks: ~0.01ms

// Example decision tree:
const expensiveOperation = async () => {
  return prisma.query(); // 50ms - CACHE IT
};

const cheapOperation = (data: any) => {
  return data.property; // 0.001ms - DON'T CACHE
};
```

### Avoid Cache Wrapper Anti-Pattern

```typescript
// ❌ Don't create wrapper functions just for caching
const getCachedUserPermissions = withCache(
  async (userId: string) => getUserPermissions(userId),
  { key: createCacheKey.user.permissions(userId), ttl: 3600 * 1000 } // 1 hour in milliseconds
);

// ✅ Add caching directly to the original function
export const getUserPermissions = withCache(
  async (userId: string) => {
    return prisma.user.findUnique({
      where: { id: userId },
      include: { permissions: true }
    });
  },
  { key: createCacheKey.user.permissions(userId), ttl: 3600 * 1000 } // 1 hour in milliseconds
);
```

## TTL Coordination Strategy

### Multi-Layer Cache Coordination
For endpoints serving client SDKs, coordinate TTLs across layers:

```typescript
// Client SDK cache (expiresAt) - longest TTL for fewer requests
const CLIENT_TTL = 60 * 60; // 1 hour (seconds for client)

// Server Redis cache - shorter TTL ensures fresh data for clients
const SERVER_TTL = 60 * 30 * 1000; // 30 minutes in milliseconds

// HTTP cache headers (seconds)
const BROWSER_TTL = 60 * 60; // 1 hour (max-age)
const CDN_TTL = 60 * 30; // 30 minutes (s-maxage)
const CORS_TTL = 60 * 60; // 1 hour (balanced approach)
```

### Standard TTL Guidelines (in milliseconds for cache-manager + Keyv)
```typescript
// Configuration data - rarely changes
const CONFIG_TTL = 60 * 60 * 24 * 1000; // 24 hours

// User data - moderate frequency
const USER_TTL = 60 * 60 * 2 * 1000; // 2 hours

// Survey data - changes moderately
const SURVEY_TTL = 60 * 15 * 1000; // 15 minutes

// Billing data - expensive to compute
const BILLING_TTL = 60 * 30 * 1000; // 30 minutes

// Action classes - infrequent changes
const ACTION_CLASS_TTL = 60 * 30 * 1000; // 30 minutes
```

## High-Frequency Endpoint Optimization

### Performance Patterns for High-Volume Endpoints

```typescript
// ✅ Optimized high-frequency endpoint pattern
export const GET = async (request: NextRequest, props: { params: Promise<{ id: string }> }) => {
  const params = await props.params;

  try {
    // Simple validation (avoid Zod for high-frequency)
    if (!params.id || typeof params.id !== 'string') {
      return responses.badRequestResponse("ID is required", undefined, true);
    }

    // Single optimized query with caching
    const data = await getOptimizedData(params.id);

    return responses.successResponse(
      {
        data,
        expiresAt: new Date(Date.now() + CLIENT_TTL * 1000), // SDK cache duration
      },
      true,
      "public, s-maxage=1800, max-age=3600, stale-while-revalidate=1800, stale-if-error=3600"
    );
  } catch (err) {
    // Simplified error handling for performance
    if (err instanceof ResourceNotFoundError) {
      return responses.notFoundResponse(err.resourceType, err.resourceId);
    }
    logger.error({ error: err, url: request.url }, "Error in high-frequency endpoint");
    return responses.internalServerErrorResponse(err.message, true);
  }
};
```

### Avoid These Performance Anti-Patterns

```typescript
// ❌ Avoid for high-frequency endpoints
const inputValidation = ZodSchema.safeParse(input); // Too slow
const startTime = Date.now(); logger.debug(...); // Logging overhead
const { data, revalidateEnvironment } = await get(); // Complex return types
```

### CORS Optimization
```typescript
// ✅ Balanced CORS caching (not too aggressive)
export const OPTIONS = async (): Promise<Response> => {
  return responses.successResponse(
    {},
    true,
    "public, s-maxage=3600, max-age=3600" // 1 hour balanced approach
  );
};
```

## Redis Cache Migration from Next.js

### Avoid Legacy Next.js Patterns
```typescript
// ❌ Old Next.js unstable_cache pattern (avoid)
const getCachedData = unstable_cache(
  async (id) => fetchData(id),
  ['cache-key'],
  { tags: ['environment'], revalidate: 900 }
);

// ❌ Don't use revalidateEnvironment flags with Redis
return { data, revalidateEnvironment: true }; // This gets cached incorrectly!

// ✅ New Redis pattern with withCache (TTL in milliseconds)
export const getCachedData = (id: string) =>
  withCache(
    () => fetchData(id),
    {
      key: createCacheKey.environment.data(id),
      ttl: 60 * 15 * 1000, // 15 minutes in milliseconds
    }
  )();
```

### Remove Revalidation Logic
When migrating from Next.js `unstable_cache`:
- Remove `revalidateEnvironment` or similar flags
- Remove tag-based invalidation logic
- Use TTL-based expiration instead
- Handle one-time updates (like `appSetupCompleted`) directly in cache

## Data Layer Optimization

### Single Query Pattern
```typescript
// ✅ Optimize with single database query
export const getOptimizedEnvironmentData = async (environmentId: string) => {
  return prisma.environment.findUniqueOrThrow({
    where: { id: environmentId },
    include: {
      project: {
        select: { id: true, recontactDays: true, /* ... */ }
      },
      organization: {
        select: { id: true, billing: true }
      },
      surveys: {
        where: { status: "inProgress" },
        select: { id: true, name: true, /* ... */ }
      },
      actionClasses: {
        select: { id: true, name: true, /* ... */ }
      }
    }
  });
};

// ❌ Avoid multiple separate queries
const environment = await getEnvironment(id);
const organization = await getOrganization(environment.organizationId);
const surveys = await getSurveys(id);
const actionClasses = await getActionClasses(id);
```

## Invalidation Best Practices

**Always use explicit key-based invalidation:**

```typescript
// ✅ Clear and debuggable
await invalidateCache(createCacheKey.environment.state(environmentId));
await invalidateCache([
  createCacheKey.environment.surveys(environmentId),
  createCacheKey.environment.actionClasses(environmentId)
]);

// ❌ Avoid complex tag systems
await invalidateByTags(["environment", "survey"]); // Don't do this
```

## Critical Performance Targets

### High-Frequency Endpoint Goals
- **Cache hit ratio**: >85%
- **Response time P95**: <200ms
- **Database load reduction**: >60%
- **HTTP cache duration**: 1hr browser, 30min Cloudflare
- **SDK refresh interval**: 1 hour with 30min server cache

### Performance Monitoring
- Use **existing elastic cache analytics** for metrics
- Log cache errors and warnings (not debug info)
- Track database query reduction
- Monitor response times for cached endpoints
- **Avoid performance logging** in high-frequency endpoints

## Error Handling Pattern

Always provide fallback to fresh data on cache errors:

```typescript
try {
  const cached = await cache.get(key);
  if (cached) return cached;

  const fresh = await fetchFresh();
  await cache.set(key, fresh, ttl); // ttl in milliseconds
  return fresh;
} catch (error) {
  // ✅ Always fallback to fresh data
  logger.warn("Cache error, fetching fresh", { key, error });
  return fetchFresh();
}
```

## Common Pitfalls to Avoid

1. **Never use Next.js `unstable_cache()`** - unreliable in production
2. **Don't use revalidation flags with Redis** - they get cached incorrectly
3. **Avoid Zod validation** for simple parameters in high-frequency endpoints
4. **Don't add performance logging** to high-frequency endpoints
5. **Coordinate TTLs** between client and server caches
6. **Don't over-engineer** with complex tag systems
7. **Avoid caching rapidly changing data** (real-time metrics)
8. **Always validate cache keys** to prevent collisions
9. **Don't add redundant caching layers** - analyze computational overhead first
10. **Avoid cache wrapper functions** - add caching directly to expensive operations
11. **Don't cache property access or simple transformations** - overhead is negligible
12. **Analyze the full call chain** before adding caching to avoid double-caching
13. **Remember TTL is in milliseconds** for cache-manager + Keyv stack (not seconds)

## Monitoring Strategy

- Use **existing elastic cache analytics** for metrics
- Log cache errors and warnings
- Track database query reduction
- Monitor response times for cached endpoints
- **Don't add custom metrics** that duplicate existing monitoring

## Important Notes

### TTL Units
- **cache-manager + Keyv**: TTL in **milliseconds**
- **Direct Redis commands**: TTL in **seconds** (EXPIRE, SETEX) or **milliseconds** (PEXPIRE, PSETEX)
- **HTTP cache headers**: TTL in **seconds** (max-age, s-maxage)
- **Client SDK**: TTL in **seconds** (expiresAt calculation)

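
A minimal sketch of keeping these units straight by deriving each layer's value from one constant; the names are illustrative:

```typescript
// Illustrative constants: one source of truth in seconds, converted per layer
const ENV_STATE_TTL_SECONDS = 60 * 30; // 30 minutes

const redisTtlMs = ENV_STATE_TTL_SECONDS * 1000; // cache-manager + Keyv expect milliseconds
const cacheControl = `public, s-maxage=${ENV_STATE_TTL_SECONDS}, max-age=${ENV_STATE_TTL_SECONDS}`; // HTTP headers use seconds
const expiresAt = new Date(Date.now() + ENV_STATE_TTL_SECONDS * 1000); // client SDK expiry as an absolute timestamp
```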

.cursor/rules/database-performance.mdc (new file, 41 lines)
@@ -0,0 +1,41 @@
---
description:
globs:
alwaysApply: false
---
# Database Performance & Prisma Best Practices

## Critical Performance Rules

### Response Count Queries
- **NEVER** use `skip`/`offset` with `prisma.response.count()` - this causes expensive subqueries with OFFSET
- Always use only `where` clauses for count operations: `prisma.response.count({ where: { ... } })`
- For pagination, separate count queries from data queries
- Reference: [apps/web/lib/response/service.ts](mdc:apps/web/lib/response/service.ts) line 654-686

### Prisma Query Optimization
- Use proper indexes defined in [packages/database/schema.prisma](mdc:packages/database/schema.prisma)
- Leverage existing indexes: `@@index([surveyId, createdAt])`, `@@index([createdAt])`
- Use cursor-based pagination for large datasets instead of offset-based (see the sketch below)
- Cache frequently accessed data using React Cache and custom cache tags

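
A minimal sketch of cursor-based pagination with Prisma; `lastSeenId` would come from the previous page, and the page size and ordering are illustrative:

```typescript
// Minimal sketch: cursor-based pagination over responses (page size and ordering are illustrative)
const page = await prisma.response.findMany({
  where: { surveyId },
  orderBy: { createdAt: "desc" },
  take: 50,
  ...(lastSeenId ? { cursor: { id: lastSeenId }, skip: 1 } : {}), // skip: 1 excludes the cursor row itself
});
```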

### Date Range Filtering
- When filtering by `createdAt`, always use indexed queries
- Combine with `surveyId` for optimal performance: `{ surveyId, createdAt: { gte: start, lt: end } }`
- Avoid complex WHERE clauses that can't utilize indexes

### Count vs Data Separation
- Always separate count queries from data fetching queries
- Use `Promise.all()` to run count and data queries in parallel
- Example pattern from [apps/web/modules/api/v2/management/responses/lib/response.ts](mdc:apps/web/modules/api/v2/management/responses/lib/response.ts):
```typescript
const [responses, totalCount] = await Promise.all([
  prisma.response.findMany(query),
  prisma.response.count({ where: whereClause }),
]);
```

### Monitoring & Debugging
- Monitor AWS RDS Performance Insights for problematic queries
- Look for queries with OFFSET in count operations - these indicate performance issues
- Use proper error handling with `DatabaseError` for Prisma exceptions


.cursor/rules/database.mdc (new file, 110 lines)
@@ -0,0 +1,110 @@
---
description: >
  This rule provides comprehensive knowledge about the Formbricks database structure, relationships,
  and data patterns. It should be used **only when the agent explicitly requests database schema-level
  details** to support tasks such as: writing/debugging Prisma queries, designing/reviewing data models,
  investigating multi-tenancy behavior, creating API endpoints, or understanding data relationships.
globs: []
alwaysApply: agent-requested
---

# Formbricks Database Schema Reference

This rule provides a reference to the Formbricks database structure. For the most up-to-date and complete schema definitions, please refer to the schema.prisma file directly.

## Database Overview

Formbricks uses PostgreSQL with Prisma ORM. The schema is designed for multi-tenancy with strong data isolation between organizations.

### Core Hierarchy

```
Organization
└── Project
    └── Environment (production/development)
        ├── Survey
        ├── Contact
        ├── ActionClass
        └── Integration
```

## Schema Reference

For the complete and up-to-date database schema, please refer to:

- Main schema: `packages/database/schema.prisma`
- JSON type definitions: `packages/database/json-types.ts`

The schema.prisma file contains all model definitions, relationships, enums, and field types. The json-types.ts file contains TypeScript type definitions for JSON fields.

## Data Access Patterns

### Multi-tenancy

- All data is scoped by Organization
- Environment-level isolation for surveys and contacts
- Project-level grouping for related surveys

### Soft Deletion

Some models use soft deletion patterns:

- Check `isActive` fields where present
- Use proper filtering in queries (see the sketch below)

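
A minimal sketch of such filtering; the model is illustrative, so check `schema.prisma` for which models actually carry an `isActive` flag:

```typescript
// Minimal sketch: exclude soft-deleted rows by filtering on isActive
// (actionClass is illustrative; verify the flag exists on the model you query)
const activeActionClasses = await prisma.actionClass.findMany({
  where: { environmentId, isActive: true },
});
```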

### Cascading Deletes

Configured cascade relationships:

- Organization deletion cascades to all child entities
- Survey deletion removes responses, displays, triggers
- Contact deletion removes attributes and responses

## Common Query Patterns

### Survey with Responses

```typescript
// Include response count and latest responses
const survey = await prisma.survey.findUnique({
  where: { id: surveyId },
  include: {
    responses: {
      take: 10,
      orderBy: { createdAt: "desc" },
    },
    _count: {
      select: { responses: true },
    },
  },
});
```

### Environment Scoping

```typescript
// Always scope by environment
const surveys = await prisma.survey.findMany({
  where: {
    environmentId: environmentId,
    // Additional filters...
  },
});
```

### Contact with Attributes

```typescript
const contact = await prisma.contact.findUnique({
  where: { id: contactId },
  include: {
    attributes: {
      include: {
        attributeKey: true,
      },
    },
  },
});
```

This schema supports Formbricks' core functionality: multi-tenant survey management, user targeting, response collection, and analysis, all while maintaining strict data isolation and security.


.cursor/rules/documentations.mdc (new file, 28 lines)
@@ -0,0 +1,28 @@
---
description: Guideline for writing end-user facing documentation in the apps/docs folder
globs:
alwaysApply: false
---

Follow these instructions and guidelines when asked to write documentation in the apps/docs folder.

Follow this structure to write the title and description, pick a matching icon, and insert it at the top of the MDX file:

---
title: "FEATURE NAME"
description: "1 concise sentence to describe WHEN the feature is being used and FOR WHAT BENEFIT."
icon: "link"
---

- Description: 1 concise sentence to describe WHEN the feature is being used and FOR WHAT BENEFIT.
- Make ample use of the Mintlify components you can find here: https://mintlify.com/docs/llms.txt - e.g. if docs describe consecutive steps, always use the Mintlify Step component.
- In all headlines, capitalize only the current feature name (in Camel Case) and nothing else.
- The page should never start with an H1 headline, because it's already part of the template.
- Tonality: Keep it concise and to the point. Avoid jargon where possible.
- If a feature is part of the Enterprise Edition, use this note:

<Note>
  FEATURE NAME is part of the [Enterprise Edition](/self-hosting/advanced/license)
</Note>


.cursor/rules/eks-alb-optimization.mdc (new file, 152 lines)
@@ -0,0 +1,152 @@
---
description:
globs:
alwaysApply: false
---
# EKS & ALB Optimization Guide for Error Reduction

## Infrastructure Overview

This project uses AWS EKS with an Application Load Balancer (ALB) for the Formbricks application. The infrastructure has been optimized to minimize ELB 502/504 errors through careful configuration of connection handling, health checks, and pod lifecycle management.

## Key Infrastructure Files

### Terraform Configuration
- **Main Infrastructure**: [infra/terraform/main.tf](mdc:infra/terraform/main.tf) - EKS cluster, VPC, Karpenter, and core AWS resources
- **Monitoring**: [infra/terraform/cloudwatch.tf](mdc:infra/terraform/cloudwatch.tf) - CloudWatch alarms for 502/504 error tracking and alerting
- **Database**: [infra/terraform/rds.tf](mdc:infra/terraform/rds.tf) - Aurora PostgreSQL configuration

### Helm Configuration
- **Production**: [infra/formbricks-cloud-helm/values.yaml.gotmpl](mdc:infra/formbricks-cloud-helm/values.yaml.gotmpl) - Optimized ALB and pod configurations
- **Staging**: [infra/formbricks-cloud-helm/values-staging.yaml.gotmpl](mdc:infra/formbricks-cloud-helm/values-staging.yaml.gotmpl) - Staging environment with spot instances
- **Deployment**: [infra/formbricks-cloud-helm/helmfile.yaml.gotmpl](mdc:infra/formbricks-cloud-helm/helmfile.yaml.gotmpl) - Multi-environment Helm releases

## ALB Optimization Patterns

### Connection Handling Optimizations
```yaml
# Key ALB annotations for reducing 502/504 errors
alb.ingress.kubernetes.io/load-balancer-attributes: |
  idle_timeout.timeout_seconds=120,
  connection_logs.s3.enabled=false,
  access_logs.s3.enabled=false

alb.ingress.kubernetes.io/target-group-attributes: |
  deregistration_delay.timeout_seconds=30,
  stickiness.enabled=false,
  load_balancing.algorithm.type=least_outstanding_requests,
  target_group_health.dns_failover.minimum_healthy_targets.count=1
```

### Health Check Configuration
- **Interval**: 15 seconds for faster detection of unhealthy targets
- **Timeout**: 5 seconds to prevent false positives
- **Thresholds**: 2 healthy, 3 unhealthy for balanced responsiveness
- **Path**: `/health` endpoint optimized for < 100ms response time (see the sketch below)

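
A minimal sketch of such a handler, assuming a Next.js route handler; the file path is illustrative:

```typescript
// Illustrative path: apps/web/app/health/route.ts
// Keep this handler free of database or Redis round-trips so the ALB check stays well under 100ms.
export const GET = async (): Promise<Response> => {
  return Response.json({ status: "ok" }, { status: 200 });
};
```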

## Pod Lifecycle Management

### Graceful Shutdown Pattern
```yaml
# PreStop hook to allow connection draining
lifecycle:
  preStop:
    exec:
      command: ["/bin/sh", "-c", "sleep 15"]

# Termination grace period for complete cleanup
terminationGracePeriodSeconds: 45
```

### Health Probe Strategy
- **Startup Probe**: 5s initial delay, 5s interval, max 60s startup time
- **Readiness Probe**: 10s delay, 10s interval for traffic readiness
- **Liveness Probe**: 30s delay, 30s interval for container health

### Rolling Update Configuration
```yaml
strategy:
  type: RollingUpdate
  rollingUpdate:
    maxUnavailable: 25% # Maintain capacity during updates
    maxSurge: 50% # Allow faster rollouts
```

## Karpenter Node Management

### Node Lifecycle Optimization
- **Startup Taints**: Prevent traffic during node initialization
- **Graceful Shutdown**: 30s grace period for pod eviction
- **Consolidation Delay**: 60s to reduce unnecessary churn
- **Eviction Policies**: Configured for smooth pod migrations

### Instance Selection
- **Families**: c8g, c7g, m8g, m7g, r8g, r7g (ARM64 Graviton)
- **Sizes**: 2, 4, 8 vCPUs for cost optimization
- **Bottlerocket AMI**: Enhanced security and performance

## Monitoring & Alerting

### Critical ALB Metrics
1. **ELB 502 Errors**: Threshold 20 over 5 minutes
2. **ELB 504 Errors**: Threshold 15 over 5 minutes
3. **Target Connection Errors**: Threshold 50 over 5 minutes
4. **4XX Errors**: Threshold 100 over 10 minutes (client issues)

### Expected Improvements
- **60-80% reduction** in ELB 502 errors
- **Faster recovery** during pod restarts
- **Better connection reuse** efficiency
- **Improved autoscaling** responsiveness

## Deployment Patterns

### Infrastructure Updates
1. **Terraform First**: Apply infrastructure changes via [infra/deploy-improvements.sh](mdc:infra/deploy-improvements.sh)
2. **Helm Second**: Deploy application configurations
3. **Verification**: Check pod status, endpoints, and ALB health
4. **Monitoring**: Watch CloudWatch metrics for 24-48 hours

### Environment-Specific Configurations
- **Production**: On-demand instances, stricter resource limits
- **Staging**: Spot instances, rate limiting disabled, relaxed resources

## Troubleshooting Patterns

### 502 Error Investigation
1. Check pod readiness and health probe status
2. Verify ALB target group health
3. Review deregistration timing during deployments
4. Monitor connection pool utilization

### 504 Error Analysis
1. Check application response times
2. Verify timeout configurations (ALB: 120s, App: aligned)
3. Review database query performance
4. Monitor resource utilization during traffic spikes

### Connection Error Patterns
1. Verify Karpenter node lifecycle timing
2. Check pod termination grace periods
3. Review ALB connection draining settings
4. Monitor cluster autoscaling events

## Best Practices

### When Making Changes
- **Test in staging first** with same configurations
- **Monitor metrics** for 24-48 hours after changes
- **Use gradual rollouts** with proper health checks
- **Maintain ALB timeout alignment** across all layers

### Performance Optimization
- **Health endpoint** should respond < 100ms consistently
- **Connection pooling** aligned with ALB idle timeouts
- **Resource requests/limits** tuned for consistent performance
- **Graceful shutdown** implemented in application code

### Monitoring Strategy
- **Real-time alerts** for error rate spikes
- **Trend analysis** for connection patterns
- **Capacity planning** based on LCU usage
- **4XX pattern analysis** for client behavior insights


.cursor/rules/formbricks-architecture.mdc (new file, 332 lines)
@@ -0,0 +1,332 @@
---
description:
globs:
alwaysApply: false
---
# Formbricks Architecture & Patterns

## Monorepo Structure

### Apps Directory
- `apps/web/` - Main Next.js web application
- `packages/` - Shared packages and utilities

### Key Directories in Web App
```
apps/web/
├── app/             # Next.js 13+ app directory
│   ├── (app)/       # Main application routes
│   ├── (auth)/      # Authentication routes
│   ├── api/         # API routes
├── components/      # Shared components
├── lib/             # Utility functions and services
└── modules/         # Feature-specific modules
```

## Routing Patterns

### App Router Structure
The application uses the Next.js 13+ app router with route groups:

```
(app)/environments/[environmentId]/
├── surveys/[surveyId]/
│   ├── (analysis)/       # Analysis views
│   │   ├── responses/    # Response management
│   │   ├── summary/      # Survey summary
│   │   └── hooks/        # Analysis-specific hooks
│   ├── edit/             # Survey editing
│   └── settings/         # Survey settings
```

### Dynamic Routes
- `[environmentId]` - Environment-specific routes
- `[surveyId]` - Survey-specific routes (see the sketch below)

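
A minimal sketch of reading these segments in a page component, following the `await props.params` style used elsewhere in these rules; the page name is illustrative:

```typescript
// Illustrative page component: dynamic segments arrive via the params prop
export default async function SurveyPage(props: { params: Promise<{ environmentId: string; surveyId: string }> }) {
  const { environmentId, surveyId } = await props.params;
  // ...fetch and render data scoped to environmentId and surveyId
  return <div>{surveyId}</div>;
}
```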
|
||||
|
||||
## Service Layer Pattern
|
||||
|
||||
### Service Organization
|
||||
Services are organized by domain in `apps/web/lib/`:
|
||||
|
||||
```typescript
|
||||
// Example: Response service
|
||||
// apps/web/lib/response/service.ts
|
||||
export const getResponseCountAction = async ({
|
||||
surveyId,
|
||||
filterCriteria,
|
||||
}: {
|
||||
surveyId: string;
|
||||
filterCriteria: any;
|
||||
}) => {
|
||||
// Service implementation
|
||||
};
|
||||
```
|
||||
|
||||
### Action Pattern
|
||||
Server actions follow a consistent pattern:
|
||||
|
||||
```typescript
|
||||
// Action wrapper for service calls
|
||||
export const getResponseCountAction = async (params) => {
|
||||
try {
|
||||
const result = await responseService.getCount(params);
|
||||
return { data: result };
|
||||
} catch (error) {
|
||||
return { error: error.message };
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
## Context Patterns
|
||||
|
||||
### Provider Structure
|
||||
Context providers follow a consistent pattern:
|
||||
|
||||
```typescript
|
||||
// Provider component
|
||||
export const ResponseFilterProvider = ({ children }: { children: React.ReactNode }) => {
|
||||
const [selectedFilter, setSelectedFilter] = useState(defaultFilter);
|
||||
|
||||
const value = {
|
||||
selectedFilter,
|
||||
setSelectedFilter,
|
||||
// ... other state and methods
|
||||
};
|
||||
|
||||
return (
|
||||
<ResponseFilterContext.Provider value={value}>
|
||||
{children}
|
||||
</ResponseFilterContext.Provider>
|
||||
);
|
||||
};
|
||||
|
||||
// Hook for consuming context
|
||||
export const useResponseFilter = () => {
|
||||
const context = useContext(ResponseFilterContext);
|
||||
if (!context) {
|
||||
throw new Error('useResponseFilter must be used within ResponseFilterProvider');
|
||||
}
|
||||
return context;
|
||||
};
|
||||
```
|
||||
|
||||
### Context Composition
|
||||
Multiple contexts are often composed together:
|
||||
|
||||
```typescript
|
||||
// Layout component with multiple providers
|
||||
export default function AnalysisLayout({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<ResponseFilterProvider>
|
||||
<ResponseCountProvider>
|
||||
{children}
|
||||
</ResponseCountProvider>
|
||||
</ResponseFilterProvider>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## Component Patterns
|
||||
|
||||
### Page Components
|
||||
Page components are located in the app directory and follow this pattern:
|
||||
|
||||
```typescript
|
||||
// apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/responses/page.tsx
|
||||
export default function ResponsesPage() {
|
||||
return (
|
||||
<div>
|
||||
<ResponsesTable />
|
||||
<ResponsesPagination />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Component Organization
|
||||
- **Pages** - Route components in app directory
|
||||
- **Components** - Reusable UI components
|
||||
- **Modules** - Feature-specific components and logic
|
||||
|
||||
### Shared Components
|
||||
Common components are in `apps/web/components/`:
|
||||
- UI components (buttons, inputs, modals)
|
||||
- Layout components (headers, sidebars)
|
||||
- Data display components (tables, charts)
|
||||
|
||||
## Hook Patterns
|
||||
|
||||
### Custom Hook Structure
|
||||
Custom hooks follow consistent patterns:
|
||||
|
||||
```typescript
|
||||
export const useResponseCount = ({
|
||||
survey,
|
||||
initialCount
|
||||
}: {
|
||||
survey: TSurvey;
|
||||
initialCount?: number;
|
||||
}) => {
|
||||
const [responseCount, setResponseCount] = useState(initialCount ?? 0);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
|
||||
// Hook logic...
|
||||
|
||||
return {
|
||||
responseCount,
|
||||
isLoading,
|
||||
refetch,
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
### Hook Dependencies
|
||||
- Use context hooks for shared state
|
||||
- Implement proper cleanup with AbortController
|
||||
- Optimize dependency arrays to prevent unnecessary re-renders
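
A minimal sketch of that cleanup pattern, assuming a hypothetical `fetchResponseCount` helper that accepts an `AbortSignal`:

```typescript
import { useEffect, useState } from "react";

// Hypothetical data-fetching helper that accepts an AbortSignal
declare function fetchResponseCount(surveyId: string, signal: AbortSignal): Promise<number>;

export const useResponseCountValue = (surveyId: string) => {
  const [count, setCount] = useState<number | null>(null);

  useEffect(() => {
    const controller = new AbortController();

    fetchResponseCount(surveyId, controller.signal)
      .then((result) => {
        if (!controller.signal.aborted) setCount(result);
      })
      .catch((error) => {
        if (!controller.signal.aborted) console.error("Failed to fetch response count:", error);
      });

    // Abort the in-flight request when surveyId changes or the component unmounts
    return () => controller.abort();
  }, [surveyId]); // keep the dependency array minimal to avoid unnecessary re-runs

  return count;
};
```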
|
||||
|
||||
## Data Fetching Patterns
|
||||
|
||||
### Server Actions
|
||||
The app uses Next.js server actions for data fetching:
|
||||
|
||||
```typescript
|
||||
// Server action
|
||||
export async function getResponsesAction(params: GetResponsesParams) {
|
||||
const responses = await getResponses(params);
|
||||
return { data: responses };
|
||||
}
|
||||
|
||||
// Client usage
|
||||
const { data } = await getResponsesAction(params);
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
Consistent error handling across the application:
|
||||
|
||||
```typescript
|
||||
try {
  const result = await apiCall();
  return { data: result };
} catch (error) {
  console.error("Operation failed:", error);
  // `error` is `unknown` in strict TypeScript, so narrow before reading `.message`
  return { error: error instanceof Error ? error.message : "Unknown error" };
}
|
||||
```
|
||||
|
||||
## Type Safety
|
||||
|
||||
### Type Organization
|
||||
Types are organized in packages:
|
||||
- `@formbricks/types` - Shared type definitions
|
||||
- Local types in component/hook files
|
||||
|
||||
### Common Types
|
||||
```typescript
|
||||
import { TSurvey } from "@formbricks/types/surveys/types";
|
||||
import { TResponse } from "@formbricks/types/responses";
|
||||
import { TEnvironment } from "@formbricks/types/environment";
|
||||
```
|
||||
|
||||
## State Management
|
||||
|
||||
### Local State
|
||||
- Use `useState` for component-specific state
|
||||
- Use `useReducer` for complex state logic
|
||||
- Use refs for mutable values that don't trigger re-renders
|
||||
|
||||
### Global State
|
||||
- React Context for feature-specific shared state
|
||||
- URL state for filters and pagination
|
||||
- Server state through server actions
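
A sketch of keeping a filter in the URL with the app-router navigation hooks (the hook name and query key are illustrative):

```typescript
"use client";

import { usePathname, useRouter, useSearchParams } from "next/navigation";

// Hypothetical helper: the current filter lives in the URL, so it survives reloads and is shareable
export const useFilterQueryParam = () => {
  const router = useRouter();
  const pathname = usePathname();
  const searchParams = useSearchParams();

  const filter = searchParams.get("filter") ?? "all";

  const setFilter = (value: string) => {
    const params = new URLSearchParams(searchParams.toString());
    params.set("filter", value);
    router.push(`${pathname}?${params.toString()}`);
  };

  return { filter, setFilter };
};
```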
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
### Code Splitting
|
||||
- Dynamic imports for heavy components
|
||||
- Route-based code splitting with app router
|
||||
- Lazy loading for non-critical features
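
For example, a heavy client-only component could be loaded on demand with `next/dynamic` (the component name is illustrative):

```typescript
import dynamic from "next/dynamic";

// Hypothetical heavy component, excluded from the initial bundle and rendered only on the client
const ResponseChart = dynamic(() => import("./ResponseChart"), {
  ssr: false,
  loading: () => <p>Loading chart…</p>,
});
```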
|
||||
|
||||
### Caching Strategy
|
||||
- Server-side caching for database queries
|
||||
- Client-side caching with React Query (where applicable)
|
||||
- Static generation for public pages
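
A hedged sketch of server-side query caching with Next.js `unstable_cache` (the Prisma import path, cache key, and revalidation window are assumptions):

```typescript
import { unstable_cache } from "next/cache";
import { prisma } from "@/path/to/prisma"; // hypothetical import path

// Cache the query result across requests and revalidate it every 60 seconds
export const getCachedSurvey = unstable_cache(
  async (surveyId: string) => {
    return prisma.survey.findUnique({ where: { id: surveyId } });
  },
  ["survey-by-id"],
  { revalidate: 60, tags: ["surveys"] }
);
```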
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
### Test Organization
|
||||
```
|
||||
component/
|
||||
├── Component.tsx
|
||||
├── Component.test.tsx
|
||||
└── hooks/
|
||||
├── useHook.ts
|
||||
└── useHook.test.tsx
|
||||
```
|
||||
|
||||
### Test Patterns
|
||||
- Unit tests for utilities and services
|
||||
- Integration tests for components with context
|
||||
- Hook tests with proper mocking
|
||||
|
||||
## Build & Deployment
|
||||
|
||||
### Build Process
|
||||
- TypeScript compilation
|
||||
- Next.js build optimization
|
||||
- Asset optimization and bundling
|
||||
|
||||
### Environment Configuration
|
||||
- Environment-specific configurations
|
||||
- Feature flags for gradual rollouts
|
||||
- Database connection management
|
||||
|
||||
## Security Patterns
|
||||
|
||||
### Authentication
|
||||
- Session-based authentication
|
||||
- Environment-based access control
|
||||
- API route protection
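
As one hedged example (assuming NextAuth and an `authOptions` export, both illustrative here), a route handler can reject unauthenticated requests before doing any work:

```typescript
import { getServerSession } from "next-auth";
import { NextResponse } from "next/server";
import { authOptions } from "@/path/to/authOptions"; // hypothetical import path

export async function GET() {
  const session = await getServerSession(authOptions);

  if (!session) {
    // No valid session: block access to environment data
    return NextResponse.json({ error: "Not authenticated" }, { status: 401 });
  }

  return NextResponse.json({ ok: true });
}
```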
|
||||
|
||||
### Data Validation
|
||||
- Input validation on both client and server
|
||||
- Type-safe API contracts
|
||||
- Sanitization of user inputs
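
A minimal sketch of sharing one schema between client and server, assuming a schema library such as Zod (the schema shape is illustrative):

```typescript
import { z } from "zod";

// Hypothetical input schema, reused by the form on the client and by the server action
export const ZResponseInput = z.object({
  surveyId: z.string().min(1),
  finished: z.boolean(),
  data: z.record(z.string(), z.unknown()),
});

export type TResponseInput = z.infer<typeof ZResponseInput>;

export const parseResponseInput = (input: unknown): TResponseInput => {
  // Throws on invalid input (use .safeParse for a non-throwing variant)
  return ZResponseInput.parse(input);
};
```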
|
||||
|
||||
## Monitoring & Observability
|
||||
|
||||
### Error Tracking
|
||||
- Client-side error boundaries
|
||||
- Server-side error logging
|
||||
- Performance monitoring
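
For the client side, the app router's `error.tsx` convention gives a route-level error boundary; a minimal sketch:

```typescript
"use client";

// Hypothetical error.tsx placed next to the route it protects
export default function RouteError({ error, reset }: { error: Error; reset: () => void }) {
  return (
    <div>
      <p>Something went wrong: {error.message}</p>
      <button onClick={reset}>Try again</button>
    </div>
  );
}
```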
|
||||
|
||||
### Analytics
|
||||
- User interaction tracking
|
||||
- Performance metrics
|
||||
- Database query monitoring
|
||||
|
||||
## Best Practices Summary
|
||||
|
||||
### Code Organization
|
||||
- ✅ Follow the established directory structure
|
||||
- ✅ Use consistent naming conventions
|
||||
- ✅ Separate concerns (UI, logic, data)
|
||||
- ✅ Keep components focused and small
|
||||
|
||||
### Performance
|
||||
- ✅ Implement proper loading states
|
||||
- ✅ Use AbortController for async operations
|
||||
- ✅ Optimize database queries
|
||||
- ✅ Implement proper caching strategies
|
||||
|
||||
### Type Safety
|
||||
- ✅ Use TypeScript throughout
|
||||
- ✅ Define proper interfaces for props
|
||||
- ✅ Use type guards for runtime validation
|
||||
- ✅ Leverage shared type packages
|
||||
|
||||
### Testing
|
||||
- ✅ Write tests for critical functionality
|
||||
- ✅ Mock external dependencies properly
|
||||
- ✅ Test error scenarios and edge cases
|
||||
- ✅ Maintain good test coverage
|
||||
232
.cursor/rules/github-actions-security.mdc
Normal file
@@ -0,0 +1,232 @@
|
||||
---
|
||||
description: Security best practices and guidelines for writing GitHub Actions and workflows
|
||||
globs: .github/workflows/*.yml,.github/workflows/*.yaml,.github/actions/*/action.yml,.github/actions/*/action.yaml
|
||||
---
|
||||
|
||||
# GitHub Actions Security Best Practices
|
||||
|
||||
## Required Security Measures
|
||||
|
||||
### 1. Set Minimum GITHUB_TOKEN Permissions
|
||||
|
||||
Always explicitly set the minimum required permissions for GITHUB_TOKEN:
|
||||
|
||||
```yaml
|
||||
permissions:
|
||||
contents: read
|
||||
# Only add additional permissions if absolutely necessary:
|
||||
# pull-requests: write # for commenting on PRs
|
||||
# issues: write # for creating/updating issues
|
||||
# checks: write # for publishing check results
|
||||
```
|
||||
|
||||
### 2. Add Harden-Runner as First Step
|
||||
|
||||
For **every job** on `ubuntu-latest`, add Harden-Runner as the first step:
|
||||
|
||||
```yaml
|
||||
- name: Harden the runner
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit # or 'block' for stricter security
|
||||
```
|
||||
|
||||
### 3. Pin Actions to Full Commit SHA
|
||||
|
||||
**Always** pin third-party actions to their full commit SHA, not tags:
|
||||
|
||||
```yaml
|
||||
# ❌ BAD - uses mutable tag
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
# ✅ GOOD - pinned to immutable commit SHA
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
```
|
||||
|
||||
### 4. Secure Variable Handling
|
||||
|
||||
Prevent command injection by properly quoting variables:
|
||||
|
||||
```yaml
|
||||
# ❌ BAD - potential command injection
|
||||
run: echo "Processing ${{ inputs.user_input }}"
|
||||
|
||||
# ✅ GOOD - properly quoted
|
||||
env:
|
||||
USER_INPUT: ${{ inputs.user_input }}
|
||||
run: echo "Processing ${USER_INPUT}"
|
||||
```
|
||||
|
||||
Use `${VARIABLE}` syntax in shell scripts instead of `$VARIABLE`.
|
||||
|
||||
### 5. Environment Variables for Secrets
|
||||
|
||||
Store sensitive data in environment variables, not inline:
|
||||
|
||||
```yaml
|
||||
# ❌ BAD
|
||||
run: curl -H "Authorization: Bearer ${{ secrets.TOKEN }}" api.example.com
|
||||
|
||||
# ✅ GOOD
|
||||
env:
|
||||
API_TOKEN: ${{ secrets.TOKEN }}
|
||||
run: curl -H "Authorization: Bearer ${API_TOKEN}" api.example.com
|
||||
```
|
||||
|
||||
## Workflow Structure Best Practices
|
||||
|
||||
### Required Workflow Elements
|
||||
|
||||
```yaml
|
||||
name: "Descriptive Workflow Name"
|
||||
|
||||
on:
|
||||
# Define specific triggers
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
# Always set explicit permissions
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
job-name:
|
||||
name: "Descriptive Job Name"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30 # tune per job; standardize repo-wide
|
||||
|
||||
# Set job-level permissions if different from workflow level
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
# Always start with Harden-Runner on ubuntu-latest
|
||||
- name: Harden the runner
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
# Pin all actions to commit SHA
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
```
|
||||
|
||||
### Input Validation for Actions
|
||||
|
||||
For composite actions, always validate inputs:
|
||||
|
||||
```yaml
|
||||
inputs:
|
||||
user_input:
|
||||
description: "User provided input"
|
||||
required: true
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Validate input
  shell: bash
  env:
    USER_INPUT: ${{ inputs.user_input }} # pass the input via env, not inline interpolation
  run: |
    # Harden shell and validate input format/content before use
    set -euo pipefail

    if [[ ! "${USER_INPUT}" =~ ^[A-Za-z0-9._-]+$ ]]; then
      echo "❌ Invalid input format"
      exit 1
    fi
|
||||
```
|
||||
|
||||
## Docker Security in Actions
|
||||
|
||||
### Pin Docker Images to Digests
|
||||
|
||||
```yaml
|
||||
# ❌ BAD - mutable tag
|
||||
container: node:18
|
||||
|
||||
# ✅ GOOD - pinned to digest
|
||||
container: node:18@sha256:a1ba21bf0c92931d02a8416f0a54daad66cb36a85d6a37b82dfe1604c4c09cad
|
||||
```
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Secure File Operations
|
||||
|
||||
```yaml
|
||||
- name: Process files securely
|
||||
shell: bash
|
||||
env:
|
||||
FILE_PATH: ${{ inputs.file_path }}
|
||||
run: |
|
||||
set -euo pipefail # Fail on errors, undefined vars, pipe failures
|
||||
|
||||
# Use absolute paths and validate
|
||||
SAFE_PATH=$(realpath "${FILE_PATH}")
|
||||
if [[ "$SAFE_PATH" != "${GITHUB_WORKSPACE}"/* ]]; then
|
||||
echo "❌ Path outside workspace"
|
||||
exit 1
|
||||
fi
|
||||
```
|
||||
|
||||
### Artifact Handling
|
||||
|
||||
```yaml
|
||||
- name: Upload artifacts securely
|
||||
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
|
||||
with:
|
||||
name: build-artifacts
|
||||
path: |
|
||||
dist/
|
||||
!dist/**/*.log # Exclude sensitive files
|
||||
retention-days: 30
|
||||
```
|
||||
|
||||
### GHCR authentication for pulls/scans
|
||||
|
||||
```yaml
|
||||
# Minimal permissions required for GHCR pulls/scans
|
||||
permissions:
|
||||
contents: read
|
||||
packages: read
|
||||
|
||||
steps:
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
```
|
||||
|
||||
## Security Checklist
|
||||
|
||||
- [ ] Minimum GITHUB_TOKEN permissions set
|
||||
- [ ] Harden-Runner added to all ubuntu-latest jobs
|
||||
- [ ] All third-party actions pinned to commit SHA
|
||||
- [ ] Input validation implemented for custom actions
|
||||
- [ ] Variables properly quoted in shell scripts
|
||||
- [ ] Secrets stored in environment variables
|
||||
- [ ] Docker images pinned to digests (if used)
|
||||
- [ ] Error handling with `set -euo pipefail`
|
||||
- [ ] File paths validated and sanitized
|
||||
- [ ] No sensitive data in logs or outputs
|
||||
- [ ] GHCR login performed before pulls/scans (packages: read)
|
||||
- [ ] Job timeouts configured (`timeout-minutes`)
|
||||
|
||||
## Recommended Additional Workflows
|
||||
|
||||
Consider adding these security-focused workflows to your repository:
|
||||
|
||||
1. **CodeQL Analysis** - Static Application Security Testing (SAST)
|
||||
2. **Dependency Review** - Scan for vulnerable dependencies in PRs
|
||||
3. **Dependabot Configuration** - Automated dependency updates
|
||||
|
||||
## Resources
|
||||
|
||||
- [GitHub Security Hardening Guide](https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions)
|
||||
- [Step Security Harden-Runner](https://github.com/step-security/harden-runner)
|
||||
- [Secure-Repo Best Practices](https://github.com/step-security/secure-repo)
|
||||
5
.cursor/rules/performance-optimization.mdc
Normal file
@@ -0,0 +1,5 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
52
.cursor/rules/react-context-patterns.mdc
Normal file
@@ -0,0 +1,52 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
# React Context & Provider Patterns
|
||||
|
||||
## Context Provider Best Practices
|
||||
|
||||
### Provider Implementation
|
||||
- Use TypeScript interfaces for provider props with optional `initialCount` for testing
|
||||
- Implement proper cleanup in `useEffect` to avoid React hooks warnings
|
||||
- Reference: [apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/components/ResponseCountProvider.tsx](mdc:apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/components/ResponseCountProvider.tsx)
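
A minimal sketch of such a props interface (names follow the referenced provider, but the exact shape is an assumption):

```typescript
import React from "react";
import { TSurvey } from "@formbricks/types/surveys/types";

// `initialCount` is optional so tests can start from a predictable value
interface ResponseCountProviderProps {
  survey: TSurvey;
  initialCount?: number;
  children: React.ReactNode;
}
```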
|
||||
|
||||
### Cleanup Pattern for Refs
|
||||
```typescript
|
||||
useEffect(() => {
|
||||
const currentPendingRequests = pendingRequests.current;
|
||||
const currentAbortController = abortController.current;
|
||||
|
||||
return () => {
|
||||
if (currentAbortController) {
|
||||
currentAbortController.abort();
|
||||
}
|
||||
currentPendingRequests.clear();
|
||||
};
|
||||
}, []);
|
||||
```
|
||||
|
||||
### Testing Context Providers
|
||||
- Always wrap components using context in the provider during tests
|
||||
- Use `initialCount` prop for predictable test scenarios
|
||||
- Mock context dependencies like `useParams`, `useResponseFilter`
|
||||
- Example from [apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/summary/components/SurveyAnalysisCTA.test.tsx](mdc:apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/summary/components/SurveyAnalysisCTA.test.tsx):
|
||||
|
||||
```typescript
|
||||
render(
|
||||
<ResponseCountProvider survey={dummySurvey} initialCount={5}>
|
||||
<ComponentUnderTest />
|
||||
</ResponseCountProvider>
|
||||
);
|
||||
```
|
||||
|
||||
### Required Mocks for Context Testing
|
||||
- Mock `next/navigation` with `useParams` returning environment and survey IDs
|
||||
- Mock response filter context and actions
|
||||
- Mock API actions that the provider depends on
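
A minimal sketch of those mocks with vitest (paths, IDs, and the action name are illustrative):

```typescript
import { vi } from "vitest";

// Route params the provider reads via next/navigation
vi.mock("next/navigation", () => ({
  useParams: () => ({ environmentId: "env-1", surveyId: "survey-1" }),
}));

// API action the provider depends on
vi.mock("@/path/to/response-count-action", () => ({
  getResponseCountAction: vi.fn().mockResolvedValue({ data: 0 }),
}));
```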
|
||||
|
||||
### Context Hook Usage
|
||||
- Create custom hooks like `useResponseCountContext()` for consuming context
|
||||
- Provide meaningful error messages when context is used outside provider
|
||||
- Use context for shared state that multiple components need to access
|
||||
5
.cursor/rules/react-context-providers.mdc
Normal file
@@ -0,0 +1,5 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
216
.cursor/rules/storybook-component-migration.mdc
Normal file
@@ -0,0 +1,216 @@
|
||||
---
|
||||
description: Migrate deprecated UI components to a unified component
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
# Component Migration Automation Rule
|
||||
|
||||
## Overview
|
||||
This rule automates the migration of deprecated components to new component systems in React/TypeScript codebases.
|
||||
|
||||
## Trigger
|
||||
When the user requests component migration (e.g., "migrate [DeprecatedComponent] to [NewComponent]" or "component migration").
|
||||
|
||||
## Process
|
||||
|
||||
### Step 1: Discovery and Planning
|
||||
1. **Identify migration parameters:**
|
||||
- Ask user for deprecated component name (e.g., "Modal")
|
||||
- Ask user for new component name(s) (e.g., "Dialog")
|
||||
- Ask for any components to exclude (e.g., "ModalWithTabs")
|
||||
- Ask for specific import paths if needed
|
||||
|
||||
2. **Scan codebase** for deprecated components:
|
||||
- Search for `import.*[DeprecatedComponent]` patterns
|
||||
- Exclude specified components that should not be migrated
|
||||
- List all found components with file paths
|
||||
- Present numbered list to user for confirmation
|
||||
|
||||
### Step 2: Component-by-Component Migration
|
||||
For each component, follow this exact sequence:
|
||||
|
||||
#### 2.1 Component Migration
|
||||
- **Import changes:**
|
||||
- Ask user to provide the new import structure
|
||||
- Example transformation pattern:
|
||||
```typescript
|
||||
// FROM:
|
||||
import { [DeprecatedComponent] } from "@/components/ui/[DeprecatedComponent]"
|
||||
|
||||
// TO:
|
||||
import {
|
||||
[NewComponent],
|
||||
[NewComponentPart1],
|
||||
[NewComponentPart2],
|
||||
// ... other parts
|
||||
} from "@/components/ui/[NewComponent]"
|
||||
```
|
||||
|
||||
- **Props transformation:**
|
||||
- Ask user for prop mapping rules (e.g., `open` → `open`, `setOpen` → `onOpenChange`)
|
||||
- Ask for props to remove (e.g., `noPadding`, `closeOnOutsideClick`, `size`)
|
||||
- Apply transformations based on user specifications
|
||||
|
||||
- **Structure transformation:**
|
||||
- Ask user for the new component structure pattern
|
||||
- Apply the transformation maintaining all functionality
|
||||
- Preserve all existing logic, state management, and event handlers
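
For instance, with the Modal → Dialog example above, the agreed mapping might produce a transformation like this (component and part names are illustrative):

```typescript
// Before: deprecated Modal
<Modal open={open} setOpen={setOpen} noPadding>
  <EditSurveyForm />
</Modal>

// After: Dialog with the mapped props and the new part structure
<Dialog open={open} onOpenChange={setOpen}>
  <DialogContent>
    <EditSurveyForm />
  </DialogContent>
</Dialog>
```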
|
||||
|
||||
#### 2.2 Wait for User Approval
|
||||
- Present the migration changes
|
||||
- Wait for explicit user approval before proceeding
|
||||
- If rejected, ask for specific feedback and iterate
|
||||
#### 2.3 Re-read and Apply Additional Changes
|
||||
- Re-read the component file to capture any user modifications
|
||||
- Apply any additional improvements the user made
|
||||
- Ensure all changes are incorporated
|
||||
|
||||
#### 2.4 Test File Updates
|
||||
- **Find corresponding test file** (same name with `.test.tsx` or `.test.ts`)
|
||||
- **Update test mocks:**
|
||||
- Ask user for new component mock structure
|
||||
- Replace old component mocks with new ones
|
||||
- Example pattern:
|
||||
```typescript
|
||||
// Add to test setup:
|
||||
jest.mock("@/components/ui/[NewComponent]", () => ({
|
||||
[NewComponent]: ({ children, [props] }: any) => ([mock implementation]),
|
||||
[NewComponentPart1]: ({ children }: any) => <div data-testid="[new-component-part1]">{children}</div>,
|
||||
[NewComponentPart2]: ({ children }: any) => <div data-testid="[new-component-part2]">{children}</div>,
|
||||
// ... other parts
|
||||
}));
|
||||
```
|
||||
- **Update test expectations:**
|
||||
- Change test IDs from old component to new component
|
||||
- Update any component-specific assertions
|
||||
- Ensure all new component parts used in the component are mocked
|
||||
|
||||
#### 2.5 Run Tests and Optimize
|
||||
- Execute `npm test -- ComponentName.test.tsx`
|
||||
- Fix any failing tests
|
||||
- Optimize code quality (imports, formatting, etc.)
|
||||
- Re-run tests until all pass
|
||||
- **Maximum 3 iterations** - if still failing, ask user for guidance
|
||||
|
||||
#### 2.6 Wait for Final Approval
|
||||
- Present test results and any optimizations made
|
||||
- Wait for user approval of the complete migration
|
||||
- If rejected, iterate based on feedback
|
||||
|
||||
#### 2.7 Git Commit
|
||||
- Run: `git add .`
|
||||
- Run: `git commit -m "migrate [ComponentName] from [DeprecatedComponent] to [NewComponent]"`
|
||||
- Confirm commit was successful
|
||||
|
||||
### Step 3: Final Report Generation
|
||||
After all components are migrated, generate a comprehensive GitHub PR report:
|
||||
|
||||
#### PR Title
|
||||
```
|
||||
feat: migrate [DeprecatedComponent] components to [NewComponent] system
|
||||
```
|
||||
|
||||
#### PR Description Template
|
||||
```markdown
|
||||
## 🔄 [DeprecatedComponent] to [NewComponent] Migration
|
||||
|
||||
### Overview
|
||||
Migrated [X] [DeprecatedComponent] components to the new [NewComponent] component system to modernize the UI architecture and improve consistency.
|
||||
|
||||
### Components Migrated
|
||||
[List each component with file path]
|
||||
|
||||
### Technical Changes
|
||||
- **Imports:** Replaced `[DeprecatedComponent]` with `[NewComponent], [NewComponentParts...]`
|
||||
- **Props:** [List prop transformations]
|
||||
- **Structure:** Implemented proper [NewComponent] component hierarchy
|
||||
- **Styling:** [Describe styling changes]
|
||||
- **Tests:** Updated all test mocks and expectations
|
||||
|
||||
### Migration Pattern
|
||||
```typescript
|
||||
// Before
|
||||
<[DeprecatedComponent] [oldProps]>
|
||||
[oldStructure]
|
||||
</[DeprecatedComponent]>
|
||||
|
||||
// After
|
||||
<[NewComponent] [newProps]>
|
||||
[newStructure]
|
||||
</[NewComponent]>
|
||||
```
|
||||
|
||||
### Testing
|
||||
- ✅ All existing tests updated and passing
|
||||
- ✅ Component functionality preserved
|
||||
- ✅ UI/UX behavior maintained
|
||||
|
||||
### How to Test This PR
|
||||
1. **Functional Testing:**
|
||||
- Navigate to each migrated component's usage
|
||||
- Verify [component] opens and closes correctly
|
||||
- Test all interactive elements within [components]
|
||||
- Confirm styling and layout are preserved
|
||||
|
||||
2. **Automated Testing:**
|
||||
```bash
|
||||
npm test
|
||||
```
|
||||
|
||||
3. **Visual Testing:**
|
||||
- Check that all [components] maintain proper styling
|
||||
- Verify responsive behavior
|
||||
- Test keyboard navigation and accessibility
|
||||
|
||||
### Breaking Changes
|
||||
[List any breaking changes or state "None - this is a drop-in replacement maintaining all existing functionality."]
|
||||
|
||||
### Notes
|
||||
- [Any excluded components] were preserved as they already use [NewComponent] internally
|
||||
- All form validation and complex state management preserved
|
||||
- Enhanced code quality with better imports and formatting
|
||||
```
|
||||
|
||||
## Special Considerations
|
||||
|
||||
### Excluded Components
|
||||
- **DO NOT MIGRATE** components specified by user as exclusions
|
||||
- They may already use the new component internally or have other reasons
|
||||
- Inform user these are skipped and why
|
||||
|
||||
### Complex Components
|
||||
- Preserve all existing functionality (forms, validation, state management)
|
||||
- Maintain prop interfaces
|
||||
- Keep all event handlers and callbacks
|
||||
- Preserve accessibility features
|
||||
|
||||
### Test Coverage
|
||||
- Ensure all new component parts are mocked when used
|
||||
- Mock all new component parts that appear in the component
|
||||
- Update test IDs from old component to new component
|
||||
- Maintain all existing test scenarios
|
||||
|
||||
### Error Handling
|
||||
- If tests fail after 3 iterations, stop and ask user for guidance
|
||||
- If component is too complex, ask user for specific guidance
|
||||
- If unsure about functionality preservation, ask for clarification
|
||||
|
||||
### Migration Patterns
|
||||
- Always ask user for specific migration patterns before starting
|
||||
- Confirm import structures, prop mappings, and component hierarchies
|
||||
- Adapt to different component architectures (simple replacements, complex restructuring, etc.)
|
||||
|
||||
## Success Criteria
|
||||
- All deprecated components successfully migrated to new components
|
||||
- All tests passing
|
||||
- No functionality lost
|
||||
- Code quality maintained or improved
|
||||
- User approval on each component
|
||||
- Successful git commits for each migration
|
||||
- Comprehensive PR report generated
|
||||
|
||||
## Usage Examples
|
||||
- "migrate Modal to Dialog"
|
||||
- "migrate Button to NewButton"
|
||||
- "migrate Card to ModernCard"
|
||||
- "component migration" (will prompt for details)
|
||||
177
.cursor/rules/storybook-create-new-story.mdc
Normal file
@@ -0,0 +1,177 @@
|
||||
---
|
||||
description: Create a story in Storybook for a given component
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
|
||||
# Formbricks Storybook Stories
|
||||
|
||||
## When generating Storybook stories for Formbricks components:
|
||||
|
||||
### 1. **File Structure**
|
||||
- Create `stories.tsx` (not `.stories.tsx`) in component directory
|
||||
- Use exact import: `import { Meta, StoryObj } from "@storybook/react-vite";`
|
||||
- Import component from `"./index"`
|
||||
|
||||
### 2. **Story Structure Template**
|
||||
```tsx
|
||||
import { Meta, StoryObj } from "@storybook/react-vite";
|
||||
import { ComponentName } from "./index";
|
||||
|
||||
// For complex components with configurable options,
// treat this as an example; the options must mirror the component's prop types
|
||||
interface StoryOptions {
|
||||
showIcon: boolean;
|
||||
numberOfElements: number;
|
||||
customLabels: string[];
|
||||
}
|
||||
|
||||
type StoryProps = React.ComponentProps<typeof ComponentName> & StoryOptions;
|
||||
|
||||
const meta: Meta<StoryProps> = {
|
||||
title: "UI/ComponentName",
|
||||
component: ComponentName,
|
||||
tags: ["autodocs"],
|
||||
parameters: {
|
||||
layout: "centered",
|
||||
controls: { sort: "alpha", exclude: [] },
|
||||
docs: {
|
||||
description: {
|
||||
component: "The **ComponentName** component provides [description].",
|
||||
},
|
||||
},
|
||||
},
|
||||
argTypes: {
|
||||
// Organize in exactly these categories: Behavior, Appearance, Content
|
||||
},
|
||||
};
|
||||
|
||||
export default meta;
|
||||
type Story = StoryObj<typeof ComponentName> & { args: StoryOptions };
|
||||
```
|
||||
|
||||
### 3. **ArgTypes Organization**
|
||||
Organize ALL argTypes into exactly three categories:
|
||||
- **Behavior**: disabled, variant, onChange, etc.
|
||||
- **Appearance**: size, color, layout, styling, etc.
|
||||
- **Content**: text, icons, numberOfElements, etc.
|
||||
|
||||
Format:
|
||||
```tsx
|
||||
argTypes: {
|
||||
propName: {
|
||||
control: "select" | "boolean" | "text" | "number",
|
||||
options: ["option1", "option2"], // for select
|
||||
description: "Clear description",
|
||||
table: {
|
||||
category: "Behavior" | "Appearance" | "Content",
|
||||
type: { summary: "string" },
|
||||
defaultValue: { summary: "default" },
|
||||
},
|
||||
order: 1,
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### 4. **Required Stories**
|
||||
Every component must include:
|
||||
- `Default`: Most common use case
|
||||
- `Disabled`: If component supports disabled state
|
||||
- `WithIcon`: If component supports icons
|
||||
- Variant stories for each variant (Primary, Secondary, Error, etc.)
|
||||
- Edge case stories (ManyElements, LongText, CustomStyling)
|
||||
|
||||
### 5. **Story Format**
|
||||
```tsx
|
||||
export const Default: Story = {
|
||||
args: {
|
||||
// Props with realistic values
|
||||
},
|
||||
};
|
||||
|
||||
export const EdgeCase: Story = {
|
||||
args: { /* ... */ },
|
||||
parameters: {
|
||||
docs: {
|
||||
description: {
|
||||
story: "Use this when [specific scenario].",
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
### 6. **Dynamic Content Pattern**
|
||||
For components with dynamic content, create render function:
|
||||
```tsx
|
||||
const renderComponent = (args: StoryProps) => {
|
||||
const { numberOfElements, showIcon, customLabels } = args;
|
||||
|
||||
// Generate dynamic content
|
||||
const elements = Array.from({ length: numberOfElements }, (_, i) => ({
|
||||
id: `element-${i}`,
|
||||
label: customLabels[i] || `Element ${i + 1}`,
|
||||
icon: showIcon ? <IconComponent /> : undefined,
|
||||
}));
|
||||
|
||||
return <ComponentName {...args} elements={elements} />;
|
||||
};
|
||||
|
||||
export const Dynamic: Story = {
|
||||
render: renderComponent,
|
||||
args: {
|
||||
numberOfElements: 3,
|
||||
showIcon: true,
|
||||
customLabels: ["First", "Second", "Third"],
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
### 7. **State Management**
|
||||
For interactive components:
|
||||
```tsx
|
||||
import { useState } from "react";
|
||||
|
||||
const ComponentWithState = (args: any) => {
|
||||
const [value, setValue] = useState(args.defaultValue);
|
||||
|
||||
return (
|
||||
<ComponentName
|
||||
{...args}
|
||||
value={value}
|
||||
onChange={(newValue) => {
|
||||
setValue(newValue);
|
||||
args.onChange?.(newValue);
|
||||
}}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
export const Interactive: Story = {
|
||||
render: ComponentWithState,
|
||||
args: { defaultValue: "initial" },
|
||||
};
|
||||
```
|
||||
|
||||
### 8. **Quality Requirements**
|
||||
- Include component description in parameters.docs
|
||||
- Add story documentation for non-obvious use cases
|
||||
- Test edge cases (overflow, empty states, many elements)
|
||||
- Ensure no TypeScript errors
|
||||
- Use realistic prop values
|
||||
- Include at least 3-5 story variants
|
||||
- Example values need to be in the context of survey application
|
||||
|
||||
### 9. **Naming Conventions**
|
||||
- **Story titles**: "UI/ComponentName"
|
||||
- **Story exports**: PascalCase (Default, WithIcon, ManyElements)
|
||||
- **Categories**: "Behavior", "Appearance", "Content" (exact spelling)
|
||||
- **Props**: camelCase matching component props
|
||||
|
||||
### 10. **Special Cases**
|
||||
- **Generic components**: Remove `component` from meta if type conflicts
|
||||
- **Form components**: Include Invalid, WithValue stories
|
||||
- **Navigation**: Include ManyItems stories
|
||||
- **Modals, Dropdowns and Popups**: Include trigger and content structure
|
||||
|
||||
## Generate stories that are comprehensive, well-documented, and reflect all component states and edge cases.
|
||||
322
.cursor/rules/testing-patterns.mdc
Normal file
@@ -0,0 +1,322 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
# Testing Patterns & Best Practices
|
||||
|
||||
## Running Tests
|
||||
|
||||
### Test Commands
|
||||
From the **root directory** (formbricks/):
|
||||
- `npm test` - Run all tests across all packages (recommended for CI/full testing)
|
||||
- `npm run test:coverage` - Run all tests with coverage reports
|
||||
- `npm run test:e2e` - Run end-to-end tests with Playwright
|
||||
|
||||
From the **apps/web directory** (apps/web/):
|
||||
- `npm run test` - Run only web app tests (fastest for development)
|
||||
- `npm run test:coverage` - Run web app tests with coverage
|
||||
- `npm run test -- <file-pattern>` - Run specific test files
|
||||
|
||||
### Examples
|
||||
```bash
|
||||
# Run all tests from root (takes ~3 minutes, runs 790 test files with 5334+ tests)
|
||||
npm test
|
||||
|
||||
# Run specific test file from apps/web (fastest for development)
|
||||
npm run test -- modules/cache/lib/service.test.ts
|
||||
|
||||
# Run tests matching pattern from apps/web
|
||||
npm run test -- modules/ee/license-check/lib/license.test.ts
|
||||
|
||||
# Run with coverage from root
|
||||
npm run test:coverage
|
||||
|
||||
# Run specific test with watch mode from apps/web (for development)
|
||||
npm run test -- --watch modules/cache/lib/service.test.ts
|
||||
|
||||
# Run tests for a specific directory from apps/web
|
||||
npm run test -- modules/cache/
|
||||
```
|
||||
|
||||
### Performance Tips
|
||||
- **For development**: Use `apps/web` directory commands to run only web app tests
|
||||
- **For CI/validation**: Use root directory commands to run all packages
|
||||
- **For specific features**: Use file patterns to target specific test files
|
||||
- **For debugging**: Use `--watch` mode for continuous testing during development
|
||||
|
||||
### Test File Organization
|
||||
- Place test files in the **same directory** as the source file
|
||||
- Use `.test.ts` for utility/service tests (Node environment)
|
||||
- Use `.test.tsx` for React component tests (jsdom environment)
|
||||
|
||||
## Test File Naming & Environment
|
||||
|
||||
### File Extensions
|
||||
- Use `.test.tsx` for React component/hook tests (runs in jsdom environment)
|
||||
- Use `.test.ts` for utility/service tests (runs in Node environment)
|
||||
- The vitest config uses `environmentMatchGlobs` to automatically set jsdom for `.tsx` files
|
||||
|
||||
### Test Structure
|
||||
```typescript
|
||||
// Import the mocked functions first
|
||||
import { useHook } from "@/path/to/hook";
|
||||
import { serviceFunction } from "@/path/to/service";
|
||||
import { renderHook, waitFor } from "@testing-library/react";
|
||||
import { beforeEach, describe, expect, test, vi } from "vitest";
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock("@/path/to/hook", () => ({
|
||||
useHook: vi.fn(),
|
||||
}));
|
||||
|
||||
describe("ComponentName", () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
// Setup default mocks
|
||||
});
|
||||
|
||||
test("descriptive test name", async () => {
|
||||
// Test implementation
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## React Hook Testing
|
||||
|
||||
### Context Mocking
|
||||
When testing hooks that use React Context:
|
||||
```typescript
|
||||
vi.mocked(useResponseFilter).mockReturnValue({
|
||||
selectedFilter: {
|
||||
filter: [],
|
||||
responseStatus: "all",
|
||||
},
|
||||
setSelectedFilter: vi.fn(),
|
||||
selectedOptions: {
|
||||
questionOptions: [],
|
||||
questionFilterOptions: [],
|
||||
},
|
||||
setSelectedOptions: vi.fn(),
|
||||
dateRange: { from: new Date(), to: new Date() },
|
||||
setDateRange: vi.fn(),
|
||||
resetState: vi.fn(),
|
||||
});
|
||||
```
|
||||
|
||||
### Testing Async Hooks
|
||||
- Always use `waitFor` for async operations
|
||||
- Test both loading and completed states
|
||||
- Verify API calls with correct parameters
|
||||
|
||||
```typescript
|
||||
test("fetches data on mount", async () => {
|
||||
const { result } = renderHook(() => useHook());
|
||||
|
||||
expect(result.current.isLoading).toBe(true);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(result.current.data).toBe(expectedData);
|
||||
expect(vi.mocked(apiCall)).toHaveBeenCalledWith(expectedParams);
|
||||
});
|
||||
```
|
||||
|
||||
### Testing Hook Dependencies
|
||||
To test useEffect dependencies, ensure mocks return different values:
|
||||
```typescript
|
||||
// First render
|
||||
mockGetFormattedFilters.mockReturnValue(mockFilters);
|
||||
|
||||
// Change dependency and trigger re-render
|
||||
const newMockFilters = { ...mockFilters, finished: true };
|
||||
mockGetFormattedFilters.mockReturnValue(newMockFilters);
|
||||
rerender();
|
||||
```
|
||||
|
||||
## Performance Testing
|
||||
|
||||
### Race Condition Testing
|
||||
Test AbortController implementation:
|
||||
```typescript
|
||||
test("cancels previous request when new request is made", async () => {
|
||||
let resolveFirst: (value: any) => void;
|
||||
let resolveSecond: (value: any) => void;
|
||||
|
||||
const firstPromise = new Promise((resolve) => {
|
||||
resolveFirst = resolve;
|
||||
});
|
||||
const secondPromise = new Promise((resolve) => {
|
||||
resolveSecond = resolve;
|
||||
});
|
||||
|
||||
vi.mocked(apiCall)
|
||||
.mockReturnValueOnce(firstPromise as any)
|
||||
.mockReturnValueOnce(secondPromise as any);
|
||||
|
||||
const { result } = renderHook(() => useHook());
|
||||
|
||||
// Trigger second request
|
||||
result.current.refetch();
|
||||
|
||||
// Resolve in order - first should be cancelled
|
||||
resolveFirst!({ data: 100 });
|
||||
resolveSecond!({ data: 200 });
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
// Should have result from second request
|
||||
expect(result.current.data).toBe(200);
|
||||
});
|
||||
```
|
||||
|
||||
### Cleanup Testing
|
||||
```typescript
|
||||
test("cleans up on unmount", () => {
|
||||
const abortSpy = vi.spyOn(AbortController.prototype, "abort");
|
||||
|
||||
const { unmount } = renderHook(() => useHook());
|
||||
unmount();
|
||||
|
||||
expect(abortSpy).toHaveBeenCalled();
|
||||
abortSpy.mockRestore();
|
||||
});
|
||||
```
|
||||
|
||||
## Error Handling Testing
|
||||
|
||||
### API Error Testing
|
||||
```typescript
|
||||
test("handles API errors gracefully", async () => {
|
||||
const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});
|
||||
vi.mocked(apiCall).mockRejectedValue(new Error("API Error"));
|
||||
|
||||
const { result } = renderHook(() => useHook());
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(consoleSpy).toHaveBeenCalledWith("Error message:", expect.any(Error));
|
||||
expect(result.current.data).toBe(fallbackValue);
|
||||
|
||||
consoleSpy.mockRestore();
|
||||
});
|
||||
```
|
||||
|
||||
### Cancelled Request Testing
|
||||
```typescript
|
||||
test("does not update state for cancelled requests", async () => {
|
||||
const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});
|
||||
|
||||
let rejectFirst: (error: any) => void;
|
||||
const firstPromise = new Promise((_, reject) => {
|
||||
rejectFirst = reject;
|
||||
});
|
||||
|
||||
vi.mocked(apiCall)
|
||||
.mockReturnValueOnce(firstPromise as any)
|
||||
.mockResolvedValueOnce({ data: 42 });
|
||||
|
||||
const { result } = renderHook(() => useHook());
|
||||
result.current.refetch();
|
||||
|
||||
const abortError = new Error("Request cancelled");
|
||||
rejectFirst!(abortError);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
// Should not log error for cancelled request
|
||||
expect(consoleSpy).not.toHaveBeenCalled();
|
||||
consoleSpy.mockRestore();
|
||||
});
|
||||
```
|
||||
|
||||
## Type Safety in Tests
|
||||
|
||||
### Mock Type Assertions
|
||||
Use type assertions for edge cases:
|
||||
```typescript
|
||||
vi.mocked(apiCall).mockResolvedValue({
|
||||
data: null as any, // For testing null handling
|
||||
});
|
||||
|
||||
vi.mocked(apiCall).mockResolvedValue({
|
||||
data: undefined as any, // For testing undefined handling
|
||||
});
|
||||
```
|
||||
|
||||
### Proper Mock Typing
|
||||
Ensure mocks match the actual interface:
|
||||
```typescript
|
||||
const mockSurvey: TSurvey = {
|
||||
id: "survey-123",
|
||||
name: "Test Survey",
|
||||
// ... other required properties
|
||||
} as unknown as TSurvey; // Use when partial mocking is needed
|
||||
```
|
||||
|
||||
## Common Test Patterns
|
||||
|
||||
### Testing State Changes
|
||||
```typescript
|
||||
test("updates state correctly", async () => {
|
||||
const { result } = renderHook(() => useHook());
|
||||
|
||||
// Initial state
|
||||
expect(result.current.value).toBe(initialValue);
|
||||
|
||||
// Trigger change
|
||||
result.current.updateValue(newValue);
|
||||
|
||||
// Verify change
|
||||
expect(result.current.value).toBe(newValue);
|
||||
});
|
||||
```
|
||||
|
||||
### Testing Multiple Scenarios
|
||||
```typescript
|
||||
test("handles different modes", async () => {
|
||||
// Test regular mode
|
||||
vi.mocked(useParams).mockReturnValue({ surveyId: "123" });
|
||||
const { rerender } = renderHook(() => useHook());
|
||||
|
||||
await waitFor(() => {
|
||||
expect(vi.mocked(regularApi)).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
rerender();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(vi.mocked(sharingApi)).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Test Organization
|
||||
|
||||
### Comprehensive Test Coverage
|
||||
For hooks, ensure you test:
|
||||
- ✅ Initialization (with/without initial values)
|
||||
- ✅ Data fetching (success/error cases)
|
||||
- ✅ State updates and refetching
|
||||
- ✅ Dependency changes triggering effects
|
||||
- ✅ Manual actions (refetch, reset)
|
||||
- ✅ Race condition prevention
|
||||
- ✅ Cleanup on unmount
|
||||
- ✅ Mode switching (if applicable)
|
||||
- ✅ Edge cases (null/undefined data)
|
||||
|
||||
### Test Naming
|
||||
Use descriptive test names that explain the scenario:
|
||||
- ✅ "initializes with initial count"
|
||||
- ✅ "fetches response count on mount for regular survey"
|
||||
- ✅ "cancels previous request when new request is made"
|
||||
- ❌ "test hook"
|
||||
- ❌ "it works"
|
||||
7
.cursor/rules/testing.mdc
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
description: Whenever the user asks to write or update a test file for .tsx or .ts files.
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
When writing tests, follow the rules in [copilot-instructions.md](mdc:.github/copilot-instructions.md).
|
||||
After writing the tests, run them and check if there's any issue with the tests and if all of them are passing. Fix the issues and rerun the tests until all pass.
|
||||
@@ -1,16 +0,0 @@
|
||||
# [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
|
||||
ARG VARIANT=20
|
||||
FROM mcr.microsoft.com/vscode/devcontainers/javascript-node:0-${VARIANT}
|
||||
|
||||
# [Optional] Uncomment this section to install additional OS packages.
|
||||
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
|
||||
# && apt-get -y install --no-install-recommends <your-package-list-here>
|
||||
|
||||
# [Optional] Uncomment if you want to install an additional version of node using nvm
|
||||
# ARG EXTRA_NODE_VERSION=10
|
||||
# RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}"
|
||||
|
||||
# [Optional] Uncomment if you want to install more global node modules
|
||||
# RUN su node -c "npm install -g <your-package-list-here>"
|
||||
|
||||
RUN su node -c "npm install -g pnpm"
|
||||
@@ -1,28 +1,6 @@
|
||||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
|
||||
// https://github.com/microsoft/vscode-dev-containers/tree/v0.245.2/containers/javascript-node-postgres
|
||||
// Update the VARIANT arg in docker-compose.yml to pick a Node.js version
|
||||
{
|
||||
// Configure tool-specific properties.
|
||||
"customizations": {
|
||||
// Configure properties specific to VS Code.
|
||||
"vscode": {
|
||||
// Add the IDs of extensions you want installed when the container is created.
|
||||
"extensions": ["dbaeumer.vscode-eslint"]
|
||||
}
|
||||
},
|
||||
|
||||
"dockerComposeFile": "docker-compose.yml",
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
// This can be used to network with other containers or with the host.
|
||||
"forwardPorts": [3000, 5432, 8025],
|
||||
|
||||
"name": "Node.js & PostgreSQL",
|
||||
"postAttachCommand": "pnpm dev --filter=@formbricks/web... --filter=@formbricks/demo...",
|
||||
|
||||
// Use 'postCreateCommand' to run commands after the container is created.
|
||||
"postCreateCommand": "cp .env.example .env && sed -i '/^ENCRYPTION_KEY=/c\\ENCRYPTION_KEY='$(openssl rand -hex 32) .env && sed -i '/^NEXTAUTH_SECRET=/c\\NEXTAUTH_SECRET='$(openssl rand -hex 32) .env && sed -i '/^CRON_SECRET=/c\\CRON_SECRET='$(openssl rand -hex 32) .env && pnpm install && pnpm db:migrate:dev",
|
||||
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
|
||||
"remoteUser": "node",
|
||||
"service": "app",
|
||||
"workspaceFolder": "/workspace"
|
||||
"features": {},
|
||||
"image": "mcr.microsoft.com/devcontainers/universal:2",
|
||||
"postAttachCommand": "pnpm go",
|
||||
"postCreateCommand": "cp .env.example .env && sed -i '/^ENCRYPTION_KEY=/c\\ENCRYPTION_KEY='$(openssl rand -hex 32) .env && sed -i '/^NEXTAUTH_SECRET=/c\\NEXTAUTH_SECRET='$(openssl rand -hex 32) .env && sed -i '/^CRON_SECRET=/c\\CRON_SECRET='$(openssl rand -hex 32) .env && pnpm install && pnpm db:migrate:dev"
|
||||
}
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
app:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
args:
|
||||
# Update 'VARIANT' to pick an LTS version of Node.js: 20, 18, 16, 14.
|
||||
# Append -bullseye or -buster to pin to an OS version.
|
||||
# Use -bullseye variants on local arm64/Apple Silicon.
|
||||
VARIANT: "20"
|
||||
|
||||
volumes:
|
||||
- ..:/workspace:cached
|
||||
|
||||
# Overrides default command so things don't shut down after the process ends.
|
||||
command: sleep infinity
|
||||
|
||||
# Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function.
|
||||
network_mode: service:db
|
||||
# Uncomment the next line to use a non-root user for all processes.
|
||||
# user: node
|
||||
|
||||
# Use "forwardPorts" in **devcontainer.json** to forward an app port locally.
|
||||
# (Adding the "ports" property to this file will not forward from a Codespace.)
|
||||
|
||||
db:
|
||||
image: pgvector/pgvector:pg17
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- postgres-data:/var/lib/postgresql/data
|
||||
environment:
|
||||
POSTGRES_PASSWORD: postgres
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_DB: formbricks
|
||||
# Add "forwardPorts": ["5432"] to **devcontainer.json** to forward PostgreSQL locally.
|
||||
# (Adding the "ports" property to this file will not forward from a Codespace.)
|
||||
|
||||
mailhog:
|
||||
image: mailhog/mailhog
|
||||
network_mode: service:app
|
||||
logging:
|
||||
driver:
|
||||
"none" # disable saving logs
|
||||
# ports:
|
||||
# - 8025:8025 # web ui
|
||||
# 1025:1025 # smtp server
|
||||
|
||||
volumes:
|
||||
postgres-data: null
|
||||
@@ -1,39 +1,56 @@
|
||||
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
|
||||
|
||||
# dependencies
|
||||
# **/node_modules
|
||||
**/node_modules
|
||||
.pnp
|
||||
.pnp.js
|
||||
.pnpm-store/
|
||||
|
||||
# testing
|
||||
coverage
|
||||
**/coverage
|
||||
|
||||
# next.js
|
||||
**/.next
|
||||
**/out
|
||||
**/.next/
|
||||
**/out/
|
||||
**/build
|
||||
|
||||
# node
|
||||
**/dist
|
||||
**/dist/
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
**/.DS_Store
|
||||
*.pem
|
||||
Zone.Identifier
|
||||
|
||||
# debug
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
|
||||
# turbo
|
||||
.turbo
|
||||
# local env files
|
||||
**/.env
|
||||
**/.env.local
|
||||
**/.env.development.local
|
||||
**/.env.test.local
|
||||
**/.env.production.local
|
||||
!packages/database/.env
|
||||
!apps/web/.env
|
||||
|
||||
# nixos stuff
|
||||
# build tools
|
||||
.turbo
|
||||
**/*vite.config.*.timestamp-*
|
||||
|
||||
# environment specific
|
||||
.direnv
|
||||
|
||||
.vscode
|
||||
.github
|
||||
**/.turbo
|
||||
# Playwright
|
||||
/test-results/
|
||||
/playwright-report/
|
||||
/blob-report/
|
||||
/playwright/.cache/
|
||||
|
||||
.env
|
||||
# project specific
|
||||
packages/lib/uploads
|
||||
apps/web/public/js
|
||||
packages/database/migrations
|
||||
branch.json
|
||||
78
.env.example
@@ -25,6 +25,9 @@ NEXTAUTH_SECRET=
|
||||
# You can use: `openssl rand -hex 32` to generate a secure one
|
||||
CRON_SECRET=
|
||||
|
||||
# Set the minimum log level(debug, info, warn, error, fatal)
|
||||
LOG_LEVEL=info
|
||||
|
||||
##############
|
||||
# DATABASE #
|
||||
##############
|
||||
@@ -39,6 +42,7 @@ DATABASE_URL='postgresql://postgres:postgres@localhost:5432/formbricks?schema=pu
|
||||
# See optional configurations below if you want to disable these features.
|
||||
|
||||
MAIL_FROM=noreply@example.com
|
||||
MAIL_FROM_NAME=Formbricks
|
||||
SMTP_HOST=localhost
|
||||
SMTP_PORT=1025
|
||||
# Enable SMTP_SECURE_ENABLED for TLS (port 465)
|
||||
@@ -46,6 +50,9 @@ SMTP_SECURE_ENABLED=0
|
||||
SMTP_USER=smtpUser
|
||||
SMTP_PASSWORD=smtpPassword
|
||||
|
||||
# If set to 0, the server will not require SMTP_USER and SMTP_PASSWORD(default is 1)
|
||||
# SMTP_AUTHENTICATED=
|
||||
|
||||
# If set to 0, the server will accept connections without requiring authorization from the list of supplied CAs (default is 1).
|
||||
# SMTP_REJECT_UNAUTHORIZED_TLS=0
|
||||
|
||||
@@ -55,9 +62,6 @@ SMTP_PASSWORD=smtpPassword
|
||||
|
||||
# Uncomment the variables you would like to use and customize the values.
|
||||
|
||||
# Custom local storage path for file uploads
|
||||
#UPLOADS_DIR=
|
||||
|
||||
##############
|
||||
# S3 STORAGE #
|
||||
##############
|
||||
@@ -73,6 +77,9 @@ S3_ENDPOINT_URL=
|
||||
# Force path style for S3 compatible storage (0 for disabled, 1 for enabled)
|
||||
S3_FORCE_PATH_STYLE=0
|
||||
|
||||
# Set this URL to add a public domain for all your client facing routes(default is WEBAPP_URL)
|
||||
# PUBLIC_URL=https://survey.example.com
|
||||
|
||||
#####################
|
||||
# Disable Features #
|
||||
#####################
|
||||
@@ -83,16 +90,13 @@ EMAIL_VERIFICATION_DISABLED=1
|
||||
# Password Reset. If you enable Password Reset functionality you have to setup SMTP-Settings, too.
|
||||
PASSWORD_RESET_DISABLED=1
|
||||
|
||||
# Signup. Disable the ability for new users to create an account.
|
||||
# Note: This variable is only available to the SaaS setup of Formbricks Cloud. Signup is disabled by default for self-hosting.
|
||||
# SIGNUP_DISABLED=1
|
||||
|
||||
# Email login. Disable the ability for users to login with email.
|
||||
# EMAIL_AUTH_DISABLED=1
|
||||
|
||||
# Organization Invite. Disable the ability for invited users to create an account.
|
||||
# INVITE_DISABLED=1
|
||||
|
||||
|
||||
##########
|
||||
# Other #
|
||||
##########
|
||||
@@ -101,6 +105,15 @@ PASSWORD_RESET_DISABLED=1
|
||||
PRIVACY_URL=
|
||||
TERMS_URL=
|
||||
IMPRINT_URL=
|
||||
IMPRINT_ADDRESS=
|
||||
|
||||
# Configure Turnstile in signup flow
|
||||
# TURNSTILE_SITE_KEY=
|
||||
# TURNSTILE_SECRET_KEY=
|
||||
|
||||
# Google reCAPTCHA v3 keys
|
||||
RECAPTCHA_SITE_KEY=
|
||||
RECAPTCHA_SECRET_KEY=
|
||||
|
||||
# Configure Github Login
|
||||
GITHUB_ID=
|
||||
@@ -122,6 +135,9 @@ AZUREAD_TENANT_ID=
|
||||
# OIDC_DISPLAY_NAME=
|
||||
# OIDC_SIGNING_ALGORITHM=
|
||||
|
||||
# Configure SAML SSO
|
||||
# SAML_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/formbricks-saml
|
||||
|
||||
# Configure this when you want to ship JS & CSS files from a complete URL instead of the current domain
|
||||
# ASSET_PREFIX_URL=
|
||||
|
||||
@@ -133,11 +149,6 @@ NOTION_OAUTH_CLIENT_SECRET=
|
||||
STRIPE_SECRET_KEY=
|
||||
STRIPE_WEBHOOK_SECRET=
|
||||
|
||||
# Configure Formbricks usage within Formbricks
|
||||
NEXT_PUBLIC_FORMBRICKS_API_HOST=
|
||||
NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID=
|
||||
NEXT_PUBLIC_FORMBRICKS_ONBOARDING_SURVEY_ID=
|
||||
|
||||
# Oauth credentials for Google sheet integration
|
||||
GOOGLE_SHEETS_CLIENT_ID=
|
||||
GOOGLE_SHEETS_CLIENT_SECRET=
|
||||
@@ -156,12 +167,12 @@ ENTERPRISE_LICENSE_KEY=
|
||||
# Automatically assign new users to a specific organization and role within that organization
|
||||
# Insert an existing organization id or generate a valid CUID for a new one at https://www.getuniqueid.com/cuid (e.g. cjld2cjxh0000qzrmn831i7rn)
|
||||
# (Role Management is an Enterprise feature)
|
||||
# DEFAULT_ORGANIZATION_ID=
|
||||
# DEFAULT_ORGANIZATION_ROLE=admin
|
||||
# AUTH_SSO_DEFAULT_TEAM_ID=
|
||||
# AUTH_SKIP_INVITE_FOR_SSO=
|
||||
|
||||
# Send new users to customer.io
|
||||
# CUSTOMER_IO_API_KEY=
|
||||
# CUSTOMER_IO_SITE_ID=
|
||||
# Send new users to Brevo
|
||||
# BREVO_API_KEY=
|
||||
# BREVO_LIST_ID=
|
||||
|
||||
# Ignore Rate Limiting across the Formbricks app
|
||||
# RATE_LIMITING_DISABLED=1
|
||||
@@ -173,16 +184,33 @@ ENTERPRISE_LICENSE_KEY=
|
||||
UNSPLASH_ACCESS_KEY=
|
||||
|
||||
# The below is used for Next Caching (uses In-Memory from Next Cache if not provided)
|
||||
# REDIS_URL=redis://localhost:6379
|
||||
REDIS_URL=redis://localhost:6379
|
||||
|
||||
# The below is used for Rate Limiting (uses In-Memory LRU Cache if not provided) (You can use a service like Webdis for this)
|
||||
# REDIS_HTTP_URL:
|
||||
|
||||
# Disable custom cache handler if necessary (e.g. if deployed on Vercel)
|
||||
# CUSTOM_CACHE_DISABLED=1
|
||||
# INTERCOM_APP_ID=
|
||||
# INTERCOM_SECRET_KEY=
|
||||
|
||||
# Azure AI settings
|
||||
# AI_AZURE_RESSOURCE_NAME=
|
||||
# AI_AZURE_API_KEY=
|
||||
# AI_AZURE_EMBEDDINGS_DEPLOYMENT_ID=
|
||||
# AI_AZURE_LLM_DEPLOYMENT_ID=
|
||||
# Enable Prometheus metrics
|
||||
# PROMETHEUS_ENABLED=
|
||||
# PROMETHEUS_EXPORTER_PORT=
|
||||
|
||||
# The SENTRY_DSN is used for error tracking and performance monitoring with Sentry.
|
||||
# SENTRY_DSN=
|
||||
# The SENTRY_AUTH_TOKEN variable is picked up by the Sentry Build Plugin.
|
||||
# It's used automatically by Sentry during the build for authentication when uploading source maps.
|
||||
# SENTRY_AUTH_TOKEN=
|
||||
# The SENTRY_ENVIRONMENT is the environment which the error will belong to in the Sentry dashboard
|
||||
# SENTRY_ENVIRONMENT=
|
||||
|
||||
# Configure the minimum role for user management from UI(owner, manager, disabled)
|
||||
# USER_MANAGEMENT_MINIMUM_ROLE="manager"
|
||||
|
||||
# Configure the maximum age for the session in seconds. Default is 86400 (24 hours)
|
||||
# SESSION_MAX_AGE=86400
|
||||
|
||||
# Audit logs options. Default 0.
|
||||
# AUDIT_LOG_ENABLED=0
|
||||
# If the ip should be added in the log or not. Default 0
|
||||
# AUDIT_LOG_GET_USER_IP=0
|
||||
|
||||
13
.eslintrc.cjs
Normal file
@@ -0,0 +1,13 @@
|
||||
module.exports = {
|
||||
root: true,
|
||||
ignorePatterns: ["node_modules/", "dist/", "coverage/"],
|
||||
overrides: [
|
||||
{
|
||||
files: ["packages/cache/**/*.{ts,js}"],
|
||||
extends: ["@formbricks/eslint-config/library.js"],
|
||||
parserOptions: {
|
||||
project: "./packages/cache/tsconfig.json",
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
12
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
@@ -1,7 +1,8 @@
|
||||
name: Bug report
|
||||
description: "Found a bug? Please fill out the sections below. \U0001F44D"
|
||||
labels:
|
||||
- bug
|
||||
type: bug
|
||||
projects: "formbricks/8"
|
||||
labels: ["bug"]
|
||||
body:
|
||||
- type: textarea
|
||||
id: issue-summary
|
||||
@@ -10,6 +11,13 @@ body:
|
||||
description: A summary of the issue. This needs to be a clear detailed-rich summary.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: issue-expected-behavior
|
||||
attributes:
|
||||
label: Expected Behavior
|
||||
description: A clear and concise description of what you expected to happen.
|
||||
validations:
|
||||
required: false
|
||||
- type: textarea
|
||||
id: other-information
|
||||
attributes:
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,4 +1,4 @@
|
||||
blank_issues_enabled: false
|
||||
blank_issues_enabled: true
|
||||
contact_links:
|
||||
- name: Questions
|
||||
url: https://github.com/formbricks/formbricks/discussions
|
||||
|
||||
6
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
@@ -1,7 +1,7 @@
name: Feature request
description: "Suggest an idea for this project \U0001F680"
labels:
- enhancement
type: feature
projects: "formbricks/21"
body:
- type: textarea
id: problem-description
@@ -30,4 +30,4 @@ body:
### Additional resources 🤓

- Check out our [Contributor Docs](https://formbricks.com/docs/developer-docs/contributing/get-started)
- Anything unclear? [Ask in Discord](https://formbricks.com/discord)
- Anything unclear? [Ask in Github Discussions](https://github.com/formbricks/formbricks/discussions)
21
.github/actions/cache-build-web/action.yml
vendored
@@ -8,6 +8,14 @@ on:
|
||||
required: false
|
||||
default: "0"
|
||||
|
||||
inputs:
|
||||
turbo_token:
|
||||
description: "Turborepo token"
|
||||
required: false
|
||||
turbo_team:
|
||||
description: "Turborepo team"
|
||||
required: false
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
@@ -41,7 +49,7 @@ runs:
|
||||
if: steps.cache-build.outputs.cache-hit != 'true'
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
if: steps.cache-build.outputs.cache-hit != 'true'
|
||||
|
||||
- name: Install dependencies
|
||||
@@ -54,17 +62,18 @@ runs:
|
||||
shell: bash
|
||||
|
||||
- name: Fill ENCRYPTION_KEY, ENTERPRISE_LICENSE_KEY and E2E_TESTING in .env
|
||||
env:
|
||||
E2E_TESTING_MODE: ${{ inputs.e2e_testing_mode }}
|
||||
run: |
|
||||
RANDOM_KEY=$(openssl rand -hex 32)
|
||||
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
|
||||
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/ENTERPRISE_LICENSE_KEY=.*/ENTERPRISE_LICENSE_KEY=${RANDOM_KEY}/" .env
|
||||
echo "E2E_TESTING=${{ inputs.e2e_testing_mode }}" >> .env
|
||||
echo "E2E_TESTING=$E2E_TESTING_MODE" >> .env
|
||||
shell: bash
|
||||
|
||||
- run: |
|
||||
pnpm build --filter=@formbricks/web...
|
||||
|
||||
if: steps.cache-build.outputs.cache-hit != 'true'
|
||||
shell: bash
|
||||
env:
|
||||
TURBO_TOKEN: ${{ inputs.turbo_token }}
|
||||
TURBO_TEAM: ${{ inputs.turbo_team }}
|
||||
|
||||
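The action above seeds the .env secrets with `openssl rand -hex 32` before substituting them via sed. For reference only (not part of the workflow), the equivalent key generation in TypeScript with Node's crypto module would be:

```typescript
import { randomBytes } from "node:crypto";

// Produces a 64-character hex string, equivalent to `openssl rand -hex 32`.
const encryptionKey = randomBytes(32).toString("hex");
console.log(encryptionKey);
```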
32
.github/copilot-instructions.md
vendored
Normal file
@@ -0,0 +1,32 @@
# Testing Instructions

When generating test files inside the "/app/web" path, follow these rules:

- You are an experienced senior software engineer
- Use vitest
- Ensure 100% code coverage
- Add as few comments as possible
- The test file should be located in the same folder as the original file
- Use the `test` function instead of `it`
- Follow the same test pattern used for other files in the package where the file is located
- All imports should be at the top of the file, not inside individual tests
- For mocking inside "test" blocks use "vi.mocked"
- If the file is located in the "packages/survey" path, use "@testing-library/preact" instead of "@testing-library/react"
- Don't mock functions that are already mocked in the "apps/web/vitestSetup.ts" file
- When using "screen.getByText", check for the tolgee string if it is being used in the file.
- The types for mocked variables can be found in the "packages/types" path. Be sure that every imported type exists before using it. Don't create types that are not already in the codebase.
- When mocking data, check if the properties added are part of the type of the object being mocked. Only specify known properties; don't use properties that are not part of the type.

If it's a test for a ".tsx" file, follow these extra instructions:

- Add this code inside the "describe" block and before any test:

  afterEach(() => {
    cleanup();
  });

- The "afterEach" function should only have the "cleanup()" line inside it, and "afterEach" should be added to the "vitest" imports.
- For click events, import userEvent from "@testing-library/user-event"
- Mock other components that would make the test more complex, as long as mocking them wouldn't make the test flaky. It's ok to leave basic and simple components unmocked.
- You don't need to mock @tolgee/react
- Use "import "@testing-library/jest-dom/vitest";"
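To make the rules above concrete, a .tsx component test written against them might look like the sketch below. The `GreetingBadge` component and its import path are hypothetical; only the conventions (vitest, `test` instead of `it`, `cleanup` in `afterEach`, `userEvent` for clicks, the jest-dom import) come from the instructions file.

```typescript
import "@testing-library/jest-dom/vitest";
import { cleanup, render, screen } from "@testing-library/react";
import userEvent from "@testing-library/user-event";
import { afterEach, describe, expect, test, vi } from "vitest";
// Hypothetical component used only for illustration.
import { GreetingBadge } from "./GreetingBadge";

describe("GreetingBadge", () => {
  afterEach(() => {
    cleanup();
  });

  test("renders the provided name", () => {
    render(<GreetingBadge name="Ada" />);
    expect(screen.getByText("Ada")).toBeInTheDocument();
  });

  test("calls onClick when the badge is clicked", async () => {
    const onClick = vi.fn();
    render(<GreetingBadge name="Ada" onClick={onClick} />);
    await userEvent.click(screen.getByText("Ada"));
    expect(onClick).toHaveBeenCalledTimes(1);
  });
});
```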
74
.github/workflows/apply-issue-labels-to-pr.yml
vendored
@@ -1,74 +0,0 @@
|
||||
name: "Apply issue labels to PR"
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- opened
|
||||
|
||||
jobs:
|
||||
label_on_pr:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: none
|
||||
issues: read
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Apply labels from linked issue to PR
|
||||
uses: actions/github-script@v5
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
async function getLinkedIssues(owner, repo, prNumber) {
|
||||
const query = `query GetLinkedIssues($owner: String!, $repo: String!, $prNumber: Int!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
pullRequest(number: $prNumber) {
|
||||
closingIssuesReferences(first: 10) {
|
||||
nodes {
|
||||
number
|
||||
labels(first: 10) {
|
||||
nodes {
|
||||
name
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}`;
|
||||
|
||||
const variables = {
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
prNumber: prNumber,
|
||||
};
|
||||
|
||||
const result = await github.graphql(query, variables);
|
||||
return result.repository.pullRequest.closingIssuesReferences.nodes;
|
||||
}
|
||||
|
||||
const pr = context.payload.pull_request;
|
||||
const linkedIssues = await getLinkedIssues(
|
||||
context.repo.owner,
|
||||
context.repo.repo,
|
||||
pr.number
|
||||
);
|
||||
|
||||
const labelsToAdd = new Set();
|
||||
for (const issue of linkedIssues) {
|
||||
if (issue.labels && issue.labels.nodes) {
|
||||
for (const label of issue.labels.nodes) {
|
||||
labelsToAdd.add(label.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (labelsToAdd.size) {
|
||||
await github.rest.issues.addLabels({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: pr.number,
|
||||
labels: Array.from(labelsToAdd),
|
||||
});
|
||||
}
|
||||
168
.github/workflows/build-and-push-ecr.yml
vendored
Normal file
@@ -0,0 +1,168 @@
|
||||
name: Build & Push Docker to ECR
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
image_tag:
|
||||
description: "Image tag to push (e.g., v3.16.1, main)"
|
||||
required: true
|
||||
default: "v3.16.1"
|
||||
deploy_production:
|
||||
description: "Tag image for production deployment"
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
deploy_staging:
|
||||
description: "Tag image for staging deployment"
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
|
||||
env:
|
||||
ECR_REGION: ${{ vars.ECR_REGION }}
|
||||
# ECR settings are sourced from repository/environment variables for portability across envs/forks
|
||||
ECR_REGISTRY: ${{ vars.ECR_REGISTRY }}
|
||||
ECR_REPOSITORY: ${{ vars.ECR_REPOSITORY }}
|
||||
DOCKERFILE: apps/web/Dockerfile
|
||||
CONTEXT: .
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
name: Build and Push
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Validate image tag input
|
||||
shell: bash
|
||||
env:
|
||||
IMAGE_TAG: ${{ inputs.image_tag }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [[ -z "${IMAGE_TAG}" ]]; then
|
||||
echo "❌ Image tag is required (non-empty)."
|
||||
exit 1
|
||||
fi
|
||||
if (( ${#IMAGE_TAG} > 128 )); then
|
||||
echo "❌ Image tag must be at most 128 characters."
|
||||
exit 1
|
||||
fi
|
||||
if [[ ! "${IMAGE_TAG}" =~ ^[a-z0-9._-]+$ ]]; then
|
||||
echo "❌ Image tag may only contain lowercase letters, digits, '.', '_' and '-'."
|
||||
exit 1
|
||||
fi
|
||||
if [[ "${IMAGE_TAG}" =~ ^[.-] || "${IMAGE_TAG}" =~ [.-]$ ]]; then
|
||||
echo "❌ Image tag must not start or end with '.' or '-'."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Validate required variables
|
||||
shell: bash
|
||||
env:
|
||||
ECR_REGISTRY: ${{ env.ECR_REGISTRY }}
|
||||
ECR_REPOSITORY: ${{ env.ECR_REPOSITORY }}
|
||||
ECR_REGION: ${{ env.ECR_REGION }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [[ -z "${ECR_REGISTRY}" || -z "${ECR_REPOSITORY}" || -z "${ECR_REGION}" ]]; then
|
||||
echo "ECR_REGION, ECR_REGISTRY and ECR_REPOSITORY must be set via repository or environment variables (Settings → Variables)."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Update package.json version
|
||||
shell: bash
|
||||
env:
|
||||
IMAGE_TAG: ${{ inputs.image_tag }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Remove 'v' prefix if present (e.g., v3.16.1 -> 3.16.1)
|
||||
VERSION="${IMAGE_TAG#v}"
|
||||
|
||||
# Validate SemVer format (major.minor.patch with optional prerelease and build metadata)
|
||||
if [[ ! "${VERSION}" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$ ]]; then
|
||||
echo "❌ Error: Invalid version format after extraction. Must be SemVer (e.g., 1.2.3, 1.2.3-alpha, 1.2.3+build.1)"
|
||||
echo "Original input: ${IMAGE_TAG}"
|
||||
echo "Extracted version: ${VERSION}"
|
||||
echo "Expected format: MAJOR.MINOR.PATCH[-PRERELEASE][+BUILDMETADATA]"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Valid SemVer format detected: ${VERSION}"
|
||||
echo "Updating package.json version to: ${VERSION}"
|
||||
sed -i "s/\"version\": \"0.0.0\"/\"version\": \"${VERSION}\"/" ./apps/web/package.json
|
||||
cat ./apps/web/package.json | grep version
|
||||
|
||||
- name: Build tag list
|
||||
id: tags
|
||||
shell: bash
|
||||
env:
|
||||
IMAGE_TAG: ${{ inputs.image_tag }}
|
||||
DEPLOY_PRODUCTION: ${{ inputs.deploy_production }}
|
||||
DEPLOY_STAGING: ${{ inputs.deploy_staging }}
|
||||
ECR_REGISTRY: ${{ env.ECR_REGISTRY }}
|
||||
ECR_REPOSITORY: ${{ env.ECR_REPOSITORY }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Start with the base image tag
|
||||
TAGS="${ECR_REGISTRY}/${ECR_REPOSITORY}:${IMAGE_TAG}"
|
||||
|
||||
# Add production tag if requested
|
||||
if [[ "${DEPLOY_PRODUCTION}" == "true" ]]; then
|
||||
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:production"
|
||||
fi
|
||||
|
||||
# Add staging tag if requested
|
||||
if [[ "${DEPLOY_STAGING}" == "true" ]]; then
|
||||
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:staging"
|
||||
fi
|
||||
|
||||
# Output for debugging
|
||||
echo "Generated tags:"
|
||||
echo -e "${TAGS}"
|
||||
|
||||
# Set output for next step (escape newlines for GitHub Actions)
|
||||
{
|
||||
echo "tags<<EOF"
|
||||
echo -e "${TAGS}"
|
||||
echo "EOF"
|
||||
} >> "${GITHUB_OUTPUT}"
|
||||
|
||||
- name: Configure AWS credentials (OIDC)
|
||||
uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_ECR_PUSH_ROLE_ARN }}
|
||||
aws-region: ${{ env.ECR_REGION }}
|
||||
|
||||
- name: Log in to Amazon ECR
|
||||
uses: aws-actions/amazon-ecr-login@062b18b96a7aff071d4dc91bc00c4c1a7945b076
|
||||
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
|
||||
|
||||
- name: Build and push image (Depot remote builder)
|
||||
uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
|
||||
with:
|
||||
project: tw0fqmsx3c
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
context: ${{ env.CONTEXT }}
|
||||
file: ${{ env.DOCKERFILE }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.tags.outputs.tags }}
|
||||
secrets: |
|
||||
database_url=${{ secrets.DUMMY_DATABASE_URL }}
|
||||
encryption_key=${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
sentry_auth_token=${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
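The workflow above validates the image tag and derives a SemVer package version in bash. The same checks expressed in TypeScript, for readers who want to reuse the logic elsewhere, might look like this sketch (function names are illustrative; the regexes mirror the ones in the workflow):

```typescript
// Mirrors the bash validation in the workflow above (illustrative only).
const TAG_PATTERN = /^[a-z0-9._-]+$/;
const SEMVER_PATTERN = /^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$/;

function validateImageTag(tag: string): string {
  if (!tag) throw new Error("Image tag is required (non-empty).");
  if (tag.length > 128) throw new Error("Image tag must be at most 128 characters.");
  if (!TAG_PATTERN.test(tag)) {
    throw new Error("Image tag may only contain lowercase letters, digits, '.', '_' and '-'.");
  }
  if (/^[.-]/.test(tag) || /[.-]$/.test(tag)) {
    throw new Error("Image tag must not start or end with '.' or '-'.");
  }
  return tag;
}

function toPackageVersion(tag: string): string {
  const version = tag.replace(/^v/, ""); // v3.16.1 -> 3.16.1
  if (!SEMVER_PATTERN.test(version)) throw new Error(`Not a valid SemVer version: ${version}`);
  return version;
}

console.log(toPackageVersion(validateImageTag("v3.16.1"))); // "3.16.1"
```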
28
.github/workflows/build-docs.yml
vendored
@@ -1,28 +0,0 @@
|
||||
name: Build Docs
|
||||
on:
|
||||
workflow_call:
|
||||
jobs:
|
||||
build:
|
||||
name: Build Docs
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./.github/actions/dangerous-git-checkout
|
||||
|
||||
- name: Setup Node.js 20.x
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 20.x
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --config.platform=linux --config.architecture=x64
|
||||
shell: bash
|
||||
|
||||
- run: |
|
||||
pnpm build --filter=@formbricks/docs...
|
||||
shell: bash
|
||||
13
.github/workflows/build-web.yml
vendored
@@ -1,6 +1,10 @@
|
||||
name: Build Web
|
||||
on:
|
||||
workflow_call:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build Formbricks-web
|
||||
@@ -8,7 +12,12 @@ jobs:
|
||||
timeout-minutes: 30
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: ./.github/actions/dangerous-git-checkout
|
||||
|
||||
- name: Build & Cache Web Binaries
|
||||
@@ -16,3 +25,5 @@ jobs:
|
||||
id: cache-build-web
|
||||
with:
|
||||
e2e_testing_mode: "0"
|
||||
turbo_token: ${{ secrets.TURBO_TOKEN }}
|
||||
turbo_team: ${{ vars.TURBO_TEAM }}
|
||||
|
||||
20
.github/workflows/chromatic.yml
vendored
@@ -6,24 +6,36 @@ on:
|
||||
- main
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
chromatic:
|
||||
name: Run Chromatic
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
packages: write
|
||||
id-token: write
|
||||
actions: read
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
|
||||
with:
|
||||
node-version: 20
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
- name: Install dependencies
|
||||
run: pnpm install --config.platform=linux --config.architecture=x64
|
||||
- name: Run Chromatic
|
||||
uses: chromaui/action@latest
|
||||
uses: chromaui/action@c93e0bc3a63aa176e14a75b61a31847cbfdd341c # latest
|
||||
with:
|
||||
# ⚠️ Make sure to configure a `CHROMATIC_PROJECT_TOKEN` repository secret
|
||||
projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }}
|
||||
|
||||
92
.github/workflows/codeql.yml
vendored
@@ -1,92 +0,0 @@
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL Advanced"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "main" ]
|
||||
pull_request:
|
||||
branches: [ "main" ]
|
||||
schedule:
|
||||
- cron: '17 1 * * 1'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze (${{ matrix.language }})
|
||||
# Runner size impacts CodeQL analysis time. To learn more, please see:
|
||||
# - https://gh.io/recommended-hardware-resources-for-running-codeql
|
||||
# - https://gh.io/supported-runners-and-hardware-resources
|
||||
# - https://gh.io/using-larger-runners (GitHub.com only)
|
||||
# Consider using larger runners or machines with greater resources for possible analysis time improvements.
|
||||
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
|
||||
permissions:
|
||||
# required for all workflows
|
||||
security-events: write
|
||||
|
||||
# required to fetch internal or private CodeQL packs
|
||||
packages: read
|
||||
|
||||
# only required for workflows in private repositories
|
||||
actions: read
|
||||
contents: read
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- language: javascript-typescript
|
||||
build-mode: none
|
||||
# CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
|
||||
# Use `c-cpp` to analyze code written in C, C++ or both
|
||||
# Use 'java-kotlin' to analyze code written in Java, Kotlin or both
|
||||
# Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
|
||||
# To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
|
||||
# see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
|
||||
# If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
|
||||
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
build-mode: ${{ matrix.build-mode }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
|
||||
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||
# queries: security-extended,security-and-quality
|
||||
|
||||
# If the analyze step fails for one of the languages you are analyzing with
|
||||
# "We were unable to automatically build your code", modify the matrix above
|
||||
# to set the build mode to "manual" for that language. Then modify this step
|
||||
# to build your code.
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
- if: matrix.build-mode == 'manual'
|
||||
shell: bash
|
||||
run: |
|
||||
echo 'If you are using a "manual" build mode for one or more of the' \
|
||||
'languages you are analyzing, replace this with the commands to build' \
|
||||
'your code, for example:'
|
||||
echo ' make bootstrap'
|
||||
echo ' make release'
|
||||
exit 1
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
24
.github/workflows/cron-surveyStatusUpdate.yml
vendored
@@ -1,24 +0,0 @@
|
||||
name: Cron - Survey status update
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
# "Scheduled workflows run on the latest commit on the default or base branch."
|
||||
# — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
|
||||
# schedule:
|
||||
# Runs “At 00:00.” (see https://crontab.guru)
|
||||
# - cron: "0 0 * * *"
|
||||
jobs:
|
||||
cron-weeklySummary:
|
||||
env:
|
||||
APP_URL: ${{ secrets.APP_URL }}
|
||||
CRON_SECRET: ${{ secrets.CRON_SECRET }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: cURL request
|
||||
if: ${{ env.APP_URL && env.CRON_SECRET }}
|
||||
run: |
|
||||
curl ${{ env.APP_URL }}/api/cron/survey-status \
|
||||
-X POST \
|
||||
-H 'content-type: application/json' \
|
||||
-H 'x-api-key: ${{ env.CRON_SECRET }}' \
|
||||
--fail
|
||||
24
.github/workflows/cron-weeklySummary.yml
vendored
@@ -1,24 +0,0 @@
|
||||
name: Cron - Weekly summary
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
# "Scheduled workflows run on the latest commit on the default or base branch."
|
||||
# — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
|
||||
schedule:
|
||||
# Runs “At 08:00 on Monday.” (see https://crontab.guru)
|
||||
- cron: "0 8 * * 1"
|
||||
jobs:
|
||||
cron-weeklySummary:
|
||||
env:
|
||||
APP_URL: ${{ secrets.APP_URL }}
|
||||
CRON_SECRET: ${{ secrets.CRON_SECRET }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: cURL request
|
||||
if: ${{ env.APP_URL && env.CRON_SECRET }}
|
||||
run: |
|
||||
curl ${{ env.APP_URL }}/api/cron/weekly-summary \
|
||||
-X POST \
|
||||
-H 'content-type: application/json' \
|
||||
-H 'x-api-key: ${{ env.CRON_SECRET }}' \
|
||||
--fail
|
||||
149
.github/workflows/deploy-formbricks-cloud.yml
vendored
Normal file
@@ -0,0 +1,149 @@
|
||||
name: Formbricks Cloud Deployment
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
VERSION:
|
||||
description: "The version of the Docker image to release, full image tag if image tag is v0.0.0 enter v0.0.0."
|
||||
required: true
|
||||
type: string
|
||||
REPOSITORY:
|
||||
description: "The repository to use for the Docker image"
|
||||
required: false
|
||||
type: string
|
||||
default: "ghcr.io/formbricks/formbricks"
|
||||
ENVIRONMENT:
|
||||
description: "The environment to deploy to"
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- staging
|
||||
- production
|
||||
workflow_call:
|
||||
inputs:
|
||||
VERSION:
|
||||
description: "The version of the Docker image to release"
|
||||
required: true
|
||||
type: string
|
||||
REPOSITORY:
|
||||
description: "The repository to use for the Docker image"
|
||||
required: false
|
||||
type: string
|
||||
default: "ghcr.io/formbricks/formbricks"
|
||||
ENVIRONMENT:
|
||||
description: "The environment to deploy to"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
helmfile-deploy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Tailscale
|
||||
uses: tailscale/github-action@84a3f23bb4d843bcf4da6cf824ec1be473daf4de # v3.2.3
|
||||
with:
|
||||
oauth-client-id: ${{ secrets.TS_OAUTH_CLIENT_ID }}
|
||||
oauth-secret: ${{ secrets.TS_OAUTH_SECRET }}
|
||||
tags: tag:github
|
||||
args: --accept-routes
|
||||
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@f24d7193d98baebaeacc7e2227925dd47cc267f5 # v4.2.0
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }}
|
||||
aws-region: "eu-central-1"
|
||||
|
||||
- name: Setup Cluster Access
|
||||
run: |
|
||||
aws eks update-kubeconfig --name formbricks-prod-eks --region eu-central-1
|
||||
env:
|
||||
AWS_REGION: eu-central-1
|
||||
|
||||
- uses: helmfile/helmfile-action@712000e3d4e28c72778ecc53857746082f555ef3 # v2.0.4
|
||||
name: Deploy Formbricks Cloud Production
|
||||
if: inputs.ENVIRONMENT == 'production'
|
||||
env:
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
REPOSITORY: ${{ inputs.REPOSITORY }}
|
||||
FORMBRICKS_S3_BUCKET: ${{ secrets.FORMBRICKS_S3_BUCKET }}
|
||||
FORMBRICKS_INGRESS_CERT_ARN: ${{ secrets.FORMBRICKS_INGRESS_CERT_ARN }}
|
||||
FORMBRICKS_ROLE_ARN: ${{ secrets.FORMBRICKS_ROLE_ARN }}
|
||||
with:
|
||||
helmfile-version: "v1.0.0"
|
||||
helm-plugins: >
|
||||
https://github.com/databus23/helm-diff,
|
||||
https://github.com/jkroepke/helm-secrets
|
||||
helmfile-args: apply -l environment=prod
|
||||
helmfile-auto-init: "false"
|
||||
helmfile-workdirectory: infra/formbricks-cloud-helm
|
||||
|
||||
- uses: helmfile/helmfile-action@712000e3d4e28c72778ecc53857746082f555ef3 # v2.0.4
|
||||
name: Deploy Formbricks Cloud Staging
|
||||
if: inputs.ENVIRONMENT == 'staging'
|
||||
env:
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
REPOSITORY: ${{ inputs.REPOSITORY }}
|
||||
FORMBRICKS_INGRESS_CERT_ARN: ${{ secrets.STAGE_FORMBRICKS_INGRESS_CERT_ARN }}
|
||||
FORMBRICKS_ROLE_ARN: ${{ secrets.STAGE_FORMBRICKS_ROLE_ARN }}
|
||||
with:
|
||||
helmfile-version: "v1.0.0"
|
||||
helm-plugins: >
|
||||
https://github.com/databus23/helm-diff,
|
||||
https://github.com/jkroepke/helm-secrets
|
||||
helmfile-args: apply -l environment=stage
|
||||
helmfile-auto-init: "false"
|
||||
helmfile-workdirectory: infra/formbricks-cloud-helm
|
||||
|
||||
- name: Purge Cloudflare Cache
|
||||
if: ${{ inputs.ENVIRONMENT == 'production' || inputs.ENVIRONMENT == 'staging' }}
|
||||
env:
|
||||
CF_ZONE_ID: ${{ secrets.CLOUDFLARE_ZONE_ID }}
|
||||
CF_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
ENVIRONMENT: ${{ inputs.ENVIRONMENT }}
|
||||
run: |
|
||||
# Set hostname based on environment
|
||||
if [[ "$ENVIRONMENT" == "production" ]]; then
|
||||
PURGE_HOST="app.formbricks.com"
|
||||
else
|
||||
PURGE_HOST="stage.app.formbricks.com"
|
||||
fi
|
||||
|
||||
echo "Purging Cloudflare cache for host: $PURGE_HOST (environment: $ENVIRONMENT, zone: $CF_ZONE_ID)"
|
||||
|
||||
# Prepare JSON payload for selective cache purge
|
||||
json_payload=$(cat << EOF
|
||||
{
|
||||
"hosts": ["$PURGE_HOST"]
|
||||
}
|
||||
EOF
|
||||
)
|
||||
|
||||
# Make API call to Cloudflare
|
||||
response=$(curl -s -X POST \
|
||||
"https://api.cloudflare.com/client/v4/zones/$CF_ZONE_ID/purge_cache" \
|
||||
-H "Authorization: Bearer $CF_API_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
--data "$json_payload")
|
||||
|
||||
echo "Cloudflare API response: $response"
|
||||
|
||||
# Verify the operation was successful
|
||||
if [[ "$(echo "$response" | jq -r .success)" == "true" ]]; then
|
||||
echo "✅ Successfully purged cache for $PURGE_HOST"
|
||||
else
|
||||
echo "❌ Cloudflare cache purge failed"
|
||||
echo "Error details: $(echo "$response" | jq -r .errors)"
|
||||
exit 1
|
||||
fi
|
||||
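The final step above purges the Cloudflare cache for the deployed hostname via the zones/{zone_id}/purge_cache endpoint. A TypeScript equivalent of that curl call (sketch only; zone ID, token and hostname are placeholders) could be:

```typescript
// Sketch of the selective cache purge performed by the workflow above.
async function purgeCloudflareHost(zoneId: string, apiToken: string, host: string): Promise<void> {
  const response = await fetch(`https://api.cloudflare.com/client/v4/zones/${zoneId}/purge_cache`, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${apiToken}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({ hosts: [host] }),
  });

  const result = (await response.json()) as { success: boolean; errors: unknown[] };
  if (!result.success) {
    throw new Error(`Cloudflare cache purge failed: ${JSON.stringify(result.errors)}`);
  }
}

// Example usage with placeholder values:
// await purgeCloudflareHost(process.env.CF_ZONE_ID!, process.env.CF_API_TOKEN!, "app.formbricks.com");
```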
200
.github/workflows/docker-build-validation.yml
vendored
Normal file
@@ -0,0 +1,200 @@
|
||||
name: Docker Build Validation
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
merge_group:
|
||||
branches:
|
||||
- main
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ vars.TURBO_TEAM }}
|
||||
|
||||
jobs:
|
||||
validate-docker-build:
|
||||
name: Validate Docker Build
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
# Add PostgreSQL and Redis service containers
|
||||
services:
|
||||
postgres:
|
||||
image: pgvector/pgvector@sha256:9ae02a756ba16a2d69dd78058e25915e36e189bb36ddf01ceae86390d7ed786a
|
||||
env:
|
||||
POSTGRES_USER: test
|
||||
POSTGRES_PASSWORD: test
|
||||
POSTGRES_DB: formbricks
|
||||
ports:
|
||||
- 5432:5432
|
||||
# Health check to ensure PostgreSQL is ready before using it
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
redis:
|
||||
image: valkey/valkey@sha256:12ba4f45a7c3e1d0f076acd616cb230834e75a77e8516dde382720af32832d6d
|
||||
ports:
|
||||
- 6379:6379
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build Docker Image
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
env:
|
||||
GITHUB_SHA: ${{ github.sha }}
|
||||
with:
|
||||
context: .
|
||||
file: ./apps/web/Dockerfile
|
||||
push: false
|
||||
load: true
|
||||
tags: formbricks-test:${{ env.GITHUB_SHA }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
secrets: |
|
||||
database_url=${{ secrets.DUMMY_DATABASE_URL }}
|
||||
encryption_key=${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
redis_url=redis://localhost:6379
|
||||
|
||||
- name: Verify and Initialize PostgreSQL
|
||||
run: |
|
||||
echo "Verifying PostgreSQL connection..."
|
||||
# Install PostgreSQL client to test connection
|
||||
sudo apt-get update && sudo apt-get install -y postgresql-client
|
||||
|
||||
# Test connection using psql with timeout and proper error handling
|
||||
echo "Testing PostgreSQL connection with 30 second timeout..."
|
||||
if timeout 30 bash -c 'until PGPASSWORD=test psql -h localhost -U test -d formbricks -c "\dt" >/dev/null 2>&1; do
|
||||
echo "Waiting for PostgreSQL to be ready..."
|
||||
sleep 2
|
||||
done'; then
|
||||
echo "✅ PostgreSQL connection successful"
|
||||
PGPASSWORD=test psql -h localhost -U test -d formbricks -c "SELECT version();"
|
||||
|
||||
# Enable necessary extensions that might be required by migrations
|
||||
echo "Enabling required PostgreSQL extensions..."
|
||||
PGPASSWORD=test psql -h localhost -U test -d formbricks -c "CREATE EXTENSION IF NOT EXISTS vector;" || echo "Vector extension already exists or not available"
|
||||
|
||||
else
|
||||
echo "❌ PostgreSQL connection failed after 30 seconds"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Show network configuration
|
||||
echo "Network configuration:"
|
||||
netstat -tulpn | grep 5432 || echo "No process listening on port 5432"
|
||||
|
||||
- name: Verify Redis/Valkey Connection
|
||||
run: |
|
||||
echo "Verifying Redis/Valkey connection..."
|
||||
# Install Redis client to test connection
|
||||
sudo apt-get update && sudo apt-get install -y redis-tools
|
||||
|
||||
# Test connection using redis-cli with timeout and proper error handling
|
||||
echo "Testing Redis connection with 30 second timeout..."
|
||||
if timeout 30 bash -c 'until redis-cli -h localhost -p 6379 ping >/dev/null 2>&1; do
|
||||
echo "Waiting for Redis to be ready..."
|
||||
sleep 2
|
||||
done'; then
|
||||
echo "✅ Redis connection successful"
|
||||
redis-cli -h localhost -p 6379 info server | head -5
|
||||
else
|
||||
echo "❌ Redis connection failed after 30 seconds"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Show network configuration for Redis
|
||||
echo "Redis network configuration:"
|
||||
netstat -tulpn | grep 6379 || echo "No process listening on port 6379"
|
||||
|
||||
- name: Test Docker Image with Health Check
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_SHA: ${{ github.sha }}
|
||||
DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
run: |
|
||||
echo "🧪 Testing if the Docker image starts correctly..."
|
||||
|
||||
# Add extra docker run args to support host.docker.internal on Linux
|
||||
DOCKER_RUN_ARGS="--add-host=host.docker.internal:host-gateway"
|
||||
|
||||
# Start the container with host.docker.internal pointing to the host
|
||||
docker run --name formbricks-test \
|
||||
$DOCKER_RUN_ARGS \
|
||||
-p 3000:3000 \
|
||||
-e DATABASE_URL="postgresql://test:test@host.docker.internal:5432/formbricks" \
|
||||
-e ENCRYPTION_KEY="$DUMMY_ENCRYPTION_KEY" \
|
||||
-e REDIS_URL="redis://host.docker.internal:6379" \
|
||||
-d "formbricks-test:$GITHUB_SHA"
|
||||
|
||||
# Start health check polling immediately (every 5 seconds for up to 5 minutes)
|
||||
echo "🏥 Polling /health endpoint every 5 seconds for up to 5 minutes..."
|
||||
MAX_RETRIES=60 # 60 attempts × 5 seconds = 5 minutes
|
||||
RETRY_COUNT=0
|
||||
HEALTH_CHECK_SUCCESS=false
|
||||
|
||||
set +e # Disable exit on error to allow for retries
|
||||
|
||||
while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
|
||||
RETRY_COUNT=$((RETRY_COUNT + 1))
|
||||
|
||||
# Check if container is still running
|
||||
if [ "$(docker inspect -f '{{.State.Running}}' formbricks-test 2>/dev/null)" != "true" ]; then
|
||||
echo "❌ Container stopped running after $((RETRY_COUNT * 5)) seconds!"
|
||||
echo "📋 Container logs:"
|
||||
docker logs formbricks-test
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Show progress and diagnostic info every 12 attempts (1 minute intervals)
|
||||
if [ $((RETRY_COUNT % 12)) -eq 0 ] || [ $RETRY_COUNT -eq 1 ]; then
|
||||
echo "Health check attempt $RETRY_COUNT of $MAX_RETRIES ($(($RETRY_COUNT * 5)) seconds elapsed)..."
|
||||
echo "📋 Recent container logs:"
|
||||
docker logs --tail 10 formbricks-test
|
||||
fi
|
||||
|
||||
# Try health endpoint with shorter timeout for faster polling
|
||||
# Use -f flag to make curl fail on HTTP error status codes (4xx, 5xx)
|
||||
if curl -f -s -m 10 http://localhost:3000/health >/dev/null 2>&1; then
|
||||
echo "✅ Health check successful after $((RETRY_COUNT * 5)) seconds!"
|
||||
HEALTH_CHECK_SUCCESS=true
|
||||
break
|
||||
fi
|
||||
|
||||
# Wait 5 seconds before next attempt
|
||||
sleep 5
|
||||
done
|
||||
|
||||
# Show full container logs for debugging
|
||||
echo "📋 Full container logs:"
|
||||
docker logs formbricks-test
|
||||
|
||||
# Clean up the container
|
||||
echo "🧹 Cleaning up..."
|
||||
docker rm -f formbricks-test
|
||||
|
||||
# Exit with failure if health check did not succeed
|
||||
if [ "$HEALTH_CHECK_SUCCESS" != "true" ]; then
|
||||
echo "❌ Health check failed after $((MAX_RETRIES * 5)) seconds (5 minutes)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✨ Docker validation complete - all checks passed!"
|
||||
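The health-check loop above polls http://localhost:3000/health every 5 seconds for up to 5 minutes. The same polling logic written as a small TypeScript helper (illustrative only; endpoint and timings copied from the workflow) could be:

```typescript
// Polls a health endpoint until it answers with a 2xx status or the attempts run out.
async function waitForHealthy(url: string, maxRetries = 60, intervalMs = 5_000): Promise<boolean> {
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      const response = await fetch(url, { signal: AbortSignal.timeout(10_000) });
      if (response.ok) {
        console.log(`Healthy after ${(attempt * intervalMs) / 1000} seconds`);
        return true;
      }
    } catch {
      // Container may still be booting; ignore the error and retry.
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  return false;
}

// Example: await waitForHealthy("http://localhost:3000/health");
```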
70
.github/workflows/docker-security-scan.yml
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
name: Docker Security Scan
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 2 * * *" # Daily at 2 AM UTC
|
||||
workflow_dispatch:
|
||||
workflow_run:
|
||||
workflows: ["Docker Release to Github"]
|
||||
types: [completed]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: read
|
||||
security-events: write
|
||||
|
||||
jobs:
|
||||
scan:
|
||||
name: Vulnerability Scan
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- name: Harden the runner
|
||||
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout (for SARIF fingerprinting only)
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Determine ref and commit for upload
|
||||
id: gitref
|
||||
shell: bash
|
||||
env:
|
||||
EVENT_NAME: ${{ github.event_name }}
|
||||
HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
|
||||
HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [[ "${EVENT_NAME}" == "workflow_run" ]]; then
|
||||
echo "ref=refs/heads/${HEAD_BRANCH}" >> "$GITHUB_OUTPUT"
|
||||
echo "sha=${HEAD_SHA}" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "ref=${GITHUB_REF}" >> "$GITHUB_OUTPUT"
|
||||
echo "sha=${GITHUB_SHA}" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Run Trivy vulnerability scanner
|
||||
uses: aquasecurity/trivy-action@dc5a429b52fcf669ce959baa2c2dd26090d2a6c4 # v0.32.0
|
||||
with:
|
||||
image-ref: "ghcr.io/${{ github.repository }}:latest"
|
||||
format: "sarif"
|
||||
output: "trivy-results.sarif"
|
||||
severity: "CRITICAL,HIGH,MEDIUM,LOW"
|
||||
|
||||
- name: Upload Trivy scan results to GitHub Security tab
|
||||
uses: github/codeql-action/upload-sarif@a4e1a019f5e24960714ff6296aee04b736cbc3cf # v3.29.6
|
||||
if: ${{ always() }}
|
||||
with:
|
||||
sarif_file: "trivy-results.sarif"
|
||||
ref: ${{ steps.gitref.outputs.ref }}
|
||||
sha: ${{ steps.gitref.outputs.sha }}
|
||||
category: "trivy-container-scan"
|
||||
162
.github/workflows/e2e.yml
vendored
@@ -1,9 +1,31 @@
|
||||
name: E2E Tests
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
AZURE_CLIENT_ID:
|
||||
required: false
|
||||
AZURE_TENANT_ID:
|
||||
required: false
|
||||
AZURE_SUBSCRIPTION_ID:
|
||||
required: false
|
||||
PLAYWRIGHT_SERVICE_URL:
|
||||
required: false
|
||||
ENTERPRISE_LICENSE_KEY:
|
||||
required: true
|
||||
# Add other secrets if necessary
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
TELEMETRY_DISABLED: 1
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ vars.TURBO_TEAM }}
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
actions: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Run E2E Tests
|
||||
@@ -23,17 +45,40 @@ jobs:
|
||||
--health-interval=10s
|
||||
--health-timeout=5s
|
||||
--health-retries=5
|
||||
valkey:
|
||||
image: valkey/valkey@sha256:12ba4f45a7c3e1d0f076acd616cb230834e75a77e8516dde382720af32832d6d
|
||||
ports:
|
||||
- 6379:6379
|
||||
minio:
|
||||
image: bitnami/minio:2025.7.23-debian-12-r5
|
||||
env:
|
||||
MINIO_ROOT_USER: minioadmin
|
||||
MINIO_ROOT_PASSWORD: minioadmin
|
||||
ports:
|
||||
- 9000:9000
|
||||
options: >-
|
||||
--health-cmd="curl -fsS http://localhost:9000/minio/health/live || exit 1"
|
||||
--health-interval=10s
|
||||
--health-timeout=5s
|
||||
--health-retries=20
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: allow
|
||||
allowed-endpoints: |
|
||||
ee.formbricks.com:443
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: ./.github/actions/dangerous-git-checkout
|
||||
|
||||
- name: Setup Node.js 20.x
|
||||
uses: actions/setup-node@v3
|
||||
- name: Setup Node.js 22.x
|
||||
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 # v3.8.2
|
||||
with:
|
||||
node-version: 20.x
|
||||
node-version: 22.x
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --config.platform=linux --config.architecture=x64
|
||||
@@ -49,22 +94,85 @@ jobs:
|
||||
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
|
||||
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/ENTERPRISE_LICENSE_KEY=.*/ENTERPRISE_LICENSE_KEY=${RANDOM_KEY}/" .env
|
||||
sed -i "s/ENTERPRISE_LICENSE_KEY=.*/ENTERPRISE_LICENSE_KEY=${{ secrets.ENTERPRISE_LICENSE_KEY }}/" .env
|
||||
sed -i "s|REDIS_URL=.*|REDIS_URL=redis://localhost:6379|" .env
|
||||
echo "" >> .env
|
||||
echo "E2E_TESTING=1" >> .env
|
||||
echo "S3_REGION=us-east-1" >> .env
|
||||
echo "S3_BUCKET_NAME=formbricks-e2e" >> .env
|
||||
echo "S3_ENDPOINT_URL=http://localhost:9000" >> .env
|
||||
echo "S3_ACCESS_KEY=minioadmin" >> .env
|
||||
echo "S3_SECRET_KEY=minioadmin" >> .env
|
||||
echo "S3_FORCE_PATH_STYLE=1" >> .env
|
||||
shell: bash
|
||||
|
||||
- name: Install MinIO client (mc)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
MC_VERSION="RELEASE.2025-08-13T08-35-41Z"
|
||||
MC_BASE="https://dl.min.io/client/mc/release/linux-amd64/archive"
|
||||
MC_BIN="mc.${MC_VERSION}"
|
||||
MC_SUM="${MC_BIN}.sha256sum"
|
||||
|
||||
curl -fsSL "${MC_BASE}/${MC_BIN}" -o "${MC_BIN}"
|
||||
curl -fsSL "${MC_BASE}/${MC_SUM}" -o "${MC_SUM}"
|
||||
|
||||
sha256sum -c "${MC_SUM}"
|
||||
|
||||
chmod +x "${MC_BIN}"
|
||||
sudo mv "${MC_BIN}" /usr/local/bin/mc
|
||||
|
||||
- name: Wait for MinIO and create S3 bucket
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo "Waiting for MinIO to be ready..."
|
||||
ready=0
|
||||
for i in {1..60}; do
|
||||
if curl -fsS http://localhost:9000/minio/health/live >/dev/null; then
|
||||
echo "MinIO is up after ${i} seconds"
|
||||
ready=1
|
||||
break
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
|
||||
if [ "$ready" -ne 1 ]; then
|
||||
echo "::error::MinIO did not become ready within 60 seconds"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
mc alias set local http://localhost:9000 minioadmin minioadmin
|
||||
mc mb --ignore-existing local/formbricks-e2e
|
||||
|
||||
- name: Build App
|
||||
run: |
|
||||
pnpm build --filter=@formbricks/web...
|
||||
|
||||
- name: Apply Prisma Migrations
|
||||
run: |
|
||||
pnpm prisma migrate deploy
|
||||
# pnpm prisma migrate deploy
|
||||
pnpm db:migrate:dev
|
||||
|
||||
- name: Run Rate Limiter Load Tests
|
||||
run: |
|
||||
echo "Running rate limiter load tests with Redis/Valkey..."
|
||||
cd apps/web && pnpm vitest run modules/core/rate-limit/rate-limit-load.test.ts
|
||||
shell: bash
|
||||
|
||||
- name: Check for Enterprise License
|
||||
run: |
|
||||
LICENSE_KEY=$(grep '^ENTERPRISE_LICENSE_KEY=' .env | cut -d'=' -f2-)
|
||||
if [ -z "$LICENSE_KEY" ]; then
|
||||
echo "::error::ENTERPRISE_LICENSE_KEY in .env is empty. Please check your secret configuration."
|
||||
exit 1
|
||||
fi
|
||||
echo "License key length: ${#LICENSE_KEY}"
|
||||
|
||||
- name: Run App
|
||||
run: |
|
||||
NODE_ENV=test pnpm start --filter=@formbricks/web &
|
||||
echo "Starting app with enterprise license..."
|
||||
NODE_ENV=test pnpm start --filter=@formbricks/web | tee app.log 2>&1 &
|
||||
sleep 10 # Optional: gives some buffer for the app to start
|
||||
for attempt in {1..10}; do
|
||||
if [ $(curl -o /dev/null -s -w "%{http_code}" http://localhost:3000/health) -eq 200 ]; then
|
||||
@@ -82,13 +190,47 @@ jobs:
|
||||
- name: Install Playwright
|
||||
run: pnpm exec playwright install --with-deps
|
||||
|
||||
- name: Run E2E Tests
|
||||
- name: Set Azure Secret Variables
|
||||
run: |
|
||||
if [[ -n "${{ secrets.AZURE_CLIENT_ID }}" && -n "${{ secrets.AZURE_TENANT_ID }}" && -n "${{ secrets.AZURE_SUBSCRIPTION_ID }}" ]]; then
|
||||
echo "AZURE_ENABLED=true" >> $GITHUB_ENV
|
||||
else
|
||||
echo "AZURE_ENABLED=false" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Azure login
|
||||
if: env.AZURE_ENABLED == 'true'
|
||||
uses: azure/login@a65d910e8af852a8061c627c456678983e180302 # v2.2.0
|
||||
with:
|
||||
client-id: ${{ secrets.AZURE_CLIENT_ID }}
|
||||
tenant-id: ${{ secrets.AZURE_TENANT_ID }}
|
||||
subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
|
||||
|
||||
- name: Run E2E Tests (Azure)
|
||||
if: env.AZURE_ENABLED == 'true'
|
||||
env:
|
||||
PLAYWRIGHT_SERVICE_URL: ${{ secrets.PLAYWRIGHT_SERVICE_URL }}
|
||||
run: |
|
||||
pnpm test-e2e:azure
|
||||
|
||||
- name: Run E2E Tests (Local)
|
||||
if: env.AZURE_ENABLED == 'false'
|
||||
run: |
|
||||
pnpm test:e2e
|
||||
|
||||
- uses: actions/upload-artifact@v3
|
||||
- uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
|
||||
if: always()
|
||||
with:
|
||||
name: playwright-report
|
||||
path: playwright-report/
|
||||
retention-days: 30
|
||||
|
||||
- uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
|
||||
if: failure()
|
||||
with:
|
||||
name: app-logs
|
||||
path: app.log
|
||||
|
||||
- name: Output App Logs
|
||||
if: failure()
|
||||
run: cat app.log
|
||||
|
||||
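The E2E job above points the app at a MinIO container through the S3_* variables and creates the formbricks-e2e bucket with the mc client. An equivalent bucket bootstrap using the AWS SDK (sketch only, reusing the same endpoint and credentials the workflow configures for MinIO) would be:

```typescript
import { CreateBucketCommand, HeadBucketCommand, S3Client } from "@aws-sdk/client-s3";

// Matches the S3_* values the workflow writes into .env for the MinIO service.
const s3 = new S3Client({
  region: "us-east-1",
  endpoint: "http://localhost:9000",
  forcePathStyle: true,
  credentials: { accessKeyId: "minioadmin", secretAccessKey: "minioadmin" },
});

async function ensureBucket(bucket: string): Promise<void> {
  try {
    await s3.send(new HeadBucketCommand({ Bucket: bucket }));
  } catch {
    // Bucket does not exist yet (the equivalent of `mc mb --ignore-existing`).
    await s3.send(new CreateBucketCommand({ Bucket: bucket }));
  }
}

// await ensureBucket("formbricks-e2e");
```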
58
.github/workflows/formbricks-release.yml
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
name: Build, release & deploy Formbricks images
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
docker-build:
|
||||
name: Build & release docker image
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
uses: ./.github/workflows/release-docker-github.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
IS_PRERELEASE: ${{ github.event.release.prerelease }}
|
||||
|
||||
helm-chart-release:
|
||||
name: Release Helm Chart
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
uses: ./.github/workflows/release-helm-chart.yml
|
||||
secrets: inherit
|
||||
needs:
|
||||
- docker-build
|
||||
with:
|
||||
VERSION: ${{ needs.docker-build.outputs.VERSION }}
|
||||
|
||||
deploy-formbricks-cloud:
|
||||
name: Deploy Helm Chart to Formbricks Cloud
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
secrets: inherit
|
||||
uses: ./.github/workflows/deploy-formbricks-cloud.yml
|
||||
needs:
|
||||
- docker-build
|
||||
- helm-chart-release
|
||||
with:
|
||||
VERSION: v${{ needs.docker-build.outputs.VERSION }}
|
||||
ENVIRONMENT: ${{ github.event.release.prerelease && 'staging' || 'production' }}
|
||||
|
||||
move-stable-tag:
|
||||
name: Move stable tag to release
|
||||
permissions:
|
||||
contents: read
|
||||
uses: ./.github/workflows/move-stable-tag.yml
|
||||
needs:
|
||||
- docker-build # Ensure release is successful first
|
||||
with:
|
||||
release_tag: ${{ github.event.release.tag_name }}
|
||||
commit_sha: ${{ github.sha }}
|
||||
is_prerelease: ${{ github.event.release.prerelease }}
|
||||
19
.github/workflows/labeler.yml
vendored
@@ -1,19 +0,0 @@
|
||||
name: "Pull Request Labeler"
|
||||
on:
|
||||
- pull_request_target
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
jobs:
|
||||
labeler:
|
||||
name: Pull Request Labeler
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/labeler@v4
|
||||
with:
|
||||
repo-token: "${{ secrets.GITHUB_TOKEN }}"
|
||||
# https://github.com/actions/labeler/issues/442#issuecomment-1297359481
|
||||
sync-labels: ""
|
||||
15
.github/workflows/lint.yml
vendored
@@ -1,6 +1,10 @@
|
||||
name: Lint
|
||||
on:
|
||||
workflow_call:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Linters
|
||||
@@ -8,16 +12,21 @@ jobs:
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: ./.github/actions/dangerous-git-checkout
|
||||
|
||||
- name: Setup Node.js 20.x
|
||||
uses: actions/setup-node@v3
|
||||
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af
|
||||
with:
|
||||
node-version: 20.x
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --config.platform=linux --config.architecture=x64
|
||||
|
||||
96
.github/workflows/move-stable-tag.yml
vendored
Normal file
@@ -0,0 +1,96 @@
|
||||
name: Move Stable Tag
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
release_tag:
|
||||
description: "The release tag name (e.g., v1.2.3)"
|
||||
required: true
|
||||
type: string
|
||||
commit_sha:
|
||||
description: "The commit SHA to point the stable tag to"
|
||||
required: true
|
||||
type: string
|
||||
is_prerelease:
|
||||
description: "Whether this is a prerelease (stable tag won't be moved for prereleases)"
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
# Prevent concurrent stable tag operations to avoid race conditions
|
||||
concurrency:
|
||||
group: move-stable-tag-${{ github.repository }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
move-stable-tag:
|
||||
name: Move stable tag to release
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10 # Prevent hung git operations
|
||||
permissions:
|
||||
contents: write # Required to push tags
|
||||
# Only move stable tag for non-prerelease versions
|
||||
if: ${{ !inputs.is_prerelease }}
|
||||
steps:
|
||||
- name: Harden the runner
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0 # Full history needed for tag operations
|
||||
|
||||
- name: Validate inputs
|
||||
env:
|
||||
RELEASE_TAG: ${{ inputs.release_tag }}
|
||||
COMMIT_SHA: ${{ inputs.commit_sha }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Validate release tag format
|
||||
if [[ ! "$RELEASE_TAG" =~ ^v[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$ ]]; then
|
||||
echo "❌ Error: Invalid release tag format. Expected format: v1.2.3, v1.2.3-alpha"
|
||||
echo "Provided: $RELEASE_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate commit SHA format (40 character hex)
|
||||
if [[ ! "$COMMIT_SHA" =~ ^[a-f0-9]{40}$ ]]; then
|
||||
echo "❌ Error: Invalid commit SHA format. Expected 40 character hex string"
|
||||
echo "Provided: $COMMIT_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Input validation passed"
|
||||
echo "Release tag: $RELEASE_TAG"
|
||||
echo "Commit SHA: $COMMIT_SHA"
|
||||
|
||||
- name: Move stable tag
|
||||
env:
|
||||
RELEASE_TAG: ${{ inputs.release_tag }}
|
||||
COMMIT_SHA: ${{ inputs.commit_sha }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Configure git
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
# Verify the commit exists
|
||||
if ! git cat-file -e "$COMMIT_SHA"; then
|
||||
echo "❌ Error: Commit $COMMIT_SHA does not exist in this repository"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Move stable tag to the release commit
|
||||
echo "📌 Moving stable tag to commit: $COMMIT_SHA (release: $RELEASE_TAG)"
|
||||
git tag -f stable "$COMMIT_SHA"
|
||||
git push origin stable --force
|
||||
|
||||
echo "✅ Successfully moved stable tag to release $RELEASE_TAG"
|
||||
echo "🔗 Stable tag now points to: https://github.com/${{ github.repository }}/commit/$COMMIT_SHA"
|
||||
52
.github/workflows/pr.yml
vendored
@@ -1,9 +1,15 @@
|
||||
name: PR Update
|
||||
|
||||
# Update permissions to include all necessary ones
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
actions: read
|
||||
checks: write
|
||||
id-token: write
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
merge_group:
|
||||
workflow_dispatch:
|
||||
|
||||
@@ -12,64 +18,40 @@ concurrency:
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
changes:
|
||||
name: Detect changes
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: read
|
||||
outputs:
|
||||
has-files-requiring-all-checks: ${{ steps.filter.outputs.has-files-requiring-all-checks }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./.github/actions/dangerous-git-checkout
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
filters: |
|
||||
has-files-requiring-all-checks:
|
||||
- "!(**.md|.github/CODEOWNERS)"
|
||||
|
||||
test:
|
||||
name: Run Unit Tests
|
||||
needs: [changes]
|
||||
if: ${{ needs.changes.outputs.has-files-requiring-all-checks == 'true' }}
|
||||
uses: ./.github/workflows/test.yml
|
||||
secrets: inherit
|
||||
|
||||
lint:
|
||||
name: Run Linters
|
||||
needs: [changes]
|
||||
if: ${{ needs.changes.outputs.has-files-requiring-all-checks == 'true' }}
|
||||
uses: ./.github/workflows/lint.yml
|
||||
secrets: inherit
|
||||
|
||||
build:
|
||||
name: Build Formbricks-web
|
||||
needs: [changes]
|
||||
if: ${{ needs.changes.outputs.has-files-requiring-all-checks == 'true' }}
|
||||
uses: ./.github/workflows/build-web.yml
|
||||
secrets: inherit
|
||||
|
||||
docs:
|
||||
name: Build Docs
|
||||
needs: [changes]
|
||||
if: ${{ needs.changes.outputs.has-files-requiring-all-checks == 'true' }}
|
||||
uses: ./.github/workflows/build-docs.yml
|
||||
secrets: inherit
|
||||
|
||||
e2e-test:
|
||||
name: Run E2E Tests
|
||||
needs: [changes]
|
||||
if: ${{ needs.changes.outputs.has-files-requiring-all-checks == 'true' }}
|
||||
uses: ./.github/workflows/e2e.yml
|
||||
secrets: inherit
|
||||
|
||||
required:
|
||||
name: PR Check Summary
|
||||
needs: [lint, test, build, e2e-test, docs]
|
||||
needs: [lint, test, build, e2e-test]
|
||||
if: always()
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
checks: write
|
||||
statuses: write
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0
|
||||
with:
|
||||
egress-policy: audit
|
||||
- name: fail if conditional jobs failed
|
||||
if: contains(needs.*.result, 'failure') || contains(needs.*.result, 'skipped') || contains(needs.*.result, 'cancelled')
|
||||
run: exit 1
|
||||
|
||||
46
.github/workflows/release-changesets.yml
vendored
@@ -1,46 +0,0 @@
|
||||
name: Release Changesets
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
#push:
|
||||
# branches:
|
||||
# - main
|
||||
|
||||
concurrency: ${{ github.workflow }}-${{ github.ref }}
|
||||
|
||||
env:
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
|
||||
jobs:
|
||||
release:
|
||||
name: Release
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
env:
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
steps:
|
||||
- name: Checkout Repo
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Setup Node.js 18.x
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 18.x
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v2.2.4
|
||||
|
||||
- name: Install Dependencies
|
||||
run: pnpm install --config.platform=linux --config.architecture=x64
|
||||
|
||||
- name: Create Release Pull Request or Publish to npm
|
||||
id: changesets
|
||||
uses: changesets/action@v1
|
||||
with:
|
||||
# This expects you to have a script called release which does a build for your packages and calls changeset publish
|
||||
publish: pnpm release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
@@ -1,62 +0,0 @@
|
||||
name: Docker for Data Migrations
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
tags:
|
||||
- "v*"
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: formbricks/data-migrations
|
||||
DATABASE_URL: "postgresql://postgres:postgres@postgres:5432/formbricks?schema=public"
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install cosign
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: sigstore/cosign-installer@v3.5.0
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=ref,event=tag
|
||||
type=raw,value=${{ github.ref_name }}
|
||||
type=raw,value=latest
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
file: ./packages/database/Dockerfile
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: |
|
||||
DATABASE_URL=${{ env.DATABASE_URL }}
|
||||
|
||||
- name: Sign the published Docker image
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
run: |
|
||||
cosign sign --yes ghcr.io/${{ env.IMAGE_NAME }}:${{ github.ref_name }}
|
||||
cosign sign --yes ghcr.io/${{ env.IMAGE_NAME }}:latest
|
||||
@@ -15,7 +15,9 @@ env:
|
||||
IMAGE_NAME: ${{ github.repository }}-experimental
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
DATABASE_URL: "postgresql://postgres:postgres@localhost:5432/formbricks?schema=public"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
@@ -27,24 +29,75 @@ jobs:
|
||||
# with sigstore/fulcio when running outside of PRs.
|
||||
id-token: write
|
||||
|
||||
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Generate SemVer version from branch or tag
|
||||
id: generate_version
|
||||
env:
|
||||
REF_NAME: ${{ github.ref_name }}
|
||||
REF_TYPE: ${{ github.ref_type }}
|
||||
run: |
|
||||
# Get reference name and type from environment variables
|
||||
echo "Reference type: $REF_TYPE"
|
||||
echo "Reference name: $REF_NAME"
|
||||
|
||||
# Create unique timestamped version for testing sourcemap resolution
|
||||
TIMESTAMP=$(date +%s)
|
||||
|
||||
if [[ "$REF_TYPE" == "tag" ]]; then
|
||||
# If running from a tag, use the tag name + timestamp
|
||||
if [[ "$REF_NAME" =~ ^v?[0-9]+\.[0-9]+\.[0-9]+.*$ ]]; then
|
||||
# Tag looks like a SemVer, use it directly (remove 'v' prefix if present)
|
||||
BASE_VERSION=$(echo "$REF_NAME" | sed 's/^v//')
|
||||
VERSION="${BASE_VERSION}-${TIMESTAMP}"
|
||||
echo "Using SemVer tag with timestamp: $VERSION"
|
||||
else
|
||||
# Tag is not SemVer, treat as prerelease
|
||||
SANITIZED_TAG=$(echo "$REF_NAME" | sed 's/[^a-zA-Z0-9.-]/-/g' | sed 's/--*/-/g' | sed 's/^-\|-$//g')
|
||||
VERSION="0.0.0-${SANITIZED_TAG}-${TIMESTAMP}"
|
||||
echo "Using tag as prerelease with timestamp: $VERSION"
|
||||
fi
|
||||
else
|
||||
# Running from branch, use branch name as prerelease + timestamp
|
||||
SANITIZED_BRANCH=$(echo "$REF_NAME" | sed 's/[^a-zA-Z0-9.-]/-/g' | sed 's/--*/-/g' | sed 's/^-\|-$//g')
|
||||
VERSION="0.0.0-${SANITIZED_BRANCH}-${TIMESTAMP}"
|
||||
echo "Using branch as prerelease with timestamp: $VERSION"
|
||||
fi
|
||||
|
||||
echo "VERSION=$VERSION" >> $GITHUB_ENV
|
||||
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
|
||||
echo "Generated SemVer version: $VERSION"
|
||||
|
||||
- name: Update package.json version
|
||||
run: |
|
||||
sed -i "s/\"version\": \"0.0.0\"/\"version\": \"${{ env.VERSION }}\"/" ./apps/web/package.json
|
||||
cat ./apps/web/package.json | grep version
|
||||
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
|
||||
|
||||
# Install the cosign tool except on PR
|
||||
# https://github.com/sigstore/cosign-installer
|
||||
- name: Install cosign
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: sigstore/cosign-installer@v3.5.0
|
||||
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
|
||||
|
||||
# Login against a Docker registry except on PR
|
||||
# https://github.com/docker/login-action
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v3 # v3.0.0
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
@@ -54,15 +107,18 @@ jobs:
|
||||
# https://github.com/docker/metadata-action
|
||||
- name: Extract Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5 # v5.0.0
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=raw,value=${{ env.VERSION }}
|
||||
|
||||
# Build and push Docker image with Buildx (don't push on PR)
|
||||
# https://github.com/docker/build-push-action
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: depot/build-push-action@v1
|
||||
uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
|
||||
with:
|
||||
project: tw0fqmsx3c
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
@@ -72,8 +128,12 @@ jobs:
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
secrets: |
|
||||
database_url=${{ secrets.DUMMY_DATABASE_URL }}
|
||||
encryption_key=${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
sentry_auth_token=${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
|
||||
|
||||
|
||||
# Sign the resulting Docker image digest except on PRs.
|
||||
# This will only write to the public Rekor transparency log when the Docker
|
||||
@@ -88,4 +148,4 @@ jobs:
|
||||
DIGEST: ${{ steps.build-and-push.outputs.digest }}
|
||||
# This step uses the identity token to provision an ephemeral certificate
|
||||
# against the sigstore community Fulcio instance.
|
||||
run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST}
|
||||
run: echo "${TAGS}" | xargs -I {} cosign sign --yes "{}@${DIGEST}"
|
||||
80
.github/workflows/release-docker-github.yml
vendored
@@ -6,10 +6,17 @@ name: Docker Release to Github
|
||||
# documentation.
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
tags:
|
||||
- "v*"
|
||||
workflow_call:
|
||||
inputs:
|
||||
IS_PRERELEASE:
|
||||
description: "Whether this is a prerelease (affects latest tag)"
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
outputs:
|
||||
VERSION:
|
||||
description: release version
|
||||
value: ${{ jobs.build.outputs.VERSION }}
|
||||
|
||||
env:
|
||||
# Use docker.io for Docker Hub if empty
|
||||
@@ -18,7 +25,9 @@ env:
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
DATABASE_URL: "postgresql://postgres:postgres@localhost:5432/formbricks?schema=public"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
@@ -26,28 +35,62 @@ jobs:
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
# This is used to complete the identity challenge
|
||||
# with sigstore/fulcio when running outside of PRs.
|
||||
id-token: write
|
||||
|
||||
outputs:
|
||||
VERSION: ${{ steps.extract_release_tag.outputs.VERSION }}
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Get Release Tag
|
||||
id: extract_release_tag
|
||||
run: |
|
||||
# Extract version from tag (e.g., refs/tags/v1.2.3 -> 1.2.3)
|
||||
TAG="$GITHUB_REF"
|
||||
TAG=${TAG#refs/tags/v}
|
||||
|
||||
# Validate the extracted tag format
|
||||
if [[ ! "$TAG" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$ ]]; then
|
||||
echo "❌ Error: Invalid release tag format after extraction. Must be semver (e.g., 1.2.3, 1.2.3-alpha)"
|
||||
echo "Original ref: $GITHUB_REF"
|
||||
echo "Extracted tag: $TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Safely add to environment variables
|
||||
echo "RELEASE_TAG=$TAG" >> $GITHUB_ENV
|
||||
|
||||
echo "VERSION=$TAG" >> $GITHUB_OUTPUT
|
||||
echo "Using tag-based version: $TAG"
|
||||
|
||||
- name: Update package.json version
|
||||
run: |
|
||||
sed -i "s/\"version\": \"0.0.0\"/\"version\": \"${{ env.RELEASE_TAG }}\"/" ./apps/web/package.json
|
||||
cat ./apps/web/package.json | grep version
|
||||
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
|
||||
|
||||
# Install the cosign tool except on PR
|
||||
# https://github.com/sigstore/cosign-installer
|
||||
- name: Install cosign
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: sigstore/cosign-installer@v3.5.0
|
||||
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
|
||||
|
||||
# Login against a Docker registry except on PR
|
||||
# https://github.com/docker/login-action
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v3 # v3.0.0
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
@@ -57,15 +100,22 @@ jobs:
|
||||
# https://github.com/docker/metadata-action
|
||||
- name: Extract Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5 # v5.0.0
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
# Default semver tags (version, major.minor, major)
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
# Only tag as 'latest' for stable releases (not prereleases)
|
||||
type=raw,value=latest,enable=${{ !inputs.IS_PRERELEASE }}
|
||||
|
||||
# Build and push Docker image with Buildx (don't push on PR)
|
||||
# https://github.com/docker/build-push-action
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: depot/build-push-action@v1
|
||||
uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
|
||||
with:
|
||||
project: tw0fqmsx3c
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
@@ -75,8 +125,10 @@ jobs:
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
secrets: |
|
||||
database_url=${{ secrets.DUMMY_DATABASE_URL }}
|
||||
encryption_key=${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||
sentry_auth_token=${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
|
||||
# Sign the resulting Docker image digest except on PRs.
|
||||
# This will only write to the public Rekor transparency log when the Docker
|
||||
|
||||
44
.github/workflows/release-docker.yml
vendored
@@ -1,44 +0,0 @@
|
||||
name: Release on Dockerhub
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v*"
|
||||
|
||||
jobs:
|
||||
release-image-on-dockerhub:
|
||||
name: Release on Dockerhub
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
DATABASE_URL: "postgresql://postgres:postgres@localhost:5432/formbricks?schema=public"
|
||||
steps:
|
||||
- name: Checkout Repo
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Get Release Tag
|
||||
id: extract_release_tag
|
||||
run: |
|
||||
TAG=${{ github.ref }}
|
||||
TAG=${TAG#refs/tags/v}
|
||||
echo "RELEASE_TAG=$TAG" >> $GITHUB_ENV
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
file: ./apps/web/Dockerfile
|
||||
push: true
|
||||
tags: |
|
||||
${{ secrets.DOCKER_USERNAME }}/formbricks:${{ env.RELEASE_TAG }}
|
||||
${{ secrets.DOCKER_USERNAME }}/formbricks:latest
|
||||
72
.github/workflows/release-helm-chart.yml
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
name: Publish Helm Chart
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
VERSION:
|
||||
description: "The version of the Helm chart to release"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
packages: write
|
||||
contents: read
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Validate input version
|
||||
env:
|
||||
INPUT_VERSION: ${{ inputs.VERSION }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Validate input version format (expects clean semver without 'v' prefix)
|
||||
if [[ ! "$INPUT_VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$ ]]; then
|
||||
echo "❌ Error: Invalid version format. Must be clean semver (e.g., 1.2.3, 1.2.3-alpha)"
|
||||
echo "Expected: clean version without 'v' prefix"
|
||||
echo "Provided: $INPUT_VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Store validated version in environment variable
|
||||
echo "VERSION<<EOF" >> $GITHUB_ENV
|
||||
echo "$INPUT_VERSION" >> $GITHUB_ENV
|
||||
echo "EOF" >> $GITHUB_ENV
|
||||
|
||||
- name: Set up Helm
|
||||
uses: azure/setup-helm@5119fcb9089d432beecbf79bb2c7915207344b78 # v3.5
|
||||
with:
|
||||
version: latest
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_ACTOR: ${{ github.actor }}
|
||||
run: printf '%s' "$GITHUB_TOKEN" | helm registry login ghcr.io --username "$GITHUB_ACTOR" --password-stdin
|
||||
|
||||
- name: Install YQ
|
||||
uses: dcarbone/install-yq-action@4075b4dca348d74bd83f2bf82d30f25d7c54539b # v1.3.1
|
||||
|
||||
- name: Update Chart.yaml with new version
|
||||
run: |
|
||||
yq -i ".version = \"$VERSION\"" helm-chart/Chart.yaml
|
||||
yq -i ".appVersion = \"v$VERSION\"" helm-chart/Chart.yaml
|
||||
|
||||
- name: Package Helm chart
|
||||
run: |
|
||||
helm package ./helm-chart
|
||||
|
||||
- name: Push Helm chart to GitHub Container Registry
|
||||
run: |
|
||||
helm push "formbricks-$VERSION.tgz" oci://ghcr.io/formbricks/helm-charts
|
||||
17
.github/workflows/semantic-pull-requests.yml
vendored
@@ -16,7 +16,12 @@ jobs:
|
||||
name: PR title
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: amannn/action-semantic-pull-request@v5
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- uses: amannn/action-semantic-pull-request@0723387faaf9b38adef4775cd42cfd5155ed6017 # v5.5.3
|
||||
id: lint_pr_title
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
@@ -35,7 +40,7 @@ jobs:
|
||||
revert
|
||||
ossgg
|
||||
|
||||
- uses: marocchino/sticky-pull-request-comment@v2
|
||||
- uses: marocchino/sticky-pull-request-comment@67d0dec7b07ed060a405f9b2a64b8ab319fdd7db # v2.9.2
|
||||
# When the previous steps fails, the workflow would stop. By adding this
|
||||
# condition you can continue the execution with the populated error message.
|
||||
if: always() && (steps.lint_pr_title.outputs.error_message != null)
|
||||
@@ -51,11 +56,3 @@ jobs:
|
||||
```
|
||||
${{ steps.lint_pr_title.outputs.error_message }}
|
||||
```
|
||||
|
||||
# Delete a previous comment when the issue has been resolved
|
||||
- if: ${{ steps.lint_pr_title.outputs.error_message == null }}
|
||||
uses: marocchino/sticky-pull-request-comment@v2
|
||||
with:
|
||||
header: pr-title-lint-error
|
||||
message: |
|
||||
Thank you for following the naming conventions for pull request titles! 🙏
|
||||
|
||||
55
.github/workflows/sonarqube.yml
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
name: SonarQube
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
merge_group:
|
||||
permissions:
|
||||
contents: read
|
||||
jobs:
|
||||
sonarqube:
|
||||
name: SonarQube
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
|
||||
|
||||
- name: Setup Node.js 22.x
|
||||
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af
|
||||
with:
|
||||
node-version: 22.x
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --config.platform=linux --config.architecture=x64
|
||||
|
||||
- name: create .env
|
||||
run: cp .env.example .env
|
||||
|
||||
- name: Generate Random ENCRYPTION_KEY, CRON_SECRET & NEXTAUTH_SECRET and fill in .env
|
||||
run: |
|
||||
RANDOM_KEY=$(openssl rand -hex 32)
|
||||
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
|
||||
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s|REDIS_URL=.*|REDIS_URL=|" .env
|
||||
|
||||
- name: Run tests with coverage
|
||||
run: |
|
||||
pnpm test:coverage
|
||||
- name: SonarQube Scan
|
||||
uses: SonarSource/sonarqube-scan-action@2500896589ef8f7247069a56136f8dc177c27ccf
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
|
||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
86
.github/workflows/terraform-plan-and-apply.yml
vendored
Normal file
@@ -0,0 +1,86 @@
|
||||
name: "Terraform"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
# TODO: enable it back when migration is completed.
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "infra/terraform/**"
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "infra/terraform/**"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
terraform:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: write
|
||||
pull-requests: write
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Tailscale
|
||||
uses: tailscale/github-action@84a3f23bb4d843bcf4da6cf824ec1be473daf4de # v3.2.3
|
||||
with:
|
||||
oauth-client-id: ${{ secrets.TS_OAUTH_CLIENT_ID }}
|
||||
oauth-secret: ${{ secrets.TS_OAUTH_SECRET }}
|
||||
tags: tag:github
|
||||
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@f24d7193d98baebaeacc7e2227925dd47cc267f5 # v4.2.0
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }}
|
||||
aws-region: "eu-central-1"
|
||||
|
||||
- name: Setup Terraform
|
||||
uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3.1.2
|
||||
|
||||
- name: Terraform Format
|
||||
id: fmt
|
||||
run: terraform fmt -check -recursive
|
||||
continue-on-error: true
|
||||
working-directory: infra/terraform
|
||||
|
||||
- name: Terraform Init
|
||||
id: init
|
||||
run: terraform init
|
||||
working-directory: infra/terraform
|
||||
|
||||
- name: Terraform Validate
|
||||
id: validate
|
||||
run: terraform validate
|
||||
working-directory: infra/terraform
|
||||
|
||||
- name: Terraform Plan
|
||||
id: plan
|
||||
run: terraform plan -out .planfile
|
||||
working-directory: infra/terraform
|
||||
|
||||
- name: Post PR comment
|
||||
uses: borchero/terraform-plan-comment@434458316f8f24dd073cd2561c436cce41dc8f34 # v2.4.1
|
||||
if: always() && github.ref != 'refs/heads/main' && (steps.plan.outcome == 'success' || steps.plan.outcome == 'failure')
|
||||
with:
|
||||
token: ${{ github.token }}
|
||||
planfile: .planfile
|
||||
working-directory: "infra/terraform"
|
||||
|
||||
- name: Terraform Apply
|
||||
id: apply
|
||||
if: github.ref == 'refs/heads/main' && github.event_name == 'push'
|
||||
run: terraform apply .planfile
|
||||
working-directory: "infra/terraform"
|
||||
17
.github/workflows/test.yml
vendored
@@ -1,23 +1,33 @@
|
||||
name: Tests
|
||||
on:
|
||||
workflow_call:
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Unit Tests
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: ./.github/actions/dangerous-git-checkout
|
||||
|
||||
- name: Setup Node.js 20.x
|
||||
uses: actions/setup-node@v3
|
||||
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 # v3.8.2
|
||||
with:
|
||||
node-version: 20.x
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --config.platform=linux --config.architecture=x64
|
||||
@@ -31,6 +41,7 @@ jobs:
|
||||
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
|
||||
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
|
||||
sed -i "s|REDIS_URL=.*|REDIS_URL=|" .env
|
||||
|
||||
- name: Test
|
||||
run: pnpm test
|
||||
|
||||
51
.github/workflows/tolgee-missing-key-check.yml
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
name: Check Missing Translations
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
pull_request_target:
|
||||
types: [opened, synchronize, reopened]
|
||||
|
||||
jobs:
|
||||
check-missing-translations:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.base.ref }}
|
||||
|
||||
- name: Checkout PR
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
|
||||
with:
|
||||
node-version: 18
|
||||
|
||||
- name: Install Tolgee CLI
|
||||
run: npm install -g @tolgee/cli
|
||||
|
||||
- name: Compare Tolgee Keys
|
||||
id: compare
|
||||
run: |
|
||||
tolgee compare --api-key ${{ secrets.TOLGEE_API_KEY }} > compare_output.txt
|
||||
cat compare_output.txt
|
||||
|
||||
- name: Check for Missing Translations
|
||||
run: |
|
||||
if grep -q "new key found" compare_output.txt; then
|
||||
echo "New keys found that may require translations:"
|
||||
exit 1
|
||||
else
|
||||
echo "No new keys found."
|
||||
fi
|
||||
95
.github/workflows/tolgee.yml
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
name: Tolgee Tagging on PR Merge
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [closed]
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
tag-production-keys:
|
||||
name: Tag Production Keys
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.pull_request.merged == true
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0 # This ensures we get the full git history
|
||||
|
||||
- name: Get source branch name
|
||||
id: branch-name
|
||||
env:
|
||||
RAW_BRANCH: ${{ github.head_ref }}
|
||||
run: |
|
||||
# Validate and sanitize branch name - only allow alphanumeric, dots, underscores, hyphens, and forward slashes
|
||||
SOURCE_BRANCH=$(echo "$RAW_BRANCH" | sed 's/[^a-zA-Z0-9._\/-]//g')
|
||||
|
||||
# Additional validation - ensure branch name is not empty after sanitization
|
||||
if [[ -z "$SOURCE_BRANCH" ]]; then
|
||||
echo "❌ Error: Branch name is empty after sanitization"
|
||||
echo "Original branch: $RAW_BRANCH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Safely add to environment variables using GitHub's recommended method
|
||||
# This prevents environment variable injection attacks
|
||||
echo "SOURCE_BRANCH<<EOF" >> $GITHUB_ENV
|
||||
echo "$SOURCE_BRANCH" >> $GITHUB_ENV
|
||||
echo "EOF" >> $GITHUB_ENV
|
||||
|
||||
echo "Detected source branch: $SOURCE_BRANCH"
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
|
||||
with:
|
||||
node-version: 18 # Ensure compatibility with your project
|
||||
|
||||
- name: Install Tolgee CLI
|
||||
run: npm install -g @tolgee/cli
|
||||
|
||||
- name: Tag Production Keys
|
||||
run: |
|
||||
npx tolgee tag \
|
||||
--api-key ${{ secrets.TOLGEE_API_KEY }} \
|
||||
--filter-extracted \
|
||||
--filter-tag "draft:${SOURCE_BRANCH}" \
|
||||
--tag production \
|
||||
--untag "draft:${SOURCE_BRANCH}"
|
||||
|
||||
- name: Tag unused production keys as Deprecated
|
||||
run: |
|
||||
npx tolgee tag \
|
||||
--api-key ${{ secrets.TOLGEE_API_KEY }} \
|
||||
--filter-not-extracted --filter-tag production \
|
||||
--tag deprecated --untag production
|
||||
|
||||
- name: Tag unused draft:current-branch keys as Deprecated
|
||||
run: |
|
||||
npx tolgee tag \
|
||||
--api-key ${{ secrets.TOLGEE_API_KEY }} \
|
||||
--filter-not-extracted --filter-tag "draft:${SOURCE_BRANCH}" \
|
||||
--tag deprecated --untag "draft:${SOURCE_BRANCH}"
|
||||
|
||||
- name: Sync with backup
|
||||
run: |
|
||||
npx tolgee sync \
|
||||
--api-key ${{ secrets.TOLGEE_API_KEY }} \
|
||||
--backup ./tolgee-backup \
|
||||
--continue-on-warning \
|
||||
--yes
|
||||
|
||||
- name: Upload backup as artifact
|
||||
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
|
||||
with:
|
||||
name: tolgee-backup-${{ github.sha }}
|
||||
path: ./tolgee-backup
|
||||
retention-days: 90
|
||||
27
.github/workflows/welcome-new-contributors.yml
vendored
@@ -1,27 +0,0 @@
|
||||
name: "Welcome new contributors"
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: opened
|
||||
pull_request:
|
||||
types: opened
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
issues: write
|
||||
|
||||
jobs:
|
||||
welcome-message:
|
||||
name: Welcoming New Users
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
if: github.event.action == 'opened'
|
||||
steps:
|
||||
- uses: actions/first-interaction@v1
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
pr-message: |-
|
||||
Thank you so much for making your first Pull Request and taking the time to improve Formbricks! 🚀🙏❤️
|
||||
Feel free to join the conversation at [Discord](https://formbricks.com/discord)
|
||||
issue-message: |
|
||||
Thank you for opening your first issue! 🙏❤️ One of our team members will review it and get back to you as soon as possible. 😊
|
||||
65
.gitignore
vendored
@@ -1,25 +1,26 @@
|
||||
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
|
||||
|
||||
# dependencies
|
||||
node_modules
|
||||
**/node_modules
|
||||
.pnp
|
||||
.pnp.js
|
||||
.pnpm-store/
|
||||
|
||||
# testing
|
||||
coverage
|
||||
**/coverage
|
||||
|
||||
# next.js
|
||||
.next/
|
||||
out/
|
||||
build
|
||||
**/.next/
|
||||
**/out/
|
||||
**/build
|
||||
|
||||
# node
|
||||
dist/
|
||||
**/dist/
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
**/.DS_Store
|
||||
*.pem
|
||||
Zone.Identifier
|
||||
|
||||
# debug
|
||||
npm-debug.log*
|
||||
@@ -27,36 +28,50 @@ yarn-debug.log*
|
||||
yarn-error.log*
|
||||
|
||||
# local env files
|
||||
.env
|
||||
.env.local
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
**/.env
|
||||
**/.env.local
|
||||
**/.env.development.local
|
||||
**/.env.test.local
|
||||
**/.env.production.local
|
||||
!packages/database/.env
|
||||
!apps/web/.env
|
||||
|
||||
# Prisma generated files
|
||||
packages/database/zod
|
||||
|
||||
# turbo
|
||||
# build tools
|
||||
.turbo
|
||||
**/*vite.config.*.timestamp-*
|
||||
|
||||
# nixos stuff
|
||||
# environment specific
|
||||
.direnv
|
||||
|
||||
Zone.Identifier
|
||||
|
||||
# Playwright
|
||||
/test-results/
|
||||
/playwright-report/
|
||||
/blob-report/
|
||||
/playwright/.cache/
|
||||
|
||||
# uploads
|
||||
# project specific
|
||||
packages/lib/uploads
|
||||
|
||||
# Vite Timestamps
|
||||
*vite.config.*.timestamp-*
|
||||
|
||||
# js compiled assets
|
||||
apps/web/public/js
|
||||
packages/database/migrations
|
||||
branch.json
|
||||
.vercel
|
||||
|
||||
# Terraform
|
||||
infra/terraform/.terraform/
|
||||
**/.terraform.lock.hcl
|
||||
**/terraform.tfstate
|
||||
**/terraform.tfstate.*
|
||||
**/crash.log
|
||||
**/override.tf
|
||||
**/override.tf.json
|
||||
**/*.tfvars
|
||||
**/*.tfvars.json
|
||||
**/.terraformrc
|
||||
**/terraform.rc
|
||||
|
||||
# IntelliJ IDEA
|
||||
/.idea/
|
||||
/*.iml
|
||||
packages/ios/FormbricksSDK/FormbricksSDK.xcodeproj/project.xcworkspace/xcuserdata
|
||||
.cursorrules
|
||||
i18n.cache
|
||||
|
||||
6
.gitpod.Dockerfile
vendored
@@ -1,6 +0,0 @@
FROM gitpod/workspace-full

# Install custom tools, runtime, etc.
RUN brew install yq

RUN pnpm install turbo --global
74
.gitpod.yml
@@ -1,74 +0,0 @@
|
||||
tasks:
|
||||
- name: demo
|
||||
init: |
|
||||
gp sync-await init-install &&
|
||||
bash .gitpod/setup-demo.bash
|
||||
command: |
|
||||
cd apps/demo &&
|
||||
cp .env.example .env &&
|
||||
sed -i -r "s#^(NEXT_PUBLIC_FORMBRICKS_API_HOST=).*#\1 $(gp url 3000)#" .env &&
|
||||
gp sync-await init &&
|
||||
turbo --filter "@formbricks/demo" go
|
||||
|
||||
- name: Init Formbricks
|
||||
init: |
|
||||
cp .env.example .env &&
|
||||
bash .gitpod/init.bash &&
|
||||
turbo --filter "@formbricks/js" build &&
|
||||
gp sync-done init-install
|
||||
command: |
|
||||
gp sync-done init &&
|
||||
gp tasks list &&
|
||||
gp ports await 3002 && gp ports await 3000 && gp open apps/demo/.env && gp preview $(gp url 3002) --external
|
||||
|
||||
- name: web
|
||||
init: |
|
||||
gp sync-await init-install &&
|
||||
bash .gitpod/setup-web.bash &&
|
||||
turbo --filter "@formbricks/database" db:down
|
||||
command: |
|
||||
gp sync-await init &&
|
||||
cp .env.example .env &&
|
||||
sed -i -r "s#^(WEBAPP_URL=).*#\1 $(gp url 3000)#" .env &&
|
||||
RANDOM_ENCRYPTION_KEY=$(openssl rand -hex 32)
|
||||
sed -i 's/^ENCRYPTION_KEY=.*/ENCRYPTION_KEY='"$RANDOM_ENCRYPTION_KEY"'/' .env
|
||||
turbo --filter "@formbricks/web" go
|
||||
|
||||
image:
|
||||
file: .gitpod.Dockerfile
|
||||
|
||||
ports:
|
||||
- port: 3000
|
||||
visibility: public
|
||||
onOpen: open-browser
|
||||
- port: 3001
|
||||
visibility: public
|
||||
onOpen: ignore
|
||||
- port: 3002
|
||||
visibility: public
|
||||
onOpen: ignore
|
||||
- port: 5432
|
||||
visibility: public
|
||||
onOpen: ignore
|
||||
- port: 1025
|
||||
visibility: public
|
||||
onOpen: ignore
|
||||
- port: 8025
|
||||
visibility: public
|
||||
onOpen: open-browser
|
||||
|
||||
github:
|
||||
prebuilds:
|
||||
master: true
|
||||
pullRequests: true
|
||||
addComment: true
|
||||
|
||||
vscode:
|
||||
extensions:
|
||||
- "ban.spellright"
|
||||
- "bradlc.vscode-tailwindcss"
|
||||
- "DavidAnson.vscode-markdownlint"
|
||||
- "dbaeumer.vscode-eslint"
|
||||
- "esbenp.prettier-vscode"
|
||||
- "Prisma.prisma"
|
||||
- "yzhang.markdown-all-in-one"
|
||||
@@ -1,6 +1,6 @@
#!/bin/bash

images=($(yq eval '.services.*.image' packages/database/docker-compose.yml))
images=($(yq eval '.services.*.image' docker-compose.dev.yml))

pull_image() {
  docker pull "$1"

2
.husky/post-checkout
Normal file
@@ -0,0 +1,2 @@
echo "{\"branchName\": \"$(git rev-parse --abbrev-ref HEAD)\"}" > ./branch.json
prettier --write ./branch.json
@@ -1 +1,21 @@
pnpm lint-staged
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"

# Load environment variables from .env files
if [ -f .env ]; then
  set -a
  . .env
  set +a
fi

pnpm lint-staged

# Run tolgee-pull if branch.json exists and NEXT_PUBLIC_TOLGEE_API_KEY is set
if [ -f branch.json ]; then
  if [ -z "$NEXT_PUBLIC_TOLGEE_API_KEY" ]; then
    echo "Skipping tolgee-pull: NEXT_PUBLIC_TOLGEE_API_KEY is not set"
  else
    pnpm run tolgee-pull
    git add apps/web/locales
  fi
fi

51
.tolgeerc.json
Normal file
@@ -0,0 +1,51 @@
|
||||
{
|
||||
"$schema": "https://docs.tolgee.io/cli-schema.json",
|
||||
"format": "JSON_TOLGEE",
|
||||
"patterns": ["./apps/web/**/*.ts?(x)"],
|
||||
"projectId": 10304,
|
||||
"pull": {
|
||||
"path": "./apps/web/locales"
|
||||
},
|
||||
"push": {
|
||||
"files": [
|
||||
{
|
||||
"language": "en-US",
|
||||
"path": "./apps/web/locales/en-US.json"
|
||||
},
|
||||
{
|
||||
"language": "de-DE",
|
||||
"path": "./apps/web/locales/de-DE.json"
|
||||
},
|
||||
{
|
||||
"language": "fr-FR",
|
||||
"path": "./apps/web/locales/fr-FR.json"
|
||||
},
|
||||
{
|
||||
"language": "pt-BR",
|
||||
"path": "./apps/web/locales/pt-BR.json"
|
||||
},
|
||||
{
|
||||
"language": "zh-Hant-TW",
|
||||
"path": "./apps/web/locales/zh-Hant-TW.json"
|
||||
},
|
||||
{
|
||||
"language": "pt-PT",
|
||||
"path": "./apps/web/locales/pt-PT.json"
|
||||
},
|
||||
{
|
||||
"language": "ro-RO",
|
||||
"path": "./apps/web/locales/ro-RO.json"
|
||||
},
|
||||
{
|
||||
"language": "ja-JP",
|
||||
"path": "./apps/web/locales/ja-JP.json"
|
||||
},
|
||||
{
|
||||
"language": "zh-Hans-CN",
|
||||
"path": "./apps/web/locales/zh-Hans-CN.json"
|
||||
}
|
||||
],
|
||||
"forceMode": "OVERRIDE"
|
||||
},
|
||||
"strictNamespace": false
|
||||
}
|
||||
4
.vscode/extensions.json
vendored
@@ -6,6 +6,8 @@
    "dbaeumer.vscode-eslint", // eslint plugin
    "esbenp.prettier-vscode", // prettier plugin
    "Prisma.prisma", // syntax|format|completion for prisma
    "yzhang.markdown-all-in-one" // nicer markdown support
    "yzhang.markdown-all-in-one", // nicer markdown support
    "vitest.explorer", // run tests directly from the code window
    "sonarsource.sonarlint-vscode" // sonarqube linter for vscode
  ]
}

10
.vscode/settings.json
vendored
@@ -1,4 +1,12 @@
{
  "eslint.validate": ["javascript", "javascriptreact", "typescript", "typescriptreact"],
  "eslint.workingDirectories": [{ "mode": "auto" }],
  "javascript.updateImportsOnFileMove.enabled": "always",
  "sonarlint.connectedMode.project": {
    "connectionId": "formbricks",
    "projectKey": "formbricks_formbricks"
  },
  "typescript.preferences.importModuleSpecifier": "non-relative",
  "typescript.tsdk": "node_modules/typescript/lib"
  "typescript.tsdk": "node_modules/typescript/lib",
  "typescript.updateImportsOnFileMove.enabled": "always"
}

@@ -14,17 +14,7 @@ Are you brimming with brilliant ideas? For new features that can elevate Formbri
|
||||
|
||||
## 🛠 Crafting Pull Requests
|
||||
|
||||
Ready to dive into the code and make a real impact? Here's your path:
|
||||
|
||||
1. **Read our Best Practices**: [It takes 5 minutes](https://formbricks.com/docs/developer-docs/contributing/get-started) but will help you save hours 🤓
|
||||
|
||||
1. **Fork the Repository:** Fork our repository or use [Gitpod](https://formbricks.com/docs/developer-docs/contributing/gitpod) or use [Codespaces](https://formbricks.com/docs/developer-docs/contributing/codespaces)
|
||||
|
||||
1. **Tweak and Transform:** Work your coding magic and apply your changes.
|
||||
|
||||
1. **Pull Request Act:** If you're ready to go, craft a new pull request closely following our PR template 🙏
|
||||
|
||||
Would you prefer a chat before you dive into a lot of work? Our [Discord server](https://formbricks.com/discord) is your harbor. Share your thoughts, and we'll meet you there with open arms. We're responsive and friendly, promise!
|
||||
For the time being, we don't have the capacity to properly facilitate community contributions. It's a lot of engineering attention often spent on issues which don't follow our prioritization, so we've decided to only facilitate community code contributions in rare exceptions in the coming months.
|
||||
|
||||
## 🚀 Aspiring Features
|
||||
|
||||
|
||||
4
LICENSE
@@ -2,8 +2,8 @@ Copyright (c) 2024 Formbricks GmbH
|
||||
|
||||
Portions of this software are licensed as follows:
|
||||
|
||||
- All content that resides under the "packages/ee/", "apps/web/modules/ee" & "apps/web/app/(ee)" directories of this repository, if these directories exist, is licensed under the license defined in "packages/ee/LICENSE".
|
||||
- All content that resides under the "packages/js/", "packages/react-native/" and "packages/api/" directories of this repository, if that directories exist, is licensed under the "MIT" license as defined in the "LICENSE" files of these packages.
|
||||
- All content that resides under the "apps/web/modules/ee" directory of this repository, if these directories exist, is licensed under the license defined in "apps/web/modules/ee/LICENSE".
|
||||
- All content that resides under the "packages/js/", "packages/android/", "packages/ios/" and "packages/api/" directories of this repository, if that directories exist, is licensed under the "MIT" license as defined in the "LICENSE" files of these packages.
|
||||
- All third party components incorporated into the Formbricks Software are licensed under the original license provided by the owner of the applicable component.
|
||||
- Content outside of the above mentioned directories or restrictions above is available under the "AGPLv3" license as defined below.
|
||||
|
||||
|
||||
15
README.md
@@ -13,14 +13,15 @@
|
||||
<h3 align="center">Formbricks</h3>
|
||||
|
||||
<p align="center">
|
||||
Harvest user-insights, build irresistible experiences.
|
||||
The Open Source Qualtrics Alternative
|
||||
<br />
|
||||
<a href="https://formbricks.com/">Website</a> | <a href="https://formbricks.com/discord">Join Discord community</a>
|
||||
<a href="https://formbricks.com/">Website</a>
|
||||
</p>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/formbricks/formbricks/blob/main/LICENSE"><img src="https://img.shields.io/badge/License-AGPL-purple" alt="License"></a> <a href="https://formbricks.com/discord"><img src="https://img.shields.io/discord/979077669410979880?label=Discord&logo=discord&logoColor=%23fff" alt="Join Formbricks Discord"></a> <a href="https://github.com/formbricks/formbricks/stargazers"><img src="https://img.shields.io/github/stars/formbricks/formbricks?logo=github" alt="Github Stars"></a>
|
||||
<a href="https://github.com/formbricks/formbricks/blob/main/LICENSE"><img src="https://img.shields.io/badge/License-AGPL-purple" alt="License"></a> <a href="https://github.com/formbricks/formbricks/stargazers"><img src="https://img.shields.io/github/stars/formbricks/formbricks?logo=github" alt="Github Stars"></a>
|
||||
<a href="https://insights.linuxfoundation.org/project/formbricks"><img src="https://insights.linuxfoundation.org/api/badge/health-score?project=formbricks"></a>
|
||||
<a href="https://news.ycombinator.com/item?id=32303986"><img src="https://img.shields.io/badge/Hacker%20News-122-%23FF6600" alt="Hacker News"></a>
|
||||
<a href="[https://www.producthunt.com/products/formbricks](https://www.producthunt.com/posts/formbricks)"><img src="https://img.shields.io/badge/Product%20Hunt-455-orange?logo=producthunt&logoColor=%23fff" alt="Product Hunt"></a>
|
||||
<a href="https://github.blog/2023-04-12-github-accelerator-our-first-cohort-and-whats-next/"><img src="https://img.shields.io/badge/2023-blue?logo=github&label=Github%20Accelerator" alt="Github Accelerator"></a>
|
||||
@@ -192,7 +193,7 @@ Here are a few options:
|
||||
|
||||
- Upvote issues with 👍 reaction so we know what the demand for a particular issue is to prioritize it within the roadmap.
|
||||
|
||||
Please check out [our contribution guide](https://formbricks.com/docs/developer-docs/contributing/get-started) and our [list of open issues](https://github.com/formbricks/formbricks/issues) for more information.
|
||||
- Note: For the time being, we can only facilitate code contributions as an exception.
|
||||
|
||||
## All Thanks To Our Contributors
|
||||
|
||||
@@ -228,14 +229,14 @@ The Formbricks core application is licensed under the [AGPLv3 Open Source Licens
|
||||
|
||||
### The Enterprise Edition
|
||||
|
||||
Additional to the AGPL licensed Formbricks core, this repository contains code licensed under an Enterprise license. The [code](https://github.com/formbricks/formbricks/tree/main/packages/ee) and [license](https://github.com/formbricks/formbricks/blob/main/packages/ee/LICENSE) for the enterprise functionality can be found in the `/packages/ee` folder of this repository. This additional functionality is not part of the AGPLv3 licensed Formbricks core and is designed to meet the needs of larger teams and enterprises. This advanced functionality is already included in the Docker images, but you need an [Enterprise License Key](https://formbricks.com/docs/self-hosting/enterprise) to unlock it.
|
||||
Additional to the AGPL licensed Formbricks core, this repository contains code licensed under an Enterprise license. The [code](https://github.com/formbricks/formbricks/tree/main/apps/web/modules/ee) and [license](https://github.com/formbricks/formbricks/blob/main/apps/web/modules/ee/LICENSE) for the enterprise functionality can be found in the `/apps/web/modules/ee` folder of this repository. This additional functionality is not part of the AGPLv3 licensed Formbricks core and is designed to meet the needs of larger teams and enterprises. This advanced functionality is already included in the Docker images, but you need an [Enterprise License Key](https://formbricks.com/docs/self-hosting/enterprise) to unlock it.
|
||||
|
||||
### White-Labeling Formbricks and Other Licensing Needs
|
||||
|
||||
We currently do not offer Formbricks white-labeled. Any other needs? [Send us an email](mailto:hola@formbricks.com).
|
||||
We currently do not offer Formbricks white-labeled. That means we don't sell a license that lets other companies resell Formbricks to third parties under their name or take parts (like the survey editor) out of Formbricks to add to their own software products. Any other needs? [Send us an email](mailto:hola@formbricks.com).
|
||||
|
||||
### Why charge for Enterprise Features?
|
||||
|
||||
The Enterprise Edition and White-Label Licenses allow us to fund the development of Formbricks sustainably. It guarantees that the open-source surveying infrastructure we're building will be around for decades to come.
|
||||
The Enterprise Edition allows us to fund the development of Formbricks sustainably. It guarantees that the free and open-source surveying infrastructure we're building will be around for decades to come.
|
||||
|
||||
<p align="right"><a href="#top">🔼 Back to top</a></p>
|
||||
|
||||
@@ -1,7 +1,7 @@
# Security Policy of Formbricks

This is Formbricks' security policy. Please reach out to us
on our Discord or, if privately, via <security@formbricks.com>
on Github or, if privately, via <security@formbricks.com>

## Introduction

@@ -40,7 +40,7 @@ We invite you to report if:

Avoid reporting if:

- Assistance is needed to optimize Formbricks for security – please engage on our Discord for this.
- Assistance is needed to optimize Formbricks for security – please engage on Github Discussions for this.
- Help is required for applying security-related updates.
- The concern is not related to security.

@@ -1,2 +0,0 @@
|
||||
EXPO_PUBLIC_API_HOST=http://192.168.178.20:3000
|
||||
EXPO_PUBLIC_FORMBRICKS_ENVIRONMENT_ID=clzr04nkd000bcdl110j0ijyq
|
||||
@@ -1,7 +0,0 @@
|
||||
module.exports = {
|
||||
extends: ["@formbricks/eslint-config/react.js"],
|
||||
parserOptions: {
|
||||
project: "tsconfig.json",
|
||||
tsconfigRootDir: __dirname,
|
||||
},
|
||||
};
|
||||
35
apps/demo-react-native/.gitignore
vendored
@@ -1,35 +0,0 @@
|
||||
# Learn more https://docs.github.com/en/get-started/getting-started-with-git/ignoring-files
|
||||
|
||||
# dependencies
|
||||
node_modules/
|
||||
|
||||
# Expo
|
||||
.expo/
|
||||
dist/
|
||||
web-build/
|
||||
|
||||
# Native
|
||||
*.orig.*
|
||||
*.jks
|
||||
*.p8
|
||||
*.p12
|
||||
*.key
|
||||
*.mobileprovision
|
||||
|
||||
# Metro
|
||||
.metro-health-check*
|
||||
|
||||
# debug
|
||||
npm-debug.*
|
||||
yarn-debug.*
|
||||
yarn-error.*
|
||||
|
||||
# macOS
|
||||
.DS_Store
|
||||
*.pem
|
||||
|
||||
# local env files
|
||||
.env*.local
|
||||
|
||||
# typescript
|
||||
*.tsbuildinfo
|
||||
@@ -1,34 +0,0 @@
|
||||
{
|
||||
"expo": {
|
||||
"android": {
|
||||
"adaptiveIcon": {
|
||||
"backgroundColor": "#ffffff",
|
||||
"foregroundImage": "./assets/adaptive-icon.png"
|
||||
}
|
||||
},
|
||||
"assetBundlePatterns": ["**/*"],
|
||||
"icon": "./assets/icon.png",
|
||||
"ios": {
|
||||
"infoPlist": {
|
||||
"NSCameraUsageDescription": "Take pictures for certain activities.",
|
||||
"NSMicrophoneUsageDescription": "Need microphone access for recording videos.",
|
||||
"NSPhotoLibraryUsageDescription": "Select pictures for certain activities."
|
||||
},
|
||||
"supportsTablet": true
|
||||
},
|
||||
"jsEngine": "hermes",
|
||||
"name": "react-native-demo",
|
||||
"orientation": "portrait",
|
||||
"slug": "react-native-demo",
|
||||
"splash": {
|
||||
"backgroundColor": "#ffffff",
|
||||
"image": "./assets/splash.png",
|
||||
"resizeMode": "contain"
|
||||
},
|
||||
"userInterfaceStyle": "light",
|
||||
"version": "1.0.0",
|
||||
"web": {
|
||||
"favicon": "./assets/favicon.png"
|
||||
}
|
||||
}
|
||||
}
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 17 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 1.4 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 22 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 46 KiB |
@@ -1,6 +0,0 @@
|
||||
module.exports = function babel(api) {
|
||||
api.cache(true);
|
||||
return {
|
||||
presets: ["babel-preset-expo"],
|
||||
};
|
||||
};
|
||||
@@ -1,7 +0,0 @@
|
||||
import { registerRootComponent } from "expo";
|
||||
import { LogBox } from "react-native";
|
||||
import App from "./src/app";
|
||||
|
||||
registerRootComponent(App);
|
||||
|
||||
LogBox.ignoreAllLogs();
|
||||
@@ -1,21 +0,0 @@
|
||||
// Learn more https://docs.expo.io/guides/customizing-metro
|
||||
const path = require("node:path");
|
||||
const { getDefaultConfig } = require("expo/metro-config");
|
||||
|
||||
// Find the workspace root, this can be replaced with `find-yarn-workspace-root`
|
||||
const workspaceRoot = path.resolve(__dirname, "../..");
|
||||
const projectRoot = __dirname;
|
||||
|
||||
const config = getDefaultConfig(projectRoot);
|
||||
|
||||
// 1. Watch all files within the monorepo
|
||||
config.watchFolders = [workspaceRoot];
|
||||
// 2. Let Metro know where to resolve packages, and in what order
|
||||
config.resolver.nodeModulesPaths = [
|
||||
path.resolve(projectRoot, "node_modules"),
|
||||
path.resolve(workspaceRoot, "node_modules"),
|
||||
];
|
||||
// 3. Force Metro to resolve (sub)dependencies only from the `nodeModulesPaths`
|
||||
config.resolver.disableHierarchicalLookup = true;
|
||||
|
||||
module.exports = config;
|
||||
@@ -1,28 +0,0 @@
|
||||
{
|
||||
"name": "@formbricks/demo-react-native",
|
||||
"version": "1.0.0",
|
||||
"main": "./index.js",
|
||||
"scripts": {
|
||||
"dev": "expo start",
|
||||
"android": "expo start --android",
|
||||
"ios": "expo start --ios",
|
||||
"web": "expo start --web",
|
||||
"eject": "expo eject",
|
||||
"clean": "rimraf .turbo node_modules .expo"
|
||||
},
|
||||
"dependencies": {
|
||||
"@formbricks/js": "workspace:*",
|
||||
"@formbricks/react-native": "workspace:*",
|
||||
"expo": "51.0.26",
|
||||
"expo-status-bar": "1.12.1",
|
||||
"react": "18.3.1",
|
||||
"react-native": "0.74.4",
|
||||
"react-native-webview": "13.8.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "7.25.2",
|
||||
"@types/react": "18.3.11",
|
||||
"typescript": "5.3.3"
|
||||
},
|
||||
"private": true
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
import { StatusBar } from "expo-status-bar";
|
||||
import { Button, LogBox, StyleSheet, Text, View } from "react-native";
|
||||
import Formbricks, { track } from "@formbricks/react-native";
|
||||
|
||||
LogBox.ignoreAllLogs();
|
||||
|
||||
export default function App(): JSX.Element {
|
||||
if (!process.env.EXPO_PUBLIC_FORMBRICKS_ENVIRONMENT_ID) {
|
||||
throw new Error("EXPO_PUBLIC_FORMBRICKS_ENVIRONMENT_ID is required");
|
||||
}
|
||||
|
||||
if (!process.env.EXPO_PUBLIC_API_HOST) {
|
||||
throw new Error("EXPO_PUBLIC_API_HOST is required");
|
||||
}
|
||||
|
||||
const config = {
|
||||
environmentId: process.env.EXPO_PUBLIC_FORMBRICKS_ENVIRONMENT_ID,
|
||||
apiHost: process.env.EXPO_PUBLIC_API_HOST,
|
||||
userId: "random-user-id",
|
||||
attributes: {
|
||||
language: "en",
|
||||
testAttr: "attr-test",
|
||||
},
|
||||
};
|
||||
|
||||
return (
|
||||
<View style={styles.container}>
|
||||
<Text>Formbricks React Native SDK Demo</Text>
|
||||
|
||||
<Button
|
||||
title="Trigger Code Action"
|
||||
onPress={() => {
|
||||
track("code").catch((error: unknown) => {
|
||||
// eslint-disable-next-line no-console -- logging is allowed in demo apps
|
||||
console.error("Error tracking event:", error);
|
||||
});
|
||||
}}
|
||||
/>
|
||||
<StatusBar style="auto" />
|
||||
<Formbricks initConfig={config} />
|
||||
</View>
|
||||
);
|
||||
}
|
||||
|
||||
const styles = StyleSheet.create({
|
||||
container: {
|
||||
flex: 1,
|
||||
backgroundColor: "#fff",
|
||||
alignItems: "center",
|
||||
justifyContent: "center",
|
||||
},
|
||||
});
|
||||
@@ -1,6 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"strict": true
|
||||
},
|
||||
"extends": "expo/tsconfig.base"
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
NEXT_PUBLIC_FORMBRICKS_API_HOST=http://localhost:3000
|
||||
NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID=YOUR_ENVIRONMENT_ID
|
||||
|
||||
# Copy the environment ID for the URL of your Formbricks App and
|
||||
# paste it above to connect your Formbricks App with the Demo App.
|
||||
@@ -1,3 +0,0 @@
|
||||
module.exports = {
|
||||
extends: ["@formbricks/eslint-config/legacy-next.js"],
|
||||
};
|
||||
36
apps/demo/.gitignore
vendored
@@ -1,36 +0,0 @@
|
||||
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
|
||||
|
||||
# dependencies
|
||||
/node_modules
|
||||
/.pnp
|
||||
.pnp.js
|
||||
|
||||
# testing
|
||||
/coverage
|
||||
|
||||
# next.js
|
||||
/.next/
|
||||
/out/
|
||||
|
||||
# production
|
||||
/build
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
*.pem
|
||||
|
||||
# debug
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
.pnpm-debug.log*
|
||||
|
||||
# local env files
|
||||
.env*.local
|
||||
|
||||
# vercel
|
||||
.vercel
|
||||
|
||||
# typescript
|
||||
*.tsbuildinfo
|
||||
next-env.d.ts
|
||||
@@ -1,13 +0,0 @@
|
||||
import { Sidebar } from "./Sidebar";
|
||||
|
||||
export const LayoutApp = ({ children }: { children: React.ReactNode }) => {
|
||||
return (
|
||||
<div className="min-h-full">
|
||||
{/* Static sidebar for desktop */}
|
||||
<div className="hidden lg:fixed lg:inset-y-0 lg:flex lg:w-64 lg:flex-col">
|
||||
<Sidebar />
|
||||
</div>
|
||||
<div className="flex flex-1 flex-col lg:pl-64">{children}</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -1,65 +0,0 @@
import {
  ClockIcon,
  CogIcon,
  CreditCardIcon,
  FileBarChartIcon,
  HelpCircleIcon,
  HomeIcon,
  ScaleIcon,
  ShieldCheckIcon,
  UsersIcon,
} from "lucide-react";
import { classNames } from "../lib/utils";

const navigation = [
  { name: "Home", href: "#", icon: HomeIcon, current: true },
  { name: "History", href: "#", icon: ClockIcon, current: false },
  { name: "Balances", href: "#", icon: ScaleIcon, current: false },
  { name: "Cards", href: "#", icon: CreditCardIcon, current: false },
  { name: "Recipients", href: "#", icon: UsersIcon, current: false },
  { name: "Reports", href: "#", icon: FileBarChartIcon, current: false },
];
const secondaryNavigation = [
  { name: "Settings", href: "#", icon: CogIcon },
  { name: "Help", href: "#", icon: HelpCircleIcon },
  { name: "Privacy", href: "#", icon: ShieldCheckIcon },
];

export const Sidebar = () => {
  return (
    <div className="flex flex-grow flex-col overflow-y-auto bg-cyan-700 pb-4 pt-5">
      <nav
        className="mt-5 flex flex-1 flex-col divide-y divide-cyan-800 overflow-y-auto"
        aria-label="Sidebar">
        <div className="space-y-1 px-2">
          {navigation.map((item) => (
            <a
              key={item.name}
              href={item.href}
              className={classNames(
                item.current ? "bg-cyan-800 text-white" : "text-cyan-100 hover:bg-cyan-600 hover:text-white",
                "group flex items-center rounded-md px-2 py-2 text-sm font-medium leading-6"
              )}
              aria-current={item.current ? "page" : undefined}>
              <item.icon className="mr-4 h-6 w-6 flex-shrink-0 text-cyan-200" aria-hidden="true" />
              {item.name}
            </a>
          ))}
        </div>
        <div className="mt-6 pt-6">
          <div className="space-y-1 px-2">
            {secondaryNavigation.map((item) => (
              <a
                key={item.name}
                href={item.href}
                className="group flex items-center rounded-md px-2 py-2 text-sm font-medium leading-6 text-cyan-100 hover:bg-cyan-600 hover:text-white">
                <item.icon className="mr-4 h-6 w-6 text-cyan-200" aria-hidden="true" />
                {item.name}
              </a>
            ))}
          </div>
        </div>
      </nav>
    </div>
  );
};
@@ -1,3 +0,0 @@
export const classNames = (...classes: any) => {
  return classes.filter(Boolean).join(" ");
};
5 apps/demo/next-env.d.ts vendored
@@ -1,5 +0,0 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />

// NOTE: This file should not be edited
// see https://nextjs.org/docs/pages/building-your-application/configuring/typescript for more information.
@@ -1,17 +0,0 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
  images: {
    remotePatterns: [
      {
        protocol: "https",
        hostname: "tailwindui.com",
      },
      {
        protocol: "https",
        hostname: "images.unsplash.com",
      },
    ],
  },
};

export default nextConfig;
@@ -1,25 +0,0 @@
{
  "name": "@formbricks/demo",
  "version": "0.1.0",
  "private": true,
  "scripts": {
    "clean": "rimraf .turbo node_modules .next",
    "dev": "next dev -p 3002 --turbo",
    "go": "next dev -p 3002 --turbo",
    "build": "next build",
    "start": "next start",
    "lint": "next lint"
  },
  "dependencies": {
    "@formbricks/js": "workspace:*",
    "@formbricks/ui": "workspace:*",
    "lucide-react": "0.452.0",
    "next": "14.2.15",
    "react": "18.3.1",
    "react-dom": "18.3.1"
  },
  "devDependencies": {
    "@formbricks/eslint-config": "workspace:*",
    "@formbricks/config-typescript": "workspace:*"
  }
}
@@ -1,22 +0,0 @@
import type { AppProps } from "next/app";
import Head from "next/head";
import "@formbricks/ui/globals.css";

const App = ({ Component, pageProps }: AppProps) => {
  return (
    <>
      <Head>
        <title>Demo App</title>
      </Head>
      {(!process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID ||
        !process.env.NEXT_PUBLIC_FORMBRICKS_API_HOST) && (
        <div className="w-full bg-red-500 p-3 text-center text-sm text-white">
          Please set Formbricks environment variables in apps/demo/.env
        </div>
      )}
      <Component {...pageProps} />
    </>
  );
};

export default App;
@@ -1,15 +0,0 @@
import { Head, Html, Main, NextScript } from "next/document";

const Document = () => {
  return (
    <Html lang="en" className="h-full bg-slate-50">
      <Head />
      <body className="h-full">
        <Main />
        <NextScript />
      </body>
    </Html>
  );
};

export default Document;
@@ -1,239 +0,0 @@
import Image from "next/image";
import { useRouter } from "next/router";
import { useEffect, useState } from "react";
import formbricks from "@formbricks/js";
import fbsetup from "../public/fb-setup.png";

declare const window: any;

const AppPage = ({}) => {
  const [darkMode, setDarkMode] = useState(false);
  const router = useRouter();

  useEffect(() => {
    if (darkMode) {
      document.body.classList.add("dark");
    } else {
      document.body.classList.remove("dark");
    }
  }, [darkMode]);

  useEffect(() => {
    // enable Formbricks debug mode by adding formbricksDebug=true GET parameter
    const addFormbricksDebugParam = () => {
      const urlParams = new URLSearchParams(window.location.search);
      if (!urlParams.has("formbricksDebug")) {
        urlParams.set("formbricksDebug", "true");
        const newUrl = `${window.location.pathname}?${urlParams.toString()}`;
        window.history.replaceState({}, "", newUrl);
      }
    };

    addFormbricksDebugParam();

    if (process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID && process.env.NEXT_PUBLIC_FORMBRICKS_API_HOST) {
      const userId = "THIS-IS-A-VERY-LONG-USER-ID-FOR-TESTING";
      const userInitAttributes = {
        language: "de",
        "Init Attribute 1": "eight",
        "Init Attribute 2": "two",
      };

      formbricks.init({
        environmentId: process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID,
        apiHost: process.env.NEXT_PUBLIC_FORMBRICKS_API_HOST,
        userId,
        attributes: userInitAttributes,
      });
    }

    // Connect next.js router to Formbricks
    if (process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID && process.env.NEXT_PUBLIC_FORMBRICKS_API_HOST) {
      const handleRouteChange = formbricks?.registerRouteChange;
      router.events.on("routeChangeComplete", handleRouteChange);

      return () => {
        router.events.off("routeChangeComplete", handleRouteChange);
      };
    }
  }, []);

  return (
    <div className="min-h-screen bg-white px-12 py-6 dark:bg-slate-800">
      <div className="flex flex-col justify-between md:flex-row">
        <div className="flex flex-col items-center gap-2 sm:flex-row">
          <div>
            <h1 className="text-2xl font-bold text-slate-900 dark:text-white">
              Formbricks In-product Survey Demo App
            </h1>
            <p className="text-slate-700 dark:text-slate-300">
              This app helps you test your app surveys. You can create and test user actions, create and
              update user attributes, etc.
            </p>
          </div>
        </div>

        <button
          className="mt-2 rounded-lg bg-slate-200 px-6 py-1 dark:bg-slate-700 dark:text-slate-100"
          onClick={() => setDarkMode(!darkMode)}>
          {darkMode ? "Toggle Light Mode" : "Toggle Dark Mode"}
        </button>
      </div>

      <div className="my-4 grid grid-cols-1 gap-6 md:grid-cols-2">
        <div>
          <div className="rounded-lg border border-slate-300 bg-slate-100 p-6 dark:border-slate-600 dark:bg-slate-900">
            <h3 className="text-lg font-semibold text-slate-900 dark:text-white">1. Setup .env</h3>
            <p className="text-slate-700 dark:text-slate-300">
              Copy the environment ID of your Formbricks app to the env variable in /apps/demo/.env
            </p>
            <Image src={fbsetup} alt="fb setup" className="mt-4 rounded" priority />

            <div className="mt-4 flex-col items-start text-sm text-slate-700 sm:flex sm:items-center sm:text-base dark:text-slate-300">
              <p className="mb-1 sm:mb-0 sm:mr-2">You're connected with env:</p>
              <div className="flex items-center">
                <strong className="w-32 truncate sm:w-auto">
                  {process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID}
                </strong>
                <span className="relative ml-2 flex h-3 w-3">
                  <span className="absolute inline-flex h-full w-full animate-ping rounded-full bg-green-500 opacity-75"></span>
                  <span className="relative inline-flex h-3 w-3 rounded-full bg-green-500"></span>
                </span>
              </div>
            </div>
          </div>
          <div className="mt-4 rounded-lg border border-slate-300 bg-slate-100 p-6 dark:border-slate-600 dark:bg-slate-900">
            <h3 className="text-lg font-semibold text-slate-900 dark:text-white">2. Widget Logs</h3>
            <p className="text-slate-700 dark:text-slate-300">
              Look at the logs to understand how the widget works.{" "}
              <strong className="dark:text-white">Open your browser console</strong> to see the logs.
            </p>
            {/* <div className="max-h-[40vh] overflow-y-auto py-4">
              <LogsContainer />
            </div> */}
          </div>
        </div>

        <div className="md:grid md:grid-cols-3">
          <div className="col-span-3 self-start rounded-lg border border-slate-300 bg-slate-100 p-6 dark:border-slate-600 dark:bg-slate-900">
            <h3 className="text-lg font-semibold dark:text-white">
              Reset person / pull data from Formbricks app
            </h3>
            <p className="text-slate-700 dark:text-slate-300">
              On formbricks.reset() the local state will <strong>be deleted</strong> and formbricks gets{" "}
              <strong>reinitialized</strong>.
            </p>
            <button
              className="my-4 rounded-lg bg-slate-500 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600"
              onClick={() => {
                formbricks.reset();
              }}>
              Reset
            </button>
            <p className="text-xs text-slate-700 dark:text-slate-300">
              If you made a change in Formbricks app and it does not seem to work, hit 'Reset' and
              try again.
            </p>
          </div>

          <div className="p-6">
            <div>
              <button className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600">
                No-Code Action
              </button>
            </div>
            <div>
              <p className="text-xs text-slate-700 dark:text-slate-300">
                This button sends a{" "}
                <a
                  href="https://formbricks.com/docs/actions/no-code"
                  className="underline dark:text-blue-500"
                  target="_blank">
                  No Code Action
                </a>{" "}
                as long as you created it beforehand in the Formbricks App.{" "}
                <a
                  href="https://formbricks.com/docs/actions/no-code"
                  target="_blank"
                  className="underline dark:text-blue-500">
                  Here are instructions on how to do it.
                </a>
              </p>
            </div>
          </div>
          <div className="p-6">
            <div>
              <button
                onClick={() => {
                  formbricks.setAttribute("Plan", "Free");
                }}
                className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600">
                Set Plan to 'Free'
              </button>
            </div>
            <div>
              <p className="text-xs text-slate-700 dark:text-slate-300">
                This button sets the{" "}
                <a
                  href="https://formbricks.com/docs/attributes/custom-attributes"
                  target="_blank"
                  className="underline dark:text-blue-500">
                  attribute
                </a>{" "}
                'Plan' to 'Free'. If the attribute does not exist, it creates it.
              </p>
            </div>
          </div>
          <div className="p-6">
            <div>
              <button
                onClick={() => {
                  formbricks.setAttribute("Plan", "Paid");
                }}
                className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600">
                Set Plan to 'Paid'
              </button>
            </div>
            <div>
              <p className="text-xs text-slate-700 dark:text-slate-300">
                This button sets the{" "}
                <a
                  href="https://formbricks.com/docs/attributes/custom-attributes"
                  target="_blank"
                  className="underline dark:text-blue-500">
                  attribute
                </a>{" "}
                'Plan' to 'Paid'. If the attribute does not exist, it creates it.
              </p>
            </div>
          </div>
          <div className="p-6">
            <div>
              <button
                onClick={() => {
                  formbricks.setEmail("test@web.com");
                }}
                className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600">
                Set Email
              </button>
            </div>
            <div>
              <p className="text-xs text-slate-700 dark:text-slate-300">
                This button sets the{" "}
                <a
                  href="https://formbricks.com/docs/attributes/identify-users"
                  target="_blank"
                  className="underline dark:text-blue-500">
                  user email
                </a>{" "}
                'test@web.com'
              </p>
            </div>
          </div>
        </div>
      </div>
    </div>
  );
};

export default AppPage;
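The page above wires @formbricks/js to the Next.js router inside a mount-only effect. A condensed sketch of that pattern, extracted into a hypothetical hook; registerRouteChange is the SDK call the page passes as its route-change handler:

// Condensed sketch of the init + route-change wiring used in the page above.
import { useEffect } from "react";
import { useRouter } from "next/router";
import formbricks from "@formbricks/js";

export const useFormbricks = (environmentId: string, apiHost: string) => {
  const router = useRouter();

  useEffect(() => {
    // Initialize once; userId and attributes are optional and omitted here.
    formbricks.init({ environmentId, apiHost });

    // Tell the SDK to re-evaluate surveys after every client-side navigation.
    const handleRouteChange = () => formbricks.registerRouteChange();
    router.events.on("routeChangeComplete", handleRouteChange);
    return () => {
      router.events.off("routeChangeComplete", handleRouteChange);
    };
    // eslint-disable-next-line react-hooks/exhaustive-deps -- run once on mount, like the page above
  }, []);
};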
@@ -1,6 +0,0 @@
module.exports = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
};
Binary file not shown. Before: 15 KiB
Binary file not shown. Before: 6.2 KiB
Some files were not shown because too many files have changed in this diff.