Compare commits
790 Commits
fix-hidden ... main
352  .cursor/commands/create-question.md  Normal file
@@ -0,0 +1,352 @@

# Create New Question Element

Use this command to scaffold a new question element component in `packages/survey-ui/src/elements/`.

## Usage

When creating a new question type (e.g., `single-select`, `rating`, `nps`), follow these steps:

1. **Create the component file** `{question-type}.tsx` with this structure:

```typescript
import * as React from "react";
import { ElementHeader } from "../components/element-header";
import { useTextDirection } from "../hooks/use-text-direction";
import { cn } from "../lib/utils";

interface {QuestionType}Props {
  /** Unique identifier for the element container */
  elementId: string;
  /** The main question or prompt text displayed as the headline */
  headline: string;
  /** Optional descriptive text displayed below the headline */
  description?: string;
  /** Unique identifier for the input/control group */
  inputId: string;
  /** Current value */
  value?: {ValueType};
  /** Callback function called when the value changes */
  onChange: (value: {ValueType}) => void;
  /** Whether the field is required (shows asterisk indicator) */
  required?: boolean;
  /** Error message to display */
  errorMessage?: string;
  /** Text direction: 'ltr' (left-to-right), 'rtl' (right-to-left), or 'auto' (auto-detect from content) */
  dir?: "ltr" | "rtl" | "auto";
  /** Whether the controls are disabled */
  disabled?: boolean;
  // Add question-specific props here
}

function {QuestionType}({
  elementId,
  headline,
  description,
  inputId,
  value,
  onChange,
  required = false,
  errorMessage,
  dir = "auto",
  disabled = false,
  // ... question-specific props
}: {QuestionType}Props): React.JSX.Element {
  // Ensure value is always the correct type (handle undefined/null)
  const currentValue = value ?? {defaultValue};

  // Detect text direction from content
  const detectedDir = useTextDirection({
    dir,
    textContent: [headline, description ?? "", /* add other text content from question */],
  });

  return (
    <div className="w-full space-y-4" id={elementId} dir={detectedDir}>
      {/* Headline */}
      <ElementHeader
        headline={headline}
        description={description}
        required={required}
        htmlFor={inputId}
      />

      {/* Question-specific controls */}
      {/* TODO: Add your question-specific UI here */}

      {/* Error message */}
      {errorMessage && (
        <div className="text-destructive flex items-center gap-1 text-sm" dir={detectedDir}>
          <span>{errorMessage}</span>
        </div>
      )}
    </div>
  );
}

export { {QuestionType} };
export type { {QuestionType}Props };
```

2. **Create the Storybook file** `{question-type}.stories.tsx`:

```typescript
import type { Decorator, Meta, StoryObj } from "@storybook/react";
import React from "react";
import { {QuestionType}, type {QuestionType}Props } from "./{question-type}";

// Styling options for the StylingPlayground story
interface StylingOptions {
  // Question styling
  questionHeadlineFontFamily: string;
  questionHeadlineFontSize: string;
  questionHeadlineFontWeight: string;
  questionHeadlineColor: string;
  questionDescriptionFontFamily: string;
  questionDescriptionFontWeight: string;
  questionDescriptionFontSize: string;
  questionDescriptionColor: string;
  // Add component-specific styling options here
}

type StoryProps = {QuestionType}Props & Partial<StylingOptions>;

const meta: Meta<StoryProps> = {
  title: "UI-package/Elements/{QuestionType}",
  component: {QuestionType},
  parameters: {
    layout: "centered",
    docs: {
      description: {
        component: "A complete {question type} question element...",
      },
    },
  },
  tags: ["autodocs"],
  argTypes: {
    headline: {
      control: "text",
      description: "The main question text",
      table: { category: "Content" },
    },
    description: {
      control: "text",
      description: "Optional description or subheader text",
      table: { category: "Content" },
    },
    value: {
      control: "object",
      description: "Current value",
      table: { category: "State" },
    },
    required: {
      control: "boolean",
      description: "Whether the field is required",
      table: { category: "Validation" },
    },
    errorMessage: {
      control: "text",
      description: "Error message to display",
      table: { category: "Validation" },
    },
    dir: {
      control: { type: "select" },
      options: ["ltr", "rtl", "auto"],
      description: "Text direction for RTL support",
      table: { category: "Layout" },
    },
    disabled: {
      control: "boolean",
      description: "Whether the controls are disabled",
      table: { category: "State" },
    },
    onChange: {
      action: "changed",
      table: { category: "Events" },
    },
    // Add question-specific argTypes here
  },
};

export default meta;
type Story = StoryObj<StoryProps>;

// Decorator to apply CSS variables from story args
const withCSSVariables: Decorator<StoryProps> = (Story, context) => {
  const args = context.args as StoryProps;
  const {
    questionHeadlineFontFamily,
    questionHeadlineFontSize,
    questionHeadlineFontWeight,
    questionHeadlineColor,
    questionDescriptionFontFamily,
    questionDescriptionFontSize,
    questionDescriptionFontWeight,
    questionDescriptionColor,
    // Extract component-specific styling options
  } = args;

  const cssVarStyle: React.CSSProperties & Record<string, string | undefined> = {
    "--fb-question-headline-font-family": questionHeadlineFontFamily,
    "--fb-question-headline-font-size": questionHeadlineFontSize,
    "--fb-question-headline-font-weight": questionHeadlineFontWeight,
    "--fb-question-headline-color": questionHeadlineColor,
    "--fb-question-description-font-family": questionDescriptionFontFamily,
    "--fb-question-description-font-size": questionDescriptionFontSize,
    "--fb-question-description-font-weight": questionDescriptionFontWeight,
    "--fb-question-description-color": questionDescriptionColor,
    // Add component-specific CSS variables
  };

  return (
    <div style={cssVarStyle} className="w-[600px]">
      <Story />
    </div>
  );
};

export const StylingPlayground: Story = {
  args: {
    headline: "Example question?",
    description: "Example description",
    // Default styling values
    questionHeadlineFontFamily: "system-ui, sans-serif",
    questionHeadlineFontSize: "1.125rem",
    questionHeadlineFontWeight: "600",
    questionHeadlineColor: "#1e293b",
    questionDescriptionFontFamily: "system-ui, sans-serif",
    questionDescriptionFontSize: "0.875rem",
    questionDescriptionFontWeight: "400",
    questionDescriptionColor: "#64748b",
    // Add component-specific default values
  },
  argTypes: {
    // Question styling argTypes
    questionHeadlineFontFamily: {
      control: "text",
      table: { category: "Question Styling" },
    },
    questionHeadlineFontSize: {
      control: "text",
      table: { category: "Question Styling" },
    },
    questionHeadlineFontWeight: {
      control: "text",
      table: { category: "Question Styling" },
    },
    questionHeadlineColor: {
      control: "color",
      table: { category: "Question Styling" },
    },
    questionDescriptionFontFamily: {
      control: "text",
      table: { category: "Question Styling" },
    },
    questionDescriptionFontSize: {
      control: "text",
      table: { category: "Question Styling" },
    },
    questionDescriptionFontWeight: {
      control: "text",
      table: { category: "Question Styling" },
    },
    questionDescriptionColor: {
      control: "color",
      table: { category: "Question Styling" },
    },
    // Add component-specific argTypes
  },
  decorators: [withCSSVariables],
};

export const Default: Story = {
  args: {
    headline: "Example question?",
    // Add default props
  },
};

export const WithDescription: Story = {
  args: {
    headline: "Example question?",
    description: "Example description text",
  },
};

export const Required: Story = {
  args: {
    headline: "Example question?",
    required: true,
  },
};

export const WithError: Story = {
  args: {
    headline: "Example question?",
    errorMessage: "This field is required",
    required: true,
  },
};

export const Disabled: Story = {
  args: {
    headline: "Example question?",
    disabled: true,
  },
};

export const RTL: Story = {
  args: {
    headline: "مثال على السؤال؟",
    description: "مثال على الوصف",
    // Add RTL-specific props
  },
};
```

3. **Add CSS variables** to `packages/survey-ui/src/styles/globals.css` if needed:

```css
/* Component-specific CSS variables */
--fb-{component}-{property}: {default-value};
```

4. **Export from** `packages/survey-ui/src/index.ts`:

```typescript
export { {QuestionType}, type {QuestionType}Props } from "./elements/{question-type}";
```

## Key Requirements

- ✅ Always use `ElementHeader` component for headline/description
- ✅ Always use `useTextDirection` hook for RTL support
- ✅ Always handle undefined/null values safely (e.g., `Array.isArray(value) ? value : []`)
- ✅ Always include error message display if applicable
- ✅ Always support disabled state if applicable
- ✅ Always add JSDoc comments to props interface
- ✅ Always create Storybook stories with styling playground
- ✅ Always export types from component file
- ✅ Always add to index.ts exports

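To make the null-safety requirement above concrete, here is a minimal sketch for a question whose value is a `string[]` (as in `multi-select`); the prop names follow the template above:

```typescript
// Normalize the incoming value once so the rest of the component can rely on it
const currentValue = Array.isArray(value) ? value : [];

// Toggling an option never mutates state and always emits a well-typed array
const toggleOption = (option: string) => {
  if (disabled) return;
  const next = currentValue.includes(option)
    ? currentValue.filter((item) => item !== option)
    : [...currentValue, option];
  onChange(next);
};
```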
## Examples

- `open-text.tsx` - Text input/textarea question (string value)
- `multi-select.tsx` - Multiple checkbox selection (string[] value)

## Checklist

When creating a new question element, verify:

- [ ] Component file created with proper structure
- [ ] Props interface with JSDoc comments for all props
- [ ] Uses `ElementHeader` component (don't duplicate header logic)
- [ ] Uses `useTextDirection` hook for RTL support
- [ ] Handles undefined/null values safely
- [ ] Storybook file created with styling playground
- [ ] Includes common stories: Default, WithDescription, Required, WithError, Disabled, RTL
- [ ] CSS variables added to `globals.css` if component needs custom styling
- [ ] Exported from `index.ts` with types
- [ ] TypeScript types properly exported
- [ ] Error message display included if applicable
- [ ] Disabled state supported if applicable
61  .cursor/rules/build-and-deployment.mdc  Normal file
@@ -0,0 +1,61 @@

---
description:
globs:
alwaysApply: false
---
# Build & Deployment Best Practices

## Build Process

### Running Builds
- Use `pnpm build` from project root for full build
- Monitor for React hooks warnings and fix them immediately
- Ensure all TypeScript errors are resolved before deployment

### Common Build Issues & Fixes

#### React Hooks Warnings
- Capture ref values in variables within useEffect cleanup
- Avoid accessing `.current` directly in cleanup functions
- Pattern for fixing ref cleanup warnings:
```typescript
useEffect(() => {
  const currentRef = myRef.current;
  return () => {
    if (currentRef) {
      currentRef.cleanup();
    }
  };
}, []);
```

#### Test Failures During Build
- Ensure all test mocks include required constants like `SESSION_MAX_AGE`
- Mock Next.js navigation hooks properly: `useParams`, `useRouter`, `useSearchParams`
- Remove unused imports and constants from test files
- Use literal values instead of imported constants when the constant isn't actually needed

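A minimal sketch of such a navigation-hook mock, assuming Vitest as the test runner; the mocked return values and IDs are placeholders:

```typescript
import { vi } from "vitest";

// Mock the Next.js navigation hooks the component under test relies on
vi.mock("next/navigation", () => ({
  useRouter: () => ({ push: vi.fn(), replace: vi.fn(), refresh: vi.fn() }),
  useParams: () => ({ environmentId: "env_123", surveyId: "survey_123" }),
  useSearchParams: () => new URLSearchParams(),
}));
```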
### Test Execution
- Run `pnpm test` to execute all tests
- Use `pnpm test -- --run filename.test.tsx` for specific test files
- Fix test failures before merging code
- Ensure 100% test coverage for new components

### Performance Monitoring
- Monitor build times and optimize if necessary
- Watch for memory usage during builds
- Use proper caching strategies for faster rebuilds

### Deployment Checklist
1. All tests passing
2. Build completes without warnings
3. TypeScript compilation successful
4. No linter errors
5. Database migrations applied (if any)
6. Environment variables configured

### EKS Deployment Considerations
- Ensure latest code is deployed to all pods
- Monitor AWS RDS Performance Insights for database issues
- Verify environment-specific configurations
- Check pod health and resource usage
415  .cursor/rules/cache-optimization.mdc  Normal file
@@ -0,0 +1,415 @@

---
description: Caching rules for performance improvements
globs:
alwaysApply: false
---
# Cache Optimization Patterns for Formbricks

## Cache Strategy Overview

Formbricks uses a **hybrid caching approach** optimized for enterprise scale:

- **Redis** for persistent cross-request caching
- **React `cache()`** for request-level deduplication
- **NO Next.js `unstable_cache()`** - avoid for reliability

## Key Files

### Core Cache Infrastructure
- [packages/cache/src/service.ts](mdc:packages/cache/src/service.ts) - Redis cache service
- [packages/cache/src/client.ts](mdc:packages/cache/src/client.ts) - Cache client initialization and singleton management
- [apps/web/lib/cache/index.ts](mdc:apps/web/lib/cache/index.ts) - Cache service proxy for web app
- [packages/cache/src/index.ts](mdc:packages/cache/src/index.ts) - Cache package exports and utilities

### Environment State Caching (Critical Endpoint)
- [apps/web/app/api/v1/client/[environmentId]/environment/route.ts](mdc:apps/web/app/api/v1/client/[environmentId]/environment/route.ts) - Main endpoint serving hundreds of thousands of SDK clients
- [apps/web/app/api/v1/client/[environmentId]/environment/lib/data.ts](mdc:apps/web/app/api/v1/client/[environmentId]/environment/lib/data.ts) - Optimized data layer with caching

## Enterprise-Grade Cache Key Patterns

**Always use** the `createCacheKey` utilities from the cache package:

```typescript
// ✅ Correct patterns
createCacheKey.environment.state(environmentId) // "fb:env:abc123:state"
createCacheKey.organization.billing(organizationId) // "fb:org:xyz789:billing"
createCacheKey.license.status(organizationId) // "fb:license:org123:status"
createCacheKey.user.permissions(userId, orgId) // "fb:user:456:org:123:permissions"

// ❌ Never use flat keys - collision-prone
"environment_abc123"
"user_data_456"
```

## When to Use Each Cache Type

### Use React `cache()` for Request Deduplication
```typescript
// ✅ Prevents multiple calls within same request
export const getEnterpriseLicense = reactCache(async () => {
  // Complex license validation logic
});
```

### Use `cache.withCache()` for Simple Database Queries
```typescript
// ✅ Simple caching with automatic fallback (TTL in milliseconds)
export const getActionClasses = (environmentId: string) => {
  return cache.withCache(
    () => fetchActionClassesFromDB(environmentId),
    createCacheKey.environment.actionClasses(environmentId),
    60 * 30 * 1000 // 30 minutes in milliseconds
  );
};
```

### Use Explicit Redis Cache for Complex Business Logic
```typescript
// ✅ Full control for high-stakes endpoints
export const getEnvironmentState = async (environmentId: string) => {
  const cached = await environmentStateCache.getEnvironmentState(environmentId);
  if (cached) return cached;

  const fresh = await buildComplexState(environmentId);
  await environmentStateCache.setEnvironmentState(environmentId, fresh);
  return fresh;
};
```

## Caching Decision Framework

### When TO Add Caching

```typescript
// ✅ Expensive operations that benefit from caching
// - Database queries (>10ms typical)
// - External API calls (>50ms typical)
// - Complex computations (>5ms)
// - File system operations
// - Heavy data transformations

// Example: Database query with complex joins (TTL in milliseconds)
export const getEnvironmentWithDetails = withCache(
  async (environmentId: string) => {
    return prisma.environment.findUnique({
      where: { id: environmentId },
      include: { /* complex joins */ }
    });
  },
  { key: createCacheKey.environment.details(environmentId), ttl: 60 * 30 * 1000 } // 30 minutes
)();
```

### When NOT to Add Caching

```typescript
// ❌ Don't cache these operations - minimal overhead
// - Simple property access (<0.1ms)
// - Basic transformations (<1ms)
// - Functions that just call already-cached functions
// - Pure computation without I/O

// ❌ Bad example: Redundant caching
const getCachedLicenseFeatures = withCache(
  async () => {
    const license = await getEnterpriseLicense(); // Already cached!
    return license.active ? license.features : null; // Just property access
  },
  { key: "license-features", ttl: 1800 * 1000 } // 30 minutes in milliseconds
);

// ✅ Good example: Simple and efficient
const getLicenseFeatures = async () => {
  const license = await getEnterpriseLicense(); // Already cached
  return license.active ? license.features : null; // 0.1ms overhead
};
```

### Computational Overhead Analysis

Before adding caching, analyze the overhead:

```typescript
// ✅ High overhead - CACHE IT
// - Database queries: ~10-100ms
// - External APIs: ~50-500ms
// - File I/O: ~5-50ms
// - Complex algorithms: >5ms

// ❌ Low overhead - DON'T CACHE
// - Property access: ~0.001ms
// - Simple lookups: ~0.1ms
// - Basic validation: ~1ms
// - Type checks: ~0.01ms

// Example decision tree:
const expensiveOperation = async () => {
  return prisma.query(); // 50ms - CACHE IT
};

const cheapOperation = (data: any) => {
  return data.property; // 0.001ms - DON'T CACHE
};
```

### Avoid Cache Wrapper Anti-Pattern

```typescript
// ❌ Don't create wrapper functions just for caching
const getCachedUserPermissions = withCache(
  async (userId: string) => getUserPermissions(userId),
  { key: createCacheKey.user.permissions(userId), ttl: 3600 * 1000 } // 1 hour in milliseconds
);

// ✅ Add caching directly to the original function
export const getUserPermissions = withCache(
  async (userId: string) => {
    return prisma.user.findUnique({
      where: { id: userId },
      include: { permissions: true }
    });
  },
  { key: createCacheKey.user.permissions(userId), ttl: 3600 * 1000 } // 1 hour in milliseconds
);
```

## TTL Coordination Strategy

### Multi-Layer Cache Coordination
For endpoints serving client SDKs, coordinate TTLs across layers:

```typescript
// Client SDK cache (expiresAt) - longest TTL for fewer requests
const CLIENT_TTL = 60; // 1 minute (seconds for client)

// Server Redis cache - shorter TTL ensures fresh data for clients
const SERVER_TTL = 60 * 1000; // 1 minute in milliseconds

// HTTP cache headers (seconds)
const BROWSER_TTL = 60; // 1 minute (max-age)
const CDN_TTL = 60; // 1 minute (s-maxage)
const CORS_TTL = 60 * 60; // 1 hour (balanced approach)
```

### Standard TTL Guidelines (in milliseconds for cache-manager + Keyv)
```typescript
// Configuration data - rarely changes
const CONFIG_TTL = 60 * 60 * 24 * 1000; // 24 hours

// User data - moderate frequency
const USER_TTL = 60 * 60 * 2 * 1000; // 2 hours

// Survey data - changes moderately
const SURVEY_TTL = 60 * 15 * 1000; // 15 minutes

// Billing data - expensive to compute
const BILLING_TTL = 60 * 30 * 1000; // 30 minutes

// Action classes - infrequent changes
const ACTION_CLASS_TTL = 60 * 30 * 1000; // 30 minutes
```

## High-Frequency Endpoint Optimization

### Performance Patterns for High-Volume Endpoints

```typescript
// ✅ Optimized high-frequency endpoint pattern
export const GET = async (request: NextRequest, props: { params: Promise<{ id: string }> }) => {
  const params = await props.params;

  try {
    // Simple validation (avoid Zod for high-frequency)
    if (!params.id || typeof params.id !== 'string') {
      return responses.badRequestResponse("ID is required", undefined, true);
    }

    // Single optimized query with caching
    const data = await getOptimizedData(params.id);

    return responses.successResponse(
      {
        data,
        expiresAt: new Date(Date.now() + CLIENT_TTL * 1000), // SDK cache duration
      },
      true,
      "public, s-maxage=1800, max-age=3600, stale-while-revalidate=1800, stale-if-error=3600"
    );
  } catch (err) {
    // Simplified error handling for performance
    if (err instanceof ResourceNotFoundError) {
      return responses.notFoundResponse(err.resourceType, err.resourceId);
    }
    logger.error({ error: err, url: request.url }, "Error in high-frequency endpoint");
    return responses.internalServerErrorResponse(err.message, true);
  }
};
```

### Avoid These Performance Anti-Patterns

```typescript
// ❌ Avoid for high-frequency endpoints
const inputValidation = ZodSchema.safeParse(input); // Too slow
const startTime = Date.now(); logger.debug(...); // Logging overhead
const { data, revalidateEnvironment } = await get(); // Complex return types
```

### CORS Optimization
```typescript
// ✅ Balanced CORS caching (not too aggressive)
export const OPTIONS = async (): Promise<Response> => {
  return responses.successResponse(
    {},
    true,
    "public, s-maxage=3600, max-age=3600" // 1 hour balanced approach
  );
};
```

## Redis Cache Migration from Next.js

### Avoid Legacy Next.js Patterns
```typescript
// ❌ Old Next.js unstable_cache pattern (avoid)
const getCachedData = unstable_cache(
  async (id) => fetchData(id),
  ['cache-key'],
  { tags: ['environment'], revalidate: 900 }
);

// ❌ Don't use revalidateEnvironment flags with Redis
return { data, revalidateEnvironment: true }; // This gets cached incorrectly!

// ✅ New Redis pattern with withCache (TTL in milliseconds)
export const getCachedData = (id: string) =>
  withCache(
    () => fetchData(id),
    {
      key: createCacheKey.environment.data(id),
      ttl: 60 * 15 * 1000, // 15 minutes in milliseconds
    }
  )();
```

### Remove Revalidation Logic
When migrating from Next.js `unstable_cache`:
- Remove `revalidateEnvironment` or similar flags
- Remove tag-based invalidation logic
- Use TTL-based expiration instead
- Handle one-time updates (like `appSetupCompleted`) directly in cache

## Data Layer Optimization

### Single Query Pattern
```typescript
// ✅ Optimize with single database query
export const getOptimizedEnvironmentData = async (environmentId: string) => {
  return prisma.environment.findUniqueOrThrow({
    where: { id: environmentId },
    include: {
      project: {
        select: { id: true, recontactDays: true, /* ... */ }
      },
      organization: {
        select: { id: true, billing: true }
      },
      surveys: {
        where: { status: "inProgress" },
        select: { id: true, name: true, /* ... */ }
      },
      actionClasses: {
        select: { id: true, name: true, /* ... */ }
      }
    }
  });
};

// ❌ Avoid multiple separate queries
const environment = await getEnvironment(id);
const organization = await getOrganization(environment.organizationId);
const surveys = await getSurveys(id);
const actionClasses = await getActionClasses(id);
```

## Invalidation Best Practices

**Always use explicit key-based invalidation:**

```typescript
// ✅ Clear and debuggable
await invalidateCache(createCacheKey.environment.state(environmentId));
await invalidateCache([
  createCacheKey.environment.surveys(environmentId),
  createCacheKey.environment.actionClasses(environmentId)
]);

// ❌ Avoid complex tag systems
await invalidateByTags(["environment", "survey"]); // Don't do this
```

## Critical Performance Targets

### High-Frequency Endpoint Goals
- **Cache hit ratio**: >85%
- **Response time P95**: <200ms
- **Database load reduction**: >60%
- **HTTP cache duration**: 1hr browser, 30min Cloudflare
- **SDK refresh interval**: 1 hour with 30min server cache

### Performance Monitoring
- Use **existing elastic cache analytics** for metrics
- Log cache errors and warnings (not debug info)
- Track database query reduction
- Monitor response times for cached endpoints
- **Avoid performance logging** in high-frequency endpoints

## Error Handling Pattern

Always provide fallback to fresh data on cache errors:

```typescript
try {
  const cached = await cache.get(key);
  if (cached) return cached;

  const fresh = await fetchFresh();
  await cache.set(key, fresh, ttl); // ttl in milliseconds
  return fresh;
} catch (error) {
  // ✅ Always fallback to fresh data
  logger.warn("Cache error, fetching fresh", { key, error });
  return fetchFresh();
}
```

## Common Pitfalls to Avoid

1. **Never use Next.js `unstable_cache()`** - unreliable in production
2. **Don't use revalidation flags with Redis** - they get cached incorrectly
3. **Avoid Zod validation** for simple parameters in high-frequency endpoints
4. **Don't add performance logging** to high-frequency endpoints
5. **Coordinate TTLs** between client and server caches
6. **Don't over-engineer** with complex tag systems
7. **Avoid caching rapidly changing data** (real-time metrics)
8. **Always validate cache keys** to prevent collisions
9. **Don't add redundant caching layers** - analyze computational overhead first
10. **Avoid cache wrapper functions** - add caching directly to expensive operations
11. **Don't cache property access or simple transformations** - overhead is negligible
12. **Analyze the full call chain** before adding caching to avoid double-caching
13. **Remember TTL is in milliseconds** for cache-manager + Keyv stack (not seconds)

## Monitoring Strategy

- Use **existing elastic cache analytics** for metrics
- Log cache errors and warnings
- Track database query reduction
- Monitor response times for cached endpoints
- **Don't add custom metrics** that duplicate existing monitoring

## Important Notes

### TTL Units
- **cache-manager + Keyv**: TTL in **milliseconds**
- **Direct Redis commands**: TTL in **seconds** (EXPIRE, SETEX) or **milliseconds** (PEXPIRE, PSETEX)
- **HTTP cache headers**: TTL in **seconds** (max-age, s-maxage)
- **Client SDK**: TTL in **seconds** (expiresAt calculation)

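To make the unit differences concrete, here is a minimal sketch that expresses the same 30-minute lifetime at each layer; constant names are illustrative, and `cache.set` follows the error-handling example above:

```typescript
const THIRTY_MINUTES_MS = 60 * 30 * 1000; // cache-manager + Keyv: milliseconds
const THIRTY_MINUTES_S = 60 * 30;         // HTTP headers / client SDK: seconds

await cache.set(key, value, THIRTY_MINUTES_MS);                   // Redis via Keyv
const cacheControl = `public, s-maxage=${THIRTY_MINUTES_S}`;      // HTTP header
const expiresAt = new Date(Date.now() + THIRTY_MINUTES_S * 1000); // SDK expiresAt
```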
41  .cursor/rules/database-performance.mdc  Normal file
@@ -0,0 +1,41 @@

---
description:
globs:
alwaysApply: false
---
# Database Performance & Prisma Best Practices

## Critical Performance Rules

### Response Count Queries
- **NEVER** use `skip`/`offset` with `prisma.response.count()` - this causes expensive subqueries with OFFSET
- Always use only `where` clauses for count operations: `prisma.response.count({ where: { ... } })`
- For pagination, separate count queries from data queries
- Reference: [apps/web/lib/response/service.ts](mdc:apps/web/lib/response/service.ts) line 654-686

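A minimal sketch of this rule (identifiers are illustrative):

```typescript
// ❌ skip/take on a count triggers an expensive OFFSET subquery
const slowCount = await prisma.response.count({
  where: { surveyId },
  skip: 50,
});

// ✅ count with only a where clause; apply skip/take to the data query instead
const totalCount = await prisma.response.count({ where: { surveyId } });
```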
### Prisma Query Optimization
- Use proper indexes defined in [packages/database/schema.prisma](mdc:packages/database/schema.prisma)
- Leverage existing indexes: `@@index([surveyId, createdAt])`, `@@index([createdAt])`
- Use cursor-based pagination for large datasets instead of offset-based
- Cache frequently accessed data using React Cache and custom cache tags

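A minimal sketch of cursor-based pagination over responses, leaning on the `@@index([surveyId, createdAt])` index listed above; the function and variable names are illustrative:

```typescript
const PAGE_SIZE = 50;

const getResponsePage = async (surveyId: string, cursor?: string) => {
  const page = await prisma.response.findMany({
    where: { surveyId },
    orderBy: { createdAt: "desc" },
    take: PAGE_SIZE,
    // On follow-up pages, start right after the last row of the previous page
    ...(cursor ? { cursor: { id: cursor }, skip: 1 } : {}),
  });
  const nextCursor = page.length === PAGE_SIZE ? page.at(-1)?.id : undefined;
  return { page, nextCursor };
};
```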
### Date Range Filtering
- When filtering by `createdAt`, always use indexed queries
- Combine with `surveyId` for optimal performance: `{ surveyId, createdAt: { gte: start, lt: end } }`
- Avoid complex WHERE clauses that can't utilize indexes

### Count vs Data Separation
- Always separate count queries from data fetching queries
- Use `Promise.all()` to run count and data queries in parallel
- Example pattern from [apps/web/modules/api/v2/management/responses/lib/response.ts](mdc:apps/web/modules/api/v2/management/responses/lib/response.ts):
```typescript
const [responses, totalCount] = await Promise.all([
  prisma.response.findMany(query),
  prisma.response.count({ where: whereClause }),
]);
```

### Monitoring & Debugging
- Monitor AWS RDS Performance Insights for problematic queries
- Look for queries with OFFSET in count operations - these indicate performance issues
- Use proper error handling with `DatabaseError` for Prisma exceptions
105  .cursor/rules/database.mdc  Normal file
@@ -0,0 +1,105 @@

---
description: >
globs: schema.prisma
alwaysApply: false
---
# Formbricks Database Schema Reference

This rule provides a reference to the Formbricks database structure. For the most up-to-date and complete schema definitions, please refer to the schema.prisma file directly.

## Database Overview

Formbricks uses PostgreSQL with Prisma ORM. The schema is designed for multi-tenancy with strong data isolation between organizations.

### Core Hierarchy

```
Organization
└── Project
    └── Environment (production/development)
        ├── Survey
        ├── Contact
        ├── ActionClass
        └── Integration
```

## Schema Reference

For the complete and up-to-date database schema, please refer to:

- Main schema: `packages/database/schema.prisma`
- JSON type definitions: `packages/database/json-types.ts`

The schema.prisma file contains all model definitions, relationships, enums, and field types. The json-types.ts file contains TypeScript type definitions for JSON fields.

## Data Access Patterns

### Multi-tenancy

- All data is scoped by Organization
- Environment-level isolation for surveys and contacts
- Project-level grouping for related surveys

### Soft Deletion

Some models use soft deletion patterns:

- Check `isActive` fields where present
- Use proper filtering in queries

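A minimal sketch of such a filter; the model name is purely illustrative and only applies where the schema actually exposes an `isActive` flag:

```typescript
// Exclude soft-deleted rows by filtering on the flag (model name is illustrative)
const activeRecords = await prisma.someModel.findMany({
  where: { environmentId, isActive: true },
});
```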
### Cascading Deletes

Configured cascade relationships:

- Organization deletion cascades to all child entities
- Survey deletion removes responses, displays, triggers
- Contact deletion removes attributes and responses

## Common Query Patterns

### Survey with Responses

```typescript
// Include response count and latest responses
const survey = await prisma.survey.findUnique({
  where: { id: surveyId },
  include: {
    responses: {
      take: 10,
      orderBy: { createdAt: "desc" },
    },
    _count: {
      select: { responses: true },
    },
  },
});
```

### Environment Scoping

```typescript
// Always scope by environment
const surveys = await prisma.survey.findMany({
  where: {
    environmentId: environmentId,
    // Additional filters...
  },
});
```

### Contact with Attributes

```typescript
const contact = await prisma.contact.findUnique({
  where: { id: contactId },
  include: {
    attributes: {
      include: {
        attributeKey: true,
      },
    },
  },
});
```

This schema supports Formbricks' core functionality: multi-tenant survey management, user targeting, response collection, and analysis, all while maintaining strict data isolation and security.
28  .cursor/rules/documentations.mdc  Normal file
@@ -0,0 +1,28 @@

---
description: Guideline for writing end-user facing documentation in the apps/docs folder
globs:
alwaysApply: false
---

Follow these instructions and guidelines when asked to write documentation in the apps/docs folder.

Follow this structure to write the title and description, pick a matching icon, and insert it at the top of the MDX file:

---
title: "FEATURE NAME"
description: "1 concise sentence to describe WHEN the feature is being used and FOR WHAT BENEFIT."
icon: "link"
---

- Description: 1 concise sentence to describe WHEN the feature is being used and FOR WHAT BENEFIT.
- Make ample use of the Mintlify components you can find at https://mintlify.com/docs/llms.txt - e.g. if docs describe consecutive steps, always use the Mintlify Step component (see the example after the note below).
- In all headlines, capitalize only the current feature name (in Camel Case) and nothing else.
- The page should never start with an H1 headline, because it's already part of the template.
- Tonality: Keep it concise and to the point. Avoid jargon where possible.
- If a feature is part of the Enterprise Edition, use this note:

<Note>
  FEATURE NAME is part of the [Enterprise Edition](/self-hosting/advanced/license)
</Note>
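Example of the Step component mentioned above (component names follow the Mintlify docs; titles and body text are placeholders):

<Steps>
  <Step title="Set up FEATURE NAME">
    Describe the first action the user takes.
  </Step>
  <Step title="Verify it works">
    Describe how the user confirms the setup.
  </Step>
</Steps>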
332
.cursor/rules/formbricks-architecture.mdc
Normal file
@@ -0,0 +1,332 @@
|
|||||||
|
---
|
||||||
|
description:
|
||||||
|
globs:
|
||||||
|
alwaysApply: false
|
||||||
|
---
|
||||||
|
# Formbricks Architecture & Patterns
|
||||||
|
|
||||||
|
## Monorepo Structure
|
||||||
|
|
||||||
|
### Top-Level Directories
|
||||||
|
- `apps/web/` - Main Next.js web application
|
||||||
|
- `packages/` - Shared packages and utilities
|
||||||
|
|
||||||
|
### Key Directories in Web App
|
||||||
|
```
|
||||||
|
apps/web/
|
||||||
|
├── app/ # Next.js 13+ app directory
|
||||||
|
│ ├── (app)/ # Main application routes
|
||||||
|
│ ├── (auth)/ # Authentication routes
|
||||||
|
│ ├── api/ # API routes
|
||||||
|
├── components/ # Shared components
|
||||||
|
├── lib/ # Utility functions and services
|
||||||
|
└── modules/ # Feature-specific modules
|
||||||
|
```
|
||||||
|
|
||||||
|
## Routing Patterns
|
||||||
|
|
||||||
|
### App Router Structure
|
||||||
|
The application uses Next.js 13+ app router with route groups:
|
||||||
|
|
||||||
|
```
|
||||||
|
(app)/environments/[environmentId]/
|
||||||
|
├── surveys/[surveyId]/
|
||||||
|
│ ├── (analysis)/ # Analysis views
|
||||||
|
│ │ ├── responses/ # Response management
|
||||||
|
│ │ ├── summary/ # Survey summary
|
||||||
|
│ │ └── hooks/ # Analysis-specific hooks
|
||||||
|
│ ├── edit/ # Survey editing
|
||||||
|
│ └── settings/ # Survey settings
|
||||||
|
```
|
||||||
|
|
||||||
|
### Dynamic Routes
|
||||||
|
- `[environmentId]` - Environment-specific routes
|
||||||
|
- `[surveyId]` - Survey-specific routes
|
||||||
|
|
||||||
|
## Service Layer Pattern
|
||||||
|
|
||||||
|
### Service Organization
|
||||||
|
Services are organized by domain in `apps/web/lib/`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Example: Response service
|
||||||
|
// apps/web/lib/response/service.ts
|
||||||
|
export const getResponseCount = async ({
|
||||||
|
surveyId,
|
||||||
|
filterCriteria,
|
||||||
|
}: {
|
||||||
|
surveyId: string;
|
||||||
|
filterCriteria: any;
|
||||||
|
}) => {
|
||||||
|
// Service implementation
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
### Action Pattern
|
||||||
|
Server actions follow a consistent pattern:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Action wrapper for service calls
|
||||||
|
export const getResponseCountAction = async (params) => {
|
||||||
|
try {
|
||||||
|
const result = await responseService.getCount(params);
|
||||||
|
return { data: result };
|
||||||
|
} catch (error) {
|
||||||
|
return { error: error.message };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
## Context Patterns
|
||||||
|
|
||||||
|
### Provider Structure
|
||||||
|
Context providers follow a consistent pattern:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Provider component
|
||||||
|
export const ResponseFilterProvider = ({ children }: { children: React.ReactNode }) => {
|
||||||
|
const [selectedFilter, setSelectedFilter] = useState(defaultFilter);
|
||||||
|
|
||||||
|
const value = {
|
||||||
|
selectedFilter,
|
||||||
|
setSelectedFilter,
|
||||||
|
// ... other state and methods
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<ResponseFilterContext.Provider value={value}>
|
||||||
|
{children}
|
||||||
|
</ResponseFilterContext.Provider>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Hook for consuming context
|
||||||
|
export const useResponseFilter = () => {
|
||||||
|
const context = useContext(ResponseFilterContext);
|
||||||
|
if (!context) {
|
||||||
|
throw new Error('useResponseFilter must be used within ResponseFilterProvider');
|
||||||
|
}
|
||||||
|
return context;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
### Context Composition
|
||||||
|
Multiple contexts are often composed together:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Layout component with multiple providers
|
||||||
|
export default function AnalysisLayout({ children }: { children: React.ReactNode }) {
|
||||||
|
return (
|
||||||
|
<ResponseFilterProvider>
|
||||||
|
<ResponseCountProvider>
|
||||||
|
{children}
|
||||||
|
</ResponseCountProvider>
|
||||||
|
</ResponseFilterProvider>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Component Patterns
|
||||||
|
|
||||||
|
### Page Components
|
||||||
|
Page components are located in the app directory and follow this pattern:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/responses/page.tsx
|
||||||
|
export default function ResponsesPage() {
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<ResponsesTable />
|
||||||
|
<ResponsesPagination />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Component Organization
|
||||||
|
- **Pages** - Route components in app directory
|
||||||
|
- **Components** - Reusable UI components
|
||||||
|
- **Modules** - Feature-specific components and logic
|
||||||
|
|
||||||
|
### Shared Components
|
||||||
|
Common components are in `apps/web/components/`:
|
||||||
|
- UI components (buttons, inputs, modals)
|
||||||
|
- Layout components (headers, sidebars)
|
||||||
|
- Data display components (tables, charts)
|
||||||
|
|
||||||
|
## Hook Patterns
|
||||||
|
|
||||||
|
### Custom Hook Structure
|
||||||
|
Custom hooks follow consistent patterns:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export const useResponseCount = ({
|
||||||
|
survey,
|
||||||
|
initialCount
|
||||||
|
}: {
|
||||||
|
survey: TSurvey;
|
||||||
|
initialCount?: number;
|
||||||
|
}) => {
|
||||||
|
const [responseCount, setResponseCount] = useState(initialCount ?? 0);
|
||||||
|
const [isLoading, setIsLoading] = useState(false);
|
||||||
|
|
||||||
|
// Hook logic...
|
||||||
|
|
||||||
|
return {
|
||||||
|
responseCount,
|
||||||
|
isLoading,
|
||||||
|
refetch,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
### Hook Dependencies
|
||||||
|
- Use context hooks for shared state
|
||||||
|
- Implement proper cleanup with AbortController (sketched below)
|
||||||
|
- Optimize dependency arrays to prevent unnecessary re-renders
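
A minimal sketch of the AbortController cleanup pattern (the endpoint and state shape are illustrative, not the actual Formbricks API):

```typescript
import { useEffect, useState } from "react";

export const useResponses = (surveyId: string) => {
  const [responses, setResponses] = useState<unknown[]>([]);

  useEffect(() => {
    const controller = new AbortController();

    const load = async () => {
      try {
        // Illustrative endpoint – replace with the real data source
        const res = await fetch(`/api/responses?surveyId=${surveyId}`, {
          signal: controller.signal,
        });
        setResponses(await res.json());
      } catch (error) {
        // Ignore aborts triggered by the cleanup below
        if ((error as Error).name !== "AbortError") console.error(error);
      }
    };

    void load();

    // Cancel the in-flight request on unmount or when surveyId changes
    return () => controller.abort();
  }, [surveyId]);

  return responses;
};
```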
|
||||||
|
|
||||||
|
## Data Fetching Patterns
|
||||||
|
|
||||||
|
### Server Actions
|
||||||
|
The app uses Next.js server actions for data fetching:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Server action
|
||||||
|
export async function getResponsesAction(params: GetResponsesParams) {
|
||||||
|
const responses = await getResponses(params);
|
||||||
|
return { data: responses };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Client usage
|
||||||
|
const { data } = await getResponsesAction(params);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Error Handling
|
||||||
|
Consistent error handling across the application:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
try {
|
||||||
|
const result = await apiCall();
|
||||||
|
return { data: result };
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Operation failed:", error);
|
||||||
|
return { error: error.message };
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Type Safety
|
||||||
|
|
||||||
|
### Type Organization
|
||||||
|
Types are organized in packages:
|
||||||
|
- `@formbricks/types` - Shared type definitions
|
||||||
|
- Local types in component/hook files
|
||||||
|
|
||||||
|
### Common Types
|
||||||
|
```typescript
|
||||||
|
import { TSurvey } from "@formbricks/types/surveys/types";
|
||||||
|
import { TResponse } from "@formbricks/types/responses";
|
||||||
|
import { TEnvironment } from "@formbricks/types/environment";
|
||||||
|
```
|
||||||
|
|
||||||
|
## State Management
|
||||||
|
|
||||||
|
### Local State
|
||||||
|
- Use `useState` for component-specific state
|
||||||
|
- Use `useReducer` for complex state logic
|
||||||
|
- Use refs for mutable values that don't trigger re-renders
|
||||||
|
|
||||||
|
### Global State
|
||||||
|
- React Context for feature-specific shared state
|
||||||
|
- URL state for filters and pagination (see the sketch below)
|
||||||
|
- Server state through server actions
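
A minimal sketch of keeping filter state in the URL with the app router (hook and parameter names are illustrative):

```typescript
"use client";

import { usePathname, useRouter, useSearchParams } from "next/navigation";

export const useFilterParam = () => {
  const router = useRouter();
  const pathname = usePathname();
  const searchParams = useSearchParams();

  const filter = searchParams.get("filter") ?? "all";

  const setFilter = (value: string) => {
    // Writing the filter to the URL makes it shareable and lets it survive reloads
    const params = new URLSearchParams(searchParams.toString());
    params.set("filter", value);
    router.push(`${pathname}?${params.toString()}`);
  };

  return { filter, setFilter };
};
```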
|
||||||
|
|
||||||
|
## Performance Considerations
|
||||||
|
|
||||||
|
### Code Splitting
|
||||||
|
- Dynamic imports for heavy components (example below)
|
||||||
|
- Route-based code splitting with app router
|
||||||
|
- Lazy loading for non-critical features
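
A minimal sketch of dynamically importing a heavy component with `next/dynamic` (the chart component is hypothetical):

```typescript
import dynamic from "next/dynamic";

// The chart bundle is only loaded when the component is actually rendered
const ResponseChart = dynamic(() => import("./ResponseChart"), {
  ssr: false,
  loading: () => <p>Loading chart…</p>,
});

export const SummaryPanel = () => <ResponseChart />;
```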
|
||||||
|
|
||||||
|
### Caching Strategy
|
||||||
|
- Server-side caching for database queries
|
||||||
|
- Client-side caching with React Query (where applicable)
|
||||||
|
- Static generation for public pages
|
||||||
|
|
||||||
|
## Testing Strategy
|
||||||
|
|
||||||
|
### Test Organization
|
||||||
|
```
|
||||||
|
component/
|
||||||
|
├── Component.tsx
|
||||||
|
├── Component.test.tsx
|
||||||
|
└── hooks/
|
||||||
|
├── useHook.ts
|
||||||
|
└── useHook.test.tsx
|
||||||
|
```
|
||||||
|
|
||||||
|
### Test Patterns
|
||||||
|
- Unit tests for utilities and services
|
||||||
|
- Integration tests for components with context
|
||||||
|
- Hook tests with proper mocking
|
||||||
|
|
||||||
|
## Build & Deployment
|
||||||
|
|
||||||
|
### Build Process
|
||||||
|
- TypeScript compilation
|
||||||
|
- Next.js build optimization
|
||||||
|
- Asset optimization and bundling
|
||||||
|
|
||||||
|
### Environment Configuration
|
||||||
|
- Environment-specific configurations
|
||||||
|
- Feature flags for gradual rollouts
|
||||||
|
- Database connection management
|
||||||
|
|
||||||
|
## Security Patterns
|
||||||
|
|
||||||
|
### Authentication
|
||||||
|
- Session-based authentication
|
||||||
|
- Environment-based access control
|
||||||
|
- API route protection
|
||||||
|
|
||||||
|
### Data Validation
|
||||||
|
- Input validation on both client and server (see the sketch below)
|
||||||
|
- Type-safe API contracts
|
||||||
|
- Sanitization of user inputs
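
A minimal sketch of server-side input validation with Zod; the schema and action names are illustrative, and the return shape follows the `{ data } / { error }` action pattern described above:

```typescript
import { z } from "zod";

// Illustrative schema – validate untrusted input before it reaches the database
const ZCreateTagInput = z.object({
  environmentId: z.string().cuid2(),
  name: z.string().trim().min(1).max(255),
});

export const createTagAction = async (rawInput: unknown) => {
  const parsed = ZCreateTagInput.safeParse(rawInput);
  if (!parsed.success) {
    return { error: "Invalid input" };
  }
  // ...persist parsed.data and return the result
  return { data: parsed.data };
};
```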
|
||||||
|
|
||||||
|
## Monitoring & Observability
|
||||||
|
|
||||||
|
### Error Tracking
|
||||||
|
- Client-side error boundaries
|
||||||
|
- Server-side error logging
|
||||||
|
- Performance monitoring
|
||||||
|
|
||||||
|
### Analytics
|
||||||
|
- User interaction tracking
|
||||||
|
- Performance metrics
|
||||||
|
- Database query monitoring
|
||||||
|
|
||||||
|
## Best Practices Summary
|
||||||
|
|
||||||
|
### Code Organization
|
||||||
|
- ✅ Follow the established directory structure
|
||||||
|
- ✅ Use consistent naming conventions
|
||||||
|
- ✅ Separate concerns (UI, logic, data)
|
||||||
|
- ✅ Keep components focused and small
|
||||||
|
|
||||||
|
### Performance
|
||||||
|
- ✅ Implement proper loading states
|
||||||
|
- ✅ Use AbortController for async operations
|
||||||
|
- ✅ Optimize database queries
|
||||||
|
- ✅ Implement proper caching strategies
|
||||||
|
|
||||||
|
### Type Safety
|
||||||
|
- ✅ Use TypeScript throughout
|
||||||
|
- ✅ Define proper interfaces for props
|
||||||
|
- ✅ Use type guards for runtime validation
|
||||||
|
- ✅ Leverage shared type packages
|
||||||
|
|
||||||
|
### Testing
|
||||||
|
- ✅ Write tests for critical functionality
|
||||||
|
- ✅ Mock external dependencies properly
|
||||||
|
- ✅ Test error scenarios and edge cases
|
||||||
|
- ✅ Maintain good test coverage
|
232  .cursor/rules/github-actions-security.mdc  (new file)
@@ -0,0 +1,232 @@
|
---
|
||||||
|
description: Security best practices and guidelines for writing GitHub Actions and workflows
|
||||||
|
globs: .github/workflows/*.yml,.github/workflows/*.yaml,.github/actions/*/action.yml,.github/actions/*/action.yaml
|
||||||
|
---
|
||||||
|
|
||||||
|
# GitHub Actions Security Best Practices
|
||||||
|
|
||||||
|
## Required Security Measures
|
||||||
|
|
||||||
|
### 1. Set Minimum GITHUB_TOKEN Permissions
|
||||||
|
|
||||||
|
Always explicitly set the minimum required permissions for GITHUB_TOKEN:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
# Only add additional permissions if absolutely necessary:
|
||||||
|
# pull-requests: write # for commenting on PRs
|
||||||
|
# issues: write # for creating/updating issues
|
||||||
|
# checks: write # for publishing check results
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Add Harden-Runner as First Step
|
||||||
|
|
||||||
|
For **every job** on `ubuntu-latest`, add Harden-Runner as the first step:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
- name: Harden the runner
|
||||||
|
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||||
|
with:
|
||||||
|
egress-policy: audit # or 'block' for stricter security
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Pin Actions to Full Commit SHA
|
||||||
|
|
||||||
|
**Always** pin third-party actions to their full commit SHA, not tags:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# ❌ BAD - uses mutable tag
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
# ✅ GOOD - pinned to immutable commit SHA
|
||||||
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Secure Variable Handling
|
||||||
|
|
||||||
|
Prevent command injection by properly quoting variables:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# ❌ BAD - potential command injection
|
||||||
|
run: echo "Processing ${{ inputs.user_input }}"
|
||||||
|
|
||||||
|
# ✅ GOOD - properly quoted
|
||||||
|
env:
|
||||||
|
USER_INPUT: ${{ inputs.user_input }}
|
||||||
|
run: echo "Processing ${USER_INPUT}"
|
||||||
|
```
|
||||||
|
|
||||||
|
Use `${VARIABLE}` syntax in shell scripts instead of `$VARIABLE`.
|
||||||
|
|
||||||
|
### 5. Environment Variables for Secrets
|
||||||
|
|
||||||
|
Store sensitive data in environment variables, not inline:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# ❌ BAD
|
||||||
|
run: curl -H "Authorization: Bearer ${{ secrets.TOKEN }}" api.example.com
|
||||||
|
|
||||||
|
# ✅ GOOD
|
||||||
|
env:
|
||||||
|
API_TOKEN: ${{ secrets.TOKEN }}
|
||||||
|
run: curl -H "Authorization: Bearer ${API_TOKEN}" api.example.com
|
||||||
|
```
|
||||||
|
|
||||||
|
## Workflow Structure Best Practices
|
||||||
|
|
||||||
|
### Required Workflow Elements
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
name: "Descriptive Workflow Name"
|
||||||
|
|
||||||
|
on:
|
||||||
|
# Define specific triggers
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
pull_request:
|
||||||
|
branches: [main]
|
||||||
|
|
||||||
|
# Always set explicit permissions
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
job-name:
|
||||||
|
name: "Descriptive Job Name"
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 30 # tune per job; standardize repo-wide
|
||||||
|
|
||||||
|
# Set job-level permissions if different from workflow level
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
steps:
|
||||||
|
# Always start with Harden-Runner on ubuntu-latest
|
||||||
|
- name: Harden the runner
|
||||||
|
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||||
|
with:
|
||||||
|
egress-policy: audit
|
||||||
|
|
||||||
|
# Pin all actions to commit SHA
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||||
|
```
|
||||||
|
|
||||||
|
### Input Validation for Actions
|
||||||
|
|
||||||
|
For composite actions, always validate inputs:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
inputs:
|
||||||
|
user_input:
|
||||||
|
description: "User provided input"
|
||||||
|
required: true
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: "composite"
|
||||||
|
steps:
|
||||||
|
- name: Validate input
|
||||||
|
shell: bash
env:
  USER_INPUT: ${{ inputs.user_input }}
run: |
  # Harden the shell, then validate the input format/content before use
  set -euo pipefail

  if [[ ! "${USER_INPUT}" =~ ^[A-Za-z0-9._-]+$ ]]; then
|
||||||
|
echo "❌ Invalid input format"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
```
|
||||||
|
|
||||||
|
## Docker Security in Actions
|
||||||
|
|
||||||
|
### Pin Docker Images to Digests
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# ❌ BAD - mutable tag
|
||||||
|
container: node:18
|
||||||
|
|
||||||
|
# ✅ GOOD - pinned to digest
|
||||||
|
container: node:18@sha256:a1ba21bf0c92931d02a8416f0a54daad66cb36a85d6a37b82dfe1604c4c09cad
|
||||||
|
```
|
||||||
|
|
||||||
|
## Common Patterns
|
||||||
|
|
||||||
|
### Secure File Operations
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
- name: Process files securely
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
FILE_PATH: ${{ inputs.file_path }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail # Fail on errors, undefined vars, pipe failures
|
||||||
|
|
||||||
|
# Use absolute paths and validate
|
||||||
|
SAFE_PATH=$(realpath "${FILE_PATH}")
|
||||||
|
if [[ "$SAFE_PATH" != "${GITHUB_WORKSPACE}"/* ]]; then
|
||||||
|
echo "❌ Path outside workspace"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
```
|
||||||
|
|
||||||
|
### Artifact Handling
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
- name: Upload artifacts securely
|
||||||
|
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
|
||||||
|
with:
|
||||||
|
name: build-artifacts
|
||||||
|
path: |
|
||||||
|
dist/
|
||||||
|
!dist/**/*.log # Exclude sensitive files
|
||||||
|
retention-days: 30
|
||||||
|
```
|
||||||
|
|
||||||
|
### GHCR authentication for pulls/scans
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# Minimal permissions required for GHCR pulls/scans
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: read
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Log in to GitHub Container Registry
|
||||||
|
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{ github.actor }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Security Checklist
|
||||||
|
|
||||||
|
- [ ] Minimum GITHUB_TOKEN permissions set
|
||||||
|
- [ ] Harden-Runner added to all ubuntu-latest jobs
|
||||||
|
- [ ] All third-party actions pinned to commit SHA
|
||||||
|
- [ ] Input validation implemented for custom actions
|
||||||
|
- [ ] Variables properly quoted in shell scripts
|
||||||
|
- [ ] Secrets stored in environment variables
|
||||||
|
- [ ] Docker images pinned to digests (if used)
|
||||||
|
- [ ] Error handling with `set -euo pipefail`
|
||||||
|
- [ ] File paths validated and sanitized
|
||||||
|
- [ ] No sensitive data in logs or outputs
|
||||||
|
- [ ] GHCR login performed before pulls/scans (packages: read)
|
||||||
|
- [ ] Job timeouts configured (`timeout-minutes`)
|
||||||
|
|
||||||
|
## Recommended Additional Workflows
|
||||||
|
|
||||||
|
Consider adding these security-focused workflows to your repository:
|
||||||
|
|
||||||
|
1. **CodeQL Analysis** - Static Application Security Testing (SAST)
|
||||||
|
2. **Dependency Review** - Scan for vulnerable dependencies in PRs
|
||||||
|
3. **Dependabot Configuration** - Automated dependency updates
|
||||||
|
|
||||||
|
## Resources
|
||||||
|
|
||||||
|
- [GitHub Security Hardening Guide](https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions)
|
||||||
|
- [Step Security Harden-Runner](https://github.com/step-security/harden-runner)
|
||||||
|
- [Secure-Repo Best Practices](https://github.com/step-security/secure-repo)
|
457  .cursor/rules/i18n-management.mdc  (new file)
@@ -0,0 +1,457 @@
|
---
|
||||||
|
title: i18n Management with Lingo.dev
|
||||||
|
description: Guidelines for managing internationalization (i18n) with Lingo.dev, including translation workflow, key validation, and best practices
|
||||||
|
---
|
||||||
|
|
||||||
|
# i18n Management with Lingo.dev
|
||||||
|
|
||||||
|
This rule defines the workflow and best practices for managing internationalization (i18n) in the Formbricks project using Lingo.dev.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Formbricks uses [Lingo.dev](https://lingo.dev) for managing translations across multiple languages. The translation workflow includes:
|
||||||
|
|
||||||
|
1. **Translation Keys**: Defined in code using the `t()` function from `react-i18next`
|
||||||
|
2. **Translation Files**: JSON files stored in `apps/web/locales/` for each supported language
|
||||||
|
3. **Validation**: Automated scanning to detect missing and unused translation keys
|
||||||
|
4. **CI/CD**: Pre-commit hooks and GitHub Actions to enforce translation quality
|
||||||
|
|
||||||
|
## Translation Workflow
|
||||||
|
|
||||||
|
### 1. Using Translations in Code
|
||||||
|
|
||||||
|
When adding translatable text in the web app, use the `t()` function or `<Trans>` component:
|
||||||
|
|
||||||
|
**Using the `t()` function:**
|
||||||
|
```tsx
|
||||||
|
import { useTranslate } from "@/lib/i18n/translate";
|
||||||
|
|
||||||
|
const MyComponent = () => {
|
||||||
|
const { t } = useTranslate();
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<h1>{t("common.welcome")}</h1>
|
||||||
|
<p>{t("pages.dashboard.description")}</p>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
**Using the `<Trans>` component (for text with HTML elements):**
|
||||||
|
```tsx
|
||||||
|
import { Trans } from "react-i18next";
|
||||||
|
|
||||||
|
const MyComponent = () => {
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<p>
|
||||||
|
<Trans
|
||||||
|
i18nKey="auth.terms_agreement"
|
||||||
|
components={{
|
||||||
|
link: <a href="/terms" />,
|
||||||
|
b: <b />
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
**Key Naming Conventions:**
|
||||||
|
- Use dot notation for nested keys: `section.subsection.key`
|
||||||
|
- Use descriptive names: `auth.login.success_message` not `auth.msg1`
|
||||||
|
- Group related keys together: `auth.*`, `errors.*`, `common.*`
|
||||||
|
- Use lowercase with underscores: `user_profile_settings` not `UserProfileSettings`
|
||||||
|
|
||||||
|
### 2. Translation File Structure
|
||||||
|
|
||||||
|
Translation files are located in `apps/web/locales/` and use the following naming convention:
|
||||||
|
- `en-US.json` (English - United States, default)
|
||||||
|
- `de-DE.json` (German)
|
||||||
|
- `fr-FR.json` (French)
|
||||||
|
- `pt-BR.json` (Portuguese - Brazil)
|
||||||
|
- etc.
|
||||||
|
|
||||||
|
**File Structure:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"common": {
|
||||||
|
"welcome": "Welcome",
|
||||||
|
"save": "Save",
|
||||||
|
"cancel": "Cancel"
|
||||||
|
},
|
||||||
|
"auth": {
|
||||||
|
"login": {
|
||||||
|
"title": "Login",
|
||||||
|
"email_placeholder": "Enter your email",
|
||||||
|
"password_placeholder": "Enter your password"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Adding New Translation Keys
|
||||||
|
|
||||||
|
When adding new translation keys:
|
||||||
|
|
||||||
|
1. **Add the key in your code** using `t("your.new.key")`
|
||||||
|
2. **Add translation for that key in en-US.json file**
|
||||||
|
3. **Run the translation workflow:**
|
||||||
|
```bash
|
||||||
|
pnpm i18n
|
||||||
|
```
|
||||||
|
This will:
|
||||||
|
- Generate translations for all languages using Lingo.dev
|
||||||
|
- Validate that all keys are present and used
|
||||||
|
|
||||||
|
4. **Review and commit** the generated translation files
|
||||||
|
|
||||||
|
### 4. Available Scripts
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Generate translations using Lingo.dev
|
||||||
|
pnpm generate-translations
|
||||||
|
|
||||||
|
# Scan and validate translation keys
|
||||||
|
pnpm scan-translations
|
||||||
|
|
||||||
|
# Full workflow: generate + validate
|
||||||
|
pnpm i18n
|
||||||
|
|
||||||
|
# Validate only (without generation)
|
||||||
|
pnpm i18n:validate
|
||||||
|
```
|
||||||
|
|
||||||
|
## Translation Key Validation
|
||||||
|
|
||||||
|
### Automated Validation
|
||||||
|
|
||||||
|
The project includes automated validation that runs:
|
||||||
|
- **Pre-commit hook**: Validates translations before allowing commits (when `LINGODOTDEV_API_KEY` is set)
|
||||||
|
- **GitHub Actions**: Validates translations on every PR and push to main
|
||||||
|
|
||||||
|
### Validation Rules
|
||||||
|
|
||||||
|
The validation script (`scan-translations.ts`) checks for:
|
||||||
|
|
||||||
|
1. **Missing Keys**: Translation keys used in code but not present in translation files
|
||||||
|
2. **Unused Keys**: Translation keys present in translation files but not used in code
|
||||||
|
3. **Incomplete Translations**: Keys that exist in the default language (`en-US`) but are missing in target languages
|
||||||
|
|
||||||
|
**What gets scanned:**
|
||||||
|
- All `.ts` and `.tsx` files in `apps/web/`
|
||||||
|
- Both `t()` function calls and `<Trans i18nKey="">` components
|
||||||
|
- All locale files (`de-DE.json`, `fr-FR.json`, `ja-JP.json`, etc.)
|
||||||
|
|
||||||
|
**What gets excluded:**
|
||||||
|
- Test files (`*.test.ts`, `*.test.tsx`, `*.spec.ts`, `*.spec.tsx`)
|
||||||
|
- Build directories (`node_modules`, `dist`, `build`, `.next`, `coverage`)
|
||||||
|
- Locale files themselves (from code scanning)
|
||||||
|
|
||||||
|
**Note:** Test files are excluded because they often use mock or example translation keys for testing purposes that don't need to exist in production translation files.
|
||||||
|
|
||||||
|
### Fixing Validation Errors
|
||||||
|
|
||||||
|
#### Missing Keys
|
||||||
|
|
||||||
|
If you encounter missing key errors:
|
||||||
|
|
||||||
|
```
|
||||||
|
❌ MISSING KEYS (2):
|
||||||
|
|
||||||
|
These keys are used in code but not found in translation files:
|
||||||
|
|
||||||
|
• auth.signup.email_required
|
||||||
|
• settings.profile.update_success
|
||||||
|
```
|
||||||
|
|
||||||
|
**Resolution:**
|
||||||
|
1. Ensure that translations for those keys are present in `en-US.json`.
|
||||||
|
2. Run `pnpm generate-translations` to have Lingo.dev generate the missing translations
|
||||||
|
3. OR manually add the keys to `apps/web/locales/en-US.json`:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"auth": {
|
||||||
|
"signup": {
|
||||||
|
"email_required": "Email is required"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"settings": {
|
||||||
|
"profile": {
|
||||||
|
"update_success": "Profile updated successfully"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
4. Run `pnpm scan-translations` to verify
5. Commit the changes
|
||||||
|
|
||||||
|
#### Unused Keys
|
||||||
|
|
||||||
|
If you encounter unused key errors:
|
||||||
|
|
||||||
|
```
|
||||||
|
⚠️ UNUSED KEYS (1):
|
||||||
|
|
||||||
|
These keys exist in translation files but are not used in code:
|
||||||
|
|
||||||
|
• old.deprecated.key
|
||||||
|
```
|
||||||
|
|
||||||
|
**Resolution:**
|
||||||
|
1. If the key is truly unused, remove it from all translation files
|
||||||
|
2. If the key should be used, add it to your code using `t("old.deprecated.key")`
|
||||||
|
3. Run `pnpm scan-translations` to verify
|
||||||
|
4. Commit the changes
|
||||||
|
|
||||||
|
#### Incomplete Translations
|
||||||
|
|
||||||
|
If you encounter incomplete translation errors:
|
||||||
|
|
||||||
|
```
|
||||||
|
⚠️ INCOMPLETE TRANSLATIONS:
|
||||||
|
|
||||||
|
Some keys from en-US are missing in target languages:
|
||||||
|
|
||||||
|
📝 de-DE (5 missing keys):
|
||||||
|
• auth.new_feature.title
|
||||||
|
• auth.new_feature.description
|
||||||
|
• settings.advanced.option
|
||||||
|
... and 2 more
|
||||||
|
```
|
||||||
|
|
||||||
|
**Resolution:**
|
||||||
|
1. **Recommended:** Run `pnpm generate-translations` to have Lingo.dev automatically translate the missing keys
|
||||||
|
2. **Manual:** Add the missing keys to the target language files:
|
||||||
|
```bash
|
||||||
|
# Copy the structure from en-US.json and translate the values
|
||||||
|
# For example, in de-DE.json:
|
||||||
|
{
|
||||||
|
"auth": {
|
||||||
|
"new_feature": {
|
||||||
|
"title": "Neues Feature",
|
||||||
|
"description": "Beschreibung des neuen Features"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
3. Run `pnpm scan-translations` to verify all translations are complete
|
||||||
|
4. Commit the changes
|
||||||
|
|
||||||
|
## Pre-commit Hook Behavior
|
||||||
|
|
||||||
|
The pre-commit hook will:
|
||||||
|
|
||||||
|
1. Run `lint-staged` for code formatting
|
||||||
|
2. If `LINGODOTDEV_API_KEY` is set:
|
||||||
|
- Generate translations using Lingo.dev
|
||||||
|
- Validate translation keys
|
||||||
|
- Auto-add updated locale files to the commit
|
||||||
|
- **Block the commit** if validation fails
|
||||||
|
3. If `LINGODOTDEV_API_KEY` is not set:
|
||||||
|
- Skip translation validation (for community contributors)
|
||||||
|
- Show a warning message
|
||||||
|
|
||||||
|
## Environment Variables
|
||||||
|
|
||||||
|
### LINGODOTDEV_API_KEY
|
||||||
|
|
||||||
|
This is the API key for Lingo.dev integration.
|
||||||
|
|
||||||
|
**For Core Team:**
|
||||||
|
- Add to your local `.env` file
|
||||||
|
- Required for running translation generation
|
||||||
|
|
||||||
|
**For Community Contributors:**
|
||||||
|
- Not required for local development
|
||||||
|
- Translation validation will be skipped
|
||||||
|
- The CI will still validate translations
|
||||||
|
|
||||||
|
## Best Practices
|
||||||
|
|
||||||
|
### 1. Keep Keys Organized
|
||||||
|
|
||||||
|
Group related keys together:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"auth": {
|
||||||
|
"login": { ... },
|
||||||
|
"signup": { ... },
|
||||||
|
"forgot_password": { ... }
|
||||||
|
},
|
||||||
|
"dashboard": {
|
||||||
|
"header": { ... },
|
||||||
|
"sidebar": { ... }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Avoid Hardcoded Strings
|
||||||
|
|
||||||
|
**❌ Bad:**
|
||||||
|
```tsx
|
||||||
|
<button>Click here</button>
|
||||||
|
```
|
||||||
|
|
||||||
|
**✅ Good:**
|
||||||
|
```tsx
|
||||||
|
<button>{t("common.click_here")}</button>
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Use Interpolation for Dynamic Content
|
||||||
|
|
||||||
|
**❌ Bad:**
|
||||||
|
```tsx
|
||||||
|
{t("welcome")} {userName}!
|
||||||
|
```
|
||||||
|
|
||||||
|
**✅ Good:**
|
||||||
|
```tsx
|
||||||
|
{t("auth.welcome_message", { userName })}
|
||||||
|
```
|
||||||
|
|
||||||
|
With translation:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"auth": {
|
||||||
|
"welcome_message": "Welcome, {userName}!"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Avoid Dynamic Key Construction
|
||||||
|
|
||||||
|
**❌ Bad:**
|
||||||
|
```tsx
|
||||||
|
const key = `errors.${errorCode}`;
|
||||||
|
t(key);
|
||||||
|
```
|
||||||
|
|
||||||
|
**✅ Good:**
|
||||||
|
```tsx
|
||||||
|
switch (errorCode) {
|
||||||
|
case "401":
|
||||||
|
return t("errors.unauthorized");
|
||||||
|
case "404":
|
||||||
|
return t("errors.not_found");
|
||||||
|
default:
|
||||||
|
return t("errors.unknown");
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5. Test Translation Keys
|
||||||
|
|
||||||
|
When adding new features:
|
||||||
|
1. Add translation keys
|
||||||
|
2. Test in multiple languages using the language switcher
|
||||||
|
3. Ensure text doesn't overflow in longer translations (German, French)
|
||||||
|
4. Run `pnpm scan-translations` before committing
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Issue: Pre-commit hook fails with validation errors
|
||||||
|
|
||||||
|
**Solution:**
|
||||||
|
```bash
|
||||||
|
# Run the full i18n workflow
|
||||||
|
pnpm i18n
|
||||||
|
|
||||||
|
# Fix any missing or unused keys
|
||||||
|
# Then commit again
|
||||||
|
git add .
|
||||||
|
git commit -m "your message"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Issue: Translation validation passes locally but fails in CI
|
||||||
|
|
||||||
|
**Solution:**
|
||||||
|
- Ensure all translation files are committed
|
||||||
|
- Check that `scan-translations.ts` hasn't been modified
|
||||||
|
- Verify that locale files are properly formatted JSON
|
||||||
|
|
||||||
|
### Issue: Cannot commit because of missing translations
|
||||||
|
|
||||||
|
**Solution:**
|
||||||
|
```bash
|
||||||
|
# If you have LINGODOTDEV_API_KEY:
|
||||||
|
pnpm generate-translations
|
||||||
|
|
||||||
|
# If you don't have the API key (community contributor):
|
||||||
|
# Manually add the missing keys to en-US.json
|
||||||
|
# Then run validation:
|
||||||
|
pnpm scan-translations
|
||||||
|
```
|
||||||
|
|
||||||
|
### Issue: Getting "unused keys" for keys that are used
|
||||||
|
|
||||||
|
**Solution:**
|
||||||
|
- The script scans `.ts` and `.tsx` files only
|
||||||
|
- If keys are used in other file types, they may be flagged
|
||||||
|
- Verify the key is actually used with `grep -r "your.key" apps/web/`
|
||||||
|
- If it's a false positive, consider updating the scanning patterns in `scan-translations.ts`
|
||||||
|
|
||||||
|
## AI Assistant Guidelines
|
||||||
|
|
||||||
|
When assisting with i18n-related tasks, always:
|
||||||
|
|
||||||
|
1. **Use the `t()` function** for all user-facing text
|
||||||
|
2. **Follow key naming conventions** (lowercase, dots for nesting)
|
||||||
|
3. **Run validation** after making changes: `pnpm scan-translations`
|
||||||
|
4. **Fix missing keys** by adding them to `en-US.json`
|
||||||
|
5. **Remove unused keys** from all translation files
|
||||||
|
6. **Test the pre-commit hook** if making changes to translation workflow
|
||||||
|
7. **Update this rule file** if translation workflow changes
|
||||||
|
|
||||||
|
### Fixing Missing Translation Keys
|
||||||
|
|
||||||
|
When the AI encounters missing translation key errors:
|
||||||
|
|
||||||
|
1. Identify the missing keys from the error output
|
||||||
|
2. Determine the appropriate section and naming for each key
|
||||||
|
3. Add the keys to `apps/web/locales/en-US.json` with meaningful English text
|
||||||
|
4. Ensure proper JSON structure and nesting
|
||||||
|
5. Run `pnpm scan-translations` to verify
|
||||||
|
6. Inform the user that other language files will be updated via Lingo.dev
|
||||||
|
|
||||||
|
**Example:**
|
||||||
|
```typescript
|
||||||
|
// Error: Missing key "settings.api.rate_limit_exceeded"
|
||||||
|
|
||||||
|
// Add to en-US.json:
|
||||||
|
{
|
||||||
|
"settings": {
|
||||||
|
"api": {
|
||||||
|
"rate_limit_exceeded": "API rate limit exceeded. Please try again later."
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Removing Unused Translation Keys
|
||||||
|
|
||||||
|
When the AI encounters unused translation key errors:
|
||||||
|
|
||||||
|
1. Verify the keys are truly unused by searching the codebase
|
||||||
|
2. Remove the keys from `apps/web/locales/en-US.json`
|
||||||
|
3. Note that removal from other language files can be handled via Lingo.dev
|
||||||
|
4. Run `pnpm scan-translations` to verify
|
||||||
|
|
||||||
|
## Migration Notes
|
||||||
|
|
||||||
|
This project previously used Tolgee for translations. As of this migration:
|
||||||
|
|
||||||
|
- **Old scripts**: `tolgee-pull` is deprecated (kept for reference)
|
||||||
|
- **New scripts**: Use `pnpm i18n` or `pnpm generate-translations`
|
||||||
|
- **Old workflows**: `tolgee.yml` and `tolgee-missing-key-check.yml` removed
|
||||||
|
- **New workflow**: `translation-check.yml` handles all validation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Last Updated:** October 14, 2025
|
||||||
|
**Related Files:**
|
||||||
|
- `scan-translations.ts` - Translation validation script
|
||||||
|
- `.husky/pre-commit` - Pre-commit hook with i18n validation
|
||||||
|
- `.github/workflows/translation-check.yml` - CI workflow for translation validation
|
||||||
|
- `apps/web/locales/*.json` - Translation files
|
52  .cursor/rules/react-context-patterns.mdc  (new file)
@@ -0,0 +1,52 @@
|
---
|
||||||
|
description:
|
||||||
|
globs:
|
||||||
|
alwaysApply: false
|
||||||
|
---
|
||||||
|
# React Context & Provider Patterns
|
||||||
|
|
||||||
|
## Context Provider Best Practices
|
||||||
|
|
||||||
|
### Provider Implementation
|
||||||
|
- Use TypeScript interfaces for provider props with optional `initialCount` for testing
|
||||||
|
- Implement proper cleanup in `useEffect` to avoid React hooks warnings
|
||||||
|
- Reference: [apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/components/ResponseCountProvider.tsx](mdc:apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/components/ResponseCountProvider.tsx)
|
||||||
|
|
||||||
|
### Cleanup Pattern for Refs
|
||||||
|
```typescript
|
||||||
|
useEffect(() => {
|
||||||
|
const currentPendingRequests = pendingRequests.current;
|
||||||
|
const currentAbortController = abortController.current;
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
if (currentAbortController) {
|
||||||
|
currentAbortController.abort();
|
||||||
|
}
|
||||||
|
currentPendingRequests.clear();
|
||||||
|
};
|
||||||
|
}, []);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Testing Context Providers
|
||||||
|
- Always wrap components using context in the provider during tests
|
||||||
|
- Use `initialCount` prop for predictable test scenarios
|
||||||
|
- Mock context dependencies like `useParams`, `useResponseFilter`
|
||||||
|
- Example from [apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/summary/components/SurveyAnalysisCTA.test.tsx](mdc:apps/web/app/(app)/environments/[environmentId]/surveys/[surveyId]/(analysis)/summary/components/SurveyAnalysisCTA.test.tsx):
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
render(
|
||||||
|
<ResponseCountProvider survey={dummySurvey} initialCount={5}>
|
||||||
|
<ComponentUnderTest />
|
||||||
|
</ResponseCountProvider>
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Required Mocks for Context Testing
|
||||||
|
- Mock `next/navigation` with `useParams` returning environment and survey IDs
|
||||||
|
- Mock response filter context and actions
|
||||||
|
- Mock API actions that the provider depends on
|
||||||
|
|
||||||
|
### Context Hook Usage
|
||||||
|
- Create custom hooks like `useResponseCountContext()` for consuming context
|
||||||
|
- Provide meaningful error messages when context is used outside provider
|
||||||
|
- Use context for shared state that multiple components need to access
|
179  .cursor/rules/review-and-refine.mdc  (new file)
@@ -0,0 +1,179 @@
|
---
|
||||||
|
description: Apply these quality standards before finalizing code changes to ensure DRY principles, React best practices, TypeScript conventions, and maintainable code.
|
||||||
|
globs:
|
||||||
|
alwaysApply: false
|
||||||
|
---
|
||||||
|
|
||||||
|
# Review & Refine
|
||||||
|
|
||||||
|
Before finalizing any code changes, review your implementation against these quality standards:
|
||||||
|
|
||||||
|
## Core Principles
|
||||||
|
|
||||||
|
### DRY (Don't Repeat Yourself)
|
||||||
|
|
||||||
|
- Extract duplicated logic into reusable functions or hooks
|
||||||
|
- If the same code appears in multiple places, consolidate it
|
||||||
|
- Create helper functions at appropriate scope (component-level, module-level, or utility files)
|
||||||
|
- Avoid copy-pasting code blocks
|
||||||
|
|
||||||
|
### Code Reduction
|
||||||
|
|
||||||
|
- Remove unnecessary code, comments, and abstractions
|
||||||
|
- Prefer built-in solutions over custom implementations
|
||||||
|
- Consolidate similar logic
|
||||||
|
- Remove dead code and unused imports
|
||||||
|
- Question if every line of code is truly needed
|
||||||
|
|
||||||
|
## React Best Practices
|
||||||
|
|
||||||
|
### Component Design
|
||||||
|
|
||||||
|
- Keep components focused on a single responsibility
|
||||||
|
- Extract complex logic into custom hooks
|
||||||
|
- Prefer composition over prop drilling (see the sketch below)
|
||||||
|
- Use children props and render props when appropriate
|
||||||
|
- Keep component files under 300 lines when possible
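
A minimal sketch of composition instead of prop drilling (component names are illustrative):

```typescript
interface PanelProps {
  title: string;
  children: React.ReactNode;
}

// The panel stays generic – it never needs to know about survey-specific props
export const Panel = ({ title, children }: PanelProps) => (
  <section>
    <h2>{title}</h2>
    {children}
  </section>
);

// Usage: the parent composes the content instead of threading props through Panel
// <Panel title="Responses">
//   <ResponseList responses={responses} />
// </Panel>
```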
|
||||||
|
|
||||||
|
### Hooks Usage
|
||||||
|
|
||||||
|
- Follow Rules of Hooks (only call at top level, only in React functions)
|
||||||
|
- Extract complex `useEffect` logic into custom hooks
|
||||||
|
- Use `useMemo` and `useCallback` only when you have a measured performance issue
|
||||||
|
- Declare dependencies arrays correctly - don't ignore exhaustive-deps warnings
|
||||||
|
- Keep `useEffect` focused on a single concern
|
||||||
|
|
||||||
|
### State Management
|
||||||
|
|
||||||
|
- Colocate state as close as possible to where it's used
|
||||||
|
- Lift state only when necessary
|
||||||
|
- Use `useReducer` for complex state logic with multiple sub-values
|
||||||
|
- Avoid derived state - compute values during render instead
|
||||||
|
- Don't store values in state that can be computed from props
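
A minimal sketch of the last two points: the value is computed during render instead of being mirrored in state (component and prop names are illustrative):

```typescript
interface CompletionBadgeProps {
  responses: { finished: boolean }[];
}

export const CompletionBadge = ({ responses }: CompletionBadgeProps) => {
  // Derived during render – no useState/useEffect needed to keep it in sync
  const completedCount = responses.filter((response) => response.finished).length;

  return <span>{completedCount} completed</span>;
};
```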
|
||||||
|
|
||||||
|
### Event Handlers
|
||||||
|
|
||||||
|
- Name event handlers with `handle` prefix (e.g., `handleClick`, `handleSubmit`)
|
||||||
|
- Extract complex event handler logic into separate functions
|
||||||
|
- Avoid inline arrow functions in JSX when they contain complex logic
|
||||||
|
|
||||||
|
## TypeScript Best Practices
|
||||||
|
|
||||||
|
### Type Safety
|
||||||
|
|
||||||
|
- Prefer type inference over explicit types when possible
|
||||||
|
- Use `const` assertions for literal types
|
||||||
|
- Avoid `any` - use `unknown` if type is truly unknown
|
||||||
|
- Use discriminated unions for complex conditional logic
|
||||||
|
- Leverage type guards and narrowing
|
||||||
|
|
||||||
|
### Interface & Type Usage
|
||||||
|
|
||||||
|
- Use existing types from `@formbricks/types` - don't recreate them
|
||||||
|
- Prefer `interface` for object shapes that might be extended
|
||||||
|
- Prefer `type` for unions, intersections, and mapped types
|
||||||
|
- Define types close to where they're used unless they're shared
|
||||||
|
- Export types from index files for shared types
|
||||||
|
|
||||||
|
### Type Assertions
|
||||||
|
|
||||||
|
- Avoid type assertions (`as`) when possible
|
||||||
|
- Use type guards instead of assertions (see the sketch below)
|
||||||
|
- Only assert when you have more information than TypeScript
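
A minimal sketch of a type guard used in place of an assertion; the `questions` check is an assumption about the survey shape, used only for illustration:

```typescript
import { TSurvey } from "@formbricks/types/surveys/types";

// Narrow an unknown value instead of writing `value as TSurvey`
const isSurvey = (value: unknown): value is TSurvey => {
  return typeof value === "object" && value !== null && "questions" in value;
};

const handleUnknown = (value: unknown) => {
  if (isSurvey(value)) {
    // value is typed as TSurvey inside this branch
    console.log(value.id);
  }
};
```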
|
||||||
|
|
||||||
|
## Code Organization
|
||||||
|
|
||||||
|
### Separation of Concerns
|
||||||
|
|
||||||
|
- Separate business logic from UI rendering
|
||||||
|
- Extract API calls into separate functions or modules
|
||||||
|
- Keep data transformation separate from component logic
|
||||||
|
- Use custom hooks for stateful logic that doesn't render UI
|
||||||
|
|
||||||
|
### Function Clarity
|
||||||
|
|
||||||
|
- Functions should do one thing well
|
||||||
|
- Name functions clearly and descriptively
|
||||||
|
- Keep functions small (aim for under 20 lines)
|
||||||
|
- Extract complex conditionals into named boolean variables or functions
|
||||||
|
- Avoid deep nesting (max 3 levels)
|
||||||
|
|
||||||
|
### File Structure
|
||||||
|
|
||||||
|
- Group related functions together
|
||||||
|
- Order declarations logically (types → hooks → helpers → component)
|
||||||
|
- Keep imports organized (external → internal → relative)
|
||||||
|
- Consider splitting large files by concern
|
||||||
|
|
||||||
|
## Additional Quality Checks
|
||||||
|
|
||||||
|
### Performance
|
||||||
|
|
||||||
|
- Don't optimize prematurely - measure first
|
||||||
|
- Avoid creating new objects/arrays/functions in render unnecessarily
|
||||||
|
- Use keys properly in lists (stable, unique identifiers)
|
||||||
|
- Lazy load heavy components when appropriate
|
||||||
|
|
||||||
|
### Accessibility
|
||||||
|
|
||||||
|
- Use semantic HTML elements
|
||||||
|
- Include ARIA labels where needed
|
||||||
|
- Ensure keyboard navigation works
|
||||||
|
- Check color contrast and focus states
|
||||||
|
|
||||||
|
### Error Handling
|
||||||
|
|
||||||
|
- Handle error states in components
|
||||||
|
- Provide user feedback for failed operations
|
||||||
|
- Use error boundaries for component errors
|
||||||
|
- Log errors appropriately (avoid swallowing errors silently)
|
||||||
|
|
||||||
|
### Naming Conventions
|
||||||
|
|
||||||
|
- Use descriptive names (avoid abbreviations unless very common)
|
||||||
|
- Boolean variables/props should sound like yes/no questions (`isLoading`, `hasError`, `canEdit`)
|
||||||
|
- Arrays should be plural (`users`, `choices`, `items`)
|
||||||
|
- Event handlers: `handleX` in components, `onX` for props
|
||||||
|
- Constants in UPPER_SNAKE_CASE only for true constants
|
||||||
|
|
||||||
|
### Code Readability
|
||||||
|
|
||||||
|
- Prefer early returns to reduce nesting (example below)
|
||||||
|
- Use destructuring to make code clearer
|
||||||
|
- Break complex expressions into named variables
|
||||||
|
- Add comments only when code can't be made self-explanatory
|
||||||
|
- Use whitespace to group related code
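
A minimal sketch of the early-return style (names are illustrative):

```typescript
const getDisplayName = (user?: { name?: string; email?: string }) => {
  // Early returns keep the happy path flat instead of nesting if/else blocks
  if (!user) return "Anonymous";
  if (user.name) return user.name;
  return user.email ?? "Anonymous";
};
```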
|
||||||
|
|
||||||
|
### Testing Considerations
|
||||||
|
|
||||||
|
- Write code that's easy to test (pure functions, clear inputs/outputs)
|
||||||
|
- Avoid hard-to-mock dependencies when possible
|
||||||
|
- Keep side effects at the edges of your code
|
||||||
|
|
||||||
|
## Review Checklist
|
||||||
|
|
||||||
|
Before submitting your changes, ask yourself:
|
||||||
|
|
||||||
|
1. **DRY**: Is there any duplicated logic I can extract?
|
||||||
|
2. **Clarity**: Would another developer understand this code easily?
|
||||||
|
3. **Simplicity**: Is this the simplest solution that works?
|
||||||
|
4. **Types**: Am I using TypeScript effectively?
|
||||||
|
5. **React**: Am I following React idioms and best practices?
|
||||||
|
6. **Performance**: Are there obvious performance issues?
|
||||||
|
7. **Separation**: Are concerns properly separated?
|
||||||
|
8. **Testing**: Is this code testable?
|
||||||
|
9. **Maintenance**: Will this be easy to change in 6 months?
|
||||||
|
10. **Deletion**: Can I remove any code and still accomplish the goal?
|
||||||
|
|
||||||
|
## When to Apply This Rule
|
||||||
|
|
||||||
|
Apply this rule:
|
||||||
|
|
||||||
|
- After implementing a feature but before marking it complete
|
||||||
|
- When you notice your code feels "messy" or complex
|
||||||
|
- Before requesting code review
|
||||||
|
- When you see yourself copy-pasting code
|
||||||
|
- After receiving feedback about code quality
|
||||||
|
|
||||||
|
Don't let perfect be the enemy of good, but always strive for:
|
||||||
|
**Simple, readable, maintainable code that does one thing well.**
|
216  .cursor/rules/storybook-component-migration.mdc  (new file)
@@ -0,0 +1,216 @@
|
---
|
||||||
|
description: Migrate deprecated UI components to a unified component
|
||||||
|
globs:
|
||||||
|
alwaysApply: false
|
||||||
|
---
|
||||||
|
# Component Migration Automation Rule
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
This rule automates the migration of deprecated components to new component systems in React/TypeScript codebases.
|
||||||
|
|
||||||
|
## Trigger
|
||||||
|
When the user requests component migration (e.g., "migrate [DeprecatedComponent] to [NewComponent]" or "component migration").
|
||||||
|
|
||||||
|
## Process
|
||||||
|
|
||||||
|
### Step 1: Discovery and Planning
|
||||||
|
1. **Identify migration parameters:**
|
||||||
|
- Ask user for deprecated component name (e.g., "Modal")
|
||||||
|
- Ask user for new component name(s) (e.g., "Dialog")
|
||||||
|
- Ask for any components to exclude (e.g., "ModalWithTabs")
|
||||||
|
- Ask for specific import paths if needed
|
||||||
|
|
||||||
|
2. **Scan codebase** for deprecated components:
|
||||||
|
- Search for `import.*[DeprecatedComponent]` patterns
|
||||||
|
- Exclude specified components that should not be migrated
|
||||||
|
- List all found components with file paths
|
||||||
|
- Present numbered list to user for confirmation
|
||||||
|
|
||||||
|
### Step 2: Component-by-Component Migration
|
||||||
|
For each component, follow this exact sequence:
|
||||||
|
|
||||||
|
#### 2.1 Component Migration
|
||||||
|
- **Import changes:**
|
||||||
|
- Ask user to provide the new import structure
|
||||||
|
- Example transformation pattern:
|
||||||
|
```typescript
|
||||||
|
// FROM:
|
||||||
|
import { [DeprecatedComponent] } from "@/components/ui/[DeprecatedComponent]"
|
||||||
|
|
||||||
|
// TO:
|
||||||
|
import {
|
||||||
|
[NewComponent],
|
||||||
|
[NewComponentPart1],
|
||||||
|
[NewComponentPart2],
|
||||||
|
// ... other parts
|
||||||
|
} from "@/components/ui/[NewComponent]"
|
||||||
|
```
|
||||||
|
|
||||||
|
- **Props transformation:**
|
||||||
|
- Ask user for prop mapping rules (e.g., `open` → `open`, `setOpen` → `onOpenChange`)
|
||||||
|
- Ask for props to remove (e.g., `noPadding`, `closeOnOutsideClick`, `size`)
|
||||||
|
- Apply transformations based on user specifications
|
||||||
|
|
||||||
|
- **Structure transformation:**
|
||||||
|
- Ask user for the new component structure pattern
|
||||||
|
- Apply the transformation maintaining all functionality
|
||||||
|
- Preserve all existing logic, state management, and event handlers
|
||||||
|
|
||||||
|
#### 2.2 Wait for User Approval
|
||||||
|
- Present the migration changes
|
||||||
|
- Wait for explicit user approval before proceeding
|
||||||
|
- If rejected, ask for specific feedback and iterate
|
||||||
|
#### 2.3 Re-read and Apply Additional Changes
|
||||||
|
- Re-read the component file to capture any user modifications
|
||||||
|
- Apply any additional improvements the user made
|
||||||
|
- Ensure all changes are incorporated
|
||||||
|
|
||||||
|
#### 2.4 Test File Updates
|
||||||
|
- **Find corresponding test file** (same name with `.test.tsx` or `.test.ts`)
|
||||||
|
- **Update test mocks:**
|
||||||
|
- Ask user for new component mock structure
|
||||||
|
- Replace old component mocks with new ones
|
||||||
|
- Example pattern:
|
||||||
|
```typescript
|
||||||
|
// Add to test setup:
|
||||||
|
jest.mock("@/components/ui/[NewComponent]", () => ({
|
||||||
|
[NewComponent]: ({ children, [props] }: any) => ([mock implementation]),
|
||||||
|
[NewComponentPart1]: ({ children }: any) => <div data-testid="[new-component-part1]">{children}</div>,
|
||||||
|
[NewComponentPart2]: ({ children }: any) => <div data-testid="[new-component-part2]">{children}</div>,
|
||||||
|
// ... other parts
|
||||||
|
}));
|
||||||
|
```
|
||||||
|
- **Update test expectations:**
|
||||||
|
- Change test IDs from old component to new component
|
||||||
|
- Update any component-specific assertions
|
||||||
|
- Ensure all new component parts used in the component are mocked
|
||||||
|
|
||||||
|
#### 2.5 Run Tests and Optimize
|
||||||
|
- Execute `pnpm test -- ComponentName.test.tsx`
|
||||||
|
- Fix any failing tests
|
||||||
|
- Optimize code quality (imports, formatting, etc.)
|
||||||
|
- Re-run tests until all pass
|
||||||
|
- **Maximum 3 iterations** - if still failing, ask user for guidance
|
||||||
|
|
||||||
|
#### 2.6 Wait for Final Approval
|
||||||
|
- Present test results and any optimizations made
|
||||||
|
- Wait for user approval of the complete migration
|
||||||
|
- If rejected, iterate based on feedback
|
||||||
|
|
||||||
|
#### 2.7 Git Commit
|
||||||
|
- Run: `git add .`
|
||||||
|
- Run: `git commit -m "migrate [ComponentName] from [DeprecatedComponent] to [NewComponent]"`
|
||||||
|
- Confirm commit was successful
|
||||||
|
|
||||||
|
### Step 3: Final Report Generation
|
||||||
|
After all components are migrated, generate a comprehensive GitHub PR report:
|
||||||
|
|
||||||
|
#### PR Title
|
||||||
|
```
|
||||||
|
feat: migrate [DeprecatedComponent] components to [NewComponent] system
|
||||||
|
```
|
||||||
|
|
||||||
|
#### PR Description Template
|
||||||
|
```markdown
|
||||||
|
## 🔄 [DeprecatedComponent] to [NewComponent] Migration
|
||||||
|
|
||||||
|
### Overview
|
||||||
|
Migrated [X] [DeprecatedComponent] components to the new [NewComponent] component system to modernize the UI architecture and improve consistency.
|
||||||
|
|
||||||
|
### Components Migrated
|
||||||
|
[List each component with file path]
|
||||||
|
|
||||||
|
### Technical Changes
|
||||||
|
- **Imports:** Replaced `[DeprecatedComponent]` with `[NewComponent], [NewComponentParts...]`
|
||||||
|
- **Props:** [List prop transformations]
|
||||||
|
- **Structure:** Implemented proper [NewComponent] component hierarchy
|
||||||
|
- **Styling:** [Describe styling changes]
|
||||||
|
- **Tests:** Updated all test mocks and expectations
|
||||||
|
|
||||||
|
### Migration Pattern
|
||||||
|
```typescript
|
||||||
|
// Before
|
||||||
|
<[DeprecatedComponent] [oldProps]>
|
||||||
|
[oldStructure]
|
||||||
|
</[DeprecatedComponent]>
|
||||||
|
|
||||||
|
// After
|
||||||
|
<[NewComponent] [newProps]>
|
||||||
|
[newStructure]
|
||||||
|
</[NewComponent]>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Testing
|
||||||
|
- ✅ All existing tests updated and passing
|
||||||
|
- ✅ Component functionality preserved
|
||||||
|
- ✅ UI/UX behavior maintained
|
||||||
|
|
||||||
|
### How to Test This PR
|
||||||
|
1. **Functional Testing:**
|
||||||
|
- Navigate to each migrated component's usage
|
||||||
|
- Verify [component] opens and closes correctly
|
||||||
|
- Test all interactive elements within [components]
|
||||||
|
- Confirm styling and layout are preserved
|
||||||
|
|
||||||
|
2. **Automated Testing:**
|
||||||
|
```bash
|
||||||
|
pnpm test
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Visual Testing:**
|
||||||
|
- Check that all [components] maintain proper styling
|
||||||
|
- Verify responsive behavior
|
||||||
|
- Test keyboard navigation and accessibility
|
||||||
|
|
||||||
|
### Breaking Changes
|
||||||
|
[List any breaking changes or state "None - this is a drop-in replacement maintaining all existing functionality."]
|
||||||
|
|
||||||
|
### Notes
|
||||||
|
- [Any excluded components] were preserved as they already use [NewComponent] internally
|
||||||
|
- All form validation and complex state management preserved
|
||||||
|
- Enhanced code quality with better imports and formatting
|
||||||
|
```
|
||||||
|
|
||||||
|
## Special Considerations
|
||||||
|
|
||||||
|
### Excluded Components
|
||||||
|
- **DO NOT MIGRATE** components specified by user as exclusions
|
||||||
|
- They may already use the new component internally or have other reasons
|
||||||
|
- Inform user these are skipped and why
|
||||||
|
|
||||||
|
### Complex Components
|
||||||
|
- Preserve all existing functionality (forms, validation, state management)
|
||||||
|
- Maintain prop interfaces
|
||||||
|
- Keep all event handlers and callbacks
|
||||||
|
- Preserve accessibility features
|
||||||
|
|
||||||
|
### Test Coverage
|
||||||
|
- Ensure all new component parts are mocked when used
|
||||||
|
- Mock all new component parts that appear in the component
|
||||||
|
- Update test IDs from old component to new component
|
||||||
|
- Maintain all existing test scenarios
|
||||||
|
|
||||||
|
### Error Handling
|
||||||
|
- If tests fail after 3 iterations, stop and ask user for guidance
|
||||||
|
- If component is too complex, ask user for specific guidance
|
||||||
|
- If unsure about functionality preservation, ask for clarification
|
||||||
|
|
||||||
|
### Migration Patterns
|
||||||
|
- Always ask user for specific migration patterns before starting
|
||||||
|
- Confirm import structures, prop mappings, and component hierarchies
|
||||||
|
- Adapt to different component architectures (simple replacements, complex restructuring, etc.)
|
||||||
|
|
||||||
|
## Success Criteria
|
||||||
|
- All deprecated components successfully migrated to new components
|
||||||
|
- All tests passing
|
||||||
|
- No functionality lost
|
||||||
|
- Code quality maintained or improved
|
||||||
|
- User approval on each component
|
||||||
|
- Successful git commits for each migration
|
||||||
|
- Comprehensive PR report generated
|
||||||
|
|
||||||
|
## Usage Examples
|
||||||
|
- "migrate Modal to Dialog"
|
||||||
|
- "migrate Button to NewButton"
|
||||||
|
- "migrate Card to ModernCard"
|
||||||
|
- "component migration" (will prompt for details)
|
177  .cursor/rules/storybook-create-new-story.mdc  (new file)
@@ -0,0 +1,177 @@
|
---
|
||||||
|
description: Create a story in Storybook for a given component
|
||||||
|
globs:
|
||||||
|
alwaysApply: false
|
||||||
|
---
|
||||||
|
|
||||||
|
# Formbricks Storybook Stories
|
||||||
|
|
||||||
|
## When generating Storybook stories for Formbricks components:
|
||||||
|
|
||||||
|
### 1. **File Structure**
|
||||||
|
- Create `stories.tsx` (not `.stories.tsx`) in component directory
|
||||||
|
- Use exact import: `import { Meta, StoryObj } from "@storybook/react-vite";`
|
||||||
|
- Import component from `"./index"`
|
||||||
|
|
||||||
|
### 2. **Story Structure Template**

```tsx
import { Meta, StoryObj } from "@storybook/react-vite";

import { ComponentName } from "./index";

// For complex components with configurable options.
// This is an example only; the options need to reflect the component's prop types.
interface StoryOptions {
  showIcon: boolean;
  numberOfElements: number;
  customLabels: string[];
}

type StoryProps = React.ComponentProps<typeof ComponentName> & StoryOptions;

const meta: Meta<StoryProps> = {
  title: "UI/ComponentName",
  component: ComponentName,
  tags: ["autodocs"],
  parameters: {
    layout: "centered",
    controls: { sort: "alpha", exclude: [] },
    docs: {
      description: {
        component: "The **ComponentName** component provides [description].",
      },
    },
  },
  argTypes: {
    // Organize in exactly these categories: Behavior, Appearance, Content
  },
};

export default meta;
type Story = StoryObj<typeof ComponentName> & { args: StoryOptions };
```

### 3. **ArgTypes Organization**

Organize ALL argTypes into exactly three categories:

- **Behavior**: disabled, variant, onChange, etc.
- **Appearance**: size, color, layout, styling, etc.
- **Content**: text, icons, numberOfElements, etc.

Format:

```tsx
argTypes: {
  propName: {
    control: "select" | "boolean" | "text" | "number",
    options: ["option1", "option2"], // for select
    description: "Clear description",
    table: {
      category: "Behavior" | "Appearance" | "Content",
      type: { summary: "string" },
      defaultValue: { summary: "default" },
    },
    order: 1,
  },
}
```

### 4. **Required Stories**

Every component must include (see the example set below):

- `Default`: Most common use case
- `Disabled`: If the component supports a disabled state
- `WithIcon`: If the component supports icons
- Variant stories for each variant (Primary, Secondary, Error, etc.)
- Edge case stories (ManyElements, LongText, CustomStyling)

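As a sketch, the required set for a hypothetical `Button` component could look like this, reusing the `Story` type from the template in section 2; the prop names, the `PlusIcon` import, and the survey-flavored labels are assumptions:

```tsx
import { PlusIcon } from "lucide-react"; // icon library assumed for this sketch

export const Default: Story = {
  args: { children: "Create survey" },
};

export const Disabled: Story = {
  args: { children: "Publish survey", disabled: true },
};

export const WithIcon: Story = {
  args: { children: "Add question", icon: <PlusIcon /> },
};

export const Secondary: Story = {
  args: { children: "Cancel", variant: "secondary" },
};

export const LongText: Story = {
  args: { children: "Send this survey to everyone who completed onboarding in the last 30 days" },
};
```
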
### 5. **Story Format**

```tsx
export const Default: Story = {
  args: {
    // Props with realistic values
  },
};

export const EdgeCase: Story = {
  args: { /* ... */ },
  parameters: {
    docs: {
      description: {
        story: "Use this when [specific scenario].",
      },
    },
  },
};
```

### 6. **Dynamic Content Pattern**

For components with dynamic content, create a render function:

```tsx
const renderComponent = (args: StoryProps) => {
  const { numberOfElements, showIcon, customLabels } = args;

  // Generate dynamic content
  const elements = Array.from({ length: numberOfElements }, (_, i) => ({
    id: `element-${i}`,
    label: customLabels[i] || `Element ${i + 1}`,
    icon: showIcon ? <IconComponent /> : undefined,
  }));

  return <ComponentName {...args} elements={elements} />;
};

export const Dynamic: Story = {
  render: renderComponent,
  args: {
    numberOfElements: 3,
    showIcon: true,
    customLabels: ["First", "Second", "Third"],
  },
};
```

### 7. **State Management**

For interactive components:

```tsx
import { useState } from "react";

const ComponentWithState = (args: any) => {
  const [value, setValue] = useState(args.defaultValue);

  return (
    <ComponentName
      {...args}
      value={value}
      onChange={(newValue) => {
        setValue(newValue);
        args.onChange?.(newValue);
      }}
    />
  );
};

export const Interactive: Story = {
  render: ComponentWithState,
  args: { defaultValue: "initial" },
};
```

### 8. **Quality Requirements**

- Include a component description in parameters.docs
- Add story documentation for non-obvious use cases
- Test edge cases (overflow, empty states, many elements)
- Ensure no TypeScript errors
- Use realistic prop values
- Include at least 3-5 story variants
- Example values need to be in the context of a survey application (see the edge-case sketch below)

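For instance, edge cases for the dynamic pattern above could be covered like this; this is a sketch that reuses `renderComponent` and the `Story` type from the earlier sections, and the empty-state copy is an assumption:

```tsx
export const EmptyState: Story = {
  render: renderComponent,
  args: { numberOfElements: 0, showIcon: false, customLabels: [] },
  parameters: {
    docs: {
      description: {
        story: "Shown when the survey has no questions yet.",
      },
    },
  },
};

export const ManyElements: Story = {
  render: renderComponent,
  args: { numberOfElements: 25, showIcon: true, customLabels: [] },
};
```
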
### 9. **Naming Conventions**

- **Story titles**: "UI/ComponentName" (example below)
- **Story exports**: PascalCase (Default, WithIcon, ManyElements)
- **Categories**: "Behavior", "Appearance", "Content" (exact spelling)
- **Props**: camelCase matching component props

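Applied to a hypothetical `ProgressBar` component, these conventions would look like the following sketch:

```tsx
import { Meta, StoryObj } from "@storybook/react-vite";

import { ProgressBar } from "./index";

const meta: Meta<typeof ProgressBar> = {
  title: "UI/ProgressBar", // "UI/" prefix + PascalCase component name
  component: ProgressBar,
  argTypes: {
    // camelCase prop, category spelled exactly "Appearance"
    barColor: { control: "text", table: { category: "Appearance" } },
  },
};
export default meta;

type Story = StoryObj<typeof ProgressBar>;

export const Default: Story = {};
export const WithLabel: Story = {}; // PascalCase export names
```
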
### 10. **Special Cases**

- **Generic components**: Remove `component` from meta if types conflict
- **Form components**: Include Invalid, WithValue stories
- **Navigation**: Include ManyItems stories
- **Modals, Dropdowns and Popups**: Include the trigger and content structure (see the sketch below)

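A sketch of the modal case; the `Dialog` part names, the `asChild` trigger pattern, and the `Button` import path are assumptions, so use the component's real sub-parts:

```tsx
import { Meta, StoryObj } from "@storybook/react-vite";

import { Button } from "../button"; // trigger element, path assumed
import { Dialog, DialogContent, DialogTitle, DialogTrigger } from "./index";

const meta: Meta<typeof Dialog> = { title: "UI/Dialog", component: Dialog, tags: ["autodocs"] };
export default meta;

type Story = StoryObj<typeof Dialog>;

// Include both the trigger and the content structure in the story.
export const Default: Story = {
  render: (args) => (
    <Dialog {...args}>
      <DialogTrigger asChild>
        <Button>Delete response</Button>
      </DialogTrigger>
      <DialogContent>
        <DialogTitle>Delete this response?</DialogTitle>
        <p>This permanently removes the response from the survey results.</p>
      </DialogContent>
    </Dialog>
  ),
};
```
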
## Generate stories that are comprehensive, well-documented, and reflect all component states and edge cases.
72
.env.example
@@ -9,8 +9,12 @@
|
|||||||
WEBAPP_URL=http://localhost:3000
|
WEBAPP_URL=http://localhost:3000
|
||||||
|
|
||||||
# Required for next-auth. Should be the same as WEBAPP_URL
|
# Required for next-auth. Should be the same as WEBAPP_URL
|
||||||
|
# If your application uses a custom base path, specify the route to the API endpoint in full, e.g. NEXTAUTH_URL=https://example.com/custom-route/api/auth
|
||||||
NEXTAUTH_URL=http://localhost:3000
|
NEXTAUTH_URL=http://localhost:3000
|
||||||
|
|
||||||
|
# Can be used to deploy the application under a sub-path of a domain. This can only be set at build time
|
||||||
|
# BASE_PATH=
|
||||||
|
|
||||||
# Encryption keys
|
# Encryption keys
|
||||||
# Please set both for now, we will change this in the future
|
# Please set both for now, we will change this in the future
|
||||||
|
|
||||||
@@ -62,9 +66,6 @@ SMTP_PASSWORD=smtpPassword
|
|||||||
|
|
||||||
# Uncomment the variables you would like to use and customize the values.
|
# Uncomment the variables you would like to use and customize the values.
|
||||||
|
|
||||||
# Custom local storage path for file uploads
|
|
||||||
#UPLOADS_DIR=
|
|
||||||
|
|
||||||
##############
|
##############
|
||||||
# S3 STORAGE #
|
# S3 STORAGE #
|
||||||
##############
|
##############
|
||||||
@@ -80,6 +81,9 @@ S3_ENDPOINT_URL=
|
|||||||
# Force path style for S3 compatible storage (0 for disabled, 1 for enabled)
|
# Force path style for S3 compatible storage (0 for disabled, 1 for enabled)
|
||||||
S3_FORCE_PATH_STYLE=0
|
S3_FORCE_PATH_STYLE=0
|
||||||
|
|
||||||
|
# Set this URL to add a public domain for all your client-facing routes (default is WEBAPP_URL)
|
||||||
|
# PUBLIC_URL=https://survey.example.com
|
||||||
|
|
||||||
#####################
|
#####################
|
||||||
# Disable Features #
|
# Disable Features #
|
||||||
#####################
|
#####################
|
||||||
@@ -90,18 +94,12 @@ EMAIL_VERIFICATION_DISABLED=1
|
|||||||
# Password Reset. If you enable Password Reset functionality you have to setup SMTP-Settings, too.
|
# Password Reset. If you enable Password Reset functionality you have to setup SMTP-Settings, too.
|
||||||
PASSWORD_RESET_DISABLED=1
|
PASSWORD_RESET_DISABLED=1
|
||||||
|
|
||||||
# Signup. Disable the ability for new users to create an account.
|
|
||||||
# Note: This variable is only available to the SaaS setup of Formbricks Cloud. Signup is disabled by default for self-hosting.
|
|
||||||
# SIGNUP_DISABLED=1
|
|
||||||
|
|
||||||
# Email login. Disable the ability for users to login with email.
|
# Email login. Disable the ability for users to login with email.
|
||||||
# EMAIL_AUTH_DISABLED=1
|
# EMAIL_AUTH_DISABLED=1
|
||||||
|
|
||||||
# Organization Invite. Disable the ability for invited users to create an account.
|
# Organization Invite. Disable the ability for invited users to create an account.
|
||||||
# INVITE_DISABLED=1
|
# INVITE_DISABLED=1
|
||||||
|
|
||||||
# Docker cron jobs. Disable the supercronic cron jobs in the Docker image (useful for cluster setups).
|
|
||||||
# DOCKER_CRON_ENABLED=1
|
|
||||||
|
|
||||||
##########
|
##########
|
||||||
# Other #
|
# Other #
|
||||||
@@ -114,9 +112,13 @@ IMPRINT_URL=
|
|||||||
IMPRINT_ADDRESS=
|
IMPRINT_ADDRESS=
|
||||||
|
|
||||||
# Configure Turnstile in signup flow
|
# Configure Turnstile in signup flow
|
||||||
# NEXT_PUBLIC_TURNSTILE_SITE_KEY=
|
# TURNSTILE_SITE_KEY=
|
||||||
# TURNSTILE_SECRET_KEY=
|
# TURNSTILE_SECRET_KEY=
|
||||||
|
|
||||||
|
# Google reCAPTCHA v3 keys
|
||||||
|
RECAPTCHA_SITE_KEY=
|
||||||
|
RECAPTCHA_SECRET_KEY=
|
||||||
|
|
||||||
# Configure Github Login
|
# Configure Github Login
|
||||||
GITHUB_ID=
|
GITHUB_ID=
|
||||||
GITHUB_SECRET=
|
GITHUB_SECRET=
|
||||||
@@ -151,11 +153,6 @@ NOTION_OAUTH_CLIENT_SECRET=
|
|||||||
STRIPE_SECRET_KEY=
|
STRIPE_SECRET_KEY=
|
||||||
STRIPE_WEBHOOK_SECRET=
|
STRIPE_WEBHOOK_SECRET=
|
||||||
|
|
||||||
# Configure Formbricks usage within Formbricks
|
|
||||||
NEXT_PUBLIC_FORMBRICKS_API_HOST=
|
|
||||||
NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID=
|
|
||||||
NEXT_PUBLIC_FORMBRICKS_ONBOARDING_SURVEY_ID=
|
|
||||||
|
|
||||||
# Oauth credentials for Google sheet integration
|
# Oauth credentials for Google sheet integration
|
||||||
GOOGLE_SHEETS_CLIENT_ID=
|
GOOGLE_SHEETS_CLIENT_ID=
|
||||||
GOOGLE_SHEETS_CLIENT_SECRET=
|
GOOGLE_SHEETS_CLIENT_SECRET=
|
||||||
@@ -174,8 +171,8 @@ ENTERPRISE_LICENSE_KEY=
|
|||||||
# Automatically assign new users to a specific organization and role within that organization
|
# Automatically assign new users to a specific organization and role within that organization
|
||||||
# Insert an existing organization id or generate a valid CUID for a new one at https://www.getuniqueid.com/cuid (e.g. cjld2cjxh0000qzrmn831i7rn)
|
# Insert an existing organization id or generate a valid CUID for a new one at https://www.getuniqueid.com/cuid (e.g. cjld2cjxh0000qzrmn831i7rn)
|
||||||
# (Role Management is an Enterprise feature)
|
# (Role Management is an Enterprise feature)
|
||||||
# DEFAULT_ORGANIZATION_ID=
|
# AUTH_SSO_DEFAULT_TEAM_ID=
|
||||||
# DEFAULT_ORGANIZATION_ROLE=owner
|
# AUTH_SKIP_INVITE_FOR_SSO=
|
||||||
|
|
||||||
# Send new users to Brevo
|
# Send new users to Brevo
|
||||||
# BREVO_API_KEY=
|
# BREVO_API_KEY=
|
||||||
@@ -191,29 +188,38 @@ ENTERPRISE_LICENSE_KEY=
|
|||||||
UNSPLASH_ACCESS_KEY=
|
UNSPLASH_ACCESS_KEY=
|
||||||
|
|
||||||
# The below is used for Next Caching (uses In-Memory from Next Cache if not provided)
|
# The below is used for Next Caching (uses In-Memory from Next Cache if not provided)
|
||||||
# You can also add more configuration to Redis using the redis.conf file in the root directory
|
|
||||||
REDIS_URL=redis://localhost:6379
|
REDIS_URL=redis://localhost:6379
|
||||||
REDIS_DEFAULT_TTL=86400 # 1 day
|
|
||||||
|
|
||||||
# The below is used for Rate Limiting (uses In-Memory LRU Cache if not provided) (You can use a service like Webdis for this)
|
# The below is used for Rate Limiting (uses In-Memory LRU Cache if not provided) (You can use a service like Webdis for this)
|
||||||
# REDIS_HTTP_URL:
|
# REDIS_HTTP_URL:
|
||||||
|
|
||||||
# The below is used for Rate Limiting for management API
|
# Chatwoot
|
||||||
UNKEY_ROOT_KEY=
|
# CHATWOOT_BASE_URL=
|
||||||
|
# CHATWOOT_WEBSITE_TOKEN=
|
||||||
# Disable custom cache handler if necessary (e.g. if deployed on Vercel)
|
|
||||||
# CUSTOM_CACHE_DISABLED=1
|
|
||||||
|
|
||||||
# Azure AI settings
|
|
||||||
# AI_AZURE_RESSOURCE_NAME=
|
|
||||||
# AI_AZURE_API_KEY=
|
|
||||||
# AI_AZURE_EMBEDDINGS_DEPLOYMENT_ID=
|
|
||||||
# AI_AZURE_LLM_DEPLOYMENT_ID=
|
|
||||||
|
|
||||||
# INTERCOM_APP_ID=
|
|
||||||
# INTERCOM_SECRET_KEY=
|
|
||||||
|
|
||||||
# Enable Prometheus metrics
|
# Enable Prometheus metrics
|
||||||
# PROMETHEUS_ENABLED=
|
# PROMETHEUS_ENABLED=
|
||||||
# PROMETHEUS_EXPORTER_PORT=
|
# PROMETHEUS_EXPORTER_PORT=
|
||||||
|
|
||||||
|
# The SENTRY_DSN is used for error tracking and performance monitoring with Sentry.
|
||||||
|
# SENTRY_DSN=
|
||||||
|
# The SENTRY_AUTH_TOKEN variable is picked up by the Sentry Build Plugin.
|
||||||
|
# It's used automatically by Sentry during the build for authentication when uploading source maps.
|
||||||
|
# SENTRY_AUTH_TOKEN=
|
||||||
|
# The SENTRY_ENVIRONMENT is the environment which the error will belong to in the Sentry dashboard
|
||||||
|
# SENTRY_ENVIRONMENT=
|
||||||
|
|
||||||
|
# Configure the minimum role for user management from UI(owner, manager, disabled)
|
||||||
|
# USER_MANAGEMENT_MINIMUM_ROLE="manager"
|
||||||
|
|
||||||
|
# Configure the maximum age for the session in seconds. Default is 86400 (24 hours)
|
||||||
|
# SESSION_MAX_AGE=86400
|
||||||
|
|
||||||
|
# Audit logs options. Default 0.
|
||||||
|
# AUDIT_LOG_ENABLED=0
|
||||||
|
# If the ip should be added in the log or not. Default 0
|
||||||
|
# AUDIT_LOG_GET_USER_IP=0
|
||||||
|
|
||||||
|
|
||||||
|
# Lingo.dev API key for translation generation
|
||||||
|
LINGODOTDEV_API_KEY=your_api_key_here
|
||||||
13
.eslintrc.cjs
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
module.exports = {
|
||||||
|
root: true,
|
||||||
|
ignorePatterns: ["node_modules/", "dist/", "coverage/"],
|
||||||
|
overrides: [
|
||||||
|
{
|
||||||
|
files: ["packages/cache/**/*.{ts,js}"],
|
||||||
|
extends: ["@formbricks/eslint-config/library.js"],
|
||||||
|
parserOptions: {
|
||||||
|
project: "./packages/cache/tsconfig.json",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
1
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
@@ -1,6 +1,7 @@
|
|||||||
name: Bug report
|
name: Bug report
|
||||||
description: "Found a bug? Please fill out the sections below. \U0001F44D"
|
description: "Found a bug? Please fill out the sections below. \U0001F44D"
|
||||||
type: bug
|
type: bug
|
||||||
|
projects: "formbricks/8"
|
||||||
labels: ["bug"]
|
labels: ["bug"]
|
||||||
body:
|
body:
|
||||||
- type: textarea
|
- type: textarea
|
||||||
|
|||||||
2
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,4 +1,4 @@
|
|||||||
blank_issues_enabled: false
|
blank_issues_enabled: true
|
||||||
contact_links:
|
contact_links:
|
||||||
- name: Questions
|
- name: Questions
|
||||||
url: https://github.com/formbricks/formbricks/discussions
|
url: https://github.com/formbricks/formbricks/discussions
|
||||||
|
|||||||
1
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
@@ -1,6 +1,7 @@
|
|||||||
name: Feature request
|
name: Feature request
|
||||||
description: "Suggest an idea for this project \U0001F680"
|
description: "Suggest an idea for this project \U0001F680"
|
||||||
type: feature
|
type: feature
|
||||||
|
projects: "formbricks/21"
|
||||||
body:
|
body:
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: problem-description
|
id: problem-description
|
||||||
|
|||||||
11
.github/ISSUE_TEMPLATE/task.yml
vendored
@@ -1,11 +0,0 @@
|
|||||||
name: Task (internal)
|
|
||||||
description: "Template for creating a task. Used by the Formbricks Team only \U0001f4e5"
|
|
||||||
type: task
|
|
||||||
body:
|
|
||||||
- type: textarea
|
|
||||||
id: task-summary
|
|
||||||
attributes:
|
|
||||||
label: Task description
|
|
||||||
description: A clear detailed-rich description of the task.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
319
.github/actions/build-and-push-docker/action.yml
vendored
Normal file
@@ -0,0 +1,319 @@
|
|||||||
|
name: Build and Push Docker Image
|
||||||
|
description: |
|
||||||
|
Unified Docker build and push action for both ECR and GHCR registries.
|
||||||
|
|
||||||
|
Supports:
|
||||||
|
- ECR builds for Formbricks Cloud deployment
|
||||||
|
- GHCR builds for community self-hosting
|
||||||
|
- Automatic version resolution and tagging
|
||||||
|
- Conditional signing and deployment tags
|
||||||
|
|
||||||
|
inputs:
|
||||||
|
registry_type:
|
||||||
|
description: "Registry type: 'ecr' or 'ghcr'"
|
||||||
|
required: true
|
||||||
|
|
||||||
|
# Version input
|
||||||
|
version:
|
||||||
|
description: "Explicit version (SemVer only, e.g., 1.2.3). If provided, this version is used directly. If empty, version is auto-generated from branch name."
|
||||||
|
required: false
|
||||||
|
experimental_mode:
|
||||||
|
description: "Enable experimental timestamped versions"
|
||||||
|
required: false
|
||||||
|
default: "false"
|
||||||
|
|
||||||
|
# ECR specific inputs
|
||||||
|
ecr_registry:
|
||||||
|
description: "ECR registry URL (required for ECR builds)"
|
||||||
|
required: false
|
||||||
|
ecr_repository:
|
||||||
|
description: "ECR repository name (required for ECR builds)"
|
||||||
|
required: false
|
||||||
|
ecr_region:
|
||||||
|
description: "ECR AWS region (required for ECR builds)"
|
||||||
|
required: false
|
||||||
|
aws_role_arn:
|
||||||
|
description: "AWS role ARN for ECR authentication (required for ECR builds)"
|
||||||
|
required: false
|
||||||
|
|
||||||
|
# GHCR specific inputs
|
||||||
|
ghcr_image_name:
|
||||||
|
description: "GHCR image name (required for GHCR builds)"
|
||||||
|
required: false
|
||||||
|
|
||||||
|
# Deployment options
|
||||||
|
deploy_production:
|
||||||
|
description: "Tag image for production deployment"
|
||||||
|
required: false
|
||||||
|
default: "false"
|
||||||
|
deploy_staging:
|
||||||
|
description: "Tag image for staging deployment"
|
||||||
|
required: false
|
||||||
|
default: "false"
|
||||||
|
is_prerelease:
|
||||||
|
description: "Whether this is a prerelease (auto-tags for staging/production)"
|
||||||
|
required: false
|
||||||
|
default: "false"
|
||||||
|
make_latest:
|
||||||
|
description: "Whether to tag as latest/production (from GitHub release 'Set as the latest release' option)"
|
||||||
|
required: false
|
||||||
|
default: "false"
|
||||||
|
|
||||||
|
# Build options
|
||||||
|
dockerfile:
|
||||||
|
description: "Path to Dockerfile"
|
||||||
|
required: false
|
||||||
|
default: "apps/web/Dockerfile"
|
||||||
|
context:
|
||||||
|
description: "Build context"
|
||||||
|
required: false
|
||||||
|
default: "."
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
image_tag:
|
||||||
|
description: "Resolved image tag used for the build"
|
||||||
|
value: ${{ steps.version.outputs.version }}
|
||||||
|
registry_tags:
|
||||||
|
description: "Complete registry tags that were pushed"
|
||||||
|
value: ${{ steps.build.outputs.tags }}
|
||||||
|
image_digest:
|
||||||
|
description: "Image digest from the build"
|
||||||
|
value: ${{ steps.build.outputs.digest }}
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: "composite"
|
||||||
|
steps:
|
||||||
|
- name: Validate inputs
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
REGISTRY_TYPE: ${{ inputs.registry_type }}
|
||||||
|
ECR_REGISTRY: ${{ inputs.ecr_registry }}
|
||||||
|
ECR_REPOSITORY: ${{ inputs.ecr_repository }}
|
||||||
|
ECR_REGION: ${{ inputs.ecr_region }}
|
||||||
|
AWS_ROLE_ARN: ${{ inputs.aws_role_arn }}
|
||||||
|
GHCR_IMAGE_NAME: ${{ inputs.ghcr_image_name }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
if [[ "$REGISTRY_TYPE" != "ecr" && "$REGISTRY_TYPE" != "ghcr" ]]; then
|
||||||
|
echo "ERROR: registry_type must be 'ecr' or 'ghcr', got: $REGISTRY_TYPE"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$REGISTRY_TYPE" == "ecr" ]]; then
|
||||||
|
if [[ -z "$ECR_REGISTRY" || -z "$ECR_REPOSITORY" || -z "$ECR_REGION" || -z "$AWS_ROLE_ARN" ]]; then
|
||||||
|
echo "ERROR: ECR builds require ecr_registry, ecr_repository, ecr_region, and aws_role_arn"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$REGISTRY_TYPE" == "ghcr" ]]; then
|
||||||
|
if [[ -z "$GHCR_IMAGE_NAME" ]]; then
|
||||||
|
echo "ERROR: GHCR builds require ghcr_image_name"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "SUCCESS: Input validation passed for $REGISTRY_TYPE build"
|
||||||
|
|
||||||
|
- name: Resolve Docker version
|
||||||
|
id: version
|
||||||
|
uses: ./.github/actions/resolve-docker-version
|
||||||
|
with:
|
||||||
|
version: ${{ inputs.version }}
|
||||||
|
current_branch: ${{ github.ref_name }}
|
||||||
|
experimental_mode: ${{ inputs.experimental_mode }}
|
||||||
|
|
||||||
|
- name: Update package.json version
|
||||||
|
uses: ./.github/actions/update-package-version
|
||||||
|
with:
|
||||||
|
version: ${{ steps.version.outputs.version }}
|
||||||
|
|
||||||
|
- name: Configure AWS credentials (ECR only)
|
||||||
|
if: ${{ inputs.registry_type == 'ecr' }}
|
||||||
|
uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a # v4.2.0
|
||||||
|
with:
|
||||||
|
role-to-assume: ${{ inputs.aws_role_arn }}
|
||||||
|
aws-region: ${{ inputs.ecr_region }}
|
||||||
|
|
||||||
|
- name: Log in to Amazon ECR (ECR only)
|
||||||
|
if: ${{ inputs.registry_type == 'ecr' }}
|
||||||
|
uses: aws-actions/amazon-ecr-login@062b18b96a7aff071d4dc91bc00c4c1a7945b076 # v2.0.1
|
||||||
|
|
||||||
|
- name: Set up Docker build tools
|
||||||
|
uses: ./.github/actions/docker-build-setup
|
||||||
|
with:
|
||||||
|
registry: ${{ inputs.registry_type == 'ghcr' && 'ghcr.io' || '' }}
|
||||||
|
setup_cosign: ${{ inputs.registry_type == 'ghcr' && 'true' || 'false' }}
|
||||||
|
skip_login_on_pr: ${{ inputs.registry_type == 'ghcr' && 'true' || 'false' }}
|
||||||
|
|
||||||
|
- name: Build ECR tag list
|
||||||
|
if: ${{ inputs.registry_type == 'ecr' }}
|
||||||
|
id: ecr-tags
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
IMAGE_TAG: ${{ steps.version.outputs.version }}
|
||||||
|
ECR_REGISTRY: ${{ inputs.ecr_registry }}
|
||||||
|
ECR_REPOSITORY: ${{ inputs.ecr_repository }}
|
||||||
|
DEPLOY_PRODUCTION: ${{ inputs.deploy_production }}
|
||||||
|
DEPLOY_STAGING: ${{ inputs.deploy_staging }}
|
||||||
|
IS_PRERELEASE: ${{ inputs.is_prerelease }}
|
||||||
|
MAKE_LATEST: ${{ inputs.make_latest }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Start with the base image tag
|
||||||
|
TAGS="${ECR_REGISTRY}/${ECR_REPOSITORY}:${IMAGE_TAG}"
|
||||||
|
|
||||||
|
# Handle automatic tagging based on release type
|
||||||
|
if [[ "${IS_PRERELEASE}" == "true" ]]; then
|
||||||
|
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:staging"
|
||||||
|
echo "Adding staging tag for prerelease"
|
||||||
|
elif [[ "${IS_PRERELEASE}" == "false" && "${MAKE_LATEST}" == "true" ]]; then
|
||||||
|
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:production"
|
||||||
|
echo "Adding production tag for stable release marked as latest"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Handle manual deployment overrides
|
||||||
|
if [[ "${DEPLOY_PRODUCTION}" == "true" ]]; then
|
||||||
|
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:production"
|
||||||
|
echo "Adding production tag (manual override)"
|
||||||
|
fi
|
||||||
|
if [[ "${DEPLOY_STAGING}" == "true" ]]; then
|
||||||
|
TAGS="${TAGS}\n${ECR_REGISTRY}/${ECR_REPOSITORY}:staging"
|
||||||
|
echo "Adding staging tag (manual override)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "ECR tags generated:"
|
||||||
|
echo -e "${TAGS}"
|
||||||
|
|
||||||
|
{
|
||||||
|
echo "tags<<EOF"
|
||||||
|
echo -e "${TAGS}"
|
||||||
|
echo "EOF"
|
||||||
|
} >> "${GITHUB_OUTPUT}"
|
||||||
|
|
||||||
|
- name: Generate additional GHCR tags for releases
|
||||||
|
if: ${{ inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'false' && (github.event_name == 'workflow_call' || github.event_name == 'release' || github.event_name == 'workflow_dispatch') }}
|
||||||
|
id: ghcr-extra-tags
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
VERSION: ${{ steps.version.outputs.version }}
|
||||||
|
IMAGE_NAME: ${{ inputs.ghcr_image_name }}
|
||||||
|
IS_PRERELEASE: ${{ inputs.is_prerelease }}
|
||||||
|
MAKE_LATEST: ${{ inputs.make_latest }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Start with base version tag
|
||||||
|
TAGS="ghcr.io/${IMAGE_NAME}:${VERSION}"
|
||||||
|
|
||||||
|
# For proper SemVer releases, add major.minor and major tags
|
||||||
|
if [[ "${VERSION}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||||
|
# Extract major and minor versions
|
||||||
|
MAJOR=$(echo "${VERSION}" | cut -d. -f1)
|
||||||
|
MINOR=$(echo "${VERSION}" | cut -d. -f2)
|
||||||
|
|
||||||
|
TAGS="${TAGS}\nghcr.io/${IMAGE_NAME}:${MAJOR}.${MINOR}"
|
||||||
|
TAGS="${TAGS}\nghcr.io/${IMAGE_NAME}:${MAJOR}"
|
||||||
|
|
||||||
|
echo "Added SemVer tags: ${MAJOR}.${MINOR}, ${MAJOR}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Add latest tag for stable releases marked as latest
|
||||||
|
if [[ "${IS_PRERELEASE}" == "false" && "${MAKE_LATEST}" == "true" ]]; then
|
||||||
|
TAGS="${TAGS}\nghcr.io/${IMAGE_NAME}:latest"
|
||||||
|
echo "Added latest tag for stable release marked as latest"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Generated GHCR tags:"
|
||||||
|
echo -e "${TAGS}"
|
||||||
|
|
||||||
|
# Debug: Show what will be passed to Docker build
|
||||||
|
echo "DEBUG: Tags for Docker build step:"
|
||||||
|
echo -e "${TAGS}"
|
||||||
|
|
||||||
|
{
|
||||||
|
echo "tags<<EOF"
|
||||||
|
echo -e "${TAGS}"
|
||||||
|
echo "EOF"
|
||||||
|
} >> "${GITHUB_OUTPUT}"
|
||||||
|
|
||||||
|
- name: Build GHCR metadata (experimental)
|
||||||
|
if: ${{ inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'true' }}
|
||||||
|
id: ghcr-meta-experimental
|
||||||
|
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||||
|
with:
|
||||||
|
images: ghcr.io/${{ inputs.ghcr_image_name }}
|
||||||
|
tags: |
|
||||||
|
type=ref,event=branch
|
||||||
|
type=raw,value=${{ steps.version.outputs.version }}
|
||||||
|
|
||||||
|
- name: Debug Docker build tags
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "=== DEBUG: Docker Build Configuration ==="
|
||||||
|
echo "Registry Type: ${{ inputs.registry_type }}"
|
||||||
|
echo "Experimental Mode: ${{ inputs.experimental_mode }}"
|
||||||
|
echo "Event Name: ${{ github.event_name }}"
|
||||||
|
echo "Is Prerelease: ${{ inputs.is_prerelease }}"
|
||||||
|
echo "Make Latest: ${{ inputs.make_latest }}"
|
||||||
|
echo "Version: ${{ steps.version.outputs.version }}"
|
||||||
|
|
||||||
|
if [[ "${{ inputs.registry_type }}" == "ecr" ]]; then
|
||||||
|
echo "ECR Tags: ${{ steps.ecr-tags.outputs.tags }}"
|
||||||
|
elif [[ "${{ inputs.experimental_mode }}" == "true" ]]; then
|
||||||
|
echo "GHCR Experimental Tags: ${{ steps.ghcr-meta-experimental.outputs.tags }}"
|
||||||
|
else
|
||||||
|
echo "GHCR Extra Tags: ${{ steps.ghcr-extra-tags.outputs.tags }}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Build and push Docker image
|
||||||
|
id: build
|
||||||
|
uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
|
||||||
|
with:
|
||||||
|
project: tw0fqmsx3c
|
||||||
|
token: ${{ env.DEPOT_PROJECT_TOKEN }}
|
||||||
|
context: ${{ inputs.context }}
|
||||||
|
file: ${{ inputs.dockerfile }}
|
||||||
|
platforms: linux/amd64,linux/arm64
|
||||||
|
push: ${{ github.event_name != 'pull_request' }}
|
||||||
|
tags: ${{ inputs.registry_type == 'ecr' && steps.ecr-tags.outputs.tags || (inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'true' && steps.ghcr-meta-experimental.outputs.tags) || (inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'false' && steps.ghcr-extra-tags.outputs.tags) || (inputs.registry_type == 'ghcr' && format('ghcr.io/{0}:{1}', inputs.ghcr_image_name, steps.version.outputs.version)) || (inputs.registry_type == 'ecr' && format('{0}/{1}:{2}', inputs.ecr_registry, inputs.ecr_repository, steps.version.outputs.version)) }}
|
||||||
|
labels: ${{ inputs.registry_type == 'ghcr' && inputs.experimental_mode == 'true' && steps.ghcr-meta-experimental.outputs.labels || '' }}
|
||||||
|
secrets: |
|
||||||
|
database_url=${{ env.DUMMY_DATABASE_URL }}
|
||||||
|
encryption_key=${{ env.DUMMY_ENCRYPTION_KEY }}
|
||||||
|
redis_url=${{ env.DUMMY_REDIS_URL }}
|
||||||
|
sentry_auth_token=${{ env.SENTRY_AUTH_TOKEN }}
|
||||||
|
env:
|
||||||
|
DEPOT_PROJECT_TOKEN: ${{ env.DEPOT_PROJECT_TOKEN }}
|
||||||
|
DUMMY_DATABASE_URL: ${{ env.DUMMY_DATABASE_URL }}
|
||||||
|
DUMMY_ENCRYPTION_KEY: ${{ env.DUMMY_ENCRYPTION_KEY }}
|
||||||
|
DUMMY_REDIS_URL: ${{ env.DUMMY_REDIS_URL }}
|
||||||
|
SENTRY_AUTH_TOKEN: ${{ env.SENTRY_AUTH_TOKEN }}
|
||||||
|
|
||||||
|
- name: Sign GHCR image (GHCR only)
|
||||||
|
if: ${{ inputs.registry_type == 'ghcr' && (github.event_name == 'workflow_call' || github.event_name == 'release' || github.event_name == 'workflow_dispatch') }}
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
TAGS: ${{ inputs.experimental_mode == 'true' && steps.ghcr-meta-experimental.outputs.tags || steps.ghcr-extra-tags.outputs.tags }}
|
||||||
|
DIGEST: ${{ steps.build.outputs.digest }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "${TAGS}" | xargs -I {} cosign sign --yes "{}@${DIGEST}"
|
||||||
|
|
||||||
|
- name: Output build summary
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
REGISTRY_TYPE: ${{ inputs.registry_type }}
|
||||||
|
IMAGE_TAG: ${{ steps.version.outputs.version }}
|
||||||
|
VERSION_SOURCE: ${{ steps.version.outputs.source }}
|
||||||
|
run: |
|
||||||
|
echo "SUCCESS: Built and pushed Docker image to $REGISTRY_TYPE"
|
||||||
|
echo "Image Tag: $IMAGE_TAG (source: $VERSION_SOURCE)"
|
||||||
|
if [[ "$REGISTRY_TYPE" == "ecr" ]]; then
|
||||||
|
echo "ECR Registry: ${{ inputs.ecr_registry }}"
|
||||||
|
echo "ECR Repository: ${{ inputs.ecr_repository }}"
|
||||||
|
else
|
||||||
|
echo "GHCR Image: ghcr.io/${{ inputs.ghcr_image_name }}"
|
||||||
|
fi
|
||||||
19
.github/actions/cache-build-web/action.yml
vendored
@@ -8,6 +8,14 @@ on:
|
|||||||
required: false
|
required: false
|
||||||
default: "0"
|
default: "0"
|
||||||
|
|
||||||
|
inputs:
|
||||||
|
turbo_token:
|
||||||
|
description: "Turborepo token"
|
||||||
|
required: false
|
||||||
|
turbo_team:
|
||||||
|
description: "Turborepo team"
|
||||||
|
required: false
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
steps:
|
steps:
|
||||||
@@ -41,7 +49,7 @@ runs:
|
|||||||
if: steps.cache-build.outputs.cache-hit != 'true'
|
if: steps.cache-build.outputs.cache-hit != 'true'
|
||||||
|
|
||||||
- name: Install pnpm
|
- name: Install pnpm
|
||||||
uses: pnpm/action-setup@v4
|
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||||
if: steps.cache-build.outputs.cache-hit != 'true'
|
if: steps.cache-build.outputs.cache-hit != 'true'
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
@@ -54,13 +62,18 @@ runs:
|
|||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
- name: Fill ENCRYPTION_KEY, ENTERPRISE_LICENSE_KEY and E2E_TESTING in .env
|
- name: Fill ENCRYPTION_KEY, ENTERPRISE_LICENSE_KEY and E2E_TESTING in .env
|
||||||
|
env:
|
||||||
|
E2E_TESTING_MODE: ${{ inputs.e2e_testing_mode }}
|
||||||
run: |
|
run: |
|
||||||
RANDOM_KEY=$(openssl rand -hex 32)
|
RANDOM_KEY=$(openssl rand -hex 32)
|
||||||
echo "E2E_TESTING=${{ inputs.e2e_testing_mode }}" >> .env
|
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
|
||||||
|
echo "E2E_TESTING=$E2E_TESTING_MODE" >> .env
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
- run: |
|
- run: |
|
||||||
pnpm build --filter=@formbricks/web...
|
pnpm build --filter=@formbricks/web...
|
||||||
|
|
||||||
if: steps.cache-build.outputs.cache-hit != 'true'
|
if: steps.cache-build.outputs.cache-hit != 'true'
|
||||||
shell: bash
|
shell: bash
|
||||||
|
env:
|
||||||
|
TURBO_TOKEN: ${{ inputs.turbo_token }}
|
||||||
|
TURBO_TEAM: ${{ inputs.turbo_team }}
|
||||||
|
|||||||
106
.github/actions/docker-build-setup/action.yml
vendored
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
name: Docker Build Setup
|
||||||
|
description: |
|
||||||
|
Sets up common Docker build tools and authentication with security validation.
|
||||||
|
|
||||||
|
Security Features:
|
||||||
|
- Registry URL validation
|
||||||
|
- Input sanitization
|
||||||
|
- Conditional setup based on event type
|
||||||
|
- Post-setup verification
|
||||||
|
|
||||||
|
Supports Depot CLI, Cosign signing, and Docker registry authentication.
|
||||||
|
|
||||||
|
inputs:
|
||||||
|
registry:
|
||||||
|
description: "Docker registry hostname to login to (e.g., ghcr.io, registry.example.com:5000). No paths allowed."
|
||||||
|
required: false
|
||||||
|
default: "ghcr.io"
|
||||||
|
setup_cosign:
|
||||||
|
description: "Whether to install cosign for image signing"
|
||||||
|
required: false
|
||||||
|
default: "true"
|
||||||
|
skip_login_on_pr:
|
||||||
|
description: "Whether to skip registry login on pull requests"
|
||||||
|
required: false
|
||||||
|
default: "true"
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: "composite"
|
||||||
|
steps:
|
||||||
|
- name: Validate inputs
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
REGISTRY: ${{ inputs.registry }}
|
||||||
|
SETUP_COSIGN: ${{ inputs.setup_cosign }}
|
||||||
|
SKIP_LOGIN_ON_PR: ${{ inputs.skip_login_on_pr }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Security: Validate registry input - must be hostname[:port] only, no paths
|
||||||
|
# Allow empty registry for cases where login is handled externally (e.g., ECR)
|
||||||
|
if [[ -n "$REGISTRY" ]]; then
|
||||||
|
if [[ "$REGISTRY" =~ / ]]; then
|
||||||
|
echo "ERROR: Invalid registry format: $REGISTRY"
|
||||||
|
echo "Registry must be host[:port] with no path (e.g., 'ghcr.io' or 'registry.example.com:5000')"
|
||||||
|
echo "Path components like 'ghcr.io/org' are not allowed as they break docker login"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Validate hostname with optional port format
|
||||||
|
if [[ ! "$REGISTRY" =~ ^[a-zA-Z0-9.-]+(\:[0-9]+)?$ ]]; then
|
||||||
|
echo "ERROR: Invalid registry hostname format: $REGISTRY"
|
||||||
|
echo "Registry must be a valid hostname optionally with port (e.g., 'ghcr.io' or 'registry.example.com:5000')"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Validate boolean inputs
|
||||||
|
if [[ "$SETUP_COSIGN" != "true" && "$SETUP_COSIGN" != "false" ]]; then
|
||||||
|
echo "ERROR: setup_cosign must be 'true' or 'false', got: $SETUP_COSIGN"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$SKIP_LOGIN_ON_PR" != "true" && "$SKIP_LOGIN_ON_PR" != "false" ]]; then
|
||||||
|
echo "ERROR: skip_login_on_pr must be 'true' or 'false', got: $SKIP_LOGIN_ON_PR"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "SUCCESS: Input validation passed"
|
||||||
|
|
||||||
|
- name: Set up Depot CLI
|
||||||
|
uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
|
||||||
|
|
||||||
|
- name: Install cosign
|
||||||
|
# Install cosign when requested AND when we might actually sign images
|
||||||
|
# (i.e., non-PR contexts or when we login on PRs)
|
||||||
|
if: ${{ inputs.setup_cosign == 'true' && (inputs.skip_login_on_pr == 'false' || github.event_name != 'pull_request') }}
|
||||||
|
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
|
||||||
|
|
||||||
|
- name: Log into registry
|
||||||
|
if: ${{ inputs.registry != '' && (inputs.skip_login_on_pr == 'false' || github.event_name != 'pull_request') }}
|
||||||
|
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||||
|
with:
|
||||||
|
registry: ${{ inputs.registry }}
|
||||||
|
username: ${{ github.actor }}
|
||||||
|
password: ${{ github.token }}
|
||||||
|
|
||||||
|
- name: Verify setup completion
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Verify Depot CLI is available
|
||||||
|
if ! command -v depot >/dev/null 2>&1; then
|
||||||
|
echo "ERROR: Depot CLI not found in PATH"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Verify cosign if it should be installed (same conditions as install step)
|
||||||
|
if [[ "${{ inputs.setup_cosign }}" == "true" ]] && [[ "${{ inputs.skip_login_on_pr }}" == "false" || "${{ github.event_name }}" != "pull_request" ]]; then
|
||||||
|
if ! command -v cosign >/dev/null 2>&1; then
|
||||||
|
echo "ERROR: Cosign not found in PATH despite being requested"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "SUCCESS: Docker build setup completed successfully"
|
||||||
192
.github/actions/resolve-docker-version/action.yml
vendored
Normal file
@@ -0,0 +1,192 @@
|
|||||||
|
name: Resolve Docker Version
|
||||||
|
description: |
|
||||||
|
Resolves and validates Docker-compatible SemVer versions for container builds with comprehensive security.
|
||||||
|
|
||||||
|
Security Features:
|
||||||
|
- Command injection protection
|
||||||
|
- Input sanitization and validation
|
||||||
|
- Docker tag character restrictions
|
||||||
|
- Length limits and boundary checks
|
||||||
|
- Safe branch name handling
|
||||||
|
|
||||||
|
Supports multiple modes: release, manual override, branch auto-detection, and experimental timestamped versions.
|
||||||
|
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: "Explicit version (SemVer only, e.g., 1.2.3-beta). If provided, this version is used directly. If empty, version is auto-generated from branch name."
|
||||||
|
required: false
|
||||||
|
current_branch:
|
||||||
|
description: "Current branch name for auto-detection"
|
||||||
|
required: true
|
||||||
|
experimental_mode:
|
||||||
|
description: "Enable experimental mode with timestamp-based versions"
|
||||||
|
required: false
|
||||||
|
default: "false"
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
version:
|
||||||
|
description: "Resolved Docker-compatible SemVer version"
|
||||||
|
value: ${{ steps.resolve.outputs.version }}
|
||||||
|
source:
|
||||||
|
description: "Source of version (release|override|branch)"
|
||||||
|
value: ${{ steps.resolve.outputs.source }}
|
||||||
|
normalized:
|
||||||
|
description: "Whether the version was normalized (true/false)"
|
||||||
|
value: ${{ steps.resolve.outputs.normalized }}
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: "composite"
|
||||||
|
steps:
|
||||||
|
- name: Resolve and validate Docker version
|
||||||
|
id: resolve
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
EXPLICIT_VERSION: ${{ inputs.version }}
|
||||||
|
CURRENT_BRANCH: ${{ inputs.current_branch }}
|
||||||
|
EXPERIMENTAL_MODE: ${{ inputs.experimental_mode }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Function to validate SemVer format (Docker-compatible, no '+' build metadata)
|
||||||
|
validate_semver() {
|
||||||
|
local version="$1"
|
||||||
|
local context="$2"
|
||||||
|
|
||||||
|
if [[ ! "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]]; then
|
||||||
|
echo "ERROR: Invalid $context format. Must be semver without build metadata (e.g., 1.2.3, 1.2.3-alpha)"
|
||||||
|
echo "Provided: $version"
|
||||||
|
echo "Note: Docker tags cannot contain '+' characters. Use prerelease identifiers instead."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to generate branch-based version
|
||||||
|
generate_branch_version() {
|
||||||
|
local branch="$1"
|
||||||
|
local use_timestamp="${2:-true}"
|
||||||
|
local timestamp
|
||||||
|
|
||||||
|
if [[ "$use_timestamp" == "true" ]]; then
|
||||||
|
timestamp=$(date +%s)
|
||||||
|
else
|
||||||
|
timestamp=""
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Sanitize branch name for Docker compatibility
|
||||||
|
local sanitized_branch=$(echo "$branch" | sed 's/[^a-zA-Z0-9.-]/-/g' | sed 's/--*/-/g' | sed 's/^-\|-$//g')
|
||||||
|
|
||||||
|
# Additional safety: truncate if too long (reserve space for prefix and timestamp)
|
||||||
|
if (( ${#sanitized_branch} > 80 )); then
|
||||||
|
sanitized_branch="${sanitized_branch:0:80}"
|
||||||
|
echo "INFO: Branch name truncated for Docker compatibility" >&2
|
||||||
|
fi
|
||||||
|
local version
|
||||||
|
|
||||||
|
# Generate version based on branch name (unified approach)
|
||||||
|
# All branches get alpha versions with sanitized branch name
|
||||||
|
if [[ -n "$timestamp" ]]; then
|
||||||
|
version="0.0.0-alpha-$sanitized_branch-$timestamp"
|
||||||
|
echo "INFO: Branch '$branch' detected - alpha version: $version" >&2
|
||||||
|
else
|
||||||
|
version="0.0.0-alpha-$sanitized_branch"
|
||||||
|
echo "INFO: Branch '$branch' detected - alpha version: $version" >&2
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "$version"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# Input validation and sanitization
|
||||||
|
if [[ -z "$CURRENT_BRANCH" ]]; then
|
||||||
|
echo "ERROR: current_branch input is required"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Security: Validate inputs to prevent command injection
|
||||||
|
# Use grep to check for dangerous characters (more reliable than bash regex)
|
||||||
|
validate_input() {
|
||||||
|
local input="$1"
|
||||||
|
local name="$2"
|
||||||
|
|
||||||
|
# Check for dangerous characters using grep
|
||||||
|
if echo "$input" | grep -q '[;|&`$(){}\\[:space:]]'; then
|
||||||
|
echo "ERROR: $name contains potentially dangerous characters: $input"
|
||||||
|
echo "Input should only contain letters, numbers, hyphens, underscores, dots, and forward slashes"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
# Validate current branch
|
||||||
|
if ! validate_input "$CURRENT_BRANCH" "Branch name"; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Validate explicit version if provided
|
||||||
|
if [[ -n "$EXPLICIT_VERSION" ]] && ! validate_input "$EXPLICIT_VERSION" "Explicit version"; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Main resolution logic (ultra-simplified)
|
||||||
|
NORMALIZED="false"
|
||||||
|
|
||||||
|
if [[ -n "$EXPLICIT_VERSION" ]]; then
|
||||||
|
# Use provided explicit version (from either workflow_call or manual input)
|
||||||
|
validate_semver "$EXPLICIT_VERSION" "explicit version"
|
||||||
|
|
||||||
|
# Normalize to lowercase for Docker/ECR compatibility
|
||||||
|
RESOLVED_VERSION="${EXPLICIT_VERSION,,}"
|
||||||
|
if [[ "$EXPLICIT_VERSION" != "$RESOLVED_VERSION" ]]; then
|
||||||
|
NORMALIZED="true"
|
||||||
|
echo "INFO: Original version contained uppercase characters, normalized: $EXPLICIT_VERSION -> $RESOLVED_VERSION"
|
||||||
|
fi
|
||||||
|
|
||||||
|
SOURCE="explicit"
|
||||||
|
echo "INFO: Using explicit version: $RESOLVED_VERSION"
|
||||||
|
|
||||||
|
else
|
||||||
|
# Auto-generate version from branch name
|
||||||
|
if [[ "$EXPERIMENTAL_MODE" == "true" ]]; then
|
||||||
|
# Use timestamped version generation
|
||||||
|
echo "INFO: Experimental mode: generating timestamped version from branch: $CURRENT_BRANCH"
|
||||||
|
RESOLVED_VERSION=$(generate_branch_version "$CURRENT_BRANCH" "true")
|
||||||
|
SOURCE="experimental"
|
||||||
|
else
|
||||||
|
# Standard branch version (no timestamp)
|
||||||
|
echo "INFO: Auto-detecting version from branch: $CURRENT_BRANCH"
|
||||||
|
RESOLVED_VERSION=$(generate_branch_version "$CURRENT_BRANCH" "false")
|
||||||
|
SOURCE="branch"
|
||||||
|
fi
|
||||||
|
echo "Generated version: $RESOLVED_VERSION"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Final validation - ensure result is valid Docker tag
|
||||||
|
if [[ -z "$RESOLVED_VERSION" ]]; then
|
||||||
|
echo "ERROR: Failed to resolve version"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if (( ${#RESOLVED_VERSION} > 128 )); then
|
||||||
|
echo "ERROR: Version must be at most 128 characters (Docker limitation)"
|
||||||
|
echo "Generated version: $RESOLVED_VERSION (${#RESOLVED_VERSION} chars)"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ! "$RESOLVED_VERSION" =~ ^[a-z0-9._-]+$ ]]; then
|
||||||
|
echo "ERROR: Version contains invalid characters for Docker tags"
|
||||||
|
echo "Version: $RESOLVED_VERSION"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$RESOLVED_VERSION" =~ ^[.-] || "$RESOLVED_VERSION" =~ [.-]$ ]]; then
|
||||||
|
echo "ERROR: Version must not start or end with '.' or '-'"
|
||||||
|
echo "Version: $RESOLVED_VERSION"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Output results
|
||||||
|
echo "SUCCESS: Resolved Docker version: $RESOLVED_VERSION (source: $SOURCE)"
|
||||||
|
echo "version=$RESOLVED_VERSION" >> $GITHUB_OUTPUT
|
||||||
|
echo "source=$SOURCE" >> $GITHUB_OUTPUT
|
||||||
|
echo "normalized=$NORMALIZED" >> $GITHUB_OUTPUT
|
||||||
160
.github/actions/update-package-version/action.yml
vendored
Normal file
@@ -0,0 +1,160 @@
|
|||||||
|
name: Update Package Version
|
||||||
|
description: |
|
||||||
|
Safely updates package.json version with comprehensive validation and atomic operations.
|
||||||
|
|
||||||
|
Security Features:
|
||||||
|
- Path traversal protection
|
||||||
|
- SemVer validation with length limits
|
||||||
|
- Atomic file operations with backup/recovery
|
||||||
|
- JSON validation before applying changes
|
||||||
|
|
||||||
|
This action is designed to be secure by default and prevent common attack vectors.
|
||||||
|
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: "Version to set in package.json (must be valid SemVer)"
|
||||||
|
required: true
|
||||||
|
package_path:
|
||||||
|
description: "Path to package.json file"
|
||||||
|
required: false
|
||||||
|
default: "./apps/web/package.json"
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
updated_version:
|
||||||
|
description: "The version that was actually set in package.json"
|
||||||
|
value: ${{ steps.update.outputs.updated_version }}
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: "composite"
|
||||||
|
steps:
|
||||||
|
- name: Update and verify package.json version
|
||||||
|
id: update
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
VERSION: ${{ inputs.version }}
|
||||||
|
PACKAGE_PATH: ${{ inputs.package_path }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Validate inputs
|
||||||
|
if [[ -z "$VERSION" ]]; then
|
||||||
|
echo "ERROR: version input is required"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Security: Validate package_path to prevent path traversal attacks
|
||||||
|
# Only allow paths within the workspace and must end with package.json
|
||||||
|
if [[ "$PACKAGE_PATH" =~ \.\./|^/|^~ ]]; then
|
||||||
|
echo "ERROR: Invalid package path - path traversal detected: $PACKAGE_PATH"
|
||||||
|
echo "Package path must be relative to workspace root and cannot contain '../', start with '/', or '~'"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ! "$PACKAGE_PATH" =~ package\.json$ ]]; then
|
||||||
|
echo "ERROR: Package path must end with 'package.json': $PACKAGE_PATH"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Resolve to absolute path within workspace for additional security
|
||||||
|
WORKSPACE_ROOT="${GITHUB_WORKSPACE:-$(pwd)}"
|
||||||
|
|
||||||
|
# Use realpath to resolve both paths and handle symlinks properly
|
||||||
|
WORKSPACE_ROOT=$(realpath "$WORKSPACE_ROOT")
|
||||||
|
RESOLVED_PATH=$(realpath "${WORKSPACE_ROOT}/${PACKAGE_PATH}")
|
||||||
|
|
||||||
|
# Ensure WORKSPACE_ROOT has a trailing slash for proper prefix matching
|
||||||
|
WORKSPACE_ROOT="${WORKSPACE_ROOT}/"
|
||||||
|
|
||||||
|
# Use shell string matching to ensure RESOLVED_PATH is within workspace
|
||||||
|
# This is more secure than regex and handles edge cases properly
|
||||||
|
if [[ "$RESOLVED_PATH" != "$WORKSPACE_ROOT"* ]]; then
|
||||||
|
echo "ERROR: Resolved path is outside workspace: $RESOLVED_PATH"
|
||||||
|
echo "Workspace root: $WORKSPACE_ROOT"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ! -f "$RESOLVED_PATH" ]]; then
|
||||||
|
echo "ERROR: package.json not found at: $RESOLVED_PATH"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Use resolved path for operations
|
||||||
|
PACKAGE_PATH="$RESOLVED_PATH"
|
||||||
|
|
||||||
|
# Validate SemVer format with additional security checks
|
||||||
|
if [[ ${#VERSION} -gt 128 ]]; then
|
||||||
|
echo "ERROR: Version string too long (${#VERSION} chars, max 128): $VERSION"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ! "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]]; then
|
||||||
|
echo "ERROR: Invalid SemVer format: $VERSION"
|
||||||
|
echo "Expected format: MAJOR.MINOR.PATCH[-PRERELEASE]"
|
||||||
|
echo "Only alphanumeric characters, dots, and hyphens allowed in prerelease"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Additional validation: Check for reasonable version component sizes
|
||||||
|
# Extract base version (MAJOR.MINOR.PATCH) without prerelease/build metadata
|
||||||
|
if [[ "$VERSION" =~ ^([0-9]+\.[0-9]+\.[0-9]+) ]]; then
|
||||||
|
BASE_VERSION="${BASH_REMATCH[1]}"
|
||||||
|
else
|
||||||
|
echo "ERROR: Could not extract base version from: $VERSION"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Split version components safely
|
||||||
|
IFS='.' read -ra VERSION_PARTS <<< "$BASE_VERSION"
|
||||||
|
|
||||||
|
# Validate component sizes (should have exactly 3 parts due to regex above)
|
||||||
|
if (( ${VERSION_PARTS[0]} > 999 || ${VERSION_PARTS[1]} > 999 || ${VERSION_PARTS[2]} > 999 )); then
|
||||||
|
echo "ERROR: Version components too large (max 999 each): $VERSION"
|
||||||
|
echo "Components: ${VERSION_PARTS[0]}.${VERSION_PARTS[1]}.${VERSION_PARTS[2]}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Updating package.json version to: $VERSION"
|
||||||
|
|
||||||
|
# Create backup for atomic operations
|
||||||
|
BACKUP_PATH="${PACKAGE_PATH}.backup.$$"
|
||||||
|
cp "$PACKAGE_PATH" "$BACKUP_PATH"
|
||||||
|
|
||||||
|
# Use jq to safely update the version field with error handling
|
||||||
|
if ! jq --arg version "$VERSION" '.version = $version' "$PACKAGE_PATH" > "${PACKAGE_PATH}.tmp"; then
|
||||||
|
echo "ERROR: jq failed to process package.json"
|
||||||
|
rm -f "${PACKAGE_PATH}.tmp" "$BACKUP_PATH"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Validate the generated JSON before applying changes
|
||||||
|
if ! jq empty "${PACKAGE_PATH}.tmp" 2>/dev/null; then
|
||||||
|
echo "ERROR: Generated invalid JSON"
|
||||||
|
rm -f "${PACKAGE_PATH}.tmp" "$BACKUP_PATH"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Atomic move operation
|
||||||
|
if ! mv "${PACKAGE_PATH}.tmp" "$PACKAGE_PATH"; then
|
||||||
|
echo "ERROR: Failed to update package.json"
|
||||||
|
# Restore backup
|
||||||
|
mv "$BACKUP_PATH" "$PACKAGE_PATH"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Verify the update was successful
|
||||||
|
UPDATED_VERSION=$(jq -r '.version' "$PACKAGE_PATH" 2>/dev/null)
|
||||||
|
|
||||||
|
if [[ "$UPDATED_VERSION" != "$VERSION" ]]; then
|
||||||
|
echo "ERROR: Version update failed!"
|
||||||
|
echo "Expected: $VERSION"
|
||||||
|
echo "Actual: $UPDATED_VERSION"
|
||||||
|
# Restore backup
|
||||||
|
mv "$BACKUP_PATH" "$PACKAGE_PATH"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Clean up backup on success
|
||||||
|
rm -f "$BACKUP_PATH"
|
||||||
|
|
||||||
|
echo "SUCCESS: Updated package.json version to: $UPDATED_VERSION"
|
||||||
|
echo "updated_version=$UPDATED_VERSION" >> $GITHUB_OUTPUT
|
||||||
82
.github/workflows/apply-issue-labels-to-pr.yml
vendored
@@ -1,82 +0,0 @@
|
|||||||
name: "Apply issue labels to PR"
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request_target:
|
|
||||||
types:
|
|
||||||
- opened
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
label_on_pr:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: none
|
|
||||||
issues: read
|
|
||||||
pull-requests: write
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Harden the runner (Audit all outbound calls)
|
|
||||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
|
||||||
with:
|
|
||||||
egress-policy: audit
|
|
||||||
|
|
||||||
- name: Apply labels from linked issue to PR
|
|
||||||
uses: actions/github-script@211cb3fefb35a799baa5156f9321bb774fe56294 # v5.2.0
|
|
||||||
with:
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
script: |
|
|
||||||
async function getLinkedIssues(owner, repo, prNumber) {
|
|
||||||
const query = `query GetLinkedIssues($owner: String!, $repo: String!, $prNumber: Int!) {
|
|
||||||
repository(owner: $owner, name: $repo) {
|
|
||||||
pullRequest(number: $prNumber) {
|
|
||||||
closingIssuesReferences(first: 10) {
|
|
||||||
nodes {
|
|
||||||
number
|
|
||||||
labels(first: 10) {
|
|
||||||
nodes {
|
|
||||||
name
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}`;
|
|
||||||
|
|
||||||
const variables = {
|
|
||||||
owner: owner,
|
|
||||||
repo: repo,
|
|
||||||
prNumber: prNumber,
|
|
||||||
};
|
|
||||||
|
|
||||||
const result = await github.graphql(query, variables);
|
|
||||||
return result.repository.pullRequest.closingIssuesReferences.nodes;
|
|
||||||
}
|
|
||||||
|
|
||||||
const pr = context.payload.pull_request;
|
|
||||||
const linkedIssues = await getLinkedIssues(
|
|
||||||
context.repo.owner,
|
|
||||||
context.repo.repo,
|
|
||||||
pr.number
|
|
||||||
);
|
|
||||||
|
|
||||||
const labelsToAdd = new Set();
|
|
||||||
for (const issue of linkedIssues) {
|
|
||||||
if (issue.labels && issue.labels.nodes) {
|
|
||||||
for (const label of issue.labels.nodes) {
|
|
||||||
labelsToAdd.add(label.name);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (labelsToAdd.size) {
|
|
||||||
await github.rest.issues.addLabels({
|
|
||||||
owner: context.repo.owner,
|
|
||||||
repo: context.repo.repo,
|
|
||||||
issue_number: pr.number,
|
|
||||||
labels: Array.from(labelsToAdd),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
94
.github/workflows/build-and-push-ecr.yml
vendored
Normal file
@@ -0,0 +1,94 @@
name: Build Cloud Deployment Images

# This workflow builds Formbricks Docker images for ECR deployment:
# - workflow_call: Used by releases with explicit SemVer versions
# - workflow_dispatch: Auto-detects version from current branch or uses override

on:
  workflow_dispatch:
    inputs:
      version_override:
        description: "Override version (SemVer only, e.g., 1.2.3). Leave empty to auto-detect from branch."
        required: false
        type: string
      deploy_production:
        description: "Tag image for production deployment"
        required: false
        default: false
        type: boolean
      deploy_staging:
        description: "Tag image for staging deployment"
        required: false
        default: false
        type: boolean
  workflow_call:
    inputs:
      image_tag:
        description: "Image tag to push (required for workflow_call)"
        required: true
        type: string
      IS_PRERELEASE:
        description: "Whether this is a prerelease (auto-tags for staging/production)"
        required: false
        type: boolean
        default: false
      MAKE_LATEST:
        description: "Whether to tag for production (from GitHub release 'Set as the latest release' option)"
        required: false
        type: boolean
        default: false
    outputs:
      IMAGE_TAG:
        description: "Normalized image tag used for the build"
        value: ${{ jobs.build-and-push.outputs.IMAGE_TAG }}
      TAGS:
        description: "Newline-separated list of ECR tags pushed"
        value: ${{ jobs.build-and-push.outputs.TAGS }}

permissions:
  contents: read
  id-token: write

env:
  ECR_REGION: ${{ vars.ECR_REGION }}
  # ECR settings are sourced from repository/environment variables for portability across envs/forks
  ECR_REGISTRY: ${{ vars.ECR_REGISTRY }}
  ECR_REPOSITORY: ${{ vars.ECR_REPOSITORY }}

jobs:
  build-and-push:
    name: Build and Push
    runs-on: ubuntu-latest
    timeout-minutes: 45
    outputs:
      IMAGE_TAG: ${{ steps.build.outputs.image_tag }}
      TAGS: ${{ steps.build.outputs.registry_tags }}
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
        with:
          egress-policy: audit

      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Build and push cloud deployment image
        id: build
        uses: ./.github/actions/build-and-push-docker
        with:
          registry_type: "ecr"
          ecr_registry: ${{ env.ECR_REGISTRY }}
          ecr_repository: ${{ env.ECR_REPOSITORY }}
          ecr_region: ${{ env.ECR_REGION }}
          aws_role_arn: ${{ secrets.AWS_ECR_PUSH_ROLE_ARN }}
          version: ${{ inputs.version_override || inputs.image_tag }}
          deploy_production: ${{ inputs.deploy_production }}
          deploy_staging: ${{ inputs.deploy_staging }}
          is_prerelease: ${{ inputs.IS_PRERELEASE }}
          make_latest: ${{ inputs.MAKE_LATEST }}
        env:
          DEPOT_PROJECT_TOKEN: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          DUMMY_DATABASE_URL: ${{ secrets.DUMMY_DATABASE_URL }}
          DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
          DUMMY_REDIS_URL: ${{ secrets.DUMMY_REDIS_URL }}
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
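Because this workflow exposes a workflow_call trigger with an image_tag input and IMAGE_TAG/TAGS outputs, release automation can invoke it as a reusable workflow. A minimal caller sketch follows; the job name and literal tag value are illustrative rather than taken from this diff, though the formbricks-release.yml changes later in this compare call it in essentially this shape.

jobs:
  docker-build-cloud:
    permissions:
      contents: read
      id-token: write
    uses: ./.github/workflows/build-and-push-ecr.yml
    secrets: inherit
    with:
      image_tag: "1.2.3"   # clean SemVer, as the input description requires
      IS_PRERELEASE: false
      MAKE_LATEST: true

The id-token: write permission matters here because the called workflow is given AWS_ECR_PUSH_ROLE_ARN, which suggests it authenticates to ECR by assuming that role via OIDC rather than with static credentials.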
8
.github/workflows/build-web.yml
vendored
@@ -4,7 +4,7 @@ on:

 permissions:
   contents: read

 jobs:
   build:
     name: Build Formbricks-web
@@ -13,11 +13,11 @@ jobs:
     steps:
       - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
+        uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
         with:
           egress-policy: audit

-      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - uses: ./.github/actions/dangerous-git-checkout

       - name: Build & Cache Web Binaries
@@ -25,3 +25,5 @@ jobs:
         id: cache-build-web
         with:
           e2e_testing_mode: "0"
+          turbo_token: ${{ secrets.TURBO_TOKEN }}
+          turbo_team: ${{ vars.TURBO_TEAM }}
9
.github/workflows/chromatic.yml
vendored
@@ -6,13 +6,20 @@ on:
       - main
   workflow_dispatch:

+permissions:
+  contents: read

 jobs:
   chromatic:
     name: Run Chromatic
     runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      id-token: write
+      actions: read
     steps:
       - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
+        uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
         with:
           egress-policy: audit

33
.github/workflows/cron-surveyStatusUpdate.yml
vendored
@@ -1,33 +0,0 @@
name: Cron - Survey status update

on:
  workflow_dispatch:
  # "Scheduled workflows run on the latest commit on the default or base branch."
  # — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
  schedule:
    # Runs "At 00:00." (see https://crontab.guru)
    - cron: "0 0 * * *"

permissions:
  contents: read

jobs:
  cron-weeklySummary:
    env:
      APP_URL: ${{ secrets.APP_URL }}
      CRON_SECRET: ${{ secrets.CRON_SECRET }}
    runs-on: ubuntu-latest
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
        with:
          egress-policy: audit

      - name: cURL request
        if: ${{ env.APP_URL && env.CRON_SECRET }}
        run: |
          curl ${{ env.APP_URL }}/api/cron/survey-status \
          -X POST \
          -H 'content-type: application/json' \
          -H 'x-api-key: ${{ env.CRON_SECRET }}' \
          --fail
33
.github/workflows/cron-weeklySummary.yml
vendored
@@ -1,33 +0,0 @@
name: Cron - Weekly summary

on:
  workflow_dispatch:
  # "Scheduled workflows run on the latest commit on the default or base branch."
  # — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
  schedule:
    # Runs “At 08:00 on Monday.” (see https://crontab.guru)
    - cron: "0 8 * * 1"
permissions:
  contents: read

jobs:
  cron-weeklySummary:
    permissions:
      contents: read
    env:
      APP_URL: ${{ secrets.APP_URL }}
      CRON_SECRET: ${{ secrets.CRON_SECRET }}
    runs-on: ubuntu-latest
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481
        with:
          egress-policy: audit
      - name: cURL request
        if: ${{ env.APP_URL && env.CRON_SECRET }}
        run: |
          curl ${{ env.APP_URL }}/api/cron/weekly-summary \
          -X POST \
          -H 'content-type: application/json' \
          -H 'x-api-key: ${{ env.CRON_SECRET }}' \
          --fail
27
.github/workflows/dependency-review.yml
vendored
@@ -1,27 +0,0 @@
# Dependency Review Action
#
# This Action will scan dependency manifest files that change as part of a Pull Request,
# surfacing known-vulnerable versions of the packages declared or updated in the PR.
# Once installed, if the workflow run is marked as required,
# PRs introducing known-vulnerable packages will be blocked from merging.
#
# Source repository: https://github.com/actions/dependency-review-action
name: 'Dependency Review'
on: [pull_request]

permissions:
  contents: read

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
        with:
          egress-policy: audit

      - name: 'Checkout Repository'
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
      - name: 'Dependency Review'
        uses: actions/dependency-review-action@3b139cfc5fae8b618d3eae3675e383bb1769c019 # v4.5.0
107
.github/workflows/deploy-formbricks-cloud.yml
vendored
@@ -4,39 +4,63 @@ on:
|
|||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
VERSION:
|
VERSION:
|
||||||
description: 'The version of the Docker image to release'
|
description: "The version of the Docker image to release (clean SemVer, e.g., 1.2.3)"
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
REPOSITORY:
|
REPOSITORY:
|
||||||
description: 'The repository to use for the Docker image'
|
description: "The repository to use for the Docker image"
|
||||||
required: false
|
required: false
|
||||||
type: string
|
type: string
|
||||||
default: 'ghcr.io/formbricks/formbricks'
|
default: "ghcr.io/formbricks/formbricks"
|
||||||
|
ENVIRONMENT:
|
||||||
|
description: "The environment to deploy to"
|
||||||
|
required: true
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- staging
|
||||||
|
- production
|
||||||
workflow_call:
|
workflow_call:
|
||||||
inputs:
|
inputs:
|
||||||
VERSION:
|
VERSION:
|
||||||
description: 'The version of the Docker image to release'
|
description: "The version of the Docker image to release"
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
REPOSITORY:
|
REPOSITORY:
|
||||||
description: 'The repository to use for the Docker image'
|
description: "The repository to use for the Docker image"
|
||||||
required: false
|
required: false
|
||||||
type: string
|
type: string
|
||||||
default: 'ghcr.io/formbricks/formbricks'
|
default: "ghcr.io/formbricks/formbricks"
|
||||||
|
ENVIRONMENT:
|
||||||
|
description: "The environment to deploy to"
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
id-token: write
|
id-token: write
|
||||||
contents: write
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
helmfile-deploy:
|
helmfile-deploy:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
|
- name: Harden the runner (Audit all outbound calls)
|
||||||
|
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||||
|
with:
|
||||||
|
egress-policy: audit
|
||||||
|
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||||
|
|
||||||
|
- name: Tailscale
|
||||||
|
uses: tailscale/github-action@84a3f23bb4d843bcf4da6cf824ec1be473daf4de # v3.2.3
|
||||||
|
with:
|
||||||
|
oauth-client-id: ${{ secrets.TS_OAUTH_CLIENT_ID }}
|
||||||
|
oauth-secret: ${{ secrets.TS_OAUTH_SECRET }}
|
||||||
|
tags: tag:github
|
||||||
|
args: --accept-routes
|
||||||
|
|
||||||
- name: Configure AWS Credentials
|
- name: Configure AWS Credentials
|
||||||
uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
|
uses: aws-actions/configure-aws-credentials@f24d7193d98baebaeacc7e2227925dd47cc267f5 # v4.2.0
|
||||||
with:
|
with:
|
||||||
role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }}
|
role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }}
|
||||||
aws-region: "eu-central-1"
|
aws-region: "eu-central-1"
|
||||||
@@ -47,7 +71,9 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
AWS_REGION: eu-central-1
|
AWS_REGION: eu-central-1
|
||||||
|
|
||||||
- uses: helmfile/helmfile-action@v2
|
- uses: helmfile/helmfile-action@712000e3d4e28c72778ecc53857746082f555ef3 # v2.0.4
|
||||||
|
name: Deploy Formbricks Cloud Production
|
||||||
|
if: inputs.ENVIRONMENT == 'production'
|
||||||
env:
|
env:
|
||||||
VERSION: ${{ inputs.VERSION }}
|
VERSION: ${{ inputs.VERSION }}
|
||||||
REPOSITORY: ${{ inputs.REPOSITORY }}
|
REPOSITORY: ${{ inputs.REPOSITORY }}
|
||||||
@@ -55,10 +81,69 @@ jobs:
|
|||||||
FORMBRICKS_INGRESS_CERT_ARN: ${{ secrets.FORMBRICKS_INGRESS_CERT_ARN }}
|
FORMBRICKS_INGRESS_CERT_ARN: ${{ secrets.FORMBRICKS_INGRESS_CERT_ARN }}
|
||||||
FORMBRICKS_ROLE_ARN: ${{ secrets.FORMBRICKS_ROLE_ARN }}
|
FORMBRICKS_ROLE_ARN: ${{ secrets.FORMBRICKS_ROLE_ARN }}
|
||||||
with:
|
with:
|
||||||
|
helmfile-version: "v1.0.0"
|
||||||
helm-plugins: >
|
helm-plugins: >
|
||||||
https://github.com/databus23/helm-diff,
|
https://github.com/databus23/helm-diff,
|
||||||
https://github.com/jkroepke/helm-secrets
|
https://github.com/jkroepke/helm-secrets
|
||||||
helmfile-args: apply
|
helmfile-args: apply -l environment=prod
|
||||||
helmfile-auto-init: "false"
|
helmfile-auto-init: "false"
|
||||||
helmfile-workdirectory: infra/formbricks-cloud-helm
|
helmfile-workdirectory: infra/formbricks-cloud-helm
|
||||||
|
|
||||||
|
- uses: helmfile/helmfile-action@712000e3d4e28c72778ecc53857746082f555ef3 # v2.0.4
|
||||||
|
name: Deploy Formbricks Cloud Staging
|
||||||
|
if: inputs.ENVIRONMENT == 'staging'
|
||||||
|
env:
|
||||||
|
VERSION: ${{ inputs.VERSION }}
|
||||||
|
REPOSITORY: ${{ inputs.REPOSITORY }}
|
||||||
|
FORMBRICKS_INGRESS_CERT_ARN: ${{ secrets.STAGE_FORMBRICKS_INGRESS_CERT_ARN }}
|
||||||
|
FORMBRICKS_ROLE_ARN: ${{ secrets.STAGE_FORMBRICKS_ROLE_ARN }}
|
||||||
|
with:
|
||||||
|
helmfile-version: "v1.0.0"
|
||||||
|
helm-plugins: >
|
||||||
|
https://github.com/databus23/helm-diff,
|
||||||
|
https://github.com/jkroepke/helm-secrets
|
||||||
|
helmfile-args: apply -l environment=stage
|
||||||
|
helmfile-auto-init: "false"
|
||||||
|
helmfile-workdirectory: infra/formbricks-cloud-helm
|
||||||
|
|
||||||
|
- name: Purge Cloudflare Cache
|
||||||
|
if: ${{ inputs.ENVIRONMENT == 'production' || inputs.ENVIRONMENT == 'staging' }}
|
||||||
|
env:
|
||||||
|
CF_ZONE_ID: ${{ secrets.CLOUDFLARE_ZONE_ID }}
|
||||||
|
CF_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||||
|
ENVIRONMENT: ${{ inputs.ENVIRONMENT }}
|
||||||
|
run: |
|
||||||
|
# Set hostname based on environment
|
||||||
|
if [[ "$ENVIRONMENT" == "production" ]]; then
|
||||||
|
PURGE_HOST="app.formbricks.com"
|
||||||
|
else
|
||||||
|
PURGE_HOST="stage.app.formbricks.com"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Purging Cloudflare cache for host: $PURGE_HOST (environment: $ENVIRONMENT, zone: $CF_ZONE_ID)"
|
||||||
|
|
||||||
|
# Prepare JSON payload for selective cache purge
|
||||||
|
json_payload=$(cat << EOF
|
||||||
|
{
|
||||||
|
"hosts": ["$PURGE_HOST"]
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
)
|
||||||
|
|
||||||
|
# Make API call to Cloudflare
|
||||||
|
response=$(curl -s -X POST \
|
||||||
|
"https://api.cloudflare.com/client/v4/zones/$CF_ZONE_ID/purge_cache" \
|
||||||
|
-H "Authorization: Bearer $CF_API_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
--data "$json_payload")
|
||||||
|
|
||||||
|
echo "Cloudflare API response: $response"
|
||||||
|
|
||||||
|
# Verify the operation was successful
|
||||||
|
if [[ "$(echo "$response" | jq -r .success)" == "true" ]]; then
|
||||||
|
echo "✅ Successfully purged cache for $PURGE_HOST"
|
||||||
|
else
|
||||||
|
echo "❌ Cloudflare cache purge failed"
|
||||||
|
echo "Error details: $(echo "$response" | jq -r .errors)"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|||||||
200
.github/workflows/docker-build-validation.yml
vendored
Normal file
@@ -0,0 +1,200 @@
|
|||||||
|
name: Docker Build Validation
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
merge_group:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
env:
|
||||||
|
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||||
|
TURBO_TEAM: ${{ vars.TURBO_TEAM }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
validate-docker-build:
|
||||||
|
name: Validate Docker Build
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
# Add PostgreSQL and Redis service containers
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: pgvector/pgvector@sha256:9ae02a756ba16a2d69dd78058e25915e36e189bb36ddf01ceae86390d7ed786a
|
||||||
|
env:
|
||||||
|
POSTGRES_USER: test
|
||||||
|
POSTGRES_PASSWORD: test
|
||||||
|
POSTGRES_DB: formbricks
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
# Health check to ensure PostgreSQL is ready before using it
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
|
||||||
|
redis:
|
||||||
|
image: valkey/valkey@sha256:12ba4f45a7c3e1d0f076acd616cb230834e75a77e8516dde382720af32832d6d
|
||||||
|
ports:
|
||||||
|
- 6379:6379
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Harden the runner (Audit all outbound calls)
|
||||||
|
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||||
|
with:
|
||||||
|
egress-policy: audit
|
||||||
|
|
||||||
|
- name: Checkout Repository
|
||||||
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||||
|
|
||||||
|
- name: Build Docker Image
|
||||||
|
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||||
|
env:
|
||||||
|
GITHUB_SHA: ${{ github.sha }}
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: ./apps/web/Dockerfile
|
||||||
|
push: false
|
||||||
|
load: true
|
||||||
|
tags: formbricks-test:${{ env.GITHUB_SHA }}
|
||||||
|
cache-from: type=gha
|
||||||
|
cache-to: type=gha,mode=max
|
||||||
|
secrets: |
|
||||||
|
database_url=${{ secrets.DUMMY_DATABASE_URL }}
|
||||||
|
encryption_key=${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||||
|
redis_url=redis://localhost:6379
|
||||||
|
|
||||||
|
- name: Verify and Initialize PostgreSQL
|
||||||
|
run: |
|
||||||
|
echo "Verifying PostgreSQL connection..."
|
||||||
|
# Install PostgreSQL client to test connection
|
||||||
|
sudo apt-get update && sudo apt-get install -y postgresql-client
|
||||||
|
|
||||||
|
# Test connection using psql with timeout and proper error handling
|
||||||
|
echo "Testing PostgreSQL connection with 30 second timeout..."
|
||||||
|
if timeout 30 bash -c 'until PGPASSWORD=test psql -h localhost -U test -d formbricks -c "\dt" >/dev/null 2>&1; do
|
||||||
|
echo "Waiting for PostgreSQL to be ready..."
|
||||||
|
sleep 2
|
||||||
|
done'; then
|
||||||
|
echo "✅ PostgreSQL connection successful"
|
||||||
|
PGPASSWORD=test psql -h localhost -U test -d formbricks -c "SELECT version();"
|
||||||
|
|
||||||
|
# Enable necessary extensions that might be required by migrations
|
||||||
|
echo "Enabling required PostgreSQL extensions..."
|
||||||
|
PGPASSWORD=test psql -h localhost -U test -d formbricks -c "CREATE EXTENSION IF NOT EXISTS vector;" || echo "Vector extension already exists or not available"
|
||||||
|
|
||||||
|
else
|
||||||
|
echo "❌ PostgreSQL connection failed after 30 seconds"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Show network configuration
|
||||||
|
echo "Network configuration:"
|
||||||
|
netstat -tulpn | grep 5432 || echo "No process listening on port 5432"
|
||||||
|
|
||||||
|
- name: Verify Redis/Valkey Connection
|
||||||
|
run: |
|
||||||
|
echo "Verifying Redis/Valkey connection..."
|
||||||
|
# Install Redis client to test connection
|
||||||
|
sudo apt-get update && sudo apt-get install -y redis-tools
|
||||||
|
|
||||||
|
# Test connection using redis-cli with timeout and proper error handling
|
||||||
|
echo "Testing Redis connection with 30 second timeout..."
|
||||||
|
if timeout 30 bash -c 'until redis-cli -h localhost -p 6379 ping >/dev/null 2>&1; do
|
||||||
|
echo "Waiting for Redis to be ready..."
|
||||||
|
sleep 2
|
||||||
|
done'; then
|
||||||
|
echo "✅ Redis connection successful"
|
||||||
|
redis-cli -h localhost -p 6379 info server | head -5
|
||||||
|
else
|
||||||
|
echo "❌ Redis connection failed after 30 seconds"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Show network configuration for Redis
|
||||||
|
echo "Redis network configuration:"
|
||||||
|
netstat -tulpn | grep 6379 || echo "No process listening on port 6379"
|
||||||
|
|
||||||
|
- name: Test Docker Image with Health Check
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
GITHUB_SHA: ${{ github.sha }}
|
||||||
|
DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
|
||||||
|
run: |
|
||||||
|
echo "🧪 Testing if the Docker image starts correctly..."
|
||||||
|
|
||||||
|
# Add extra docker run args to support host.docker.internal on Linux
|
||||||
|
DOCKER_RUN_ARGS="--add-host=host.docker.internal:host-gateway"
|
||||||
|
|
||||||
|
# Start the container with host.docker.internal pointing to the host
|
||||||
|
docker run --name formbricks-test \
|
||||||
|
$DOCKER_RUN_ARGS \
|
||||||
|
-p 3000:3000 \
|
||||||
|
-e DATABASE_URL="postgresql://test:test@host.docker.internal:5432/formbricks" \
|
||||||
|
-e ENCRYPTION_KEY="$DUMMY_ENCRYPTION_KEY" \
|
||||||
|
-e REDIS_URL="redis://host.docker.internal:6379" \
|
||||||
|
-d "formbricks-test:$GITHUB_SHA"
|
||||||
|
|
||||||
|
# Start health check polling immediately (every 5 seconds for up to 5 minutes)
|
||||||
|
echo "🏥 Polling /health endpoint every 5 seconds for up to 5 minutes..."
|
||||||
|
MAX_RETRIES=60 # 60 attempts × 5 seconds = 5 minutes
|
||||||
|
RETRY_COUNT=0
|
||||||
|
HEALTH_CHECK_SUCCESS=false
|
||||||
|
|
||||||
|
set +e # Disable exit on error to allow for retries
|
||||||
|
|
||||||
|
while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
|
||||||
|
RETRY_COUNT=$((RETRY_COUNT + 1))
|
||||||
|
|
||||||
|
# Check if container is still running
|
||||||
|
if [ "$(docker inspect -f '{{.State.Running}}' formbricks-test 2>/dev/null)" != "true" ]; then
|
||||||
|
echo "❌ Container stopped running after $((RETRY_COUNT * 5)) seconds!"
|
||||||
|
echo "📋 Container logs:"
|
||||||
|
docker logs formbricks-test
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Show progress and diagnostic info every 12 attempts (1 minute intervals)
|
||||||
|
if [ $((RETRY_COUNT % 12)) -eq 0 ] || [ $RETRY_COUNT -eq 1 ]; then
|
||||||
|
echo "Health check attempt $RETRY_COUNT of $MAX_RETRIES ($(($RETRY_COUNT * 5)) seconds elapsed)..."
|
||||||
|
echo "📋 Recent container logs:"
|
||||||
|
docker logs --tail 10 formbricks-test
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Try health endpoint with shorter timeout for faster polling
|
||||||
|
# Use -f flag to make curl fail on HTTP error status codes (4xx, 5xx)
|
||||||
|
if curl -f -s -m 10 http://localhost:3000/health >/dev/null 2>&1; then
|
||||||
|
echo "✅ Health check successful after $((RETRY_COUNT * 5)) seconds!"
|
||||||
|
HEALTH_CHECK_SUCCESS=true
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Wait 5 seconds before next attempt
|
||||||
|
sleep 5
|
||||||
|
done
|
||||||
|
|
||||||
|
# Show full container logs for debugging
|
||||||
|
echo "📋 Full container logs:"
|
||||||
|
docker logs formbricks-test
|
||||||
|
|
||||||
|
# Clean up the container
|
||||||
|
echo "🧹 Cleaning up..."
|
||||||
|
docker rm -f formbricks-test
|
||||||
|
|
||||||
|
# Exit with failure if health check did not succeed
|
||||||
|
if [ "$HEALTH_CHECK_SUCCESS" != "true" ]; then
|
||||||
|
echo "❌ Health check failed after $((MAX_RETRIES * 5)) seconds (5 minutes)"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "✨ Docker validation complete - all checks passed!"
|
||||||
70
.github/workflows/docker-security-scan.yml
vendored
Normal file
@@ -0,0 +1,70 @@
name: Docker Security Scan

on:
  schedule:
    - cron: "0 2 * * *" # Daily at 2 AM UTC
  workflow_dispatch:
  workflow_run:
    workflows: ["Docker Release to Github"]
    types: [completed]

permissions:
  contents: read
  packages: read
  security-events: write

jobs:
  scan:
    name: Vulnerability Scan
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:
      - name: Harden the runner
        uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
        with:
          egress-policy: audit

      - name: Checkout (for SARIF fingerprinting only)
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          fetch-depth: 1

      - name: Determine ref and commit for upload
        id: gitref
        shell: bash
        env:
          EVENT_NAME: ${{ github.event_name }}
          HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
          HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
        run: |
          set -euo pipefail
          if [[ "${EVENT_NAME}" == "workflow_run" ]]; then
            echo "ref=refs/heads/${HEAD_BRANCH}" >> "$GITHUB_OUTPUT"
            echo "sha=${HEAD_SHA}" >> "$GITHUB_OUTPUT"
          else
            echo "ref=${GITHUB_REF}" >> "$GITHUB_OUTPUT"
            echo "sha=${GITHUB_SHA}" >> "$GITHUB_OUTPUT"
          fi
      - name: Log in to GitHub Container Registry
        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Run Trivy vulnerability scanner
        uses: aquasecurity/trivy-action@dc5a429b52fcf669ce959baa2c2dd26090d2a6c4 # v0.32.0
        with:
          image-ref: "ghcr.io/${{ github.repository }}:latest"
          format: "sarif"
          output: "trivy-results.sarif"
          severity: "CRITICAL,HIGH,MEDIUM,LOW"

      - name: Upload Trivy scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@a4e1a019f5e24960714ff6296aee04b736cbc3cf # v3.29.6
        if: ${{ always() }}
        with:
          sarif_file: "trivy-results.sarif"
          ref: ${{ steps.gitref.outputs.ref }}
          sha: ${{ steps.gitref.outputs.sha }}
          category: "trivy-container-scan"
168
.github/workflows/e2e.yml
vendored
@@ -3,25 +3,22 @@ name: E2E Tests
|
|||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
secrets:
|
secrets:
|
||||||
AZURE_CLIENT_ID:
|
|
||||||
required: false
|
|
||||||
AZURE_TENANT_ID:
|
|
||||||
required: false
|
|
||||||
AZURE_SUBSCRIPTION_ID:
|
|
||||||
required: false
|
|
||||||
PLAYWRIGHT_SERVICE_URL:
|
PLAYWRIGHT_SERVICE_URL:
|
||||||
required: false
|
required: false
|
||||||
|
PLAYWRIGHT_SERVICE_ACCESS_TOKEN:
|
||||||
|
required: false
|
||||||
|
ENTERPRISE_LICENSE_KEY:
|
||||||
|
required: true
|
||||||
# Add other secrets if necessary
|
# Add other secrets if necessary
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
env:
|
env:
|
||||||
TELEMETRY_DISABLED: 1
|
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||||
|
TURBO_TEAM: ${{ vars.TURBO_TEAM }}
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
id-token: write
|
|
||||||
contents: read
|
contents: read
|
||||||
actions: read
|
actions: read
|
||||||
checks: write
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
@@ -30,7 +27,7 @@ jobs:
|
|||||||
timeout-minutes: 60
|
timeout-minutes: 60
|
||||||
services:
|
services:
|
||||||
postgres:
|
postgres:
|
||||||
image: pgvector/pgvector:pg17
|
image: pgvector/pgvector@sha256:9ae02a756ba16a2d69dd78058e25915e36e189bb36ddf01ceae86390d7ed786a
|
||||||
env:
|
env:
|
||||||
POSTGRES_DB: postgres
|
POSTGRES_DB: postgres
|
||||||
POSTGRES_USER: postgres
|
POSTGRES_USER: postgres
|
||||||
@@ -38,23 +35,31 @@ jobs:
|
|||||||
ports:
|
ports:
|
||||||
- 5432:5432
|
- 5432:5432
|
||||||
options: >-
|
options: >-
|
||||||
--health-cmd="pg_isready -U testuser"
|
--health-cmd="pg_isready -U postgres"
|
||||||
--health-interval=10s
|
--health-interval=10s
|
||||||
--health-timeout=5s
|
--health-timeout=5s
|
||||||
--health-retries=5
|
--health-retries=5
|
||||||
|
valkey:
|
||||||
|
image: valkey/valkey@sha256:12ba4f45a7c3e1d0f076acd616cb230834e75a77e8516dde382720af32832d6d
|
||||||
|
ports:
|
||||||
|
- 6379:6379
|
||||||
steps:
|
steps:
|
||||||
- name: Harden the runner (Audit all outbound calls)
|
- name: Harden the runner (Audit all outbound calls)
|
||||||
uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
|
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||||
with:
|
with:
|
||||||
egress-policy: audit
|
egress-policy: audit
|
||||||
|
allowed-endpoints: |
|
||||||
|
ee.formbricks.com:443
|
||||||
|
registry-1.docker.io:443
|
||||||
|
docker.io:443
|
||||||
|
|
||||||
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||||
- uses: ./.github/actions/dangerous-git-checkout
|
- uses: ./.github/actions/dangerous-git-checkout
|
||||||
|
|
||||||
- name: Setup Node.js 20.x
|
- name: Setup Node.js 22.x
|
||||||
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 # v3.8.2
|
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 # v3.8.2
|
||||||
with:
|
with:
|
||||||
node-version: 20.x
|
node-version: 22.x
|
||||||
|
|
||||||
- name: Install pnpm
|
- name: Install pnpm
|
||||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||||
@@ -73,11 +78,73 @@ jobs:
|
|||||||
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
|
sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
|
||||||
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
|
sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
|
||||||
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
|
sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
|
||||||
sed -i "s/ENTERPRISE_LICENSE_KEY=.*/ENTERPRISE_LICENSE_KEY=${RANDOM_KEY}/" .env
|
sed -i "s/ENTERPRISE_LICENSE_KEY=.*/ENTERPRISE_LICENSE_KEY=${{ secrets.ENTERPRISE_LICENSE_KEY }}/" .env
|
||||||
|
sed -i "s|REDIS_URL=.*|REDIS_URL=redis://localhost:6379|" .env
|
||||||
echo "" >> .env
|
echo "" >> .env
|
||||||
echo "E2E_TESTING=1" >> .env
|
echo "E2E_TESTING=1" >> .env
|
||||||
|
echo "S3_REGION=us-east-1" >> .env
|
||||||
|
echo "S3_BUCKET_NAME=formbricks-e2e" >> .env
|
||||||
|
echo "S3_ENDPOINT_URL=http://localhost:9000" >> .env
|
||||||
|
echo "S3_ACCESS_KEY=devminio" >> .env
|
||||||
|
echo "S3_SECRET_KEY=devminio123" >> .env
|
||||||
|
echo "S3_FORCE_PATH_STYLE=1" >> .env
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
|
- name: Install MinIO client (mc)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
MC_VERSION="RELEASE.2025-08-13T08-35-41Z"
|
||||||
|
MC_BASE="https://dl.min.io/client/mc/release/linux-amd64/archive"
|
||||||
|
MC_BIN="mc.${MC_VERSION}"
|
||||||
|
MC_SUM="${MC_BIN}.sha256sum"
|
||||||
|
|
||||||
|
curl -fsSL "${MC_BASE}/${MC_BIN}" -o "${MC_BIN}"
|
||||||
|
curl -fsSL "${MC_BASE}/${MC_SUM}" -o "${MC_SUM}"
|
||||||
|
|
||||||
|
sha256sum -c "${MC_SUM}"
|
||||||
|
|
||||||
|
chmod +x "${MC_BIN}"
|
||||||
|
sudo mv "${MC_BIN}" /usr/local/bin/mc
|
||||||
|
|
||||||
|
- name: Start MinIO Server
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Start MinIO server in background
|
||||||
|
docker run -d \
|
||||||
|
--name minio-server \
|
||||||
|
-p 9000:9000 \
|
||||||
|
-p 9001:9001 \
|
||||||
|
-e MINIO_ROOT_USER=devminio \
|
||||||
|
-e MINIO_ROOT_PASSWORD=devminio123 \
|
||||||
|
minio/minio:RELEASE.2025-09-07T16-13-09Z \
|
||||||
|
server /data --console-address :9001
|
||||||
|
|
||||||
|
echo "MinIO server started"
|
||||||
|
|
||||||
|
- name: Wait for MinIO and create S3 bucket
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
echo "Waiting for MinIO to be ready..."
|
||||||
|
ready=0
|
||||||
|
for i in {1..60}; do
|
||||||
|
if curl -fsS http://localhost:9000/minio/health/live >/dev/null; then
|
||||||
|
echo "MinIO is up after ${i} seconds"
|
||||||
|
ready=1
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
|
||||||
|
if [ "$ready" -ne 1 ]; then
|
||||||
|
echo "::error::MinIO did not become ready within 60 seconds"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
mc alias set local http://localhost:9000 devminio devminio123
|
||||||
|
mc mb --ignore-existing local/formbricks-e2e
|
||||||
|
|
||||||
- name: Build App
|
- name: Build App
|
||||||
run: |
|
run: |
|
||||||
pnpm build --filter=@formbricks/web...
|
pnpm build --filter=@formbricks/web...
|
||||||
@@ -87,8 +154,36 @@ jobs:
|
|||||||
# pnpm prisma migrate deploy
|
# pnpm prisma migrate deploy
|
||||||
pnpm db:migrate:dev
|
pnpm db:migrate:dev
|
||||||
|
|
||||||
|
- name: Run Rate Limiter Load Tests
|
||||||
|
run: |
|
||||||
|
echo "Running rate limiter load tests with Redis/Valkey..."
|
||||||
|
cd apps/web && pnpm vitest run modules/core/rate-limit/rate-limit-load.test.ts
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
- name: Run Cache Integration Tests
|
||||||
|
run: |
|
||||||
|
echo "Running cache integration tests with Redis/Valkey..."
|
||||||
|
cd packages/cache && pnpm vitest run src/cache-integration.test.ts
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
- name: Check for Enterprise License
|
||||||
|
run: |
|
||||||
|
LICENSE_KEY=$(grep '^ENTERPRISE_LICENSE_KEY=' .env | cut -d'=' -f2-)
|
||||||
|
if [ -z "$LICENSE_KEY" ]; then
|
||||||
|
echo "::error::ENTERPRISE_LICENSE_KEY in .env is empty. Please check your secret configuration."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "License key length: ${#LICENSE_KEY}"
|
||||||
|
|
||||||
|
- name: Disable rate limiting for E2E tests
|
||||||
|
run: |
|
||||||
|
echo "RATE_LIMITING_DISABLED=1" >> .env
|
||||||
|
echo "Rate limiting disabled for E2E tests"
|
||||||
|
shell: bash
|
||||||
|
|
||||||
- name: Run App
|
- name: Run App
|
||||||
run: |
|
run: |
|
||||||
|
echo "Starting app with enterprise license..."
|
||||||
NODE_ENV=test pnpm start --filter=@formbricks/web | tee app.log 2>&1 &
|
NODE_ENV=test pnpm start --filter=@formbricks/web | tee app.log 2>&1 &
|
||||||
sleep 10 # Optional: gives some buffer for the app to start
|
sleep 10 # Optional: gives some buffer for the app to start
|
||||||
for attempt in {1..10}; do
|
for attempt in {1..10}; do
|
||||||
@@ -107,31 +202,32 @@ jobs:
|
|||||||
- name: Install Playwright
|
- name: Install Playwright
|
||||||
run: pnpm exec playwright install --with-deps
|
run: pnpm exec playwright install --with-deps
|
||||||
|
|
||||||
- name: Set Azure Secret Variables
|
- name: Determine Playwright execution mode
|
||||||
run: |
|
shell: bash
|
||||||
if [[ -n "${{ secrets.AZURE_CLIENT_ID }}" && -n "${{ secrets.AZURE_TENANT_ID }}" && -n "${{ secrets.AZURE_SUBSCRIPTION_ID }}" ]]; then
|
|
||||||
echo "AZURE_ENABLED=true" >> $GITHUB_ENV
|
|
||||||
else
|
|
||||||
echo "AZURE_ENABLED=false" >> $GITHUB_ENV
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Azure login
|
|
||||||
if: env.AZURE_ENABLED == 'true'
|
|
||||||
uses: azure/login@a65d910e8af852a8061c627c456678983e180302 # v2.2.0
|
|
||||||
with:
|
|
||||||
client-id: ${{ secrets.AZURE_CLIENT_ID }}
|
|
||||||
tenant-id: ${{ secrets.AZURE_TENANT_ID }}
|
|
||||||
subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
|
|
||||||
|
|
||||||
- name: Run E2E Tests (Azure)
|
|
||||||
if: env.AZURE_ENABLED == 'true'
|
|
||||||
env:
|
env:
|
||||||
PLAYWRIGHT_SERVICE_URL: ${{ secrets.PLAYWRIGHT_SERVICE_URL }}
|
PLAYWRIGHT_SERVICE_URL: ${{ secrets.PLAYWRIGHT_SERVICE_URL }}
|
||||||
|
PLAYWRIGHT_SERVICE_ACCESS_TOKEN: ${{ secrets.PLAYWRIGHT_SERVICE_ACCESS_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
pnpm test-e2e:azure
|
set -euo pipefail
|
||||||
|
|
||||||
|
if [[ -n "${PLAYWRIGHT_SERVICE_URL}" && -n "${PLAYWRIGHT_SERVICE_ACCESS_TOKEN}" ]]; then
|
||||||
|
echo "PW_MODE=service" >> "$GITHUB_ENV"
|
||||||
|
else
|
||||||
|
echo "PW_MODE=local" >> "$GITHUB_ENV"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Run E2E Tests (Playwright Service)
|
||||||
|
if: env.PW_MODE == 'service'
|
||||||
|
env:
|
||||||
|
PLAYWRIGHT_SERVICE_URL: ${{ secrets.PLAYWRIGHT_SERVICE_URL }}
|
||||||
|
PLAYWRIGHT_SERVICE_ACCESS_TOKEN: ${{ secrets.PLAYWRIGHT_SERVICE_ACCESS_TOKEN }}
|
||||||
|
CI: true
|
||||||
|
run: pnpm test-e2e:azure
|
||||||
|
|
||||||
- name: Run E2E Tests (Local)
|
- name: Run E2E Tests (Local)
|
||||||
if: env.AZURE_ENABLED == 'false'
|
if: env.PW_MODE == 'local'
|
||||||
|
env:
|
||||||
|
CI: true
|
||||||
run: |
|
run: |
|
||||||
pnpm test:e2e
|
pnpm test:e2e
|
||||||
|
|
||||||
|
|||||||
156
.github/workflows/formbricks-release.yml
vendored
@@ -1,33 +1,157 @@
|
|||||||
name: Build, release & deploy Formbricks images
|
name: Build, release & deploy Formbricks images
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
release:
|
||||||
push:
|
types: [published]
|
||||||
tags:
|
|
||||||
- "v*"
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
docker-build:
|
check-latest-release:
|
||||||
name: Build & release stable docker image
|
name: Check if this is the latest release
|
||||||
if: startsWith(github.ref, 'refs/tags/v')
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 5
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
outputs:
|
||||||
|
is_latest: ${{ steps.compare_tags.outputs.is_latest }}
|
||||||
|
# This job determines if the current release was marked as "Set as the latest release"
|
||||||
|
# by comparing it with the latest release from GitHub API
|
||||||
|
steps:
|
||||||
|
- name: Harden the runner
|
||||||
|
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||||
|
with:
|
||||||
|
egress-policy: audit
|
||||||
|
|
||||||
|
- name: Get latest release tag from API
|
||||||
|
id: get_latest_release
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
REPO: ${{ github.repository }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Get the latest release tag from GitHub API with error handling
|
||||||
|
echo "Fetching latest release from GitHub API..."
|
||||||
|
|
||||||
|
# Use curl with error handling - API returns 404 if no releases exist
|
||||||
|
http_code=$(curl -s -w "%{http_code}" -H "Authorization: token ${GITHUB_TOKEN}" \
|
||||||
|
"https://api.github.com/repos/${REPO}/releases/latest" -o /tmp/latest_release.json)
|
||||||
|
|
||||||
|
if [[ "$http_code" == "404" ]]; then
|
||||||
|
echo "⚠️ No previous releases found (404). This appears to be the first release."
|
||||||
|
echo "latest_release=" >> $GITHUB_OUTPUT
|
||||||
|
elif [[ "$http_code" == "200" ]]; then
|
||||||
|
latest_release=$(jq -r .tag_name /tmp/latest_release.json)
|
||||||
|
if [[ "$latest_release" == "null" || -z "$latest_release" ]]; then
|
||||||
|
echo "⚠️ API returned null/empty tag_name. Treating as first release."
|
||||||
|
echo "latest_release=" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
echo "Latest release from API: ${latest_release}"
|
||||||
|
echo "latest_release=${latest_release}" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo "❌ GitHub API error (HTTP ${http_code}). Treating as first release."
|
||||||
|
echo "latest_release=" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Current release tag: ${{ github.event.release.tag_name }}"
|
||||||
|
|
||||||
|
- name: Compare release tags
|
||||||
|
id: compare_tags
|
||||||
|
env:
|
||||||
|
CURRENT_TAG: ${{ github.event.release.tag_name }}
|
||||||
|
LATEST_TAG: ${{ steps.get_latest_release.outputs.latest_release }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Handle first release case (no previous releases)
|
||||||
|
if [[ -z "${LATEST_TAG}" ]]; then
|
||||||
|
echo "🎉 This is the first release (${CURRENT_TAG}) - treating as latest"
|
||||||
|
echo "is_latest=true" >> $GITHUB_OUTPUT
|
||||||
|
elif [[ "${CURRENT_TAG}" == "${LATEST_TAG}" ]]; then
|
||||||
|
echo "✅ This release (${CURRENT_TAG}) is marked as the latest release"
|
||||||
|
echo "is_latest=true" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
echo "ℹ️ This release (${CURRENT_TAG}) is not the latest release (latest: ${LATEST_TAG})"
|
||||||
|
echo "is_latest=false" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
docker-build-community:
|
||||||
|
name: Build & release community docker image
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write
|
||||||
|
id-token: write
|
||||||
uses: ./.github/workflows/release-docker-github.yml
|
uses: ./.github/workflows/release-docker-github.yml
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
needs:
|
||||||
|
- check-latest-release
|
||||||
|
with:
|
||||||
|
IS_PRERELEASE: ${{ github.event.release.prerelease }}
|
||||||
|
MAKE_LATEST: ${{ needs.check-latest-release.outputs.is_latest == 'true' }}
|
||||||
|
|
||||||
|
docker-build-cloud:
|
||||||
|
name: Build & push Formbricks Cloud to ECR
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
id-token: write
|
||||||
|
uses: ./.github/workflows/build-and-push-ecr.yml
|
||||||
|
secrets: inherit
|
||||||
|
with:
|
||||||
|
image_tag: ${{ needs.docker-build-community.outputs.VERSION }}
|
||||||
|
IS_PRERELEASE: ${{ github.event.release.prerelease }}
|
||||||
|
MAKE_LATEST: ${{ needs.check-latest-release.outputs.is_latest == 'true' }}
|
||||||
|
needs:
|
||||||
|
- check-latest-release
|
||||||
|
- docker-build-community
|
||||||
|
|
||||||
helm-chart-release:
|
helm-chart-release:
|
||||||
name: Release Helm Chart
|
name: Release Helm Chart
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write
|
||||||
uses: ./.github/workflows/release-helm-chart.yml
|
uses: ./.github/workflows/release-helm-chart.yml
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
needs:
|
needs:
|
||||||
- docker-build
|
- docker-build-community
|
||||||
with:
|
with:
|
||||||
VERSION: ${{ needs.docker-build.outputs.VERSION }}
|
VERSION: ${{ needs.docker-build-community.outputs.VERSION }}
|
||||||
|
|
||||||
deploy-formbricks-cloud:
|
verify-cloud-build:
|
||||||
name: Deploy Helm Chart to Formbricks Cloud
|
name: Verify Cloud Build Outputs
|
||||||
secrets: inherit
|
runs-on: ubuntu-latest
|
||||||
uses: ./.github/workflows/deploy-formbricks-cloud.yml
|
timeout-minutes: 5 # Simple verification should be quick
|
||||||
needs:
|
needs:
|
||||||
- docker-build
|
- docker-build-cloud
|
||||||
- helm-chart-release
|
steps:
|
||||||
|
- name: Harden the runner
|
||||||
|
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||||
|
with:
|
||||||
|
egress-policy: audit
|
||||||
|
|
||||||
|
- name: Display ECR build outputs
|
||||||
|
env:
|
||||||
|
IMAGE_TAG: ${{ needs.docker-build-cloud.outputs.IMAGE_TAG }}
|
||||||
|
TAGS: ${{ needs.docker-build-cloud.outputs.TAGS }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
echo "✅ ECR Build Completed Successfully"
|
||||||
|
echo "Image Tag: ${IMAGE_TAG}"
|
||||||
|
echo "ECR Tags:"
|
||||||
|
printf '%s\n' "${TAGS}"
|
||||||
|
|
||||||
|
move-stable-tag:
|
||||||
|
name: Move stable tag to release
|
||||||
|
permissions:
|
||||||
|
contents: write # Required for tag push operations in called workflow
|
||||||
|
uses: ./.github/workflows/move-stable-tag.yml
|
||||||
|
needs:
|
||||||
|
- check-latest-release
|
||||||
|
- docker-build-community # Ensure release is successful first
|
||||||
with:
|
with:
|
||||||
VERSION: ${{ needs.docker-build.outputs.VERSION }}
|
release_tag: ${{ github.event.release.tag_name }}
|
||||||
|
commit_sha: ${{ github.sha }}
|
||||||
|
is_prerelease: ${{ github.event.release.prerelease }}
|
||||||
|
make_latest: ${{ needs.check-latest-release.outputs.is_latest == 'true' }}
|
||||||
|
|||||||
27
.github/workflows/labeler.yml
vendored
@@ -1,27 +0,0 @@
name: "Pull Request Labeler"
on:
  - pull_request_target
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
permissions:
  contents: read

jobs:
  labeler:
    name: Pull Request Labeler
    permissions:
      contents: read
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
        with:
          egress-policy: audit

      - uses: actions/labeler@ac9175f8a1f3625fd0d4fb234536d26811351594 # v4.3.0
        with:
          repo-token: "${{ secrets.GITHUB_TOKEN }}"
          # https://github.com/actions/labeler/issues/442#issuecomment-1297359481
          sync-labels: ""
4
.github/workflows/lint.yml
vendored
@@ -13,7 +13,7 @@ jobs:

     steps:
       - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
+        uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
         with:
           egress-policy: audit

@@ -26,7 +26,7 @@ jobs:
           node-version: 20.x

       - name: Install pnpm
-        uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2
+        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

       - name: Install dependencies
         run: pnpm install --config.platform=linux --config.architecture=x64
101
.github/workflows/move-stable-tag.yml
vendored
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
name: Move Stable Tag
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
release_tag:
|
||||||
|
description: "The release tag name (e.g., 1.2.3)"
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
commit_sha:
|
||||||
|
description: "The commit SHA to point the stable tag to"
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
is_prerelease:
|
||||||
|
description: "Whether this is a prerelease (stable tag won't be moved for prereleases)"
|
||||||
|
required: false
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
make_latest:
|
||||||
|
description: "Whether to move stable tag (from GitHub release 'Set as the latest release' option)"
|
||||||
|
required: false
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
# Prevent concurrent stable tag operations to avoid race conditions
|
||||||
|
concurrency:
|
||||||
|
group: move-stable-tag-${{ github.repository }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
move-stable-tag:
|
||||||
|
name: Move stable tag to release
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 10 # Prevent hung git operations
|
||||||
|
permissions:
|
||||||
|
contents: write # Required to push tags
|
||||||
|
# Only move stable tag for non-prerelease versions AND when make_latest is true
|
||||||
|
if: ${{ !inputs.is_prerelease && inputs.make_latest }}
|
||||||
|
steps:
|
||||||
|
- name: Harden the runner
|
||||||
|
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
|
||||||
|
with:
|
||||||
|
egress-policy: audit
|
||||||
|
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||||
|
with:
|
||||||
|
fetch-depth: 0 # Full history needed for tag operations
|
||||||
|
|
||||||
|
- name: Validate inputs
|
||||||
|
env:
|
||||||
|
RELEASE_TAG: ${{ inputs.release_tag }}
|
||||||
|
COMMIT_SHA: ${{ inputs.commit_sha }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Validate release tag format
|
||||||
|
if [[ ! "$RELEASE_TAG" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$ ]]; then
|
||||||
|
echo "❌ Error: Invalid release tag format. Expected format: 1.2.3, 1.2.3-alpha"
|
||||||
|
echo "Provided: $RELEASE_TAG"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Validate commit SHA format (40 character hex)
|
||||||
|
if [[ ! "$COMMIT_SHA" =~ ^[a-f0-9]{40}$ ]]; then
|
||||||
|
echo "❌ Error: Invalid commit SHA format. Expected 40 character hex string"
|
||||||
|
echo "Provided: $COMMIT_SHA"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "✅ Input validation passed"
|
||||||
|
echo "Release tag: $RELEASE_TAG"
|
||||||
|
echo "Commit SHA: $COMMIT_SHA"
|
||||||
|
|
||||||
|
- name: Move stable tag
|
||||||
|
env:
|
||||||
|
RELEASE_TAG: ${{ inputs.release_tag }}
|
||||||
|
COMMIT_SHA: ${{ inputs.commit_sha }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Configure git
|
||||||
|
git config user.name "github-actions[bot]"
|
||||||
|
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||||
|
|
||||||
|
# Verify the commit exists
|
||||||
|
if ! git cat-file -e "$COMMIT_SHA"; then
|
||||||
|
echo "❌ Error: Commit $COMMIT_SHA does not exist in this repository"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Move stable tag to the release commit
|
||||||
|
echo "📌 Moving stable tag to commit: $COMMIT_SHA (release: $RELEASE_TAG)"
|
||||||
|
git tag -f stable "$COMMIT_SHA"
|
||||||
|
git push origin stable --force
|
||||||
|
|
||||||
|
echo "✅ Successfully moved stable tag to release $RELEASE_TAG"
|
||||||
|
echo "🔗 Stable tag now points to: https://github.com/${{ github.repository }}/commit/$COMMIT_SHA"
|
||||||
165
.github/workflows/pr-size-check.yml
vendored
Normal file
@@ -0,0 +1,165 @@
|
|||||||
|
name: PR Size Check
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
types: [opened, synchronize, reopened]
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
pull-requests: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check-pr-size:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 10
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Harden the runner
|
||||||
|
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
|
||||||
|
with:
|
||||||
|
egress-policy: audit
|
||||||
|
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Check PR size
|
||||||
|
id: check-size
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Fetch the base branch
|
||||||
|
git fetch origin "${{ github.base_ref }}"
|
||||||
|
|
||||||
|
# Get diff stats
|
||||||
|
diff_output=$(git diff --numstat "origin/${{ github.base_ref }}"...HEAD)
|
||||||
|
|
||||||
|
# Count lines, excluding:
|
||||||
|
# - Test files (*.test.ts, *.spec.tsx, etc.)
|
||||||
|
# - Locale files (locales/*.json, i18n/*.json)
|
||||||
|
# - Lock files (pnpm-lock.yaml, package-lock.json, yarn.lock)
|
||||||
|
# - Generated files (dist/, coverage/, build/, .next/)
|
||||||
|
# - Storybook stories (*.stories.tsx)
|
||||||
|
|
||||||
|
total_additions=0
|
||||||
|
total_deletions=0
|
||||||
|
counted_files=0
|
||||||
|
excluded_files=0
|
||||||
|
|
||||||
|
while IFS=$'\t' read -r additions deletions file; do
|
||||||
|
# Skip if additions or deletions are "-" (binary files)
|
||||||
|
if [ "$additions" = "-" ] || [ "$deletions" = "-" ]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check if file should be excluded
|
||||||
|
case "$file" in
|
||||||
|
*.test.ts|*.test.tsx|*.spec.ts|*.spec.tsx|*.test.js|*.test.jsx|*.spec.js|*.spec.jsx)
|
||||||
|
excluded_files=$((excluded_files + 1))
|
||||||
|
continue
|
||||||
|
;;
|
||||||
|
*/locales/*.json|*/i18n/*.json)
|
||||||
|
excluded_files=$((excluded_files + 1))
|
||||||
|
continue
|
||||||
|
;;
|
||||||
|
pnpm-lock.yaml|package-lock.json|yarn.lock)
|
||||||
|
excluded_files=$((excluded_files + 1))
|
||||||
|
continue
|
||||||
|
;;
|
||||||
|
dist/*|coverage/*|build/*|node_modules/*|test-results/*|playwright-report/*|.next/*|*.tsbuildinfo)
|
||||||
|
excluded_files=$((excluded_files + 1))
|
||||||
|
continue
|
||||||
|
;;
|
||||||
|
*.stories.ts|*.stories.tsx|*.stories.js|*.stories.jsx)
|
||||||
|
excluded_files=$((excluded_files + 1))
|
||||||
|
continue
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
total_additions=$((total_additions + additions))
|
||||||
|
total_deletions=$((total_deletions + deletions))
|
||||||
|
counted_files=$((counted_files + 1))
|
||||||
|
done <<EOF
|
||||||
|
${diff_output}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
total_changes=$((total_additions + total_deletions))
|
||||||
|
|
||||||
|
echo "counted_files=${counted_files}" >> "${GITHUB_OUTPUT}"
|
||||||
|
echo "excluded_files=${excluded_files}" >> "${GITHUB_OUTPUT}"
|
||||||
|
echo "total_additions=${total_additions}" >> "${GITHUB_OUTPUT}"
|
||||||
|
echo "total_deletions=${total_deletions}" >> "${GITHUB_OUTPUT}"
|
||||||
|
echo "total_changes=${total_changes}" >> "${GITHUB_OUTPUT}"
|
||||||
|
|
||||||
|
# Set flag if PR is too large (> 800 lines)
|
||||||
|
if [ ${total_changes} -gt 800 ]; then
|
||||||
|
echo "is_too_large=true" >> "${GITHUB_OUTPUT}"
|
||||||
|
else
|
||||||
|
echo "is_too_large=false" >> "${GITHUB_OUTPUT}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Comment on PR if too large
|
||||||
|
if: steps.check-size.outputs.is_too_large == 'true'
|
||||||
|
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||||
|
with:
|
||||||
|
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
script: |
|
||||||
|
const totalChanges = ${{ steps.check-size.outputs.total_changes }};
|
||||||
|
const countedFiles = ${{ steps.check-size.outputs.counted_files }};
|
||||||
|
const excludedFiles = ${{ steps.check-size.outputs.excluded_files }};
|
||||||
|
const additions = ${{ steps.check-size.outputs.total_additions }};
|
||||||
|
const deletions = ${{ steps.check-size.outputs.total_deletions }};
|
||||||
|
|
||||||
|
const body = `## 🚨 PR Size Warning
|
||||||
|
|
||||||
|
This PR has approximately **${totalChanges} lines** of changes (${additions} additions, ${deletions} deletions across ${countedFiles} files).
|
||||||
|
|
||||||
|
Large PRs (>800 lines) are significantly harder to review and increase the chance of merge conflicts. Consider splitting this into smaller, self-contained PRs.
|
||||||
|
|
||||||
|
### 💡 Suggestions:
|
||||||
|
- **Split by feature or module** - Break down into logical, independent pieces
|
||||||
|
- **Create a sequence of PRs** - Each building on the previous one
|
||||||
|
- **Branch off PR branches** - Don't wait for reviews to continue dependent work
|
||||||
|
|
||||||
|
### 📊 What was counted:
|
||||||
|
- ✅ Source files, stylesheets, configuration files
|
||||||
|
- ❌ Excluded ${excludedFiles} files (tests, locales, locks, generated files)
|
||||||
|
|
||||||
|
### 📚 Guidelines:
|
||||||
|
- **Ideal:** 300-500 lines per PR
|
||||||
|
- **Warning:** 500-800 lines
|
||||||
|
- **Critical:** 800+ lines ⚠️
|
||||||
|
|
||||||
|
If this large PR is unavoidable (e.g., migration, dependency update, major refactor), please explain in the PR description why it couldn't be split.`;
|
||||||
|
|
||||||
|
// Check if we already commented
|
||||||
|
const { data: comments } = await github.rest.issues.listComments({
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
issue_number: context.issue.number,
|
||||||
|
});
|
||||||
|
|
||||||
|
const botComment = comments.find(comment =>
|
||||||
|
comment.user.type === 'Bot' &&
|
||||||
|
comment.body.includes('🚨 PR Size Warning')
|
||||||
|
);
|
||||||
|
|
||||||
|
if (botComment) {
|
||||||
|
// Update existing comment
|
||||||
|
await github.rest.issues.updateComment({
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
comment_id: botComment.id,
|
||||||
|
body: body
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// Create new comment
|
||||||
|
await github.rest.issues.createComment({
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
issue_number: context.issue.number,
|
||||||
|
body: body
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
4
.github/workflows/pr.yml
vendored
@@ -10,8 +10,6 @@ permissions:

 on:
   pull_request:
-    branches:
-      - main
   merge_group:
   workflow_dispatch:

@@ -51,7 +49,7 @@ jobs:
       statuses: write
     steps:
       - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481
+        uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0
         with:
           egress-policy: audit
       - name: fail if conditional jobs failed
56
.github/workflows/release-changesets.yml
vendored
@@ -1,56 +0,0 @@
-name: Release Changesets
-
-on:
-  workflow_dispatch:
-  #push:
-  #  branches:
-  #    - main
-
-permissions:
-  contents: write
-  pull-requests: write
-  packages: write
-
-concurrency: ${{ github.workflow }}-${{ github.ref }}
-
-env:
-  TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
-  TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
-
-jobs:
-  release:
-    name: Release
-    runs-on: ubuntu-latest
-    timeout-minutes: 15
-    env:
-      TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
-      TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
-    steps:
-      - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
-        with:
-          egress-policy: audit
-
-      - name: Checkout Repo
-        uses: actions/checkout@ee0669bd1cc54295c223e0bb666b733df41de1c5 # v2.7.0
-
-      - name: Setup Node.js 18.x
-        uses: actions/setup-node@7c12f8017d5436eb855f1ed4399f037a36fbd9e8 # v2.5.2
-        with:
-          node-version: 18.x
-
-      - name: Install pnpm
-        uses: pnpm/action-setup@c3b53f6a16e57305370b4ae5a540c2077a1d50dd # v2.2.4
-
-      - name: Install Dependencies
-        run: pnpm install --config.platform=linux --config.architecture=x64
-
-      - name: Create Release Pull Request or Publish to npm
-        id: changesets
-        uses: changesets/action@c8bada60c408975afd1a20b3db81d6eee6789308 # v1.4.9
-        with:
-          # This expects you to have a script called release which does a build for your packages and calls changeset publish
-          publish: pnpm release
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
@@ -1,99 +1,50 @@
-name: Docker Release to Github Experimental
+name: Build Community Testing Images
 
-# This workflow uses actions that are not certified by GitHub.
-# They are provided by a third-party and are governed by
-# separate terms of service, privacy policy, and support
-# documentation.
+# This workflow builds experimental/testing versions of Formbricks for self-hosting customers
+# to test fixes and features before official releases. Images are pushed to GHCR with
+# timestamped experimental versions for easy identification and testing.
 
 on:
   workflow_dispatch:
-env:
-  # Use docker.io for Docker Hub if empty
-  REGISTRY: ghcr.io
-  # github.repository as <account>/<repo>
-  IMAGE_NAME: ${{ github.repository }}-experimental
-  TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
-  TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
-  DATABASE_URL: "postgresql://postgres:postgres@localhost:5432/formbricks?schema=public"
+    inputs:
+      version_override:
+        description: "Override version (SemVer only, e.g., 1.2.3-beta). Leave empty for auto-generated experimental version."
+        required: false
+        type: string
 
 permissions:
   contents: read
+  packages: write
+  id-token: write
 
 jobs:
-  build:
+  build-community-testing:
+    name: Build Community Testing Image
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      packages: write
-      # This is used to complete the identity challenge
-      # with sigstore/fulcio when running outside of PRs.
-      id-token: write
+    timeout-minutes: 45
 
     steps:
       - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
+        uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
         with:
           egress-policy: audit
 
       - name: Checkout repository
-        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
-      - name: Set up Depot CLI
-        uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
-
-      # Install the cosign tool except on PR
-      # https://github.com/sigstore/cosign-installer
-      - name: Install cosign
-        if: github.event_name != 'pull_request'
-        uses: sigstore/cosign-installer@59acb6260d9c0ba8f4a2f9d9b48431a222b68e20 # v3.5.0
-
-      # Login against a Docker registry except on PR
-      # https://github.com/docker/login-action
-      - name: Log into registry ${{ env.REGISTRY }}
-        if: github.event_name != 'pull_request'
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
         with:
-          registry: ${{ env.REGISTRY }}
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
+          fetch-depth: 0
 
-      # Extract metadata (tags, labels) for Docker
-      # https://github.com/docker/metadata-action
-      - name: Extract Docker metadata
-        id: meta
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
+      - name: Build and push community testing image
+        uses: ./.github/actions/build-and-push-docker
         with:
-          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
-
-      # Build and push Docker image with Buildx (don't push on PR)
-      # https://github.com/docker/build-push-action
-      - name: Build and push Docker image
-        id: build-and-push
-        uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
-        with:
-          project: tw0fqmsx3c
-          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
-          context: .
-          file: ./apps/web/Dockerfile
-          platforms: linux/amd64,linux/arm64
-          push: ${{ github.event_name != 'pull_request' }}
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
+          registry_type: "ghcr"
+          ghcr_image_name: "${{ github.repository }}-experimental"
+          experimental_mode: "true"
+          version: ${{ inputs.version_override }}
 
-      # Sign the resulting Docker image digest except on PRs.
-      # This will only write to the public Rekor transparency log when the Docker
-      # repository is public to avoid leaking data. If you would like to publish
-      # transparency data even for private images, pass --force to cosign below.
-      # https://github.com/sigstore/cosign
-      - name: Sign the published Docker image
-        if: ${{ github.event_name != 'pull_request' }}
         env:
-          # https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable
-          TAGS: ${{ steps.meta.outputs.tags }}
-          DIGEST: ${{ steps.build-and-push.outputs.digest }}
-          # This step uses the identity token to provision an ephemeral certificate
-          # against the sigstore community Fulcio instance.
-        run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          DEPOT_PROJECT_TOKEN: ${{ secrets.DEPOT_PROJECT_TOKEN }}
+          DUMMY_DATABASE_URL: ${{ secrets.DUMMY_DATABASE_URL }}
+          DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
+          DUMMY_REDIS_URL: ${{ secrets.DUMMY_REDIS_URL }}
+          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
133
.github/workflows/release-docker-github.yml
vendored
@@ -1,4 +1,4 @@
-name: Docker Release to Github
+name: Release Community Docker Images
 
 # This workflow uses actions that are not certified by GitHub.
 # They are provided by a third-party and are governed by
@@ -7,6 +7,17 @@ name: Docker Release to Github
 
 on:
   workflow_call:
+    inputs:
+      IS_PRERELEASE:
+        description: "Whether this is a prerelease (affects latest tag)"
+        required: false
+        type: boolean
+        default: false
+      MAKE_LATEST:
+        description: "Whether to tag as latest (from GitHub release 'Set as the latest release' option)"
+        required: false
+        type: boolean
+        default: false
     outputs:
       VERSION:
         description: release version
@@ -17,9 +28,6 @@ env:
   REGISTRY: ghcr.io
   # github.repository as <account>/<repo>
   IMAGE_NAME: ${{ github.repository }}
-  TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
-  TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
-  DATABASE_URL: "postgresql://postgres:postgres@localhost:5432/formbricks?schema=public"
 
 permissions:
   contents: read
@@ -27,93 +35,74 @@ permissions:
 jobs:
   build:
     runs-on: ubuntu-latest
+    timeout-minutes: 45
     permissions:
       contents: read
       packages: write
+      id-token: write
       # This is used to complete the identity challenge
       # with sigstore/fulcio when running outside of PRs.
-      id-token: write
 
     outputs:
       VERSION: ${{ steps.extract_release_tag.outputs.VERSION }}
 
     steps:
       - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
+        uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
         with:
           egress-policy: audit
 
       - name: Checkout repository
-        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
-      - name: Get Release Tag
+      - name: Extract release version from tag
         id: extract_release_tag
         run: |
-          TAG=${{ github.ref }}
-          TAG=${TAG#refs/tags/v}
-          echo "RELEASE_TAG=$TAG" >> $GITHUB_ENV
+          set -euo pipefail
+
+          # Extract tag name with fallback logic for different trigger contexts
+          if [[ -n "${RELEASE_TAG:-}" ]]; then
+            TAG="$RELEASE_TAG"
+            echo "Using RELEASE_TAG override: $TAG"
+          elif [[ "$GITHUB_REF_NAME" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]] || [[ "$GITHUB_REF_NAME" =~ ^v[0-9] ]]; then
+            TAG="$GITHUB_REF_NAME"
+            echo "Using GITHUB_REF_NAME (looks like tag): $TAG"
+          else
+            # Fallback: extract from GITHUB_REF for direct tag triggers
+            TAG="${GITHUB_REF#refs/tags/}"
+            if [[ -z "$TAG" || "$TAG" == "$GITHUB_REF" ]]; then
+              TAG="$GITHUB_REF_NAME"
+              echo "Using GITHUB_REF_NAME as final fallback: $TAG"
+            else
+              echo "Extracted from GITHUB_REF: $TAG"
+            fi
+          fi
+
+          # Strip v-prefix if present (normalize to clean SemVer)
+          TAG=${TAG#[vV]}
+
+          # Validate SemVer format (supports prereleases like 4.0.0-rc.1)
+          if [[ ! "$TAG" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]]; then
+            echo "ERROR: Invalid tag format '$TAG'. Expected SemVer (e.g., 1.2.3, 4.0.0-rc.1)"
+            exit 1
+          fi
+
           echo "VERSION=$TAG" >> $GITHUB_OUTPUT
+          echo "Using version: $TAG"
 
-      - name: Update package.json version
-        run: |
-          sed -i "s/\"version\": \"0.0.0\"/\"version\": \"${{ env.RELEASE_TAG }}\"/" ./apps/web/package.json
-          cat ./apps/web/package.json | grep version
-
-      - name: Set up Depot CLI
-        uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0
-
-      # Install the cosign tool except on PR
-      # https://github.com/sigstore/cosign-installer
-      - name: Install cosign
-        if: github.event_name != 'pull_request'
-        uses: sigstore/cosign-installer@59acb6260d9c0ba8f4a2f9d9b48431a222b68e20 # v3.5.0
-
-      # Login against a Docker registry except on PR
-      # https://github.com/docker/login-action
-      - name: Log into registry ${{ env.REGISTRY }}
-        if: github.event_name != 'pull_request'
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
+      - name: Build and push community release image
+        id: build
+        uses: ./.github/actions/build-and-push-docker
         with:
-          registry: ${{ env.REGISTRY }}
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
+          registry_type: "ghcr"
+          ghcr_image_name: ${{ env.IMAGE_NAME }}
+          version: ${{ steps.extract_release_tag.outputs.VERSION }}
+          is_prerelease: ${{ inputs.IS_PRERELEASE }}
+          make_latest: ${{ inputs.MAKE_LATEST }}
 
-      # Extract metadata (tags, labels) for Docker
-      # https://github.com/docker/metadata-action
-      - name: Extract Docker metadata
-        id: meta
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
-        with:
-          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
-
-      # Build and push Docker image with Buildx (don't push on PR)
-      # https://github.com/docker/build-push-action
-      - name: Build and push Docker image
-        id: build-and-push
-        uses: depot/build-push-action@636daae76684e38c301daa0c5eca1c095b24e780 # v1.14.0
-        with:
-          project: tw0fqmsx3c
-          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
-          context: .
-          file: ./apps/web/Dockerfile
-          platforms: linux/amd64,linux/arm64
-          push: ${{ github.event_name != 'pull_request' }}
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
-
-      # Sign the resulting Docker image digest except on PRs.
-      # This will only write to the public Rekor transparency log when the Docker
-      # repository is public to avoid leaking data. If you would like to publish
-      # transparency data even for private images, pass --force to cosign below.
-      # https://github.com/sigstore/cosign
-      - name: Sign the published Docker image
-        if: ${{ github.event_name != 'pull_request' }}
         env:
-          # https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable
-          TAGS: ${{ steps.meta.outputs.tags }}
-          DIGEST: ${{ steps.build-and-push.outputs.digest }}
-          # This step uses the identity token to provision an ephemeral certificate
-          # against the sigstore community Fulcio instance.
-        run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          DEPOT_PROJECT_TOKEN: ${{ secrets.DEPOT_PROJECT_TOKEN }}
+          DUMMY_DATABASE_URL: ${{ secrets.DUMMY_DATABASE_URL }}
+          DUMMY_ENCRYPTION_KEY: ${{ secrets.DUMMY_ENCRYPTION_KEY }}
+          DUMMY_REDIS_URL: ${{ secrets.DUMMY_REDIS_URL }}
+          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
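For reference, the new extraction step normalizes several trigger shapes (workflow_dispatch override, release refs, direct tag pushes) to the same clean SemVer. A quick local approximation of that normalization, with illustrative inputs only, is:

# Rough local approximation of the tag normalization above (sample refs are illustrative)
for ref in v4.0.0-rc.1 4.0.0 refs/tags/v3.12.0; do
  TAG="${ref#refs/tags/}"   # drop a possible refs/tags/ prefix
  TAG="${TAG#[vV]}"         # strip a leading v/V
  # each ref that validates prints "<input> -> <clean version>"
  [[ "$TAG" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$ ]] && echo "$ref -> $TAG"
done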
55
.github/workflows/release-helm-chart.yml
vendored
@@ -4,7 +4,7 @@ on:
 workflow_call:
   inputs:
     VERSION:
-      description: 'The version of the Helm chart to release'
+      description: "The version of the Helm chart to release"
       required: true
       type: string
 
@@ -19,15 +19,30 @@ jobs:
     contents: read
     steps:
       - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
+        uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
         with:
           egress-policy: audit
 
       - name: Checkout repository
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
-      - name: Extract release version
-        run: echo "VERSION=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
+      - name: Validate input version
+        env:
+          INPUT_VERSION: ${{ inputs.VERSION }}
+        run: |
+          set -euo pipefail
+          # Validate input version format (expects clean semver without 'v' prefix)
+          if [[ ! "$INPUT_VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$ ]]; then
+            echo "❌ Error: Invalid version format. Must be clean semver (e.g., 1.2.3, 1.2.3-alpha)"
+            echo "Expected: clean version without 'v' prefix"
+            echo "Provided: $INPUT_VERSION"
+            exit 1
+          fi
+
+          # Store validated version in environment variable
+          echo "VERSION<<EOF" >> $GITHUB_ENV
+          echo "$INPUT_VERSION" >> $GITHUB_ENV
+          echo "EOF" >> $GITHUB_ENV
 
       - name: Set up Helm
         uses: azure/setup-helm@5119fcb9089d432beecbf79bb2c7915207344b78 # v3.5
@@ -35,20 +50,44 @@ jobs:
           version: latest
 
       - name: Log in to GitHub Container Registry
-        run: echo "${{ secrets.GITHUB_TOKEN }}" | helm registry login ghcr.io --username ${{ github.actor }} --password-stdin
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GITHUB_ACTOR: ${{ github.actor }}
+        run: printf '%s' "$GITHUB_TOKEN" | helm registry login ghcr.io --username "$GITHUB_ACTOR" --password-stdin
 
       - name: Install YQ
         uses: dcarbone/install-yq-action@4075b4dca348d74bd83f2bf82d30f25d7c54539b # v1.3.1
 
       - name: Update Chart.yaml with new version
+        env:
+          VERSION: ${{ env.VERSION }}
         run: |
-          yq -i ".version = \"${{ inputs.VERSION }}\"" helm-chart/Chart.yaml
-          yq -i ".appVersion = \"v${{ inputs.VERSION }}\"" helm-chart/Chart.yaml
+          set -euo pipefail
+          echo "Updating Chart.yaml with version: ${VERSION}"
+          yq -i ".version = \"${VERSION}\"" helm-chart/Chart.yaml
+          yq -i ".appVersion = \"${VERSION}\"" helm-chart/Chart.yaml
+
+          echo "✅ Successfully updated Chart.yaml"
 
       - name: Package Helm chart
+        env:
+          VERSION: ${{ env.VERSION }}
         run: |
+          set -euo pipefail
+
+          echo "Packaging Helm chart version: ${VERSION}"
           helm package ./helm-chart
+
+          echo "✅ Successfully packaged formbricks-${VERSION}.tgz"
 
       - name: Push Helm chart to GitHub Container Registry
+        env:
+          VERSION: ${{ env.VERSION }}
         run: |
-          helm push formbricks-${{ inputs.VERSION }}.tgz oci://ghcr.io/formbricks/helm-charts
+          set -euo pipefail
+
+          echo "Pushing Helm chart to registry: formbricks-${VERSION}.tgz"
+          helm push "formbricks-${VERSION}.tgz" oci://ghcr.io/formbricks/helm-charts
+
+          echo "✅ Successfully pushed Helm chart to registry"
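Once pushed, the chart can be consumed straight from the OCI registry shown above; a usage sketch (release name and version are placeholders, not taken from this diff):

# Install or upgrade Formbricks from the chart pushed to ghcr.io (version is an example)
helm install formbricks oci://ghcr.io/formbricks/helm-charts/formbricks --version 1.2.3
# Or fetch the packaged chart locally for inspection
helm pull oci://ghcr.io/formbricks/helm-charts/formbricks --version 1.2.3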
81
.github/workflows/scorecard.yml
vendored
@@ -1,81 +0,0 @@
-# This workflow uses actions that are not certified by GitHub. They are provided
-# by a third-party and are governed by separate terms of service, privacy
-# policy, and support documentation.
-
-name: Scorecard supply-chain security
-on:
-  # For Branch-Protection check. Only the default branch is supported. See
-  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
-  branch_protection_rule:
-  # To guarantee Maintained check is occasionally updated. See
-  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
-  schedule:
-    - cron: "17 17 * * 6"
-  push:
-    branches: ["main"]
-  workflow_dispatch:
-
-# Declare default permissions as read only.
-permissions: read-all
-
-jobs:
-  analysis:
-    name: Scorecard analysis
-    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload the results to code-scanning dashboard.
-      security-events: write
-      # Needed to publish results and get a badge (see publish_results below).
-      id-token: write
-      # Add this permission
-      actions: write # Required for artifact upload
-      # Uncomment the permissions below if installing in a private repository.
-      # contents: read
-      # actions: read
-
-    steps:
-      - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481
-        with:
-          egress-policy: audit
-
-      - name: "Checkout code"
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-        with:
-          persist-credentials: false
-
-      - name: "Run analysis"
-        uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
-        with:
-          results_file: results.sarif
-          results_format: sarif
-          # (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
-          # - you want to enable the Branch-Protection check on a *public* repository, or
-          # - you are installing Scorecard on a *private* repository
-          # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action?tab=readme-ov-file#authentication-with-fine-grained-pat-optional.
-          # repo_token: ${{ secrets.SCORECARD_TOKEN }}
-
-          # Public repositories:
-          # - Publish results to OpenSSF REST API for easy access by consumers
-          # - Allows the repository to include the Scorecard badge.
-          # - See https://github.com/ossf/scorecard-action#publishing-results.
-          # For private repositories:
-          # - `publish_results` will always be set to `false`, regardless
-          #   of the value entered here.
-          publish_results: true
-
-      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
-      # format to the repository Actions tab.
-      - name: "Upload artifact"
-        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
-        with:
-          name: sarif
-          path: results.sarif
-          retention-days: 5
-
-      # Upload the results to GitHub's code scanning dashboard (optional).
-      # Commenting out will disable upload of results to your repo's Code Scanning dashboard
-      - name: "Upload to code-scanning"
-        uses: github/codeql-action/upload-sarif@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v3.28.10
-        with:
-          sarif_file: results.sarif
12
.github/workflows/semantic-pull-requests.yml
vendored
@@ -17,7 +17,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
+        uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
         with:
           egress-policy: audit
 
@@ -40,7 +40,7 @@ jobs:
             revert
             ossgg
 
-      - uses: marocchino/sticky-pull-request-comment@52423e01640425a022ef5fd42c6fb5f633a02728 # v2.9.1
+      - uses: marocchino/sticky-pull-request-comment@67d0dec7b07ed060a405f9b2a64b8ab319fdd7db # v2.9.2
         # When the previous steps fails, the workflow would stop. By adding this
         # condition you can continue the execution with the populated error message.
         if: always() && (steps.lint_pr_title.outputs.error_message != null)
@@ -56,11 +56,3 @@ jobs:
           ```
           ${{ steps.lint_pr_title.outputs.error_message }}
           ```
 
-      # Delete a previous comment when the issue has been resolved
-      - if: ${{ steps.lint_pr_title.outputs.error_message == null }}
-        uses: marocchino/sticky-pull-request-comment@52423e01640425a022ef5fd42c6fb5f633a02728 # v2.9.1
-        with:
-          header: pr-title-lint-error
-          message: |
-            Thank you for following the naming conventions for pull request titles! 🙏
11
.github/workflows/sonarqube.yml
vendored
@@ -15,7 +15,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
+        uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
         with:
           egress-policy: audit
 
@@ -23,13 +23,13 @@ jobs:
         with:
           fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
 
-      - name: Setup Node.js 20.x
+      - name: Setup Node.js 22.x
         uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af
         with:
-          node-version: 20.x
+          node-version: 22.x
 
       - name: Install pnpm
-        uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2
+        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
 
       - name: Install dependencies
         run: pnpm install --config.platform=linux --config.architecture=x64
@@ -43,12 +43,13 @@ jobs:
           sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
           sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
           sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
+          sed -i "s|REDIS_URL=.*|REDIS_URL=|" .env
 
       - name: Run tests with coverage
         run: |
           pnpm test:coverage
       - name: SonarQube Scan
-        uses: SonarSource/sonarqube-scan-action@bfd4e558cda28cda6b5defafb9232d191be8c203
+        uses: SonarSource/sonarqube-scan-action@2500896589ef8f7247069a56136f8dc177c27ccf
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
           SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
75
.github/workflows/terrafrom-plan-and-apply.yml
vendored
@@ -1,75 +0,0 @@
-name: 'Terraform'
-
-on:
-  workflow_dispatch:
-  # TODO: enable it back when migration is completed.
-  # push:
-  #   branches:
-  #     - main
-  # pull_request:
-  #   branches:
-  #     - main
-
-permissions:
-  id-token: write
-  contents: write
-
-jobs:
-  terraform:
-    runs-on: ubuntu-latest
-    env:
-      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-    steps:
-      - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
-        with:
-          egress-policy: audit
-
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
-      - name: Configure AWS Credentials
-        uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
-        with:
-          role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }}
-          aws-region: "eu-central-1"
-
-      - name: Setup Terraform
-        uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3.1.2
-
-      - name: Terraform Format
-        id: fmt
-        run: terraform fmt -check -recursive
-        continue-on-error: true
-        working-directory: infra/terraform
-
-      - name: Terraform Init
-        id: init
-        run: terraform init
-        working-directory: infra/terraform
-
-      - name: Terraform Validate
-        id: validate
-        run: terraform validate
-        working-directory: infra/terraform
-
-      - name: Terraform Plan
-        id: plan
-        run: terraform plan -out .planfile
-        working-directory: infra/terraform
-
-      # - name: Post PR comment
-      #   uses: borchero/terraform-plan-comment@3399d8dbae8b05185e815e02361ede2949cd99c4 # v2.4.0
-      #   if: always() && github.ref != 'refs/heads/main' && (steps.validate.outcome == 'success' || steps.validate.outcome == 'failure')
-      #   with:
-      #     token: ${{ github.token }}
-      #     planfile: .planfile
-      #     working-directory: "infra/terraform"
-      #     skip-comment: true
-
-      - name: Terraform Apply
-        id: apply
-        # if: github.ref == 'refs/heads/main' && github.event_name == 'push'
-        run: terraform apply .planfile
-        working-directory: "infra/terraform"
5
.github/workflows/test.yml
vendored
@@ -14,11 +14,11 @@ jobs:
 
     steps:
       - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
+        uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
         with:
           egress-policy: audit
 
-      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - uses: ./.github/actions/dangerous-git-checkout
 
       - name: Setup Node.js 20.x
@@ -41,6 +41,7 @@ jobs:
           sed -i "s/ENCRYPTION_KEY=.*/ENCRYPTION_KEY=${RANDOM_KEY}/" .env
           sed -i "s/CRON_SECRET=.*/CRON_SECRET=${RANDOM_KEY}/" .env
           sed -i "s/NEXTAUTH_SECRET=.*/NEXTAUTH_SECRET=${RANDOM_KEY}/" .env
+          sed -i "s|REDIS_URL=.*|REDIS_URL=|" .env
 
       - name: Test
         run: pnpm test
51
.github/workflows/tolgee-missing-key-check.yml
vendored
@@ -1,51 +0,0 @@
-name: Check Missing Translations
-
-permissions:
-  contents: read
-
-on:
-  workflow_dispatch:
-  pull_request_target:
-    types: [opened, synchronize, reopened]
-
-jobs:
-  check-missing-translations:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481
-        with:
-          egress-policy: audit
-
-      - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-        with:
-          ref: ${{ github.event.pull_request.base.ref }}
-
-      - name: Checkout PR
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-        with:
-          ref: ${{ github.event.pull_request.head.sha }}
-
-      - name: Setup Node.js
-        uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
-        with:
-          node-version: 18
-
-      - name: Install Tolgee CLI
-        run: npm install -g @tolgee/cli
-
-      - name: Compare Tolgee Keys
-        id: compare
-        run: |
-          tolgee compare --api-key ${{ secrets.TOLGEE_API_KEY }} > compare_output.txt
-          cat compare_output.txt
-
-      - name: Check for Missing Translations
-        run: |
-          if grep -q "new key found" compare_output.txt; then
-            echo "New keys found that may require translations:"
-            exit 1
-          else
-            echo "No new keys found."
-          fi
87
.github/workflows/tolgee.yml
vendored
@@ -1,87 +0,0 @@
-name: Tolgee Tagging on PR Merge
-permissions:
-  contents: read
-
-on:
-  pull_request_target:
-    types: [closed]
-    branches:
-      - main
-
-jobs:
-  tag-production-keys:
-    name: Tag Production Keys
-    runs-on: ubuntu-latest
-    if: github.event.pull_request.merged == true
-
-    steps:
-      - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
-        with:
-          egress-policy: audit
-
-      - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          fetch-depth: 0 # This ensures we get the full git history
-
-      - name: Get source branch name
-        id: branch-name
-        run: |
-          RAW_BRANCH="${{ github.head_ref }}"
-          SOURCE_BRANCH=$(echo "$RAW_BRANCH" | sed 's/[^a-zA-Z0-9._\/-]//g')
-
-          # Safely add to environment variables using GitHub's recommended method
-          # This prevents environment variable injection attacks
-          echo "SOURCE_BRANCH<<EOF" >> $GITHUB_ENV
-          echo "$SOURCE_BRANCH" >> $GITHUB_ENV
-          echo "EOF" >> $GITHUB_ENV
-
-          echo "Detected source branch: $SOURCE_BRANCH"
-
-      - name: Setup Node.js
-        uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
-        with:
-          node-version: 18 # Ensure compatibility with your project
-
-      - name: Install Tolgee CLI
-        run: npm install -g @tolgee/cli
-
-      - name: Tag Production Keys
-        run: |
-          npx tolgee tag \
-            --api-key ${{ secrets.TOLGEE_API_KEY }} \
-            --filter-extracted \
-            --filter-tag "draft:${SOURCE_BRANCH}" \
-            --tag production \
-            --untag "draft:${SOURCE_BRANCH}"
-
-      - name: Tag unused production keys as Deprecated
-        run: |
-          npx tolgee tag \
-            --api-key ${{ secrets.TOLGEE_API_KEY }} \
-            --filter-not-extracted --filter-tag production \
-            --tag deprecated --untag production
-
-      - name: Tag unused draft:current-branch keys as Deprecated
-        run: |
-          npx tolgee tag \
-            --api-key ${{ secrets.TOLGEE_API_KEY }} \
-            --filter-not-extracted --filter-tag "draft:${SOURCE_BRANCH}" \
-            --tag deprecated --untag "draft:${SOURCE_BRANCH}"
-
-      - name: Sync with backup
-        run: |
-          npx tolgee sync \
-            --api-key ${{ secrets.TOLGEE_API_KEY }} \
-            --backup ./tolgee-backup \
-            --continue-on-warning \
-            --yes
-
-      - name: Upload backup as artifact
-        uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
-        with:
-          name: tolgee-backup-${{ github.sha }}
-          path: ./tolgee-backup
-          retention-days: 90
63
.github/workflows/translation-check.yml
vendored
Normal file
@@ -0,0 +1,63 @@
+name: Translation Validation
+
+permissions:
+  contents: read
+
+on:
+  pull_request:
+    types: [opened, synchronize, reopened]
+    paths:
+      - "apps/web/**/*.ts"
+      - "apps/web/**/*.tsx"
+      - "apps/web/locales/**/*.json"
+      - "scan-translations.ts"
+  push:
+    branches:
+      - main
+    paths:
+      - "apps/web/**/*.ts"
+      - "apps/web/**/*.tsx"
+      - "apps/web/locales/**/*.json"
+      - "scan-translations.ts"
+
+jobs:
+  validate-translations:
+    name: Validate Translation Keys
+    runs-on: ubuntu-latest
+    timeout-minutes: 10
+
+    steps:
+      - name: Harden the runner (Audit all outbound calls)
+        uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
+        with:
+          egress-policy: audit
+
+      - name: Checkout repository
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      - name: Setup Node.js
+        uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
+        with:
+          node-version: 18
+
+      - name: Setup pnpm
+        uses: pnpm/action-setup@a3252b78c470c02df07e9d59298aecedc3ccdd6d # v3.0.0
+        with:
+          version: 9.15.9
+
+      - name: Install dependencies
+        run: pnpm install --frozen-lockfile
+
+      - name: Validate translation keys
+        run: |
+          echo ""
+          echo "🔍 Validating translation keys..."
+          echo ""
+          pnpm run scan-translations
+
+      - name: Summary
+        if: success()
+        run: |
+          echo ""
+          echo "✅ Translation validation completed successfully!"
+          echo ""
32
.github/workflows/welcome-new-contributors.yml
vendored
@@ -1,32 +0,0 @@
-name: "Welcome new contributors"
-
-on:
-  issues:
-    types: opened
-  pull_request_target:
-    types: opened
-
-permissions:
-  pull-requests: write
-  issues: write
-
-jobs:
-  welcome-message:
-    name: Welcoming New Users
-    runs-on: ubuntu-latest
-    timeout-minutes: 10
-    if: github.event.action == 'opened'
-    steps:
-      - name: Harden the runner (Audit all outbound calls)
-        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481
-        with:
-          egress-policy: audit
-
-      - uses: actions/first-interaction@3c71ce730280171fd1cfb57c00c774f8998586f7 # v1
-        with:
-          repo-token: ${{ secrets.GITHUB_TOKEN }}
-          pr-message: |-
-            Thank you so much for making your first Pull Request and taking the time to improve Formbricks! 🚀🙏❤️
-            Feel free to join the conversation on [Github Discussions](https://github.com/formbricks/formbricks/discussions) if you need any help or have any questions. 😊
-          issue-message: |
-            Thank you for opening your first issue! 🙏❤️ One of our team members will review it and get back to you as soon as it possible. 😊
16
.gitignore
vendored
@@ -56,19 +56,9 @@ packages/database/migrations
 branch.json
 .vercel
 
-# Terraform
-infra/terraform/.terraform/
-**/.terraform.lock.hcl
-**/terraform.tfstate
-**/terraform.tfstate.*
-**/crash.log
-**/override.tf
-**/override.tf.json
-**/*.tfvars
-**/*.tfvars.json
-**/.terraformrc
-**/terraform.rc
 
 # IntelliJ IDEA
 /.idea/
 /*.iml
+packages/ios/FormbricksSDK/FormbricksSDK.xcodeproj/project.xcworkspace/xcuserdata
+.cursorrules
+i18n.cache
@@ -10,12 +10,34 @@ fi
 
 pnpm lint-staged
 
-# Run tolgee-pull if branch.json exists and NEXT_PUBLIC_TOLGEE_API_KEY is not set
-if [ -f branch.json ]; then
-  if [ -z "$NEXT_PUBLIC_TOLGEE_API_KEY" ]; then
-    echo "Skipping tolgee-pull: NEXT_PUBLIC_TOLGEE_API_KEY is not set"
+# Run Lingo.dev i18n workflow if LINGODOTDEV_API_KEY is set
+if [ -n "$LINGODOTDEV_API_KEY" ]; then
+  echo ""
+  echo "🌍 Running Lingo.dev translation workflow..."
+  echo ""
+
+  # Run translation generation and validation
+  if pnpm run i18n; then
+    echo ""
+    echo "✅ Translation validation passed"
+    echo ""
+    # Add updated locale files to git
+    git add apps/web/locales/*.json
   else
-    pnpm run tolgee-pull
-    git add packages/lib/messages
+    echo ""
+    echo "❌ Translation validation failed!"
+    echo ""
+    echo "Please fix the translation issues above before committing:"
+    echo "  • Add missing translation keys to your locale files"
+    echo "  • Remove unused translation keys"
+    echo ""
+    echo "Or run 'pnpm i18n' to see the detailed report"
+    echo ""
+    exit 1
   fi
+else
+  echo ""
+  echo "⚠️ Skipping translation validation: LINGODOTDEV_API_KEY is not set"
+  echo "   (This is expected for community contributors)"
+  echo ""
 fi
@@ -1,39 +0,0 @@
-{
-  "$schema": "https://docs.tolgee.io/cli-schema.json",
-  "format": "JSON_TOLGEE",
-  "patterns": ["./apps/web/**/*.ts?(x)"],
-  "projectId": 10304,
-  "pull": {
-    "path": "./packages/lib/messages"
-  },
-  "push": {
-    "files": [
-      {
-        "language": "en-US",
-        "path": "./packages/lib/messages/en-US.json"
-      },
-      {
-        "language": "de-DE",
-        "path": "./packages/lib/messages/de-DE.json"
-      },
-      {
-        "language": "fr-FR",
-        "path": "./packages/lib/messages/fr-FR.json"
-      },
-      {
-        "language": "pt-BR",
-        "path": "./packages/lib/messages/pt-BR.json"
-      },
-      {
-        "language": "zh-Hant-TW",
-        "path": "./packages/lib/messages/zh-Hant-TW.json"
-      },
-      {
-        "language": "pt-PT",
-        "path": "./packages/lib/messages/pt-PT.json"
-      }
-    ],
-    "forceMode": "OVERRIDE"
-  },
-  "strictNamespace": false
-}
14
.vscode/settings.json
vendored
@@ -1,4 +1,16 @@
 {
+  "eslint.validate": ["javascript", "javascriptreact", "typescript", "typescriptreact"],
+  "eslint.workingDirectories": [
+    {
+      "mode": "auto"
+    }
+  ],
+  "javascript.updateImportsOnFileMove.enabled": "always",
+  "sonarlint.connectedMode.project": {
+    "connectionId": "formbricks",
+    "projectKey": "formbricks_formbricks"
+  },
   "typescript.preferences.importModuleSpecifier": "non-relative",
-  "typescript.tsdk": "node_modules/typescript/lib"
+  "typescript.tsdk": "node_modules/typescript/lib",
+  "typescript.updateImportsOnFileMove.enabled": "always"
 }
28
AGENTS.md
Normal file
@@ -0,0 +1,28 @@
+# Repository Guidelines
+
+## Project Structure & Module Organization
+
+Formbricks runs as a pnpm/turbo monorepo. `apps/web` is the Next.js product surface, with feature modules under `app/` and `modules/`, assets in `public/` and `images/`, and Playwright specs in `apps/web/playwright/`. `apps/storybook` renders reusable UI pieces for review. Shared logic lives in `packages/*`: `database` (Prisma schemas/migrations), `surveys`, `js-core`, `types`, plus linting and TypeScript presets (`config-*`). Deployment collateral is kept in `docs/`, `docker/`, and `helm-chart/`. Unit tests sit next to their source as `*.test.ts` or inside `__tests__`.
+
+## Build, Test & Development Commands
+
+- `pnpm install` — install workspace dependencies pinned by `pnpm-lock.yaml`.
+- `pnpm db:up` / `pnpm db:down` — start/stop the Docker services backing the app.
+- `pnpm dev` — run all app and worker dev servers in parallel via Turborepo.
+- `pnpm build` — generate production builds for every package and app.
+- `pnpm lint` — apply the shared ESLint rules across the workspace.
+- `pnpm test` / `pnpm test:coverage` — execute Vitest suites with optional coverage.
+- `pnpm test:e2e` — launch the Playwright browser regression suite.
+- `pnpm db:migrate:dev` — apply Prisma migrations against the dev database.
+
+## Coding Style & Naming Conventions
+
+TypeScript, React, and Prisma are the primary languages. Use the shared ESLint presets (`@formbricks/eslint-config`) and Prettier preset (110-char width, semicolons, double quotes, sorted import groups). Two-space indentation is standard; prefer `PascalCase` for React components and folders under `modules/`, `camelCase` for functions/variables, and `SCREAMING_SNAKE_CASE` only for constants. When adding mocks, place them inside `__mocks__` so import ordering stays stable.
+
+## Testing Guidelines
+
+Prefer Vitest with Testing Library for logic in `.ts` files, keeping specs colocated with the code they exercise (`utility.test.ts`). Do not write tests for `.tsx` files—React components are covered by Playwright E2E tests instead. Mock network and storage boundaries through helpers from `@formbricks/*`. Run `pnpm test` before opening a PR and `pnpm test:coverage` when touching critical flows; keep coverage from regressing. End-to-end scenarios belong in `apps/web/playwright`, using descriptive filenames (`billing.spec.ts`) and tagging slow suites with `@slow` when necessary.
+
+## Commit & Pull Request Guidelines
+
+Commits follow a lightweight Conventional Commit format (`fix:`, `chore:`, `feat:`) and usually append the PR number, e.g. `fix: update OpenAPI schema (#6617)`. Keep commits scoped and lint-clean. Pull requests should outline the problem, summarize the solution, and link to issues or product specs. Attach screenshots or gifs for UI-facing work, list any migrations or env changes, and paste the output of relevant commands (`pnpm test`, `pnpm lint`, `pnpm db:migrate:dev`) so reviewers can verify readiness.
@@ -14,17 +14,7 @@ Are you brimming with brilliant ideas? For new features that can elevate Formbri
 
 ## 🛠 Crafting Pull Requests
 
-Ready to dive into the code and make a real impact? Here's your path:
+For the time being, we don't have the capacity to properly facilitate community contributions. It's a lot of engineering attention often spent on issues which don't follow our prioritization, so we've decided to only facilitate community code contributions in rare exceptions in the coming months.
-
-1. **Read our Best Practices**: [It takes 5 minutes](https://formbricks.com/docs/developer-docs/contributing/get-started) but will help you save hours 🤓
-
-1. **Fork the Repository:** Fork our repository or use [Gitpod](https://gitpod.io) or use [Github Codespaces](https://github.com/features/codespaces) to get started instantly.
-
-1. **Tweak and Transform:** Work your coding magic and apply your changes.
-
-1. **Pull Request Act:** If you're ready to go, craft a new pull request closely following our PR template 🙏
-
-Would you prefer a chat before you dive into a lot of work? [Github Discussions](https://github.com/formbricks/formbricks/discussions) is your harbor. Share your thoughts, and we'll meet you there with open arms. We're responsive and friendly, promise!
 
 ## 🚀 Aspiring Features
2
LICENSE
@@ -3,7 +3,7 @@ Copyright (c) 2024 Formbricks GmbH
 Portions of this software are licensed as follows:
 
 - All content that resides under the "apps/web/modules/ee" directory of this repository, if these directories exist, is licensed under the license defined in "apps/web/modules/ee/LICENSE".
-- All content that resides under the "packages/js/", "packages/react-native/", "packages/android/", "packages/ios/" and "packages/api/" directories of this repository, if that directories exist, is licensed under the "MIT" license as defined in the "LICENSE" files of these packages.
+- All content that resides under the "packages/js/", "packages/android/", "packages/ios/" and "packages/api/" directories of this repository, if that directories exist, is licensed under the "MIT" license as defined in the "LICENSE" files of these packages.
 - All third party components incorporated into the Formbricks Software are licensed under the original license provided by the owner of the applicable component.
 - Content outside of the above mentioned directories or restrictions above is available under the "AGPLv3" license as defined below.
 
@@ -21,6 +21,7 @@ The Open Source Qualtrics Alternative
 
 <p align="center">
 <a href="https://github.com/formbricks/formbricks/blob/main/LICENSE"><img src="https://img.shields.io/badge/License-AGPL-purple" alt="License"></a> <a href="https://github.com/formbricks/formbricks/stargazers"><img src="https://img.shields.io/github/stars/formbricks/formbricks?logo=github" alt="Github Stars"></a>
+<a href="https://insights.linuxfoundation.org/project/formbricks"><img src="https://insights.linuxfoundation.org/api/badge/health-score?project=formbricks"></a>
 <a href="https://news.ycombinator.com/item?id=32303986"><img src="https://img.shields.io/badge/Hacker%20News-122-%23FF6600" alt="Hacker News"></a>
 <a href="[https://www.producthunt.com/products/formbricks](https://www.producthunt.com/posts/formbricks)"><img src="https://img.shields.io/badge/Product%20Hunt-455-orange?logo=producthunt&logoColor=%23fff" alt="Product Hunt"></a>
 <a href="https://github.blog/2023-04-12-github-accelerator-our-first-cohort-and-whats-next/"><img src="https://img.shields.io/badge/2023-blue?logo=github&label=Github%20Accelerator" alt="Github Accelerator"></a>
@@ -192,7 +193,7 @@ Here are a few options:
 
 - Upvote issues with 👍 reaction so we know what the demand for a particular issue is to prioritize it within the roadmap.
 
-Please check out [our contribution guide](https://formbricks.com/docs/developer-docs/contributing/get-started) and our [list of open issues](https://github.com/formbricks/formbricks/issues) for more information.
+- Note: For the time being, we can only facilitate code contributions as an exception.
 
 ## All Thanks To Our Contributors
 
@@ -1,2 +0,0 @@
-EXPO_PUBLIC_APP_URL=http://192.168.0.197:3000
-EXPO_PUBLIC_FORMBRICKS_ENVIRONMENT_ID=cm5p0cs7r000819182b32j0a1
35
apps/demo-react-native/.gitignore
vendored
@@ -1,35 +0,0 @@
-# Learn more https://docs.github.com/en/get-started/getting-started-with-git/ignoring-files
-
-# dependencies
-node_modules/
-
-# Expo
-.expo/
-dist/
-web-build/
-
-# Native
-*.orig.*
-*.jks
-*.p8
-*.p12
-*.key
-*.mobileprovision
-
-# Metro
-.metro-health-check*
-
-# debug
-npm-debug.*
-yarn-debug.*
-yarn-error.*
-
-# macOS
-.DS_Store
-*.pem
-
-# local env files
-.env*.local
-
-# typescript
-*.tsbuildinfo
@@ -1,35 +0,0 @@
|
|||||||
{
|
|
||||||
"expo": {
|
|
||||||
"android": {
|
|
||||||
"adaptiveIcon": {
|
|
||||||
"backgroundColor": "#ffffff",
|
|
||||||
"foregroundImage": "./assets/adaptive-icon.png"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"assetBundlePatterns": ["**/*"],
|
|
||||||
"icon": "./assets/icon.png",
|
|
||||||
"ios": {
|
|
||||||
"infoPlist": {
|
|
||||||
"NSCameraUsageDescription": "Take pictures for certain activities.",
|
|
||||||
"NSMicrophoneUsageDescription": "Need microphone access for recording videos.",
|
|
||||||
"NSPhotoLibraryUsageDescription": "Select pictures for certain activities."
|
|
||||||
},
|
|
||||||
"supportsTablet": true
|
|
||||||
},
|
|
||||||
"jsEngine": "hermes",
|
|
||||||
"name": "react-native-demo",
|
|
||||||
"newArchEnabled": true,
|
|
||||||
"orientation": "portrait",
|
|
||||||
"slug": "react-native-demo",
|
|
||||||
"splash": {
|
|
||||||
"backgroundColor": "#ffffff",
|
|
||||||
"image": "./assets/splash.png",
|
|
||||||
"resizeMode": "contain"
|
|
||||||
},
|
|
||||||
"userInterfaceStyle": "light",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"web": {
|
|
||||||
"favicon": "./assets/favicon.png"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
Before Width: | Height: | Size: 17 KiB |
|
Before Width: | Height: | Size: 1.4 KiB |
|
Before Width: | Height: | Size: 22 KiB |
|
Before Width: | Height: | Size: 46 KiB |
@@ -1,6 +0,0 @@
|
|||||||
module.exports = function babel(api) {
|
|
||||||
api.cache(true);
|
|
||||||
return {
|
|
||||||
presets: ["babel-preset-expo"],
|
|
||||||
};
|
|
||||||
};
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
import { registerRootComponent } from "expo";
|
|
||||||
import { LogBox } from "react-native";
|
|
||||||
import App from "./src/app";
|
|
||||||
|
|
||||||
registerRootComponent(App);
|
|
||||||
|
|
||||||
LogBox.ignoreAllLogs();
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
// Learn more https://docs.expo.io/guides/customizing-metro
|
|
||||||
const path = require("node:path");
|
|
||||||
const { getDefaultConfig } = require("expo/metro-config");
|
|
||||||
|
|
||||||
// Find the workspace root, this can be replaced with `find-yarn-workspace-root`
|
|
||||||
const workspaceRoot = path.resolve(__dirname, "../..");
|
|
||||||
const projectRoot = __dirname;
|
|
||||||
|
|
||||||
const config = getDefaultConfig(projectRoot);
|
|
||||||
|
|
||||||
// 1. Watch all files within the monorepo
|
|
||||||
config.watchFolders = [workspaceRoot];
|
|
||||||
// 2. Let Metro know where to resolve packages, and in what order
|
|
||||||
config.resolver.nodeModulesPaths = [
|
|
||||||
path.resolve(projectRoot, "node_modules"),
|
|
||||||
path.resolve(workspaceRoot, "node_modules"),
|
|
||||||
];
|
|
||||||
// 3. Force Metro to resolve (sub)dependencies only from the `nodeModulesPaths`
|
|
||||||
config.resolver.disableHierarchicalLookup = true;
|
|
||||||
|
|
||||||
module.exports = config;
|
|
||||||
@@ -1,30 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@formbricks/demo-react-native",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"main": "./index.js",
|
|
||||||
"scripts": {
|
|
||||||
"dev": "expo start",
|
|
||||||
"android": "expo start --android",
|
|
||||||
"ios": "expo start --ios",
|
|
||||||
"web": "expo start --web",
|
|
||||||
"eject": "expo eject",
|
|
||||||
"clean": "rimraf .turbo node_modules .expo"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@formbricks/js": "workspace:*",
|
|
||||||
"@formbricks/react-native": "workspace:*",
|
|
||||||
"@react-native-async-storage/async-storage": "2.1.0",
|
|
||||||
"expo": "52.0.28",
|
|
||||||
"expo-status-bar": "2.0.1",
|
|
||||||
"react": "18.3.1",
|
|
||||||
"react-dom": "18.3.1",
|
|
||||||
"react-native": "0.76.6",
|
|
||||||
"react-native-webview": "13.12.5"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@babel/core": "7.26.0",
|
|
||||||
"@types/react": "18.3.18",
|
|
||||||
"typescript": "5.7.2"
|
|
||||||
},
|
|
||||||
"private": true
|
|
||||||
}
|
|
||||||
@@ -1,117 +0,0 @@
|
|||||||
import { StatusBar } from "expo-status-bar";
|
|
||||||
import React, { type JSX } from "react";
|
|
||||||
import { Button, LogBox, StyleSheet, Text, View } from "react-native";
|
|
||||||
import Formbricks, {
|
|
||||||
logout,
|
|
||||||
setAttribute,
|
|
||||||
setAttributes,
|
|
||||||
setLanguage,
|
|
||||||
setUserId,
|
|
||||||
track,
|
|
||||||
} from "@formbricks/react-native";
|
|
||||||
|
|
||||||
LogBox.ignoreAllLogs();
|
|
||||||
|
|
||||||
export default function App(): JSX.Element {
|
|
||||||
if (!process.env.EXPO_PUBLIC_FORMBRICKS_ENVIRONMENT_ID) {
|
|
||||||
throw new Error("EXPO_PUBLIC_FORMBRICKS_ENVIRONMENT_ID is required");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!process.env.EXPO_PUBLIC_APP_URL) {
|
|
||||||
throw new Error("EXPO_PUBLIC_APP_URL is required");
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
|
||||||
<View style={styles.container}>
|
|
||||||
<Text>Formbricks React Native SDK Demo</Text>
|
|
||||||
|
|
||||||
<View
|
|
||||||
style={{
|
|
||||||
display: "flex",
|
|
||||||
flexDirection: "column",
|
|
||||||
gap: 10,
|
|
||||||
}}>
|
|
||||||
<Button
|
|
||||||
title="Trigger Code Action"
|
|
||||||
onPress={() => {
|
|
||||||
track("code").catch((error: unknown) => {
|
|
||||||
// eslint-disable-next-line no-console -- logging is allowed in demo apps
|
|
||||||
console.error("Error tracking event:", error);
|
|
||||||
});
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
|
|
||||||
<Button
|
|
||||||
title="Set User Id"
|
|
||||||
onPress={() => {
|
|
||||||
setUserId("random-user-id").catch((error: unknown) => {
|
|
||||||
// eslint-disable-next-line no-console -- logging is allowed in demo apps
|
|
||||||
console.error("Error setting user id:", error);
|
|
||||||
});
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
|
|
||||||
<Button
|
|
||||||
title="Set User Attributess (multiple)"
|
|
||||||
onPress={() => {
|
|
||||||
setAttributes({
|
|
||||||
testAttr: "attr-test",
|
|
||||||
testAttr2: "attr-test-2",
|
|
||||||
testAttr3: "attr-test-3",
|
|
||||||
testAttr4: "attr-test-4",
|
|
||||||
}).catch((error: unknown) => {
|
|
||||||
// eslint-disable-next-line no-console -- logging is allowed in demo apps
|
|
||||||
console.error("Error setting user attributes:", error);
|
|
||||||
});
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
|
|
||||||
<Button
|
|
||||||
title="Set User Attributes (single)"
|
|
||||||
onPress={() => {
|
|
||||||
setAttribute("testSingleAttr", "testSingleAttr").catch((error: unknown) => {
|
|
||||||
// eslint-disable-next-line no-console -- logging is allowed in demo apps
|
|
||||||
console.error("Error setting user attributes:", error);
|
|
||||||
});
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
|
|
||||||
<Button
|
|
||||||
title="Logout"
|
|
||||||
onPress={() => {
|
|
||||||
logout().catch((error: unknown) => {
|
|
||||||
// eslint-disable-next-line no-console -- logging is allowed in demo apps
|
|
||||||
console.error("Error logging out:", error);
|
|
||||||
});
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
|
|
||||||
<Button
|
|
||||||
title="Set Language (de)"
|
|
||||||
onPress={() => {
|
|
||||||
setLanguage("de").catch((error: unknown) => {
|
|
||||||
// eslint-disable-next-line no-console -- logging is allowed in demo apps
|
|
||||||
console.error("Error setting language:", error);
|
|
||||||
});
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
</View>
|
|
||||||
|
|
||||||
<StatusBar style="auto" />
|
|
||||||
|
|
||||||
<Formbricks
|
|
||||||
appUrl={process.env.EXPO_PUBLIC_APP_URL as string}
|
|
||||||
environmentId={process.env.EXPO_PUBLIC_FORMBRICKS_ENVIRONMENT_ID as string}
|
|
||||||
/>
|
|
||||||
</View>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const styles = StyleSheet.create({
|
|
||||||
container: {
|
|
||||||
flex: 1,
|
|
||||||
backgroundColor: "#fff",
|
|
||||||
alignItems: "center",
|
|
||||||
justifyContent: "center",
|
|
||||||
},
|
|
||||||
});
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
{
|
|
||||||
"compilerOptions": {
|
|
||||||
"strict": true
|
|
||||||
},
|
|
||||||
"extends": "expo/tsconfig.base"
|
|
||||||
}
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
NEXT_PUBLIC_FORMBRICKS_API_HOST=http://localhost:3000
|
|
||||||
NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID=YOUR_ENVIRONMENT_ID
|
|
||||||
|
|
||||||
# Copy the environment ID for the URL of your Formbricks App and
|
|
||||||
# paste it above to connect your Formbricks App with the Demo App.
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
module.exports = {
|
|
||||||
extends: ["@formbricks/eslint-config/next.js"],
|
|
||||||
parserOptions: {
|
|
||||||
project: "tsconfig.json",
|
|
||||||
tsconfigRootDir: __dirname,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
36
apps/demo/.gitignore
vendored
@@ -1,36 +0,0 @@
|
|||||||
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
|
|
||||||
|
|
||||||
# dependencies
|
|
||||||
/node_modules
|
|
||||||
/.pnp
|
|
||||||
.pnp.js
|
|
||||||
|
|
||||||
# testing
|
|
||||||
/coverage
|
|
||||||
|
|
||||||
# next.js
|
|
||||||
/.next/
|
|
||||||
/out/
|
|
||||||
|
|
||||||
# production
|
|
||||||
/build
|
|
||||||
|
|
||||||
# misc
|
|
||||||
.DS_Store
|
|
||||||
*.pem
|
|
||||||
|
|
||||||
# debug
|
|
||||||
npm-debug.log*
|
|
||||||
yarn-debug.log*
|
|
||||||
yarn-error.log*
|
|
||||||
.pnpm-debug.log*
|
|
||||||
|
|
||||||
# local env files
|
|
||||||
.env*.local
|
|
||||||
|
|
||||||
# vercel
|
|
||||||
.vercel
|
|
||||||
|
|
||||||
# typescript
|
|
||||||
*.tsbuildinfo
|
|
||||||
next-env.d.ts
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
import { Sidebar } from "./sidebar";
|
|
||||||
|
|
||||||
export function LayoutApp({ children }: { children: React.ReactNode }): React.JSX.Element {
|
|
||||||
return (
|
|
||||||
<div className="min-h-full">
|
|
||||||
{/* Static sidebar for desktop */}
|
|
||||||
<div className="hidden lg:fixed lg:inset-y-0 lg:flex lg:w-64 lg:flex-col">
|
|
||||||
<Sidebar />
|
|
||||||
</div>
|
|
||||||
<div className="flex flex-1 flex-col lg:pl-64">{children}</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -1,65 +0,0 @@
|
|||||||
import {
|
|
||||||
ClockIcon,
|
|
||||||
CogIcon,
|
|
||||||
CreditCardIcon,
|
|
||||||
FileBarChartIcon,
|
|
||||||
HelpCircleIcon,
|
|
||||||
HomeIcon,
|
|
||||||
ScaleIcon,
|
|
||||||
ShieldCheckIcon,
|
|
||||||
UsersIcon,
|
|
||||||
} from "lucide-react";
|
|
||||||
import { classNames } from "../lib/utils";
|
|
||||||
|
|
||||||
const navigation = [
|
|
||||||
{ name: "Home", href: "#", icon: HomeIcon, current: true },
|
|
||||||
{ name: "History", href: "#", icon: ClockIcon, current: false },
|
|
||||||
{ name: "Balances", href: "#", icon: ScaleIcon, current: false },
|
|
||||||
{ name: "Cards", href: "#", icon: CreditCardIcon, current: false },
|
|
||||||
{ name: "Recipients", href: "#", icon: UsersIcon, current: false },
|
|
||||||
{ name: "Reports", href: "#", icon: FileBarChartIcon, current: false },
|
|
||||||
];
|
|
||||||
const secondaryNavigation = [
|
|
||||||
{ name: "Settings", href: "#", icon: CogIcon },
|
|
||||||
{ name: "Help", href: "#", icon: HelpCircleIcon },
|
|
||||||
{ name: "Privacy", href: "#", icon: ShieldCheckIcon },
|
|
||||||
];
|
|
||||||
|
|
||||||
export function Sidebar(): React.JSX.Element {
|
|
||||||
return (
|
|
||||||
<div className="flex flex-grow flex-col overflow-y-auto bg-cyan-700 pb-4 pt-5">
|
|
||||||
<nav
|
|
||||||
className="mt-5 flex flex-1 flex-col divide-y divide-cyan-800 overflow-y-auto"
|
|
||||||
aria-label="Sidebar">
|
|
||||||
<div className="space-y-1 px-2">
|
|
||||||
{navigation.map((item) => (
|
|
||||||
<a
|
|
||||||
key={item.name}
|
|
||||||
href={item.href}
|
|
||||||
className={classNames(
|
|
||||||
item.current ? "bg-cyan-800 text-white" : "text-cyan-100 hover:bg-cyan-600 hover:text-white",
|
|
||||||
"group flex items-center rounded-md px-2 py-2 text-sm font-medium leading-6"
|
|
||||||
)}
|
|
||||||
aria-current={item.current ? "page" : undefined}>
|
|
||||||
<item.icon className="mr-4 h-6 w-6 flex-shrink-0 text-cyan-200" aria-hidden="true" />
|
|
||||||
{item.name}
|
|
||||||
</a>
|
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
<div className="mt-6 pt-6">
|
|
||||||
<div className="space-y-1 px-2">
|
|
||||||
{secondaryNavigation.map((item) => (
|
|
||||||
<a
|
|
||||||
key={item.name}
|
|
||||||
href={item.href}
|
|
||||||
className="group flex items-center rounded-md px-2 py-2 text-sm font-medium leading-6 text-cyan-100 hover:bg-cyan-600 hover:text-white">
|
|
||||||
<item.icon className="mr-4 h-6 w-6 text-cyan-200" aria-hidden="true" />
|
|
||||||
{item.name}
|
|
||||||
</a>
|
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</nav>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
@tailwind base;
|
|
||||||
@tailwind components;
|
|
||||||
@tailwind utilities;
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
export function classNames(...classes: string[]): string {
|
|
||||||
return classes.filter(Boolean).join(" ");
|
|
||||||
}
|
|
||||||
5
apps/demo/next-env.d.ts
vendored
@@ -1,5 +0,0 @@
|
|||||||
/// <reference types="next" />
|
|
||||||
/// <reference types="next/image-types/global" />
|
|
||||||
|
|
||||||
// NOTE: This file should not be edited
|
|
||||||
// see https://nextjs.org/docs/pages/api-reference/config/typescript for more information.
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
/** @type {import('next').NextConfig} */
|
|
||||||
const nextConfig = {
|
|
||||||
images: {
|
|
||||||
remotePatterns: [
|
|
||||||
{
|
|
||||||
protocol: "https",
|
|
||||||
hostname: "tailwindui.com",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
protocol: "https",
|
|
||||||
hostname: "images.unsplash.com",
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
export default nextConfig;
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@formbricks/demo",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"private": true,
|
|
||||||
"scripts": {
|
|
||||||
"clean": "rimraf .turbo node_modules .next",
|
|
||||||
"dev": "next dev -p 3002 --turbopack",
|
|
||||||
"go": "next dev -p 3002 --turbopack",
|
|
||||||
"build": "next build",
|
|
||||||
"start": "next start",
|
|
||||||
"lint": "next lint"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@formbricks/js": "workspace:*",
|
|
||||||
"lucide-react": "0.468.0",
|
|
||||||
"next": "15.2.3",
|
|
||||||
"react": "19.0.0",
|
|
||||||
"react-dom": "19.0.0"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@formbricks/config-typescript": "workspace:*",
|
|
||||||
"@formbricks/eslint-config": "workspace:*"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
import type { AppProps } from "next/app";
|
|
||||||
import Head from "next/head";
|
|
||||||
import "../globals.css";
|
|
||||||
|
|
||||||
export default function App({ Component, pageProps }: AppProps): React.JSX.Element {
|
|
||||||
return (
|
|
||||||
<>
|
|
||||||
<Head>
|
|
||||||
<title>Demo App</title>
|
|
||||||
</Head>
|
|
||||||
{(!process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID ||
|
|
||||||
!process.env.NEXT_PUBLIC_FORMBRICKS_API_HOST) && (
|
|
||||||
<div className="w-full bg-red-500 p-3 text-center text-sm text-white">
|
|
||||||
Please set Formbricks environment variables in apps/demo/.env
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
<Component {...pageProps} />
|
|
||||||
</>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
import { Head, Html, Main, NextScript } from "next/document";
|
|
||||||
|
|
||||||
export default function Document(): React.JSX.Element {
|
|
||||||
return (
|
|
||||||
<Html lang="en" className="h-full bg-slate-50">
|
|
||||||
<Head />
|
|
||||||
<body className="h-full">
|
|
||||||
<Main />
|
|
||||||
<NextScript />
|
|
||||||
</body>
|
|
||||||
</Html>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -1,359 +0,0 @@
|
|||||||
import Image from "next/image";
|
|
||||||
import { useRouter } from "next/router";
|
|
||||||
import { useEffect, useState } from "react";
|
|
||||||
import formbricks from "@formbricks/js";
|
|
||||||
import fbsetup from "../public/fb-setup.png";
|
|
||||||
|
|
||||||
declare const window: Window;
|
|
||||||
|
|
||||||
export default function AppPage(): React.JSX.Element {
|
|
||||||
const [darkMode, setDarkMode] = useState(false);
|
|
||||||
const router = useRouter();
|
|
||||||
const userId = "THIS-IS-A-VERY-LONG-USER-ID-FOR-TESTING";
|
|
||||||
const userAttributes = {
|
|
||||||
"Attribute 1": "one",
|
|
||||||
"Attribute 2": "two",
|
|
||||||
"Attribute 3": "three",
|
|
||||||
};
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
if (darkMode) {
|
|
||||||
document.body.classList.add("dark");
|
|
||||||
} else {
|
|
||||||
document.body.classList.remove("dark");
|
|
||||||
}
|
|
||||||
}, [darkMode]);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
const initFormbricks = () => {
|
|
||||||
// enable Formbricks debug mode by adding formbricksDebug=true GET parameter
|
|
||||||
const addFormbricksDebugParam = (): void => {
|
|
||||||
const urlParams = new URLSearchParams(window.location.search);
|
|
||||||
if (!urlParams.has("formbricksDebug")) {
|
|
||||||
urlParams.set("formbricksDebug", "true");
|
|
||||||
const newUrl = `${window.location.pathname}?${urlParams.toString()}`;
|
|
||||||
window.history.replaceState({}, "", newUrl);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
addFormbricksDebugParam();
|
|
||||||
|
|
||||||
if (process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID && process.env.NEXT_PUBLIC_FORMBRICKS_API_HOST) {
|
|
||||||
void formbricks.setup({
|
|
||||||
environmentId: process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID,
|
|
||||||
appUrl: process.env.NEXT_PUBLIC_FORMBRICKS_API_HOST,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Connect next.js router to Formbricks
|
|
||||||
if (process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID && process.env.NEXT_PUBLIC_FORMBRICKS_API_HOST) {
|
|
||||||
const handleRouteChange = formbricks.registerRouteChange;
|
|
||||||
|
|
||||||
router.events.on("routeChangeComplete", () => {
|
|
||||||
void handleRouteChange();
|
|
||||||
});
|
|
||||||
|
|
||||||
return () => {
|
|
||||||
router.events.off("routeChangeComplete", () => {
|
|
||||||
void handleRouteChange();
|
|
||||||
});
|
|
||||||
};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
initFormbricks();
|
|
||||||
}, [router.events]);
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="min-h-screen bg-white px-12 py-6 dark:bg-slate-800">
|
|
||||||
<div className="flex flex-col justify-between md:flex-row">
|
|
||||||
<div className="flex flex-col items-center gap-2 sm:flex-row">
|
|
||||||
<div>
|
|
||||||
<h1 className="text-2xl font-bold text-slate-900 dark:text-white">
|
|
||||||
Formbricks In-product Survey Demo App
|
|
||||||
</h1>
|
|
||||||
<p className="text-slate-700 dark:text-slate-300">
|
|
||||||
This app helps you test your app surveys. You can create and test user actions, create and
|
|
||||||
update user attributes, etc.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="mt-2 rounded-lg bg-slate-200 px-6 py-1 dark:bg-slate-700 dark:text-slate-100"
|
|
||||||
onClick={() => {
|
|
||||||
setDarkMode(!darkMode);
|
|
||||||
}}>
|
|
||||||
{darkMode ? "Toggle Light Mode" : "Toggle Dark Mode"}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="my-4 grid grid-cols-1 gap-6 md:grid-cols-2">
|
|
||||||
<div>
|
|
||||||
<div className="rounded-lg border border-slate-300 bg-slate-100 p-6 dark:border-slate-600 dark:bg-slate-900">
|
|
||||||
<h3 className="text-lg font-semibold text-slate-900 dark:text-white">1. Setup .env</h3>
|
|
||||||
<p className="text-slate-700 dark:text-slate-300">
|
|
||||||
Copy the environment ID of your Formbricks app to the env variable in /apps/demo/.env
|
|
||||||
</p>
|
|
||||||
<Image src={fbsetup} alt="fb setup" className="mt-4 rounded" priority />
|
|
||||||
|
|
||||||
<div className="mt-4 flex-col items-start text-sm text-slate-700 sm:flex sm:items-center sm:text-base dark:text-slate-300">
|
|
||||||
<p className="mb-1 sm:mb-0 sm:mr-2">You're connected with env:</p>
|
|
||||||
<div className="flex items-center">
|
|
||||||
<strong className="w-32 truncate sm:w-auto">
|
|
||||||
{process.env.NEXT_PUBLIC_FORMBRICKS_ENVIRONMENT_ID}
|
|
||||||
</strong>
|
|
||||||
<span className="relative ml-2 flex h-3 w-3">
|
|
||||||
<span className="absolute inline-flex h-full w-full animate-ping rounded-full bg-green-500 opacity-75" />
|
|
||||||
<span className="relative inline-flex h-3 w-3 rounded-full bg-green-500" />
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div className="mt-4 rounded-lg border border-slate-300 bg-slate-100 p-6 dark:border-slate-600 dark:bg-slate-900">
|
|
||||||
<h3 className="text-lg font-semibold text-slate-900 dark:text-white">2. Widget Logs</h3>
|
|
||||||
<p className="text-slate-700 dark:text-slate-300">
|
|
||||||
Look at the logs to understand how the widget works.{" "}
|
|
||||||
<strong className="dark:text-white">Open your browser console</strong> to see the logs.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="md:grid md:grid-cols-3">
|
|
||||||
<div className="col-span-3 self-start rounded-lg border border-slate-300 bg-slate-100 p-6 dark:border-slate-600 dark:bg-slate-900">
|
|
||||||
<h3 className="text-lg font-semibold dark:text-white">
|
|
||||||
Set a user ID / pull data from Formbricks app
|
|
||||||
</h3>
|
|
||||||
<p className="text-slate-700 dark:text-slate-300">
|
|
||||||
On formbricks.setUserId() the user state will <strong>be fetched from Formbricks</strong> and
|
|
||||||
the local state gets <strong>updated with the user state</strong>.
|
|
||||||
</p>
|
|
||||||
<button
|
|
||||||
className="my-4 rounded-lg bg-slate-500 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600"
|
|
||||||
type="button"
|
|
||||||
onClick={() => {
|
|
||||||
void formbricks.setUserId(userId);
|
|
||||||
}}>
|
|
||||||
Set user ID
|
|
||||||
</button>
|
|
||||||
<p className="text-xs text-slate-700 dark:text-slate-300">
|
|
||||||
If you made a change in Formbricks app and it does not seem to work, hit 'Reset' and
|
|
||||||
try again.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="p-6">
|
|
||||||
<div>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600">
|
|
||||||
No-Code Action
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<p className="text-xs text-slate-700 dark:text-slate-300">
|
|
||||||
This button sends a{" "}
|
|
||||||
<a
|
|
||||||
href="https://formbricks.com/docs/xm-and-surveys/surveys/website-app-surveys/actions#setting-up-no-code-actions"
|
|
||||||
rel="noopener noreferrer"
|
|
||||||
className="underline dark:text-blue-500"
|
|
||||||
target="_blank">
|
|
||||||
No Code Action
|
|
||||||
</a>{" "}
|
|
||||||
as long as you created it beforehand in the Formbricks App.{" "}
|
|
||||||
<a
|
|
||||||
href="https://formbricks.com/docs/xm-and-surveys/surveys/website-app-surveys/actions#setting-up-no-code-actions"
|
|
||||||
rel="noopener noreferrer"
|
|
||||||
target="_blank"
|
|
||||||
className="underline dark:text-blue-500">
|
|
||||||
Here are instructions on how to do it.
|
|
||||||
</a>
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="p-6">
|
|
||||||
<div>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() => {
|
|
||||||
void formbricks.setAttribute("Plan", "Free");
|
|
||||||
}}
|
|
||||||
className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600">
|
|
||||||
Set Plan to 'Free'
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<p className="text-xs text-slate-700 dark:text-slate-300">
|
|
||||||
This button sets the{" "}
|
|
||||||
<a
|
|
||||||
href="https://formbricks.com/docs/xm-and-surveys/surveys/website-app-surveys/user-identification#setting-custom-user-attributes"
|
|
||||||
target="_blank"
|
|
||||||
rel="noopener noreferrer"
|
|
||||||
className="underline dark:text-blue-500">
|
|
||||||
attribute
|
|
||||||
</a>{" "}
|
|
||||||
'Plan' to 'Free'. If the attribute does not exist, it creates it.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div className="p-6">
|
|
||||||
<div>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() => {
|
|
||||||
void formbricks.setAttribute("Plan", "Paid");
|
|
||||||
}}
|
|
||||||
className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600">
|
|
||||||
Set Plan to 'Paid'
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<p className="text-xs text-slate-700 dark:text-slate-300">
|
|
||||||
This button sets the{" "}
|
|
||||||
<a
|
|
||||||
href="https://formbricks.com/docs/xm-and-surveys/surveys/website-app-surveys/user-identification#setting-custom-user-attributes"
|
|
||||||
target="_blank"
|
|
||||||
rel="noopener noreferrer"
|
|
||||||
className="underline dark:text-blue-500">
|
|
||||||
attribute
|
|
||||||
</a>{" "}
|
|
||||||
'Plan' to 'Paid'. If the attribute does not exist, it creates it.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div className="p-6">
|
|
||||||
<div>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() => {
|
|
||||||
void formbricks.setEmail("test@web.com");
|
|
||||||
}}
|
|
||||||
className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600">
|
|
||||||
Set Email
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<p className="text-xs text-slate-700 dark:text-slate-300">
|
|
||||||
This button sets the{" "}
|
|
||||||
<a
|
|
||||||
href="https://formbricks.com/docs/xm-and-surveys/surveys/website-app-surveys/user-identification"
|
|
||||||
target="_blank"
|
|
||||||
rel="noopener noreferrer"
|
|
||||||
className="underline dark:text-blue-500">
|
|
||||||
user email
|
|
||||||
</a>{" "}
|
|
||||||
'test@web.com'
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="p-6">
|
|
||||||
<div>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() => {
|
|
||||||
void formbricks.setAttributes(userAttributes);
|
|
||||||
}}
|
|
||||||
className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600">
|
|
||||||
Set Multiple Attributes
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<p className="text-xs text-slate-700 dark:text-slate-300">
|
|
||||||
This button sets the{" "}
|
|
||||||
<a
|
|
||||||
href="https://formbricks.com/docs/xm-and-surveys/surveys/website-app-surveys/user-identification#setting-custom-user-attributes"
|
|
||||||
target="_blank"
|
|
||||||
rel="noopener noreferrer"
|
|
||||||
className="underline dark:text-blue-500">
|
|
||||||
user attributes
|
|
||||||
</a>{" "}
|
|
||||||
to 'one', 'two', 'three'.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="p-6">
|
|
||||||
<div>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() => {
|
|
||||||
void formbricks.setLanguage("de");
|
|
||||||
}}
|
|
||||||
className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600">
|
|
||||||
Set Language to 'de'
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<p className="text-xs text-slate-700 dark:text-slate-300">
|
|
||||||
This button sets the{" "}
|
|
||||||
<a
|
|
||||||
href="https://formbricks.com/docs/xm-and-surveys/surveys/general-features/multi-language-surveys"
|
|
||||||
target="_blank"
|
|
||||||
rel="noopener noreferrer"
|
|
||||||
className="underline dark:text-blue-500">
|
|
||||||
language
|
|
||||||
</a>{" "}
|
|
||||||
to 'de'.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="p-6">
|
|
||||||
<div>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600"
|
|
||||||
onClick={() => {
|
|
||||||
void formbricks.track("code");
|
|
||||||
}}>
|
|
||||||
Code Action
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<p className="text-xs text-slate-700 dark:text-slate-300">
|
|
||||||
This button sends a{" "}
|
|
||||||
<a
|
|
||||||
href="https://formbricks.com/docs/xm-and-surveys/surveys/website-app-surveys/actions#setting-up-code-actions"
|
|
||||||
rel="noopener noreferrer"
|
|
||||||
className="underline dark:text-blue-500"
|
|
||||||
target="_blank">
|
|
||||||
Code Action
|
|
||||||
</a>{" "}
|
|
||||||
as long as you created it beforehand in the Formbricks App.{" "}
|
|
||||||
<a
|
|
||||||
href="https://formbricks.com/docs/xm-and-surveys/surveys/website-app-surveys/actions#setting-up-code-actions"
|
|
||||||
rel="noopener noreferrer"
|
|
||||||
target="_blank"
|
|
||||||
className="underline dark:text-blue-500">
|
|
||||||
Here are instructions on how to do it.
|
|
||||||
</a>
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="p-6">
|
|
||||||
<div>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="mb-4 rounded-lg bg-slate-800 px-6 py-3 text-white hover:bg-slate-700 dark:bg-slate-700 dark:hover:bg-slate-600"
|
|
||||||
onClick={() => {
|
|
||||||
void formbricks.logout();
|
|
||||||
}}>
|
|
||||||
Logout
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<p className="text-xs text-slate-700 dark:text-slate-300">
|
|
||||||
This button logs out the user and syncs the local state with Formbricks. (Only works if a
|
|
||||||
userId is set)
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
module.exports = {
|
|
||||||
plugins: {
|
|
||||||
tailwindcss: {},
|
|
||||||
autoprefixer: {},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
Before Width: | Height: | Size: 15 KiB |
|
Before Width: | Height: | Size: 6.2 KiB |
@@ -1 +0,0 @@
|
|||||||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 394 80"><path fill="#000" d="M262 0h68.5v12.7h-27.2v66.6h-13.6V12.7H262V0ZM149 0v12.7H94v20.4h44.3v12.6H94v21h55v12.6H80.5V0h68.7zm34.3 0h-17.8l63.8 79.4h17.9l-32-39.7 32-39.6h-17.9l-23 28.6-23-28.6zm18.3 56.7-9-11-27.1 33.7h17.8l18.3-22.7z"/><path fill="#000" d="M81 79.3 17 0H0v79.3h13.6V17l50.2 62.3H81Zm252.6-.4c-1 0-1.8-.4-2.5-1s-1.1-1.6-1.1-2.6.3-1.8 1-2.5 1.6-1 2.6-1 1.8.3 2.5 1a3.4 3.4 0 0 1 .6 4.3 3.7 3.7 0 0 1-3 1.8zm23.2-33.5h6v23.3c0 2.1-.4 4-1.3 5.5a9.1 9.1 0 0 1-3.8 3.5c-1.6.8-3.5 1.3-5.7 1.3-2 0-3.7-.4-5.3-1s-2.8-1.8-3.7-3.2c-.9-1.3-1.4-3-1.4-5h6c.1.8.3 1.6.7 2.2s1 1.2 1.6 1.5c.7.4 1.5.5 2.4.5 1 0 1.8-.2 2.4-.6a4 4 0 0 0 1.6-1.8c.3-.8.5-1.8.5-3V45.5zm30.9 9.1a4.4 4.4 0 0 0-2-3.3 7.5 7.5 0 0 0-4.3-1.1c-1.3 0-2.4.2-3.3.5-.9.4-1.6 1-2 1.6a3.5 3.5 0 0 0-.3 4c.3.5.7.9 1.3 1.2l1.8 1 2 .5 3.2.8c1.3.3 2.5.7 3.7 1.2a13 13 0 0 1 3.2 1.8 8.1 8.1 0 0 1 3 6.5c0 2-.5 3.7-1.5 5.1a10 10 0 0 1-4.4 3.5c-1.8.8-4.1 1.2-6.8 1.2-2.6 0-4.9-.4-6.8-1.2-2-.8-3.4-2-4.5-3.5a10 10 0 0 1-1.7-5.6h6a5 5 0 0 0 3.5 4.6c1 .4 2.2.6 3.4.6 1.3 0 2.5-.2 3.5-.6 1-.4 1.8-1 2.4-1.7a4 4 0 0 0 .8-2.4c0-.9-.2-1.6-.7-2.2a11 11 0 0 0-2.1-1.4l-3.2-1-3.8-1c-2.8-.7-5-1.7-6.6-3.2a7.2 7.2 0 0 1-2.4-5.7 8 8 0 0 1 1.7-5 10 10 0 0 1 4.3-3.5c2-.8 4-1.2 6.4-1.2 2.3 0 4.4.4 6.2 1.2 1.8.8 3.2 2 4.3 3.4 1 1.4 1.5 3 1.5 5h-5.8z"/></svg>
|
|
||||||
|
Before Width: | Height: | Size: 1.3 KiB |
@@ -1 +0,0 @@
|
|||||||
<svg xmlns="http://www.w3.org/2000/svg" width="40" height="31" fill="none"><g opacity=".9"><path fill="url(#a)" d="M13 .4v29.3H7V6.3h-.2L0 10.5V5L7.2.4H13Z"/><path fill="url(#b)" d="M28.8 30.1c-2.2 0-4-.3-5.7-1-1.7-.8-3-1.8-4-3.1a7.7 7.7 0 0 1-1.4-4.6h6.2c0 .8.3 1.4.7 2 .4.5 1 .9 1.7 1.2.7.3 1.6.4 2.5.4 1 0 1.7-.2 2.5-.5.7-.3 1.3-.8 1.7-1.4.4-.6.6-1.2.6-2s-.2-1.5-.7-2.1c-.4-.6-1-1-1.8-1.4-.8-.4-1.8-.5-2.9-.5h-2.7v-4.6h2.7a6 6 0 0 0 2.5-.5 4 4 0 0 0 1.7-1.3c.4-.6.6-1.3.6-2a3.5 3.5 0 0 0-2-3.3 5.6 5.6 0 0 0-4.5 0 4 4 0 0 0-1.7 1.2c-.4.6-.6 1.2-.6 2h-6c0-1.7.6-3.2 1.5-4.5 1-1.3 2.2-2.3 3.8-3C25 .4 26.8 0 28.8 0s3.8.4 5.3 1.1c1.5.7 2.7 1.7 3.6 3a7.2 7.2 0 0 1 1.2 4.2c0 1.6-.5 3-1.5 4a7 7 0 0 1-4 2.2v.2c2.2.3 3.8 1 5 2.2a6.4 6.4 0 0 1 1.6 4.6c0 1.7-.5 3.1-1.4 4.4a9.7 9.7 0 0 1-4 3.1c-1.7.8-3.7 1.1-5.8 1.1Z"/></g><defs><linearGradient id="a" x1="20" x2="20" y1="0" y2="30.1" gradientUnits="userSpaceOnUse"><stop/><stop offset="1" stop-color="#3D3D3D"/></linearGradient><linearGradient id="b" x1="20" x2="20" y1="0" y2="30.1" gradientUnits="userSpaceOnUse"><stop/><stop offset="1" stop-color="#3D3D3D"/></linearGradient></defs></svg>
|
|
||||||
|
Before Width: | Height: | Size: 1.1 KiB |
@@ -1 +0,0 @@
|
|||||||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 283 64"><path fill="black" d="M141 16c-11 0-19 7-19 18s9 18 20 18c7 0 13-3 16-7l-7-5c-2 3-6 4-9 4-5 0-9-3-10-7h28v-3c0-11-8-18-19-18zm-9 15c1-4 4-7 9-7s8 3 9 7h-18zm117-15c-11 0-19 7-19 18s9 18 20 18c6 0 12-3 16-7l-8-5c-2 3-5 4-8 4-5 0-9-3-11-7h28l1-3c0-11-8-18-19-18zm-10 15c2-4 5-7 10-7s8 3 9 7h-19zm-39 3c0 6 4 10 10 10 4 0 7-2 9-5l8 5c-3 5-9 8-17 8-11 0-19-7-19-18s8-18 19-18c8 0 14 3 17 8l-8 5c-2-3-5-5-9-5-6 0-10 4-10 10zm83-29v46h-9V5h9zM37 0l37 64H0L37 0zm92 5-27 48L74 5h10l18 30 17-30h10zm59 12v10l-3-1c-6 0-10 4-10 10v15h-9V17h9v9c0-5 6-9 13-9z"/></svg>
|
|
||||||
|
Before Width: | Height: | Size: 629 B |
@@ -1,13 +0,0 @@
|
|||||||
/** @type {import('tailwindcss').Config} */
|
|
||||||
module.exports = {
|
|
||||||
content: [
|
|
||||||
"./app/**/*.{js,ts,jsx,tsx}",
|
|
||||||
"./pages/**/*.{js,ts,jsx,tsx}",
|
|
||||||
"./components/**/*.{js,ts,jsx,tsx}",
|
|
||||||
],
|
|
||||||
darkMode: "class",
|
|
||||||
theme: {
|
|
||||||
extend: {},
|
|
||||||
},
|
|
||||||
plugins: [require("@tailwindcss/forms")],
|
|
||||||
};
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
{
|
|
||||||
"exclude": ["node_modules"],
|
|
||||||
"extends": "@formbricks/config-typescript/nextjs.json",
|
|
||||||
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"]
|
|
||||||
}
|
|
||||||
@@ -1,27 +1,52 @@
|
|||||||
import type { StorybookConfig } from "@storybook/react-vite";
|
import type { StorybookConfig } from "@storybook/react-vite";
|
||||||
import { dirname, join } from "path";
|
import { createRequire } from "module";
|
||||||
|
import { dirname, join, resolve } from "path";
|
||||||
|
import { fileURLToPath } from "url";
|
||||||
|
|
||||||
|
const require = createRequire(import.meta.url);
|
||||||
|
const __filename = fileURLToPath(import.meta.url);
|
||||||
|
const __dirname = dirname(__filename);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This function is used to resolve the absolute path of a package.
|
* This function is used to resolve the absolute path of a package.
|
||||||
* It is needed in projects that use Yarn PnP or are set up within a monorepo.
|
* It is needed in projects that use Yarn PnP or are set up within a monorepo.
|
||||||
*/
|
*/
|
||||||
const getAbsolutePath = (value: string) => {
|
function getAbsolutePath(value: string): any {
|
||||||
return dirname(require.resolve(join(value, "package.json")));
|
return dirname(require.resolve(join(value, "package.json")));
|
||||||
};
|
}
|
||||||
|
|
||||||
const config: StorybookConfig = {
|
const config: StorybookConfig = {
|
||||||
stories: ["../src/**/*.mdx", "../../web/modules/ui/**/stories.@(js|jsx|mjs|ts|tsx)"],
|
stories: ["../src/**/*.mdx", "../../../packages/survey-ui/src/**/*.stories.@(js|jsx|mjs|ts|tsx)"],
|
||||||
addons: [
|
addons: [
|
||||||
getAbsolutePath("@storybook/addon-onboarding"),
|
getAbsolutePath("@storybook/addon-onboarding"),
|
||||||
getAbsolutePath("@storybook/addon-links"),
|
getAbsolutePath("@storybook/addon-links"),
|
||||||
getAbsolutePath("@storybook/addon-essentials"),
|
|
||||||
getAbsolutePath("@chromatic-com/storybook"),
|
getAbsolutePath("@chromatic-com/storybook"),
|
||||||
getAbsolutePath("@storybook/addon-interactions"),
|
|
||||||
getAbsolutePath("@storybook/addon-a11y"),
|
getAbsolutePath("@storybook/addon-a11y"),
|
||||||
|
getAbsolutePath("@storybook/addon-docs"),
|
||||||
],
|
],
|
||||||
framework: {
|
framework: {
|
||||||
name: getAbsolutePath("@storybook/react-vite"),
|
name: getAbsolutePath("@storybook/react-vite"),
|
||||||
options: {},
|
options: {},
|
||||||
},
|
},
|
||||||
|
async viteFinal(config) {
|
||||||
|
const surveyUiPath = resolve(__dirname, "../../../packages/survey-ui/src");
|
||||||
|
const rootPath = resolve(__dirname, "../../../");
|
||||||
|
|
||||||
|
// Configure server to allow files from outside the storybook directory
|
||||||
|
config.server = config.server || {};
|
||||||
|
config.server.fs = {
|
||||||
|
...config.server.fs,
|
||||||
|
allow: [...(config.server.fs?.allow || []), rootPath],
|
||||||
|
};
|
||||||
|
|
||||||
|
// Configure simple alias resolution
|
||||||
|
config.resolve = config.resolve || {};
|
||||||
|
config.resolve.alias = {
|
||||||
|
...config.resolve.alias,
|
||||||
|
"@": surveyUiPath,
|
||||||
|
};
|
||||||
|
|
||||||
|
return config;
|
||||||
|
},
|
||||||
};
|
};
|
||||||
export default config;
|
export default config;
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
import type { Preview } from "@storybook/react";
|
import type { Preview } from "@storybook/react-vite";
|
||||||
import "../../web/modules/ui/globals.css";
|
import React from "react";
|
||||||
|
import "../../../packages/survey-ui/src/styles/globals.css";
|
||||||
|
|
||||||
const preview: Preview = {
|
const preview: Preview = {
|
||||||
parameters: {
|
parameters: {
|
||||||
@@ -8,8 +9,23 @@ const preview: Preview = {
|
|||||||
color: /(background|color)$/i,
|
color: /(background|color)$/i,
|
||||||
date: /Date$/i,
|
date: /Date$/i,
|
||||||
},
|
},
|
||||||
|
expanded: true,
|
||||||
|
},
|
||||||
|
backgrounds: {
|
||||||
|
default: "light",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
decorators: [
|
||||||
|
(Story) =>
|
||||||
|
React.createElement(
|
||||||
|
"div",
|
||||||
|
{
|
||||||
|
id: "fbjs",
|
||||||
|
className: "w-full h-full min-h-screen p-4 bg-background font-sans antialiased text-foreground",
|
||||||
|
},
|
||||||
|
React.createElement(Story)
|
||||||
|
),
|
||||||
|
],
|
||||||
};
|
};
|
||||||
|
|
||||||
export default preview;
|
export default preview;
|
||||||
|
|||||||
@@ -11,30 +11,24 @@
|
|||||||
"clean": "rimraf .turbo node_modules dist storybook-static"
|
"clean": "rimraf .turbo node_modules dist storybook-static"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"eslint-plugin-react-refresh": "0.4.16",
|
"@formbricks/survey-ui": "workspace:*",
|
||||||
"react": "19.0.0",
|
"eslint-plugin-react-refresh": "0.4.24"
|
||||||
"react-dom": "19.0.0"
|
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@chromatic-com/storybook": "3.2.2",
|
"@chromatic-com/storybook": "^4.1.3",
|
||||||
"@formbricks/config-typescript": "workspace:*",
|
"@storybook/addon-a11y": "10.0.8",
|
||||||
"@storybook/addon-a11y": "8.4.7",
|
"@storybook/addon-links": "10.0.8",
|
||||||
"@storybook/addon-essentials": "8.4.7",
|
"@storybook/addon-onboarding": "10.0.8",
|
||||||
"@storybook/addon-interactions": "8.4.7",
|
"@storybook/react-vite": "10.0.8",
|
||||||
"@storybook/addon-links": "8.4.7",
|
"@typescript-eslint/eslint-plugin": "8.48.0",
|
||||||
"@storybook/addon-onboarding": "8.4.7",
|
"@tailwindcss/vite": "4.1.17",
|
||||||
"@storybook/blocks": "8.4.7",
|
"@typescript-eslint/parser": "8.48.0",
|
||||||
"@storybook/react": "8.4.7",
|
"@vitejs/plugin-react": "5.1.1",
|
||||||
"@storybook/react-vite": "8.4.7",
|
"esbuild": "0.27.0",
|
||||||
"@storybook/test": "8.4.7",
|
"eslint-plugin-storybook": "10.0.8",
|
||||||
"@typescript-eslint/eslint-plugin": "8.18.0",
|
|
||||||
"@typescript-eslint/parser": "8.18.0",
|
|
||||||
"@vitejs/plugin-react": "4.3.4",
|
|
||||||
"esbuild": "0.25.1",
|
|
||||||
"eslint-plugin-storybook": "0.11.1",
|
|
||||||
"prop-types": "15.8.1",
|
"prop-types": "15.8.1",
|
||||||
"storybook": "8.4.7",
|
"storybook": "10.0.8",
|
||||||
"tsup": "8.3.5",
|
"vite": "7.2.4",
|
||||||
"vite": "6.0.12"
|
"@storybook/addon-docs": "10.0.8"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||