Compare commits

...

242 Commits

Author SHA1 Message Date
Pujit Mehrotra
d0db8a098d fix(web): reset infinite scroll when notification filters change 2024-11-20 09:38:33 -05:00
Pujit Mehrotra
94dfe85716 feat(web): group notifications list by importance 2024-11-20 09:15:47 -05:00
Pujit Mehrotra
eeb3289ae8 fix(web): infinite scroll loop when there's only 1 page of notifications 2024-11-19 14:22:08 -05:00
Pujit Mehrotra
939d7a304d feat(web): add an 'all' option to notification filter
allows users to "reset" after selecting a filter. ideally, we'd be able to
clear the filter if it was clicked again, but I couldn't find a way to listen
to a second/repeat click on a SelectItem, so I added a new filter item instead.
2024-11-19 14:22:08 -05:00
Pujit Mehrotra
acccb3694c chore(web): add testing step to github actions 2024-11-19 13:21:41 -05:00
Pujit Mehrotra
2724485989 test(web): Markdown sanitization & extensibility
fix(web): replaces dompurify with isomorphic-dompurify to enable server-side usage with same syntax
2024-11-19 13:21:41 -05:00
Pujit Mehrotra
2f4ff21986 feat(web): use Markdown helper class to interact with markdown 2024-11-19 13:21:41 -05:00
Pujit Mehrotra
83e00c640a fix(web): sanitize changelog markup after parsing 2024-11-19 13:21:41 -05:00
Pujit Mehrotra
abcaa5aedb feat(web): support markdown in notification messages 2024-11-19 13:21:41 -05:00
Pujit Mehrotra
4c663dc69c feat(web): add confirmation before archiving or deleting all notifications 2024-11-18 14:44:20 -05:00
Pujit Mehrotra
89eb841b20 feat(web): add delete all notifications button to archive view in notifications sidebar 2024-11-18 14:44:20 -05:00
Pujit Mehrotra
7296195495 feat(web): add link to settings in notification sidebar 2024-11-18 14:44:20 -05:00
Pujit Mehrotra
696b55de6c refactor(web): use optional chaining for graphql errors
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
2024-11-14 15:24:20 -05:00
Pujit Mehrotra
aa5fad39f3 refactor(web): improve incoming notifications var name in infinite scroll loader 2024-11-14 15:24:20 -05:00
Pujit Mehrotra
9c38fa6a9c fix(api): exclude duplicates from legacy script in archive retrieval 2024-11-14 15:24:20 -05:00
Pujit Mehrotra
da5d1132d1 chore(web): remove noisy console log in apollo disable link 2024-11-14 15:24:20 -05:00
Pujit Mehrotra
001be86181 fix(web): infinite trigger at bottom of infinite scroll 2024-11-14 15:24:20 -05:00
Pujit Mehrotra
ecfc797e7d fix(web): stop opening notification sidebar to archive tab 2024-11-14 15:24:20 -05:00
Pujit Mehrotra
dffbfc2dab fix(web): env var typo 2024-11-14 15:24:20 -05:00
Pujit Mehrotra
e5f029830b chore: add import organizer to prettier config (#959)
* chore(web): add import organizer plugin to prettier config

* chore(api): add import organizer plugin to prettier config

* chore(api): sort imports in notifications resolver & service

as a demonstration

* chore(web): sort imports in notifications indicator

as a demonstration
2024-11-12 09:46:29 -05:00
Pujit Mehrotra
1a33e6343a chore(web): add prettier config & tailwind class sorting (#955) 2024-11-08 13:17:10 -05:00
Eli Bosley
69441d890e feat: myservers_fb keepalive location 2024-11-08 10:17:45 -05:00
Eli Bosley
46c82ecae3 feat: upgrade dependencies 2024-11-08 10:17:45 -05:00
Pujit Mehrotra
0b469f5b3f feat(web): enhance notifications indicator in UPC (#950)
* feat(web): scaffold ui for notifications indicator

* refactor(web): poll for notification overview instead of subscription

* test: rm failing notifications.resolver test stub

* feat(web): pulse indicator when new notifications are received
2024-11-07 14:36:30 -05:00
Eli Bosley
3fc41480a2 fix: cwd on ecosystem.config.json 2024-11-06 15:29:11 -05:00
Eli Bosley
e27776df3d fix: use cwd when running application 2024-11-06 15:27:02 -05:00
Eli Bosley
abd8e09908 fix: forced restarting on commands 2024-11-06 15:21:08 -05:00
Eli Bosley
504283f227 fix: attempt to restore upgradepkg if install failed 2024-11-06 15:11:51 -05:00
Eli Bosley
ff7e09e15c Revert "fix: delete upgradepkg"
This reverts commit dc1c4fb6ec.
2024-11-06 15:03:21 -05:00
Eli Bosley
deb42f6a81 fix: delete upgradepkg 2024-11-06 15:03:21 -05:00
Eli Bosley
95d018ea05 Update plugin/source/dynamix.unraid.net/pkg_build.sh
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
2024-11-06 15:03:21 -05:00
Eli Bosley
106b2e42c0 feat: nodejs issues with version 2 2024-11-06 15:03:21 -05:00
Eli Bosley
1c5ff58d2d Update plugin/plugins/dynamix.unraid.net.plg
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
2024-11-06 15:03:21 -05:00
Eli Bosley
d7bab9f443 fix: used TGZ instead of TXZ for nghttp3 2024-11-06 15:03:21 -05:00
Eli Bosley
902c76c759 fix: capitalize name 2024-11-06 15:03:21 -05:00
Eli Bosley
5e50f24d70 fix: node_txz naming 2024-11-06 15:03:21 -05:00
Eli Bosley
4f0210d16a feat: nghttp3 sha256 missing 2024-11-06 15:03:21 -05:00
Eli Bosley
ddb8772692 feat: add validation step to ensure that variables are set 2024-11-06 15:03:21 -05:00
Eli Bosley
787f8b9bf5 fix: proper file replacements 2024-11-06 15:03:21 -05:00
Eli Bosley
61ba324ca0 fix: variables passed properly 2024-11-06 15:03:21 -05:00
Eli Bosley
a230a33df5 fix: pull node version directly from nvmrc 2024-11-06 15:03:21 -05:00
Eli Bosley
84b234c9cf feat: upload files directly to cloudflare 2024-11-06 15:03:21 -05:00
Eli Bosley
9bfc04c2a5 fix: dnserr on new line 2024-11-06 15:03:21 -05:00
Eli Bosley
e84430471d fix: add error check to nodejs 2024-11-06 15:03:21 -05:00
Eli Bosley
2d60045784 fix: pkg_build 2024-11-06 15:03:21 -05:00
Eli Bosley
e9137f2553 Update plugin/source/dynamix.unraid.net/pkg_build.sh
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
2024-11-06 15:03:21 -05:00
Eli Bosley
dbe0dd5dfb Update plugin/source/dynamix.unraid.net/pkg_build.sh
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
2024-11-06 15:03:21 -05:00
Eli Bosley
9d2796f2c9 feat: track node version in slackware 2024-11-06 15:03:21 -05:00
Eli Bosley
972a19be04 fix: better logging when error 2024-11-05 16:11:21 -05:00
Eli Bosley
c8da8fe314 fix: remove uneeded env variable 2024-11-05 16:11:21 -05:00
Eli Bosley
353132b67a feat: code review changes 2024-11-05 16:11:21 -05:00
Eli Bosley
88b7cbfe95 feat: actually exit on stop and start 2024-11-05 16:11:21 -05:00
Eli Bosley
3ed1d10c98 fix: properly restart the API when installed 2024-11-05 16:11:21 -05:00
Eli Bosley
62693cfcc0 feat: cleanup unused variables 2024-11-05 16:11:21 -05:00
Eli Bosley
810708f775 fix: use unraid binary path to call unraid commands 2024-11-05 16:11:21 -05:00
Eli Bosley
08f6d6df65 fix: no vite-node in non-dev mode 2024-11-05 16:11:21 -05:00
Eli Bosley
da673c3f2b feat: exit after running status 2024-11-05 16:11:21 -05:00
Eli Bosley
cb463bfdd0 feat: add ecosystem.config.json to files 2024-11-05 16:11:21 -05:00
Eli Bosley
7177171b75 feat: vite dev mode 2024-11-05 16:11:21 -05:00
Eli Bosley
9f0ab7fa38 feat: start command path 2024-11-05 16:11:21 -05:00
Eli Bosley
a32374a3ac feat: attempt to fix pm2 2024-11-05 16:11:21 -05:00
Eli Bosley
cb6534d9d9 feat: pm2 fixes 2024-11-05 16:11:21 -05:00
Eli Bosley
2eaf175515 fix: load PM2 from node_modules 2024-11-05 16:11:21 -05:00
Eli Bosley
50376a0d66 fix: make cli.js executable 2024-11-05 16:11:21 -05:00
Eli Bosley
4b2007b689 fix: plugin download route and add env node to cli script 2024-11-05 16:11:21 -05:00
Eli Bosley
72fcaca4f3 feat: process env fixed and copy gql files 2024-11-05 16:11:21 -05:00
Eli Bosley
2f48ddf942 feat: fix more imports 2024-11-05 16:11:21 -05:00
Eli Bosley
62dfa6c83a fix: invalid type 2024-11-05 16:11:21 -05:00
Eli Bosley
27bb375460 feat: vite 2024-11-05 16:11:21 -05:00
Eli Bosley
cc4d5bdefb feat: substantial docs updates 2024-11-05 16:11:21 -05:00
Eli Bosley
f55302c130 feat: add web gitignore 2024-11-05 16:11:21 -05:00
Eli Bosley
b8dbe3f9d9 fix: execa upgrade snapshots fixed 2024-11-05 16:11:21 -05:00
Eli Bosley
20771f61a8 feat: fix header strategy 2024-11-05 16:11:21 -05:00
Eli Bosley
b9b8bbe871 fix: unit test failure 2024-11-05 16:11:21 -05:00
Eli Bosley
b8e61007e3 fix: changelog parser 2024-11-05 16:11:21 -05:00
Eli Bosley
49536032df fix: handle special chars better 2024-11-05 16:11:21 -05:00
Eli Bosley
9229cf3df6 fix: version and EOF key 2024-11-05 16:11:21 -05:00
Eli Bosley
58665a4e98 fix: trigger loading correctly 2024-11-05 16:11:21 -05:00
Eli Bosley
885d1537b6 feat: fix issues with permissions and invalid modules 2024-11-05 16:11:21 -05:00
Eli Bosley
198cfe5015 fix: unraid-api in usr/bin 2024-11-05 16:11:21 -05:00
Eli Bosley
42189dd451 fix: detection script path bin instead of sbin 2024-11-05 16:11:21 -05:00
Eli Bosley
6122b3c001 feat: comment URL for plugin on PR 2024-11-05 16:11:21 -05:00
Eli Bosley
cda7368d3d fix: connect plugin location 2024-11-05 16:11:21 -05:00
Eli Bosley
447cecd19d feat: fix missing import in ESM 2024-11-05 16:11:21 -05:00
Eli Bosley
7321bd0088 fix: add ecosystem config 2024-11-05 16:11:21 -05:00
Eli Bosley
67e898efe1 fix: missing ip-regex module 2024-11-05 16:11:21 -05:00
Eli Bosley
41e5de83a2 feat: remove many unneded simple libraries 2024-11-05 16:11:21 -05:00
Eli Bosley
5c020a62d6 feat: package scripts 2024-11-05 16:11:21 -05:00
Eli Bosley
1393e967fa fix: production env for web build 2024-11-05 16:11:21 -05:00
Eli Bosley
f07c14354f feat: pack everything in API 2024-11-05 16:11:21 -05:00
Eli Bosley
d42a426244 fix: actually install dependencies 2024-11-05 16:11:21 -05:00
Eli Bosley
125bc29166 feat: also copy in other files 2024-11-05 16:11:21 -05:00
Eli Bosley
a6333bf5a2 fix: more filename fixes and PR var passing 2024-11-05 16:11:21 -05:00
Eli Bosley
e8e985ad6a feat: properly set outputs 2024-11-05 16:11:21 -05:00
Eli Bosley
1a598885cc feat: copy 2024-11-05 16:11:21 -05:00
Eli Bosley
d73f267245 feat: copy node modules 2024-11-05 16:11:21 -05:00
Eli Bosley
7c1873249e feat: more process improvements 2024-11-05 16:11:21 -05:00
Eli Bosley
09f33a0127 fix: don't LS in the release folder 2024-11-05 16:11:21 -05:00
Eli Bosley
db00d7442d feat: diff 2024-11-05 16:11:21 -05:00
Eli Bosley
724159314c fix: PR build missing files 2024-11-05 16:11:21 -05:00
Eli Bosley
180f115b71 feat: plg builder improvements to be more explicit 2024-11-05 16:11:21 -05:00
Eli Bosley
eb38eb219e feat: PR builds 2024-11-05 16:11:21 -05:00
Eli Bosley
3da701a53b fix: local variable assignment 2024-11-05 16:11:21 -05:00
Eli Bosley
6e5b2f1f67 fix: unused import 2024-11-05 16:11:21 -05:00
Eli Bosley
812053d7a4 feat: simplify getting version 2024-11-05 16:11:21 -05:00
Eli Bosley
a929c7e3b3 fix: pass env through to docker 2024-11-05 16:11:21 -05:00
Eli Bosley
c0179c8351 fix: env correct 2024-11-05 16:11:21 -05:00
Eli Bosley
d5c7be54b0 fix: only test when API is changed 2024-11-05 16:11:21 -05:00
Eli Bosley
32478f34c2 fix: variable naming 2024-11-05 16:11:21 -05:00
Eli Bosley
4daa09b340 fix: only test if API was changed 2024-11-05 16:11:21 -05:00
Eli Bosley
346ce91f73 fix: EOF 2024-11-05 16:11:21 -05:00
Eli Bosley
cee3a6d0ef fix: env input 2024-11-05 16:11:21 -05:00
Eli Bosley
e90f606f43 feat: pass env into builder 2024-11-05 16:11:21 -05:00
Eli Bosley
05fa344454 feat: mount git folder to builder 2024-11-05 16:11:21 -05:00
Eli Bosley
406c400bd2 fix: proper directory in rc.unraid-api 2024-11-05 16:11:21 -05:00
Eli Bosley
1ae466899e fix: rm rf to fix build issues 2024-11-05 16:11:21 -05:00
Eli Bosley
5178e131ce fix: docker formatting and build mkdir issues 2024-11-05 16:11:21 -05:00
Eli Bosley
0bd11bce5a feat: don't remove directory, only files 2024-11-05 16:11:21 -05:00
Eli Bosley
fddde33977 fix: remove unused job dependency 2024-11-05 16:11:21 -05:00
Eli Bosley
1f5df845eb feat: build and pack in docker 2024-11-05 16:11:21 -05:00
Eli Bosley
ef54af655e fix: build issues based on removed code 2024-11-05 16:11:21 -05:00
Eli Bosley
bb44862b7b fix: builder cache 2024-11-05 16:11:21 -05:00
Eli Bosley
9709dc82ea feat: swap to action 2024-11-05 16:11:21 -05:00
Eli Bosley
38f0699e19 feat: linting continues on error 2024-11-05 16:11:21 -05:00
Eli Bosley
6ca9f421eb fix: apollo client lint issues 2024-11-05 16:11:21 -05:00
Eli Bosley
935825571b fix: load builder image to cache 2024-11-05 16:11:21 -05:00
Eli Bosley
9beaa78820 feat: move to singular build and test step 2024-11-05 16:11:21 -05:00
Eli Bosley
420c2c1afd feat: buildx build caching 2024-11-05 16:11:21 -05:00
Eli Bosley
7c0cb07b83 feat: only run mainline build 2024-11-05 16:11:21 -05:00
Eli Bosley
c6a7137f19 feat: right workin directory 2024-11-05 16:11:21 -05:00
Eli Bosley
44f9ba0e7f fix: subdependenies 2024-11-05 16:11:21 -05:00
Eli Bosley
1c61e64169 feat: workflow changes 2024-11-05 16:11:21 -05:00
Eli Bosley
cf0eeebd31 feat: remove more unused calls 2024-11-05 16:11:21 -05:00
Eli Bosley
f118597e47 feat: massive rc.unraid-api updates to facilitate installing and linking 2024-11-05 16:11:21 -05:00
Eli Bosley
6f2fcffd3e fix: remove unused imports 2024-11-05 16:11:21 -05:00
Eli Bosley
8f7748404c chore: dependency updates 2024-11-05 16:11:21 -05:00
Eli Bosley
88c2605d4f fix: delete boot script and update nvmrc 2024-11-05 16:11:21 -05:00
Eli Bosley
c2d645612a feat: add exclude to vite.config 2024-11-05 16:11:21 -05:00
Eli Bosley
b20f69c208 feat: pm2 fully working 2024-11-05 16:11:21 -05:00
Eli Bosley
b9cedb70ff fix: logging location 2024-11-05 16:11:21 -05:00
Eli Bosley
a11978aa33 fix: app running 2024-11-05 16:11:21 -05:00
Eli Bosley
b0efcc0d51 feat: pm2 initial setup 2024-11-05 16:11:21 -05:00
Eli Bosley
92b5f2226e fix: eslint config 2024-11-05 16:11:21 -05:00
Eli Bosley
98f2603525 feat: more cleanup 2024-11-05 16:11:21 -05:00
Eli Bosley
cfb1d50c8e feat: remove wtfnode 2024-11-05 16:11:21 -05:00
Eli Bosley
545ccf1938 feat: working 2024-11-05 16:11:21 -05:00
Eli Bosley
0c79995107 feat: almost working 2024-11-05 16:11:21 -05:00
Pujit Mehrotra
9d3397a687 refactor(web): reduce magic in identifying apollo cache item 2024-11-05 09:27:43 -05:00
Pujit Mehrotra
11c160835a feat(web): display error when a notification mutation fails 2024-11-05 09:27:43 -05:00
Pujit Mehrotra
e388b37aa6 refactor(web): simplify naming of notification mutations 2024-11-05 09:27:43 -05:00
Pujit Mehrotra
1da882b807 refactor(web): extract notification object cache prefix to a constant 2024-11-05 09:27:43 -05:00
Pujit Mehrotra
d9d5a24b70 feat(web): delete notifications from archive view 2024-11-05 09:27:43 -05:00
Pujit Mehrotra
24e3cad882 feat(web): make empty notification message clearer 2024-11-01 12:18:20 -04:00
Pujit Mehrotra
323a4a17cf feat(web): add empty state to notifications list 2024-11-01 12:18:20 -04:00
Pujit Mehrotra
9968e0f7df feat(web): implement notification filtering 2024-11-01 12:18:20 -04:00
Pujit Mehrotra
2ccc53630b doc(web): using codegen scripts & graphql fragments 2024-11-01 09:53:46 -04:00
Pujit Mehrotra
d7bb3defc3 doc(web): intro to using graphql 2024-11-01 09:53:46 -04:00
Pujit Mehrotra
ddb8bf8a5c refactor(web): improve signature & readability of mergeAndDedup cache function 2024-10-28 10:00:23 -04:00
Pujit Mehrotra
6234d61ae5 fix(web): dedupe incoming notifications during cache merge 2024-10-28 10:00:23 -04:00
Pujit Mehrotra
a665ee3ec6 fix(web): remove unused infinite-scroll emit from SheetContent 2024-10-28 10:00:23 -04:00
Pujit Mehrotra
7ca3efe8b8 doc(web): possibly ambiguous css & confusing cache policies/types 2024-10-28 10:00:23 -04:00
Pujit Mehrotra
28f4952599 fix(web): replace manual height hack in notifications infinite scroll 2024-10-28 10:00:23 -04:00
Pujit Mehrotra
7e4022518d feat(web): reconcile pagination with notifications apollo cache 2024-10-28 10:00:23 -04:00
Pujit Mehrotra
4d1656eaa8 feat(web): make notifications list scrollable inside the sheet & tabs 2024-10-28 10:00:23 -04:00
Michael Datelle
5b2421cb0c feat: add date formatting helper (#938) 2024-10-25 10:43:38 -04:00
Pujit Mehrotra
0578b066f1 refactor(web): extract notifications list for cleaner state management 2024-10-23 16:19:33 -04:00
Pujit Mehrotra
57fdcf3e60 refactor(api): parameterize max iterations of updateObject util 2024-10-23 16:19:33 -04:00
Pujit Mehrotra
eb7bdb6a85 feat(api): sort notifications file listing by date (latest first) 2024-10-23 16:19:33 -04:00
Pujit Mehrotra
ebd671e7b6 refactor(web): move archiveAll cache invalidation into apollo client config 2024-10-23 16:19:33 -04:00
Pujit Mehrotra
15a1a3ac15 feat(web): update cache & view when archiving notifications 2024-10-23 16:19:33 -04:00
Pujit Mehrotra
9a0c7fe9c8 refactor(Notifications): return modified notification when mutated
updated archiveNotification & unreadNotification mutations to return the
modified notification instead of an overview to improve default caching mechanics
when updating/moving notifications.
2024-10-23 16:19:33 -04:00
Pujit Mehrotra
91bcbc3d6f fix(api): strip server id prefixes from graphql request variables 2024-10-23 16:19:33 -04:00
Eli Bosley
b3d046f4ea feat: actual install url 2024-10-23 09:57:46 -04:00
Eli Bosley
0f13e34562 feat: install nghttp3 2024-10-23 09:37:12 -04:00
Eli Bosley
e18cd87180 fix: add max var 2024-10-22 16:15:23 -04:00
Eli Bosley
421949a9f8 fix: node install process improvements 2024-10-22 16:15:23 -04:00
Eli Bosley
8c7c580f3f fix: no more node_dl_server 2024-10-22 16:15:23 -04:00
Eli Bosley
c616641044 fix: no nodehost 2024-10-22 16:15:23 -04:00
Eli Bosley
fd16243287 fix: unused node dl line 2024-10-22 16:15:23 -04:00
Eli Bosley
7352bbe77a feat: install node 2024-10-22 16:15:23 -04:00
Eli Bosley
4d33908e01 fix: always mangle 2024-10-22 11:16:03 -04:00
Pujit Mehrotra
adabe92f72 refactor(web): move cn utility inside shadcn for modularization 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
958f9e57e1 refactor(api): use a type wrapper around fastify request in cookie.strategy 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
ac5032df83 feat(api): add default dynamix config to dev docker container 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
5f4cc07473 fix(api): load dynamix config in the same way as the webgui
merge defaults + custom config

see original php implementation in the webgui:
[link](95c6913c62/emhttp/plugins/dynamix/include/Wrappers.php (L42))
2024-10-18 11:42:38 -04:00
Pujit Mehrotra
38524bce88 fix(web): add default values to optional vue component props 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
64db2f19a7 fix(web): replace incorrect custom types with codegen from gql & update values to match expected shapes 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
8fe1e80bbd feat(web): add gql archival mutations to notifications sidebar & item 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
1c4506cf50 refactor(web): shadcn styles for consistency 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
84fe7f6df6 refactor(web): rm shadcn border radius overrides for consistency & simplicity 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
5c7e650b3b fix(web): inline shadcn variables into tailwind config to simplify build 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
6cac078e15 refactor(web): instantiate apollo client statically instead of dynamically 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
4e555021a7 fix: type & build errors 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
b1e2f043b1 chore(web): fix lint errors about imports 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
bc69852333 chore(web): omit notifications sidebar from UPC in staging & prod 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
2c79ccc883 fix(NotificationItem): icon & text alignment in header 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
c240fab58a refactor(NotificationItem): use button to view link instead of making the whole notification a clickable target 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
3c50022ac3 fix(NotificationsSidebar): occupy full viewport on small screens 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
9201136cb1 refactor(NotificationsItem): component design & layout 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
ff52f75abf refactor(NotificationsSidebar): de-emphasize archive-all & filter 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
eed40f7875 test(api): update permissions snapshots 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
754d4560ea feat: integrate cross-domain authentication to api 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
f6d09f4ba2 refactor(web): use tabs instead of buttons in NotificationsSidebar header 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
a1f0dac42d chore(web): display NotificationsSidebar in UserProfile component 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
fff935cf02 chore(api): add script to create mock user session in api container 2024-10-18 11:42:38 -04:00
Pujit Mehrotra
0849468fc2 chore(web): restore lockfile to correct state 2024-10-18 11:42:38 -04:00
Zack Spear
6a57924fbf feat: WIP sidebar filter select 2024-10-18 11:42:38 -04:00
mdatelle
57802c2ea0 feat(web): wip query api for notifications 2024-10-18 11:42:38 -04:00
mdatelle
924df0dc9e refactor(api): local dev permissions for notifications 2024-10-18 11:42:38 -04:00
Zack Spear
d04001e052 feat: WIP create teleport composable 2024-10-18 11:42:38 -04:00
Zack Spear
92ec931aff refactor: Update connectPluginInstalled value in serverState.ts 2024-10-18 11:42:38 -04:00
Eli Bosley
30f92374d0 fix: floating-ui fixes 2024-10-18 11:42:38 -04:00
Zack Spear
6bfd221cd1 test: sidebar tabs 2024-10-18 11:42:38 -04:00
Zack Spear
ceb537ae91 refactor: Update NotificationItemProps interface
- Add 'event' and 'date' properties to the NotificationItemProps interface
- Add 'view' property to the NotificationItemProps interface
- Remove trailing newline at the end of the file
2024-10-18 11:42:38 -04:00
Zack Spear
81b197a9aa refactor: Remove duplicate declaration of 'combinations' in terserReservations function 2024-10-18 11:42:38 -04:00
Zack Spear
54b4ad0df8 refactor: Remove extra whitespace in Notifications Sidebar and optimize Terser options in nuxt.config.ts 2024-10-18 11:42:38 -04:00
Zack Spear
e84c3ebe14 feat: WIP notifications w/ shadcn
Currently the build doesn't work in webgui
2024-10-18 11:42:38 -04:00
Zack Spear
81acf1d947 feat: wip Notification UI starter 2024-10-18 11:42:38 -04:00
renovate[bot]
80bfc231e0 chore(deps): update dependency @swc/core to v1.7.36 2024-10-18 10:06:20 -04:00
Zack Spear
b1409684db refactor: conditionally skip removeConsole plugin based on VITE_ALLOW_CONSOLE_LOGS env 2024-10-11 10:58:36 -04:00
Zack Spear
14d9448e4c refactor: build removeConsole conditionally skip via VITE_ALLOW_CONSOLE_LOGS env 2024-10-11 10:58:36 -04:00
Eli Bosley
924fa699eb fix: linter error 2024-10-10 09:42:38 -04:00
Eli Bosley
999a8e39eb fix: remove console logs with vue plugin 2024-10-10 09:41:12 -04:00
Eli Bosley
5a1c85d739 fix: remove unused disableProductionConsoleLogs call 2024-10-09 13:49:57 -04:00
Eli Bosley
ba77ff4a4c feat: remove console log disabler 2024-10-09 13:49:57 -04:00
Pujit Mehrotra
05765495c4 test(NotificationsService): add snapshot test to legacy script execution error 2024-10-09 13:12:15 -04:00
Pujit Mehrotra
f7cccc8c37 test(NotificationsService): add special characters to legacy script test 2024-10-09 13:12:15 -04:00
Pujit Mehrotra
85e0f7993e feat(NotificationsService): use existing notifier script to create notifications when possible 2024-10-09 13:12:15 -04:00
Pujit Mehrotra
d5a424ebe1 refactor(api): directly accept importance level in UnraidLocalNotifier 2024-10-09 13:12:15 -04:00
Pujit Mehrotra
01441961c3 doc(cors): update name of bypass flag 2024-10-08 15:52:43 -04:00
Pujit Mehrotra
836f64d28f test(api): add auth-sessions to paths test snapshot 2024-10-08 15:52:43 -04:00
Pujit Mehrotra
79bb4e585b refactor(CookieService): use paths store to get default sessions directory instead of a literal 2024-10-08 15:52:43 -04:00
Pujit Mehrotra
409e88b727 refactor(cors): use BYPASS_CORS_CHECKS flag to ignore cors failures instead of BYPASS_PERMISSION_CHECKS 2024-10-08 15:52:43 -04:00
Pujit Mehrotra
5034a8981a chore(CookieService): remove unused CookieGuard 2024-10-08 15:52:43 -04:00
Pujit Mehrotra
e61d9f195d fix(CookieService): potential race condition in unit tests 2024-10-08 15:52:43 -04:00
Pujit Mehrotra
b3e213ba04 refactor(CookieService): rename SESSION_COOKIE_OPTIONS to SESSION_COOKIE_CONFIG for clearer semantics 2024-10-08 15:52:43 -04:00
Pujit Mehrotra
a7ea678683 fix(cors): excessive instantiation of CookieService to improve memory overhead 2024-10-08 15:52:43 -04:00
Pujit Mehrotra
791e16ce52 test(CookieService): reading valid & invalid session cookies 2024-10-08 15:52:43 -04:00
Pujit Mehrotra
173da0e65b refactor(CookieService): make cookie prefix & session directory injectable via Nest.js 2024-10-08 15:52:43 -04:00
Pujit Mehrotra
287aabfda7 feat(auth): make cors aware of authenticated sessions 2024-10-08 15:52:43 -04:00
Pujit Mehrotra
d8656cc6b3 fix: replace express cookie parser with fastify's 2024-10-08 15:52:43 -04:00
Pujit Mehrotra
a3500c9bc9 feat(Auth): add cookie guard to check for valid sessions 2024-10-08 15:52:43 -04:00
244 changed files with 19691 additions and 21170 deletions

View File

@@ -1,74 +0,0 @@
name: Lint, Test, and Build Web Components
on:
workflow_dispatch:
jobs:
lint-web:
defaults:
run:
working-directory: web
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Create env file
run: |
touch .env
echo VITE_ACCOUNT=${{ vars.VITE_ACCOUNT }} >> .env
echo VITE_CONNECT=${{ vars.VITE_CONNECT }} >> .env
echo VITE_UNRAID_NET=${{ vars.VITE_UNRAID_NET }} >> .env
echo VITE_CALLBACK_KEY=${{ vars.VITE_CALLBACK_KEY }} >> .env
cat .env
- name: Install node
uses: actions/setup-node@v4
with:
cache: "npm"
cache-dependency-path: "web/package-lock.json"
node-version-file: "web/.nvmrc"
- name: Installing node deps
run: npm install
- name: Lint files
run: npm run lint
build-web:
defaults:
run:
working-directory: web
runs-on: ubuntu-latest
needs: [lint-web]
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Create env file
run: |
touch .env
echo VITE_ACCOUNT=${{ vars.VITE_ACCOUNT }} >> .env
echo VITE_CONNECT=${{ vars.VITE_CONNECT }} >> .env
echo VITE_UNRAID_NET=${{ vars.VITE_UNRAID_NET }} >> .env
echo VITE_CALLBACK_KEY=${{ vars.VITE_CALLBACK_KEY }} >> .env
cat .env
- name: Install node
uses: actions/setup-node@v4
with:
cache: "npm"
cache-dependency-path: "web/package-lock.json"
node-version-file: "web/.nvmrc"
- name: Installing node deps
run: npm install
- name: Build
run: npm run build
- name: Upload build to Github artifacts
uses: actions/upload-artifact@v4
with:
name: unraid-web
path: web/.nuxt/nuxt-custom-elements/dist/unraid-components

View File

@@ -1,6 +1,7 @@
name: CI - Main (API)
on:
pull_request:
push:
branches:
- main
@@ -13,6 +14,7 @@ concurrency:
jobs:
release-please:
if: startsWith(github.ref, 'refs/tags/')
runs-on: ubuntu-latest
permissions:
contents: write
@@ -32,153 +34,66 @@ jobs:
- name: Validate branch and tag
run: exit 0
lint-api:
continue-on-error: true
build-test-api:
name: Build and Test API
runs-on: ubuntu-latest
defaults:
run:
working-directory: api
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Reconfigure git to use HTTP authentication
run: >
git config --global url."https://github.com/".insteadOf
ssh://git@github.com/
- name: Install node
uses: actions/setup-node@v4
with:
node-version-file: "api/.nvmrc"
# - name: Get npm cache directory
# id: npm-cache
# run: echo "::set-output name=dir::$(npm config get cache)"
# - name: Load npm cache
# uses: actions/cache@v3
# with:
# path: ${{ steps.npm-cache.outputs.dir }}
# key: ${{ runner.os }}-npm-cache-${{ hashFiles('**/package-lock.json') }}
- name: Install libvirt-dev
run: sudo apt-get update && sudo apt-get install libvirt-dev
- name: Installing node deps
run: npm install
- name: Lint files
run: npm run lint
test-api:
defaults:
run:
working-directory: api
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Reconfigure git to use HTTP authentication
run: >
git config --global url."https://github.com/".insteadOf
ssh://git@github.com/
- name: Build Docker Compose
run: |
docker network create mothership_default
GIT_SHA=$(git rev-parse --short HEAD) IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '') docker compose build builder
- name: Run Docker Compose
run: GIT_SHA=$(git rev-parse --short HEAD) IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '') docker compose run builder npm run coverage
lint-web:
defaults:
run:
working-directory: web
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Create env file
run: |
touch .env
echo VITE_ACCOUNT=${{ vars.VITE_ACCOUNT }} >> .env
echo VITE_CONNECT=${{ vars.VITE_CONNECT }} >> .env
echo VITE_UNRAID_NET=${{ vars.VITE_UNRAID_NET }} >> .env
echo VITE_CALLBACK_KEY=${{ vars.VITE_CALLBACK_KEY }} >> .env
cat .env
- name: Install node
uses: actions/setup-node@v4
with:
cache: "npm"
cache-dependency-path: "web/package-lock.json"
node-version-file: "web/.nvmrc"
- name: Installing node deps
run: npm install
- name: Lint files
run: npm run lint
build-api:
defaults:
run:
working-directory: api
runs-on: ubuntu-latest
outputs:
API_VERSION: ${{ steps.build-pack-binary.outputs.API_VERSION }}
API_VERSION: ${{ steps.vars.outputs.API_VERSION }}
API_MD5: ${{ steps.set-hashes.outputs.API_MD5 }}
API_SHA256: ${{ steps.set-hashes.outputs.API_SHA256 }}
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Add SSH deploy key
uses: shimataro/ssh-key-action@v2
- name: Build with Buildx
uses: docker/setup-buildx-action@v3
with:
key: ${{ secrets.UNRAID_BOT_SSH_KEY }}
known_hosts: ${{ secrets.KNOWN_HOSTS }}
- name: Install node
uses: actions/setup-node@v4
install: true
platforms: linux/amd64
- name: Build Builder
uses: docker/build-push-action@v6
with:
node-version-file: "api/.nvmrc"
context: ./api
push: false
tags: builder:latest
cache-from: type=gha,ref=builder:latest
cache-to: type=gha,mode=max,ref=builder:latest
load: true
- name: Lint inside of the docker container
continue-on-error: true
run: |
docker run --rm builder npm run lint
- name: Install libvirt-dev
run: sudo apt-get update && sudo apt-get install libvirt-dev
- name: Installing node deps
run: npm install
- name: Install pkg and node-prune
run: npm i -g pkg && curl -sf https://gobinaries.com/tj/node-prune | sh
# See https://github.com/apollographql/subscriptions-transport-ws/issues/433
- name: Patch subscriptions-transport-ws
run: npm run patch:subscriptions-transport-ws
- name: Build and Pack
- name: Test inside of the docker container
run: |
git fetch --depth=2 origin main
if git diff --name-only --relative=api origin/main HEAD | grep -q '.'; then
docker run --rm builder npm run coverage
else
echo "No changes in /api folder, skipping coverage."
fi
- name: Get Git Short Sha and API version
id: vars
run: |
GIT_SHA=$(git rev-parse --short HEAD)
IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
PACKAGE_LOCK_VERSION=$(jq -r '.version' package-lock.json)
echo "GIT_SHA=$GIT_SHA" >> $GITHUB_OUTPUT
echo "IS_TAGGED=$IS_TAGGED" >> $GITHUB_OUTPUT
echo "PACKAGE_LOCK_VERSION=$PACKAGE_LOCK_VERSION" >> $GITHUB_OUTPUT
echo "API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")" >> $GITHUB_OUTPUT
- name: Build inside of the docker container
id: build-pack-binary
run: WORKDIR=${{ github.workspace }} && npm run build-pkg
run: |
docker run --rm -v ${{ github.workspace }}/api/deploy/release:/app/deploy/release -e API_VERSION=${{ steps.vars.outputs.API_VERSION }} builder npm run build-and-pack
- name: Set Hashes
id: set-hashes
run: |
API_MD5=$(md5sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')
API_SHA256=$(sha256sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')
echo "::set-output name=API_MD5::${API_MD5}"
echo "::set-output name=API_SHA256::${API_SHA256}"
echo "API_MD5=$(md5sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')" >> $GITHUB_OUTPUT
echo "API_SHA256=$(sha256sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')" >> $GITHUB_OUTPUT
- name: Upload tgz to Github artifacts
uses: actions/upload-artifact@v4
@@ -187,13 +102,13 @@ jobs:
path: ${{ github.workspace }}/api/deploy/release/*.tgz
build-web:
name: Build Web App
environment:
name: production
defaults:
run:
working-directory: web
runs-on: ubuntu-latest
environment:
name: production
needs: [lint-web]
steps:
- name: Checkout repo
uses: actions/checkout@v4
@@ -217,6 +132,13 @@ jobs:
- name: Installing node deps
run: npm install
- name: Lint files
continue-on-error: true
run: npm run lint
- name: Test
run: npm run test:ci
- name: Build
run: npm run build
@@ -227,7 +149,7 @@ jobs:
path: web/.nuxt/nuxt-custom-elements/dist/unraid-components
build-plugin:
needs: [lint-api, lint-web, test-api, build-api, build-web]
needs: [build-test-api, build-web]
defaults:
run:
working-directory: plugin
@@ -244,13 +166,58 @@ jobs:
with:
name: unraid-web
path: ./plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components
- name: Download Node.js From Slackbuilds (skipped due to node.js issues)
if: false
id: download-nodejs
run: |
# Get latest node version (based on main_node_version) from slackware
main_node_version=$(sed 's/^v//' ../api/.nvmrc)
base_node_url="https://mirrors.slackware.com/slackware/slackware64-current/slackware64/l/"
latest_nodejs=$(wget -q -O- "${base_node_url}" | grep -o "nodejs-${main_node_version}\.[0-9.]*-x86_64-[0-9]*\.txz" | sort -V | tail -n 1)
if [[ -z "${latest_nodejs}" ]]; then
echo "Error: Failed to fetch the latest nodejs version."
exit 1
fi
node_download_url="${base_node_url}${latest_nodejs}"
if ! wget -q "${node_download_url}" -O "${{ github.workspace }}/plugin/archive/${latest_nodejs}"; then
echo "Error: Failed to download nodejs package."
exit 1
fi
node_sha256=$(sha256sum "${{ github.workspace }}/plugin/archive/${latest_nodejs}" | cut -f 1 -d ' ')
echo "NODEJS_FILENAME=${latest_nodejs}" >> $GITHUB_OUTPUT
echo "NODEJS_SHA256=${node_sha256}" >> $GITHUB_OUTPUT
- name: Download nghttp3
id: download-nghttp3
run: |
# Get latest nghttp3 version
base_nghttp3_url="https://mirrors.slackware.com/slackware/slackware64-current/slackware64/n/"
latest_nghttp3=$(wget -q -O- "${base_nghttp3_url}" | grep -o "nghttp3-[0-9.]*-x86_64-[0-9]*\.txz" | sort -V | tail -n 1)
nghttp3_download_url="${base_nghttp3_url}${latest_nghttp3}"
if ! wget -q "${nghttp3_download_url}" -O "${{ github.workspace }}/plugin/archive/${latest_nghttp3}"; then
echo "Error: Failed to download nghttp3 package."
exit 1
fi
nghttp3_sha256=$(sha256sum "${{ github.workspace }}/plugin/archive/${latest_nghttp3}" | cut -f 1 -d ' ')
echo "NGHTTP3_FILENAME=${latest_nghttp3}" >> $GITHUB_OUTPUT
echo "NGHTTP3_SHA256=${nghttp3_sha256}" >> $GITHUB_OUTPUT
- name: Build Plugin
run: |
cd source/dynamix.unraid.net
export API_VERSION=${{needs.build-api.outputs.API_VERSION}}
export API_MD5=${{needs.build-api.outputs.API_MD5}}
export API_SHA256=${{needs.build-api.outputs.API_SHA256}}
bash ./pkg_build.sh s
export API_VERSION=${{needs.build-test-api.outputs.API_VERSION}}
export API_MD5=${{needs.build-test-api.outputs.API_MD5}}
export API_SHA256=${{needs.build-test-api.outputs.API_SHA256}}
export NGHTTP3_FILENAME=${{ steps.download-nghttp3.outputs.NGHTTP3_FILENAME }}
export NGHTTP3_SHA256=${{ steps.download-nghttp3.outputs.NGHTTP3_SHA256 }}
if [ -z "${API_VERSION}" ] ||
[ -z "${API_MD5}" ] ||
[ -z "${API_SHA256}" ] ||
[ -z "${NGHTTP3_FILENAME}" ] ||
[ -z "${NGHTTP3_SHA256}" ]; then
echo "Error: One or more required variables are not set."
exit 1
fi
bash ./pkg_build.sh s ${{github.event.pull_request.number}}
bash ./pkg_build.sh p
- name: Upload binary txz and plg to Github artifacts
uses: actions/upload-artifact@v4
@@ -262,7 +229,69 @@ jobs:
retention-days: 5
if-no-files-found: error
release-pull-request:
if: |
github.event_name == 'pull_request' &&
github.event.pull_request.base.ref == 'main'
runs-on: ubuntu-latest
needs: [build-plugin]
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Make PR Release Folder
run: mkdir pr-release/
- name: Download unraid-api binary tgz
uses: actions/download-artifact@v4
with:
name: unraid-api
path: pr-release
- name: Download plugin binary tgz
uses: actions/download-artifact@v4
with:
name: connect-files
- name: Write Changelog to Plugin XML
run: |
# Capture the pull request number and latest commit message
pr_number="${{ github.event.pull_request.number }}"
commit_message=$(git log -1 --pretty=%B)
# Clean up newlines, escape special characters, and handle line breaks
notes=$(echo -e "Pull Request Build: ${pr_number}\n${commit_message}" | \
sed ':a;N;$!ba;s/\n/\\n/g' | \
sed -e 's/[&\\/]/\\&/g')
# Replace <CHANGES> tag content in the file
sed -i -z -E "s/<CHANGES>(.*)<\/CHANGES>/<CHANGES>\n${notes}\n<\/CHANGES>/g" "plugins/dynamix.unraid.net.staging.plg"
- name: Copy other release files to pr-release
run: |
cp archive/*.txz pr-release/
cp plugins/dynamix.unraid.net.staging.plg pr-release/
- name: Upload to Cloudflare
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
AWS_S3_BUCKET: ${{ secrets.CF_BUCKET_PREVIEW }}
AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
AWS_REGION: "auto"
SOURCE_DIR: pr-release
DEST_DIR: unraid-api/pr/${{ github.event.pull_request.number }}
- name: Comment URL
uses: thollander/actions-comment-pull-request@v3
with:
message: |
This plugin has been deployed to Cloudflare R2 and is available for testing.
Download it at this URL: [https://preview.dl.unraid.net/unraid-api/pr/${{ github.event.pull_request.number }}/dynamix.unraid.net.staging.plg](https://preview.dl.unraid.net/unraid-api/pr/${{ github.event.pull_request.number }}/dynamix.unraid.net.staging.plg)
release-staging:
environment:
name: staging
# Only release if this is a push to the main branch
if: startsWith(github.ref, 'refs/heads/main')
runs-on: ubuntu-latest
@@ -293,9 +322,9 @@ jobs:
removeMarkdown: false
filePath: "./api/CHANGELOG.md"
- name: Run LS in unraid-api folder
- name: Copy Files for Staging Release
run: |
cp archive/dynamix.unraid.net.staging-*.txz staging-release/
cp archive/*.txz staging-release/
cp plugins/dynamix.unraid.net.staging.plg staging-release/
ls -al staging-release
@@ -316,10 +345,9 @@ jobs:
AWS_S3_BUCKET: ${{ secrets.CF_BUCKET_PREVIEW }}
AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
AWS_REGION: 'auto'
AWS_REGION: "auto"
SOURCE_DIR: staging-release
DEST_DIR: unraid-api
create-draft-release:
# Only create new draft if this is a version tag
@@ -350,6 +378,6 @@ jobs:
files: |
unraid-api-*.tgz
plugins/dynamix.unraid.net*
archive/dynamix.unraid.net*
archive/*
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -1,82 +0,0 @@
name: Pull Request Web
on:
pull_request:
paths:
- 'web/**'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-web
cancel-in-progress: true
jobs:
lint-web:
defaults:
run:
working-directory: web
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Create env file
run: |
touch .env
echo VITE_ACCOUNT=${{ vars.VITE_ACCOUNT }} >> .env
echo VITE_CONNECT=${{ vars.VITE_CONNECT }} >> .env
echo VITE_UNRAID_NET=${{ vars.VITE_UNRAID_NET }} >> .env
echo VITE_CALLBACK_KEY=${{ vars.VITE_CALLBACK_KEY }} >> .env
cat .env
- name: Install node
uses: actions/setup-node@v4
with:
cache: "npm"
cache-dependency-path: "web/package-lock.json"
node-version-file: "web/.nvmrc"
- name: Installing node deps
run: npm install
- name: Lint files
run: npm run lint
build-web:
defaults:
run:
working-directory: web
runs-on: ubuntu-latest
environment:
name: production
needs: [lint-web]
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Create env file
run: |
touch .env
echo VITE_ACCOUNT=${{ vars.VITE_ACCOUNT }} >> .env
echo VITE_CONNECT=${{ vars.VITE_CONNECT }} >> .env
echo VITE_UNRAID_NET=${{ vars.VITE_UNRAID_NET }} >> .env
echo VITE_CALLBACK_KEY=${{ vars.VITE_CALLBACK_KEY }} >> .env
cat .env
- name: Install node
uses: actions/setup-node@v4
with:
cache: "npm"
cache-dependency-path: "web/package-lock.json"
node-version-file: "web/.nvmrc"
- name: Installing node deps
run: npm install
- name: Build
run: npm run build
- name: Upload build to Github artifacts
uses: actions/upload-artifact@v4
with:
name: unraid-web
path: web/.nuxt/nuxt-custom-elements/dist/unraid-components

View File

@@ -1,183 +0,0 @@
name: Pull Request
on:
pull_request:
paths:
- api/**
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
lint-api:
services:
registry: # Using a local registry is ~3x faster than exporting the image to docker agent
image: registry:2
ports:
- 5000:5000
continue-on-error: true
defaults:
run:
working-directory: api
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
persist-credentials: true
- uses: docker/setup-buildx-action@v3
with:
# network=host driver-opt needed to push to local registry
driver-opts: network=host
- name: Build and push
uses: docker/build-push-action@v5
with:
context: api
target: builder
push: true
tags: localhost:5000/unraid-api:builder
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Lint
run: |
docker run localhost:5000/unraid-api:builder npm run lint
test-api:
services:
registry: # Using a local registry is ~3x faster than exporting the image to docker agent
image: registry:2
ports:
- 5000:5000
defaults:
run:
working-directory: api
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
persist-credentials: true
- uses: docker/setup-buildx-action@v3
with:
# network=host driver-opt needed to push to local registry
driver-opts: network=host
- name: Build and push
uses: docker/build-push-action@v5
with:
context: api
target: builder
push: true
tags: localhost:5000/unraid-api:builder
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Test
run: |
docker run localhost:5000/unraid-api:builder npm run coverage
build-api:
services:
registry: # Using a local registry is ~3x faster than exporting the image to docker agent
image: registry:2
ports:
- 5000:5000
defaults:
run:
working-directory: api
runs-on: ubuntu-latest
outputs:
API_VERSION: ${{ steps.build-pack-binary.outputs.API_VERSION }}
API_MD5: ${{ steps.set-hashes.outputs.API_MD5 }}
API_SHA256: ${{ steps.set-hashes.outputs.API_SHA256 }}
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
persist-credentials: true
- uses: docker/setup-buildx-action@v3
with:
# network=host driver-opt needed to push to local registry
driver-opts: network=host
- name: Build and push
uses: docker/build-push-action@v5
with:
context: api
target: builder
push: true
tags: localhost:5000/unraid-api:builder
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Run Build
run: docker run -e GIT_SHA=$(git rev-parse --short HEAD) -e IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match) -v $(pwd)/deploy:/app/deploy/ localhost:5000/unraid-api:builder npm run build-pkg
- name: Set Hashes
id: set-hashes
run: |
API_MD5=$(md5sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')
API_SHA256=$(sha256sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')
echo "::set-output name=API_MD5::${API_MD5}"
echo "::set-output name=API_SHA256::${API_SHA256}"
- name: Upload tgz to Github artifacts
uses: actions/upload-artifact@v4
with:
name: unraid-api
path: ${{ github.workspace }}/api/deploy/release/*.tgz
- name: Parse Changelog
id: changelog
uses: ocavue/changelog-parser-action@v1
with:
removeMarkdown: false
filePath: "./api/CHANGELOG.md"
- name: View release notes
run: |
escapedNotes=$(sed -e 's/[&\\/]/\\&/g; s/$/\\/' -e '$s/\\$//' <<<"${{steps.changelog.outputs.latestBody}}")
echo "${escapedNotes}"
build-plugin:
defaults:
run:
working-directory: plugin
runs-on: ubuntu-latest
needs: [lint-api, test-api, build-api]
steps:
- name: Set Timezone
uses: szenius/set-timezone@v1.2
with:
timezoneLinux: "America/Los_Angeles"
- name: Checkout repo
uses: actions/checkout@v4
- name: Build Plugin
run: |
cd source/dynamix.unraid.net
export API_VERSION=${{needs.build-api.outputs.API_VERSION}}
export API_MD5=${{needs.build-api.outputs.API_MD5}}
export API_SHA256=${{needs.build-api.outputs.API_SHA256}}
bash ./pkg_build.sh s
bash ./pkg_build.sh p
- name: Create release notes
run: |
LAST_RELEASE=$(git tag --list --sort=v:refname | tail -1)
echo ${LAST_RELEASE}
RELEASE_NOTES=$(git log "$LAST_RELEASE...HEAD" --pretty=format:"- %s [\`%h\`](http://github.com/$GITHUB_REPOSITORY/commit/%H)" --reverse)
echo "${RELEASE_NOTES}"
# escapedNotes=$(sed -e 's/[&\\/]/\\&/g; s/$/\\/' -e '$s/\\$//' <<<"${RELEASE_NOTES}")
# sed -i -z -E "s/<CHANGES>(.*)<\/CHANGES>/<CHANGES>\n${escapedNotes}\n<\/CHANGES>/g" "plugins/dynamix.unraid.net.staging.plg"
- name: Upload binary txz and plg to Github artifacts
uses: actions/upload-artifact@v4
with:
name: connect-files
path: |
${{ github.workspace }}/plugin/archive/*.txz
${{ github.workspace }}/plugin/plugins/*.plg
retention-days: 5
if-no-files-found: error

View File

@@ -1,11 +1,12 @@
PATHS_UNRAID_DATA=./dev/data # Where we store plugin data (e.g. permissions.json)
PATHS_STATES=./dev/states # Where .ini files live (e.g. vars.ini)
PATHS_DYNAMIX_BASE=./dev/dynamix # Dynamix's data directory
PATHS_DYNAMIX_CONFIG_DEFAULT=./dev/dynamix/default.cfg # Dynamix's default config file, which ships with unraid
PATHS_DYNAMIX_CONFIG=./dev/dynamix/dynamix.cfg # Dynamix's config file
PATHS_MY_SERVERS_CONFIG=./dev/Unraid.net/myservers.cfg # My servers config file
PATHS_MY_SERVERS_FB=./dev/Unraid.net/fb_keepalive # My servers flashbackup timekeeper file
PATHS_KEYFILE_BASE=./dev/Unraid.net # Keyfile location
PATHS_MACHINE_ID=./dev/data/machine-id
ENVIRONMENT="development"
NODE_ENV="development"
PORT="3001"
@@ -16,3 +17,5 @@ NODE_TLS_REJECT_UNAUTHORIZED=0
BYPASS_PERMISSION_CHECKS=false
BYPASS_CORS_CHECKS=true
CHOKIDAR_USEPOLLING=true
LOG_TRANSPORT=console
LOG_LEVEL=trace

View File

@@ -1,47 +0,0 @@
/** @type {import('eslint').Linter.Config} */
module.exports = {
root: true,
plugins: [
'@typescript-eslint/eslint-plugin',
'unused-imports',
'eslint-plugin-unicorn',
],
ignorePatterns: ['src/graphql/generated/**/*.ts', '*.test.ts', 'tsup.config.ts', 'vite.config.ts'],
parser: '@typescript-eslint/parser',
rules: {
'@typescript-eslint/no-redundant-type-constituents': 'off',
'@typescript-eslint/no-unsafe-call': 'off',
'@typescript-eslint/naming-convention': 'off',
'@typescript-eslint/no-unsafe-assignment': 'off',
'@typescript-eslint/no-unsafe-return': 'off',
'@typescript-eslint/ban-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/consistent-type-imports': [
'warn',
{ fixStyle: 'inline-type-imports' },
],
'unicorn/numeric-separators-style': [
'error',
{ number: { minimumDigits: 0, groupLength: 3 } },
],
'import/no-cycle': 'off', // Change this to "error" to find circular imports
'@typescript-eslint/no-use-before-define': ['error'],
'no-multiple-empty-lines': ['error', { max: 1, maxBOF: 0, maxEOF: 1 }],
},
overrides: [
{
files: ['*.ts'],
extends: [
'eslint:recommended',
'plugin:@typescript-eslint/recommended',
],
parserOptions: {
project: true,
tsconfigRootDir: __dirname,
},
rules: {
'@typescript-eslint/no-explicit-any': 'off',
},
},
],
};

21
api/.eslintrc.ts Normal file
View File

@@ -0,0 +1,21 @@
import type { Linter } from 'eslint';
import eslint from '@eslint/js';
import tseslint from 'typescript-eslint';
export default tseslint.config(eslint.configs.recommended, ...tseslint.configs.recommended, {
rules: {
'@typescript-eslint/no-redundant-type-constituents': 'off',
'@typescript-eslint/no-unsafe-call': 'off',
'@typescript-eslint/naming-convention': 'off',
'@typescript-eslint/no-unsafe-assignment': 'off',
'@typescript-eslint/no-unsafe-return': 'off',
'@typescript-eslint/ban-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-empty-object-type': 'off',
'no-use-before-define': ['off'],
'no-multiple-empty-lines': ['error', { max: 1, maxBOF: 0, maxEOF: 1 }],
'@typescript-eslint/no-unused-vars': 'off',
'@typescript-eslint/no-unused-expressions': 'off',
},
});

View File

@@ -1 +1 @@
18.19.1
v20

View File

@@ -1,8 +1,38 @@
// prettier.config.js or .prettierrc.js
/**
* @see https://prettier.io/docs/en/configuration.html
* @type {import("prettier").Config}
*/
module.exports = {
trailingComma: "es5",
trailingComma: 'es5',
tabWidth: 4,
semi: true,
singleQuote: true,
printWidth: 105,
plugins: ['@ianvs/prettier-plugin-sort-imports'],
// decorators-legacy lets the import sorter transform files with decorators
importOrderParserPlugins: ['typescript', 'decorators-legacy'],
importOrder: [
/**----------------------
* Nest.js & node.js imports
*------------------------**/
'<TYPES>^@nestjs(/.*)?$',
'^@nestjs(/.*)?$', // matches imports starting with @nestjs
'<TYPES>^(node:)',
'<BUILTIN_MODULES>', // Node.js built-in modules
'',
/**----------------------
* Third party packages
*------------------------**/
'<TYPES>',
'<THIRD_PARTY_MODULES>', // Imports not matched by other special words or groups.
'',
/**----------------------
* Application Code
*------------------------**/
'<TYPES>^@app(/.*)?$', // matches type imports starting with @app
'^@app(/.*)?$',
'',
'<TYPES>^[.]',
'^[.]', // relative imports
],
};

View File

@@ -1,7 +1,7 @@
###########################################################
# Development/Build Image
###########################################################
FROM node:18.19.1-bookworm-slim As development
FROM node:20-bookworm-slim AS development
# Install build tools and dependencies
RUN apt-get update -y && apt-get install -y \
@@ -20,17 +20,10 @@ WORKDIR /app
# Set app env
ENV NODE_ENV=development
# Setup cache for pkg
ENV PKG_CACHE_PATH /app/.pkg-cache
RUN mkdir -p ${PKG_CACHE_PATH}
COPY tsconfig.json tsup.config.ts .eslintrc.cjs .npmrc .env.production .env.staging ./
COPY tsconfig.json .eslintrc.ts .npmrc .env.production .env.staging ./
COPY package.json package-lock.json ./
# Install pkg
RUN npm i -g pkg zx
# Install deps
RUN npm i
@@ -42,6 +35,8 @@ EXPOSE 4000
FROM development AS builder
ENV NODE_ENV=production
COPY . .
CMD ["npm", "run", "build-pkg"]
CMD ["npm", "run", "build-and-pack"]

View File

@@ -2,7 +2,11 @@
## Installation
Install the production plugin via the apps tab (search for "my servers") on Unraid 6.9.2 or later.
Install the production plugin via the apps tab (search for "Unraid Connect")
Manual install can be done with the following routes:
[production](https://stable.dl.unraid.net/unraid-api/dynamix.unraid.net.plg)
[staging](https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.staging.plg)
## CLI
@@ -31,7 +35,7 @@ Options:
--environment production/staging/development Set the working environment.
--log-level ALL/TRACE/DEBUG/INFO/WARN/ERROR/FATAL/MARK/OFF Set the log level.
Copyright © 2022 Lime Technology, Inc.
Copyright © 2024 Lime Technology, Inc.
```
@@ -55,4 +59,4 @@ unraid-api report -vv
If you found this file you're likely a developer. If you'd like to know more about the API and when it's available please join [our discord](https://discord.unraid.net/).
## License
Copyright 2019-2022 Lime Technology Inc. All rights reserved.
Copyright Lime Technology Inc. All rights reserved.

View File

@@ -1,5 +1,5 @@
[api]
version="3.8.1+d06e215a"
version="3.11.0"
extraOrigins="https://google.com,https://test.com"
[local]
[notifier]

View File

@@ -0,0 +1,80 @@
[confirm]
down="1"
stop="1"
[display]
width=""
font=""
tty="15"
date="%c"
time="%R"
number=".,"
unit="C"
scale="-1"
resize="0"
wwn="0"
total="1"
banner=""
header=""
background=""
tabs="1"
users="Tasks:3"
usage="0"
text="1"
warning="70"
critical="90"
hot="45"
max="55"
hotssd="60"
maxssd="70"
power=""
theme="white"
locale=""
raw=""
rtl=""
headermetacolor=""
headerdescription="yes"
showBannerGradient="yes"
[parity]
mode="0"
hour="0 0"
dotm="1"
month="1"
day="0"
cron=""
write="NOCORRECT"
[notify]
display="0"
life="5"
date="d-m-Y"
time="H:i"
position="top-right"
path="/tmp/notifications"
system="*/1 * * * *"
entity="1"
normal="1"
warning="1"
alert="1"
unraid="1"
plugin="1"
docker_notify="1"
language_notify="1"
report="1"
unraidos=""
version=""
docker_update=""
language_update=""
status=""
[ssmtp]
root=""
RcptTo=""
SetEmailPriority="True"
Subject="Unraid Status: "
server="smtp.gmail.com"
port="465"
UseTLS="YES"
UseSTARTTLS="NO"
UseTLSCert="NO"
TLSCert=""
AuthMethod="login"
AuthUser=""
AuthPass=""

View File

@@ -3,3 +3,4 @@ event=Unraid Parity check
subject=Notice [UNRAID] - Parity check finished (0 errors)
description=Canceled
importance=warning
link=/

View File

@@ -1,11 +1,11 @@
[api]
version="3.8.1+d06e215a"
version="3.11.0"
extraOrigins="https://google.com,https://test.com"
[local]
[notifier]
apikey="unnotify_30994bfaccf839c65bae75f7fa12dd5ee16e69389f754c3b98ed7d5"
[remote]
wanaccess="no"
wanaccess="yes"
wanport="8443"
upnpEnabled="no"
apikey="_______________________BIG_API_KEY_HERE_________________________"
@@ -17,8 +17,8 @@ idtoken=""
accesstoken=""
refreshtoken=""
allowedOrigins="/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, http://localhost:8080, https://localhost:4443, https://tower.local:4443, https://192.168.1.150:4443, https://tower:4443, https://192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net:4443, https://85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net:8443, https://10-252-0-1.hash.myunraid.net:4443, https://10-252-1-1.hash.myunraid.net:4443, https://10-253-3-1.hash.myunraid.net:4443, https://10-253-4-1.hash.myunraid.net:4443, https://10-253-5-1.hash.myunraid.net:4443, https://10-100-0-1.hash.myunraid.net:4443, https://10-100-0-2.hash.myunraid.net:4443, https://10-123-1-2.hash.myunraid.net:4443, https://221-123-121-112.hash.myunraid.net:4443, https://google.com, https://test.com, https://connect.myunraid.net, https://connect-staging.myunraid.net, https://dev-my.myunraid.net:4000, https://studio.apollographql.com"
dynamicRemoteAccessType="STATIC"
dynamicRemoteAccessType="DISABLED"
[upc]
apikey="unupc_fab6ff6ffe51040595c6d9ffb63a353ba16cc2ad7d93f813a2e80a5810"
[connectionStatus]
minigraph="CONNECTED"
minigraph="ERROR_RETRYING"

View File

@@ -4,11 +4,9 @@ x-volumes: &volumes
volumes:
- ./dev:/app/dev
- ./src:/app/src
- ./patches:/app/patches
- ./package.json:/app/package.json
- ./package-lock.json:/app/package-lock.json
- ./tsconfig.json:/app/tsconfig.json
- ./tsup.config.ts:/app/tsup.config.ts
- ./vite.config.ts:/app/vite.config.ts
- ./dist/:/app/dist/
- ./deploy/:/app/deploy/
@@ -19,10 +17,11 @@ x-volumes: &volumes
- ./.env.staging:/app/.env.staging
- ./.env.test:/app/.env.test
- ./.env.development:/app/.env.development
- ./.pkg-cache:/app/.pkg-cache
- ./codegen.yml:/app/codegen.yml
- ./fix-array-type.cjs:/app/fix-array-type.cjs
- /var/run/docker.sock:/var/run/docker.sock
- ./unraid-api.js:/app/unraid-api.js
- ./ecosystem.config.json:/app/ecosystem.config.json
services:

View File

@@ -2,7 +2,9 @@
## Installation
Install the [production](https://unraid-dl.sfo2.digitaloceanspaces.com/unraid-api/dynamix.unraid.net.plg) or [staging](https://unraid-dl.sfo2.digitaloceanspaces.com/unraid-api/dynamix.unraid.net.staging.plg) plugin on Unraid 6.9.0-rc1 or later (6.9.2 or higher recommended).
Manual install can be done with the following routes:
[production](https://stable.dl.unraid.net/unraid-api/dynamix.unraid.net.plg)
[staging](https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.staging.plg)
## Connecting to the API
@@ -12,25 +14,18 @@ This can be accessed by default via `http://tower.local/graphql`.
See <https://graphql.org/learn/serving-over-http/#http-methods-headers-and-body>
### WS
If you're using the ApolloClient please see <https://github.com/apollographql/subscriptions-transport-ws#full-websocket-transport> otherwise see <https://github.com/apollographql/subscriptions-transport-ws/blob/master/PROTOCOL.md>
<br>
<hr>
<br>
## Building in Docker
To get a development environment for testing start by running this docker command:
``docker compose run build-interactive``
`npm run build:docker`
`npm run start:ddev`
which will give you an interactive shell inside of the newly build linux container.
To automatically build the plugin run the command below:
``docker compose run builder``
`npm run build:docker`
The builder command will build the plugin into deploy/release, and the interactive plugin lets you build the plugin or install node modules how you like.
@@ -42,29 +37,13 @@ Log levels can be set when the api starts via `LOG_LEVEL=all/trace/debug/info/wa
Additional detail for the log entry can be added with `LOG_CONTEXT=true` (warning, generates a lot of data).
By default, logs will be sent to syslog. Or you can set `LOG_TRANSPORT=file` to have logs saved in `/var/log/unraid-api/stdout.log`. Or enable debug mode to view logs inline.
By default, logs will be sent to syslog. Or you can set `LOG_TRANSPORT=file` to have logs saved in `/var/log/unraid-api/stdout.log`. Or enable debug mode to view logs inline.
Examples:
* `unraid-api start`
* `LOG_LEVEL=debug unraid-api start --debug`
* `LOG_LEVEL=trace LOG_CONTEXT=true LOG_TRANSPORT=file unraid-api start`
Log levels can be increased without restarting the api by issuing this command:
```
kill -s SIGUSR2 `pidof unraid-api`
```
and decreased via:
```
kill -s SIGUSR1 `pidof unraid-api`
```
<br>
<hr>
<br>
- `unraid-api start`
- `LOG_LEVEL=debug unraid-api start --debug`
- `LOG_LEVEL=trace LOG_CONTEXT=true LOG_TRANSPORT=file unraid-api start`
## Viewing data sent to mothership
@@ -72,46 +51,16 @@ If the environment variable `LOG_MOTHERSHIP_MESSAGES=true` exists, any data the
Examples:
* `LOG_MOTHERSHIP_MESSAGES=true unraid-api start`
* `LOG_MOTHERSHIP_MESSAGES=true LOG_LEVEL=debug unraid-api start --debug`
<br>
- `LOG_MOTHERSHIP_MESSAGES=true unraid-api start`
- `LOG_MOTHERSHIP_MESSAGES=true LOG_LEVEL=debug unraid-api start --debug`
<hr>
<br>
## Debug mode
Debug mode can be enabled with the `-d` or `--debug` flag.
This will enable the graphql playground and prevent the application starting as a daemon. Logs will be shown inline rather than saved to a file.
## Debug Logging
To view debug logs, change the log level when starting the API. Then type unraid-api logs to trail the logs.
Examples:
* `unraid-api start --debug`
* `LOG_LEVEL=debug unraid-api start --debug`
<br>
<hr>
<br>
## Crash API On Demand
The `PLEASE_SEGFAULT_FOR_ME` env var can be to used to make the api crash after 30 seconds:
Examples:
* `PLEASE_SEGFAULT_FOR_ME=true LOG_LEVEL=debug unraid-api start --debug`
* `PLEASE_SEGFAULT_FOR_ME=true unraid-api start`
The crash log will be stored here:
* `/var/log/unraid-api/crash.log`
* `/var/log/unraid-api/crash.json`
`crash.json` just includes the most recent crash, while the reports get appended to `crash.log`.
<br>
<hr>
<br>
- `LOG_LEVEL=debug unraid-api start`
- `unraid-api logs`
## Switching between staging and production environments
@@ -120,10 +69,6 @@ The crash log will be stored here:
3. Start the api: `unraid-api start`
4. Confirm the environment: `unraid-api report`
<br>
<hr>
<br>
## Playground
The playground can be access via `http://tower.local/graphql` while in debug mode.
@@ -131,7 +76,7 @@ To get your API key open a terminal on your server and run `cat /boot/config/plu
```json
{
"x-api-key":"__REPLACE_ME_WITH_API_KEY__"
"x-api-key": "__REPLACE_ME_WITH_API_KEY__"
}
```
@@ -139,9 +84,9 @@ Next add the query you want to run and hit the play icon.
```gql
query welcome {
welcome {
message
}
welcome {
message
}
}
```
@@ -149,39 +94,16 @@ You should get something like this back.
```json
{
"data": {
"welcome": {
"message": "Welcome root to this Unraid 6.10.0 server"
"data": {
"welcome": {
"message": "Welcome root to this Unraid 6.10.0 server"
}
}
}
}
```
Click the "Schema" and "Docs" button on the right side of the playground to learn more.
For exploring the schema visually I'd suggest using [Voyager](https://apis.guru/graphql-voyager/) (click Change Schema -> Introspection, then copy/paste the introspection query into the local Graph Playground, and copy/paste the results back into Voyager).
<br>
<hr>
<br>
## Running this locally
```bash
MOTHERSHIP_RELAY_WS_LINK=ws://localhost:8000 \ # Switch to local copy of mothership
PATHS_UNRAID_DATA=$(pwd)/dev/data \ # Where we store plugin data (e.g. permissions.json)
PATHS_STATES=$(pwd)/dev/states \ # Where .ini files live (e.g. vars.ini)
PATHS_DYNAMIX_BASE=$(pwd)/dev/dynamix \ # Dynamix's data directory
PATHS_DYNAMIX_CONFIG=$(pwd)/dev/dynamix/dynamix.cfg \ # Dynamix's config file
PATHS_MY_SERVERS_CONFIG=$(pwd)/dev/Unraid.net/myservers.cfg \ # My servers config file
PORT=8500 \ # What port unraid-api should start on (e.g. /var/run/unraid-api.sock or 8000)
node dist/cli.js --debug # Enable debug logging
```
<br>
<hr>
<br>
## Create a new release
To create a new version run `npm run release` and then run **ONLY** the `git push` section of the commands it returns.
@@ -189,41 +111,6 @@ To create a new prerelease run `npm run release -- --prerelease alpha`.
Pushing to this repo will cause an automatic "rolling" release to be built which can be accessed via the page for the associated Github action run.
<br>
<hr>
<br>
## Using a custom version (e.g. testing a new release)
1. Install the staging or production plugin (links in the Installation section at the top of this file)
2. Download or build the api tgz file you want
* Download from [the releases page](https://github.com/unraid/api/releases)
* Build it on your local machine (``docker compose run builder``) and copy from the `deploy/release` folder
3. Copy the file to `/boot/config/plugins/dynamix.my.servers/unraid-api.tgz`.
4. Install the new api: `/etc/rc.d/rc.unraid-api (install / _install)`
* `_install` will no start the plugin for you after running, so you can make sure you launch in dev mode
* `install` will start the plugin after install
5. Start the api: `unraid-api start`
6. Confirm the version: `unraid-api report`
## Cloning Secrets from AWS
1. Go here to create security credentials for your user [S3 Creds](https://us-east-1.console.aws.amazon.com/iam/home?region=us-east-1&skipRegion=true#/security_credentials)
2. Export your AWS secrets OR run `aws configure` to setup your environment
```sh
export AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE
export AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
export AWS_DEFAULT_REGION=us-east-1
```
3. Set variables for staging and production to the ARN of the secret you would like to clone:
* `STAGING_SECRET_ID`
* `PRODUCTION_SECRET_ID`
4. Run `scripts/copy-env-from-aws.sh` to pull down the secrets into their respective files
Find the Pull Request you'd like to install, and a link will be present as a comment to install a PR-specific version.

18
api/ecosystem.config.json Normal file
View File

@@ -0,0 +1,18 @@
{
"apps": [
{
"name": "unraid-api",
"script": "npm",
"args": "start",
"cwd": "/usr/local/unraid-api",
"log": "/var/log/unraid-api/unraid-api.log",
"exec_mode": "fork",
"ignore_watch": [
"node_modules",
"src",
".env.*",
"myservers.cfg"
]
}
]
}

17095
api/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,107 +1,86 @@
{
"name": "@unraid/api",
"version": "3.11.0",
"main": "dist/index.js",
"bin": "dist/unraid-api.cjs",
"main": "src/cli/index.ts",
"type": "module",
"repository": "git@github.com:unraid/api.git",
"author": "Alexis Tyler <xo@wvvw.me> (https://wvvw.me/)",
"author": "Lime Technology, Inc. <unraid.net>",
"license": "UNLICENSED",
"engines": {
"node": ">=16.5.0"
},
"pkg": {
"assets": [
"dist/index.cjs",
"node_modules/@vmngr/libvirt/build/Release",
"node_modules/ts-invariant/",
"src/**/*.graphql"
],
"targets": [
"node18-linux-x64"
],
"outputPath": "dist"
},
"scripts": {
"compile": "tsup --config ./tsup.config.ts",
"bundle": "pkg . --public",
"build": "npm run compile && npm run bundle",
"start": "node dist/main.js",
"build:docker": "./scripts/dc.sh run --rm builder",
"build-pkg": "./scripts/build.mjs",
"build": "vite build --mode=production",
"postbuild": "chmod +x dist/main.js && chmod +x dist/cli.js",
"build-and-pack": "./scripts/build.mjs",
"codegen": "MOTHERSHIP_GRAPHQL_LINK='https://staging.mothership.unraid.net/ws' graphql-codegen --config codegen.yml -r dotenv/config './.env.staging'",
"codegen:watch": "DOTENV_CONFIG_PATH='./.env.staging' graphql-codegen --config codegen.yml --watch -r dotenv/config",
"codegen:local": "NODE_TLS_REJECT_UNAUTHORIZED=0 MOTHERSHIP_GRAPHQL_LINK='https://mothership.localhost/ws' graphql-codegen-esm --config codegen.yml --watch",
"tsc": "tsc --noEmit",
"lint": "DEBUG=eslint:cli-engine eslint . --config .eslintrc.cjs",
"lint:fix": "DEBUG=eslint:cli-engine eslint . --fix --config .eslintrc.cjs",
"lint": "eslint --flag unstable_ts_config --config .eslintrc.ts src/",
"lint:fix": "eslint --flag unstable_ts_config --fix --config .eslintrc.ts src/",
"test:watch": "vitest --pool=forks",
"test": "vitest run --pool=forks",
"coverage": "vitest run --coverage",
"patch:subscriptions-transport-ws": "node ./.scripts/patches/subscriptions-transport-ws.cjs",
"coverage": "vitest run --pool=forks --coverage",
"release": "standard-version",
"typesync": "typesync",
"install:unraid": "./scripts/install-in-unraid.sh",
"start:plugin": "INTROSPECTION=true LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty LOG_LEVEL=trace unraid-api start --debug",
"start:plugin-verbose": "LOG_CONTEXT=true LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty LOG_LEVEL=trace unraid-api start --debug",
"start:dev": "LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty NODE_ENV=development LOG_LEVEL=trace NODE_ENV=development tsup --config ./tsup.config.ts --watch --onSuccess 'DOTENV_CONFIG_PATH=./.env.development node -r dotenv/config dist/unraid-api.cjs start --debug'",
"restart:dev": "LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty NODE_ENV=development LOG_LEVEL=trace NODE_ENV=development tsup --config ./tsup.config.ts --watch --onSuccess 'DOTENV_CONFIG_PATH=./.env.development node -r dotenv/config dist/unraid-api.cjs restart --debug'",
"stop:dev": "LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty NODE_ENV=development LOG_LEVEL=trace NODE_ENV=development tsup --config ./tsup.config.ts --onSuccess 'DOTENV_CONFIG_PATH=./.env.development node -r dotenv/config dist/unraid-api.cjs stop --debug'",
"start:report": "LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty NODE_ENV=development LOG_LEVEL=trace NODE_ENV=development LOG_CONTEXT=true tsup --config ./tsup.config.ts --watch --onSuccess 'DOTENV_CONFIG_PATH=./.env.development node -r dotenv/config dist/unraid-api.cjs report --debug'",
"build:dev": "./scripts/dc.sh build dev",
"start:local": "./scripts/dc.sh run --rm --service-ports local",
"start:ddev": "./scripts/dc.sh run --rm --service-ports dev",
"start:dtest": "./scripts/dc.sh run --rm builder npm run test",
"enter:ddev": "./scripts/dc.sh exec dev /bin/sh"
"dev": "vite",
"container:build": "./scripts/dc.sh build dev",
"container:start": "./scripts/dc.sh run --rm --service-ports dev",
"container:test": "./scripts/dc.sh run --rm builder npm run test",
"container:enter": "./scripts/dc.sh exec dev /bin/bash"
},
"files": [
".env.staging",
".env.production",
"dist",
"unraid-api"
"ecosystem.config.json",
"README.md",
"src",
"node_modules/"
],
"bin": {
"unraid-api": "dist/cli.js"
},
"dependencies": {
"@apollo/client": "^3.10.4",
"@apollo/server": "^4.10.4",
"@apollo/client": "^3.11.8",
"@apollo/server": "^4.11.2",
"@as-integrations/fastify": "^2.1.1",
"@graphql-codegen/client-preset": "^4.2.5",
"@fastify/cookie": "^9.4.0",
"@graphql-codegen/client-preset": "^4.5.0",
"@graphql-tools/load-files": "^7.0.0",
"@graphql-tools/merge": "^9.0.4",
"@graphql-tools/schema": "^10.0.3",
"@graphql-tools/utils": "^10.2.0",
"@nestjs/apollo": "^12.1.0",
"@nestjs/core": "^10.3.8",
"@nestjs/graphql": "^12.1.1",
"@graphql-tools/merge": "^9.0.8",
"@graphql-tools/schema": "^10.0.7",
"@graphql-tools/utils": "^10.5.5",
"@nestjs/apollo": "^12.2.1",
"@nestjs/core": "^10.4.7",
"@nestjs/graphql": "^12.2.1",
"@nestjs/passport": "^10.0.3",
"@nestjs/platform-fastify": "^10.3.8",
"@nestjs/schedule": "^4.0.2",
"@reduxjs/toolkit": "^2.2.4",
"@nestjs/platform-fastify": "^10.4.7",
"@nestjs/schedule": "^4.1.1",
"@reduxjs/toolkit": "^2.3.0",
"@reflet/cron": "^1.3.1",
"@runonflux/nat-upnp": "^1.0.2",
"accesscontrol": "^2.2.1",
"am": "github:unraid/am",
"async-exit-hook": "^2.0.1",
"btoa": "^1.2.1",
"bycontract": "^2.0.11",
"bytes": "^3.1.2",
"cacheable-lookup": "^6.1.0",
"cacheable-lookup": "^7.0.0",
"camelcase-keys": "^9.1.3",
"catch-exit": "^1.2.2",
"chokidar": "^3.6.0",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.1",
"chokidar": "^4.0.1",
"cli-table": "^0.3.11",
"command-exists": "^1.2.9",
"convert": "^4.14.1",
"cors": "^2.8.5",
"convert": "^5.5.1",
"cross-fetch": "^4.0.0",
"docker-event-emitter": "^0.3.0",
"dockerode": "^3.3.5",
"dotenv": "^16.4.5",
"express": "^4.19.2",
"execa": "^9.5.1",
"exit-hook": "^4.0.0",
"express": "^4.21.1",
"filenamify": "^6.0.0",
"find-process": "^1.4.7",
"fs-extra": "^11.2.0",
"global-agent": "^3.0.0",
"graphql": "^16.8.1",
"got": "^14.4.4",
"graphql": "^16.9.0",
"graphql-fields": "^2.0.3",
"graphql-scalars": "^1.23.0",
"graphql-subscriptions": "^2.0.0",
@@ -109,113 +88,99 @@
"graphql-type-json": "^0.3.2",
"graphql-type-uuid": "^0.2.0",
"graphql-ws": "^5.16.0",
"htpasswd-js": "^1.0.2",
"ini": "^4.1.2",
"ip": "^2.0.1",
"jose": "^5.3.0",
"lodash": "^4.17.21",
"multi-ini": "^2.2.0",
"ip-regex": "^5.0.0",
"jose": "^5.9.6",
"lodash-es": "^4.17.21",
"multi-ini": "^2.3.2",
"mustache": "^4.2.0",
"nanobus": "^4.5.0",
"nest-access-control": "^3.1.0",
"nest-authz": "^2.11.0",
"nestjs-pino": "^4.0.0",
"nestjs-pino": "^4.1.0",
"node-cache": "^5.1.2",
"node-window-polyfill": "^1.0.2",
"openid-client": "^5.6.5",
"p-iteration": "^1.1.8",
"p-retry": "^4.6.2",
"openid-client": "^6.1.3",
"p-retry": "^6.2.0",
"passport-custom": "^1.1.1",
"passport-http-header-strategy": "^1.1.0",
"path-type": "^6.0.0",
"pidusage": "^3.0.2",
"pino": "^9.1.0",
"pino-http": "^9.0.0",
"pino-pretty": "^11.0.0",
"pino": "^9.5.0",
"pino-http": "^10.3.0",
"pino-pretty": "^11.3.0",
"pm2": "^5.4.2",
"reflect-metadata": "^0.1.14",
"request": "^2.88.2",
"semver": "^7.6.2",
"semver": "^7.6.3",
"stoppable": "^1.1.0",
"systeminformation": "^5.22.9",
"systeminformation": "^5.23.5",
"ts-command-line-args": "^2.5.1",
"uuid": "^10.0.0",
"ws": "^8.17.0",
"wtfnode": "^0.9.2",
"uuid": "^11.0.2",
"ws": "^8.18.0",
"xhr2": "^0.2.1",
"zod": "^3.23.8"
},
"devDependencies": {
"@babel/runtime": "^7.24.5",
"@graphql-codegen/add": "^5.0.2",
"@graphql-codegen/cli": "^5.0.2",
"@graphql-codegen/add": "^5.0.3",
"@graphql-codegen/cli": "^5.0.3",
"@graphql-codegen/fragment-matcher": "^5.0.2",
"@graphql-codegen/import-types-preset": "^3.0.0",
"@graphql-codegen/typed-document-node": "^5.0.6",
"@graphql-codegen/typescript": "^4.0.6",
"@graphql-codegen/typescript-operations": "^4.2.0",
"@graphql-codegen/typescript-resolvers": "4.0.6",
"@graphql-codegen/typed-document-node": "^5.0.11",
"@graphql-codegen/typescript": "^4.1.1",
"@graphql-codegen/typescript-operations": "^4.3.1",
"@graphql-codegen/typescript-resolvers": "4.4.0",
"@graphql-typed-document-node/core": "^3.2.0",
"@nestjs/testing": "^10.3.8",
"@swc/core": "^1.5.7",
"@ianvs/prettier-plugin-sort-imports": "^4.4.0",
"@nestjs/testing": "^10.4.7",
"@originjs/vite-plugin-commonjs": "^1.0.3",
"@rollup/plugin-node-resolve": "^15.3.0",
"@types/async-exit-hook": "^2.0.2",
"@types/btoa": "^1.2.5",
"@types/bytes": "^3.1.4",
"@types/cli-table": "^0.3.4",
"@types/command-exists": "^1.2.3",
"@types/dockerode": "^3.3.29",
"@types/express": "^4.17.21",
"@types/cors": "^2.8.17",
"@types/dockerode": "^3.3.31",
"@types/express": "^5.0.0",
"@types/graphql-fields": "^1.3.9",
"@types/graphql-type-uuid": "^0.2.6",
"@types/ini": "^4.1.0",
"@types/lodash": "^4.17.1",
"@types/ini": "^4.1.1",
"@types/ip": "^1.1.3",
"@types/lodash": "^4.17.13",
"@types/mustache": "^4.2.5",
"@types/node": "^20.12.12",
"@types/node": "^22.9.0",
"@types/pidusage": "^2.0.5",
"@types/pify": "^5.0.4",
"@types/semver": "^7.5.8",
"@types/sendmail": "^1.4.7",
"@types/stoppable": "^1.1.3",
"@types/uuid": "^10.0.0",
"@types/ws": "^8.5.10",
"@types/ws": "^8.5.13",
"@types/wtfnode": "^0.7.3",
"@typescript-eslint/eslint-plugin": "^7.9.0",
"@typescript-eslint/parser": "^7.9.0",
"@unraid/eslint-config": "github:unraid/eslint-config",
"@vitest/coverage-v8": "^2.1.1",
"@vitest/ui": "^2.1.1",
"camelcase-keys": "^8.0.2",
"@vitest/coverage-v8": "^2.1.4",
"@vitest/ui": "^2.1.4",
"cz-conventional-changelog": "3.3.0",
"eslint": "^8.56.0",
"eslint-import-resolver-typescript": "^3.6.1",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^53.0.0",
"eslint-plugin-unused-imports": "^3.2.0",
"execa": "^7.1.1",
"filter-obj": "^5.1.0",
"got": "^13",
"graphql-codegen-typescript-validation-schema": "^0.14.1",
"ip-regex": "^5.0.0",
"json-difference": "^1.16.1",
"map-obj": "^5.0.2",
"p-props": "^5.0.0",
"path-exists": "^5.0.0",
"path-type": "^5.0.0",
"pkg": "^5.8.1",
"pretty-bytes": "^6.1.1",
"pretty-ms": "^8.0.0",
"eslint": "^9.14.0",
"graphql-codegen-typescript-validation-schema": "^0.16.0",
"jiti": "^2.4.0",
"rollup-plugin-node-externals": "^7.1.3",
"standard-version": "^9.5.0",
"tsup": "^8.0.2",
"typescript": "^5.4.5",
"typesync": "^0.12.1",
"vite-tsconfig-paths": "^4.3.2",
"vitest": "^2.1.1",
"zx": "^7.2.3"
"typescript": "^5.6.3",
"typescript-eslint": "^8.13.0",
"vite": "^5.4.10",
"vite-plugin-node": "^4.0.0",
"vite-plugin-static-copy": "^2.0.0",
"vite-tsconfig-paths": "^5.1.0",
"vitest": "^2.1.4",
"zx": "^8.2.0"
},
"optionalDependencies": {
"@vmngr/libvirt": "github:unraid/libvirt"
},
"config": {
"commitizen": {
"path": "./node_modules/cz-conventional-changelog"
"overrides": {
"eslint": {
"jiti": "2"
}
}
}

View File

@@ -2,71 +2,83 @@
import { exit } from 'process';
import { cd, $ } from 'zx';
import getTags from './get-tags.mjs'
import { getDeploymentVersion } from './get-deployment-version.mjs';
try {
// Enable colours in output
process.env.FORCE_COLOR = '1';
// Enable colours in output
process.env.FORCE_COLOR = '1';
// Ensure we have the correct working directory
process.env.WORKDIR = process.env.WORKDIR ?? process.env.PWD;
cd(process.env.WORKDIR);
// Ensure we have the correct working directory
process.env.WORKDIR ??= process.env.PWD;
cd(process.env.WORKDIR);
// Clean up last deploy
await $`rm -rf ./deploy/release`;
await $`rm -rf ./deploy/pre-pack`;
await $`mkdir -p ./deploy/release/`;
await $`mkdir -p ./deploy/pre-pack/`;
// Create deployment directories - ignore if they already exist
await $`mkdir -p ./deploy/release`;
await $`mkdir -p ./deploy/pre-pack`;
// Ensure all deps are installed
await $`npm i`;
await $`rm -rf ./deploy/release/*`;
await $`rm -rf ./deploy/pre-pack/*`;
// Build Generated Types
await $`npm run codegen`;
// Build binary
await $`npm run build`;
// Build Generated Types
await $`npm run codegen`;
// Copy binary + extra files to deployment directory
await $`cp ./dist/api ./deploy/pre-pack/unraid-api`;
await $`cp ./.env.production ./deploy/pre-pack/.env.production`;
await $`cp ./.env.staging ./deploy/pre-pack/.env.staging`;
await $`npm run build`;
// Copy app files to plugin directory
await $`cp -r ./src/ ./deploy/pre-pack/src/`;
await $`cp -r ./dist/ ./deploy/pre-pack/dist/`;
// Get package details
const { name, version } = await import('../package.json', {
assert: { type: 'json' },
}).then(pkg => pkg.default);
// Copy environment to deployment directory
const files = [
'.env.production',
'.env.staging',
'tsconfig.json',
'codegen.yml',
'ecosystem.config.json'
]
const tags = getTags(process.env);
// Decide whether to use full version or just tag
const isTaggedRelease = tags.isTagged;
const gitShaShort = tags.shortSha;
const deploymentVersion = isTaggedRelease ? version : `${version}+${gitShaShort}`;
for (const file of files) {
await $`cp ./${file} ./deploy/pre-pack/${file}`;
}
// Create deployment package.json
await $`echo ${JSON.stringify({ name, version: deploymentVersion })} > ./deploy/pre-pack/package.json`;
// Get package details
const { name, version, ...rest } = await import('../package.json', {
assert: { type: 'json' },
}).then((pkg) => pkg.default);
// # Create final tgz
await $`cp ./README.md ./deploy/pre-pack/`;
cd('./deploy/pre-pack');
await $`npm pack`;
const deploymentVersion = getDeploymentVersion(process.env, version);
// Move unraid-api.tgz to release directory
await $`mv unraid-api-${deploymentVersion}.tgz ../release`;
// Create deployment package.json
await $`echo ${JSON.stringify({
...rest,
name,
version: deploymentVersion,
})} > ./deploy/pre-pack/package.json`;
// Set API_VERSION output based on this command
await $`echo "::set-output name=API_VERSION::${deploymentVersion}"`;
// # Create final tgz
await $`cp ./README.md ./deploy/pre-pack/`;
await $`cp -r ./node_modules ./deploy/pre-pack/node_modules`;
// Install production dependencies
cd('./deploy/pre-pack');
await $`npm prune --omit=dev`;
await $`npm install --omit=dev`;
// Now we'll pack everything in the pre-pack directory
await $`tar -czf ../unraid-api-${deploymentVersion}.tgz .`;
// Move unraid-api.tgz to release directory
await $`mv ../unraid-api-${deploymentVersion}.tgz ../release`;
} catch (error) {
// Error with a command
if (Object.keys(error).includes('stderr')) {
console.log(`Failed building package. Exit code: ${error.exitCode}`);
console.log(`Error: ${error.stderr}`);
} else {
// Normal js error
console.log('Failed building package.');
console.log(`Error: ${error.message}`);
}
// Error with a command
if (Object.keys(error).includes('stderr')) {
console.log(`Failed building package. Exit code: ${error.exitCode}`);
console.log(`Error: ${error.stderr}`);
} else {
// Normal js error
console.log('Failed building package.');
console.log(`Error: ${error.message}`);
}
exit(error.exitCode);
exit(error.exitCode);
}

33
api/scripts/create-session.sh Executable file
View File

@@ -0,0 +1,33 @@
#!/bin/bash
# This script creates a mock session on a server.
# During local dev/testing, you should run it in the api container,
# so the nest.js api can authenticate cookies against it.
#
# You should also set a cookie named 'unraid_...' whose value matches
# the name of the session you created (where name is sess_<name>).
# By default, this is my-session

# Directory where PHP keeps its session files.
sessions_dir=/var/lib/php
default_session_name=my-session

if [ "$1" = "--help" ]; then
    echo "This script creates a mock session on a server."
    echo ""
    echo "Usage: $0 [options]"
    echo ""
    echo "Options:"
    echo "  [name]   Name of the session to create (default: my-session)"
    echo "  --help   Display this help message and exit"
    echo ""
    echo "Example: $0 a-session-name"
    echo ""
    echo "Current list of sessions:"
    ls "$sessions_dir"
    exit 0
fi

# Fall back to the default session name when none is supplied.
session_name="${1:-$default_session_name}"

# Create an empty session file, mimicking an entry in the PHP session store.
mkdir -p "$sessions_dir"
touch "$sessions_dir/sess_$session_name"

# Show the resulting session list for confirmation.
ls "$sessions_dir"

59
api/scripts/deploy-dev.sh Executable file
View File

@@ -0,0 +1,59 @@
#!/bin/bash
# Deploy the local ./src directory to an Unraid server over SSH, then restart
# the unraid-api service there. Remembers the last server name used so the
# argument can be omitted on subsequent runs.

# Path to store the last used server name
state_file="$HOME/.deploy_state"

# Read the last used server name from the state file
if [[ -f "$state_file" ]]; then
    last_server_name=$(cat "$state_file")
else
    last_server_name=""
fi

# Read the server name from the command-line argument or use the last used server name as the default
server_name="${1:-$last_server_name}"

# Check if the server name is provided
if [[ -z "$server_name" ]]; then
    echo "Please provide the SSH server name."
    exit 1
fi

# Save the current server name to the state file
echo "$server_name" > "$state_file"

# Source directory path
source_directory="./src"
if [ ! -d "$source_directory" ]; then
    echo "The src directory does not exist."
    exit 1
fi

# Build the rsync invocation as an argument array instead of a string passed to
# 'eval': this keeps each argument intact (no re-splitting or glob expansion)
# and avoids shell-injection via the server name.
rsync_command=(rsync -avz -e ssh "$source_directory" "root@${server_name}:/usr/local/unraid-api")
echo "Executing the following command:"
echo "${rsync_command[*]}"

# Execute the rsync command and capture the exit code
"${rsync_command[@]}"
exit_code=$?

# Run unraid-api restart on remote host
ssh "root@${server_name}" "unraid-api restart"

# Play built-in sound based on the operating system
if [[ "$OSTYPE" == "darwin"* ]]; then
    # macOS
    afplay /System/Library/Sounds/Glass.aiff
elif [[ "$OSTYPE" == "linux-gnu" ]]; then
    # Linux
    paplay /usr/share/sounds/freedesktop/stereo/complete.oga
elif [[ "$OSTYPE" == "msys" || "$OSTYPE" == "win32" ]]; then
    # Windows
    powershell.exe -c "(New-Object Media.SoundPlayer 'C:\Windows\Media\Windows Default.wav').PlaySync()"
fi

# Exit with the rsync command's exit code
exit $exit_code

View File

@@ -3,32 +3,27 @@ import { execSync } from 'child_process';
const runCommand = (command) => {
try {
return execSync(command, { stdio: 'pipe' }).toString().trim();
} catch(error) {
} catch (error) {
console.log('Failed to get value from tag command: ', command, error.message);
return;
}
};
const getTags = (env = process.env) => {
if (env.GIT_SHA) {
console.log(`Using env vars for git tags: ${env.GIT_SHA} ${env.IS_TAGGED}`)
return {
shortSha: env.GIT_SHA,
isTagged: Boolean(env.IS_TAGGED)
}
export const getDeploymentVersion = (env = process.env, packageVersion) => {
if (env.API_VERSION) {
console.log(`Using env var for version: ${env.API_VERSION}`);
return env.API_VERSION;
} else if (env.GIT_SHA && env.IS_TAGGED) {
console.log(`Using env vars for git tags: ${env.GIT_SHA} ${env.IS_TAGGED}`);
return env.IS_TAGGED ? packageVersion : `${packageVersion}+${env.GIT_SHA}`;
} else {
const gitShortSHA = runCommand('git rev-parse --short HEAD');
const isCommitTagged = runCommand('git describe --tags --abbrev=0 --exact-match') !== undefined;
console.log('gitShortSHA', gitShortSHA, 'isCommitTagged', isCommitTagged);
if (!gitShortSHA) {
throw new Error('Failing build due to missing SHA');
}
return {
shortSha: gitShortSHA,
isTagged: isCommitTagged
console.error('Failed to get git short SHA');
process.exit(1);
}
return isCommitTagged ? packageVersion : `${packageVersion}+${gitShortSHA}`;
}
}
export default getTags;
};

View File

@@ -1,31 +0,0 @@
import { beforeEach, expect, test, vi } from 'vitest';
// Preloading imports for faster tests
import '@app/cli/commands/restart';
import '@app/cli/commands/start';
import '@app/cli/commands/stop';
beforeEach(() => {
vi.resetAllMocks();
});
test('calls stop and then start', async () => {
vi.mock('@app/cli/commands/start');
vi.mock('@app/cli/commands/stop');
// Call restart
const { restart } = await import('@app/cli/commands/restart');
const { start } = await import('@app/cli/commands/start');
const { stop } = await import('@app/cli/commands/stop');
await restart();
// Check stop was called
expect(vi.mocked(stop).mock.calls.length).toBe(1);
// Check start was called
expect(vi.mocked(start).mock.calls.length).toBe(1);
// Check stop was called first
expect(vi.mocked(stop).mock.invocationCallOrder[0]).toBeLessThan(
vi.mocked(start).mock.invocationCallOrder[0]
);
});

View File

@@ -23,6 +23,9 @@ RolesBuilder {
],
},
"config": {
"read:any": [
"*",
],
"update:own": [
"*",
],
@@ -409,6 +412,14 @@ RolesBuilder {
"*",
],
},
"notifications": {
"read:any": [
"*",
],
"update:any": [
"*",
],
},
"os": {
"read:any": [
"*",

View File

@@ -3,7 +3,7 @@ import 'reflect-metadata';
import { test, expect } from 'vitest';
import { getWriteableConfig } from '@app/core/utils/files/config-file-normalizer';
import { initialState } from '@app/store/modules/config';
import { cloneDeep } from 'lodash';
import { cloneDeep } from 'lodash-es';
test('it creates a FLASH config with NO OPTIONAL values', () => {
const basicConfig = initialState;

View File

@@ -4,7 +4,6 @@ import * as apiKeyValidator from '@app/mothership/api-key/validate-api-key-with-
import { describe, expect, it, vi } from 'vitest';
import { type RecursivePartial } from '@app/types/index';
import { type RootState } from '@app/store/index';
import { logoutUser } from '@app/store/modules/config';
describe('apiKeyCheckJob Tests', () => {
it('API Check Job (with success)', async () => {

View File

@@ -0,0 +1,6 @@
/* eslint-disable */
// Test fixture: presumably simulates a running unraid-api process that
// process-management tooling should be able to terminate — TODO confirm usage.
// Uses the same process title as the real API so title-based lookups match it.
process.title = 'unraid-api';
// Keep the event loop (and therefore the process) alive, logging every 5s.
setInterval(() => {
console.log('I NEED TO DIE');
}, 5_000);

View File

@@ -0,0 +1,10 @@
/* eslint-disable */
// Test fixture: a stubborn variant of the kill-me fixture — it registers a
// SIGTERM handler that does nothing, so a plain SIGTERM cannot stop it and a
// forceful signal (e.g. SIGKILL) is required. Presumably used to exercise
// forced-shutdown paths — TODO confirm usage.
// Uses the same process title as the real API so title-based lookups match it.
process.title = 'unraid-api';
// Keep the event loop (and therefore the process) alive, logging every 5s.
setInterval(() => {
console.log('I NEED TO DIE (but i am very hard to kill)');
}, 5_000);
// Swallow SIGTERM: the default termination signal has no effect on this process.
process.on('SIGTERM', () => {
// Do nothing
console.log('you cant kill me haha');
});

View File

@@ -0,0 +1,6 @@
// Load test-environment variables from .env.test before anything else runs.
import { config } from 'dotenv';

const dotenvOptions = {
path: './.env.test',
debug: false,
encoding: 'utf-8',
};

config(dotenvOptions);

View File

@@ -6,7 +6,7 @@ exports[`loads notifications properly 1`] = `
"description": "Canceled",
"id": "/app/dev/notifications/unread/Unraid_Parity_check_1683971161.notify",
"importance": "WARNING",
"link": undefined,
"link": "/",
"subject": "Notice [UNRAID] - Parity check finished (0 errors)",
"timestamp": "2023-05-13T09:46:01.000Z",
"title": "Unraid Parity check",

View File

@@ -25,6 +25,7 @@ test('Returns paths', async () => {
"machine-id",
"log-base",
"var-run",
"auth-sessions",
]
`);
});

View File

@@ -1,11 +1,16 @@
import 'wtfnode';
#!/usr/bin/env node
import '@app/dotenv';
import { am } from 'am';
import { main } from '@app/cli/index';
import { internalLogger } from '@app/core/log';
void am(main, (error: unknown) => {
internalLogger.fatal((error as Error).message);
// Ensure process is exited
process.exit(1);
});
try {
await main();
} catch (error) {
console.log(error);
internalLogger.error({
message: 'Failed to start unraid-api',
error,
});
process.exit(1);
}

View File

@@ -1,7 +1,7 @@
import ipRegex from 'ip-regex';
import readLine from 'readline';
import { setEnv } from '@app/cli/set-env';
import { getUnraidApiPid } from '@app/cli/get-unraid-api-pid';
import { isUnraidApiRunning } from '@app/core/utils/pm2/unraid-api-running';
import { cliLogger } from '@app/core/log';
import { getters, store } from '@app/store';
import { stdout } from 'process';
@@ -13,21 +13,13 @@ import {
type getServersQuery,
type getCloudQuery,
} from '../../graphql/generated/api/operations';
import {
type ApolloQueryResult,
type ApolloClient,
type NormalizedCacheObject,
} from '@apollo/client/core/core.cjs';
import { MinigraphStatus } from '@app/graphql/generated/api/types';
import { API_VERSION } from '@app/environment';
import { loadStateFiles } from '@app/store/modules/emhttp';
import { ApolloClient, ApolloQueryResult, NormalizedCacheObject } from '@apollo/client/core/index.js';
type CloudQueryResult = NonNullable<
ApolloQueryResult<getCloudQuery>['data']['cloud']
>;
type ServersQueryResultServer = NonNullable<
ApolloQueryResult<getServersQuery>['data']['servers']
>[0];
type CloudQueryResult = NonNullable<ApolloQueryResult<getCloudQuery>['data']['cloud']>;
type ServersQueryResultServer = NonNullable<ApolloQueryResult<getServersQuery>['data']['servers']>[0];
type Verbosity = '' | '-v' | '-vv';
@@ -132,8 +124,7 @@ export const getServersData = async ({
const hashUrlRegex = () => /(.*)([a-z0-9]{40})(.*)/g;
export const anonymiseOrigins = (origins?: string[]): string[] => {
const originsWithoutSocks =
origins?.filter((url) => !url.endsWith('.sock')) ?? [];
const originsWithoutSocks = origins?.filter((url) => !url.endsWith('.sock')) ?? [];
return originsWithoutSocks
.map((origin) =>
origin
@@ -142,29 +133,17 @@ export const anonymiseOrigins = (origins?: string[]): string[] => {
// Replace ipv4 address using . separator with "IPV4ADDRESS"
.replace(ipRegex(), 'IPV4ADDRESS')
// Replace ipv4 address using - separator with "IPV4ADDRESS"
.replace(
new RegExp(ipRegex().toString().replace('\\.', '-')),
'/IPV4ADDRESS'
)
.replace(new RegExp(ipRegex().toString().replace('\\.', '-')), '/IPV4ADDRESS')
// Report WAN port
.replace(
`:${getters.config().remote.wanport || 443}`,
':WANPORT'
)
.replace(`:${getters.config().remote.wanport || 443}`, ':WANPORT')
)
.filter(Boolean);
};
const getAllowedOrigins = (
cloud: CloudQueryResult | null,
v: Verbosity
): string[] | null => {
const getAllowedOrigins = (cloud: CloudQueryResult | null, v: Verbosity): string[] | null => {
switch (v) {
case '-vv':
return (
cloud?.allowedOrigins.filter((url) => !url.endsWith('.sock')) ??
[]
);
return cloud?.allowedOrigins.filter((url) => !url.endsWith('.sock')) ?? [];
case '-v':
return anonymiseOrigins(cloud?.allowedOrigins ?? []);
default:
@@ -172,37 +151,23 @@ const getAllowedOrigins = (
}
};
const getReadableCloudDetails = (
reportObject: ReportObject,
v: Verbosity
): string => {
const error = reportObject.cloud.error
? `\n ERROR [${reportObject.cloud.error}]`
: '';
const status = reportObject.cloud.status
? reportObject.cloud.status
: 'disconnected';
const ip =
reportObject.cloud.ip && v !== ''
? `\n IP: [${reportObject.cloud.ip}]`
: '';
const getReadableCloudDetails = (reportObject: ReportObject, v: Verbosity): string => {
const error = reportObject.cloud.error ? `\n ERROR [${reportObject.cloud.error}]` : '';
const status = reportObject.cloud.status ? reportObject.cloud.status : 'disconnected';
const ip = reportObject.cloud.ip && v !== '' ? `\n IP: [${reportObject.cloud.ip}]` : '';
return `
STATUS: [${status}] ${ip} ${error}`;
};
const getReadableMinigraphDetails = (reportObject: ReportObject): string => {
const statusLine = `STATUS: [${reportObject.minigraph.status}]`;
const errorLine = reportObject.minigraph.error
? ` ERROR: [${reportObject.minigraph.error}]`
: null;
const errorLine = reportObject.minigraph.error ? ` ERROR: [${reportObject.minigraph.error}]` : null;
const timeoutLine = reportObject.minigraph.timeout
? ` TIMEOUT: [${(reportObject.minigraph.timeout || 1) / 1_000}s]`
: null; // 1 in case of divide by zero
return `
${statusLine}${errorLine ? `\n${errorLine}` : ''}${
timeoutLine ? `\n${timeoutLine}` : ''
}`;
${statusLine}${errorLine ? `\n${errorLine}` : ''}${timeoutLine ? `\n${timeoutLine}` : ''}`;
};
// Convert server to string output
@@ -215,10 +180,7 @@ const serverToString = (v: Verbosity) => (server: ServersQueryResultServer) =>
: ''
}`;
const getReadableServerDetails = (
reportObject: ReportObject,
v: Verbosity
): string => {
const getReadableServerDetails = (reportObject: ReportObject, v: Verbosity): string => {
if (!reportObject.servers) {
return '';
}
@@ -236,9 +198,7 @@ const getReadableServerDetails = (
return `
SERVERS:
ONLINE: ${reportObject.servers.online.map(serverToString(v)).join(',')}
OFFLINE: ${reportObject.servers.offline
.map(serverToString(v))
.join(',')}${invalid}`;
OFFLINE: ${reportObject.servers.offline.map(serverToString(v)).join(',')}${invalid}`;
};
const getReadableAllowedOrigins = (reportObject: ReportObject): string => {
@@ -263,7 +223,6 @@ const getVerbosity = (argv: string[]): Verbosity => {
return '';
};
// eslint-disable-next-line complexity
export const report = async (...argv: string[]) => {
// Check if the user has raw output enabled
const rawOutput = argv.includes('--raw');
@@ -295,7 +254,7 @@ export const report = async (...argv: string[]) => {
const v = getVerbosity(argv);
// Find all processes called "unraid-api" which aren't this process
const unraidApiPid = await getUnraidApiPid();
const unraidApiRunning = await isUnraidApiRunning();
// Load my servers config file into store
await store.dispatch(loadConfigFile());
@@ -321,43 +280,37 @@ export const report = async (...argv: string[]) => {
const reportObject: ReportObject = {
os: {
serverName: emhttp.var.name,
version: emhttp.var.version
version: emhttp.var.version,
},
api: {
version: API_VERSION,
status: unraidApiPid ? 'running' : 'stopped',
environment:
process.env.ENVIRONMENT ??
'THIS_WILL_BE_REPLACED_WHEN_BUILT',
status: unraidApiRunning ? 'running' : 'stopped',
environment: process.env.ENVIRONMENT ?? 'THIS_WILL_BE_REPLACED_WHEN_BUILT',
nodeVersion: process.version,
},
apiKey: isApiKeyValid ? 'valid' : cloud?.apiKey.error ?? 'invalid',
...(servers ? { servers } : {}),
myServers: {
status: config?.remote?.username
? 'authenticated'
: 'signed out',
status: config?.remote?.username ? 'authenticated' : 'signed out',
...(config?.remote?.username
? { myServersUsername: config?.remote?.username?.includes('@') ? 'REDACTED' : config?.remote.username }
? {
myServersUsername: config?.remote?.username?.includes('@')
? 'REDACTED'
: config?.remote.username,
}
: {}),
},
minigraph: {
status: cloud?.minigraphql.status ?? MinigraphStatus.PRE_INIT,
timeout: cloud?.minigraphql.timeout ?? null,
error:
cloud?.minigraphql.error ?? !cloud?.minigraphql.status
? 'API Disconnected'
: null,
cloud?.minigraphql.error ?? !cloud?.minigraphql.status ? 'API Disconnected' : null,
},
cloud: {
status: cloud?.cloud.status ?? 'error',
...(cloud?.cloud.error ? { error: cloud.cloud.error } : {}),
...(cloud?.cloud.status === 'ok'
? { ip: cloud.cloud.ip ?? 'NO_IP' }
: {}),
...(getAllowedOrigins(cloud, v)
? { allowedOrigins: getAllowedOrigins(cloud, v) }
: {}),
...(cloud?.cloud.status === 'ok' ? { ip: cloud.cloud.ip ?? 'NO_IP' } : {}),
...(getAllowedOrigins(cloud, v) ? { allowedOrigins: getAllowedOrigins(cloud, v) } : {}),
},
};
@@ -370,8 +323,8 @@ export const report = async (...argv: string[]) => {
if (jsonReport) {
stdout.write(JSON.stringify(reportObject) + '\n');
stdoutLogger.close();
return reportObject;
stdoutLogger.close();
return reportObject;
} else {
// Generate the actual report
const report = `
@@ -388,9 +341,7 @@ MY_SERVERS: ${reportObject.myServers.status}${
: ''
}
CLOUD: ${getReadableCloudDetails(reportObject, v)}
MINI-GRAPH: ${getReadableMinigraphDetails(
reportObject
)}${getReadableServerDetails(
MINI-GRAPH: ${getReadableMinigraphDetails(reportObject)}${getReadableServerDetails(
reportObject,
v
)}${getReadableAllowedOrigins(reportObject)}
@@ -405,9 +356,7 @@ MINI-GRAPH: ${getReadableMinigraphDetails(
console.log({ error });
if (error instanceof Error) {
cliLogger.trace(error);
stdoutLogger.write(
`\nFailed generating report with "${error.message}"\n`
);
stdoutLogger.write(`\nFailed generating report with "${error.message}"\n`);
return;
}

View File

@@ -1,88 +1,16 @@
import { spawn } from 'child_process';
import { addExitCallback } from 'catch-exit';
import { PM2_PATH } from '@app/consts';
import { cliLogger } from '@app/core/log';
import { mainOptions } from '@app/cli/options';
import { logToSyslog } from '@app/cli/log-to-syslog';
import { getters } from '@app/store';
import { getAllUnraidApiPids } from '@app/cli/get-unraid-api-pid';
import { API_VERSION } from '@app/environment';
import { execSync } from 'child_process';
import { join } from 'node:path';
/**
* Start a new API process.
*/
export const start = async () => {
// Set process title
cliLogger.info('Starting unraid-api with command', `${PM2_PATH} start ${join(import.meta.dirname, 'ecosystem.config.json')} --update-env`);
process.title = 'unraid-api';
const runningProcesses = await getAllUnraidApiPids();
if (runningProcesses.length > 0) {
cliLogger.info('unraid-api is Already Running!');
cliLogger.info('Run "unraid-api restart" to stop all running processes and restart');
process.exit(1);
}
// Start API
cliLogger.info('Starting unraid-api@v%s', API_VERSION);
// If we're in debug mode or we're NOT
// in debug but ARE in the child process
if (mainOptions.debug || process.env._DAEMONIZE_PROCESS) {
// Log when the API exits
addExitCallback((signal, exitCode, error) => {
if (exitCode === 0 || exitCode === 130 || signal === 'SIGTERM') {
logToSyslog('👋 Farewell. UNRAID API shutting down!');
return;
}
// Log when the API crashes
if (signal === 'uncaughtException' && error) {
logToSyslog(`⚠️ Caught exception: ${error.message}`);
}
// Log when we crash
if (exitCode) {
logToSyslog(`⚠️ UNRAID API crashed with exit code ${exitCode}`);
return;
}
logToSyslog('🛑 UNRAID API crashed without an exit code?');
});
logToSyslog('✔️ UNRAID API started successfully!');
}
// Load bundled index file
// eslint-disable-next-line @typescript-eslint/no-require-imports
require('../../index');
if (!mainOptions.debug) {
if ('_DAEMONIZE_PROCESS' in process.env) {
// In the child, clean up the tracking environment variable
delete process.env._DAEMONIZE_PROCESS;
} else {
cliLogger.debug('Daemonizing process. %s %o', process.execPath, process.argv);
// Spawn child
// First arg is path (inside PKG), second arg is restart, stop, etc, rest is args to main argument
const [path, , ...rest] = process.argv.slice(1);
const replacedCommand = [path, 'start', ...rest];
const child = spawn(process.execPath, replacedCommand, {
// In the parent set the tracking environment variable
env: Object.assign(process.env, { _DAEMONIZE_PROCESS: '1' }),
// The process MUST have it's cwd set to the
// path where it resides within the Nexe VFS
cwd: getters.paths()['unraid-api-base'],
stdio: 'ignore',
detached: true,
});
// Convert process into daemon
child.unref();
cliLogger.debug('Daemonized successfully!');
// Exit cleanly
process.exit(0);
}
}
execSync(`${PM2_PATH} start ${join(import.meta.dirname, '../../', 'ecosystem.config.json')} --update-env`, {
env: process.env,
stdio: 'inherit',
cwd: process.cwd()
});
};

View File

@@ -1,19 +1,7 @@
import prettyMs from 'pretty-ms';
import pidUsage from 'pidusage';
import { cliLogger } from '@app/core/log';
import { getUnraidApiPid } from '@app/cli/get-unraid-api-pid';
import { setEnv } from '@app/cli/set-env';
import { PM2_PATH } from '@app/consts';
import { execSync } from 'child_process';
export const status = async () => {
setEnv('LOG_TYPE', 'raw');
// Find all processes called "unraid-api" which aren't this process
const unraidApiPid = await getUnraidApiPid();
if (!unraidApiPid) {
cliLogger.info('Found no running processes.');
return;
}
const stats = await pidUsage(unraidApiPid);
cliLogger.info(`API has been running for ${prettyMs(stats.elapsed)} and is in "${process.env.ENVIRONMENT ?? 'ERR: Unknown Environment'}" mode!`);
execSync(`${PM2_PATH} status unraid-api`, { stdio: 'inherit' });
process.exit(0);
};

View File

@@ -1,44 +1,6 @@
import { cliLogger } from '@app/core/log';
import { getAllUnraidApiPids } from '@app/cli/get-unraid-api-pid';
import { sleep } from '@app/core/utils/misc/sleep';
import pRetry from 'p-retry';
/**
* Stop a running API process.
*/
import { PM2_PATH } from '@app/consts';
import { execSync } from 'child_process';
export const stop = async () => {
try {
await pRetry(async (attempts) => {
const runningApis = await getAllUnraidApiPids();
if (runningApis.length > 0) {
cliLogger.info('Stopping %s unraid-api process(es)...', runningApis.length);
runningApis.forEach(pid => process.kill(pid, 'SIGTERM'));
await sleep(50);
const newPids = await getAllUnraidApiPids();
if (newPids.length > 0) {
throw new Error('Not all processes have exited yet');
}
} else if (attempts < 1) {
cliLogger.info('Found no running processes.');
}
return true;
}, {
retries: 2,
minTimeout: 1_000,
factor: 1,
});
} catch (error: unknown) {
cliLogger.info('Process did not exit cleanly, forcing shutdown', error);
const processes = await getAllUnraidApiPids();
for (const pid of processes) {
process.kill(pid, 'SIGKILL');
await sleep(100);
}
}
await sleep(500);
execSync(`${PM2_PATH} stop unraid-api`, { stdio: 'inherit' });
};

View File

@@ -1,27 +1,17 @@
import { copyFile, readFile, writeFile } from 'fs/promises';
import { join } from 'path';
import { cliLogger } from '@app/core/log';
import { getUnraidApiPid } from '@app/cli/get-unraid-api-pid';
import { setEnv } from '@app/cli/set-env';
import { getters } from '@app/store';
import { start } from '@app/cli/commands/start';
import { stop } from '@app/cli/commands/stop';
export const switchEnv = async () => {
setEnv('LOG_TYPE', 'raw');
const paths = getters.paths();
const basePath = paths['unraid-api-base'];
const envFlashFilePath = paths['myservers-env'];
const envFile = await readFile(envFlashFilePath, 'utf-8').catch(() => '');
let shouldStartAfterRunning = false;
if (await getUnraidApiPid()) {
cliLogger.info('unraid-api is running, stopping...');
// Stop Running Process
await stop();
shouldStartAfterRunning = true;
}
await stop();
cliLogger.debug(
'Checking %s for current ENV, found %s',
@@ -70,11 +60,5 @@ export const switchEnv = async () => {
await copyFile(source, destination);
cliLogger.info('Now using %s', newEnv);
if (shouldStartAfterRunning) {
cliLogger.debug('Restarting unraid-api');
// Start Process
await start();
} else {
cliLogger.info('Run "unraid-api start" to start the API.');
}
await start();
};

43
api/src/cli/index.ts Normal file → Executable file
View File

@@ -2,37 +2,18 @@ import { parse } from 'ts-command-line-args';
import { cliLogger } from '@app/core/log';
import { type Flags, mainOptions, options, args } from '@app/cli/options';
import { setEnv } from '@app/cli/set-env';
import { env } from '@app/dotenv';
import { getters } from '@app/store';
import { execSync } from 'child_process';
import { PM2_PATH } from '@app/consts';
import * as ENVIRONMENT from '@app/environment';
const command = mainOptions.command as unknown as string;
export const main = async (...argv: string[]) => {
cliLogger.debug(env, 'Loading env file');
// Set envs
setEnv('LOG_TYPE', 'pretty');
cliLogger.debug({ paths: getters.paths() }, 'Starting CLI');
cliLogger.debug({ paths: getters.paths(), environment: ENVIRONMENT }, 'Starting CLI');
setEnv('DEBUG', mainOptions.debug ?? false);
setEnv('ENVIRONMENT', process.env.ENVIRONMENT ?? 'production');
setEnv('PORT', process.env.PORT ?? mainOptions.port ?? '9000');
setEnv(
'LOG_LEVEL',
process.env.LOG_LEVEL ?? mainOptions['log-level'] ?? 'INFO'
);
if (!process.env.LOG_TRANSPORT) {
if (process.env.ENVIRONMENT === 'production' && !mainOptions.debug) {
setEnv('LOG_TRANSPORT', 'file');
setEnv('LOG_LEVEL', 'INFO');
} else if (!mainOptions.debug) {
// Staging Environment, backgrounded plugin
setEnv('LOG_TRANSPORT', 'file');
setEnv('LOG_LEVEL', 'TRACE');
} else {
cliLogger.debug('In Debug Mode - Log Level Defaulting to: stdout');
}
}
if (!command) {
// Run help command
@@ -49,15 +30,12 @@ export const main = async (...argv: string[]) => {
start: import('@app/cli/commands/start').then((pkg) => pkg.start),
stop: import('@app/cli/commands/stop').then((pkg) => pkg.stop),
restart: import('@app/cli/commands/restart').then((pkg) => pkg.restart),
'switch-env': import('@app/cli/commands/switch-env').then(
(pkg) => pkg.switchEnv
),
logs: async () => execSync(`${PM2_PATH} logs unraid-api --lines 200`, { stdio: 'inherit' }),
'switch-env': import('@app/cli/commands/switch-env').then((pkg) => pkg.switchEnv),
version: import('@app/cli/commands/version').then((pkg) => pkg.version),
status: import('@app/cli/commands/status').then((pkg) => pkg.status),
report: import('@app/cli/commands/report').then((pkg) => pkg.report),
'validate-token': import('@app/cli/commands/validate-token').then(
(pkg) => pkg.validateToken
),
'validate-token': import('@app/cli/commands/validate-token').then((pkg) => pkg.validateToken),
};
// Unknown command
@@ -71,10 +49,5 @@ export const main = async (...argv: string[]) => {
// Run the command
await commandMethod(...argv);
// Allow the process to exit
// Don't exit when we start though
if (!['start', 'restart'].includes(command)) {
// Ensure process is exited
process.exit(0);
}
process.exit(0);
};

View File

@@ -1,5 +1,5 @@
import { getters, type RootState, store } from '@app/store';
import { uniq } from 'lodash';
import uniq from 'lodash/uniq';
import {
getServerIps,
getUrlForField,

View File

@@ -1,5 +1,6 @@
import { PORT } from '@app/environment';
import { type JSONWebKeySet } from 'jose';
import { join } from 'path';
export const getInternalApiAddress = (isHttp = true, nginxPort = 80) => {
const envPort = PORT;
@@ -46,11 +47,6 @@ export const KEEP_ALIVE_INTERVAL_MS = THREE_MINUTES_MS; // This is set to 45 sec
/**
* Graphql link.
*/
export const MOTHERSHIP_GRAPHQL_LINK =
process.env.MOTHERSHIP_GRAPHQL_LINK ??
(process.env.ENVIRONMENT === 'staging'
? 'https://staging.mothership.unraid.net/ws'
: 'https://mothership.unraid.net/ws');
export const JWKS_LOCAL_PAYLOAD: JSONWebKeySet = {
keys: [
@@ -84,3 +80,5 @@ export const KEYSERVER_VALIDATION_ENDPOINT =
/** Set the max retries for the GraphQL Client */
export const MAX_RETRIES_FOR_LINEAR_BACKOFF = 100;
export const PM2_PATH = join(import.meta.dirname, '../../', 'node_modules', '.bin', 'pm2');

View File

@@ -1,6 +0,0 @@
import NanoBus from 'nanobus';
/**
* Graphql event bus.
*/
export const bus = new NanoBus();

View File

@@ -4,7 +4,7 @@ import { AppError } from '@app/core/errors/app-error';
* API key error.
*/
export class ApiKeyError extends AppError {
// eslint-disable-next-line @typescript-eslint/no-useless-constructor
constructor(message: string) {
super(message);
}

View File

@@ -1,7 +1,6 @@
export * as modules from '@app/core/modules';
export * as notifiers from '@app/core/notifiers';
export * as utils from '@app/core/utils';
export * from '@app/core/bus';
export * from '@app/core/log';
export * from '@app/core/permission-manager';
export * from '@app/core/permissions';

View File

@@ -1,34 +1,7 @@
import { pino } from 'pino';
import { LOG_TRANSPORT, LOG_TYPE } from '@app/environment';
import { LOG_TYPE } from '@app/environment';
import pretty from 'pino-pretty';
import { chmodSync, existsSync, mkdirSync, rmSync, statSync } from 'node:fs';
import { getters } from '@app/store/index';
import { join } from 'node:path';
const makeLoggingDirectoryIfNotExists = () => {
if (!existsSync(getters.paths()['log-base'])) {
console.log('Creating logging directory');
mkdirSync(getters.paths()['log-base']);
}
chmodSync(getters.paths()['log-base'], 0o644);
if (
existsSync(`${getters.paths()['log-base']}/stdout.log`) &&
statSync(`${getters.paths()['log-base']}/stdout.log`).size > 5_000_000
) {
rmSync(`${getters.paths()['log-base']}/stdout.log`);
}
try {
rmSync(`${getters.paths()['log-base']}/stdout.log.*`);
} catch (e) {
// Ignore Error
}
};
if (LOG_TRANSPORT === 'file') {
makeLoggingDirectoryIfNotExists();
}
export const levels = [
'trace',
@@ -47,12 +20,8 @@ const level =
] ?? 'info';
export const logDestination = pino.destination({
dest:
LOG_TRANSPORT === 'file'
? join(getters.paths()['log-base'], 'stdout.log')
: 1,
minLength: 1_024,
sync: false,
sync: true,
});
const stream =
@@ -112,30 +81,3 @@ export const loggers = [
remoteQueryLogger,
apiLogger,
];
// Send SIGUSR1 to increase log level
process.on('SIGUSR1', () => {
const level = logger.level;
const nextLevel =
levels[levels.findIndex((_level) => _level === level) + 1] ?? levels[0];
loggers.forEach((logger) => {
logger.level = nextLevel;
});
internalLogger.info({
message: `Log level changed from ${level} to ${nextLevel}`,
});
});
// Send SIGUSR1 to decrease log level
process.on('SIGUSR2', () => {
const level = logger.level;
const nextLevel =
levels[levels.findIndex((_level) => _level === level) - 1] ??
levels[levels.length - 1];
loggers.forEach((logger) => {
logger.level = nextLevel;
});
internalLogger.info({
message: `Log level changed from ${level} to ${nextLevel}`,
});
});

View File

@@ -1,11 +1,11 @@
import type { CoreContext, CoreResult } from '@app/core/types';
import { bus } from '@app/core/bus';
import { AppError } from '@app/core/errors/app-error';
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
import { hasFields } from '@app/core/utils/validation/has-fields';
import { FieldMissingError } from '@app/core/errors/field-missing-error';
import { emcmd } from '@app/core/utils/clients/emcmd';
import { getters } from '@app/store';
import { pubsub } from '@app/core/pubsub';
interface Context extends CoreContext {
readonly data: {
@@ -61,7 +61,7 @@ export const addUser = async (context: Context): Promise<CoreResult> => {
}
// Update users channel with new user
bus.emit('users', {
pubsub.publish('users', {
users: {
mutation: 'CREATED',
node: [user],
@@ -69,7 +69,7 @@ export const addUser = async (context: Context): Promise<CoreResult> => {
});
// Update user channel with new user
bus.emit('user', {
pubsub.publish('user', {
user: {
mutation: 'CREATED',
node: user,

View File

@@ -4,7 +4,6 @@ import {
blockDevices,
diskLayout,
} from 'systeminformation';
import { map as asyncMap } from 'p-iteration';
import {
type Disk,
DiskInterfaceType,
@@ -91,8 +90,9 @@ export const getDisks = async (
const partitions = await blockDevices().then((devices) =>
devices.filter((device) => device.type === 'part')
);
const disks = await asyncMap(await diskLayout(), async (disk) =>
parseDisk(disk, partitions)
const diskLayoutData = await diskLayout();
const disks = await Promise.all(
diskLayoutData.map((disk) => parseDisk(disk, partitions))
);
return disks;

View File

@@ -30,7 +30,7 @@ export const getPermissions = async function (context: CoreContext): Promise<Cor
const grants = Object.entries(ac.getGrants())
.map(([name, grant]) => {
// @ts-expect-error - $extend and grants are any
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { $extend: _, ...grants } = grant;
return [name, grants];
})

View File

@@ -4,27 +4,22 @@ import type { CoreResult, CoreContext } from '@app/core/types';
import { getUnraidApiService } from '@app/core/modules/services/get-unraid-api';
import { NODE_ENV } from '@app/environment';
const devNames = [
'emhttpd',
'rest-api',
];
const devNames = ['emhttpd', 'rest-api'];
const coreNames = [
'unraid-api',
];
const coreNames = ['unraid-api'];
interface Service {
online: boolean;
uptime: string;
version: string;
online: boolean;
uptime: string;
version: string;
}
interface ServiceResult extends CoreResult {
json: Service;
json: Service;
}
interface ServiceWithName extends Service {
name: string;
name: string;
}
/**
@@ -33,39 +28,40 @@ interface ServiceWithName extends Service {
* @param services
* @param names
*/
const addNameToService = (services: ServiceResult[], names: string[]): ServiceWithName[] => services.map((service, index) => ({
name: names[index],
...service.json,
}));
const addNameToService = (services: ServiceResult[], names: string[]): ServiceWithName[] =>
services.map((service, index) => ({
name: names[index],
...service.json,
}));
interface Result extends CoreResult {
json: ServiceWithName[];
json: ServiceWithName[];
}
/**
* Get all services.
*/
export const getServices = async (context: CoreContext): Promise<Result> => {
const logErrorAndReturnEmptyArray = (error: Error) => {
logger.error(error);
return [];
};
const logErrorAndReturnEmptyArray = (error: Error) => {
logger.error(error);
return [];
};
const devServices: ServiceResult[] = NODE_ENV === 'development' ? await Promise.all([
getEmhttpdService(context),
]).catch(logErrorAndReturnEmptyArray) as ServiceResult[] : [];
const devServices: ServiceResult[] = (await Promise.all([getEmhttpdService(context)]).catch(
logErrorAndReturnEmptyArray
)) as ServiceResult[];
const coreServices: ServiceResult[] = await Promise.all([
getUnraidApiService(context),
]).catch(logErrorAndReturnEmptyArray) as ServiceResult[];
const coreServices: ServiceResult[] = (await Promise.all([getUnraidApiService(context)]).catch(
logErrorAndReturnEmptyArray
)) as ServiceResult[];
const result = [
...addNameToService(devServices, devNames),
...addNameToService(coreServices, coreNames),
];
const result = [
...addNameToService(devServices, devNames),
...addNameToService(coreServices, coreNames),
];
return {
text: `Services: ${JSON.stringify(result, null, 2)}`,
json: result,
};
return {
text: `Services: ${JSON.stringify(result, null, 2)}`,
json: result,
};
};

View File

@@ -78,7 +78,7 @@ export const updateDisk = async (context: Context): Promise<Result> => {
7: '7 hours',
8: '8 hours',
9: '9 hours',
/* eslint-enable @typescript-eslint/naming-convention */
});
// Defines the type of partition layout to create when formatting hard drives 2TB in size and smaller **only**. (All devices larger then 2TB are always set up with GPT partition tables.)
@@ -89,7 +89,7 @@ export const updateDisk = async (context: Context): Promise<Result> => {
1: 'MBR: unaligned',
2: 'MBR: 4K-aligned',
/* eslint-enable @typescript-eslint/naming-convention */
});
// Selects the method to employ when writing to enabled disk in parity protected array.
@@ -98,7 +98,7 @@ export const updateDisk = async (context: Context): Promise<Result> => {
0: 'read/modify/write',
1: 'reconstruct write',
/* eslint-enable @typescript-eslint/naming-convention */
});
// Defines the default file system type to create when an * unmountable * array device is formatted.
@@ -111,7 +111,7 @@ export const updateDisk = async (context: Context): Promise<Result> => {
'luks:xfs': 'xfs - encrypted',
'luks:btrfs': 'btrfs - encrypted',
'luks:reiserfs': 'reiserfs - encrypted',
/* eslint-enable @typescript-eslint/naming-convention */
});
const {

View File

@@ -8,8 +8,7 @@ export type Options = NotifierOptions
*/
export class HttpNotifier extends Notifier {
readonly $http = got;
// eslint-disable-next-line @typescript-eslint/no-useless-constructor
constructor(options: Options) {
super(options);
}

View File

@@ -1,10 +1,12 @@
import Mustache from 'mustache';
import { type LooseObject } from '@app/core/types';
import { type NotificationIni } from '../types/states/notification';
export type NotifierLevel = 'info' | 'warn' | 'error';
export type NotifierOptions = Partial<{
level: NotifierLevel;
importance?: NotificationIni['importance'];
helpers?: Record<string, unknown>;
template?: string;
}>;

View File

@@ -0,0 +1,39 @@
import { logger } from '@app/core/log';
import { Notifier, type NotifierSendOptions, type NotifierOptions } from '@app/core/notifiers/notifier';
import { execa } from 'execa';
type ValidLocalLevels = 'alert' | 'warning' | 'normal';
export class UnraidLocalNotifier extends Notifier {
private convertNotifierLevel(level: NotifierOptions['level']): ValidLocalLevels {
switch (level) {
case 'error':
return 'alert';
case 'warn':
return 'warning';
case 'info':
return 'normal';
default:
return 'normal';
}
}
constructor(options: NotifierOptions = {}) {
super(options);
this.level = options.importance ?? this.convertNotifierLevel(options.level ?? 'info');
this.template = options.template ?? '{{ message }}';
}
async send(options: NotifierSendOptions) {
const { title, data } = options;
const { level } = this;
const template = this.render(data);
try {
await execa('/usr/local/emhttp/webGui/scripts/notify', ['-i', `${level}`, '-s', 'Unraid API', '-d', `${template}`, '-e', `${title}`]);
} catch (error: unknown) {
logger.warn(`Error sending unraid notification: ${error instanceof Error ? error.message : 'No Error Information'}`);
}
}
}

View File

@@ -20,6 +20,7 @@ const roles: Record<string, Role> = {
{ resource: 'apikey', action: 'read:any', attributes: '*' },
{ resource: 'cloud', action: 'read:own', attributes: '*' },
{ resource: 'config', action: 'update:own', attributes: '*' },
{ resource: 'config', action: 'read:any', attributes: '*' },
{ resource: 'connect', action: 'read:own', attributes: '*' },
{ resource: 'connect', action: 'update:own', attributes: '*' },
{ resource: 'customizations', action: 'read:any', attributes: '*' },
@@ -117,6 +118,8 @@ const roles: Record<string, Role> = {
{ resource: 'config', action: 'update:own', attributes: '*' },
{ resource: 'connect', action: 'read:own', attributes: '*' },
{ resource: 'connect', action: 'update:own', attributes: '*' },
{ resource: 'notifications', action: 'read:any', attributes: '*' },
{ resource: 'notifications', action: 'update:any', attributes: '*' },
],
},
my_servers: {

View File

@@ -1,23 +0,0 @@
import { getters } from '@app/store';
import htpasswd from 'htpasswd-js';
interface Options {
username: string;
password: string;
file?: string;
}
/**
* Check if the username and password match a htpasswd file.
*/
export const checkAuth = async (options: Options): Promise<unknown> => {
const { username, password, file } = options;
// `valid` will be true if and only if
// username and password were correct.
return htpasswd.authenticate({
username,
password,
file: file ?? getters.paths().htpasswd,
});
};

View File

@@ -1,28 +0,0 @@
import { PermissionError } from '@app/core/errors/permission-error';
import { checkAuth } from '@app/core/utils/authentication/check-auth';
import { getters } from '@app/store';
interface Options {
username: string;
password: string;
file: string;
}
/**
* Check if the username and password match a htpasswd file
*/
export const ensureAuth = async (options: Options) => {
const { username, password, file } = options;
// `valid` will be true if and only if
// username and password were correct.
const valid = await checkAuth({
username,
password,
file: file || getters.paths().htpasswd,
});
if (!valid) {
throw new PermissionError('Invalid auth!');
}
};

View File

@@ -9,7 +9,7 @@ import type {
MyServersConfig,
MyServersConfigMemory,
} from '@app/types/my-servers-config';
import { isEqual } from 'lodash';
import { isEqual } from 'lodash-es';
export type ConfigType = 'flash' | 'memory';
type ConfigObject<T> = T extends 'flash'
@@ -23,7 +23,7 @@ type ConfigObject<T> = T extends 'flash'
* @param mode 'flash' or 'memory', changes what fields are included in the writeable payload
* @returns
*/
// eslint-disable-next-line complexity
export const getWriteableConfig = <T extends ConfigType>(
config: ConfigSliceState,
mode: T
@@ -32,7 +32,7 @@ export const getWriteableConfig = <T extends ConfigType>(
const { api, local, notifier, remote, upc, connectionStatus } = config;
// Create new state
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions
const newState: ConfigObject<T> = {
api: {
version: api?.version ?? initialState.api.version,

View File

@@ -1,7 +1,6 @@
// Created from 'create-ts-index'
export * from './array';
export * from './authentication';
export * from './clients';
export * from './plugins';
export * from './shares';

View File

@@ -1,7 +1,5 @@
import { parse as parseIni } from 'ini';
import camelCaseKeys from 'camelcase-keys';
import { includeKeys } from 'filter-obj';
import mapObject from 'map-obj';
import { AppError } from '@app/core/errors/app-error';
import { accessSync, readFileSync } from 'fs';
import { access } from 'fs/promises';
@@ -11,15 +9,15 @@ import { extname } from 'path';
type ConfigType = 'ini' | 'cfg';
type OptionsWithPath = {
/** Relative or absolute file path. */
filePath: string;
/** If the file is an "ini" or a "cfg". */
type?: ConfigType;
/** Relative or absolute file path. */
filePath: string;
/** If the file is an "ini" or a "cfg". */
type?: ConfigType;
};
type OptionsWithLoadedFile = {
file: string;
type: ConfigType;
file: string;
type: ConfigType;
};
/**
@@ -38,53 +36,66 @@ type OptionsWithLoadedFile = {
* ```
*/
const fixObjectArrays = (object: Record<string, any>) => {
// An object of arrays for keys that end in `:${number}`
const temporaryArrays = {};
// An object of arrays for keys that end in `:${number}`
const temporaryArrays = {};
// An object without any array items
const filteredObject = includeKeys(object, (key, value) => {
// eslint-disable-next-line @typescript-eslint/prefer-regexp-exec
const [, name, index] = [...((key).match(/(.*):(\d+$)/) ?? [])];
if (!name || !index) {
return true;
}
// An object without any array items
const filteredObject = Object.fromEntries(
Object.entries(object).filter(([key, value]) => {
const match = key.match(/(.*):(\d+$)/);
if (!match) {
return true;
}
// Create initial array
if (!Array.isArray(temporaryArrays[name])) {
temporaryArrays[name] = [];
}
const [, name, index] = match;
if (!name || !index) {
return true;
}
// Add value
temporaryArrays[name].push(value);
// Create initial array
if (!Array.isArray(temporaryArrays[name])) {
temporaryArrays[name] = [];
}
// Remove the old field
return false;
});
// Add value
temporaryArrays[name].push(value);
return {
...filteredObject,
...temporaryArrays,
};
// Remove the old field
return false;
})
);
return {
...filteredObject,
...temporaryArrays,
};
};
export const fileExists = async (path: string) => access(path, F_OK).then(() => true).catch(() => false);
export const fileExists = async (path: string) =>
access(path, F_OK)
.then(() => true)
.catch(() => false);
export const fileExistsSync = (path: string) => {
try {
accessSync(path, F_OK);
return true;
} catch (error: unknown) {
return false;
}
try {
accessSync(path, F_OK);
return true;
} catch (error: unknown) {
return false;
}
};
export const getExtensionFromPath = (filePath: string): string => extname(filePath);
const isFilePathOptions = (options: OptionsWithLoadedFile | OptionsWithPath): options is OptionsWithPath => Object.keys(options).includes('filePath');
const isFileOptions = (options: OptionsWithLoadedFile | OptionsWithPath): options is OptionsWithLoadedFile => Object.keys(options).includes('file');
const isFilePathOptions = (
options: OptionsWithLoadedFile | OptionsWithPath
): options is OptionsWithPath => Object.keys(options).includes('filePath');
const isFileOptions = (
options: OptionsWithLoadedFile | OptionsWithPath
): options is OptionsWithLoadedFile => Object.keys(options).includes('file');
export const loadFileFromPathSync = (filePath: string): string => {
if (!fileExistsSync(filePath)) throw new Error(`Failed to load file at path: ${filePath}`);
return readFileSync(filePath, 'utf-8').toString();
if (!fileExistsSync(filePath)) throw new Error(`Failed to load file at path: ${filePath}`);
return readFileSync(filePath, 'utf-8').toString();
};
/**
@@ -94,48 +105,51 @@ export const loadFileFromPathSync = (filePath: string): string => {
*/
const isValidConfigExtension = (extension: string): boolean => ['ini', 'cfg'].includes(extension);
export const parseConfig = <T extends Record<string, any>>(options: OptionsWithLoadedFile | OptionsWithPath): T => {
let fileContents: string;
let extension: string;
export const parseConfig = <T extends Record<string, any>>(
options: OptionsWithLoadedFile | OptionsWithPath
): T => {
let fileContents: string;
let extension: string;
if (isFilePathOptions(options)) {
const { filePath, type } = options;
if (isFilePathOptions(options)) {
const { filePath, type } = options;
const validFile = fileExistsSync(filePath);
extension = type ?? getExtensionFromPath(filePath);
const validExtension = isValidConfigExtension(extension);
const validFile = fileExistsSync(filePath);
extension = type ?? getExtensionFromPath(filePath);
const validExtension = isValidConfigExtension(extension);
if (validFile && validExtension) {
fileContents = loadFileFromPathSync(options.filePath);
} else {
throw new AppError(`Invalid File Path: ${options.filePath}, or Extension: ${extension}`);
}
} else if (isFileOptions(options)) {
const { file, type } = options;
fileContents = file;
const extension = type;
if (!isValidConfigExtension(extension)) {
throw new AppError(`Invalid Extension for Ini File: ${extension}`);
}
} else {
throw new AppError('Invalid Parameters Passed to ParseConfig');
}
if (validFile && validExtension) {
fileContents = loadFileFromPathSync(options.filePath);
} else {
throw new AppError(`Invalid File Path: ${options.filePath}, or Extension: ${extension}`);
}
} else if (isFileOptions(options)) {
const { file, type } = options;
fileContents = file;
const extension = type;
if (!isValidConfigExtension(extension)) {
throw new AppError(`Invalid Extension for Ini File: ${extension}`);
}
} else {
throw new AppError('Invalid Parameters Passed to ParseConfig');
}
const data: Record<string, any> = parseIni(fileContents);
// Remove quotes around keys
const dataWithoutQuoteKeys = mapObject(data, (key, value) =>
// @SEE: https://stackoverflow.com/a/19156197/2311366
[(key).replace(/^"(.+(?="$))"$/, '$1'), value],
);
const data: Record<string, any> = parseIni(fileContents);
// Remove quotes around keys
const dataWithoutQuoteKeys = Object.fromEntries(
Object.entries(data).map(([key, value]) => [key.replace(/^"(.+(?="$))"$/, '$1'), value])
);
// Result object with array items as actual arrays
const result = Object.fromEntries(
Object.entries(dataWithoutQuoteKeys)
.map(([key, value]) => [key, typeof value === 'object' ? fixObjectArrays(value) : value]),
);
// Result object with array items as actual arrays
const result = Object.fromEntries(
Object.entries(dataWithoutQuoteKeys).map(([key, value]) => [
key,
typeof value === 'object' ? fixObjectArrays(value) : value,
])
);
// Convert all keys to camel case
return camelCaseKeys(result, {
deep: true,
}) as T;
// Convert all keys to camel case
return camelCaseKeys(result, {
deep: true,
}) as T;
};

View File

@@ -8,6 +8,6 @@ export const getPermissions = (role: string): Record<string, Record<string, stri
const grants: Record<string, Record<string, string[]>> = ac.getGrants();
const { $extend, ...roles } = grants[role] ?? {};
const inheritedRoles = Array.isArray($extend) ? $extend.map(role => getPermissions(role))[0] : {};
// eslint-disable-next-line prefer-object-spread
return Object.assign({}, roles, inheritedRoles);
};

View File

@@ -40,16 +40,23 @@ export const phpLoader = async (options: Options) => {
encodeParameters(body),
];
return execa('php', options_, { cwd: __dirname })
.then(({ stdout }) => {
// Missing php file
if (stdout.includes(`Warning: include(${file}): failed to open stream: No such file or directory in ${path.join(__dirname, '/wrapper.php')}`)) {
throw new FileMissingError(file);
}
return execa('php', options_, { cwd: import.meta.dirname })
.then(({ stdout }) => {
// Missing php file
if (
stdout.includes(
`Warning: include(${file}): failed to open stream: No such file or directory in ${path.join(
import.meta.dirname,
'/wrapper.php'
)}`
)
) {
throw new FileMissingError(file);
}
return stdout;
})
.catch(error => {
throw new PhpError(error);
});
return stdout;
})
.catch((error) => {
throw new PhpError(error);
});
};

View File

@@ -0,0 +1,18 @@
<?php
// Borrowed with love from https://b3z13r.wordpress.com/2011/05/16/passing-values-from-the-commandline-to-php-by-getpost-method/
// e.g. `./wrapper.php GET /tmp/random_file.php?arg1=true&arg2=a-really-long-string` { "username": "root" }
$method = $argv[1];
$query_parts = explode('?', $argv[2], 2);
$file = $query_parts[0];
$query_params = $query_parts[1];
$body = $argv[3];
// Load query_params or body into correct var
if ($method === 'GET') {
parse_str($query_params, $_GET);
} else {
parse_str($body, $_POST);
}
include($file);
?>

View File

@@ -0,0 +1,26 @@
import pm2 from 'pm2';
export const isUnraidApiRunning = async (): Promise<boolean | undefined> => {
return new Promise((resolve, reject) => {
pm2.connect(function (err) {
if (err) {
console.error(err);
reject('Could not connect to pm2');
}
pm2.describe('unraid-api', function (err, processDescription) {
console.log(err);
if (err || processDescription.length === 0) {
console.log(false); // Service not found or error occurred
resolve(false);
} else {
const isOnline = processDescription?.[0]?.pm2_env?.status === 'online';
console.log(isOnline); // Output true if online, false otherwise
resolve(isOnline);
}
pm2.disconnect();
});
});
});
};

View File

@@ -1,4 +1,3 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
import { getters } from '@app/store';
import type { DiskShare, Share, UserShare } from '@app/core/types/states/share';
import { type ArrayDisk } from '@app/graphql/generated/api/types';

View File

@@ -1,5 +1,4 @@
import { execa } from 'execa';
import { map as asyncMap } from 'p-iteration';
import { sync as commandExistsSync } from 'command-exists';
interface Device {
@@ -13,9 +12,9 @@ interface Device {
* @param devices Devices to be checked.
* @returns Processed devices.
*/
export const filterDevices = async (devices: Device[]): Promise<Device[]> => asyncMap(devices, async (device: Device) => {
export const filterDevices = async (devices: Device[]): Promise<Device[]> => {
// Don't run if we don't have the udevadm command available
if (!commandExistsSync('udevadm')) return device;
if (!commandExistsSync('udevadm')) return devices;
const networkDeviceIds = await execa('udevadm', 'info -q path -p /sys/class/net/eth0'.split(' '))
.then(({ stdout }) => {
@@ -25,7 +24,11 @@ export const filterDevices = async (devices: Device[]): Promise<Device[]> => asy
.catch(() => []);
const allowed = new Set(networkDeviceIds);
device.allowed = allowed.has(device.id);
return device;
});
const processedDevices = devices.map((device: Device) => {
device.allowed = allowed.has(device.id);
return device;
});
return processedDevices;
};

View File

@@ -1,4 +1,3 @@
import pProps from 'p-props';
import { type Domain } from '@app/core/types';
import { getHypervisor } from '@app/core/utils/vms/get-hypervisor';
@@ -27,24 +26,34 @@ export const parseDomain = async (type: DomainLookupType, id: string): Promise<D
const domain = await client[method](id);
const info = await domain.getInfoAsync();
const results = await pProps({
uuid: domain.getUUIDAsync(),
osType: domain.getOSTypeAsync(),
autostart: domain.getAutostartAsync(),
maxMemory: domain.getMaxMemoryAsync(),
schedulerType: domain.getSchedulerTypeAsync(),
schedulerParameters: domain.getSchedulerParametersAsync(),
securityLabel: domain.getSecurityLabelAsync(),
name: domain.getNameAsync(),
const [uuid, osType, autostart, maxMemory, schedulerType, schedulerParameters, securityLabel, name] = await Promise.all([
domain.getUUIDAsync(),
domain.getOSTypeAsync(),
domain.getAutostartAsync(),
domain.getMaxMemoryAsync(),
domain.getSchedulerTypeAsync(),
domain.getSchedulerParametersAsync(),
domain.getSecurityLabelAsync(),
domain.getNameAsync(),
]);
const results = {
uuid,
osType,
autostart,
maxMemory,
schedulerType,
schedulerParameters,
securityLabel,
name,
...info,
state: info.state.replace(' ', '_'),
});
};
if (info.state === 'running') {
results.vcpus = await domain.getVcpusAsync();
results.memoryStats = await domain.getMemoryStatsAsync();
}
// @ts-expect-error fix pProps inferred type
return results;
};

View File

@@ -0,0 +1,15 @@
import fs from 'fs';
import path from 'path';
import { logger } from '@app/core/log';
import convert from 'convert';
/** Write `fileContents` to `filePath`, logging the payload size (in KB) first. */
const writeFile = async (filePath: string, fileContents: string | Buffer) => {
    const sizeInKb = convert(fileContents.length, 'bytes').to('kilobytes');
    logger.debug(`Writing ${sizeInKb} to ${filePath}`);
    await fs.promises.writeFile(filePath, fileContents);
};
/**
 * Write a file under the dynamix plugin directory on the flash drive.
 *
 * @param filePath Path relative to `/boot/config/plugins/dynamix/`.
 * @param fileContents Data to write.
 * @throws Error if `filePath` resolves outside the dynamix base directory
 *   (e.g. via `../` segments or an absolute path).
 */
export const writeToBoot = async (filePath: string, fileContents: string | Buffer) => {
    const basePath = '/boot/config/plugins/dynamix/';
    const resolvedPath = path.resolve(basePath, filePath);
    // path.resolve() honors absolute paths and '..' segments, so confirm the
    // target is still inside the base directory before writing.
    if (!resolvedPath.startsWith(basePath)) {
        throw new Error(`Refusing to write outside of ${basePath}: ${resolvedPath}`);
    }
    await writeFile(resolvedPath, fileContents);
};

11
api/src/dotenv.ts Normal file
View File

@@ -0,0 +1,11 @@
import { config } from 'dotenv';
// Load environment variables: a local `.env.<NODE_ENV>` file (with dotenv debug
// output) for development/test, otherwise the installed flash location.
const nodeEnv = process.env.NODE_ENV;
const isLocalEnv = nodeEnv === 'development' || nodeEnv === 'test';
const env = isLocalEnv
    ? config({ debug: true, path: `./.env.${nodeEnv}`, encoding: 'utf-8' })
    : config({ path: '/usr/local/unraid-api/.env', encoding: 'utf-8' });
export default env;

View File

@@ -1,20 +1,33 @@
export const API_VERSION = process.env.VERSION ?? 'THIS_WILL_BE_REPLACED_WHEN_BUILT';
export const NODE_ENV = process.env.NODE_ENV as 'development' | 'test' | 'staging' | 'production';
import { version } from 'package.json';
export const API_VERSION =
process.env.npm_package_version ?? version ?? new Error('API_VERSION not set');
export const NODE_ENV = process.env.NODE_ENV as 'development' | 'test' | 'staging' | 'production' ?? 'production';
export const environment = {
IS_MAIN_PROCESS: false,
IS_MAIN_PROCESS: false,
};
export const CHOKIDAR_USEPOLLING = process.env.CHOKIDAR_USEPOLLING === 'true';
export const IS_DOCKER = process.env.IS_DOCKER === 'true';
export const DEBUG = process.env.DEBUG === 'true';
export const INTROSPECTION = process.env.INTROSPECTION === 'true';
export const ENVIRONMENT = process.env.ENVIRONMENT as 'production' | 'staging' | 'development'
export const GRAPHQL_INTROSPECTION = Boolean(
INTROSPECTION ?? DEBUG ?? ENVIRONMENT !== 'production'
);
export const ENVIRONMENT = process.env.ENVIRONMENT as 'production' | 'staging' | 'development' ?? 'production';
export const GRAPHQL_INTROSPECTION = Boolean(INTROSPECTION ?? DEBUG ?? ENVIRONMENT !== 'production');
export const PORT = process.env.PORT ?? '/var/run/unraid-api.sock';
export const DRY_RUN = process.env.DRY_RUN === 'true';
export const BYPASS_PERMISSION_CHECKS = process.env.BYPASS_PERMISSION_CHECKS === 'true';
export const BYPASS_CORS_CHECKS = process.env.BYPASS_CORS_CHECKS === 'true';
export const LOG_CORS = process.env.LOG_CORS === 'true';
export const LOG_TYPE = process.env.LOG_TYPE as 'pretty' | 'raw' ?? 'pretty';
export const LOG_LEVEL = process.env.LOG_LEVEL as 'TRACE' | 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' | 'FATAL';
export const LOG_TRANSPORT = process.env.LOG_TRANSPORT as 'file' | 'stdout';
export const LOG_TYPE = (process.env.LOG_TYPE as 'pretty' | 'raw') ?? 'pretty';
export const LOG_LEVEL = process.env.LOG_LEVEL as
| 'TRACE'
| 'DEBUG'
| 'INFO'
| 'WARN'
| 'ERROR'
| 'FATAL' ?? process.env.ENVIRONMENT === 'production' ? 'INFO' : 'TRACE';
export const MOTHERSHIP_GRAPHQL_LINK =
process.env.MOTHERSHIP_GRAPHQL_LINK ??
(process.env.ENVIRONMENT === 'staging'
? 'https://staging.mothership.unraid.net/ws'
: 'https://mothership.unraid.net/ws');

View File

@@ -3,14 +3,14 @@ import {
HttpLink,
InMemoryCache,
split,
} from '@apollo/client/core/core.cjs';
import { onError } from '@apollo/client/link/error';
} from '@apollo/client/core/index.js';
import { onError } from '@apollo/client/link/error/index.js';
import { getInternalApiAddress } from '@app/consts';
import WebSocket from 'ws';
import { fetch } from 'cross-fetch';
import { getMainDefinition } from '@apollo/client/utilities';
import { getMainDefinition } from '@apollo/client/utilities/index.js';
import { graphqlLogger } from '@app/core/log';
import { GraphQLWsLink } from '@apollo/client/link/subscriptions';
import { GraphQLWsLink } from '@apollo/client/link/subscriptions/index.js';
import { createClient } from 'graphql-ws';
import { getters } from '@app/store/index';

View File

@@ -591,6 +591,7 @@ export function NotificationSchema(): z.ZodObject<Properties<Notification>> {
return z.object({
__typename: z.literal('Notification').optional(),
description: z.string(),
formattedTimestamp: z.string().nullish(),
id: z.string(),
importance: ImportanceSchema,
link: z.string().nullish(),

View File

@@ -624,7 +624,7 @@ export type Mutation = {
addUser?: Maybe<User>;
archiveAll: NotificationOverview;
/** Marks a notification as archived. */
archiveNotification: NotificationOverview;
archiveNotification: Notification;
archiveNotifications: NotificationOverview;
/** Cancel parity check */
cancelParityCheck?: Maybe<Scalars['JSON']['output']>;
@@ -632,6 +632,7 @@ export type Mutation = {
connectSignIn: Scalars['Boolean']['output'];
connectSignOut: Scalars['Boolean']['output'];
createNotification: Notification;
deleteAllNotifications: NotificationOverview;
deleteNotification: NotificationOverview;
/** Delete a user */
deleteUser?: Maybe<User>;
@@ -662,7 +663,7 @@ export type Mutation = {
unarchiveNotifications: NotificationOverview;
unmountArrayDisk?: Maybe<Disk>;
/** Marks a notification as unread. */
unreadNotification: NotificationOverview;
unreadNotification: Notification;
/** Update an existing API key */
updateApikey?: Maybe<ApiKey>;
};
@@ -817,6 +818,7 @@ export type Node = {
export type Notification = Node & {
__typename?: 'Notification';
description: Scalars['String']['output'];
formattedTimestamp?: Maybe<Scalars['String']['output']>;
id: Scalars['ID']['output'];
importance: Importance;
link?: Maybe<Scalars['String']['output']>;
@@ -1722,7 +1724,7 @@ export type DirectiveResolverFn<TResult = {}, TParent = {}, TContext = {}, TArgs
/** Mapping of interface types */
export type ResolversInterfaceTypes<RefType extends Record<string, unknown>> = ResolversObject<{
export type ResolversInterfaceTypes<_RefType extends Record<string, unknown>> = ResolversObject<{
Node: ( ArrayType ) | ( Config ) | ( Connect ) | ( Docker ) | ( Info ) | ( Network ) | ( Notification ) | ( Notifications ) | ( Service ) | ( Vars );
UserAccount: ( Me ) | ( User );
}>;
@@ -2347,13 +2349,14 @@ export type MutationResolvers<ContextType = Context, ParentType extends Resolver
addDiskToArray?: Resolver<Maybe<ResolversTypes['Array']>, ParentType, ContextType, Partial<MutationaddDiskToArrayArgs>>;
addUser?: Resolver<Maybe<ResolversTypes['User']>, ParentType, ContextType, RequireFields<MutationaddUserArgs, 'input'>>;
archiveAll?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType, Partial<MutationarchiveAllArgs>>;
archiveNotification?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType, RequireFields<MutationarchiveNotificationArgs, 'id'>>;
archiveNotification?: Resolver<ResolversTypes['Notification'], ParentType, ContextType, RequireFields<MutationarchiveNotificationArgs, 'id'>>;
archiveNotifications?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType, Partial<MutationarchiveNotificationsArgs>>;
cancelParityCheck?: Resolver<Maybe<ResolversTypes['JSON']>, ParentType, ContextType>;
clearArrayDiskStatistics?: Resolver<Maybe<ResolversTypes['JSON']>, ParentType, ContextType, RequireFields<MutationclearArrayDiskStatisticsArgs, 'id'>>;
connectSignIn?: Resolver<ResolversTypes['Boolean'], ParentType, ContextType, RequireFields<MutationconnectSignInArgs, 'input'>>;
connectSignOut?: Resolver<ResolversTypes['Boolean'], ParentType, ContextType>;
createNotification?: Resolver<ResolversTypes['Notification'], ParentType, ContextType, RequireFields<MutationcreateNotificationArgs, 'input'>>;
deleteAllNotifications?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType>;
deleteNotification?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType, RequireFields<MutationdeleteNotificationArgs, 'id' | 'type'>>;
deleteUser?: Resolver<Maybe<ResolversTypes['User']>, ParentType, ContextType, RequireFields<MutationdeleteUserArgs, 'input'>>;
enableDynamicRemoteAccess?: Resolver<ResolversTypes['Boolean'], ParentType, ContextType, RequireFields<MutationenableDynamicRemoteAccessArgs, 'input'>>;
@@ -2374,7 +2377,7 @@ export type MutationResolvers<ContextType = Context, ParentType extends Resolver
unarchiveAll?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType, Partial<MutationunarchiveAllArgs>>;
unarchiveNotifications?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType, Partial<MutationunarchiveNotificationsArgs>>;
unmountArrayDisk?: Resolver<Maybe<ResolversTypes['Disk']>, ParentType, ContextType, RequireFields<MutationunmountArrayDiskArgs, 'id'>>;
unreadNotification?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType, RequireFields<MutationunreadNotificationArgs, 'id'>>;
unreadNotification?: Resolver<ResolversTypes['Notification'], ParentType, ContextType, RequireFields<MutationunreadNotificationArgs, 'id'>>;
updateApikey?: Resolver<Maybe<ResolversTypes['ApiKey']>, ParentType, ContextType, RequireFields<MutationupdateApikeyArgs, 'name'>>;
}>;
@@ -2403,6 +2406,7 @@ export type NodeResolvers<ContextType = Context, ParentType extends ResolversPar
export type NotificationResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Notification'] = ResolversParentTypes['Notification']> = ResolversObject<{
description?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
formattedTimestamp?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
importance?: Resolver<ResolversTypes['Importance'], ParentType, ContextType>;
link?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;

View File

@@ -20,25 +20,45 @@ export function useFragment<TType>(
_documentNode: DocumentTypeDecoration<TType, any>,
fragmentType: FragmentType<DocumentTypeDecoration<TType, any>>
): TType;
// return nullable if `fragmentType` is undefined
export function useFragment<TType>(
_documentNode: DocumentTypeDecoration<TType, any>,
fragmentType: FragmentType<DocumentTypeDecoration<TType, any>> | undefined
): TType | undefined;
// return nullable if `fragmentType` is nullable
export function useFragment<TType>(
_documentNode: DocumentTypeDecoration<TType, any>,
fragmentType: FragmentType<DocumentTypeDecoration<TType, any>> | null
): TType | null;
// return nullable if `fragmentType` is nullable or undefined
export function useFragment<TType>(
_documentNode: DocumentTypeDecoration<TType, any>,
fragmentType: FragmentType<DocumentTypeDecoration<TType, any>> | null | undefined
): TType | null | undefined;
// return array of non-nullable if `fragmentType` is array of non-nullable
export function useFragment<TType>(
_documentNode: DocumentTypeDecoration<TType, any>,
fragmentType: Array<FragmentType<DocumentTypeDecoration<TType, any>>>
): Array<TType>;
// return array of nullable if `fragmentType` is array of nullable
export function useFragment<TType>(
_documentNode: DocumentTypeDecoration<TType, any>,
fragmentType: Array<FragmentType<DocumentTypeDecoration<TType, any>>> | null | undefined
): Array<TType> | null | undefined;
// return readonly array of non-nullable if `fragmentType` is array of non-nullable
export function useFragment<TType>(
_documentNode: DocumentTypeDecoration<TType, any>,
fragmentType: ReadonlyArray<FragmentType<DocumentTypeDecoration<TType, any>>>
): ReadonlyArray<TType>;
// return array of nullable if `fragmentType` is array of nullable
// return readonly array of nullable if `fragmentType` is array of nullable
export function useFragment<TType>(
_documentNode: DocumentTypeDecoration<TType, any>,
fragmentType: ReadonlyArray<FragmentType<DocumentTypeDecoration<TType, any>>> | null | undefined
): ReadonlyArray<TType> | null | undefined;
export function useFragment<TType>(
_documentNode: DocumentTypeDecoration<TType, any>,
fragmentType: FragmentType<DocumentTypeDecoration<TType, any>> | ReadonlyArray<FragmentType<DocumentTypeDecoration<TType, any>>> | null | undefined
): TType | ReadonlyArray<TType> | null | undefined {
fragmentType: FragmentType<DocumentTypeDecoration<TType, any>> | Array<FragmentType<DocumentTypeDecoration<TType, any>>> | ReadonlyArray<FragmentType<DocumentTypeDecoration<TType, any>>> | null | undefined
): TType | Array<TType> | ReadonlyArray<TType> | null | undefined {
return fragmentType as any;
}

View File

@@ -11,6 +11,7 @@ import type { TypedDocumentNode as DocumentNode } from '@graphql-typed-document-
* 3. It does not support dead code elimination, so it will add unused operations.
*
* Therefore it is highly recommended to use the babel or swc plugin for production.
* Learn more about it here: https://the-guild.dev/graphql/codegen/plugins/presets/preset-client#reducing-bundle-size
*/
const documents = {
"\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n": types.sendRemoteGraphQLResponseDocument,

View File

@@ -0,0 +1,39 @@
import { GraphQLScalarType, type ASTNode } from 'graphql';
import { Kind } from 'graphql/language/index.js';
// 52-bit safe integer bounds (JS numbers are exact integers up to 2^53 - 1).
const MAX_LONG = Number.MAX_SAFE_INTEGER;
const MIN_LONG = Number.MIN_SAFE_INTEGER;

/**
 * Coerce an incoming value to a safe-integer "Long".
 *
 * @param value Raw value from serialization or variable input.
 * @returns The value truncated toward zero.
 * @throws TypeError for the empty string (Number('') is 0, which would be a
 *   silent lie), NaN, or anything outside the safe-integer range.
 */
const coerceLong = (value: unknown): number => {
    if (value === '')
        throw new TypeError(
            'Long cannot represent non 52-bit signed integer value: (empty string)'
        );
    const num = Number(value);
    if (!Number.isNaN(num) && num <= MAX_LONG && num >= MIN_LONG) {
        // Truncate toward zero: ceil for negatives, floor for positives.
        if (num < 0) {
            return Math.ceil(num);
        }
        return Math.floor(num);
    }
    throw new TypeError(
        'Long cannot represent non 52-bit signed integer value: ' +
            String(value)
    );
};
/**
 * Parse an inline INT literal from a GraphQL document, returning null when the
 * node is not an INT or the parsed value falls outside the safe-integer range.
 */
const parseLiteral = (ast: ASTNode) => {
    if (ast.kind !== Kind.INT) {
        return null;
    }
    const parsed = parseInt(ast.value, 10);
    const withinRange = parsed <= MAX_LONG && parsed >= MIN_LONG;
    return withinRange ? parsed : null;
};
// Custom GraphQL scalar for "Long" values. JavaScript numbers are only
// integer-exact up to Number.MAX_SAFE_INTEGER, so out-of-range values are
// rejected (via coerceLong/parseLiteral above) rather than silently rounded.
export const GraphQLLong = new GraphQLScalarType({
    name: 'Long',
    description: 'The `Long` scalar type represents 52-bit integers',
    serialize: coerceLong, // outgoing results
    parseValue: coerceLong, // incoming variable values
    parseLiteral: parseLiteral, // inline literals in query documents
});

View File

@@ -0,0 +1,12 @@
import { logger } from '@app/core/log';
import { getters } from '@app/store';
import { type ApiKeyResponse } from '@app/graphql/generated/api/types';
import { isApiKeyValid } from '@app/store/getters/index';
/**
 * Report whether the configured API key is valid.
 *
 * @returns `{ valid, error }` where `error` is the key's status from the store
 *   when invalid, or null when the key checks out.
 */
export const checkApi = async (): Promise<ApiKeyResponse> => {
    logger.trace('Cloud endpoint: Checking API');
    const isValid = isApiKeyValid();
    return {
        valid: isValid,
        error: isValid ? null : getters.apiKey().status,
    };
};

View File

@@ -0,0 +1,100 @@
import { FIVE_DAYS_SECS, ONE_DAY_SECS } from '@app/consts';
import { logger } from '@app/core/log';
import { checkDNS } from '@app/graphql/resolvers/query/cloud/check-dns';
import { checkMothershipAuthentication } from '@app/graphql/resolvers/query/cloud/check-mothership-authentication';
import { getters, store } from '@app/store';
import { getCloudCache, getDnsCache } from '@app/store/getters';
import { setCloudCheck, setDNSCheck } from '@app/store/modules/cache';
import { got } from 'got';
import { type CloudResponse, MinigraphStatus } from '@app/graphql/generated/api/types';
import { API_VERSION, MOTHERSHIP_GRAPHQL_LINK } from '@app/environment';
const mothershipBaseUrl = new URL(MOTHERSHIP_GRAPHQL_LINK).origin;
/**
 * Build the shared `got` request options: JSON content headers plus the
 * identifying Unraid API version/key headers, with a 5 s request timeout.
 */
const createGotOptions = (apiVersion: string, apiKey: string) => {
    const headers = {
        'Content-Type': 'application/json',
        Accept: 'application/json',
        'x-unraid-api-version': apiVersion,
        'x-api-key': apiKey,
    };
    return {
        timeout: { request: 5_000 },
        headers,
    };
};
/**
 * This is mainly testing the user's network config
 * If they cannot resolve this they may have it blocked or have a routing issue
 */
const checkCanReachMothership = async (apiVersion: string, apiKey: string): Promise<void> => {
    try {
        await got.head(mothershipBaseUrl, createGotOptions(apiVersion, apiKey));
    } catch {
        throw new Error(`Unable to connect to ${mothershipBaseUrl}`);
    }
};
/**
 * Run a more performant cloud check with permanent DNS checking.
 *
 * Used when minigraph is already connected: resolves mothership's IP from the
 * DNS cache when possible, otherwise performs a single live DNS lookup and
 * caches the outcome. Never reports an error status, because the cloud
 * connection itself is known to be up.
 */
const fastCloudCheck = async (): Promise<CloudResponse> => {
    const result = { status: 'ok', error: null, ip: 'FAST_CHECK_NO_IP_FOUND' };
    const cloudIp = getDnsCache()?.cloudIp ?? null;
    if (cloudIp) {
        result.ip = cloudIp;
    } else {
        try {
            // Perform the DNS lookup once and reuse it; previously checkDNS()
            // was invoked a second time just to log its result.
            const dnsResult = await checkDNS();
            logger.debug('DNS_CHECK_RESULT', dnsResult);
            result.ip = dnsResult.cloudIp;
            store.dispatch(setDNSCheck({ cloudIp: result.ip, ttl: FIVE_DAYS_SECS, error: null }));
        } catch (error: unknown) {
            logger.warn('Failed to fetch DNS, but Minigraph is connected - continuing');
            result.ip = `ERROR: ${error instanceof Error ? error.message : 'Unknown Error'}`;
            // Don't set an error since we're actually connected to the cloud
            store.dispatch(setDNSCheck({ cloudIp: result.ip, ttl: ONE_DAY_SECS, error: null }));
        }
    }

    return result;
};
export const checkCloud = async (): Promise<CloudResponse> => {
logger.trace('Cloud endpoint: Checking mothership');
try {
const config = getters.config();
const apiVersion = API_VERSION;
const apiKey = config.remote.apikey;
const graphqlStatus = getters.minigraph().status;
const result = { status: 'ok', error: null, ip: 'NO_IP_FOUND' };
// If minigraph is connected, skip the follow cloud checks
if (graphqlStatus === MinigraphStatus.CONNECTED) {
return await fastCloudCheck();
}
// Check GraphQL Conneciton State, if it's broken, run these checks
if (!apiKey) throw new Error('API key is missing');
const oldCheckResult = getCloudCache();
if (oldCheckResult) {
logger.trace('Using cached result for cloud check', oldCheckResult);
return oldCheckResult;
}
// Check DNS
result.ip = (await checkDNS()).cloudIp;
// Check if we can reach mothership
await checkCanReachMothership(apiVersion, apiKey);
// Check auth, rate limiting, etc.
await checkMothershipAuthentication(apiVersion, apiKey);
// Cache for 10 minutes
store.dispatch(setCloudCheck(result));
return result;
} catch (error: unknown) {
if (!(error instanceof Error)) throw new Error(`Unknown Error "${error as string}"`);
return { status: 'error', error: error.message };
}
};

View File

@@ -0,0 +1,60 @@
import { MOTHERSHIP_GRAPHQL_LINK } from '@app/environment';
import { store } from '@app/store';
import { getDnsCache } from '@app/store/getters';
import { setDNSCheck } from '@app/store/modules/cache';
import { lookup as lookupDNS, resolve as resolveDNS } from 'dns';
import ip from 'ip';
import { promisify } from 'util';
const msHostname = new URL(MOTHERSHIP_GRAPHQL_LINK).host;

/**
 * Check if the local and network resolvers are able to see mothership
 *
 * See: https://nodejs.org/docs/latest/api/dns.html#dns_implementation_considerations
 *
 * @param hostname Host to resolve; defaults to the mothership host.
 * @returns The resolved cloud IP.
 * @throws when resolution fails, when the local resolver and the network's DNS
 *   server disagree on the IP, or when the host resolves to a private IP
 *   (e.g. a Pi-hole style blocker). The failure is cached before rethrowing.
 */
export const checkDNS = async (hostname = msHostname): Promise<{ cloudIp: string }> => {
    const dnsCachedResult = getDnsCache();
    if (dnsCachedResult) {
        if (dnsCachedResult.cloudIp) {
            return { cloudIp: dnsCachedResult.cloudIp };
        }

        if (dnsCachedResult.error) {
            throw dnsCachedResult.error;
        }
    }

    let local: string | null = null;
    let network: string | null = null;
    try {
        // Check the local resolver like "ping" does, and the DNS server the
        // server has set (a real network DNS query), in parallel.
        const [localRes, networkRes] = await Promise.all([
            promisify(lookupDNS)(hostname).then(({ address }) => address),
            promisify(resolveDNS)(hostname).then(([address]) => address),
        ]);
        local = localRes;
        network = networkRes;

        // The user's server and the DNS server they're using are returning different results
        if (!local.includes(network)) throw new Error(`Local and network resolvers showing different IP for "${hostname}". [local="${local ?? 'NOT FOUND'}"] [network="${network ?? 'NOT FOUND'}"]`);

        // The user likely has a PI-hole or something similar running.
        if (ip.isPrivate(local)) throw new Error(`"${hostname}" is being resolved to a private IP. [IP=${local ?? 'NOT FOUND'}]`);
    } catch (error: unknown) {
        if (!(error instanceof Error)) {
            throw error;
        }

        store.dispatch(setDNSCheck({ cloudIp: null, error }));
        // Bug fix: execution previously fell through here, which (for the
        // mismatch/private-IP errors above) overwrote the just-cached error
        // with a success entry and returned the bad IP as if it were valid.
        throw error;
    }

    if (typeof local === 'string' || typeof network === 'string') {
        const validIp: string = local ?? network ?? '';
        store.dispatch(setDNSCheck({ cloudIp: validIp, error: null }));
        return { cloudIp: validIp };
    }

    return { cloudIp: '' };
};

View File

@@ -0,0 +1,52 @@
import { logger } from '@app/core';
import { MOTHERSHIP_GRAPHQL_LINK } from '@app/environment';
import { got, HTTPError, TimeoutError } from 'got';
/**
 * Build the shared `got` request options: JSON content headers plus the
 * identifying Unraid API version/key headers, with a 5 s request timeout.
 * NOTE(review): duplicated in the cloud-check module — consider extracting.
 */
const createGotOptions = (apiVersion: string, apiKey: string) => {
    const headers = {
        'Content-Type': 'application/json',
        Accept: 'application/json',
        'x-unraid-api-version': apiVersion,
        'x-api-key': apiKey,
    };
    return {
        timeout: { request: 5_000 },
        headers,
    };
};
// Check if we're rate limited, etc.
export const checkMothershipAuthentication = async (apiVersion: string, apiKey: string) => {
    const msURL = new URL(MOTHERSHIP_GRAPHQL_LINK);
    const url = `https://${msURL.hostname}${msURL.pathname}`;

    try {
        // A HEAD request; got throws on any non-2XX/3XX status code.
        await got.head(url, createGotOptions(apiVersion, apiKey));
    } catch (error: unknown) {
        // HTTP errors
        if (error instanceof HTTPError) {
            const { statusCode } = error.response;
            if (statusCode === 429) {
                const retryAfter = error.response.headers['retry-after'];
                throw new Error(retryAfter ? `${url} is rate limited for another ${retryAfter} seconds` : `${url} is rate limited`);
            }

            if (statusCode === 401) {
                throw new Error('Invalid credentials');
            }

            throw new Error(`Failed to connect to ${url} with a "${statusCode}" HTTP error.`);
        }

        // Timeout error
        if (error instanceof TimeoutError) throw new Error(`Timed-out while connecting to "${url}"`);

        // Unknown error
        logger.trace('Unknown Error', error);
        // @TODO: Add in the cause when we move to a newer node version
        // throw new Error('Unknown Error', { cause: error as Error });
        throw new Error('Unknown Error');
    }
};

View File

@@ -0,0 +1,14 @@
/** Aggregated cloud-connectivity report returned by the cloud query. */
export type Cloud = {
    error: string | null;
    apiKey: { valid: true; error: null } | { valid: false; error: string };
    minigraphql: {
        status: 'connected' | 'disconnected';
    };
    cloud: { status: 'ok'; error: null; ip: string } | { status: 'error'; error: string };
    allowedOrigins: string[];
};

/**
 * Build the final Cloud response, deriving the top-level `error` from the
 * first failing sub-check (API key takes precedence over the cloud check).
 */
export const createResponse = (cloud: Omit<Cloud, 'error'>): Cloud => {
    const { apiKey, cloud: cloudCheck } = cloud;
    return {
        ...cloud,
        error: apiKey.error ?? cloudCheck.error,
    };
};

View File

@@ -1,11 +1,4 @@
import {
cpu,
cpuFlags,
mem,
memLayout,
osInfo,
versions,
} from 'systeminformation';
import { cpu, cpuFlags, mem, memLayout, osInfo, versions } from 'systeminformation';
import { docker } from '@app/core/utils/clients/docker';
import {
type InfoApps,
@@ -29,8 +22,6 @@ import { getUnraidVersion } from '@app/common/dashboard/get-unraid-version';
import { AppError } from '@app/core/errors/app-error';
import { cleanStdout } from '@app/core/utils/misc/clean-stdout';
import { execaCommandSync, execa } from 'execa';
import { pathExists } from 'path-exists';
import { filter as asyncFilter } from 'p-iteration';
import { isSymlink } from 'path-type';
import type { PciDevice } from '@app/core/types';
import { vmRegExps } from '@app/core/utils/vms/domain/vm-regexps';
@@ -39,6 +30,7 @@ import { filterDevices } from '@app/core/utils/vms/filter-devices';
import { sanitizeVendor } from '@app/core/utils/vms/domain/sanitize-vendor';
import { sanitizeProduct } from '@app/core/utils/vms/domain/sanitize-product';
import { bootTimestamp } from '@app/common/dashboard/boot-timestamp';
import { access } from 'fs/promises';
export const generateApps = async (): Promise<InfoApps> => {
const installed = await docker
@@ -63,8 +55,7 @@ export const generateOs = async (): Promise<InfoOs> => {
};
export const generateCpu = async (): Promise<InfoCpu> => {
const { cores, physicalCores, speedMin, speedMax, stepping, ...rest } =
await cpu();
const { cores, physicalCores, speedMin, speedMax, stepping, ...rest } = await cpu();
const flags = await cpuFlags()
.then((flags) => flags.split(' '))
.catch(() => []);
@@ -118,9 +109,9 @@ export const generateVersions = async (): Promise<Versions> => {
};
export const generateMemory = async (): Promise<InfoMemory> => {
const layout = await memLayout().then((dims) =>
dims.map((dim) => dim as MemoryLayout)
).catch(() => []);
const layout = await memLayout()
.then((dims) => dims.map((dim) => dim as MemoryLayout))
.catch(() => []);
const info = await mem();
let max = info.total;
@@ -136,14 +127,10 @@ export const generateMemory = async (): Promise<InfoMemory> => {
throw error;
});
const lines = memoryInfo.split('\n');
const header = lines.find((line) =>
line.startsWith('Physical Memory Array')
);
const header = lines.find((line) => line.startsWith('Physical Memory Array'));
if (header) {
const start = lines.indexOf(header);
const nextHeaders = lines
.slice(start, -1)
.find((line) => line.startsWith('Handle '));
const nextHeaders = lines.slice(start, -1).find((line) => line.startsWith('Handle '));
if (nextHeaders) {
const end = lines.indexOf(nextHeaders);
@@ -151,9 +138,7 @@ export const generateMemory = async (): Promise<InfoMemory> => {
max = toBytes(
fields
?.find((line) =>
line.trim().startsWith('Maximum Capacity')
)
?.find((line) => line.trim().startsWith('Maximum Capacity'))
?.trim()
?.split(': ')[1] ?? '0'
);
@@ -215,11 +200,14 @@ export const generateDevices = async (): Promise<Devices> => {
const basePath = '/sys/bus/pci/devices/0000:';
// Remove devices with no IOMMU support
const filteredDevices = await asyncFilter(
devices,
async (device: Readonly<PciDevice>) =>
pathExists(`${basePath}${device.id}/iommu_group/`)
);
const filteredDevices = await Promise.all(
devices.map(async (device: Readonly<PciDevice>) => {
const exists = await access(`${basePath}${device.id}/iommu_group/`)
.then(() => true)
.catch(() => false);
return exists ? device : null;
})
).then((devices) => devices.filter((device) => device !== null));
/**
* Run device cleanup
@@ -230,36 +218,29 @@ export const generateDevices = async (): Promise<Devices> => {
* - Add whether kernel-bound driver exists
* - Cleanup device vendor/product names
*/
const processedDevices = await filterDevices(filteredDevices).then(
async (devices) =>
Promise.all(
devices
// @ts-expect-error - Device is not PciDevice
.map((device) => addDeviceClass(device))
.map(async (device) => {
// Attempt to get the current kernel-bound driver for this pci device
await isSymlink(
`${basePath}${device.id}/driver`
).then((symlink) => {
if (symlink) {
// $strLink = @readlink('/sys/bus/pci/devices/0000:'.$arrMatch['id']. '/driver');
// if (!empty($strLink)) {
// $strDriver = basename($strLink);
// }
}
});
const processedDevices = await filterDevices(filteredDevices).then(async (devices) =>
Promise.all(
devices
// @ts-expect-error - Device is not PciDevice
.map((device) => addDeviceClass(device))
.map(async (device) => {
// Attempt to get the current kernel-bound driver for this pci device
await isSymlink(`${basePath}${device.id}/driver`).then((symlink) => {
if (symlink) {
// $strLink = @readlink('/sys/bus/pci/devices/0000:'.$arrMatch['id']. '/driver');
// if (!empty($strLink)) {
// $strDriver = basename($strLink);
// }
}
});
// Clean up the vendor and product name
device.vendorname = sanitizeVendor(
device.vendorname
);
device.productname = sanitizeProduct(
device.productname
);
// Clean up the vendor and product name
device.vendorname = sanitizeVendor(device.vendorname);
device.productname = sanitizeProduct(device.productname);
return device;
})
)
return device;
})
)
);
return processedDevices;
@@ -298,13 +279,9 @@ export const generateDevices = async (): Promise<Devices> => {
const getSystemUSBDevices = async () => {
try {
// Get a list of all usb hubs so we can filter the allowed/disallowed
const usbHubs = await execa(
'cat /sys/bus/usb/drivers/hub/*/modalias',
{ shell: true }
)
const usbHubs = await execa('cat /sys/bus/usb/drivers/hub/*/modalias', { shell: true })
.then(({ stdout }) =>
stdout.split('\n').map((line) => {
// eslint-disable-next-line @typescript-eslint/prefer-regexp-exec
const [, id] = line.match(/usb:v(\w{9})/) ?? [];
return id.replace('p', ':');
})
@@ -318,8 +295,7 @@ export const generateDevices = async (): Promise<Devices> => {
emhttp.var.flashGuid !== device.guid;
// Remove usb hubs
const filterUsbHubs = (device: Readonly<PciDevice>): boolean =>
!usbHubs.includes(device.id);
const filterUsbHubs = (device: Readonly<PciDevice>): boolean => !usbHubs.includes(device.id);
// Clean up the name
const sanitizeVendorName = (device: Readonly<PciDevice>) => {
@@ -330,9 +306,7 @@ export const generateDevices = async (): Promise<Devices> => {
};
};
const parseDeviceLine = (
line: Readonly<string>
): { value: string; string: string } => {
const parseDeviceLine = (line: Readonly<string>): { value: string; string: string } => {
const emptyLine = { value: '', string: '' };
// If the line is blank return nothing
@@ -342,7 +316,7 @@ export const generateDevices = async (): Promise<Devices> => {
// Parse the line
const [, _] = line.split(/[ \t]{2,}/).filter(Boolean);
// eslint-disable-next-line @typescript-eslint/prefer-regexp-exec
const match = _.match(/^(\S+)\s(.*)/)?.slice(1);
// If there's no match return nothing
@@ -361,31 +335,19 @@ export const generateDevices = async (): Promise<Devices> => {
const modifiedDevice: PciDevice = {
...device,
};
const info = execaCommandSync(
`lsusb -d ${device.id} -v`
).stdout.split('\n');
const info = execaCommandSync(`lsusb -d ${device.id} -v`).stdout.split('\n');
const deviceName = device.name.trim();
const iSerial = parseDeviceLine(
info.filter((line) => line.includes('iSerial'))[0]
);
const iProduct = parseDeviceLine(
info.filter((line) => line.includes('iProduct'))[0]
);
const iSerial = parseDeviceLine(info.filter((line) => line.includes('iSerial'))[0]);
const iProduct = parseDeviceLine(info.filter((line) => line.includes('iProduct'))[0]);
const iManufacturer = parseDeviceLine(
info.filter((line) => line.includes('iManufacturer'))[0]
);
const idProduct = parseDeviceLine(
info.filter((line) => line.includes('idProduct'))[0]
);
const idVendor = parseDeviceLine(
info.filter((line) => line.includes('idVendor'))[0]
);
const serial = `${iSerial.string
const idProduct = parseDeviceLine(info.filter((line) => line.includes('idProduct'))[0]);
const idVendor = parseDeviceLine(info.filter((line) => line.includes('idVendor'))[0]);
const serial = `${iSerial.string.slice(8).slice(0, 4)}-${iSerial.string
.slice(8)
.slice(0, 4)}-${iSerial.string.slice(8).slice(4)}`;
const guid = `${idVendor.value.slice(
2
)}-${idProduct.value.slice(2)}-${serial}`;
.slice(4)}`;
const guid = `${idVendor.value.slice(2)}-${idProduct.value.slice(2)}-${serial}`;
modifiedDevice.serial = iSerial.string;
modifiedDevice.product = iProduct.string;
@@ -394,8 +356,7 @@ export const generateDevices = async (): Promise<Devices> => {
// Set name if missing
if (deviceName === '') {
modifiedDevice.name =
`${iProduct.string} ${iManufacturer.string}`.trim();
modifiedDevice.name = `${iProduct.string} ${iManufacturer.string}`.trim();
}
// Name still blank? Replace using fallback default

View File

@@ -128,8 +128,7 @@ const getUrlTypeFromFqdn = (fqdnType: string): URL_TYPE => {
return URL_TYPE.WIREGUARD;
}
};
// eslint-disable-next-line complexity
export const getServerIps = (
state: RootState = store.getState()
): { urls: AccessUrl[]; errors: Error[] } => {

View File

@@ -0,0 +1,25 @@
import { gql, QueryOptions } from "@apollo/client/core/index.js";
/** Shape of a JSON-encoded GraphQL request body. */
interface ParsedQuery {
    query?: string;
    // GraphQL variables are arbitrary JSON values, not just strings.
    variables?: Record<string, unknown>;
}

/**
 * Parses a raw JSON request body into Apollo QueryOptions.
 *
 * A missing `variables` field is valid GraphQL and defaults to an empty
 * object; a present-but-non-object `variables` field is rejected.
 *
 * @param body Raw JSON string, e.g. '{"query":"{ me { id } }"}'.
 * @returns QueryOptions with the parsed gql document and its variables.
 * @throws Error('Invalid Body Provided') when the body is not valid JSON,
 *         lacks a query string, or carries a non-object `variables` field.
 */
export const parseGraphQLQuery = (body: string): QueryOptions => {
    try {
        const parsedBody: ParsedQuery = JSON.parse(body);
        const { query, variables = {} } = parsedBody;
        if (query && typeof query === 'string' && typeof variables === 'object' && variables !== null) {
            return {
                query: gql(query),
                variables,
            };
        }

        throw new Error('Invalid Body');
    } catch (error) {
        throw new Error('Invalid Body Provided');
    }
};

View File

@@ -1,5 +1,4 @@
import { remoteQueryLogger } from '@app/core/log';
import { ENVIRONMENT } from '@app/environment';
import { getApiApolloClient } from '@app/graphql/client/api/get-api-client';
import {
RemoteGraphQLEventType,
@@ -20,17 +19,19 @@ export const executeRemoteGraphQLQuery = async (
try {
const parsedQuery = parseGraphQLQuery(originalBody);
const localClient = getApiApolloClient({
upcApiKey: apiKey
upcApiKey: apiKey,
});
if (ENVIRONMENT === 'development') {
remoteQueryLogger.debug({ query: parsedQuery.query }, '[DEVONLY] Running query');
}
remoteQueryLogger.trace({ query: parsedQuery.query }, '[DEVONLY] Running query');
const localResult = await localClient.query({
query: parsedQuery.query,
variables: parsedQuery.variables,
});
if (localResult.data) {
remoteQueryLogger.trace({ data: localResult.data }, 'Got data from remoteQuery request', data.sha256);
remoteQueryLogger.trace(
{ data: localResult.data },
'Got data from remoteQuery request',
data.sha256
);
await client?.mutate({
mutation: SEND_REMOTE_QUERY_RESPONSE,
@@ -71,6 +72,10 @@ export const executeRemoteGraphQLQuery = async (
} catch (error) {
remoteQueryLogger.warn('Could not respond %o', error);
}
remoteQueryLogger.error('Error executing remote query %s', err instanceof Error ? err.message: 'Unknown Error');
remoteQueryLogger.error(
'Error executing remote query %s',
err instanceof Error ? err.message : 'Unknown Error'
);
remoteQueryLogger.trace(err);
}
};

View File

@@ -2,6 +2,8 @@ import { join } from 'path';
import { loadFilesSync } from '@graphql-tools/load-files';
import { mergeTypeDefs } from '@graphql-tools/merge';
const files = loadFilesSync(join(__dirname, '../src/graphql/schema/types'), { extensions: ['graphql'] });
const files = loadFilesSync(join(import.meta.dirname, './types'), {
extensions: ['graphql'],
});
export const typeDefs = mergeTypeDefs(files);
export const typeDefs = mergeTypeDefs(files);

View File

@@ -17,15 +17,22 @@ type Query {
type Mutation {
createNotification(input: NotificationData!): Notification!
deleteNotification(id: String!, type: NotificationType!): NotificationOverview!
"""Marks a notification as archived."""
archiveNotification(id: String!): NotificationOverview!
"""Marks a notification as unread."""
unreadNotification(id: String!): NotificationOverview!
deleteAllNotifications: NotificationOverview!
"""
Marks a notification as archived.
"""
archiveNotification(id: String!): Notification!
"""
Marks a notification as unread.
"""
unreadNotification(id: String!): Notification!
archiveNotifications(ids: [String!]): NotificationOverview!
unarchiveNotifications(ids: [String!]): NotificationOverview!
archiveAll(importance: Importance): NotificationOverview!
unarchiveAll(importance: Importance): NotificationOverview!
"""Reads each notification to recompute & update the overview."""
"""
Reads each notification to recompute & update the overview.
"""
recalculateOverview: NotificationOverview!
}
@@ -42,7 +49,9 @@ enum Importance {
type Notifications implements Node {
id: ID!
"""A cached overview of the notifications in the system & their severity."""
"""
A cached overview of the notifications in the system & their severity.
"""
overview: NotificationOverview!
list(filter: NotificationFilter!): [Notification!]!
}
@@ -62,6 +71,7 @@ type Notification implements Node {
ISO Timestamp for when the notification occurred
"""
timestamp: String
formattedTimestamp: String
}
input NotificationData {

View File

@@ -1,9 +1,9 @@
import { hasSubscribedToChannel } from '@app/ws';
import { type User } from '@app/core/types/states/user';
import { AppError } from '@app/core/errors/app-error';
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
import { pubsub } from '@app/core/pubsub';
import { store } from '@app/store';
import { graphqlLogger } from '@app/core/log';
import {
ServerStatus,
type Server,
@@ -15,6 +15,40 @@ export interface Context {
websocketId: string;
}
type Subscription = {
    total: number;
    channels: string[];
};

// Per-websocket bookkeeping of active pubsub subscriptions, keyed by websocket id.
const subscriptions: Record<string, Subscription> = {};

/**
 * Return current ws connection count.
 */
export const getWsConnectionCount = () => {
    const active = Object.values(subscriptions).filter(({ total }) => total >= 1);
    return active.length;
};

/**
 * Return current ws connection count in channel.
 */
export const getWsConnectionCountInChannel = (channel: string) => {
    const inChannel = Object.values(subscriptions).filter(({ channels }) =>
        channels.includes(channel)
    );
    return inChannel.length;
};

/**
 * Record that websocket `id` has subscribed to `channel`.
 */
export const hasSubscribedToChannel = (id: string, channel: string) => {
    graphqlLogger.debug('Subscribing to %s', channel);
    const existing = subscriptions[id];
    if (existing === undefined) {
        // First subscription for this websocket — initialize its record.
        subscriptions[id] = {
            total: 1,
            channels: [channel],
        };
        return;
    }

    existing.total += 1;
    existing.channels.push(channel);
};
/**
* Create a pubsub subscription.
* @param channel The pubsub channel to subscribe to.
@@ -33,12 +67,11 @@ export const createSubscription = (channel: string, resource?: string) => ({
possession: 'any',
});
hasSubscribedToChannel(context.websocketId, channel);
hasSubscribedToChannel(context.websocketId, channel);
return pubsub.asyncIterator(channel);
},
});
// eslint-disable-next-line @typescript-eslint/no-unused-vars
export const getLocalServer = (getState = store.getState): Array<Server> => {
const { emhttp, config, minigraph } = getState();
const guid = emhttp.var.regGuid;

View File

@@ -1,11 +1,9 @@
import 'reflect-metadata';
import 'global-agent/bootstrap';
import { am } from 'am';
import 'global-agent/bootstrap.js';
import '@app/dotenv';
import http from 'http';
import https from 'https';
import CacheableLookup from 'cacheable-lookup';
import exitHook from 'async-exit-hook';
import { store } from '@app/store';
import { loadConfigFile } from '@app/store/modules/config';
import { logger } from '@app/core/log';
@@ -28,91 +26,96 @@ import { bootstrapNestServer } from '@app/unraid-api/main';
import { type NestFastifyApplication } from '@nestjs/platform-fastify';
import { type RawServerDefault } from 'fastify';
import { setupLogRotation } from '@app/core/logrotate/setup-logrotate';
import * as env from '@app/environment';
import { WebSocket } from 'ws';
import exitHook from 'exit-hook';
import * as envVars from '@app/environment';
let server: NestFastifyApplication<RawServerDefault>;
let server: NestFastifyApplication<RawServerDefault> | null = null;
// Remove a stale unix socket file when PORT is a socket path rather than a numeric TCP port.
const unlinkUnixPort = () => {
    const portIsSocketPath = isNaN(parseInt(PORT, 10));
    if (portIsSocketPath && fileExistsSync(PORT)) {
        unlinkSync(PORT);
    }
};
// Boot app
void am(
async () => {
environment.IS_MAIN_PROCESS = true;
logger.debug('ENV %o', env);
try {
environment.IS_MAIN_PROCESS = true;
const cacheable = new CacheableLookup();
logger.info('ENV %o', envVars);
logger.info('PATHS %o', store.getState().paths);
Object.assign(global, { WebSocket: require('ws') });
// Ensure all DNS lookups are cached for their TTL
cacheable.install(http.globalAgent);
cacheable.install(https.globalAgent);
const cacheable = new CacheableLookup();
// Start file <-> store sync
// Must occur before config is loaded to ensure that the handler can fix broken configs
await startStoreSync();
Object.assign(global, { WebSocket });
// Ensure all DNS lookups are cached for their TTL
cacheable.install(http.globalAgent);
cacheable.install(https.globalAgent);
await setupLogRotation();
// Start file <-> store sync
// Must occur before config is loaded to ensure that the handler can fix broken configs
await startStoreSync();
// Load my servers config file into store
await store.dispatch(loadConfigFile());
await setupLogRotation();
// Load emhttp state into store
await store.dispatch(loadStateFiles());
// Load my servers config file into store
await store.dispatch(loadConfigFile());
// Load initial registration key into store
await store.dispatch(loadRegistrationKey());
// Load emhttp state into store
await store.dispatch(loadStateFiles());
// Load my dynamix config file into store
await store.dispatch(loadDynamixConfigFile());
// Load initial registration key into store
await store.dispatch(loadRegistrationKey());
// Start listening to file updates
StateManager.getInstance();
// Load my dynamix config file into store
await store.dispatch(loadDynamixConfigFile());
// Start listening to key file changes
setupRegistrationKeyWatch();
// Start listening to file updates
StateManager.getInstance();
// Start listening to docker events
setupVarRunWatch();
// Start listening to key file changes
setupRegistrationKeyWatch();
// Start listening to dynamix config file changes
setupDynamixConfigWatch();
// Start listening to docker events
setupVarRunWatch();
// Disabled until we need the access token to work
// TokenRefresh.init();
// Start listening to dynamix config file changes
setupDynamixConfigWatch();
// If port is unix socket, delete old socket before starting http server
// Disabled until we need the access token to work
// TokenRefresh.init();
// If port is unix socket, delete old socket before starting http server
unlinkUnixPort();
// Start webserver
server = await bootstrapNestServer();
PingTimeoutJobs.init();
startMiddlewareListeners();
await validateApiKeyIfPresent();
// On process exit stop HTTP server
exitHook(() => {
console.log('exithook');
server?.close?.();
// If port is unix socket, delete socket before exiting
unlinkUnixPort();
// Start webserver
server = await bootstrapNestServer();
PingTimeoutJobs.init();
startMiddlewareListeners();
await validateApiKeyIfPresent();
// On process exit stop HTTP server - this says it supports async but it doesnt seem to
exitHook(() => {
server?.close?.();
// If port is unix socket, delete socket before exiting
unlinkUnixPort();
shutdownApiEvent();
process.exit(0);
});
},
async (error: NodeJS.ErrnoException) => {
logger.error('API-GLOBAL-ERROR %s %s', error.message, error.stack);
if (server) {
await server?.close?.();
}
shutdownApiEvent();
// Kill application
process.exit(1);
process.exit(0);
});
// Start a loop to run the app
await new Promise(() => {});
} catch (error: unknown) {
if (error instanceof Error) {
logger.error('API-ERROR %s %s', error.message, error.stack);
}
);
if (server) {
await server?.close?.();
}
shutdownApiEvent();
// Kill application
process.exit(1);
}

View File

@@ -1,4 +1,4 @@
import { FIVE_MINUTES_MS, MOTHERSHIP_GRAPHQL_LINK } from '@app/consts';
import { FIVE_MINUTES_MS } from '@app/consts';
import { minigraphLogger } from '@app/core/log';
import {
getMothershipConnectionParams,
@@ -11,17 +11,17 @@ import {
ApolloClient,
InMemoryCache,
type NormalizedCacheObject,
} from '@apollo/client/core/core.cjs';
import { GraphQLWsLink } from '@apollo/client/link/subscriptions';
} from '@apollo/client/core/index.js';
import { GraphQLWsLink } from '@apollo/client/link/subscriptions/index.js';
import { MinigraphStatus } from '@app/graphql/generated/api/types';
import { API_VERSION } from '@app/environment';
import { API_VERSION, MOTHERSHIP_GRAPHQL_LINK } from '@app/environment';
import {
receivedMothershipPing,
setMothershipTimeout,
} from '@app/store/modules/minigraph';
import { logoutUser } from '@app/store/modules/config';
import { RetryLink } from '@apollo/client/link/retry';
import { ErrorLink } from '@apollo/client/link/error';
import { RetryLink } from '@apollo/client/link/retry/index.js';
import { ErrorLink } from '@apollo/client/link/error/index.js';
import { isApiKeyValid } from '@app/store/getters/index';
import { buildDelayFunction } from '@app/mothership/utils/delay-function';
import { WebSocket } from 'ws';
@@ -56,12 +56,11 @@ export const isAPIStateDataFullyLoaded = (state = store.getState()) => {
Boolean(emhttp.var.version)
);
};
// eslint-disable-next-line @typescript-eslint/no-extraneous-class
export class GraphQLClient {
public static instance: ApolloClient<NormalizedCacheObject> | null = null;
public static client: Client | null = null;
// eslint-disable-next-line @typescript-eslint/no-empty-function
private constructor() {}
/**
@@ -188,6 +187,9 @@ export class GraphQLClient {
MOTHERSHIP_GRAPHQL_LINK.replace('http', 'ws')
);
});
GraphQLClient.client.on('error', (err) => {
minigraphLogger.error('GraphQL Client Error: %o', err);
})
GraphQLClient.client.on('connected', () => {
store.dispatch(
setGraphqlConnectionStatus({

View File

@@ -9,49 +9,55 @@ import { isApiKeyValid } from '@app/store/getters/index';
import { isApiKeyCorrectLength } from '@app/mothership/api-key/is-api-key-correct-length';
import { NODE_ENV } from '@app/environment';
export const apiKeyCheckJob = async (getState: () => RootState, dispatch: AppDispatch, count?: number): Promise<boolean> => {
keyServerLogger.debug('Running keyserver validation number: %s', count);
const state = getState();
if (state.apiKey.status === API_KEY_STATUS.NO_API_KEY) {
// Stop Job
return false;
}
export const apiKeyCheckJob = async (
getState: () => RootState,
dispatch: AppDispatch,
count?: number
): Promise<boolean> => {
keyServerLogger.debug('Running keyserver validation number: %s', count);
const state = getState();
if (state.apiKey.status === API_KEY_STATUS.NO_API_KEY) {
// Stop Job
return false;
}
if (isAPIStateDataFullyLoaded(state)) {
if (isApiKeyValid(state)) {
return true;
}
if (isAPIStateDataFullyLoaded(state)) {
if (isApiKeyValid(state)) {
return true;
}
if (!isApiKeyCorrectLength(state.config.remote.apikey)) {
keyServerLogger.error('API Key has invalid length, logging you out.');
await dispatch(logoutUser({ reason: 'API Key has invalid length' }));
return false;
}
if (!isApiKeyCorrectLength(state.config.remote.apikey)) {
keyServerLogger.error('API Key has invalid length, logging you out.');
await dispatch(logoutUser({ reason: 'API Key has invalid length' }));
return false;
}
if (['development'].includes(NODE_ENV)) {
keyServerLogger.debug('In dev environment, marking API Key as Valid');
dispatch(setApiKeyState(API_KEY_STATUS.API_KEY_VALID));
return true;
}
if (['development'].includes(NODE_ENV)) {
keyServerLogger.debug('In dev environment, marking API Key as Valid');
dispatch(setApiKeyState(API_KEY_STATUS.API_KEY_VALID));
return true;
}
const validationResponse = await validateApiKeyWithKeyServer({ flashGuid: state.emhttp.var.flashGuid, apiKey: state.config.remote.apikey });
switch (validationResponse) {
case API_KEY_STATUS.API_KEY_VALID:
keyServerLogger.info('Stopping API Key Job as Keyserver Marked API Key Valid');
dispatch(setApiKeyState(validationResponse));
return true;
case API_KEY_STATUS.API_KEY_INVALID:
await dispatch(logoutUser({ reason: 'Invalid API Key' }));
return false;
default:
keyServerLogger.info('Request failed with status:', validationResponse);
dispatch(setApiKeyState(validationResponse));
throw new Error('Keyserver Failure, must retry');
}
} else {
keyServerLogger.warn('State Data Has Not Fully Loaded, this should not be possible');
dispatch(setApiKeyState(API_KEY_STATUS.NO_API_KEY));
return false;
}
const validationResponse = await validateApiKeyWithKeyServer({
flashGuid: state.emhttp.var.flashGuid,
apiKey: state.config.remote.apikey,
});
switch (validationResponse) {
case API_KEY_STATUS.API_KEY_VALID:
keyServerLogger.info('Stopping API Key Job as Keyserver Marked API Key Valid');
dispatch(setApiKeyState(validationResponse));
return true;
case API_KEY_STATUS.API_KEY_INVALID:
await dispatch(logoutUser({ reason: 'Invalid API Key' }));
return false;
default:
keyServerLogger.info('Request failed with status:', validationResponse);
dispatch(setApiKeyState(validationResponse));
throw new Error('Keyserver Failure, must retry');
}
} else {
keyServerLogger.warn('State Data Has Not Fully Loaded, this should not be possible');
dispatch(setApiKeyState(API_KEY_STATUS.NO_API_KEY));
return false;
}
};

View File

@@ -1,4 +1,3 @@
/* eslint-disable max-depth */
import { minigraphLogger, mothershipLogger } from '@app/core/log';
import { GraphQLClient } from './graphql-client';
import { store } from '@app/store';

View File

@@ -11,12 +11,12 @@ export function buildDelayFunction(
const baseDelay = jitter ? initial : initial / 2;
return (count: number) => {
// eslint-disable-next-line no-mixed-operators
let delay = Math.min(max, baseDelay * 2 ** count);
if (jitter) {
// We opt for a full jitter approach for a mostly uniform distribution,
// but bound it within initialDelay and delay for everyone's sanity.
// eslint-disable-next-line operator-assignment
delay = Math.random() * delay;
}

View File

@@ -0,0 +1,57 @@
import { logger } from '@app/core/log';
import { API_VERSION } from '@app/environment';
import { ClientType } from '@app/graphql/generated/client/graphql';
import { isAPIStateDataFullyLoaded } from '@app/mothership/graphql-client';
import { store } from '@app/store';
import { isApiKeyValid } from '@app/store/getters/index';
import { type OutgoingHttpHeaders } from 'node:http2';
interface MothershipWebsocketHeaders extends OutgoingHttpHeaders {
    'x-api-key': string;
    'x-flash-guid': string;
    'x-unraid-api-version': string;
    'x-unraid-server-version': string;
    'User-Agent': string;
}

/**
 * Build the headers sent on the mothership websocket connection.
 *
 * @param state Redux state; defaults to the live store state.
 * @returns Fully-populated headers when state is loaded and the API key is
 *          valid; an empty object otherwise.
 */
export const getMothershipWebsocketHeaders = (
    state = store.getState()
): MothershipWebsocketHeaders | OutgoingHttpHeaders => {
    const { config, emhttp } = state;
    if (isAPIStateDataFullyLoaded(state) && isApiKeyValid(state)) {
        const headers: MothershipWebsocketHeaders = {
            'x-api-key': config.remote.apikey,
            'x-flash-guid': emhttp.var.flashGuid,
            'x-unraid-api-version': API_VERSION,
            'x-unraid-server-version': emhttp.var.version,
            'User-Agent': `unraid-api/${API_VERSION}`,
        };
        // Redact the API key before logging — secrets must not land in log files.
        logger.debug('Mothership websocket headers: %o', {
            ...headers,
            'x-api-key': '<redacted>',
        });
        return headers;
    }

    return {};
};
interface MothershipConnectionParams extends Record<string, unknown> {
    clientType: ClientType;
    apiKey: string;
    flashGuid: string;
    apiVersion: string;
    unraidVersion: string;
}

/**
 * Build the connection params for the mothership GraphQL subscription client.
 *
 * @param state Redux state; defaults to the live store state.
 * @returns Populated params when state is loaded and the API key is valid;
 *          an empty object otherwise.
 */
export const getMothershipConnectionParams = (
    state = store.getState()
): MothershipConnectionParams | Record<string, unknown> => {
    const { config, emhttp } = state;
    const ready = isAPIStateDataFullyLoaded(state) && isApiKeyValid(state);
    if (!ready) {
        return {};
    }

    return {
        clientType: ClientType.API,
        apiKey: config.remote.apikey,
        flashGuid: emhttp.var.flashGuid,
        apiVersion: API_VERSION,
        unraidVersion: emhttp.var.version,
    };
};

View File

@@ -18,8 +18,7 @@ export class RemoteAccessController implements IRemoteAccessController {
static _instance: RemoteAccessController | null = null;
activeRemoteAccess: UpnpRemoteAccess | StaticRemoteAccess | null = null;
notifier: UnraidLocalNotifier = new UnraidLocalNotifier({ level: 'info' });
// eslint-disable-next-line @typescript-eslint/no-useless-constructor, @typescript-eslint/no-empty-function
constructor() {}
public static get instance(): RemoteAccessController {

View File

@@ -1,11 +1,30 @@
import { parseConfig } from '@app/core/utils/misc/parse-config';
import {
createAsyncThunk,
} from '@reduxjs/toolkit';
import { createAsyncThunk } from '@reduxjs/toolkit';
import { access } from 'fs/promises';
import { F_OK } from 'constants';
import { type RecursivePartial, type RecursiveNullable } from '@app/types';
import { type DynamixConfig } from '@app/core/types/ini';
import { batchProcess } from '@app/utils';
/**
 * Loads a configuration file from disk, parses it to a RecursivePartial of the provided type, and returns it.
 *
 * If the file is inaccessible, an empty object is returned instead.
 *
 * @param path The path to the configuration file on disk.
 * @returns A parsed RecursivePartial of the provided type.
 */
async function loadConfigFile<ConfigType>(path: string): Promise<RecursivePartial<ConfigType>> {
    try {
        // Throws when the file is missing or unreadable.
        await access(path, F_OK);
    } catch {
        return {};
    }

    return parseConfig<RecursivePartial<ConfigType>>({
        filePath: path,
        type: 'ini',
    });
}
/**
* Load the dynamix.cfg into the store.
@@ -16,18 +35,12 @@ export const loadDynamixConfigFile = createAsyncThunk<
RecursiveNullable<RecursivePartial<DynamixConfig>>,
string | undefined
>('config/load-dynamix-config-file', async (filePath) => {
if (filePath) {
return loadConfigFile<DynamixConfig>(filePath);
}
const store = await import('@app/store');
const paths = store.getters.paths();
const path = filePath ?? paths['dynamix-config'];
const fileExists = await access(path, F_OK)
.then(() => true)
.catch(() => false);
const file: RecursivePartial<DynamixConfig> = fileExists
? parseConfig<RecursivePartial<DynamixConfig>>({
filePath: path,
type: 'ini',
})
: {};
return file;
const paths = store.getters.paths()['dynamix-config'];
const { data: configs } = await batchProcess(paths, (path) => loadConfigFile<DynamixConfig>(path));
const [defaultConfig = {}, customConfig = {}] = configs;
return { ...defaultConfig, ...customConfig };
});

View File

@@ -5,7 +5,7 @@ import { startAppListening } from '@app/store/listeners/listener-middleware';
import { loadSingleStateFile } from '@app/store/modules/emhttp';
import { StateFileKey } from '@app/store/types';
import { isAnyOf } from '@reduxjs/toolkit';
import { isEqual } from 'lodash';
import { isEqual } from 'lodash-es';
export const enableArrayEventListener = () =>
startAppListening({

View File

@@ -1,6 +1,5 @@
import { startAppListening } from '@app/store/listeners/listener-middleware';
import { getDiff } from 'json-difference';
import { isEqual } from 'lodash';
import { isEqual } from 'lodash-es';
import { logger } from '@app/core/log';
import {
type ConfigType,
@@ -40,15 +39,6 @@ export const enableConfigFileListener = (mode: ConfigType) => () =>
action.type !== loadConfigFile.fulfilled.type &&
action.type !== loadConfigFile.rejected.type
) {
logger.trace(
{
diff: getDiff(oldFlashConfig ?? {}, newFlashConfig),
},
`${mode} Config Changed!`,
'Action:',
action.type
);
return true;
}

View File

@@ -0,0 +1,21 @@
import { logger } from '@app/core/log';
import { API_VERSION } from '@app/environment';
import { startAppListening } from '@app/store/listeners/listener-middleware';
import { updateUserConfig } from '@app/store/modules/config';
import { FileLoadStatus } from '@app/store/types';
/**
 * Keeps the persisted config's API version in sync with the running API.
 * Once the config file has loaded, if the stored version differs from the
 * current API_VERSION, write the current version back to myservers.cfg.
 */
export const enableVersionListener = () =>
    startAppListening({
        predicate(_, currentState) {
            // NOTE: an empty stored version ('') also differs from API_VERSION,
            // so a single inequality check covers both the unset & stale cases
            // (the previous explicit `=== ''` disjunct was redundant).
            if (
                currentState.config.status === FileLoadStatus.LOADED &&
                currentState.config.api.version !== API_VERSION
            ) {
                logger.trace('Config Loaded, setting API Version in myservers.cfg to ', API_VERSION);
                return true;
            }

            return false;
        },
        async effect(_, { dispatch }) {
            dispatch(
                updateUserConfig({
                    api: { version: API_VERSION },
                })
            );
        },
    });

View File

@@ -22,8 +22,9 @@ import { getWriteableConfig } from '@app/core/utils/files/config-file-normalizer
import { writeFileSync } from 'fs';
import { safelySerializeObjectToIni } from '@app/core/utils/files/safe-ini-serializer';
import { PUBSUB_CHANNEL, pubsub } from '@app/core/pubsub';
import { isEqual } from 'lodash';
import { isEqual } from 'lodash-es';
import { setupRemoteAccessThunk } from '@app/store/actions/setup-remote-access';
import { NODE_ENV } from '@app/environment';
export type SliceState = {
status: FileLoadStatus;
@@ -32,7 +33,7 @@ export type SliceState = {
export const initialState: SliceState = {
status: FileLoadStatus.UNLOADED,
nodeEnv: process.env.NODE_ENV ?? 'production',
nodeEnv: NODE_ENV,
remote: {
'2Fa': '',
wanaccess: '',

Some files were not shown because too many files have changed in this diff Show More