Mirror of https://github.com/bluewave-labs/Checkmate.git (synced 2026-02-10 19:39:36 -06:00)
Merge pull request #1784 from bluewave-labs/reorg/client
reorg: extract server
34 .gitignore (vendored)
@@ -1,6 +1,28 @@
.vite
.vscode
*.sh
!uptime.sh
*.idea
*.notes
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*

node_modules
dist
dist-ssr
*.local

# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

.env

!env.sh
28 Client/.gitignore (vendored)
@@ -1,28 +0,0 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*

node_modules
dist
dist-ssr
*.local

# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

.env

!env.sh
@@ -1,8 +0,0 @@
# React + Vite

This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.

Currently, two official plugins are available:

- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react/README.md) uses [Babel](https://babeljs.io/) for Fast Refresh
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
@@ -1,16 +0,0 @@
<!doctype html>
<html lang="en">

<head>
    <meta charset="UTF-8" />
    <link rel="icon" href="./checkmate_favicon.svg" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Checkmate</title>
</head>

<body>
    <div id="root"></div>
    <script type="module" src="/src/main.jsx"></script>
</body>

</html>
11 Docker/.gitignore (vendored)
@@ -1,11 +0,0 @@
*.sh
!quickstart.sh
!build_images.sh
dev/mongo/data/*
dev/redis/data/*
dist/mongo/data/*
dist/redis/data/*
prod/mongo/data/*
prod/redis/data/*
*.env
prod/certbot/*
@@ -1,26 +0,0 @@
#!/bin/bash

# Change directory to root Server directory for correct Docker Context
cd "$(dirname "$0")"
cd ../..

# Define an array of services and their Dockerfiles
declare -A services=(
    ["uptime_client"]="./Docker/dev/client.Dockerfile"
    ["uptime_database_mongo"]="./Docker/dev/mongoDB.Dockerfile"
    ["uptime_redis"]="./Docker/dev/redis.Dockerfile"
    ["uptime_server"]="./Docker/dev/server.Dockerfile"
)

# Loop through each service and build the corresponding image
for service in "${!services[@]}"; do
    docker build -f "${services[$service]}" -t "$service" .

    # Check if the build succeeded
    if [ $? -ne 0 ]; then
        echo "Error building $service image. Exiting..."
        exit 1
    fi
done

echo "All images built successfully"
@@ -1,27 +0,0 @@
FROM node:20-alpine AS build

ENV NODE_OPTIONS="--max-old-space-size=4096"

WORKDIR /app

RUN apk add --no-cache \
    python3 \
    make g++ \
    gcc \
    libc-dev \
    linux-headers \
    libusb-dev \
    eudev-dev


COPY ../../Client/package*.json ./

RUN npm install

COPY ../../Client .

RUN npm run build-dev

RUN npm install -g serve

CMD ["serve","-s", "dist", "-l", "5173"]
@@ -1,40 +0,0 @@
services:
  client:
    image: uptime_client:latest
    restart: always
    ports:
      - "5173:5173"

    depends_on:
      - server
  server:
    image: uptime_server:latest
    restart: always
    ports:
      - "5000:5000"
    env_file:
      - server.env
    depends_on:
      - redis
      - mongodb
  redis:
    image: uptime_redis:latest
    restart: always
    ports:
      - "6379:6379"
    volumes:
      - ./redis/data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 5s
  mongodb:
    image: uptime_database_mongo:latest
    restart: always
    command: ["mongod", "--quiet"]
    ports:
      - "27017:27017"
    volumes:
      - ./mongo/data:/data/db
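The dev compose file above (like the prod and test variants later in this diff) loads the server's settings from a server.env file that is not part of this change. As a rough sketch only, such a file would carry the same kind of variables that the Docker/dist compose file below injects through its environment block; the names are taken from that block and the values are purely illustrative:

    # server.env (illustrative sketch; adjust for your own deployment)
    DB_CONNECTION_STRING=mongodb://mongodb:27017/uptime_db
    REDIS_HOST=redis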
@@ -1,3 +0,0 @@
FROM mongo
EXPOSE 27017
CMD ["mongod"]
@@ -1,2 +0,0 @@
FROM redis
EXPOSE 6379
@@ -1,13 +0,0 @@
FROM node:20-alpine

WORKDIR /app

COPY ../../Server/package*.json ./

RUN npm install

COPY ../../Server/ ./

EXPOSE 5000

CMD ["node", "index.js"]
26 Docker/dist/build_images.sh (vendored)
@@ -1,26 +0,0 @@
#!/bin/bash

# Change directory to root Server directory for correct Docker Context
cd "$(dirname "$0")"
cd ../..

# Define an array of services and their Dockerfiles
declare -A services=(
    ["bluewaveuptime/uptime_client"]="./Docker/dist/client.Dockerfile"
    ["bluewaveuptime/uptime_database_mongo"]="./Docker/dist/mongoDB.Dockerfile"
    ["bluewaveuptime/uptime_redis"]="./Docker/dist/redis.Dockerfile"
    ["bluewaveuptime/uptime_server"]="./Docker/dist/server.Dockerfile"
)

# Loop through each service and build the corresponding image
for service in "${!services[@]}"; do
    docker build -f "${services[$service]}" -t "$service" .

    # Check if the build succeeded
    if [ $? -ne 0 ]; then
        echo "Error building $service image. Exiting..."
        exit 1
    fi
done

echo "All images built successfully"
33 Docker/dist/client.Dockerfile (vendored)
@@ -1,33 +0,0 @@
FROM node:20-alpine AS build

ENV NODE_OPTIONS="--max-old-space-size=4096"

WORKDIR /app

RUN apk add --no-cache \
    python3 \
    make g++ \
    gcc \
    libc-dev \
    linux-headers \
    libusb-dev \
    eudev-dev


COPY ../../Client/package*.json ./

RUN npm install

COPY ../../Client .

RUN npm run build


FROM nginx:1.27.1-alpine

COPY ./Docker/dist/nginx/conf.d/default.conf /etc/nginx/conf.d/default.conf
COPY --from=build /app/dist /usr/share/nginx/html
COPY --from=build /app/env.sh /docker-entrypoint.d/env.sh
RUN chmod +x /docker-entrypoint.d/env.sh

CMD ["nginx", "-g", "daemon off;"]
46 Docker/dist/docker-compose.yaml (vendored)
@@ -1,46 +0,0 @@
services:
  client:
    image: bluewaveuptime/uptime_client:latest
    restart: always
    environment:
      UPTIME_APP_API_BASE_URL: "http://localhost:5000/api/v1"
      UPTIME_STATUS_PAGE_SUBDOMAIN_PREFIX: "http://uptimegenie.com/"
    ports:
      - "80:80"
      - "443:443"
    depends_on:
      - server
  server:
    image: bluewaveuptime/uptime_server:latest
    restart: always
    ports:
      - "5000:5000"
    depends_on:
      - redis
      - mongodb
    environment:
      - DB_CONNECTION_STRING=mongodb://mongodb:27017/uptime_db
      - REDIS_HOST=redis
    # volumes:
    #   - /var/run/docker.sock:/var/run/docker.sock:ro
  redis:
    image: bluewaveuptime/uptime_redis:latest
    restart: always
    ports:
      - "6379:6379"
    volumes:
      - ./redis/data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 5s
  mongodb:
    image: bluewaveuptime/uptime_database_mongo:latest
    restart: always
    volumes:
      - ./mongo/data:/data/db
    command: ["mongod", "--quiet"]
    ports:
      - "27017:27017"
3 Docker/dist/mongoDB.Dockerfile (vendored)
@@ -1,3 +0,0 @@
FROM mongo
EXPOSE 27017
CMD ["mongod"]
35 Docker/dist/nginx/conf.d/default.conf (vendored)
@@ -1,35 +0,0 @@
server {
    listen 80;
    listen [::]:80;

    server_name checkmate-demo.bluewavelabs.ca;
    server_tokens off;

    location /.well-known/acme-challenge/ {
        root /var/www/certbot;
    }

    location / {
        root /usr/share/nginx/html;
        index index.html index.htm;
        try_files $uri $uri/ /index.html;
    }

    location /api/ {
        proxy_pass http://server:5000/api/;
        proxy_http_version 1.1;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }

    location /api-docs/ {
        proxy_pass http://server:5000/api-docs/;
        proxy_http_version 1.1;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
}
2 Docker/dist/redis.Dockerfile (vendored)
@@ -1,2 +0,0 @@
FROM redis
EXPOSE 6379
13 Docker/dist/server.Dockerfile (vendored)
@@ -1,13 +0,0 @@
FROM node:20-alpine

WORKDIR /app

COPY ../../Server/package*.json ./

RUN npm install

COPY ../../Server/ ./

EXPOSE 5000

CMD ["node", "index.js"]
@@ -1,26 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Change directory to root directory for correct Docker Context
|
||||
cd "$(dirname "$0")"
|
||||
cd ../..
|
||||
|
||||
# Define an array of services and their Dockerfiles
|
||||
declare -A services=(
|
||||
["uptime_client"]="./Docker/prod/client.Dockerfile"
|
||||
["uptime_database_mongo"]="./Docker/prod/mongoDB.Dockerfile"
|
||||
["uptime_redis"]="./Docker/prod/redis.Dockerfile"
|
||||
["uptime_server"]="./Docker/prod/server.Dockerfile"
|
||||
)
|
||||
|
||||
# Loop through each service and build the corresponding image
|
||||
for service in "${!services[@]}"; do
|
||||
docker build -f "${services[$service]}" -t "$service" .
|
||||
|
||||
# Check if the build succeeded
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Error building $service image. Exiting..."
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
echo "All images built successfully"
|
||||
@@ -1,19 +0,0 @@
|
||||
version: '3'
|
||||
|
||||
services:
|
||||
webserver:
|
||||
image: nginx:latest
|
||||
ports:
|
||||
- 80:80
|
||||
- 443:443
|
||||
restart: always
|
||||
volumes:
|
||||
- ./nginx/conf.d/:/etc/nginx/conf.d/:ro
|
||||
- ./certbot/www/:/var/www/certbot/:ro
|
||||
certbot:
|
||||
image: certbot/certbot:latest
|
||||
volumes:
|
||||
- ./certbot/www/:/var/www/certbot/:rw
|
||||
- ./certbot/conf/:/etc/letsencrypt/:rw
|
||||
depends_on:
|
||||
- webserver
|
||||
@@ -1,29 +0,0 @@
|
||||
FROM node:20-alpine AS build
|
||||
|
||||
ENV NODE_OPTIONS="--max-old-space-size=4096"
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apk add --no-cache \
|
||||
python3 \
|
||||
make g++ \
|
||||
gcc \
|
||||
libc-dev \
|
||||
linux-headers \
|
||||
libusb-dev \
|
||||
eudev-dev
|
||||
|
||||
COPY ../../Client/package*.json ./
|
||||
|
||||
RUN npm install
|
||||
|
||||
COPY ../../Client .
|
||||
|
||||
RUN npm run build
|
||||
|
||||
FROM nginx:1.27.1-alpine
|
||||
|
||||
COPY --from=build /app/dist /usr/share/nginx/html
|
||||
COPY --from=build /app/env.sh /docker-entrypoint.d/env.sh
|
||||
RUN chmod +x /docker-entrypoint.d/env.sh
|
||||
CMD ["nginx", "-g", "daemon off;"]
|
||||
@@ -1,57 +0,0 @@
|
||||
services:
|
||||
client:
|
||||
image: uptime_client:latest
|
||||
restart: always
|
||||
environment:
|
||||
UPTIME_APP_API_BASE_URL: "https://checkmate-demo.bluewavelabs.ca/api/v1"
|
||||
UPTIME_STATUS_PAGE_SUBDOMAIN_PREFIX: "http://uptimegenie.com/"
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
depends_on:
|
||||
- server
|
||||
volumes:
|
||||
- ./nginx/conf.d:/etc/nginx/conf.d/:ro
|
||||
- ./certbot/www:/var/www/certbot/:ro
|
||||
- ./certbot/conf/:/etc/nginx/ssl/:ro
|
||||
|
||||
certbot:
|
||||
image: certbot/certbot:latest
|
||||
restart: always
|
||||
volumes:
|
||||
- ./certbot/www/:/var/www/certbot/:rw
|
||||
- ./certbot/conf/:/etc/letsencrypt/:rw
|
||||
server:
|
||||
image: uptime_server:latest
|
||||
restart: always
|
||||
ports:
|
||||
- "5000:5000"
|
||||
env_file:
|
||||
- server.env
|
||||
depends_on:
|
||||
- redis
|
||||
- mongodb
|
||||
redis:
|
||||
image: uptime_redis:latest
|
||||
restart: always
|
||||
ports:
|
||||
- "6379:6379"
|
||||
volumes:
|
||||
- ./redis/data:/data
|
||||
healthcheck:
|
||||
test: ["CMD", "redis-cli", "ping"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 5
|
||||
start_period: 5s
|
||||
mongodb:
|
||||
image: uptime_database_mongo:latest
|
||||
restart: always
|
||||
command: ["mongod", "--quiet", "--auth"]
|
||||
ports:
|
||||
- "27017:27017"
|
||||
volumes:
|
||||
- ./mongo/data:/data/db
|
||||
- ./mongo/init/create_users.js:/docker-entrypoint-initdb.d/create_users.js
|
||||
env_file:
|
||||
- mongo.env
|
||||
@@ -1,16 +0,0 @@
|
||||
var username = process.env.USERNAME_ENV_VAR;
|
||||
var password = process.env.PASSWORD_ENV_VAR;
|
||||
|
||||
db = db.getSiblingDB("uptime_db");
|
||||
|
||||
db.createUser({
|
||||
user: username,
|
||||
pwd: password,
|
||||
roles: [
|
||||
{
|
||||
role: "readWrite",
|
||||
db: "uptime_db",
|
||||
},
|
||||
],
|
||||
});
|
||||
print("User uptime_user created successfully");
|
||||
@@ -1,4 +0,0 @@
|
||||
FROM mongo
|
||||
COPY ./Docker/prod/mongo/init/create_users.js /docker-entrypoint-initdb.d/
|
||||
EXPOSE 27017
|
||||
CMD ["mongod"]
|
||||
@@ -1,15 +0,0 @@
|
||||
server {
|
||||
listen 80;
|
||||
listen [::]:80;
|
||||
|
||||
server_name checkmate-demo.bluewavelabs.ca www.checkmate-demo.bluewavelabs.ca;
|
||||
server_tokens off;
|
||||
|
||||
location /.well-known/acme-challenge/ {
|
||||
root /var/www/certbot;
|
||||
}
|
||||
|
||||
location / {
|
||||
return 301 https://[domain-name]$request_uri;
|
||||
}
|
||||
}
|
||||
@@ -1,69 +0,0 @@
|
||||
server {
|
||||
listen 80;
|
||||
listen [::]:80;
|
||||
|
||||
server_name checkmate-demo.bluewavelabs.ca;
|
||||
server_tokens off;
|
||||
|
||||
location /.well-known/acme-challenge/ {
|
||||
root /var/www/certbot;
|
||||
}
|
||||
|
||||
location / {
|
||||
root /usr/share/nginx/html;
|
||||
index index.html index.htm;
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
|
||||
location /api/ {
|
||||
proxy_pass http://server:5000/api/;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location /api-docs/ {
|
||||
proxy_pass http://server:5000/api-docs/;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
}
|
||||
|
||||
server {
|
||||
listen 443 default_server ssl http2;
|
||||
listen [::]:443 ssl http2;
|
||||
|
||||
server_name checkmate-demo.bluewavelabs.ca;
|
||||
|
||||
ssl_certificate /etc/nginx/ssl/live/checkmate-demo.bluewavelabs.ca/fullchain.pem;
|
||||
ssl_certificate_key /etc/nginx/ssl/live/checkmate-demo.bluewavelabs.ca/privkey.pem;
|
||||
|
||||
location / {
|
||||
root /usr/share/nginx/html;
|
||||
index index.html index.htm;
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
|
||||
location /api/ {
|
||||
proxy_pass http://server:5000/api/;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location /api-docs/ {
|
||||
proxy_pass http://server:5000/api-docs/;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
}
|
||||
@@ -1,2 +0,0 @@
|
||||
FROM redis
|
||||
EXPOSE 6379
|
||||
@@ -1,15 +0,0 @@
|
||||
FROM node:20-alpine
|
||||
|
||||
ENV NODE_OPTIONS="--max-old-space-size=2048"
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY ./Server/package*.json ./
|
||||
|
||||
RUN npm install
|
||||
|
||||
COPY ./Server/ ./
|
||||
|
||||
EXPOSE 5000
|
||||
|
||||
CMD ["node", "index.js"]
|
||||
@@ -1,26 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Change directory to root directory for correct Docker Context
|
||||
cd "$(dirname "$0")"
|
||||
cd ../..
|
||||
|
||||
# Define an array of services and their Dockerfiles
|
||||
declare -A services=(
|
||||
["uptime_client"]="./Docker/prod/client.Dockerfile"
|
||||
["uptime_database_mongo"]="./Docker/prod/mongoDB.Dockerfile"
|
||||
["uptime_redis"]="./Docker/prod/redis.Dockerfile"
|
||||
["uptime_server"]="./Docker/prod/server.Dockerfile"
|
||||
)
|
||||
|
||||
# Loop through each service and build the corresponding image
|
||||
for service in "${!services[@]}"; do
|
||||
docker build -f "${services[$service]}" -t "$service" .
|
||||
|
||||
# Check if the build succeeded
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Error building $service image. Exiting..."
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
echo "All images built successfully"
|
||||
@@ -1,19 +0,0 @@
|
||||
version: "3"
|
||||
|
||||
services:
|
||||
webserver:
|
||||
image: nginx:latest
|
||||
ports:
|
||||
- 80:80
|
||||
- 443:443
|
||||
restart: always
|
||||
volumes:
|
||||
- ./nginx/conf.d/:/etc/nginx/conf.d/:ro
|
||||
- ./certbot/www/:/var/www/certbot/:ro
|
||||
certbot:
|
||||
image: certbot/certbot:latest
|
||||
volumes:
|
||||
- ./certbot/www/:/var/www/certbot/:rw
|
||||
- ./certbot/conf/:/etc/letsencrypt/:rw
|
||||
depends_on:
|
||||
- webserver
|
||||
@@ -1,30 +0,0 @@
|
||||
FROM node:20-alpine AS build
|
||||
|
||||
ENV NODE_OPTIONS="--max-old-space-size=4096"
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apk add --no-cache \
|
||||
python3 \
|
||||
make g++ \
|
||||
gcc \
|
||||
libc-dev \
|
||||
linux-headers \
|
||||
libusb-dev \
|
||||
eudev-dev
|
||||
|
||||
|
||||
COPY ../../Client/package*.json ./
|
||||
|
||||
RUN npm install
|
||||
|
||||
COPY ../../Client .
|
||||
|
||||
RUN npm run build
|
||||
|
||||
FROM nginx:1.27.1-alpine
|
||||
|
||||
COPY --from=build /app/dist /usr/share/nginx/html
|
||||
COPY --from=build /app/env.sh /docker-entrypoint.d/env.sh
|
||||
RUN chmod +x /docker-entrypoint.d/env.sh
|
||||
CMD ["nginx", "-g", "daemon off;"]
|
||||
@@ -1,57 +0,0 @@
|
||||
services:
|
||||
client:
|
||||
image: uptime_client:latest
|
||||
restart: always
|
||||
environment:
|
||||
UPTIME_APP_API_BASE_URL: "https://checkmate-test.bluewavelabs.ca/api/v1"
|
||||
UPTIME_STATUS_PAGE_SUBDOMAIN_PREFIX: "http://uptimegenie.com/"
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
depends_on:
|
||||
- server
|
||||
volumes:
|
||||
- ./nginx/conf.d:/etc/nginx/conf.d/:ro
|
||||
- ./certbot/www:/var/www/certbot/:ro
|
||||
- ./certbot/conf/:/etc/nginx/ssl/:ro
|
||||
|
||||
certbot:
|
||||
image: certbot/certbot:latest
|
||||
restart: always
|
||||
volumes:
|
||||
- ./certbot/www/:/var/www/certbot/:rw
|
||||
- ./certbot/conf/:/etc/letsencrypt/:rw
|
||||
server:
|
||||
image: uptime_server:latest
|
||||
restart: always
|
||||
ports:
|
||||
- "5000:5000"
|
||||
env_file:
|
||||
- server.env
|
||||
depends_on:
|
||||
- redis
|
||||
- mongodb
|
||||
redis:
|
||||
image: uptime_redis:latest
|
||||
restart: always
|
||||
ports:
|
||||
- "6379:6379"
|
||||
volumes:
|
||||
- ./redis/data:/data
|
||||
healthcheck:
|
||||
test: ["CMD", "redis-cli", "ping"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 5
|
||||
start_period: 5s
|
||||
mongodb:
|
||||
image: uptime_database_mongo:latest
|
||||
restart: always
|
||||
command: ["mongod", "--quiet", "--replSet", "rs0", "--bind_ip_all"]
|
||||
ports:
|
||||
- "27017:27017"
|
||||
volumes:
|
||||
- ./mongo/data:/data/db
|
||||
- ./mongo/init/create_users.js:/docker-entrypoint-initdb.d/create_users.js
|
||||
env_file:
|
||||
- mongo.env
|
||||
@@ -1,16 +0,0 @@
|
||||
var username = process.env.USERNAME_ENV_VAR;
|
||||
var password = process.env.PASSWORD_ENV_VAR;
|
||||
|
||||
db = db.getSiblingDB("uptime_db");
|
||||
|
||||
db.createUser({
|
||||
user: username,
|
||||
pwd: password,
|
||||
roles: [
|
||||
{
|
||||
role: "readWrite",
|
||||
db: "uptime_db",
|
||||
},
|
||||
],
|
||||
});
|
||||
print("User uptime_user created successfully");
|
||||
@@ -1,4 +0,0 @@
|
||||
FROM mongo
|
||||
COPY ./Docker/prod/mongo/init/create_users.js /docker-entrypoint-initdb.d/
|
||||
EXPOSE 27017
|
||||
CMD ["mongod"]
|
||||
@@ -1,15 +0,0 @@
|
||||
server {
|
||||
listen 80;
|
||||
listen [::]:80;
|
||||
|
||||
server_name checkmate-test.bluewavelabs.ca www.checkmate-test.bluewavelabs.ca;
|
||||
server_tokens off;
|
||||
|
||||
location /.well-known/acme-challenge/ {
|
||||
root /var/www/certbot;
|
||||
}
|
||||
|
||||
location / {
|
||||
return 301 https://[domain-name]$request_uri;
|
||||
}
|
||||
}
|
||||
@@ -1,69 +0,0 @@
|
||||
server {
|
||||
listen 80;
|
||||
listen [::]:80;
|
||||
|
||||
server_name checkmate-test.bluewavelabs.ca;
|
||||
server_tokens off;
|
||||
|
||||
location /.well-known/acme-challenge/ {
|
||||
root /var/www/certbot;
|
||||
}
|
||||
|
||||
location / {
|
||||
root /usr/share/nginx/html;
|
||||
index index.html index.htm;
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
|
||||
location /api/ {
|
||||
proxy_pass http://server:5000/api/;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location /api-docs/ {
|
||||
proxy_pass http://server:5000/api-docs/;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
}
|
||||
|
||||
server {
|
||||
listen 443 default_server ssl http2;
|
||||
listen [::]:443 ssl http2;
|
||||
|
||||
server_name checkmate-test.bluewavelabs.ca;
|
||||
|
||||
ssl_certificate /etc/nginx/ssl/live/checkmate-test.bluewavelabs.ca/fullchain.pem;
|
||||
ssl_certificate_key /etc/nginx/ssl/live/checkmate-test.bluewavelabs.ca/privkey.pem;
|
||||
|
||||
location / {
|
||||
root /usr/share/nginx/html;
|
||||
index index.html index.htm;
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
|
||||
location /api/ {
|
||||
proxy_pass http://server:5000/api/;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location /api-docs/ {
|
||||
proxy_pass http://server:5000/api-docs/;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
}
|
||||
@@ -1,2 +0,0 @@
|
||||
FROM redis
|
||||
EXPOSE 6379
|
||||
@@ -1,13 +0,0 @@
|
||||
FROM node:20-alpine
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY ./Server/package*.json ./
|
||||
|
||||
RUN npm install
|
||||
|
||||
COPY ./Server/ ./
|
||||
|
||||
EXPOSE 5000
|
||||
|
||||
CMD ["node", "index.js"]
|
||||
142 README.md
@@ -1,140 +1,8 @@
# React + Vite

This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.

<p align=center> <a href="https://trendshift.io/repositories/12443" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12443" alt="bluewave-labs%2Fcheckmate | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a></p>









[](https://www.bestpractices.dev/projects/9901)
Currently, two official plugins are available:


<h1 align="center"><a href="https://bluewavelabs.ca" target="_blank">Checkmate</a></h1>

<p align="center"><strong>An open source uptime and infrastructure monitoring application</strong></p>



Checkmate is an open source monitoring tool used to track the operational status and performance of servers and websites. It regularly checks whether a server/website is accessible and performs optimally, providing real-time alerts and reports on the monitored services' availability, downtime, and response time.

Checkmate also has an agent, called [Capture](https://github.com/bluewave-labs/capture), to retrieve data from remote servers. While Capture is not required to run Checkmate, it provides additional insights about your servers' CPU, RAM, disk and temperature status.

Checkmate has been stress tested with 1000+ active monitors without any particular issues or performance bottlenecks.

We **love** what we are building here, and we continuously learn a few things about Reactjs, Nodejs, MongoDB and Docker while building Checkmate.

## 📦 Demo

See [Checkmate](https://checkmate-demo.bluewavelabs.ca/) in action. The username is uptimedemo@demo.com and the password is Demouser1! (just a note that we update the demo server from time to time, so if it doesn't work for you, please ping us on the Discussions channel).

## 🔗 User's guide

Usage instructions can be found [here](https://docs.checkmate.so/). It's still WIP and some of the information there might be outdated. Rest assured, we are doing our best! :)

## 🛠️ Installation

See installation instructions in the [Checkmate documentation portal](https://docs.checkmate.so/quickstart). Alternatively, you can also use [Coolify](https://coolify.io/) or [Elestio](https://elest.io/open-source/checkmate) for a one-click Docker deployment. If you would like to monitor your server infrastructure, you'll need the [Capture agent](https://github.com/bluewave-labs/capture). The Capture repository also contains the installation instructions.

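For a rough idea of what a compose-based deployment looks like, the sketch below uses the Docker/dist compose file that appears earlier in this diff; it is an illustration only, assumes the published bluewaveuptime images are reachable from your machine, and the quickstart in the documentation above remains the authoritative procedure:

    git clone https://github.com/bluewave-labs/Checkmate.git
    cd Checkmate/Docker/dist
    docker compose up -d    # starts client (ports 80/443), server (port 5000), redis and mongodb

Stop the stack again with docker compose down when you are done.
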
## 🚀 Performance
Thanks to extensive optimizations, Checkmate operates with an exceptionally small memory footprint, requiring minimal memory and CPU resources. Here’s the memory usage of a Node.js instance running on a server that monitors 323 servers every minute:





You can see the memory footprint of MongoDB and Redis on the same server (398 MB and 15 MB) for the same number of servers:




## 💚 Questions & ideas

If you have any questions, suggestions or comments, please use our [Discord channel](https://discord.gg/NAb6H3UTjK). We've also launched our [Discussions](https://github.com/bluewave-labs/bluewave-uptime/discussions) page! Feel free to ask questions or share your ideas—we'd love to hear from you!

## 🧩 Features

- Completely open source, deployable on your servers
- Website monitoring
- Page speed monitoring
- Infrastructure monitoring (memory, disk usage, CPU performance etc.) - requires [Capture](https://github.com/bluewave-labs/capture)
- Docker monitoring
- Ping monitoring
- SSL monitoring
- Port monitoring
- Incidents at a glance
- E-mail notifications
- Scheduled maintenance

**Short term roadmap:**

- Global (distributed) uptime checking on Solana network (**in progress**) https://github.com/bluewave-labs/Checkmate/issues/1593
- Status pages (**in progress**) https://github.com/bluewave-labs/Checkmate/issues/1131
- Translations (i18n) (**in progress**)
- Better notification options (Webhooks, Discord, Telegram, Slack) (**in progress**) https://github.com/bluewave-labs/Checkmate/issues/1545
- Command line interface (CLI) (**in progress**) https://github.com/bluewave-labs/Checkmate/issues/1558
- JSON query monitoring https://github.com/bluewave-labs/Checkmate/issues/1573
- Tagging/grouping monitors https://github.com/bluewave-labs/Checkmate/issues/1546
- More configuration options
- DNS monitoring

## 🏗️ Screenshots
<p>
<img width="2714" alt="server" src="https://github.com/user-attachments/assets/f7cb272a-69a6-48c5-93b0-249ecf20ecc6" />
</p>
<p>
<img width="2714" alt="uptime" src="https://github.com/user-attachments/assets/98ddc6c0-3384-47fd-96ce-7e53e6b688ac" />
</p>
<p>
<img width="2714" alt="page speed" src="https://github.com/user-attachments/assets/b5589f79-da30-4239-9846-1f8bb2637ff9" />
</p>

## 🏗️ Tech stack

- [ReactJs](https://react.dev/)
- [MUI (React framework)](https://mui.com/)
- [Node.js](https://nodejs.org/en)
- [MongoDB](https://mongodb.com)
- [Recharts](https://recharts.org)
- Lots of other open source components!

## A few links

- If you would like to support us, please consider giving the repo a ⭐ and clicking "watch".
- Have a question or suggestion for the roadmap/featureset? Check our [Discord channel](https://discord.gg/NAb6H3UTjK) or [Discussions](https://github.com/bluewave-labs/checkmate/discussions) forum.
- Need a ping when there's a new release? Use [Newreleases](https://newreleases.io/), a free service to track releases.
- Watch a Checkmate [installation and usage video](https://www.youtube.com/watch?v=GfFOc0xHIwY)

## 🤝 Contributing

We pride ourselves on building strong connections with contributors at every level. Despite being a young project, Checkmate has already earned 3700+ stars and attracted 45+ contributors from around the globe. So, don’t hold back — jump in, contribute and learn with us!

Here's how you can contribute:

0. Star this repo :)
1. Check the [Contributor's guideline](https://github.com/bluewave-labs/bluewave-uptime/blob/master/CONTRIBUTING.md). First-timers are encouraged to check the `good-first-issue` tag.
2. Optionally, read the [project structure](https://docs.checkmate.so/developers-guide/general-project-structure) and [high level overview](https://bluewavelabs.gitbook.io/checkmate/developers-guide/high-level-overview).
3. Have a look at our Figma designs [here](https://www.figma.com/design/RPSfaw66HjzSwzntKcgDUV/Uptime-Genie?node-id=0-1&t=WqOFv9jqNTFGItpL-1) if you are going to use one of our designs. We encourage you to copy them to your own Figma page and work on them there, as the original is read-only.
4. Open an issue if you believe you've encountered a bug.
5. Check for `good-first-issue` issues if you are a newcomer.
6. Make a pull request to add new features/make quality-of-life improvements/fix bugs.

<a href="https://github.com/bluewave-labs/checkmate/graphs/contributors">
<img src="https://contrib.rocks/image?repo=bluewave-labs/checkmate" />
</a>

## 💰 Our sponsors

Thanks to [Gitbook](https://gitbook.io/) for giving us a free tier for their documentation platform, and to [Poeditor](https://poeditor.com/) for providing us with a free account to use their i18n services. If you would like to sponsor Checkmate, please send an email to hello@bluewavelabs.ca

[](https://star-history.com/#bluewave-labs/bluewave-uptime&Date)

Also check other developer and contributor-friendly projects of BlueWave:

- [LangRoute](https://github.com/bluewave-labs/langroute), an LLM proxy and gateway
- [DataRoom](https://github.com/bluewave-labs/bluewave-dataroom), a secure file sharing application, aka dataroom.
- [Headcount](https://github.com/bluewave-labs/bluewave-hrm), a complete Human Resource Management platform.
- [Guidefox](https://github.com/bluewave-labs/guidefox), an application that helps new users learn how to use your product via hints, tours, popups and banners.
- [VerifyWise](https://github.com/bluewave-labs/verifywise), the first open source AI governance platform.

 since 14 Jan, 2025
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react/README.md) uses [Babel](https://babeljs.io/) for Fast Refresh
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
@@ -1 +0,0 @@
./docker
7 Server/.gitignore (vendored)
@@ -1,7 +0,0 @@
node_modules
.env
*.log
*.sh
.nyc_output
coverage
.clinic
@@ -1,8 +0,0 @@
module.exports = {
    require: ["esm", "chai/register-expect.js"], // Include Chai's "expect" interface globally
    spec: "tests/**/*.test.js", // Specify test files
    timeout: 5000, // Set test-case timeout in milliseconds
    recursive: true, // Include subdirectories
    reporter: "spec", // Use the "spec" reporter
    exit: true, // Force Mocha to quit after tests complete
};
@@ -1,8 +0,0 @@
{
    "all": true,
    "include": ["controllers/*.js", "utils/*.js", "service/*.js", "db/mongo/modules/*.js"],
    "exclude": ["**/*.test.js"],
    "reporter": ["html", "text", "lcov"],
    "sourceMap": false,
    "instrument": true
}
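The two blocks above are the Mocha and nyc configurations being removed from Server/ in this reorg. As a hedged sketch only, assuming the Server dependencies (mocha, nyc, esm, chai) are installed and the command is run from the Server directory, both files would typically be picked up automatically by something along these lines; the actual test script lives in Server/package.json, which is not shown in this diff:

    npx nyc mocha    # nyc applies the coverage settings above, mocha applies the spec/timeout/reporter settings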
@@ -1,523 +0,0 @@
|
||||
import {
|
||||
registrationBodyValidation,
|
||||
loginValidation,
|
||||
editUserParamValidation,
|
||||
editUserBodyValidation,
|
||||
recoveryValidation,
|
||||
recoveryTokenValidation,
|
||||
newPasswordValidation,
|
||||
} from "../validation/joi.js";
|
||||
import logger from "../utils/logger.js";
|
||||
import jwt from "jsonwebtoken";
|
||||
import { getTokenFromHeaders, tokenType } from "../utils/utils.js";
|
||||
import crypto from "crypto";
|
||||
import { handleValidationError, handleError } from "./controllerUtils.js";
|
||||
const SERVICE_NAME = "authController";
|
||||
|
||||
class AuthController {
|
||||
constructor(db, settingsService, emailService, jobQueue, stringService) {
|
||||
this.db = db;
|
||||
this.settingsService = settingsService;
|
||||
this.emailService = emailService;
|
||||
this.jobQueue = jobQueue;
|
||||
this.stringService = stringService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates and returns JWT token with an arbitrary payload
|
||||
* @function
|
||||
* @param {Object} payload
|
||||
* @param {tokenType} typeOfToken - Whether to generate refresh token with long TTL or access token with short TTL.
|
||||
* @param {Object} appSettings
|
||||
* @returns {String}
|
||||
* @throws {Error}
|
||||
*/
|
||||
issueToken = (payload, typeOfToken, appSettings) => {
|
||||
try {
|
||||
const tokenTTL =
|
||||
typeOfToken === tokenType.REFRESH_TOKEN
|
||||
? (appSettings?.refreshTokenTTL ?? "7d")
|
||||
: (appSettings?.jwtTTL ?? "2h");
|
||||
const tokenSecret =
|
||||
typeOfToken === tokenType.REFRESH_TOKEN
|
||||
? appSettings?.refreshTokenSecret
|
||||
: appSettings?.jwtSecret;
|
||||
const payloadData = typeOfToken === tokenType.REFRESH_TOKEN ? {} : payload;
|
||||
|
||||
return jwt.sign(payloadData, tokenSecret, { expiresIn: tokenTTL });
|
||||
} catch (error) {
|
||||
throw handleError(error, SERVICE_NAME, "issueToken");
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Registers a new user. If the user is the first account, a JWT secret is created. If not, an invite token is required.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.body - The body of the request.
|
||||
* @property {string} req.body.inviteToken - The invite token for registration.
|
||||
* @property {Object} req.file - The file object for the user's profile image.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status, a message indicating the creation of the user, the created user data, and a JWT token.
|
||||
* @throws {Error} If there is an error during the process, especially if there is a validation error (422).
|
||||
*/
|
||||
registerUser = async (req, res, next) => {
|
||||
try {
|
||||
await registrationBodyValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
const validationError = handleValidationError(error, SERVICE_NAME);
|
||||
next(validationError);
|
||||
return;
|
||||
}
|
||||
// Create a new user
|
||||
try {
|
||||
const { inviteToken } = req.body;
|
||||
// If superAdmin exists, a token should be attached to all further register requests
|
||||
const superAdminExists = await this.db.checkSuperadmin(req, res);
|
||||
if (superAdminExists) {
|
||||
await this.db.getInviteTokenAndDelete(inviteToken);
|
||||
} else {
|
||||
// This is the first account, create JWT secret to use if one is not supplied by env
|
||||
const jwtSecret = crypto.randomBytes(64).toString("hex");
|
||||
await this.db.updateAppSettings({ jwtSecret });
|
||||
}
|
||||
|
||||
const newUser = await this.db.insertUser({ ...req.body }, req.file);
|
||||
logger.info({
|
||||
message: this.stringService.authCreateUser,
|
||||
service: SERVICE_NAME,
|
||||
details: newUser._id,
|
||||
});
|
||||
|
||||
const userForToken = { ...newUser._doc };
|
||||
delete userForToken.profileImage;
|
||||
delete userForToken.avatarImage;
|
||||
|
||||
const appSettings = await this.settingsService.getSettings();
|
||||
|
||||
const token = this.issueToken(userForToken, tokenType.ACCESS_TOKEN, appSettings);
|
||||
const refreshToken = this.issueToken({}, tokenType.REFRESH_TOKEN, appSettings);
|
||||
|
||||
this.emailService
|
||||
.buildAndSendEmail(
|
||||
"welcomeEmailTemplate",
|
||||
{ name: newUser.firstName },
|
||||
newUser.email,
|
||||
"Welcome to Uptime Monitor"
|
||||
)
|
||||
.catch((error) => {
|
||||
logger.error({
|
||||
message: error.message,
|
||||
service: SERVICE_NAME,
|
||||
method: "registerUser",
|
||||
stack: error.stack,
|
||||
});
|
||||
});
|
||||
|
||||
res.success({
|
||||
msg: this.stringService.authCreateUser,
|
||||
data: { user: newUser, token: token, refreshToken: refreshToken },
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "registerController"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Logs in a user by validating the user's credentials and issuing a JWT token.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.body - The body of the request.
|
||||
* @property {string} req.body.email - The email of the user.
|
||||
* @property {string} req.body.password - The password of the user.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status, a message indicating the login of the user, the user data (without password and avatar image), and a JWT token.
|
||||
* @throws {Error} If there is an error during the process, especially if there is a validation error (422) or the password is incorrect.
|
||||
*/
|
||||
loginUser = async (req, res, next) => {
|
||||
try {
|
||||
await loginValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
const validationError = handleValidationError(error, SERVICE_NAME);
|
||||
next(validationError);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const { email, password } = req.body;
|
||||
|
||||
// Check if user exists
|
||||
const user = await this.db.getUserByEmail(email);
|
||||
|
||||
// Compare password
|
||||
const match = await user.comparePassword(password);
|
||||
if (match !== true) {
|
||||
const error = new Error(this.stringService.authIncorrectPassword);
|
||||
error.status = 401;
|
||||
next(error);
|
||||
return;
|
||||
}
|
||||
|
||||
// Remove password from user object. Should this be abstracted to DB layer?
|
||||
const userWithoutPassword = { ...user._doc };
|
||||
delete userWithoutPassword.password;
|
||||
delete userWithoutPassword.avatarImage;
|
||||
|
||||
// Happy path, return token
|
||||
const appSettings = await this.settingsService.getSettings();
|
||||
const token = this.issueToken(
|
||||
userWithoutPassword,
|
||||
tokenType.ACCESS_TOKEN,
|
||||
appSettings
|
||||
);
|
||||
const refreshToken = this.issueToken({}, tokenType.REFRESH_TOKEN, appSettings);
|
||||
// reset avatar image
|
||||
userWithoutPassword.avatarImage = user.avatarImage;
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.authLoginUser,
|
||||
data: {
|
||||
user: userWithoutPassword,
|
||||
token: token,
|
||||
refreshToken: refreshToken,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
error.status = 401;
|
||||
next(handleError(error, SERVICE_NAME, "loginUser"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Generates new auth token if the refresh token is valid
|
||||
* @async
|
||||
* @param {Express.Request} req - The Express request object.
|
||||
* @property {Object} req.headers - The parameter of the request.
|
||||
* @param {Express.Response} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status, a message indicating new auth token is generated.
|
||||
* @throws {Error} If there is an error during the process such as any of the token is not received
|
||||
*/
|
||||
refreshAuthToken = async (req, res, next) => {
|
||||
|
||||
try {
|
||||
// check for refreshToken
|
||||
const refreshToken = req.headers["x-refresh-token"];
|
||||
|
||||
if (!refreshToken) {
|
||||
// No refresh token provided
|
||||
const error = new Error(this.stringService.noRefreshToken);
|
||||
error.status = 401;
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "refreshAuthToken";
|
||||
return next(error);
|
||||
}
|
||||
|
||||
// Verify refresh token
|
||||
const appSettings = await this.settingsService.getSettings();
|
||||
const { refreshTokenSecret } = appSettings;
|
||||
jwt.verify(refreshToken, refreshTokenSecret, async (refreshErr, refreshDecoded) => {
|
||||
if (refreshErr) {
|
||||
// Invalid or expired refresh token, trigger logout
|
||||
const errorMessage =
|
||||
refreshErr.name === "TokenExpiredError"
|
||||
? this.stringService.expiredAuthToken
|
||||
: this.stringService.invalidAuthToken;
|
||||
const error = new Error(errorMessage);
|
||||
error.status = 401;
|
||||
error.service = SERVICE_NAME;
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
// Refresh token is valid and unexpired, generate new access token
|
||||
const oldAuthToken = getTokenFromHeaders(req.headers);
|
||||
const { jwtSecret } = await this.settingsService.getSettings();
|
||||
const payloadData = jwt.verify(oldAuthToken, jwtSecret, { ignoreExpiration: true });
|
||||
// delete old token related data
|
||||
delete payloadData.iat;
|
||||
delete payloadData.exp;
|
||||
const newAuthToken = this.issueToken(
|
||||
payloadData,
|
||||
tokenType.ACCESS_TOKEN,
|
||||
appSettings
|
||||
);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.authTokenRefreshed,
|
||||
data: { user: payloadData, token: newAuthToken, refreshToken: refreshToken },
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "refreshAuthToken"));
|
||||
}
|
||||
};
|
||||
/**
|
||||
* Edits a user's information. If the user wants to change their password, the current password is checked before updating to the new password.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.params - The parameters of the request.
|
||||
* @property {string} req.params.userId - The ID of the user to be edited.
|
||||
* @property {Object} req.body - The body of the request.
|
||||
* @property {string} req.body.password - The current password of the user.
|
||||
* @property {string} req.body.newPassword - The new password of the user.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status, a message indicating the update of the user, and the updated user data.
|
||||
* @throws {Error} If there is an error during the process, especially if there is a validation error (422), the user is unauthorized (401), or the password is incorrect (403).
|
||||
*/
|
||||
editUser = async (req, res, next) => {
|
||||
|
||||
try {
|
||||
await editUserParamValidation.validateAsync(req.params);
|
||||
await editUserBodyValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
const validationError = handleValidationError(error, SERVICE_NAME);
|
||||
next(validationError);
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO is this neccessary any longer? Verify ownership middleware should handle this
|
||||
if (req.params.userId !== req.user._id.toString()) {
|
||||
const error = new Error(this.stringService.unauthorized);
|
||||
error.status = 401;
|
||||
error.service = SERVICE_NAME;
|
||||
next(error);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Change Password check
|
||||
if (req.body.password && req.body.newPassword) {
|
||||
// Get token from headers
|
||||
const token = getTokenFromHeaders(req.headers);
|
||||
// Get email from token
|
||||
const { jwtSecret } = this.settingsService.getSettings();
|
||||
const { email } = jwt.verify(token, jwtSecret);
|
||||
// Add user email to body for DB operation
|
||||
req.body.email = email;
|
||||
// Get user
|
||||
const user = await this.db.getUserByEmail(email);
|
||||
// Compare passwords
|
||||
const match = await user.comparePassword(req.body.password);
|
||||
// If not a match, throw a 403
|
||||
// 403 instead of 401 to avoid triggering axios interceptor
|
||||
if (!match) {
|
||||
const error = new Error(this.stringService.authIncorrectPassword);
|
||||
error.status = 403;
|
||||
next(error);
|
||||
return;
|
||||
}
|
||||
// If a match, update the password
|
||||
req.body.password = req.body.newPassword;
|
||||
}
|
||||
|
||||
const updatedUser = await this.db.updateUser(req, res);
|
||||
res.success({
|
||||
msg: this.stringService.authUpdateUser,
|
||||
data: updatedUser,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "userEditController"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks if a superadmin account exists in the database.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status, a message indicating the existence of a superadmin, and a boolean indicating the existence of a superadmin.
|
||||
* @throws {Error} If there is an error during the process.
|
||||
*/
|
||||
checkSuperadminExists = async (req, res, next) => {
|
||||
try {
|
||||
const superAdminExists = await this.db.checkSuperadmin(req, res);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.authAdminExists,
|
||||
data: superAdminExists,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "checkSuperadminController"));
|
||||
}
|
||||
};
|
||||
/**
|
||||
* Requests a recovery token for a user. The user's email is validated and a recovery token is created and sent via email.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.body - The body of the request.
|
||||
* @property {string} req.body.email - The email of the user requesting recovery.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status, a message indicating the creation of the recovery token, and the message ID of the sent email.
|
||||
* @throws {Error} If there is an error during the process, especially if there is a validation error (422).
|
||||
*/
|
||||
requestRecovery = async (req, res, next) => {
|
||||
try {
|
||||
await recoveryValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
const validationError = handleValidationError(error, SERVICE_NAME);
|
||||
next(validationError);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const { email } = req.body;
|
||||
const user = await this.db.getUserByEmail(email);
|
||||
const recoveryToken = await this.db.requestRecoveryToken(req, res);
|
||||
const name = user.firstName;
|
||||
const { clientHost } = this.settingsService.getSettings();
|
||||
const url = `${clientHost}/set-new-password/${recoveryToken.token}`;
|
||||
const msgId = await this.emailService.buildAndSendEmail(
|
||||
"passwordResetTemplate",
|
||||
{
|
||||
name,
|
||||
email,
|
||||
url,
|
||||
},
|
||||
email,
|
||||
"Checkmate Password Reset"
|
||||
);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.authCreateRecoveryToken,
|
||||
data: msgId,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "recoveryRequestController"));
|
||||
}
|
||||
};
|
||||
/**
|
||||
* Validates a recovery token. The recovery token is validated and if valid, a success message is returned.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.body - The body of the request.
|
||||
* @property {string} req.body.token - The recovery token to be validated.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status and a message indicating the validation of the recovery token.
|
||||
* @throws {Error} If there is an error during the process, especially if there is a validation error (422).
|
||||
*/
|
||||
validateRecovery = async (req, res, next) => {
|
||||
try {
|
||||
await recoveryTokenValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
const validationError = handleValidationError(error, SERVICE_NAME);
|
||||
next(validationError);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await this.db.validateRecoveryToken(req, res);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.authVerifyRecoveryToken,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "validateRecoveryTokenController"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Resets a user's password. The new password is validated and if valid, the user's password is updated in the database and a new JWT token is issued.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.body - The body of the request.
|
||||
* @property {string} req.body.token - The recovery token.
|
||||
* @property {string} req.body.password - The new password of the user.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status, a message indicating the reset of the password, the updated user data (without password and avatar image), and a new JWT token.
|
||||
* @throws {Error} If there is an error during the process, especially if there is a validation error (422).
|
||||
*/
|
||||
resetPassword = async (req, res, next) => {
|
||||
try {
|
||||
await newPasswordValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
const validationError = handleValidationError(error, SERVICE_NAME);
|
||||
next(validationError);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const user = await this.db.resetPassword(req, res);
|
||||
const appSettings = await this.settingsService.getSettings();
|
||||
const token = this.issueToken(user._doc, tokenType.ACCESS_TOKEN, appSettings);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.authResetPassword,
|
||||
data: { user, token },
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "resetPasswordController"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Deletes a user and all associated monitors, checks, and alerts.
|
||||
*
|
||||
* @param {Object} req - The request object.
|
||||
* @param {Object} res - The response object.
|
||||
* @param {Function} next - The next middleware function.
|
||||
* @returns {Object} The response object with success status and message.
|
||||
* @throws {Error} If user validation fails or user is not found in the database.
|
||||
*/
|
||||
deleteUser = async (req, res, next) => {
|
||||
try {
|
||||
const token = getTokenFromHeaders(req.headers);
|
||||
const decodedToken = jwt.decode(token);
|
||||
const { email } = decodedToken;
|
||||
|
||||
// Check if the user exists
|
||||
const user = await this.db.getUserByEmail(email);
|
||||
// 1. Find all the monitors associated with the team ID if superadmin
|
||||
|
||||
const result = await this.db.getMonitorsByTeamId({
|
||||
params: { teamId: user.teamId },
|
||||
});
|
||||
|
||||
if (user.role.includes("superadmin")) {
|
||||
//2. Remove all jobs, delete checks and alerts
|
||||
result?.monitors.length > 0 &&
|
||||
(await Promise.all(
|
||||
result.monitors.map(async (monitor) => {
|
||||
await this.jobQueue.deleteJob(monitor);
|
||||
await this.db.deleteChecks(monitor._id);
|
||||
await this.db.deletePageSpeedChecksByMonitorId(monitor._id);
|
||||
await this.db.deleteNotificationsByMonitorId(monitor._id);
|
||||
})
|
||||
));
|
||||
|
||||
// 3. Delete team
|
||||
await this.db.deleteTeam(user.teamId);
|
||||
// 4. Delete all other team members
|
||||
await this.db.deleteAllOtherUsers();
|
||||
// 5. Delete each monitor
|
||||
await this.db.deleteMonitorsByUserId(user._id);
|
||||
}
|
||||
// 6. Delete the user by id
|
||||
await this.db.deleteUser(user._id);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.authDeleteUser,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "deleteUserController"));
|
||||
}
|
||||
};
|
||||
|
||||
getAllUsers = async (req, res, next) => {
|
||||
try {
|
||||
const allUsers = await this.db.getAllUsers(req, res);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.authGetAllUsers,
|
||||
data: allUsers,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getAllUsersController"));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export default AuthController;
|
||||
@@ -1,154 +0,0 @@
|
||||
import {
|
||||
createCheckParamValidation,
|
||||
createCheckBodyValidation,
|
||||
getChecksParamValidation,
|
||||
getChecksQueryValidation,
|
||||
getTeamChecksParamValidation,
|
||||
getTeamChecksQueryValidation,
|
||||
deleteChecksParamValidation,
|
||||
deleteChecksByTeamIdParamValidation,
|
||||
updateChecksTTLBodyValidation,
|
||||
} from "../validation/joi.js";
|
||||
import jwt from "jsonwebtoken";
|
||||
import { getTokenFromHeaders } from "../utils/utils.js";
|
||||
import { handleValidationError, handleError } from "./controllerUtils.js";
|
||||
|
||||
const SERVICE_NAME = "checkController";
|
||||
|
||||
class CheckController {
|
||||
constructor(db, settingsService, stringService) {
|
||||
this.db = db;
|
||||
this.settingsService = settingsService;
|
||||
this.stringService = stringService;
|
||||
}
|
||||
|
||||
createCheck = async (req, res, next) => {
|
||||
try {
|
||||
await createCheckParamValidation.validateAsync(req.params);
|
||||
await createCheckBodyValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const checkData = { ...req.body };
|
||||
const check = await this.db.createCheck(checkData);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.checkCreate,
|
||||
data: check,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "createCheck"));
|
||||
}
|
||||
};
|
||||
|
||||
getChecksByMonitor = async (req, res, next) => {
|
||||
try {
|
||||
await getChecksParamValidation.validateAsync(req.params);
|
||||
await getChecksQueryValidation.validateAsync(req.query);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await this.db.getChecksByMonitor(req);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.checkGet,
|
||||
data: result,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getChecks"));
|
||||
}
|
||||
};
|
||||
|
||||
getChecksByTeam = async (req, res, next) => {
|
||||
try {
|
||||
await getTeamChecksParamValidation.validateAsync(req.params);
|
||||
await getTeamChecksQueryValidation.validateAsync(req.query);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const checkData = await this.db.getChecksByTeam(req);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.checkGet,
|
||||
data: checkData,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getTeamChecks"));
|
||||
}
|
||||
};
|
||||
|
||||
deleteChecks = async (req, res, next) => {
|
||||
try {
|
||||
await deleteChecksParamValidation.validateAsync(req.params);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const deletedCount = await this.db.deleteChecks(req.params.monitorId);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.checkDelete,
|
||||
data: { deletedCount },
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "deleteChecks"));
|
||||
}
|
||||
};
|
||||
|
||||
deleteChecksByTeamId = async (req, res, next) => {
|
||||
try {
|
||||
await deleteChecksByTeamIdParamValidation.validateAsync(req.params);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const deletedCount = await this.db.deleteChecksByTeamId(req.params.teamId);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.checkDelete,
|
||||
data: { deletedCount },
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "deleteChecksByTeamId"));
|
||||
}
|
||||
};
|
||||
|
||||
updateChecksTTL = async (req, res, next) => {
|
||||
const SECONDS_PER_DAY = 86400;
|
||||
|
||||
try {
|
||||
await updateChecksTTLBodyValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Get user's teamId
|
||||
const token = getTokenFromHeaders(req.headers);
|
||||
const { jwtSecret } = this.settingsService.getSettings();
|
||||
const { teamId } = jwt.verify(token, jwtSecret);
|
||||
const ttl = parseInt(req.body.ttl, 10) * SECONDS_PER_DAY;
|
||||
await this.db.updateChecksTTL(teamId, ttl);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.checkUpdateTTL,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "updateTTL"));
|
||||
}
|
||||
};
|
||||
}
|
||||
export default CheckController;
|
||||
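// Illustrative wiring sketch: how CheckController might be mounted on an Express
// router. The route paths, the verifyJWT middleware, and the dependency object are
// assumptions made for this example; the constructor signature and method names
// come from the controller above.
import { Router } from "express";

const buildCheckRoutes = ({ db, settingsService, stringService, verifyJWT }) => {
	const checkController = new CheckController(db, settingsService, stringService);
	const router = Router();
	router.post("/:monitorId", verifyJWT, checkController.createCheck);
	router.get("/:monitorId", verifyJWT, checkController.getChecksByMonitor);
	router.get("/team/:teamId", verifyJWT, checkController.getChecksByTeam);
	router.delete("/:monitorId", verifyJWT, checkController.deleteChecks);
	router.delete("/team/:teamId", verifyJWT, checkController.deleteChecksByTeamId);
	router.put("/ttl", verifyJWT, checkController.updateChecksTTL);
	return router;
};
// Usage sketch: app.use("/api/v1/checks", buildCheckRoutes(deps));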
@@ -1,26 +0,0 @@
|
||||
const handleValidationError = (error, serviceName) => {
|
||||
error.status = 422;
|
||||
error.service = serviceName;
|
||||
error.message = error.details?.[0]?.message || error.message || "Validation Error";
|
||||
return error;
|
||||
};
|
||||
|
||||
const handleError = (error, serviceName, method, status = 500) => {
|
||||
if (error.status === undefined) error.status = status;
if (error.service === undefined) error.service = serviceName;
if (error.method === undefined) error.method = method;
|
||||
return error;
|
||||
};
|
||||
|
||||
const fetchMonitorCertificate = async (sslChecker, monitor) => {
|
||||
const monitorUrl = new URL(monitor.url);
|
||||
const hostname = monitorUrl.hostname;
|
||||
const cert = await sslChecker(hostname);
|
||||
// Throw an error if no cert or if cert.validTo is not present
|
||||
if (cert?.validTo === null || cert?.validTo === undefined) {
|
||||
throw new Error("Certificate not found");
|
||||
}
|
||||
return cert;
|
||||
};
|
||||
|
||||
export { handleValidationError, handleError, fetchMonitorCertificate };
|
||||
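// Illustrative usage of fetchMonitorCertificate with a stubbed ssl-checker, e.g.
// from a unit test. The stub and the monitor object are assumptions made for the
// example; in the application the real `ssl-checker` package is passed in.
const exampleCertificateLookup = async () => {
	const fakeSslChecker = async (hostname) => ({
		validTo: "2030-01-01T00:00:00.000Z", // stubbed expiry for the example
	});
	const monitor = { url: "https://example.com/health" };
	const cert = await fetchMonitorCertificate(fakeSslChecker, monitor);
	return cert.validTo; // a missing validTo would throw "Certificate not found"
};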
@@ -1,212 +0,0 @@
|
||||
import { handleError } from "./controllerUtils.js";
|
||||
import Monitor from "../db/models/Monitor.js";
|
||||
import DistributedUptimeCheck from "../db/models/DistributedUptimeCheck.js";
|
||||
const SERVICE_NAME = "DistributedUptimeQueueController";
|
||||
|
||||
class DistributedUptimeController {
|
||||
constructor(db, http, statusService) {
|
||||
this.db = db;
|
||||
this.http = http;
|
||||
this.statusService = statusService;
|
||||
this.resultsCallback = this.resultsCallback.bind(this);
|
||||
this.getDistributedUptimeMonitors = this.getDistributedUptimeMonitors.bind(this);
|
||||
this.getDistributedUptimeMonitorDetails =
|
||||
this.getDistributedUptimeMonitorDetails.bind(this);
|
||||
}
|
||||
|
||||
async resultsCallback(req, res, next) {
|
||||
try {
|
||||
const { id, result } = req.body;
|
||||
// Calculate response time
|
||||
const {
|
||||
first_byte_took,
|
||||
body_read_took,
|
||||
dns_took,
|
||||
conn_took,
|
||||
connect_took,
|
||||
tls_took,
|
||||
status_code,
|
||||
error,
|
||||
} = result;
|
||||
|
||||
// Convert first_byte_took to milliseconds (it appears to be reported in nanoseconds, hence the division)
|
||||
const responseTime = first_byte_took / 1_000_000;
|
||||
|
||||
// Calculate if server is up or down
|
||||
const isErrorStatus = status_code >= 400;
|
||||
const hasError = error !== "";
|
||||
|
||||
const status = !(isErrorStatus || hasError);
|
||||
|
||||
// Build response
|
||||
const distributedUptimeResponse = {
|
||||
monitorId: id,
|
||||
type: "distributed_http",
|
||||
payload: result,
|
||||
status,
|
||||
code: status_code,
|
||||
responseTime,
|
||||
first_byte_took,
|
||||
body_read_took,
|
||||
dns_took,
|
||||
conn_took,
|
||||
connect_took,
|
||||
tls_took,
|
||||
};
|
||||
if (error) {
|
||||
const code = status_code || this.NETWORK_ERROR;
|
||||
distributedUptimeResponse.code = code;
|
||||
distributedUptimeResponse.message =
|
||||
this.http.STATUS_CODES[code] || "Network Error";
|
||||
} else {
|
||||
distributedUptimeResponse.message = this.http.STATUS_CODES[status_code];
|
||||
}
|
||||
|
||||
await this.statusService.updateStatus(distributedUptimeResponse);
|
||||
|
||||
res.status(200).json({ message: "OK" });
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "resultsCallback"));
|
||||
}
|
||||
}
|
||||
|
||||
async getDistributedUptimeMonitors(req, res, next) {
|
||||
try {
|
||||
res.setHeader("Content-Type", "text/event-stream");
|
||||
res.setHeader("Cache-Control", "no-cache");
|
||||
res.setHeader("Connection", "keep-alive");
|
||||
res.setHeader("Access-Control-Allow-Origin", "*");
|
||||
|
||||
const BATCH_DELAY = 1000;
|
||||
let batchTimeout = null;
|
||||
let opInProgress = false;
|
||||
|
||||
// Debounce change-stream events and push the latest monitor list to the client
|
||||
const notifyChange = async () => {
|
||||
if (opInProgress) {
|
||||
// Get data
|
||||
const monitors = await this.db.getMonitorsByTeamId(req);
|
||||
res.write(`data: ${JSON.stringify({ monitors })}\n\n`);
|
||||
opInProgress = false;
|
||||
}
|
||||
batchTimeout = null;
|
||||
};
|
||||
|
||||
const handleChange = () => {
|
||||
opInProgress = true;
|
||||
if (batchTimeout) clearTimeout(batchTimeout);
|
||||
batchTimeout = setTimeout(notifyChange, BATCH_DELAY);
|
||||
};
|
||||
|
||||
const monitorStream = Monitor.watch(
|
||||
[{ $match: { operationType: { $in: ["insert", "update", "delete"] } } }],
|
||||
{ fullDocument: "updateLookup" }
|
||||
);
|
||||
|
||||
const checksStream = DistributedUptimeCheck.watch(
|
||||
[{ $match: { operationType: { $in: ["insert", "update", "delete"] } } }],
|
||||
{ fullDocument: "updateLookup" }
|
||||
);
|
||||
|
||||
monitorStream.on("change", handleChange);
|
||||
checksStream.on("change", handleChange);
|
||||
|
||||
// Send initial data
|
||||
const monitors = await this.db.getMonitorsByTeamId(req);
|
||||
res.write(`data: ${JSON.stringify({ monitors })}\n\n`);
|
||||
|
||||
// Handle client disconnect
|
||||
req.on("close", () => {
|
||||
if (batchTimeout) {
|
||||
clearTimeout(batchTimeout);
|
||||
}
|
||||
monitorStream.close();
|
||||
checksStream.close();
|
||||
clearInterval(keepAlive);
|
||||
});
|
||||
|
||||
// Keep connection alive
|
||||
const keepAlive = setInterval(() => {
|
||||
res.write(": keepalive\n\n");
|
||||
}, 30000);
|
||||
|
||||
// Clean up on close
|
||||
req.on("close", () => {
|
||||
clearInterval(keepAlive);
|
||||
});
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
next(handleError(error, SERVICE_NAME, "getDistributedUptimeMonitors"));
|
||||
}
|
||||
}
|
||||
|
||||
async getDistributedUptimeMonitorDetails(req, res, next) {
|
||||
try {
|
||||
res.setHeader("Content-Type", "text/event-stream");
|
||||
res.setHeader("Cache-Control", "no-cache");
|
||||
res.setHeader("Connection", "keep-alive");
|
||||
res.setHeader("Access-Control-Allow-Origin", "*");
|
||||
|
||||
const BATCH_DELAY = 1000;
|
||||
let batchTimeout = null;
|
||||
let opInProgress = false;
|
||||
|
||||
// Debounce change-stream events and push the latest monitor details to the client
|
||||
const notifyChange = async () => {
|
||||
try {
|
||||
if (opInProgress) {
|
||||
// Get data
|
||||
const monitor = await this.db.getDistributedUptimeDetailsById(req);
|
||||
res.write(`data: ${JSON.stringify({ monitor })}\n\n`);
|
||||
opInProgress = false;
|
||||
}
|
||||
batchTimeout = null;
|
||||
} catch (error) {
|
||||
console.error("Error in notifyChange:", error);
|
||||
opInProgress = false;
|
||||
batchTimeout = null;
|
||||
next(handleError(error, SERVICE_NAME, "getDistributedUptimeMonitorDetails"));
|
||||
}
|
||||
};
|
||||
|
||||
const handleChange = () => {
|
||||
opInProgress = true;
|
||||
if (batchTimeout) clearTimeout(batchTimeout);
|
||||
batchTimeout = setTimeout(notifyChange, BATCH_DELAY);
|
||||
};
|
||||
|
||||
const checksStream = DistributedUptimeCheck.watch(
|
||||
[{ $match: { operationType: { $in: ["insert", "update", "delete"] } } }],
|
||||
{ fullDocument: "updateLookup" }
|
||||
);
|
||||
|
||||
checksStream.on("change", handleChange);
|
||||
|
||||
// Send initial data
|
||||
const monitor = await this.db.getDistributedUptimeDetailsById(req);
|
||||
res.write(`data: ${JSON.stringify({ monitor })}\n\n`);
|
||||
|
||||
// Handle client disconnect
|
||||
req.on("close", () => {
|
||||
if (batchTimeout) {
|
||||
clearTimeout(batchTimeout);
|
||||
}
|
||||
checksStream.close();
|
||||
clearInterval(keepAlive);
|
||||
});
|
||||
|
||||
// Keep connection alive
|
||||
const keepAlive = setInterval(() => {
|
||||
res.write(": keepalive\n\n");
|
||||
}, 30000);
|
||||
|
||||
// Clean up on close
|
||||
req.on("close", () => {
|
||||
clearInterval(keepAlive);
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getDistributedUptimeMonitorDetails"));
|
||||
}
|
||||
}
|
||||
}
|
||||
export default DistributedUptimeController;
|
||||
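// Illustrative browser-side consumer for the server-sent events streamed by
// getDistributedUptimeMonitors above. The endpoint URL is an assumption for the
// example; the payload shape (a JSON "data:" frame containing { monitors }, plus
// ": keepalive" comment frames every 30s, which EventSource ignores) follows the
// controller code.
const subscribeToDistributedUptime = (baseUrl) => {
	const source = new EventSource(`${baseUrl}/distributed-uptime/monitors`); // hypothetical path
	source.onmessage = (event) => {
		const { monitors } = JSON.parse(event.data);
		console.log(`Received ${monitors?.length ?? 0} monitors`);
	};
	source.onerror = () => source.close();
	return source;
};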
@@ -1,99 +0,0 @@
|
||||
import {
|
||||
inviteRoleValidation,
|
||||
inviteBodyValidation,
|
||||
inviteVerificationBodyValidation,
|
||||
} from "../validation/joi.js";
|
||||
import logger from "../utils/logger.js";
|
||||
import jwt from "jsonwebtoken";
|
||||
import { handleError, handleValidationError } from "./controllerUtils.js";
|
||||
import { getTokenFromHeaders } from "../utils/utils.js";
|
||||
|
||||
const SERVICE_NAME = "inviteController";
|
||||
|
||||
class InviteController {
|
||||
constructor(db, settingsService, emailService, stringService) {
|
||||
this.db = db;
|
||||
this.settingsService = settingsService;
|
||||
this.emailService = emailService;
|
||||
this.stringService = stringService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Issues an invitation to a new user. Only admins can invite new users. An invitation token is created and sent via email.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.headers - The headers of the request.
|
||||
* @property {string} req.headers.authorization - The authorization header containing the JWT token.
|
||||
* @property {Object} req.body - The body of the request.
|
||||
* @property {string} req.body.email - The email of the user to be invited.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status, a message indicating the sending of the invitation, and the invitation token.
|
||||
* @throws {Error} If there is an error during the process, especially if there is a validation error (422).
|
||||
*/
|
||||
issueInvitation = async (req, res, next) => {
|
||||
try {
|
||||
// Only admins can invite
|
||||
const token = getTokenFromHeaders(req.headers);
|
||||
const { role, firstname, teamId } = jwt.decode(token);
|
||||
req.body.teamId = teamId;
|
||||
try {
|
||||
await inviteRoleValidation.validateAsync({ roles: role });
|
||||
await inviteBodyValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
const inviteToken = await this.db.requestInviteToken({ ...req.body });
|
||||
const { clientHost } = this.settingsService.getSettings();
|
||||
this.emailService
|
||||
.buildAndSendEmail(
|
||||
"employeeActivationTemplate",
|
||||
{
|
||||
name: firstname,
|
||||
link: `${clientHost}/register/${inviteToken.token}`,
|
||||
},
|
||||
req.body.email,
|
||||
"Welcome to Uptime Monitor"
|
||||
)
|
||||
.catch((error) => {
|
||||
logger.error({
|
||||
message: error.message,
|
||||
service: SERVICE_NAME,
|
||||
method: "issueInvitation",
|
||||
stack: error.stack,
|
||||
});
|
||||
});
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.inviteIssued,
|
||||
data: inviteToken,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "inviteController"));
|
||||
}
|
||||
};
|
||||
|
||||
inviteVerifyController = async (req, res, next) => {
|
||||
try {
|
||||
await inviteVerificationBodyValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const invite = await this.db.getInviteToken(req.body.token);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.inviteVerified,
|
||||
data: invite,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "inviteVerifyController"));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export default InviteController;
|
||||
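// Illustrative client-side sketch of the invite flow handled above. The route
// paths, headers, and body fields are assumptions for the example; the emailed
// registration link format (`${clientHost}/register/${token}`) mirrors
// issueInvitation, and the verify step sends `{ token }` per inviteVerifyController.
const requestInvite = async (baseUrl, adminJwt, email) => {
	const response = await fetch(`${baseUrl}/invite`, { // hypothetical path
		method: "POST",
		headers: {
			"Content-Type": "application/json",
			Authorization: `Bearer ${adminJwt}`,
		},
		body: JSON.stringify({ email }),
	});
	return response.json(); // shape depends on the res.success response helper
};

const verifyInvite = async (baseUrl, token) => {
	const response = await fetch(`${baseUrl}/invite/verify`, { // hypothetical path
		method: "POST",
		headers: { "Content-Type": "application/json" },
		body: JSON.stringify({ token }),
	});
	return response.json();
};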
@@ -1,163 +0,0 @@
|
||||
import {
|
||||
createMaintenanceWindowBodyValidation,
|
||||
editMaintenanceWindowByIdParamValidation,
|
||||
editMaintenanceByIdWindowBodyValidation,
|
||||
getMaintenanceWindowByIdParamValidation,
|
||||
getMaintenanceWindowsByMonitorIdParamValidation,
|
||||
getMaintenanceWindowsByTeamIdQueryValidation,
|
||||
deleteMaintenanceWindowByIdParamValidation,
|
||||
} from "../validation/joi.js";
|
||||
import jwt from "jsonwebtoken";
|
||||
import { getTokenFromHeaders } from "../utils/utils.js";
|
||||
import { handleValidationError, handleError } from "./controllerUtils.js";
|
||||
|
||||
const SERVICE_NAME = "maintenanceWindowController";
|
||||
|
||||
class MaintenanceWindowController {
|
||||
constructor(db, settingsService, stringService) {
|
||||
this.db = db;
|
||||
this.settingsService = settingsService;
|
||||
this.stringService = stringService;
|
||||
}
|
||||
|
||||
createMaintenanceWindows = async (req, res, next) => {
|
||||
try {
|
||||
await createMaintenanceWindowBodyValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const token = getTokenFromHeaders(req.headers);
|
||||
const { jwtSecret } = this.settingsService.getSettings();
|
||||
const { teamId } = jwt.verify(token, jwtSecret);
|
||||
const monitorIds = req.body.monitors;
|
||||
const dbTransactions = monitorIds.map((monitorId) => {
|
||||
return this.db.createMaintenanceWindow({
|
||||
teamId,
|
||||
monitorId,
|
||||
name: req.body.name,
|
||||
active: req.body.active ?? true,
|
||||
repeat: req.body.repeat,
|
||||
start: req.body.start,
|
||||
end: req.body.end,
|
||||
});
|
||||
});
|
||||
await Promise.all(dbTransactions);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.maintenanceWindowCreate,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "createMaintenanceWindow"));
|
||||
}
|
||||
};
|
||||
|
||||
getMaintenanceWindowById = async (req, res, next) => {
|
||||
try {
|
||||
await getMaintenanceWindowByIdParamValidation.validateAsync(req.params);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const maintenanceWindow = await this.db.getMaintenanceWindowById(req.params.id);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.maintenanceWindowGetById,
|
||||
data: maintenanceWindow,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getMaintenanceWindowById"));
|
||||
}
|
||||
};
|
||||
|
||||
getMaintenanceWindowsByTeamId = async (req, res, next) => {
|
||||
try {
|
||||
await getMaintenanceWindowsByTeamIdQueryValidation.validateAsync(req.query);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const token = getTokenFromHeaders(req.headers);
|
||||
const { jwtSecret } = this.settingsService.getSettings();
|
||||
const { teamId } = jwt.verify(token, jwtSecret);
|
||||
const maintenanceWindows = await this.db.getMaintenanceWindowsByTeamId(
|
||||
teamId,
|
||||
req.query
|
||||
);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.maintenanceWindowGetByTeam,
|
||||
data: maintenanceWindows,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getMaintenanceWindowsByUserId"));
|
||||
}
|
||||
};
|
||||
|
||||
getMaintenanceWindowsByMonitorId = async (req, res, next) => {
|
||||
try {
|
||||
await getMaintenanceWindowsByMonitorIdParamValidation.validateAsync(req.params);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const maintenanceWindows = await this.db.getMaintenanceWindowsByMonitorId(
|
||||
req.params.monitorId
|
||||
);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.maintenanceWindowGetByUser,
|
||||
data: maintenanceWindows,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getMaintenanceWindowsByMonitorId"));
|
||||
}
|
||||
};
|
||||
|
||||
deleteMaintenanceWindow = async (req, res, next) => {
|
||||
try {
|
||||
await deleteMaintenanceWindowByIdParamValidation.validateAsync(req.params);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
try {
|
||||
await this.db.deleteMaintenanceWindowById(req.params.id);
|
||||
return res.success({
|
||||
msg: this.stringService.maintenanceWindowDelete,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "deleteMaintenanceWindow"));
|
||||
}
|
||||
};
|
||||
|
||||
editMaintenanceWindow = async (req, res, next) => {
|
||||
try {
|
||||
await editMaintenanceWindowByIdParamValidation.validateAsync(req.params);
|
||||
await editMaintenanceByIdWindowBodyValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const editedMaintenanceWindow = await this.db.editMaintenanceWindowById(
|
||||
req.params.id,
|
||||
req.body
|
||||
);
|
||||
return res.success({
|
||||
msg: this.stringService.maintenanceWindowEdit,
|
||||
data: editedMaintenanceWindow,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "editMaintenanceWindow"));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export default MaintenanceWindowController;
|
||||
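// Illustrative request body for createMaintenanceWindows above: one maintenance
// window document is created per entry in `monitors`, and `active` defaults to
// true when omitted. All values below are made up for the example, and the unit
// of `repeat` is an assumption.
const exampleMaintenanceWindowBody = {
	monitors: ["6659f0c2a1b2c3d4e5f60718"], // monitor IDs
	name: "Nightly database backup",
	active: true,
	repeat: 86400000, // illustrative value, e.g. repeat every 24h in milliseconds
	start: "2025-06-01T02:00:00.000Z",
	end: "2025-06-01T03:00:00.000Z",
};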
@@ -1,548 +0,0 @@
|
||||
import {
|
||||
getMonitorByIdParamValidation,
|
||||
getMonitorByIdQueryValidation,
|
||||
getMonitorsByTeamIdParamValidation,
|
||||
getMonitorsByTeamIdQueryValidation,
|
||||
createMonitorBodyValidation,
|
||||
getMonitorURLByQueryValidation,
|
||||
editMonitorBodyValidation,
|
||||
pauseMonitorParamValidation,
|
||||
getMonitorStatsByIdParamValidation,
|
||||
getMonitorStatsByIdQueryValidation,
|
||||
getCertificateParamValidation,
|
||||
getHardwareDetailsByIdParamValidation,
|
||||
getHardwareDetailsByIdQueryValidation,
|
||||
} from "../validation/joi.js";
|
||||
import sslChecker from "ssl-checker";
|
||||
import jwt from "jsonwebtoken";
|
||||
import { getTokenFromHeaders } from "../utils/utils.js";
|
||||
import logger from "../utils/logger.js";
|
||||
import { handleError, handleValidationError } from "./controllerUtils.js";
|
||||
import axios from "axios";
|
||||
import seedDb from "../db/mongo/utils/seedDb.js";
|
||||
const SERVICE_NAME = "monitorController";
|
||||
|
||||
class MonitorController {
|
||||
constructor(db, settingsService, jobQueue, stringService) {
|
||||
this.db = db;
|
||||
this.settingsService = settingsService;
|
||||
this.jobQueue = jobQueue;
|
||||
this.stringService = stringService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all monitors
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @param {function} next
|
||||
* @returns {Promise<Express.Response>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
getAllMonitors = async (req, res, next) => {
|
||||
try {
|
||||
const monitors = await this.db.getAllMonitors();
|
||||
return res.success({
|
||||
msg: this.stringService.monitorGetAll,
|
||||
data: monitors,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getAllMonitors"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns all monitors with uptime stats for 1,7,30, and 90 days
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @param {function} next
|
||||
* @returns {Promise<Express.Response>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
getAllMonitorsWithUptimeStats = async (req, res, next) => {
|
||||
try {
|
||||
const monitors = await this.db.getAllMonitorsWithUptimeStats();
|
||||
return res.success({
|
||||
msg: this.stringService.monitorGetAll,
|
||||
data: monitors,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getAllMonitorsWithUptimeStats"));
|
||||
}
|
||||
};
|
||||
|
||||
getUptimeDetailsById = async (req, res, next) => {
|
||||
try {
|
||||
const monitor = await this.db.getUptimeDetailsById(req);
|
||||
return res.success({
|
||||
msg: this.stringService.monitorGetById,
|
||||
data: monitor,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getMonitorDetailsById"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns monitor stats for monitor with matching ID
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @param {function} next
|
||||
* @returns {Promise<Express.Response>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
getMonitorStatsById = async (req, res, next) => {
|
||||
try {
|
||||
await getMonitorStatsByIdParamValidation.validateAsync(req.params);
|
||||
await getMonitorStatsByIdQueryValidation.validateAsync(req.query);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const monitorStats = await this.db.getMonitorStatsById(req);
|
||||
return res.success({
|
||||
msg: this.stringService.monitorStatsById,
|
||||
data: monitorStats,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getMonitorStatsById"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Get hardware details for a specific monitor by ID
|
||||
* @async
|
||||
* @param {Express.Request} req - Express request object containing monitorId in params
|
||||
* @param {Express.Response} res - Express response object
|
||||
* @param {Express.NextFunction} next - Express next middleware function
|
||||
* @returns {Promise<Express.Response>}
|
||||
* @throws {Error} - Throws error if monitor not found or other database errors
|
||||
*/
|
||||
getHardwareDetailsById = async (req, res, next) => {
|
||||
try {
|
||||
await getHardwareDetailsByIdParamValidation.validateAsync(req.params);
|
||||
await getHardwareDetailsByIdQueryValidation.validateAsync(req.query);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const monitor = await this.db.getHardwareDetailsById(req);
|
||||
return res.success({
|
||||
msg: this.stringService.monitorGetById,
|
||||
data: monitor,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getHardwareDetailsById"));
|
||||
}
|
||||
};
|
||||
|
||||
getMonitorCertificate = async (req, res, next, fetchMonitorCertificate) => {
|
||||
try {
|
||||
await getCertificateParamValidation.validateAsync(req.params);
|
||||
} catch (error) {
next(handleValidationError(error, SERVICE_NAME));
return;
}
|
||||
|
||||
try {
|
||||
const { monitorId } = req.params;
|
||||
const monitor = await this.db.getMonitorById(monitorId);
|
||||
const certificate = await fetchMonitorCertificate(sslChecker, monitor);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.monitorCertificate,
|
||||
data: {
|
||||
certificateDate: new Date(certificate.validTo),
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getMonitorCertificate"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieves a monitor by its ID.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.params - The parameters of the request.
|
||||
* @property {string} req.params.monitorId - The ID of the monitor to be retrieved.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status, a message, and the retrieved monitor data.
|
||||
* @throws {Error} If there is an error during the process, especially if the monitor is not found (404) or if there is a validation error (422).
|
||||
*/
|
||||
getMonitorById = async (req, res, next) => {
|
||||
try {
|
||||
await getMonitorByIdParamValidation.validateAsync(req.params);
|
||||
await getMonitorByIdQueryValidation.validateAsync(req.query);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const monitor = await this.db.getMonitorById(req.params.monitorId);
|
||||
return res.success({
|
||||
msg: this.stringService.monitorGetById,
|
||||
data: monitor,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getMonitorById"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a new monitor and adds it to the job queue.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.body - The body of the request.
|
||||
* @property {Array} req.body.notifications - The notifications associated with the monitor.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status, a message indicating the creation of the monitor, and the created monitor data.
|
||||
* @throws {Error} If there is an error during the process, especially if there is a validation error (422).
|
||||
*/
|
||||
createMonitor = async (req, res, next) => {
|
||||
try {
|
||||
await createMonitorBodyValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const notifications = req.body.notifications;
|
||||
const monitor = await this.db.createMonitor(req, res);
|
||||
|
||||
if (notifications && notifications.length > 0) {
|
||||
monitor.notifications = await Promise.all(
|
||||
notifications.map(async (notification) => {
|
||||
notification.monitorId = monitor._id;
|
||||
return await this.db.createNotification(notification);
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
await monitor.save();
|
||||
// Add monitor to job queue
|
||||
this.jobQueue.addJob(monitor._id, monitor);
|
||||
return res.success({
|
||||
msg: this.stringService.monitorCreate,
|
||||
data: monitor,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "createMonitor"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks if the endpoint can be resolved
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.query - The query parameters of the request.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status, a message, and the resolution result.
|
||||
* @throws {Error} If there is an error during the process, especially if there is a validation error (422).
|
||||
*/
|
||||
checkEndpointResolution = async (req, res, next) => {
|
||||
try {
|
||||
await getMonitorURLByQueryValidation.validateAsync(req.query);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const { monitorURL } = req.query;
|
||||
const parsedUrl = new URL(monitorURL);
|
||||
const response = await axios.get(parsedUrl, {
|
||||
timeout: 5000,
|
||||
validateStatus: () => true,
|
||||
});
|
||||
return res.success({
|
||||
status: response.status,
|
||||
msg: response.statusText,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "checkEndpointResolution"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Deletes a monitor by its ID and also deletes associated checks, alerts, and notifications.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.params - The parameters of the request.
|
||||
* @property {string} req.params.monitorId - The ID of the monitor to be deleted.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status and a message indicating the deletion of the monitor.
|
||||
* @throws {Error} If there is an error during the process, especially if there is a validation error (422) or an error in deleting associated records.
|
||||
*/
|
||||
deleteMonitor = async (req, res, next) => {
|
||||
try {
|
||||
await getMonitorByIdParamValidation.validateAsync(req.params);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const monitor = await this.db.deleteMonitor(req, res, next);
|
||||
// Delete associated checks, alerts, and notifications
|
||||
|
||||
try {
|
||||
const operations = [
|
||||
{ name: "deleteJob", fn: () => this.jobQueue.deleteJob(monitor) },
|
||||
{ name: "deleteChecks", fn: () => this.db.deleteChecks(monitor._id) },
|
||||
{
|
||||
name: "deletePageSpeedChecks",
|
||||
fn: () => this.db.deletePageSpeedChecksByMonitorId(monitor._id),
|
||||
},
|
||||
{
|
||||
name: "deleteNotifications",
|
||||
fn: () => this.db.deleteNotificationsByMonitorId(monitor._id),
|
||||
},
|
||||
{
|
||||
name: "deleteHardwareChecks",
|
||||
fn: () => this.db.deleteHardwareChecksByMonitorId(monitor._id),
|
||||
},
|
||||
{
|
||||
name: "deleteDistributedUptimeChecks",
|
||||
fn: () => this.db.deleteDistributedChecksByMonitorId(monitor._id),
|
||||
},
|
||||
|
||||
// TODO We don't actually want to delete the status page if there are other monitors in it
|
||||
// We actually just want to remove the monitor being deleted from the status page.
|
||||
// Only delete the status page if there are no other monitors in it.
|
||||
{
|
||||
name: "deleteStatusPages",
|
||||
fn: () => this.db.deleteStatusPagesByMonitorId(monitor._id),
|
||||
},
|
||||
];
|
||||
const results = await Promise.allSettled(operations.map((op) => op.fn()));
|
||||
|
||||
results.forEach((result, index) => {
|
||||
if (result.status === "rejected") {
|
||||
const operationName = operations[index].name;
|
||||
logger.error({
|
||||
message: `Failed to ${operationName} for monitor ${monitor._id}`,
|
||||
service: SERVICE_NAME,
|
||||
method: "deleteMonitor",
|
||||
stack: result.reason.stack,
|
||||
});
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
message: `Error deleting associated records for monitor ${monitor._id} with name ${monitor.name}`,
|
||||
service: SERVICE_NAME,
|
||||
method: "deleteMonitor",
|
||||
stack: error.stack,
|
||||
});
|
||||
}
|
||||
return res.success({ msg: this.stringService.monitorDelete });
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "deleteMonitor"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Deletes all monitors associated with a team.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.headers - The headers of the request.
|
||||
* @property {string} req.headers.authorization - The authorization header containing the JWT token.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next
|
||||
* @returns {Object} The response object with a success status and a message indicating the number of deleted monitors.
|
||||
* @throws {Error} If there is an error during the deletion process.
|
||||
*/
|
||||
deleteAllMonitors = async (req, res, next) => {
|
||||
try {
|
||||
const token = getTokenFromHeaders(req.headers);
|
||||
const { jwtSecret } = this.settingsService.getSettings();
|
||||
const { teamId } = jwt.verify(token, jwtSecret);
|
||||
const { monitors, deletedCount } = await this.db.deleteAllMonitors(teamId);
|
||||
await Promise.all(
|
||||
monitors.map(async (monitor) => {
|
||||
try {
|
||||
await this.jobQueue.deleteJob(monitor);
|
||||
await this.db.deleteChecks(monitor._id);
|
||||
await this.db.deletePageSpeedChecksByMonitorId(monitor._id);
|
||||
await this.db.deleteNotificationsByMonitorId(monitor._id);
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
message: `Error deleting associated records for monitor ${monitor._id} with name ${monitor.name}`,
|
||||
service: SERVICE_NAME,
|
||||
method: "deleteAllMonitors",
|
||||
stack: error.stack,
|
||||
});
|
||||
}
|
||||
})
|
||||
);
|
||||
return res.success({ msg: `Deleted ${deletedCount} monitors` });
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "deleteAllMonitors"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Edits a monitor by its ID, updates its notifications, and updates its job in the job queue.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.params - The parameters of the request.
|
||||
* @property {string} req.params.monitorId - The ID of the monitor to be edited.
|
||||
* @property {Object} req.body - The body of the request.
|
||||
* @property {Array} req.body.notifications - The notifications to be associated with the monitor.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status, a message indicating the editing of the monitor, and the edited monitor data.
|
||||
* @throws {Error} If there is an error during the process, especially if there is a validation error (422).
|
||||
*/
|
||||
editMonitor = async (req, res, next) => {
|
||||
try {
|
||||
await getMonitorByIdParamValidation.validateAsync(req.params);
|
||||
await editMonitorBodyValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const { monitorId } = req.params;
|
||||
const monitorBeforeEdit = await this.db.getMonitorById(monitorId);
|
||||
|
||||
// Get notifications from the request body
|
||||
const notifications = req.body.notifications;
|
||||
|
||||
const editedMonitor = await this.db.editMonitor(monitorId, req.body);
|
||||
|
||||
await this.db.deleteNotificationsByMonitorId(editedMonitor._id);
|
||||
|
||||
if (notifications?.length) {
await Promise.all(
notifications.map(async (notification) => {
notification.monitorId = editedMonitor._id;
await this.db.createNotification(notification);
})
);
}
|
||||
|
||||
// Delete the old job (editedMonitor has the same ID as the old monitor)
|
||||
await this.jobQueue.deleteJob(monitorBeforeEdit);
|
||||
// Add the new job back to the queue
|
||||
await this.jobQueue.addJob(editedMonitor._id, editedMonitor);
|
||||
return res.success({
|
||||
msg: this.stringService.monitorEdit,
|
||||
data: editedMonitor,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "editMonitor"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Pauses or resumes a monitor based on its current state.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.params - The parameters of the request.
|
||||
* @property {string} req.params.monitorId - The ID of the monitor to be paused or resumed.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status, a message indicating the new state of the monitor, and the updated monitor data.
|
||||
* @throws {Error} If there is an error during the process.
|
||||
*/
|
||||
pauseMonitor = async (req, res, next) => {
|
||||
try {
|
||||
await pauseMonitorParamValidation.validateAsync(req.params);
|
||||
} catch (error) {
next(handleValidationError(error, SERVICE_NAME));
return;
}
|
||||
|
||||
try {
|
||||
const monitor = await this.db.getMonitorById(req.params.monitorId);
|
||||
if (monitor.isActive === true) {
await this.jobQueue.deleteJob(monitor);
} else {
await this.jobQueue.addJob(monitor._id, monitor);
}

monitor.isActive = !monitor.isActive;
monitor.status = undefined;
await monitor.save();
|
||||
return res.success({
|
||||
msg: monitor.isActive
|
||||
? this.stringService.monitorResume
|
||||
: this.stringService.monitorPause,
|
||||
data: monitor,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "pauseMonitor"));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Adds demo monitors for a team.
|
||||
* @async
|
||||
* @param {Object} req - The Express request object.
|
||||
* @property {Object} req.headers - The headers of the request.
|
||||
* @property {string} req.headers.authorization - The authorization header containing the JWT token.
|
||||
* @param {Object} res - The Express response object.
|
||||
* @param {function} next - The next middleware function.
|
||||
* @returns {Object} The response object with a success status, a message indicating the addition of demo monitors, and the number of demo monitors added.
|
||||
* @throws {Error} If there is an error during the process.
|
||||
*/
|
||||
addDemoMonitors = async (req, res, next) => {
|
||||
try {
|
||||
const token = getTokenFromHeaders(req.headers);
|
||||
const { jwtSecret } = this.settingsService.getSettings();
|
||||
const { _id, teamId } = jwt.verify(token, jwtSecret);
|
||||
const demoMonitors = await this.db.addDemoMonitors(_id, teamId);
|
||||
await Promise.all(
|
||||
demoMonitors.map((monitor) => this.jobQueue.addJob(monitor._id, monitor))
|
||||
);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.monitorDemoAdded,
|
||||
data: demoMonitors.length,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "addDemoMonitors"));
|
||||
}
|
||||
};
|
||||
|
||||
getMonitorsByTeamId = async (req, res, next) => {
|
||||
try {
|
||||
await getMonitorsByTeamIdParamValidation.validateAsync(req.params);
|
||||
await getMonitorsByTeamIdQueryValidation.validateAsync(req.query);
|
||||
} catch (error) {
next(handleValidationError(error, SERVICE_NAME));
return;
}
|
||||
|
||||
try {
|
||||
const monitors = await this.db.getMonitorsByTeamId(req);
|
||||
return res.success({
|
||||
msg: this.stringService.monitorGetByTeamId,
|
||||
data: monitors,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getMonitorsByTeamId"));
|
||||
}
|
||||
};
|
||||
|
||||
seedDb = async (req, res, next) => {
|
||||
try {
|
||||
const token = getTokenFromHeaders(req.headers);
|
||||
const { jwtSecret } = this.settingsService.getSettings();
|
||||
const { _id, teamId } = jwt.verify(token, jwtSecret);
|
||||
await seedDb(_id, teamId);
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "seedDb"));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export default MonitorController;
|
||||
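// Illustrative helper distilled from the deleteMonitor cleanup above: run a set of
// named async operations with Promise.allSettled and log each failure without
// aborting the rest. `logger` is the same logger imported at the top of this file;
// the helper name and signature are assumptions for the example.
const runCleanupOperations = async (operations, { service, method }) => {
	const results = await Promise.allSettled(operations.map((op) => op.fn()));
	results.forEach((result, index) => {
		if (result.status === "rejected") {
			logger.error({
				message: `Failed to ${operations[index].name}`,
				service,
				method,
				stack: result.reason?.stack,
			});
		}
	});
	return results;
};
// Usage sketch: await runCleanupOperations(operations, { service: SERVICE_NAME, method: "deleteMonitor" });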
@@ -1,59 +0,0 @@
|
||||
import { triggerNotificationBodyValidation } from "../validation/joi.js";
import { handleError, handleValidationError } from "./controllerUtils.js";
|
||||
|
||||
const SERVICE_NAME = "NotificationController";
|
||||
|
||||
class NotificationController {
|
||||
constructor(notificationService, stringService) {
|
||||
this.notificationService = notificationService;
|
||||
this.stringService = stringService;
|
||||
this.triggerNotification = this.triggerNotification.bind(this);
|
||||
}
|
||||
|
||||
async triggerNotification(req, res, next) {
|
||||
try {
|
||||
await triggerNotificationBodyValidation.validateAsync(req.body, {
|
||||
abortEarly: false,
|
||||
stripUnknown: true
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const { monitorId, type, platform, config } = req.body;
|
||||
|
||||
const networkResponse = {
|
||||
monitor: { _id: monitorId, name: "Test Monitor", url: "http://www.google.com" },
|
||||
status: false,
|
||||
statusChanged: true,
|
||||
prevStatus: true,
|
||||
};
|
||||
|
||||
if (type === "webhook") {
|
||||
const notification = {
|
||||
type,
|
||||
platform,
|
||||
config
|
||||
};
|
||||
|
||||
await this.notificationService.sendWebhookNotification(
|
||||
networkResponse,
|
||||
notification
|
||||
);
|
||||
}
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.webhookSendSuccess
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "triggerNotification"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default NotificationController;
|
||||
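// Illustrative payload for triggerNotification above. Only `type: "webhook"`
// actually sends anything in the handler; the platform and config values below
// are example data, not a documented schema.
const exampleTriggerNotificationBody = {
	monitorId: "6659f0c2a1b2c3d4e5f60718",
	type: "webhook",
	platform: "slack", // assumed platform identifier
	config: { webhookUrl: "https://hooks.slack.com/services/T000/B000/XXXX" }, // assumed shape
};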
@@ -1,61 +0,0 @@
|
||||
import { handleError } from "./controllerUtils.js";
|
||||
|
||||
const SERVICE_NAME = "JobQueueController";
|
||||
|
||||
class JobQueueController {
|
||||
constructor(jobQueue, stringService) {
|
||||
this.jobQueue = jobQueue;
|
||||
this.stringService = stringService;
|
||||
}
|
||||
|
||||
getMetrics = async (req, res, next) => {
|
||||
try {
|
||||
const metrics = await this.jobQueue.getMetrics();
|
||||
res.success({
|
||||
msg: this.stringService.queueGetMetrics,
|
||||
data: metrics,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getMetrics"));
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
getJobs = async (req, res, next) => {
|
||||
try {
|
||||
const jobs = await this.jobQueue.getJobStats();
|
||||
return res.success({
|
||||
msg: this.stringService.queueGetMetrics,
|
||||
data: jobs,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getJobs"));
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
addJob = async (req, res, next) => {
|
||||
try {
|
||||
await this.jobQueue.addJob(Math.random().toString(36).substring(7));
|
||||
return res.success({
|
||||
msg: this.stringService.queueAddJob,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "addJob"));
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
obliterateQueue = async (req, res, next) => {
|
||||
try {
|
||||
await this.jobQueue.obliterate();
|
||||
return res.success({
|
||||
msg: this.stringService.queueObliterate,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "obliterateQueue"));
|
||||
return;
|
||||
}
|
||||
};
|
||||
}
|
||||
export default JobQueueController;
|
||||
@@ -1,48 +0,0 @@
|
||||
import { updateAppSettingsBodyValidation } from "../validation/joi.js";
|
||||
import { handleValidationError, handleError } from "./controllerUtils.js";
|
||||
|
||||
const SERVICE_NAME = "SettingsController";
|
||||
|
||||
class SettingsController {
|
||||
constructor(db, settingsService, stringService) {
|
||||
this.db = db;
|
||||
this.settingsService = settingsService;
|
||||
this.stringService = stringService;
|
||||
}
|
||||
|
||||
getAppSettings = async (req, res, next) => {
|
||||
try {
|
||||
const settings = { ...(await this.settingsService.getSettings()) };
|
||||
delete settings.jwtSecret;
|
||||
return res.success({
|
||||
msg: this.stringService.getAppSettings,
|
||||
data: settings,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getAppSettings"));
|
||||
}
|
||||
};
|
||||
|
||||
updateAppSettings = async (req, res, next) => {
|
||||
try {
|
||||
await updateAppSettingsBodyValidation.validateAsync(req.body);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await this.db.updateAppSettings(req.body);
|
||||
const updatedSettings = { ...(await this.settingsService.reloadSettings()) };
|
||||
delete updatedSettings.jwtSecret;
|
||||
return res.success({
|
||||
msg: this.stringService.updateAppSettings,
|
||||
data: updatedSettings,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "updateAppSettings"));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export default SettingsController;
|
||||
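// Both handlers above copy the settings object and delete jwtSecret before
// responding. A small helper expressing the same idea (a sketch; any sensitive
// keys beyond jwtSecret are an assumption, since jwtSecret is the only one
// stripped above):
const redactSettings = (settings, sensitiveKeys = ["jwtSecret"]) => {
	const copy = { ...settings };
	for (const key of sensitiveKeys) delete copy[key];
	return copy;
};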
@@ -1,120 +0,0 @@
|
||||
import { handleError, handleValidationError } from "./controllerUtils.js";
|
||||
import {
|
||||
createStatusPageBodyValidation,
|
||||
getStatusPageParamValidation,
|
||||
getStatusPageQueryValidation,
|
||||
imageValidation,
|
||||
} from "../validation/joi.js";
|
||||
|
||||
const SERVICE_NAME = "statusPageController";
|
||||
|
||||
class StatusPageController {
|
||||
constructor(db, stringService) {
|
||||
this.db = db;
|
||||
this.stringService = stringService;
|
||||
}
|
||||
|
||||
createStatusPage = async (req, res, next) => {
|
||||
try {
|
||||
await createStatusPageBodyValidation.validateAsync(req.body);
|
||||
await imageValidation.validateAsync(req.file);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const statusPage = await this.db.createStatusPage(req.body, req.file);
|
||||
return res.success({
|
||||
msg: this.stringService.statusPageCreate,
|
||||
data: statusPage,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "createStatusPage"));
|
||||
}
|
||||
};
|
||||
|
||||
updateStatusPage = async (req, res, next) => {
|
||||
try {
|
||||
await createStatusPageBodyValidation.validateAsync(req.body);
|
||||
await imageValidation.validateAsync(req.file);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const statusPage = await this.db.updateStatusPage(req.body, req.file);
|
||||
if (statusPage === null) {
|
||||
const error = new Error(this.stringService.statusPageNotFound);
|
||||
error.status = 404;
|
||||
throw error;
|
||||
}
|
||||
return res.success({
|
||||
msg: this.stringService.statusPageUpdate,
|
||||
data: statusPage,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "updateStatusPage"));
|
||||
}
|
||||
};
|
||||
|
||||
getStatusPage = async (req, res, next) => {
|
||||
try {
|
||||
const statusPage = await this.db.getStatusPage();
|
||||
return res.success({
|
||||
msg: this.stringService.statusPageByUrl,
|
||||
data: statusPage,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getStatusPage"));
|
||||
}
|
||||
};
|
||||
|
||||
getStatusPageByUrl = async (req, res, next) => {
|
||||
try {
|
||||
await getStatusPageParamValidation.validateAsync(req.params);
|
||||
await getStatusPageQueryValidation.validateAsync(req.query);
|
||||
} catch (error) {
|
||||
next(handleValidationError(error, SERVICE_NAME));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const statusPage = await this.db.getStatusPageByUrl(req.params.url, req.query.type);
|
||||
return res.success({
|
||||
msg: this.stringService.statusPageByUrl,
|
||||
data: statusPage,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getStatusPageByUrl"));
|
||||
}
|
||||
};
|
||||
|
||||
getStatusPagesByTeamId = async (req, res, next) => {
|
||||
try {
|
||||
const teamId = req.params.teamId;
|
||||
const statusPages = await this.db.getStatusPagesByTeamId(teamId);
|
||||
|
||||
return res.success({
|
||||
msg: this.stringService.statusPageByTeamId,
|
||||
data: statusPages,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "getStatusPageByTeamId"));
|
||||
}
|
||||
};
|
||||
|
||||
deleteStatusPage = async (req, res, next) => {
|
||||
try {
|
||||
await this.db.deleteStatusPage(req.params.url);
|
||||
return res.success({
|
||||
msg: this.stringService.statusPageDelete,
|
||||
});
|
||||
} catch (error) {
|
||||
next(handleError(error, SERVICE_NAME, "deleteStatusPage"));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export default StatusPageController;
|
||||
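// createStatusPage and updateStatusPage above read `req.file`, which implies a
// multipart upload middleware populates it before the controller runs. A sketch
// using multer (an assumption; the actual middleware, upload field name, and
// route paths are not shown in this file):
import multer from "multer";

const upload = multer({ storage: multer.memoryStorage() });
// router.post("/status-page", upload.single("logo"), statusPageController.createStatusPage);
// router.put("/status-page", upload.single("logo"), statusPageController.updateStatusPage);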
@@ -1,138 +0,0 @@
|
||||
// **************************
|
||||
// The idea here is to provide a layer of abstraction between the database and whoever is using it.
|
||||
// Instead of directly calling mongoose methods, we can call the methods on the DB object.
|
||||
// If this were TypeScript or Java or Golang, an interface would be implemented to ensure the methods are available.
// But we do the best we can with JavaScript.
|
||||
//
|
||||
// If the methods are consistent all we have to do to swap out one DB for another is simply change the import.
|
||||
//
|
||||
// Example:
|
||||
// We start with the fake DB:
|
||||
//
|
||||
// const db = require("../db/FakeDb");
|
||||
// const monitors = await db.getAllMonitors();
|
||||
//
|
||||
// And when we want to switch to a real DB, all we have to do is swap the import
|
||||
//
|
||||
// const db = require("../db/MongoDb");
|
||||
// const monitors = await db.getAllMonitors();
|
||||
//
|
||||
// The rest of the code is the same, as all the `db` methods are standardized.
|
||||
// **************************
|
||||
|
||||
const Monitor = require("./models/Monitor");
|
||||
const UserModel = require("./models/User");
|
||||
const bcrypt = require("bcrypt");
|
||||
|
||||
let FAKE_MONITOR_DATA = [];
|
||||
const USERS = [];
|
||||
|
||||
const connect = async () => {
|
||||
try {
|
||||
await console.log("Connected to FakeDB");
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
}
|
||||
};
|
||||
|
||||
const insertUser = async (req, res) => {
|
||||
try {
|
||||
const newUser = new UserModel({ ...req.body });
|
||||
const salt = await bcrypt.genSalt(10); //genSalt is asynchronous, need to wait
|
||||
newUser.password = await bcrypt.hash(newUser.password, salt); // hash is also async, need to either await or use hashSync
|
||||
USERS.push(newUser);
|
||||
const userToReturn = { ...newUser._doc };
|
||||
delete userToReturn.password;
|
||||
return userToReturn;
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const getUserByEmail = async (req, res) => {
|
||||
const email = req.body.email;
|
||||
try {
|
||||
const idx = USERS.findIndex((user) => {
|
||||
return user.email === email;
|
||||
});
|
||||
if (idx === -1) {
|
||||
return null;
|
||||
}
|
||||
return USERS[idx];
|
||||
} catch (error) {
|
||||
throw new Error(`User with email ${email} not found`);
|
||||
}
|
||||
};
|
||||
|
||||
const getAllMonitors = async () => {
|
||||
return FAKE_MONITOR_DATA;
|
||||
};
|
||||
|
||||
const getMonitorById = async (monitorId) => {
|
||||
const idx = FAKE_MONITOR_DATA.findIndex((monitor) => {
|
||||
return monitor.id === monitorId;
|
||||
});
|
||||
if (idx === -1) {
|
||||
throw new Error(`Monitor with id ${monitorId} not found`);
|
||||
}
|
||||
return FAKE_MONITOR_DATA[idx];
|
||||
};
|
||||
|
||||
const getMonitorsByUserId = async (userId) => {
|
||||
const userMonitors = FAKE_MONITOR_DATA.filter((monitor) => {
|
||||
return monitor.userId === userId;
|
||||
});
|
||||
|
||||
if (userMonitors.length === 0) {
|
||||
throw new Error(`Monitors for user ${userId} not found`);
|
||||
}
|
||||
return userMonitors;
|
||||
};
|
||||
|
||||
const createMonitor = async (req, res) => {
|
||||
const monitor = new Monitor(req.body);
|
||||
monitor.createdAt = Date.now();
|
||||
monitor.updatedAt = Date.now();
|
||||
FAKE_MONITOR_DATA.push(monitor);
|
||||
return monitor;
|
||||
};
|
||||
|
||||
const deleteMonitor = async (req, res) => {
|
||||
const monitorId = req.params.monitorId;
|
||||
try {
|
||||
const monitor = await getMonitorById(monitorId);
|
||||
FAKE_MONITOR_DATA = FAKE_MONITOR_DATA.filter((monitor) => {
|
||||
return monitor.id !== monitorId;
|
||||
});
|
||||
return monitor;
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const editMonitor = async (req, res) => {
|
||||
const monitorId = req.params.monitorId;
|
||||
const idx = FAKE_MONITOR_DATA.findIndex((monitor) => {
|
||||
return monitor._id.toString() === monitorId;
|
||||
});
|
||||
const oldMonitor = FAKE_MONITOR_DATA[idx];
|
||||
const editedMonitor = new Monitor({ ...req.body });
|
||||
editedMonitor._id = oldMonitor._id;
|
||||
editedMonitor.userId = oldMonitor.userId;
|
||||
editedMonitor.updatedAt = Date.now();
|
||||
editedMonitor.createdAt = oldMonitor.createdAt;
|
||||
FAKE_MONITOR_DATA[idx] = editedMonitor;
|
||||
return FAKE_MONITOR_DATA[idx];
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
connect,
|
||||
insertUser,
|
||||
getUserByEmail,
|
||||
getAllMonitors,
|
||||
getMonitorById,
|
||||
getMonitorsByUserId,
|
||||
createMonitor,
|
||||
deleteMonitor,
|
||||
editMonitor,
|
||||
};
|
||||
@@ -1,91 +0,0 @@
|
||||
import mongoose from "mongoose";
|
||||
|
||||
const AppSettingsSchema = mongoose.Schema(
|
||||
{
|
||||
apiBaseUrl: {
|
||||
type: String,
|
||||
required: true,
|
||||
default: "http://localhost:5000/api/v1",
|
||||
},
|
||||
logLevel: {
|
||||
type: String,
|
||||
default: "debug",
|
||||
enum: ["debug", "none", "error", "warn"],
|
||||
},
|
||||
clientHost: {
|
||||
type: String,
|
||||
required: true,
|
||||
default: "http://localhost:5173",
|
||||
},
|
||||
jwtSecret: {
|
||||
type: String,
|
||||
required: true,
|
||||
default: "my_secret",
|
||||
},
|
||||
refreshTokenSecret: {
|
||||
type: String,
|
||||
required: true,
|
||||
default: "my_refresh_secret",
|
||||
},
|
||||
dbType: {
|
||||
type: String,
|
||||
required: true,
|
||||
default: "MongoDB",
|
||||
},
|
||||
dbConnectionString: {
|
||||
type: String,
|
||||
required: true,
|
||||
default: "mongodb://localhost:27017/uptime_db",
|
||||
},
|
||||
redisHost: {
|
||||
type: String,
|
||||
required: true,
|
||||
default: "127.0.0.1",
|
||||
},
|
||||
redisPort: {
|
||||
type: Number,
|
||||
default: "6379",
|
||||
},
|
||||
jwtTTL: {
|
||||
type: String,
|
||||
required: true,
|
||||
default: "2h",
|
||||
},
|
||||
refreshTokenTTL: {
|
||||
type: String,
|
||||
required: true,
|
||||
default: "7d",
|
||||
},
|
||||
pagespeedApiKey: {
|
||||
type: String,
|
||||
default: "",
|
||||
},
|
||||
systemEmailHost: {
|
||||
type: String,
|
||||
default: "smtp.gmail.com",
|
||||
},
|
||||
systemEmailPort: {
|
||||
type: Number,
|
||||
default: 465,
|
||||
},
|
||||
systemEmailAddress: {
|
||||
type: String,
|
||||
default: "",
|
||||
},
|
||||
systemEmailPassword: {
|
||||
type: String,
|
||||
default: "",
|
||||
},
|
||||
singleton: {
|
||||
type: Boolean,
|
||||
required: true,
|
||||
unique: true,
|
||||
default: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
export default mongoose.model("AppSettings", AppSettingsSchema);
|
||||
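A minimal sketch of fetching or lazily creating the singleton settings document this schema models; the helper name and import path are assumptions. The connect() method shown further down performs an equivalent check on startup.

import AppSettings from "./AppSettings.js"; // illustrative path

// Fetch the single settings document, creating it from the schema defaults if it does not exist yet.
const getOrCreateAppSettings = async () => {
    let settings = await AppSettings.findOne({ singleton: true });
    if (!settings) {
        settings = await new AppSettings({}).save();
    }
    return settings;
};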
@@ -1,83 +0,0 @@
|
||||
import mongoose from "mongoose";
|
||||
|
||||
const BaseCheckSchema = mongoose.Schema({
|
||||
/**
|
||||
* Reference to the associated Monitor document.
|
||||
*
|
||||
* @type {mongoose.Schema.Types.ObjectId}
|
||||
*/
|
||||
monitorId: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: "Monitor",
|
||||
immutable: true,
|
||||
index: true,
|
||||
},
|
||||
|
||||
teamId: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: "Team",
|
||||
immutable: true,
|
||||
index: true,
|
||||
},
|
||||
/**
|
||||
* Status of the check (true for up, false for down).
|
||||
*
|
||||
* @type {Boolean}
|
||||
*/
|
||||
status: {
|
||||
type: Boolean,
|
||||
index: true,
|
||||
},
|
||||
/**
|
||||
* Response time of the check in milliseconds.
|
||||
*
|
||||
* @type {Number}
|
||||
*/
|
||||
responseTime: {
|
||||
type: Number,
|
||||
},
|
||||
/**
|
||||
* HTTP status code received during the check.
|
||||
*
|
||||
* @type {Number}
|
||||
*/
|
||||
statusCode: {
|
||||
type: Number,
|
||||
index: true,
|
||||
},
|
||||
/**
|
||||
* Message or description of the check result.
|
||||
*
|
||||
* @type {String}
|
||||
*/
|
||||
message: {
|
||||
type: String,
|
||||
},
|
||||
/**
|
||||
* Expiry date of the check, auto-calculated to expire after 30 days.
|
||||
*
|
||||
* @type {Date}
|
||||
*/
|
||||
|
||||
expiry: {
|
||||
type: Date,
|
||||
default: Date.now,
|
||||
expires: 60 * 60 * 24 * 30, // 30 days
|
||||
},
|
||||
});
|
||||
|
||||
/**
|
||||
* Check Schema for MongoDB collection.
|
||||
*
|
||||
* Represents a check associated with a monitor, storing information
|
||||
* about the status and response of a particular check event.
|
||||
*/
|
||||
const CheckSchema = mongoose.Schema({ ...BaseCheckSchema.obj }, { timestamps: true });
|
||||
CheckSchema.index({ createdAt: 1 });
|
||||
CheckSchema.index({ monitorId: 1, createdAt: 1 });
|
||||
CheckSchema.index({ monitorId: 1, createdAt: -1 });
|
||||
CheckSchema.index({ teamId: 1, createdAt: -1 });
|
||||
CheckSchema.index({ teamId: 1 });
|
||||
|
||||
export default mongoose.model("Check", CheckSchema);
|
||||
export { BaseCheckSchema };
|
||||
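BaseCheckSchema is exported so other check models can reuse its fields, which is exactly how the hardware, pagespeed, and distributed check schemas below are built. A minimal sketch of the same composition pattern with a hypothetical check type:

import mongoose from "mongoose";
import { BaseCheckSchema } from "./Check.js";

// Hypothetical specialized check: spread the base fields and add type-specific ones.
const ExampleCheckSchema = mongoose.Schema(
    {
        ...BaseCheckSchema.obj,
        latencyBudgetMs: { type: Number, required: false },
    },
    { timestamps: true }
);

export default mongoose.model("ExampleCheck", ExampleCheckSchema);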
@@ -1,123 +0,0 @@
|
||||
import mongoose from "mongoose";
|
||||
|
||||
import { BaseCheckSchema } from "./Check.js";
|
||||
|
||||
// {
|
||||
// "id": "12123",
|
||||
// "result": {
|
||||
// "task_arrived": "2025-01-13T19:21:37.463466602Z",
|
||||
// "dns_start": "2025-01-14T00:21:33.1801319+05:00",
|
||||
// "dns_end": "2025-01-14T00:21:33.4582552+05:00",
|
||||
// "conn_start": "2025-01-14T00:21:33.1801319+05:00",
|
||||
// "conn_end": "2025-01-14T00:21:33.7076318+05:00",
|
||||
// "connect_start": "2025-01-14T00:21:33.4582552+05:00",
|
||||
// "connect_end": "2025-01-14T00:21:33.541899+05:00",
|
||||
// "tls_hand_shake_start": "2025-01-14T00:21:33.541899+05:00",
|
||||
// "tls_hand_shake_end": "2025-01-14T00:21:33.7076318+05:00",
|
||||
// "body_read_start": "2025-01-14T00:21:34.1894707+05:00",
|
||||
// "body_read_end": "2025-01-14T00:21:34.1894707+05:00",
|
||||
// "wrote_request": "2025-01-14T00:21:33.7076318+05:00",
|
||||
// "got_first_response_byte": "2025-01-14T00:21:34.1327652+05:00",
|
||||
// "first_byte_took": 425133400,
|
||||
// "body_read_took": 56030000,
|
||||
// "dns_took": 278123300,
|
||||
// "conn_took": 527499900,
|
||||
// "connect_took": 83643800,
|
||||
// "tls_took": 165732800,
|
||||
// "sni_name": "uprock.com",
|
||||
// "status_code": 200,
|
||||
// "body_size": 19320,
|
||||
// "request_header_size": 95,
|
||||
// "response_header_size": 246,
|
||||
// "response_headers": "X-Vercel-Id: bom1::iad1::sm87v-1736796096856-aec270c01f23\nDate: Mon, 13 Jan 2025 19:21:37 GMT\nServer: Vercel\nStrict-Transport-Security: max-age=63072000\nVary: RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url\nX-Matched-Path: /\nX-Powered-By: Next.js\nX-Vercel-Cache: MISS\nAge: 0\nCache-Control: private, no-cache, no-store, max-age=0, must-revalidate\nContent-Type: text/html; charset=utf-8",
|
||||
// "error": "",
|
||||
// "device_id": "d5f578e143a2cd603dd6bf5f846a86a538bde4a8fbe2ad1fca284ad9f033daf8",
|
||||
// "ip_address": "223.123.19.0",
|
||||
// "proof": "",
|
||||
// "created_at": "2025-01-13T19:21:37.463466912Z",
|
||||
// "continent": "AS",
|
||||
// "country_code": "PK",
|
||||
// "city": "Muzaffargarh",
|
||||
// "upt_burnt" : "0.01",
|
||||
// "location": {
|
||||
// "lat": 71.0968,
|
||||
// "lng": 30.0208
|
||||
// },
|
||||
// "payload": {
|
||||
// "callback": "https://webhook.site/2a15b0af-545a-4ac2-b913-153b97592d7a",
|
||||
// "x": "y"
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
const LocationSchema = new mongoose.Schema(
|
||||
{
|
||||
lat: { type: Number, required: true },
|
||||
lng: { type: Number, required: true },
|
||||
},
|
||||
{ _id: false }
|
||||
);
|
||||
|
||||
const DistributedUptimeCheckSchema = mongoose.Schema(
|
||||
{
|
||||
...BaseCheckSchema.obj,
|
||||
first_byte_took: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
body_read_took: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
dns_took: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
conn_took: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
connect_took: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
tls_took: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
location: {
|
||||
type: LocationSchema,
|
||||
required: false,
|
||||
},
|
||||
continent: {
|
||||
type: String,
|
||||
required: false,
|
||||
},
|
||||
countryCode: {
|
||||
type: String,
|
||||
required: false,
|
||||
},
|
||||
city: {
|
||||
type: String,
|
||||
required: false,
|
||||
},
|
||||
uptBurnt: {
|
||||
type: mongoose.Schema.Types.Decimal128,
|
||||
required: false,
|
||||
},
|
||||
},
|
||||
{ timestamps: true }
|
||||
);
|
||||
|
||||
DistributedUptimeCheckSchema.pre("save", function (next) {
|
||||
if (this.isModified("uptBurnt") && typeof this.uptBurnt === "string") {
|
||||
this.uptBurnt = mongoose.Types.Decimal128.fromString(this.uptBurnt);
|
||||
}
|
||||
next();
|
||||
});
|
||||
|
||||
DistributedUptimeCheckSchema.index({ createdAt: 1 });
|
||||
DistributedUptimeCheckSchema.index({ monitorId: 1, createdAt: 1 });
|
||||
DistributedUptimeCheckSchema.index({ monitorId: 1, createdAt: -1 });
|
||||
|
||||
export default mongoose.model("DistributedUptimeCheck", DistributedUptimeCheckSchema);
|
||||
@@ -1,70 +0,0 @@
|
||||
import mongoose from "mongoose";
|
||||
import { BaseCheckSchema } from "./Check.js";
|
||||
const cpuSchema = mongoose.Schema({
|
||||
physical_core: { type: Number, default: 0 },
|
||||
logical_core: { type: Number, default: 0 },
|
||||
frequency: { type: Number, default: 0 },
|
||||
temperature: { type: [Number], default: [] },
|
||||
free_percent: { type: Number, default: 0 },
|
||||
usage_percent: { type: Number, default: 0 },
|
||||
});
|
||||
|
||||
const memorySchema = mongoose.Schema({
|
||||
total_bytes: { type: Number, default: 0 },
|
||||
available_bytes: { type: Number, default: 0 },
|
||||
used_bytes: { type: Number, default: 0 },
|
||||
usage_percent: { type: Number, default: 0 },
|
||||
});
|
||||
|
||||
const diskSchema = mongoose.Schema({
|
||||
read_speed_bytes: { type: Number, default: 0 },
|
||||
write_speed_bytes: { type: Number, default: 0 },
|
||||
total_bytes: { type: Number, default: 0 },
|
||||
free_bytes: { type: Number, default: 0 },
|
||||
usage_percent: { type: Number, default: 0 },
|
||||
});
|
||||
|
||||
const hostSchema = mongoose.Schema({
|
||||
os: { type: String, default: "" },
|
||||
platform: { type: String, default: "" },
|
||||
kernel_version: { type: String, default: "" },
|
||||
});
|
||||
|
||||
const errorSchema = mongoose.Schema({
|
||||
metric: { type: [String], default: [] },
|
||||
err: { type: String, default: "" },
|
||||
});
|
||||
|
||||
const HardwareCheckSchema = mongoose.Schema(
|
||||
{
|
||||
...BaseCheckSchema.obj,
|
||||
cpu: {
|
||||
type: cpuSchema,
|
||||
default: () => ({}),
|
||||
},
|
||||
memory: {
|
||||
type: memorySchema,
|
||||
default: () => ({}),
|
||||
},
|
||||
disk: {
|
||||
type: [diskSchema],
|
||||
default: () => [],
|
||||
},
|
||||
host: {
|
||||
type: hostSchema,
|
||||
default: () => ({}),
|
||||
},
|
||||
|
||||
errors: {
|
||||
type: [errorSchema],
|
||||
default: () => [],
|
||||
},
|
||||
},
|
||||
{ timestamps: true }
|
||||
);
|
||||
|
||||
HardwareCheckSchema.index({ createdAt: 1 });
|
||||
HardwareCheckSchema.index({ monitorId: 1, createdAt: 1 });
|
||||
HardwareCheckSchema.index({ monitorId: 1, createdAt: -1 });
|
||||
|
||||
export default mongoose.model("HardwareCheck", HardwareCheckSchema);
|
||||
@@ -1,34 +0,0 @@
|
||||
import mongoose from "mongoose";
|
||||
const InviteTokenSchema = mongoose.Schema(
|
||||
{
|
||||
email: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true,
|
||||
},
|
||||
teamId: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: "Team",
|
||||
immutable: true,
|
||||
required: true,
|
||||
},
|
||||
role: {
|
||||
type: Array,
|
||||
required: true,
|
||||
},
|
||||
token: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
expiry: {
|
||||
type: Date,
|
||||
default: Date.now,
|
||||
expires: 3600,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
export default mongoose.model("InviteToken", InviteTokenSchema);
|
||||
@@ -1,68 +0,0 @@
|
||||
import mongoose from "mongoose";
|
||||
/**
|
||||
* MaintenanceWindow Schema
|
||||
* @module MaintenanceWindow
|
||||
* @typedef {Object} MaintenanceWindow
|
||||
* @property {mongoose.Schema.Types.ObjectId} monitorId - The ID of the monitor. This is a reference to the Monitor model and is immutable.
|
||||
* @property {Boolean} active - Indicates whether the maintenance window is active.
|
||||
* @property {Number} repeat - Indicates how often this maintenance window should repeat.
|
||||
* @property {Date} start - The start date and time of the maintenance window.
|
||||
* @property {Date} end - The end date and time of the maintenance window.
|
||||
* @property {Date} expiry - The expiry date and time of the maintenance window. This is used for MongoDB's TTL index to automatically delete the document at this time. This field is set to the same value as `end` when `oneTime` is `true`.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* let maintenanceWindow = new MaintenanceWindow({
|
||||
* monitorId: monitorId,
|
||||
* active: active,
|
||||
* repeat: repeat,
|
||||
* start: start,
|
||||
* end: end,
|
||||
* });
|
||||
*
|
||||
* if (repeat === 0) {
|
||||
* maintenanceWindow.expiry = end;
|
||||
* }
|
||||
*
|
||||
*/
|
||||
|
||||
const MaintenanceWindow = mongoose.Schema(
|
||||
{
|
||||
monitorId: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: "Monitor",
|
||||
immutable: true,
|
||||
},
|
||||
teamId: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: "Team",
|
||||
immutable: true,
|
||||
},
|
||||
active: {
|
||||
type: Boolean,
|
||||
default: true,
|
||||
},
|
||||
name: {
|
||||
type: String,
|
||||
},
|
||||
repeat: {
|
||||
type: Number,
|
||||
},
|
||||
start: {
|
||||
type: Date,
|
||||
},
|
||||
end: {
|
||||
type: Date,
|
||||
},
|
||||
expiry: {
|
||||
type: Date,
|
||||
index: { expires: "0s" },
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
export default mongoose.model("MaintenanceWindow", MaintenanceWindow);
|
||||
@@ -1,98 +0,0 @@
|
||||
import mongoose from "mongoose";
|
||||
|
||||
const MonitorSchema = mongoose.Schema(
|
||||
{
|
||||
userId: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: "User",
|
||||
immutable: true,
|
||||
required: true,
|
||||
},
|
||||
teamId: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: "Team",
|
||||
immutable: true,
|
||||
required: true,
|
||||
},
|
||||
name: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
description: {
|
||||
type: String,
|
||||
},
|
||||
status: {
|
||||
type: Boolean,
|
||||
default: undefined,
|
||||
},
|
||||
type: {
|
||||
type: String,
|
||||
required: true,
|
||||
enum: [
|
||||
"http",
|
||||
"ping",
|
||||
"pagespeed",
|
||||
"hardware",
|
||||
"docker",
|
||||
"port",
|
||||
"distributed_http",
|
||||
],
|
||||
},
|
||||
jsonPath: {
|
||||
type: String,
|
||||
},
|
||||
expectedValue: {
|
||||
type: String,
|
||||
},
|
||||
matchMethod: {
|
||||
type: String,
|
||||
enum: [
|
||||
"equal",
|
||||
"include",
|
||||
"regex",
|
||||
],
|
||||
},
|
||||
url: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
port: {
|
||||
type: Number,
|
||||
},
|
||||
isActive: {
|
||||
type: Boolean,
|
||||
default: true,
|
||||
},
|
||||
interval: {
|
||||
// in milliseconds
|
||||
type: Number,
|
||||
default: 60000,
|
||||
},
|
||||
uptimePercentage: {
|
||||
type: Number,
|
||||
default: undefined,
|
||||
},
|
||||
thresholds: {
|
||||
type: {
|
||||
usage_cpu: { type: Number },
|
||||
usage_memory: { type: Number },
|
||||
usage_disk: { type: Number },
|
||||
},
|
||||
_id: false,
|
||||
},
|
||||
notifications: [
|
||||
{
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: "Notification",
|
||||
},
|
||||
],
|
||||
secret: {
|
||||
type: String,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
export default mongoose.model("Monitor", MonitorSchema);
|
||||
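A short sketch of creating a monitor with the fields the schema above marks as required; the ObjectIds are placeholders.

import mongoose from "mongoose";
import Monitor from "./Monitor.js"; // illustrative path

const userId = new mongoose.Types.ObjectId(); // placeholder IDs for illustration
const teamId = new mongoose.Types.ObjectId();

const monitor = await Monitor.create({
    userId,
    teamId,
    name: "Example site",
    type: "http",
    url: "https://example.com",
    interval: 60000, // stored in milliseconds, i.e. one check per minute
});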
@@ -1,91 +0,0 @@
|
||||
import mongoose from "mongoose";
|
||||
|
||||
const configSchema = mongoose.Schema({
|
||||
webhookUrl: { type: String },
|
||||
botToken: { type: String },
|
||||
chatId: { type: String }
|
||||
}, { _id: false });
|
||||
|
||||
const NotificationSchema = mongoose.Schema(
|
||||
{
|
||||
monitorId: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: "Monitor",
|
||||
immutable: true,
|
||||
},
|
||||
type: {
|
||||
type: String,
|
||||
enum: ["email", "sms", "webhook"],
|
||||
},
|
||||
config: {
|
||||
type: configSchema,
|
||||
default: () => ({})
|
||||
},
|
||||
address: {
|
||||
type: String,
|
||||
},
|
||||
phone: {
|
||||
type: String,
|
||||
},
|
||||
alertThreshold: {
|
||||
type: Number,
|
||||
default: 5,
|
||||
},
|
||||
cpuAlertThreshold: {
|
||||
type: Number,
|
||||
default: function () {
|
||||
return this.alertThreshold;
|
||||
},
|
||||
},
|
||||
memoryAlertThreshold: {
|
||||
type: Number,
|
||||
default: function () {
|
||||
return this.alertThreshold;
|
||||
},
|
||||
},
|
||||
diskAlertThreshold: {
|
||||
type: Number,
|
||||
default: function () {
|
||||
return this.alertThreshold;
|
||||
},
|
||||
},
|
||||
tempAlertThreshold: {
|
||||
type: Number,
|
||||
default: function () {
|
||||
return this.alertThreshold;
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
NotificationSchema.pre("save", function (next) {
|
||||
if (!this.cpuAlertThreshold || this.isModified("alertThreshold")) {
|
||||
this.cpuAlertThreshold = this.alertThreshold;
|
||||
}
|
||||
if (!this.memoryAlertThreshold || this.isModified("alertThreshold")) {
|
||||
this.memoryAlertThreshold = this.alertThreshold;
|
||||
}
|
||||
if (!this.diskAlertThreshold || this.isModified("alertThreshold")) {
|
||||
this.diskAlertThreshold = this.alertThreshold;
|
||||
}
|
||||
if (!this.tempAlertThreshold || this.isModified("alertThreshold")) {
|
||||
this.tempAlertThreshold = this.alertThreshold;
|
||||
}
|
||||
next();
|
||||
});
|
||||
|
||||
NotificationSchema.pre("findOneAndUpdate", function (next) {
|
||||
const update = this.getUpdate();
|
||||
if (update.alertThreshold) {
|
||||
update.cpuAlertThreshold = update.alertThreshold;
|
||||
update.memoryAlertThreshold = update.alertThreshold;
|
||||
update.diskAlertThreshold = update.alertThreshold;
|
||||
update.tempAlertThreshold = update.alertThreshold;
|
||||
}
|
||||
next();
|
||||
});
|
||||
|
||||
export default mongoose.model("Notification", NotificationSchema);
|
||||
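A small sketch showing the intent of the hooks above: the per-metric thresholds follow alertThreshold unless they are set explicitly.

import Notification from "./Notification.js"; // illustrative path

const notification = new Notification({ type: "email", address: "ops@example.com", alertThreshold: 3 });
await notification.save();
// After the pre-save hook runs, the cpu/memory/disk/temp thresholds all mirror alertThreshold (3)
// unless they were provided explicitly.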
@@ -1,111 +0,0 @@
|
||||
import mongoose from "mongoose";
|
||||
import { BaseCheckSchema } from "./Check.js";
|
||||
import logger from "../../utils/logger.js";
|
||||
import { time } from "console";
|
||||
const AuditSchema = mongoose.Schema({
|
||||
id: { type: String, required: true },
|
||||
title: { type: String, required: true },
|
||||
description: { type: String, required: true },
|
||||
score: { type: Number, required: true },
|
||||
scoreDisplayMode: { type: String, required: true },
|
||||
displayValue: { type: String, required: true },
|
||||
numericValue: { type: Number, required: true },
|
||||
numericUnit: { type: String, required: true },
|
||||
});
|
||||
|
||||
const AuditsSchema = mongoose.Schema({
|
||||
cls: {
|
||||
type: AuditSchema,
|
||||
required: true,
|
||||
},
|
||||
si: {
|
||||
type: AuditSchema,
|
||||
required: true,
|
||||
},
|
||||
fcp: {
|
||||
type: AuditSchema,
|
||||
required: true,
|
||||
},
|
||||
lcp: {
|
||||
type: AuditSchema,
|
||||
required: true,
|
||||
},
|
||||
tbt: {
|
||||
type: AuditSchema,
|
||||
required: true,
|
||||
},
|
||||
});
|
||||
|
||||
/**
|
||||
* Mongoose schema for storing metrics from Google Lighthouse.
|
||||
* @typedef {Object} PageSpeedCheck
|
||||
* @property {mongoose.Schema.Types.ObjectId} monitorId - Reference to the Monitor model.
|
||||
* @property {number} accessibility - Accessibility score.
|
||||
* @property {number} bestPractices - Best practices score.
|
||||
* @property {number} seo - SEO score.
|
||||
* @property {number} performance - Performance score.
|
||||
*/
|
||||
|
||||
const PageSpeedCheck = mongoose.Schema(
|
||||
{
|
||||
...BaseCheckSchema.obj,
|
||||
accessibility: {
|
||||
type: Number,
|
||||
required: true,
|
||||
},
|
||||
bestPractices: {
|
||||
type: Number,
|
||||
required: true,
|
||||
},
|
||||
seo: {
|
||||
type: Number,
|
||||
required: true,
|
||||
},
|
||||
performance: {
|
||||
type: Number,
|
||||
required: true,
|
||||
},
|
||||
audits: {
|
||||
type: AuditsSchema,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
{ timestamps: true }
|
||||
);
|
||||
|
||||
/**
|
||||
* Mongoose model for storing metrics from Google Lighthouse.
|
||||
* @typedef {mongoose.Model<PageSpeedCheck>} LighthouseMetricsModel
|
||||
*/
|
||||
|
||||
PageSpeedCheck.pre("save", async function (next) {
|
||||
try {
|
||||
const monitor = await mongoose.model("Monitor").findById(this.monitorId);
|
||||
if (monitor && monitor.status !== this.status) {
|
||||
if (monitor.status === true && this.status === false) {
|
||||
logger.info({ message: "Monitor went down", monitorId: monitor._id });
|
||||
}
|
||||
|
||||
if (monitor.status === false && this.status === true) {
|
||||
logger.info({ message: "Monitor went up", monitorId: monitor._id });
|
||||
}
|
||||
monitor.status = this.status;
|
||||
await monitor.save();
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
message: error.message,
|
||||
service: "PageSpeedCheck",
|
||||
method: "pre-save",
|
||||
stack: error.stack,
|
||||
});
|
||||
} finally {
|
||||
next();
|
||||
}
|
||||
});
|
||||
|
||||
PageSpeedCheck.index({ createdAt: 1 });
|
||||
PageSpeedCheck.index({ monitorId: 1, createdAt: 1 });
|
||||
PageSpeedCheck.index({ monitorId: 1, createdAt: -1 });
|
||||
|
||||
export default mongoose.model("PageSpeedCheck", PageSpeedCheck);
|
||||
@@ -1,25 +0,0 @@
|
||||
import mongoose from "mongoose";
|
||||
|
||||
const RecoveryTokenSchema = mongoose.Schema(
|
||||
{
|
||||
email: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true,
|
||||
},
|
||||
token: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
expiry: {
|
||||
type: Date,
|
||||
default: Date.now,
|
||||
expires: 600,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
export default mongoose.model("RecoveryToken", RecoveryTokenSchema);
|
||||
@@ -1,70 +0,0 @@
|
||||
import mongoose from "mongoose";
|
||||
|
||||
const StatusPageSchema = mongoose.Schema(
|
||||
{
|
||||
userId: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: "User",
|
||||
immutable: true,
|
||||
required: true,
|
||||
},
|
||||
teamId: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: "Team",
|
||||
immutable: true,
|
||||
required: true,
|
||||
},
|
||||
type: {
|
||||
type: String,
|
||||
required: true,
|
||||
default: "uptime",
|
||||
enum: ["uptime", "distributed"],
|
||||
},
|
||||
companyName: {
|
||||
type: String,
|
||||
required: true,
|
||||
default: "",
|
||||
},
|
||||
url: {
|
||||
type: String,
|
||||
unique: true,
|
||||
required: true,
|
||||
default: "",
|
||||
},
|
||||
timezone: {
|
||||
type: String,
|
||||
required: false,
|
||||
},
|
||||
color: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: "#4169E1",
|
||||
},
|
||||
monitors: [
|
||||
{
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: "Monitor",
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
logo: {
|
||||
data: Buffer,
|
||||
contentType: String,
|
||||
},
|
||||
isPublished: {
|
||||
type: Boolean,
|
||||
default: false,
|
||||
},
|
||||
showCharts: {
|
||||
type: Boolean,
|
||||
default: true,
|
||||
},
|
||||
showUptimePercentage: {
|
||||
type: Boolean,
|
||||
default: true,
|
||||
},
|
||||
},
|
||||
{ timestamps: true }
|
||||
);
|
||||
|
||||
export default mongoose.model("StatusPage", StatusPageSchema);
|
||||
@@ -1,14 +0,0 @@
|
||||
import mongoose from "mongoose";
|
||||
const TeamSchema = mongoose.Schema(
|
||||
{
|
||||
email: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
export default mongoose.model("Team", TeamSchema);
|
||||
@@ -1,92 +0,0 @@
|
||||
import mongoose from "mongoose";
|
||||
import bcrypt from "bcrypt";
|
||||
import logger from "../../utils/logger.js";
|
||||
|
||||
const UserSchema = mongoose.Schema(
|
||||
{
|
||||
firstName: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
lastName: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
email: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true,
|
||||
},
|
||||
password: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
avatarImage: {
|
||||
type: String,
|
||||
},
|
||||
profileImage: {
|
||||
data: Buffer,
|
||||
contentType: String,
|
||||
},
|
||||
isActive: {
|
||||
type: Boolean,
|
||||
default: true,
|
||||
},
|
||||
isVerified: {
|
||||
type: Boolean,
|
||||
default: false,
|
||||
},
|
||||
role: {
|
||||
type: [String],
|
||||
default: "user",
|
||||
enum: ["user", "admin", "superadmin", "demo"],
|
||||
},
|
||||
teamId: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: "Team",
|
||||
immutable: true,
|
||||
},
|
||||
checkTTL: {
|
||||
type: Number,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
UserSchema.pre("save", async function (next) {
|
||||
if (!this.isModified("password")) {
|
||||
return next(); // skip re-hashing when the password was not modified
|
||||
}
|
||||
const salt = await bcrypt.genSalt(10); //genSalt is asynchronous, need to wait
|
||||
this.password = await bcrypt.hash(this.password, salt); // hash is also async, need to either await or use hashSync
|
||||
next();
|
||||
});
|
||||
|
||||
UserSchema.pre("findOneAndUpdate", async function (next) {
|
||||
const update = this.getUpdate();
|
||||
if ("password" in update) {
|
||||
const salt = await bcrypt.genSalt(10); //genSalt is asynchronous, need to wait
|
||||
update.password = await bcrypt.hash(update.password, salt); // hash is also async, need to either await or use hashSync
|
||||
}
|
||||
|
||||
next();
|
||||
});
|
||||
|
||||
UserSchema.methods.comparePassword = async function (submittedPassword) {
|
||||
const res = await bcrypt.compare(submittedPassword, this.password);
|
||||
return res;
|
||||
};
|
||||
|
||||
const User = mongoose.model("User", UserSchema);
|
||||
|
||||
User.init().then(() => {
|
||||
logger.info({
|
||||
message: "User model initialized",
|
||||
service: "UserModel",
|
||||
method: "init",
|
||||
});
|
||||
});
|
||||
|
||||
export default mongoose.model("User", UserSchema);
|
||||
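A minimal sketch of how the comparePassword instance method above might be used when verifying a login; the helper and its error handling are illustrative only.

import User from "./User.js"; // illustrative path

// Hypothetical credential check built on the instance method defined above.
const verifyCredentials = async (email, submittedPassword) => {
    const user = await User.findOne({ email });
    if (!user) return false;
    return user.comparePassword(submittedPassword); // bcrypt.compare under the hood
};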
@@ -1,147 +0,0 @@
|
||||
import mongoose from "mongoose";
|
||||
import UserModel from "../models/User.js";
|
||||
import AppSettings from "../models/AppSettings.js";
|
||||
import logger from "../../utils/logger.js";
|
||||
|
||||
//****************************************
|
||||
// User Operations
|
||||
//****************************************
|
||||
|
||||
import * as userModule from "./modules/userModule.js";
|
||||
|
||||
//****************************************
|
||||
// Invite Token Operations
|
||||
//****************************************
|
||||
|
||||
import * as inviteModule from "./modules/inviteModule.js";
|
||||
|
||||
//****************************************
|
||||
// Recovery Operations
|
||||
//****************************************
|
||||
import * as recoveryModule from "./modules/recoveryModule.js";
|
||||
|
||||
//****************************************
|
||||
// Monitors
|
||||
//****************************************
|
||||
|
||||
import * as monitorModule from "./modules/monitorModule.js";
|
||||
|
||||
//****************************************
|
||||
// Page Speed Checks
|
||||
//****************************************
|
||||
|
||||
import * as pageSpeedCheckModule from "./modules/pageSpeedCheckModule.js";
|
||||
|
||||
//****************************************
|
||||
// Hardware Checks
|
||||
//****************************************
|
||||
import * as hardwareCheckModule from "./modules/hardwareCheckModule.js";
|
||||
|
||||
//****************************************
|
||||
// Checks
|
||||
//****************************************
|
||||
|
||||
import * as checkModule from "./modules/checkModule.js";
|
||||
|
||||
//****************************************
|
||||
// Distributed Checks
|
||||
//****************************************
|
||||
import * as distributedCheckModule from "./modules/distributedCheckModule.js";
|
||||
|
||||
//****************************************
|
||||
// Maintenance Window
|
||||
//****************************************
|
||||
import * as maintenanceWindowModule from "./modules/maintenanceWindowModule.js";
|
||||
|
||||
//****************************************
|
||||
// Notifications
|
||||
//****************************************
|
||||
import * as notificationModule from "./modules/notificationModule.js";
|
||||
|
||||
//****************************************
|
||||
// AppSettings
|
||||
//****************************************
|
||||
import * as settingsModule from "./modules/settingsModule.js";
|
||||
|
||||
//****************************************
|
||||
// Status Page
|
||||
//****************************************
|
||||
import * as statusPageModule from "./modules/statusPageModule.js";
|
||||
|
||||
class MongoDB {
|
||||
static SERVICE_NAME = "MongoDB";
|
||||
|
||||
constructor() {
|
||||
Object.assign(this, userModule);
|
||||
Object.assign(this, inviteModule);
|
||||
Object.assign(this, recoveryModule);
|
||||
Object.assign(this, monitorModule);
|
||||
Object.assign(this, pageSpeedCheckModule);
|
||||
Object.assign(this, hardwareCheckModule);
|
||||
Object.assign(this, checkModule);
|
||||
Object.assign(this, distributedCheckModule);
|
||||
Object.assign(this, maintenanceWindowModule);
|
||||
Object.assign(this, notificationModule);
|
||||
Object.assign(this, settingsModule);
|
||||
Object.assign(this, statusPageModule);
|
||||
}
|
||||
|
||||
connect = async () => {
|
||||
try {
|
||||
const connectionString =
|
||||
process.env.DB_CONNECTION_STRING || "mongodb://localhost:27017/uptime_db";
|
||||
await mongoose.connect(connectionString);
|
||||
// If there are no AppSettings, create one
|
||||
let appSettings = await AppSettings.find();
|
||||
if (appSettings.length === 0) {
|
||||
appSettings = new AppSettings({});
|
||||
await appSettings.save();
|
||||
}
|
||||
// Sync indexes
|
||||
const models = mongoose.modelNames();
|
||||
for (const modelName of models) {
|
||||
const model = mongoose.model(modelName);
|
||||
await model.syncIndexes();
|
||||
}
|
||||
|
||||
logger.info({
|
||||
message: "Connected to MongoDB",
|
||||
service: this.SERVICE_NAME,
|
||||
method: "connect",
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
message: error.message,
|
||||
service: this.SERVICE_NAME,
|
||||
method: "connect",
|
||||
stack: error.stack,
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
disconnect = async () => {
|
||||
try {
|
||||
logger.info({ message: "Disconnecting from MongoDB" });
|
||||
await mongoose.disconnect();
|
||||
logger.info({ message: "Disconnected from MongoDB" });
|
||||
return;
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
message: error.message,
|
||||
service: this.SERVICE_NAME,
|
||||
method: "disconnect",
|
||||
stack: error.stack,
|
||||
});
|
||||
}
|
||||
};
|
||||
checkSuperadmin = async (req, res) => {
|
||||
const superAdmin = await UserModel.findOne({ role: "superadmin" });
|
||||
if (superAdmin !== null) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
}
|
||||
|
||||
export default MongoDB;
|
||||
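A short sketch of using the class above; because the constructor mixes the imported modules onto the instance with Object.assign, their functions are called directly on the db object. The import path is illustrative.

import MongoDB from "./MongoDB.js"; // illustrative path

const db = new MongoDB();
await db.connect(); // uses DB_CONNECTION_STRING or the local default shown above

const hasSuperadmin = await db.checkSuperadmin();
// Module functions are available on the instance as well, e.g.:
// await db.createCheck({ monitorId, status: true, responseTime: 120, statusCode: 200, message: "OK" });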
@@ -1,301 +0,0 @@
|
||||
import Check from "../../models/Check.js";
|
||||
import Monitor from "../../models/Monitor.js";
|
||||
import User from "../../models/User.js";
|
||||
import logger from "../../../utils/logger.js";
|
||||
import { ObjectId } from "mongodb";
|
||||
|
||||
const SERVICE_NAME = "checkModule";
|
||||
const dateRangeLookup = {
|
||||
hour: new Date(new Date().setHours(new Date().getHours() - 1)),
|
||||
day: new Date(new Date().setDate(new Date().getDate() - 1)),
|
||||
week: new Date(new Date().setDate(new Date().getDate() - 7)),
|
||||
month: new Date(new Date().setMonth(new Date().getMonth() - 1)),
|
||||
all: undefined,
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a check for a monitor
|
||||
* @async
|
||||
* @param {Object} checkData
|
||||
* @param {string} checkData.monitorId
|
||||
* @param {boolean} checkData.status
|
||||
* @param {number} checkData.responseTime
|
||||
* @param {number} checkData.statusCode
|
||||
* @param {string} checkData.message
|
||||
* @returns {Promise<Check>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
|
||||
const createCheck = async (checkData) => {
|
||||
try {
|
||||
const { monitorId, status } = checkData;
|
||||
const n = (await Check.countDocuments({ monitorId })) + 1;
|
||||
const check = await new Check({ ...checkData }).save();
|
||||
const monitor = await Monitor.findById(monitorId);
|
||||
|
||||
if (!monitor) {
|
||||
logger.error({
|
||||
message: "Monitor not found",
|
||||
service: SERVICE_NAME,
|
||||
method: "createCheck",
|
||||
details: `monitor ID: ${monitorId}`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Update uptime percentage
|
||||
if (monitor.uptimePercentage === undefined) {
|
||||
monitor.uptimePercentage = status === true ? 1 : 0;
|
||||
} else {
|
||||
monitor.uptimePercentage =
|
||||
(monitor.uptimePercentage * (n - 1) + (status === true ? 1 : 0)) / n;
|
||||
}
|
||||
|
||||
await monitor.save();
|
||||
return check;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "createCheck";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
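The uptime update above is an incremental (running) average: the previous percentage is weighted by (n - 1)/n and the newest sample by 1/n, so no historical checks need to be re-read. A tiny worked example:

// newAvg = (oldAvg * (n - 1) + sample) / n
// With 3 prior checks all up (oldAvg = 1) and a 4th check that is down (sample = 0):
const oldAvg = 1;
const n = 4;
const sample = 0;
const newAvg = (oldAvg * (n - 1) + sample) / n; // 0.75, i.e. 3 of 4 checks up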
|
||||
/**
|
||||
* Get all checks for a monitor
|
||||
* @async
|
||||
* @param {string} monitorId
|
||||
* @returns {Promise<Array<Check>>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const getChecksByMonitor = async (req) => {
|
||||
try {
|
||||
const { monitorId } = req.params;
|
||||
let { sortOrder, dateRange, filter, page, rowsPerPage, status } = req.query;
|
||||
status = typeof status !== "undefined" ? false : undefined;
|
||||
page = parseInt(page);
|
||||
rowsPerPage = parseInt(rowsPerPage);
|
||||
// Match
|
||||
const matchStage = {
|
||||
monitorId: ObjectId.createFromHexString(monitorId),
|
||||
...(typeof status !== "undefined" && { status }),
|
||||
...(dateRangeLookup[dateRange] && {
|
||||
createdAt: {
|
||||
$gte: dateRangeLookup[dateRange],
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
if (filter !== undefined) {
|
||||
switch (filter) {
|
||||
case "all":
|
||||
break;
|
||||
case "down":
|
||||
break;
|
||||
case "resolve":
|
||||
matchStage.statusCode = 5000;
|
||||
break;
|
||||
default:
|
||||
logger.warn({
|
||||
message: "invalid filter",
|
||||
service: SERVICE_NAME,
|
||||
method: "getChecks",
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
//Sort
|
||||
sortOrder = sortOrder === "asc" ? 1 : -1;
|
||||
|
||||
// Pagination
|
||||
let skip = 0;
|
||||
if (page && rowsPerPage) {
|
||||
skip = page * rowsPerPage;
|
||||
}
|
||||
const checks = await Check.aggregate([
|
||||
{ $match: matchStage },
|
||||
{ $sort: { createdAt: sortOrder } },
|
||||
{
|
||||
$facet: {
|
||||
summary: [{ $count: "checksCount" }],
|
||||
checks: [{ $skip: skip }, { $limit: rowsPerPage }],
|
||||
},
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
checksCount: {
|
||||
$ifNull: [{ $arrayElemAt: ["$summary.checksCount", 0] }, 0],
|
||||
},
|
||||
checks: {
|
||||
$ifNull: ["$checks", []],
|
||||
},
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
return checks[0];
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getChecks";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const getChecksByTeam = async (req) => {
|
||||
try {
|
||||
let { sortOrder, dateRange, filter, page, rowsPerPage } = req.query;
|
||||
page = parseInt(page);
|
||||
rowsPerPage = parseInt(rowsPerPage);
|
||||
const { teamId } = req.params;
|
||||
const matchStage = {
|
||||
teamId: ObjectId.createFromHexString(teamId),
|
||||
status: false,
|
||||
...(dateRangeLookup[dateRange] && {
|
||||
createdAt: {
|
||||
$gte: dateRangeLookup[dateRange],
|
||||
},
|
||||
}),
|
||||
};
|
||||
// Add filter to match stage
|
||||
if (filter !== undefined) {
|
||||
switch (filter) {
|
||||
case "all":
|
||||
break;
|
||||
case "down":
|
||||
break;
|
||||
case "resolve":
|
||||
matchStage.statusCode = 5000;
|
||||
break;
|
||||
default:
|
||||
logger.warn({
|
||||
message: "invalid filter",
|
||||
service: SERVICE_NAME,
|
||||
method: "getChecksByTeam",
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
sortOrder = sortOrder === "asc" ? 1 : -1;
|
||||
|
||||
// pagination
|
||||
let skip = 0;
|
||||
if (page && rowsPerPage) {
|
||||
skip = page * rowsPerPage;
|
||||
}
|
||||
|
||||
const checks = await Check.aggregate([
|
||||
{ $match: matchStage },
|
||||
{ $sort: { createdAt: sortOrder } },
|
||||
|
||||
{
|
||||
$facet: {
|
||||
summary: [{ $count: "checksCount" }],
|
||||
checks: [{ $skip: skip }, { $limit: rowsPerPage }],
|
||||
},
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
checksCount: { $arrayElemAt: ["$summary.checksCount", 0] },
|
||||
checks: "$checks",
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
return checks[0];
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getChecksByTeam";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete all checks for a monitor
|
||||
* @async
|
||||
* @param {string} monitorId
|
||||
* @returns {number}
|
||||
* @throws {Error}
|
||||
*/
|
||||
|
||||
const deleteChecks = async (monitorId) => {
|
||||
try {
|
||||
const result = await Check.deleteMany({ monitorId });
|
||||
return result.deletedCount;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteChecks";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete all checks for a team
|
||||
* @async
|
||||
* @param {string} teamId
|
||||
* @returns {number}
|
||||
* @throws {Error}
|
||||
*/
|
||||
|
||||
const deleteChecksByTeamId = async (teamId) => {
|
||||
try {
|
||||
const teamMonitors = await Monitor.find({ teamId: teamId });
|
||||
let totalDeletedCount = 0;
|
||||
|
||||
await Promise.all(
|
||||
teamMonitors.map(async (monitor) => {
|
||||
const result = await Check.deleteMany({ monitorId: monitor._id });
|
||||
totalDeletedCount += result.deletedCount;
|
||||
monitor.status = true;
|
||||
await monitor.save();
|
||||
})
|
||||
);
|
||||
|
||||
return totalDeletedCount;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteChecksByTeamId";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const updateChecksTTL = async (teamId, ttl) => {
|
||||
try {
|
||||
await Check.collection.dropIndex("expiry_1");
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
message: error.message,
|
||||
service: SERVICE_NAME,
|
||||
method: "updateChecksTTL",
|
||||
stack: error.stack,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
await Check.collection.createIndex(
|
||||
{ expiry: 1 },
|
||||
{ expireAfterSeconds: ttl } // TTL in seconds, adjust as necessary
|
||||
);
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "updateChecksTTL";
|
||||
throw error;
|
||||
}
|
||||
// Update user
|
||||
try {
|
||||
await User.updateMany({ teamId: teamId }, { checkTTL: ttl });
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "updateChecksTTL";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export {
|
||||
createCheck,
|
||||
getChecksByMonitor,
|
||||
getChecksByTeam,
|
||||
deleteChecks,
|
||||
deleteChecksByTeamId,
|
||||
updateChecksTTL,
|
||||
};
|
||||
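Because of the $facet/$project stages, getChecksByMonitor resolves to a single object rather than a bare array. A hedged usage sketch; the request shape and the ObjectId string are illustrative.

import { getChecksByMonitor } from "./checkModule.js"; // illustrative path

const req = {
    params: { monitorId: "6650f0a1b2c3d4e5f6a7b8c9" }, // any valid 24-character hex ObjectId
    query: { sortOrder: "desc", dateRange: "day", page: "0", rowsPerPage: "25" },
};
const { checksCount, checks } = await getChecksByMonitor(req);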
@@ -1,26 +0,0 @@
|
||||
import DistributedUptimeCheck from "../../models/DistributedUptimeCheck.js";
|
||||
const SERVICE_NAME = "distributedCheckModule";
|
||||
|
||||
const createDistributedCheck = async (checkData) => {
|
||||
try {
|
||||
const check = await new DistributedUptimeCheck({ ...checkData }).save();
|
||||
return check;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "createCheck";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const deleteDistributedChecksByMonitorId = async (monitorId) => {
|
||||
try {
|
||||
const result = await DistributedUptimeCheck.deleteMany({ monitorId });
|
||||
return result.deletedCount;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteDistributedChecksByMonitorId";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export { createDistributedCheck, deleteDistributedChecksByMonitorId };
|
||||
@@ -1,58 +0,0 @@
|
||||
import HardwareCheck from "../../models/HardwareCheck.js";
|
||||
import Monitor from "../../models/Monitor.js";
|
||||
import logger from "../../../utils/logger.js";
|
||||
|
||||
const SERVICE_NAME = "hardwareCheckModule";
|
||||
const createHardwareCheck = async (hardwareCheckData) => {
|
||||
try {
|
||||
const { monitorId, status } = hardwareCheckData;
|
||||
const n = (await HardwareCheck.countDocuments({ monitorId })) + 1;
|
||||
const monitor = await Monitor.findById(monitorId);
|
||||
|
||||
if (!monitor) {
|
||||
logger.error({
|
||||
message: "Monitor not found",
|
||||
service: SERVICE_NAME,
|
||||
method: "createHardwareCheck",
|
||||
details: `monitor ID: ${monitorId}`,
|
||||
});
|
||||
return null;
|
||||
}
|
||||
|
||||
let newUptimePercentage;
|
||||
if (monitor.uptimePercentage === undefined) {
|
||||
newUptimePercentage = status === true ? 1 : 0;
|
||||
} else {
|
||||
newUptimePercentage =
|
||||
(monitor.uptimePercentage * (n - 1) + (status === true ? 1 : 0)) / n;
|
||||
}
|
||||
|
||||
await Monitor.findOneAndUpdate(
|
||||
{ _id: monitorId },
|
||||
{ uptimePercentage: newUptimePercentage }
|
||||
);
|
||||
|
||||
const hardwareCheck = await new HardwareCheck({
|
||||
...hardwareCheckData,
|
||||
}).save();
|
||||
return hardwareCheck;
|
||||
} catch (error) {
|
||||
console.log("error creating hardware check", error);
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "createHardwareCheck";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const deleteHardwareChecksByMonitorId = async (monitorId) => {
|
||||
try {
|
||||
const result = await HardwareCheck.deleteMany({ monitorId });
|
||||
return result.deletedCount;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteHardwareChecksByMonitorId";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export { createHardwareCheck, deleteHardwareChecksByMonitorId };
|
||||
@@ -1,89 +0,0 @@
|
||||
import InviteToken from "../../models/InviteToken.js";
|
||||
import crypto from "crypto";
|
||||
import ServiceRegistry from "../../../service/serviceRegistry.js";
|
||||
import StringService from "../../../service/stringService.js";
|
||||
|
||||
const SERVICE_NAME = "inviteModule";
|
||||
/**
|
||||
* Request an invite token for a user.
|
||||
*
|
||||
* This function deletes any existing invite tokens for the user's email,
|
||||
* generates a new token, saves it, and then returns the new token.
|
||||
*
|
||||
* @param {Object} userData - The user data.
|
||||
* @param {string} userData.email - The user's email.
|
||||
* @param {mongoose.Schema.Types.ObjectId} userData.teamId - The ID of the team.
|
||||
* @param {Array} userData.role - The user's role(s).
|
||||
* @param {Date} [userData.expiry=Date.now] - The expiry date of the token. Defaults to the current date and time.
|
||||
* @returns {Promise<InviteToken>} The invite token.
|
||||
* @throws {Error} If there is an error.
|
||||
*/
|
||||
const requestInviteToken = async (userData) => {
|
||||
try {
|
||||
await InviteToken.deleteMany({ email: userData.email });
|
||||
userData.token = crypto.randomBytes(32).toString("hex");
|
||||
let inviteToken = new InviteToken(userData);
|
||||
await inviteToken.save();
|
||||
return inviteToken;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "requestInviteToken";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieves an invite token
|
||||
*
|
||||
* This function searches for an invite token in the database and deletes it.
|
||||
* If the invite token is not found, it throws an error.
|
||||
*
|
||||
* @param {string} token - The invite token to search for.
|
||||
* @returns {Promise<InviteToken>} The invite token data.
|
||||
* @throws {Error} If the invite token is not found or there is another error.
|
||||
*/
|
||||
const getInviteToken = async (token) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
try {
|
||||
const invite = await InviteToken.findOne({
|
||||
token,
|
||||
});
|
||||
if (invite === null) {
|
||||
throw new Error(stringService.authInviteNotFound);
|
||||
}
|
||||
return invite;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getInviteToken";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieves and deletes an invite token
|
||||
*
|
||||
* This function searches for an invite token in the database and deletes it.
|
||||
* If the invite token is not found, it throws an error.
|
||||
*
|
||||
* @param {string} token - The invite token to search for.
|
||||
* @returns {Promise<InviteToken>} The invite token data.
|
||||
* @throws {Error} If the invite token is not found or there is another error.
|
||||
*/
|
||||
const getInviteTokenAndDelete = async (token) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
try {
|
||||
const invite = await InviteToken.findOneAndDelete({
|
||||
token,
|
||||
});
|
||||
if (invite === null) {
|
||||
throw new Error(stringService.authInviteNotFound);
|
||||
}
|
||||
return invite;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getInviteTokenAndDelete";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export { requestInviteToken, getInviteToken, getInviteTokenAndDelete };
|
||||
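A brief sketch of the intended invite flow using the functions above; the email, team ID, and import path are placeholders.

import mongoose from "mongoose";
import { requestInviteToken, getInviteTokenAndDelete } from "./inviteModule.js"; // illustrative path

const teamId = new mongoose.Types.ObjectId(); // placeholder

// Issue an invite: any previous tokens for this email are removed and a fresh one is stored.
const invite = await requestInviteToken({ email: "new.user@example.com", teamId, role: ["user"] });

// Later, when the invite link is followed, consume the token (it is deleted on retrieval).
const accepted = await getInviteTokenAndDelete(invite.token);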
@@ -1,228 +0,0 @@
|
||||
import MaintenanceWindow from "../../models/MaintenanceWindow.js";
|
||||
const SERVICE_NAME = "maintenanceWindowModule";
|
||||
|
||||
/**
|
||||
* Asynchronously creates a new MaintenanceWindow document and saves it to the database.
|
||||
* If the maintenance window is a one-time event, the expiry field is set to the same value as the end field.
|
||||
* @async
|
||||
* @function createMaintenanceWindow
|
||||
* @param {Object} maintenanceWindowData - The data for the new MaintenanceWindow document.
|
||||
* @param {mongoose.Schema.Types.ObjectId} maintenanceWindowData.monitorId - The ID of the monitor.
|
||||
* @param {Boolean} maintenanceWindowData.active - Indicates whether the maintenance window is active.
|
||||
* @param {Boolean} maintenanceWindowData.oneTime - Indicates whether the maintenance window is a one-time event.
|
||||
* @param {Date} maintenanceWindowData.start - The start date and time of the maintenance window.
|
||||
* @param {Date} maintenanceWindowData.end - The end date and time of the maintenance window.
|
||||
* @returns {Promise<MaintenanceWindow>} The saved MaintenanceWindow document.
|
||||
* @throws {Error} If there is an error saving the document.
|
||||
* @example
|
||||
* const maintenanceWindowData = {
|
||||
* monitorId: 'yourMonitorId',
|
||||
* active: true,
|
||||
* oneTime: true,
|
||||
* start: new Date(),
|
||||
* end: new Date(),
|
||||
* };
|
||||
* createMaintenanceWindow(maintenanceWindowData)
|
||||
* .then(maintenanceWindow => console.log(maintenanceWindow))
|
||||
* .catch(error => console.error(error));
|
||||
*/
|
||||
const createMaintenanceWindow = async (maintenanceWindowData) => {
|
||||
try {
|
||||
const maintenanceWindow = new MaintenanceWindow({
|
||||
...maintenanceWindowData,
|
||||
});
|
||||
|
||||
// If the maintenance window is a one time window, set the expiry to the end date
|
||||
if (maintenanceWindowData.oneTime) {
|
||||
maintenanceWindow.expiry = maintenanceWindowData.end;
|
||||
}
|
||||
const result = await maintenanceWindow.save();
|
||||
return result;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "createMaintenanceWindow";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const getMaintenanceWindowById = async (maintenanceWindowId) => {
|
||||
try {
|
||||
const maintenanceWindow = await MaintenanceWindow.findById(maintenanceWindowId);
|
||||
return maintenanceWindow;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getMaintenanceWindowById";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Asynchronously retrieves all MaintenanceWindow documents associated with a specific team ID.
|
||||
* @async
|
||||
* @function getMaintenanceWindowsByTeamId
|
||||
* @param {String} teamId - The ID of the team.
|
||||
* @param {Object} query - The request query parameters (active, page, rowsPerPage, field, order).
|
||||
* @returns {Promise<Array<MaintenanceWindow>>} An array of MaintenanceWindow documents.
|
||||
* @throws {Error} If there is an error retrieving the documents.
|
||||
* @example
|
||||
* getMaintenanceWindowsByTeamId(teamId)
|
||||
* .then(maintenanceWindows => console.log(maintenanceWindows))
|
||||
* .catch(error => console.error(error));
|
||||
*/
|
||||
const getMaintenanceWindowsByTeamId = async (teamId, query) => {
|
||||
try {
|
||||
let { active, page, rowsPerPage, field, order } = query || {};
|
||||
const maintenanceQuery = { teamId };
|
||||
|
||||
if (active !== undefined) maintenanceQuery.active = active;
|
||||
|
||||
const maintenanceWindowCount =
|
||||
await MaintenanceWindow.countDocuments(maintenanceQuery);
|
||||
|
||||
// Pagination
|
||||
let skip = 0;
|
||||
if (page && rowsPerPage) {
|
||||
skip = page * rowsPerPage;
|
||||
}
|
||||
|
||||
// Sorting
|
||||
let sort = {};
|
||||
if (field !== undefined && order !== undefined) {
|
||||
sort[field] = order === "asc" ? 1 : -1;
|
||||
}
|
||||
|
||||
const maintenanceWindows = await MaintenanceWindow.find(maintenanceQuery)
|
||||
.skip(skip)
|
||||
.limit(rowsPerPage)
|
||||
.sort(sort);
|
||||
|
||||
return { maintenanceWindows, maintenanceWindowCount };
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getMaintenanceWindowByUserId";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Asynchronously retrieves all MaintenanceWindow documents associated with a specific monitor ID.
|
||||
* @async
|
||||
* @function getMaintenanceWindowsByMonitorId
|
||||
* @param {mongoose.Schema.Types.ObjectId} monitorId - The ID of the monitor.
|
||||
* @returns {Promise<Array<MaintenanceWindow>>} An array of MaintenanceWindow documents.
|
||||
* @throws {Error} If there is an error retrieving the documents.
|
||||
* @example
|
||||
* getMaintenanceWindowsByMonitorId('monitorId')
|
||||
* .then(maintenanceWindows => console.log(maintenanceWindows))
|
||||
* .catch(error => console.error(error));
|
||||
*/
|
||||
const getMaintenanceWindowsByMonitorId = async (monitorId) => {
|
||||
try {
|
||||
const maintenanceWindows = await MaintenanceWindow.find({
|
||||
monitorId: monitorId,
|
||||
});
|
||||
return maintenanceWindows;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getMaintenanceWindowsByMonitorId";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Asynchronously deletes a MaintenanceWindow document by its ID.
|
||||
* @async
|
||||
* @function deleteMaintenanceWindowById
|
||||
* @param {mongoose.Schema.Types.ObjectId} maintenanceWindowId - The ID of the MaintenanceWindow document to delete.
|
||||
* @returns {Promise<MaintenanceWindow>} The deleted MaintenanceWindow document.
|
||||
* @throws {Error} If there is an error deleting the document.
|
||||
* @example
|
||||
* deleteMaintenanceWindowById('maintenanceWindowId')
|
||||
* .then(maintenanceWindow => console.log(maintenanceWindow))
|
||||
* .catch(error => console.error(error));
|
||||
*/
|
||||
const deleteMaintenanceWindowById = async (maintenanceWindowId) => {
|
||||
try {
|
||||
const maintenanceWindow =
|
||||
await MaintenanceWindow.findByIdAndDelete(maintenanceWindowId);
|
||||
return maintenanceWindow;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteMaintenanceWindowById";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Asynchronously deletes all MaintenanceWindow documents associated with a specific monitor ID.
|
||||
* @async
|
||||
* @function deleteMaintenanceWindowByMonitorId
|
||||
* @param {mongoose.Schema.Types.ObjectId} monitorId - The ID of the monitor.
|
||||
* @returns {Promise<Object>} The result of the delete operation. This object contains information about the operation, such as the number of documents deleted.
|
||||
* @throws {Error} If there is an error deleting the documents.
|
||||
* @example
|
||||
* deleteMaintenanceWindowByMonitorId('monitorId')
|
||||
* .then(result => console.log(result))
|
||||
* .catch(error => console.error(error));
|
||||
*/
|
||||
const deleteMaintenanceWindowByMonitorId = async (monitorId) => {
|
||||
try {
|
||||
const result = await MaintenanceWindow.deleteMany({ monitorId: monitorId });
|
||||
return result;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteMaintenanceWindowByMonitorId";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Asynchronously deletes all MaintenanceWindow documents associated with a specific user ID.
|
||||
* @async
|
||||
* @function deleteMaintenanceWindowByUserId
|
||||
* @param {String} userId - The ID of the user.
|
||||
* @returns {Promise<Object>} The result of the delete operation. This object contains information about the operation, such as the number of documents deleted.
|
||||
* @throws {Error} If there is an error deleting the documents.
|
||||
* @example
|
||||
* deleteMaintenanceWindowByUserId('userId')
|
||||
* .then(result => console.log(result))
|
||||
* .catch(error => console.error(error));
|
||||
*/
|
||||
const deleteMaintenanceWindowByUserId = async (userId) => {
|
||||
try {
|
||||
const result = await MaintenanceWindow.deleteMany({ userId: userId });
|
||||
return result;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteMaintenanceWindowByUserId";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const editMaintenanceWindowById = async (maintenanceWindowId, maintenanceWindowData) => {
|
||||
try {
|
||||
const editedMaintenanceWindow = await MaintenanceWindow.findByIdAndUpdate(
|
||||
maintenanceWindowId,
|
||||
maintenanceWindowData,
|
||||
{ new: true }
|
||||
);
|
||||
return editedMaintenanceWindow;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "editMaintenanceWindowById";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export {
|
||||
createMaintenanceWindow,
|
||||
getMaintenanceWindowById,
|
||||
getMaintenanceWindowsByTeamId,
|
||||
getMaintenanceWindowsByMonitorId,
|
||||
deleteMaintenanceWindowById,
|
||||
deleteMaintenanceWindowByMonitorId,
|
||||
deleteMaintenanceWindowByUserId,
|
||||
editMaintenanceWindowById,
|
||||
};
|
||||
@@ -1,937 +0,0 @@
|
||||
import Monitor from "../../models/Monitor.js";
|
||||
import Check from "../../models/Check.js";
|
||||
import PageSpeedCheck from "../../models/PageSpeedCheck.js";
|
||||
import HardwareCheck from "../../models/HardwareCheck.js";
|
||||
import DistributedUptimeCheck from "../../models/DistributedUptimeCheck.js";
|
||||
import Notification from "../../models/Notification.js";
|
||||
import { NormalizeData, NormalizeDataUptimeDetails } from "../../../utils/dataUtils.js";
|
||||
import ServiceRegistry from "../../../service/serviceRegistry.js";
|
||||
import StringService from "../../../service/stringService.js";
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
import {
|
||||
buildUptimeDetailsPipeline,
|
||||
buildHardwareDetailsPipeline,
|
||||
buildDistributedUptimeDetailsPipeline,
|
||||
} from "./monitorModuleQueries.js";
|
||||
import { ObjectId } from "mongodb";
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
const demoMonitorsPath = path.resolve(__dirname, "../../../utils/demoMonitors.json");
|
||||
const demoMonitors = JSON.parse(fs.readFileSync(demoMonitorsPath, "utf8"));
|
||||
|
||||
const SERVICE_NAME = "monitorModule";
|
||||
|
||||
const CHECK_MODEL_LOOKUP = {
|
||||
http: Check,
|
||||
ping: Check,
|
||||
docker: Check,
|
||||
port: Check,
|
||||
pagespeed: PageSpeedCheck,
|
||||
hardware: HardwareCheck,
|
||||
};
|
||||
|
||||
/**
|
||||
* Get all monitors
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @returns {Promise<Array<Monitor>>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const getAllMonitors = async (req, res) => {
|
||||
try {
|
||||
const monitors = await Monitor.find();
|
||||
return monitors;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getAllMonitors";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Get all monitors with uptime stats for 1,7,30, and 90 days
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @returns {Promise<Array<Monitor>>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const getAllMonitorsWithUptimeStats = async () => {
|
||||
const timeRanges = {
|
||||
1: new Date(Date.now() - 24 * 60 * 60 * 1000),
|
||||
7: new Date(Date.now() - 7 * 24 * 60 * 60 * 1000),
|
||||
30: new Date(Date.now() - 30 * 24 * 60 * 60 * 1000),
|
||||
90: new Date(Date.now() - 90 * 24 * 60 * 60 * 1000),
|
||||
};
|
||||
|
||||
try {
|
||||
const monitors = await Monitor.find();
|
||||
const monitorsWithStats = await Promise.all(
|
||||
monitors.map(async (monitor) => {
|
||||
const model = CHECK_MODEL_LOOKUP[monitor.type];
|
||||
|
||||
const uptimeStats = await Promise.all(
|
||||
Object.entries(timeRanges).map(async ([days, startDate]) => {
|
||||
const checks = await model.find({
|
||||
monitorId: monitor._id,
|
||||
createdAt: { $gte: startDate },
|
||||
});
|
||||
return [days, getUptimePercentage(checks)];
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
...monitor.toObject(),
|
||||
...Object.fromEntries(uptimeStats),
|
||||
};
|
||||
})
|
||||
);
|
||||
|
||||
return monitorsWithStats;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getAllMonitorsWithUptimeStats";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
 * Function to calculate uptime duration since the most recent down check.
|
||||
* @param {Array} checks Array of check objects.
|
||||
* @returns {number} Uptime duration in ms.
|
||||
*/
|
||||
const calculateUptimeDuration = (checks) => {
|
||||
if (!checks || checks.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
const latestCheck = new Date(checks[0].createdAt);
|
||||
let latestDownCheck = 0;
|
||||
|
||||
for (let i = checks.length - 1; i >= 0; i--) {
|
||||
if (checks[i].status === false) {
|
||||
latestDownCheck = new Date(checks[i].createdAt);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// If no down check is found, uptime runs from the oldest check to now
|
||||
if (latestDownCheck === 0) {
|
||||
return Date.now() - new Date(checks[checks.length - 1].createdAt);
|
||||
}
|
||||
|
||||
// Otherwise the uptime is from the last check to the last down check
|
||||
return latestCheck - latestDownCheck;
|
||||
};
|
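// Illustrative sketch, not part of the original file: calculateUptimeDuration assumes the
// checks array is sorted newest-first, so the duration runs from the newest check back to
// the most recent down check. The sample data below is hypothetical.
const exampleChecks = [
	{ status: true, createdAt: "2025-01-02T12:00:00Z" }, // newest check
	{ status: true, createdAt: "2025-01-02T11:00:00Z" },
	{ status: false, createdAt: "2025-01-02T10:00:00Z" }, // most recent down check
];
const exampleDuration = calculateUptimeDuration(exampleChecks); // 7200000 ms (two hours)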
||||
|
||||
/**
|
||||
* Helper function to get duration since last check
|
||||
* @param {Array} checks Array of check objects.
|
||||
 * @returns {number} Milliseconds elapsed since the most recent check.
|
||||
*/
|
||||
const getLastChecked = (checks) => {
|
||||
if (!checks || checks.length === 0) {
|
||||
return 0; // Handle case when no checks are available
|
||||
}
|
||||
// Data is sorted newest->oldest, so checks[0] is the most recent check
|
||||
return new Date() - new Date(checks[0].createdAt);
|
||||
};
|
||||
|
||||
/**
|
||||
* Helper function to get latestResponseTime
|
||||
* @param {Array} checks Array of check objects.
|
||||
 * @returns {number} Response time of the most recent check.
|
||||
*/
|
||||
const getLatestResponseTime = (checks) => {
|
||||
if (!checks || checks.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return checks[0]?.responseTime ?? 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* Helper function to get average response time
|
||||
* @param {Array} checks Array of check objects.
|
||||
 * @returns {number} Average response time across the given checks.
|
||||
*/
|
||||
const getAverageResponseTime = (checks) => {
|
||||
if (!checks || checks.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const validChecks = checks.filter((check) => typeof check.responseTime === "number");
|
||||
if (validChecks.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
const aggResponseTime = validChecks.reduce((sum, check) => {
|
||||
return sum + check.responseTime;
|
||||
}, 0);
|
||||
return aggResponseTime / validChecks.length;
|
||||
};
|
||||
|
||||
/**
|
||||
 * Helper function to get the uptime percentage for a set of checks
|
||||
* @param {Array} checks Array of check objects.
|
||||
 * @returns {number} Uptime percentage (0-100).
|
||||
*/
|
||||
|
||||
const getUptimePercentage = (checks) => {
|
||||
if (!checks || checks.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
const upCount = checks.reduce((count, check) => {
|
||||
return check.status === true ? count + 1 : count;
|
||||
}, 0);
|
||||
return (upCount / checks.length) * 100;
|
||||
};
|
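// Illustrative sketch, not part of the original file: with three up checks and one down
// check, getUptimePercentage returns 75.
const sampleChecks = [{ status: true }, { status: true }, { status: true }, { status: false }];
const sampleUptime = getUptimePercentage(sampleChecks); // (3 / 4) * 100 === 75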
||||
|
||||
/**
|
||||
* Helper function to get all incidents
|
||||
* @param {Array} checks Array of check objects.
|
||||
 * @returns {number} Number of down (incident) checks.
|
||||
*/
|
||||
|
||||
const getIncidents = (checks) => {
|
||||
if (!checks || checks.length === 0) {
|
||||
return 0; // Handle case when no checks are available
|
||||
}
|
||||
return checks.reduce((acc, check) => {
|
||||
return check.status === false ? (acc += 1) : acc;
|
||||
}, 0);
|
||||
};
|
||||
|
||||
/**
|
||||
* Get date range parameters
|
||||
* @param {string} dateRange - 'day' | 'week' | 'month' | 'all'
|
||||
* @returns {Object} Start and end dates
|
||||
*/
|
||||
const getDateRange = (dateRange) => {
|
||||
const startDates = {
|
||||
day: new Date(new Date().setDate(new Date().getDate() - 1)),
|
||||
week: new Date(new Date().setDate(new Date().getDate() - 7)),
|
||||
month: new Date(new Date().setMonth(new Date().getMonth() - 1)),
|
||||
all: new Date(0),
|
||||
};
|
||||
return {
|
||||
start: startDates[dateRange],
|
||||
end: new Date(),
|
||||
};
|
||||
};
|
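// Illustrative sketch, not part of the original file: "week" yields a window starting seven
// days ago and ending now, while "all" starts at the Unix epoch.
const exampleRange = getDateRange("week");
// exampleRange.start is seven days before exampleRange.end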
||||
|
||||
/**
|
||||
* Get checks for a monitor
|
||||
* @param {string} monitorId - Monitor ID
|
||||
* @param {Object} model - Check model to use
|
||||
* @param {Object} dateRange - Date range parameters
|
||||
* @param {number} sortOrder - Sort order (1 for ascending, -1 for descending)
|
||||
* @returns {Promise<Object>} All checks and date-ranged checks
|
||||
*/
|
||||
const getMonitorChecks = async (monitorId, model, dateRange, sortOrder) => {
|
||||
const indexSpec = {
|
||||
monitorId: 1,
|
||||
createdAt: sortOrder, // This will be 1 or -1
|
||||
};
|
||||
|
||||
const [checksAll, checksForDateRange] = await Promise.all([
|
||||
model.find({ monitorId }).sort({ createdAt: sortOrder }).hint(indexSpec).lean(),
|
||||
model
|
||||
.find({
|
||||
monitorId,
|
||||
createdAt: { $gte: dateRange.start, $lte: dateRange.end },
|
||||
})
|
||||
.hint(indexSpec)
|
||||
.lean(),
|
||||
]);
|
||||
|
||||
return { checksAll, checksForDateRange };
|
||||
};
|
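// Illustrative sketch, not part of the original file: a hypothetical caller that fetches all
// checks plus a one-day window for a single monitor, newest first, using the Check model
// imported at the top of this module.
const fetchDayOfChecks = async (monitorId) => {
	const dates = getDateRange("day");
	return getMonitorChecks(monitorId, Check, dates, -1);
};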
||||
|
||||
/**
|
||||
* Process checks for display
|
||||
 * @param {Function} normalizeData - Normalization function, injected so it can be swapped in tests
 * @param {Array} checks - Checks to process
|
||||
* @param {number} numToDisplay - Number of checks to display
|
||||
* @param {boolean} normalize - Whether to normalize the data
|
||||
* @returns {Array} Processed checks
|
||||
*/
|
||||
const processChecksForDisplay = (normalizeData, checks, numToDisplay, normalize) => {
|
||||
let processedChecks = checks;
|
||||
if (numToDisplay && checks.length > numToDisplay) {
|
||||
const n = Math.ceil(checks.length / numToDisplay);
|
||||
processedChecks = checks.filter((_, index) => index % n === 0);
|
||||
}
|
||||
return normalize ? normalizeData(processedChecks, 1, 100) : processedChecks;
|
||||
};
|
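// Illustrative sketch, not part of the original file: with 100 checks and numToDisplay = 25,
// n = Math.ceil(100 / 25) = 4, so every 4th check is kept; passing normalize = false skips
// NormalizeData. The helper name below is hypothetical.
const downsampleChecks = (checks) => processChecksForDisplay(NormalizeData, checks, 25, false);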
||||
|
||||
/**
|
||||
* Get time-grouped checks based on date range
|
||||
* @param {Array} checks Array of check objects
|
||||
* @param {string} dateRange 'day' | 'week' | 'month'
|
||||
* @returns {Object} Grouped checks by time period
|
||||
*/
|
||||
const groupChecksByTime = (checks, dateRange) => {
|
||||
return checks.reduce((acc, check) => {
|
||||
// Validate the date
|
||||
const checkDate = new Date(check.createdAt);
|
||||
if (Number.isNaN(checkDate.getTime()) || checkDate.getTime() === 0) {
|
||||
return acc;
|
||||
}
|
||||
|
||||
const time =
|
||||
dateRange === "day"
|
||||
? checkDate.setMinutes(0, 0, 0)
|
||||
: checkDate.toISOString().split("T")[0];
|
||||
|
||||
if (!acc[time]) {
|
||||
acc[time] = { time, checks: [] };
|
||||
}
|
||||
acc[time].checks.push(check);
|
||||
return acc;
|
||||
}, {});
|
||||
};
|
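// Illustrative sketch, not part of the original file: for dateRange "day" checks are bucketed
// by hour (the setMinutes(0, 0, 0) timestamp), otherwise by calendar date. calculateGroupStats
// below then reduces each bucket to chart-ready aggregates, mirroring getMonitorStatsById.
const buildAggregateData = (checks, dateRange) =>
	Object.values(groupChecksByTime(checks, dateRange)).map(calculateGroupStats);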
||||
|
||||
/**
|
||||
* Calculate aggregate stats for a group of checks
|
||||
* @param {Object} group Group of checks
|
||||
* @returns {Object} Stats for the group
|
||||
*/
|
||||
const calculateGroupStats = (group) => {
|
||||
const totalChecks = group.checks.length;
|
||||
|
||||
const checksWithResponseTime = group.checks.filter(
|
||||
(check) => typeof check.responseTime === "number" && !Number.isNaN(check.responseTime)
|
||||
);
|
||||
|
||||
return {
|
||||
time: group.time,
|
||||
uptimePercentage: getUptimePercentage(group.checks),
|
||||
totalChecks,
|
||||
totalIncidents: group.checks.filter((check) => !check.status).length,
|
||||
avgResponseTime:
|
||||
checksWithResponseTime.length > 0
|
||||
? checksWithResponseTime.reduce((sum, check) => sum + check.responseTime, 0) /
|
||||
checksWithResponseTime.length
|
||||
: 0,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Get uptime details by monitor ID
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @returns {Promise<Monitor>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const getUptimeDetailsById = async (req) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
try {
|
||||
const { monitorId } = req.params;
|
||||
const monitor = await Monitor.findById(monitorId);
|
||||
if (monitor === null || monitor === undefined) {
|
||||
throw new Error(stringService.dbFindMonitorById(monitorId));
|
||||
}
|
||||
|
||||
const { dateRange, normalize } = req.query;
|
||||
const dates = getDateRange(dateRange);
|
||||
const formatLookup = {
|
||||
day: "%Y-%m-%dT%H:00:00Z",
|
||||
week: "%Y-%m-%dT%H:00:00Z",
|
||||
month: "%Y-%m-%dT00:00:00Z",
|
||||
};
|
||||
|
||||
const dateString = formatLookup[dateRange];
|
||||
|
||||
const results = await Check.aggregate(
|
||||
buildUptimeDetailsPipeline(monitor, dates, dateString)
|
||||
);
|
||||
|
||||
const monitorData = results[0];
|
||||
const normalizedGroupChecks = NormalizeDataUptimeDetails(
|
||||
monitorData.groupedChecks,
|
||||
10,
|
||||
100
|
||||
);
|
||||
|
||||
const monitorStats = {
|
||||
...monitor.toObject(),
|
||||
...monitorData,
|
||||
groupedChecks: normalizedGroupChecks,
|
||||
};
|
||||
|
||||
return monitorStats;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getUptimeDetailsById";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const getDistributedUptimeDetailsById = async (req) => {
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
try {
|
||||
const { monitorId } = req?.params ?? {};
|
||||
if (typeof monitorId === "undefined") {
|
||||
throw new Error();
|
||||
}
|
||||
const monitor = await Monitor.findById(monitorId);
|
||||
if (monitor === null || monitor === undefined) {
|
||||
throw new Error(stringService.dbFindMonitorById(monitorId));
|
||||
}
|
||||
|
||||
const { dateRange, normalize } = req.query;
|
||||
const dates = getDateRange(dateRange);
|
||||
const formatLookup = {
|
||||
day: "%Y-%m-%dT%H:%M:00Z",
|
||||
week: "%Y-%m-%dT%H:00:00Z",
|
||||
month: "%Y-%m-%dT00:00:00Z",
|
||||
};
|
||||
|
||||
const dateString = formatLookup[dateRange];
|
||||
const results = await DistributedUptimeCheck.aggregate(
|
||||
buildDistributedUptimeDetailsPipeline(monitor, dates, dateString)
|
||||
);
|
||||
|
||||
const monitorData = results[0];
|
||||
const normalizedGroupChecks = NormalizeDataUptimeDetails(
|
||||
monitorData.groupedChecks,
|
||||
10,
|
||||
100
|
||||
);
|
||||
|
||||
const monitorStats = {
|
||||
...monitor.toObject(),
|
||||
...monitorData,
|
||||
groupedChecks: normalizedGroupChecks,
|
||||
};
|
||||
|
||||
return monitorStats;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getDistributedUptimeDetailsById";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Get stats by monitor ID
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @returns {Promise<Monitor>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const getMonitorStatsById = async (req) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
try {
|
||||
const { monitorId } = req.params;
|
||||
|
||||
// Get monitor, if we can't find it, abort with error
|
||||
const monitor = await Monitor.findById(monitorId);
|
||||
if (monitor === null || monitor === undefined) {
|
||||
throw new Error(stringService.getDbFindMonitorById(monitorId));
|
||||
}
|
||||
|
||||
// Get query params
|
||||
let { limit, sortOrder, dateRange, numToDisplay, normalize } = req.query;
|
||||
const sort = sortOrder === "asc" ? 1 : -1;
|
||||
|
||||
// Get Checks for monitor in date range requested
|
||||
const model = CHECK_MODEL_LOOKUP[monitor.type];
|
||||
const dates = getDateRange(dateRange);
|
||||
const { checksAll, checksForDateRange } = await getMonitorChecks(
|
||||
monitorId,
|
||||
model,
|
||||
dates,
|
||||
sort
|
||||
);
|
||||
|
||||
// Build monitor stats
|
||||
const monitorStats = {
|
||||
...monitor.toObject(),
|
||||
uptimeDuration: calculateUptimeDuration(checksAll),
|
||||
lastChecked: getLastChecked(checksAll),
|
||||
latestResponseTime: getLatestResponseTime(checksAll),
|
||||
periodIncidents: getIncidents(checksForDateRange),
|
||||
periodTotalChecks: checksForDateRange.length,
|
||||
checks: processChecksForDisplay(
|
||||
NormalizeData,
|
||||
checksForDateRange,
|
||||
numToDisplay,
|
||||
normalize
|
||||
),
|
||||
};
|
||||
|
||||
if (
|
||||
monitor.type === "http" ||
|
||||
monitor.type === "ping" ||
|
||||
monitor.type === "docker" ||
|
||||
monitor.type === "port"
|
||||
) {
|
||||
// HTTP/PING Specific stats
|
||||
monitorStats.periodAvgResponseTime = getAverageResponseTime(checksForDateRange);
|
||||
monitorStats.periodUptime = getUptimePercentage(checksForDateRange);
|
||||
const groupedChecks = groupChecksByTime(checksForDateRange, dateRange);
|
||||
monitorStats.aggregateData = Object.values(groupedChecks).map(calculateGroupStats);
|
||||
}
|
||||
|
||||
return monitorStats;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getMonitorStatsById";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const getHardwareDetailsById = async (req) => {
|
||||
try {
|
||||
const { monitorId } = req.params;
|
||||
const { dateRange } = req.query;
|
||||
const monitor = await Monitor.findById(monitorId);
|
||||
const dates = getDateRange(dateRange);
|
||||
const formatLookup = {
|
||||
day: "%Y-%m-%dT%H:00:00Z",
|
||||
week: "%Y-%m-%dT%H:00:00Z",
|
||||
month: "%Y-%m-%dT00:00:00Z",
|
||||
};
|
||||
const dateString = formatLookup[dateRange];
|
||||
const hardwareStats = await HardwareCheck.aggregate(
|
||||
buildHardwareDetailsPipeline(monitor, dates, dateString)
|
||||
);
|
||||
|
||||
const monitorStats = {
|
||||
...monitor.toObject(),
|
||||
stats: hardwareStats[0],
|
||||
};
|
||||
return monitorStats;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getHardwareDetailsById";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Get a monitor by ID
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @returns {Promise<Monitor>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const getMonitorById = async (monitorId) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
try {
|
||||
const monitor = await Monitor.findById(monitorId);
|
||||
if (monitor === null || monitor === undefined) {
|
||||
const error = new Error(stringService.getDbFindMonitorById(monitorId));
|
||||
error.status = 404;
|
||||
throw error;
|
||||
}
|
||||
// Get notifications
|
||||
const notifications = await Notification.find({
|
||||
monitorId: monitorId,
|
||||
});
|
||||
|
||||
// Update monitor with notifications and save
|
||||
monitor.notifications = notifications;
|
||||
await monitor.save();
|
||||
|
||||
return monitor;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getMonitorById";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const getMonitorsByTeamId = async (req) => {
|
||||
let { limit, type, page, rowsPerPage, filter, field, order } = req.query;
|
||||
limit = parseInt(limit);
|
||||
page = parseInt(page);
|
||||
rowsPerPage = parseInt(rowsPerPage);
|
||||
if (field === undefined) {
|
||||
field = "name";
|
||||
order = "asc";
|
||||
}
|
||||
// Build the match stage
|
||||
const matchStage = { teamId: ObjectId.createFromHexString(req.params.teamId) };
|
||||
if (type !== undefined) {
|
||||
matchStage.type = Array.isArray(type) ? { $in: type } : type;
|
||||
}
|
||||
|
||||
const skip = page && rowsPerPage ? page * rowsPerPage : 0;
|
||||
const sort = { [field]: order === "asc" ? 1 : -1 };
|
||||
const results = await Monitor.aggregate([
|
||||
{ $match: matchStage },
|
||||
{
|
||||
$facet: {
|
||||
summary: [
|
||||
{
|
||||
$group: {
|
||||
_id: null,
|
||||
totalMonitors: { $sum: 1 },
|
||||
upMonitors: {
|
||||
$sum: {
|
||||
$cond: [{ $eq: ["$status", true] }, 1, 0],
|
||||
},
|
||||
},
|
||||
downMonitors: {
|
||||
$sum: {
|
||||
$cond: [{ $eq: ["$status", false] }, 1, 0],
|
||||
},
|
||||
},
|
||||
pausedMonitors: {
|
||||
$sum: {
|
||||
$cond: [{ $eq: ["$isActive", false] }, 1, 0],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
_id: 0,
|
||||
},
|
||||
},
|
||||
],
|
||||
monitors: [
|
||||
{ $sort: sort },
|
||||
{
|
||||
$project: {
|
||||
_id: 1,
|
||||
name: 1,
|
||||
},
|
||||
},
|
||||
],
|
||||
filteredMonitors: [
|
||||
...(filter !== undefined
|
||||
? [
|
||||
{
|
||||
$match: {
|
||||
$or: [
|
||||
{ name: { $regex: filter, $options: "i" } },
|
||||
{ url: { $regex: filter, $options: "i" } },
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
: []),
|
||||
{ $sort: sort },
|
||||
{ $skip: skip },
|
||||
...(rowsPerPage ? [{ $limit: rowsPerPage }] : []),
|
||||
...(limit
|
||||
? [
|
||||
{
|
||||
$lookup: {
|
||||
from: "checks",
|
||||
let: { monitorId: "$_id" },
|
||||
pipeline: [
|
||||
{
|
||||
$match: {
|
||||
$expr: { $eq: ["$monitorId", "$$monitorId"] },
|
||||
},
|
||||
},
|
||||
{ $sort: { createdAt: -1 } },
|
||||
...(limit ? [{ $limit: limit }] : []),
|
||||
],
|
||||
as: "standardchecks",
|
||||
},
|
||||
},
|
||||
]
|
||||
: []),
|
||||
...(limit
|
||||
? [
|
||||
{
|
||||
$lookup: {
|
||||
from: "pagespeedchecks",
|
||||
let: { monitorId: "$_id" },
|
||||
pipeline: [
|
||||
{
|
||||
$match: {
|
||||
$expr: { $eq: ["$monitorId", "$$monitorId"] },
|
||||
},
|
||||
},
|
||||
{ $sort: { createdAt: -1 } },
|
||||
...(limit ? [{ $limit: limit }] : []),
|
||||
],
|
||||
as: "pagespeedchecks",
|
||||
},
|
||||
},
|
||||
]
|
||||
: []),
|
||||
...(limit
|
||||
? [
|
||||
{
|
||||
$lookup: {
|
||||
from: "hardwarechecks",
|
||||
let: { monitorId: "$_id" },
|
||||
pipeline: [
|
||||
{
|
||||
$match: {
|
||||
$expr: { $eq: ["$monitorId", "$$monitorId"] },
|
||||
},
|
||||
},
|
||||
{ $sort: { createdAt: -1 } },
|
||||
...(limit ? [{ $limit: limit }] : []),
|
||||
],
|
||||
as: "hardwarechecks",
|
||||
},
|
||||
},
|
||||
]
|
||||
: []),
|
||||
...(limit
|
||||
? [
|
||||
{
|
||||
$lookup: {
|
||||
from: "distributeduptimechecks",
|
||||
let: { monitorId: "$_id" },
|
||||
pipeline: [
|
||||
{
|
||||
$match: {
|
||||
$expr: { $eq: ["$monitorId", "$$monitorId"] },
|
||||
},
|
||||
},
|
||||
{ $sort: { createdAt: -1 } },
|
||||
...(limit ? [{ $limit: limit }] : []),
|
||||
],
|
||||
as: "distributeduptimechecks",
|
||||
},
|
||||
},
|
||||
]
|
||||
: []),
|
||||
|
||||
{
|
||||
$addFields: {
|
||||
checks: {
|
||||
$switch: {
|
||||
branches: [
|
||||
{
|
||||
case: { $in: ["$type", ["http", "ping", "docker", "port"]] },
|
||||
then: "$standardchecks",
|
||||
},
|
||||
{
|
||||
case: { $eq: ["$type", "pagespeed"] },
|
||||
then: "$pagespeedchecks",
|
||||
},
|
||||
{
|
||||
case: { $eq: ["$type", "hardware"] },
|
||||
then: "$hardwarechecks",
|
||||
},
|
||||
{
|
||||
case: { $eq: ["$type", "distributed_http"] },
|
||||
then: "$distributeduptimechecks",
|
||||
},
|
||||
],
|
||||
default: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
standardchecks: 0,
|
||||
pagespeedchecks: 0,
|
||||
hardwarechecks: 0,
distributeduptimechecks: 0,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
summary: { $arrayElemAt: ["$summary", 0] },
|
||||
filteredMonitors: 1,
|
||||
monitors: 1,
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
let { monitors, filteredMonitors, summary } = results[0];
|
||||
filteredMonitors = filteredMonitors.map((monitor) => {
|
||||
if (!monitor.checks) {
|
||||
return monitor;
|
||||
}
|
||||
monitor.checks = NormalizeData(monitor.checks, 10, 100);
|
||||
return monitor;
|
||||
});
|
||||
return { monitors, filteredMonitors, summary };
|
||||
};
|
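// Illustrative sketch, not part of the original file: a hypothetical request shape for
// getMonitorsByTeamId. The teamId value is a placeholder 24-character hex string, since
// ObjectId.createFromHexString requires one; page/rowsPerPage drive $skip and $limit, filter
// applies a case-insensitive name/url regex, and limit caps how many recent checks are joined.
const exampleReq = {
	params: { teamId: "64b7f0c2a1b2c3d4e5f60718" },
	query: { limit: "25", type: "http", page: "0", rowsPerPage: "25", field: "name", order: "asc" },
};
// const { monitors, filteredMonitors, summary } = await getMonitorsByTeamId(exampleReq);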
||||
|
||||
/**
|
||||
* Create a monitor
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @returns {Promise<Monitor>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const createMonitor = async (req, res) => {
|
||||
try {
|
||||
const monitor = new Monitor({ ...req.body });
|
||||
// Remove notifications from the monitor as they aren't needed here
|
||||
monitor.notifications = undefined;
|
||||
await monitor.save();
|
||||
return monitor;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "createMonitor";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete a monitor by ID
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @returns {Promise<Monitor>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const deleteMonitor = async (req, res) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
|
||||
const monitorId = req.params.monitorId;
|
||||
try {
|
||||
const monitor = await Monitor.findByIdAndDelete(monitorId);
|
||||
if (!monitor) {
|
||||
throw new Error(stringService.getDbFindMonitorById(monitorId));
|
||||
}
|
||||
return monitor;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteMonitor";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* DELETE ALL MONITORS (TEMP)
|
||||
*/
|
||||
|
||||
const deleteAllMonitors = async (teamId) => {
|
||||
try {
|
||||
const monitors = await Monitor.find({ teamId });
|
||||
const { deletedCount } = await Monitor.deleteMany({ teamId });
|
||||
|
||||
return { monitors, deletedCount };
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteAllMonitors";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete all monitors associated with a user ID
|
||||
* @async
|
||||
* @param {string} userId - The ID of the user whose monitors are to be deleted.
|
||||
* @returns {Promise} A promise that resolves when the operation is complete.
|
||||
*/
|
||||
const deleteMonitorsByUserId = async (userId) => {
|
||||
try {
|
||||
const result = await Monitor.deleteMany({ userId: userId });
|
||||
return result;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteMonitorsByUserId";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Edit a monitor by ID
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @returns {Promise<Monitor>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const editMonitor = async (candidateId, candidateMonitor) => {
|
||||
candidateMonitor.notifications = undefined;
|
||||
|
||||
try {
|
||||
const editedMonitor = await Monitor.findByIdAndUpdate(candidateId, candidateMonitor, {
|
||||
new: true,
|
||||
});
|
||||
return editedMonitor;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "editMonitor";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const addDemoMonitors = async (userId, teamId) => {
|
||||
try {
|
||||
const demoMonitorsToInsert = demoMonitors.map((monitor) => {
|
||||
return {
|
||||
userId,
|
||||
teamId,
|
||||
name: monitor.name,
|
||||
description: monitor.name,
|
||||
type: "http",
|
||||
url: monitor.url,
|
||||
interval: 60000,
|
||||
};
|
||||
});
|
||||
const insertedMonitors = await Monitor.insertMany(demoMonitorsToInsert);
|
||||
return insertedMonitors;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "addDemoMonitors";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export {
|
||||
getAllMonitors,
|
||||
getAllMonitorsWithUptimeStats,
|
||||
getMonitorStatsById,
|
||||
getMonitorById,
|
||||
getMonitorsByTeamId,
|
||||
getUptimeDetailsById,
|
||||
getDistributedUptimeDetailsById,
|
||||
createMonitor,
|
||||
deleteMonitor,
|
||||
deleteAllMonitors,
|
||||
deleteMonitorsByUserId,
|
||||
editMonitor,
|
||||
addDemoMonitors,
|
||||
getHardwareDetailsById,
|
||||
};
|
||||
|
||||
// Helper functions
|
||||
export {
|
||||
calculateUptimeDuration,
|
||||
getLastChecked,
|
||||
getLatestResponseTime,
|
||||
getAverageResponseTime,
|
||||
getUptimePercentage,
|
||||
getIncidents,
|
||||
getDateRange,
|
||||
getMonitorChecks,
|
||||
processChecksForDisplay,
|
||||
groupChecksByTime,
|
||||
calculateGroupStats,
|
||||
};
|
||||
|
||||
// limit 25
|
||||
// page 1
|
||||
// rowsPerPage 25
|
||||
// filter undefined
|
||||
// field name
|
||||
// order asc
|
||||
// skip 25
|
||||
// sort { name: 1 }
|
||||
// filteredMonitors []
|
||||
|
||||
// limit 25
|
||||
// page NaN
|
||||
// rowsPerPage 25
|
||||
// filter undefined
|
||||
// field name
|
||||
// order asc
|
||||
// skip 0
|
||||
// sort { name: 1 }
|
||||
@@ -1,721 +0,0 @@
|
||||
const buildUptimeDetailsPipeline = (monitor, dates, dateString) => {
|
||||
return [
|
||||
{
|
||||
$match: {
|
||||
monitorId: monitor._id,
|
||||
},
|
||||
},
|
||||
{
|
||||
$sort: {
|
||||
createdAt: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
$facet: {
|
||||
aggregateData: [
|
||||
{
|
||||
$group: {
|
||||
_id: null,
|
||||
avgResponseTime: {
|
||||
$avg: "$responseTime",
|
||||
},
|
||||
lastCheck: {
|
||||
$last: "$$ROOT",
|
||||
},
|
||||
totalChecks: {
|
||||
$sum: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
uptimeStreak: [
|
||||
{
|
||||
$sort: {
|
||||
createdAt: -1,
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: null,
|
||||
checks: { $push: "$$ROOT" },
|
||||
},
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
streak: {
|
||||
$reduce: {
|
||||
input: "$checks",
|
||||
initialValue: { checks: [], foundFalse: false },
|
||||
in: {
|
||||
$cond: [
|
||||
{
|
||||
$and: [
|
||||
{ $not: "$$value.foundFalse" }, // stop reducing if a false check has been found
|
||||
{ $eq: ["$$this.status", true] }, // continue reducing if current check true
|
||||
],
|
||||
},
|
||||
// true case
|
||||
{
|
||||
checks: { $concatArrays: ["$$value.checks", ["$$this"]] },
|
||||
foundFalse: false, // Add the check to the streak
|
||||
},
|
||||
// false case
|
||||
{
|
||||
checks: "$$value.checks",
|
||||
foundFalse: true, // Mark that we found a false
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
// For the response time chart, should return checks for date window
|
||||
// Grouped by: {day: hour}, {week: day}, {month: day}
|
||||
groupedChecks: [
|
||||
{
|
||||
$match: {
|
||||
createdAt: { $gte: dates.start, $lte: dates.end },
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: {
|
||||
$dateToString: {
|
||||
format: dateString,
|
||||
date: "$createdAt",
|
||||
},
|
||||
},
|
||||
avgResponseTime: {
|
||||
$avg: "$responseTime",
|
||||
},
|
||||
totalChecks: {
|
||||
$sum: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$sort: {
|
||||
_id: 1,
|
||||
},
|
||||
},
|
||||
],
|
||||
// Average response time for the date window
|
||||
groupAvgResponseTime: [
|
||||
{
|
||||
$match: {
|
||||
createdAt: { $gte: dates.start, $lte: dates.end },
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: null,
|
||||
avgResponseTime: {
|
||||
$avg: "$responseTime",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
// All UpChecks for the date window
|
||||
upChecks: [
|
||||
{
|
||||
$match: {
|
||||
status: true,
|
||||
createdAt: { $gte: dates.start, $lte: dates.end },
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: null,
|
||||
avgResponseTime: {
|
||||
$avg: "$responseTime",
|
||||
},
|
||||
totalChecks: {
|
||||
$sum: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
// Up checks grouped by: {day: hour}, {week: day}, {month: day}
|
||||
groupedUpChecks: [
|
||||
{
|
||||
$match: {
|
||||
status: true,
|
||||
createdAt: { $gte: dates.start, $lte: dates.end },
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: {
|
||||
$dateToString: {
|
||||
format: dateString,
|
||||
date: "$createdAt",
|
||||
},
|
||||
},
|
||||
totalChecks: {
|
||||
$sum: 1,
|
||||
},
|
||||
avgResponseTime: {
|
||||
$avg: "$responseTime",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$sort: { _id: 1 },
|
||||
},
|
||||
],
|
||||
// All down checks for the date window
|
||||
downChecks: [
|
||||
{
|
||||
$match: {
|
||||
status: false,
|
||||
createdAt: { $gte: dates.start, $lte: dates.end },
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: null,
|
||||
avgResponseTime: {
|
||||
$avg: "$responseTime",
|
||||
},
|
||||
totalChecks: {
|
||||
$sum: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
// Down checks grouped by: {day: hour}, {week: day}, {month: day} for the date window
|
||||
groupedDownChecks: [
|
||||
{
|
||||
$match: {
|
||||
status: false,
|
||||
createdAt: { $gte: dates.start, $lte: dates.end },
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: {
|
||||
$dateToString: {
|
||||
format: dateString,
|
||||
date: "$createdAt",
|
||||
},
|
||||
},
|
||||
totalChecks: {
|
||||
$sum: 1,
|
||||
},
|
||||
avgResponseTime: {
|
||||
$avg: "$responseTime",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$sort: { _id: 1 },
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
uptimeStreak: {
|
||||
$let: {
|
||||
vars: {
|
||||
checks: { $ifNull: [{ $first: "$uptimeStreak.streak.checks" }, []] },
|
||||
},
|
||||
in: {
|
||||
$cond: [
|
||||
{ $eq: [{ $size: "$$checks" }, 0] },
|
||||
0,
|
||||
{
|
||||
$subtract: [new Date(), { $last: "$$checks.createdAt" }],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
avgResponseTime: {
|
||||
$arrayElemAt: ["$aggregateData.avgResponseTime", 0],
|
||||
},
|
||||
totalChecks: {
|
||||
$arrayElemAt: ["$aggregateData.totalChecks", 0],
|
||||
},
|
||||
latestResponseTime: {
|
||||
$arrayElemAt: ["$aggregateData.lastCheck.responseTime", 0],
|
||||
},
|
||||
timeSinceLastCheck: {
|
||||
$let: {
|
||||
vars: {
|
||||
lastCheck: {
|
||||
$arrayElemAt: ["$aggregateData.lastCheck", 0],
|
||||
},
|
||||
},
|
||||
in: {
|
||||
$cond: [
|
||||
{
|
||||
$ifNull: ["$$lastCheck", false],
|
||||
},
|
||||
{
|
||||
$subtract: [new Date(), "$$lastCheck.createdAt"],
|
||||
},
|
||||
0,
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
groupedChecks: "$groupedChecks",
|
||||
groupedAvgResponseTime: {
|
||||
$arrayElemAt: ["$groupAvgResponseTime", 0],
|
||||
},
|
||||
upChecks: {
|
||||
$arrayElemAt: ["$upChecks", 0],
|
||||
},
|
||||
groupedUpChecks: "$groupedUpChecks",
|
||||
downChecks: {
|
||||
$arrayElemAt: ["$downChecks", 0],
|
||||
},
|
||||
groupedDownChecks: "$groupedDownChecks",
|
||||
},
|
||||
},
|
||||
];
|
||||
};
|
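// Illustrative sketch, not part of the original file: the monitor module passes this builder
// to Check.aggregate together with a date window and a $dateToString format. The helper below
// is hypothetical and takes the check model as a parameter, since this file does not import it;
// the format string shown produces hourly buckets.
const runUptimeDetails = async (CheckModel, monitor) => {
	const dates = { start: new Date(Date.now() - 24 * 60 * 60 * 1000), end: new Date() };
	return CheckModel.aggregate(buildUptimeDetailsPipeline(monitor, dates, "%Y-%m-%dT%H:00:00Z"));
};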
||||
|
||||
const buildHardwareDetailsPipeline = (monitor, dates, dateString) => {
|
||||
return [
|
||||
{
|
||||
$match: {
|
||||
monitorId: monitor._id,
|
||||
createdAt: { $gte: dates.start, $lte: dates.end },
|
||||
},
|
||||
},
|
||||
{
|
||||
$sort: {
|
||||
createdAt: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
$facet: {
|
||||
aggregateData: [
|
||||
{
|
||||
$group: {
|
||||
_id: null,
|
||||
latestCheck: {
|
||||
$last: "$$ROOT",
|
||||
},
|
||||
totalChecks: {
|
||||
$sum: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
upChecks: [
|
||||
{
|
||||
$match: {
|
||||
status: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: null,
|
||||
totalChecks: {
|
||||
$sum: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
checks: [
|
||||
{
|
||||
$limit: 1,
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
diskCount: {
|
||||
$size: "$disk",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$lookup: {
|
||||
from: "hardwarechecks",
|
||||
let: {
|
||||
diskCount: "$diskCount",
|
||||
},
|
||||
pipeline: [
|
||||
{
|
||||
$match: {
|
||||
$expr: {
|
||||
$and: [
|
||||
{ $eq: ["$monitorId", monitor._id] },
|
||||
{ $gte: ["$createdAt", dates.start] },
|
||||
{ $lte: ["$createdAt", dates.end] },
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: {
|
||||
$dateToString: {
|
||||
format: dateString,
|
||||
date: "$createdAt",
|
||||
},
|
||||
},
|
||||
avgCpuUsage: {
|
||||
$avg: "$cpu.usage_percent",
|
||||
},
|
||||
avgMemoryUsage: {
|
||||
$avg: "$memory.usage_percent",
|
||||
},
|
||||
avgTemperatures: {
|
||||
$push: {
|
||||
$ifNull: ["$cpu.temperature", [0]],
|
||||
},
|
||||
},
|
||||
disks: {
|
||||
$push: "$disk",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
_id: 1,
|
||||
avgCpuUsage: 1,
|
||||
avgMemoryUsage: 1,
|
||||
avgTemperature: {
|
||||
$map: {
|
||||
input: {
|
||||
$range: [
|
||||
0,
|
||||
{
|
||||
$size: {
|
||||
// Handle null temperatures array
|
||||
$ifNull: [
|
||||
{ $arrayElemAt: ["$avgTemperatures", 0] },
|
||||
[0], // Default to single-element array if null
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
as: "index",
|
||||
in: {
|
||||
$avg: {
|
||||
$map: {
|
||||
input: "$avgTemperatures",
|
||||
as: "tempArray",
|
||||
in: {
|
||||
$ifNull: [
|
||||
{ $arrayElemAt: ["$$tempArray", "$$index"] },
|
||||
0, // Default to 0 if element is null
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
disks: {
|
||||
$map: {
|
||||
input: {
|
||||
$range: [0, "$$diskCount"],
|
||||
},
|
||||
as: "diskIndex",
|
||||
in: {
|
||||
name: {
|
||||
$concat: [
|
||||
"disk",
|
||||
{
|
||||
$toString: "$$diskIndex",
|
||||
},
|
||||
],
|
||||
},
|
||||
readSpeed: {
|
||||
$avg: {
|
||||
$map: {
|
||||
input: "$disks",
|
||||
as: "diskArray",
|
||||
in: {
|
||||
$arrayElemAt: [
|
||||
"$$diskArray.read_speed_bytes",
|
||||
"$$diskIndex",
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
writeSpeed: {
|
||||
$avg: {
|
||||
$map: {
|
||||
input: "$disks",
|
||||
as: "diskArray",
|
||||
in: {
|
||||
$arrayElemAt: [
|
||||
"$$diskArray.write_speed_bytes",
|
||||
"$$diskIndex",
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
totalBytes: {
|
||||
$avg: {
|
||||
$map: {
|
||||
input: "$disks",
|
||||
as: "diskArray",
|
||||
in: {
|
||||
$arrayElemAt: [
|
||||
"$$diskArray.total_bytes",
|
||||
"$$diskIndex",
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
freeBytes: {
|
||||
$avg: {
|
||||
$map: {
|
||||
input: "$disks",
|
||||
as: "diskArray",
|
||||
in: {
|
||||
$arrayElemAt: ["$$diskArray.free_bytes", "$$diskIndex"],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
usagePercent: {
|
||||
$avg: {
|
||||
$map: {
|
||||
input: "$disks",
|
||||
as: "diskArray",
|
||||
in: {
|
||||
$arrayElemAt: [
|
||||
"$$diskArray.usage_percent",
|
||||
"$$diskIndex",
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
as: "hourlyStats",
|
||||
},
|
||||
},
|
||||
{
|
||||
$unwind: "$hourlyStats",
|
||||
},
|
||||
{
|
||||
$replaceRoot: {
|
||||
newRoot: "$hourlyStats",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
aggregateData: {
|
||||
$arrayElemAt: ["$aggregateData", 0],
|
||||
},
|
||||
upChecks: {
|
||||
$arrayElemAt: ["$upChecks", 0],
|
||||
},
|
||||
checks: {
|
||||
$sortArray: {
|
||||
input: "$checks",
|
||||
sortBy: { _id: 1 },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
};
|
||||
|
||||
const buildDistributedUptimeDetailsPipeline = (monitor, dates, dateString) => {
|
||||
return [
|
||||
{
|
||||
$match: {
|
||||
monitorId: monitor._id,
|
||||
},
|
||||
},
|
||||
{
|
||||
$sort: {
|
||||
createdAt: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
$facet: {
|
||||
aggregateData: [
|
||||
{
|
||||
$group: {
|
||||
_id: null,
|
||||
avgResponseTime: {
|
||||
$avg: "$responseTime",
|
||||
},
|
||||
lastCheck: {
|
||||
$last: "$$ROOT",
|
||||
},
|
||||
totalChecks: {
|
||||
$sum: 1,
|
||||
},
|
||||
uptBurnt: {
|
||||
$sum: "$uptBurnt",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
// For the response time chart, should return checks for date window
|
||||
// Grouped by: {day: hour}, {week: day}, {month: day}
|
||||
groupedMapChecks: [
|
||||
{
|
||||
$match: {
|
||||
createdAt: { $gte: dates.start, $lte: dates.end },
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: {
|
||||
date: {
|
||||
$dateToString: {
|
||||
format: dateString,
|
||||
date: "$createdAt",
|
||||
},
|
||||
},
|
||||
city: "$city",
|
||||
lat: "$location.lat",
|
||||
lng: "$location.lng",
|
||||
},
|
||||
city: { $first: "$city" }, // Include the city in the grouped output
|
||||
lat: { $first: "$location.lat" },
|
||||
lng: { $first: "$location.lng" },
|
||||
avgResponseTime: {
|
||||
$avg: "$responseTime",
|
||||
},
|
||||
totalChecks: {
|
||||
$sum: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$sort: {
|
||||
"_id.date": 1,
|
||||
},
|
||||
},
|
||||
],
|
||||
groupedChecks: [
|
||||
{
|
||||
$match: {
|
||||
createdAt: { $gte: dates.start, $lte: dates.end },
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: {
|
||||
$dateToString: {
|
||||
format: dateString,
|
||||
date: "$createdAt",
|
||||
},
|
||||
},
|
||||
avgResponseTime: {
|
||||
$avg: "$responseTime",
|
||||
},
|
||||
totalChecks: {
|
||||
$sum: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$sort: {
|
||||
_id: 1,
|
||||
},
|
||||
},
|
||||
],
|
||||
// Average response time for the date window
|
||||
groupAvgResponseTime: [
|
||||
{
|
||||
$match: {
|
||||
createdAt: { $gte: dates.start, $lte: dates.end },
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: null,
|
||||
avgResponseTime: {
|
||||
$avg: "$responseTime",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
latestChecks: [
|
||||
{
|
||||
$sort: { createdAt: -1 }, // Sort by newest first
|
||||
},
|
||||
{
|
||||
$limit: 5, // Get only the first 5 documents
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
responseTime: 1,
|
||||
city: 1,
|
||||
countryCode: 1,
|
||||
uptBurnt: { $toString: "$uptBurnt" },
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
totalUptBurnt: {
|
||||
$toString: {
|
||||
$arrayElemAt: ["$aggregateData.uptBurnt", 0],
|
||||
},
|
||||
},
|
||||
avgResponseTime: {
|
||||
$arrayElemAt: ["$aggregateData.avgResponseTime", 0],
|
||||
},
|
||||
totalChecks: {
|
||||
$arrayElemAt: ["$aggregateData.totalChecks", 0],
|
||||
},
|
||||
latestResponseTime: {
|
||||
$arrayElemAt: ["$aggregateData.lastCheck.responseTime", 0],
|
||||
},
|
||||
timeSinceLastCheck: {
|
||||
$let: {
|
||||
vars: {
|
||||
lastCheck: {
|
||||
$arrayElemAt: ["$aggregateData.lastCheck", 0],
|
||||
},
|
||||
},
|
||||
in: {
|
||||
$cond: [
|
||||
{
|
||||
$ifNull: ["$$lastCheck", false],
|
||||
},
|
||||
{
|
||||
$subtract: [new Date(), "$$lastCheck.createdAt"],
|
||||
},
|
||||
0,
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
groupedMapChecks: "$groupedMapChecks",
|
||||
groupedChecks: "$groupedChecks",
|
||||
groupedAvgResponseTime: {
|
||||
$arrayElemAt: ["$groupAvgResponseTime", 0],
|
||||
},
|
||||
latestChecks: "$latestChecks",
|
||||
},
|
||||
},
|
||||
];
|
||||
};
|
||||
|
||||
export {
|
||||
buildUptimeDetailsPipeline,
|
||||
buildHardwareDetailsPipeline,
|
||||
buildDistributedUptimeDetailsPipeline,
|
||||
};
|
||||
@@ -1,56 +0,0 @@
|
||||
import Notification from "../../models/Notification.js";
|
||||
const SERVICE_NAME = "notificationModule";
|
||||
/**
|
||||
* Creates a new notification.
|
||||
* @param {Object} notificationData - The data for the new notification.
|
||||
* @param {mongoose.Types.ObjectId} notificationData.monitorId - The ID of the monitor.
|
||||
* @param {string} notificationData.type - The type of the notification (e.g., "email", "sms").
|
||||
* @param {string} [notificationData.address] - The address for the notification (if applicable).
|
||||
* @param {string} [notificationData.phone] - The phone number for the notification (if applicable).
|
||||
* @returns {Promise<Object>} The created notification.
|
||||
* @throws Will throw an error if the notification cannot be created.
|
||||
*/
|
||||
const createNotification = async (notificationData) => {
|
||||
try {
|
||||
const notification = await new Notification({ ...notificationData }).save();
|
||||
return notification;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "createNotification";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieves notifications by monitor ID.
|
||||
* @param {mongoose.Types.ObjectId} monitorId - The ID of the monitor.
|
||||
* @returns {Promise<Array<Object>>} An array of notifications.
|
||||
* @throws Will throw an error if the notifications cannot be retrieved.
|
||||
*/
|
||||
const getNotificationsByMonitorId = async (monitorId) => {
|
||||
try {
|
||||
const notifications = await Notification.find({ monitorId });
|
||||
return notifications;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getNotificationsByMonitorId";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const deleteNotificationsByMonitorId = async (monitorId) => {
|
||||
try {
|
||||
const result = await Notification.deleteMany({ monitorId });
|
||||
return result.deletedCount;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteNotificationsByMonitorId";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export {
|
||||
createNotification,
|
||||
getNotificationsByMonitorId,
|
||||
deleteNotificationsByMonitorId,
|
||||
};
|
||||
@@ -1,47 +0,0 @@
|
||||
import PageSpeedCheck from "../../models/PageSpeedCheck.js";
|
||||
const SERVICE_NAME = "pageSpeedCheckModule";
|
||||
/**
|
||||
* Create a PageSpeed check for a monitor
|
||||
* @async
|
||||
* @param {Object} pageSpeedCheckData
|
||||
* @param {string} pageSpeedCheckData.monitorId
|
||||
* @param {number} pageSpeedCheckData.accessibility
|
||||
* @param {number} pageSpeedCheckData.bestPractices
|
||||
* @param {number} pageSpeedCheckData.seo
|
||||
* @param {number} pageSpeedCheckData.performance
|
||||
* @returns {Promise<PageSpeedCheck>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const createPageSpeedCheck = async (pageSpeedCheckData) => {
|
||||
try {
|
||||
const pageSpeedCheck = await new PageSpeedCheck({
|
||||
...pageSpeedCheckData,
|
||||
}).save();
|
||||
return pageSpeedCheck;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "createPageSpeedCheck";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete all PageSpeed checks for a monitor
|
||||
* @async
|
||||
* @param {string} monitorId
|
||||
* @returns {number}
|
||||
* @throws {Error}
|
||||
*/
|
||||
|
||||
const deletePageSpeedChecksByMonitorId = async (monitorId) => {
|
||||
try {
|
||||
const result = await PageSpeedCheck.deleteMany({ monitorId });
|
||||
return result.deletedCount;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deletePageSpeedChecksByMonitorId";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export { createPageSpeedCheck, deletePageSpeedChecksByMonitorId };
|
||||
@@ -1,88 +0,0 @@
|
||||
import UserModel from "../../models/User.js";
|
||||
import RecoveryToken from "../../models/RecoveryToken.js";
|
||||
import crypto from "crypto";
|
||||
import serviceRegistry from "../../../service/serviceRegistry.js";
|
||||
import StringService from "../../../service/stringService.js";
|
||||
|
||||
const SERVICE_NAME = "recoveryModule";
|
||||
|
||||
/**
|
||||
* Request a recovery token
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @returns {Promise<UserModel>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const requestRecoveryToken = async (req, res) => {
|
||||
try {
|
||||
// Delete any existing tokens
|
||||
await RecoveryToken.deleteMany({ email: req.body.email });
|
||||
let recoveryToken = new RecoveryToken({
|
||||
email: req.body.email,
|
||||
token: crypto.randomBytes(32).toString("hex"),
|
||||
});
|
||||
await recoveryToken.save();
|
||||
return recoveryToken;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "requestRecoveryToken";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const validateRecoveryToken = async (req, res) => {
|
||||
const stringService = serviceRegistry.get(StringService.SERVICE_NAME);
|
||||
try {
|
||||
const candidateToken = req.body.recoveryToken;
|
||||
const recoveryToken = await RecoveryToken.findOne({
|
||||
token: candidateToken,
|
||||
});
|
||||
if (recoveryToken !== null) {
|
||||
return recoveryToken;
|
||||
} else {
|
||||
throw new Error(stringService.dbTokenNotFound);
|
||||
}
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "validateRecoveryToken";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const resetPassword = async (req, res) => {
|
||||
const stringService = serviceRegistry.get(StringService.SERVICE_NAME);
|
||||
try {
|
||||
const newPassword = req.body.password;
|
||||
|
||||
// Validate token again
|
||||
const recoveryToken = await validateRecoveryToken(req, res);
|
||||
const user = await UserModel.findOne({ email: recoveryToken.email });
|
||||
|
||||
if (user === null) {
|
||||
throw new Error(stringService.dbUserNotFound);
|
||||
}
|
||||
|
||||
const match = await user.comparePassword(newPassword);
|
||||
if (match === true) {
|
||||
throw new Error(stringService.dbResetPasswordBadMatch);
|
||||
}
|
||||
|
||||
user.password = newPassword;
|
||||
await user.save();
|
||||
await RecoveryToken.deleteMany({ email: recoveryToken.email });
|
||||
// Fetch the user again without the password
|
||||
const userWithoutPassword = await UserModel.findOne({
|
||||
email: recoveryToken.email,
|
||||
})
|
||||
.select("-password")
|
||||
.select("-profileImage");
|
||||
return userWithoutPassword;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "resetPassword";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export { requestRecoveryToken, validateRecoveryToken, resetPassword };
|
||||
@@ -1,30 +0,0 @@
|
||||
import AppSettings from "../../models/AppSettings.js";
|
||||
const SERVICE_NAME = "SettingsModule";
|
||||
|
||||
const getAppSettings = async () => {
|
||||
try {
|
||||
const settings = await AppSettings.findOne();
|
||||
return settings;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getSettings";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const updateAppSettings = async (newSettings) => {
|
||||
try {
|
||||
const settings = await AppSettings.findOneAndUpdate(
|
||||
{},
|
||||
{ $set: newSettings },
|
||||
{ new: true }
|
||||
);
|
||||
return settings;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "updateAppSettings";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export { getAppSettings, updateAppSettings };
|
||||
@@ -1,211 +0,0 @@
|
||||
import StatusPage from "../../models/StatusPage.js";
|
||||
import { NormalizeData } from "../../../utils/dataUtils.js";
|
||||
import ServiceRegistry from "../../../service/serviceRegistry.js";
|
||||
import StringService from "../../../service/stringService.js";
|
||||
|
||||
const SERVICE_NAME = "statusPageModule";
|
||||
|
||||
const createStatusPage = async (statusPageData, image) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
|
||||
try {
|
||||
const statusPage = new StatusPage({ ...statusPageData });
|
||||
if (image) {
|
||||
statusPage.logo = {
|
||||
data: image.buffer,
|
||||
contentType: image.mimetype,
|
||||
};
|
||||
}
|
||||
await statusPage.save();
|
||||
return statusPage;
|
||||
} catch (error) {
|
||||
if (error?.code === 11000) {
|
||||
// Handle duplicate URL errors
|
||||
error.status = 400;
|
||||
error.message = stringService.statusPageUrlNotUnique;
|
||||
}
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "createStatusPage";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const updateStatusPage = async (statusPageData, image) => {
|
||||
try {
|
||||
if (image) {
|
||||
statusPageData.logo = {
|
||||
data: image.buffer,
|
||||
contentType: image.mimetype,
|
||||
};
|
||||
}
|
||||
const statusPage = await StatusPage.findOneAndUpdate(
|
||||
{ url: statusPageData.url },
|
||||
statusPageData,
|
||||
{
|
||||
new: true,
|
||||
}
|
||||
);
|
||||
|
||||
return statusPage;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "updateStatusPage";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const getStatusPageByUrl = async (url, type) => {
|
||||
try {
|
||||
if (type === "distributed") {
|
||||
const statusPage = await StatusPage.aggregate([{ $match: { url } }]);
|
||||
return statusPage[0];
|
||||
} else {
|
||||
return getStatusPage(url);
|
||||
}
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getStatusPageByUrl";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const getStatusPagesByTeamId = async (teamId) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
|
||||
try {
|
||||
const statusPages = await StatusPage.find({ teamId });
|
||||
return statusPages;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getStatusPagesByTeamId";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const getStatusPage = async (url) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
|
||||
try {
|
||||
const statusPageQuery = await StatusPage.aggregate([
|
||||
{ $match: { url: url } },
|
||||
{
|
||||
$set: {
|
||||
originalMonitors: "$monitors",
|
||||
},
|
||||
},
|
||||
{
|
||||
$lookup: {
|
||||
from: "monitors",
|
||||
localField: "monitors",
|
||||
foreignField: "_id",
|
||||
as: "monitors",
|
||||
},
|
||||
},
|
||||
{
|
||||
$unwind: "$monitors",
|
||||
},
|
||||
{
|
||||
$lookup: {
|
||||
from: "checks",
|
||||
let: { monitorId: "$monitors._id" },
|
||||
pipeline: [
|
||||
{
|
||||
$match: {
|
||||
$expr: { $eq: ["$monitorId", "$$monitorId"] },
|
||||
},
|
||||
},
|
||||
{ $sort: { createdAt: -1 } },
|
||||
{ $limit: 25 },
|
||||
],
|
||||
as: "monitors.checks",
|
||||
},
|
||||
},
|
||||
{
|
||||
$addFields: {
|
||||
"monitors.orderIndex": {
|
||||
$indexOfArray: ["$originalMonitors", "$monitors._id"],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: "$_id",
|
||||
statusPage: { $first: "$$ROOT" },
|
||||
monitors: { $push: "$monitors" },
|
||||
},
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
statusPage: {
|
||||
_id: 1,
|
||||
color: 1,
|
||||
companyName: 1,
|
||||
isPublished: 1,
|
||||
logo: 1,
|
||||
originalMonitors: 1,
|
||||
showCharts: 1,
|
||||
showUptimePercentage: 1,
|
||||
timezone: 1,
|
||||
url: 1,
|
||||
},
|
||||
monitors: {
|
||||
$sortArray: {
|
||||
input: "$monitors",
|
||||
sortBy: { orderIndex: 1 },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
]);
|
||||
if (!statusPageQuery.length) {
|
||||
const error = new Error(stringService.statusPageNotFound);
|
||||
error.status = 404;
|
||||
throw error;
|
||||
}
|
||||
|
||||
const { statusPage, monitors } = statusPageQuery[0];
|
||||
|
||||
const normalizedMonitors = monitors.map((monitor) => {
|
||||
return {
|
||||
...monitor,
|
||||
checks: NormalizeData(monitor.checks, 10, 100),
|
||||
};
|
||||
});
|
||||
|
||||
return { statusPage, monitors: normalizedMonitors };
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getStatusPageByUrl";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const deleteStatusPage = async (url) => {
|
||||
try {
|
||||
await StatusPage.deleteOne({ url });
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteStatusPage";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const deleteStatusPagesByMonitorId = async (monitorId) => {
|
||||
try {
|
||||
await StatusPage.deleteMany({ monitors: { $in: [monitorId] } });
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteStatusPageByMonitorId";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export {
|
||||
createStatusPage,
|
||||
updateStatusPage,
|
||||
getStatusPagesByTeamId,
|
||||
getStatusPage,
|
||||
getStatusPageByUrl,
|
||||
deleteStatusPage,
|
||||
deleteStatusPagesByMonitorId,
|
||||
};
|
||||
@@ -1,232 +0,0 @@
|
||||
import UserModel from "../../models/User.js";
|
||||
import TeamModel from "../../models/Team.js";
|
||||
import { GenerateAvatarImage } from "../../../utils/imageProcessing.js";
|
||||
|
||||
const DUPLICATE_KEY_CODE = 11000; // MongoDB error code for duplicate key
|
||||
import { ParseBoolean } from "../../../utils/utils.js";
|
||||
import ServiceRegistry from "../../../service/serviceRegistry.js";
|
||||
import StringService from "../../../service/stringService.js";
|
||||
const SERVICE_NAME = "userModule";
|
||||
|
||||
/**
|
||||
* Insert a User
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @returns {Promise<UserModel>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const insertUser = async (
|
||||
userData,
|
||||
imageFile,
|
||||
generateAvatarImage = GenerateAvatarImage
|
||||
) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
try {
|
||||
if (imageFile) {
|
||||
// 1. Save the full size image
|
||||
userData.profileImage = {
|
||||
data: imageFile.buffer,
|
||||
contentType: imageFile.mimetype,
|
||||
};
|
||||
|
||||
// 2. Get the avatar sized image
|
||||
const avatar = await generateAvatarImage(imageFile);
|
||||
userData.avatarImage = avatar;
|
||||
}
|
||||
|
||||
// Handle creating team if superadmin
|
||||
if (userData.role.includes("superadmin")) {
|
||||
const team = new TeamModel({
|
||||
email: userData.email,
|
||||
});
|
||||
userData.teamId = team._id;
|
||||
userData.checkTTL = 60 * 60 * 24 * 30;
|
||||
await team.save();
|
||||
}
|
||||
|
||||
const newUser = new UserModel(userData);
|
||||
await newUser.save();
|
||||
return await UserModel.findOne({ _id: newUser._id })
|
||||
.select("-password")
|
||||
.select("-profileImage"); // .select() doesn't work with create, need to save then find
|
||||
} catch (error) {
|
||||
if (error.code === DUPLICATE_KEY_CODE) {
|
||||
error.message = stringService.dbUserExists;
|
||||
}
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "insertUser";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Get User by Email
|
||||
* Gets a user by Email. Not sure if we'll ever need this except for login.
|
||||
* If not needed except for login, we can move password comparison here
|
||||
* Throws error if user not found
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @returns {Promise<UserModel>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const getUserByEmail = async (email) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
|
||||
try {
|
||||
// Need the password to be able to compare, removed .select()
|
||||
// We can strip the hash before returning the user
|
||||
const user = await UserModel.findOne({ email: email }).select("-profileImage");
|
||||
if (!user) {
|
||||
throw new Error(stringService.dbUserNotFound);
|
||||
}
|
||||
return user;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getUserByEmail";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Update a user by ID
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @returns {Promise<UserModel>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
|
||||
const updateUser = async (
|
||||
req,
|
||||
res,
|
||||
parseBoolean = ParseBoolean,
|
||||
generateAvatarImage = GenerateAvatarImage
|
||||
) => {
|
||||
const candidateUserId = req.params.userId;
|
||||
try {
|
||||
const candidateUser = { ...req.body };
|
||||
// ******************************************
|
||||
// Handle profile image
|
||||
// ******************************************
|
||||
|
||||
if (parseBoolean(candidateUser.deleteProfileImage) === true) {
|
||||
candidateUser.profileImage = null;
|
||||
candidateUser.avatarImage = null;
|
||||
} else if (req.file) {
|
||||
// 1. Save the full size image
|
||||
candidateUser.profileImage = {
|
||||
data: req.file.buffer,
|
||||
contentType: req.file.mimetype,
|
||||
};
|
||||
|
||||
// 2. Get the avatar sized image
|
||||
const avatar = await generateAvatarImage(req.file);
|
||||
candidateUser.avatarImage = avatar;
|
||||
}
|
||||
|
||||
// ******************************************
|
||||
// End handling profile image
|
||||
// ******************************************
|
||||
|
||||
const updatedUser = await UserModel.findByIdAndUpdate(
|
||||
candidateUserId,
|
||||
candidateUser,
|
||||
{ new: true } // Returns updated user instead of pre-update user
|
||||
)
|
||||
.select("-password")
|
||||
.select("-profileImage");
|
||||
return updatedUser;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "updateUser";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete a user by ID
|
||||
* @async
|
||||
* @param {Express.Request} req
|
||||
* @param {Express.Response} res
|
||||
* @returns {Promise<UserModel>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const deleteUser = async (userId) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
|
||||
try {
|
||||
const deletedUser = await UserModel.findByIdAndDelete(userId);
|
||||
if (!deletedUser) {
|
||||
throw new Error(stringService.dbUserNotFound);
|
||||
}
|
||||
return deletedUser;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteUser";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * Delete a team by ID
 * @async
 * @param {string} teamId - ID of the team to delete
 * @returns {Promise<boolean>}
 * @throws {Error}
 */
|
||||
const deleteTeam = async (teamId) => {
|
||||
try {
|
||||
await TeamModel.findByIdAndDelete(teamId);
|
||||
return true;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteTeam";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const deleteAllOtherUsers = async () => {
|
||||
try {
|
||||
await UserModel.deleteMany({ role: { $ne: "superadmin" } });
|
||||
return true;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "deleteAllOtherUsers";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const getAllUsers = async (req, res) => {
|
||||
try {
|
||||
const users = await UserModel.find().select("-password").select("-profileImage");
|
||||
return users;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "getAllUsers";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const logoutUser = async (userId) => {
|
||||
try {
|
||||
await UserModel.updateOne({ _id: userId }, { $unset: { authToken: "" } });
|
||||
return true;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "logoutUser";
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export {
|
||||
insertUser,
|
||||
getUserByEmail,
|
||||
updateUser,
|
||||
deleteUser,
|
||||
deleteTeam,
|
||||
deleteAllOtherUsers,
|
||||
getAllUsers,
|
||||
logoutUser,
|
||||
};
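A minimal usage sketch of the helpers above (illustrative only: the import path is hypothetical, and in the application these functions are reached through the MongoDB module injected into controllers):

import { getUserByEmail, logoutUser } from "./userModule.js";

const logOutByEmail = async (email) => {
	// getUserByEmail throws if no user exists and intentionally keeps the password hash
	const user = await getUserByEmail(email);
	// logoutUser clears the stored auth token for that user
	await logoutUser(user._id);
	return user._id;
};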
|
||||
@@ -1,57 +0,0 @@
|
||||
import Monitor from "../../models/Monitor.js";
|
||||
import Check from "../../models/Check.js";
|
||||
|
||||
const generateRandomUrl = () => {
|
||||
const domains = ["example.com", "test.org", "demo.net", "sample.io", "mock.dev"];
|
||||
const paths = ["api", "status", "health", "ping", "check"];
|
||||
return `https://${domains[Math.floor(Math.random() * domains.length)]}/${paths[Math.floor(Math.random() * paths.length)]}`;
|
||||
};
|
||||
|
||||
const generateChecks = (monitorId, teamId, count) => {
|
||||
const checks = [];
|
||||
const endTime = new Date(Date.now() - 10 * 60 * 1000); // 10 minutes ago
|
||||
const startTime = new Date(endTime - count * 60 * 1000); // count minutes before endTime
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
const timestamp = new Date(startTime.getTime() + i * 60 * 1000);
|
||||
const status = Math.random() > 0.05; // 95% chance of being up
|
||||
|
||||
checks.push({
|
||||
monitorId,
|
||||
teamId,
|
||||
status,
|
||||
responseTime: Math.floor(Math.random() * 1000), // Random response time between 0-1000ms
|
||||
createdAt: timestamp,
|
||||
updatedAt: timestamp,
|
||||
});
|
||||
}
|
||||
|
||||
return checks;
|
||||
};
|
||||
|
||||
const seedDb = async (userId, teamId) => {
|
||||
try {
|
||||
console.log("Deleting all monitors and checks");
|
||||
await Monitor.deleteMany({});
|
||||
await Check.deleteMany({});
|
||||
console.log("Adding monitors");
|
||||
for (let i = 0; i < 300; i++) {
|
||||
const monitor = await Monitor.create({
|
||||
name: `Monitor ${i}`,
|
||||
url: generateRandomUrl(),
|
||||
type: "http",
|
||||
userId,
|
||||
teamId,
|
||||
interval: 60000,
|
||||
active: false,
|
||||
});
|
||||
console.log(`Adding monitor and checks for monitor ${i}`);
|
||||
const checks = generateChecks(monitor._id, teamId, 10000);
|
||||
await Check.insertMany(checks);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
}
|
||||
};
|
||||
|
||||
export default seedDb;
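A minimal sketch of running the seeder as a standalone script, assuming a reachable MongoDB instance (the connection string and IDs below are placeholders, not project defaults):

import mongoose from "mongoose";
import seedDb from "./seedDb.js";

// Connect, seed 300 inactive monitors with 10,000 checks each, then disconnect
await mongoose.connect("mongodb://localhost:27017/uptime_db");
await seedDb(new mongoose.Types.ObjectId(), new mongoose.Types.ObjectId());
await mongoose.disconnect();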
|
||||
@@ -1,30 +0,0 @@
|
||||
import globals from "globals";
|
||||
import pluginJs from "@eslint/js";
|
||||
import mochaPlugin from "eslint-plugin-mocha";
|
||||
|
||||
/*
|
||||
Please do not forget to look at the latest eslint configurations and rules.
|
||||
ESlint v9 configuration is different than v8.
|
||||
"https://eslint.org/docs/latest/use/configure/"
|
||||
*/
|
||||
|
||||
/** @type {import('eslint').Linter.Config[]} */
|
||||
export default [
|
||||
{
|
||||
languageOptions: {
|
||||
globals: {
|
||||
...globals.node, // Add Node.js globals
|
||||
...globals.chai, // Add Chai globals
|
||||
},
|
||||
ecmaVersion: 2023,
|
||||
sourceType: "module",
|
||||
},
|
||||
},
|
||||
pluginJs.configs.recommended, // Core JS rules
|
||||
mochaPlugin.configs.flat.recommended, // Mocha rules
|
||||
{
|
||||
rules: {
|
||||
"mocha/max-top-level-suites": "warn", // Warn if there are too many top-level suites instead of failing
|
||||
},
|
||||
},
|
||||
];
|
||||
320
Server/index.js
320
Server/index.js
@@ -1,320 +0,0 @@
|
||||
import path from "path";
|
||||
import fs from "fs";
|
||||
import swaggerUi from "swagger-ui-express";
|
||||
|
||||
import express from "express";
|
||||
import helmet from "helmet";
|
||||
import cors from "cors";
|
||||
import logger from "./utils/logger.js";
|
||||
import { verifyJWT } from "./middleware/verifyJWT.js";
|
||||
import { handleErrors } from "./middleware/handleErrors.js";
|
||||
import { responseHandler } from "./middleware/responseHandler.js";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
import AuthRoutes from "./routes/authRoute.js";
|
||||
import AuthController from "./controllers/authController.js";
|
||||
|
||||
import InviteRoutes from "./routes/inviteRoute.js";
|
||||
import InviteController from "./controllers/inviteController.js";
|
||||
|
||||
import MonitorRoutes from "./routes/monitorRoute.js";
|
||||
import MonitorController from "./controllers/monitorController.js";
|
||||
|
||||
import CheckRoutes from "./routes/checkRoute.js";
|
||||
import CheckController from "./controllers/checkController.js";
|
||||
|
||||
import MaintenanceWindowRoutes from "./routes/maintenanceWindowRoute.js";
|
||||
import MaintenanceWindowController from "./controllers/maintenanceWindowController.js";
|
||||
|
||||
import SettingsRoutes from "./routes/settingsRoute.js";
|
||||
import SettingsController from "./controllers/settingsController.js";
|
||||
|
||||
import StatusPageRoutes from "./routes/statusPageRoute.js";
|
||||
import StatusPageController from "./controllers/statusPageController.js";
|
||||
|
||||
import QueueRoutes from "./routes/queueRoute.js";
|
||||
import QueueController from "./controllers/queueController.js";
|
||||
|
||||
import DistributedUptimeRoutes from "./routes/distributedUptimeRoute.js";
|
||||
import DistributedUptimeController from "./controllers/distributedUptimeController.js";
|
||||
|
||||
import NotificationRoutes from "./routes/notificationRoute.js";
|
||||
|
||||
import NotificationController from "./controllers/notificationController.js";
|
||||
|
||||
//JobQueue service and dependencies
|
||||
import JobQueue from "./service/jobQueue.js";
|
||||
import { Queue, Worker } from "bullmq";
|
||||
|
||||
//Network service and dependencies
|
||||
import NetworkService from "./service/networkService.js";
|
||||
import axios from "axios";
|
||||
import ping from "ping";
|
||||
import http from "http";
|
||||
import Docker from "dockerode";
|
||||
import net from "net";
|
||||
// Email service and dependencies
|
||||
import EmailService from "./service/emailService.js";
|
||||
import nodemailer from "nodemailer";
|
||||
import pkg from "handlebars";
|
||||
const { compile } = pkg;
|
||||
import mjml2html from "mjml";
|
||||
|
||||
// Settings Service and dependencies
|
||||
import SettingsService from "./service/settingsService.js";
|
||||
import AppSettings from "./db/models/AppSettings.js";
|
||||
|
||||
// Status Service and dependencies
|
||||
import StatusService from "./service/statusService.js";
|
||||
|
||||
// Notification Service and dependencies
|
||||
import NotificationService from "./service/notificationService.js";
|
||||
|
||||
// Service Registry
|
||||
import ServiceRegistry from "./service/serviceRegistry.js";
|
||||
|
||||
import MongoDB from "./db/mongo/MongoDB.js";
|
||||
|
||||
import IORedis from "ioredis";
|
||||
|
||||
import TranslationService from "./service/translationService.js";
|
||||
import languageMiddleware from "./middleware/languageMiddleware.js";
|
||||
import StringService from "./service/stringService.js";
|
||||
|
||||
const SERVICE_NAME = "Server";
|
||||
const SHUTDOWN_TIMEOUT = 1000;
|
||||
let isShuttingDown = false;
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
const openApiSpec = JSON.parse(
|
||||
fs.readFileSync(path.join(__dirname, "openapi.json"), "utf8")
|
||||
);
|
||||
|
||||
let server;
|
||||
|
||||
const PORT = 5000;
|
||||
|
||||
const shutdown = async () => {
|
||||
if (isShuttingDown) {
|
||||
return;
|
||||
}
|
||||
isShuttingDown = true;
|
||||
logger.info({ message: "Attempting graceful shutdown" });
|
||||
setTimeout(async () => {
|
||||
logger.error({
|
||||
message: "Could not shut down in time, forcing shutdown",
|
||||
service: SERVICE_NAME,
|
||||
method: "shutdown",
|
||||
});
|
||||
// flush Redis
|
||||
const settings =
|
||||
ServiceRegistry.get(SettingsService.SERVICE_NAME).getSettings() || {};
|
||||
|
||||
const { redisHost = "127.0.0.1", redisPort = 6379 } = settings;
|
||||
const redis = new IORedis({
|
||||
host: redisHost,
|
||||
port: redisPort,
|
||||
});
|
||||
logger.info({ message: "Flushing Redis" });
|
||||
await redis.flushall();
|
||||
logger.info({ message: "Redis flushed" });
|
||||
process.exit(1);
|
||||
}, SHUTDOWN_TIMEOUT);
|
||||
try {
|
||||
server.close();
|
||||
await ServiceRegistry.get(JobQueue.SERVICE_NAME).obliterate();
|
||||
await ServiceRegistry.get(MongoDB.SERVICE_NAME).disconnect();
|
||||
logger.info({ message: "Graceful shutdown complete" });
|
||||
process.exit(0);
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
message: error.message,
|
||||
service: SERVICE_NAME,
|
||||
method: "shutdown",
|
||||
stack: error.stack,
|
||||
});
|
||||
}
|
||||
};
|
||||
// Need to wrap server setup in a function to handle async nature of JobQueue
|
||||
const startApp = async () => {
|
||||
const app = express();
|
||||
|
||||
// Create and Register Primary services
|
||||
const translationService = new TranslationService(logger);
|
||||
const stringService = new StringService(translationService);
|
||||
ServiceRegistry.register(StringService.SERVICE_NAME, stringService);
|
||||
|
||||
// Create DB
|
||||
const db = new MongoDB();
|
||||
await db.connect();
|
||||
|
||||
// Create services
|
||||
const networkService = new NetworkService(
|
||||
axios,
|
||||
ping,
|
||||
logger,
|
||||
http,
|
||||
Docker,
|
||||
net,
|
||||
stringService
|
||||
);
|
||||
const settingsService = new SettingsService(AppSettings);
|
||||
await settingsService.loadSettings();
|
||||
const emailService = new EmailService(
|
||||
settingsService,
|
||||
fs,
|
||||
path,
|
||||
compile,
|
||||
mjml2html,
|
||||
nodemailer,
|
||||
logger
|
||||
);
|
||||
const statusService = new StatusService(db, logger);
|
||||
const notificationService = new NotificationService(emailService, db, logger, networkService, stringService);
|
||||
|
||||
const jobQueue = new JobQueue(
|
||||
db,
|
||||
statusService,
|
||||
networkService,
|
||||
notificationService,
|
||||
settingsService,
|
||||
stringService,
|
||||
logger,
|
||||
Queue,
|
||||
Worker
|
||||
);
|
||||
|
||||
// Register services
|
||||
ServiceRegistry.register(JobQueue.SERVICE_NAME, jobQueue);
|
||||
ServiceRegistry.register(MongoDB.SERVICE_NAME, db);
|
||||
ServiceRegistry.register(SettingsService.SERVICE_NAME, settingsService);
|
||||
ServiceRegistry.register(EmailService.SERVICE_NAME, emailService);
|
||||
ServiceRegistry.register(NetworkService.SERVICE_NAME, networkService);
|
||||
ServiceRegistry.register(StatusService.SERVICE_NAME, statusService);
|
||||
ServiceRegistry.register(NotificationService.SERVICE_NAME, notificationService);
|
||||
ServiceRegistry.register(TranslationService.SERVICE_NAME, translationService);
|
||||
|
||||
await translationService.initialize();
|
||||
|
||||
server = app.listen(PORT, () => {
|
||||
logger.info({ message: `server started on port:${PORT}` });
|
||||
});
|
||||
|
||||
process.on("SIGUSR2", shutdown);
|
||||
process.on("SIGINT", shutdown);
|
||||
process.on("SIGTERM", shutdown);
|
||||
|
||||
//Create controllers
|
||||
const authController = new AuthController(
|
||||
ServiceRegistry.get(MongoDB.SERVICE_NAME),
|
||||
ServiceRegistry.get(SettingsService.SERVICE_NAME),
|
||||
ServiceRegistry.get(EmailService.SERVICE_NAME),
|
||||
ServiceRegistry.get(JobQueue.SERVICE_NAME),
|
||||
ServiceRegistry.get(StringService.SERVICE_NAME)
|
||||
);
|
||||
|
||||
const monitorController = new MonitorController(
|
||||
ServiceRegistry.get(MongoDB.SERVICE_NAME),
|
||||
ServiceRegistry.get(SettingsService.SERVICE_NAME),
|
||||
ServiceRegistry.get(JobQueue.SERVICE_NAME),
|
||||
ServiceRegistry.get(StringService.SERVICE_NAME)
|
||||
);
|
||||
|
||||
const settingsController = new SettingsController(
|
||||
ServiceRegistry.get(MongoDB.SERVICE_NAME),
|
||||
ServiceRegistry.get(SettingsService.SERVICE_NAME),
|
||||
ServiceRegistry.get(StringService.SERVICE_NAME)
|
||||
);
|
||||
|
||||
const checkController = new CheckController(
|
||||
ServiceRegistry.get(MongoDB.SERVICE_NAME),
|
||||
ServiceRegistry.get(SettingsService.SERVICE_NAME),
|
||||
ServiceRegistry.get(StringService.SERVICE_NAME)
|
||||
);
|
||||
|
||||
const inviteController = new InviteController(
|
||||
ServiceRegistry.get(MongoDB.SERVICE_NAME),
|
||||
ServiceRegistry.get(SettingsService.SERVICE_NAME),
|
||||
ServiceRegistry.get(EmailService.SERVICE_NAME),
|
||||
ServiceRegistry.get(StringService.SERVICE_NAME)
|
||||
);
|
||||
|
||||
const maintenanceWindowController = new MaintenanceWindowController(
|
||||
ServiceRegistry.get(MongoDB.SERVICE_NAME),
|
||||
ServiceRegistry.get(SettingsService.SERVICE_NAME),
|
||||
ServiceRegistry.get(StringService.SERVICE_NAME)
|
||||
);
|
||||
|
||||
const queueController = new QueueController(
|
||||
ServiceRegistry.get(JobQueue.SERVICE_NAME),
|
||||
ServiceRegistry.get(StringService.SERVICE_NAME)
|
||||
);
|
||||
|
||||
const statusPageController = new StatusPageController(
|
||||
ServiceRegistry.get(MongoDB.SERVICE_NAME),
|
||||
ServiceRegistry.get(StringService.SERVICE_NAME)
|
||||
);
|
||||
|
||||
const notificationController = new NotificationController(
|
||||
ServiceRegistry.get(NotificationService.SERVICE_NAME),
|
||||
ServiceRegistry.get(StringService.SERVICE_NAME)
|
||||
);
|
||||
|
||||
const distributedUptimeController = new DistributedUptimeController(
|
||||
ServiceRegistry.get(MongoDB.SERVICE_NAME),
|
||||
http,
|
||||
ServiceRegistry.get(StatusService.SERVICE_NAME)
|
||||
);
|
||||
|
||||
//Create routes
|
||||
const authRoutes = new AuthRoutes(authController);
|
||||
const monitorRoutes = new MonitorRoutes(monitorController);
|
||||
const settingsRoutes = new SettingsRoutes(settingsController);
|
||||
const checkRoutes = new CheckRoutes(checkController);
|
||||
const inviteRoutes = new InviteRoutes(inviteController);
|
||||
const maintenanceWindowRoutes = new MaintenanceWindowRoutes(
|
||||
maintenanceWindowController
|
||||
);
|
||||
const queueRoutes = new QueueRoutes(queueController);
|
||||
const statusPageRoutes = new StatusPageRoutes(statusPageController);
|
||||
const distributedUptimeRoutes = new DistributedUptimeRoutes(
|
||||
distributedUptimeController
|
||||
);
|
||||
|
||||
const notificationRoutes = new NotificationRoutes(notificationController);
|
||||
|
||||
// Init job queue
|
||||
await jobQueue.initJobQueue();
|
||||
// Middleware
|
||||
app.use(cors());
|
||||
app.use(express.json());
|
||||
app.use(helmet());
|
||||
app.use(languageMiddleware(stringService, translationService));
|
||||
// Swagger UI
|
||||
app.use("/api-docs", swaggerUi.serve, swaggerUi.setup(openApiSpec));
|
||||
|
||||
//routes
|
||||
app.use(responseHandler);
|
||||
|
||||
app.use("/api/v1/auth", authRoutes.getRouter());
|
||||
app.use("/api/v1/settings", verifyJWT, settingsRoutes.getRouter());
|
||||
app.use("/api/v1/invite", inviteRoutes.getRouter());
|
||||
app.use("/api/v1/monitors", verifyJWT, monitorRoutes.getRouter());
|
||||
app.use("/api/v1/checks", verifyJWT, checkRoutes.getRouter());
|
||||
app.use("/api/v1/maintenance-window", verifyJWT, maintenanceWindowRoutes.getRouter());
|
||||
app.use("/api/v1/queue", verifyJWT, queueRoutes.getRouter());
|
||||
app.use("/api/v1/distributed-uptime", distributedUptimeRoutes.getRouter());
|
||||
app.use("/api/v1/status-page", statusPageRoutes.getRouter());
|
||||
app.use("/api/v1/notifications", verifyJWT, notificationRoutes.getRouter());
|
||||
app.use(handleErrors);
|
||||
};
|
||||
|
||||
startApp().catch((error) => {
|
||||
logger.error({
|
||||
message: error.message,
|
||||
service: SERVICE_NAME,
|
||||
method: "startApp",
|
||||
stack: error.stack,
|
||||
});
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,154 +0,0 @@
|
||||
{
|
||||
"dontHaveAccount": "Don't have account",
|
||||
"email": "E-mail",
|
||||
"forgotPassword": "Forgot Password",
|
||||
"password": "password",
|
||||
"signUp": "Sign up",
|
||||
"submit": "Submit",
|
||||
"title": "Title",
|
||||
"continue": "Continue",
|
||||
"enterEmail": "Enter your email",
|
||||
"authLoginTitle": "Log In",
|
||||
"authLoginEnterPassword": "Enter your password",
|
||||
"commonPassword": "Password",
|
||||
"commonBack": "Back",
|
||||
"authForgotPasswordTitle": "Forgot password?",
|
||||
"authForgotPasswordResetPassword": "Reset password",
|
||||
"createPassword": "Create your password",
|
||||
"createAPassword": "Create a password",
|
||||
"authRegisterAlreadyHaveAccount": "Already have an account?",
|
||||
"commonAppName": "BlueWave Uptime",
|
||||
"authLoginEnterEmail": "Enter your email",
|
||||
"authRegisterTitle": "Create an account",
|
||||
"authRegisterStepOneTitle": "Create your account",
|
||||
"authRegisterStepOneDescription": "Enter your details to get started",
|
||||
"authRegisterStepTwoTitle": "Set up your profile",
|
||||
"authRegisterStepTwoDescription": "Tell us more about yourself",
|
||||
"authRegisterStepThreeTitle": "Almost done!",
|
||||
"authRegisterStepThreeDescription": "Review your information",
|
||||
"authForgotPasswordDescription": "No worries, we'll send you reset instructions.",
|
||||
"authForgotPasswordSendInstructions": "Send instructions",
|
||||
"authForgotPasswordBackTo": "Back to",
|
||||
"authCheckEmailTitle": "Check your email",
|
||||
"authCheckEmailDescription": "We sent a password reset link to {{email}}",
|
||||
"authCheckEmailResendEmail": "Resend email",
|
||||
"authCheckEmailBackTo": "Back to",
|
||||
"goBackTo": "Go back to",
|
||||
"authCheckEmailDidntReceiveEmail": "Didn't receive the email?",
|
||||
"authCheckEmailClickToResend": "Click to resend",
|
||||
"authSetNewPasswordTitle": "Set new password",
|
||||
"authSetNewPasswordDescription": "Your new password must be different from previously used passwords.",
|
||||
"authSetNewPasswordNewPassword": "New password",
|
||||
"authSetNewPasswordConfirmPassword": "Confirm password",
|
||||
"confirmPassword": "Confirm your password",
|
||||
"authSetNewPasswordResetPassword": "Reset password",
|
||||
"authSetNewPasswordBackTo": "Back to",
|
||||
"authPasswordMustBeAtLeast": "Must be at least",
|
||||
"authPasswordCharactersLong": "8 characters long",
|
||||
"authPasswordMustContainAtLeast": "Must contain at least",
|
||||
"authPasswordSpecialCharacter": "one special character",
|
||||
"authPasswordOneNumber": "one number",
|
||||
"authPasswordUpperCharacter": "one upper character",
|
||||
"authPasswordLowerCharacter": "one lower character",
|
||||
"authPasswordConfirmAndPassword": "Confirm password and password",
|
||||
"authPasswordMustMatch": "must match",
|
||||
"friendlyError": "Something went wrong...",
|
||||
"unknownError": "An unknown error occurred",
|
||||
"unauthorized": "Unauthorized access",
|
||||
"authAdminExists": "Admin already exists",
|
||||
"authInviteNotFound": "Invite not found",
|
||||
"unknownService": "Unknown service",
|
||||
"noAuthToken": "No auth token provided",
|
||||
"invalidAuthToken": "Invalid auth token",
|
||||
"expiredAuthToken": "Token expired",
|
||||
"noRefreshToken": "No refresh token provided",
|
||||
"invalidRefreshToken": "Invalid refresh token",
|
||||
"expiredRefreshToken": "Refresh token expired",
|
||||
"requestNewAccessToken": "Request new access token",
|
||||
"invalidPayload": "Invalid payload",
|
||||
"verifyOwnerNotFound": "Document not found",
|
||||
"verifyOwnerUnauthorized": "Unauthorized access",
|
||||
"insufficientPermissions": "Insufficient permissions",
|
||||
"dbUserExists": "User already exists",
|
||||
"dbUserNotFound": "User not found",
|
||||
"dbTokenNotFound": "Token not found",
|
||||
"dbResetPasswordBadMatch": "New password must be different from old password",
|
||||
"dbFindMonitorById": "Monitor with id ${monitorId} not found",
|
||||
"dbDeleteChecks": "No checks found for monitor with id ${monitorId}",
|
||||
"authIncorrectPassword": "Incorrect password",
|
||||
"authUnauthorized": "Unauthorized access",
|
||||
"monitorGetById": "Monitor not found",
|
||||
"monitorGetByUserId": "No monitors found for user",
|
||||
"jobQueueWorkerClose": "Error closing worker",
|
||||
"jobQueueDeleteJob": "Job not found in queue",
|
||||
"jobQueueObliterate": "Error obliterating queue",
|
||||
"pingCannotResolve": "No response",
|
||||
"statusPageNotFound": "Status page not found",
|
||||
"statusPageUrlNotUnique": "Status page url must be unique",
|
||||
"dockerFail": "Failed to fetch Docker container information",
|
||||
"dockerNotFound": "Docker container not found",
|
||||
"portFail": "Failed to connect to port",
|
||||
"alertCreate": "Alert created successfully",
|
||||
"alertGetByUser": "Got alerts successfully",
|
||||
"alertGetByMonitor": "Got alerts by Monitor successfully",
|
||||
"alertGetById": "Got alert by Id successfully",
|
||||
"alertEdit": "Alert edited successfully",
|
||||
"alertDelete": "Alert deleted successfully",
|
||||
"authCreateUser": "User created successfully",
|
||||
"authLoginUser": "User logged in successfully",
|
||||
"authLogoutUser": "User logged out successfully",
|
||||
"authUpdateUser": "User updated successfully",
|
||||
"authCreateRecoveryToken": "Recovery token created successfully",
|
||||
"authVerifyRecoveryToken": "Recovery token verified successfully",
|
||||
"authResetPassword": "Password reset successfully",
|
||||
"authAdminCheck": "Admin check completed successfully",
|
||||
"authDeleteUser": "User deleted successfully",
|
||||
"authTokenRefreshed": "Auth token is refreshed",
|
||||
"authGetAllUsers": "Got all users successfully",
|
||||
"inviteIssued": "Invite sent successfully",
|
||||
"inviteVerified": "Invite verified successfully",
|
||||
"checkCreate": "Check created successfully",
|
||||
"checkGet": "Got checks successfully",
|
||||
"checkDelete": "Checks deleted successfully",
|
||||
"checkUpdateTtl": "Checks TTL updated successfully",
|
||||
"monitorGetAll": "Got all monitors successfully",
|
||||
"monitorStatsById": "Got monitor stats by Id successfully",
|
||||
"monitorGetByIdSuccess": "Got monitor by Id successfully",
|
||||
"monitorGetByTeamId": "Got monitors by Team Id successfully",
|
||||
"monitorGetByUserIdSuccess": "Got monitor for ${userId} successfully",
|
||||
"monitorCreate": "Monitor created successfully",
|
||||
"monitorDelete": "Monitor deleted successfully",
|
||||
"monitorEdit": "Monitor edited successfully",
|
||||
"monitorCertificate": "Got monitor certificate successfully",
|
||||
"monitorDemoAdded": "Successfully added demo monitors",
|
||||
"queueGetMetrics": "Got metrics successfully",
|
||||
"queueAddJob": "Job added successfully",
|
||||
"queueObliterate": "Queue obliterated",
|
||||
"jobQueueDeleteJobSuccess": "Job removed successfully",
|
||||
"jobQueuePauseJob": "Job paused successfully",
|
||||
"jobQueueResumeJob": "Job resumed successfully",
|
||||
"maintenanceWindowGetById": "Got Maintenance Window by Id successfully",
|
||||
"maintenanceWindowCreate": "Maintenance Window created successfully",
|
||||
"maintenanceWindowGetByTeam": "Got Maintenance Windows by Team successfully",
|
||||
"maintenanceWindowDelete": "Maintenance Window deleted successfully",
|
||||
"maintenanceWindowEdit": "Maintenance Window edited successfully",
|
||||
"pingSuccess": "Success",
|
||||
"getAppSettings": "Got app settings successfully",
|
||||
"updateAppSettings": "Updated app settings successfully",
|
||||
"statusPageByUrl": "Got status page by url successfully",
|
||||
"statusPageCreate": "Status page created successfully",
|
||||
"newTermsAdded": "New terms added to POEditor",
|
||||
"dockerSuccess": "Docker container status fetched successfully",
|
||||
"portSuccess": "Port connected successfully",
|
||||
"monitorPause": "Monitor paused successfully",
|
||||
"monitorResume": "Monitor resumed successfully",
|
||||
"statusPageDelete": "Status page deleted successfully",
|
||||
"statusPageUpdate": "Status page updated successfully",
|
||||
"statusPageByTeamId": "Got status pages by team id successfully",
|
||||
"httpNetworkError": "Network error",
|
||||
"httpNotJson": "Response data is not json",
|
||||
"httpJsonPathError": "Failed to parse json data",
|
||||
"httpEmptyResult": "Result is empty",
|
||||
"httpMatchSuccess": "Response data match successfully",
|
||||
"httpMatchFail": "Failed to match response data"
|
||||
}
|
||||
@@ -1,147 +0,0 @@
|
||||
{
|
||||
"dontHaveAccount": "Don't have account",
|
||||
"email": "E-mail",
|
||||
"forgotPassword": "Forgot Password",
|
||||
"password": "password",
|
||||
"signUp": "Sign up",
|
||||
"submit": "Submit",
|
||||
"title": "Title",
|
||||
"continue": "Continue",
|
||||
"enterEmail": "Enter your email",
|
||||
"authLoginTitle": "Log In",
|
||||
"authLoginEnterPassword": "Enter your password",
|
||||
"commonPassword": "Password",
|
||||
"commonBack": "Back",
|
||||
"authForgotPasswordTitle": "Forgot password?",
|
||||
"authForgotPasswordResetPassword": "Reset password",
|
||||
"createPassword": "Create your password",
|
||||
"createAPassword": "Create a password",
|
||||
"authRegisterAlreadyHaveAccount": "Already have an account?",
|
||||
"commonAppName": "BlueWave Uptime",
|
||||
"authLoginEnterEmail": "Enter your email",
|
||||
"authRegisterTitle": "Create an account",
|
||||
"authRegisterStepOneTitle": "Create your account",
|
||||
"authRegisterStepOneDescription": "Enter your details to get started",
|
||||
"authRegisterStepTwoTitle": "Set up your profile",
|
||||
"authRegisterStepTwoDescription": "Tell us more about yourself",
|
||||
"authRegisterStepThreeTitle": "Almost done!",
|
||||
"authRegisterStepThreeDescription": "Review your information",
|
||||
"authForgotPasswordDescription": "No worries, we'll send you reset instructions.",
|
||||
"authForgotPasswordSendInstructions": "Send instructions",
|
||||
"authForgotPasswordBackTo": "Back to",
|
||||
"authCheckEmailTitle": "Check your email",
|
||||
"authCheckEmailDescription": "We sent a password reset link to {{email}}",
|
||||
"authCheckEmailResendEmail": "Resend email",
|
||||
"authCheckEmailBackTo": "Back to",
|
||||
"goBackTo": "Go back to",
|
||||
"authCheckEmailDidntReceiveEmail": "Didn't receive the email?",
|
||||
"authCheckEmailClickToResend": "Click to resend",
|
||||
"authSetNewPasswordTitle": "Set new password",
|
||||
"authSetNewPasswordDescription": "Your new password must be different from previously used passwords.",
|
||||
"authSetNewPasswordNewPassword": "New password",
|
||||
"authSetNewPasswordConfirmPassword": "Confirm password",
|
||||
"confirmPassword": "Confirm your password",
|
||||
"authSetNewPasswordResetPassword": "Reset password",
|
||||
"authSetNewPasswordBackTo": "Back to",
|
||||
"authPasswordMustBeAtLeast": "Must be at least",
|
||||
"authPasswordCharactersLong": "8 characters long",
|
||||
"authPasswordMustContainAtLeast": "Must contain at least",
|
||||
"authPasswordSpecialCharacter": "one special character",
|
||||
"authPasswordOneNumber": "one number",
|
||||
"authPasswordUpperCharacter": "one upper character",
|
||||
"authPasswordLowerCharacter": "one lower character",
|
||||
"authPasswordConfirmAndPassword": "Confirm password and password",
|
||||
"authPasswordMustMatch": "must match",
|
||||
"friendlyError": "Something went wrong...",
|
||||
"unknownError": "An unknown error occurred",
|
||||
"unauthorized": "Unauthorized access",
|
||||
"authAdminExists": "Admin already exists",
|
||||
"authInviteNotFound": "Invite not found",
|
||||
"unknownService": "Unknown service",
|
||||
"noAuthToken": "No auth token provided",
|
||||
"invalidAuthToken": "Invalid auth token",
|
||||
"expiredAuthToken": "Token expired",
|
||||
"noRefreshToken": "No refresh token provided",
|
||||
"invalidRefreshToken": "Invalid refresh token",
|
||||
"expiredRefreshToken": "Refresh token expired",
|
||||
"requestNewAccessToken": "Request new access token",
|
||||
"invalidPayload": "Invalid payload",
|
||||
"verifyOwnerNotFound": "Document not found",
|
||||
"verifyOwnerUnauthorized": "Unauthorized access",
|
||||
"insufficientPermissions": "Insufficient permissions",
|
||||
"dbUserExists": "User already exists",
|
||||
"dbUserNotFound": "User not found",
|
||||
"dbTokenNotFound": "Token not found",
|
||||
"dbResetPasswordBadMatch": "New password must be different from old password",
|
||||
"dbFindMonitorById": "Monitor with id ${monitorId} not found",
|
||||
"dbDeleteChecks": "No checks found for monitor with id ${monitorId}",
|
||||
"authIncorrectPassword": "Incorrect password",
|
||||
"authUnauthorized": "Unauthorized access",
|
||||
"monitorGetById": "Monitor not found",
|
||||
"monitorGetByUserId": "No monitors found for user",
|
||||
"jobQueueWorkerClose": "Error closing worker",
|
||||
"jobQueueDeleteJob": "Job not found in queue",
|
||||
"jobQueueObliterate": "Error obliterating queue",
|
||||
"pingCannotResolve": "No response",
|
||||
"statusPageNotFound": "Status page not found",
|
||||
"statusPageUrlNotUnique": "Status page url must be unique",
|
||||
"dockerFail": "Failed to fetch Docker container information",
|
||||
"dockerNotFound": "Docker container not found",
|
||||
"portFail": "Failed to connect to port",
|
||||
"alertCreate": "Alert created successfully",
|
||||
"alertGetByUser": "Got alerts successfully",
|
||||
"alertGetByMonitor": "Got alerts by Monitor successfully",
|
||||
"alertGetById": "Got alert by Id successfully",
|
||||
"alertEdit": "Alert edited successfully",
|
||||
"alertDelete": "Alert deleted successfully",
|
||||
"authCreateUser": "User created successfully",
|
||||
"authLoginUser": "User logged in successfully",
|
||||
"authLogoutUser": "User logged out successfully",
|
||||
"authUpdateUser": "User updated successfully",
|
||||
"authCreateRecoveryToken": "Recovery token created successfully",
|
||||
"authVerifyRecoveryToken": "Recovery token verified successfully",
|
||||
"authResetPassword": "Password reset successfully",
|
||||
"authAdminCheck": "Admin check completed successfully",
|
||||
"authDeleteUser": "User deleted successfully",
|
||||
"authTokenRefreshed": "Auth token is refreshed",
|
||||
"authGetAllUsers": "Got all users successfully",
|
||||
"inviteIssued": "Invite sent successfully",
|
||||
"inviteVerified": "Invite verified successfully",
|
||||
"checkCreate": "Check created successfully",
|
||||
"checkGet": "Got checks successfully",
|
||||
"checkDelete": "Checks deleted successfully",
|
||||
"checkUpdateTtl": "Checks TTL updated successfully",
|
||||
"monitorGetAll": "Got all monitors successfully",
|
||||
"monitorStatsById": "Got monitor stats by Id successfully",
|
||||
"monitorGetByIdSuccess": "Got monitor by Id successfully",
|
||||
"monitorGetByTeamId": "Got monitors by Team Id successfully",
|
||||
"monitorGetByUserIdSuccess": "Got monitor for ${userId} successfully",
|
||||
"monitorCreate": "Monitor created successfully",
|
||||
"monitorDelete": "Monitor deleted successfully",
|
||||
"monitorEdit": "Monitor edited successfully",
|
||||
"monitorCertificate": "Got monitor certificate successfully",
|
||||
"monitorDemoAdded": "Successfully added demo monitors",
|
||||
"queueGetMetrics": "Got metrics successfully",
|
||||
"queueAddJob": "Job added successfully",
|
||||
"queueObliterate": "Queue obliterated",
|
||||
"jobQueueDeleteJobSuccess": "Job removed successfully",
|
||||
"jobQueuePauseJob": "Job paused successfully",
|
||||
"jobQueueResumeJob": "Job resumed successfully",
|
||||
"maintenanceWindowGetById": "Got Maintenance Window by Id successfully",
|
||||
"maintenanceWindowCreate": "Maintenance Window created successfully",
|
||||
"maintenanceWindowGetByTeam": "Got Maintenance Windows by Team successfully",
|
||||
"maintenanceWindowDelete": "Maintenance Window deleted successfully",
|
||||
"maintenanceWindowEdit": "Maintenance Window edited successfully",
|
||||
"pingSuccess": "Success",
|
||||
"getAppSettings": "Got app settings successfully",
|
||||
"updateAppSettings": "Updated app settings successfully",
|
||||
"statusPageByUrl": "Got status page by url successfully",
|
||||
"statusPageCreate": "Status page created successfully",
|
||||
"newTermsAdded": "New terms added to POEditor",
|
||||
"dockerSuccess": "Docker container status fetched successfully",
|
||||
"portSuccess": "Port connected successfully",
|
||||
"monitorPause": "Monitor paused successfully",
|
||||
"monitorResume": "Monitor resumed successfully",
|
||||
"statusPageDelete": "Status page deleted successfully",
|
||||
"statusPageUpdate": "Status page updated successfully"
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
import logger from "../utils/logger.js";
|
||||
import ServiceRegistry from "../service/serviceRegistry.js";
|
||||
import StringService from "../service/stringService.js";
|
||||
|
||||
const handleErrors = (error, req, res, next) => {
|
||||
const status = error.status || 500;
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
const message = error.message || stringService.friendlyError;
|
||||
const service = error.service || stringService.unknownService;
|
||||
logger.error({
|
||||
message: message,
|
||||
service: service,
|
||||
method: error.method,
|
||||
stack: error.stack,
|
||||
});
|
||||
res.error({
|
||||
status,
|
||||
msg: message,
|
||||
});
|
||||
};
|
||||
|
||||
export { handleErrors };
|
||||
@@ -1,60 +0,0 @@
|
||||
import jwt from "jsonwebtoken";
|
||||
const TOKEN_PREFIX = "Bearer ";
|
||||
const SERVICE_NAME = "allowedRoles";
|
||||
import ServiceRegistry from "../service/serviceRegistry.js";
|
||||
import StringService from "../service/stringService.js";
|
||||
import SettingsService from "../service/settingsService.js";
|
||||
|
||||
|
||||
const isAllowed = (allowedRoles) => {
|
||||
return (req, res, next) => {
|
||||
const token = req.headers["authorization"];
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
// If no token is present, return an error
|
||||
if (!token) {
|
||||
const error = new Error(stringService.noAuthToken);
|
||||
error.status = 401;
|
||||
error.service = SERVICE_NAME;
|
||||
next(error);
|
||||
return;
|
||||
}
|
||||
|
||||
// If the token is improperly formatted, return an error
|
||||
if (!token.startsWith(TOKEN_PREFIX)) {
|
||||
const error = new Error(stringService.invalidAuthToken);
|
||||
error.status = 400;
|
||||
error.service = SERVICE_NAME;
|
||||
next(error);
|
||||
return;
|
||||
}
|
||||
// Parse the token
|
||||
try {
|
||||
const parsedToken = token.slice(TOKEN_PREFIX.length, token.length);
|
||||
const { jwtSecret } = ServiceRegistry.get(
|
||||
SettingsService.SERVICE_NAME
|
||||
).getSettings();
|
||||
const decoded = jwt.verify(parsedToken, jwtSecret);
|
||||
const userRoles = decoded.role;
|
||||
|
||||
// Check if the user has the required role
|
||||
if (userRoles.some((role) => allowedRoles.includes(role))) {
|
||||
next();
|
||||
return;
|
||||
} else {
|
||||
const error = new Error(stringService.insufficientPermissions);
|
||||
error.status = 401;
|
||||
error.service = SERVICE_NAME;
|
||||
next(error);
|
||||
return;
|
||||
}
|
||||
} catch (error) {
|
||||
error.status = 401;
|
||||
error.method = "isAllowed";
|
||||
error.service = SERVICE_NAME;
|
||||
next(error);
|
||||
return;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
export { isAllowed };
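A usage sketch, assuming an Express router; the route path and role names are illustrative:

import { Router } from "express";
import { isAllowed } from "./isAllowed.js";

const router = Router();
// Only requests whose decoded JWT role array contains one of these values reach the handler
router.delete("/users/:userId", isAllowed(["admin", "superadmin"]), (req, res) => {
	res.status(204).send();
});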
|
||||
@@ -1,11 +0,0 @@
|
||||
const languageMiddleware = (stringService, translationService) => (req, res, next) => {
|
||||
const acceptLanguage = req.headers['accept-language'] || 'en';
|
||||
const language = acceptLanguage.split(',')[0].slice(0, 2).toLowerCase();
|
||||
|
||||
translationService.setLanguage(language);
|
||||
stringService.setLanguage(language);
|
||||
|
||||
next();
|
||||
};
|
||||
|
||||
export default languageMiddleware;
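The language pick is just the first two letters of the first Accept-Language entry; a small sketch of that parsing in isolation:

// "fr-CA,fr;q=0.9,en;q=0.8" -> "fr"; a missing header falls back to "en"
const pickLanguage = (acceptLanguage = "en") => acceptLanguage.split(",")[0].slice(0, 2).toLowerCase();

console.log(pickLanguage("fr-CA,fr;q=0.9,en;q=0.8")); // "fr"
console.log(pickLanguage()); // "en"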
|
||||
@@ -1,20 +0,0 @@
|
||||
const responseHandler = (req, res, next) => {
|
||||
res.success = ({ status = 200, msg = "OK", data = null }) => {
|
||||
return res.status(status).json({
|
||||
success: true,
|
||||
msg: msg,
|
||||
data: data,
|
||||
});
|
||||
};
|
||||
|
||||
res.error = ({ status = 500, msg = "Internal server error", data = null }) => {
|
||||
return res.status(status).json({
|
||||
success: false,
|
||||
msg,
|
||||
data,
|
||||
});
|
||||
};
|
||||
next();
|
||||
};
|
||||
|
||||
export { responseHandler };
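A sketch of how a handler can use these helpers once the middleware is mounted (the handlers and payloads are illustrative):

// Assumes app.use(responseHandler) ran earlier in the middleware chain
const getHealth = (req, res) => {
	res.success({ msg: "OK", data: { uptime: process.uptime() } });
};

const getHealthWhenDown = (req, res) => {
	res.error({ status: 503, msg: "Service unavailable" });
};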
|
||||
@@ -1,97 +0,0 @@
|
||||
import jwt from "jsonwebtoken";
|
||||
import ServiceRegistry from "../service/serviceRegistry.js";
|
||||
import SettingsService from "../service/settingsService.js";
|
||||
import StringService from "../service/stringService.js";
|
||||
const SERVICE_NAME = "verifyJWT";
|
||||
const TOKEN_PREFIX = "Bearer ";
|
||||
|
||||
/**
|
||||
* Verifies the JWT token
|
||||
* @function
|
||||
* @param {express.Request} req
|
||||
* @param {express.Response} res
|
||||
* @param {express.NextFunction} next
|
||||
* @returns {express.Response}
|
||||
*/
|
||||
const verifyJWT = (req, res, next) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
const token = req.headers["authorization"];
|
||||
// Make sure a token is provided
|
||||
if (!token) {
|
||||
const error = new Error(stringService.noAuthToken);
|
||||
error.status = 401;
|
||||
error.service = SERVICE_NAME;
|
||||
next(error);
|
||||
return;
|
||||
}
|
||||
// Make sure it is properly formatted
|
||||
if (!token.startsWith(TOKEN_PREFIX)) {
|
||||
const error = new Error(stringService.invalidAuthToken); // Instantiate a new Error object for improperly formatted token
|
||||
error.status = 400;
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "verifyJWT";
|
||||
next(error);
|
||||
return;
|
||||
}
|
||||
|
||||
const parsedToken = token.slice(TOKEN_PREFIX.length, token.length);
|
||||
// Verify the token's authenticity
|
||||
const { jwtSecret } = ServiceRegistry.get(SettingsService.SERVICE_NAME).getSettings();
|
||||
jwt.verify(parsedToken, jwtSecret, (err, decoded) => {
|
||||
if (err) {
|
||||
if (err.name === "TokenExpiredError") {
|
||||
// token has expired
|
||||
handleExpiredJwtToken(req, res, next);
|
||||
} else {
|
||||
// Invalid token (signature or token altered or other issue)
|
||||
const errorMessage = stringService.invalidAuthToken;
|
||||
return res.status(401).json({ success: false, msg: errorMessage });
|
||||
}
|
||||
} else {
|
||||
// Token is valid and not expired, carry on with request, Add the decoded payload to the request
|
||||
req.user = decoded;
|
||||
next();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
function handleExpiredJwtToken(req, res, next) {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
// check for refreshToken
|
||||
const refreshToken = req.headers["x-refresh-token"];
|
||||
|
||||
if (!refreshToken) {
|
||||
// No refresh token provided
|
||||
const error = new Error(stringService.noRefreshToken);
|
||||
error.status = 401;
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "handleExpiredJwtToken";
|
||||
return next(error);
|
||||
}
|
||||
|
||||
// Verify refresh token
|
||||
const { refreshTokenSecret } = ServiceRegistry.get(
|
||||
SettingsService.SERVICE_NAME
|
||||
).getSettings();
|
||||
jwt.verify(refreshToken, refreshTokenSecret, (refreshErr, refreshDecoded) => {
|
||||
if (refreshErr) {
|
||||
// Invalid or expired refresh token, trigger logout
|
||||
const errorMessage =
|
||||
refreshErr.name === "TokenExpiredError"
|
||||
? stringService.expiredRefreshToken
|
||||
: stringService.invalidRefreshToken;
|
||||
const error = new Error(errorMessage);
|
||||
error.status = 401;
|
||||
error.service = SERVICE_NAME;
|
||||
return next(error);
|
||||
}
|
||||
|
||||
// Refresh token is valid and unexpired, request for new access token
|
||||
res.status(403).json({
|
||||
success: false,
|
||||
msg: stringService.requestNewAccessToken,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export { verifyJWT };
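A client-side sketch of the contract implied above: the access token travels in Authorization, the refresh token in x-refresh-token, and a 403 carrying requestNewAccessToken tells the caller to obtain a new access token (the URL and token handling below are assumptions):

const callMonitors = async (accessToken, refreshToken) => {
	const res = await fetch("http://localhost:5000/api/v1/monitors", {
		headers: {
			Authorization: `Bearer ${accessToken}`,
			"x-refresh-token": refreshToken,
		},
	});
	if (res.status === 403) {
		// Access token expired but the refresh token is still valid: request a new access token
		return "refresh-needed";
	}
	return res.json();
};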
|
||||
@@ -1,53 +0,0 @@
|
||||
import logger from "../utils/logger.js";
|
||||
import ServiceRegistry from "../service/serviceRegistry.js";
|
||||
import StringService from "../service/stringService.js";
|
||||
const SERVICE_NAME = "verifyOwnership";
|
||||
|
||||
const verifyOwnership = (Model, paramName) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
return async (req, res, next) => {
|
||||
const userId = req.user._id;
|
||||
const documentId = req.params[paramName];
|
||||
try {
|
||||
const doc = await Model.findById(documentId);
|
||||
//If the document is not found, return a 404 error
|
||||
if (!doc) {
|
||||
logger.error({
|
||||
message: stringService.verifyOwnerNotFound,
|
||||
service: SERVICE_NAME,
|
||||
method: "verifyOwnership",
|
||||
});
|
||||
const error = new Error(stringService.verifyOwnerNotFound);
|
||||
error.status = 404;
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Special case for User model, as it will not have a `userId` field as other docs will
|
||||
if (Model.modelName === "User") {
|
||||
if (userId.toString() !== doc._id.toString()) {
|
||||
const error = new Error(stringService.verifyOwnerUnauthorized);
|
||||
error.status = 403;
|
||||
throw error;
|
||||
}
|
||||
next();
|
||||
return;
|
||||
}
|
||||
|
||||
// If the userID does not match the document's userID, return a 403 error
|
||||
if (userId.toString() !== doc.userId.toString()) {
|
||||
const error = new Error(stringService.verifyOwnerUnauthorized);
|
||||
error.status = 403;
|
||||
throw error;
|
||||
}
|
||||
next();
|
||||
return;
|
||||
} catch (error) {
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "verifyOwnership";
|
||||
next(error);
|
||||
return;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
export { verifyOwnership };
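A usage sketch, assuming an Express route whose param name matches the second argument and a Mongoose Monitor model (the import paths and route are illustrative):

import { Router } from "express";
import Monitor from "../db/models/Monitor.js";
import { verifyOwnership } from "./verifyOwnership.js";

const router = Router();
// 404 if the monitor is missing, 403 if req.user._id does not own it, otherwise the handler runs
router.put("/monitors/:monitorId", verifyOwnership(Monitor, "monitorId"), (req, res) => {
	res.status(200).json({ success: true });
});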
|
||||
@@ -1,68 +0,0 @@
|
||||
import jwt from "jsonwebtoken";
import logger from "../utils/logger.js";
import ServiceRegistry from "../service/serviceRegistry.js";
import SettingsService from "../service/settingsService.js";
import StringService from "../service/stringService.js";

const SERVICE_NAME = "verifyAdmin";
const TOKEN_PREFIX = "Bearer ";
|
||||
/**
|
||||
* Verifies the JWT token
|
||||
* @function
|
||||
* @param {express.Request} req
|
||||
* @param {express.Response} res
|
||||
* @param {express.NextFunction} next
|
||||
* @returns {express.Response}
|
||||
*/
|
||||
const verifySuperAdmin = (req, res, next) => {
|
||||
const stringService = ServiceRegistry.get(StringService.SERVICE_NAME);
|
||||
const token = req.headers["authorization"];
|
||||
// Make sure a token is provided
|
||||
if (!token) {
|
||||
const error = new Error(stringService.noAuthToken);
|
||||
error.status = 401;
|
||||
error.service = SERVICE_NAME;
|
||||
next(error);
|
||||
return;
|
||||
}
|
||||
// Make sure it is properly formatted
|
||||
if (!token.startsWith(TOKEN_PREFIX)) {
|
||||
const error = new Error(stringService.invalidAuthToken); // Instantiate a new Error object for improperly formatted token
|
||||
error.status = 400;
|
||||
error.service = SERVICE_NAME;
|
||||
error.method = "verifySuperAdmin";
|
||||
next(error);
|
||||
return;
|
||||
}
|
||||
|
||||
const parsedToken = token.slice(TOKEN_PREFIX.length, token.length);
|
||||
// verify admin role is present
|
||||
const { jwtSecret } = ServiceRegistry.get(SettingsService.SERVICE_NAME).getSettings();
|
||||
|
||||
jwt.verify(parsedToken, jwtSecret, (err, decoded) => {
|
||||
if (err) {
|
||||
logger.error({
|
||||
message: err.message,
|
||||
service: SERVICE_NAME,
|
||||
method: "verifySuperAdmin",
|
||||
stack: err.stack,
|
||||
details: stringService.invalidAuthToken,
|
||||
});
|
||||
return res
|
||||
.status(401)
|
||||
.json({ success: false, msg: stringService.invalidAuthToken });
|
||||
}
|
||||
|
||||
if (decoded.role.includes("superadmin") === false) {
|
||||
logger.error({
|
||||
message: stringService.invalidAuthToken,
|
||||
service: SERVICE_NAME,
|
||||
method: "verifySuperAdmin",
|
||||
stack: err.stack,
|
||||
});
|
||||
return res.status(401).json({ success: false, msg: stringService.unauthorized });
|
||||
}
|
||||
next();
|
||||
});
|
||||
};
|
||||
|
||||
export { verifySuperAdmin };
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"ignore": [
|
||||
"locales/*",
|
||||
"*.log",
|
||||
"node_modules/*"
|
||||
],
|
||||
"watch": [
|
||||
"*.js",
|
||||
"*.json"
|
||||
],
|
||||
"ext": "js,json"
|
||||
}
|
||||
2562
Server/openapi.json
2562
Server/openapi.json
File diff suppressed because it is too large
8334
Server/package-lock.json
generated
8334
Server/package-lock.json
generated
File diff suppressed because it is too large
@@ -1,56 +0,0 @@
|
||||
{
|
||||
"name": "server",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"test": "c8 mocha",
|
||||
"dev": "nodemon index.js",
|
||||
"lint": "eslint .",
|
||||
"lint-fix": "eslint --fix .",
|
||||
"format": "prettier --write .",
|
||||
"format-check": "prettier --check ."
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"axios": "^1.7.2",
|
||||
"bcrypt": "5.1.1",
|
||||
"bullmq": "5.41.2",
|
||||
"cors": "^2.8.5",
|
||||
"dockerode": "4.0.4",
|
||||
"dotenv": "^16.4.5",
|
||||
"express": "^4.19.2",
|
||||
"handlebars": "^4.7.8",
|
||||
"helmet": "^8.0.0",
|
||||
"ioredis": "^5.4.2",
|
||||
"jmespath": "^0.16.0",
|
||||
"joi": "^17.13.1",
|
||||
"jsonwebtoken": "9.0.2",
|
||||
"mailersend": "^2.2.0",
|
||||
"mjml": "^5.0.0-alpha.4",
|
||||
"mongoose": "^8.3.3",
|
||||
"multer": "1.4.5-lts.1",
|
||||
"nodemailer": "^6.9.14",
|
||||
"ping": "0.4.4",
|
||||
"sharp": "0.33.5",
|
||||
"ssl-checker": "2.0.10",
|
||||
"swagger-ui-express": "5.0.1",
|
||||
"winston": "^3.13.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.17.0",
|
||||
"c8": "10.1.3",
|
||||
"chai": "5.2.0",
|
||||
"eslint": "^9.17.0",
|
||||
"eslint-plugin-mocha": "^10.5.0",
|
||||
"esm": "3.2.25",
|
||||
"globals": "^15.14.0",
|
||||
"mocha": "11.1.0",
|
||||
"nodemon": "3.1.9",
|
||||
"prettier": "^3.3.3",
|
||||
"sinon": "19.0.2"
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff