@@ -511,16 +546,11 @@
}}
>
-
+
-
- YES
-
+ YES
NO
@@ -544,12 +574,8 @@
-
- CRITICAL
-
-
- WARNING
-
+ CRITICAL
+ WARNING
@@ -576,29 +602,13 @@
}}
/>
{#if trigger.trigger_type == "webhook"}
-

+

{:else if trigger.trigger_type == "email"}
-

+

{:else if trigger.trigger_type == "slack"}
-

+

{:else if trigger.trigger_type == "discord"}
-

+

{/if}
{trigger.name}
diff --git a/src/lib/server/constants.js b/src/lib/server/constants.js
index ffeecd83..42860d8e 100644
--- a/src/lib/server/constants.js
+++ b/src/lib/server/constants.js
@@ -10,90 +10,111 @@ const DOWN = "DOWN";
const DEGRADED = "DEGRADED";
const API_TIMEOUT = 10 * 1000; // 10 seconds
const AnalyticsProviders = {
- GA: "https://unpkg.com/@analytics/google-analytics@1.0.7/dist/@analytics/google-analytics.min.js",
- AMPLITUDE: "https://unpkg.com/@analytics/amplitude@0.1.3/dist/@analytics/amplitude.min.js",
- MIXPANEL: "https://unpkg.com/@analytics/mixpanel@0.4.0/dist/@analytics/mixpanel.min.js"
+ GA: "https://unpkg.com/@analytics/google-analytics@1.0.7/dist/@analytics/google-analytics.min.js",
+ AMPLITUDE:
+ "https://unpkg.com/@analytics/amplitude@0.1.3/dist/@analytics/amplitude.min.js",
+ MIXPANEL:
+ "https://unpkg.com/@analytics/mixpanel@0.4.0/dist/@analytics/mixpanel.min.js",
};
const AllRecordTypes = {
- A: 1,
- NS: 2,
- MD: 3,
- MF: 4,
- CNAME: 5,
- SOA: 6,
- MB: 7,
- MG: 8,
- MR: 9,
- NULL: 10,
- WKS: 11,
- PTR: 12,
- HINFO: 13,
- MINFO: 14,
- MX: 15,
- TXT: 16,
- RP: 17,
- AFSDB: 18,
- X25: 19,
- ISDN: 20,
- RT: 21,
- NSAP: 22,
- NSAP_PTR: 23,
- SIG: 24,
- KEY: 25,
- PX: 26,
- GPOS: 27,
- AAAA: 28,
- LOC: 29,
- NXT: 30,
- EID: 31,
- NIMLOC: 32,
- SRV: 33,
- ATMA: 34,
- NAPTR: 35,
- KX: 36,
- CERT: 37,
- A6: 38,
- DNAME: 39,
- SINK: 40,
- OPT: 41,
- APL: 42,
- DS: 43,
- SSHFP: 44,
- IPSECKEY: 45,
- RRSIG: 46,
- NSEC: 47,
- DNSKEY: 48,
- DHCID: 49,
- NSEC3: 50,
- NSEC3PARAM: 51,
- TLSA: 52,
- SMIMEA: 53,
- HIP: 55,
- NINFO: 56,
- RKEY: 57,
- TALINK: 58,
- CDS: 59,
- CDNSKEY: 60,
- OPENPGPKEY: 61,
- CSYNC: 62,
- SPF: 99,
- UINFO: 100,
- UID: 101,
- GID: 102,
- UNSPEC: 103,
- NID: 104,
- L32: 105,
- L64: 106,
- LP: 107,
- EUI48: 108,
- EUI64: 109,
- TKEY: 249,
- TSIG: 250,
- IXFR: 251,
- AXFR: 252,
- MAILB: 253,
- MAILA: 254,
- ANY: 255
+ A: 1,
+ NS: 2,
+ MD: 3,
+ MF: 4,
+ CNAME: 5,
+ SOA: 6,
+ MB: 7,
+ MG: 8,
+ MR: 9,
+ NULL: 10,
+ WKS: 11,
+ PTR: 12,
+ HINFO: 13,
+ MINFO: 14,
+ MX: 15,
+ TXT: 16,
+ RP: 17,
+ AFSDB: 18,
+ X25: 19,
+ ISDN: 20,
+ RT: 21,
+ NSAP: 22,
+ NSAP_PTR: 23,
+ SIG: 24,
+ KEY: 25,
+ PX: 26,
+ GPOS: 27,
+ AAAA: 28,
+ LOC: 29,
+ NXT: 30,
+ EID: 31,
+ NIMLOC: 32,
+ SRV: 33,
+ ATMA: 34,
+ NAPTR: 35,
+ KX: 36,
+ CERT: 37,
+ A6: 38,
+ DNAME: 39,
+ SINK: 40,
+ OPT: 41,
+ APL: 42,
+ DS: 43,
+ SSHFP: 44,
+ IPSECKEY: 45,
+ RRSIG: 46,
+ NSEC: 47,
+ DNSKEY: 48,
+ DHCID: 49,
+ NSEC3: 50,
+ NSEC3PARAM: 51,
+ TLSA: 52,
+ SMIMEA: 53,
+ HIP: 55,
+ NINFO: 56,
+ RKEY: 57,
+ TALINK: 58,
+ CDS: 59,
+ CDNSKEY: 60,
+ OPENPGPKEY: 61,
+ CSYNC: 62,
+ SPF: 99,
+ UINFO: 100,
+ UID: 101,
+ GID: 102,
+ UNSPEC: 103,
+ NID: 104,
+ L32: 105,
+ L64: 106,
+ LP: 107,
+ EUI48: 108,
+ EUI64: 109,
+ TKEY: 249,
+ TSIG: 250,
+ IXFR: 251,
+ AXFR: 252,
+ MAILB: 253,
+ MAILA: 254,
+ ANY: 255,
};
// Export the constants
-export { MONITOR, UP, DOWN, SITE, DEGRADED, API_TIMEOUT, ENV, AnalyticsProviders, AllRecordTypes };
+const REALTIME = "realtime";
+const TIMEOUT = "timeout";
+const ERROR = "error";
+const MANUAL = "manual";
+
+export {
+ MONITOR,
+ UP,
+ DOWN,
+ SITE,
+ DEGRADED,
+ API_TIMEOUT,
+ ENV,
+ AnalyticsProviders,
+ AllRecordTypes,
+ REALTIME,
+ TIMEOUT,
+ ERROR,
+ MANUAL,
+};
diff --git a/src/lib/server/cron-minute.js b/src/lib/server/cron-minute.js
index 301e24c4..62311186 100644
--- a/src/lib/server/cron-minute.js
+++ b/src/lib/server/cron-minute.js
@@ -2,12 +2,8 @@
import axios from "axios";
import { Ping, ExtractIPv6HostAndPort, TCP } from "./ping.js";
import { UP, DOWN, DEGRADED } from "./constants.js";
-import {
- GetMinuteStartNowTimestampUTC,
- ReplaceAllOccurrences,
- GetRequiredSecrets,
- Wait,
-} from "./tool.js";
+import Service from "./services/service.js";
+import { GetMinuteStartNowTimestampUTC, ReplaceAllOccurrences, GetRequiredSecrets, Wait } from "./tool.js";
import alerting from "./alerting.js";
import Queue from "queue";
@@ -35,65 +31,10 @@ const apiQueue = new Queue({
autostart: true, // Automatically start the queue (optional)
});
-const defaultEval = `(async function (statusCode, responseTime, responseData) {
- let statusCodeShort = Math.floor(statusCode/100);
- if(statusCode == 429 || (statusCodeShort >=2 && statusCodeShort <= 3)) {
- return {
- status: 'UP',
- latency: responseTime,
- }
- }
- return {
- status: 'DOWN',
- latency: responseTime,
- }
-})`;
-
-const defaultPingEval = `(async function (responseDataBase64) {
- let arrayOfPings = JSON.parse(atob(responseDataBase64));
- let latencyTotal = arrayOfPings.reduce((acc, ping) => {
- return acc + ping.latency;
- }, 0);
-
- let alive = arrayOfPings.reduce((acc, ping) => {
- return acc && ping.alive;
- }, true);
-
- return {
- status: alive ? 'UP' : 'DOWN',
- latency: latencyTotal / arrayOfPings.length,
- }
-})`;
-const defaultTcpEval = `(async function (responseDataBase64) {
- let arrayOfPings = JSON.parse(atob(responseDataBase64));
- let latencyTotal = arrayOfPings.reduce((acc, ping) => {
- return acc + ping.latency;
- }, 0);
-
- let alive = arrayOfPings.reduce((acc, ping) => {
- if (ping.status === "open") {
- return acc && true;
- } else {
- return false;
- }
- }, true);
-
- return {
- status: alive ? 'UP' : 'DOWN',
- latency: latencyTotal / arrayOfPings.length,
- }
-})`;
-
async function manualIncident(monitor) {
let startTs = GetMinuteStartNowTimestampUTC();
- let incidentArr = await db.getIncidentsByMonitorTagRealtime(
- monitor.tag,
- startTs,
- );
- let maintenanceArr = await db.getMaintenanceByMonitorTagRealtime(
- monitor.tag,
- startTs,
- );
+ let incidentArr = await db.getIncidentsByMonitorTagRealtime(monitor.tag, startTs);
+ let maintenanceArr = await db.getMaintenanceByMonitorTagRealtime(monitor.tag, startTs);
let impactArr = incidentArr.concat(maintenanceArr);
@@ -105,11 +46,7 @@ async function manualIncident(monitor) {
for (let i = 0; i < impactArr.length; i++) {
const element = impactArr[i];
- let autoIncidents = await db.getActiveAlertIncident(
- monitor.tag,
- element.monitor_impact,
- element.id,
- );
+ let autoIncidents = await db.getActiveAlertIncident(monitor.tag, element.monitor_impact, element.id);
if (!!autoIncidents) {
continue;
@@ -160,275 +97,24 @@ const tcpCall = async (hosts, tcpEval, tag) => {
type: REALTIME,
};
};
-const pingCall = async (hosts, pingEval, tag) => {
- if (hosts === undefined) {
- console.log(
- "Hosts is undefined. The ping monitor has changed in version 3.0.10. Please update your monitor with tag",
- tag,
- );
- return {
- status: DOWN,
- latency: 0,
- type: ERROR,
- };
- }
- let arrayOfPings = [];
- for (let i = 0; i < hosts.length; i++) {
- const host = hosts[i];
- arrayOfPings.push(
- await Ping(host.type, host.host, host.timeout, host.count),
- );
- }
- let respBase64 = Buffer.from(JSON.stringify(arrayOfPings)).toString("base64");
-
- let evalResp = undefined;
-
- try {
- evalResp = await eval(pingEval + `("${respBase64}")`);
- } catch (error) {
- console.log(`Error in pingEval for ${tag}`, error.message);
- }
- //reduce to get the status
- return {
- status: evalResp.status,
- latency: evalResp.latency,
- type: REALTIME,
- };
-};
-const apiCall = async (
- envSecrets,
- url,
- method,
- headers,
- body,
- timeout,
- monitorEval,
- tag,
-) => {
- let axiosHeaders = {};
- axiosHeaders["User-Agent"] = "Kener/3.0.2";
- axiosHeaders["Accept"] = "*/*";
- const start = Date.now();
- //replace all secrets
- for (let i = 0; i < envSecrets.length; i++) {
- const secret = envSecrets[i];
- if (!!body) {
- body = ReplaceAllOccurrences(body, secret.find, secret.replace);
- }
- if (!!url) {
- url = ReplaceAllOccurrences(url, secret.find, secret.replace);
- }
- if (!!headers) {
- headers = ReplaceAllOccurrences(headers, secret.find, secret.replace);
- }
- }
- if (!!headers) {
- headers = JSON.parse(headers);
- headers = headers.reduce((acc, header) => {
- acc[header.key] = header.value;
- return acc;
- }, {});
- axiosHeaders = { ...axiosHeaders, ...headers };
- }
-
- const options = {
- method: method,
- headers: headers,
- timeout: timeout,
- transformResponse: (r) => r,
- };
- if (!!headers) {
- options.headers = headers;
- }
- if (!!body) {
- options.data = body;
- }
- let statusCode = 500;
- let latency = 0;
- let resp = "";
- let timeoutError = false;
- try {
- let data = await axios(url, options);
- statusCode = data.status;
- resp = data.data;
- } catch (err) {
- console.log(`Error in apiCall ${tag}`, err.message);
- if (
- err.message.startsWith("timeout of") &&
- err.message.endsWith("exceeded")
- ) {
- timeoutError = true;
- }
- if (err.response !== undefined && err.response.status !== undefined) {
- statusCode = err.response.status;
- }
- if (err.response !== undefined && err.response.data !== undefined) {
- resp = err.response.data;
- } else {
- resp = JSON.stringify(resp);
- }
- } finally {
- const end = Date.now();
- latency = end - start;
- if (resp === undefined || resp === null) {
- resp = "";
- }
- }
- resp = Buffer.from(resp).toString("base64");
-
- let evalResp = undefined;
-
- try {
- evalResp = await eval(
- monitorEval + `(${statusCode}, ${latency}, "${resp}")`,
- );
- } catch (error) {
- console.log(`Error in monitorEval for ${tag}`, error.message);
- }
-
- if (evalResp === undefined || evalResp === null) {
- evalResp = {
- status: DOWN,
- latency: latency,
- type: ERROR,
- };
- } else if (
- evalResp.status === undefined ||
- evalResp.status === null ||
- [UP, DOWN, DEGRADED].indexOf(evalResp.status) === -1
- ) {
- evalResp = {
- status: DOWN,
- latency: latency,
- type: ERROR,
- };
- } else {
- evalResp.type = REALTIME;
- }
-
- let toWrite = {
- status: DOWN,
- latency: latency,
- type: ERROR,
- };
- if (evalResp.status !== undefined && evalResp.status !== null) {
- toWrite.status = evalResp.status;
- }
- if (evalResp.latency !== undefined && evalResp.latency !== null) {
- toWrite.latency = evalResp.latency;
- }
- if (evalResp.type !== undefined && evalResp.type !== null) {
- toWrite.type = evalResp.type;
- }
- if (timeoutError) {
- toWrite.type = TIMEOUT;
- }
-
- return toWrite;
-};
-
-async function dsnChecker(dnsResolver, host, recordType, matchType, values) {
- try {
- let queryStartTime = Date.now();
- let dnsRes = await dnsResolver.getRecord(host, recordType);
- let latency = Date.now() - queryStartTime;
-
- if (dnsRes[recordType] === undefined) {
- return {
- status: DOWN,
- latency: latency,
- type: REALTIME,
- };
- }
- let data = dnsRes[recordType];
- let dnsData = data.map((d) => d.data);
- if (matchType === "ALL") {
- for (let i = 0; i < values.length; i++) {
- if (dnsData.indexOf(values[i].trim()) === -1) {
- return {
- status: DOWN,
- latency: latency,
- type: REALTIME,
- };
- }
- }
- return {
- status: UP,
- latency: latency,
- type: REALTIME,
- };
- } else if (matchType === "ANY") {
- for (let i = 0; i < values.length; i++) {
- if (dnsData.indexOf(values[i].trim()) !== -1) {
- return {
- status: UP,
- latency: latency,
- type: REALTIME,
- };
- }
- }
- return {
- status: DOWN,
- latency: latency,
- type: REALTIME,
- };
- }
- } catch (error) {
- console.log("Error in dnsChecker", error);
- return {
- status: DOWN,
- latency: 0,
- type: REALTIME,
- };
- }
-}
const Minuter = async (monitor) => {
let realTimeData = {};
let manualData = {};
const startOfMinute = GetMinuteStartNowTimestampUTC();
+ const serviceClient = new Service(monitor);
if (monitor.monitor_type === "API") {
- let envSecrets = GetRequiredSecrets(
- `${monitor.type_data.url} ${monitor.type_data.body} ${JSON.stringify(monitor.type_data.headers)}`,
- );
-
- if (monitor.type_data.eval === "") {
- monitor.type_data.eval = defaultEval;
- }
-
- let apiResponse = await apiCall(
- envSecrets,
- monitor.type_data.url,
- monitor.type_data.method,
- JSON.stringify(monitor.type_data.headers),
- monitor.type_data.body,
- monitor.type_data.timeout,
- monitor.type_data.eval,
- monitor.tag,
- );
+ let apiResponse = await serviceClient.execute();
realTimeData[startOfMinute] = apiResponse;
+
+ //if timeout, retry after 500ms
if (apiResponse.type === TIMEOUT) {
apiQueue.push(async (cb) => {
await Wait(500); //wait for 500ms
- console.log(
- "Retrying api call for " +
- monitor.name +
- " at " +
- startOfMinute +
- " due to timeout",
- );
- apiCall(
- envSecrets,
- monitor.type_data.url,
- monitor.type_data.method,
- JSON.stringify(monitor.type_data.headers),
- monitor.type_data.body,
- monitor.type_data.timeout,
- monitor.type_data.eval,
- monitor.tag,
- ).then(async (data) => {
+ console.log("Retrying api call for " + monitor.name + " at " + startOfMinute + " due to timeout");
+ serviceClient.execute().then(async (data) => {
await db.insertMonitoringData({
monitor_tag: monitor.tag,
timestamp: startOfMinute,
@@ -441,35 +127,11 @@ const Minuter = async (monitor) => {
});
}
} else if (monitor.monitor_type === "PING") {
- if (!!!monitor.type_data.pingEval) {
- monitor.type_data.pingEval = defaultPingEval;
- }
- let pingResponse = await pingCall(
- monitor.type_data.hosts,
- monitor.type_data.pingEval,
- monitor.tag,
- );
- realTimeData[startOfMinute] = pingResponse;
+ realTimeData[startOfMinute] = await serviceClient.execute();
} else if (monitor.monitor_type === "TCP") {
- if (!!!monitor.type_data.tcpEval) {
- monitor.type_data.tcpEval = defaultTcpEval;
- }
- let pingResponse = await tcpCall(
- monitor.type_data.hosts,
- monitor.type_data.tcpEval,
- monitor.tag,
- );
- realTimeData[startOfMinute] = pingResponse;
+ realTimeData[startOfMinute] = await serviceClient.execute();
} else if (monitor.monitor_type === "DNS") {
- const dnsResolver = new DNSResolver(monitor.type_data.nameServer);
- let dnsResponse = await dsnChecker(
- dnsResolver,
- monitor.type_data.host,
- monitor.type_data.lookupRecord,
- monitor.type_data.matchType,
- monitor.type_data.values,
- );
- realTimeData[startOfMinute] = dnsResponse;
+ realTimeData[startOfMinute] = await serviceClient.execute();
}
manualData = await manualIncident(monitor);
diff --git a/src/lib/server/db/dbimpl.js b/src/lib/server/db/dbimpl.js
index 2d088398..d0710549 100644
--- a/src/lib/server/db/dbimpl.js
+++ b/src/lib/server/db/dbimpl.js
@@ -3,655 +3,648 @@ import { GetMinuteStartNowTimestampUTC } from "../tool.js";
import Knex from "knex";
class DbImpl {
- knex;
- constructor(opts) {
- // Initialize Knex
- this.knex = Knex(opts);
-
- this.init();
- }
-
- async init() {}
-
- async insertMonitoringData(data) {
- const { monitor_tag, timestamp, status, latency, type } = data;
- return await this.knex("monitoring_data")
- .insert({ monitor_tag, timestamp, status, latency, type })
- .onConflict(["monitor_tag", "timestamp"])
- .merge({ status, latency, type });
- }
-
- //given monitor_tag, start and end timestamp in utc seconds return data
- async getMonitoringData(monitor_tag, start, end) {
- return await this.knex("monitoring_data")
- .where("monitor_tag", monitor_tag)
- .where("timestamp", ">=", start)
- .where("timestamp", "<=", end)
- .orderBy("timestamp", "asc");
- }
-
- //get latest data for a monitor_tag
- async getLatestMonitoringData(monitor_tag) {
- return await this.knex("monitoring_data")
- .where("monitor_tag", monitor_tag)
- .orderBy("timestamp", "desc")
- .limit(1)
- .first();
- }
-
- //given monitor_tag, start and end timestamp in utc seconds return total degraded, up, down, avg(latency), max(latency), min(latency)
- async getAggregatedMonitoringData(monitor_tag, start, end) {
- return await this.knex("monitoring_data")
- .select(
- this.knex.raw("COUNT(CASE WHEN status = 'DEGRADED' THEN 1 END) as DEGRADED"),
- this.knex.raw("COUNT(CASE WHEN status = 'UP' THEN 1 END) as UP"),
- this.knex.raw("COUNT(CASE WHEN status = 'DOWN' THEN 1 END) as DOWN"),
- this.knex.raw("AVG(latency) as avg_latency"),
- this.knex.raw("MAX(latency) as max_latency"),
- this.knex.raw("MIN(latency) as min_latency")
- )
- .where("monitor_tag", monitor_tag)
- .where("timestamp", ">=", start)
- .where("timestamp", "<=", end)
- .first();
- }
-
- //get the last status before the timestamp given monitor_tag and start timestamp
- async getLastStatusBefore(monitor_tag, timestamp) {
- return await this.knex("monitoring_data")
- .where("monitor_tag", monitor_tag)
- .where("timestamp", "<", timestamp)
- .orderBy("timestamp", "desc")
- .limit(1)
- .first();
- }
-
- async getDataGroupByDayAlternative(monitor_tag, start, end) {
- // Fetch all raw data
- //{ timestamp: 1732900380, status: 'UP', latency: 42 }
-
- const data = await this.knex("monitoring_data")
- .select("timestamp", "status", "latency")
- .where("monitor_tag", monitor_tag)
- .andWhere("timestamp", ">=", start)
- .andWhere("timestamp", "<=", end)
- .orderBy("timestamp", "asc");
- return data;
- }
-
- async background() {
- //clear data older than 90 days
- let ninetyDaysAgo = GetMinuteStartNowTimestampUTC() - 86400 * 100;
-
- return await this.knex("monitoring_data").where("timestamp", "<", ninetyDaysAgo).del();
- }
-
- async consecutivelyStatusFor(monitor_tag, status, lastX) {
- const result = await this.knex
- .with("last_records", (qb) => {
- qb.select("*")
- .from("monitoring_data")
- .where("monitor_tag", monitor_tag)
- .orderBy("timestamp", "desc")
- .limit(lastX);
- })
- .select(
- this.knex.raw(
- "CASE WHEN COUNT(*) <= SUM(CASE WHEN status = ? THEN 1 ELSE 0 END) THEN 1 ELSE 0 END as isAffected",
- [status]
- )
- )
- .from("last_records")
- .first();
-
- return result.isAffected === 1;
- }
-
- //insert alert
- async insertAlert(data) {
- return await this.knex("monitor_alerts").insert({
- monitor_tag: data.monitor_tag,
- monitor_status: data.monitor_status,
- alert_status: data.alert_status,
- health_checks: data.health_checks
- });
- }
-
- //check if alert exists given monitor_tag, monitor_status, trigger_status
- async alertExists(monitor_tag, monitor_status, alert_status) {
- const result = await this.knex("monitor_alerts")
- .count("* as count")
- .where({
- monitor_tag,
- monitor_status,
- alert_status
- })
- .first();
- return result.count > 0;
- }
-
- //get active alert given incident id, monitor tag, monitor status
- async getActiveAlertIncident(monitor_tag, monitor_status, incident_number) {
- return await this.knex("monitor_alerts")
- .where({
- monitor_tag,
- monitor_status,
- incident_number
- })
- .first();
- }
-
- //return active alert for a monitor_tag, monitor_status, trigger_status = ACTIVE
- async getActiveAlert(monitor_tag, monitor_status, alert_status) {
- return await this.knex("monitor_alerts")
- .where({
- monitor_tag,
- monitor_status,
- alert_status
- })
- .first();
- }
-
- //get all monitor_alerts paginated descending order
- async getMonitorAlertsPaginated(page, limit) {
- return await this.knex("monitor_alerts")
- .orderBy("id", "desc")
- .limit(limit)
- .offset((page - 1) * limit);
- }
-
- //get total count of monitor_alerts
- async getMonitorAlertsCount() {
- return await this.knex("monitor_alerts").count("* as count").first();
- }
-
- //update alert to inactive given monitor_tag, monitor_status, given id
- async updateAlertStatus(id, alert_status) {
- return await this.knex("monitor_alerts").where({ id }).update({
- alert_status,
- updated_at: this.knex.fn.now()
- });
- }
-
- //increment health_checks for an alert given id
- async incrementAlertHealthChecks(id) {
- return await this.knex("monitor_alerts")
- .where({ id })
- .increment("health_checks", 1)
- .update({ updated_at: this.knex.fn.now() });
- }
-
- //add incident_number to an alert given id
- async addIncidentNumberToAlert(id, incident_number) {
- return await this.knex("monitor_alerts").where({ id }).update({
- incident_number,
- updated_at: this.knex.fn.now()
- });
- }
-
- //insert or update site data
- async insertOrUpdateSiteData(key, value, data_type) {
- return await this.knex("site_data")
- .insert({ key, value, data_type })
- .onConflict("key")
- .merge({ value, updated_at: this.knex.fn.now() });
- }
- //get all site data
- async getAllSiteData() {
- return await this.knex("site_data");
- }
-
- //given key get data
- async getSiteData(key) {
- return await this.knex("site_data").select("value").where("key", key).first();
- }
-
- async getSiteDataByKey(key) {
- //select all
- return await this.knex("site_data").where("key", key).first();
- }
-
- //insert into monitors
- async insertMonitor(data) {
- return await this.knex("monitors").insert({
- tag: data.tag,
- name: data.name,
- description: data.description,
- image: data.image,
- cron: data.cron,
- default_status: data.default_status,
- status: data.status,
- category_name: data.category_name,
- monitor_type: data.monitor_type,
- type_data: data.type_data,
- day_degraded_minimum_count: data.day_degraded_minimum_count,
- day_down_minimum_count: data.day_down_minimum_count,
- include_degraded_in_downtime: data.include_degraded_in_downtime,
- created_at: this.knex.fn.now(),
- updated_at: this.knex.fn.now()
- });
- }
-
- //update monitor by id
- async updateMonitor(data) {
- return await this.knex("monitors").where({ id: data.id }).update({
- tag: data.tag,
- name: data.name,
- description: data.description,
- image: data.image,
- cron: data.cron,
- default_status: data.default_status,
- status: data.status,
- category_name: data.category_name,
- monitor_type: data.monitor_type,
- type_data: data.type_data,
- day_degraded_minimum_count: data.day_degraded_minimum_count,
- day_down_minimum_count: data.day_down_minimum_count,
- include_degraded_in_downtime: data.include_degraded_in_downtime,
- updated_at: this.knex.fn.now()
- });
- }
-
- async updateMonitorTrigger(data) {
- return await this.knex("monitors").where({ id: data.id }).update({
- down_trigger: data.down_trigger,
- degraded_trigger: data.degraded_trigger,
- updated_at: this.knex.fn.now()
- });
- }
-
- //get monitors given status
- async getMonitors(data) {
- let query = this.knex("monitors").where("status", data.status);
- if (data.category_name && data.category_name !== "All Categories") {
- query = query.andWhere("category_name", data.category_name);
- }
- return await query.orderBy("id", "desc");
- }
-
- //get monitor by tag
- async getMonitorByTag(tag) {
- return await this.knex("monitors").where("tag", tag).first();
- }
-
- //insert alert
- async createNewTrigger(data) {
- return await this.knex("triggers").insert({
- name: data.name,
- trigger_type: data.trigger_type,
- trigger_status: data.trigger_status,
- trigger_meta: data.trigger_meta,
- trigger_desc: data.trigger_desc,
- created_at: this.knex.fn.now(),
- updated_at: this.knex.fn.now()
- });
- }
-
- //update alert
- async updateTrigger(data) {
- return await this.knex("triggers").where({ id: data.id }).update({
- name: data.name,
- trigger_type: data.trigger_type,
- trigger_status: data.trigger_status,
- trigger_desc: data.trigger_desc,
- trigger_meta: data.trigger_meta,
- updated_at: this.knex.fn.now()
- });
- }
-
- //get all alerts with given status
- async getTriggers(data) {
- return await this.knex("triggers")
- .where("trigger_status", data.status)
- .orderBy("id", "desc");
- }
-
- //get trigger by id
- async getTriggerByID(id) {
- return await this.knex("triggers").where("id", id).first();
- }
-
- //get count of users
- async getUsersCount() {
- return await this.knex("users").count("* as count").first();
- }
-
- // get user by email, do not return password_hash
- async getUserByEmail(email) {
- return await this.knex("users")
- .select(
- "id",
- "email",
- "name",
- "is_active",
- "is_verified",
- "role",
- "created_at",
- "updated_at"
- )
- .where("email", email)
- .first();
- }
- async getUserPasswordHashById(id) {
- return await this.knex("users").select("password_hash").where("id", id).first();
- }
-
- //insert user
- async insertUser(data) {
- return await this.knex("users").insert({
- email: data.email,
- name: data.name,
- password_hash: data.password_hash,
- role: data.role,
- created_at: this.knex.fn.now(),
- updated_at: this.knex.fn.now()
- });
- }
-
- //update password
- async updateUserPassword(data) {
- return await this.knex("users").where({ id: data.id }).update({
- password_hash: data.password_hash,
- updated_at: this.knex.fn.now()
- });
- }
-
- //new api key
- async createNewApiKey(data) {
- return await this.knex("api_keys").insert({
- name: data.name,
- hashed_key: data.hashed_key,
- masked_key: data.masked_key,
- created_at: this.knex.fn.now(),
- updated_at: this.knex.fn.now()
- });
- }
-
- //update status of api key
- async updateApiKeyStatus(data) {
- return await this.knex("api_keys").where({ id: data.id }).update({
- status: data.status,
- updated_at: this.knex.fn.now()
- });
- }
-
- //get key by hashed_key
- async getApiKeyByHashedKey(hashed_key) {
- return await this.knex("api_keys").where("hashed_key", hashed_key).first();
- }
-
- //get all api keys
- async getAllApiKeys() {
- return await this.knex("api_keys").orderBy("id", "desc");
- }
-
- //close
- async close() {
- return await this.knex.destroy();
- }
-
- async createIncident(data) {
- return await this.knex("incidents").insert({
- title: data.title,
- start_date_time: data.start_date_time,
- end_date_time: data.end_date_time,
- status: data.status,
- state: data.state,
- created_at: this.knex.fn.now(),
- updated_at: this.knex.fn.now(),
- incident_type: data.incident_type
- });
- }
-
- //get incidents paginated
- async getIncidentsPaginatedDesc(page, limit, filter) {
- let query = this.knex("incidents").select("*").whereRaw("1=1");
-
- if (filter && filter.status) {
- query = query.andWhere("status", filter.status);
- }
-
- if (filter && filter.start) {
- query = query.andWhere("start_date_time", ">=", filter.start);
- }
-
- if (filter && filter.end) {
- query = query.andWhere("start_date_time", "<=", filter.end);
- }
-
- if (filter && filter.state) {
- query = query.andWhere("state", filter.state);
- }
-
- query = query
- .orderBy("id", "desc")
- .limit(limit)
- .offset((page - 1) * limit);
-
- return await query;
- }
-
- //get last 10 recent updated incidents
- async getRecentUpdatedIncidents(limit, start, end) {
- return await this.knex("incidents")
- .where("status", "OPEN")
- .andWhere("start_date_time", ">=", start)
- .andWhere("start_date_time", "<=", end)
- .orderBy("updated_at", "desc")
- .limit(limit);
- }
-
- //get id of first incident less than given start_date_time
- async getPreviousIncidentId(start_date_time) {
- return await this.knex("incidents")
- .select("id")
- .where("start_date_time", "<", start_date_time)
- .orderBy("start_date_time", "desc")
- .first();
- }
-
- //get incidents where ts between start and end
- async getIncidentsBetween(start, end) {
- return await this.knex("incidents")
- .where("status", "OPEN")
- .andWhere("start_date_time", ">=", start)
- .andWhere("start_date_time", "<=", end)
- .orderBy("start_date_time", "asc");
- }
-
- //get total incident count
- async getIncidentsCount(filter) {
- let query = this.knex("incidents").count("* as count");
-
- if (filter && filter.status) {
- query = query.where("status", filter.status);
- }
-
- return await query.first();
- }
-
- //update incident given id
- async updateIncident(data) {
- return await this.knex("incidents").where({ id: data.id }).update({
- title: data.title,
- start_date_time: data.start_date_time,
- end_date_time: data.end_date_time,
- status: data.status,
- state: data.state,
- updated_at: this.knex.fn.now()
- });
- }
-
- //set incident end time to null
- async setIncidentEndTimeToNull(id) {
- return await this.knex("incidents").where({ id }).update({
- end_date_time: null,
- updated_at: this.knex.fn.now()
- });
- }
-
- async insertIncidentMonitor(data) {
- return await this.knex("incident_monitors").insert({
- monitor_tag: data.monitor_tag,
- monitor_impact: data.monitor_impact,
- incident_id: data.incident_id
- });
- }
-
- //get incident by id
- async getIncidentById(id) {
- return await this.knex("incidents")
- .select(
- "id",
- "title",
- "start_date_time",
- "end_date_time",
- "created_at",
- "updated_at",
- "status",
- "state",
- "incident_type"
- )
- .where("id", id)
- .first();
- }
-
- //get incident_monitors by incident_id
- async getIncidentMonitorsByIncidentID(incident_id) {
- return await this.knex("incident_monitors")
- .select("monitor_tag", "monitor_impact")
- .where("incident_id", incident_id);
- }
-
- //given a monitor tag get incidents last 90 days status = OPEN
- async getIncidentsByMonitorTag(monitor_tag, start, end) {
- return await this.knex("incidents as i")
- .select(
- "i.id as id",
- "i.title as title",
- "i.start_date_time as start_date_time",
- "i.end_date_time as end_date_time",
- "i.created_at as created_at",
- "i.updated_at as updated_at",
- "i.status as status",
- "i.state as state",
- "i.incident_type as incident_type",
- "im.monitor_impact"
- )
- .innerJoin("incident_monitors as im", "i.id", "im.incident_id")
- .where("im.monitor_tag", monitor_tag)
- .andWhere("i.start_date_time", ">=", start)
- .andWhere("i.start_date_time", "<=", end)
- .andWhere("i.status", "OPEN");
- }
-
- //given a timestamp get incidents that are open and start time is less than given timestamp
- async getIncidentsByMonitorTagRealtime(monitor_tag, timestamp) {
- return await this.knex("incidents as i")
- .select(
- "i.id as id",
- "i.start_date_time as start_date_time",
- "i.end_date_time as end_date_time",
- "im.monitor_impact"
- )
- .innerJoin("incident_monitors as im", "i.id", "im.incident_id")
- .where("im.monitor_tag", monitor_tag)
- .andWhere("i.start_date_time", "<=", timestamp)
- .andWhere("i.status", "OPEN")
- .andWhere("i.incident_type", "INCIDENT")
- .andWhere("i.state", "!=", "RESOLVED");
- }
-
- //get maintenance incidents by monitor tag
- async getMaintenanceByMonitorTagRealtime(monitor_tag, timestamp) {
- return await this.knex("incidents as i")
- .select(
- "i.id as id",
- "i.start_date_time as start_date_time",
- "i.end_date_time as end_date_time",
- "im.monitor_impact"
- )
- .innerJoin("incident_monitors as im", "i.id", "im.incident_id")
- .where("im.monitor_tag", monitor_tag)
- .andWhere("i.start_date_time", "<=", timestamp)
- .andWhere("i.end_date_time", ">=", timestamp)
- .andWhere("i.status", "OPEN")
- .andWhere("i.incident_type", "MAINTENANCE")
- .andWhere("i.state", "=", "RESOLVED");
- }
-
- //given array of ids get incidents
- async getIncidentsByIds(ids) {
- return await this.knex("incidents").whereIn("id", ids).andWhere("status", "OPEN");
- }
-
- //remove monitor tag from incident given incident_id and monitor_tag
- async removeIncidentMonitor(incident_id, monitor_tag) {
- return await this.knex("incident_monitors").where({ incident_id, monitor_tag }).del();
- }
-
- //add monitor tag to incident given incident_id and monitor_tag along with monitor_impact
- async insertIncidentMonitor(incident_id, monitor_tag, monitor_impact) {
- return await this.knex("incident_monitors")
- .insert({ monitor_tag, monitor_impact, incident_id })
- .onConflict(["monitor_tag", "incident_id"])
- .merge({ monitor_impact, updated_at: this.knex.fn.now() });
- }
-
- //insert incident_comment
- async insertIncidentComment(incident_id, comment, state, commented_at) {
- return await this.knex("incident_comments").insert({
- comment,
- incident_id,
- state,
- commented_at,
- created_at: this.knex.fn.now(),
- updated_at: this.knex.fn.now()
- });
- }
-
- //get comments for an incident
- async getIncidentComments(incident_id) {
- return await this.knex("incident_comments")
- .where("incident_id", incident_id)
- .orderBy("commented_at", "desc");
- }
-
- //get active comments
- async getActiveIncidentComments(incident_id) {
- return await this.knex("incident_comments")
- .where("incident_id", incident_id)
- .andWhere("status", "ACTIVE")
- .orderBy("commented_at", "desc");
- }
-
- //get comment by id and incident_id
- async getIncidentCommentByIDAndIncident(incident_id, id) {
- return await this.knex("incident_comments").where({ incident_id, id }).first();
- }
-
- //update incident comment
- async updateIncidentCommentByID(id, comment, state, commented_at) {
- return await this.knex("incident_comments").where({ id }).update({
- comment,
- state,
- commented_at,
- updated_at: this.knex.fn.now()
- });
- }
-
- //update status of incident comment
- async updateIncidentCommentStatusByID(id, status) {
- return await this.knex("incident_comments").where({ id }).update({
- status,
- updated_at: this.knex.fn.now()
- });
- }
-
- //getIncidentCommentByID
- async getIncidentCommentByID(id) {
- return await this.knex("incident_comments").where({ id }).first();
- }
+ knex;
+ constructor(opts) {
+ // Initialize Knex
+ this.knex = Knex(opts);
+
+ this.init();
+ }
+
+ async init() {}
+
+ async insertMonitoringData(data) {
+ const { monitor_tag, timestamp, status, latency, type } = data;
+ return await this.knex("monitoring_data")
+ .insert({ monitor_tag, timestamp, status, latency, type })
+ .onConflict(["monitor_tag", "timestamp"])
+ .merge({ status, latency, type });
+ }
+
+ //given monitor_tag, start and end timestamp in utc seconds return data
+ async getMonitoringData(monitor_tag, start, end) {
+ return await this.knex("monitoring_data")
+ .where("monitor_tag", monitor_tag)
+ .where("timestamp", ">=", start)
+ .where("timestamp", "<=", end)
+ .orderBy("timestamp", "asc");
+ }
+
+ //get latest data for a monitor_tag
+ async getLatestMonitoringData(monitor_tag) {
+ return await this.knex("monitoring_data")
+ .where("monitor_tag", monitor_tag)
+ .orderBy("timestamp", "desc")
+ .limit(1)
+ .first();
+ }
+
+ //given monitor_tag, start and end timestamp in utc seconds return total degraded, up, down, avg(latency), max(latency), min(latency)
+ async getAggregatedMonitoringData(monitor_tag, start, end) {
+ return await this.knex("monitoring_data")
+ .select(
+ this.knex.raw("COUNT(CASE WHEN status = 'DEGRADED' THEN 1 END) as DEGRADED"),
+ this.knex.raw("COUNT(CASE WHEN status = 'UP' THEN 1 END) as UP"),
+ this.knex.raw("COUNT(CASE WHEN status = 'DOWN' THEN 1 END) as DOWN"),
+ this.knex.raw("AVG(latency) as avg_latency"),
+ this.knex.raw("MAX(latency) as max_latency"),
+ this.knex.raw("MIN(latency) as min_latency"),
+ )
+ .where("monitor_tag", monitor_tag)
+ .where("timestamp", ">=", start)
+ .where("timestamp", "<=", end)
+ .first();
+ }
+
+ //get the last status before the timestamp given monitor_tag and start timestamp
+ async getLastStatusBefore(monitor_tag, timestamp) {
+ return await this.knex("monitoring_data")
+ .where("monitor_tag", monitor_tag)
+ .where("timestamp", "<", timestamp)
+ .orderBy("timestamp", "desc")
+ .limit(1)
+ .first();
+ }
+
+ async getDataGroupByDayAlternative(monitor_tag, start, end) {
+ // Fetch all raw data
+ //{ timestamp: 1732900380, status: 'UP', latency: 42 }
+
+ const data = await this.knex("monitoring_data")
+ .select("timestamp", "status", "latency")
+ .where("monitor_tag", monitor_tag)
+ .andWhere("timestamp", ">=", start)
+ .andWhere("timestamp", "<=", end)
+ .orderBy("timestamp", "asc");
+ return data;
+ }
+
+ async background() {
+ //clear data older than 90 days
+ let ninetyDaysAgo = GetMinuteStartNowTimestampUTC() - 86400 * 100;
+
+ return await this.knex("monitoring_data").where("timestamp", "<", ninetyDaysAgo).del();
+ }
+
+ async consecutivelyStatusFor(monitor_tag, status, lastX) {
+ const result = await this.knex
+ .with("last_records", (qb) => {
+ qb.select("*")
+ .from("monitoring_data")
+ .where("monitor_tag", monitor_tag)
+ .orderBy("timestamp", "desc")
+ .limit(lastX);
+ })
+ .select(
+ this.knex.raw(
+ "CASE WHEN COUNT(*) <= SUM(CASE WHEN status = ? THEN 1 ELSE 0 END) THEN 1 ELSE 0 END as isAffected",
+ [status],
+ ),
+ )
+ .from("last_records")
+ .first();
+
+ return result.isAffected === 1;
+ }
+
+ //insert alert
+ async insertAlert(data) {
+ return await this.knex("monitor_alerts").insert({
+ monitor_tag: data.monitor_tag,
+ monitor_status: data.monitor_status,
+ alert_status: data.alert_status,
+ health_checks: data.health_checks,
+ });
+ }
+
+ //check if alert exists given monitor_tag, monitor_status, trigger_status
+ async alertExists(monitor_tag, monitor_status, alert_status) {
+ const result = await this.knex("monitor_alerts")
+ .count("* as count")
+ .where({
+ monitor_tag,
+ monitor_status,
+ alert_status,
+ })
+ .first();
+ return result.count > 0;
+ }
+
+ //get active alert given incident id, monitor tag, monitor status
+ async getActiveAlertIncident(monitor_tag, monitor_status, incident_number) {
+ return await this.knex("monitor_alerts")
+ .where({
+ monitor_tag,
+ monitor_status,
+ incident_number,
+ })
+ .first();
+ }
+
+ //return active alert for a monitor_tag, monitor_status, trigger_status = ACTIVE
+ async getActiveAlert(monitor_tag, monitor_status, alert_status) {
+ return await this.knex("monitor_alerts")
+ .where({
+ monitor_tag,
+ monitor_status,
+ alert_status,
+ })
+ .first();
+ }
+
+ //get all monitor_alerts paginated descending order
+ async getMonitorAlertsPaginated(page, limit) {
+ return await this.knex("monitor_alerts")
+ .orderBy("id", "desc")
+ .limit(limit)
+ .offset((page - 1) * limit);
+ }
+
+ //get total count of monitor_alerts
+ async getMonitorAlertsCount() {
+ return await this.knex("monitor_alerts").count("* as count").first();
+ }
+
+ //update alert to inactive given monitor_tag, monitor_status, given id
+ async updateAlertStatus(id, alert_status) {
+ return await this.knex("monitor_alerts").where({ id }).update({
+ alert_status,
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ //increment health_checks for an alert given id
+ async incrementAlertHealthChecks(id) {
+ return await this.knex("monitor_alerts")
+ .where({ id })
+ .increment("health_checks", 1)
+ .update({ updated_at: this.knex.fn.now() });
+ }
+
+ //add incident_number to an alert given id
+ async addIncidentNumberToAlert(id, incident_number) {
+ return await this.knex("monitor_alerts").where({ id }).update({
+ incident_number,
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ //insert or update site data
+ async insertOrUpdateSiteData(key, value, data_type) {
+ return await this.knex("site_data")
+ .insert({ key, value, data_type })
+ .onConflict("key")
+ .merge({ value, updated_at: this.knex.fn.now() });
+ }
+ //get all site data
+ async getAllSiteData() {
+ return await this.knex("site_data");
+ }
+
+ //given key get data
+ async getSiteData(key) {
+ return await this.knex("site_data").select("value").where("key", key).first();
+ }
+
+ async getSiteDataByKey(key) {
+ //select all
+ return await this.knex("site_data").where("key", key).first();
+ }
+
+ //insert into monitors
+ async insertMonitor(data) {
+ return await this.knex("monitors").insert({
+ tag: data.tag,
+ name: data.name,
+ description: data.description,
+ image: data.image,
+ cron: data.cron,
+ default_status: data.default_status,
+ status: data.status,
+ category_name: data.category_name,
+ monitor_type: data.monitor_type,
+ type_data: data.type_data,
+ day_degraded_minimum_count: data.day_degraded_minimum_count,
+ day_down_minimum_count: data.day_down_minimum_count,
+ include_degraded_in_downtime: data.include_degraded_in_downtime,
+ created_at: this.knex.fn.now(),
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ //update monitor by id
+ async updateMonitor(data) {
+ return await this.knex("monitors").where({ id: data.id }).update({
+ tag: data.tag,
+ name: data.name,
+ description: data.description,
+ image: data.image,
+ cron: data.cron,
+ default_status: data.default_status,
+ status: data.status,
+ category_name: data.category_name,
+ monitor_type: data.monitor_type,
+ type_data: data.type_data,
+ day_degraded_minimum_count: data.day_degraded_minimum_count,
+ day_down_minimum_count: data.day_down_minimum_count,
+ include_degraded_in_downtime: data.include_degraded_in_downtime,
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ async updateMonitorTrigger(data) {
+ return await this.knex("monitors").where({ id: data.id }).update({
+ down_trigger: data.down_trigger,
+ degraded_trigger: data.degraded_trigger,
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ //get monitors given status
+ async getMonitors(data) {
+ let query = this.knex("monitors").whereRaw("1=1");
+ if (!!data.status) {
+ query = query.andWhere("status", data.status);
+ }
+ if (data.category_name && data.category_name !== "All Categories") {
+ query = query.andWhere("category_name", data.category_name);
+ }
+ if (!!data.id) {
+ query = query.andWhere("id", data.id);
+ }
+ return await query.orderBy("id", "desc");
+ }
+
+ //get monitor by tag
+ async getMonitorByTag(tag) {
+ return await this.knex("monitors").where("tag", tag).first();
+ }
+
+ //insert alert
+ async createNewTrigger(data) {
+ return await this.knex("triggers").insert({
+ name: data.name,
+ trigger_type: data.trigger_type,
+ trigger_status: data.trigger_status,
+ trigger_meta: data.trigger_meta,
+ trigger_desc: data.trigger_desc,
+ created_at: this.knex.fn.now(),
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ //update alert
+ async updateTrigger(data) {
+ return await this.knex("triggers").where({ id: data.id }).update({
+ name: data.name,
+ trigger_type: data.trigger_type,
+ trigger_status: data.trigger_status,
+ trigger_desc: data.trigger_desc,
+ trigger_meta: data.trigger_meta,
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ //get all alerts with given status
+ async getTriggers(data) {
+ return await this.knex("triggers").where("trigger_status", data.status).orderBy("id", "desc");
+ }
+
+ //get trigger by id
+ async getTriggerByID(id) {
+ return await this.knex("triggers").where("id", id).first();
+ }
+
+ //get count of users
+ async getUsersCount() {
+ return await this.knex("users").count("* as count").first();
+ }
+
+ // get user by email, do not return password_hash
+ async getUserByEmail(email) {
+ return await this.knex("users")
+ .select("id", "email", "name", "is_active", "is_verified", "role", "created_at", "updated_at")
+ .where("email", email)
+ .first();
+ }
+ async getUserPasswordHashById(id) {
+ return await this.knex("users").select("password_hash").where("id", id).first();
+ }
+
+ //insert user
+ async insertUser(data) {
+ return await this.knex("users").insert({
+ email: data.email,
+ name: data.name,
+ password_hash: data.password_hash,
+ role: data.role,
+ created_at: this.knex.fn.now(),
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ //update password
+ async updateUserPassword(data) {
+ return await this.knex("users").where({ id: data.id }).update({
+ password_hash: data.password_hash,
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ //new api key
+ async createNewApiKey(data) {
+ return await this.knex("api_keys").insert({
+ name: data.name,
+ hashed_key: data.hashed_key,
+ masked_key: data.masked_key,
+ created_at: this.knex.fn.now(),
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ //update status of api key
+ async updateApiKeyStatus(data) {
+ return await this.knex("api_keys").where({ id: data.id }).update({
+ status: data.status,
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ //get key by hashed_key
+ async getApiKeyByHashedKey(hashed_key) {
+ return await this.knex("api_keys").where("hashed_key", hashed_key).first();
+ }
+
+ //get all api keys
+ async getAllApiKeys() {
+ return await this.knex("api_keys").orderBy("id", "desc");
+ }
+
+ //close
+ async close() {
+ return await this.knex.destroy();
+ }
+
+ async createIncident(data) {
+ return await this.knex("incidents").insert({
+ title: data.title,
+ start_date_time: data.start_date_time,
+ end_date_time: data.end_date_time,
+ status: data.status,
+ state: data.state,
+ created_at: this.knex.fn.now(),
+ updated_at: this.knex.fn.now(),
+ incident_type: data.incident_type,
+ });
+ }
+
+ //get incidents paginated
+ async getIncidentsPaginatedDesc(page, limit, filter) {
+ let query = this.knex("incidents").select("*").whereRaw("1=1");
+
+ if (filter && filter.status) {
+ query = query.andWhere("status", filter.status);
+ }
+
+ if (filter && filter.start) {
+ query = query.andWhere("start_date_time", ">=", filter.start);
+ }
+
+ if (filter && filter.end) {
+ query = query.andWhere("start_date_time", "<=", filter.end);
+ }
+
+ if (filter && filter.state) {
+ query = query.andWhere("state", filter.state);
+ }
+
+ query = query
+ .orderBy("id", "desc")
+ .limit(limit)
+ .offset((page - 1) * limit);
+
+ return await query;
+ }
+
+ //get last 10 recent updated incidents
+ async getRecentUpdatedIncidents(limit, start, end) {
+ return await this.knex("incidents")
+ .where("status", "OPEN")
+ .andWhere("start_date_time", ">=", start)
+ .andWhere("start_date_time", "<=", end)
+ .orderBy("updated_at", "desc")
+ .limit(limit);
+ }
+
+ //get id of first incident less than given start_date_time
+ async getPreviousIncidentId(start_date_time) {
+ return await this.knex("incidents")
+ .select("id")
+ .where("start_date_time", "<", start_date_time)
+ .orderBy("start_date_time", "desc")
+ .first();
+ }
+
+ //get incidents where ts between start and end
+ async getIncidentsBetween(start, end) {
+ return await this.knex("incidents")
+ .where("status", "OPEN")
+ .andWhere("start_date_time", ">=", start)
+ .andWhere("start_date_time", "<=", end)
+ .orderBy("start_date_time", "asc");
+ }
+
+ //get total incident count
+ async getIncidentsCount(filter) {
+ let query = this.knex("incidents").count("* as count");
+
+ if (filter && filter.status) {
+ query = query.where("status", filter.status);
+ }
+
+ return await query.first();
+ }
+
+ //update incident given id
+ async updateIncident(data) {
+ return await this.knex("incidents").where({ id: data.id }).update({
+ title: data.title,
+ start_date_time: data.start_date_time,
+ end_date_time: data.end_date_time,
+ status: data.status,
+ state: data.state,
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ //set incident end time to null
+ async setIncidentEndTimeToNull(id) {
+ return await this.knex("incidents").where({ id }).update({
+ end_date_time: null,
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ async insertIncidentMonitor(data) {
+ return await this.knex("incident_monitors").insert({
+ monitor_tag: data.monitor_tag,
+ monitor_impact: data.monitor_impact,
+ incident_id: data.incident_id,
+ });
+ }
+
+ //get incident by id
+ async getIncidentById(id) {
+ return await this.knex("incidents")
+ .select(
+ "id",
+ "title",
+ "start_date_time",
+ "end_date_time",
+ "created_at",
+ "updated_at",
+ "status",
+ "state",
+ "incident_type",
+ )
+ .where("id", id)
+ .first();
+ }
+
+ //get incident_monitors by incident_id
+ async getIncidentMonitorsByIncidentID(incident_id) {
+ return await this.knex("incident_monitors")
+ .select("monitor_tag", "monitor_impact")
+ .where("incident_id", incident_id);
+ }
+
+ //given a monitor tag get incidents last 90 days status = OPEN
+ async getIncidentsByMonitorTag(monitor_tag, start, end) {
+ return await this.knex("incidents as i")
+ .select(
+ "i.id as id",
+ "i.title as title",
+ "i.start_date_time as start_date_time",
+ "i.end_date_time as end_date_time",
+ "i.created_at as created_at",
+ "i.updated_at as updated_at",
+ "i.status as status",
+ "i.state as state",
+ "i.incident_type as incident_type",
+ "im.monitor_impact",
+ )
+ .innerJoin("incident_monitors as im", "i.id", "im.incident_id")
+ .where("im.monitor_tag", monitor_tag)
+ .andWhere("i.start_date_time", ">=", start)
+ .andWhere("i.start_date_time", "<=", end)
+ .andWhere("i.status", "OPEN");
+ }
+
+ //given a timestamp get incidents that are open and start time is less than given timestamp
+ async getIncidentsByMonitorTagRealtime(monitor_tag, timestamp) {
+ return await this.knex("incidents as i")
+ .select(
+ "i.id as id",
+ "i.start_date_time as start_date_time",
+ "i.end_date_time as end_date_time",
+ "im.monitor_impact",
+ )
+ .innerJoin("incident_monitors as im", "i.id", "im.incident_id")
+ .where("im.monitor_tag", monitor_tag)
+ .andWhere("i.start_date_time", "<=", timestamp)
+ .andWhere("i.status", "OPEN")
+ .andWhere("i.incident_type", "INCIDENT")
+ .andWhere("i.state", "!=", "RESOLVED");
+ }
+
+ //get maintenance incidents by monitor tag
+ async getMaintenanceByMonitorTagRealtime(monitor_tag, timestamp) {
+ return await this.knex("incidents as i")
+ .select(
+ "i.id as id",
+ "i.start_date_time as start_date_time",
+ "i.end_date_time as end_date_time",
+ "im.monitor_impact",
+ )
+ .innerJoin("incident_monitors as im", "i.id", "im.incident_id")
+ .where("im.monitor_tag", monitor_tag)
+ .andWhere("i.start_date_time", "<=", timestamp)
+ .andWhere("i.end_date_time", ">=", timestamp)
+ .andWhere("i.status", "OPEN")
+ .andWhere("i.incident_type", "MAINTENANCE")
+ .andWhere("i.state", "=", "RESOLVED");
+ }
+
+ //given array of ids get incidents
+ async getIncidentsByIds(ids) {
+ return await this.knex("incidents").whereIn("id", ids).andWhere("status", "OPEN");
+ }
+
+ //remove monitor tag from incident given incident_id and monitor_tag
+ async removeIncidentMonitor(incident_id, monitor_tag) {
+ return await this.knex("incident_monitors").where({ incident_id, monitor_tag }).del();
+ }
+
+ //add monitor tag to incident given incident_id and monitor_tag along with monitor_impact
+ async insertIncidentMonitor(incident_id, monitor_tag, monitor_impact) {
+ return await this.knex("incident_monitors")
+ .insert({ monitor_tag, monitor_impact, incident_id })
+ .onConflict(["monitor_tag", "incident_id"])
+ .merge({ monitor_impact, updated_at: this.knex.fn.now() });
+ }
+
+ //insert incident_comment
+ async insertIncidentComment(incident_id, comment, state, commented_at) {
+ return await this.knex("incident_comments").insert({
+ comment,
+ incident_id,
+ state,
+ commented_at,
+ created_at: this.knex.fn.now(),
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ //get comments for an incident
+ async getIncidentComments(incident_id) {
+ return await this.knex("incident_comments").where("incident_id", incident_id).orderBy("commented_at", "desc");
+ }
+
+ //get active comments
+ async getActiveIncidentComments(incident_id) {
+ return await this.knex("incident_comments")
+ .where("incident_id", incident_id)
+ .andWhere("status", "ACTIVE")
+ .orderBy("commented_at", "desc");
+ }
+
+ //get comment by id and incident_id
+ async getIncidentCommentByIDAndIncident(incident_id, id) {
+ return await this.knex("incident_comments").where({ incident_id, id }).first();
+ }
+
+ //update incident comment
+ async updateIncidentCommentByID(id, comment, state, commented_at) {
+ return await this.knex("incident_comments").where({ id }).update({
+ comment,
+ state,
+ commented_at,
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ //update status of incident comment
+ async updateIncidentCommentStatusByID(id, status) {
+ return await this.knex("incident_comments").where({ id }).update({
+ status,
+ updated_at: this.knex.fn.now(),
+ });
+ }
+
+ //getIncidentCommentByID
+ async getIncidentCommentByID(id) {
+ return await this.knex("incident_comments").where({ id }).first();
+ }
}
export default DbImpl;
diff --git a/src/lib/server/services/apiCall.js b/src/lib/server/services/apiCall.js
new file mode 100644
index 00000000..bbbd9d7d
--- /dev/null
+++ b/src/lib/server/services/apiCall.js
@@ -0,0 +1,168 @@
+// @ts-nocheck
+import axios from "axios";
+import { GetRequiredSecrets, ReplaceAllOccurrences } from "../tool.js";
+import { UP, DOWN, DEGRADED, REALTIME, TIMEOUT, ERROR, MANUAL } from "../constants.js";
+
+const defaultEval = `(async function (statusCode, responseTime, responseData) {
+ let statusCodeShort = Math.floor(statusCode/100);
+ if(statusCode == 429 || (statusCodeShort >=2 && statusCodeShort <= 3)) {
+ return {
+ status: 'UP',
+ latency: responseTime,
+ }
+ }
+ return {
+ status: 'DOWN',
+ latency: responseTime,
+ }
+})`;
+
+class ApiCall {
+ monitor;
+ envSecrets;
+
+ constructor(monitor) {
+ this.monitor = monitor;
+ this.envSecrets = GetRequiredSecrets(
+ `${monitor.type_data.url} ${monitor.type_data.body} ${JSON.stringify(monitor.type_data.headers)}`,
+ );
+ }
+
+ async execute() {
+ let axiosHeaders = {};
+ axiosHeaders["User-Agent"] = "Kener/" + "3.1.0";
+ axiosHeaders["Accept"] = "*/*";
+
+ let body = this.monitor.type_data.body;
+ let url = this.monitor.type_data.url;
+
+ //headers to string
+ let headers = "";
+ if (!!this.monitor.type_data.headers) {
+ headers = JSON.stringify(this.monitor.type_data.headers);
+ }
+
+ let method = this.monitor.type_data.method;
+ let timeout = this.monitor.type_data.timeout || 5000;
+ let tag = this.monitor.tag;
+ let monitorEval = !!this.monitor.type_data.monitorEval ? this.monitor.type_data.monitorEval : defaultEval;
+
+ for (let i = 0; i < this.envSecrets.length; i++) {
+ const secret = this.envSecrets[i];
+ if (!!body) {
+ body = ReplaceAllOccurrences(body, secret.find, secret.replace);
+ }
+ if (!!url) {
+ url = ReplaceAllOccurrences(url, secret.find, secret.replace);
+ }
+ if (!!headers) {
+ headers = ReplaceAllOccurrences(headers, secret.find, secret.replace);
+ }
+ }
+
+ if (!!headers) {
+ try {
+ headers = JSON.parse(headers);
+ headers = headers.reduce((acc, header) => {
+ acc[header.key] = header.value;
+ return acc;
+ }, {});
+ axiosHeaders = { ...axiosHeaders, ...headers };
+ } catch (e) {
+ console.log(e);
+ }
+ }
+
+ const options = {
+ method: method,
+ headers: axiosHeaders,
+ timeout: timeout,
+ transformResponse: (r) => r,
+ };
+
+ if (!!body) {
+ options.data = body;
+ }
+ let statusCode = 500;
+ let latency = 0;
+ let resp = "";
+ let timeoutError = false;
+ const start = Date.now();
+ try {
+ let data = await axios(url, options);
+ statusCode = data.status;
+ resp = data.data;
+ } catch (err) {
+ console.log(`Error in apiCall ${tag}`, err.message);
+ if (err.message.startsWith("timeout of") && err.message.endsWith("exceeded")) {
+ timeoutError = true;
+ }
+ if (err.response !== undefined && err.response.status !== undefined) {
+ statusCode = err.response.status;
+ }
+ if (err.response !== undefined && err.response.data !== undefined) {
+ resp = err.response.data;
+ } else {
+ resp = JSON.stringify(resp);
+ }
+ } finally {
+ const end = Date.now();
+ latency = end - start;
+ if (resp === undefined || resp === null) {
+ resp = "";
+ }
+ }
+
+ resp = Buffer.from(resp).toString("base64");
+
+ let evalResp = undefined;
+
+ try {
+ evalResp = await eval(monitorEval + `(${statusCode}, ${latency}, "${resp}")`);
+ } catch (error) {
+ console.log(`Error in monitorEval for ${tag}`, error.message);
+ }
+
+ if (evalResp === undefined || evalResp === null) {
+ evalResp = {
+ status: DOWN,
+ latency: latency,
+ type: ERROR,
+ };
+ } else if (
+ evalResp.status === undefined ||
+ evalResp.status === null ||
+ [UP, DOWN, DEGRADED].indexOf(evalResp.status) === -1
+ ) {
+ evalResp = {
+ status: DOWN,
+ latency: latency,
+ type: ERROR,
+ };
+ } else {
+ evalResp.type = REALTIME;
+ }
+
+ let toWrite = {
+ status: DOWN,
+ latency: latency,
+ type: ERROR,
+ };
+ if (evalResp.status !== undefined && evalResp.status !== null) {
+ toWrite.status = evalResp.status;
+ }
+ if (evalResp.latency !== undefined && evalResp.latency !== null) {
+ toWrite.latency = evalResp.latency;
+ }
+ if (evalResp.type !== undefined && evalResp.type !== null) {
+ toWrite.type = evalResp.type;
+ }
+ if (timeoutError) {
+ toWrite.type = TIMEOUT;
+ }
+
+ return toWrite;
+ }
+}
+
+export default ApiCall;
diff --git a/src/lib/server/services/dnsCall.js b/src/lib/server/services/dnsCall.js
new file mode 100644
index 00000000..48507230
--- /dev/null
+++ b/src/lib/server/services/dnsCall.js
@@ -0,0 +1,76 @@
+// @ts-nocheck
+import axios from "axios";
+import DNSResolver from "../dns.js";
+import { UP, DOWN, DEGRADED, REALTIME, TIMEOUT, ERROR, MANUAL } from "../constants.js";
+
+class DnsCall {
+ monitor;
+
+ constructor(monitor) {
+ this.monitor = monitor;
+ }
+
+ async execute() {
+ const dnsResolver = new DNSResolver(this.monitor.type_data.nameServer);
+ let host = this.monitor.type_data.host;
+ let recordType = this.monitor.type_data.lookupRecord;
+ let matchType = this.monitor.type_data.matchType;
+ let values = this.monitor.type_data.values;
+
+ try {
+ let queryStartTime = Date.now();
+ let dnsRes = await dnsResolver.getRecord(host, recordType);
+ let latency = Date.now() - queryStartTime;
+
+ if (dnsRes[recordType] === undefined) {
+ return {
+ status: DOWN,
+ latency: latency,
+ type: REALTIME,
+ };
+ }
+ let data = dnsRes[recordType];
+ let dnsData = data.map((d) => d.data);
+ if (matchType === "ALL") {
+ for (let i = 0; i < values.length; i++) {
+ if (dnsData.indexOf(values[i].trim()) === -1) {
+ return {
+ status: DOWN,
+ latency: latency,
+ type: REALTIME,
+ };
+ }
+ }
+ return {
+ status: UP,
+ latency: latency,
+ type: REALTIME,
+ };
+ } else if (matchType === "ANY") {
+ for (let i = 0; i < values.length; i++) {
+ if (dnsData.indexOf(values[i].trim()) !== -1) {
+ return {
+ status: UP,
+ latency: latency,
+ type: REALTIME,
+ };
+ }
+ }
+ return {
+ status: DOWN,
+ latency: latency,
+ type: REALTIME,
+ };
+ }
+ } catch (error) {
+ console.log("Error in dnsChecker", error);
+ return {
+ status: DOWN,
+ latency: 0,
+ type: REALTIME,
+ };
+ }
+ }
+}
+
+export default DnsCall;
diff --git a/src/lib/server/services/pingCall.js b/src/lib/server/services/pingCall.js
new file mode 100644
index 00000000..7595cd55
--- /dev/null
+++ b/src/lib/server/services/pingCall.js
@@ -0,0 +1,81 @@
+// @ts-nocheck
+import axios from "axios";
+import { Ping } from "../ping.js";
+import {
+ UP,
+ DOWN,
+ DEGRADED,
+ REALTIME,
+ TIMEOUT,
+ ERROR,
+ MANUAL,
+} from "../constants.js";
+
+const defaultPingEval = `(async function (responseDataBase64) {
+ let arrayOfPings = JSON.parse(atob(responseDataBase64));
+ let latencyTotal = arrayOfPings.reduce((acc, ping) => {
+ return acc + ping.latency;
+ }, 0);
+
+ let alive = arrayOfPings.reduce((acc, ping) => {
+ return acc && ping.alive;
+ }, true);
+
+ return {
+ status: alive ? 'UP' : 'DOWN',
+ latency: latencyTotal / arrayOfPings.length,
+ }
+})`;
+
+class PingCall {
+ monitor;
+
+ constructor(monitor) {
+ this.monitor = monitor;
+ }
+
+ async execute() {
+ let hosts = this.monitor.type_data.hosts;
+ let pingEval = !!this.monitor.type_data.pingEval
+ ? this.monitor.type_data.pingEval
+ : defaultPingEval;
+ let tag = this.monitor.tag;
+ if (hosts === undefined) {
+ console.log(
+ "Hosts is undefined. The ping monitor has changed in version 3.0.10. Please update your monitor with tag",
+ tag,
+ );
+ return {
+ status: DOWN,
+ latency: 0,
+ type: ERROR,
+ };
+ }
+ let arrayOfPings = [];
+ for (let i = 0; i < hosts.length; i++) {
+ const host = hosts[i];
+ arrayOfPings.push(
+ await Ping(host.type, host.host, host.timeout, host.count),
+ );
+ }
+ let respBase64 = Buffer.from(JSON.stringify(arrayOfPings)).toString(
+ "base64",
+ );
+
+ let evalResp = undefined;
+
+ try {
+ evalResp = await eval(pingEval + `("${respBase64}")`);
+ } catch (error) {
+ console.log(`Error in pingEval for ${tag}`, error.message);
+ }
+ //reduce to get the status
+ return {
+ status: evalResp.status,
+ latency: evalResp.latency,
+ type: REALTIME,
+ };
+ }
+}
+
+export default PingCall;
diff --git a/src/lib/server/services/service.js b/src/lib/server/services/service.js
new file mode 100644
index 00000000..23eb06c8
--- /dev/null
+++ b/src/lib/server/services/service.js
@@ -0,0 +1,31 @@
+// @ts-nocheck
+import ApiCall from "./apiCall.js";
+import PingCall from "./pingCall.js";
+import TcpCall from "./tcpCall.js";
+import DnsCall from "./dnsCall.js";
+
+class Service {
+ service;
+
+ constructor(monitor) {
+ if (monitor.monitor_type === "API") {
+ this.service = new ApiCall(monitor);
+ } else if (monitor.monitor_type === "PING") {
+ this.service = new PingCall(monitor);
+ } else if (monitor.monitor_type === "TCP") {
+ this.service = new TcpCall(monitor);
+ } else if (monitor.monitor_type === "DNS") {
+ this.service = new DnsCall(monitor);
+ } else if (monitor.monitor_type === "NONE") {
+ this.service = null;
+ } else {
+ console.log("Invalid monitor.monitor_type ", monitor.monitor_type);
+ process.exit(1);
+ }
+ }
+ async execute() {
+ return await this.service.execute();
+ }
+}
+
+export default Service;
diff --git a/src/lib/server/services/tcpCall.js b/src/lib/server/services/tcpCall.js
new file mode 100644
index 00000000..118b6535
--- /dev/null
+++ b/src/lib/server/services/tcpCall.js
@@ -0,0 +1,72 @@
+// @ts-nocheck
+import axios from "axios";
+import { TCP } from "../ping.js";
+import { UP, DOWN, DEGRADED, REALTIME, TIMEOUT, ERROR, MANUAL } from "../constants.js";
+
+const defaultTcpEval = `(async function (responseDataBase64) {
+ let arrayOfPings = JSON.parse(atob(responseDataBase64));
+ let latencyTotal = arrayOfPings.reduce((acc, ping) => {
+ return acc + ping.latency;
+ }, 0);
+
+ let alive = arrayOfPings.reduce((acc, ping) => {
+ if (ping.status === "open") {
+ return acc && true;
+ } else {
+ return false;
+ }
+ }, true);
+
+ return {
+ status: alive ? 'UP' : 'DOWN',
+ latency: latencyTotal / arrayOfPings.length,
+ }
+})`;
+
+class TcpCall {
+ monitor;
+
+ constructor(monitor) {
+ this.monitor = monitor;
+ }
+
+ async execute() {
+ let hosts = this.monitor.type_data.hosts;
+ let tcpEval = !!this.monitor.type_data.tcpEval ? this.monitor.type_data.tcpEval : defaultTcpEval;
+ let tag = this.monitor.tag;
+
+ if (hosts === undefined) {
+ console.log(
+ "Hosts is undefined. The ping monitor has changed in version 3.0.10. Please update your monitor with tag",
+ tag,
+ );
+ return {
+ status: DOWN,
+ latency: 0,
+ type: ERROR,
+ };
+ }
+ let arrayOfPings = [];
+ for (let i = 0; i < hosts.length; i++) {
+ const host = hosts[i];
+ arrayOfPings.push(await TCP(host.type, host.host, host.port, host.timeout));
+ }
+ let respBase64 = Buffer.from(JSON.stringify(arrayOfPings)).toString("base64");
+
+ let evalResp = undefined;
+
+ try {
+ evalResp = await eval(tcpEval + `("${respBase64}")`);
+ } catch (error) {
+ console.log(`Error in tcpEval for ${tag}`, error.message);
+ }
+ //reduce to get the status
+ return {
+ status: evalResp.status,
+ latency: evalResp.latency,
+ type: REALTIME,
+ };
+ }
+}
+
+export default TcpCall;
diff --git a/src/routes/(docs)/+layout.svelte b/src/routes/(docs)/+layout.svelte
index 0746347b..86312931 100644
--- a/src/routes/(docs)/+layout.svelte
+++ b/src/routes/(docs)/+layout.svelte
@@ -1,34 +1,34 @@
@@ -57,13 +57,13 @@
-
+
@@ -99,12 +97,8 @@
/>
API Reference
-
- Report Issue
-
-
- Sponsor
-
+
Report Issue
+
Sponsor