all in one data

This commit is contained in:
Raj Nandan Sharma
2023-12-19 23:27:41 +05:30
parent 397dd4a971
commit 037cba7b3e
36 changed files with 236 additions and 265 deletions
+22 -89
View File
@@ -28,17 +28,25 @@ async function manualIncident(monitor, githubConfig){
const incident = incidentsResp[i];
let start_time = GetStartTimeFromBody(incident.body);
if (allLabels.indexOf("incident-degraded") == -1 || allLabels.indexOf("incident-down") == -1) {
continue;
}
if (start_time === null) {
continue;
start_time = GetMinuteStartTimestampUTC(new Date(incident.created_at).getTime() / 1000);
}
let newIncident = {
start_time: start_time,
};
let end_time = GetEndTimeFromBody(incident.body);
if (end_time !== null) {
newIncident.end_time = end_time;
} else {
newIncident.end_time = GetNowTimestampUTC();
if (!!incident.closed_at) {
newIncident.end_time = GetMinuteStartTimestampUTC(new Date(incident.closed_at).getTime() / 1000);
}
}
let allLabels = incident.labels.map((label) => label.name);
@@ -203,22 +211,19 @@ const getWebhookData = async (monitor) => {
}
return originalData;
};
// Load the monitor's 0-day JSON store from disk.
// On any read or parse failure the error is logged and an empty
// object is returned, so callers always get a usable map.
const getDayData = async (monitor) => {
	try {
		const raw = fs.readFileSync(monitor.path0Day, "utf8");
		return JSON.parse(raw);
	} catch (error) {
		console.error(error);
		return {};
	}
};
const updateDayData = async (mergedData, startOfMinute, monitor) => {
let since = 48
let dayData = JSON.parse(fs.readFileSync(monitor.path0Day, "utf8"));
for (const timestamp in mergedData) {
dayData[timestamp] = mergedData[timestamp];
}
let since = 24*91;
let mxBackDate = startOfMinute - since * 3600;
let _0Day = {};
for (const ts in mergedData) {
const element = mergedData[ts];
for (const ts in dayData) {
const element = dayData[ts];
if (ts >= mxBackDate) {
_0Day[ts] = element;
}
@@ -228,12 +233,9 @@ const updateDayData = async (mergedData, startOfMinute, monitor) => {
let keys = Object.keys(_0Day);
keys.sort();
let sortedDay0 = {};
keys.reverse() //reverse to keep 90days data
.slice(0, since * 60)
.reverse() //reverse to keep 0day data
.forEach((key) => {
sortedDay0[key] = _0Day[key];
});
keys.reverse().forEach((key) => {
sortedDay0[key] = _0Day[key];
});
try {
fs.writeFileSync(monitor.path0Day, JSON.stringify(sortedDay0, null, 2));
} catch (error) {
@@ -241,73 +243,12 @@ const updateDayData = async (mergedData, startOfMinute, monitor) => {
}
};
// Aggregate the minute-level 0-day data into per-day summaries and merge
// them into the monitor's rolling 90-day file (keeps the newest 90 days).
// Reads monitor.path0Day, rewrites monitor.path90Day.
const update90DayData = async (monitor) => {
	const mergedData = JSON.parse(fs.readFileSync(monitor.path0Day, "utf8"));
	let _90Day = {};
	const _90File = monitor.path90Day;
	try {
		_90Day = JSON.parse(fs.readFileSync(_90File, "utf8"));
	} catch (err) {
		// First run or unreadable file: initialize the 90-day store as empty.
		fs.ensureFileSync(_90File);
		fs.writeFileSync(_90File, JSON.stringify({}));
	}
	// Bucket each minute entry by the UTC start of its day, counting
	// statuses and summing latency for the per-day average below.
	const temp = {};
	for (const timestamp in mergedData) {
		const dayTS = GetDayStartTimestampUTC(timestamp);
		if (temp[dayTS] === undefined) {
			temp[dayTS] = {
				timestamp: dayTS,
				UP: 0,
				DEGRADED: 0,
				DOWN: 0,
				avgLatency: 0,
				latency: 0,
			};
		}
		const entry = mergedData[timestamp];
		const bucket = temp[dayTS];
		bucket.UP += entry.status == UP ? 1 : 0;
		bucket.DEGRADED += entry.status == DEGRADED ? 1 : 0;
		bucket.DOWN += entry.status == DOWN ? 1 : 0;
		bucket.latency += entry.latency;
	}
	for (const dayTS in temp) {
		const d = temp[dayTS];
		const samples = d.UP + d.DEGRADED + d.DOWN;
		if (samples === 0) {
			continue;
		}
		// NOTE: toFixed yields a string; kept that way to preserve the
		// existing on-disk JSON format.
		d.avgLatency = (d.latency / samples).toFixed(0);
	}
	_90Day = { ..._90Day, ...temp };
	// Sort timestamps numerically (a bare lexicographic sort would misorder
	// keys with differing digit counts) and keep only the newest 90 days.
	const keys = Object.keys(_90Day).sort((a, b) => Number(a) - Number(b));
	const sorted90Day = {};
	for (const key of keys.slice(-90)) {
		sorted90Day[key] = _90Day[key];
	}
	fs.writeFileSync(_90File, JSON.stringify(sorted90Day, null, 2));
};
const Minuter = async (envSecrets, monitor, githubConfig) => {
if (apiQueue.length > 0) console.log("Queue length is " + apiQueue.length);
let apiData = {};
let webhookData = {};
let manualData = {};
const startOfMinute = GetMinuteStartNowTimestampUTC();
let dayData = {};
if (monitor.hasAPI) {
let apiResponse = await apiCall(envSecrets, monitor.url, monitor.method, JSON.stringify(monitor.headers), monitor.body, monitor.timeout, monitor.eval);
@@ -326,15 +267,9 @@ const Minuter = async (envSecrets, monitor, githubConfig) => {
}
}
webhookData = await getWebhookData(monitor);
dayData = await getDayData(monitor);
manualData = await manualIncident(monitor, githubConfig);
//merge apiData, webhookData, dayData
let mergedData = {};
// console.log(Object.keys(dayData).length);;
for (const timestamp in dayData) {
mergedData[timestamp] = dayData[timestamp];
}
for (const timestamp in apiData) {
mergedData[timestamp] = apiData[timestamp];
}
@@ -347,8 +282,6 @@ const Minuter = async (envSecrets, monitor, githubConfig) => {
//update day data
await updateDayData(mergedData, startOfMinute, monitor);
//update 90day data
await update90DayData(monitor);
};
apiQueue.start((err) => {
if (err) {
+11 -3
View File
@@ -25,7 +25,15 @@ const GetAllGHLabels = async function (owner, repo) {
}
return labels;
};
const CreateGHLabel = async function (owner, repo, label, description) {
/**
 * Generate a random 6-digit hex color (no leading "#"), suitable for the
 * GitHub label API's `color` field.
 *
 * Fixes two bugs in the previous version: values below 0x100000 produced
 * hex strings shorter than six digits (which the GitHub API rejects), and
 * multiplying by 16777215 made "ffffff" unreachable.
 *
 * @returns {string} exactly six lowercase hex digits, "000000".."ffffff"
 */
function generateRandomColor() {
	// 16777216 === 0x1000000, so flooring spans the full 000000–ffffff range;
	// padStart keeps small values at the required six digits.
	return Math.floor(Math.random() * 16777216)
		.toString(16)
		.padStart(6, "0");
}
const CreateGHLabel = async function (owner, repo, label, description, color) {
if(color === undefined){
color = generateRandomColor();
}
const options = {
method: "POST",
url: `https://api.github.com/repos/${owner}/${repo}/labels`,
@@ -36,7 +44,7 @@ const CreateGHLabel = async function (owner, repo, label, description) {
},
data: {
name: label,
color: generateRandomColor(),
color: color,
description: description,
},
};
@@ -82,7 +90,7 @@ const GetIncidentsOpen = async function (tagName, githubConfig) {
const sinceISO = new Date(since * 1000).toISOString();
const options = {
method: "GET",
url: `https://api.github.com/repos/${githubConfig.owner}/${githubConfig.repo}/issues?labels=${tagName},incident&state=open&sort=created&direction=desc&since=${sinceISO}`,
url: `https://api.github.com/repos/${githubConfig.owner}/${githubConfig.repo}/issues?labels=${tagName},incident&sort=created&direction=desc&since=${sinceISO}`,
headers: {
Accept: "application/vnd.github+json",
Authorization: "Bearer " + GH_TOKEN,
+12 -8
View File
@@ -122,7 +122,6 @@ const Startup = async () => {
}
monitors[i].path0Day = `${FOLDER}/${folderName}.0day.utc.json`;
monitors[i].path90Day = `${FOLDER}/${folderName}.90day.utc.json`;
monitors[i].hasAPI = hasAPI;
//secrets can be in url/body/headers
@@ -178,12 +177,21 @@ const Startup = async () => {
if (ghlabels.indexOf("incident") === -1) {
await CreateGHLabel(ghowner, ghrepo, "incident", "Status of the site");
}
if (ghlabels.indexOf("resolved") === -1) {
await CreateGHLabel(ghowner, ghrepo, "resolved", "Incident is resolved", "65dba6");
}
if (ghlabels.indexOf("identified") === -1) {
await CreateGHLabel(ghowner, ghrepo, "identified", "Incident is Identified", "EBE3D5");
}
if (ghlabels.indexOf("investigating") === -1) {
await CreateGHLabel(ghowner, ghrepo, "investigating", "Incident is investigated", "D4E2D4");
}
if (ghlabels.indexOf("incident-degraded") === -1) {
await CreateGHLabel(ghowner, ghrepo, "incident-degraded", "Status is degraded of the site");
await CreateGHLabel(ghowner, ghrepo, "incident-degraded", "Status is degraded of the site", "f5ba60");
}
if (ghlabels.indexOf("incident-down") === -1) {
await CreateGHLabel(ghowner, ghrepo, "incident-down", "Status is down of the site");
await CreateGHLabel(ghowner, ghrepo, "incident-down", "Status is down of the site", "ea3462");
}
//add tags if does not exist
for (let i = 0; i < tagsAndDescription.length; i++) {
@@ -205,14 +213,10 @@ const Startup = async () => {
fs.ensureFileSync(monitor.path0Day);
fs.writeFileSync(monitor.path0Day, JSON.stringify({}));
}
if(!fs.existsSync(monitor.path90Day)) {
fs.ensureFileSync(monitor.path90Day);
fs.writeFileSync(monitor.path90Day, JSON.stringify({}));
}
console.log("Staring One Minute Cron for ", monitor.path0Day);
await Minuter(envSecrets, monitor, site.github);
//await Minuter(envSecrets, monitor, site.github);
}
//trigger minute cron