ESLint fixes

Stephen Papierski 2023-11-09 13:15:32 -07:00
parent d3709802c5
commit 9dd577dc98
3 changed files with 21 additions and 19 deletions

View file

@@ -1,7 +1,7 @@
exports.up = function (knex) {
// add various slow_response_notification parameters
return knex.schema
.alterTable("monitor", function(table) {
.alterTable("monitor", function (table) {
table.boolean("slow_response_notification").notNullable().defaultTo(false);
table.string("slow_response_notification_method").notNullable().defaultTo("average");
table.integer("slow_response_notification_range").notNullable().defaultTo(300);
@@ -10,15 +10,15 @@ exports.up = function (knex) {
table.float("slow_response_notification_threshold_multiplier").notNullable().defaultTo(5.0);
table.integer("slow_response_notification_resend_interval").notNullable().defaultTo(0);
})
.alterTable("heartbeat", function(table) {
.alterTable("heartbeat", function (table) {
table.integer("slow_response_count").notNullable().defaultTo(0);
});
- }
+ };
exports.down = function (knex) {
// remove various slow_response_notification parameters
return knex.schema
.alterTable("monitor", function(table) {
.alterTable("monitor", function (table) {
table.dropColumn("slow_response_notification");
table.dropColumn("slow_response_notification_method");
table.dropColumn("slow_response_notification_range");
@@ -27,7 +27,7 @@ exports.down = function (knex) {
table.dropColumn("slow_response_notification_threshold_multiplier");
table.dropColumn("slow_response_notification_resend_interval");
})
.alterTable("heartbeat", function(table) {
.alterTable("heartbeat", function (table) {
table.dropColumn("slow_response_count");
});
- }
+ };
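The changes in this migration are pure formatting fixes: a space between the function keyword and its parameter list, and a terminating semicolon after each assigned function expression. They line up with ESLint's space-before-function-paren and semi rules; below is a minimal sketch of the kind of configuration that would auto-fix them (the project's actual .eslintrc contents are an assumption here, not confirmed by this diff).

// Hypothetical ESLint config excerpt; the repository's real rule set is assumed.
module.exports = {
    rules: {
        // require "function (table)" rather than "function(table)"
        "space-before-function-paren": [ "error", "always" ],
        // require "};" after assignments like exports.up = function (knex) { ... }
        "semi": [ "error", "always" ],
    },
};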

View file

@@ -307,7 +307,7 @@ class Monitor extends BeanModel {
/**
* Is the slow response notification enabled?
- * @returns {boolean}
+ * @returns {boolean} Slow response notification is enabled?
*/
isEnabledSlowResponseNotification() {
return Boolean(this.slowResponseNotification);
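Adding a description after the type in @returns is the kind of change a JSDoc lint rule drives; assuming the project lints JSDoc with eslint-plugin-jsdoc, the relevant rule would be jsdoc/require-returns-description, which flags a bare @returns {boolean} with no explanatory text. A sketch of the before/after pattern:

// Flagged (assuming jsdoc/require-returns-description): type only, no description.
/**
 * Is the slow response notification enabled?
 * @returns {boolean}
 */
function isEnabledBefore() {
    return true;
}

// Passes: a short description follows the type, matching the fix above.
/**
 * Is the slow response notification enabled?
 * @returns {boolean} Slow response notification is enabled?
 */
function isEnabledAfter() {
    return true;
}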
@@ -1434,7 +1434,7 @@ class Monitor extends BeanModel {
if (bean.status !== UP) {
log.debug("monitor", `[${this.name}] Monitor status is not UP, skipping slow response check`);
- return;
+ return;
}
const method = monitor.slowResponseNotificationMethod;
@@ -1444,7 +1444,7 @@ class Monitor extends BeanModel {
let actualResponseTime = 0;
let previousBeats;
if (method != "last") {
if (method !== "last") {
//Get recent heartbeat list with range of time
const afterThisDate = new Date(Date.now() - (1000 * (monitor.slowResponseNotificationRange + 1))); // add 1 second otherwise we grab 0 previous beats when Time Range == Heartbeat Interval
previousBeats = await R.getAll(`
@@ -1482,6 +1482,8 @@ class Monitor extends BeanModel {
let threshold;
let thresholdDescription;
+ let afterThisDate;
+ let avgPing;
switch (thresholdMethod) {
case "threshold-static":
threshold = monitor.slowResponseNotificationThreshold;
@@ -1490,8 +1492,8 @@ class Monitor extends BeanModel {
case "threshold-relative-24-hour":
//Get average response time over last 24 hours
- const afterThisDate = new Date(Date.now() - (1000 * (24 * 60 * 60))); // 24 hours in milliseconds
- const avgPing = parseInt(await R.getCell(`
+ afterThisDate = new Date(Date.now() - (1000 * (24 * 60 * 60))); // 24 hours in milliseconds
+ avgPing = parseInt(await R.getCell(`
SELECT AVG(ping) FROM heartbeat
WHERE time > datetime(?)
AND ping IS NOT NULL
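The two let declarations added at the top of this block (let afterThisDate; let avgPing;) pair with the const removals here: declaring a const or let directly inside a case clause violates ESLint's no-case-declarations rule, because the binding is scoped to the entire switch block while only one branch initializes it. A minimal sketch of the pattern and the hoisted fix, with illustrative names:

// Flagged by no-case-declarations: the const is visible to every case clause.
function pickThresholdBefore(mode) {
    switch (mode) {
        case "relative":
            const base = 2500; // ESLint: "Unexpected lexical declaration in case block"
            return base * 5;
        default:
            return 2500;
    }
}

// The fix used in this commit: declare once before the switch, assign inside it.
function pickThresholdAfter(mode) {
    let base;
    switch (mode) {
        case "relative":
            base = 2500;
            return base * 5;
        default:
            return 2500;
    }
}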
@@ -1511,12 +1513,12 @@ class Monitor extends BeanModel {
}
// Create stats to append to messages/logs
const methodDescription = ["average", "max"].includes(method) ? `${method} of ${windowDuration}s` : method;
let msgStats = `Response: ${actualResponseTime}ms (${methodDescription}) | Threshold: ${threshold}ms (${thresholdDescription})`
const methodDescription = [ "average", "max" ].includes(method) ? `${method} of ${windowDuration}s` : method;
let msgStats = `Response: ${actualResponseTime}ms (${methodDescription}) | Threshold: ${threshold}ms (${thresholdDescription})`;
// Add window duration for methods that make sense
// Verify valid response time was calculated
- if (actualResponseTime == 0 || !Number.isInteger(actualResponseTime)) {
+ if (actualResponseTime === 0 || !Number.isInteger(actualResponseTime)) {
log.debug("monitor", `[${this.name}] Failed to calculate valid response time`);
return;
}
@@ -1529,7 +1531,7 @@ class Monitor extends BeanModel {
// Responding normally
if (actualResponseTime < threshold) {
- if (bean.slowResponseCount == 0) {
+ if (bean.slowResponseCount === 0) {
log.debug("monitor", `[${this.name}] Responding normally. No need to send slow response notification | ${msgStats}`);
} else {
msgStats += ` | Slow for: ${bean.slowResponseCount * monitor.interval}s`;
@@ -1546,13 +1548,13 @@ class Monitor extends BeanModel {
++bean.slowResponseCount;
// Always send first notification
- if (bean.slowResponseCount == 1) {
+ if (bean.slowResponseCount === 1) {
log.debug("monitor", `[${this.name}] Responded slowly, sending notification | ${msgStats}`);
let msg = `[${this.name}] Responded Slowly \n${msgStats}`;
Monitor.sendSlowResponseNotification(monitor, bean, msg);
// Send notification every x times
- } else if (this.slowResponseNotificationResendInterval > 0){
- if (((bean.slowResponseCount) % this.slowResponseNotificationResendInterval) == 0) {
+ } else if (this.slowResponseNotificationResendInterval > 0) {
+ if (((bean.slowResponseCount) % this.slowResponseNotificationResendInterval) === 0) {
// Send notification again, because we are still responding slow
msgStats += ` | Slow for: ${bean.slowResponseCount * monitor.interval}s`;
log.debug("monitor", `[${this.name}] Still responding slowly, sendSlowResponseNotification again | ${msgStats}`);

View file

@@ -978,7 +978,7 @@ const monitorDefaults = {
slowResponseNotification: false,
slowResponseNotificationMethod: "average",
slowResponseNotificationRange: 300,
slowResponseNotificationThresholdMethod:"threshold-relative-24-hour",
slowResponseNotificationThresholdMethod: "threshold-relative-24-hour",
slowResponseNotificationThreshold: 2500,
slowResponseNotificationThresholdMultiplier: 5.0,
slowResponseNotificationResendInterval: 0,
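The single fix in this file adds a space after the property colon, which is what ESLint's key-spacing rule enforces in object literals (assuming its default afterColon: true option). A minimal illustration:

// Flagged by key-spacing: no space after the colon.
const before = { slowResponseNotificationThresholdMethod:"threshold-relative-24-hour" };

// Fixed, matching the diff above.
const after = { slowResponseNotificationThresholdMethod: "threshold-relative-24-hour" };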