unified our logging infrastructure

Frank Elsinga 2024-01-18 22:31:10 +01:00
parent 36196f632d
commit d622dfbb57
11 changed files with 57 additions and 75 deletions
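
In short, the commit swaps direct console.* calls and apicache's local debug() helper for the shared log object exported by src/util, which takes the originating module as its first argument. A minimal before/after sketch of the pattern (the relative require path to src/util differs per file):

    // before: ad-hoc console output
    console.log("Migrating Status Page");

    // after: the shared logger, tagged with the module it comes from
    const { log } = require("../src/util");
    log.info("database", "Migrating Status Page");
    log.debug("apicache", "clearing entire index");
    log.error("server", new Error("example error")); // error objects are passed straight through, mirroring log.error("server", e) below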

View file

@@ -514,12 +514,12 @@ class Database {
let title = await setting("title");
if (title) {
- console.log("Migrating Status Page");
+ log.info("database", "Migrating Status Page");
let statusPageCheck = await R.findOne("status_page", " slug = 'default' ");
if (statusPageCheck !== null) {
- console.log("Migrating Status Page - Skip, default slug record is already existing");
+ log.info("database", "Migrating Status Page - Skip, default slug record is already existing");
return;
}
@@ -565,7 +565,7 @@ class Database {
await setSetting("entryPage", "statusPage-default", "general");
}
- console.log("Migrating Status Page - Done");
+ log.info("database", "Migrating Status Page - Done");
}
}

View file

@@ -1,5 +1,6 @@
let url = require("url");
let MemoryCache = require("./memory-cache");
+ const { log } = require("../../../src/util");
let t = {
ms: 1,
@@ -90,24 +91,6 @@ function ApiCache() {
instances.push(this);
this.id = instances.length;
- /**
- * Logs a message to the console if the `DEBUG` environment variable is set.
- * @param {string} a The first argument to log.
- * @param {string} b The second argument to log.
- * @param {string} c The third argument to log.
- * @param {string} d The fourth argument to log, and so on... (optional)
- *
- * Generated by Trelent
- */
- function debug(a, b, c, d) {
- let arr = ["\x1b[36m[apicache]\x1b[0m", a, b, c, d].filter(function (arg) {
- return arg !== undefined;
- });
- let debugEnv = process.env.DEBUG && process.env.DEBUG.split(",").indexOf("apicache") !== -1;
- return (globalOptions.debug || debugEnv) && console.log.apply(null, arr);
- }
/**
* Returns true if the given request and response should be logged.
* @param {Object} request The HTTP request object.
@@ -146,7 +129,7 @@ function ApiCache() {
let groupName = req.apicacheGroup;
if (groupName) {
- debug("group detected \"" + groupName + "\"");
+ log.debug("apicache", `group detected "${groupName}"`);
let group = (index.groups[groupName] = index.groups[groupName] || []);
group.unshift(key);
}
@@ -212,7 +195,7 @@ function ApiCache() {
redis.hset(key, "duration", duration);
redis.expire(key, duration / 1000, expireCallback || function () {});
} catch (err) {
- debug("[apicache] error in redis.hset()");
+ log.debug("apicache", `error in redis.hset(): ${err}`);
}
} else {
memCache.add(key, value, duration, expireCallback);
@@ -320,10 +303,10 @@ function ApiCache() {
// display log entry
let elapsed = new Date() - req.apicacheTimer;
- debug("adding cache entry for \"" + key + "\" @ " + strDuration, logDuration(elapsed));
- debug("_apicache.headers: ", res._apicache.headers);
- debug("res.getHeaders(): ", getSafeHeaders(res));
- debug("cacheObject: ", cacheObject);
+ log.debug("apicache", `adding cache entry for "${key}" @ ${strDuration} ${logDuration(elapsed)}`);
+ log.debug("apicache", `_apicache.headers: ${res._apicache.headers}`);
+ log.debug("apicache", `res.getHeaders(): ${getSafeHeaders(res)}`);
+ log.debug("apicache", `cacheObject: ${cacheObject}`);
}
}
@@ -402,10 +385,10 @@ function ApiCache() {
let redis = globalOptions.redisClient;
if (group) {
- debug("clearing group \"" + target + "\"");
+ log.debug("apicache", `clearing group "${target}"`);
group.forEach(function (key) {
- debug("clearing cached entry for \"" + key + "\"");
+ log.debug("apicache", `clearing cached entry for "${key}"`);
clearTimeout(timers[key]);
delete timers[key];
if (!globalOptions.redisClient) {
@@ -414,7 +397,7 @@ function ApiCache() {
try {
redis.del(key);
} catch (err) {
- console.log("[apicache] error in redis.del(\"" + key + "\")");
+ log.info("apicache", "error in redis.del(\"" + key + "\")");
}
}
index.all = index.all.filter(doesntMatch(key));
@@ -422,7 +405,7 @@ function ApiCache() {
delete index.groups[target];
} else if (target) {
- debug("clearing " + (isAutomatic ? "expired" : "cached") + " entry for \"" + target + "\"");
+ log.debug("apicache", `clearing ${isAutomatic ? "expired" : "cached"} entry for "${target}"`);
clearTimeout(timers[target]);
delete timers[target];
// clear actual cached entry
@@ -432,7 +415,7 @@ function ApiCache() {
try {
redis.del(target);
} catch (err) {
- console.log("[apicache] error in redis.del(\"" + target + "\")");
+ log.error("apicache", "error in redis.del(\"" + target + "\")");
}
}
@@ -449,7 +432,7 @@ function ApiCache() {
}
});
} else {
- debug("clearing entire index");
+ log.debug("apicache", "clearing entire index");
if (!redis) {
memCache.clear();
@@ -461,7 +444,7 @@ function ApiCache() {
try {
redis.del(key);
} catch (err) {
- console.log("[apicache] error in redis.del(\"" + key + "\")");
+ log.error("apicache", `error in redis.del("${key}"): ${err}`);
}
});
}
@@ -752,7 +735,7 @@ function ApiCache() {
*/
let cache = function (req, res, next) {
function bypass() {
- debug("bypass detected, skipping cache.");
+ log.debug("apicache", "bypass detected, skipping cache.");
return next();
}
@@ -805,7 +788,7 @@ function ApiCache() {
// send if cache hit from memory-cache
if (cached) {
let elapsed = new Date() - req.apicacheTimer;
- debug("sending cached (memory-cache) version of", key, logDuration(elapsed));
+ log.debug("apicache", `sending cached (memory-cache) version of ${key} ${logDuration(elapsed)}`);
perf.hit(key);
return sendCachedResponse(req, res, cached, middlewareToggle, next, duration);
@@ -817,7 +800,7 @@ function ApiCache() {
redis.hgetall(key, function (err, obj) {
if (!err && obj && obj.response) {
let elapsed = new Date() - req.apicacheTimer;
- debug("sending cached (redis) version of", key, logDuration(elapsed));
+ log.debug("apicache", "sending cached (redis) version of "+ key+" "+ logDuration(elapsed));
perf.hit(key);
return sendCachedResponse(
@@ -873,7 +856,7 @@ function ApiCache() {
}
if (globalOptions.trackPerformance) {
- debug("WARNING: using trackPerformance flag can cause high memory usage!");
+ log.debug("apicache", "WARNING: using trackPerformance flag can cause high memory usage!");
}
return this;

View file

@@ -59,7 +59,7 @@ if (process.platform === "win32") {
* @returns {Promise<boolean>} The executable is allowed?
*/
async function isAllowedChromeExecutable(executablePath) {
- console.log(config.args);
+ log.info("Chromium", config.args);
if (config.args["allow-all-chrome-exec"] || process.env.UPTIME_KUMA_ALLOW_ALL_CHROME_EXEC === "1") {
return true;
}
@@ -95,7 +95,7 @@ async function getBrowser() {
*/
async function getRemoteBrowser(remoteBrowserID, userId) {
let remoteBrowser = await RemoteBrowser.get(remoteBrowserID, userId);
- log.debug("MONITOR", `Using remote browser: ${remoteBrowser.name} (${remoteBrowser.id})`);
+ log.debug("Chromium", `Using remote browser: ${remoteBrowser.name} (${remoteBrowser.id})`);
browser = chromium.connect(remoteBrowser.url);
return browser;
}

View file

@@ -13,7 +13,7 @@ const {
const semver = require("semver");
const nodeVersion = process.version;
if (semver.lt(nodeVersion, "16.0.0")) {
- log.warn("monitor", "Node <= 16 is unsupported for nostr, sorry :(");
+ log.warn("notification", "Node <= 16 is unsupported for nostr, sorry :(");
} else if (semver.lt(nodeVersion, "18.0.0")) {
// polyfills for node 16
global.crypto = require("crypto");

View file

@@ -1,7 +1,8 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
- const { setSettings, setting } = require("../util-server");
const { getMonitorRelativeURL, UP } = require("../../src/util");
+ const { Settings } = require("../settings");
+ const { log } = require("../../src/util");
class Slack extends NotificationProvider {
@@ -15,15 +16,13 @@ class Slack extends NotificationProvider {
* @returns {Promise<void>}
*/
static async deprecateURL(url) {
- let currentPrimaryBaseURL = await setting("primaryBaseURL");
+ let currentPrimaryBaseURL = await Settings.get("primaryBaseURL");
if (!currentPrimaryBaseURL) {
- console.log("Move the url to be the primary base URL");
- await setSettings("general", {
- primaryBaseURL: url,
- });
+ log.error("notification", "Move the url to be the primary base URL");
+ await Settings.set("primaryBaseURL", url, "general");
} else {
- console.log("Already there, no need to move the primary base URL");
+ log.debug("notification", "Already there, no need to move the primary base URL");
}
}
@@ -86,7 +85,7 @@ class Slack extends NotificationProvider {
await Slack.deprecateURL(notification.slackbutton);
}
- const baseURL = await setting("primaryBaseURL");
+ const baseURL = await Settings.get("primaryBaseURL");
// Button
if (baseURL) {

View file

@@ -1418,7 +1418,7 @@ let needSetup = false;
});
} catch (e) {
- console.error(e);
+ log.error("server", e);
callback({
ok: false,

View file

@@ -60,7 +60,7 @@ module.exports.apiKeySocketHandler = (socket) => {
ok: true,
});
} catch (e) {
- console.error(e);
+ log.error("apikeys", e);
callback({
ok: false,
msg: e.message,

View file

@@ -100,11 +100,11 @@ module.exports.autoStart = async (token) => {
} else {
// Override the current token via args or env var
await setSetting("cloudflaredTunnelToken", token);
- console.log("Use cloudflared token from args or env var");
+ log.info("cloudflare", "Use cloudflared token from args or env var");
}
if (token) {
- console.log("Start cloudflared");
+ log.info("cloudflare", "Start cloudflared");
cloudflared.token = token;
cloudflared.start();
}

View file

@@ -67,7 +67,7 @@ module.exports.maintenanceSocketHandler = (socket) => {
});
} catch (e) {
- console.error(e);
+ log.error("maintenance", e);
callback({
ok: false,
msg: e.message,
@@ -177,7 +177,7 @@ module.exports.maintenanceSocketHandler = (socket) => {
ok: true,
});
} catch (e) {
- console.error(e);
+ log.error("maintenance", e);
callback({
ok: false,
msg: e.message,
@@ -201,7 +201,7 @@ module.exports.maintenanceSocketHandler = (socket) => {
});
} catch (e) {
- console.error(e);
+ log.error("maintenance", e);
callback({
ok: false,
msg: e.message,
@@ -225,7 +225,7 @@ module.exports.maintenanceSocketHandler = (socket) => {
});
} catch (e) {
- console.error(e);
+ log.error("maintenance", e);
callback({
ok: false,
msg: e.message,

View file

@@ -289,7 +289,7 @@ module.exports.statusPageSocketHandler = (socket) => {
});
} catch (error) {
- console.error(error);
+ log.error("socket", error);
callback({
ok: false,
msg: error.message,

View file

@@ -204,12 +204,13 @@ class Logger {
/**
* Write a message to the log
* @private
* @param module The module the log comes from
* @param msg Message to write
- * @param level Log level. One of INFO, WARN, ERROR, DEBUG or can be customized.
+ * @param level {"INFO"|"WARN"|"ERROR"|"DEBUG"} Log level
+ * @returns {void}
*/
- log(module: string, msg: any, level: string) {
+ log(module: string, msg: unknown, level: "INFO"|"WARN"|"ERROR"|"DEBUG"): void {
if (level === "DEBUG" && !isDev) {
return;
}
@@ -219,7 +220,6 @@
}
module = module.toUpperCase();
- level = level.toUpperCase();
let now;
if (dayjs.tz) {
@@ -306,8 +306,8 @@
* @param msg Message to write
* @returns {void}
*/
- info(module: string, msg: unknown) {
- this.log(module, msg, "info");
+ info(module: string, msg: string): void {
+ this.log(module, msg, "INFO");
}
/**
@@ -316,8 +316,8 @@
* @param msg Message to write
* @returns {void}
*/
- warn(module: string, msg: unknown) {
- this.log(module, msg, "warn");
+ warn(module: string, msg: string): void {
+ this.log(module, msg, "WARN");
}
/**
@@ -326,8 +326,8 @@
* @param msg Message to write
* @returns {void}
*/
- error(module: string, msg: unknown) {
- this.log(module, msg, "error");
+ error(module: string, msg: string): void {
+ this.log(module, msg, "ERROR");
}
/**
@@ -336,8 +336,8 @@
* @param msg Message to write
* @returns {void}
*/
- debug(module: string, msg: unknown) {
- this.log(module, msg, "debug");
+ debug(module: string, msg: string): void {
+ this.log(module, msg, "DEBUG");
}
/**
@@ -354,7 +354,7 @@
finalMessage = `${msg}: ${exception}`;
}
- this.log(module, finalMessage, "error");
+ this.log(module, finalMessage, "ERROR");
}
}
@@ -398,7 +398,7 @@ export class TimeLogger {
* @param name Name of monitor
* @returns {void}
*/
- print(name: string) {
+ print(name: string): void {
if (isDev && process.env.TIMELOGGER === "1") {
console.log(name + ": " + (dayjs().valueOf() - this.startTime) + "ms");
}
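
On the src/util side, the Logger wrapper methods now hand uppercase level literals ("INFO", "WARN", "ERROR", "DEBUG") straight to log(), which still drops DEBUG output outside dev mode. A rough sketch of the resulting call flow, describing only behaviour visible in this diff:

    const { log } = require("../src/util");   // the shared Logger instance imported elsewhere in this commit

    log.info("database", "Migrating Status Page - Done");
    //  -> this.log("database", msg, "INFO"); log() uppercases the module tag

    log.debug("apicache", "clearing entire index");
    //  -> this.log("apicache", msg, "DEBUG"); returns early (nothing is printed) when !isDev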