const fs = require("fs");
const { R } = require("redbean-node");
const { setSetting, setting } = require("./util-server");
const { log, sleep } = require("../src/util");
const knex = require("knex");
const path = require("path");
const { EmbeddedMariaDB } = require("./embedded-mariadb");
const mysql = require("mysql2/promise");
const { Settings } = require("./settings");
const { UptimeCalculator } = require("./uptime-calculator");
const dayjs = require("dayjs");
const { SimpleMigrationServer } = require("./utils/simple-migration-server");
const KumaColumnCompiler = require("./utils/knex/lib/dialects/mysql2/schema/mysql2-columncompiler");

/**
 * Database & App Data Folder
 */
class Database {
    /**
     * Bootstrap database for SQLite
     * @type {string}
     */
    static templatePath = "./db/kuma.db";

    /**
     * Data Dir (Default: ./data)
     * @type {string}
     */
    static dataDir;

    /**
     * User Upload Dir (Default: ./data/upload)
     * @type {string}
     */
    static uploadDir;

    /**
     * Chrome Screenshot Dir (Default: ./data/screenshots)
     * @type {string}
     */
    static screenshotDir;

    /**
     * SQLite file path (Default: ./data/kuma.db)
     * @type {string}
     */
    static sqlitePath;

    /**
     * For storing Docker TLS certs (Default: ./data/docker-tls)
     * @type {string}
     */
    static dockerTLSDir;

    /**
     * @type {boolean}
     */
    static patched = false;

    /**
     * SQLite only
     * Add the patch filename as the key.
     * Values:
     *      true: apply the patch regardless of order
     *      false: do nothing
     *      { parents: [] }: the listed patch files must be applied first
     * @deprecated
     */
    static patchList = {
"patch-setting-value-type.sql" : true ,
"patch-improve-performance.sql" : true ,
2021-09-11 16:37:33 +02:00
"patch-2fa.sql" : true ,
2021-09-12 17:05:23 +02:00
"patch-add-retry-interval-monitor.sql" : true ,
2021-09-16 22:48:28 +08:00
"patch-incident-table.sql" : true ,
2021-09-19 19:04:51 +08:00
"patch-group-table.sql" : true ,
2021-10-01 00:09:43 +08:00
"patch-monitor-push_token.sql" : true ,
2021-10-02 16:48:27 +02:00
"patch-http-monitor-method-body-and-headers.sql" : true ,
2021-10-19 00:42:33 +02:00
"patch-2fa-invalidate-used-token.sql" : true ,
2021-10-27 15:33:15 +08:00
"patch-notification_sent_history.sql" : true ,
2021-11-04 10:12:06 +01:00
"patch-monitor-basic-auth.sql" : true ,
2022-01-14 09:09:37 +00:00
"patch-add-docker-columns.sql" : true ,
2021-12-27 18:54:48 +08:00
"patch-status-page.sql" : true ,
2021-10-30 20:37:15 +03:00
"patch-proxy.sql" : true ,
2022-04-05 21:27:50 +08:00
"patch-monitor-expiry-notification.sql" : true ,
2022-04-17 14:53:13 +08:00
"patch-status-page-footer-css.sql" : true ,
2021-11-03 21:46:43 -04:00
"patch-added-mqtt-monitor.sql" : true ,
2022-06-11 17:23:12 +01:00
"patch-add-clickable-status-page-link.sql" : true ,
2022-05-12 12:48:03 -05:00
"patch-add-sqlserver-monitor.sql" : true ,
2022-05-13 12:58:23 -05:00
"patch-add-other-auth.sql" : { parents : [ "patch-monitor-basic-auth.sql" ] } ,
2022-08-03 12:00:39 +07:00
"patch-grpc-monitor.sql" : true ,
2022-05-12 11:48:38 +02:00
"patch-add-radius-monitor.sql" : true ,
2022-01-24 09:18:12 +01:00
"patch-monitor-add-resend-interval.sql" : true ,
2022-07-14 08:32:51 +01:00
"patch-ping-packet-size.sql" : true ,
2022-10-11 21:48:43 +08:00
"patch-maintenance-table2.sql" : true ,
2023-01-08 21:22:36 +13:00
"patch-add-gamedig-monitor.sql" : true ,
2023-01-10 20:25:45 +00:00
"patch-add-google-analytics-status-page-tag.sql" : true ,
2023-02-25 17:59:25 +08:00
"patch-http-body-encoding.sql" : true ,
2021-11-11 20:06:32 -03:00
"patch-add-description-monitor.sql" : true ,
2023-02-28 16:58:36 +08:00
"patch-api-key-table.sql" : true ,
2023-01-04 14:37:03 +07:00
"patch-monitor-tls.sql" : true ,
2023-03-31 04:04:17 +08:00
"patch-maintenance-cron.sql" : true ,
2023-01-28 02:58:03 +01:00
"patch-add-parent-monitor.sql" : true ,
2023-04-05 19:10:21 -05:00
"patch-add-invert-keyword.sql" : true ,
2023-07-13 19:07:26 +03:30
"patch-added-json-query.sql" : true ,
2023-07-17 11:45:44 +03:30
"patch-added-kafka-producer.sql" : true ,
2023-07-04 19:37:45 -04:00
"patch-add-certificate-expiry-status-page.sql" : true ,
2023-08-02 09:40:19 +02:00
"patch-monitor-oauth-cc.sql" : true ,
2023-08-07 01:14:56 +09:00
"patch-add-timeout-monitor.sql" : true ,
2023-08-08 03:14:21 +08:00
"patch-add-gamedig-given-port.sql" : true ,
2023-11-13 21:19:43 +08:00
"patch-notification-config.sql" : true ,
2023-10-28 10:12:55 +03:30
"patch-fix-kafka-producer-booleans.sql" : true ,
2024-03-31 12:04:22 +08:00
"patch-timeout.sql" : true ,
"patch-monitor-tls-info-add-fk.sql" : true , // The last file so far converted to a knex migration file
2022-04-26 01:26:57 +02:00
} ;

    /**
     * The final version should be 10 after the tag feature is merged
     * @deprecated Use patchList for any new feature
     */
    static latestVersion = 10;

    static noReject = true;

    static dbConfig = {};

    static knexMigrationsPath = "./db/knex_migrations";

    /**
     * Initialize the data directory
     * @param {object} args Arguments to initialize DB with
     * @returns {void}
     */
    static initDataDir(args) {
        // Data Directory (must end with "/")
        Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";

        Database.sqlitePath = path.join(Database.dataDir, "kuma.db");

        if (!fs.existsSync(Database.dataDir)) {
            fs.mkdirSync(Database.dataDir, { recursive: true });
        }

        Database.uploadDir = path.join(Database.dataDir, "upload/");

        if (!fs.existsSync(Database.uploadDir)) {
            fs.mkdirSync(Database.uploadDir, { recursive: true });
        }

        // Create screenshot dir
        Database.screenshotDir = path.join(Database.dataDir, "screenshots/");
        if (!fs.existsSync(Database.screenshotDir)) {
            fs.mkdirSync(Database.screenshotDir, { recursive: true });
        }

        Database.dockerTLSDir = path.join(Database.dataDir, "docker-tls/");
        if (!fs.existsSync(Database.dockerTLSDir)) {
            fs.mkdirSync(Database.dockerTLSDir, { recursive: true });
        }

        log.info("server", `Data Dir: ${Database.dataDir}`);
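
        // Resulting layout under the data dir (defaults shown; the base path comes from
        // DATA_DIR, the --data-dir argument, or "./data/"):
        //   kuma.db, upload/, screenshots/, docker-tls/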
    }

    /**
     * Read the database config
     * @throws {Error} If the config is invalid
     * @typedef {string|undefined} envString
     * @returns {{type: "sqlite"} | {type:envString, hostname:envString, port:envString, database:envString, username:envString, password:envString}} Database config
     */
    static readDBConfig() {
        let dbConfig;

        let dbConfigString = fs.readFileSync(path.join(Database.dataDir, "db-config.json")).toString("utf-8");
        dbConfig = JSON.parse(dbConfigString);

        if (typeof dbConfig !== "object") {
            throw new Error("Invalid db-config.json, it must be an object");
        }

        if (typeof dbConfig.type !== "string") {
            throw new Error("Invalid db-config.json, type must be a string");
        }
        return dbConfig;
    }
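
    // Example db-config.json (illustrative values only; the field names are the ones read
    // by connect() below):
    // { "type": "mariadb", "hostname": "127.0.0.1", "port": 3306,
    //   "dbName": "kuma", "username": "kuma", "password": "secret" }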

    /**
     * @typedef {string|undefined} envString
     * @param {{type: "sqlite"} | {type:envString, hostname:envString, port:envString, database:envString, username:envString, password:envString}} dbConfig the database configuration that should be written
     * @returns {void}
     */
    static writeDBConfig(dbConfig) {
        fs.writeFileSync(path.join(Database.dataDir, "db-config.json"), JSON.stringify(dbConfig, null, 4));
    }

    /**
     * Connect to the database
     * @param {boolean} testMode Should the connection be started in test mode?
     * @param {boolean} autoloadModels Should models be automatically loaded?
     * @param {boolean} noLog Should logs not be output?
     * @returns {Promise<void>}
     */
    static async connect(testMode = false, autoloadModels = true, noLog = false) {
        // Patch "mysql2" knex client
        // Workaround: Tried extending the ColumnCompiler class, but it didn't work for unknown reasons, so I override the function via prototype
        const { getDialectByNameOrAlias } = require("knex/lib/dialects");
        const mysql2 = getDialectByNameOrAlias("mysql2");
        mysql2.prototype.columnCompiler = function () {
            return new KumaColumnCompiler(this, ...arguments);
        };

        const acquireConnectionTimeout = 120 * 1000;

        let dbConfig;
        try {
            dbConfig = this.readDBConfig();
            Database.dbConfig = dbConfig;
        } catch (err) {
            log.warn("db", err.message);

            dbConfig = {
                type: "sqlite",
            };
        }

        let config = {};

        let mariadbPoolConfig = {
            min: 0,
            max: 10,
            idleTimeoutMillis: 30000,
        };

        log.info("db", `Database Type: ${dbConfig.type}`);

        if (dbConfig.type === "sqlite") {

            if (!fs.existsSync(Database.sqlitePath)) {
                log.info("server", "Copying Database");
                fs.copyFileSync(Database.templatePath, Database.sqlitePath);
            }

            // Check if Database.sqlitePath is on NFS
            if (fs.existsSync(Database.sqlitePath)) {
                let stats = fs.statSync(Database.sqlitePath);
                log.debug("server", "SQLite database inode: " + stats.ino);
                if (stats.ino === 0) {
                    log.error("server", "It seems that the database is on a network drive (NFS). Uptime Kuma will be UNSTABLE and the database will be CORRUPTED. Please use a local disk.");
                }
            }

            const Dialect = require("knex/lib/dialects/sqlite3/index.js");
            Dialect.prototype._driver = () => require("@louislam/sqlite3");

            config = {
                client: Dialect,
                connection: {
                    filename: Database.sqlitePath,
                    acquireConnectionTimeout: acquireConnectionTimeout,
                },
                useNullAsDefault: true,
                pool: {
                    min: 1,
                    max: 1,
                    idleTimeoutMillis: 120 * 1000,
                    propagateCreateError: false,
                    acquireTimeoutMillis: acquireConnectionTimeout,
                }
            };

        } else if (dbConfig.type === "mariadb") {
            if (!/^\w+$/.test(dbConfig.dbName)) {
                throw Error("Invalid database name. A database name can only consist of letters, numbers and underscores");
            }

            const connection = await mysql.createConnection({
                host: dbConfig.hostname,
                port: dbConfig.port,
                user: dbConfig.username,
                password: dbConfig.password,
            });

            await connection.execute("CREATE DATABASE IF NOT EXISTS " + dbConfig.dbName + " CHARACTER SET utf8mb4");
            connection.end();

            config = {
                client: "mysql2",
                connection: {
                    host: dbConfig.hostname,
                    port: dbConfig.port,
                    user: dbConfig.username,
                    password: dbConfig.password,
                    database: dbConfig.dbName,
                    timezone: "Z",
                    typeCast: function (field, next) {
                        if (field.type === "DATETIME") {
                            // Do not perform timezone conversion
                            return field.string();
                        }
                        return next();
                    },
                },
                pool: mariadbPoolConfig,
            };
        } else if (dbConfig.type === "embedded-mariadb") {
            let embeddedMariaDB = EmbeddedMariaDB.getInstance();
            await embeddedMariaDB.start();
            log.info("mariadb", "Embedded MariaDB started");

            config = {
                client: "mysql2",
                connection: {
                    socketPath: embeddedMariaDB.socketPath,
                    user: embeddedMariaDB.username,
                    database: "kuma",
                    timezone: "Z",
                    typeCast: function (field, next) {
                        if (field.type === "DATETIME") {
                            // Do not perform timezone conversion
                            return field.string();
                        }
                        return next();
                    },
                },
                pool: mariadbPoolConfig,
            };
        } else {
            throw new Error("Unknown Database type: " + dbConfig.type);
        }

        // Set to utf8mb4 for MariaDB
        if (dbConfig.type.endsWith("mariadb")) {
            config.pool = {
                afterCreate(conn, done) {
                    conn.query("SET CHARACTER SET utf8mb4;", (err) => done(err, conn));
                },
            };
        }

        const knexInstance = knex(config);

        R.setup(knexInstance);

        if (process.env.SQL_LOG === "1") {
            R.debug(true);
        }

        // Auto map the model to a bean object
        R.freeze(true);

        if (autoloadModels) {
            await R.autoloadModels("./server/model");
        }

        if (dbConfig.type === "sqlite") {
            await this.initSQLite(testMode, noLog);
        } else if (dbConfig.type.endsWith("mariadb")) {
            await this.initMariaDB();
        }
    }
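
    // Typical boot order (illustrative sketch only; the actual call sites live in the server
    // entry point and may differ):
    //   Database.initDataDir(args);   // resolve the data dir and create sub-folders
    //   await Database.connect();     // read db-config.json and set up knex + redbean
    //   await Database.patch();       // run knex migrations (plus the legacy SQLite patches)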

    /**
     * @param {boolean} testMode Should the connection be started in test mode?
     * @param {boolean} noLog Should logs not be output?
     * @returns {Promise<void>}
     */
    static async initSQLite(testMode, noLog) {
        await R.exec("PRAGMA foreign_keys = ON");

        if (testMode) {
            // Change to MEMORY
            await R.exec("PRAGMA journal_mode = MEMORY");
        } else {
            // Change to WAL
            await R.exec("PRAGMA journal_mode = WAL");
        }
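
        // Note (assumed from the SQLite documentation): a negative cache_size is a size in
        // KiB, so -12000 is roughly a 12 MB page cache, and INCREMENTAL auto_vacuum tracks
        // freed pages so they can later be reclaimed with "PRAGMA incremental_vacuum".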
        await R.exec("PRAGMA cache_size = -12000");
        await R.exec("PRAGMA auto_vacuum = INCREMENTAL");

        // This ensures that an operating system crash or power failure will not corrupt the database.
        // FULL synchronous is very safe, but it is also slower.
        // Read more: https://sqlite.org/pragma.html#pragma_synchronous
        await R.exec("PRAGMA synchronous = NORMAL");

        if (!noLog) {
            log.debug("db", "SQLite config:");
            log.debug("db", await R.getAll("PRAGMA journal_mode"));
            log.debug("db", await R.getAll("PRAGMA cache_size"));
            log.debug("db", "SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
        }
    }

    /**
     * Initialize MariaDB
     * @returns {Promise<void>}
     */
    static async initMariaDB() {
        log.debug("db", "Checking if MariaDB database exists...");

        let hasTable = await R.hasTable("docker_host");
        if (!hasTable) {
            const { createTables } = require("../db/knex_init_db");
            await createTables();
        } else {
            log.debug("db", "MariaDB database already exists");
        }
    }

    /**
     * Patch the database
     * @param {number} port Start the migration server for aggregate tables on this port if provided
     * @param {string} hostname Start the migration server for aggregate tables on this hostname if provided
     * @returns {Promise<void>}
     */
    static async patch(port = undefined, hostname = undefined) {
        // Still need to keep this for old versions of Uptime Kuma
        if (Database.dbConfig.type === "sqlite") {
            await this.patchSqlite();
        }

        // Using knex migrations
        // https://knexjs.org/guide/migrations.html
        // https://gist.github.com/NigelEarle/70db130cc040cc2868555b29a0278261
        try {
            // Disable foreign key check for SQLite
            // Known issue of knex: https://github.com/drizzle-team/drizzle-orm/issues/1813
            if (Database.dbConfig.type === "sqlite") {
                await R.exec("PRAGMA foreign_keys = OFF");
            }

            await R.knex.migrate.latest({
                directory: Database.knexMigrationsPath,
            });

            // Enable foreign key check for SQLite
            if (Database.dbConfig.type === "sqlite") {
                await R.exec("PRAGMA foreign_keys = ON");
            }

            await this.migrateAggregateTable(port, hostname);

        } catch (e) {
            // Allow missing patch files for downgrades or testing PRs.
            if (e.message.includes("the following files are missing:")) {
                log.warn("db", e.message);
                log.warn("db", "Database migration failed, you may be downgrading Uptime Kuma.");
            } else {
                log.error("db", "Database migration failed");
                throw e;
            }
        }
    }

    /**
     * TODO
     * @returns {Promise<void>}
     */
    static async rollbackLatestPatch() {

    }

    /**
     * Patch the database for SQLite
     * @returns {Promise<void>}
     * @deprecated
     */
    static async patchSqlite() {
        let version = parseInt(await setting("database_version"));
        if (!version) {
            version = 0;
        }

        if (version !== this.latestVersion) {
            log.info("db", "Your database version: " + version);
            log.info("db", "Latest database version: " + this.latestVersion);
        }

        if (version === this.latestVersion) {
            log.debug("db", "Database patch not needed");
        } else if (version > this.latestVersion) {
            log.warn("db", "Warning: Database version is newer than expected");
        } else {
            log.info("db", "Database patch is needed");

            // Try catch anything here
            try {
                for (let i = version + 1; i <= this.latestVersion; i++) {
                    const sqlFile = `./db/old_migrations/patch${i}.sql`;
                    log.info("db", `Patching ${sqlFile}`);
                    await Database.importSQLFile(sqlFile);
                    log.info("db", `Patched ${sqlFile}`);
                    await setSetting("database_version", i);
                }
            } catch (ex) {
                await Database.close();

                log.error("db", ex);
                log.error("db", "Start Uptime-Kuma failed due to issue patching the database");
                log.error("db", "Please submit a bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");

                process.exit(1);
            }
        }

        await this.patchSqlite2();
        await this.migrateNewStatusPage();
    }

    /**
     * Patch DB using the new process
     * Call it from patchSqlite() only
     * @deprecated
     * @private
     * @returns {Promise<void>}
     */
    static async patchSqlite2() {
        log.debug("db", "Database Patch 2.0 Process");
        let databasePatchedFiles = await setting("databasePatchedFiles");

        if (!databasePatchedFiles) {
            databasePatchedFiles = {};
        }

        log.debug("db", "Patched files:");
        log.debug("db", databasePatchedFiles);

        try {
            for (let sqlFilename in this.patchList) {
                await this.patch2Recursion(sqlFilename, databasePatchedFiles);
            }

            if (this.patched) {
                log.info("db", "Database Patched Successfully");
            }

        } catch (ex) {
            await Database.close();

            log.error("db", ex);
            log.error("db", "Start Uptime-Kuma failed due to issue patching the database");
            log.error("db", "Please submit a bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");

            process.exit(1);
        }

        await setSetting("databasePatchedFiles", databasePatchedFiles);
    }

    /**
     * SQLite only
     * Migrate status page value in setting to "status_page" table
     * @returns {Promise<void>}
     */
    static async migrateNewStatusPage() {
        // Fix 1.13.0 empty slug bug
        await R.exec("UPDATE status_page SET slug = 'empty-slug-recover' WHERE TRIM(slug) = ''");

        let title = await setting("title");

        if (title) {
            console.log("Migrating Status Page");

            let statusPageCheck = await R.findOne("status_page", " slug = 'default' ");

            if (statusPageCheck !== null) {
                console.log("Migrating Status Page - Skip, the default slug record already exists");
                return;
            }

            let statusPage = R.dispense("status_page");
            statusPage.slug = "default";
            statusPage.title = title;
            statusPage.description = await setting("description");
            statusPage.icon = await setting("icon");
            statusPage.theme = await setting("statusPageTheme");
            statusPage.published = !!await setting("statusPagePublished");
            statusPage.search_engine_index = !!await setting("searchEngineIndex");
            statusPage.show_tags = !!await setting("statusPageTags");
            statusPage.password = null;

            if (!statusPage.title) {
                statusPage.title = "My Status Page";
            }

            if (!statusPage.icon) {
                statusPage.icon = "";
            }

            if (!statusPage.theme) {
                statusPage.theme = "light";
            }

            let id = await R.store(statusPage);

            await R.exec("UPDATE incident SET status_page_id = ? WHERE status_page_id IS NULL", [
                id
            ]);

            await R.exec("UPDATE [group] SET status_page_id = ? WHERE status_page_id IS NULL", [
                id
            ]);

            await R.exec("DELETE FROM setting WHERE type = 'statusPage'");

            // Migrate Entry Page if it is a status page
            let entryPage = await setting("entryPage");
            if (entryPage === "statusPage") {
                await setSetting("entryPage", "statusPage-default", "general");
            }

            console.log("Migrating Status Page - Done");
        }
    }

    /**
     * Patch database using the new patching process
     * Call it from patchSqlite2() only
     * @private
     * @param {string} sqlFilename Name of SQL file to load
     * @param {object} databasePatchedFiles Patch status of database files
     * @returns {Promise<void>}
     */
    static async patch2Recursion(sqlFilename, databasePatchedFiles) {
        let value = this.patchList[sqlFilename];

        if (!value) {
            log.info("db", sqlFilename + " skip");
            return;
        }

        // Check if patched
        if (!databasePatchedFiles[sqlFilename]) {
            log.info("db", sqlFilename + " is not patched");

            if (value.parents) {
                log.info("db", sqlFilename + " need parents");
                for (let parentSQLFilename of value.parents) {
                    await this.patch2Recursion(parentSQLFilename, databasePatchedFiles);
                }
            }

            log.info("db", sqlFilename + " is patching");
            this.patched = true;
            await this.importSQLFile("./db/old_migrations/" + sqlFilename);
            databasePatchedFiles[sqlFilename] = true;
            log.info("db", sqlFilename + " was patched successfully");

        } else {
            log.debug("db", sqlFilename + " is already patched, skip");
        }
    }

    /**
     * Load an SQL file and execute it
     * @param {string} filename Filename of SQL file to import
     * @returns {Promise<void>}
     */
    static async importSQLFile(filename) {
        // Sadly, multiple SQL statements are not supported by many SQLite libraries, so I have to implement it myself
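        // Note: the statement splitting below is intentionally naive; it assumes the patch
        // files never contain a semicolon inside a string literal.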
        await R.getCell("SELECT 1");

        let text = fs.readFileSync(filename).toString();

        // Remove all comments (--)
        let lines = text.split("\n");
        lines = lines.filter((line) => {
            return !line.startsWith("--");
        });

        // Split statements by semicolon
        // Filter out empty line
        text = lines.join("\n");

        let statements = text.split(";")
            .map((statement) => {
                return statement.trim();
            })
            .filter((statement) => {
                return statement !== "";
            });

        for (let statement of statements) {
            await R.exec(statement);
        }
    }

    /**
     * Special handling, because tarn.js throws a promise rejection that cannot be caught
     * @returns {Promise<void>}
     */
    static async close() {
        const listener = (reason, p) => {
            Database.noReject = false;
        };
        process.addListener("unhandledRejection", listener);

        log.info("db", "Closing the database");

        // Flush WAL to main database
        if (Database.dbConfig.type === "sqlite") {
            await R.exec("PRAGMA wal_checkpoint(TRUNCATE)");
        }

        while (true) {
            Database.noReject = true;
            await R.close();
            await sleep(2000);

            if (Database.noReject) {
                break;
            } else {
                log.info("db", "Waiting to close the database");
            }
        }
        log.info("db", "Database closed");

        process.removeListener("unhandledRejection", listener);
    }

    /**
     * Get the size of the database (SQLite only)
     * @returns {number} Size of database
     */
    static getSize() {
        if (Database.dbConfig.type === "sqlite") {
            log.debug("db", "Database.getSize()");
            let stats = fs.statSync(Database.sqlitePath);
            log.debug("db", stats);
            return stats.size;
        }
        return 0;
    }

    /**
     * Shrink the database
     * @returns {Promise<void>}
     */
    static async shrink() {
        if (Database.dbConfig.type === "sqlite") {
            await R.exec("VACUUM");
        }
    }

    /**
     * @returns {string} Get the SQL for the current time plus a number of hours
     */
    static sqlHourOffset() {
        if (Database.dbConfig.type === "sqlite") {
            return "DATETIME('now', ? || ' hours')";
        } else {
            return "DATE_ADD(NOW(), INTERVAL ? HOUR)";
        }
    }
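
    // Usage sketch: the returned fragment is interpolated into a query string and the hour
    // offset is bound as a parameter, e.g. clearHeartbeatData() below passes -24 to match
    // rows older than 24 hours.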

    /**
     * Migrate the old data in the heartbeat table to the new format (stat_daily, stat_hourly, stat_minutely)
     * It should be run once while upgrading V1 to V2
     *
     * Normally this should be done in a transaction, but UptimeCalculator wasn't designed to run inside one.
     * I don't want to heavily modify the UptimeCalculator, so it is not in a transaction.
     * Run `npm run reset-migrate-aggregate-table-state` to reset, in case the migration is interrupted.
     * @param {number} port Start the migration server on this port if provided
     * @param {string} hostname Start the migration server on this hostname if provided
     * @returns {Promise<void>}
     */
    static async migrateAggregateTable(port, hostname = undefined) {
        log.debug("db", "Enter Migrate Aggregate Table function");

        // Add a setting for 2.0.0-dev users to skip this migration
        if (process.env.SET_MIGRATE_AGGREGATE_TABLE_TO_TRUE === "1") {
            log.warn("db", "SET_MIGRATE_AGGREGATE_TABLE_TO_TRUE is set to 1, skipping aggregate table migration forever (for 2.0.0-dev users)");
            await Settings.set("migrateAggregateTableState", "migrated");
        }

        let migrateState = await Settings.get("migrateAggregateTableState");

        // Skip if already migrated
        // If it is migrating, it possibly means the migration was interrupted, or the migration is in progress
        if (migrateState === "migrated") {
            log.debug("db", "Migrated aggregate table already, skip");
            return;
        } else if (migrateState === "migrating") {
            log.warn("db", "Aggregate table migration is already in progress, or it was interrupted");
            throw new Error("Aggregate table migration is already in progress");
        }

        /**
         * Start migration server for displaying the migration status
         * @type {SimpleMigrationServer}
         */
        let migrationServer;
        let msg;

        if (port) {
            migrationServer = new SimpleMigrationServer();
            await migrationServer.start(port, hostname);
        }

        log.info("db", "Migrating Aggregate Table");

        log.info("db", "Getting list of unique monitors");

        // Get a list of unique monitors from the heartbeat table, using raw sql
        let monitors = await R.getAll(`
            SELECT DISTINCT monitor_id
            FROM heartbeat
            ORDER BY monitor_id ASC
        `);

        // Stop if stat_* tables are not empty
        for (let table of [ "stat_minutely", "stat_hourly", "stat_daily" ]) {
            let countResult = await R.getRow(`SELECT COUNT(*) AS count FROM ${table}`);
            let count = countResult.count;
            if (count > 0) {
                log.warn("db", `Aggregate table ${table} is not empty, migration will not be started (Maybe you were using 2.0.0-dev?)`);
                await migrationServer?.stop();
                return;
            }
        }
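
        // Mark the state as "migrating" before touching any data, so that an interrupted run
        // is detected by the check above on the next startup.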
        await Settings.set("migrateAggregateTableState", "migrating");

        let progressPercent = 0;
        let part = 100 / monitors.length;
        let i = 1;
        for (let monitor of monitors) {
            // Get a list of unique dates from the heartbeat table, using raw sql
            let dates = await R.getAll(`
                SELECT DISTINCT DATE(time) AS date
                FROM heartbeat
                WHERE monitor_id = ?
                ORDER BY date ASC
            `, [
                monitor.monitor_id
            ]);

            for (let date of dates) {
                // New Uptime Calculator
                let calculator = new UptimeCalculator();
                calculator.monitorID = monitor.monitor_id;
                calculator.setMigrationMode(true);

                // Get all the heartbeats for this monitor and date
                let heartbeats = await R.getAll(`
                    SELECT status, ping, time
                    FROM heartbeat
                    WHERE monitor_id = ?
                    AND DATE(time) = ?
                    ORDER BY time ASC
                `, [ monitor.monitor_id, date.date ]);

                if (heartbeats.length > 0) {
                    msg = `[DON'T STOP] Migrating monitor data ${monitor.monitor_id} - ${date.date} [${progressPercent.toFixed(2)}%][${i}/${monitors.length}]`;
                    log.info("db", msg);
                    migrationServer?.update(msg);
                }

                for (let heartbeat of heartbeats) {
                    await calculator.update(heartbeat.status, parseFloat(heartbeat.ping), dayjs(heartbeat.time));
                }

                progressPercent += (Math.round(part / dates.length * 100) / 100);

                // Lazy to fix the floating point issue, it is acceptable since it is just a progress bar
                if (progressPercent > 100) {
                    progressPercent = 100;
                }
            }

            i++;
        }

        msg = "Clearing non-important heartbeats";
        log.info("db", msg);
        migrationServer?.update(msg);

        await Database.clearHeartbeatData(true);
        await Settings.set("migrateAggregateTableState", "migrated");
        await migrationServer?.stop();

        if (monitors.length > 0) {
            log.info("db", "Aggregate Table Migration Completed");
        } else {
            log.info("db", "No data to migrate");
        }
    }

    /**
     * Remove all non-important heartbeats from the heartbeat table, keeping the last 24 hours or the latest {KEEP_LAST_ROWS} rows for each monitor
     * @param {boolean} detailedLog Log detailed information
     * @returns {Promise<void>}
     */
    static async clearHeartbeatData(detailedLog = false) {
        let monitors = await R.getAll("SELECT id FROM monitor");
        const sqlHourOffset = Database.sqlHourOffset();

        for (let monitor of monitors) {
            if (detailedLog) {
                log.info("db", "Deleting non-important heartbeats for monitor " + monitor.id);
            }
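
            // Bound parameters, in order of the placeholders below: the monitor id, the hour
            // offset (-24 = only rows older than 24 hours), the monitor id again for the
            // subquery, and the number of most recent rows to always keep (100).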
            await R.exec(`
                DELETE FROM heartbeat
                WHERE monitor_id = ?
                AND important = 0
                AND time < ${sqlHourOffset}
                AND id NOT IN (
                    SELECT id
                    FROM heartbeat
                    WHERE monitor_id = ?
                    ORDER BY time DESC
                    LIMIT ?
                )
            `, [
                monitor.id,
                -24,
                monitor.id,
                100,
            ]);
        }
    }
}

module.exports = Database;