Mirror of https://github.com/louislam/uptime-kuma.git (synced 2024-11-27 16:54:04 +00:00)

Merge branch 'louislam:master' into feature/auto_build_and_release

This commit is contained in: commit 2778fa583b

208 changed files with 38,875 additions and 9,873 deletions
@@ -2,8 +2,12 @@
 /dist
 /node_modules
 /data
+/out
+/test
+/kubernetes
 /.do
 **/.dockerignore
+/private
 **/.git
 **/.gitignore
 **/docker-compose*
@@ -15,11 +19,16 @@ README.md
 .eslint*
 .stylelint*
 /.github
-package-lock.json
 yarn.lock
 app.json
 CODE_OF_CONDUCT.md
 CONTRIBUTING.md
+CNAME
+install.sh
+SECURITY.md
+tsconfig.json
+.env
+/tmp

 ### .gitignore content (commented rules are duplicated)
42  .eslintrc.js

@@ -1,4 +1,5 @@
 module.exports = {
+    root: true,
     env: {
         browser: true,
         commonjs: true,
@@ -16,6 +17,11 @@ module.exports = {
         requireConfigFile: false,
     },
     rules: {
+        "linebreak-style": ["error", "unix"],
+        "camelcase": ["warn", {
+            "properties": "never",
+            "ignoreImports": true
+        }],
         // override/add rules settings here, such as:
         // 'vue/no-unused-vars': 'error'
         "no-unused-vars": "warn",
@@ -28,11 +34,12 @@ module.exports = {
         },
     ],
     quotes: ["warn", "double"],
-    //semi: ['off', 'never'],
+    semi: "warn",
     "vue/html-indent": ["warn", 4], // default: 2
     "vue/max-attributes-per-line": "off",
     "vue/singleline-html-element-content-newline": "off",
     "vue/html-self-closing": "off",
+    "vue/attribute-hyphenation": "off", // This change noNL to "no-n-l" unexpectedly
     "no-multi-spaces": ["error", {
         ignoreEOLComments: true,
     }],
@@ -71,5 +78,36 @@ module.exports = {
     "eol-last": ["error", "always"],
     //'prefer-template': 'error',
     "comma-dangle": ["warn", "only-multiline"],
+    "no-empty": ["error", {
+        "allowEmptyCatch": true
+    }],
+    "no-control-regex": "off",
+    "one-var": ["error", "never"],
+    "max-statements-per-line": ["error", { "max": 1 }]
     },
-}
+    "overrides": [
+        {
+            "files": [ "src/languages/*.js", "src/icon.js" ],
+            "rules": {
+                "comma-dangle": ["error", "always-multiline"],
+            }
+        },
+
+        // Override for jest puppeteer
+        {
+            "files": [
+                "**/*.spec.js",
+                "**/*.spec.jsx"
+            ],
+            env: {
+                jest: true,
+            },
+            globals: {
+                page: true,
+                browser: true,
+                context: true,
+                jestPuppeteer: true,
+            },
+        }
+    ]
+};
12  .github/FUNDING.yml (vendored, new file)

@@ -0,0 +1,12 @@
+# These are supported funding model platforms
+
+github: louislam # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
+#patreon: # Replace with a single Patreon username
+open_collective: uptime-kuma # Replace with a single Open Collective username
+#ko_fi: # Replace with a single Ko-fi username
+#tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
+#community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
+#liberapay: # Replace with a single Liberapay username
+#issuehunt: # Replace with a single IssueHunt username
+#otechie: # Replace with a single Otechie username
+#custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
10  .github/ISSUE_TEMPLATE/ask-for-help.md (vendored)

@@ -9,3 +9,13 @@ assignees: ''
 **Is it a duplicate question?**
 Please search in Issues without filters: https://github.com/louislam/uptime-kuma/issues?q=
 
+**Describe your problem**
+Please describe what you are asking for
+
+**Info**
+Uptime Kuma Version:
+Using Docker?: Yes/No
+Docker Version:
+Node.js Version (Without Docker only):
+OS:
+Browser:
21  .github/ISSUE_TEMPLATE/bug_report.md (vendored)

@@ -15,6 +15,7 @@ A clear and concise description of what the bug is.
 
 **To Reproduce**
 Steps to reproduce the behavior:
+
 1. Go to '...'
 2. Click on '....'
 3. Scroll down to '....'
@@ -23,15 +24,19 @@ Steps to reproduce the behavior:
 **Expected behavior**
 A clear and concise description of what you expected to happen.
 
+**Info**
+Uptime Kuma Version:
+Using Docker?: Yes/No
+Docker Version:
+Node.js Version (Without Docker only):
+OS:
+Browser:
+
 **Screenshots**
 If applicable, add screenshots to help explain your problem.
 
-**Desktop (please complete the following information):**
-- Uptime Kuma Version:
-- Using Docker?: Yes/No
-- OS:
-- Browser:
-
-**Additional context**
-Add any other context about the problem here.
+**Error Log**
+It is easier for us to find out the problem.
+
+Docker: `docker logs <container id>`
+PM2: `~/.pm2/logs/` (e.g. `/home/ubuntu/.pm2/logs`)
35  .github/workflows/auto-test.yml (vendored, new file)

@@ -0,0 +1,35 @@
+# This workflow will do a clean install of node dependencies, cache/restore them, build the source code and run tests across different versions of node
+# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
+
+name: Auto Test
+
+on:
+  push:
+    branches: [ master ]
+  pull_request:
+    branches: [ master ]
+
+jobs:
+  auto-test:
+    runs-on: ${{ matrix.os }}
+
+    strategy:
+      matrix:
+        os: [macos-latest, ubuntu-latest, windows-latest]
+        node-version: [14.x, 16.x]
+        # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
+
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v2
+        with:
+          node-version: ${{ matrix.node-version }}
+          cache: 'npm'
+      - run: npm run install-legacy
+      - run: npm run build
+      - run: npm test
+        env:
+          HEADLESS_TEST: 1
+          JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}
7  .gitignore (vendored)

@@ -7,4 +7,9 @@ dist-ssr
 
 /data
 !/data/.gitkeep
 .vscode
+
+/private
+/out
+/tmp
+.env
@@ -1,3 +1,9 @@
 {
-    "extends": "stylelint-config-recommended",
+    "extends": "stylelint-config-standard",
+    "rules": {
+        "indentation": 4,
+        "no-descending-specificity": null,
+        "selector-list-comma-newline-after": null,
+        "declaration-empty-line-before": null
+    }
 }
1  CNAME (new file)

@@ -0,0 +1 @@
+git.kuma.pet
@ -106,7 +106,7 @@ Violating these terms may lead to a permanent ban.
|
||||||
### 4. Permanent Ban
|
### 4. Permanent Ban
|
||||||
|
|
||||||
**Community Impact**: Demonstrating a pattern of violation of community
|
**Community Impact**: Demonstrating a pattern of violation of community
|
||||||
standards, including sustained inappropriate behavior, harassment of an
|
standards, including sustained inappropriate behavior, harassment of an
|
||||||
individual, or aggression toward or disparagement of classes of individuals.
|
individual, or aggression toward or disparagement of classes of individuals.
|
||||||
|
|
||||||
**Consequence**: A permanent ban from any sort of public interaction within
|
**Consequence**: A permanent ban from any sort of public interaction within
|
||||||
|
|
176  CONTRIBUTING.md

@@ -2,103 +2,179 @@

 First of all, thank you to everyone who made pull requests for Uptime Kuma. I never thought the GitHub community could be that nice! Because of this, I also never thought other people would actually read and edit my code. It is not structured and commented so well, lol. Sorry about that.

 The project was created with vite.js (Vue 3). Then I created a sub-directory called "server" for the server part. Both frontend and backend share the same package.json.

-The frontend code build into "dist" directory. The server uses "dist" as root. This is how production is working.
+The frontend code is built into the "dist" directory. The server (express.js) exposes the "dist" directory as the root of the endpoint. This is how production works.

-Your IDE should follow the config in ".editorconfig". The most special thing is I set it to 4 spaces indentation. I know 2 spaces indentation became a kind of standard nowadays for js, but my eyes is not so comfortable for this. In my opinion, there is no callback-hell nowadays, it is good to go back 4 spaces world again.
-
-# Project Styles
-
-I personally do not like something need to learn so much and need to config so much before you can finally start the app.
-
-For example, recently, because I am not a python expert, I spent a 2 hours to resolve all problems in order to install and use the Apprise cli. Apprise requires so many hidden requirements, I have to figure out myself how to solve the problems by Google search for my OS. That is painful. I do not want Uptime Kuma to be like this way, so:
+## Key Technical Skills
+
+- Node.js (You should know what promises, async/await and arrow functions are, etc.)
+- Socket.io
+- SCSS
+- Vue.js
+- Bootstrap
+- SQLite
+
+## Directories
+
+- data (App data)
+- dist (Frontend build)
+- extra (Extra useful scripts)
+- public (Frontend resources for dev only)
+- server (Server source code)
+- src (Frontend source code)
+- test (unit test)
+
+## Can I create a pull request for Uptime Kuma?
+
+Generally, if the pull request is working fine and does not affect any existing logic, workflow or performance, I will merge it into the master branch once it is tested.
+
+If you are not sure, feel free to create an empty pull request draft first.
+
+### Pull Request Examples
+
+#### ✅ High - Medium Priority
+
+- Add a new notification
+- Add a chart
+- Fix a bug
+- Translations
+
+#### *️⃣ Requires one more reviewer
+
+I do not have the knowledge to test these.
+
+- Add k8s support
+
+#### *️⃣ Low Priority
+
+It changes my current workflow and requires further study.
+
+- Change my release approach
+
+#### ❌ Won't Merge
+
+- Duplicated pull request
+- Buggy
+- Existing logic is completely modified or deleted
+- A function that is completely out of scope
+
+## Project Styles
+
+I personally do not like something that needs so much learning and configuration before you can finally start the app.

 - Easy to install for non-Docker users, no native build dependency is needed (at least for x86_64), no extra config, no extra effort to get it running
-- Single container for Docker users, no very complex docker-composer file. Just map the volume and expose the port, then good to go
-- All settings in frontend.
+- Single container for Docker users, no very complex docker-compose file. Just map the volume and expose the port, then good to go
+- Settings should be configurable in the frontend. Env vars are not encouraged.
 - Easy to use

-# Tools
+## Coding Styles
+
+- 4 spaces indentation
+- Follow `.editorconfig`
+- Follow ESLint
+
+## Naming conventions
+
+- Javascript/Typescript: camelCaseType
+- SQLite: underscore_type
+- CSS/SCSS: dash-type
+
+## Tools

 - Node.js >= 14
 - Git
-- IDE that supports .editorconfig (I am using Intellji Idea)
-- A SQLite tool (I am using SQLite Expert Personal)
+- IDE that supports ESLint and EditorConfig (I am using IntelliJ IDEA)
+- A SQLite tool (SQLite Expert Personal is suggested)

-# Prepare the dev
+## Install dependencies

 ```bash
-npm install
+npm ci
 ```

-# Backend Dev
+## How to start the Backend Dev Server
+
+(2021-09-23 Update)

 ```bash
-npm run start-server
-
-# Or
-
-node server/server.js
+npm run start-server-dev
 ```

-It binds to 0.0.0.0:3001 by default.
+It binds to `0.0.0.0:3001` by default.

-## Backend Details
+### Backend Details

 It is mainly a socket.io app + express.js.

 express.js is just used for serving the frontend built files (index.html, .js and .css etc.)

-# Frontend Dev
-
-Start frontend dev server. Hot-reload enabled in this way. It binds to 0.0.0.0:3000.
-
-```bash
-npm run dev
-```
-
-PS: You can ignore those scss warnings, those warnings are from Bootstrap that I cannot fix.
-
-You can use Vue Devtool Chrome extension for debugging.
-
-After the frontend server started. It cannot connect to the websocket server even you have started the server. You need to tell the frontend that is a dev env by running this in DevTool console and refresh:
-
-```javascript
-localStorage.dev = "dev";
-```
-
-So that the frontend will try to connect websocket server in 3001.
-
-Alternately, you can specific NODE_ENV to "development".
-
-## Build the frontend
+- model/ (Object models, auto-mapped to the database table names)
+- modules/ (Modified 3rd-party modules)
+- notification-providers/ (Individual notification logic)
+- routers/ (Express routers)
+- socket-handler (Socket.io handlers)
+- server.js (Server main logic)
+
+## How to start the Frontend Dev Server
+
+1. Set the env var `NODE_ENV` to "development".
+2. Start the frontend dev server with the following command.
+
+```bash
+npm run dev
+```
+
+It binds to `0.0.0.0:3000` by default.
+
+You can use the Vue.js devtools Chrome extension for debugging.
+
+### Build the frontend

 ```bash
 npm run build
 ```

-## Frontend Details
+### Frontend Details

 Uptime Kuma Frontend is a single page application (SPA). Most paths are handled by Vue Router.

-The router in "src/main.js"
+The router is in `src/router.js`.

 As you can see, most data in the frontend is stored at the root level, even though you change the current route to any other page.

-The data and socket logic in "src/mixins/socket.js"
+The data and socket logic are in `src/mixins/socket.js`.

-# Database Migration
+## Database Migration

-TODO
+1. Create `patch-{name}.sql` in `./db/`
+2. Add your patch filename to the `patchList` list in `./server/database.js`
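To illustrate the patch format described in those two steps, here is a minimal sketch that follows the pattern of the `db/patch-*.sql` files added elsewhere in this commit (transaction wrapper plus a single schema change). The filename and column below are made-up placeholders, not part of the project:

```sql
-- db/patch-example-column.sql (hypothetical example)
-- You should not modify this once it has been pushed to GitHub.
BEGIN TRANSACTION;

ALTER TABLE monitor
    ADD example_column VARCHAR(255) DEFAULT NULL;

COMMIT;
```

The second step is then just registering that filename in `patchList` inside `./server/database.js`; check the real structure of that list before copying this sketch.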
-# Unit Test
+## Unit Test

-Yes, no unit test for now. I know it is very important, but at the same time my spare time is very limited. I want to implement my ideas first. I will go back to this in some points.
+It is end-to-end testing, using Jest and Puppeteer.
+
+```bash
+npm run build
+npm test
+```
+
+By default, the Chromium window will be shown during the test. Specify `HEADLESS_TEST=1` for terminal environments.
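As a rough illustration of what a spec in this Jest + Puppeteer setup looks like (the URL, assertion and file name below are hypothetical and not taken from the real test suite; `page` and `expect` are the globals provided by the jest-puppeteer preset and Jest):

```javascript
// test/example.spec.js (hypothetical sketch)
describe("Uptime Kuma frontend", () => {
    beforeAll(async () => {
        // `page` is a global provided by jest-puppeteer
        await page.goto("http://127.0.0.1:3000");
    });

    it("serves the dashboard page", async () => {
        // Assumed check: the page title contains the project name
        await expect(page.title()).resolves.toMatch("Uptime Kuma");
    });
});
```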
+## Update Dependencies
+
+Install `ncu`:
+https://github.com/raineorshine/npm-check-updates
+
+```bash
+ncu -u -t patch
+npm install
+```
+
+Since previously updating vite from 2.5.10 to 2.6.0 broke the application completely, from now on it should update patch release versions only.
+
+Patch release = the third digit ([Semantic Versioning](https://semver.org/))
+
+## Translations
+
+Please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages
138  README.md

@@ -1,6 +1,6 @@
 # Uptime Kuma
 
-<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a>
+<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Backers&color=brightgreen" /></a>
 
 <div align="center" width="100%">
     <img src="./public/icon.svg" width="128" alt="" />
@@ -8,115 +8,93 @@
 It is a self-hosted monitoring tool like "Uptime Robot".
 
-<img src="https://louislam.net/uptimekuma/1.jpg" width="512" alt="" />
+<img src="https://uptime.kuma.pet/img/dark.jpg" width="700" alt="" />
 
-## Features
+## 🥔 Live Demo
 
-* Monitoring uptime for HTTP(s) / TCP / Ping.
+Try it!
+
+https://demo.uptime.kuma.pet
+
+It is a 10-minute live demo; all data will be deleted after that. The server is located in Tokyo, so if you live far away from there, it may affect your experience. I suggest that you install it to try it out.
+
+The VPS is sponsored by Uptime Kuma sponsors on [Open Collective](https://opencollective.com/uptime-kuma)! Thank you so much!
+
+## ⭐ Features
+
+* Monitoring uptime for HTTP(s) / TCP / Ping / DNS Record / Push.
 * Fancy, Reactive, Fast UI/UX.
-* Notifications via Webhook, Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP) and more by Apprise.
+* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [70+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications).
 * 20 seconds interval.
+* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/languages)
+* Simple Status Page
+* Ping Chart
+* Certificate Info
 
-## How to Use
+## 🔧 How to Install
 
-### Docker
+### 🐳 Docker
 
 ```bash
-# Create a volume
 docker volume create uptime-kuma
-
-# Start the container
 docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
 ```
 
 Browse to http://localhost:3001 after it has started.
 
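Since the README only shows a plain `docker run` command, here is a minimal docker-compose sketch of the same single-container setup; the file below is an illustration of standard Compose usage, not a file shipped by this repository:

```yaml
# docker-compose.yml (illustrative sketch, not an official project file)
version: "3"
services:
  uptime-kuma:
    image: louislam/uptime-kuma:1
    container_name: uptime-kuma
    restart: always
    ports:
      - "3001:3001"
    volumes:
      - uptime-kuma:/app/data

volumes:
  uptime-kuma:
```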
-Change Port and Volume
-
-```bash
-docker run -d --restart=always -p <YOUR_PORT>:3001 -v <YOUR_DIR OR VOLUME>:/app/data --name uptime-kuma louislam/uptime-kuma:1
-```
-
-### Without Docker (x86/x64 only)
+### 💪🏻 Without Docker
 
 Required Tools: Node.js >= 14, git and pm2.
 
-(**Not recommanded for ARM CPU users.** Since there is no prebuilt for node-sqlite3, it is hard to get it running)
-
 ```bash
+# Update your npm to the latest version
+npm install npm -g
+
 git clone https://github.com/louislam/uptime-kuma.git
 cd uptime-kuma
 npm run setup
 
 # Option 1. Try it
-npm run start-server
+node server/server.js
 
-# (Recommended)
-# Option 2. Run in background using PM2
+# (Recommended) Option 2. Run in background using PM2
 # Install PM2 if you don't have it: npm install pm2 -g
-pm2 start npm --name uptime-kuma -- run start-server
-
-# Listen to different port or hostname
-pm2 start npm --name uptime-kuma -- run start-server -- --port=80 --hostname=0.0.0.0
-
-```
-
-More useful commands if you have installed.
-
-```bash
-pm2 start uptime-kuma
-pm2 restart uptime-kuma
-pm2 stop uptime-kuma
+pm2 start server/server.js --name uptime-kuma
 ```
 
 Browse to http://localhost:3001 after it has started.
 
-### (Optional) One more step for Reverse Proxy
+### Advanced Installation
 
-This is optional for someone who want to do reverse proxy.
+If you need more options or need to browse via a reverse proxy, please read:
 
-Unlikely other web apps, Uptime Kuma is based on WebSocket. You need two more headers **"Upgrade"** and **"Connection"** in order to reverse proxy WebSocket.
+https://github.com/louislam/uptime-kuma/wiki/%F0%9F%94%A7-How-to-Install
 
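The removed line above is the key detail for reverse proxying: Uptime Kuma is WebSocket-based, so the proxy must forward the `Upgrade` and `Connection` headers. A minimal nginx sketch of that idea follows; the server name and port are placeholders, and the wiki page linked above remains the authoritative reference:

```nginx
# Illustrative nginx reverse-proxy sketch for a WebSocket app (placeholder values)
server {
    listen 80;
    server_name status.example.com;

    location / {
        proxy_pass http://127.0.0.1:3001;
        proxy_http_version 1.1;
        # These two headers are what plain HTTP proxying usually omits:
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header Host $host;
    }
}
```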
-Please read wiki for more info:
-https://github.com/louislam/uptime-kuma/wiki/Reverse-Proxy
-
-## How to Update
-
-### Docker
-
-Re-pull the latest docker image and create another container with the same volume.
-
-For someone who used my "How-to-use" commands to install Uptime Kuma, you can update by this:
-
-```bash
-docker pull louislam/uptime-kuma:1
-docker stop uptime-kuma
-docker rm uptime-kuma
-docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
-```
-
-PS: For every new release, it takes some time to build the docker image, please be patient if it is not available yet.
-
-### Without Docker
-
-```bash
-git fetch --all
-git checkout 1.1.0 --force
-npm install
-npm run build
-pm2 restart uptime-kuma
-```
-
-## What's Next?
+## 🆙 How to Update
+
+Please read:
+
+https://github.com/louislam/uptime-kuma/wiki/%F0%9F%86%99-How-to-Update
+
+## 🆕 What's Next?
 
 I will mark requests/issues to the next milestone.
 
 https://github.com/louislam/uptime-kuma/milestones
 
-## More Screenshots
-
-Dark Mode:
-
-<img src="https://user-images.githubusercontent.com/1336778/128710166-908f8d88-9256-43f3-9c49-bfc2c56011d2.png" width="400" alt="" />
+Project Plan:
+
+https://github.com/louislam/uptime-kuma/projects/1
+
+## 🖼 More Screenshots
+
+Light Mode:
+
+<img src="https://uptime.kuma.pet/img/light.jpg" width="512" alt="" />
+
+Status Page:
+
+<img src="https://user-images.githubusercontent.com/1336778/134628766-a3fe0981-0926-4285-ab46-891a21c3e4cb.png" width="512" alt="" />
 
 Settings Page:
 
@@ -128,7 +106,7 @@ Telegram Notification Sample:
 
 ## Motivation
 
-* I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the close one is statping. Unfortunately, it is not stable and unmaintained.
+* I was looking for a self-hosted monitoring tool like "Uptime Robot", but it was hard to find a suitable one. One of the closest is statping. Unfortunately, it is not stable and is unmaintained.
 * Want to build a fancy UI.
 * Learn Vue 3 and vite.js.
 * Show the power of Bootstrap 5.
@@ -137,10 +115,24 @@ Telegram Notification Sample:
 
 If you love this project, please consider giving me a ⭐.
 
+## 🗣️ Discussion
+
+### Issues Page
+
+You can discuss or ask for help in [Issues](https://github.com/louislam/uptime-kuma/issues).
+
+### Subreddit
+
+My Reddit account: louislamlam
+You can mention me if you ask a question on Reddit.
+https://www.reddit.com/r/UptimeKuma/
+
 ## Contribute
 
-If you want to report a bug or request a new feature. Free feel to open a new issue.
+If you want to report a bug or request a new feature, feel free to open a [new issue](https://github.com/louislam/uptime-kuma/issues).
 
-If you want to modify Uptime Kuma, this guideline maybe useful for you: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md
+If you want to translate Uptime Kuma into your language, please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages
 
-English proofreading is needed too, because my grammar is not that great sadly. Feel free to correct my grammar in this Readme, source code or wiki.
+If you want to modify Uptime Kuma, this guideline may be useful for you: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md
+
+English proofreading is needed too because my grammar is not that great, sadly. Feel free to correct my grammar in this readme, source code, or wiki.
31  SECURITY.md (new file)

@@ -0,0 +1,31 @@
+# Security Policy
+
+## Supported Versions
+
+Use this section to tell people about which versions of your project are
+currently being supported with security updates.
+
+### Uptime Kuma Versions
+
+| Version | Supported          |
+| ------- | ------------------ |
+| 1.7.X   | :white_check_mark: |
+| < 1.7   | ❌                 |
+
+### Upgradable Docker Tags
+
+| Tag            | Supported          |
+| -------------- | ------------------ |
+| 1              | :white_check_mark: |
+| 1-debian       | :white_check_mark: |
+| 1-alpine       | :white_check_mark: |
+| latest         | :white_check_mark: |
+| debian         | :white_check_mark: |
+| alpine         | :white_check_mark: |
+| All other tags | ❌                 |
+
+## Reporting a Vulnerability
+
+Please report security issues to uptime@kuma.pet.
+
+Do not use the issue tracker or discuss it in public, as it will cause more damage.
11  babel.config.js (new file)

@@ -0,0 +1,11 @@
+const config = {};
+
+if (process.env.TEST_FRONTEND) {
+    config.presets = ["@babel/preset-env"];
+}
+
+if (process.env.TEST_BACKEND) {
+    config.plugins = ["babel-plugin-rewire"];
+}
+
+module.exports = config;
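This config only activates the Babel preset or the rewire plugin when the matching environment variable is set, so a test run would export one of them before invoking Jest with the corresponding config file added below. The exact npm scripts are not part of this diff, so treat these commands as an illustration of how the pieces could be wired together:

```bash
# Illustration only: backend tests with babel-plugin-rewire enabled,
# pointing Jest at the backend config introduced in this commit.
TEST_BACKEND=1 npx jest --config config/jest-backend.config.js

# Illustration only: frontend tests with @babel/preset-env enabled.
TEST_FRONTEND=1 npx jest --config config/jest-frontend.config.js
```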
5  config/jest-backend.config.js (new file)

@@ -0,0 +1,5 @@
+module.exports = {
+    "rootDir": "..",
+    "testRegex": "./test/backend.spec.js",
+};

5  config/jest-frontend.config.js (new file)

@@ -0,0 +1,5 @@
+module.exports = {
+    "rootDir": "..",
+    "testRegex": "./test/frontend.spec.js",
+};

6  config/jest-puppeteer.config.js (new file)

@@ -0,0 +1,6 @@
+module.exports = {
+    "launch": {
+        "headless": process.env.HEADLESS_TEST || false,
+        "userDataDir": "./data/test-chrome-profile",
+    }
+};

11  config/jest.config.js (new file)

@@ -0,0 +1,11 @@
+module.exports = {
+    "verbose": true,
+    "preset": "jest-puppeteer",
+    "globals": {
+        "__DEV__": true
+    },
+    "testRegex": "./test/e2e.spec.js",
+    "rootDir": "..",
+    "testTimeout": 30000,
+};
24  config/vite.config.js (new file)

@@ -0,0 +1,24 @@
+import legacy from "@vitejs/plugin-legacy";
+import vue from "@vitejs/plugin-vue";
+import { defineConfig } from "vite";
+
+const postCssScss = require("postcss-scss");
+const postcssRTLCSS = require("postcss-rtlcss");
+
+// https://vitejs.dev/config/
+export default defineConfig({
+    plugins: [
+        vue(),
+        legacy({
+            targets: ["ie > 11"],
+            additionalLegacyPolyfills: ["regenerator-runtime/runtime"]
+        })
+    ],
+    css: {
+        postcss: {
+            "parser": postCssScss,
+            "map": false,
+            "plugins": [postcssRTLCSS]
+        }
+    },
+});
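The package.json scripts are not part of this excerpt, but with the Vite config now living under `config/`, the dev server and build presumably point at it explicitly. A hedged example of standard Vite CLI usage with a non-default config location:

```bash
# Illustration of standard Vite CLI usage; verify against the project's npm scripts.
npx vite --config ./config/vite.config.js            # dev server
npx vite build --config ./config/vite.config.js      # production build into dist/
```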
10
db/patch-2fa.sql
Normal file
10
db/patch-2fa.sql
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
|
ALTER TABLE user
|
||||||
|
ADD twofa_secret VARCHAR(64);
|
||||||
|
|
||||||
|
ALTER TABLE user
|
||||||
|
ADD twofa_status BOOLEAN default 0 NOT NULL;
|
||||||
|
|
||||||
|
COMMIT;
|
7
db/patch-add-retry-interval-monitor.sql
Normal file
7
db/patch-add-retry-interval-monitor.sql
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD retry_interval INTEGER default 0 not null;
|
||||||
|
|
||||||
|
COMMIT;
|
30
db/patch-group-table.sql
Normal file
30
db/patch-group-table.sql
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
|
create table `group`
|
||||||
|
(
|
||||||
|
id INTEGER not null
|
||||||
|
constraint group_pk
|
||||||
|
primary key autoincrement,
|
||||||
|
name VARCHAR(255) not null,
|
||||||
|
created_date DATETIME default (DATETIME('now')) not null,
|
||||||
|
public BOOLEAN default 0 not null,
|
||||||
|
active BOOLEAN default 1 not null,
|
||||||
|
weight BOOLEAN NOT NULL DEFAULT 1000
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE [monitor_group]
|
||||||
|
(
|
||||||
|
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||||
|
[monitor_id] INTEGER NOT NULL REFERENCES [monitor] ([id]) ON DELETE CASCADE ON UPDATE CASCADE,
|
||||||
|
[group_id] INTEGER NOT NULL REFERENCES [group] ([id]) ON DELETE CASCADE ON UPDATE CASCADE,
|
||||||
|
weight BOOLEAN NOT NULL DEFAULT 1000
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX [fk]
|
||||||
|
ON [monitor_group] (
|
||||||
|
[monitor_id],
|
||||||
|
[group_id]);
|
||||||
|
|
||||||
|
|
||||||
|
COMMIT;
|
10
db/patch-improve-performance.sql
Normal file
10
db/patch-improve-performance.sql
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
|
-- For sendHeartbeatList
|
||||||
|
CREATE INDEX monitor_time_index ON heartbeat (monitor_id, time);
|
||||||
|
|
||||||
|
-- For sendImportantHeartbeatList
|
||||||
|
CREATE INDEX monitor_important_time_index ON heartbeat (monitor_id, important,time);
|
||||||
|
|
||||||
|
COMMIT;
|
18
db/patch-incident-table.sql
Normal file
18
db/patch-incident-table.sql
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
|
create table incident
|
||||||
|
(
|
||||||
|
id INTEGER not null
|
||||||
|
constraint incident_pk
|
||||||
|
primary key autoincrement,
|
||||||
|
title VARCHAR(255) not null,
|
||||||
|
content TEXT not null,
|
||||||
|
style VARCHAR(30) default 'warning' not null,
|
||||||
|
created_date DATETIME default (DATETIME('now')) not null,
|
||||||
|
last_updated_date DATETIME,
|
||||||
|
pin BOOLEAN default 1 not null,
|
||||||
|
active BOOLEAN default 1 not null
|
||||||
|
);
|
||||||
|
|
||||||
|
COMMIT;
|
7
db/patch-monitor-push_token.sql
Normal file
7
db/patch-monitor-push_token.sql
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD push_token VARCHAR(20) DEFAULT NULL;
|
||||||
|
|
||||||
|
COMMIT;
|
22
db/patch-setting-value-type.sql
Normal file
22
db/patch-setting-value-type.sql
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
|
-- Generated by Intellij IDEA
|
||||||
|
create table setting_dg_tmp
|
||||||
|
(
|
||||||
|
id INTEGER
|
||||||
|
primary key autoincrement,
|
||||||
|
key VARCHAR(200) not null
|
||||||
|
unique,
|
||||||
|
value TEXT,
|
||||||
|
type VARCHAR(20)
|
||||||
|
);
|
||||||
|
|
||||||
|
insert into setting_dg_tmp(id, key, value, type) select id, key, value, type from setting;
|
||||||
|
|
||||||
|
drop table setting;
|
||||||
|
|
||||||
|
alter table setting_dg_tmp rename to setting;
|
||||||
|
|
||||||
|
|
||||||
|
COMMIT;
|
19
db/patch10.sql
Normal file
19
db/patch10.sql
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
|
CREATE TABLE tag (
|
||||||
|
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
color VARCHAR(255) NOT NULL,
|
||||||
|
created_date DATETIME DEFAULT (DATETIME('now')) NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE monitor_tag (
|
||||||
|
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
|
||||||
|
monitor_id INTEGER NOT NULL,
|
||||||
|
tag_id INTEGER NOT NULL,
|
||||||
|
value TEXT,
|
||||||
|
CONSTRAINT FK_tag FOREIGN KEY (tag_id) REFERENCES tag(id) ON DELETE CASCADE ON UPDATE CASCADE,
|
||||||
|
CONSTRAINT FK_monitor FOREIGN KEY (monitor_id) REFERENCES monitor(id) ON DELETE CASCADE ON UPDATE CASCADE
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX monitor_tag_monitor_id_index ON monitor_tag (monitor_id);
|
||||||
|
CREATE INDEX monitor_tag_tag_id_index ON monitor_tag (tag_id);
|
10
db/patch7.sql
Normal file
10
db/patch7.sql
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD dns_resolve_type VARCHAR(5);
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD dns_resolve_server VARCHAR(255);
|
||||||
|
|
||||||
|
COMMIT;
|
7
db/patch8.sql
Normal file
7
db/patch8.sql
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD dns_last_result VARCHAR(255);
|
||||||
|
|
||||||
|
COMMIT;
|
7
db/patch9.sql
Normal file
7
db/patch9.sql
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
|
ALTER TABLE notification
|
||||||
|
ADD is_default BOOLEAN default 0 NOT NULL;
|
||||||
|
|
||||||
|
COMMIT;
|
8
docker/alpine-base.dockerfile
Normal file
8
docker/alpine-base.dockerfile
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
# DON'T UPDATE TO alpine3.13, 1.14, see #41.
|
||||||
|
FROM node:14-alpine3.12
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Install apprise, iputils for non-root ping, setpriv
|
||||||
|
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib && \
|
||||||
|
pip3 --no-cache-dir install apprise && \
|
||||||
|
rm -rf /root/.cache
|
12
docker/debian-base.dockerfile
Normal file
12
docker/debian-base.dockerfile
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
# DON'T UPDATE TO node:14-bullseye-slim, see #372.
|
||||||
|
# If the image changed, the second stage image should be changed too
|
||||||
|
FROM node:14-buster-slim
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Install Apprise, add sqlite3 cli for debugging in the future, iputils-ping for ping, util-linux for setpriv
|
||||||
|
# Stupid python3 and python3-pip actually install a lot of useless things into Debian, specific --no-install-recommends to skip them, make the base even smaller than alpine!
|
||||||
|
RUN apt update && \
|
||||||
|
apt --yes --no-install-recommends install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
|
||||||
|
sqlite3 iputils-ping util-linux dumb-init && \
|
||||||
|
pip3 --no-cache-dir install apprise && \
|
||||||
|
rm -rf /var/lib/apt/lists/*
|
51
docker/dockerfile
Normal file
51
docker/dockerfile
Normal file
|
@ -0,0 +1,51 @@
|
||||||
|
FROM louislam/uptime-kuma:base-debian AS build
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
RUN npm ci && \
|
||||||
|
npm run build && \
|
||||||
|
npm ci --production && \
|
||||||
|
chmod +x /app/extra/entrypoint.sh
|
||||||
|
|
||||||
|
|
||||||
|
FROM louislam/uptime-kuma:base-debian AS release
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy app files from build layer
|
||||||
|
COPY --from=build /app /app
|
||||||
|
|
||||||
|
EXPOSE 3001
|
||||||
|
VOLUME ["/app/data"]
|
||||||
|
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
|
||||||
|
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
|
||||||
|
CMD ["node", "server/server.js"]
|
||||||
|
|
||||||
|
FROM release AS nightly
|
||||||
|
RUN npm run mark-as-nightly
|
||||||
|
|
||||||
|
# Upload the artifact to Github
|
||||||
|
FROM louislam/uptime-kuma:base-debian AS upload-artifact
|
||||||
|
WORKDIR /
|
||||||
|
RUN apt update && \
|
||||||
|
apt --yes install curl file
|
||||||
|
|
||||||
|
ARG GITHUB_TOKEN
|
||||||
|
ARG TARGETARCH
|
||||||
|
ARG PLATFORM=debian
|
||||||
|
ARG VERSION
|
||||||
|
ARG FILE=$PLATFORM-$TARGETARCH-$VERSION.tar.gz
|
||||||
|
ARG DIST=dist.tar.gz
|
||||||
|
|
||||||
|
COPY --from=build /app /app
|
||||||
|
RUN chmod +x /app/extra/upload-github-release-asset.sh
|
||||||
|
|
||||||
|
# Full Build
|
||||||
|
# RUN tar -zcvf $FILE app
|
||||||
|
# RUN /app/extra/upload-github-release-asset.sh github_api_token=$GITHUB_TOKEN owner=louislam repo=uptime-kuma tag=$VERSION filename=$FILE
|
||||||
|
|
||||||
|
# Dist only
|
||||||
|
RUN cd /app && tar -zcvf $DIST dist
|
||||||
|
RUN /app/extra/upload-github-release-asset.sh github_api_token=$GITHUB_TOKEN owner=louislam repo=uptime-kuma tag=$VERSION filename=/app/$DIST
|
||||||
|
|
26
docker/dockerfile-alpine
Normal file
26
docker/dockerfile-alpine
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
FROM louislam/uptime-kuma:base-alpine AS build
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
RUN npm ci && \
|
||||||
|
npm run build && \
|
||||||
|
npm ci --production && \
|
||||||
|
chmod +x /app/extra/entrypoint.sh
|
||||||
|
|
||||||
|
|
||||||
|
FROM louislam/uptime-kuma:base-alpine AS release
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy app files from build layer
|
||||||
|
COPY --from=build /app /app
|
||||||
|
|
||||||
|
EXPOSE 3001
|
||||||
|
VOLUME ["/app/data"]
|
||||||
|
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
|
||||||
|
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
|
||||||
|
CMD ["node", "server/server.js"]
|
||||||
|
|
||||||
|
FROM release AS nightly
|
||||||
|
RUN npm run mark-as-nightly
|
28
dockerfile
28
dockerfile
|
@ -1,28 +0,0 @@
|
||||||
# DON'T UPDATE TO alpine3.13, 1.14, see #41.
|
|
||||||
FROM node:14-alpine3.12 AS release
|
|
||||||
WORKDIR /app
|
|
||||||
|
|
||||||
# split the sqlite install here, so that it can caches the arm prebuilt
|
|
||||||
RUN apk add --no-cache --virtual .build-deps make g++ python3 python3-dev && \
|
|
||||||
ln -s /usr/bin/python3 /usr/bin/python && \
|
|
||||||
npm install @louislam/sqlite3@5.0.3 bcrypt@5.0.1 && \
|
|
||||||
apk del .build-deps && \
|
|
||||||
rm -f /usr/bin/python
|
|
||||||
|
|
||||||
# Touching above code may causes sqlite3 re-compile again, painful slow.
|
|
||||||
|
|
||||||
# Install apprise
|
|
||||||
RUN apk add --no-cache python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib
|
|
||||||
RUN pip3 --no-cache-dir install apprise && \
|
|
||||||
rm -rf /root/.cache
|
|
||||||
|
|
||||||
COPY . .
|
|
||||||
RUN npm install && npm run build && npm prune
|
|
||||||
|
|
||||||
EXPOSE 3001
|
|
||||||
VOLUME ["/app/data"]
|
|
||||||
HEALTHCHECK --interval=60s --timeout=30s --start-period=300s CMD node extra/healthcheck.js
|
|
||||||
CMD ["npm", "run", "start-server"]
|
|
||||||
|
|
||||||
FROM release AS nightly
|
|
||||||
RUN npm run mark-as-nightly
|
|
6  ecosystem.config.js (new file)

@@ -0,0 +1,6 @@
+module.exports = {
+    apps: [{
+        name: "uptime-kuma",
+        script: "./server/server.js",
+    }]
+}
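This PM2 ecosystem file mirrors the `pm2 start server/server.js --name uptime-kuma` command in the README, so the app can plausibly be started by pointing PM2 at the file instead. The commands below are standard PM2 usage shown as an illustration, not documented project scripts:

```bash
# Standard PM2 workflow (illustration only)
pm2 start ecosystem.config.js

# Persist the process list across reboots
pm2 save
pm2 startup
```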
@@ -1 +1,2 @@
+# Must enable File Sharing in Docker Desktop
 docker run -it --rm -v ${pwd}:/app louislam/batsh /usr/bin/batsh bash --output ./install.sh ./extra/install.batsh
57  extra/download-dist.js (new file)

@@ -0,0 +1,57 @@
+console.log("Downloading dist");
+const https = require("https");
+const tar = require("tar");
+
+const packageJSON = require("../package.json");
+const fs = require("fs");
+const version = packageJSON.version;
+
+const filename = "dist.tar.gz";
+
+const url = `https://github.com/louislam/uptime-kuma/releases/download/${version}/${filename}`;
+download(url);
+
+function download(url) {
+    console.log(url);
+
+    https.get(url, (response) => {
+        if (response.statusCode === 200) {
+            console.log("Extracting dist...");
+
+            if (fs.existsSync("./dist")) {
+
+                if (fs.existsSync("./dist-backup")) {
+                    fs.rmdirSync("./dist-backup", {
+                        recursive: true
+                    });
+                }
+
+                fs.renameSync("./dist", "./dist-backup");
+            }
+
+            const tarStream = tar.x({
+                cwd: "./",
+            });
+
+            tarStream.on("close", () => {
+                fs.rmdirSync("./dist-backup", {
+                    recursive: true
+                });
+                console.log("Done");
+            });
+
+            tarStream.on("error", () => {
+                if (fs.existsSync("./dist-backup")) {
+                    fs.renameSync("./dist-backup", "./dist");
+                }
+                console.log("Done");
+            });
+
+            response.pipe(tarStream);
+        } else if (response.statusCode === 302) {
+            download(response.headers.location);
+        } else {
+            console.log("dist not found");
+        }
+    });
+}
21  extra/entrypoint.sh (new file)

@@ -0,0 +1,21 @@
+#!/usr/bin/env sh
+
+# set -e Exit the script if an error happens
+set -e
+PUID=${PUID=0}
+PGID=${PGID=0}
+
+files_ownership () {
+    # -h Changes the ownership of an encountered symbolic link and not that of the file or directory pointed to by the symbolic link.
+    # -R Recursively descends the specified directories
+    # -c Like verbose but report only when a change is made
+    chown -hRc "$PUID":"$PGID" /app/data
+}
+
+echo "==> Performing startup jobs and maintenance tasks"
+files_ownership
+
+echo "==> Starting application with user $PUID group $PGID"
+
+# --clear-groups Clear supplementary groups.
+exec setpriv --reuid "$PUID" --regid "$PGID" --clear-groups "$@"
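The entrypoint above fixes data ownership and then drops privileges with `setpriv`, driven by the `PUID`/`PGID` environment variables (defaulting to 0, i.e. root). A hedged usage sketch, assuming these variables are simply passed through `docker run` to the standard image (values are placeholders):

```bash
# Illustration: run the container so /app/data is owned by host UID/GID 1000
# before the server starts as that user. Verify against the project's docs.
docker run -d --restart=always \
  -e PUID=1000 -e PGID=1000 \
  -p 3001:3001 -v uptime-kuma:/app/data \
  --name uptime-kuma louislam/uptime-kuma:1
```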
@@ -1,19 +1,34 @@
-let http = require("http");
+/*
+ * This script should be run after a period of time (180s), because the server may need some time to prepare.
+ */
+process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
+
+let client;
+
+if (process.env.SSL_KEY && process.env.SSL_CERT) {
+    client = require("https");
+} else {
+    client = require("http");
+}
+
 let options = {
-    host: "localhost",
-    port: "3001",
-    timeout: 2000,
+    host: process.env.HOST || "127.0.0.1",
+    port: parseInt(process.env.PORT) || 3001,
+    timeout: 28 * 1000,
 };
-let request = http.request(options, (res) => {
-    console.log(`STATUS: ${res.statusCode}`);
-    if (res.statusCode == 200) {
+
+let request = client.request(options, (res) => {
+    console.log(`Health Check OK [Res Code: ${res.statusCode}]`);
+    if (res.statusCode === 200) {
         process.exit(0);
     } else {
         process.exit(1);
     }
 });
 
 request.on("error", function (err) {
-    console.log("ERROR");
+    console.error("Health Check ERROR");
     process.exit(1);
 });
 
 request.end();
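For reference, the Docker images added elsewhere in this commit invoke this script as a container health check; the following directive appears verbatim in the new `docker/` Dockerfiles later in this diff:

```dockerfile
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
```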
245  extra/install.batsh (new file)

@@ -0,0 +1,245 @@
|
||||||
|
// install.sh is generated by ./extra/install.batsh, do not modify it directly.
|
||||||
|
// "npm run compile-install-script" to compile install.sh
|
||||||
|
// The command is working on Windows PowerShell and Docker for Windows only.
|
||||||
|
|
||||||
|
|
||||||
|
// curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
|
||||||
|
println("=====================");
|
||||||
|
println("Uptime Kuma Installer");
|
||||||
|
println("=====================");
|
||||||
|
println("Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian");
|
||||||
|
println("---------------------------------------");
|
||||||
|
println("This script is designed for Linux and basic usage.");
|
||||||
|
println("For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation");
|
||||||
|
println("---------------------------------------");
|
||||||
|
println("");
|
||||||
|
println("Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2");
|
||||||
|
println("Docker - Install Uptime Kuma Docker container");
|
||||||
|
println("");
|
||||||
|
|
||||||
|
if ("$1" != "") {
|
||||||
|
type = "$1";
|
||||||
|
} else {
|
||||||
|
call("read", "-p", "Which installation method do you prefer? [DOCKER/local]: ", "type");
|
||||||
|
}
|
||||||
|
|
||||||
|
defaultPort = "3001";
|
||||||
|
|
||||||
|
function checkNode() {
|
||||||
|
bash("nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')");
|
||||||
|
println("Node Version: " ++ nodeVersion);
|
||||||
|
|
||||||
|
if (nodeVersion < "12") {
|
||||||
|
println("Error: Required Node.js 14");
|
||||||
|
call("exit", "1");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (nodeVersion == "12") {
|
||||||
|
println("Warning: NodeJS " ++ nodeVersion ++ " is not tested.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function deb() {
|
||||||
|
bash("nodeCheck=$(node -v)");
|
||||||
|
bash("apt --yes update");
|
||||||
|
|
||||||
|
if (nodeCheck != "") {
|
||||||
|
checkNode();
|
||||||
|
} else {
|
||||||
|
|
||||||
|
// Old nodejs binary name is "nodejs"
|
||||||
|
bash("check=$(nodejs --version)");
|
||||||
|
if (check != "") {
|
||||||
|
println("Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old.");
|
||||||
|
bash("exit 1");
|
||||||
|
}
|
||||||
|
|
||||||
|
bash("curlCheck=$(curl --version)");
|
||||||
|
if (curlCheck == "") {
|
||||||
|
println("Installing Curl");
|
||||||
|
bash("apt --yes install curl");
|
||||||
|
}
|
||||||
|
|
||||||
|
println("Installing Node.js 14");
|
||||||
|
bash("curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt");
|
||||||
|
bash("apt --yes install nodejs");
|
||||||
|
bash("node -v");
|
||||||
|
|
||||||
|
bash("nodeCheckAgain=$(node -v)");
|
||||||
|
|
||||||
|
if (nodeCheckAgain == "") {
|
||||||
|
println("Error during Node.js installation");
|
||||||
|
bash("exit 1");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
bash("check=$(git --version)");
|
||||||
|
if (check == "") {
|
||||||
|
println("Installing Git");
|
||||||
|
bash("apt --yes install git");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (type == "local") {
|
||||||
|
defaultInstallPath = "/opt/uptime-kuma";
|
||||||
|
|
||||||
|
if (exists("/etc/redhat-release")) {
|
||||||
|
os = call("cat", "/etc/redhat-release");
|
||||||
|
distribution = "rhel";
|
||||||
|
|
||||||
|
} else if (exists("/etc/issue")) {
|
||||||
|
bash("os=$(head -n1 /etc/issue | cut -f 1 -d ' ')");
|
||||||
|
if (os == "Ubuntu") {
|
||||||
|
distribution = "ubuntu";
|
||||||
|
}
|
||||||
|
if (os == "Debian") {
|
||||||
|
distribution = "debian";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
bash("arch=$(uname -i)");
|
||||||
|
|
||||||
|
println("Your OS: " ++ os);
|
||||||
|
println("Distribution: " ++ distribution);
|
||||||
|
println("Arch: " ++ arch);
|
||||||
|
|
||||||
|
if ("$3" != "") {
|
||||||
|
port = "$3";
|
||||||
|
} else {
|
||||||
|
call("read", "-p", "Listening Port [$defaultPort]: ", "port");
|
||||||
|
|
||||||
|
if (port == "") {
|
||||||
|
port = defaultPort;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if ("$2" != "") {
|
||||||
|
installPath = "$2";
|
||||||
|
} else {
|
||||||
|
call("read", "-p", "Installation Path [$defaultInstallPath]: ", "installPath");
|
||||||
|
|
||||||
|
if (installPath == "") {
|
||||||
|
installPath = defaultInstallPath;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CentOS
|
||||||
|
if (distribution == "rhel") {
|
||||||
|
bash("nodeCheck=$(node -v)");
|
||||||
|
|
||||||
|
if (nodeCheck != "") {
|
||||||
|
checkNode();
|
||||||
|
} else {
|
||||||
|
|
||||||
|
bash("curlCheck=$(curl --version)");
|
||||||
|
if (curlCheck == "") {
|
||||||
|
println("Installing Curl");
|
||||||
|
bash("yum -y -q install curl");
|
||||||
|
}
|
||||||
|
|
||||||
|
println("Installing Node.js 14");
|
||||||
|
bash("curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt");
|
||||||
|
bash("yum install -y -q nodejs");
|
||||||
|
bash("node -v");
|
||||||
|
|
||||||
|
bash("nodeCheckAgain=$(node -v)");
|
||||||
|
|
||||||
|
if (nodeCheckAgain == "") {
|
||||||
|
println("Error during Node.js installation");
|
||||||
|
bash("exit 1");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
bash("check=$(git --version)");
|
||||||
|
if (check == "") {
|
||||||
|
println("Installing Git");
|
||||||
|
bash("yum -y -q install git");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ubuntu
|
||||||
|
} else if (distribution == "ubuntu") {
|
||||||
|
deb();
|
||||||
|
|
||||||
|
// Debian
|
||||||
|
} else if (distribution == "debian") {
|
||||||
|
deb();
|
||||||
|
|
||||||
|
} else {
|
||||||
|
// Unknown distribution
|
||||||
|
error = 0;
|
||||||
|
|
||||||
|
bash("check=$(git --version)");
|
||||||
|
if (check == "") {
|
||||||
|
error = 1;
|
||||||
|
println("Error: git is missing");
|
||||||
|
}
|
||||||
|
|
||||||
|
bash("check=$(node -v)");
|
||||||
|
if (check == "") {
|
||||||
|
error = 1;
|
||||||
|
println("Error: node is missing");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error > 0) {
|
||||||
|
println("Please install above missing software");
|
||||||
|
bash("exit 1");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
bash("check=$(pm2 --version)");
|
||||||
|
if (check == "") {
|
||||||
|
println("Installing PM2");
|
||||||
|
bash("npm install pm2 -g");
|
||||||
|
bash("pm2 startup");
|
||||||
|
}
|
||||||
|
|
||||||
|
bash("mkdir -p $installPath");
|
||||||
|
bash("cd $installPath");
|
||||||
|
bash("git clone https://github.com/louislam/uptime-kuma.git .");
|
||||||
|
bash("npm run setup");
|
||||||
|
|
||||||
|
bash("pm2 start server/server.js --name uptime-kuma -- --port=$port");
|
||||||
|
|
||||||
|
} else {
|
||||||
|
defaultVolume = "uptime-kuma";
|
||||||
|
|
||||||
|
bash("check=$(docker -v)");
|
||||||
|
if (check == "") {
|
||||||
|
println("Error: docker is not found!");
|
||||||
|
bash("exit 1");
|
||||||
|
}
|
||||||
|
|
||||||
|
bash("check=$(docker info)");
|
||||||
|
|
||||||
|
bash("if [[ \"$check\" == *\"Is the docker daemon running\"* ]]; then
|
||||||
|
\"echo\" \"Error: docker is not running\"
|
||||||
|
\"exit\" \"1\"
|
||||||
|
fi");
|
||||||
|
|
||||||
|
if ("$3" != "") {
|
||||||
|
port = "$3";
|
||||||
|
} else {
|
||||||
|
call("read", "-p", "Expose Port [$defaultPort]: ", "port");
|
||||||
|
|
||||||
|
if (port == "") {
|
||||||
|
port = defaultPort;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if ("$2" != "") {
|
||||||
|
volume = "$2";
|
||||||
|
} else {
|
||||||
|
call("read", "-p", "Volume Name [$defaultVolume]: ", "volume");
|
||||||
|
|
||||||
|
if (volume == "") {
|
||||||
|
volume = defaultVolume;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
println("Port: $port");
|
||||||
|
println("Volume: $volume");
|
||||||
|
bash("docker volume create $volume");
|
||||||
|
bash("docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1");
|
||||||
|
}
|
||||||
|
|
||||||
|
println("http://localhost:$port");
|
|
@ -6,12 +6,14 @@ const Database = require("../server/database");
const { R } = require("redbean-node");
const readline = require("readline");
const { initJWTSecret } = require("../server/util-server");
const args = require("args-parser")(process.argv);
const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
});

(async () => {
    Database.init(args);
    await Database.connect();

    try {
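For context on the `args-parser` line added above: it turns `--key=value` style CLI arguments into a plain object, which `Database.init()` later reads (for example `args["data-dir"]`). A minimal sketch, assuming a local invocation; the flag values shown are only examples:

```js
// Sketch: what require("args-parser")(process.argv) yields for an invocation such as
//   node extra/reset-password.js --data-dir=./data/test/
// Expected shape (example values): { "data-dir": "./data/test/" }
// Bare flags (e.g. --test) are reported as true.
const args = require("args-parser")(process.argv);
console.log(args);
```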
144
extra/simple-dns-server.js
Normal file

@ -0,0 +1,144 @@
/*
 * Simple DNS Server
 * For testing DNS monitoring type, dev only
 */
const dns2 = require("dns2");

const { Packet } = dns2;

const server = dns2.createServer({
    udp: true
});

server.on("request", (request, send, rinfo) => {
    for (let question of request.questions) {
        console.log(question.name, type(question.type), question.class);

        const response = Packet.createResponseFromRequest(request);

        if (question.name === "existing.com") {

            if (question.type === Packet.TYPE.A) {
                response.answers.push({
                    name: question.name,
                    type: question.type,
                    class: question.class,
                    ttl: 300,
                    address: "1.2.3.4"
                });
            } if (question.type === Packet.TYPE.AAAA) {
                response.answers.push({
                    name: question.name,
                    type: question.type,
                    class: question.class,
                    ttl: 300,
                    address: "fe80::::1234:5678:abcd:ef00",
                });
            } else if (question.type === Packet.TYPE.CNAME) {
                response.answers.push({
                    name: question.name,
                    type: question.type,
                    class: question.class,
                    ttl: 300,
                    domain: "cname1.existing.com",
                });
            } else if (question.type === Packet.TYPE.MX) {
                response.answers.push({
                    name: question.name,
                    type: question.type,
                    class: question.class,
                    ttl: 300,
                    exchange: "mx1.existing.com",
                    priority: 5
                });
            } else if (question.type === Packet.TYPE.NS) {
                response.answers.push({
                    name: question.name,
                    type: question.type,
                    class: question.class,
                    ttl: 300,
                    ns: "ns1.existing.com",
                });
            } else if (question.type === Packet.TYPE.SOA) {
                response.answers.push({
                    name: question.name,
                    type: question.type,
                    class: question.class,
                    ttl: 300,
                    primary: "existing.com",
                    admin: "admin@existing.com",
                    serial: 2021082701,
                    refresh: 300,
                    retry: 3,
                    expiration: 10,
                    minimum: 10,
                });
            } else if (question.type === Packet.TYPE.SRV) {
                response.answers.push({
                    name: question.name,
                    type: question.type,
                    class: question.class,
                    ttl: 300,
                    priority: 5,
                    weight: 5,
                    port: 8080,
                    target: "srv1.existing.com",
                });
            } else if (question.type === Packet.TYPE.TXT) {
                response.answers.push({
                    name: question.name,
                    type: question.type,
                    class: question.class,
                    ttl: 300,
                    data: "#v=spf1 include:_spf.existing.com ~all",
                });
            } else if (question.type === Packet.TYPE.CAA) {
                response.answers.push({
                    name: question.name,
                    type: question.type,
                    class: question.class,
                    ttl: 300,
                    flags: 0,
                    tag: "issue",
                    value: "ca.existing.com",
                });
            }

        }

        if (question.name === "4.3.2.1.in-addr.arpa") {
            if (question.type === Packet.TYPE.PTR) {
                response.answers.push({
                    name: question.name,
                    type: question.type,
                    class: question.class,
                    ttl: 300,
                    domain: "ptr1.existing.com",
                });
            }
        }

        send(response);
    }
});

server.on("listening", () => {
    console.log("Listening");
    console.log(server.addresses());
});

server.on("close", () => {
    console.log("server closed");
});

server.listen({
    udp: 5300
});

function type(code) {
    for (let name in Packet.TYPE) {
        if (Packet.TYPE[name] === code) {
            return name;
        }
    }
}
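To exercise the test server above, you can point a resolver at it. A minimal sketch using Node's built-in `dns.promises.Resolver`; the 127.0.0.1:5300 address and the `existing.com` records come from the server above, the expected outputs are only illustrative:

```js
// Query the local test DNS server started by simple-dns-server.js.
const { Resolver } = require("dns").promises;

const resolver = new Resolver();
resolver.setServers([ "127.0.0.1:5300" ]);

(async () => {
    console.log(await resolver.resolve4("existing.com"));   // e.g. [ "1.2.3.4" ]
    console.log(await resolver.resolveTxt("existing.com")); // e.g. [ [ "#v=spf1 include:_spf.existing.com ~all" ] ]
})();
```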
3
extra/update-language-files/.gitignore
vendored
Normal file

@ -0,0 +1,3 @@
package-lock.json
test.js
languages/
84
extra/update-language-files/index.js
Normal file

@ -0,0 +1,84 @@
// Need to use ES6 to read language files

import fs from "fs";
import path from "path";
import util from "util";

// https://stackoverflow.com/questions/13786160/copy-folder-recursively-in-node-js
/**
 * Look ma, it's cp -R.
 * @param {string} src  The path to the thing to copy.
 * @param {string} dest The path to the new copy.
 */
const copyRecursiveSync = function (src, dest) {
    let exists = fs.existsSync(src);
    let stats = exists && fs.statSync(src);
    let isDirectory = exists && stats.isDirectory();

    if (isDirectory) {
        fs.mkdirSync(dest);
        fs.readdirSync(src).forEach(function (childItemName) {
            copyRecursiveSync(path.join(src, childItemName),
                path.join(dest, childItemName));
        });
    } else {
        fs.copyFileSync(src, dest);
    }
};

console.log("Arguments:", process.argv)
const baseLangCode = process.argv[2] || "en";
console.log("Base Lang: " + baseLangCode);
fs.rmdirSync("./languages", { recursive: true });
copyRecursiveSync("../../src/languages", "./languages");

const en = (await import("./languages/en.js")).default;
const baseLang = (await import(`./languages/${baseLangCode}.js`)).default;
const files = fs.readdirSync("./languages");
console.log("Files:", files);

for (const file of files) {
    if (!file.endsWith(".js")) {
        console.log("Skipping " + file)
        continue;
    }

    console.log("Processing " + file);
    const lang = await import("./languages/" + file);

    let obj;

    if (lang.default) {
        obj = lang.default;
    } else {
        console.log("Empty file");
        obj = {
            languageName: "<Your Language name in your language (not in English)>"
        };
    }

    // En first
    for (const key in en) {
        if (! obj[key]) {
            obj[key] = en[key];
        }
    }

    if (baseLang !== en) {
        // Base second
        for (const key in baseLang) {
            if (! obj[key]) {
                obj[key] = key;
            }
        }
    }

    const code = "export default " + util.inspect(obj, {
        depth: null,
    });

    fs.writeFileSync(`../../src/languages/${file}`, code);
}

fs.rmdirSync("./languages", { recursive: true });
console.log("Done. Fixing formatting by ESLint...");
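For reference, each file the script writes back to `src/languages/` is a plain ES module default export; the snippet below only illustrates the shape (the values are placeholders, not taken from the repo):

```js
// Illustrative shape of a generated src/languages/<code>.js file.
export default {
    languageName: "English",
    // ...every other key from en.js / the base language, translated or copied over
};
```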
12
extra/update-language-files/package.json
Normal file

@ -0,0 +1,12 @@
{
    "name": "update-language-files",
    "type": "module",
    "version": "1.0.0",
    "description": "",
    "main": "index.js",
    "scripts": {
        "test": "echo \"Error: no test specified\" && exit 1"
    },
    "author": "",
    "license": "ISC"
}
@ -19,19 +19,22 @@ if (! newVersion) {
const exists = tagExists(newVersion);

if (! exists) {

    // Process package.json
    pkg.version = newVersion;
    pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion);
    pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion);
    pkg.scripts["build-docker-alpine"] = pkg.scripts["build-docker-alpine"].replaceAll(oldVersion, newVersion);
    pkg.scripts["build-docker-debian"] = pkg.scripts["build-docker-debian"].replaceAll(oldVersion, newVersion);
    fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");

    // Process README.md
    fs.writeFileSync("README.md", fs.readFileSync("README.md", "utf8").replaceAll(oldVersion, newVersion));

    commit(newVersion);
    tag(newVersion);

    updateWiki(oldVersion, newVersion);

} else {
    console.log("version exists")
    console.log("version exists");
}

function commit(version) {

@ -39,16 +42,16 @@ function commit(version) {

    let res = child_process.spawnSync("git", ["commit", "-m", msg, "-a"]);
    let stdout = res.stdout.toString().trim();
    console.log(stdout)
    console.log(stdout);

    if (stdout.includes("no changes added to commit")) {
        throw new Error("commit error")
        throw new Error("commit error");
    }
}

function tag(version) {
    let res = child_process.spawnSync("git", ["tag", version]);
    console.log(res.stdout.toString().trim())
    console.log(res.stdout.toString().trim());
}

function tagExists(version) {

@ -60,3 +63,38 @@ function tagExists(version) {

    return res.stdout.toString().trim() === version;
}

function updateWiki(oldVersion, newVersion) {
    const wikiDir = "./tmp/wiki";
    const howToUpdateFilename = "./tmp/wiki/🆙-How-to-Update.md";

    safeDelete(wikiDir);

    child_process.spawnSync("git", ["clone", "https://github.com/louislam/uptime-kuma.wiki.git", wikiDir]);
    let content = fs.readFileSync(howToUpdateFilename).toString();
    content = content.replaceAll(`git checkout ${oldVersion}`, `git checkout ${newVersion}`);
    fs.writeFileSync(howToUpdateFilename, content);

    child_process.spawnSync("git", ["add", "-A"], {
        cwd: wikiDir,
    });

    child_process.spawnSync("git", ["commit", "-m", `Update to ${newVersion} from ${oldVersion}`], {
        cwd: wikiDir,
    });

    console.log("Pushing to Github");
    child_process.spawnSync("git", ["push"], {
        cwd: wikiDir,
    });

    safeDelete(wikiDir);
}

function safeDelete(dir) {
    if (fs.existsSync(dir)) {
        fs.rmdirSync(dir, {
            recursive: true,
        });
    }
}
64
extra/upload-github-release-asset.sh
Normal file

@ -0,0 +1,64 @@
#!/usr/bin/env bash
#
# Author: Stefan Buck
# License: MIT
# https://gist.github.com/stefanbuck/ce788fee19ab6eb0b4447a85fc99f447
#
#
# This script accepts the following parameters:
#
# * owner
# * repo
# * tag
# * filename
# * github_api_token
#
# Script to upload a release asset using the GitHub API v3.
#
# Example:
#
# upload-github-release-asset.sh github_api_token=TOKEN owner=stefanbuck repo=playground tag=v0.1.0 filename=./build.zip
#

# Check dependencies.
set -e
xargs=$(which gxargs || which xargs)

# Validate settings.
[ "$TRACE" ] && set -x

CONFIG=$@

for line in $CONFIG; do
    eval "$line"
done

# Define variables.
GH_API="https://api.github.com"
GH_REPO="$GH_API/repos/$owner/$repo"
GH_TAGS="$GH_REPO/releases/tags/$tag"
AUTH="Authorization: token $github_api_token"
WGET_ARGS="--content-disposition --auth-no-challenge --no-cookie"
CURL_ARGS="-LJO#"

if [[ "$tag" == 'LATEST' ]]; then
    GH_TAGS="$GH_REPO/releases/latest"
fi

# Validate token.
curl -o /dev/null -sH "$AUTH" $GH_REPO || { echo "Error: Invalid repo, token or network issue!"; exit 1; }

# Read asset tags.
response=$(curl -sH "$AUTH" $GH_TAGS)

# Get ID of the asset based on given filename.
eval $(echo "$response" | grep -m 1 "id.:" | grep -w id | tr : = | tr -cd '[[:alnum:]]=')
[ "$id" ] || { echo "Error: Failed to get release id for tag: $tag"; echo "$response" | awk 'length($0)<100' >&2; exit 1; }

# Upload asset
echo "Uploading asset... "

# Construct url
GH_ASSET="https://uploads.github.com/repos/$owner/$repo/releases/$id/assets?name=$(basename $filename)"

curl "$GITHUB_OAUTH_BASIC" --data-binary @"$filename" -H "Authorization: token $github_api_token" -H "Content-Type: application/octet-stream" $GH_ASSET
@ -5,6 +5,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png">
<link rel="icon" type="image/svg+xml" href="/icon.svg" />
<link rel="manifest" href="manifest.json" />
<meta name="theme-color" id="theme-color" content="" />
<meta name="description" content="Uptime Kuma monitoring tool" />
<title>Uptime Kuma</title>
203
install.sh
Normal file
203
install.sh
Normal file
|
@ -0,0 +1,203 @@
|
||||||
|
# install.sh is generated by ./extra/install.batsh, do not modify it directly.
|
||||||
|
# "npm run compile-install-script" to compile install.sh
|
||||||
|
# The command is working on Windows PowerShell and Docker for Windows only.
|
||||||
|
# curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
|
||||||
|
"echo" "-e" "====================="
|
||||||
|
"echo" "-e" "Uptime Kuma Installer"
|
||||||
|
"echo" "-e" "====================="
|
||||||
|
"echo" "-e" "Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian"
|
||||||
|
"echo" "-e" "---------------------------------------"
|
||||||
|
"echo" "-e" "This script is designed for Linux and basic usage."
|
||||||
|
"echo" "-e" "For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation"
|
||||||
|
"echo" "-e" "---------------------------------------"
|
||||||
|
"echo" "-e" ""
|
||||||
|
"echo" "-e" "Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2"
|
||||||
|
"echo" "-e" "Docker - Install Uptime Kuma Docker container"
|
||||||
|
"echo" "-e" ""
|
||||||
|
if [ "$1" != "" ]; then
|
||||||
|
type="$1"
|
||||||
|
else
|
||||||
|
"read" "-p" "Which installation method do you prefer? [DOCKER/local]: " "type"
|
||||||
|
fi
|
||||||
|
defaultPort="3001"
|
||||||
|
function checkNode {
|
||||||
|
local _0
|
||||||
|
nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')
|
||||||
|
"echo" "-e" "Node Version: ""$nodeVersion"
|
||||||
|
_0="12"
|
||||||
|
if [ $(($nodeVersion < $_0)) == 1 ]; then
|
||||||
|
"echo" "-e" "Error: Required Node.js 14"
|
||||||
|
"exit" "1"
|
||||||
|
fi
|
||||||
|
if [ "$nodeVersion" == "12" ]; then
|
||||||
|
"echo" "-e" "Warning: NodeJS ""$nodeVersion"" is not tested."
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
function deb {
|
||||||
|
nodeCheck=$(node -v)
|
||||||
|
apt --yes update
|
||||||
|
if [ "$nodeCheck" != "" ]; then
|
||||||
|
"checkNode"
|
||||||
|
else
|
||||||
|
# Old nodejs binary name is "nodejs"
|
||||||
|
check=$(nodejs --version)
|
||||||
|
if [ "$check" != "" ]; then
|
||||||
|
"echo" "-e" "Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
curlCheck=$(curl --version)
|
||||||
|
if [ "$curlCheck" == "" ]; then
|
||||||
|
"echo" "-e" "Installing Curl"
|
||||||
|
apt --yes install curl
|
||||||
|
fi
|
||||||
|
"echo" "-e" "Installing Node.js 14"
|
||||||
|
curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt
|
||||||
|
apt --yes install nodejs
|
||||||
|
node -v
|
||||||
|
nodeCheckAgain=$(node -v)
|
||||||
|
if [ "$nodeCheckAgain" == "" ]; then
|
||||||
|
"echo" "-e" "Error during Node.js installation"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
check=$(git --version)
|
||||||
|
if [ "$check" == "" ]; then
|
||||||
|
"echo" "-e" "Installing Git"
|
||||||
|
apt --yes install git
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
if [ "$type" == "local" ]; then
|
||||||
|
defaultInstallPath="/opt/uptime-kuma"
|
||||||
|
if [ -e "/etc/redhat-release" ]; then
|
||||||
|
os=$("cat" "/etc/redhat-release")
|
||||||
|
distribution="rhel"
|
||||||
|
else
|
||||||
|
if [ -e "/etc/issue" ]; then
|
||||||
|
os=$(head -n1 /etc/issue | cut -f 1 -d ' ')
|
||||||
|
if [ "$os" == "Ubuntu" ]; then
|
||||||
|
distribution="ubuntu"
|
||||||
|
fi
|
||||||
|
if [ "$os" == "Debian" ]; then
|
||||||
|
distribution="debian"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
arch=$(uname -i)
|
||||||
|
"echo" "-e" "Your OS: ""$os"
|
||||||
|
"echo" "-e" "Distribution: ""$distribution"
|
||||||
|
"echo" "-e" "Arch: ""$arch"
|
||||||
|
if [ "$3" != "" ]; then
|
||||||
|
port="$3"
|
||||||
|
else
|
||||||
|
"read" "-p" "Listening Port [$defaultPort]: " "port"
|
||||||
|
if [ "$port" == "" ]; then
|
||||||
|
port="$defaultPort"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
if [ "$2" != "" ]; then
|
||||||
|
installPath="$2"
|
||||||
|
else
|
||||||
|
"read" "-p" "Installation Path [$defaultInstallPath]: " "installPath"
|
||||||
|
if [ "$installPath" == "" ]; then
|
||||||
|
installPath="$defaultInstallPath"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
# CentOS
|
||||||
|
if [ "$distribution" == "rhel" ]; then
|
||||||
|
nodeCheck=$(node -v)
|
||||||
|
if [ "$nodeCheck" != "" ]; then
|
||||||
|
"checkNode"
|
||||||
|
else
|
||||||
|
curlCheck=$(curl --version)
|
||||||
|
if [ "$curlCheck" == "" ]; then
|
||||||
|
"echo" "-e" "Installing Curl"
|
||||||
|
yum -y -q install curl
|
||||||
|
fi
|
||||||
|
"echo" "-e" "Installing Node.js 14"
|
||||||
|
curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt
|
||||||
|
yum install -y -q nodejs
|
||||||
|
node -v
|
||||||
|
nodeCheckAgain=$(node -v)
|
||||||
|
if [ "$nodeCheckAgain" == "" ]; then
|
||||||
|
"echo" "-e" "Error during Node.js installation"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
check=$(git --version)
|
||||||
|
if [ "$check" == "" ]; then
|
||||||
|
"echo" "-e" "Installing Git"
|
||||||
|
yum -y -q install git
|
||||||
|
fi
|
||||||
|
# Ubuntu
|
||||||
|
else
|
||||||
|
if [ "$distribution" == "ubuntu" ]; then
|
||||||
|
"deb"
|
||||||
|
# Debian
|
||||||
|
else
|
||||||
|
if [ "$distribution" == "debian" ]; then
|
||||||
|
"deb"
|
||||||
|
else
|
||||||
|
# Unknown distribution
|
||||||
|
error=$((0))
|
||||||
|
check=$(git --version)
|
||||||
|
if [ "$check" == "" ]; then
|
||||||
|
error=$((1))
|
||||||
|
"echo" "-e" "Error: git is missing"
|
||||||
|
fi
|
||||||
|
check=$(node -v)
|
||||||
|
if [ "$check" == "" ]; then
|
||||||
|
error=$((1))
|
||||||
|
"echo" "-e" "Error: node is missing"
|
||||||
|
fi
|
||||||
|
if [ $(($error > 0)) == 1 ]; then
|
||||||
|
"echo" "-e" "Please install above missing software"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
check=$(pm2 --version)
|
||||||
|
if [ "$check" == "" ]; then
|
||||||
|
"echo" "-e" "Installing PM2"
|
||||||
|
npm install pm2 -g
|
||||||
|
pm2 startup
|
||||||
|
fi
|
||||||
|
mkdir -p $installPath
|
||||||
|
cd $installPath
|
||||||
|
git clone https://github.com/louislam/uptime-kuma.git .
|
||||||
|
npm run setup
|
||||||
|
pm2 start server/server.js --name uptime-kuma -- --port=$port
|
||||||
|
else
|
||||||
|
defaultVolume="uptime-kuma"
|
||||||
|
check=$(docker -v)
|
||||||
|
if [ "$check" == "" ]; then
|
||||||
|
"echo" "-e" "Error: docker is not found!"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
check=$(docker info)
|
||||||
|
if [[ "$check" == *"Is the docker daemon running"* ]]; then
|
||||||
|
"echo" "Error: docker is not running"
|
||||||
|
"exit" "1"
|
||||||
|
fi
|
||||||
|
if [ "$3" != "" ]; then
|
||||||
|
port="$3"
|
||||||
|
else
|
||||||
|
"read" "-p" "Expose Port [$defaultPort]: " "port"
|
||||||
|
if [ "$port" == "" ]; then
|
||||||
|
port="$defaultPort"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
if [ "$2" != "" ]; then
|
||||||
|
volume="$2"
|
||||||
|
else
|
||||||
|
"read" "-p" "Volume Name [$defaultVolume]: " "volume"
|
||||||
|
if [ "$volume" == "" ]; then
|
||||||
|
volume="$defaultVolume"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
"echo" "-e" "Port: $port"
|
||||||
|
"echo" "-e" "Volume: $volume"
|
||||||
|
docker volume create $volume
|
||||||
|
docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1
|
||||||
|
fi
|
||||||
|
"echo" "-e" "http://localhost:$port"
|
32
kubernetes/README.md
Normal file

@ -0,0 +1,32 @@
# Uptime-Kuma K8s Deployment

⚠ Warning: The K8s deployment is provided by contributors. I have no experience with K8s and can't fix errors in it. I only test Docker and Node.js. Use at your own risk.

## How does it work?

Kustomize is a tool which builds a complete deployment file from all config elements.
You can edit the files in the ```uptime-kuma``` folder, except the ```kustomization.yml```, unless you know what you're doing.
If you want to choose another namespace, edit the ```kustomization.yml``` in the ```kubernetes``` folder and change ```namespace: uptime-kuma``` to something you like.

It creates a certificate with the specified Issuer and creates the Ingress for the Uptime-Kuma ClusterIP service.

## What do I have to edit?

You have to edit the ```ingressroute.yml``` to your needs.
This ingressroute.yml is for the [nginx-ingress-controller](https://kubernetes.github.io/ingress-nginx/) in combination with the [cert-manager](https://cert-manager.io/).

- Host
- Secrets and secret names
- (Cluster)Issuer (optional)
- The version in the deployment file
- Update:
  - Change to a newer version and re-run the commands under "How To use"; the pods will be updated one after another

## How To use

- Install [kustomize](https://kubectl.docs.kubernetes.io/installation/kustomize/)
- Edit the files mentioned above to your needs
- Run ```kustomize build > apply.yml```
- Run ```kubectl apply -f apply.yml```

Now you should see some k8s magic and Uptime-Kuma should be available at the specified address.
10
kubernetes/kustomization.yml
Normal file

@ -0,0 +1,10 @@
namespace: uptime-kuma
namePrefix: uptime-kuma-

commonLabels:
  app: uptime-kuma

bases:
  - uptime-kuma
45
kubernetes/uptime-kuma/deployment.yml
Normal file

@ -0,0 +1,45 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  labels:
    component: uptime-kuma
  name: deployment
spec:
  selector:
    matchLabels:
      component: uptime-kuma
  replicas: 1
  strategy:
    type: Recreate

  template:
    metadata:
      labels:
        component: uptime-kuma
    spec:
      containers:
        - name: app
          image: louislam/uptime-kuma:1
          ports:
            - containerPort: 3001
          volumeMounts:
            - mountPath: /app/data
              name: storage
          livenessProbe:
            exec:
              command:
                - node
                - extra/healthcheck.js
            initialDelaySeconds: 180
            periodSeconds: 60
            timeoutSeconds: 30
          readinessProbe:
            httpGet:
              path: /
              port: 3001
              scheme: HTTP

      volumes:
        - name: storage
          persistentVolumeClaim:
            claimName: pvc
39
kubernetes/uptime-kuma/ingressroute.yml
Normal file

@ -0,0 +1,39 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  annotations:
    kubernetes.io/ingress.class: nginx
    cert-manager.io/cluster-issuer: letsencrypt-prod
    nginx.ingress.kubernetes.io/proxy-read-timeout: "3600"
    nginx.ingress.kubernetes.io/proxy-send-timeout: "3600"
    nginx.ingress.kubernetes.io/server-snippets: |
      location / {
        proxy_set_header Upgrade $http_upgrade;
        proxy_http_version 1.1;
        proxy_set_header X-Forwarded-Host $http_host;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header X-Forwarded-For $remote_addr;
        proxy_set_header Host $host;
        proxy_set_header Connection "upgrade";
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Upgrade $http_upgrade;
        proxy_cache_bypass $http_upgrade;
      }
  name: ingress
spec:
  tls:
    - hosts:
        - example.com
      secretName: example-com-tls
  rules:
    - host: example.com
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: service
                port:
                  number: 3001
5
kubernetes/uptime-kuma/kustomization.yml
Normal file

@ -0,0 +1,5 @@
resources:
  - deployment.yml
  - service.yml
  - ingressroute.yml
  - pvc.yml
10
kubernetes/uptime-kuma/pvc.yml
Normal file

@ -0,0 +1,10 @@
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: pvc
spec:
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 4Gi
13
kubernetes/uptime-kuma/service.yml
Normal file

@ -0,0 +1,13 @@
apiVersion: v1
kind: Service
metadata:
  name: service
spec:
  selector:
    component: uptime-kuma
  type: ClusterIP
  ports:
    - name: http
      port: 3001
      targetPort: 3001
      protocol: TCP
28274
package-lock.json
generated
File diff suppressed because it is too large

159
package.json
|
@ -1,6 +1,6 @@
|
||||||
{
|
{
|
||||||
"name": "uptime-kuma",
|
"name": "uptime-kuma",
|
||||||
"version": "1.1.0",
|
"version": "1.8.0",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
|
@ -10,69 +10,116 @@
|
||||||
"node": "14.*"
|
"node": "14.*"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "vite --host",
|
"install-legacy": "npm install --legacy-peer-deps",
|
||||||
|
"update-legacy": "npm update --legacy-peer-deps",
|
||||||
|
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
|
||||||
|
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
|
||||||
|
"lint": "npm run lint:js && npm run lint:style",
|
||||||
|
"dev": "vite --host --config ./config/vite.config.js",
|
||||||
"start": "npm run start-server",
|
"start": "npm run start-server",
|
||||||
"start-server": "node server/server.js",
|
"start-server": "node server/server.js",
|
||||||
"start-demo-server": "set NODE_ENV=demo && node server/server.js",
|
"start-server-dev": "cross-env NODE_ENV=development node server/server.js",
|
||||||
"update": "",
|
"build": "vite build --config ./config/vite.config.js",
|
||||||
"build": "vite build",
|
"test": "node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/ --test",
|
||||||
"vite-preview-dist": "vite preview --host",
|
"test-with-build": "npm run build && npm test",
|
||||||
"build-docker": "docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:1.1.0 --target release . --push",
|
"jest": "node test/prepare-jest.js && npm run jest-frontend && npm run jest-backend && jest --config=./config/jest.config.js",
|
||||||
"build-docker-nightly": "docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
|
"jest-frontend": "cross-env TEST_FRONTEND=1 jest --config=./config/jest-frontend.config.js",
|
||||||
"build-docker-nightly-amd64": "docker buildx build --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push",
|
"jest-backend": "cross-env TEST_BACKEND=1 jest --config=./config/jest-backend.config.js",
|
||||||
"setup": "git checkout 1.1.0 && npm install && npm run build",
|
"tsc": "tsc",
|
||||||
|
"vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
|
||||||
|
"build-docker": "npm run build-docker-debian && npm run build-docker-alpine",
|
||||||
|
"build-docker-alpine-base": "docker buildx build -f docker/alpine-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-alpine . --push",
|
||||||
|
"build-docker-debian-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-debian . --push",
|
||||||
|
"build-docker-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:1.8.0-alpine --target release . --push",
|
||||||
|
"build-docker-debian": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:1.8.0 -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:1.8.0-debian --target release . --push",
|
||||||
|
"build-docker-nightly": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
|
||||||
|
"build-docker-nightly-alpine": "docker buildx build -f dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly-alpine --target nightly . --push",
|
||||||
|
"build-docker-nightly-amd64": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
|
||||||
|
"upload-artifacts": "docker buildx build --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
|
||||||
|
"setup": "git checkout 1.8.0 && npm ci --production && npm run download-dist",
|
||||||
|
"download-dist": "node extra/download-dist.js",
|
||||||
"update-version": "node extra/update-version.js",
|
"update-version": "node extra/update-version.js",
|
||||||
"mark-as-nightly": "node extra/mark-as-nightly.js",
|
"mark-as-nightly": "node extra/mark-as-nightly.js",
|
||||||
"reset-password": "node extra/reset-password.js",
|
"reset-password": "node extra/reset-password.js",
|
||||||
"compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1"
|
"compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1",
|
||||||
|
"test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
|
||||||
|
"test-install-script-alpine3": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/alpine3.dockerfile .",
|
||||||
|
"test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
|
||||||
|
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
|
||||||
|
"test-nodejs16": "docker build --progress plain -f test/ubuntu-nodejs16.dockerfile .",
|
||||||
|
"simple-dns-server": "node extra/simple-dns-server.js",
|
||||||
|
"update-language-files-with-base-lang": "cd extra/update-language-files && node index.js %npm_config_base_lang% && eslint ../../src/languages/**.js --fix",
|
||||||
|
"update-language-files": "cd extra/update-language-files && node index.js && eslint ../../src/languages/**.js --fix"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@fortawesome/fontawesome-svg-core": "^1.2.36",
|
"@fortawesome/fontawesome-svg-core": "~1.2.36",
|
||||||
"@fortawesome/free-regular-svg-icons": "^5.15.4",
|
"@fortawesome/free-regular-svg-icons": "~5.15.4",
|
||||||
"@fortawesome/free-solid-svg-icons": "^5.15.4",
|
"@fortawesome/free-solid-svg-icons": "~5.15.4",
|
||||||
"@fortawesome/vue-fontawesome": "^3.0.0-4",
|
"@fortawesome/vue-fontawesome": "~3.0.0-4",
|
||||||
"@popperjs/core": "^2.9.3",
|
"@louislam/sqlite3": "~6.0.0",
|
||||||
"args-parser": "^1.3.0",
|
"@popperjs/core": "~2.10.2",
|
||||||
"axios": "^0.21.1",
|
"args-parser": "~1.3.0",
|
||||||
"bcrypt": "^5.0.1",
|
"axios": "~0.21.4",
|
||||||
"bootstrap": "^5.1.0",
|
"babel-plugin-rewire": "~1.2.0",
|
||||||
"command-exists": "^1.2.9",
|
"bcryptjs": "~2.4.3",
|
||||||
"dayjs": "^1.10.6",
|
"bootstrap": "~5.1.1",
|
||||||
"express": "^4.17.1",
|
"chart.js": "~3.5.1",
|
||||||
"express-basic-auth": "^1.2.0",
|
"chartjs-adapter-dayjs": "~1.0.0",
|
||||||
"form-data": "^4.0.0",
|
"command-exists": "~1.2.9",
|
||||||
"http-graceful-shutdown": "^3.1.3",
|
"compare-versions": "~3.6.0",
|
||||||
"jsonwebtoken": "^8.5.1",
|
"dayjs": "~1.10.7",
|
||||||
"nodemailer": "^6.6.3",
|
"express": "~4.17.1",
|
||||||
"password-hash": "^1.2.2",
|
"express-basic-auth": "~1.2.0",
|
||||||
"prom-client": "^13.1.0",
|
"form-data": "~4.0.0",
|
||||||
"prometheus-api-metrics": "^3.2.0",
|
"http-graceful-shutdown": "~3.1.4",
|
||||||
"redbean-node": "0.0.21",
|
"jsonwebtoken": "~8.5.1",
|
||||||
"socket.io": "^4.1.3",
|
"nodemailer": "~6.6.5",
|
||||||
"socket.io-client": "^4.1.3",
|
"notp": "~2.0.3",
|
||||||
"@louislam/sqlite3": "^5.0.3",
|
"password-hash": "~1.2.2",
|
||||||
"tcp-ping": "^0.1.1",
|
"postcss-rtlcss": "~3.4.1",
|
||||||
"v-pagination-3": "^0.1.6",
|
"postcss-scss": "~4.0.1",
|
||||||
"vue": "^3.2.1",
|
"prom-client": "~13.2.0",
|
||||||
"vue-confirm-dialog": "^1.0.2",
|
"prometheus-api-metrics": "~3.2.0",
|
||||||
"vue-multiselect": "^3.0.0-alpha.2",
|
"qrcode": "~1.4.4",
|
||||||
"vue-router": "^4.0.10",
|
"redbean-node": "0.1.2",
|
||||||
"vue-toastification": "^2.0.0-rc.1"
|
"socket.io": "~4.2.0",
|
||||||
|
"socket.io-client": "~4.2.0",
|
||||||
|
"tar": "^6.1.11",
|
||||||
|
"tcp-ping": "~0.1.1",
|
||||||
|
"thirty-two": "~1.0.2",
|
||||||
|
"timezones-list": "~3.0.1",
|
||||||
|
"v-pagination-3": "~0.1.6",
|
||||||
|
"vue": "next",
|
||||||
|
"vue-chart-3": "~0.5.8",
|
||||||
|
"vue-confirm-dialog": "~1.0.2",
|
||||||
|
"vue-contenteditable": "~3.0.4",
|
||||||
|
"vue-i18n": "~9.1.9",
|
||||||
|
"vue-image-crop-upload": "~3.0.3",
|
||||||
|
"vue-multiselect": "~3.0.0-alpha.2",
|
||||||
|
"vue-qrcode": "~1.0.0",
|
||||||
|
"vue-router": "~4.0.11",
|
||||||
|
"vue-toastification": "~2.0.0-rc.1",
|
||||||
|
"vuedraggable": "~4.1.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@babel/eslint-parser": "^7.15.0",
|
"@babel/eslint-parser": "~7.15.7",
|
||||||
"@types/bootstrap": "^5.0.17",
|
"@babel/preset-env": "^7.15.8",
|
||||||
"@vitejs/plugin-legacy": "^1.5.1",
|
"@types/bootstrap": "~5.1.6",
|
||||||
"@vitejs/plugin-vue": "^1.3.0",
|
"@vitejs/plugin-legacy": "~1.6.1",
|
||||||
"@vue/compiler-sfc": "^3.1.5",
|
"@vitejs/plugin-vue": "~1.9.2",
|
||||||
"core-js": "^3.16.0",
|
"@vue/compiler-sfc": "~3.2.19",
|
||||||
"eslint": "^7.32.0",
|
"core-js": "~3.18.1",
|
||||||
"eslint-plugin-vue": "^7.15.1",
|
"cross-env": "~7.0.3",
|
||||||
"sass": "^1.37.5",
|
"dns2": "~2.0.1",
|
||||||
"stylelint": "^13.13.1",
|
"eslint": "~7.32.0",
|
||||||
"stylelint-config-recommended": "^5.0.0",
|
"eslint-plugin-vue": "~7.18.0",
|
||||||
"stylelint-config-standard": "^22.0.0",
|
"jest": "~27.2.4",
|
||||||
"typescript": "^4.3.5",
|
"jest-puppeteer": "~6.0.0",
|
||||||
"vite": "^2.4.4"
|
"puppeteer": "~10.4.0",
|
||||||
|
"sass": "~1.42.1",
|
||||||
|
"stylelint": "~13.13.1",
|
||||||
|
"stylelint-config-standard": "~22.0.0",
|
||||||
|
"typescript": "~4.4.3",
|
||||||
|
"vite": "~2.6.4"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
BIN
public/icon-192x192.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 2.6 KiB

BIN
public/icon-512x512.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 9.5 KiB
19
public/manifest.json
Normal file

@ -0,0 +1,19 @@
{
    "name": "Uptime Kuma",
    "short_name": "Uptime Kuma",
    "start_url": "/",
    "background_color": "#fff",
    "display": "standalone",
    "icons": [
        {
            "src": "icon-192x192.png",
            "sizes": "192x192",
            "type": "image/png"
        },
        {
            "src": "icon-512x512.png",
            "sizes": "512x512",
            "type": "image/png"
        }
    ]
}
42
server/check-version.js
Normal file

@ -0,0 +1,42 @@
const { setSetting } = require("./util-server");
const axios = require("axios");

exports.version = require("../package.json").version;
exports.latestVersion = null;

let interval;

exports.startInterval = () => {
    let check = async () => {
        try {
            const res = await axios.get("https://raw.githubusercontent.com/louislam/uptime-kuma/master/package.json");

            if (typeof res.data === "string") {
                res.data = JSON.parse(res.data);
            }

            // For debug
            if (process.env.TEST_CHECK_VERSION === "1") {
                res.data.version = "1000.0.0";
            }

            exports.latestVersion = res.data.version;
        } catch (_) { }

    };

    check();
    interval = setInterval(check, 3600 * 1000 * 48);
};

exports.enableCheckUpdate = async (value) => {
    await setSetting("checkUpdate", value);

    clearInterval(interval);

    if (value) {
        exports.startInterval();
    }
};

exports.socket = null;
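A consumer of these exports typically compares `latestVersion` against the running `version`; a minimal sketch using `compare-versions`, which is listed in this commit's dependencies. `shouldNotifyUpdate()` is a hypothetical helper, not part of the repo:

```js
const compareVersions = require("compare-versions");
const checkVersion = require("./check-version");

// Returns true when the fetched latest version is newer than the running one.
function shouldNotifyUpdate() {
    if (!checkVersion.latestVersion) {
        return false; // nothing fetched yet, or update checking is disabled
    }
    return compareVersions(checkVersion.latestVersion, checkVersion.version) > 0;
}
```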
100
server/client.js
Normal file

@ -0,0 +1,100 @@
/*
 * For Client Socket
 */
const { TimeLogger } = require("../src/util");
const { R } = require("redbean-node");
const { io } = require("./server");
const { setting } = require("./util-server");
const checkVersion = require("./check-version");

async function sendNotificationList(socket) {
    const timeLogger = new TimeLogger();

    let result = [];
    let list = await R.find("notification", " user_id = ? ", [
        socket.userID,
    ]);

    for (let bean of list) {
        result.push(bean.export());
    }

    io.to(socket.userID).emit("notificationList", result);

    timeLogger.print("Send Notification List");

    return list;
}

/**
 * Send Heartbeat History list to socket
 * @param toUser  True = send to all browsers with the same user id, False = send to the current browser only
 * @param overwrite Overwrite client-side's heartbeat list
 */
async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
    const timeLogger = new TimeLogger();

    let list = await R.getAll(`
        SELECT * FROM heartbeat
        WHERE monitor_id = ?
        ORDER BY time DESC
        LIMIT 100
    `, [
        monitorID,
    ]);

    let result = list.reverse();

    if (toUser) {
        io.to(socket.userID).emit("heartbeatList", monitorID, result, overwrite);
    } else {
        socket.emit("heartbeatList", monitorID, result, overwrite);
    }

    timeLogger.print(`[Monitor: ${monitorID}] sendHeartbeatList`);
}

/**
 * Important Heart beat list (aka event list)
 * @param socket
 * @param monitorID
 * @param toUser  True = send to all browsers with the same user id, False = send to the current browser only
 * @param overwrite Overwrite client-side's heartbeat list
 */
async function sendImportantHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
    const timeLogger = new TimeLogger();

    let list = await R.find("heartbeat", `
        monitor_id = ?
        AND important = 1
        ORDER BY time DESC
        LIMIT 500
    `, [
        monitorID,
    ]);

    timeLogger.print(`[Monitor: ${monitorID}] sendImportantHeartbeatList`);

    if (toUser) {
        io.to(socket.userID).emit("importantHeartbeatList", monitorID, list, overwrite);
    } else {
        socket.emit("importantHeartbeatList", monitorID, list, overwrite);
    }

}

async function sendInfo(socket) {
    socket.emit("info", {
        version: checkVersion.version,
        latestVersion: checkVersion.latestVersion,
        primaryBaseURL: await setting("primaryBaseURL")
    });
}

module.exports = {
    sendNotificationList,
    sendImportantHeartbeatList,
    sendHeartbeatList,
    sendInfo
};
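On the receiving side of these emits, a browser (or Node) client subscribes to the same event names via `socket.io-client`, which is in this commit's dependencies. A minimal sketch; the URL assumes a default local instance on port 3001, and the real server only sends these events to sockets that have authenticated:

```js
const { io } = require("socket.io-client");

const socket = io("http://localhost:3001");

// Event names match what server/client.js emits above.
socket.on("info", (info) => {
    console.log("Server version:", info.version, "latest known:", info.latestVersion);
});

socket.on("heartbeatList", (monitorID, heartbeats, overwrite) => {
    console.log(`Monitor ${monitorID}: ${heartbeats.length} heartbeats (overwrite=${overwrite})`);
});
```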
|
@ -1,36 +1,122 @@
|
||||||
const fs = require("fs");
|
const fs = require("fs");
|
||||||
const { sleep } = require("../src/util");
|
|
||||||
const { R } = require("redbean-node");
|
const { R } = require("redbean-node");
|
||||||
const { setSetting, setting } = require("./util-server");
|
const { setSetting, setting } = require("./util-server");
|
||||||
|
const { debug, sleep } = require("../src/util");
|
||||||
|
const dayjs = require("dayjs");
|
||||||
const knex = require("knex");
|
const knex = require("knex");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Database & App Data Folder
|
||||||
|
*/
|
||||||
class Database {
|
class Database {
|
||||||
|
|
||||||
static templatePath = "./db/kuma.db"
|
static templatePath = "./db/kuma.db";
|
||||||
static path = "./data/kuma.db";
|
|
||||||
static latestVersion = 6;
|
/**
|
||||||
|
* Data Dir (Default: ./data)
|
||||||
|
*/
|
||||||
|
static dataDir;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* User Upload Dir (Default: ./data/upload)
|
||||||
|
*/
|
||||||
|
static uploadDir;
|
||||||
|
|
||||||
|
static path;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @type {boolean}
|
||||||
|
*/
|
||||||
|
static patched = false;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* For Backup only
|
||||||
|
*/
|
||||||
|
static backupPath = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add patch filename in key
|
||||||
|
* Values:
|
||||||
|
* true: Add it regardless of order
|
||||||
|
* false: Do nothing
|
||||||
|
* { parents: []}: Need parents before add it
|
||||||
|
*/
|
||||||
|
static patchList = {
|
||||||
|
"patch-setting-value-type.sql": true,
|
||||||
|
"patch-improve-performance.sql": true,
|
||||||
|
"patch-2fa.sql": true,
|
||||||
|
"patch-add-retry-interval-monitor.sql": true,
|
||||||
|
"patch-incident-table.sql": true,
|
||||||
|
"patch-group-table.sql": true,
|
||||||
|
"patch-monitor-push_token.sql": true,
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The finally version should be 10 after merged tag feature
|
||||||
|
* @deprecated Use patchList for any new feature
|
||||||
|
*/
|
||||||
|
static latestVersion = 10;
|
||||||
|
|
||||||
static noReject = true;
|
static noReject = true;
|
||||||
|
|
||||||
|
static init(args) {
|
||||||
|
// Data Directory (must be end with "/")
|
||||||
|
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
|
||||||
|
Database.path = Database.dataDir + "kuma.db";
|
||||||
|
if (! fs.existsSync(Database.dataDir)) {
|
||||||
|
fs.mkdirSync(Database.dataDir, { recursive: true });
|
||||||
|
}
|
||||||
|
|
||||||
|
Database.uploadDir = Database.dataDir + "upload/";
|
||||||
|
|
||||||
|
if (! fs.existsSync(Database.uploadDir)) {
|
||||||
|
fs.mkdirSync(Database.uploadDir, { recursive: true });
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Data Dir: ${Database.dataDir}`);
|
||||||
|
}
|
||||||
|
|
||||||
static async connect() {
|
static async connect() {
|
||||||
|
const acquireConnectionTimeout = 120 * 1000;
|
||||||
|
|
||||||
const Dialect = require("knex/lib/dialects/sqlite3/index.js");
|
const Dialect = require("knex/lib/dialects/sqlite3/index.js");
|
||||||
Dialect.prototype._driver = () => require("@louislam/sqlite3");
|
Dialect.prototype._driver = () => require("@louislam/sqlite3");
|
||||||
|
|
||||||
R.setup(knex({
|
const knexInstance = knex({
|
||||||
client: Dialect,
|
client: Dialect,
|
||||||
connection: {
|
connection: {
|
||||||
filename: Database.path,
|
filename: Database.path,
|
||||||
|
acquireConnectionTimeout: acquireConnectionTimeout,
|
||||||
},
|
},
|
||||||
useNullAsDefault: true,
|
useNullAsDefault: true,
|
||||||
pool: {
|
pool: {
|
||||||
min: 1,
|
min: 1,
|
||||||
max: 1,
|
max: 1,
|
||||||
idleTimeoutMillis: 30000,
|
idleTimeoutMillis: 120 * 1000,
|
||||||
|
propagateCreateError: false,
|
||||||
|
acquireTimeoutMillis: acquireConnectionTimeout,
|
||||||
}
|
}
|
||||||
}));
|
});
|
||||||
|
|
||||||
|
R.setup(knexInstance);
|
||||||
|
|
||||||
|
if (process.env.SQL_LOG === "1") {
|
||||||
|
R.debug(true);
|
||||||
|
}
|
||||||
|
|
||||||
// Auto map the model to a bean object
|
// Auto map the model to a bean object
|
||||||
R.freeze(true)
|
R.freeze(true);
|
||||||
await R.autoloadModels("./server/model");
|
await R.autoloadModels("./server/model");
|
||||||
|
|
||||||
|
await R.exec("PRAGMA foreign_keys = ON");
|
||||||
|
// Change to WAL
|
||||||
|
await R.exec("PRAGMA journal_mode = WAL");
|
||||||
|
await R.exec("PRAGMA cache_size = -12000");
|
||||||
|
|
||||||
|
console.log("SQLite config:");
|
||||||
|
console.log(await R.getAll("PRAGMA journal_mode"));
|
||||||
|
console.log(await R.getAll("PRAGMA cache_size"));
|
||||||
|
console.log("SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
|
||||||
}
|
}
|
||||||
|
|
||||||
static async patch() {
|
static async patch() {
|
||||||
|
@ -48,11 +134,9 @@ class Database {
|
||||||
} else if (version > this.latestVersion) {
|
} else if (version > this.latestVersion) {
|
||||||
console.info("Warning: Database version is newer than expected");
|
console.info("Warning: Database version is newer than expected");
|
||||||
} else {
|
} else {
|
||||||
console.info("Database patch is needed")
|
console.info("Database patch is needed");
|
||||||
|
|
||||||
console.info("Backup the db")
|
this.backup(version);
|
||||||
const backupPath = "./data/kuma.db.bak" + version;
|
|
||||||
fs.copyFileSync(Database.path, backupPath);
|
|
||||||
|
|
||||||
// Try catch anything here, if gone wrong, restore the backup
|
// Try catch anything here, if gone wrong, restore the backup
|
||||||
try {
|
try {
|
||||||
|
@ -63,18 +147,95 @@ class Database {
|
||||||
console.info(`Patched ${sqlFile}`);
|
console.info(`Patched ${sqlFile}`);
|
||||||
await setSetting("database_version", i);
|
await setSetting("database_version", i);
|
||||||
}
|
}
|
||||||
console.log("Database Patched Successfully");
|
|
||||||
} catch (ex) {
|
} catch (ex) {
|
||||||
await Database.close();
|
await Database.close();
|
||||||
console.error("Patch db failed!!! Restoring the backup")
|
|
||||||
fs.copyFileSync(backupPath, Database.path);
|
|
||||||
console.error(ex)
|
|
||||||
|
|
||||||
console.error("Start Uptime-Kuma failed due to patch db failed")
|
console.error(ex);
|
||||||
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues")
|
console.error("Start Uptime-Kuma failed due to patch db failed");
|
||||||
|
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
|
||||||
|
|
||||||
|
this.restore();
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
await this.patch2();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Call it from patch() only
|
||||||
|
* @returns {Promise<void>}
|
||||||
|
*/
|
||||||
|
static async patch2() {
|
||||||
|
console.log("Database Patch 2.0 Process");
|
||||||
|
let databasePatchedFiles = await setting("databasePatchedFiles");
|
||||||
|
|
||||||
|
if (! databasePatchedFiles) {
|
||||||
|
databasePatchedFiles = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
debug("Patched files:");
|
||||||
|
debug(databasePatchedFiles);
|
||||||
|
|
||||||
|
try {
|
||||||
|
for (let sqlFilename in this.patchList) {
|
||||||
|
await this.patch2Recursion(sqlFilename, databasePatchedFiles);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.patched) {
|
||||||
|
console.log("Database Patched Successfully");
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (ex) {
|
||||||
|
await Database.close();
|
||||||
|
|
||||||
|
console.error(ex);
|
||||||
|
console.error("Start Uptime-Kuma failed due to patch db failed");
|
||||||
|
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
|
||||||
|
|
||||||
|
this.restore();
|
||||||
|
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
await setSetting("databasePatchedFiles", databasePatchedFiles);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Used by patch2() only
|
||||||
|
* @param sqlFilename
|
||||||
|
* @param databasePatchedFiles
|
||||||
|
*/
|
||||||
|
static async patch2Recursion(sqlFilename, databasePatchedFiles) {
|
||||||
|
let value = this.patchList[sqlFilename];
|
||||||
|
|
||||||
|
if (! value) {
|
||||||
|
console.log(sqlFilename + " skip");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if patched
|
||||||
|
if (! databasePatchedFiles[sqlFilename]) {
|
||||||
|
console.log(sqlFilename + " is not patched");
|
||||||
|
|
||||||
|
if (value.parents) {
|
||||||
|
console.log(sqlFilename + " need parents");
|
||||||
|
for (let parentSQLFilename of value.parents) {
|
||||||
|
await this.patch2Recursion(parentSQLFilename, databasePatchedFiles);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.backup(dayjs().format("YYYYMMDDHHmmss"));
|
||||||
|
|
||||||
|
console.log(sqlFilename + " is patching");
|
||||||
|
this.patched = true;
|
||||||
|
await this.importSQLFile("./db/" + sqlFilename);
|
||||||
|
databasePatchedFiles[sqlFilename] = true;
|
||||||
|
console.log(sqlFilename + " is patched successfully");
|
||||||
|
|
||||||
|
} else {
|
||||||
|
debug(sqlFilename + " is already patched, skip");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
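To make the parent-resolution logic above easier to follow, here is an illustrative shape for this.patchList and the stored setting. The SQL filenames below are hypothetical; the real list is defined near the top of this file and is not part of this hunk.

// patch2Recursion() applies "parents" first, then the file itself, and records
// every applied file in databasePatchedFiles so it is skipped on the next start.
static patchList = {
    "patch-a.sql": true,
    "patch-b.sql": {
        parents: [ "patch-a.sql" ],
    },
};

// After a successful patch2() run, the "databasePatchedFiles" setting holds:
// { "patch-a.sql": true, "patch-b.sql": true }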
|
|
||||||
/**
|
/**
|
||||||
|
@ -91,12 +252,12 @@ class Database {
|
||||||
// Remove all comments (--)
|
// Remove all comments (--)
|
||||||
let lines = text.split("\n");
|
let lines = text.split("\n");
|
||||||
lines = lines.filter((line) => {
|
lines = lines.filter((line) => {
|
||||||
return ! line.startsWith("--")
|
return ! line.startsWith("--");
|
||||||
});
|
});
|
||||||
|
|
||||||
// Split statements by semicolon
|
// Split statements by semicolon
|
||||||
// Filter out empty line
|
// Filter out empty line
|
||||||
text = lines.join("\n")
|
text = lines.join("\n");
|
||||||
|
|
||||||
let statements = text.split(";")
|
let statements = text.split(";")
|
||||||
.map((statement) => {
|
.map((statement) => {
|
||||||
|
@ -104,13 +265,17 @@ class Database {
|
||||||
})
|
})
|
||||||
.filter((statement) => {
|
.filter((statement) => {
|
||||||
return statement !== "";
|
return statement !== "";
|
||||||
})
|
});
|
||||||
|
|
||||||
for (let statement of statements) {
|
for (let statement of statements) {
|
||||||
await R.exec(statement);
|
await R.exec(statement);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
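Consolidated, the comment-stripping and statement-splitting in this hunk reads roughly as below. The read of the SQL file and the body of the .map() callback are elided by the hunk, so `text` holding the file contents and the trim() are assumptions.

// Drop "--" comment lines, split the rest into statements on ";", skip empties,
// and execute the statements one by one.
let lines = text.split("\n").filter((line) => ! line.startsWith("--"));

let statements = lines.join("\n")
    .split(";")
    .map((statement) => statement.trim())
    .filter((statement) => statement !== "");

for (let statement of statements) {
    await R.exec(statement);
}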
|
|
||||||
|
static getBetterSQLite3Database() {
|
||||||
|
return R.knex.client.acquireConnection();
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Special handling, because tarn.js throws a promise rejection that cannot be caught
|
* Special handling, because tarn.js throws a promise rejection that cannot be caught
|
||||||
* @returns {Promise<void>}
|
* @returns {Promise<void>}
|
||||||
|
@ -121,23 +286,92 @@ class Database {
|
||||||
};
|
};
|
||||||
process.addListener("unhandledRejection", listener);
|
process.addListener("unhandledRejection", listener);
|
||||||
|
|
||||||
console.log("Closing DB")
|
console.log("Closing DB");
|
||||||
|
|
||||||
while (true) {
|
while (true) {
|
||||||
Database.noReject = true;
|
Database.noReject = true;
|
||||||
await R.close()
|
await R.close();
|
||||||
await sleep(2000)
|
await sleep(2000);
|
||||||
|
|
||||||
if (Database.noReject) {
|
if (Database.noReject) {
|
||||||
break;
|
break;
|
||||||
} else {
|
} else {
|
||||||
console.log("Waiting to close the db")
|
console.log("Waiting to close the db");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
console.log("SQLite closed")
|
console.log("SQLite closed");
|
||||||
|
|
||||||
process.removeListener("unhandledRejection", listener);
|
process.removeListener("unhandledRejection", listener);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Only one backup is taken per process.
|
||||||
|
* Reset this.backupPath if you want to backup again
|
||||||
|
* @param version
|
||||||
|
*/
|
||||||
|
static backup(version) {
|
||||||
|
if (! this.backupPath) {
|
||||||
|
console.info("Backup the db");
|
||||||
|
this.backupPath = this.dataDir + "kuma.db.bak" + version;
|
||||||
|
fs.copyFileSync(Database.path, this.backupPath);
|
||||||
|
|
||||||
|
const shmPath = Database.path + "-shm";
|
||||||
|
if (fs.existsSync(shmPath)) {
|
||||||
|
this.backupShmPath = shmPath + ".bak" + version;
|
||||||
|
fs.copyFileSync(shmPath, this.backupShmPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
const walPath = Database.path + "-wal";
|
||||||
|
if (fs.existsSync(walPath)) {
|
||||||
|
this.backupWalPath = walPath + ".bak" + version;
|
||||||
|
fs.copyFileSync(walPath, this.backupWalPath);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
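As a concrete illustration of what backup() produces, assuming dataDir is "./data/" and using an illustrative timestamp for the version:

// Database.backup("20210912103000") copies, at most once per process:
fs.copyFileSync("./data/kuma.db", "./data/kuma.db.bak20210912103000");
fs.copyFileSync("./data/kuma.db-shm", "./data/kuma.db-shm.bak20210912103000");   // only if the -shm file exists
fs.copyFileSync("./data/kuma.db-wal", "./data/kuma.db-wal.bak20210912103000");   // only if the -wal file exists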
|
|
||||||
|
/**
|
||||||
|
* Restore the database from the backup taken by backup(), if one exists.
|
||||||
|
*/
|
||||||
|
static restore() {
|
||||||
|
if (this.backupPath) {
|
||||||
|
console.error("Patch db failed!!! Restoring the backup");
|
||||||
|
|
||||||
|
const shmPath = Database.path + "-shm";
|
||||||
|
const walPath = Database.path + "-wal";
|
||||||
|
|
||||||
|
// Delete patch failed db
|
||||||
|
try {
|
||||||
|
if (fs.existsSync(Database.path)) {
|
||||||
|
fs.unlinkSync(Database.path);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fs.existsSync(shmPath)) {
|
||||||
|
fs.unlinkSync(shmPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fs.existsSync(walPath)) {
|
||||||
|
fs.unlinkSync(walPath);
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
console.log("Restore failed, you may need to restore the backup manually");
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Restore backup
|
||||||
|
fs.copyFileSync(this.backupPath, Database.path);
|
||||||
|
|
||||||
|
if (this.backupShmPath) {
|
||||||
|
fs.copyFileSync(this.backupShmPath, shmPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.backupWalPath) {
|
||||||
|
fs.copyFileSync(this.backupWalPath, walPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
} else {
|
||||||
|
console.log("Nothing to restore");
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = Database;
|
module.exports = Database;
|
||||||
|
|
server/image-data-uri.js (new file, 57 lines)
|
@ -0,0 +1,57 @@
|
||||||
|
/*
|
||||||
|
From https://github.com/DiegoZoracKy/image-data-uri/blob/master/lib/image-data-uri.js
|
||||||
|
Modified with 0 dependencies
|
||||||
|
*/
|
||||||
|
let fs = require("fs");
|
||||||
|
|
||||||
|
let ImageDataURI = (() => {
|
||||||
|
|
||||||
|
function decode(dataURI) {
|
||||||
|
if (!/data:image\//.test(dataURI)) {
|
||||||
|
console.log("ImageDataURI :: Error :: It seems that it is not an Image Data URI. Couldn't match \"data:image/\"");
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
let regExMatches = dataURI.match("data:(image/.*);base64,(.*)");
|
||||||
|
return {
|
||||||
|
imageType: regExMatches[1],
|
||||||
|
dataBase64: regExMatches[2],
|
||||||
|
dataBuffer: new Buffer(regExMatches[2], "base64")
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function encode(data, mediaType) {
|
||||||
|
if (!data || !mediaType) {
|
||||||
|
console.log("ImageDataURI :: Error :: Missing some of the required params: data, mediaType ");
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
mediaType = (/\//.test(mediaType)) ? mediaType : "image/" + mediaType;
|
||||||
|
let dataBase64 = (Buffer.isBuffer(data)) ? data.toString("base64") : new Buffer(data).toString("base64");
|
||||||
|
let dataImgBase64 = "data:" + mediaType + ";base64," + dataBase64;
|
||||||
|
|
||||||
|
return dataImgBase64;
|
||||||
|
}
|
||||||
|
|
||||||
|
function outputFile(dataURI, filePath) {
|
||||||
|
filePath = filePath || "./";
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
let imageDecoded = decode(dataURI);
|
||||||
|
|
||||||
|
fs.writeFile(filePath, imageDecoded.dataBuffer, err => {
|
||||||
|
if (err) {
|
||||||
|
return reject("ImageDataURI :: Error :: " + JSON.stringify(err, null, 4));
|
||||||
|
}
|
||||||
|
resolve(filePath);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
decode: decode,
|
||||||
|
encode: encode,
|
||||||
|
outputFile: outputFile,
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
|
||||||
|
module.exports = ImageDataURI;
|
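A short usage sketch for the helper above; the require path, bytes and file names are illustrative.

const ImageDataURI = require("./image-data-uri");

const dataURI = ImageDataURI.encode(Buffer.from([ 0x89, 0x50, 0x4E, 0x47 ]), "png");
// -> "data:image/png;base64,iVBORw=="

const decoded = ImageDataURI.decode(dataURI);
// -> { imageType: "image/png", dataBase64: "iVBORw==", dataBuffer: <Buffer 89 50 4e 47> }

// outputFile() decodes the URI, writes the buffer to disk and resolves with the path:
ImageDataURI.outputFile(dataURI, "./data/upload/logo.png").then(console.log);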
server/model/group.js (new file, 34 lines)
|
@ -0,0 +1,34 @@
|
||||||
|
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||||
|
const { R } = require("redbean-node");
|
||||||
|
|
||||||
|
class Group extends BeanModel {
|
||||||
|
|
||||||
|
async toPublicJSON() {
|
||||||
|
let monitorBeanList = await this.getMonitorList();
|
||||||
|
let monitorList = [];
|
||||||
|
|
||||||
|
for (let bean of monitorBeanList) {
|
||||||
|
monitorList.push(await bean.toPublicJSON());
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: this.id,
|
||||||
|
name: this.name,
|
||||||
|
weight: this.weight,
|
||||||
|
monitorList,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async getMonitorList() {
|
||||||
|
return R.convertToBeans("monitor", await R.getAll(`
|
||||||
|
SELECT monitor.* FROM monitor, monitor_group
|
||||||
|
WHERE monitor.id = monitor_group.monitor_id
|
||||||
|
AND group_id = ?
|
||||||
|
ORDER BY monitor_group.weight
|
||||||
|
`, [
|
||||||
|
this.id,
|
||||||
|
]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Group;
|
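For reference, toPublicJSON() resolves to an object of this shape (values are illustrative; the monitorList entries come from Monitor.toPublicJSON(), shown later in this commit):

// {
//     id: 1,
//     name: "Core services",
//     weight: 1,
//     monitorList: [
//         { id: 5, name: "My website" },
//     ],
// }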
|
@ -1,8 +1,8 @@
|
||||||
const dayjs = require("dayjs");
|
const dayjs = require("dayjs");
|
||||||
const utc = require("dayjs/plugin/utc")
|
const utc = require("dayjs/plugin/utc");
|
||||||
let timezone = require("dayjs/plugin/timezone")
|
let timezone = require("dayjs/plugin/timezone");
|
||||||
dayjs.extend(utc)
|
dayjs.extend(utc);
|
||||||
dayjs.extend(timezone)
|
dayjs.extend(timezone);
|
||||||
const { BeanModel } = require("redbean-node/dist/bean-model");
|
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -13,6 +13,15 @@ const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||||
*/
|
*/
|
||||||
class Heartbeat extends BeanModel {
|
class Heartbeat extends BeanModel {
|
||||||
|
|
||||||
|
toPublicJSON() {
|
||||||
|
return {
|
||||||
|
status: this.status,
|
||||||
|
time: this.time,
|
||||||
|
msg: "", // Hide for public
|
||||||
|
ping: this.ping,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
toJSON() {
|
toJSON() {
|
||||||
return {
|
return {
|
||||||
monitorID: this.monitor_id,
|
monitorID: this.monitor_id,
|
||||||
|
|
server/model/incident.js (new file, 18 lines)
|
@ -0,0 +1,18 @@
|
||||||
|
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||||
|
|
||||||
|
class Incident extends BeanModel {
|
||||||
|
|
||||||
|
toPublicJSON() {
|
||||||
|
return {
|
||||||
|
id: this.id,
|
||||||
|
style: this.style,
|
||||||
|
title: this.title,
|
||||||
|
content: this.content,
|
||||||
|
pin: this.pin,
|
||||||
|
createdDate: this.createdDate,
|
||||||
|
lastUpdatedDate: this.lastUpdatedDate,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Incident;
|
|
@ -1,17 +1,19 @@
|
||||||
const https = require("https");
|
const https = require("https");
|
||||||
const dayjs = require("dayjs");
|
const dayjs = require("dayjs");
|
||||||
const utc = require("dayjs/plugin/utc")
|
const utc = require("dayjs/plugin/utc");
|
||||||
let timezone = require("dayjs/plugin/timezone")
|
let timezone = require("dayjs/plugin/timezone");
|
||||||
dayjs.extend(utc)
|
dayjs.extend(utc);
|
||||||
dayjs.extend(timezone)
|
dayjs.extend(timezone);
|
||||||
const axios = require("axios");
|
const axios = require("axios");
|
||||||
const { Prometheus } = require("../prometheus");
|
const { Prometheus } = require("../prometheus");
|
||||||
const { debug, UP, DOWN, PENDING, flipStatus } = require("../../src/util");
|
const { debug, UP, DOWN, PENDING, flipStatus, TimeLogger } = require("../../src/util");
|
||||||
const { tcping, ping, checkCertificate, checkStatusCode } = require("../util-server");
|
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom } = require("../util-server");
|
||||||
const { R } = require("redbean-node");
|
const { R } = require("redbean-node");
|
||||||
const { BeanModel } = require("redbean-node/dist/bean-model");
|
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||||
const { Notification } = require("../notification")
|
const { Notification } = require("../notification");
|
||||||
|
const { demoMode } = require("../server");
|
||||||
const version = require("../../package.json").version;
|
const version = require("../../package.json").version;
|
||||||
|
const apicache = require("../modules/apicache");
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* status:
|
* status:
|
||||||
|
@ -20,18 +22,35 @@ const version = require("../../package.json").version;
|
||||||
* 2 = PENDING
|
* 2 = PENDING
|
||||||
*/
|
*/
|
||||||
class Monitor extends BeanModel {
|
class Monitor extends BeanModel {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return an object that is ready to be parsed to JSON for the public
|
||||||
|
* Only show necessary data to the public
|
||||||
|
*/
|
||||||
|
async toPublicJSON() {
|
||||||
|
return {
|
||||||
|
id: this.id,
|
||||||
|
name: this.name,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return an object that is ready to be parsed to JSON
|
||||||
|
*/
|
||||||
async toJSON() {
|
async toJSON() {
|
||||||
|
|
||||||
let notificationIDList = {};
|
let notificationIDList = {};
|
||||||
|
|
||||||
let list = await R.find("monitor_notification", " monitor_id = ? ", [
|
let list = await R.find("monitor_notification", " monitor_id = ? ", [
|
||||||
this.id,
|
this.id,
|
||||||
])
|
]);
|
||||||
|
|
||||||
for (let bean of list) {
|
for (let bean of list) {
|
||||||
notificationIDList[bean.notification_id] = true;
|
notificationIDList[bean.notification_id] = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const tags = await R.getAll("SELECT mt.*, tag.name, tag.color FROM monitor_tag mt JOIN tag ON mt.tag_id = tag.id WHERE mt.monitor_id = ?", [this.id]);
|
||||||
|
|
||||||
return {
|
return {
|
||||||
id: this.id,
|
id: this.id,
|
||||||
name: this.name,
|
name: this.name,
|
||||||
|
@ -43,12 +62,18 @@ class Monitor extends BeanModel {
|
||||||
active: this.active,
|
active: this.active,
|
||||||
type: this.type,
|
type: this.type,
|
||||||
interval: this.interval,
|
interval: this.interval,
|
||||||
|
retryInterval: this.retryInterval,
|
||||||
keyword: this.keyword,
|
keyword: this.keyword,
|
||||||
ignoreTls: this.getIgnoreTls(),
|
ignoreTls: this.getIgnoreTls(),
|
||||||
upsideDown: this.isUpsideDown(),
|
upsideDown: this.isUpsideDown(),
|
||||||
maxredirects: this.maxredirects,
|
maxredirects: this.maxredirects,
|
||||||
accepted_statuscodes: this.getAcceptedStatuscodes(),
|
accepted_statuscodes: this.getAcceptedStatuscodes(),
|
||||||
|
dns_resolve_type: this.dns_resolve_type,
|
||||||
|
dns_resolve_server: this.dns_resolve_server,
|
||||||
|
dns_last_result: this.dns_last_result,
|
||||||
|
pushToken: this.pushToken,
|
||||||
notificationIDList,
|
notificationIDList,
|
||||||
|
tags: tags,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -57,7 +82,7 @@ class Monitor extends BeanModel {
|
||||||
* @returns {boolean}
|
* @returns {boolean}
|
||||||
*/
|
*/
|
||||||
getIgnoreTls() {
|
getIgnoreTls() {
|
||||||
return Boolean(this.ignoreTls)
|
return Boolean(this.ignoreTls);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -87,12 +112,12 @@ class Monitor extends BeanModel {
|
||||||
if (! previousBeat) {
|
if (! previousBeat) {
|
||||||
previousBeat = await R.findOne("heartbeat", " monitor_id = ? ORDER BY time DESC", [
|
previousBeat = await R.findOne("heartbeat", " monitor_id = ? ORDER BY time DESC", [
|
||||||
this.id,
|
this.id,
|
||||||
])
|
]);
|
||||||
}
|
}
|
||||||
|
|
||||||
const isFirstBeat = !previousBeat;
|
const isFirstBeat = !previousBeat;
|
||||||
|
|
||||||
let bean = R.dispense("heartbeat")
|
let bean = R.dispense("heartbeat");
|
||||||
bean.monitor_id = this.id;
|
bean.monitor_id = this.id;
|
||||||
bean.time = R.isoDateTime(dayjs.utc());
|
bean.time = R.isoDateTime(dayjs.utc());
|
||||||
bean.status = DOWN;
|
bean.status = DOWN;
|
||||||
|
@ -110,10 +135,9 @@ class Monitor extends BeanModel {
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (this.type === "http" || this.type === "keyword") {
|
if (this.type === "http" || this.type === "keyword") {
|
||||||
|
// Do not run any queries or other heavy work before "bean.ping" is measured
|
||||||
let startTime = dayjs().valueOf();
|
let startTime = dayjs().valueOf();
|
||||||
|
|
||||||
// Use Custom agent to disable session reuse
|
|
||||||
// https://github.com/nodejs/node/issues/3940
|
|
||||||
let res = await axios.get(this.url, {
|
let res = await axios.get(this.url, {
|
||||||
timeout: this.interval * 1000 * 0.8,
|
timeout: this.interval * 1000 * 0.8,
|
||||||
headers: {
|
headers: {
|
||||||
|
@ -121,7 +145,7 @@ class Monitor extends BeanModel {
|
||||||
"User-Agent": "Uptime-Kuma/" + version,
|
"User-Agent": "Uptime-Kuma/" + version,
|
||||||
},
|
},
|
||||||
httpsAgent: new https.Agent({
|
httpsAgent: new https.Agent({
|
||||||
maxCachedSessions: 0,
|
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
|
||||||
rejectUnauthorized: ! this.getIgnoreTls(),
|
rejectUnauthorized: ! this.getIgnoreTls(),
|
||||||
}),
|
}),
|
||||||
maxRedirects: this.maxredirects,
|
maxRedirects: this.maxredirects,
|
||||||
|
@ -129,23 +153,24 @@ class Monitor extends BeanModel {
|
||||||
return checkStatusCode(status, this.getAcceptedStatuscodes());
|
return checkStatusCode(status, this.getAcceptedStatuscodes());
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
bean.msg = `${res.status} - ${res.statusText}`
|
bean.msg = `${res.status} - ${res.statusText}`;
|
||||||
bean.ping = dayjs().valueOf() - startTime;
|
bean.ping = dayjs().valueOf() - startTime;
|
||||||
|
|
||||||
// Check certificate if https is used
|
// Check certificate if https is used
|
||||||
|
|
||||||
let certInfoStartTime = dayjs().valueOf();
|
let certInfoStartTime = dayjs().valueOf();
|
||||||
if (this.getUrl()?.protocol === "https:") {
|
if (this.getUrl()?.protocol === "https:") {
|
||||||
try {
|
try {
|
||||||
tlsInfo = await this.updateTlsInfo(checkCertificate(res));
|
tlsInfo = await this.updateTlsInfo(checkCertificate(res));
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
if (e.message !== "No TLS certificate in response") {
|
if (e.message !== "No TLS certificate in response") {
|
||||||
console.error(e.message)
|
console.error(e.message);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
debug("Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms")
|
if (process.env.TIMELOGGER === "1") {
|
||||||
|
debug("Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms");
|
||||||
|
}
|
||||||
|
|
||||||
if (this.type === "http") {
|
if (this.type === "http") {
|
||||||
bean.status = UP;
|
bean.status = UP;
|
||||||
|
@ -155,27 +180,89 @@ class Monitor extends BeanModel {
|
||||||
|
|
||||||
// Convert to string for object/array
|
// Convert to string for object/array
|
||||||
if (typeof data !== "string") {
|
if (typeof data !== "string") {
|
||||||
data = JSON.stringify(data)
|
data = JSON.stringify(data);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (data.includes(this.keyword)) {
|
if (data.includes(this.keyword)) {
|
||||||
bean.msg += ", keyword is found"
|
bean.msg += ", keyword is found";
|
||||||
bean.status = UP;
|
bean.status = UP;
|
||||||
} else {
|
} else {
|
||||||
throw new Error(bean.msg + ", but keyword is not found")
|
throw new Error(bean.msg + ", but keyword is not found");
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
} else if (this.type === "port") {
|
} else if (this.type === "port") {
|
||||||
bean.ping = await tcping(this.hostname, this.port);
|
bean.ping = await tcping(this.hostname, this.port);
|
||||||
bean.msg = ""
|
bean.msg = "";
|
||||||
bean.status = UP;
|
bean.status = UP;
|
||||||
|
|
||||||
} else if (this.type === "ping") {
|
} else if (this.type === "ping") {
|
||||||
bean.ping = await ping(this.hostname);
|
bean.ping = await ping(this.hostname);
|
||||||
bean.msg = ""
|
bean.msg = "";
|
||||||
bean.status = UP;
|
bean.status = UP;
|
||||||
|
} else if (this.type === "dns") {
|
||||||
|
let startTime = dayjs().valueOf();
|
||||||
|
let dnsMessage = "";
|
||||||
|
|
||||||
|
let dnsRes = await dnsResolve(this.hostname, this.dns_resolve_server, this.dns_resolve_type);
|
||||||
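// dnsResolve() lives in util-server.js and is not part of this hunk. As a rough
// sketch only, such a helper could be built on Node's dns module along these lines
// (an assumption, not the actual implementation):
//
//     const { Resolver } = require("dns").promises;
//
//     async function dnsResolve(hostname, resolverServer, rrtype) {
//         const resolver = new Resolver();
//         resolver.setServers([ resolverServer ]);
//         if (rrtype === "PTR") {
//             return resolver.resolvePtr(hostname);
//         }
//         return resolver.resolve(hostname, rrtype);
//     }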
|
bean.ping = dayjs().valueOf() - startTime;
|
||||||
|
|
||||||
|
if (this.dns_resolve_type == "A" || this.dns_resolve_type == "AAAA" || this.dns_resolve_type == "TXT") {
|
||||||
|
dnsMessage += "Records: ";
|
||||||
|
dnsMessage += dnsRes.join(" | ");
|
||||||
|
} else if (this.dns_resolve_type == "CNAME" || this.dns_resolve_type == "PTR") {
|
||||||
|
dnsMessage = dnsRes[0];
|
||||||
|
} else if (this.dns_resolve_type == "CAA") {
|
||||||
|
dnsMessage = dnsRes[0].issue;
|
||||||
|
} else if (this.dns_resolve_type == "MX") {
|
||||||
|
dnsRes.forEach(record => {
|
||||||
|
dnsMessage += `Hostname: ${record.exchange} - Priority: ${record.priority} | `;
|
||||||
|
});
|
||||||
|
dnsMessage = dnsMessage.slice(0, -2);
|
||||||
|
} else if (this.dns_resolve_type == "NS") {
|
||||||
|
dnsMessage += "Servers: ";
|
||||||
|
dnsMessage += dnsRes.join(" | ");
|
||||||
|
} else if (this.dns_resolve_type == "SOA") {
|
||||||
|
dnsMessage += `NS-Name: ${dnsRes.nsname} | Hostmaster: ${dnsRes.hostmaster} | Serial: ${dnsRes.serial} | Refresh: ${dnsRes.refresh} | Retry: ${dnsRes.retry} | Expire: ${dnsRes.expire} | MinTTL: ${dnsRes.minttl}`;
|
||||||
|
} else if (this.dns_resolve_type == "SRV") {
|
||||||
|
dnsRes.forEach(record => {
|
||||||
|
dnsMessage += `Name: ${record.name} | Port: ${record.port} | Priority: ${record.priority} | Weight: ${record.weight} | `;
|
||||||
|
});
|
||||||
|
dnsMessage = dnsMessage.slice(0, -2);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.dnsLastResult !== dnsMessage) {
|
||||||
|
R.exec("UPDATE `monitor` SET dns_last_result = ? WHERE id = ? ", [
|
||||||
|
dnsMessage,
|
||||||
|
this.id
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
bean.msg = dnsMessage;
|
||||||
|
bean.status = UP;
|
||||||
|
} else if (this.type === "push") { // Type: Push
|
||||||
|
const time = R.isoDateTime(dayjs.utc().subtract(this.interval, "second"));
|
||||||
|
|
||||||
|
let heartbeatCount = await R.count("heartbeat", " monitor_id = ? AND time > ? ", [
|
||||||
|
this.id,
|
||||||
|
time
|
||||||
|
]);
|
||||||
|
|
||||||
|
debug("heartbeatCount" + heartbeatCount + " " + time);
|
||||||
|
|
||||||
|
if (heartbeatCount <= 0) {
|
||||||
|
throw new Error("No heartbeat in the time window");
|
||||||
|
} else {
|
||||||
|
// No need to insert successful heartbeat for push type, so end here
|
||||||
|
retries = 0;
|
||||||
|
this.heartbeatInterval = setTimeout(beat, this.interval * 1000);
|
||||||
|
return;
|
||||||
|
}
|
||||||
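// Note: no successful heartbeat is inserted here because push monitors are driven
// from the outside: an external client is expected to call the push URL tied to this
// monitor's pushToken on its own schedule, and this branch only checks whether such
// a beat arrived within the last interval. The HTTP route itself is presumably
// registered in the server routing code and is not part of this hunk.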
|
|
||||||
|
} else {
|
||||||
|
bean.msg = "Unknown Monitor Type";
|
||||||
|
bean.status = PENDING;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (this.isUpsideDown()) {
|
if (this.isUpsideDown()) {
|
||||||
|
@ -203,6 +290,8 @@ class Monitor extends BeanModel {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let beatInterval = this.interval;
|
||||||
|
|
||||||
// * ? -> ANY STATUS = important [isFirstBeat]
|
// * ? -> ANY STATUS = important [isFirstBeat]
|
||||||
// UP -> PENDING = not important
|
// UP -> PENDING = not important
|
||||||
// * UP -> DOWN = important
|
// * UP -> DOWN = important
|
||||||
|
@ -227,24 +316,28 @@ class Monitor extends BeanModel {
|
||||||
if (!isFirstBeat || bean.status === DOWN) {
|
if (!isFirstBeat || bean.status === DOWN) {
|
||||||
let notificationList = await R.getAll("SELECT notification.* FROM notification, monitor_notification WHERE monitor_id = ? AND monitor_notification.notification_id = notification.id ", [
|
let notificationList = await R.getAll("SELECT notification.* FROM notification, monitor_notification WHERE monitor_id = ? AND monitor_notification.notification_id = notification.id ", [
|
||||||
this.id,
|
this.id,
|
||||||
])
|
]);
|
||||||
|
|
||||||
let text;
|
let text;
|
||||||
if (bean.status === UP) {
|
if (bean.status === UP) {
|
||||||
text = "✅ Up"
|
text = "✅ Up";
|
||||||
} else {
|
} else {
|
||||||
text = "🔴 Down"
|
text = "🔴 Down";
|
||||||
}
|
}
|
||||||
|
|
||||||
let msg = `[${this.name}] [${text}] ${bean.msg}`;
|
let msg = `[${this.name}] [${text}] ${bean.msg}`;
|
||||||
|
|
||||||
for (let notification of notificationList) {
|
for (let notification of notificationList) {
|
||||||
try {
|
try {
|
||||||
await Notification.send(JSON.parse(notification.config), msg, await this.toJSON(), bean.toJSON())
|
await Notification.send(JSON.parse(notification.config), msg, await this.toJSON(), bean.toJSON());
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
console.error("Cannot send notification to " + notification.name)
|
console.error("Cannot send notification to " + notification.name);
|
||||||
|
console.log(e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Clear Status Page Cache
|
||||||
|
apicache.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
|
@ -252,29 +345,51 @@ class Monitor extends BeanModel {
|
||||||
}
|
}
|
||||||
|
|
||||||
if (bean.status === UP) {
|
if (bean.status === UP) {
|
||||||
console.info(`Monitor #${this.id} '${this.name}': Successful Response: ${bean.ping} ms | Interval: ${this.interval} seconds | Type: ${this.type}`)
|
console.info(`Monitor #${this.id} '${this.name}': Successful Response: ${bean.ping} ms | Interval: ${beatInterval} seconds | Type: ${this.type}`);
|
||||||
} else if (bean.status === PENDING) {
|
} else if (bean.status === PENDING) {
|
||||||
console.warn(`Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Type: ${this.type}`)
|
if (this.retryInterval > 0) {
|
||||||
|
beatInterval = this.retryInterval;
|
||||||
|
}
|
||||||
|
console.warn(`Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Retry: ${retries} | Retry Interval: ${beatInterval} seconds | Type: ${this.type}`);
|
||||||
} else {
|
} else {
|
||||||
console.warn(`Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Type: ${this.type}`)
|
console.warn(`Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
prometheus.update(bean, tlsInfo)
|
|
||||||
|
|
||||||
io.to(this.user_id).emit("heartbeat", bean.toJSON());
|
io.to(this.user_id).emit("heartbeat", bean.toJSON());
|
||||||
|
Monitor.sendStats(io, this.id, this.user_id);
|
||||||
|
|
||||||
await R.store(bean)
|
await R.store(bean);
|
||||||
Monitor.sendStats(io, this.id, this.user_id)
|
prometheus.update(bean, tlsInfo);
|
||||||
|
|
||||||
previousBeat = bean;
|
previousBeat = bean;
|
||||||
}
|
|
||||||
|
|
||||||
beat();
|
if (! this.isStop) {
|
||||||
this.heartbeatInterval = setInterval(beat, this.interval * 1000);
|
|
||||||
|
if (demoMode) {
|
||||||
|
if (beatInterval < 20) {
|
||||||
|
console.log("beat interval too low, reset to 20s");
|
||||||
|
beatInterval = 20;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.heartbeatInterval = setTimeout(beat, beatInterval * 1000);
|
||||||
|
}
|
||||||
|
|
||||||
|
};
|
||||||
|
|
||||||
|
// Delay Push Type
|
||||||
|
if (this.type === "push") {
|
||||||
|
setTimeout(() => {
|
||||||
|
beat();
|
||||||
|
}, this.interval * 1000);
|
||||||
|
} else {
|
||||||
|
beat();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
stop() {
|
stop() {
|
||||||
clearInterval(this.heartbeatInterval)
|
clearTimeout(this.heartbeatInterval);
|
||||||
|
this.isStop = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -311,10 +426,16 @@ class Monitor extends BeanModel {
|
||||||
}
|
}
|
||||||
|
|
||||||
static async sendStats(io, monitorID, userID) {
|
static async sendStats(io, monitorID, userID) {
|
||||||
Monitor.sendAvgPing(24, io, monitorID, userID);
|
const hasClients = getTotalClientInRoom(io, userID) > 0;
|
||||||
Monitor.sendUptime(24, io, monitorID, userID);
|
|
||||||
Monitor.sendUptime(24 * 30, io, monitorID, userID);
|
if (hasClients) {
|
||||||
Monitor.sendCertInfo(io, monitorID, userID);
|
await Monitor.sendAvgPing(24, io, monitorID, userID);
|
||||||
|
await Monitor.sendUptime(24, io, monitorID, userID);
|
||||||
|
await Monitor.sendUptime(24 * 30, io, monitorID, userID);
|
||||||
|
await Monitor.sendCertInfo(io, monitorID, userID);
|
||||||
|
} else {
|
||||||
|
debug("No clients in the room, no need to send stats");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -322,6 +443,8 @@ class Monitor extends BeanModel {
|
||||||
* @param duration : int Hours
|
* @param duration : int Hours
|
||||||
*/
|
*/
|
||||||
static async sendAvgPing(duration, io, monitorID, userID) {
|
static async sendAvgPing(duration, io, monitorID, userID) {
|
||||||
|
const timeLogger = new TimeLogger();
|
||||||
|
|
||||||
let avgPing = parseInt(await R.getCell(`
|
let avgPing = parseInt(await R.getCell(`
|
||||||
SELECT AVG(ping)
|
SELECT AVG(ping)
|
||||||
FROM heartbeat
|
FROM heartbeat
|
||||||
|
@ -332,6 +455,8 @@ class Monitor extends BeanModel {
|
||||||
monitorID,
|
monitorID,
|
||||||
]));
|
]));
|
||||||
|
|
||||||
|
timeLogger.print(`[Monitor: ${monitorID}] avgPing`);
|
||||||
|
|
||||||
io.to(userID).emit("avgPing", monitorID, avgPing);
|
io.to(userID).emit("avgPing", monitorID, avgPing);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -350,62 +475,77 @@ class Monitor extends BeanModel {
|
||||||
* https://www.uptrends.com/support/kb/reporting/calculation-of-uptime-and-downtime
|
* https://www.uptrends.com/support/kb/reporting/calculation-of-uptime-and-downtime
|
||||||
* @param duration : int Hours
|
* @param duration : int Hours
|
||||||
*/
|
*/
|
||||||
static async sendUptime(duration, io, monitorID, userID) {
|
static async calcUptime(duration, monitorID) {
|
||||||
let sec = duration * 3600;
|
const timeLogger = new TimeLogger();
|
||||||
|
|
||||||
let heartbeatList = await R.getAll(`
|
const startTime = R.isoDateTime(dayjs.utc().subtract(duration, "hour"));
|
||||||
SELECT duration, time, status
|
|
||||||
|
// Handle the case where a heartbeat's duration is longer than the target duration
|
||||||
|
// e.g. if the last beat's duration is bigger than the 24 h window, only the part of it that falls inside the window is counted (the THEN case in the SQL)
|
||||||
|
let result = await R.getRow(`
|
||||||
|
SELECT
|
||||||
|
-- SUM all duration, also trim off the beat out of time window
|
||||||
|
SUM(
|
||||||
|
CASE
|
||||||
|
WHEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400 < duration
|
||||||
|
THEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400
|
||||||
|
ELSE duration
|
||||||
|
END
|
||||||
|
) AS total_duration,
|
||||||
|
|
||||||
|
-- SUM all uptime duration, also trim off the beat out of time window
|
||||||
|
SUM(
|
||||||
|
CASE
|
||||||
|
WHEN (status = 1)
|
||||||
|
THEN
|
||||||
|
CASE
|
||||||
|
WHEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400 < duration
|
||||||
|
THEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400
|
||||||
|
ELSE duration
|
||||||
|
END
|
||||||
|
END
|
||||||
|
) AS uptime_duration
|
||||||
FROM heartbeat
|
FROM heartbeat
|
||||||
WHERE time > DATETIME('now', ? || ' hours')
|
WHERE time > ?
|
||||||
AND monitor_id = ? `, [
|
AND monitor_id = ?
|
||||||
-duration,
|
`, [
|
||||||
|
startTime, startTime, startTime, startTime, startTime,
|
||||||
monitorID,
|
monitorID,
|
||||||
]);
|
]);
|
||||||
|
|
||||||
let downtime = 0;
|
timeLogger.print(`[Monitor: ${monitorID}][${duration}] sendUptime`);
|
||||||
let total = 0;
|
|
||||||
let uptime;
|
|
||||||
|
|
||||||
// Special handle for the first heartbeat only
|
let totalDuration = result.total_duration;
|
||||||
if (heartbeatList.length === 1) {
|
let uptimeDuration = result.uptime_duration;
|
||||||
|
let uptime = 0;
|
||||||
|
|
||||||
if (heartbeatList[0].status === 1) {
|
if (totalDuration > 0) {
|
||||||
uptime = 1;
|
uptime = uptimeDuration / totalDuration;
|
||||||
} else {
|
if (uptime < 0) {
|
||||||
uptime = 0;
|
uptime = 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
for (let row of heartbeatList) {
|
// Handle new monitor with only one beat, because the beat's duration = 0
|
||||||
let value = parseInt(row.duration)
|
let status = parseInt(await R.getCell("SELECT `status` FROM heartbeat WHERE monitor_id = ?", [ monitorID ]));
|
||||||
let time = row.time
|
|
||||||
|
|
||||||
// Handle if heartbeat duration longer than the target duration
|
if (status === UP) {
|
||||||
// e.g. Heartbeat duration = 28hrs, but target duration = 24hrs
|
uptime = 1;
|
||||||
if (value > sec) {
|
|
||||||
let trim = dayjs.utc().diff(dayjs(time), "second");
|
|
||||||
value = sec - trim;
|
|
||||||
|
|
||||||
if (value < 0) {
|
|
||||||
value = 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
total += value;
|
|
||||||
if (row.status === 0 || row.status === 2) {
|
|
||||||
downtime += value;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
uptime = (total - downtime) / total;
|
|
||||||
|
|
||||||
if (uptime < 0) {
|
|
||||||
uptime = 0;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return uptime;
|
||||||
|
}
|
||||||
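A quick worked example of the calculation above, with illustrative numbers:

// Over a 24 h window the two SUMs might come back as
//   total_duration  = 86400 s
//   uptime_duration = 82800 s
// so uptime = 82800 / 86400 ≈ 0.9583, i.e. roughly 95.83 % for that window.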
|
|
||||||
|
/**
|
||||||
|
* Send Uptime
|
||||||
|
* @param duration : int Hours
|
||||||
|
*/
|
||||||
|
static async sendUptime(duration, io, monitorID, userID) {
|
||||||
|
const uptime = await this.calcUptime(duration, monitorID);
|
||||||
io.to(userID).emit("uptime", monitorID, duration, uptime);
|
io.to(userID).emit("uptime", monitorID, duration, uptime);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = Monitor;
|
module.exports = Monitor;
|
||||||
|
|
server/model/tag.js (new file, 13 lines)
|
@ -0,0 +1,13 @@
|
||||||
|
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||||
|
|
||||||
|
class Tag extends BeanModel {
|
||||||
|
toJSON() {
|
||||||
|
return {
|
||||||
|
id: this._id,
|
||||||
|
name: this._name,
|
||||||
|
color: this._color,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Tag;
|
server/modules/apicache/apicache.js (new file, 749 lines)
|
@ -0,0 +1,749 @@
|
||||||
|
let url = require("url");
|
||||||
|
let MemoryCache = require("./memory-cache");
|
||||||
|
|
||||||
|
let t = {
|
||||||
|
ms: 1,
|
||||||
|
second: 1000,
|
||||||
|
minute: 60000,
|
||||||
|
hour: 3600000,
|
||||||
|
day: 3600000 * 24,
|
||||||
|
week: 3600000 * 24 * 7,
|
||||||
|
month: 3600000 * 24 * 30,
|
||||||
|
};
|
||||||
|
|
||||||
|
let instances = [];
|
||||||
|
|
||||||
|
let matches = function (a) {
|
||||||
|
return function (b) {
|
||||||
|
return a === b;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let doesntMatch = function (a) {
|
||||||
|
return function (b) {
|
||||||
|
return !matches(a)(b);
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let logDuration = function (d, prefix) {
|
||||||
|
let str = d > 1000 ? (d / 1000).toFixed(2) + "sec" : d + "ms";
|
||||||
|
return "\x1b[33m- " + (prefix ? prefix + " " : "") + str + "\x1b[0m";
|
||||||
|
};
|
||||||
|
|
||||||
|
function getSafeHeaders(res) {
|
||||||
|
return res.getHeaders ? res.getHeaders() : res._headers;
|
||||||
|
}
|
||||||
|
|
||||||
|
function ApiCache() {
|
||||||
|
let memCache = new MemoryCache();
|
||||||
|
|
||||||
|
let globalOptions = {
|
||||||
|
debug: false,
|
||||||
|
defaultDuration: 3600000,
|
||||||
|
enabled: true,
|
||||||
|
appendKey: [],
|
||||||
|
jsonp: false,
|
||||||
|
redisClient: false,
|
||||||
|
headerBlacklist: [],
|
||||||
|
statusCodes: {
|
||||||
|
include: [],
|
||||||
|
exclude: [],
|
||||||
|
},
|
||||||
|
events: {
|
||||||
|
expire: undefined,
|
||||||
|
},
|
||||||
|
headers: {
|
||||||
|
// 'cache-control': 'no-cache' // example of header overwrite
|
||||||
|
},
|
||||||
|
trackPerformance: false,
|
||||||
|
respectCacheControl: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
let middlewareOptions = [];
|
||||||
|
let instance = this;
|
||||||
|
let index = null;
|
||||||
|
let timers = {};
|
||||||
|
let performanceArray = []; // for tracking cache hit rate
|
||||||
|
|
||||||
|
instances.push(this);
|
||||||
|
this.id = instances.length;
|
||||||
|
|
||||||
|
function debug(a, b, c, d) {
|
||||||
|
let arr = ["\x1b[36m[apicache]\x1b[0m", a, b, c, d].filter(function (arg) {
|
||||||
|
return arg !== undefined;
|
||||||
|
});
|
||||||
|
let debugEnv = process.env.DEBUG && process.env.DEBUG.split(",").indexOf("apicache") !== -1;
|
||||||
|
|
||||||
|
return (globalOptions.debug || debugEnv) && console.log.apply(null, arr);
|
||||||
|
}
|
||||||
|
|
||||||
|
function shouldCacheResponse(request, response, toggle) {
|
||||||
|
let opt = globalOptions;
|
||||||
|
let codes = opt.statusCodes;
|
||||||
|
|
||||||
|
if (!response) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (toggle && !toggle(request, response)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (codes.exclude && codes.exclude.length && codes.exclude.indexOf(response.statusCode) !== -1) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
if (codes.include && codes.include.length && codes.include.indexOf(response.statusCode) === -1) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
function addIndexEntries(key, req) {
|
||||||
|
let groupName = req.apicacheGroup;
|
||||||
|
|
||||||
|
if (groupName) {
|
||||||
|
debug("group detected \"" + groupName + "\"");
|
||||||
|
let group = (index.groups[groupName] = index.groups[groupName] || []);
|
||||||
|
group.unshift(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
index.all.unshift(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
function filterBlacklistedHeaders(headers) {
|
||||||
|
return Object.keys(headers)
|
||||||
|
.filter(function (key) {
|
||||||
|
return globalOptions.headerBlacklist.indexOf(key) === -1;
|
||||||
|
})
|
||||||
|
.reduce(function (acc, header) {
|
||||||
|
acc[header] = headers[header];
|
||||||
|
return acc;
|
||||||
|
}, {});
|
||||||
|
}
|
||||||
|
|
||||||
|
function createCacheObject(status, headers, data, encoding) {
|
||||||
|
return {
|
||||||
|
status: status,
|
||||||
|
headers: filterBlacklistedHeaders(headers),
|
||||||
|
data: data,
|
||||||
|
encoding: encoding,
|
||||||
|
timestamp: new Date().getTime() / 1000, // seconds since epoch. This is used to properly decrement max-age headers in cached responses.
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function cacheResponse(key, value, duration) {
|
||||||
|
let redis = globalOptions.redisClient;
|
||||||
|
let expireCallback = globalOptions.events.expire;
|
||||||
|
|
||||||
|
if (redis && redis.connected) {
|
||||||
|
try {
|
||||||
|
redis.hset(key, "response", JSON.stringify(value));
|
||||||
|
redis.hset(key, "duration", duration);
|
||||||
|
redis.expire(key, duration / 1000, expireCallback || function () {});
|
||||||
|
} catch (err) {
|
||||||
|
debug("[apicache] error in redis.hset()");
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
memCache.add(key, value, duration, expireCallback);
|
||||||
|
}
|
||||||
|
|
||||||
|
// add automatic cache clearing from duration, includes max limit on setTimeout
|
||||||
|
timers[key] = setTimeout(function () {
|
||||||
|
instance.clear(key, true);
|
||||||
|
}, Math.min(duration, 2147483647));
|
||||||
|
}
|
||||||
|
|
||||||
|
function accumulateContent(res, content) {
|
||||||
|
if (content) {
|
||||||
|
if (typeof content == "string") {
|
||||||
|
res._apicache.content = (res._apicache.content || "") + content;
|
||||||
|
} else if (Buffer.isBuffer(content)) {
|
||||||
|
let oldContent = res._apicache.content;
|
||||||
|
|
||||||
|
if (typeof oldContent === "string") {
|
||||||
|
oldContent = !Buffer.from ? new Buffer(oldContent) : Buffer.from(oldContent);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!oldContent) {
|
||||||
|
oldContent = !Buffer.alloc ? new Buffer(0) : Buffer.alloc(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
res._apicache.content = Buffer.concat(
|
||||||
|
[oldContent, content],
|
||||||
|
oldContent.length + content.length
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
res._apicache.content = content;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeResponseCacheable(req, res, next, key, duration, strDuration, toggle) {
|
||||||
|
// monkeypatch res.end to create cache object
|
||||||
|
res._apicache = {
|
||||||
|
write: res.write,
|
||||||
|
writeHead: res.writeHead,
|
||||||
|
end: res.end,
|
||||||
|
cacheable: true,
|
||||||
|
content: undefined,
|
||||||
|
};
|
||||||
|
|
||||||
|
// append header overwrites if applicable
|
||||||
|
Object.keys(globalOptions.headers).forEach(function (name) {
|
||||||
|
res.setHeader(name, globalOptions.headers[name]);
|
||||||
|
});
|
||||||
|
|
||||||
|
res.writeHead = function () {
|
||||||
|
// add cache control headers
|
||||||
|
if (!globalOptions.headers["cache-control"]) {
|
||||||
|
if (shouldCacheResponse(req, res, toggle)) {
|
||||||
|
res.setHeader("cache-control", "max-age=" + (duration / 1000).toFixed(0));
|
||||||
|
} else {
|
||||||
|
res.setHeader("cache-control", "no-cache, no-store, must-revalidate");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
res._apicache.headers = Object.assign({}, getSafeHeaders(res));
|
||||||
|
return res._apicache.writeHead.apply(this, arguments);
|
||||||
|
};
|
||||||
|
|
||||||
|
// patch res.write
|
||||||
|
res.write = function (content) {
|
||||||
|
accumulateContent(res, content);
|
||||||
|
return res._apicache.write.apply(this, arguments);
|
||||||
|
};
|
||||||
|
|
||||||
|
// patch res.end
|
||||||
|
res.end = function (content, encoding) {
|
||||||
|
if (shouldCacheResponse(req, res, toggle)) {
|
||||||
|
accumulateContent(res, content);
|
||||||
|
|
||||||
|
if (res._apicache.cacheable && res._apicache.content) {
|
||||||
|
addIndexEntries(key, req);
|
||||||
|
let headers = res._apicache.headers || getSafeHeaders(res);
|
||||||
|
let cacheObject = createCacheObject(
|
||||||
|
res.statusCode,
|
||||||
|
headers,
|
||||||
|
res._apicache.content,
|
||||||
|
encoding
|
||||||
|
);
|
||||||
|
cacheResponse(key, cacheObject, duration);
|
||||||
|
|
||||||
|
// display log entry
|
||||||
|
let elapsed = new Date() - req.apicacheTimer;
|
||||||
|
debug("adding cache entry for \"" + key + "\" @ " + strDuration, logDuration(elapsed));
|
||||||
|
debug("_apicache.headers: ", res._apicache.headers);
|
||||||
|
debug("res.getHeaders(): ", getSafeHeaders(res));
|
||||||
|
debug("cacheObject: ", cacheObject);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return res._apicache.end.apply(this, arguments);
|
||||||
|
};
|
||||||
|
|
||||||
|
next();
|
||||||
|
}
|
||||||
|
|
||||||
|
function sendCachedResponse(request, response, cacheObject, toggle, next, duration) {
|
||||||
|
if (toggle && !toggle(request, response)) {
|
||||||
|
return next();
|
||||||
|
}
|
||||||
|
|
||||||
|
let headers = getSafeHeaders(response);
|
||||||
|
|
||||||
|
// Modified by @louislam, removed Cache-control, since I don't need client side cache!
|
||||||
|
// Original Source: https://github.com/kwhitley/apicache/blob/0d5686cc21fad353c6dddee646288c2fca3e4f50/src/apicache.js#L254
|
||||||
|
Object.assign(headers, filterBlacklistedHeaders(cacheObject.headers || {}));
|
||||||
|
|
||||||
|
// only embed apicache headers when not in production environment
|
||||||
|
if (process.env.NODE_ENV !== "production") {
|
||||||
|
Object.assign(headers, {
|
||||||
|
"apicache-store": globalOptions.redisClient ? "redis" : "memory",
|
||||||
|
"apicache-version": "1.6.2-modified",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// unstringify buffers
|
||||||
|
let data = cacheObject.data;
|
||||||
|
if (data && data.type === "Buffer") {
|
||||||
|
data =
|
||||||
|
typeof data.data === "number" ? new Buffer.alloc(data.data) : new Buffer.from(data.data);
|
||||||
|
}
|
||||||
|
|
||||||
|
// test Etag against If-None-Match for 304
|
||||||
|
let cachedEtag = cacheObject.headers.etag;
|
||||||
|
let requestEtag = request.headers["if-none-match"];
|
||||||
|
|
||||||
|
if (requestEtag && cachedEtag === requestEtag) {
|
||||||
|
response.writeHead(304, headers);
|
||||||
|
return response.end();
|
||||||
|
}
|
||||||
|
|
||||||
|
response.writeHead(cacheObject.status || 200, headers);
|
||||||
|
|
||||||
|
return response.end(data, cacheObject.encoding);
|
||||||
|
}
|
||||||
|
|
||||||
|
function syncOptions() {
|
||||||
|
for (let i in middlewareOptions) {
|
||||||
|
Object.assign(middlewareOptions[i].options, globalOptions, middlewareOptions[i].localOptions);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.clear = function (target, isAutomatic) {
|
||||||
|
let group = index.groups[target];
|
||||||
|
let redis = globalOptions.redisClient;
|
||||||
|
|
||||||
|
if (group) {
|
||||||
|
debug("clearing group \"" + target + "\"");
|
||||||
|
|
||||||
|
group.forEach(function (key) {
|
||||||
|
debug("clearing cached entry for \"" + key + "\"");
|
||||||
|
clearTimeout(timers[key]);
|
||||||
|
delete timers[key];
|
||||||
|
if (!globalOptions.redisClient) {
|
||||||
|
memCache.delete(key);
|
||||||
|
} else {
|
||||||
|
try {
|
||||||
|
redis.del(key);
|
||||||
|
} catch (err) {
|
||||||
|
console.log("[apicache] error in redis.del(\"" + key + "\")");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
index.all = index.all.filter(doesntMatch(key));
|
||||||
|
});
|
||||||
|
|
||||||
|
delete index.groups[target];
|
||||||
|
} else if (target) {
|
||||||
|
debug("clearing " + (isAutomatic ? "expired" : "cached") + " entry for \"" + target + "\"");
|
||||||
|
clearTimeout(timers[target]);
|
||||||
|
delete timers[target];
|
||||||
|
// clear actual cached entry
|
||||||
|
if (!redis) {
|
||||||
|
memCache.delete(target);
|
||||||
|
} else {
|
||||||
|
try {
|
||||||
|
redis.del(target);
|
||||||
|
} catch (err) {
|
||||||
|
console.log("[apicache] error in redis.del(\"" + target + "\")");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove from global index
|
||||||
|
index.all = index.all.filter(doesntMatch(target));
|
||||||
|
|
||||||
|
// remove target from each group that it may exist in
|
||||||
|
Object.keys(index.groups).forEach(function (groupName) {
|
||||||
|
index.groups[groupName] = index.groups[groupName].filter(doesntMatch(target));
|
||||||
|
|
||||||
|
// delete group if now empty
|
||||||
|
if (!index.groups[groupName].length) {
|
||||||
|
delete index.groups[groupName];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
debug("clearing entire index");
|
||||||
|
|
||||||
|
if (!redis) {
|
||||||
|
memCache.clear();
|
||||||
|
} else {
|
||||||
|
// clear redis keys one by one from internal index to prevent clearing non-apicache entries
|
||||||
|
index.all.forEach(function (key) {
|
||||||
|
clearTimeout(timers[key]);
|
||||||
|
delete timers[key];
|
||||||
|
try {
|
||||||
|
redis.del(key);
|
||||||
|
} catch (err) {
|
||||||
|
console.log("[apicache] error in redis.del(\"" + key + "\")");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
this.resetIndex();
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.getIndex();
|
||||||
|
};
|
||||||
|
|
||||||
|
function parseDuration(duration, defaultDuration) {
|
||||||
|
if (typeof duration === "number") {
|
||||||
|
return duration;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof duration === "string") {
|
||||||
|
let split = duration.match(/^([\d\.,]+)\s?(\w+)$/);
|
||||||
|
|
||||||
|
if (split.length === 3) {
|
||||||
|
let len = parseFloat(split[1]);
|
||||||
|
let unit = split[2].replace(/s$/i, "").toLowerCase();
|
||||||
|
if (unit === "m") {
|
||||||
|
unit = "ms";
|
||||||
|
}
|
||||||
|
|
||||||
|
return (len || 1) * (t[unit] || 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return defaultDuration;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.getDuration = function (duration) {
|
||||||
|
return parseDuration(duration, globalOptions.defaultDuration);
|
||||||
|
};
|
||||||
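Example inputs and outputs for the duration parsing above; apicacheInstance is a placeholder for the ApiCache instance, and the values follow from the unit table at the top of this file.

// apicacheInstance.getDuration(90000)          -> 90000   (numbers pass through as ms)
// apicacheInstance.getDuration("10 seconds")   -> 10000
// apicacheInstance.getDuration("5 minutes")    -> 300000
// apicacheInstance.getDuration("1 hour")       -> 3600000
// Note: a bare "m" is normalised to "ms" above, so it means milliseconds, not minutes.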
|
|
||||||
|
/**
|
||||||
|
* Return cache performance statistics (hit rate). Suitable for putting into a route:
|
||||||
|
* <code>
|
||||||
|
* app.get('/api/cache/performance', (req, res) => {
|
||||||
|
* res.json(apicache.getPerformance())
|
||||||
|
* })
|
||||||
|
* </code>
|
||||||
|
*/
|
||||||
|
this.getPerformance = function () {
|
||||||
|
return performanceArray.map(function (p) {
|
||||||
|
return p.report();
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
this.getIndex = function (group) {
|
||||||
|
if (group) {
|
||||||
|
return index.groups[group];
|
||||||
|
} else {
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
this.middleware = function cache(strDuration, middlewareToggle, localOptions) {
|
||||||
|
let duration = instance.getDuration(strDuration);
|
||||||
|
let opt = {};
|
||||||
|
|
||||||
|
middlewareOptions.push({
|
||||||
|
options: opt,
|
||||||
|
});
|
||||||
|
|
||||||
|
let options = function (localOptions) {
|
||||||
|
if (localOptions) {
|
||||||
|
middlewareOptions.find(function (middleware) {
|
||||||
|
return middleware.options === opt;
|
||||||
|
}).localOptions = localOptions;
|
||||||
|
}
|
||||||
|
|
||||||
|
syncOptions();
|
||||||
|
|
||||||
|
return opt;
|
||||||
|
};
|
||||||
|
|
||||||
|
options(localOptions);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A no-op stand-in used when performance tracking is disabled
|
||||||
|
*/
|
||||||
|
function NOOPCachePerformance() {
|
||||||
|
this.report = this.hit = this.miss = function () {}; // noop;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A function for tracking and reporting hit rate. These statistics are returned by the getPerformance() call above.
|
||||||
|
*/
|
||||||
|
function CachePerformance() {
|
||||||
|
/**
|
||||||
|
* Tracks the hit rate for the last 100 requests.
|
||||||
|
* If there have been fewer than 100 requests, the hit rate just considers the requests that have happened.
|
||||||
|
*/
|
||||||
|
this.hitsLast100 = new Uint8Array(100 / 4); // each hit is 2 bits
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tracks the hit rate for the last 1000 requests.
|
||||||
|
* If there have been fewer than 1000 requests, the hit rate just considers the requests that have happened.
|
||||||
|
*/
|
||||||
|
this.hitsLast1000 = new Uint8Array(1000 / 4); // each hit is 2 bits
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tracks the hit rate for the last 10000 requests.
|
||||||
|
* If there have been fewer than 10000 requests, the hit rate just considers the requests that have happened.
|
||||||
|
*/
|
||||||
|
this.hitsLast10000 = new Uint8Array(10000 / 4); // each hit is 2 bits
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tracks the hit rate for the last 100000 requests.
|
||||||
|
* If there have been fewer than 100000 requests, the hit rate just considers the requests that have happened.
|
||||||
|
*/
|
||||||
|
this.hitsLast100000 = new Uint8Array(100000 / 4); // each hit is 2 bits
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The number of calls that have passed through the middleware since the server started.
|
||||||
|
*/
|
||||||
|
this.callCount = 0;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The total number of hits since the server started
|
||||||
|
*/
|
||||||
|
this.hitCount = 0;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The key from the last cache hit. This is useful in identifying which route these statistics apply to.
|
||||||
|
*/
|
||||||
|
this.lastCacheHit = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The key from the last cache miss. This is useful in identifying which route these statistics apply to.
|
||||||
|
*/
|
||||||
|
this.lastCacheMiss = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return performance statistics
|
||||||
|
*/
|
||||||
|
this.report = function () {
|
||||||
|
return {
|
||||||
|
lastCacheHit: this.lastCacheHit,
|
||||||
|
lastCacheMiss: this.lastCacheMiss,
|
||||||
|
callCount: this.callCount,
|
||||||
|
hitCount: this.hitCount,
|
||||||
|
missCount: this.callCount - this.hitCount,
|
||||||
|
hitRate: this.callCount == 0 ? null : this.hitCount / this.callCount,
|
||||||
|
hitRateLast100: this.hitRate(this.hitsLast100),
|
||||||
|
hitRateLast1000: this.hitRate(this.hitsLast1000),
|
||||||
|
hitRateLast10000: this.hitRate(this.hitsLast10000),
|
||||||
|
hitRateLast100000: this.hitRate(this.hitsLast100000),
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Computes a cache hit rate from an array of hits and misses.
|
||||||
|
* @param {Uint8Array} array An array representing hits and misses.
|
||||||
|
* @returns a number between 0 and 1, or null if the array has no hits or misses
|
||||||
|
*/
|
||||||
|
this.hitRate = function (array) {
|
||||||
|
let hits = 0;
|
||||||
|
let misses = 0;
|
||||||
|
for (let i = 0; i < array.length; i++) {
|
||||||
|
let n8 = array[i];
|
||||||
|
for (let j = 0; j < 4; j++) {
|
||||||
|
switch (n8 & 3) {
|
||||||
|
case 1:
|
||||||
|
hits++;
|
||||||
|
break;
|
||||||
|
case 2:
|
||||||
|
misses++;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
n8 >>= 2;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let total = hits + misses;
|
||||||
|
if (total == 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return hits / total;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record a hit or miss in the given array. It will be recorded at a position determined
|
||||||
|
* by the current value of the callCount variable.
|
||||||
|
* @param {Uint8Array} array An array representing hits and misses.
|
||||||
|
* @param {boolean} hit true for a hit, false for a miss
|
||||||
|
* Each element in the array is 8 bits, and encodes 4 hit/miss records.
|
||||||
|
* Each hit or miss is encoded as two bits as follows:
|
||||||
|
* 00 means no hit or miss has been recorded in these bits
|
||||||
|
* 01 encodes a hit
|
||||||
|
* 10 encodes a miss
|
||||||
|
*/
|
||||||
|
this.recordHitInArray = function (array, hit) {
|
||||||
|
let arrayIndex = ~~(this.callCount / 4) % array.length;
|
||||||
|
let bitOffset = (this.callCount % 4) * 2; // 2 bits per record, 4 records per uint8 array element
|
||||||
|
let clearMask = ~(3 << bitOffset);
|
||||||
|
let record = (hit ? 1 : 2) << bitOffset;
|
||||||
|
array[arrayIndex] = (array[arrayIndex] & clearMask) | record;
|
||||||
|
};
|
||||||
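A worked example of the bit bookkeeping above, with illustrative values:

// With callCount = 5 and the 25-element hitsLast100 array:
//   arrayIndex = ~~(5 / 4) % 25 = 1
//   bitOffset  = (5 % 4) * 2    = 2
// A hit writes the pattern 01 into bits 2-3 of array[1]; a miss would write 10 there.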
|
|
||||||
|
/**
|
||||||
|
* Records the hit or miss in the tracking arrays and increments the call count.
|
||||||
|
* @param {boolean} hit true records a hit, false records a miss
|
||||||
|
*/
|
||||||
|
this.recordHit = function (hit) {
|
||||||
|
this.recordHitInArray(this.hitsLast100, hit);
|
||||||
|
this.recordHitInArray(this.hitsLast1000, hit);
|
||||||
|
this.recordHitInArray(this.hitsLast10000, hit);
|
||||||
|
this.recordHitInArray(this.hitsLast100000, hit);
|
||||||
|
if (hit) {
|
||||||
|
this.hitCount++;
|
||||||
|
}
|
||||||
|
this.callCount++;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Records a hit event, setting lastCacheHit to the given key
|
||||||
|
* @param {string} key The key that had the cache hit
|
||||||
|
*/
|
||||||
|
this.hit = function (key) {
|
||||||
|
this.recordHit(true);
|
||||||
|
this.lastCacheHit = key;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Records a miss event, setting lastCacheMiss to the given key
|
||||||
|
* @param {string} key The key that had the cache miss
|
||||||
|
*/
|
||||||
|
this.miss = function (key) {
|
||||||
|
this.recordHit(false);
|
||||||
|
this.lastCacheMiss = key;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
        let perf = globalOptions.trackPerformance ? new CachePerformance() : new NOOPCachePerformance();

        performanceArray.push(perf);

        let cache = function (req, res, next) {
            function bypass() {
                debug("bypass detected, skipping cache.");
                return next();
            }

            // initial bypass chances
            if (!opt.enabled) {
                return bypass();
            }
            if (
                req.headers["x-apicache-bypass"] ||
                req.headers["x-apicache-force-fetch"] ||
                (opt.respectCacheControl && req.headers["cache-control"] == "no-cache")
            ) {
                return bypass();
            }

            // REMOVED IN 0.11.1 TO CORRECT MIDDLEWARE TOGGLE EXECUTE ORDER
            // if (typeof middlewareToggle === 'function') {
            //     if (!middlewareToggle(req, res)) return bypass()
            // } else if (middlewareToggle !== undefined && !middlewareToggle) {
            //     return bypass()
            // }

            // embed timer
            req.apicacheTimer = new Date();

            // In Express 4.x the url is ambiguous based on where a router is mounted. originalUrl will give the full URL
            let key = req.originalUrl || req.url;

            // Remove querystring from key if jsonp option is enabled
            if (opt.jsonp) {
                key = url.parse(key).pathname;
            }

            // add appendKey (either custom function or response path)
            if (typeof opt.appendKey === "function") {
                key += "$$appendKey=" + opt.appendKey(req, res);
            } else if (opt.appendKey.length > 0) {
                let appendKey = req;

                for (let i = 0; i < opt.appendKey.length; i++) {
                    appendKey = appendKey[opt.appendKey[i]];
                }
                key += "$$appendKey=" + appendKey;
            }

            // attempt cache hit
            let redis = opt.redisClient;
            let cached = !redis ? memCache.getValue(key) : null;

            // send if cache hit from memory-cache
            if (cached) {
                let elapsed = new Date() - req.apicacheTimer;
                debug("sending cached (memory-cache) version of", key, logDuration(elapsed));

                perf.hit(key);
                return sendCachedResponse(req, res, cached, middlewareToggle, next, duration);
            }

            // send if cache hit from redis
            if (redis && redis.connected) {
                try {
                    redis.hgetall(key, function (err, obj) {
                        if (!err && obj && obj.response) {
                            let elapsed = new Date() - req.apicacheTimer;
                            debug("sending cached (redis) version of", key, logDuration(elapsed));

                            perf.hit(key);
                            return sendCachedResponse(
                                req,
                                res,
                                JSON.parse(obj.response),
                                middlewareToggle,
                                next,
                                duration
                            );
                        } else {
                            perf.miss(key);
                            return makeResponseCacheable(
                                req,
                                res,
                                next,
                                key,
                                duration,
                                strDuration,
                                middlewareToggle
                            );
                        }
                    });
                } catch (err) {
                    // bypass redis on error
                    perf.miss(key);
                    return makeResponseCacheable(req, res, next, key, duration, strDuration, middlewareToggle);
                }
            } else {
                perf.miss(key);
                return makeResponseCacheable(req, res, next, key, duration, strDuration, middlewareToggle);
            }
        };

        cache.options = options;

        return cache;
    };

    this.options = function (options) {
        if (options) {
            Object.assign(globalOptions, options);
            syncOptions();

            if ("defaultDuration" in options) {
                // Convert the default duration to a number in milliseconds (if needed)
                globalOptions.defaultDuration = parseDuration(globalOptions.defaultDuration, 3600000);
            }

            if (globalOptions.trackPerformance) {
                debug("WARNING: using trackPerformance flag can cause high memory usage!");
            }

            return this;
        } else {
            return globalOptions;
        }
    };

    this.resetIndex = function () {
        index = {
            all: [],
            groups: {},
        };
    };

    this.newInstance = function (config) {
        let instance = new ApiCache();

        if (config) {
            instance.options(config);
        }

        return instance;
    };

    this.clone = function () {
        return this.newInstance(this.options());
    };

    // initialize index
    this.resetIndex();
}

module.exports = new ApiCache();
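For reference, the hit/miss bookkeeping in CachePerformance above can be exercised on its own. The sketch below is not part of this commit; it re-implements the same 2-bit packing (01 = hit, 10 = miss, 00 = empty) with a made-up array size and call sequence:

// Standalone sketch of the 2-bit hit/miss encoding described above (illustrative only).
const hits = new Uint8Array(25);   // 25 bytes * 4 records = last 100 calls (size chosen for the example)
let callCount = 0;

function record(array, hit) {
    const arrayIndex = ~~(callCount / 4) % array.length;
    const bitOffset = (callCount % 4) * 2;    // 2 bits per record, 4 records per byte
    array[arrayIndex] = (array[arrayIndex] & ~(3 << bitOffset)) | ((hit ? 1 : 2) << bitOffset);
    callCount++;
}

function hitRate(array) {
    let h = 0;
    let m = 0;
    for (let n8 of array) {
        for (let j = 0; j < 4; j++) {
            if ((n8 & 3) === 1) {
                h++;
            } else if ((n8 & 3) === 2) {
                m++;
            }
            n8 >>= 2;
        }
    }
    return (h + m) === 0 ? null : h / (h + m);
}

record(hits, true);
record(hits, true);
record(hits, false);
console.log(hitRate(hits)); // 0.666... (two hits, one miss)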
14
server/modules/apicache/index.js
Normal file
@@ -0,0 +1,14 @@
const apicache = require("./apicache");

apicache.options({
    headerBlacklist: [
        "cache-control"
    ],
    headers: {
        // Disable client side cache, only server side cache.
        // BUG! Not working for the second request
        "cache-control": "no-cache",
    },
});

module.exports = apicache;
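A minimal usage sketch, not part of this commit: mounting the pre-configured instance above as Express middleware. The route path is made up; the only API relied on is apicache.middleware(duration), whose implementation appears in apicache.js above.

// Usage sketch: cache an Express route for 5 minutes via the wrapper exported above.
const express = require("express");
const apicache = require("./server/modules/apicache"); // path assumed for illustration

const app = express();
const cache = apicache.middleware;

// Hypothetical endpoint; responses are cached server side for 5 minutes.
app.get("/api/example-status", cache("5 minutes"), (req, res) => {
    res.json({ ok: true, time: new Date().toISOString() });
});

app.listen(3000);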
59
server/modules/apicache/memory-cache.js
Normal file
@@ -0,0 +1,59 @@
function MemoryCache() {
    this.cache = {};
    this.size = 0;
}

MemoryCache.prototype.add = function (key, value, time, timeoutCallback) {
    let old = this.cache[key];
    let instance = this;

    let entry = {
        value: value,
        expire: time + Date.now(),
        timeout: setTimeout(function () {
            instance.delete(key);
            return timeoutCallback && typeof timeoutCallback === "function" && timeoutCallback(value, key);
        }, time)
    };

    this.cache[key] = entry;
    this.size = Object.keys(this.cache).length;

    return entry;
};

MemoryCache.prototype.delete = function (key) {
    let entry = this.cache[key];

    if (entry) {
        clearTimeout(entry.timeout);
    }

    delete this.cache[key];

    this.size = Object.keys(this.cache).length;

    return null;
};

MemoryCache.prototype.get = function (key) {
    let entry = this.cache[key];

    return entry;
};

MemoryCache.prototype.getValue = function (key) {
    let entry = this.get(key);

    return entry && entry.value;
};

MemoryCache.prototype.clear = function () {
    Object.keys(this.cache).forEach(function (key) {
        this.delete(key);
    }, this);

    return true;
};

module.exports = MemoryCache;
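A short sketch (not part of this commit) of how this MemoryCache behaves, using a made-up key and a 1-second TTL; each entry evicts itself through the setTimeout stored on it:

// add() stores a value with a timeout, getValue() returns it until delete() runs or the timer fires.
const MemoryCache = require("./server/modules/apicache/memory-cache"); // path assumed for illustration

const cache = new MemoryCache();
cache.add("greeting", "hello", 1000, (value, key) => {
    console.log(`expired: ${key}`); // invoked by the entry's timeout after ~1 second
});

console.log(cache.getValue("greeting")); // "hello"
console.log(cache.size);                 // 1

setTimeout(() => {
    console.log(cache.getValue("greeting")); // undefined, the entry deleted itself
}, 1500);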
26
server/notification-providers/apprise.js
Normal file
@@ -0,0 +1,26 @@
const NotificationProvider = require("./notification-provider");
const child_process = require("child_process");

class Apprise extends NotificationProvider {

    name = "apprise";

    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let s = child_process.spawnSync("apprise", [ "-vv", "-b", msg, notification.appriseURL])

        let output = (s.stdout) ? s.stdout.toString() : "ERROR: maybe apprise not found";

        if (output) {

            if (! output.includes("ERROR")) {
                return "Sent Successfully";
            }

            throw new Error(output)
        } else {
            return "No output from apprise";
        }
    }
}

module.exports = Apprise;
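A sketch of invoking this provider directly, with placeholder values; it assumes the apprise CLI is installed and on PATH, since send() shells out to it via spawnSync:

// Illustrative only: the Apprise URL below is a placeholder, not a real target.
const Apprise = require("./server/notification-providers/apprise"); // path assumed for illustration

const provider = new Apprise();
provider.send({ appriseURL: "mailto://user:pass@example.com" }, "Test message from Uptime Kuma")
    .then(console.log)      // "Sent Successfully" when the apprise output contains no ERROR
    .catch(console.error);  // Error carrying the apprise output otherwise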
115
server/notification-providers/discord.js
Normal file
@@ -0,0 +1,115 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
const { DOWN, UP } = require("../../src/util");
|
||||||
|
|
||||||
|
class Discord extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "discord";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
|
||||||
|
try {
|
||||||
|
const discordDisplayName = notification.discordUsername || "Uptime Kuma";
|
||||||
|
|
||||||
|
// If heartbeatJSON is null, assume we're testing.
|
||||||
|
if (heartbeatJSON == null) {
|
||||||
|
let discordtestdata = {
|
||||||
|
username: discordDisplayName,
|
||||||
|
content: msg,
|
||||||
|
}
|
||||||
|
await axios.post(notification.discordWebhookUrl, discordtestdata)
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
|
||||||
|
let url;
|
||||||
|
|
||||||
|
if (monitorJSON["type"] === "port") {
|
||||||
|
url = monitorJSON["hostname"];
|
||||||
|
if (monitorJSON["port"]) {
|
||||||
|
url += ":" + monitorJSON["port"];
|
||||||
|
}
|
||||||
|
|
||||||
|
} else {
|
||||||
|
url = monitorJSON["url"];
|
||||||
|
}
|
||||||
|
|
||||||
|
// If heartbeatJSON is not null, we go into the normal alerting loop.
|
||||||
|
if (heartbeatJSON["status"] == DOWN) {
|
||||||
|
let discorddowndata = {
|
||||||
|
username: discordDisplayName,
|
||||||
|
embeds: [{
|
||||||
|
title: "❌ Your service " + monitorJSON["name"] + " went down. ❌",
|
||||||
|
color: 16711680,
|
||||||
|
timestamp: heartbeatJSON["time"],
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
name: "Service Name",
|
||||||
|
value: monitorJSON["name"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Service URL",
|
||||||
|
value: url,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Time (UTC)",
|
||||||
|
value: heartbeatJSON["time"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Error",
|
||||||
|
value: heartbeatJSON["msg"],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}],
|
||||||
|
}
|
||||||
|
|
||||||
|
if (notification.discordPrefixMessage) {
|
||||||
|
discorddowndata.content = notification.discordPrefixMessage;
|
||||||
|
}
|
||||||
|
|
||||||
|
await axios.post(notification.discordWebhookUrl, discorddowndata)
|
||||||
|
return okMsg;
|
||||||
|
|
||||||
|
} else if (heartbeatJSON["status"] == UP) {
|
||||||
|
let discordupdata = {
|
||||||
|
username: discordDisplayName,
|
||||||
|
embeds: [{
|
||||||
|
title: "✅ Your service " + monitorJSON["name"] + " is up! ✅",
|
||||||
|
color: 65280,
|
||||||
|
timestamp: heartbeatJSON["time"],
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
name: "Service Name",
|
||||||
|
value: monitorJSON["name"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Service URL",
|
||||||
|
value: url.startsWith("http") ? "[Visit Service](" + url + ")" : url,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Time (UTC)",
|
||||||
|
value: heartbeatJSON["time"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Ping",
|
||||||
|
value: heartbeatJSON["ping"] + "ms",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}],
|
||||||
|
}
|
||||||
|
|
||||||
|
if (notification.discordPrefixMessage) {
|
||||||
|
discordupdata.content = notification.discordPrefixMessage;
|
||||||
|
}
|
||||||
|
|
||||||
|
await axios.post(notification.discordWebhookUrl, discordupdata)
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Discord;
|
83
server/notification-providers/feishu.js
Normal file
@@ -0,0 +1,83 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
const { DOWN, UP } = require("../../src/util");
|
||||||
|
|
||||||
|
class Feishu extends NotificationProvider {
|
||||||
|
name = "Feishu";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
let feishuWebHookUrl = notification.feishuWebHookUrl;
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (heartbeatJSON == null) {
|
||||||
|
let testdata = {
|
||||||
|
msg_type: "text",
|
||||||
|
content: {
|
||||||
|
text: msg,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
await axios.post(feishuWebHookUrl, testdata);
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (heartbeatJSON["status"] == DOWN) {
|
||||||
|
let downdata = {
|
||||||
|
msg_type: "post",
|
||||||
|
content: {
|
||||||
|
post: {
|
||||||
|
zh_cn: {
|
||||||
|
title: "UptimeKuma Alert: " + monitorJSON["name"],
|
||||||
|
content: [
|
||||||
|
[
|
||||||
|
{
|
||||||
|
tag: "text",
|
||||||
|
text:
|
||||||
|
"[Down] " +
|
||||||
|
heartbeatJSON["msg"] +
|
||||||
|
"\nTime (UTC): " +
|
||||||
|
heartbeatJSON["time"],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
await axios.post(feishuWebHookUrl, downdata);
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (heartbeatJSON["status"] == UP) {
|
||||||
|
let updata = {
|
||||||
|
msg_type: "post",
|
||||||
|
content: {
|
||||||
|
post: {
|
||||||
|
zh_cn: {
|
||||||
|
title: "UptimeKuma Alert: " + monitorJSON["name"],
|
||||||
|
content: [
|
||||||
|
[
|
||||||
|
{
|
||||||
|
tag: "text",
|
||||||
|
text:
|
||||||
|
"[Up] " +
|
||||||
|
heartbeatJSON["msg"] +
|
||||||
|
"\nTime (UTC): " +
|
||||||
|
heartbeatJSON["time"],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
await axios.post(feishuWebHookUrl, updata);
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Feishu;
|
28
server/notification-providers/gotify.js
Normal file
@@ -0,0 +1,28 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
|
||||||
|
class Gotify extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "gotify";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
try {
|
||||||
|
if (notification.gotifyserverurl && notification.gotifyserverurl.endsWith("/")) {
|
||||||
|
notification.gotifyserverurl = notification.gotifyserverurl.slice(0, -1);
|
||||||
|
}
|
||||||
|
await axios.post(`${notification.gotifyserverurl}/message?token=${notification.gotifyapplicationToken}`, {
|
||||||
|
"message": msg,
|
||||||
|
"priority": notification.gotifyPriority || 8,
|
||||||
|
"title": "Uptime-Kuma",
|
||||||
|
})
|
||||||
|
|
||||||
|
return okMsg;
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Gotify;
|
60
server/notification-providers/line.js
Normal file
@@ -0,0 +1,60 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
const { DOWN, UP } = require("../../src/util");
|
||||||
|
|
||||||
|
class Line extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "line";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
try {
|
||||||
|
let lineAPIUrl = "https://api.line.me/v2/bot/message/push";
|
||||||
|
let config = {
|
||||||
|
headers: {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
"Authorization": "Bearer " + notification.lineChannelAccessToken
|
||||||
|
}
|
||||||
|
};
|
||||||
|
if (heartbeatJSON == null) {
|
||||||
|
let testMessage = {
|
||||||
|
"to": notification.lineUserID,
|
||||||
|
"messages": [
|
||||||
|
{
|
||||||
|
"type": "text",
|
||||||
|
"text": "Test Successful!"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
await axios.post(lineAPIUrl, testMessage, config)
|
||||||
|
} else if (heartbeatJSON["status"] == DOWN) {
|
||||||
|
let downMessage = {
|
||||||
|
"to": notification.lineUserID,
|
||||||
|
"messages": [
|
||||||
|
{
|
||||||
|
"type": "text",
|
||||||
|
"text": "UptimeKuma Alert: [🔴 Down]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
await axios.post(lineAPIUrl, downMessage, config)
|
||||||
|
} else if (heartbeatJSON["status"] == UP) {
|
||||||
|
let upMessage = {
|
||||||
|
"to": notification.lineUserID,
|
||||||
|
"messages": [
|
||||||
|
{
|
||||||
|
"type": "text",
|
||||||
|
"text": "UptimeKuma Alert: [✅ Up]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
await axios.post(lineAPIUrl, upMessage, config)
|
||||||
|
}
|
||||||
|
return okMsg;
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Line;
|
48
server/notification-providers/lunasea.js
Normal file
@@ -0,0 +1,48 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
const { DOWN, UP } = require("../../src/util");
|
||||||
|
|
||||||
|
class LunaSea extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "lunasea";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
let lunaseadevice = "https://notify.lunasea.app/v1/custom/device/" + notification.lunaseaDevice
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (heartbeatJSON == null) {
|
||||||
|
let testdata = {
|
||||||
|
"title": "Uptime Kuma Alert",
|
||||||
|
"body": "Testing Successful.",
|
||||||
|
}
|
||||||
|
await axios.post(lunaseadevice, testdata)
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (heartbeatJSON["status"] == DOWN) {
|
||||||
|
let downdata = {
|
||||||
|
"title": "UptimeKuma Alert: " + monitorJSON["name"],
|
||||||
|
"body": "[🔴 Down] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
|
||||||
|
}
|
||||||
|
await axios.post(lunaseadevice, downdata)
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (heartbeatJSON["status"] == UP) {
|
||||||
|
let updata = {
|
||||||
|
"title": "UptimeKuma Alert: " + monitorJSON["name"],
|
||||||
|
"body": "[✅ Up] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
|
||||||
|
}
|
||||||
|
await axios.post(lunaseadevice, updata)
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = LunaSea;
|
45
server/notification-providers/matrix.js
Normal file
@@ -0,0 +1,45 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
const Crypto = require("crypto");
|
||||||
|
const { debug } = require("../../src/util");
|
||||||
|
|
||||||
|
class Matrix extends NotificationProvider {
|
||||||
|
name = "matrix";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
|
||||||
|
const size = 20;
|
||||||
|
const randomString = encodeURIComponent(
|
||||||
|
Crypto
|
||||||
|
.randomBytes(size)
|
||||||
|
.toString("base64")
|
||||||
|
.slice(0, size)
|
||||||
|
);
|
||||||
|
|
||||||
|
debug("Random String: " + randomString);
|
||||||
|
|
||||||
|
const roomId = encodeURIComponent(notification.internalRoomId);
|
||||||
|
|
||||||
|
debug("Matrix Room ID: " + roomId);
|
||||||
|
|
||||||
|
try {
|
||||||
|
let config = {
|
||||||
|
headers: {
|
||||||
|
"Authorization": `Bearer ${notification.accessToken}`,
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let data = {
|
||||||
|
"msgtype": "m.text",
|
||||||
|
"body": msg
|
||||||
|
};
|
||||||
|
|
||||||
|
await axios.put(`${notification.homeserverUrl}/_matrix/client/r0/rooms/${roomId}/send/m.room.message/${randomString}`, data, config);
|
||||||
|
return okMsg;
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Matrix;
|
123
server/notification-providers/mattermost.js
Normal file
@@ -0,0 +1,123 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
const { DOWN, UP } = require("../../src/util");
|
||||||
|
|
||||||
|
class Mattermost extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "mattermost";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
try {
|
||||||
|
const mattermostUserName = notification.mattermostusername || "Uptime Kuma";
|
||||||
|
// If heartbeatJSON is null, assume we're testing.
|
||||||
|
if (heartbeatJSON == null) {
|
||||||
|
let mattermostTestData = {
|
||||||
|
username: mattermostUserName,
|
||||||
|
text: msg,
|
||||||
|
}
|
||||||
|
await axios.post(notification.mattermostWebhookUrl, mattermostTestData)
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
|
||||||
|
const mattermostChannel = notification.mattermostchannel;
|
||||||
|
const mattermostIconEmoji = notification.mattermosticonemo;
|
||||||
|
const mattermostIconUrl = notification.mattermosticonurl;
|
||||||
|
|
||||||
|
if (heartbeatJSON["status"] == DOWN) {
|
||||||
|
let mattermostdowndata = {
|
||||||
|
username: mattermostUserName,
|
||||||
|
text: "Uptime Kuma Alert",
|
||||||
|
channel: mattermostChannel,
|
||||||
|
icon_emoji: mattermostIconEmoji,
|
||||||
|
icon_url: mattermostIconUrl,
|
||||||
|
attachments: [
|
||||||
|
{
|
||||||
|
fallback:
|
||||||
|
"Your " +
|
||||||
|
monitorJSON["name"] +
|
||||||
|
" service went down.",
|
||||||
|
color: "#FF0000",
|
||||||
|
title:
|
||||||
|
"❌ " +
|
||||||
|
monitorJSON["name"] +
|
||||||
|
" service went down. ❌",
|
||||||
|
title_link: monitorJSON["url"],
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
short: true,
|
||||||
|
title: "Service Name",
|
||||||
|
value: monitorJSON["name"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
short: true,
|
||||||
|
title: "Time (UTC)",
|
||||||
|
value: heartbeatJSON["time"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
short: false,
|
||||||
|
title: "Error",
|
||||||
|
value: heartbeatJSON["msg"],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
await axios.post(
|
||||||
|
notification.mattermostWebhookUrl,
|
||||||
|
mattermostdowndata
|
||||||
|
);
|
||||||
|
return okMsg;
|
||||||
|
} else if (heartbeatJSON["status"] == UP) {
|
||||||
|
let mattermostupdata = {
|
||||||
|
username: mattermostUserName,
|
||||||
|
text: "Uptime Kuma Alert",
|
||||||
|
channel: mattermostChannel,
|
||||||
|
icon_emoji: mattermostIconEmoji,
|
||||||
|
icon_url: mattermostIconUrl,
|
||||||
|
attachments: [
|
||||||
|
{
|
||||||
|
fallback:
|
||||||
|
"Your " +
|
||||||
|
monitorJSON["name"] +
|
||||||
|
" service went up!",
|
||||||
|
color: "#32CD32",
|
||||||
|
title:
|
||||||
|
"✅ " +
|
||||||
|
monitorJSON["name"] +
|
||||||
|
" service went up! ✅",
|
||||||
|
title_link: monitorJSON["url"],
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
short: true,
|
||||||
|
title: "Service Name",
|
||||||
|
value: monitorJSON["name"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
short: true,
|
||||||
|
title: "Time (UTC)",
|
||||||
|
value: heartbeatJSON["time"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
short: false,
|
||||||
|
title: "Ping",
|
||||||
|
value: heartbeatJSON["ping"] + "ms",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
await axios.post(
|
||||||
|
notification.mattermostWebhookUrl,
|
||||||
|
mattermostupdata
|
||||||
|
);
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Mattermost;
|
36
server/notification-providers/notification-provider.js
Normal file
@@ -0,0 +1,36 @@
class NotificationProvider {

    /**
     * Notification Provider Name
     * @type string
     */
    name = undefined;

    /**
     * @param notification : BeanModel
     * @param msg : string General Message
     * @param monitorJSON : object Monitor details (For Up/Down only)
     * @param heartbeatJSON : object Heartbeat details (For Up/Down only)
     * @returns {Promise<string>} Return Successful Message
     * Throw Error with fail msg
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        throw new Error("Have to override Notification.send(...)");
    }

    throwGeneralAxiosError(error) {
        let msg = "Error: " + error + " ";

        if (error.response && error.response.data) {
            if (typeof error.response.data === "string") {
                msg += error.response.data;
            } else {
                msg += JSON.stringify(error.response.data)
            }
        }

        throw new Error(msg)
    }
}

module.exports = NotificationProvider;
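The base class above is the contract every provider file in this commit follows: a name plus an async send(). A minimal sketch of a hypothetical new provider built on it; the class name and the exampleChatWebhookUrl field are invented for illustration:

// Sketch of a hypothetical provider using the NotificationProvider contract above.
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class ExampleChat extends NotificationProvider {

    name = "examplechat"; // hypothetical provider name

    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        try {
            // notification.exampleChatWebhookUrl is an assumed field on the notification BeanModel.
            await axios.post(notification.exampleChatWebhookUrl, { text: msg });
            return "Sent Successfully.";
        } catch (error) {
            // Re-throws with the response body appended, as the base class defines.
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = ExampleChat;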
64
server/notification-providers/octopush.js
Normal file
@@ -0,0 +1,64 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
|
||||||
|
class Octopush extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "octopush";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Default - V2
|
||||||
|
if (notification.octopushVersion == 2 || !notification.octopushVersion) {
|
||||||
|
let config = {
|
||||||
|
headers: {
|
||||||
|
"api-key": notification.octopushAPIKey,
|
||||||
|
"api-login": notification.octopushLogin,
|
||||||
|
"cache-control": "no-cache"
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let data = {
|
||||||
|
"recipients": [
|
||||||
|
{
|
||||||
|
"phone_number": notification.octopushPhoneNumber
|
||||||
|
}
|
||||||
|
],
|
||||||
|
//octopush not supporting non ascii char
|
||||||
|
"text": msg.replace(/[^\x00-\x7F]/g, ""),
|
||||||
|
"type": notification.octopushSMSType,
|
||||||
|
"purpose": "alert",
|
||||||
|
"sender": notification.octopushSenderName
|
||||||
|
};
|
||||||
|
await axios.post("https://api.octopush.com/v1/public/sms-campaign/send", data, config)
|
||||||
|
} else if (notification.octopushVersion == 1) {
|
||||||
|
let data = {
|
||||||
|
"user_login": notification.octopushDMLogin,
|
||||||
|
"api_key": notification.octopushDMAPIKey,
|
||||||
|
"sms_recipients": notification.octopushDMPhoneNumber,
|
||||||
|
"sms_sender": notification.octopushDMSenderName,
|
||||||
|
"sms_type": (notification.octopushDMSMSType == "sms_premium") ? "FR" : "XXX",
|
||||||
|
"transactional": "1",
|
||||||
|
//octopush not supporting non ascii char
|
||||||
|
"sms_text": msg.replace(/[^\x00-\x7F]/g, ""),
|
||||||
|
};
|
||||||
|
|
||||||
|
let config = {
|
||||||
|
headers: {
|
||||||
|
"cache-control": "no-cache"
|
||||||
|
},
|
||||||
|
params: data
|
||||||
|
};
|
||||||
|
await axios.post("https://www.octopush-dm.com/api/sms/json", {}, config)
|
||||||
|
} else {
|
||||||
|
throw new Error("Unknown Octopush version!");
|
||||||
|
}
|
||||||
|
|
||||||
|
return okMsg;
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Octopush;
|
41
server/notification-providers/promosms.js
Normal file
@@ -0,0 +1,41 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
|
||||||
|
class PromoSMS extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "promosms";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
|
||||||
|
try {
|
||||||
|
let config = {
|
||||||
|
headers: {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
"Authorization": "Basic " + Buffer.from(notification.promosmsLogin + ":" + notification.promosmsPassword).toString('base64'),
|
||||||
|
"Accept": "text/json",
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let data = {
|
||||||
|
"recipients": [ notification.promosmsPhoneNumber ],
|
||||||
|
//Lets remove non ascii char
|
||||||
|
"text": msg.replace(/[^\x00-\x7F]/g, ""),
|
||||||
|
"type": Number(notification.promosmsSMSType),
|
||||||
|
"sender": notification.promosmsSenderName
|
||||||
|
};
|
||||||
|
|
||||||
|
let resp = await axios.post("https://promosms.com/api/rest/v3_2/sms", data, config);
|
||||||
|
|
||||||
|
if (resp.data.response.status !== 0) {
|
||||||
|
let error = "Something gone wrong. Api returned " + resp.data.response.status + ".";
|
||||||
|
this.throwGeneralAxiosError(error);
|
||||||
|
}
|
||||||
|
|
||||||
|
return okMsg;
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = PromoSMS;
|
50
server/notification-providers/pushbullet.js
Normal file
@@ -0,0 +1,50 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
|
||||||
|
const { DOWN, UP } = require("../../src/util");
|
||||||
|
|
||||||
|
class Pushbullet extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "pushbullet";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
|
||||||
|
try {
|
||||||
|
let pushbulletUrl = "https://api.pushbullet.com/v2/pushes";
|
||||||
|
let config = {
|
||||||
|
headers: {
|
||||||
|
"Access-Token": notification.pushbulletAccessToken,
|
||||||
|
"Content-Type": "application/json"
|
||||||
|
}
|
||||||
|
};
|
||||||
|
if (heartbeatJSON == null) {
|
||||||
|
let testdata = {
|
||||||
|
"type": "note",
|
||||||
|
"title": "Uptime Kuma Alert",
|
||||||
|
"body": "Testing Successful.",
|
||||||
|
}
|
||||||
|
await axios.post(pushbulletUrl, testdata, config)
|
||||||
|
} else if (heartbeatJSON["status"] == DOWN) {
|
||||||
|
let downdata = {
|
||||||
|
"type": "note",
|
||||||
|
"title": "UptimeKuma Alert: " + monitorJSON["name"],
|
||||||
|
"body": "[🔴 Down] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
|
||||||
|
}
|
||||||
|
await axios.post(pushbulletUrl, downdata, config)
|
||||||
|
} else if (heartbeatJSON["status"] == UP) {
|
||||||
|
let updata = {
|
||||||
|
"type": "note",
|
||||||
|
"title": "UptimeKuma Alert: " + monitorJSON["name"],
|
||||||
|
"body": "[✅ Up] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
|
||||||
|
}
|
||||||
|
await axios.post(pushbulletUrl, updata, config)
|
||||||
|
}
|
||||||
|
return okMsg;
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Pushbullet;
|
49
server/notification-providers/pushover.js
Normal file
@@ -0,0 +1,49 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
|
||||||
|
class Pushover extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "pushover";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
let pushoverlink = "https://api.pushover.net/1/messages.json"
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (heartbeatJSON == null) {
|
||||||
|
let data = {
|
||||||
|
"message": "<b>Uptime Kuma Pushover testing successful.</b>",
|
||||||
|
"user": notification.pushoveruserkey,
|
||||||
|
"token": notification.pushoverapptoken,
|
||||||
|
"sound": notification.pushoversounds,
|
||||||
|
"priority": notification.pushoverpriority,
|
||||||
|
"title": notification.pushovertitle,
|
||||||
|
"retry": "30",
|
||||||
|
"expire": "3600",
|
||||||
|
"html": 1,
|
||||||
|
}
|
||||||
|
await axios.post(pushoverlink, data)
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
|
||||||
|
let data = {
|
||||||
|
"message": "<b>Uptime Kuma Alert</b>\n\n<b>Message</b>:" + msg + "\n<b>Time (UTC)</b>:" + heartbeatJSON["time"],
|
||||||
|
"user": notification.pushoveruserkey,
|
||||||
|
"token": notification.pushoverapptoken,
|
||||||
|
"sound": notification.pushoversounds,
|
||||||
|
"priority": notification.pushoverpriority,
|
||||||
|
"title": notification.pushovertitle,
|
||||||
|
"retry": "30",
|
||||||
|
"expire": "3600",
|
||||||
|
"html": 1,
|
||||||
|
}
|
||||||
|
await axios.post(pushoverlink, data)
|
||||||
|
return okMsg;
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Pushover;
|
30
server/notification-providers/pushy.js
Normal file
@@ -0,0 +1,30 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
|
||||||
|
class Pushy extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "pushy";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
|
||||||
|
try {
|
||||||
|
await axios.post(`https://api.pushy.me/push?api_key=${notification.pushyAPIKey}`, {
|
||||||
|
"to": notification.pushyToken,
|
||||||
|
"data": {
|
||||||
|
"message": "Uptime-Kuma"
|
||||||
|
},
|
||||||
|
"notification": {
|
||||||
|
"body": msg,
|
||||||
|
"badge": 1,
|
||||||
|
"sound": "ping.aiff"
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return okMsg;
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Pushy;
|
66
server/notification-providers/rocket-chat.js
Normal file
@@ -0,0 +1,66 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
const Slack = require("./slack");
|
||||||
|
const { setting } = require("../util-server");
|
||||||
|
const { getMonitorRelativeURL, UP, DOWN } = require("../../src/util");
|
||||||
|
|
||||||
|
class RocketChat extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "rocket.chat";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
try {
|
||||||
|
if (heartbeatJSON == null) {
|
||||||
|
let data = {
|
||||||
|
"text": msg,
|
||||||
|
"channel": notification.rocketchannel,
|
||||||
|
"username": notification.rocketusername,
|
||||||
|
"icon_emoji": notification.rocketiconemo,
|
||||||
|
};
|
||||||
|
await axios.post(notification.rocketwebhookURL, data);
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
|
||||||
|
const time = heartbeatJSON["time"];
|
||||||
|
|
||||||
|
let data = {
|
||||||
|
"text": "Uptime Kuma Alert",
|
||||||
|
"channel": notification.rocketchannel,
|
||||||
|
"username": notification.rocketusername,
|
||||||
|
"icon_emoji": notification.rocketiconemo,
|
||||||
|
"attachments": [
|
||||||
|
{
|
||||||
|
"title": "Uptime Kuma Alert *Time (UTC)*\n" + time,
|
||||||
|
"text": "*Message*\n" + msg,
|
||||||
|
}
|
||||||
|
]
|
||||||
|
};
|
||||||
|
|
||||||
|
// Color
|
||||||
|
if (heartbeatJSON.status === DOWN) {
|
||||||
|
data.attachments[0].color = "#ff0000";
|
||||||
|
} else {
|
||||||
|
data.attachments[0].color = "#32cd32";
|
||||||
|
}
|
||||||
|
|
||||||
|
if (notification.rocketbutton) {
|
||||||
|
await Slack.deprecateURL(notification.rocketbutton);
|
||||||
|
}
|
||||||
|
|
||||||
|
const baseURL = await setting("primaryBaseURL");
|
||||||
|
|
||||||
|
if (baseURL) {
|
||||||
|
data.attachments[0].title_link = baseURL + getMonitorRelativeURL(monitorJSON.id);
|
||||||
|
}
|
||||||
|
|
||||||
|
await axios.post(notification.rocketwebhookURL, data);
|
||||||
|
return okMsg;
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = RocketChat;
|
27
server/notification-providers/signal.js
Normal file
@@ -0,0 +1,27 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
|
||||||
|
class Signal extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "signal";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
|
||||||
|
try {
|
||||||
|
let data = {
|
||||||
|
"message": msg,
|
||||||
|
"number": notification.signalNumber,
|
||||||
|
"recipients": notification.signalRecipients.replace(/\s/g, "").split(","),
|
||||||
|
};
|
||||||
|
let config = {};
|
||||||
|
|
||||||
|
await axios.post(notification.signalURL, data, config)
|
||||||
|
return okMsg;
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Signal;
|
98
server/notification-providers/slack.js
Normal file
@@ -0,0 +1,98 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
const { setSettings, setting } = require("../util-server");
|
||||||
|
const { getMonitorRelativeURL } = require("../../src/util");
|
||||||
|
|
||||||
|
class Slack extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "slack";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Deprecated property notification.slackbutton
|
||||||
|
* Set it as primary base url if this is not yet set.
|
||||||
|
*/
|
||||||
|
static async deprecateURL(url) {
|
||||||
|
let currentPrimaryBaseURL = await setting("primaryBaseURL");
|
||||||
|
|
||||||
|
if (!currentPrimaryBaseURL) {
|
||||||
|
console.log("Move the url to be the primary base URL");
|
||||||
|
await setSettings("general", {
|
||||||
|
primaryBaseURL: url,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
console.log("Already there, no need to move the primary base URL");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
try {
|
||||||
|
if (heartbeatJSON == null) {
|
||||||
|
let data = {
|
||||||
|
"text": msg,
|
||||||
|
"channel": notification.slackchannel,
|
||||||
|
"username": notification.slackusername,
|
||||||
|
"icon_emoji": notification.slackiconemo,
|
||||||
|
};
|
||||||
|
await axios.post(notification.slackwebhookURL, data);
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
|
||||||
|
const time = heartbeatJSON["time"];
|
||||||
|
let data = {
|
||||||
|
"text": "Uptime Kuma Alert",
|
||||||
|
"channel": notification.slackchannel,
|
||||||
|
"username": notification.slackusername,
|
||||||
|
"icon_emoji": notification.slackiconemo,
|
||||||
|
"blocks": [{
|
||||||
|
"type": "header",
|
||||||
|
"text": {
|
||||||
|
"type": "plain_text",
|
||||||
|
"text": "Uptime Kuma Alert",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "section",
|
||||||
|
"fields": [{
|
||||||
|
"type": "mrkdwn",
|
||||||
|
"text": "*Message*\n" + msg,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "mrkdwn",
|
||||||
|
"text": "*Time (UTC)*\n" + time,
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
};
|
||||||
|
|
||||||
|
if (notification.slackbutton) {
|
||||||
|
await Slack.deprecateURL(notification.slackbutton);
|
||||||
|
}
|
||||||
|
|
||||||
|
const baseURL = await setting("primaryBaseURL");
|
||||||
|
|
||||||
|
// Button
|
||||||
|
if (baseURL) {
|
||||||
|
data.blocks.push({
|
||||||
|
"type": "actions",
|
||||||
|
"elements": [{
|
||||||
|
"type": "button",
|
||||||
|
"text": {
|
||||||
|
"type": "plain_text",
|
||||||
|
"text": "Visit Uptime Kuma",
|
||||||
|
},
|
||||||
|
"value": "Uptime-Kuma",
|
||||||
|
"url": baseURL + getMonitorRelativeURL(monitorJSON.id),
|
||||||
|
}],
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
await axios.post(notification.slackwebhookURL, data);
|
||||||
|
return okMsg;
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Slack;
|
48
server/notification-providers/smtp.js
Normal file
@@ -0,0 +1,48 @@
|
||||||
|
const nodemailer = require("nodemailer");
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
|
||||||
|
class SMTP extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "smtp";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
|
||||||
|
const config = {
|
||||||
|
host: notification.smtpHost,
|
||||||
|
port: notification.smtpPort,
|
||||||
|
secure: notification.smtpSecure,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Should fix the issue in https://github.com/louislam/uptime-kuma/issues/26#issuecomment-896373904
|
||||||
|
if (notification.smtpUsername || notification.smtpPassword) {
|
||||||
|
config.auth = {
|
||||||
|
user: notification.smtpUsername,
|
||||||
|
pass: notification.smtpPassword,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
let transporter = nodemailer.createTransport(config);
|
||||||
|
|
||||||
|
let bodyTextContent = msg;
|
||||||
|
if (heartbeatJSON) {
|
||||||
|
bodyTextContent = `${msg}\nTime (UTC): ${heartbeatJSON["time"]}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// send mail with defined transport object
|
||||||
|
await transporter.sendMail({
|
||||||
|
from: notification.smtpFrom,
|
||||||
|
cc: notification.smtpCC,
|
||||||
|
bcc: notification.smtpBCC,
|
||||||
|
to: notification.smtpTo,
|
||||||
|
subject: msg,
|
||||||
|
text: bodyTextContent,
|
||||||
|
tls: {
|
||||||
|
rejectUnauthorized: notification.smtpIgnoreTLSError || false,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return "Sent Successfully.";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = SMTP;
|
124
server/notification-providers/teams.js
Normal file
@@ -0,0 +1,124 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
const { DOWN, UP } = require("../../src/util");
|
||||||
|
|
||||||
|
class Teams extends NotificationProvider {
|
||||||
|
name = "teams";
|
||||||
|
|
||||||
|
_statusMessageFactory = (status, monitorName) => {
|
||||||
|
if (status === DOWN) {
|
||||||
|
return `🔴 Application [${monitorName}] went down`;
|
||||||
|
} else if (status === UP) {
|
||||||
|
return `✅ Application [${monitorName}] is back online`;
|
||||||
|
}
|
||||||
|
return "Notification";
|
||||||
|
};
|
||||||
|
|
||||||
|
_getThemeColor = (status) => {
|
||||||
|
if (status === DOWN) {
|
||||||
|
return "ff0000";
|
||||||
|
}
|
||||||
|
if (status === UP) {
|
||||||
|
return "00e804";
|
||||||
|
}
|
||||||
|
return "008cff";
|
||||||
|
};
|
||||||
|
|
||||||
|
_notificationPayloadFactory = ({
|
||||||
|
status,
|
||||||
|
monitorMessage,
|
||||||
|
monitorName,
|
||||||
|
monitorUrl,
|
||||||
|
}) => {
|
||||||
|
const notificationMessage = this._statusMessageFactory(
|
||||||
|
status,
|
||||||
|
monitorName
|
||||||
|
);
|
||||||
|
|
||||||
|
const facts = [];
|
||||||
|
|
||||||
|
if (monitorName) {
|
||||||
|
facts.push({
|
||||||
|
name: "Monitor",
|
||||||
|
value: monitorName,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (monitorUrl) {
|
||||||
|
facts.push({
|
||||||
|
name: "URL",
|
||||||
|
value: monitorUrl,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
"@context": "https://schema.org/extensions",
|
||||||
|
"@type": "MessageCard",
|
||||||
|
themeColor: this._getThemeColor(status),
|
||||||
|
summary: notificationMessage,
|
||||||
|
sections: [
|
||||||
|
{
|
||||||
|
activityImage:
|
||||||
|
"https://raw.githubusercontent.com/louislam/uptime-kuma/master/public/icon.png",
|
||||||
|
activityTitle: "**Uptime Kuma**",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
activityTitle: notificationMessage,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
activityTitle: "**Description**",
|
||||||
|
text: monitorMessage,
|
||||||
|
facts,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
_sendNotification = async (webhookUrl, payload) => {
|
||||||
|
await axios.post(webhookUrl, payload);
|
||||||
|
};
|
||||||
|
|
||||||
|
_handleGeneralNotification = (webhookUrl, msg) => {
|
||||||
|
const payload = this._notificationPayloadFactory({
|
||||||
|
monitorMessage: msg
|
||||||
|
});
|
||||||
|
|
||||||
|
return this._sendNotification(webhookUrl, payload);
|
||||||
|
};
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (heartbeatJSON == null) {
|
||||||
|
await this._handleGeneralNotification(notification.webhookUrl, msg);
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
|
||||||
|
let url;
|
||||||
|
|
||||||
|
if (monitorJSON["type"] === "port") {
|
||||||
|
url = monitorJSON["hostname"];
|
||||||
|
if (monitorJSON["port"]) {
|
||||||
|
url += ":" + monitorJSON["port"];
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
url = monitorJSON["url"];
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload = this._notificationPayloadFactory({
|
||||||
|
monitorMessage: heartbeatJSON.msg,
|
||||||
|
monitorName: monitorJSON.name,
|
||||||
|
monitorUrl: url,
|
||||||
|
status: heartbeatJSON.status,
|
||||||
|
});
|
||||||
|
|
||||||
|
await this._sendNotification(notification.webhookUrl, payload);
|
||||||
|
return okMsg;
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Teams;
|
27
server/notification-providers/telegram.js
Normal file
@@ -0,0 +1,27 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
|
||||||
|
class Telegram extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "telegram";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
|
||||||
|
try {
|
||||||
|
await axios.get(`https://api.telegram.org/bot${notification.telegramBotToken}/sendMessage`, {
|
||||||
|
params: {
|
||||||
|
chat_id: notification.telegramChatID,
|
||||||
|
text: msg,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
return okMsg;
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
let msg = (error.response.data.description) ? error.response.data.description : "Error without description"
|
||||||
|
throw new Error(msg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Telegram;
|
44
server/notification-providers/webhook.js
Normal file
@@ -0,0 +1,44 @@
|
||||||
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
const FormData = require("form-data");
|
||||||
|
|
||||||
|
class Webhook extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "webhook";
|
||||||
|
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
|
||||||
|
try {
|
||||||
|
let data = {
|
||||||
|
heartbeat: heartbeatJSON,
|
||||||
|
monitor: monitorJSON,
|
||||||
|
msg,
|
||||||
|
};
|
||||||
|
let finalData;
|
||||||
|
let config = {};
|
||||||
|
|
||||||
|
if (notification.webhookContentType === "form-data") {
|
||||||
|
finalData = new FormData();
|
||||||
|
finalData.append("data", JSON.stringify(data));
|
||||||
|
|
||||||
|
config = {
|
||||||
|
headers: finalData.getHeaders(),
|
||||||
|
}
|
||||||
|
|
||||||
|
} else {
|
||||||
|
finalData = data;
|
||||||
|
}
|
||||||
|
|
||||||
|
await axios.post(notification.webhookURL, finalData, config)
|
||||||
|
return okMsg;
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Webhook;
@@ -1,361 +1,83 @@
|
||||||
const axios = require("axios");
|
|
||||||
const { R } = require("redbean-node");
|
const { R } = require("redbean-node");
|
||||||
const FormData = require("form-data");
|
const Apprise = require("./notification-providers/apprise");
|
||||||
const nodemailer = require("nodemailer");
|
const Discord = require("./notification-providers/discord");
|
||||||
const child_process = require("child_process");
|
const Gotify = require("./notification-providers/gotify");
|
||||||
|
const Line = require("./notification-providers/line");
|
||||||
|
const LunaSea = require("./notification-providers/lunasea");
|
||||||
|
const Mattermost = require("./notification-providers/mattermost");
|
||||||
|
const Matrix = require("./notification-providers/matrix");
|
||||||
|
const Octopush = require("./notification-providers/octopush");
|
||||||
|
const PromoSMS = require("./notification-providers/promosms");
|
||||||
|
const Pushbullet = require("./notification-providers/pushbullet");
|
||||||
|
const Pushover = require("./notification-providers/pushover");
|
||||||
|
const Pushy = require("./notification-providers/pushy");
|
||||||
|
const RocketChat = require("./notification-providers/rocket-chat");
|
||||||
|
const Signal = require("./notification-providers/signal");
|
||||||
|
const Slack = require("./notification-providers/slack");
|
||||||
|
const SMTP = require("./notification-providers/smtp");
|
||||||
|
const Teams = require("./notification-providers/teams");
|
||||||
|
const Telegram = require("./notification-providers/telegram");
|
||||||
|
const Webhook = require("./notification-providers/webhook");
|
||||||
|
const Feishu = require("./notification-providers/feishu");
|
||||||
|
|
||||||
class Notification {
|
class Notification {
|
||||||
|
|
||||||
|
providerList = {};
|
||||||
|
|
||||||
|
static init() {
|
||||||
|
console.log("Prepare Notification Providers");
|
||||||
|
|
||||||
|
this.providerList = {};
|
||||||
|
|
||||||
|
const list = [
|
||||||
|
new Apprise(),
|
||||||
|
new Discord(),
|
||||||
|
new Teams(),
|
||||||
|
new Gotify(),
|
||||||
|
new Line(),
|
||||||
|
new LunaSea(),
|
||||||
|
new Feishu(),
|
||||||
|
new Mattermost(),
|
||||||
|
new Matrix(),
|
||||||
|
new Octopush(),
|
||||||
|
new PromoSMS(),
|
||||||
|
new Pushbullet(),
|
||||||
|
new Pushover(),
|
||||||
|
new Pushy(),
|
||||||
|
new RocketChat(),
|
||||||
|
new Signal(),
|
||||||
|
new Slack(),
|
||||||
|
new SMTP(),
|
||||||
|
new Telegram(),
|
||||||
|
new Webhook(),
|
||||||
|
];
|
||||||
|
|
||||||
|
for (let item of list) {
|
||||||
|
if (! item.name) {
|
||||||
|
throw new Error("Notification provider without name");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.providerList[item.name]) {
|
||||||
|
throw new Error("Duplicate notification provider name");
|
||||||
|
}
|
||||||
|
this.providerList[item.name] = item;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
* @param notification
|
* @param notification : BeanModel
|
||||||
* @param msg
|
* @param msg : string General Message
|
||||||
* @param monitorJSON
|
* @param monitorJSON : object Monitor details (For Up/Down only)
|
||||||
* @param heartbeatJSON
|
* @param heartbeatJSON : object Heartbeat details (For Up/Down only)
|
      * @returns {Promise<string>} Successful msg
      * Throw Error with fail msg
      */
     static async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
-        let okMsg = "Sent Successfully. ";
-        if (notification.type === "telegram") {
-            try {
-                await axios.get(`https://api.telegram.org/bot${notification.telegramBotToken}/sendMessage`, {
-                    params: {
-                        chat_id: notification.telegramChatID,
-                        text: msg,
-                    },
-                })
-                return okMsg;
-
-            } catch (error) {
-                let msg = (error.response.data.description) ? error.response.data.description : "Error without description"
-                throw new Error(msg)
-            }
-
-        } else if (notification.type === "gotify") {
-            try {
-                if (notification.gotifyserverurl && notification.gotifyserverurl.endsWith("/")) {
-                    notification.gotifyserverurl = notification.gotifyserverurl.slice(0, -1);
-                }
-                await axios.post(`${notification.gotifyserverurl}/message?token=${notification.gotifyapplicationToken}`, {
-                    "message": msg,
-                    "priority": notification.gotifyPriority || 8,
-                    "title": "Uptime-Kuma",
-                })
-
-                return okMsg;
-
-            } catch (error) {
-                throwGeneralAxiosError(error)
-            }
-
-        } else if (notification.type === "webhook") {
-            try {
-                let data = {
-                    heartbeat: heartbeatJSON,
-                    monitor: monitorJSON,
-                    msg,
-                };
-                let finalData;
-                let config = {};
-
-                if (notification.webhookContentType === "form-data") {
-                    finalData = new FormData();
-                    finalData.append("data", JSON.stringify(data));
-
-                    config = {
-                        headers: finalData.getHeaders(),
-                    }
-
-                } else {
-                    finalData = data;
-                }
-
-                await axios.post(notification.webhookURL, finalData, config)
-                return okMsg;
-
-            } catch (error) {
-                throwGeneralAxiosError(error)
-            }
-
-        } else if (notification.type === "smtp") {
-            return await Notification.smtp(notification, msg)
-
-        } else if (notification.type === "discord") {
-            try {
-                const discordDisplayName = notification.discordUsername || "Uptime Kuma";
-
-                // If heartbeatJSON is null, assume we're testing.
-                if (heartbeatJSON == null) {
-                    let discordtestdata = {
-                        username: discordDisplayName,
-                        content: msg,
-                    }
-                    await axios.post(notification.discordWebhookUrl, discordtestdata)
-                    return okMsg;
-                }
-                // If heartbeatJSON is not null, we go into the normal alerting loop.
-                if (heartbeatJSON["status"] == 0) {
-                    let discorddowndata = {
-                        username: discordDisplayName,
-                        embeds: [{
-                            title: "❌ One of your services went down. ❌",
-                            color: 16711680,
-                            timestamp: heartbeatJSON["time"],
-                            fields: [
-                                {
-                                    name: "Service Name",
-                                    value: monitorJSON["name"],
-                                },
-                                {
-                                    name: "Service URL",
-                                    value: monitorJSON["url"],
-                                },
-                                {
-                                    name: "Time (UTC)",
-                                    value: heartbeatJSON["time"],
-                                },
-                                {
-                                    name: "Error",
-                                    value: heartbeatJSON["msg"],
-                                },
-                            ],
-                        }],
-                    }
-                    await axios.post(notification.discordWebhookUrl, discorddowndata)
-                    return okMsg;
-
-                } else if (heartbeatJSON["status"] == 1) {
-                    let discordupdata = {
-                        username: discordDisplayName,
-                        embeds: [{
-                            title: "✅ Your service " + monitorJSON["name"] + " is up! ✅",
-                            color: 65280,
-                            timestamp: heartbeatJSON["time"],
-                            fields: [
-                                {
-                                    name: "Service Name",
-                                    value: monitorJSON["name"],
-                                },
-                                {
-                                    name: "Service URL",
-                                    value: "[Visit Service](" + monitorJSON["url"] + ")",
-                                },
-                                {
-                                    name: "Time (UTC)",
-                                    value: heartbeatJSON["time"],
-                                },
-                                {
-                                    name: "Ping",
-                                    value: heartbeatJSON["ping"] + "ms",
-                                },
-                            ],
-                        }],
-                    }
-                    await axios.post(notification.discordWebhookUrl, discordupdata)
-                    return okMsg;
-                }
-            } catch (error) {
-                throwGeneralAxiosError(error)
-            }
-
-        } else if (notification.type === "signal") {
-            try {
-                let data = {
-                    "message": msg,
-                    "number": notification.signalNumber,
-                    "recipients": notification.signalRecipients.replace(/\s/g, "").split(","),
-                };
-                let config = {};
-
-                await axios.post(notification.signalURL, data, config)
-                return okMsg;
-            } catch (error) {
-                throwGeneralAxiosError(error)
-            }
-
-        } else if (notification.type === "pushy") {
-            try {
-                await axios.post(`https://api.pushy.me/push?api_key=${notification.pushyAPIKey}`, {
-                    "to": notification.pushyToken,
-                    "data": {
-                        "message": "Uptime-Kuma"
-                    },
-                    "notification": {
-                        "body": msg,
-                        "badge": 1,
-                        "sound": "ping.aiff"
-                    }
-                })
-                return true;
-            } catch (error) {
-                console.log(error)
-                return false;
-            }
-        } else if (notification.type === "octopush") {
-            try {
-                let config = {
-                    headers: {
-                        'api-key': notification.octopushAPIKey,
-                        'api-login': notification.octopushLogin,
-                        'cache-control': 'no-cache'
-                    }
-                };
-                let data = {
-                    "recipients": [
-                        {
-                            "phone_number": notification.octopushPhoneNumber
-                        }
-                    ],
-                    //octopush not supporting non ascii char
-                    "text": msg.replace(/[^\x00-\x7F]/g, ""),
-                    "type": notification.octopushSMSType,
-                    "purpose": "alert",
-                    "sender": notification.octopushSenderName
-                };
-
-                await axios.post(`https://api.octopush.com/v1/public/sms-campaign/send`, data, config)
-                return true;
-            } catch (error) {
-                console.log(error)
-                return false;
-            }
-        } else if (notification.type === "slack") {
-            try {
-                if (heartbeatJSON == null) {
-                    let data = {
-                        "text": "Uptime Kuma Slack testing successful.",
-                        "channel": notification.slackchannel,
-                        "username": notification.slackusername,
-                        "icon_emoji": notification.slackiconemo,
-                    }
-                    await axios.post(notification.slackwebhookURL, data)
-                    return okMsg;
-                }
-
-                const time = heartbeatJSON["time"];
-                let data = {
-                    "text": "Uptime Kuma Alert",
-                    "channel": notification.slackchannel,
-                    "username": notification.slackusername,
-                    "icon_emoji": notification.slackiconemo,
-                    "blocks": [{
-                        "type": "header",
-                        "text": {
-                            "type": "plain_text",
-                            "text": "Uptime Kuma Alert",
-                        },
-                    },
-                    {
-                        "type": "section",
-                        "fields": [{
-                            "type": "mrkdwn",
-                            "text": "*Message*\n" + msg,
-                        },
-                        {
-                            "type": "mrkdwn",
-                            "text": "*Time (UTC)*\n" + time,
-                        }],
-                    },
-                    {
-                        "type": "actions",
-                        "elements": [
-                            {
-                                "type": "button",
-                                "text": {
-                                    "type": "plain_text",
-                                    "text": "Visit Uptime Kuma",
-                                },
-                                "value": "Uptime-Kuma",
-                                "url": notification.slackbutton || "https://github.com/louislam/uptime-kuma",
-                            },
-                        ],
-                    }],
-                }
-                await axios.post(notification.slackwebhookURL, data)
-                return okMsg;
-            } catch (error) {
-                throwGeneralAxiosError(error)
-            }
-
-        } else if (notification.type === "pushover") {
-            let pushoverlink = "https://api.pushover.net/1/messages.json"
-            try {
-                if (heartbeatJSON == null) {
-                    let data = {
-                        "message": "<b>Uptime Kuma Pushover testing successful.</b>",
-                        "user": notification.pushoveruserkey,
-                        "token": notification.pushoverapptoken,
-                        "sound": notification.pushoversounds,
-                        "priority": notification.pushoverpriority,
-                        "title": notification.pushovertitle,
-                        "retry": "30",
-                        "expire": "3600",
-                        "html": 1,
-                    }
-                    await axios.post(pushoverlink, data)
-                    return okMsg;
-                }
-
-                let data = {
-                    "message": "<b>Uptime Kuma Alert</b>\n\n<b>Message</b>:" + msg + "\n<b>Time (UTC)</b>:" + heartbeatJSON["time"],
-                    "user": notification.pushoveruserkey,
-                    "token": notification.pushoverapptoken,
-                    "sound": notification.pushoversounds,
-                    "priority": notification.pushoverpriority,
-                    "title": notification.pushovertitle,
-                    "retry": "30",
-                    "expire": "3600",
-                    "html": 1,
-                }
-                await axios.post(pushoverlink, data)
-                return okMsg;
-            } catch (error) {
-                throwGeneralAxiosError(error)
-            }
-
-        } else if (notification.type === "apprise") {
-
-            return Notification.apprise(notification, msg)
-
-        } else if (notification.type === "lunasea") {
-            let lunaseadevice = "https://notify.lunasea.app/v1/custom/device/" + notification.lunaseaDevice
-
-            try {
-                if (heartbeatJSON == null) {
-                    let testdata = {
-                        "title": "Uptime Kuma Alert",
-                        "body": "Testing Successful.",
-                    }
-                    await axios.post(lunaseadevice, testdata)
-                    return okMsg;
-                }
-
-                if (heartbeatJSON["status"] == 0) {
-                    let downdata = {
-                        "title": "UptimeKuma Alert:" + monitorJSON["name"],
-                        "body": "[🔴 Down]" + heartbeatJSON["msg"] + "\nTime (UTC):" + heartbeatJSON["time"],
-                    }
-                    await axios.post(lunaseadevice, downdata)
-                    return okMsg;
-                }
-
-                if (heartbeatJSON["status"] == 1) {
-                    let updata = {
-                        "title": "UptimeKuma Alert:" + monitorJSON["name"],
-                        "body": "[✅ Up]" + heartbeatJSON["msg"] + "\nTime (UTC):" + heartbeatJSON["time"],
-                    }
-                    await axios.post(lunaseadevice, updata)
-                    return okMsg;
-                }
-
-            } catch (error) {
-                throwGeneralAxiosError(error)
-            }
-
+        if (this.providerList[notification.type]) {
+            return this.providerList[notification.type].send(notification, msg, monitorJSON, heartbeatJSON);
         } else {
-            throw new Error("Notification type is not supported")
+            throw new Error("Notification type is not supported");
         }
     }
 
@@ -378,8 +100,15 @@ class Notification {
 
         bean.name = notification.name;
         bean.user_id = userID;
-        bean.config = JSON.stringify(notification)
+        bean.config = JSON.stringify(notification);
+        bean.is_default = notification.isDefault || false;
         await R.store(bean)
 
+        if (notification.applyExisting) {
+            await applyNotificationEveryMonitor(bean.id, userID);
+        }
+
         return bean;
     }
 
     static async delete(notificationID, userID) {
@@ -395,46 +124,6 @@ class Notification {
         await R.trash(bean)
     }
 
-    static async smtp(notification, msg) {
-
-        let transporter = nodemailer.createTransport({
-            host: notification.smtpHost,
-            port: notification.smtpPort,
-            secure: notification.smtpSecure,
-            auth: {
-                user: notification.smtpUsername,
-                pass: notification.smtpPassword,
-            },
-        });
-
-        // send mail with defined transport object
-        await transporter.sendMail({
-            from: `"Uptime Kuma" <${notification.smtpFrom}>`,
-            to: notification.smtpTo,
-            subject: msg,
-            text: msg,
-        });
-
-        return "Sent Successfully.";
-    }
-
-    static async apprise(notification, msg) {
-        let s = child_process.spawnSync("apprise", [ "-vv", "-b", msg, notification.appriseURL])
-
-        let output = (s.stdout) ? s.stdout.toString() : "ERROR: maybe apprise not found";
-
-        if (output) {
-
-            if (! output.includes("ERROR")) {
-                return "Sent Successfully";
-            }
-
-            throw new Error(output)
-        } else {
-            return ""
-        }
-    }
-
     static checkApprise() {
         let commandExistsSync = require("command-exists").sync;
         let exists = commandExistsSync("apprise");
@@ -443,18 +132,24 @@ class Notification {
 
     }
 
-function throwGeneralAxiosError(error) {
-    let msg = "Error: " + error + " ";
-
-    if (error.response && error.response.data) {
-        if (typeof error.response.data === "string") {
-            msg += error.response.data;
-        } else {
-            msg += JSON.stringify(error.response.data)
-        }
-    }
-
-    throw new Error(msg)
-}
+async function applyNotificationEveryMonitor(notificationID, userID) {
+    let monitors = await R.getAll("SELECT id FROM monitor WHERE user_id = ?", [
+        userID
+    ]);
+
+    for (let i = 0; i < monitors.length; i++) {
+        let checkNotification = await R.findOne("monitor_notification", " monitor_id = ? AND notification_id = ? ", [
+            monitors[i].id,
+            notificationID,
+        ])
+
+        if (! checkNotification) {
+            let relation = R.dispense("monitor_notification");
+            relation.monitor_id = monitors[i].id;
+            relation.notification_id = notificationID;
+            await R.store(relation)
+        }
+    }
+}
 
 module.exports = {
server/password-hash.js
@@ -1,5 +1,5 @@
 const passwordHashOld = require("password-hash");
-const bcrypt = require("bcrypt");
+const bcrypt = require("bcryptjs");
 const saltRounds = 10;
 
 exports.generate = function (password) {
server/ping-lite.js
@@ -1,13 +1,13 @@
 // https://github.com/ben-bradley/ping-lite/blob/master/ping-lite.js
 // Fixed on Windows
 const net = require("net");
-const spawn = require("child_process").spawn,
-    events = require("events"),
-    fs = require("fs"),
-    WIN = /^win/.test(process.platform),
-    LIN = /^linux/.test(process.platform),
-    MAC = /^darwin/.test(process.platform);
-const { debug } = require("../src/util");
+const spawn = require("child_process").spawn;
+const events = require("events");
+const fs = require("fs");
+const WIN = /^win/.test(process.platform);
+const LIN = /^linux/.test(process.platform);
+const MAC = /^darwin/.test(process.platform);
+const FBSD = /^freebsd/.test(process.platform);
 
 module.exports = Ping;
 
@@ -21,15 +21,17 @@ function Ping(host, options) {
 
     events.EventEmitter.call(this);
 
+    const timeout = 10;
+
     if (WIN) {
         this._bin = "c:/windows/system32/ping.exe";
-        this._args = (options.args) ? options.args : [ "-n", "1", "-w", "5000", host ];
+        this._args = (options.args) ? options.args : [ "-n", "1", "-w", timeout * 1000, host ];
         this._regmatch = /[><=]([0-9.]+?)ms/;
 
     } else if (LIN) {
         this._bin = "/bin/ping";
 
-        const defaultArgs = [ "-n", "-w", "2", "-c", "1", host ];
+        const defaultArgs = [ "-n", "-w", timeout, "-c", "1", host ];
 
         if (net.isIPv6(host) || options.ipv6) {
             defaultArgs.unshift("-6");
@@ -46,7 +48,19 @@ function Ping(host, options) {
             this._bin = "/sbin/ping";
         }
 
-        this._args = (options.args) ? options.args : [ "-n", "-t", "2", "-c", "1", host ];
+        this._args = (options.args) ? options.args : [ "-n", "-t", timeout, "-c", "1", host ];
+        this._regmatch = /=([0-9.]+?) ms/;
+
+    } else if (FBSD) {
+        this._bin = "/sbin/ping";
+
+        const defaultArgs = [ "-n", "-t", timeout, "-c", "1", host ];
+
+        if (net.isIPv6(host) || options.ipv6) {
+            defaultArgs.unshift("-6");
+        }
+
+        this._args = (options.args) ? options.args : defaultArgs;
         this._regmatch = /=([0-9.]+?) ms/;
 
     } else {
@@ -75,7 +89,9 @@ Ping.prototype.send = function (callback) {
         return self.emit("result", ms);
     };
 
-    let _ended, _exited, _errored;
+    let _ended;
+    let _exited;
+    let _errored;
 
     this._ping = spawn(this._bin, this._args); // spawn the binary
 
@@ -107,9 +123,9 @@ Ping.prototype.send = function (callback) {
     });
 
     function onEnd() {
-        let stdout = this.stdout._stdout,
-            stderr = this.stderr._stderr,
-            ms;
+        let stdout = this.stdout._stdout;
+        let stderr = this.stderr._stderr;
+        let ms;
 
         if (stderr) {
             return callback(new Error(stderr));
server/prometheus.js
@@ -6,7 +6,7 @@ const commonLabels = [
     "monitor_url",
     "monitor_hostname",
     "monitor_port",
-]
+];
 
 const monitor_cert_days_remaining = new PrometheusClient.Gauge({
     name: "monitor_cert_days_remaining",
@@ -41,45 +41,46 @@ class Prometheus {
             monitor_url: monitor.url,
             monitor_hostname: monitor.hostname,
             monitor_port: monitor.port
-        }
+        };
     }
 
     update(heartbeat, tlsInfo) {
 
         if (typeof tlsInfo !== "undefined") {
             try {
-                let is_valid = 0
+                let is_valid = 0;
                 if (tlsInfo.valid == true) {
-                    is_valid = 1
+                    is_valid = 1;
                 } else {
-                    is_valid = 0
+                    is_valid = 0;
                 }
-                monitor_cert_is_valid.set(this.monitorLabelValues, is_valid)
+                monitor_cert_is_valid.set(this.monitorLabelValues, is_valid);
             } catch (e) {
-                console.error(e)
+                console.error(e);
             }
 
             try {
-                monitor_cert_days_remaining.set(this.monitorLabelValues, tlsInfo.daysRemaining)
+                monitor_cert_days_remaining.set(this.monitorLabelValues, tlsInfo.certInfo.daysRemaining);
             } catch (e) {
-                console.error(e)
+                console.error(e);
             }
         }
 
         try {
-            monitor_status.set(this.monitorLabelValues, heartbeat.status)
+            monitor_status.set(this.monitorLabelValues, heartbeat.status);
         } catch (e) {
-            console.error(e)
+            console.error(e);
         }
 
         try {
             if (typeof heartbeat.ping === "number") {
-                monitor_response_time.set(this.monitorLabelValues, heartbeat.ping)
+                monitor_response_time.set(this.monitorLabelValues, heartbeat.ping);
             } else {
                 // Is it good?
-                monitor_response_time.set(this.monitorLabelValues, -1)
+                monitor_response_time.set(this.monitorLabelValues, -1);
             }
         } catch (e) {
-            console.error(e)
+            console.error(e);
         }
     }
 
@@ -87,4 +88,4 @@ class Prometheus {
 
 module.exports = {
     Prometheus
-}
+};