only for ansible

This commit is contained in:
Louis Lam 2021-12-14 18:46:34 +08:00
parent a1b20698be
commit bb43dc2825
260 changed files with 0 additions and 52933 deletions


@ -1,45 +0,0 @@
/.idea
/node_modules
/data
/out
/test
/kubernetes
/.do
**/.dockerignore
/private
**/.git
**/.gitignore
**/docker-compose*
**/[Dd]ockerfile*
LICENSE
README.md
.editorconfig
.vscode
.eslint*
.stylelint*
/.github
yarn.lock
app.json
CODE_OF_CONDUCT.md
CONTRIBUTING.md
CNAME
install.sh
SECURITY.md
tsconfig.json
.env
/tmp
### .gitignore content (commented rules are duplicated)
#node_modules
.DS_Store
#dist
dist-ssr
*.local
#.idea
#/data
#!/data/.gitkeep
#.vscode
### End of .gitignore content


@ -1,21 +0,0 @@
root = true
[*]
indent_style = space
indent_size = 4
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.md]
trim_trailing_whitespace = false
[*.yaml]
indent_size = 2
[*.yml]
indent_size = 2
[*.vue]
trim_trailing_whitespace = false


@ -1,113 +0,0 @@
module.exports = {
root: true,
env: {
browser: true,
commonjs: true,
es2020: true,
node: true,
},
extends: [
"eslint:recommended",
"plugin:vue/vue3-recommended",
],
parser: "vue-eslint-parser",
parserOptions: {
parser: "@babel/eslint-parser",
sourceType: "module",
requireConfigFile: false,
},
rules: {
"linebreak-style": ["error", "unix"],
"camelcase": ["warn", {
"properties": "never",
"ignoreImports": true
}],
// override/add rules settings here, such as:
// 'vue/no-unused-vars': 'error'
"no-unused-vars": "warn",
indent: [
"error",
4,
{
ignoredNodes: ["TemplateLiteral"],
SwitchCase: 1,
},
],
quotes: ["warn", "double"],
semi: "warn",
"vue/html-indent": ["warn", 4], // default: 2
"vue/max-attributes-per-line": "off",
"vue/singleline-html-element-content-newline": "off",
"vue/html-self-closing": "off",
"vue/attribute-hyphenation": "off", // This change noNL to "no-n-l" unexpectedly
"no-multi-spaces": ["error", {
ignoreEOLComments: true,
}],
"space-before-function-paren": ["error", {
"anonymous": "always",
"named": "never",
"asyncArrow": "always"
}],
"curly": "error",
"object-curly-spacing": ["error", "always"],
"object-curly-newline": "off",
"object-property-newline": "error",
"comma-spacing": "error",
"brace-style": "error",
"no-var": "error",
"key-spacing": "warn",
"keyword-spacing": "warn",
"space-infix-ops": "warn",
"arrow-spacing": "warn",
"no-trailing-spaces": "warn",
"no-constant-condition": ["error", {
"checkLoops": false,
}],
"space-before-blocks": "warn",
//'no-console': 'warn',
"no-extra-boolean-cast": "off",
"no-multiple-empty-lines": ["warn", {
"max": 1,
"maxBOF": 0,
}],
"lines-between-class-members": ["warn", "always", {
exceptAfterSingleLine: true,
}],
"no-unneeded-ternary": "error",
"array-bracket-newline": ["error", "consistent"],
"eol-last": ["error", "always"],
//'prefer-template': 'error',
"comma-dangle": ["warn", "only-multiline"],
"no-empty": ["error", {
"allowEmptyCatch": true
}],
"no-control-regex": "off",
"one-var": ["error", "never"],
"max-statements-per-line": ["error", { "max": 1 }]
},
"overrides": [
{
"files": [ "src/languages/*.js", "src/icon.js" ],
"rules": {
"comma-dangle": ["error", "always-multiline"],
}
},
// Override for jest puppeteer
{
"files": [
"**/*.spec.js",
"**/*.spec.jsx"
],
env: {
jest: true,
},
globals: {
page: true,
browser: true,
context: true,
jestPuppeteer: true,
},
}
]
};

.github/FUNDING.yml

@ -1,12 +0,0 @@
# These are supported funding model platforms
github: louislam # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
#patreon: # Replace with a single Patreon username
open_collective: uptime-kuma # Replace with a single Open Collective username
#ko_fi: # Replace with a single Ko-fi username
#tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
#community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
#liberapay: # Replace with a single Liberapay username
#issuehunt: # Replace with a single IssueHunt username
#otechie: # Replace with a single Otechie username
#custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']


@ -1,68 +0,0 @@
name: "❓ Ask for help"
description: "Submit any question related to Uptime Kuma"
#title: "[Help] "
labels: [help]
body:
- type: checkboxes
id: no-duplicate-issues
attributes:
label: "⚠️ Please verify that this bug has NOT been raised before."
description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
options:
- label: "I checked and didn't find similar issue"
required: true
- type: checkboxes
attributes:
label: "🛡️ Security Policy"
description: Please review the security policy before reporting security related issues/bugs.
options:
- label: I have read this project's [Security Policy](https://github.com/louislam/uptime-kuma/security/policy)
required: true
- type: textarea
id: steps-to-reproduce
validations:
required: true
attributes:
label: "📝 Describe your problem"
description: "Please walk us through it step by step."
placeholder: "Describe what are you asking for..."
- type: input
id: uptime-kuma-version
attributes:
label: "🐻 Uptime-Kuma Version"
description: "Which version of Uptime-Kuma are you running? Please do NOT provide the docker tag such as latest or 1"
placeholder: "Ex. 1.10.0"
validations:
required: true
- type: input
id: operating-system
attributes:
label: "💻 Operating System and Arch"
description: "Which OS is your server/device running on?"
placeholder: "Ex. Ubuntu 20.04 x86"
validations:
required: true
- type: input
id: browser-vendor
attributes:
label: "🌐 Browser"
description: "Which browser are you running on?"
placeholder: "Ex. Google Chrome 95.0.4638.69"
validations:
required: true
- type: input
id: docker-version
attributes:
label: "🐋 Docker Version"
description: "If running with Docker, which version are you running?"
placeholder: "Ex. Docker 20.10.9 / K8S / Podman"
validations:
required: false
- type: input
id: nodejs-version
attributes:
label: "🟩 NodeJS Version"
description: "If running with Node.js? which version are you running?"
placeholder: "Ex. 14.18.0"
validations:
required: false


@ -1,99 +0,0 @@
name: "🐛 Bug Report"
description: "Submit a bug report to help us improve"
#title: "[Bug] "
labels: [bug]
body:
- type: checkboxes
id: no-duplicate-issues
attributes:
label: "⚠️ Please verify that this bug has NOT been raised before."
description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
options:
- label: "I checked and didn't find similar issue"
required: true
- type: checkboxes
attributes:
label: "🛡️ Security Policy"
description: Please review the security policy before reporting security related issues/bugs.
options:
- label: I have read this project's [Security Policy](https://github.com/louislam/uptime-kuma/security/policy)
required: true
- type: textarea
id: description
validations:
required: false
attributes:
label: "Description"
description: "You could also upload screenshots"
- type: textarea
id: steps-to-reproduce
validations:
required: true
attributes:
label: "👟 Reproduction steps"
description: "How do you trigger this bug? Please walk us through it step by step."
placeholder: "..."
- type: textarea
id: expected-behavior
validations:
required: true
attributes:
label: "👀 Expected behavior"
description: "What did you think would happen?"
placeholder: "..."
- type: textarea
id: actual-behavior
validations:
required: true
attributes:
label: "😓 Actual Behavior"
description: "What actually happen?"
placeholder: "..."
- type: input
id: uptime-kuma-version
attributes:
label: "🐻 Uptime-Kuma Version"
description: "Which version of Uptime-Kuma are you running? Please do NOT provide the docker tag such as latest or 1"
placeholder: "Ex. 1.10.0"
validations:
required: true
- type: input
id: operating-system
attributes:
label: "💻 Operating System and Arch"
description: "Which OS is your server/device running on?"
placeholder: "Ex. Ubuntu 20.04 x86"
validations:
required: true
- type: input
id: browser-vendor
attributes:
label: "🌐 Browser"
description: "Which browser are you running on?"
placeholder: "Ex. Google Chrome 95.0.4638.69"
validations:
required: true
- type: input
id: docker-version
attributes:
label: "🐋 Docker Version"
description: "If running with Docker, which version are you running?"
placeholder: "Ex. Docker 20.10.9 / K8S / Podman"
validations:
required: false
- type: input
id: nodejs-version
attributes:
label: "🟩 NodeJS Version"
description: "If running with Node.js? which version are you running?"
placeholder: "Ex. 14.18.0"
validations:
required: false
- type: textarea
id: logs
attributes:
label: "📝 Relevant log output"
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
render: shell
validations:
required: false


@ -1,59 +0,0 @@
name: 🚀 Feature Request
description: "Submit a proposal for a new feature"
#title: "[Feature] "
labels: [feature-request]
body:
- type: checkboxes
id: no-duplicate-issues
attributes:
label: "⚠️ Please verify that this feature request has NOT been suggested before."
description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
options:
- label: "I checked and didn't find similar feature request"
required: true
- type: dropdown
id: feature-area
attributes:
label: "🏷️ Feature Request Type"
description: "What kind of feature request is this?"
multiple: true
options:
- API
- New Notification
- New Monitor
- UI Feature
- Other
validations:
required: true
- type: textarea
id: feature-description
validations:
required: true
attributes:
label: "🔖 Feature description"
description: "A clear and concise description of what the feature request is."
placeholder: "You should add ..."
- type: textarea
id: solution
validations:
required: true
attributes:
label: "✔️ Solution"
description: "A clear and concise description of what you want to happen."
placeholder: "In my use-case, ..."
- type: textarea
id: alternatives
validations:
required: false
attributes:
label: "❓ Alternatives"
description: "A clear and concise description of any alternative solutions or features you've considered."
placeholder: "I have considered ..."
- type: textarea
id: additional-context
validations:
required: false
attributes:
label: "📝 Additional Context"
description: "Add any other context or screenshots about the feature request here."
placeholder: "..."


@ -1,28 +0,0 @@
# Description
Fixes #(issue)
## Type of change
Please delete options that are not relevant.
- Bug fix (non-breaking change which fixes an issue)
- User Interface
- New feature (non-breaking change which adds functionality)
- Breaking change (fix or feature that would cause existing functionality to not work as expected)
- Translation update
- Other
- This change requires a documentation update
## Checklist
- [ ] My code follows the style guidelines of this project
- [ ] I ran ESLint and other linters for modified files
- [ ] I have performed a self-review of my own code and tested it
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] My changes generate no new warnings
- [ ] If my code needed automated tests, I have added them (this is an optional task)
## Screenshots (if any)
Please do not use any external image service. Instead, just paste in or drag and drop the image here, and it will be uploaded automatically.


@ -1,35 +0,0 @@
# This workflow will do a clean install of node dependencies, cache/restore them, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
name: Auto Test
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
auto-test:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [macos-latest, ubuntu-latest, windows-latest]
node-version: [14.x, 16.x, 17.x]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
- run: npm run install-legacy
- run: npm run build
- run: npm test
env:
HEADLESS_TEST: 1
JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}


@ -1,26 +0,0 @@
name: Close Incorrect Issue
on:
issues:
types: [opened]
jobs:
close-incorrect-issue:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest]
node-version: [16.x]
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
- run: npm ci
- run: node extra/close-incorrect-issue.js ${{ secrets.GITHUB_TOKEN }} ${{ github.event.issue.number }} ${{ github.event.issue.user.login }}


@ -1,22 +0,0 @@
name: 'Automatically close stale issues and PRs'
on:
schedule:
- cron: '0 0 * * *'
#Run once a day at midnight
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v4
with:
stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 6 months with no activity. Remove stale label or comment or this will be closed in 7 days.'
stale-pr-message: 'We are clearing up our old Pull Requests and yours has been open for 6 months with no activity. Remove stale label or comment or this will be closed in 7 days.'
close-issue-message: 'This issue was closed because it has been stalled for 7 days with no activity.'
close-pr-message: 'This PR was closed because it has been stalled for 7 days with no activity.'
days-before-stale: 180
days-before-close: 7
exempt-issue-labels: 'News,Medium,High,discussion,bug,doc,'
exempt-pr-labels: 'awaiting-approval,work-in-progress,enhancement,'
exempt-issue-assignees: 'louislam'
exempt-pr-assignees: 'louislam'

.gitignore

@ -1,15 +0,0 @@
node_modules
.DS_Store
dist
dist-ssr
*.local
.idea
/data
!/data/.gitkeep
.vscode
/private
/out
/tmp
.env


@ -1,9 +0,0 @@
{
"extends": "stylelint-config-standard",
"rules": {
"indentation": 4,
"no-descending-specificity": null,
"selector-list-comma-newline-after": null,
"declaration-empty-line-before": null
}
}

CNAME

@ -1 +0,0 @@
git.kuma.pet


@ -1,128 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
uptime@kuma.pet.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.


@ -1,254 +0,0 @@
# Project Info
First of all, thank you to everyone who has made pull requests for Uptime Kuma. I never thought the GitHub community could be this nice! Because of this, I also never expected that other people would actually read and edit my code. It is not structured or commented all that well, lol. Sorry about that.
The project was created with vite.js (Vue 3). I then created a subdirectory called "server" for the server part. Both the frontend and backend share the same package.json.
The frontend code builds into the "dist" directory. The server (express.js) exposes the "dist" directory as the root of the endpoint. This is how production works.
## Key Technical Skills
- Node.js (You should know what promises, async/await, arrow functions, etc. are)
- Socket.io
- SCSS
- Vue.js
- Bootstrap
- SQLite
## Directories
- data (App data)
- dist (Frontend build)
- extra (Extra useful scripts)
- public (Frontend resources for dev only)
- server (Server source code)
- src (Frontend source code)
- test (unit test)
## Can I create a pull request for Uptime Kuma?
Generally, if the pull request works fine and does not affect any existing logic, workflow, or performance, I will merge it into the master branch once it is tested.
If you are not sure whether I will accept your pull request, feel free to create an empty pull request draft first.
### Recommended Pull Request Guideline
1. Fork the project
1. Clone your fork repo to local
1. Create a new branch
1. Create an empty commit
`git commit -m "[empty commit] pull request for <YOUR TASK NAME>" --allow-empty`
1. Push to your fork repo
1. Create a pull request: https://github.com/louislam/uptime-kuma/compare
1. Write a proper description
1. Click "Change to draft"
### Pull Request Examples
Here are some example situations from the past.
#### ✅ High - Medium Priority
Easy to review, with no breaking changes, and without touching existing code
- Add a new notification
- Add a chart
- Fix a bug
- Translations
- Add an independent new feature
#### *️⃣ Requires one more reviewer
I do not have the knowledge to test these.
- Add k8s supports
#### ⚠ Low Priority - Harsh Mode
Some pull requests need to modify the core. To be honest, I do not want anyone to try to do that, because it would cost a lot of your time. I will review your pull request harshly. Also, you may need to write a lot of unit tests to ensure that there are no breaking changes.
- Touch large parts of the code of any very important feature
- Touch monitoring logic
- Drop a table or drop a column for any reason
- Touch the entry point of Docker or Node.js
- Modify auth
#### *️⃣ Low Priority
These change my current workflow and require further study.
- Change my release approach
#### ❌ Won't Merge
- Any breaking changes
- Duplicated pull request
- Buggy
- Existing logic is completely modified or deleted
- A function that is completely out of scope
## Project Styles
I personally do not like tools where you need to learn so much and configure so much before you can finally start the app.
- Easy to install for non-Docker users: no native build dependencies needed (at least for x86_64), no extra config, and no extra effort to get it running
- A single container for Docker users: no overly complex docker-compose file. Just map the volume and expose the port, then you are good to go
- Settings should be configurable in the frontend. Environment variables are not encouraged.
- Easy to use
## Coding Styles
- 4 spaces indentation
- Follow `.editorconfig`
- Follow ESLint
## Naming conventions
- JavaScript/TypeScript: camelCaseType
- SQLite: underscore_type
- CSS/SCSS: dash-type
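To make these concrete, here is a small illustration of the same concept named in each layer (the names are made up for the example):
```javascript
// Illustrative only: one concept, three naming styles.
const monitorList = [];             // JavaScript/TypeScript: camelCase
// SQLite column:   monitor_id     // underscore
// CSS/SCSS class:  .monitor-list  // dash
```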
## Tools
- Node.js >= 14
- Git
- IDE that supports ESLint and EditorConfig (I am using IntelliJ IDEA)
- A SQLite tool (SQLite Expert Personal is suggested)
## Install dependencies
```bash
npm ci
```
## How to start the Backend Dev Server
(2021-09-23 Update)
```bash
npm run start-server-dev
```
It binds to `0.0.0.0:3001` by default.
### Backend Details
It is mainly a socket.io app + express.js.
express.js is just used for serving the built frontend files (index.html, .js, .css, etc.)
- model/ (Object model, auto mapping to the database table name)
- modules/ (Modified 3rd-party modules)
- notification-providers/ (individual notification logic)
- routers/ (Express Routers)
- socket-handler (Socket.io Handlers)
- server.js (Server main logic)
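As a rough sketch of how these pieces fit together (illustrative only, not the actual server.js; the handler is made up, and the port matches the default above):
```javascript
// Minimal sketch: express serves the built frontend, socket.io rides the same HTTP server.
const express = require("express");
const http = require("http");
const { Server } = require("socket.io");

const app = express();
app.use(express.static("dist"));    // serve index.html, .js, .css, etc.

const server = http.createServer(app);
const io = new Server(server);      // socket.io shares the HTTP server

io.on("connection", (socket) => {
    // Real handlers live in server/socket-handler/ and server/server.js
    socket.on("ping", (callback) => callback("pong"));
});

server.listen(3001, "0.0.0.0");
```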
## How to start the Frontend Dev Server
1. Set the env var `NODE_ENV` to "development".
2. Start the frontend dev server with the following command.
```bash
npm run dev
```
It binds to `0.0.0.0:3000` by default.
You can use the Vue.js devtools Chrome extension for debugging.
### Build the frontend
```bash
npm run build
```
### Frontend Details
The Uptime Kuma frontend is a single-page application (SPA). Most paths are handled by Vue Router.
The router is in `src/router.js`.
Most data in the frontend is stored at the root level, even when you change the current route to other pages.
The data and socket logic are in `src/mixins/socket.js`.
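For orientation, a router along those lines looks roughly like this (a sketch; the page components and paths are hypothetical, see `src/router.js` for the real routes):
```javascript
// Hypothetical SPA router sketch in the style of src/router.js.
import { createRouter, createWebHistory } from "vue-router";
import Dashboard from "./pages/Dashboard.vue";
import Settings from "./pages/Settings.vue";

export default createRouter({
    history: createWebHistory(),
    routes: [
        { path: "/dashboard", component: Dashboard },
        { path: "/settings", component: Settings },
    ],
});
```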
## Database Migration
1. Create `patch-{name}.sql` in `./db/`
2. Add your patch filename to the `patchList` list in `./server/database.js` (see the sketch below)
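A hypothetical example of both steps (the patch name is made up, and the exact shape of `patchList` is an assumption; check `./server/database.js` for the real structure):
```javascript
// Step 1: create ./db/patch-add-example-column.sql, containing something like:
//   BEGIN TRANSACTION;
//   ALTER TABLE monitor ADD example_column VARCHAR(255);
//   COMMIT;
// Step 2: register the filename in ./server/database.js:
const patchList = [
    // ...existing patch filenames...
    "patch-add-example-column.sql",
];
```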
## Unit Test
It is end-to-end testing, using Jest and Puppeteer.
```bash
npm run build
npm test
```
By default, the Chromium window will be shown during the test. Specify `HEADLESS_TEST=1` for terminal environments.
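For reference, a spec under this setup looks roughly like the following (a hypothetical sketch; `page` is the global injected by jest-puppeteer per the config above, and the URL depends on how the test server is started):
```javascript
// Hypothetical e2e spec sketch; real specs live in ./test.
describe("dashboard", () => {
    it("loads the app", async () => {
        await page.goto("http://127.0.0.1:3001");    // assumed local server URL
        await page.waitForSelector("body");
        expect(await page.title()).toBeTruthy();
    });
});
```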
## Update Dependencies
Install `ncu`
https://github.com/raineorshine/npm-check-updates
```bash
ncu -u -t patch
npm install
```
Since updating Vite from 2.5.10 to 2.6.0 previously broke the application completely, from now on, only patch release versions should be updated.
Patch release = the third digit ([Semantic Versioning](https://semver.org/))
## Translations
Please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages
## Wiki
Since there is no way to make a pull request to the wiki's repo, I have set up another repo to do that.
https://github.com/louislam/uptime-kuma-wiki
## Maintainer
Check the latest issues and pull requests:
https://github.com/louislam/uptime-kuma/issues?q=sort%3Aupdated-desc
### Release Procedures
1. Draft a release note
1. Make sure the repo is clean
1. `npm run update-version 1.X.X`
1. `npm run build`
1. `npm run build-docker`
1. `git push`
1. Publish the release note as 1.X.X
1. `npm run upload-artifacts`
1. SSH to demo site server and update to 1.X.X
Checking:
- Check that all tags are fine on https://hub.docker.com/r/louislam/uptime-kuma/tags
- Try the Docker image with tag 1.X.X (Clean install / amd64 / arm64 / armv7)
- Try clean installation with Node.js
### Release Wiki
#### Setup Repo
```bash
git clone https://github.com/louislam/uptime-kuma-wiki.git
cd uptime-kuma-wiki
git remote add production https://github.com/louislam/uptime-kuma.wiki.git
```
#### Push to Production Wiki
```bash
git pull
git push production master
```

LICENSE

@ -1,21 +0,0 @@
MIT License
Copyright (c) 2021 Louis Lam
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md

@ -1,141 +0,0 @@
# Uptime Kuma
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
[![GitHub Sponsors](https://img.shields.io/github/sponsors/louislam?label=GitHub%20Sponsors)](https://github.com/sponsors/louislam)
<div align="center" width="100%">
<img src="./public/icon.svg" width="128" alt="" />
</div>
It is a self-hosted monitoring tool like "Uptime Robot".
<img src="https://uptime.kuma.pet/img/dark.jpg" width="700" alt="" />
## 🥔 Live Demo
Try it!
https://demo.uptime.kuma.pet
It is a temporary live demo; all data will be deleted after 10 minutes. The server is located in Tokyo, so if you live far away from there, it may affect your experience. I suggest that you install and try it out yourself for the best demo experience.
VPS is sponsored by Uptime Kuma sponsors on [Open Collective](https://opencollective.com/uptime-kuma)! Thank you so much!
## ⭐ Features
* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / Ping / DNS Record / Push / Steam Game Server.
* Fancy, Reactive, Fast UI/UX.
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [70+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications).
* 20 second intervals.
* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/languages)
* Simple Status Page
* Ping Chart
* Certificate Info
## 🔧 How to Install
### 🐳 Docker
```bash
docker volume create uptime-kuma
docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
```
⚠️ Please use a **local volume** only. Other types such as NFS are not supported.
Browse to http://localhost:3001 after starting.
### 💪🏻 Non-Docker
Required Tools: Node.js >= 14, git and pm2.
```bash
# Update your npm to the latest version
npm install npm -g
git clone https://github.com/louislam/uptime-kuma.git
cd uptime-kuma
npm run setup
# Option 1. Try it
node server/server.js
# (Recommended) Option 2. Run in background using PM2
# Install PM2 if you don't have it: npm install pm2 -g
pm2 start server/server.js --name uptime-kuma
```
Browse to http://localhost:3001 after starting.
### Advanced Installation
If you need more options or need to browse via a reverse proxy, please read:
https://github.com/louislam/uptime-kuma/wiki/%F0%9F%94%A7-How-to-Install
## 🆙 How to Update
Please read:
https://github.com/louislam/uptime-kuma/wiki/%F0%9F%86%99-How-to-Update
## 🆕 What's Next?
I will assign requests/issues to the next milestone.
https://github.com/louislam/uptime-kuma/milestones
Project Plan:
https://github.com/louislam/uptime-kuma/projects/1
## 🖼 More Screenshots
Light Mode:
<img src="https://uptime.kuma.pet/img/light.jpg" width="512" alt="" />
Status Page:
<img src="https://user-images.githubusercontent.com/1336778/134628766-a3fe0981-0926-4285-ab46-891a21c3e4cb.png" width="512" alt="" />
Settings Page:
<img src="https://louislam.net/uptimekuma/2.jpg" width="400" alt="" />
Telegram Notification Sample:
<img src="https://louislam.net/uptimekuma/3.jpg" width="400" alt="" />
## Motivation
* I was looking for a self-hosted monitoring tool like "Uptime Robot", but it was hard to find a suitable one. One of the closest is Statping. Unfortunately, it is not stable and is unmaintained.
* Want to build a fancy UI.
* Learn Vue 3 and vite.js.
* Show the power of Bootstrap 5.
* Try to use WebSocket with SPA instead of REST API.
* Deploy my first Docker image to Docker Hub.
If you love this project, please consider giving me a ⭐.
## 🗣️ Discussion
### Issues Page
You can discuss or ask for help in [issues](https://github.com/louislam/uptime-kuma/issues).
### Subreddit
My Reddit account: louislamlam
You can mention me if you ask a question on Reddit.
https://www.reddit.com/r/UptimeKuma/
## Contribute
If you want to report a bug or request a new feature, feel free to open a [new issue](https://github.com/louislam/uptime-kuma/issues).
If you want to translate Uptime Kuma into your language, please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages
If you want to modify Uptime Kuma, this guideline may be useful for you: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md
English proofreading is needed too because my grammar is not that great, sadly. Feel free to correct my grammar in this README, source code, or wiki.


@ -1,31 +0,0 @@
# Security Policy
## Reporting a Vulnerability
Please report security issues to uptime@kuma.pet.
Do not use the issue tracker or discuss it in public, as that will cause more damage.
## Supported Versions
The following versions of Uptime Kuma are currently being supported with security updates.
### Uptime Kuma Versions
| Version | Supported |
| ------- | ------------------ |
| 1.9.X | :white_check_mark: |
| <= 1.8.X | ❌ |
### Upgradable Docker Tags
| Tag | Supported |
| ------- | ------------------ |
| 1 | :white_check_mark: |
| 1-debian | :white_check_mark: |
| 1-alpine | :white_check_mark: |
| latest | :white_check_mark: |
| debian | :white_check_mark: |
| alpine | :white_check_mark: |
| All other tags | ❌ |


@ -1,11 +0,0 @@
const config = {};
if (process.env.TEST_FRONTEND) {
config.presets = ["@babel/preset-env"];
}
if (process.env.TEST_BACKEND) {
config.plugins = ["babel-plugin-rewire"];
}
module.exports = config;


@ -1,5 +0,0 @@
module.exports = {
"rootDir": "..",
"testRegex": "./test/backend.spec.js",
};


@ -1,33 +0,0 @@
const PuppeteerEnvironment = require("jest-environment-puppeteer");
const util = require("util");
class DebugEnv extends PuppeteerEnvironment {
async handleTestEvent(event, state) {
const ignoredEvents = [
"setup",
"add_hook",
"start_describe_definition",
"add_test",
"finish_describe_definition",
"run_start",
"run_describe_start",
"test_start",
"hook_start",
"hook_success",
"test_fn_start",
"test_fn_success",
"test_done",
"run_describe_finish",
"run_finish",
"teardown",
"test_fn_failure",
];
if (!ignoredEvents.includes(event.name)) {
console.log(
new Date().toString() + ` Unhandled event [${event.name}] ` + util.inspect(event)
);
}
}
}
module.exports = DebugEnv;


@ -1,5 +0,0 @@
module.exports = {
"rootDir": "..",
"testRegex": "./test/frontend.spec.js",
};


@ -1,20 +0,0 @@
module.exports = {
"launch": {
"dumpio": true,
"slowMo": 500,
"headless": process.env.HEADLESS_TEST || false,
"userDataDir": "./data/test-chrome-profile",
args: [
"--disable-setuid-sandbox",
"--disable-gpu",
"--disable-dev-shm-usage",
"--no-default-browser-check",
"--no-experiments",
"--no-first-run",
"--no-pings",
"--no-sandbox",
"--no-zygote",
"--single-process",
],
}
};


@ -1,12 +0,0 @@
module.exports = {
"verbose": true,
"preset": "jest-puppeteer",
"globals": {
"__DEV__": true
},
"testRegex": "./test/e2e.spec.js",
"testEnvironment": "./config/jest-debug-env.js",
"rootDir": "..",
"testTimeout": 30000,
};


@ -1,24 +0,0 @@
import legacy from "@vitejs/plugin-legacy";
import vue from "@vitejs/plugin-vue";
import { defineConfig } from "vite";
const postCssScss = require("postcss-scss");
const postcssRTLCSS = require("postcss-rtlcss");
// https://vitejs.dev/config/
export default defineConfig({
plugins: [
vue(),
legacy({
targets: ["ie > 11"],
additionalLegacyPolyfills: ["regenerator-runtime/runtime"]
})
],
css: {
postcss: {
"parser": postCssScss,
"map": false,
"plugins": [postcssRTLCSS]
}
},
});


Binary file not shown.


@ -1,7 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE user
ADD twofa_last_token VARCHAR(6);
COMMIT;


@ -1,10 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE user
ADD twofa_secret VARCHAR(64);
ALTER TABLE user
ADD twofa_status BOOLEAN default 0 NOT NULL;
COMMIT;


@ -1,7 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD retry_interval INTEGER default 0 not null;
COMMIT;


@ -1,30 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
create table `group`
(
id INTEGER not null
constraint group_pk
primary key autoincrement,
name VARCHAR(255) not null,
created_date DATETIME default (DATETIME('now')) not null,
public BOOLEAN default 0 not null,
active BOOLEAN default 1 not null,
weight BOOLEAN NOT NULL DEFAULT 1000
);
CREATE TABLE [monitor_group]
(
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[monitor_id] INTEGER NOT NULL REFERENCES [monitor] ([id]) ON DELETE CASCADE ON UPDATE CASCADE,
[group_id] INTEGER NOT NULL REFERENCES [group] ([id]) ON DELETE CASCADE ON UPDATE CASCADE,
weight BOOLEAN NOT NULL DEFAULT 1000
);
CREATE INDEX [fk]
ON [monitor_group] (
[monitor_id],
[group_id]);
COMMIT;


@ -1,13 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD method TEXT default 'GET' not null;
ALTER TABLE monitor
ADD body TEXT default null;
ALTER TABLE monitor
ADD headers TEXT default null;
COMMIT;


@ -1,10 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
-- For sendHeartbeatList
CREATE INDEX monitor_time_index ON heartbeat (monitor_id, time);
-- For sendImportantHeartbeatList
CREATE INDEX monitor_important_time_index ON heartbeat (monitor_id, important,time);
COMMIT;


@ -1,18 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
create table incident
(
id INTEGER not null
constraint incident_pk
primary key autoincrement,
title VARCHAR(255) not null,
content TEXT not null,
style VARCHAR(30) default 'warning' not null,
created_date DATETIME default (DATETIME('now')) not null,
last_updated_date DATETIME,
pin BOOLEAN default 1 not null,
active BOOLEAN default 1 not null
);
COMMIT;


@ -1,10 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD basic_auth_user TEXT default null;
ALTER TABLE monitor
ADD basic_auth_pass TEXT default null;
COMMIT;


@ -1,7 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD push_token VARCHAR(20) DEFAULT NULL;
COMMIT;


@ -1,18 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
CREATE TABLE [notification_sent_history] (
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[type] VARCHAR(50) NOT NULL,
[monitor_id] INTEGER NOT NULL,
[days] INTEGER NOT NULL,
UNIQUE([type], [monitor_id], [days])
);
CREATE INDEX [good_index] ON [notification_sent_history] (
[type],
[monitor_id],
[days]
);
COMMIT;


@ -1,22 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
-- Generated by Intellij IDEA
create table setting_dg_tmp
(
id INTEGER
primary key autoincrement,
key VARCHAR(200) not null
unique,
value TEXT,
type VARCHAR(20)
);
insert into setting_dg_tmp(id, key, value, type) select id, key, value, type from setting;
drop table setting;
alter table setting_dg_tmp rename to setting;
COMMIT;


@ -1,37 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
-- Change Monitor.created_date from "TIMESTAMP" to "DATETIME"
-- SQL Generated by Intellij Idea
PRAGMA foreign_keys=off;
BEGIN TRANSACTION;
create table monitor_dg_tmp
(
id INTEGER not null
primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER
references user
on update cascade on delete set null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME,
keyword VARCHAR(255)
);
insert into monitor_dg_tmp(id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword) select id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword from monitor;
drop table monitor;
alter table monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys=on;


@ -1,19 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
CREATE TABLE tag (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
name VARCHAR(255) NOT NULL,
color VARCHAR(255) NOT NULL,
created_date DATETIME DEFAULT (DATETIME('now')) NOT NULL
);
CREATE TABLE monitor_tag (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
monitor_id INTEGER NOT NULL,
tag_id INTEGER NOT NULL,
value TEXT,
CONSTRAINT FK_tag FOREIGN KEY (tag_id) REFERENCES tag(id) ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT FK_monitor FOREIGN KEY (monitor_id) REFERENCES monitor(id) ON DELETE CASCADE ON UPDATE CASCADE
);
CREATE INDEX monitor_tag_monitor_id_index ON monitor_tag (monitor_id);
CREATE INDEX monitor_tag_tag_id_index ON monitor_tag (tag_id);


@ -1,9 +0,0 @@
BEGIN TRANSACTION;
CREATE TABLE monitor_tls_info (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
monitor_id INTEGER NOT NULL,
info_json TEXT
);
COMMIT;


@ -1,37 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
-- Add maxretries column to monitor
PRAGMA foreign_keys=off;
BEGIN TRANSACTION;
create table monitor_dg_tmp
(
id INTEGER not null
primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER
references user
on update cascade on delete set null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME,
keyword VARCHAR(255),
maxretries INTEGER NOT NULL DEFAULT 0
);
insert into monitor_dg_tmp(id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword) select id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword from monitor;
drop table monitor;
alter table monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys=on;


@ -1,40 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
-- OK.... seriously wrong: the maxretries column was missing.
-- Developers should patch it manually if the maxretries column is missing.
PRAGMA foreign_keys=off;
BEGIN TRANSACTION;
create table monitor_dg_tmp
(
id INTEGER not null
primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER
references user
on update cascade on delete set null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME,
keyword VARCHAR(255),
maxretries INTEGER NOT NULL DEFAULT 0,
ignore_tls BOOLEAN default 0 not null,
upside_down BOOLEAN default 0 not null
);
insert into monitor_dg_tmp(id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword, maxretries) select id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword, maxretries from monitor;
drop table monitor;
alter table monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys=on;


@ -1,70 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
PRAGMA foreign_keys = off;
BEGIN TRANSACTION;
create table monitor_dg_tmp (
id INTEGER not null primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER references user on update cascade on delete
set
null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME default (DATETIME('now')) not null,
keyword VARCHAR(255),
maxretries INTEGER NOT NULL DEFAULT 0,
ignore_tls BOOLEAN default 0 not null,
upside_down BOOLEAN default 0 not null
);
insert into
monitor_dg_tmp(
id,
name,
active,
user_id,
interval,
url,
type,
weight,
hostname,
port,
keyword,
maxretries,
ignore_tls,
upside_down
)
select
id,
name,
active,
user_id,
interval,
url,
type,
weight,
hostname,
port,
keyword,
maxretries,
ignore_tls,
upside_down
from
monitor;
drop table monitor;
alter table
monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys = on;


@ -1,74 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
PRAGMA foreign_keys = off;
BEGIN TRANSACTION;
create table monitor_dg_tmp (
id INTEGER not null primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER references user on update cascade on delete
set
null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME default (DATETIME('now')) not null,
keyword VARCHAR(255),
maxretries INTEGER NOT NULL DEFAULT 0,
ignore_tls BOOLEAN default 0 not null,
upside_down BOOLEAN default 0 not null,
maxredirects INTEGER default 10 not null,
accepted_statuscodes_json TEXT default '["200-299"]' not null
);
insert into
monitor_dg_tmp(
id,
name,
active,
user_id,
interval,
url,
type,
weight,
hostname,
port,
created_date,
keyword,
maxretries,
ignore_tls,
upside_down
)
select
id,
name,
active,
user_id,
interval,
url,
type,
weight,
hostname,
port,
created_date,
keyword,
maxretries,
ignore_tls,
upside_down
from
monitor;
drop table monitor;
alter table
monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys = on;


@ -1,10 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD dns_resolve_type VARCHAR(5);
ALTER TABLE monitor
ADD dns_resolve_server VARCHAR(255);
COMMIT;


@ -1,7 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD dns_last_result VARCHAR(255);
COMMIT;


@ -1,7 +0,0 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE notification
ADD is_default BOOLEAN default 0 NOT NULL;
COMMIT;


@ -1,8 +0,0 @@
# DON'T UPDATE TO alpine3.13, 1.14, see #41.
FROM node:14-alpine3.12
WORKDIR /app
# Install apprise, iputils for non-root ping, setpriv
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib && \
pip3 --no-cache-dir install apprise==0.9.6 && \
rm -rf /root/.cache


@ -1,12 +0,0 @@
# DON'T UPDATE TO node:14-bullseye-slim, see #372.
# If the image changed, the second stage image should be changed too
FROM node:14-buster-slim
WORKDIR /app
# Install Apprise, add sqlite3 cli for debugging in the future, iputils-ping for ping, util-linux for setpriv
# Stupid python3 and python3-pip actually install a lot of useless things into Debian; specify --no-install-recommends to skip them and make the base even smaller than alpine!
RUN apt update && \
apt --yes --no-install-recommends install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
sqlite3 iputils-ping util-linux dumb-init && \
pip3 --no-cache-dir install apprise==0.9.6 && \
rm -rf /var/lib/apt/lists/*


@ -1,13 +0,0 @@
# Simple docker-compose.yml
# You can change your port or volume location
version: '3.3'
services:
uptime-kuma:
image: louislam/uptime-kuma
container_name: uptime-kuma
volumes:
- ./uptime-kuma:/app/data
ports:
- 3001:3001


@ -1,52 +0,0 @@
FROM louislam/uptime-kuma:base-debian AS build
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
COPY . .
RUN npm ci --production && \
chmod +x /app/extra/entrypoint.sh
FROM louislam/uptime-kuma:base-debian AS release
WORKDIR /app
# Copy app files from build layer
COPY --from=build /app /app
EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
CMD ["node", "server/server.js"]
FROM release AS nightly
RUN npm run mark-as-nightly
# Upload the artifact to Github
FROM louislam/uptime-kuma:base-debian AS upload-artifact
WORKDIR /
RUN apt update && \
apt --yes install curl file
COPY --from=build /app /app
ARG VERSION
ARG GITHUB_TOKEN
ARG TARGETARCH
ARG PLATFORM=debian
ARG FILE=$PLATFORM-$TARGETARCH-$VERSION.tar.gz
ARG DIST=dist.tar.gz
RUN chmod +x /app/extra/upload-github-release-asset.sh
# Full Build
# RUN tar -zcvf $FILE app
# RUN /app/extra/upload-github-release-asset.sh github_api_token=$GITHUB_TOKEN owner=louislam repo=uptime-kuma tag=$VERSION filename=$FILE
# Dist only
RUN cd /app && tar -zcvf $DIST dist
RUN /app/extra/upload-github-release-asset.sh github_api_token=$GITHUB_TOKEN owner=louislam repo=uptime-kuma tag=$VERSION filename=/app/$DIST


@ -1,25 +0,0 @@
FROM louislam/uptime-kuma:base-alpine AS build
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
COPY . .
RUN npm ci --production && \
chmod +x /app/extra/entrypoint.sh
FROM louislam/uptime-kuma:base-alpine AS release
WORKDIR /app
# Copy app files from build layer
COPY --from=build /app /app
EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
CMD ["node", "server/server.js"]
FROM release AS nightly
RUN npm run mark-as-nightly


@ -1,6 +0,0 @@
module.exports = {
apps: [{
name: "uptime-kuma",
script: "./server/server.js",
}]
}


@ -1,57 +0,0 @@
const github = require("@actions/github");
(async () => {
try {
const token = process.argv[2];
const issueNumber = process.argv[3];
const username = process.argv[4];
const client = github.getOctokit(token).rest;
const issue = {
owner: "louislam",
repo: "uptime-kuma",
number: issueNumber,
};
const labels = (
await client.issues.listLabelsOnIssue({
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number
})
).data.map(({ name }) => name);
if (labels.length === 0) {
console.log("Bad format here");
await client.issues.addLabels({
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
labels: ["invalid-format"]
});
// Add the issue closing comment
await client.issues.createComment({
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
body: `@${username}: Hello! :wave:\n\nThis issue is being automatically closed because it does not follow the issue template. Please DO NOT open a blank issue`
});
// Close the issue
await client.issues.update({
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
state: "closed"
});
} else {
console.log("Pass!");
}
} catch (e) {
console.log(e);
}
})();


@ -1,2 +0,0 @@
# Must enable File Sharing in Docker Desktop
docker run -it --rm -v ${pwd}:/app louislam/batsh /usr/bin/batsh bash --output ./install.sh ./extra/install.batsh


@ -1,59 +0,0 @@
console.log("Downloading dist");
const https = require("https");
const tar = require("tar");
const packageJSON = require("../package.json");
const fs = require("fs");
const version = packageJSON.version;
const filename = "dist.tar.gz";
const url = `https://github.com/louislam/uptime-kuma/releases/download/${version}/${filename}`;
download(url);
function download(url) {
console.log(url);
https.get(url, (response) => {
if (response.statusCode === 200) {
console.log("Extracting dist...");
if (fs.existsSync("./dist")) {
if (fs.existsSync("./dist-backup")) {
fs.rmdirSync("./dist-backup", {
recursive: true
});
}
fs.renameSync("./dist", "./dist-backup");
}
const tarStream = tar.x({
cwd: "./",
});
tarStream.on("close", () => {
if (fs.existsSync("./dist-backup")) {
fs.rmdirSync("./dist-backup", {
recursive: true
});
}
console.log("Done");
});
tarStream.on("error", () => {
if (fs.existsSync("./dist-backup")) {
fs.renameSync("./dist-backup", "./dist");
}
console.error("Error from tarStream");
});
response.pipe(tarStream);
} else if (response.statusCode === 302) {
download(response.headers.location);
} else {
console.log("dist not found");
}
});
}


@ -1,21 +0,0 @@
#!/usr/bin/env sh
# set -e: exit the script if an error happens
set -e
PUID=${PUID=0}
PGID=${PGID=0}
files_ownership () {
# -h Changes the ownership of an encountered symbolic link and not that of the file or directory pointed to by the symbolic link.
# -R Recursively descends the specified directories
# -c Like verbose but report only when a change is made
chown -hRc "$PUID":"$PGID" /app/data
}
echo "==> Performing startup jobs and maintenance tasks"
files_ownership
echo "==> Starting application with user $PUID group $PGID"
# --clear-groups Clear supplementary groups.
exec setpriv --reuid "$PUID" --regid "$PGID" --clear-groups "$@"


@ -1,50 +0,0 @@
/*
* This script should be run after a period of time (180s), because the server may need some time to prepare.
*/
const { FBSD } = require("../server/util-server");
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
let client;
const sslKey = process.env.UPTIME_KUMA_SSL_KEY || process.env.SSL_KEY || undefined;
const sslCert = process.env.UPTIME_KUMA_SSL_CERT || process.env.SSL_CERT || undefined;
if (sslKey && sslCert) {
client = require("https");
} else {
client = require("http");
}
// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
// Dual-stack support for (::)
let hostname = process.env.UPTIME_KUMA_HOST;
// Also read HOST if not FreeBSD, as HOST is a system environment variable in FreeBSD
if (!hostname && !FBSD) {
hostname = process.env.HOST;
}
const port = parseInt(process.env.UPTIME_KUMA_PORT || process.env.PORT || 3001);
let options = {
host: hostname || "127.0.0.1",
port: port,
timeout: 28 * 1000,
};
let request = client.request(options, (res) => {
console.log(`Health Check OK [Res Code: ${res.statusCode}]`);
if (res.statusCode === 302) {
process.exit(0);
} else {
process.exit(1);
}
});
request.on("error", function (err) {
console.error("Health Check ERROR");
process.exit(1);
});
request.end();


@ -1,245 +0,0 @@
// install.sh is generated by ./extra/install.batsh, do not modify it directly.
// "npm run compile-install-script" to compile install.sh
// The command works on Windows PowerShell and Docker for Windows only.
// curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
println("=====================");
println("Uptime Kuma Installer");
println("=====================");
println("Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian");
println("---------------------------------------");
println("This script is designed for Linux and basic usage.");
println("For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation");
println("---------------------------------------");
println("");
println("Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2");
println("Docker - Install Uptime Kuma Docker container");
println("");
if ("$1" != "") {
type = "$1";
} else {
call("read", "-p", "Which installation method do you prefer? [DOCKER/local]: ", "type");
}
defaultPort = "3001";
function checkNode() {
bash("nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')");
println("Node Version: " ++ nodeVersion);
if (nodeVersion < "12") {
println("Error: Node.js 14 is required");
call("exit", "1");
}
if (nodeVersion == "12") {
println("Warning: NodeJS " ++ nodeVersion ++ " is not tested.");
}
}
function deb() {
bash("nodeCheck=$(node -v)");
bash("apt --yes update");
if (nodeCheck != "") {
checkNode();
} else {
// Old nodejs binary name is "nodejs"
bash("check=$(nodejs --version)");
if (check != "") {
println("Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old.");
bash("exit 1");
}
bash("curlCheck=$(curl --version)");
if (curlCheck == "") {
println("Installing Curl");
bash("apt --yes install curl");
}
println("Installing Node.js 14");
bash("curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt");
bash("apt --yes install nodejs");
bash("node -v");
bash("nodeCheckAgain=$(node -v)");
if (nodeCheckAgain == "") {
println("Error during Node.js installation");
bash("exit 1");
}
}
bash("check=$(git --version)");
if (check == "") {
println("Installing Git");
bash("apt --yes install git");
}
}
if (type == "local") {
defaultInstallPath = "/opt/uptime-kuma";
if (exists("/etc/redhat-release")) {
os = call("cat", "/etc/redhat-release");
distribution = "rhel";
} else if (exists("/etc/issue")) {
bash("os=$(head -n1 /etc/issue | cut -f 1 -d ' ')");
if (os == "Ubuntu") {
distribution = "ubuntu";
}
if (os == "Debian") {
distribution = "debian";
}
}
bash("arch=$(uname -i)");
println("Your OS: " ++ os);
println("Distribution: " ++ distribution);
println("Arch: " ++ arch);
if ("$3" != "") {
port = "$3";
} else {
call("read", "-p", "Listening Port [$defaultPort]: ", "port");
if (port == "") {
port = defaultPort;
}
}
if ("$2" != "") {
installPath = "$2";
} else {
call("read", "-p", "Installation Path [$defaultInstallPath]: ", "installPath");
if (installPath == "") {
installPath = defaultInstallPath;
}
}
// CentOS
if (distribution == "rhel") {
bash("nodeCheck=$(node -v)");
if (nodeCheck != "") {
checkNode();
} else {
bash("curlCheck=$(curl --version)");
if (curlCheck == "") {
println("Installing Curl");
bash("yum -y -q install curl");
}
println("Installing Node.js 14");
bash("curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt");
bash("yum install -y -q nodejs");
bash("node -v");
bash("nodeCheckAgain=$(node -v)");
if (nodeCheckAgain == "") {
println("Error during Node.js installation");
bash("exit 1");
}
}
bash("check=$(git --version)");
if (check == "") {
println("Installing Git");
bash("yum -y -q install git");
}
// Ubuntu
} else if (distribution == "ubuntu") {
deb();
// Debian
} else if (distribution == "debian") {
deb();
} else {
// Unknown distribution
error = 0;
bash("check=$(git --version)");
if (check == "") {
error = 1;
println("Error: git is missing");
}
bash("check=$(node -v)");
if (check == "") {
error = 1;
println("Error: node is missing");
}
if (error > 0) {
println("Please install above missing software");
bash("exit 1");
}
}
bash("check=$(pm2 --version)");
if (check == "") {
println("Installing PM2");
bash("npm install pm2 -g");
bash("pm2 startup");
}
bash("mkdir -p $installPath");
bash("cd $installPath");
bash("git clone https://github.com/louislam/uptime-kuma.git .");
bash("npm run setup");
bash("pm2 start server/server.js --name uptime-kuma -- --port=$port");
} else {
defaultVolume = "uptime-kuma";
bash("check=$(docker -v)");
if (check == "") {
println("Error: docker is not found!");
bash("exit 1");
}
bash("check=$(docker info)");
bash("if [[ \"$check\" == *\"Is the docker daemon running\"* ]]; then
\"echo\" \"Error: docker is not running\"
\"exit\" \"1\"
fi");
if ("$3" != "") {
port = "$3";
} else {
call("read", "-p", "Expose Port [$defaultPort]: ", "port");
if (port == "") {
port = defaultPort;
}
}
if ("$2" != "") {
volume = "$2";
} else {
call("read", "-p", "Volume Name [$defaultVolume]: ", "volume");
if (volume == "") {
volume = defaultVolume;
}
}
println("Port: $port");
println("Volume: $volume");
bash("docker volume create $volume");
bash("docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1");
}
println("http://localhost:$port");

@ -1,24 +0,0 @@
const pkg = require("../package.json");
const fs = require("fs");
const util = require("../src/util");
util.polyfill();
const oldVersion = pkg.version;
const newVersion = oldVersion + "-nightly";
console.log("Old Version: " + oldVersion);
console.log("New Version: " + newVersion);
if (newVersion) {
// Process package.json
pkg.version = newVersion;
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion);
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion);
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
// Process README.md
if (fs.existsSync("README.md")) {
fs.writeFileSync("README.md", fs.readFileSync("README.md", "utf8").replaceAll(oldVersion, newVersion))
}
}

@ -1,60 +0,0 @@
console.log("== Uptime Kuma Remove 2FA Tool ==");
console.log("Loading the database");
const Database = require("../server/database");
const { R } = require("redbean-node");
const readline = require("readline");
const TwoFA = require("../server/2fa");
const args = require("args-parser")(process.argv);
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
const main = async () => {
Database.init(args);
await Database.connect();
try {
// No need to actually remove 2FA for testing; just make sure there is no connection problem. It is OK for now.
if (!process.env.TEST_BACKEND) {
const user = await R.findOne("user");
if (! user) {
throw new Error("user not found, have you installed?");
}
console.log("Found user: " + user.username);
let ans = await question("Are you sure want to remove 2FA? [y/N]");
if (ans.toLowerCase() === "y") {
await TwoFA.disable2FA(user.id);
console.log("2FA has been removed successfully.");
}
}
} catch (e) {
console.error("Error: " + e.message);
}
await Database.close();
rl.close();
console.log("Finished.");
};
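// Promisify readline's callback-based question() so it can be awaited in main()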
function question(question) {
return new Promise((resolve) => {
rl.question(question, (answer) => {
resolve(answer);
});
});
}
if (!process.env.TEST_BACKEND) {
main();
}
module.exports = {
main,
};

@ -1,70 +0,0 @@
console.log("== Uptime Kuma Reset Password Tool ==");
console.log("Loading the database");
const Database = require("../server/database");
const { R } = require("redbean-node");
const readline = require("readline");
const { initJWTSecret } = require("../server/util-server");
const args = require("args-parser")(process.argv);
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
const main = async () => {
Database.init(args);
await Database.connect();
try {
// No need to actually reset the password for testing; just make sure there is no connection problem. It is OK for now.
if (!process.env.TEST_BACKEND) {
const user = await R.findOne("user");
if (! user) {
throw new Error("user not found, have you installed?");
}
console.log("Found user: " + user.username);
while (true) {
let password = await question("New Password: ");
let confirmPassword = await question("Confirm New Password: ");
if (password === confirmPassword) {
await user.resetPassword(password);
// Reset all sessions by reset jwt secret
await initJWTSecret();
break;
} else {
console.log("Passwords do not match, please try again.");
}
}
console.log("Password reset successfully.");
}
} catch (e) {
console.error("Error: " + e.message);
}
await Database.close();
rl.close();
console.log("Finished.");
};
function question(question) {
return new Promise((resolve) => {
rl.question(question, (answer) => {
resolve(answer);
});
});
}
if (!process.env.TEST_BACKEND) {
main();
}
module.exports = {
main,
};

@ -1,144 +0,0 @@
/*
* Simple DNS Server
* For testing DNS monitoring type, dev only
*/
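// Try it with e.g.: dig @127.0.0.1 -p 5300 existing.com A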
const dns2 = require("dns2");
const { Packet } = dns2;
const server = dns2.createServer({
udp: true
});
server.on("request", (request, send, rinfo) => {
for (let question of request.questions) {
console.log(question.name, type(question.type), question.class);
const response = Packet.createResponseFromRequest(request);
if (question.name === "existing.com") {
if (question.type === Packet.TYPE.A) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
address: "1.2.3.4"
});
} else if (question.type === Packet.TYPE.AAAA) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
address: "fe80::::1234:5678:abcd:ef00",
});
} else if (question.type === Packet.TYPE.CNAME) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
domain: "cname1.existing.com",
});
} else if (question.type === Packet.TYPE.MX) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
exchange: "mx1.existing.com",
priority: 5
});
} else if (question.type === Packet.TYPE.NS) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
ns: "ns1.existing.com",
});
} else if (question.type === Packet.TYPE.SOA) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
primary: "existing.com",
admin: "admin@existing.com",
serial: 2021082701,
refresh: 300,
retry: 3,
expiration: 10,
minimum: 10,
});
} else if (question.type === Packet.TYPE.SRV) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
priority: 5,
weight: 5,
port: 8080,
target: "srv1.existing.com",
});
} else if (question.type === Packet.TYPE.TXT) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
data: "#v=spf1 include:_spf.existing.com ~all",
});
} else if (question.type === Packet.TYPE.CAA) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
flags: 0,
tag: "issue",
value: "ca.existing.com",
});
}
}
if (question.name === "4.3.2.1.in-addr.arpa") {
if (question.type === Packet.TYPE.PTR) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
domain: "ptr1.existing.com",
});
}
}
send(response);
}
});
server.on("listening", () => {
console.log("Listening");
console.log(server.addresses());
});
server.on("close", () => {
console.log("server closed");
});
server.listen({
udp: 5300
});
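// Reverse lookup: map a numeric record-type code back to its name (e.g. Packet.TYPE.A -> "A")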
function type(code) {
for (let name in Packet.TYPE) {
if (Packet.TYPE[name] === code) {
return name;
}
}
}

@ -1,3 +0,0 @@
package-lock.json
test.js
languages/

@ -1,86 +0,0 @@
// Need to use ES6 to read language files
import fs from "fs";
import path from "path";
import util from "util";
// https://stackoverflow.com/questions/13786160/copy-folder-recursively-in-node-js
/**
* Look ma, it's cp -R.
* @param {string} src The path to the thing to copy.
* @param {string} dest The path to the new copy.
*/
const copyRecursiveSync = function (src, dest) {
let exists = fs.existsSync(src);
let stats = exists && fs.statSync(src);
let isDirectory = exists && stats.isDirectory();
if (isDirectory) {
fs.mkdirSync(dest);
fs.readdirSync(src).forEach(function (childItemName) {
copyRecursiveSync(path.join(src, childItemName),
path.join(dest, childItemName));
});
} else {
fs.copyFileSync(src, dest);
}
};
console.log("Arguments:", process.argv);
const baseLangCode = process.argv[2] || "en";
console.log("Base Lang: " + baseLangCode);
if (fs.existsSync("./languages")) {
fs.rmdirSync("./languages", { recursive: true });
}
copyRecursiveSync("../../src/languages", "./languages");
const en = (await import("./languages/en.js")).default;
const baseLang = (await import(`./languages/${baseLangCode}.js`)).default;
const files = fs.readdirSync("./languages");
console.log("Files:", files);
for (const file of files) {
if (!file.endsWith(".js")) {
console.log("Skipping " + file);
continue;
}
console.log("Processing " + file);
const lang = await import("./languages/" + file);
let obj;
if (lang.default) {
obj = lang.default;
} else {
console.log("Empty file");
obj = {
languageName: "<Your Language name in your language (not in English)>"
};
}
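// Fallback order: keep existing strings, fill gaps from English, then, for keys that exist only in the base language, use the key itself as a placeholder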
// En first
for (const key in en) {
if (! obj[key]) {
obj[key] = en[key];
}
}
if (baseLang !== en) {
// Base second
for (const key in baseLang) {
if (! obj[key]) {
obj[key] = key;
}
}
}
const code = "export default " + util.inspect(obj, {
depth: null,
});
fs.writeFileSync(`../../src/languages/${file}`, code);
}
fs.rmdirSync("./languages", { recursive: true });
console.log("Done. Fixing formatting by ESLint...");

@ -1,12 +0,0 @@
{
"name": "update-language-files",
"type": "module",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC"
}

@ -1,100 +0,0 @@
const pkg = require("../package.json");
const fs = require("fs");
const child_process = require("child_process");
const util = require("../src/util");
util.polyfill();
const oldVersion = pkg.version;
const newVersion = process.argv[2];
console.log("Old Version: " + oldVersion);
console.log("New Version: " + newVersion);
if (! newVersion) {
console.error("invalid version");
process.exit(1);
}
const exists = tagExists(newVersion);
if (! exists) {
// Process package.json
pkg.version = newVersion;
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion);
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion);
pkg.scripts["build-docker-alpine"] = pkg.scripts["build-docker-alpine"].replaceAll(oldVersion, newVersion);
pkg.scripts["build-docker-debian"] = pkg.scripts["build-docker-debian"].replaceAll(oldVersion, newVersion);
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
commit(newVersion);
tag(newVersion);
updateWiki(oldVersion, newVersion);
} else {
console.log("version exists");
}
function commit(version) {
let msg = "update to " + version;
let res = child_process.spawnSync("git", ["commit", "-m", msg, "-a"]);
let stdout = res.stdout.toString().trim();
console.log(stdout);
if (stdout.includes("no changes added to commit")) {
throw new Error("commit error");
}
}
function tag(version) {
let res = child_process.spawnSync("git", ["tag", version]);
console.log(res.stdout.toString().trim());
}
function tagExists(version) {
if (! version) {
throw new Error("invalid version");
}
let res = child_process.spawnSync("git", ["tag", "-l", version]);
return res.stdout.toString().trim() === version;
}
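// "git tag -l <version>" prints the tag name only when it exists, so a matching line means the tag is already present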
function updateWiki(oldVersion, newVersion) {
const wikiDir = "./tmp/wiki";
const howToUpdateFilename = "./tmp/wiki/🆙-How-to-Update.md";
safeDelete(wikiDir);
child_process.spawnSync("git", ["clone", "https://github.com/louislam/uptime-kuma.wiki.git", wikiDir]);
let content = fs.readFileSync(howToUpdateFilename).toString();
content = content.replaceAll(`git checkout ${oldVersion}`, `git checkout ${newVersion}`);
fs.writeFileSync(howToUpdateFilename, content);
child_process.spawnSync("git", ["add", "-A"], {
cwd: wikiDir,
});
child_process.spawnSync("git", ["commit", "-m", `Update to ${newVersion} from ${oldVersion}`], {
cwd: wikiDir,
});
console.log("Pushing to Github");
child_process.spawnSync("git", ["push"], {
cwd: wikiDir,
});
safeDelete(wikiDir);
}
function safeDelete(dir) {
if (fs.existsSync(dir)) {
fs.rmdirSync(dir, {
recursive: true,
});
}
}

@ -1,64 +0,0 @@
#!/usr/bin/env bash
#
# Author: Stefan Buck
# License: MIT
# https://gist.github.com/stefanbuck/ce788fee19ab6eb0b4447a85fc99f447
#
#
# This script accepts the following parameters:
#
# * owner
# * repo
# * tag
# * filename
# * github_api_token
#
# Script to upload a release asset using the GitHub API v3.
#
# Example:
#
# upload-github-release-asset.sh github_api_token=TOKEN owner=stefanbuck repo=playground tag=v0.1.0 filename=./build.zip
#
# Check dependencies.
set -e
xargs=$(which gxargs || which xargs)
# Validate settings.
[ "$TRACE" ] && set -x
CONFIG=$@
for line in $CONFIG; do
eval "$line"
done
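# Each key=value argument is eval'd into a shell variable, e.g. "tag=v0.1.0" sets $tag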
# Define variables.
GH_API="https://api.github.com"
GH_REPO="$GH_API/repos/$owner/$repo"
GH_TAGS="$GH_REPO/releases/tags/$tag"
AUTH="Authorization: token $github_api_token"
WGET_ARGS="--content-disposition --auth-no-challenge --no-cookie"
CURL_ARGS="-LJO#"
if [[ "$tag" == 'LATEST' ]]; then
GH_TAGS="$GH_REPO/releases/latest"
fi
# Validate token.
curl -o /dev/null -sH "$AUTH" $GH_REPO || { echo "Error: Invalid repo, token or network issue!"; exit 1; }
# Read asset tags.
response=$(curl -sH "$AUTH" $GH_TAGS)
# Get ID of the asset based on given filename.
eval $(echo "$response" | grep -m 1 "id.:" | grep -w id | tr : = | tr -cd '[[:alnum:]]=')
[ "$id" ] || { echo "Error: Failed to get release id for tag: $tag"; echo "$response" | awk 'length($0)<100' >&2; exit 1; }
# Upload asset
echo "Uploading asset... "
# Construct url
GH_ASSET="https://uploads.github.com/repos/$owner/$repo/releases/$id/assets?name=$(basename $filename)"
curl "$GITHUB_OAUTH_BASIC" --data-binary @"$filename" -H "Authorization: token $github_api_token" -H "Content-Type: application/octet-stream" $GH_ASSET

@ -1,17 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png">
<link rel="icon" type="image/svg+xml" href="/icon.svg" />
<link rel="manifest" href="/manifest.json" />
<meta name="theme-color" id="theme-color" content="" />
<meta name="description" content="Uptime Kuma monitoring tool" />
<title>Uptime Kuma</title>
</head>
<body>
<div id="app"></div>
<script type="module" src="/src/main.js"></script>
</body>
</html>

@ -1,203 +0,0 @@
# install.sh is generated by ./extra/install.batsh, do not modify it directly.
# "npm run compile-install-script" to compile install.sh
# The command works on Windows PowerShell and Docker for Windows only.
# curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
"echo" "-e" "====================="
"echo" "-e" "Uptime Kuma Installer"
"echo" "-e" "====================="
"echo" "-e" "Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian"
"echo" "-e" "---------------------------------------"
"echo" "-e" "This script is designed for Linux and basic usage."
"echo" "-e" "For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation"
"echo" "-e" "---------------------------------------"
"echo" "-e" ""
"echo" "-e" "Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2"
"echo" "-e" "Docker - Install Uptime Kuma Docker container"
"echo" "-e" ""
if [ "$1" != "" ]; then
type="$1"
else
"read" "-p" "Which installation method do you prefer? [DOCKER/local]: " "type"
fi
defaultPort="3001"
function checkNode {
local _0
nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')
"echo" "-e" "Node Version: ""$nodeVersion"
_0="12"
if [ $(($nodeVersion < $_0)) == 1 ]; then
"echo" "-e" "Error: Required Node.js 14"
"exit" "1"
fi
if [ "$nodeVersion" == "12" ]; then
"echo" "-e" "Warning: NodeJS ""$nodeVersion"" is not tested."
fi
}
function deb {
nodeCheck=$(node -v)
apt --yes update
if [ "$nodeCheck" != "" ]; then
"checkNode"
else
# Old nodejs binary name is "nodejs"
check=$(nodejs --version)
if [ "$check" != "" ]; then
"echo" "-e" "Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old."
exit 1
fi
curlCheck=$(curl --version)
if [ "$curlCheck" == "" ]; then
"echo" "-e" "Installing Curl"
apt --yes install curl
fi
"echo" "-e" "Installing Node.js 14"
curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt
apt --yes install nodejs
node -v
nodeCheckAgain=$(node -v)
if [ "$nodeCheckAgain" == "" ]; then
"echo" "-e" "Error during Node.js installation"
exit 1
fi
fi
check=$(git --version)
if [ "$check" == "" ]; then
"echo" "-e" "Installing Git"
apt --yes install git
fi
}
if [ "$type" == "local" ]; then
defaultInstallPath="/opt/uptime-kuma"
if [ -e "/etc/redhat-release" ]; then
os=$("cat" "/etc/redhat-release")
distribution="rhel"
else
if [ -e "/etc/issue" ]; then
os=$(head -n1 /etc/issue | cut -f 1 -d ' ')
if [ "$os" == "Ubuntu" ]; then
distribution="ubuntu"
fi
if [ "$os" == "Debian" ]; then
distribution="debian"
fi
fi
fi
arch=$(uname -i)
"echo" "-e" "Your OS: ""$os"
"echo" "-e" "Distribution: ""$distribution"
"echo" "-e" "Arch: ""$arch"
if [ "$3" != "" ]; then
port="$3"
else
"read" "-p" "Listening Port [$defaultPort]: " "port"
if [ "$port" == "" ]; then
port="$defaultPort"
fi
fi
if [ "$2" != "" ]; then
installPath="$2"
else
"read" "-p" "Installation Path [$defaultInstallPath]: " "installPath"
if [ "$installPath" == "" ]; then
installPath="$defaultInstallPath"
fi
fi
# CentOS
if [ "$distribution" == "rhel" ]; then
nodeCheck=$(node -v)
if [ "$nodeCheck" != "" ]; then
"checkNode"
else
curlCheck=$(curl --version)
if [ "$curlCheck" == "" ]; then
"echo" "-e" "Installing Curl"
yum -y -q install curl
fi
"echo" "-e" "Installing Node.js 14"
curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt
yum install -y -q nodejs
node -v
nodeCheckAgain=$(node -v)
if [ "$nodeCheckAgain" == "" ]; then
"echo" "-e" "Error during Node.js installation"
exit 1
fi
fi
check=$(git --version)
if [ "$check" == "" ]; then
"echo" "-e" "Installing Git"
yum -y -q install git
fi
# Ubuntu
else
if [ "$distribution" == "ubuntu" ]; then
"deb"
# Debian
else
if [ "$distribution" == "debian" ]; then
"deb"
else
# Unknown distribution
error=$((0))
check=$(git --version)
if [ "$check" == "" ]; then
error=$((1))
"echo" "-e" "Error: git is missing"
fi
check=$(node -v)
if [ "$check" == "" ]; then
error=$((1))
"echo" "-e" "Error: node is missing"
fi
if [ $(($error > 0)) == 1 ]; then
"echo" "-e" "Please install above missing software"
exit 1
fi
fi
fi
fi
check=$(pm2 --version)
if [ "$check" == "" ]; then
"echo" "-e" "Installing PM2"
npm install pm2 -g
pm2 startup
fi
mkdir -p $installPath
cd $installPath
git clone https://github.com/louislam/uptime-kuma.git .
npm run setup
pm2 start server/server.js --name uptime-kuma -- --port=$port
else
defaultVolume="uptime-kuma"
check=$(docker -v)
if [ "$check" == "" ]; then
"echo" "-e" "Error: docker is not found!"
exit 1
fi
check=$(docker info)
if [[ "$check" == *"Is the docker daemon running"* ]]; then
"echo" "Error: docker is not running"
"exit" "1"
fi
if [ "$3" != "" ]; then
port="$3"
else
"read" "-p" "Expose Port [$defaultPort]: " "port"
if [ "$port" == "" ]; then
port="$defaultPort"
fi
fi
if [ "$2" != "" ]; then
volume="$2"
else
"read" "-p" "Volume Name [$defaultVolume]: " "volume"
if [ "$volume" == "" ]; then
volume="$defaultVolume"
fi
fi
"echo" "-e" "Port: $port"
"echo" "-e" "Volume: $volume"
docker volume create $volume
docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1
fi
"echo" "-e" "http://localhost:$port"

package-lock.json (generated, 25652 lines): file diff suppressed because it is too large.

@ -1,134 +0,0 @@
{
"name": "uptime-kuma",
"version": "1.11.1",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/louislam/uptime-kuma.git"
},
"engines": {
"node": "14.* || >=16.*"
},
"scripts": {
"install-legacy": "npm install --legacy-peer-deps",
"update-legacy": "npm update --legacy-peer-deps",
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
"lint": "npm run lint:js && npm run lint:style",
"dev": "vite --host --config ./config/vite.config.js",
"start": "npm run start-server",
"start-server": "node server/server.js",
"start-server-dev": "cross-env NODE_ENV=development node server/server.js",
"build": "vite build --config ./config/vite.config.js",
"test": "node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/ --test",
"test-with-build": "npm run build && npm test",
"jest": "node test/prepare-jest.js && npm run jest-frontend && npm run jest-backend",
"jest-frontend": "cross-env TEST_FRONTEND=1 jest --config=./config/jest-frontend.config.js",
"jest-backend": "cross-env TEST_BACKEND=1 jest --config=./config/jest-backend.config.js",
"tsc": "tsc",
"vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
"build-docker": "npm run build-docker-debian && npm run build-docker-alpine",
"build-docker-alpine-base": "docker buildx build -f docker/alpine-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-alpine . --push",
"build-docker-debian-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-debian . --push",
"build-docker-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:1.11.1-alpine --target release . --push",
"build-docker-debian": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:1.11.1 -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:1.11.1-debian --target release . --push",
"build-docker-nightly": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
"build-docker-nightly-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly-alpine --target nightly . --push",
"build-docker-nightly-amd64": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
"setup": "git checkout 1.11.1 && npm ci --production && npm run download-dist",
"download-dist": "node extra/download-dist.js",
"update-version": "node extra/update-version.js",
"mark-as-nightly": "node extra/mark-as-nightly.js",
"reset-password": "node extra/reset-password.js",
"remove-2fa": "node extra/remove-2fa.js",
"compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1",
"test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
"test-install-script-alpine3": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/alpine3.dockerfile .",
"test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
"test-nodejs16": "docker build --progress plain -f test/ubuntu-nodejs16.dockerfile .",
"simple-dns-server": "node extra/simple-dns-server.js",
"update-language-files-with-base-lang": "cd extra/update-language-files && node index.js %npm_config_base_lang% && eslint ../../src/languages/**.js --fix",
"update-language-files": "cd extra/update-language-files && node index.js && eslint ../../src/languages/**.js --fix",
"ncu-patch": "ncu -u -t patch"
},
"dependencies": {
"@fortawesome/fontawesome-svg-core": "~1.2.36",
"@fortawesome/free-regular-svg-icons": "~5.15.4",
"@fortawesome/free-solid-svg-icons": "~5.15.4",
"@fortawesome/vue-fontawesome": "~3.0.0-5",
"@louislam/sqlite3": "~6.0.1",
"@popperjs/core": "~2.10.2",
"args-parser": "~1.3.0",
"axios": "~0.21.4",
"bcryptjs": "~2.4.3",
"bootstrap": "5.1.3",
"bree": "~7.1.0",
"chardet": "^1.3.0",
"chart.js": "~3.6.0",
"chartjs-adapter-dayjs": "~1.0.0",
"check-password-strength": "^2.0.3",
"command-exists": "~1.2.9",
"compare-versions": "~3.6.0",
"dayjs": "~1.10.7",
"express": "~4.17.1",
"express-basic-auth": "~1.2.0",
"form-data": "~4.0.0",
"http-graceful-shutdown": "~3.1.5",
"iconv-lite": "^0.6.3",
"jsonwebtoken": "~8.5.1",
"jwt-decode": "^3.1.2",
"limiter": "^2.1.0",
"nodemailer": "~6.6.5",
"notp": "~2.0.3",
"password-hash": "~1.2.2",
"postcss-rtlcss": "~3.4.1",
"postcss-scss": "~4.0.2",
"prom-client": "~13.2.0",
"prometheus-api-metrics": "~3.2.0",
"qrcode": "~1.5.0",
"redbean-node": "0.1.3",
"socket.io": "~4.2.0",
"socket.io-client": "~4.2.0",
"tar": "^6.1.11",
"tcp-ping": "~0.1.1",
"thirty-two": "~1.0.2",
"timezones-list": "~3.0.1",
"v-pagination-3": "~0.1.7",
"vue": "next",
"vue-chart-3": "~0.5.11",
"vue-confirm-dialog": "~1.0.2",
"vue-contenteditable": "~3.0.4",
"vue-i18n": "~9.1.9",
"vue-image-crop-upload": "~3.0.3",
"vue-multiselect": "~3.0.0-alpha.2",
"vue-qrcode": "~1.0.0",
"vue-router": "~4.0.12",
"vue-toastification": "~2.0.0-rc.5",
"vuedraggable": "~4.1.0"
},
"devDependencies": {
"@actions/github": "~5.0.0",
"@babel/eslint-parser": "~7.15.8",
"@babel/preset-env": "^7.15.8",
"@types/bootstrap": "~5.1.6",
"@vitejs/plugin-legacy": "~1.6.3",
"@vitejs/plugin-vue": "~1.9.4",
"@vue/compiler-sfc": "~3.2.22",
"babel-plugin-rewire": "~1.2.0",
"core-js": "~3.18.3",
"cross-env": "~7.0.3",
"dns2": "~2.0.1",
"eslint": "~7.32.0",
"eslint-plugin-vue": "~7.18.0",
"jest": "~27.2.5",
"jest-puppeteer": "~6.0.0",
"puppeteer": "~10.4.0",
"sass": "~1.42.1",
"stylelint": "~13.13.1",
"stylelint-config-standard": "~22.0.0",
"typescript": "~4.4.4",
"vite": "~2.6.14"
}
}

Binary file not shown. (Before: 5.7 KiB)
Binary file not shown. (Before: 4.7 KiB)
Binary file not shown. (Before: 15 KiB)
Binary file not shown. (Before: 2.6 KiB)
Binary file not shown. (Before: 9.5 KiB)
Binary file not shown. (Before: 11 KiB)
File diff suppressed because one or more lines are too long. (Before: 6.4 KiB)

@ -1,19 +0,0 @@
{
"name": "Uptime Kuma",
"short_name": "Uptime Kuma",
"start_url": "/",
"background_color": "#fff",
"display": "standalone",
"icons": [
{
"src": "icon-192x192.png",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "icon-512x512.png",
"sizes": "512x512",
"type": "image/png"
}
]
}

@ -1,14 +0,0 @@
const { checkLogin } = require("./util-server");
const { R } = require("redbean-node");
class TwoFA {
static async disable2FA(userID) {
return await R.exec("UPDATE `user` SET twofa_status = 0 WHERE id = ? ", [
userID,
]);
}
}
module.exports = TwoFA;

@ -1,61 +0,0 @@
const basicAuth = require("express-basic-auth");
const passwordHash = require("./password-hash");
const { R } = require("redbean-node");
const { setting } = require("./util-server");
const { debug } = require("../src/util");
const { loginRateLimiter } = require("./rate-limiter");
/**
* Validate a username/password pair against the user table
* @param {string} username
* @param {string} password
* @returns {Promise<Bean|null>} the user bean when the credentials are valid, otherwise null
*/
exports.login = async function (username, password) {
let user = await R.findOne("user", " username = ? AND active = 1 ", [
username,
]);
if (user && passwordHash.verify(password, user.password)) {
// Upgrade the hash to bcrypt
if (passwordHash.needRehash(user.password)) {
await R.exec("UPDATE `user` SET password = ? WHERE id = ? ", [
passwordHash.generate(password),
user.id,
]);
}
return user;
}
return null;
};
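// Authorizer for express-basic-auth: bypasses auth when the "disableAuth" setting is on; otherwise consults the login rate limiter before verifying credentials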
function myAuthorizer(username, password, callback) {
setting("disableAuth").then((result) => {
if (result) {
callback(null, true);
} else {
// Login Rate Limit
loginRateLimiter.pass(null, 0).then((pass) => {
if (pass) {
exports.login(username, password).then((user) => {
callback(null, user != null);
if (user == null) {
loginRateLimiter.removeTokens(1);
}
});
} else {
callback(null, false);
}
});
}
});
}
exports.basicAuth = basicAuth({
authorizer: myAuthorizer,
authorizeAsync: true,
challenge: true,
});

@ -1,41 +0,0 @@
const { setSetting } = require("./util-server");
const axios = require("axios");
exports.version = require("../package.json").version;
exports.latestVersion = null;
let interval;
exports.startInterval = () => {
let check = async () => {
try {
const res = await axios.get("https://uptime.kuma.pet/version");
// For debug
if (process.env.TEST_CHECK_VERSION === "1") {
res.data.slow = "1000.0.0";
}
if (res.data.slow) {
exports.latestVersion = res.data.slow;
}
} catch (_) { }
};
check();
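// Re-check every 48 hours (3600 * 1000 * 48 ms)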
interval = setInterval(check, 3600 * 1000 * 48);
};
exports.enableCheckUpdate = async (value) => {
await setSetting("checkUpdate", value);
clearInterval(interval);
if (value) {
exports.startInterval();
}
};
exports.socket = null;

@ -1,100 +0,0 @@
/*
* For Client Socket
*/
const { TimeLogger } = require("../src/util");
const { R } = require("redbean-node");
const { io } = require("./server");
const { setting } = require("./util-server");
const checkVersion = require("./check-version");
async function sendNotificationList(socket) {
const timeLogger = new TimeLogger();
let result = [];
let list = await R.find("notification", " user_id = ? ", [
socket.userID,
]);
for (let bean of list) {
result.push(bean.export());
}
io.to(socket.userID).emit("notificationList", result);
timeLogger.print("Send Notification List");
return list;
}
/**
* Send heartbeat history list to socket
* @param socket Socket to send to
* @param monitorID ID of the monitor
* @param toUser True = send to all browsers with the same user id, False = send to the current browser only
* @param overwrite Overwrite client-side's heartbeat list
*/
async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
const timeLogger = new TimeLogger();
let list = await R.getAll(`
SELECT * FROM heartbeat
WHERE monitor_id = ?
ORDER BY time DESC
LIMIT 100
`, [
monitorID,
]);
let result = list.reverse();
if (toUser) {
io.to(socket.userID).emit("heartbeatList", monitorID, result, overwrite);
} else {
socket.emit("heartbeatList", monitorID, result, overwrite);
}
timeLogger.print(`[Monitor: ${monitorID}] sendHeartbeatList`);
}
/**
* Send the important heartbeat list (aka event list) to socket
* @param socket
* @param monitorID
* @param toUser True = send to all browsers with the same user id, False = send to the current browser only
* @param overwrite Overwrite client-side's heartbeat list
*/
async function sendImportantHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
const timeLogger = new TimeLogger();
let list = await R.find("heartbeat", `
monitor_id = ?
AND important = 1
ORDER BY time DESC
LIMIT 500
`, [
monitorID,
]);
timeLogger.print(`[Monitor: ${monitorID}] sendImportantHeartbeatList`);
if (toUser) {
io.to(socket.userID).emit("importantHeartbeatList", monitorID, list, overwrite);
} else {
socket.emit("importantHeartbeatList", monitorID, list, overwrite);
}
}
async function sendInfo(socket) {
socket.emit("info", {
version: checkVersion.version,
latestVersion: checkVersion.latestVersion,
primaryBaseURL: await setting("primaryBaseURL")
});
}
module.exports = {
sendNotificationList,
sendImportantHeartbeatList,
sendHeartbeatList,
sendInfo
};

@ -1,7 +0,0 @@
const args = require("args-parser")(process.argv);
const demoMode = args["demo"] || false;
module.exports = {
args,
demoMode
};

@ -1,398 +0,0 @@
const fs = require("fs");
const { R } = require("redbean-node");
const { setSetting, setting } = require("./util-server");
const { debug, sleep } = require("../src/util");
const dayjs = require("dayjs");
const knex = require("knex");
/**
* Database & App Data Folder
*/
class Database {
static templatePath = "./db/kuma.db";
/**
* Data Dir (Default: ./data)
*/
static dataDir;
/**
* User Upload Dir (Default: ./data/upload)
*/
static uploadDir;
static path;
/**
* @type {boolean}
*/
static patched = false;
/**
* For Backup only
*/
static backupPath = null;
/**
* Add a patch filename as the key
* Values:
* true: Apply the patch regardless of order
* false: Do nothing
* { parents: [] }: Apply the listed parent patches before this one
*/
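// Example (hypothetical): "patch-foo.sql": { parents: ["patch-bar.sql"] } would apply patch-bar.sql before patch-foo.sql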
static patchList = {
"patch-setting-value-type.sql": true,
"patch-improve-performance.sql": true,
"patch-2fa.sql": true,
"patch-add-retry-interval-monitor.sql": true,
"patch-incident-table.sql": true,
"patch-group-table.sql": true,
"patch-monitor-push_token.sql": true,
"patch-http-monitor-method-body-and-headers.sql": true,
"patch-2fa-invalidate-used-token.sql": true,
"patch-notification_sent_history.sql": true,
"patch-monitor-basic-auth.sql": true,
}
/**
* The final version should be 10 after the tag feature is merged
* @deprecated Use patchList for any new feature
*/
static latestVersion = 10;
static noReject = true;
static init(args) {
// Data Directory (must end with "/")
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
Database.path = Database.dataDir + "kuma.db";
if (! fs.existsSync(Database.dataDir)) {
fs.mkdirSync(Database.dataDir, { recursive: true });
}
Database.uploadDir = Database.dataDir + "upload/";
if (! fs.existsSync(Database.uploadDir)) {
fs.mkdirSync(Database.uploadDir, { recursive: true });
}
console.log(`Data Dir: ${Database.dataDir}`);
}
static async connect(testMode = false) {
const acquireConnectionTimeout = 120 * 1000;
const Dialect = require("knex/lib/dialects/sqlite3/index.js");
Dialect.prototype._driver = () => require("@louislam/sqlite3");
const knexInstance = knex({
client: Dialect,
connection: {
filename: Database.path,
acquireConnectionTimeout: acquireConnectionTimeout,
},
useNullAsDefault: true,
pool: {
min: 1,
max: 1,
idleTimeoutMillis: 120 * 1000,
propagateCreateError: false,
acquireTimeoutMillis: acquireConnectionTimeout,
}
});
R.setup(knexInstance);
if (process.env.SQL_LOG === "1") {
R.debug(true);
}
// Auto map the model to a bean object
R.freeze(true);
await R.autoloadModels("./server/model");
await R.exec("PRAGMA foreign_keys = ON");
if (testMode) {
// Change to MEMORY
await R.exec("PRAGMA journal_mode = MEMORY");
} else {
// Change to WAL
await R.exec("PRAGMA journal_mode = WAL");
}
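// Negative cache_size is measured in KiB, so -12000 is roughly a 12 MB page cache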
await R.exec("PRAGMA cache_size = -12000");
await R.exec("PRAGMA auto_vacuum = FULL");
console.log("SQLite config:");
console.log(await R.getAll("PRAGMA journal_mode"));
console.log(await R.getAll("PRAGMA cache_size"));
console.log("SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
}
static async patch() {
let version = parseInt(await setting("database_version"));
if (! version) {
version = 0;
}
console.info("Your database version: " + version);
console.info("Latest database version: " + this.latestVersion);
if (version === this.latestVersion) {
console.info("Database patch not needed");
} else if (version > this.latestVersion) {
console.info("Warning: Database version is newer than expected");
} else {
console.info("Database patch is needed");
this.backup(version);
// Try/catch everything here; if anything goes wrong, restore the backup
try {
for (let i = version + 1; i <= this.latestVersion; i++) {
const sqlFile = `./db/patch${i}.sql`;
console.info(`Patching ${sqlFile}`);
await Database.importSQLFile(sqlFile);
console.info(`Patched ${sqlFile}`);
await setSetting("database_version", i);
}
} catch (ex) {
await Database.close();
console.error(ex);
console.error("Start Uptime-Kuma failed due to issue patching the database");
console.error("Please submit a bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
this.restore();
process.exit(1);
}
}
await this.patch2();
}
/**
* Call it from patch() only
* @returns {Promise<void>}
*/
static async patch2() {
console.log("Database Patch 2.0 Process");
let databasePatchedFiles = await setting("databasePatchedFiles");
if (! databasePatchedFiles) {
databasePatchedFiles = {};
}
debug("Patched files:");
debug(databasePatchedFiles);
try {
for (let sqlFilename in this.patchList) {
await this.patch2Recursion(sqlFilename, databasePatchedFiles);
}
if (this.patched) {
console.log("Database Patched Successfully");
}
} catch (ex) {
await Database.close();
console.error(ex);
console.error("Start Uptime-Kuma failed due to issue patching the database");
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
this.restore();
process.exit(1);
}
await setSetting("databasePatchedFiles", databasePatchedFiles);
}
/**
* Used by patch2() only
* @param sqlFilename
* @param databasePatchedFiles
*/
static async patch2Recursion(sqlFilename, databasePatchedFiles) {
let value = this.patchList[sqlFilename];
if (! value) {
console.log(sqlFilename + " skip");
return;
}
// Check if patched
if (! databasePatchedFiles[sqlFilename]) {
console.log(sqlFilename + " is not patched");
if (value.parents) {
console.log(sqlFilename + " need parents");
for (let parentSQLFilename of value.parents) {
await this.patch2Recursion(parentSQLFilename, databasePatchedFiles);
}
}
this.backup(dayjs().format("YYYYMMDDHHmmss"));
console.log(sqlFilename + " is patching");
this.patched = true;
await this.importSQLFile("./db/" + sqlFilename);
databasePatchedFiles[sqlFilename] = true;
console.log(sqlFilename + " was patched successfully");
} else {
debug(sqlFilename + " is already patched, skip");
}
}
/**
* Sadly, multi-statement SQL is not supported by many SQLite libraries, so it is implemented here manually
* @param filename
* @returns {Promise<void>}
*/
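// Note: the split on ";" below is naive; assumed (not verified here) to be fine for the bundled patch files, which do not embed semicolons in string literals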
static async importSQLFile(filename) {
await R.getCell("SELECT 1");
let text = fs.readFileSync(filename).toString();
// Remove all comments (--)
let lines = text.split("\n");
lines = lines.filter((line) => {
return ! line.startsWith("--");
});
// Split statements by semicolon
// Filter out empty line
text = lines.join("\n");
let statements = text.split(";")
.map((statement) => {
return statement.trim();
})
.filter((statement) => {
return statement !== "";
});
for (let statement of statements) {
await R.exec(statement);
}
}
static getBetterSQLite3Database() {
return R.knex.client.acquireConnection();
}
/**
* Special handling, because tarn.js throws a promise rejection that cannot be caught
* @returns {Promise<void>}
*/
static async close() {
const listener = (reason, p) => {
Database.noReject = false;
};
process.addListener("unhandledRejection", listener);
console.log("Closing the database");
while (true) {
Database.noReject = true;
await R.close();
await sleep(2000);
if (Database.noReject) {
break;
} else {
console.log("Waiting to close the database");
}
}
console.log("SQLite closed");
process.removeListener("unhandledRejection", listener);
}
/**
* Only one backup is made per process run.
* Reset this.backupPath if you want to back up again
* @param version
*/
static backup(version) {
if (! this.backupPath) {
console.info("Backing up the database");
this.backupPath = this.dataDir + "kuma.db.bak" + version;
fs.copyFileSync(Database.path, this.backupPath);
const shmPath = Database.path + "-shm";
if (fs.existsSync(shmPath)) {
this.backupShmPath = shmPath + ".bak" + version;
fs.copyFileSync(shmPath, this.backupShmPath);
}
const walPath = Database.path + "-wal";
if (fs.existsSync(walPath)) {
this.backupWalPath = walPath + ".bak" + version;
fs.copyFileSync(walPath, this.backupWalPath);
}
}
}
/**
* Restore from the backup made before patching, if the patch failed
*/
static restore() {
if (this.backupPath) {
console.error("Patching the database failed!!! Restoring the backup");
const shmPath = Database.path + "-shm";
const walPath = Database.path + "-wal";
// Delete patch failed db
try {
if (fs.existsSync(Database.path)) {
fs.unlinkSync(Database.path);
}
if (fs.existsSync(shmPath)) {
fs.unlinkSync(shmPath);
}
if (fs.existsSync(walPath)) {
fs.unlinkSync(walPath);
}
} catch (e) {
console.log("Restore failed; you may need to restore the backup manually");
process.exit(1);
}
// Restore backup
fs.copyFileSync(this.backupPath, Database.path);
if (this.backupShmPath) {
fs.copyFileSync(this.backupShmPath, shmPath);
}
if (this.backupWalPath) {
fs.copyFileSync(this.backupWalPath, walPath);
}
} else {
console.log("Nothing to restore");
}
}
static getSize() {
debug("Database.getSize()");
let stats = fs.statSync(Database.path);
debug(stats);
return stats.size;
}
static async shrink() {
await R.exec("VACUUM");
}
}
module.exports = Database;

@ -1,57 +0,0 @@
/*
From https://github.com/DiegoZoracKy/image-data-uri/blob/master/lib/image-data-uri.js
Modified with 0 dependencies
*/
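// e.g. decode("data:image/png;base64,....") returns { imageType: "image/png", dataBase64: "....", dataBuffer: <Buffer> }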
let fs = require("fs");
let ImageDataURI = (() => {
function decode(dataURI) {
if (!/data:image\//.test(dataURI)) {
console.log("ImageDataURI :: Error :: It seems that it is not an Image Data URI. Couldn't match \"data:image/\"");
return null;
}
let regExMatches = dataURI.match("data:(image/.*);base64,(.*)");
return {
imageType: regExMatches[1],
dataBase64: regExMatches[2],
dataBuffer: Buffer.from(regExMatches[2], "base64")
};
}
function encode(data, mediaType) {
if (!data || !mediaType) {
console.log("ImageDataURI :: Error :: Missing some of the required params: data, mediaType ");
return null;
}
mediaType = (/\//.test(mediaType)) ? mediaType : "image/" + mediaType;
let dataBase64 = (Buffer.isBuffer(data)) ? data.toString("base64") : Buffer.from(data).toString("base64");
let dataImgBase64 = "data:" + mediaType + ";base64," + dataBase64;
return dataImgBase64;
}
function outputFile(dataURI, filePath) {
filePath = filePath || "./";
return new Promise((resolve, reject) => {
let imageDecoded = decode(dataURI);
fs.writeFile(filePath, imageDecoded.dataBuffer, err => {
if (err) {
return reject("ImageDataURI :: Error :: " + JSON.stringify(err, null, 4));
}
resolve(filePath);
});
});
}
return {
decode: decode,
encode: encode,
outputFile: outputFile,
};
})();
module.exports = ImageDataURI;

@ -1,31 +0,0 @@
const path = require("path");
const Bree = require("bree");
const { SHARE_ENV } = require("worker_threads");
const jobs = [
{
name: "clear-old-data",
interval: "at 03:14",
},
];
const initBackgroundJobs = function (args) {
const bree = new Bree({
root: path.resolve("server", "jobs"),
jobs,
worker: {
env: SHARE_ENV,
workerData: args,
},
workerMessageHandler: (message) => {
console.log("[Background Job]:", message);
}
});
bree.start();
return bree;
};
module.exports = {
initBackgroundJobs
};

@ -1,40 +0,0 @@
const { log, exit, connectDb } = require("./util-worker");
const { R } = require("redbean-node");
const { setSetting, setting } = require("../util-server");
const DEFAULT_KEEP_PERIOD = 180;
(async () => {
await connectDb();
let period = await setting("keepDataPeriodDays");
// Set Default Period
if (period == null) {
await setSetting("keepDataPeriodDays", DEFAULT_KEEP_PERIOD, "general");
period = DEFAULT_KEEP_PERIOD;
}
// Try to parse the setting (note: parseInt never throws; it returns NaN on bad input)
let parsedPeriod = parseInt(period);
if (Number.isNaN(parsedPeriod)) {
log("Failed to parse setting, resetting to default..");
await setSetting("keepDataPeriodDays", DEFAULT_KEEP_PERIOD, "general");
parsedPeriod = DEFAULT_KEEP_PERIOD;
}
log(`Clearing Data older than ${parsedPeriod} days...`);
try {
await R.exec(
"DELETE FROM heartbeat WHERE time < DATETIME('now', '-' || ? || ' days') ",
[parsedPeriod]
);
} catch (e) {
log(`Failed to clear old data: ${e.message}`);
}
exit();
})();

@ -1,39 +0,0 @@
const { parentPort, workerData } = require("worker_threads");
const Database = require("../database");
const path = require("path");
const log = function (any) {
if (parentPort) {
parentPort.postMessage(any);
}
};
const exit = function (error) {
if (error && error != 0) {
process.exit(error);
} else {
if (parentPort) {
parentPort.postMessage("done");
} else {
process.exit(0);
}
}
};
const connectDb = async function () {
const dbPath = path.join(
process.env.DATA_DIR || workerData["data-dir"] || "./data/"
);
Database.init({
"data-dir": dbPath,
});
await Database.connect();
};
module.exports = {
log,
exit,
connectDb,
};

@ -1,34 +0,0 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const { R } = require("redbean-node");
class Group extends BeanModel {
async toPublicJSON() {
let monitorBeanList = await this.getMonitorList();
let monitorList = [];
for (let bean of monitorBeanList) {
monitorList.push(await bean.toPublicJSON());
}
return {
id: this.id,
name: this.name,
weight: this.weight,
monitorList,
};
}
async getMonitorList() {
return R.convertToBeans("monitor", await R.getAll(`
SELECT monitor.* FROM monitor, monitor_group
WHERE monitor.id = monitor_group.monitor_id
AND group_id = ?
ORDER BY monitor_group.weight
`, [
this.id,
]));
}
}
module.exports = Group;

@ -1,39 +0,0 @@
const dayjs = require("dayjs");
const utc = require("dayjs/plugin/utc");
let timezone = require("dayjs/plugin/timezone");
dayjs.extend(utc);
dayjs.extend(timezone);
const { BeanModel } = require("redbean-node/dist/bean-model");
/**
* status:
* 0 = DOWN
* 1 = UP
* 2 = PENDING
*/
class Heartbeat extends BeanModel {
toPublicJSON() {
return {
status: this.status,
time: this.time,
msg: "", // Hide for public
ping: this.ping,
};
}
toJSON() {
return {
monitorID: this.monitor_id,
status: this.status,
time: this.time,
msg: this.msg,
ping: this.ping,
important: this.important,
duration: this.duration,
};
}
}
module.exports = Heartbeat;

@ -1,18 +0,0 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
class Incident extends BeanModel {
toPublicJSON() {
return {
id: this.id,
style: this.style,
title: this.title,
content: this.content,
pin: this.pin,
createdDate: this.createdDate,
lastUpdatedDate: this.lastUpdatedDate,
};
}
}
module.exports = Incident;

@ -1,768 +0,0 @@
const https = require("https");
const dayjs = require("dayjs");
const utc = require("dayjs/plugin/utc");
let timezone = require("dayjs/plugin/timezone");
dayjs.extend(utc);
dayjs.extend(timezone);
const axios = require("axios");
const { Prometheus } = require("../prometheus");
const { debug, UP, DOWN, PENDING, flipStatus, TimeLogger } = require("../../src/util");
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, errorLog } = require("../util-server");
const { R } = require("redbean-node");
const { BeanModel } = require("redbean-node/dist/bean-model");
const { Notification } = require("../notification");
const { demoMode } = require("../config");
const version = require("../../package.json").version;
const apicache = require("../modules/apicache");
/**
* status:
* 0 = DOWN
* 1 = UP
* 2 = PENDING
*/
class Monitor extends BeanModel {
/**
* Return an object ready to be serialized to JSON for the public
* Only show necessary data to the public
*/
async toPublicJSON() {
return {
id: this.id,
name: this.name,
};
}
/**
* Return an object ready to be serialized to JSON
*/
async toJSON() {
let notificationIDList = {};
let list = await R.find("monitor_notification", " monitor_id = ? ", [
this.id,
]);
for (let bean of list) {
notificationIDList[bean.notification_id] = true;
}
const tags = await R.getAll("SELECT mt.*, tag.name, tag.color FROM monitor_tag mt JOIN tag ON mt.tag_id = tag.id WHERE mt.monitor_id = ?", [this.id]);
return {
id: this.id,
name: this.name,
url: this.url,
method: this.method,
body: this.body,
headers: this.headers,
basic_auth_user: this.basic_auth_user,
basic_auth_pass: this.basic_auth_pass,
hostname: this.hostname,
port: this.port,
maxretries: this.maxretries,
weight: this.weight,
active: this.active,
type: this.type,
interval: this.interval,
retryInterval: this.retryInterval,
keyword: this.keyword,
ignoreTls: this.getIgnoreTls(),
upsideDown: this.isUpsideDown(),
maxredirects: this.maxredirects,
accepted_statuscodes: this.getAcceptedStatuscodes(),
dns_resolve_type: this.dns_resolve_type,
dns_resolve_server: this.dns_resolve_server,
dns_last_result: this.dns_last_result,
pushToken: this.pushToken,
notificationIDList,
tags: tags,
};
}
/**
* Encode user and password to Base64 encoding
* for HTTP "basic" auth, as per RFC-7617
* @returns {string}
*/
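// e.g. encodeBase64("user", "pass") === "dXNlcjpwYXNz"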
encodeBase64(user, pass) {
return Buffer.from(user + ":" + pass).toString("base64");
}
/**
* Parse to boolean
* @returns {boolean}
*/
getIgnoreTls() {
return Boolean(this.ignoreTls);
}
/**
* Parse to boolean
* @returns {boolean}
*/
isUpsideDown() {
return Boolean(this.upsideDown);
}
getAcceptedStatuscodes() {
return JSON.parse(this.accepted_statuscodes_json);
}
start(io) {
let previousBeat = null;
let retries = 0;
let prometheus = new Prometheus(this);
const beat = async () => {
// Expose here for prometheus update
// undefined if not https
let tlsInfo = undefined;
if (! previousBeat) {
previousBeat = await R.findOne("heartbeat", " monitor_id = ? ORDER BY time DESC", [
this.id,
]);
}
const isFirstBeat = !previousBeat;
let bean = R.dispense("heartbeat");
bean.monitor_id = this.id;
bean.time = R.isoDateTime(dayjs.utc());
bean.status = DOWN;
if (this.isUpsideDown()) {
bean.status = flipStatus(bean.status);
}
// Duration
if (! isFirstBeat) {
bean.duration = dayjs(bean.time).diff(dayjs(previousBeat.time), "second");
} else {
bean.duration = 0;
}
try {
if (this.type === "http" || this.type === "keyword") {
// Do not run any queries or other heavy work before "bean.ping" is measured
let startTime = dayjs().valueOf();
// HTTP basic auth
let basicAuthHeader = {};
if (this.basic_auth_user) {
basicAuthHeader = {
"Authorization": "Basic " + this.encodeBase64(this.basic_auth_user, this.basic_auth_pass),
};
}
debug(`[${this.name}] Prepare Options for axios`);
const options = {
url: this.url,
method: (this.method || "get").toLowerCase(),
...(this.body ? { data: JSON.parse(this.body) } : {}),
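// Time out at 80% of the check interval so a slow request cannot overlap the next beat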
timeout: this.interval * 1000 * 0.8,
headers: {
"Accept": "*/*",
"User-Agent": "Uptime-Kuma/" + version,
...(this.headers ? JSON.parse(this.headers) : {}),
...(basicAuthHeader),
},
httpsAgent: new https.Agent({
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
rejectUnauthorized: ! this.getIgnoreTls(),
}),
maxRedirects: this.maxredirects,
validateStatus: (status) => {
return checkStatusCode(status, this.getAcceptedStatuscodes());
},
};
debug(`[${this.name}] Axios Request`);
let res = await axios.request(options);
bean.msg = `${res.status} - ${res.statusText}`;
bean.ping = dayjs().valueOf() - startTime;
// Check certificate if https is used
let certInfoStartTime = dayjs().valueOf();
if (this.getUrl()?.protocol === "https:") {
debug(`[${this.name}] Check cert`);
try {
let tlsInfoObject = checkCertificate(res);
tlsInfo = await this.updateTlsInfo(tlsInfoObject);
if (!this.getIgnoreTls()) {
debug(`[${this.name}] call sendCertNotification`);
await this.sendCertNotification(tlsInfoObject);
}
} catch (e) {
if (e.message !== "No TLS certificate in response") {
console.error(e.message);
}
}
}
if (process.env.TIMELOGGER === "1") {
debug("Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms");
}
if (process.env.UPTIME_KUMA_LOG_RESPONSE_BODY_MONITOR_ID == this.id) {
console.log(res.data);
}
if (this.type === "http") {
bean.status = UP;
} else {
let data = res.data;
// Convert to string for object/array
if (typeof data !== "string") {
data = JSON.stringify(data);
}
if (data.includes(this.keyword)) {
bean.msg += ", keyword is found";
bean.status = UP;
} else {
throw new Error(bean.msg + ", but keyword is not found");
}
}
} else if (this.type === "port") {
bean.ping = await tcping(this.hostname, this.port);
bean.msg = "";
bean.status = UP;
} else if (this.type === "ping") {
bean.ping = await ping(this.hostname);
bean.msg = "";
bean.status = UP;
} else if (this.type === "dns") {
let startTime = dayjs().valueOf();
let dnsMessage = "";
let dnsRes = await dnsResolve(this.hostname, this.dns_resolve_server, this.dns_resolve_type);
bean.ping = dayjs().valueOf() - startTime;
if (this.dns_resolve_type == "A" || this.dns_resolve_type == "AAAA" || this.dns_resolve_type == "TXT") {
dnsMessage += "Records: ";
dnsMessage += dnsRes.join(" | ");
} else if (this.dns_resolve_type == "CNAME" || this.dns_resolve_type == "PTR") {
dnsMessage = dnsRes[0];
} else if (this.dns_resolve_type == "CAA") {
dnsMessage = dnsRes[0].issue;
} else if (this.dns_resolve_type == "MX") {
dnsRes.forEach(record => {
dnsMessage += `Hostname: ${record.exchange} - Priority: ${record.priority} | `;
});
dnsMessage = dnsMessage.slice(0, -2);
} else if (this.dns_resolve_type == "NS") {
dnsMessage += "Servers: ";
dnsMessage += dnsRes.join(" | ");
} else if (this.dns_resolve_type == "SOA") {
dnsMessage += `NS-Name: ${dnsRes.nsname} | Hostmaster: ${dnsRes.hostmaster} | Serial: ${dnsRes.serial} | Refresh: ${dnsRes.refresh} | Retry: ${dnsRes.retry} | Expire: ${dnsRes.expire} | MinTTL: ${dnsRes.minttl}`;
} else if (this.dns_resolve_type == "SRV") {
dnsRes.forEach(record => {
dnsMessage += `Name: ${record.name} | Port: ${record.port} | Priority: ${record.priority} | Weight: ${record.weight} | `;
});
dnsMessage = dnsMessage.slice(0, -2);
}
if (this.dnsLastResult !== dnsMessage) {
R.exec("UPDATE `monitor` SET dns_last_result = ? WHERE id = ? ", [
dnsMessage,
this.id
]);
}
bean.msg = dnsMessage;
bean.status = UP;
} else if (this.type === "push") { // Type: Push
const time = R.isoDateTime(dayjs.utc().subtract(this.interval, "second"));
let heartbeatCount = await R.count("heartbeat", " monitor_id = ? AND time > ? ", [
this.id,
time
]);
debug("heartbeatCount" + heartbeatCount + " " + time);
if (heartbeatCount <= 0) {
// Fix #922: the previous heartbeat could have been inserted by the API, so fetch it from the database
previousBeat = await Monitor.getPreviousHeartbeat(this.id);
throw new Error("No heartbeat in the time window");
} else {
// No need to insert successful heartbeat for push type, so end here
retries = 0;
this.heartbeatInterval = setTimeout(safeBeat, this.interval * 1000);
return;
}
} else if (this.type === "steam") {
const steamApiUrl = "https://api.steampowered.com/IGameServersService/GetServerList/v1/";
const steamAPIKey = await setting("steamAPIKey");
const filter = `addr\\${this.hostname}:${this.port}`;
if (!steamAPIKey) {
throw new Error("Steam API Key not found");
}
let res = await axios.get(steamApiUrl, {
timeout: this.interval * 1000 * 0.8,
headers: {
"Accept": "*/*",
"User-Agent": "Uptime-Kuma/" + version,
},
httpsAgent: new https.Agent({
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
rejectUnauthorized: ! this.getIgnoreTls(),
}),
maxRedirects: this.maxredirects,
validateStatus: (status) => {
return checkStatusCode(status, this.getAcceptedStatuscodes());
},
params: {
filter: filter,
key: steamAPIKey,
}
});
if (res.data.response && res.data.response.servers && res.data.response.servers.length > 0) {
bean.status = UP;
bean.msg = res.data.response.servers[0].name;
try {
bean.ping = await ping(this.hostname);
} catch (_) { }
} else {
throw new Error("Server not found on Steam");
}
} else {
bean.msg = "Unknown Monitor Type";
bean.status = PENDING;
}
if (this.isUpsideDown()) {
bean.status = flipStatus(bean.status);
if (bean.status === DOWN) {
throw new Error("Flip UP to DOWN");
}
}
retries = 0;
} catch (error) {
bean.msg = error.message;
// If UP comes in here, it must be upside-down mode
// Just reset the retries
if (this.isUpsideDown() && bean.status === UP) {
retries = 0;
} else if ((this.maxretries > 0) && (retries < this.maxretries)) {
retries++;
bean.status = PENDING;
}
}
let beatInterval = this.interval;
debug(`[${this.name}] Check isImportant`);
let isImportant = Monitor.isImportantBeat(isFirstBeat, previousBeat?.status, bean.status);
// Mark the beat as important when the status effectively flipped
// (see isImportantBeat: first beat, UP -> DOWN, DOWN -> UP, or PENDING -> DOWN)
if (isImportant) {
bean.important = true;
debug(`[${this.name}] sendNotification`);
await Monitor.sendNotification(isFirstBeat, this, bean);
// Clear Status Page Cache
debug(`[${this.name}] apicache clear`);
apicache.clear();
} else {
bean.important = false;
}
if (bean.status === UP) {
console.info(`Monitor #${this.id} '${this.name}': Successful Response: ${bean.ping} ms | Interval: ${beatInterval} seconds | Type: ${this.type}`);
} else if (bean.status === PENDING) {
if (this.retryInterval > 0) {
beatInterval = this.retryInterval;
}
console.warn(`Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Retry: ${retries} | Retry Interval: ${beatInterval} seconds | Type: ${this.type}`);
} else {
console.warn(`Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type}`);
}
debug(`[${this.name}] Send to socket`);
io.to(this.user_id).emit("heartbeat", bean.toJSON());
Monitor.sendStats(io, this.id, this.user_id);
debug(`[${this.name}] Store`);
await R.store(bean);
debug(`[${this.name}] prometheus.update`);
prometheus.update(bean, tlsInfo);
previousBeat = bean;
if (! this.isStop) {
if (demoMode) {
if (beatInterval < 20) {
console.log("beat interval too low, reset to 20s");
beatInterval = 20;
}
}
debug(`[${this.name}] SetTimeout for next check.`);
this.heartbeatInterval = setTimeout(safeBeat, beatInterval * 1000);
} else {
console.log(`[${this.name}] isStop = true, no next check.`);
}
};
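// safeBeat wraps beat() so an unexpected exception is logged and the check is
// rescheduled, instead of silently killing this monitor's heartbeat loop.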
const safeBeat = async () => {
try {
await beat();
} catch (e) {
console.trace(e);
errorLog(e, false);
console.error("Please report to https://github.com/louislam/uptime-kuma/issues");
if (! this.isStop) {
console.log("Try to restart the monitor");
this.heartbeatInterval = setTimeout(safeBeat, this.interval * 1000);
}
}
};
// Delay Push Type
if (this.type === "push") {
setTimeout(() => {
safeBeat();
}, this.interval * 1000);
} else {
safeBeat();
}
}
stop() {
clearTimeout(this.heartbeatInterval);
this.isStop = true;
}
/**
* Helper method:
* returns a URL object for further usage,
* or null if this.url is invalid
* @returns {null|URL}
*/
getUrl() {
try {
return new URL(this.url);
} catch (_) {
return null;
}
}
/**
* Store TLS info to database
* @param checkCertificateResult
* @returns {Promise<object>}
*/
async updateTlsInfo(checkCertificateResult) {
let tls_info_bean = await R.findOne("monitor_tls_info", "monitor_id = ?", [
this.id,
]);
if (tls_info_bean == null) {
tls_info_bean = R.dispense("monitor_tls_info");
tls_info_bean.monitor_id = this.id;
} else {
// Clear sent history if the cert changed.
try {
let oldCertInfo = JSON.parse(tls_info_bean.info_json);
let isValidObjects = oldCertInfo && oldCertInfo.certInfo && checkCertificateResult && checkCertificateResult.certInfo;
if (isValidObjects) {
if (oldCertInfo.certInfo.fingerprint256 !== checkCertificateResult.certInfo.fingerprint256) {
debug("Resetting sent_history");
await R.exec("DELETE FROM notification_sent_history WHERE type = 'certificate' AND monitor_id = ?", [
this.id
]);
} else {
debug("No need to reset sent_history");
debug(oldCertInfo.certInfo.fingerprint256);
debug(checkCertificateResult.certInfo.fingerprint256);
}
} else {
debug("Not valid object");
}
} catch (e) { }
}
tls_info_bean.info_json = JSON.stringify(checkCertificateResult);
await R.store(tls_info_bean);
return checkCertificateResult;
}
static async sendStats(io, monitorID, userID) {
const hasClients = getTotalClientInRoom(io, userID) > 0;
if (hasClients) {
await Monitor.sendAvgPing(24, io, monitorID, userID);
await Monitor.sendUptime(24, io, monitorID, userID);
await Monitor.sendUptime(24 * 30, io, monitorID, userID);
await Monitor.sendCertInfo(io, monitorID, userID);
} else {
debug("No clients in the room, no need to send stats");
}
}
/**
*
* @param duration : int Hours
*/
static async sendAvgPing(duration, io, monitorID, userID) {
const timeLogger = new TimeLogger();
let avgPing = parseInt(await R.getCell(`
SELECT AVG(ping)
FROM heartbeat
WHERE time > DATETIME('now', ? || ' hours')
AND ping IS NOT NULL
AND monitor_id = ? `, [
-duration,
monitorID,
]));
timeLogger.print(`[Monitor: ${monitorID}] avgPing`);
io.to(userID).emit("avgPing", monitorID, avgPing);
}
static async sendCertInfo(io, monitorID, userID) {
let tls_info = await R.findOne("monitor_tls_info", "monitor_id = ?", [
monitorID,
]);
if (tls_info != null) {
io.to(userID).emit("certInfo", monitorID, tls_info.info_json);
}
}
/**
* Uptime with calculation
* Calculation based on:
* https://www.uptrends.com/support/kb/reporting/calculation-of-uptime-and-downtime
* @param duration : int Hours
*/
static async calcUptime(duration, monitorID) {
const timeLogger = new TimeLogger();
const startTime = R.isoDateTime(dayjs.utc().subtract(duration, "hour"));
// Handle the case where a heartbeat's duration extends past the target window
// e.g. if the last beat's duration reaches back beyond the 24h window start, only the part inside the window is counted (the THEN case in the SQL below)
let result = await R.getRow(`
SELECT
-- SUM all duration, also trim off the beat out of time window
SUM(
CASE
WHEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400 < duration
THEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400
ELSE duration
END
) AS total_duration,
-- SUM all uptime duration, also trim off the beat out of time window
SUM(
CASE
WHEN (status = 1)
THEN
CASE
WHEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400 < duration
THEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400
ELSE duration
END
END
) AS uptime_duration
FROM heartbeat
WHERE time > ?
AND monitor_id = ?
`, [
startTime, startTime, startTime, startTime, startTime,
monitorID,
]);
timeLogger.print(`[Monitor: ${monitorID}][${duration}] calcUptime`);
let totalDuration = result.total_duration;
let uptimeDuration = result.uptime_duration;
let uptime = 0;
if (totalDuration > 0) {
uptime = uptimeDuration / totalDuration;
if (uptime < 0) {
uptime = 0;
}
} else {
// Handle new monitor with only one beat, because the beat's duration = 0
let status = parseInt(await R.getCell("SELECT `status` FROM heartbeat WHERE monitor_id = ?", [ monitorID ]));
if (status === UP) {
uptime = 1;
}
}
return uptime;
}
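// Worked example (illustrative numbers, not taken from real data): with a 24h
// window and three 3600s beats fully inside it — two UP, one DOWN —
// total_duration = 10800 and uptime_duration = 7200, so calcUptime(24, id)
// resolves to 7200 / 10800 ≈ 0.667. A beat whose duration reaches back past
// the window start only contributes the overlapping part (the THEN case above).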
/**
* Send Uptime
* @param duration : int Hours
*/
static async sendUptime(duration, io, monitorID, userID) {
const uptime = await this.calcUptime(duration, monitorID);
io.to(userID).emit("uptime", monitorID, duration, uptime);
}
static isImportantBeat(isFirstBeat, previousBeatStatus, currentBeatStatus) {
// * ? -> ANY STATUS = important [isFirstBeat]
// UP -> PENDING = not important
// * UP -> DOWN = important
// UP -> UP = not important
// PENDING -> PENDING = not important
// * PENDING -> DOWN = important
// PENDING -> UP = not important
// DOWN -> PENDING = this case does not exist
// DOWN -> DOWN = not important
// * DOWN -> UP = important
let isImportant = isFirstBeat ||
(previousBeatStatus === UP && currentBeatStatus === DOWN) ||
(previousBeatStatus === DOWN && currentBeatStatus === UP) ||
(previousBeatStatus === PENDING && currentBeatStatus === DOWN);
return isImportant;
}
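// Example: isImportantBeat(false, UP, DOWN) === true (notify), while
// isImportantBeat(false, UP, PENDING) === false (retries stay quiet).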
static async sendNotification(isFirstBeat, monitor, bean) {
if (!isFirstBeat || bean.status === DOWN) {
const notificationList = await Monitor.getNotificationList(monitor);
let text;
if (bean.status === UP) {
text = "✅ Up";
} else {
text = "🔴 Down";
}
let msg = `[${monitor.name}] [${text}] ${bean.msg}`;
for (let notification of notificationList) {
try {
await Notification.send(JSON.parse(notification.config), msg, await monitor.toJSON(), bean.toJSON());
} catch (e) {
console.error("Cannot send notification to " + notification.name);
console.log(e);
}
}
}
}
static async getNotificationList(monitor) {
let notificationList = await R.getAll("SELECT notification.* FROM notification, monitor_notification WHERE monitor_id = ? AND monitor_notification.notification_id = notification.id ", [
monitor.id,
]);
return notificationList;
}
async sendCertNotification(tlsInfoObject) {
if (tlsInfoObject && tlsInfoObject.certInfo && tlsInfoObject.certInfo.daysRemaining) {
const notificationList = await Monitor.getNotificationList(this);
debug("call sendCertNotificationByTargetDays");
await this.sendCertNotificationByTargetDays(tlsInfoObject.certInfo.daysRemaining, 21, notificationList);
await this.sendCertNotificationByTargetDays(tlsInfoObject.certInfo.daysRemaining, 14, notificationList);
await this.sendCertNotificationByTargetDays(tlsInfoObject.certInfo.daysRemaining, 7, notificationList);
}
}
async sendCertNotificationByTargetDays(daysRemaining, targetDays, notificationList) {
if (daysRemaining > targetDays) {
debug(`No need to send cert notification. ${daysRemaining} > ${targetDays}`);
return;
}
if (notificationList.length > 0) {
let row = await R.getRow("SELECT * FROM notification_sent_history WHERE type = ? AND monitor_id = ? AND days = ?", [
"certificate",
this.id,
targetDays,
]);
// Sent already, no need to send again
if (row) {
debug("Sent already, no need to send again");
return;
}
let sent = false;
debug("Send certificate notification");
for (let notification of notificationList) {
try {
debug("Sending to " + notification.name);
await Notification.send(JSON.parse(notification.config), `[${this.name}][${this.url}] Certificate will expire in ${daysRemaining} days`);
sent = true;
} catch (e) {
console.error("Cannot send cert notification to " + notification.name);
console.error(e);
}
}
if (sent) {
await R.exec("INSERT INTO notification_sent_history (type, monitor_id, days) VALUES(?, ?, ?)", [
"certificate",
this.id,
targetDays,
]);
}
} else {
debug("No notification, no need to send cert notification");
}
}
static async getPreviousHeartbeat(monitorID) {
return await R.getRow(`
SELECT status, time FROM heartbeat
WHERE id = (select MAX(id) from heartbeat where monitor_id = ?)
`, [
monitorID
]);
}
}
module.exports = Monitor;

View file

@@ -1,13 +0,0 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
class Tag extends BeanModel {
toJSON() {
return {
id: this._id,
name: this._name,
color: this._color,
};
}
}
module.exports = Tag;

View file

@@ -1,21 +0,0 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const passwordHash = require("../password-hash");
const { R } = require("redbean-node");
class User extends BeanModel {
/**
* Executes the UPDATE directly, so no R.store() call is needed
* @param newPassword
* @returns {Promise<void>}
*/
async resetPassword(newPassword) {
await R.exec("UPDATE `user` SET password = ? WHERE id = ? ", [
passwordHash.generate(newPassword),
this.id
]);
this.password = newPassword;
}
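// Usage sketch (assumes the bean was loaded through redbean-node's model
// mapping, e.g. let user = await R.findOne("user", " id = ? ", [ id ])):
//   await user.resetPassword("new-password");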
}
module.exports = User;

View file

@@ -1,749 +0,0 @@
let url = require("url");
let MemoryCache = require("./memory-cache");
let t = {
ms: 1,
second: 1000,
minute: 60000,
hour: 3600000,
day: 3600000 * 24,
week: 3600000 * 24 * 7,
month: 3600000 * 24 * 30,
};
let instances = [];
let matches = function (a) {
return function (b) {
return a === b;
};
};
let doesntMatch = function (a) {
return function (b) {
return !matches(a)(b);
};
};
let logDuration = function (d, prefix) {
let str = d > 1000 ? (d / 1000).toFixed(2) + "sec" : d + "ms";
return "\x1b[33m- " + (prefix ? prefix + " " : "") + str + "\x1b[0m";
};
function getSafeHeaders(res) {
return res.getHeaders ? res.getHeaders() : res._headers;
}
function ApiCache() {
let memCache = new MemoryCache();
let globalOptions = {
debug: false,
defaultDuration: 3600000,
enabled: true,
appendKey: [],
jsonp: false,
redisClient: false,
headerBlacklist: [],
statusCodes: {
include: [],
exclude: [],
},
events: {
expire: undefined,
},
headers: {
// 'cache-control': 'no-cache' // example of header overwrite
},
trackPerformance: false,
respectCacheControl: false,
};
let middlewareOptions = [];
let instance = this;
let index = null;
let timers = {};
let performanceArray = []; // for tracking cache hit rate
instances.push(this);
this.id = instances.length;
function debug(a, b, c, d) {
let arr = ["\x1b[36m[apicache]\x1b[0m", a, b, c, d].filter(function (arg) {
return arg !== undefined;
});
let debugEnv = process.env.DEBUG && process.env.DEBUG.split(",").indexOf("apicache") !== -1;
return (globalOptions.debug || debugEnv) && console.log.apply(null, arr);
}
function shouldCacheResponse(request, response, toggle) {
let opt = globalOptions;
let codes = opt.statusCodes;
if (!response) {
return false;
}
if (toggle && !toggle(request, response)) {
return false;
}
if (codes.exclude && codes.exclude.length && codes.exclude.indexOf(response.statusCode) !== -1) {
return false;
}
if (codes.include && codes.include.length && codes.include.indexOf(response.statusCode) === -1) {
return false;
}
return true;
}
function addIndexEntries(key, req) {
let groupName = req.apicacheGroup;
if (groupName) {
debug("group detected \"" + groupName + "\"");
let group = (index.groups[groupName] = index.groups[groupName] || []);
group.unshift(key);
}
index.all.unshift(key);
}
function filterBlacklistedHeaders(headers) {
return Object.keys(headers)
.filter(function (key) {
return globalOptions.headerBlacklist.indexOf(key) === -1;
})
.reduce(function (acc, header) {
acc[header] = headers[header];
return acc;
}, {});
}
function createCacheObject(status, headers, data, encoding) {
return {
status: status,
headers: filterBlacklistedHeaders(headers),
data: data,
encoding: encoding,
timestamp: new Date().getTime() / 1000, // seconds since epoch. This is used to properly decrement max-age headers in cached responses.
};
}
function cacheResponse(key, value, duration) {
let redis = globalOptions.redisClient;
let expireCallback = globalOptions.events.expire;
if (redis && redis.connected) {
try {
redis.hset(key, "response", JSON.stringify(value));
redis.hset(key, "duration", duration);
redis.expire(key, duration / 1000, expireCallback || function () {});
} catch (err) {
debug("[apicache] error in redis.hset()");
}
} else {
memCache.add(key, value, duration, expireCallback);
}
// add automatic cache clearing from duration, capped at setTimeout's max delay (2^31 - 1 ms)
timers[key] = setTimeout(function () {
instance.clear(key, true);
}, Math.min(duration, 2147483647));
}
function accumulateContent(res, content) {
if (content) {
if (typeof content == "string") {
res._apicache.content = (res._apicache.content || "") + content;
} else if (Buffer.isBuffer(content)) {
let oldContent = res._apicache.content;
if (typeof oldContent === "string") {
oldContent = !Buffer.from ? new Buffer(oldContent) : Buffer.from(oldContent);
}
if (!oldContent) {
oldContent = !Buffer.alloc ? new Buffer(0) : Buffer.alloc(0);
}
res._apicache.content = Buffer.concat(
[oldContent, content],
oldContent.length + content.length
);
} else {
res._apicache.content = content;
}
}
}
function makeResponseCacheable(req, res, next, key, duration, strDuration, toggle) {
// monkeypatch res.end to create cache object
res._apicache = {
write: res.write,
writeHead: res.writeHead,
end: res.end,
cacheable: true,
content: undefined,
};
// append header overwrites if applicable
Object.keys(globalOptions.headers).forEach(function (name) {
res.setHeader(name, globalOptions.headers[name]);
});
res.writeHead = function () {
// add cache control headers
if (!globalOptions.headers["cache-control"]) {
if (shouldCacheResponse(req, res, toggle)) {
res.setHeader("cache-control", "max-age=" + (duration / 1000).toFixed(0));
} else {
res.setHeader("cache-control", "no-cache, no-store, must-revalidate");
}
}
res._apicache.headers = Object.assign({}, getSafeHeaders(res));
return res._apicache.writeHead.apply(this, arguments);
};
// patch res.write
res.write = function (content) {
accumulateContent(res, content);
return res._apicache.write.apply(this, arguments);
};
// patch res.end
res.end = function (content, encoding) {
if (shouldCacheResponse(req, res, toggle)) {
accumulateContent(res, content);
if (res._apicache.cacheable && res._apicache.content) {
addIndexEntries(key, req);
let headers = res._apicache.headers || getSafeHeaders(res);
let cacheObject = createCacheObject(
res.statusCode,
headers,
res._apicache.content,
encoding
);
cacheResponse(key, cacheObject, duration);
// display log entry
let elapsed = new Date() - req.apicacheTimer;
debug("adding cache entry for \"" + key + "\" @ " + strDuration, logDuration(elapsed));
debug("_apicache.headers: ", res._apicache.headers);
debug("res.getHeaders(): ", getSafeHeaders(res));
debug("cacheObject: ", cacheObject);
}
}
return res._apicache.end.apply(this, arguments);
};
next();
}
function sendCachedResponse(request, response, cacheObject, toggle, next, duration) {
if (toggle && !toggle(request, response)) {
return next();
}
let headers = getSafeHeaders(response);
// Modified by @louislam, removed Cache-control, since I don't need client side cache!
// Original Source: https://github.com/kwhitley/apicache/blob/0d5686cc21fad353c6dddee646288c2fca3e4f50/src/apicache.js#L254
Object.assign(headers, filterBlacklistedHeaders(cacheObject.headers || {}));
// only embed apicache headers when not in production environment
if (process.env.NODE_ENV !== "production") {
Object.assign(headers, {
"apicache-store": globalOptions.redisClient ? "redis" : "memory",
"apicache-version": "1.6.2-modified",
});
}
// unstringify buffers
let data = cacheObject.data;
if (data && data.type === "Buffer") {
data =
typeof data.data === "number" ? Buffer.alloc(data.data) : Buffer.from(data.data);
}
// test Etag against If-None-Match for 304
let cachedEtag = cacheObject.headers.etag;
let requestEtag = request.headers["if-none-match"];
if (requestEtag && cachedEtag === requestEtag) {
response.writeHead(304, headers);
return response.end();
}
response.writeHead(cacheObject.status || 200, headers);
return response.end(data, cacheObject.encoding);
}
function syncOptions() {
for (let i in middlewareOptions) {
Object.assign(middlewareOptions[i].options, globalOptions, middlewareOptions[i].localOptions);
}
}
this.clear = function (target, isAutomatic) {
let group = index.groups[target];
let redis = globalOptions.redisClient;
if (group) {
debug("clearing group \"" + target + "\"");
group.forEach(function (key) {
debug("clearing cached entry for \"" + key + "\"");
clearTimeout(timers[key]);
delete timers[key];
if (!globalOptions.redisClient) {
memCache.delete(key);
} else {
try {
redis.del(key);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + key + "\")");
}
}
index.all = index.all.filter(doesntMatch(key));
});
delete index.groups[target];
} else if (target) {
debug("clearing " + (isAutomatic ? "expired" : "cached") + " entry for \"" + target + "\"");
clearTimeout(timers[target]);
delete timers[target];
// clear actual cached entry
if (!redis) {
memCache.delete(target);
} else {
try {
redis.del(target);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + target + "\")");
}
}
// remove from global index
index.all = index.all.filter(doesntMatch(target));
// remove target from each group that it may exist in
Object.keys(index.groups).forEach(function (groupName) {
index.groups[groupName] = index.groups[groupName].filter(doesntMatch(target));
// delete group if now empty
if (!index.groups[groupName].length) {
delete index.groups[groupName];
}
});
} else {
debug("clearing entire index");
if (!redis) {
memCache.clear();
} else {
// clear redis keys one by one from internal index to prevent clearing non-apicache entries
index.all.forEach(function (key) {
clearTimeout(timers[key]);
delete timers[key];
try {
redis.del(key);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + key + "\")");
}
});
}
this.resetIndex();
}
return this.getIndex();
};
function parseDuration(duration, defaultDuration) {
if (typeof duration === "number") {
return duration;
}
if (typeof duration === "string") {
let split = duration.match(/^([\d\.,]+)\s?(\w+)$/);
// match() returns null when the string is not "<number> <unit>", so guard before reading length
if (split && split.length === 3) {
let len = parseFloat(split[1]);
let unit = split[2].replace(/s$/i, "").toLowerCase();
if (unit === "m") {
unit = "ms";
}
return (len || 1) * (t[unit] || 0);
}
}
return defaultDuration;
}
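// Examples (illustrative): parseDuration(42, 3600000) === 42 (numbers are taken
// as milliseconds); parseDuration("5 minutes", 3600000) === 300000; a string
// that doesn't look like "<number> <unit>" falls back to the default duration.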
this.getDuration = function (duration) {
return parseDuration(duration, globalOptions.defaultDuration);
};
/**
* Return cache performance statistics (hit rate). Suitable for putting into a route:
* <code>
* app.get('/api/cache/performance', (req, res) => {
* res.json(apicache.getPerformance())
* })
* </code>
*/
this.getPerformance = function () {
return performanceArray.map(function (p) {
return p.report();
});
};
this.getIndex = function (group) {
if (group) {
return index.groups[group];
} else {
return index;
}
};
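/**
* Usage sketch (the route below is hypothetical):
* <code>
* app.get("/api/status", apicache.middleware("5 minutes"), handler)
* </code>
* The optional second argument is a toggle function (req, res) => boolean that
* lets a route skip caching per request; the third overrides global options.
*/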
this.middleware = function cache(strDuration, middlewareToggle, localOptions) {
let duration = instance.getDuration(strDuration);
let opt = {};
middlewareOptions.push({
options: opt,
});
let options = function (localOptions) {
if (localOptions) {
middlewareOptions.find(function (middleware) {
return middleware.options === opt;
}).localOptions = localOptions;
}
syncOptions();
return opt;
};
options(localOptions);
/**
* A no-op performance tracker, used when trackPerformance is disabled
*/
function NOOPCachePerformance() {
this.report = this.hit = this.miss = function () {}; // noop;
}
/**
* A function for tracking and reporting hit rate. These statistics are returned by the getPerformance() call above.
*/
function CachePerformance() {
/**
* Tracks the hit rate for the last 100 requests.
* If there have been fewer than 100 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast100 = new Uint8Array(100 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 1000 requests.
* If there have been fewer than 1000 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast1000 = new Uint8Array(1000 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 10000 requests.
* If there have been fewer than 10000 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast10000 = new Uint8Array(10000 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 100000 requests.
* If there have been fewer than 100000 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast100000 = new Uint8Array(100000 / 4); // each hit is 2 bits
/**
* The number of calls that have passed through the middleware since the server started.
*/
this.callCount = 0;
/**
* The total number of hits since the server started
*/
this.hitCount = 0;
/**
* The key from the last cache hit. This is useful in identifying which route these statistics apply to.
*/
this.lastCacheHit = null;
/**
* The key from the last cache miss. This is useful in identifying which route these statistics apply to.
*/
this.lastCacheMiss = null;
/**
* Return performance statistics
*/
this.report = function () {
return {
lastCacheHit: this.lastCacheHit,
lastCacheMiss: this.lastCacheMiss,
callCount: this.callCount,
hitCount: this.hitCount,
missCount: this.callCount - this.hitCount,
hitRate: this.callCount == 0 ? null : this.hitCount / this.callCount,
hitRateLast100: this.hitRate(this.hitsLast100),
hitRateLast1000: this.hitRate(this.hitsLast1000),
hitRateLast10000: this.hitRate(this.hitsLast10000),
hitRateLast100000: this.hitRate(this.hitsLast100000),
};
};
/**
* Computes a cache hit rate from an array of hits and misses.
* @param {Uint8Array} array An array representing hits and misses.
* @returns a number between 0 and 1, or null if the array has no hits or misses
*/
this.hitRate = function (array) {
let hits = 0;
let misses = 0;
for (let i = 0; i < array.length; i++) {
let n8 = array[i];
for (let j = 0; j < 4; j++) {
switch (n8 & 3) {
case 1:
hits++;
break;
case 2:
misses++;
break;
}
n8 >>= 2;
}
}
let total = hits + misses;
if (total == 0) {
return null;
}
return hits / total;
};
/**
* Record a hit or miss in the given array. It will be recorded at a position determined
* by the current value of the callCount variable.
* @param {Uint8Array} array An array representing hits and misses.
* @param {boolean} hit true for a hit, false for a miss
* Each element in the array is 8 bits, and encodes 4 hit/miss records.
* Each hit or miss is encoded as two bits as follows:
* 00 means no hit or miss has been recorded in these bits
* 01 encodes a hit
* 10 encodes a miss
*/
this.recordHitInArray = function (array, hit) {
let arrayIndex = ~~(this.callCount / 4) % array.length;
let bitOffset = (this.callCount % 4) * 2; // 2 bits per record, 4 records per uint8 array element
let clearMask = ~(3 << bitOffset);
let record = (hit ? 1 : 2) << bitOffset;
array[arrayIndex] = (array[arrayIndex] & clearMask) | record;
};
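/**
* Worked example (illustrative): with callCount = 5, arrayIndex = ~~(5 / 4) %
* array.length = 1 and bitOffset = (5 % 4) * 2 = 2, so a hit writes binary 01
* into bits 2-3 of array[1], overwriting the record left there a full lap of
* the ring buffer earlier.
*/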
/**
* Records the hit or miss in the tracking arrays and increments the call count.
* @param {boolean} hit true records a hit, false records a miss
*/
this.recordHit = function (hit) {
this.recordHitInArray(this.hitsLast100, hit);
this.recordHitInArray(this.hitsLast1000, hit);
this.recordHitInArray(this.hitsLast10000, hit);
this.recordHitInArray(this.hitsLast100000, hit);
if (hit) {
this.hitCount++;
}
this.callCount++;
};
/**
* Records a hit event, setting lastCacheHit to the given key
* @param {string} key The key that had the cache hit
*/
this.hit = function (key) {
this.recordHit(true);
this.lastCacheHit = key;
};
/**
* Records a miss event, setting lastCacheMiss to the given key
* @param {string} key The key that had the cache miss
*/
this.miss = function (key) {
this.recordHit(false);
this.lastCacheMiss = key;
};
}
let perf = globalOptions.trackPerformance ? new CachePerformance() : new NOOPCachePerformance();
performanceArray.push(perf);
let cache = function (req, res, next) {
function bypass() {
debug("bypass detected, skipping cache.");
return next();
}
// initial bypass chances
if (!opt.enabled) {
return bypass();
}
if (
req.headers["x-apicache-bypass"] ||
req.headers["x-apicache-force-fetch"] ||
(opt.respectCacheControl && req.headers["cache-control"] == "no-cache")
) {
return bypass();
}
// REMOVED IN 0.11.1 TO CORRECT MIDDLEWARE TOGGLE EXECUTE ORDER
// if (typeof middlewareToggle === 'function') {
// if (!middlewareToggle(req, res)) return bypass()
// } else if (middlewareToggle !== undefined && !middlewareToggle) {
// return bypass()
// }
// embed timer
req.apicacheTimer = new Date();
// In Express 4.x the url is ambiguous depending on where a router is mounted; originalUrl gives the full URL
let key = req.originalUrl || req.url;
// Remove querystring from key if jsonp option is enabled
if (opt.jsonp) {
key = url.parse(key).pathname;
}
// add appendKey (either custom function or response path)
if (typeof opt.appendKey === "function") {
key += "$$appendKey=" + opt.appendKey(req, res);
} else if (opt.appendKey.length > 0) {
let appendKey = req;
for (let i = 0; i < opt.appendKey.length; i++) {
appendKey = appendKey[opt.appendKey[i]];
}
key += "$$appendKey=" + appendKey;
}
// attempt cache hit
let redis = opt.redisClient;
let cached = !redis ? memCache.getValue(key) : null;
// send if cache hit from memory-cache
if (cached) {
let elapsed = new Date() - req.apicacheTimer;
debug("sending cached (memory-cache) version of", key, logDuration(elapsed));
perf.hit(key);
return sendCachedResponse(req, res, cached, middlewareToggle, next, duration);
}
// send if cache hit from redis
if (redis && redis.connected) {
try {
redis.hgetall(key, function (err, obj) {
if (!err && obj && obj.response) {
let elapsed = new Date() - req.apicacheTimer;
debug("sending cached (redis) version of", key, logDuration(elapsed));
perf.hit(key);
return sendCachedResponse(
req,
res,
JSON.parse(obj.response),
middlewareToggle,
next,
duration
);
} else {
perf.miss(key);
return makeResponseCacheable(
req,
res,
next,
key,
duration,
strDuration,
middlewareToggle
);
}
});
} catch (err) {
// bypass redis on error
perf.miss(key);
return makeResponseCacheable(req, res, next, key, duration, strDuration, middlewareToggle);
}
} else {
perf.miss(key);
return makeResponseCacheable(req, res, next, key, duration, strDuration, middlewareToggle);
}
};
cache.options = options;
return cache;
};
this.options = function (options) {
if (options) {
Object.assign(globalOptions, options);
syncOptions();
if ("defaultDuration" in options) {
// Convert the default duration to a number in milliseconds (if needed)
globalOptions.defaultDuration = parseDuration(globalOptions.defaultDuration, 3600000);
}
if (globalOptions.trackPerformance) {
debug("WARNING: using trackPerformance flag can cause high memory usage!");
}
return this;
} else {
return globalOptions;
}
};
this.resetIndex = function () {
index = {
all: [],
groups: {},
};
};
this.newInstance = function (config) {
let instance = new ApiCache();
if (config) {
instance.options(config);
}
return instance;
};
this.clone = function () {
return this.newInstance(this.options());
};
// initialize index
this.resetIndex();
}
module.exports = new ApiCache();

View file

@@ -1,14 +0,0 @@
const apicache = require("./apicache");
apicache.options({
headerBlacklist: [
"cache-control"
],
headers: {
// Disable client side cache, only server side cache.
// BUG! Not working for the second request
"cache-control": "no-cache",
},
});
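// Usage sketch (route path is hypothetical): an endpoint can be wrapped with
// apicache.middleware("5 minutes"), and monitor.js calls apicache.clear()
// on important heartbeats so status pages are rebuilt on the next request.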
module.exports = apicache;

Some files were not shown because too many files have changed in this diff