Mirror of https://github.com/louislam/uptime-kuma.git (synced 2024-11-23 14:54:05 +00:00)

Merge branch 'master' into feature/time-format
Commit: be83595a19
290 changed files with 15513 additions and 13133 deletions
@@ -30,7 +30,6 @@ SECURITY.md
tsconfig.json
.env
/tmp
/babel.config.js
/ecosystem.config.js
/extra/healthcheck.exe
/extra/healthcheck

@@ -38,6 +37,10 @@ tsconfig.json
/extra/push-examples
/extra/uptime-kuma-push

# Comment the following line if you want to rebuild the healthcheck binary
/extra/healthcheck-armv7

### .gitignore content (commented rules are duplicated)

#node_modules

.eslintrc.js (33 changes)
@@ -19,12 +19,13 @@ module.exports = {
    ],
    parser: "vue-eslint-parser",
    parserOptions: {
        parser: "@babel/eslint-parser",
        parser: "@typescript-eslint/parser",
        sourceType: "module",
        requireConfigFile: false,
    },
    plugins: [
        "jsdoc"
        "jsdoc",
        "@typescript-eslint",
    ],
    rules: {
        "yoda": "error",

@@ -83,7 +84,7 @@ module.exports = {
            "checkLoops": false,
        }],
        "space-before-blocks": "warn",
        //'no-console': 'warn',
        //"no-console": "warn",
        "no-extra-boolean-cast": "off",
        "no-multiple-empty-lines": [ "warn", {
            "max": 1,

@@ -95,7 +96,8 @@ module.exports = {
        "no-unneeded-ternary": "error",
        "array-bracket-newline": [ "error", "consistent" ],
        "eol-last": [ "error", "always" ],
        //'prefer-template': 'error',
        //"prefer-template": "error",
        "template-curly-spacing": [ "warn", "never" ],
        "comma-dangle": [ "warn", "only-multiline" ],
        "no-empty": [ "error", {
            "allowEmptyCatch": true

@@ -148,21 +150,20 @@ module.exports = {
            }
        },

        // Override for jest puppeteer
        // Override for TypeScript
        {
            "files": [
                "**/*.spec.js",
                "**/*.spec.jsx"
                "**/*.ts",
            ],
            env: {
                jest: true,
            },
            globals: {
                page: true,
                browser: true,
                context: true,
                jestPuppeteer: true,
            },
            extends: [
                "plugin:@typescript-eslint/recommended",
            ],
            "rules": {
                "jsdoc/require-returns-type": "off",
                "jsdoc/require-param-type": "off",
                "@typescript-eslint/no-explicit-any": "off",
                "prefer-const": "off",
            }
        }
    ]
};
|
.github/ISSUE_TEMPLATE/ask-for-help.yaml (33 changes)
|
@ -6,7 +6,7 @@ body:
|
|||
- type: checkboxes
|
||||
id: no-duplicate-issues
|
||||
attributes:
|
||||
label: "⚠️ Please verify that this bug has NOT been raised before."
|
||||
label: "⚠️ Please verify that this question has NOT been raised before."
|
||||
description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
|
||||
options:
|
||||
- label: "I checked and didn't find similar issue"
|
||||
|
@ -24,7 +24,7 @@ body:
|
|||
required: true
|
||||
attributes:
|
||||
label: "📝 Describe your problem"
|
||||
description: "Please walk us through it step by step."
|
||||
description: "Please walk us through it step by step. Include all important details and add screenshots where appropriate"
|
||||
placeholder: "Describe what are you asking for..."
|
||||
- type: textarea
|
||||
id: error-msg
|
||||
|
@ -56,19 +56,20 @@ body:
|
|||
placeholder: "Ex. Google Chrome 95.0.4638.69"
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: docker-version
|
||||
- type: textarea
|
||||
id: deployment-info
|
||||
attributes:
|
||||
label: "🐋 Docker Version"
|
||||
description: "If running with Docker, which version are you running?"
|
||||
placeholder: "Ex. Docker 20.10.9 / K8S / Podman"
|
||||
label: "🖥️ Deployment Environment"
|
||||
description: |
|
||||
examples:
|
||||
- **Runtime**: Docker 20.10.9 / nodejs 14.18.0 / K8S via ... v1.3.3 / ..
|
||||
- **Database**: sqlite/embedded mariadb/external mariadb
|
||||
- **Filesystem used to store the database on**: Windows/ZFS/btrfs/NFSv3 on a SSD/HDD/eMMC
|
||||
- **number of monitors**: 42
|
||||
value: |
|
||||
- Runtime:
|
||||
- Database:
|
||||
- Filesystem used to store the database on:
|
||||
- number of monitors:
|
||||
validations:
|
||||
required: false
|
||||
- type: input
|
||||
id: nodejs-version
|
||||
attributes:
|
||||
label: "🟩 NodeJS Version"
|
||||
description: "If running with Node.js? which version are you running?"
|
||||
placeholder: "Ex. 14.18.0"
|
||||
validations:
|
||||
required: false
|
||||
required: true
|
||||
|
|
.github/ISSUE_TEMPLATE/bug_report.yaml (45 changes)
|
@ -3,14 +3,14 @@ description: "Submit a bug report to help us improve"
|
|||
#title: "[Bug] "
|
||||
labels: [bug]
|
||||
body:
|
||||
- type: checkboxes
|
||||
id: no-duplicate-issues
|
||||
attributes:
|
||||
label: "⚠️ Please verify that this bug has NOT been raised before."
|
||||
description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
|
||||
options:
|
||||
- label: "I checked and didn't find similar issue"
|
||||
- type: textarea
|
||||
id: related-issues
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: "📑 I have found these related issues/pull requests"
|
||||
description: "Search related issues by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=) and explain what the difference between them or explain that you are unable to find any related issues"
|
||||
placeholder: "Related to #1 by also touching the ... system. They should not be merged because ..."
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: "🛡️ Security Policy"
|
||||
|
@ -31,7 +31,7 @@ body:
|
|||
required: true
|
||||
attributes:
|
||||
label: "👟 Reproduction steps"
|
||||
description: "How do you trigger this bug? Please walk us through it step by step."
|
||||
description: "How do you trigger this bug? Please walk us through it step by step. Include all important details and add screenshots where appropriate"
|
||||
placeholder: "..."
|
||||
- type: textarea
|
||||
id: expected-behavior
|
||||
|
@ -73,22 +73,23 @@ body:
|
|||
placeholder: "Ex. Google Chrome 95.0.4638.69"
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: docker-version
|
||||
- type: textarea
|
||||
id: deployment-info
|
||||
attributes:
|
||||
label: "🐋 Docker Version"
|
||||
description: "If running with Docker, which version are you running?"
|
||||
placeholder: "Ex. Docker 20.10.9 / K8S / Podman"
|
||||
label: "🖥️ Deployment Environment"
|
||||
description: |
|
||||
examples:
|
||||
- **Runtime**: Docker 20.10.9 / nodejs 18.17.1 / K8S via ... v1.3.3 / ..
|
||||
- **Database**: sqlite/embedded mariadb/external mariadb
|
||||
- **Filesystem used to store the database on**: Windows/ZFS/btrfs/NFSv3 on a SSD/HDD/eMMC
|
||||
- **number of monitors**: 42
|
||||
value: |
|
||||
- Runtime:
|
||||
- Database:
|
||||
- Filesystem used to store the database on:
|
||||
- number of monitors:
|
||||
validations:
|
||||
required: false
|
||||
- type: input
|
||||
id: nodejs-version
|
||||
attributes:
|
||||
label: "🟩 NodeJS Version"
|
||||
description: "If running with Node.js? which version are you running?"
|
||||
placeholder: "Ex. 14.18.0"
|
||||
validations:
|
||||
required: false
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
|
|
.github/ISSUE_TEMPLATE/feature_request.yaml (29 changes)
|
@ -3,14 +3,14 @@ description: "Submit a proposal for a new feature"
|
|||
#title: "[Feature] "
|
||||
labels: [feature-request]
|
||||
body:
|
||||
- type: checkboxes
|
||||
id: no-duplicate-issues
|
||||
attributes:
|
||||
label: "⚠️ Please verify that this feature request has NOT been suggested before."
|
||||
description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
|
||||
options:
|
||||
- label: "I checked and didn't find similar feature request"
|
||||
- type: textarea
|
||||
id: related-issues
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: "📑 I have found these related issues/pull requests"
|
||||
description: "Search related issues by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=) and explain what the difference between them or explain that you are unable to find any related issues"
|
||||
placeholder: "Related to #1 by also touching the ... system. They should not be merged because ..."
|
||||
- type: dropdown
|
||||
id: feature-area
|
||||
attributes:
|
||||
|
@ -18,10 +18,17 @@ body:
|
|||
description: "What kind of feature request is this?"
|
||||
multiple: true
|
||||
options:
|
||||
- API
|
||||
- New Notification
|
||||
- New Monitor
|
||||
- UI Feature
|
||||
- API / automation options
|
||||
- New notification-provider
|
||||
- Change to existing notification-provider
|
||||
- New monitor
|
||||
- Change to existing monitor
|
||||
- Dashboard
|
||||
- Status-page
|
||||
- Maintenance
|
||||
- Deployment
|
||||
- Certificate expiry
|
||||
- Settings
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
|
|
.github/PULL_REQUEST_TEMPLATE.md (7 changes)

@@ -15,7 +15,7 @@ Please delete any options that are not relevant.
- Bug fix (non-breaking change which fixes an issue)
- User interface (UI)
- New feature (non-breaking change which adds functionality)
- Breaking change (fix or feature that would cause existing functionality to not work as expected)
- Breaking change (a fix or feature that would cause existing functionality to not work as expected)
- Other
- This change requires a documentation update

@@ -24,9 +24,8 @@ Please delete any options that are not relevant.
- [ ] My code follows the style guidelines of this project
- [ ] I ran ESLint and other linters for modified files
- [ ] I have performed a self-review of my own code and tested it
- [ ] I have commented my code, particularly in hard-to-understand areas
  (including JSDoc for methods)
- [ ] My changes generate no new warnings
- [ ] I have commented my code, particularly in hard-to-understand areas (including JSDoc for methods)
- [ ] My changes generate no new warnings
- [ ] My code needed automated testing. I have added it (this is an optional task)

## Screenshots (if any)
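The checklist above asks for comments and JSDoc on methods; here is a short, hypothetical example of the level of documentation meant by that item:

```js
/**
 * Resolve the display name of a monitor.
 * @param {object} monitor Monitor object loaded from the database
 * @param {string} [fallback] Name to use when the monitor has none
 * @returns {string} Human-readable monitor name
 */
function getMonitorName(monitor, fallback = "Unnamed monitor") {
    return monitor.name || fallback;
}
```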
.github/workflows/auto-test.yml (59 changes)
|
@ -5,38 +5,37 @@ name: Auto Test
|
|||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
branches: [ master, 1.23.X ]
|
||||
paths-ignore:
|
||||
- '*.md'
|
||||
pull_request:
|
||||
branches: [ master, 2.0.X ]
|
||||
branches: [ master, 1.23.X ]
|
||||
paths-ignore:
|
||||
- '*.md'
|
||||
|
||||
jobs:
|
||||
auto-test:
|
||||
needs: [ check-linters ]
|
||||
needs: [ check-linters, e2e-test ]
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 15
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
os: [macos-latest, ubuntu-latest, windows-latest, ARM64]
|
||||
node: [ 14, 20.5 ]
|
||||
node: [ 18, 20.5 ]
|
||||
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
|
||||
|
||||
steps:
|
||||
- run: git config --global core.autocrlf false # Mainly for Windows
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Use Node.js ${{ matrix.node }}
|
||||
uses: actions/setup-node@v3
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: ${{ matrix.node }}
|
||||
- run: npm install npm@9 -g
|
||||
- run: npm install
|
||||
- run: npm run build
|
||||
- run: npm test
|
||||
- run: npm run test-backend
|
||||
env:
|
||||
HEADLESS_TEST: 1
|
||||
JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}
|
||||
|
@ -50,18 +49,17 @@ jobs:
|
|||
strategy:
|
||||
matrix:
|
||||
os: [ ARMv7 ]
|
||||
node: [ 14, 20 ]
|
||||
node: [ 18, 20 ]
|
||||
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
|
||||
|
||||
steps:
|
||||
- run: git config --global core.autocrlf false # Mainly for Windows
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Use Node.js ${{ matrix.node }}
|
||||
uses: actions/setup-node@v3
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: ${{ matrix.node }}
|
||||
- run: npm install npm@9 -g
|
||||
- run: npm ci --production
|
||||
|
||||
check-linters:
|
||||
|
@ -69,42 +67,27 @@ jobs:
|
|||
|
||||
steps:
|
||||
- run: git config --global core.autocrlf false # Mainly for Windows
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@v3
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- run: npm install
|
||||
- run: npm run lint
|
||||
- run: npm run lint:prod
|
||||
|
||||
# TODO: Temporarily disable, as it cannot pass the test in 2.0.0 yet
|
||||
# e2e-tests:
|
||||
# needs: [ check-linters ]
|
||||
# runs-on: ubuntu-latest
|
||||
# steps:
|
||||
# - run: git config --global core.autocrlf false # Mainly for Windows
|
||||
# - uses: actions/checkout@v3
|
||||
#
|
||||
# - name: Use Node.js 14
|
||||
# uses: actions/setup-node@v3
|
||||
# with:
|
||||
# node-version: 14
|
||||
# - run: npm install
|
||||
# - run: npm run build
|
||||
# - run: npm run cy:test
|
||||
|
||||
frontend-unit-tests:
|
||||
e2e-test:
|
||||
needs: [ check-linters ]
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ARM64
|
||||
steps:
|
||||
- run: git config --global core.autocrlf false # Mainly for Windows
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Use Node.js 14
|
||||
uses: actions/setup-node@v3
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 14
|
||||
node-version: 20
|
||||
- run: npm install
|
||||
- run: npx playwright install
|
||||
- run: npm run build
|
||||
- run: npm run cy:run:unit
|
||||
- run: npm run test-e2e
|
||||
|
|
.github/workflows/close-incorrect-issue.yml (6 changes)

@@ -11,13 +11,13 @@ jobs:
    strategy:
      matrix:
        os: [ubuntu-latest]
        node-version: [16]
        node-version: [18]

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4

      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v3
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'npm'
.github/workflows/codeql-analysis.yml (new file, 43 lines)
|
@ -0,0 +1,43 @@
|
|||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "master", "1.23.X"]
|
||||
pull_request:
|
||||
branches: [ "master", "1.23.X"]
|
||||
schedule:
|
||||
- cron: '16 22 * * 0'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 360
|
||||
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'go', 'javascript-typescript' ]
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
.github/workflows/conflict_labeler.yml (new file, 26 lines)
|
@ -0,0 +1,26 @@
|
|||
name: Merge Conflict Labeler
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request_target:
|
||||
branches:
|
||||
- master
|
||||
types: [synchronize]
|
||||
|
||||
jobs:
|
||||
label:
|
||||
name: Labeling
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository == 'louislam/uptime-kuma' }}
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Apply label
|
||||
uses: eps1lon/actions-label-merge-conflict@v3
|
||||
with:
|
||||
dirtyLabel: 'needs:resolve-merge-conflict'
|
||||
removeOnDirtyLabel: 'needs:resolve-merge-conflict'
|
||||
repoToken: '${{ secrets.GITHUB_TOKEN }}'
|
.github/workflows/json-yaml-validate.yml (6 changes)

@@ -6,7 +6,7 @@ on:
  pull_request:
    branches:
      - master
      - 2.0.X
      - 1.23.X
  workflow_dispatch:

permissions:

@@ -17,11 +17,11 @@ jobs:
  json-yaml-validate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4

      - name: json-yaml-validate
        id: json-yaml-validate
        uses: GrantBirki/json-yaml-validate@v1.3.0
        uses: GrantBirki/json-yaml-validate@v2.4.0
        with:
          comment: "true" # enable comment mode
          exclude_file: ".github/config/exclude.txt" # gitignore style file for exclusions
.github/workflows/stale-bot.yml (36 changes)

@@ -1,4 +1,4 @@
name: 'Automatically close stale issues and PRs'
name: 'Automatically close stale issues'
on:
  workflow_dispatch:
  schedule:

@@ -9,14 +9,34 @@ jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v7
      - uses: actions/stale@v8
        with:
          stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 2 days.'
          close-issue-message: 'This issue was closed because it has been stalled for 2 days with no activity.'
          days-before-stale: 90
          days-before-close: 2
          days-before-pr-stale: 999999999
          days-before-pr-close: 1
          stale-issue-message: |-
            We are clearing up our old `help`-issues and your issue has been open for 60 days with no activity.
            If no comment is made and the stale label is not removed, this issue will be closed in 7 days.
          days-before-stale: 60
          days-before-close: 7
          days-before-pr-stale: -1
          days-before-pr-close: -1
          exempt-issue-labels: 'News,Medium,High,discussion,bug,doc,feature-request'
          exempt-issue-assignees: 'louislam'
          operations-per-run: 200
      - uses: actions/stale@v8
        with:
          stale-issue-message: |-
            This issue was marked as `cannot-reproduce` by a maintainer.
            If an issue is non-reproducible, we cannot fix it, as we do not know what the underlying issue is.
            If you have any ideas how we can reproduce this issue, we would love to hear them.

            We don't have a good way to deal with truly unreproducible issues and are going to close this issue in a month.
            If you think there might be other differences in our environment or in how we tried to reproduce this, we would appreciate any ideas.
          close-issue-message: |-
            This issue will be closed as no way to reproduce it has been found.
            If you or somebody else finds a way to (semi-reliably) reproduce this, we can reopen this issue. ^^
          days-before-stale: 180
          days-before-close: 30
          days-before-pr-stale: -1
          days-before-pr-close: -1
          any-of-issue-labels: 'cannot-reproduce'
          operations-per-run: 200
.gitignore (3 changes)

@@ -15,9 +15,6 @@ dist-ssr
/tmp
.env

cypress/videos
cypress/screenshots

/extra/healthcheck.exe
/extra/healthcheck
/extra/healthcheck-armv7
CONTRIBUTING.md (378 changes)
|
@ -1,94 +1,216 @@
|
|||
# Project Info
|
||||
|
||||
First of all, I want to thank everyone who made pull requests for Uptime Kuma. I never thought the GitHub Community would be so nice! Because of this, I also never thought that other people would actually read and edit my code. It is not very well structured or commented, sorry about that.
|
||||
First of all, I want to thank everyone who has written issues or shared pull requests for Uptime Kuma.
|
||||
I never thought the GitHub community would be so nice!
|
||||
Because of this, I also never thought that other people would actually read and edit my code.
|
||||
Parts of the code are not very well-structured or commented, sorry about that.
|
||||
|
||||
The project was created with vite.js (vue3). Then I created a subdirectory called "server" for the server part. Both frontend and backend share the same package.json.
|
||||
The project was created with `vite.js` and is written in `vue3`.
|
||||
Our backend lives in the `server`-directory and mostly communicates via websockets.
|
||||
Both frontend and backend share the same `package.json`.
|
||||
|
||||
The frontend code builds into "dist" directory. The server (express.js) exposes the "dist" directory as the root of the endpoint. This is how production is working.
|
||||
|
||||
## Key Technical Skills
|
||||
|
||||
- Node.js (You should know about promise, async/await and arrow function etc.)
|
||||
- Socket.io
|
||||
- SCSS
|
||||
- Vue.js
|
||||
- Bootstrap
|
||||
- SQLite
|
||||
For production, the frontend is built into the `dist` directory and the server (`express.js`) exposes the `dist` directory as the root of the endpoint.
|
||||
For development, we run vite in development mode on another port.
|
||||
|
||||
## Directories
|
||||
|
||||
- config (dev config files)
|
||||
- data (App data)
|
||||
- db (Base database and migration scripts)
|
||||
- dist (Frontend build)
|
||||
- docker (Dockerfiles)
|
||||
- extra (Extra useful scripts)
|
||||
- public (Frontend resources for dev only)
|
||||
- server (Server source code)
|
||||
- src (Frontend source code)
|
||||
- test (unit test)
|
||||
- `config` (dev config files)
|
||||
- `data` (App data)
|
||||
- `db` (Base database and migration scripts)
|
||||
- `dist` (Frontend build)
|
||||
- `docker` (Dockerfiles)
|
||||
- `extra` (Extra useful scripts)
|
||||
- `public` (Frontend resources for dev only)
|
||||
- `server` (Server source code)
|
||||
- `src` (Frontend source code)
|
||||
- `test` (unit test)
|
||||
|
||||
## Can I create a pull request for Uptime Kuma?
|
||||
|
||||
Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request or you don't know if it will be merged or not.
|
||||
Yes or no, it depends on what you will try to do.
|
||||
Both your time and our maintainers' time are precious, and we don't want to waste either.
|
||||
|
||||
Here are some references:
|
||||
If any process/.. is not clear to you, you are likely not alone => please ask ^^
|
||||
|
||||
### ✅ Usually accepted
|
||||
Different guidelines exist for different types of pull requests (PRs):
|
||||
- <details><summary><b>security fixes</b></summary>
|
||||
<p>
|
||||
|
||||
- Bug fix
|
||||
- Security fix
|
||||
- Adding notification providers
|
||||
- Adding new language files (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
|
||||
- Adding new language keys: `$t("...")`
|
||||
Submitting security fixes is something that may put the community at risk.
|
||||
Please read through our [security policy](SECURITY.md) and submit vulnerabilities via an [advisory](https://github.com/louislam/uptime-kuma/security/advisories/new) + [issue](https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md) instead.
|
||||
We encourage you to submit how to fix a vulnerability if you know how to, this is not required.
|
||||
Following the security policy allows us to properly test and fix bugs.
|
||||
This review also allows us to notice if any changes to unrelated parts, like the documentation, are necessary.
|
||||
[**PLEASE SEE OUR SECURITY POLICY.**](SECURITY.md)
|
||||
|
||||
### ⚠️ Discussion required
|
||||
</p>
|
||||
</details>
|
||||
- <details><summary><b>small, non-breaking bug fixes</b></summary>
|
||||
<p>
|
||||
|
||||
- Large pull requests
|
||||
- New features
|
||||
If you come across a bug and think you can solve it, we appreciate your work.
|
||||
Please make sure that you follow these rules:
|
||||
- keep the PR as small as possible, fix only one thing at a time => keeping it reviewable
|
||||
- test that your code does what you claim it does.
|
||||
|
||||
### ❌ Won't be merged
|
||||
<sub>Because maintainer time is precious, junior maintainers may merge uncontroversial PRs in this area.</sub>
|
||||
</p>
|
||||
</details>
|
||||
- <details><summary><b>translations / internationalisation (i18n)</b></summary>
|
||||
<p>
|
||||
|
||||
- A dedicated PR for translating existing languages (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
|
||||
- Do not pass the auto-test
|
||||
- Any breaking changes
|
||||
- Duplicated pull requests
|
||||
- Buggy
|
||||
- UI/UX is not close to Uptime Kuma
|
||||
- Modifications or deletions of existing logic without a valid reason.
|
||||
- Adding functions that is completely out of scope
|
||||
- Converting existing code into other programming languages
|
||||
- Unnecessarily large code changes that are hard to review and cause conflicts with other PRs.
|
||||
We use weblate to localise this project into many languages.
|
||||
If you are unhappy with a translation, this is the best place to start.
|
||||
On how to translate using weblate, please see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
|
||||
|
||||
The above cases may not cover all possible situations.
|
||||
There are two cases in which a change cannot be done in weblate and requires a PR:
|
||||
- A text may not be currently localisable. In this case, **adding a new language key** via `$t("languageKey")` might be necessary
|
||||
- language keys need to be **added to `en.json`** to be visible in weblate. If this has not happened, a PR is appreciated.
|
||||
- **Adding a new language** requires a new file see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md)
|
||||
|
||||
I (@louislam) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spend on it. Therefore, it is essential to have a discussion beforehand.
|
||||
<sub>Because maintainer time is precious, junior maintainers may merge uncontroversial PRs in this area.</sub>
|
||||
</p>
|
||||
</details>
|
||||
- <details><summary><b>new notification providers</b></summary>
|
||||
<p>
|
||||
|
||||
To set up a new notification provider, these files need to be modified/created (a minimal, hypothetical sketch follows after this list):
|
||||
- `server/notification-providers/PROVIDER_NAME.js` is where the heart of the notification provider lives.
|
||||
- Both `monitorJSON` and `heartbeatJSON` can be `null` for some events.
|
||||
If both are `null`, this is a general testing message, but if just `heartbeatJSON` is `null` this is a certificate expiry.
|
||||
- Please wrap the axios call into a
|
||||
```js
|
||||
try {
|
||||
let result = await axios.post(...);
|
||||
if (result.status === ...) ...
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error);
|
||||
}
|
||||
```
|
||||
- `server/notification.js` is where the backend of the notification provider needs to be registered.
|
||||
*If you have an idea how we can skip this step, we would love to hear about it ^^*
|
||||
- `src/components/NotificationDialog.vue` you need to decide if the provider is a regional or a global one and add it with a name to the respective list
|
||||
- `src/components/notifications/PROVIDER_NAME.vue` is where the frontend of each provider lives.
|
||||
Please make sure that you have:
|
||||
- used `HiddenInput` for secret credentials
|
||||
- included all the necessary helptexts/placeholder/.. to make sure the notification provider is simple to set up for new users.
|
||||
- included all translations (`{{ $t("Translation key") }}`, [`i18n-t keypath="Translation key">`](https://vue-i18n.intlify.dev/guide/advanced/component.html)) in `src/lang/en.json` to enable our translators to translate this
|
||||
- `src/components/notifications/index.js` is where the frontend of the provider needs to be registered.
|
||||
*If you have an idea how we can skip this step, we would love to hear about it ^^*
|
||||
|
||||
Offering notifications is close to the core of what we are as an uptime monitor.
|
||||
Therefore, making sure that they work is also really important.
|
||||
Because testing notification providers is quite time intensive, we mostly offload this onto the person contributing a notification provider.
|
||||
|
||||
To make sure you have tested the notification provider, please include screenshots of the following events in the pull-request description:
|
||||
- `UP`/`DOWN`
|
||||
- Certificate Expiry via https://expired.badssl.com/
|
||||
- Testing (the test button on the notification provider setup page)
|
||||
|
||||
Using the following way to format this is encouraged:
|
||||
```md
|
||||
| Event | Before | After |
|
||||
|--------------------|------------------|------------------|
|
||||
| `UP` | paste-image-here | paste-image-here |
|
||||
| `DOWN` | paste-image-here | paste-image-here |
|
||||
| Certificate-expiry | paste-image-here | paste-image-here |
|
||||
| Testing | paste-image-here | paste-image-here |
|
||||
```
|
||||
|
||||
<sub>Because maintainer time is precious, junior maintainers may merge uncontroversial PRs in this area.</sub>
|
||||
</p>
|
||||
</details>
|
||||
- <details><summary><b>new monitoring types</b></summary>
|
||||
<p>
|
||||
|
||||
To set up a new monitor type, these files need to be modified/created (a minimal, hypothetical sketch follows after this list):
|
||||
- `server/monitor-types/MONITORING_TYPE.js` is the core of each monitor.
|
||||
the `async check(...)`-function should:
|
||||
- throw an error for each fault that is detected with an actionable error message
|
||||
- in the happy-path, you should set `heartbeat.msg` to a successful message and set `heartbeat.status = UP`
|
||||
- `server/uptime-kuma-server.js` is where the monitoring backend needs to be registered.
|
||||
*If you have an idea how we can skip this step, we would love to hear about it ^^*
|
||||
- `src/pages/EditMonitor.vue` is the shared frontend users interact with.
|
||||
Please make sure that you have:
|
||||
- used `HiddenInput` for secret credentials
|
||||
- included all the necessary helptexts/placeholder/.. to make sure the new monitor type is simple to set up for new users.
|
||||
- included all translations (`{{ $t("Translation key") }}`, [`i18n-t keypath="Translation key">`](https://vue-i18n.intlify.dev/guide/advanced/component.html)) in `src/lang/en.json` to enable our translators to translate this
|
||||
|
||||
|
||||
|
||||
<sub>Because maintainer time is precious, junior maintainers may merge uncontroversial PRs in this area.</sub>
|
||||
</p>
|
||||
</details>
|
||||
- <details><summary><b>new features/ major changes / breaking bugfixes</b></summary>
|
||||
<p>
|
||||
|
||||
be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**.
|
||||
This is especially important for a large pull request or you don't know if it will be merged or not.
|
||||
|
||||
<sub>Because of the large impact of this work, only senior maintainers may merge PRs in this area.</sub>
|
||||
</p>
|
||||
</details>
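To make the new-notification-provider item above more concrete, here is a minimal sketch of what `server/notification-providers/PROVIDER_NAME.js` could look like. The base-class require path, the exact `send(...)` signature and the `myProviderWebhookURL` field are assumptions for illustration only; `throwGeneralAxiosError` and the nullable `monitorJSON`/`heartbeatJSON` parameters are taken from the notes above.

```js
const NotificationProvider = require("./notification-provider"); // assumed base-class path
const axios = require("axios");

class MyProvider extends NotificationProvider {
    name = "MyProvider";

    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        const okMsg = "Sent Successfully.";
        try {
            // `notification` holds whatever fields the Vue form collects;
            // `myProviderWebhookURL` is a hypothetical field name.
            await axios.post(notification.myProviderWebhookURL, {
                text: msg,
                monitor: monitorJSON,     // may be null, e.g. for a general test message
                heartbeat: heartbeatJSON, // null for test messages and certificate-expiry events
            });
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = MyProvider;
```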
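In the same spirit, a minimal sketch of a `server/monitor-types/MONITORING_TYPE.js` file for the new-monitoring-types item above. The base class, the require paths and the exact `check(...)` signature are assumptions; the error-throwing and the `heartbeat.msg` / `heartbeat.status = UP` behaviour is what the item describes.

```js
const { MonitorType } = require("./monitor-type"); // assumed base-class path
const { UP } = require("../../src/util");          // assumed location of the UP constant
const axios = require("axios");

class MyMonitorType extends MonitorType {
    name = "my-monitor-type";

    async check(monitor, heartbeat, _server) {
        // Throw an actionable error for every fault that is detected.
        const res = await axios.get(monitor.url, { validateStatus: () => true });
        if (res.status !== 200) {
            throw new Error(`Expected HTTP 200 but got ${res.status} from ${monitor.url}`);
        }

        // Happy path: record a successful message and mark the heartbeat as UP.
        heartbeat.msg = "OK";
        heartbeat.status = UP;
    }
}

module.exports = { MyMonitorType };
```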
The following rules are essential for making your PR mergeable:
|
||||
- Merging multiple issues in one huge PR makes it more difficult to review and causes conflicts with other PRs. Please
|
||||
- (if possible) **create one PR for one issue** or
|
||||
- (if not possible) **explain which issues a PR addresses and why this PR should not be broken apart**
|
||||
- Make sure your **PR passes our continuous integration**.
|
||||
PRs will not be merged unless all CI-Checks are green.
|
||||
- **Breaking changes** (unless for a good reason and discussed beforehand) will not get merged / not get merged quickly.
|
||||
Such changes require a major version release.
|
||||
- **Test your code** before submitting a PR.
|
||||
Buggy PRs will not be merged.
|
||||
- Make sure the **UI/UX is close to Uptime Kuma**.
|
||||
- **Think about the maintainability**:
|
||||
Don't add functionality that is completely **out of scope**.
|
||||
Keep in mind that we need to be able to maintain the functionality.
|
||||
- Don't modify or delete existing logic without a valid reason.
|
||||
- Don't convert existing code into other programming languages for no reason.
|
||||
|
||||
I ([@louislam](https://github.com/louislam)) have the final say.
|
||||
If your pull request does not meet my expectations, I will reject it, no matter how much time you spent on it.
|
||||
Therefore, it is essential to have a discussion beforehand.
|
||||
|
||||
I will assign your pull request to a [milestone](https://github.com/louislam/uptime-kuma/milestones), if I plan to review and merge it.
|
||||
|
||||
Also, please don't rush or ask for an ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.
|
||||
Please don't rush or ask for an ETA.
|
||||
We have to understand the pull request, make sure it has no breaking changes and stick to the vision of this project, especially for large pull requests.
|
||||
|
||||
|
||||
## I'd like to work on an issue. How do I do that?
|
||||
|
||||
We have found that assigning people to issues is management-overhead that we don't need.
|
||||
A short comment that you want to try your hand at this issue is appreciated to save other devs time.
|
||||
If you come across any problem during development, feel free to leave a comment with what you are stuck on.
|
||||
|
||||
### Recommended Pull Request Guideline
|
||||
|
||||
Before deep into coding, discussion first is preferred. Creating an empty pull request for discussion would be recommended.
|
||||
Before diving deep into coding, having a discussion first is preferred; creating an empty draft pull request is a good way to start one.
|
||||
The rationale behind this is that we can align the direction and scope of the feature to eliminate any conflicts with existing and planned work, and can help by pointing out any potential pitfalls.
|
||||
|
||||
1. Fork the project
|
||||
1. Clone your fork repo to local
|
||||
1. Create a new branch
|
||||
1. Create an empty commit
|
||||
`git commit -m "[empty commit] pull request for <YOUR TASK NAME>" --allow-empty`
|
||||
1. Push to your fork repo
|
||||
1. Create a pull request: https://github.com/louislam/uptime-kuma/compare
|
||||
1. Write a proper description
|
||||
1. Click "Change to draft"
|
||||
1. Discussion
|
||||
2. Clone your fork repo to local
|
||||
3. Create a new branch
|
||||
4. Create an empty commit: `git commit -m "<YOUR TASK NAME>" --allow-empty`
|
||||
5. Push to your fork repo
|
||||
6. Prepare a pull request: https://github.com/louislam/uptime-kuma/compare
|
||||
7. Write a proper description. You can mention @louislam in it, so @louislam will get the notification.
|
||||
8. Create your pull request as a Draft
|
||||
9. Wait for the discussion
|
||||
|
||||
## Project Styles
|
||||
|
||||
I personally do not like something that requires so many configurations before you can finally start the app. I hope Uptime Kuma installation will be as easy as like installing a mobile app.
|
||||
I personally do not like something that requires so many configurations before you can finally start the app.
|
||||
The goal is to make the Uptime Kuma installation as easy as installing a mobile app.
|
||||
|
||||
- Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, and no extra effort required to get it running
|
||||
- Single container for Docker users, no very complex docker-compose file. Just map the volume and expose the port, then good to go
|
||||
- Easy to install for non-Docker users
|
||||
- no native build dependency is needed (for `x86_64`/`armv7`/`arm64`)
|
||||
- no extra configuration and
|
||||
- no extra effort required to get it running
|
||||
- Single container for Docker users
|
||||
- no complex docker-compose file
|
||||
- mapping the volume and exposing the port should be the only requirements
|
||||
- Settings should be configurable in the frontend. Environment variables are discouraged, unless it is related to startup such as `DATA_DIR`
|
||||
- Easy to use
|
||||
- The web UI styling should be consistent and nice
|
||||
|
@ -108,15 +230,15 @@ I personally do not like something that requires so many configurations before y
|
|||
|
||||
## Tools
|
||||
|
||||
- [`Node.js`](https://nodejs.org/) >= 14
|
||||
- [`npm`](https://www.npmjs.com/) >= 8.5
|
||||
- [`Node.js`](https://nodejs.org/) >= 18
|
||||
- [`npm`](https://www.npmjs.com/) >= 9.3
|
||||
- [`git`](https://git-scm.com/)
|
||||
- IDE that supports [`ESLint`](https://eslint.org/) and EditorConfig (I am using [`IntelliJ IDEA`](https://www.jetbrains.com/idea/))
|
||||
- A SQLite GUI tool (f.ex. [`SQLite Expert Personal`](https://www.sqliteexpert.com/download.html) or [`DBeaver Community`](https://dbeaver.io/download/))
|
||||
|
||||
### GitHub Codespace
|
||||
### GitHub Codespaces
|
||||
|
||||
If you don't want to setup an local environment, you can now develop on GitHub Codespace, read more:
|
||||
If you don't want to set up a local environment, you can now develop on GitHub Codespaces; read more:
|
||||
|
||||
https://github.com/louislam/uptime-kuma/tree/master/.devcontainer
|
||||
|
||||
|
@ -155,25 +277,25 @@ npm run start-server-dev
|
|||
|
||||
It binds to `0.0.0.0:3001` by default.
|
||||
|
||||
It is mainly a socket.io app + express.js.
|
||||
The backend is an `express.js` server with `socket.io` integrated.
|
||||
It uses `socket.io` to communicate with clients, and most server logic is encapsulated in the `socket.io` handlers.
|
||||
`express.js` is also used to serve:
|
||||
|
||||
express.js is used for:
|
||||
- as an entry point for redirecting to a status page or the dashboard
|
||||
- the frontend built files (`index.html`, `*.js`, `*.css`, etc.)
|
||||
- internal APIs of the status page
|
||||
|
||||
- entry point such as redirecting to a status page or the dashboard
|
||||
- serving the frontend built files (index.html, .js and .css etc.)
|
||||
- serving internal APIs of the status page
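For orientation, here is a generic sketch of the serving setup described above: static `dist` files, a small express route and socket.io handlers on one HTTP server. It is illustrative only and does not mirror the actual `server/server.js`; the route and event names are made up.

```js
const express = require("express");
const { createServer } = require("http");
const { Server } = require("socket.io");

const app = express();
const httpServer = createServer(app);
const io = new Server(httpServer);

// Serve the built frontend (index.html, *.js, *.css, ...) from ./dist
app.use(express.static("dist"));

// Example of an internal HTTP API, e.g. for a status page (hypothetical route)
app.get("/api/status-page/heartbeat/:slug", (req, res) => {
    res.json({ ok: true }); // placeholder payload
});

// Most server logic lives in socket.io handlers (hypothetical event name)
io.on("connection", (socket) => {
    socket.on("getMonitorList", (callback) => {
        callback({ ok: true, monitors: [] }); // placeholder payload
    });
});

httpServer.listen(3001, "0.0.0.0");
```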
|
||||
### Structure in `/server/`
|
||||
|
||||
### Structure in /server/
|
||||
|
||||
- jobs/ (Jobs that are running in another process)
|
||||
- model/ (Object model, auto-mapping to the database table name)
|
||||
- modules/ (Modified 3rd-party modules)
|
||||
- monitor_types (Monitor Types)
|
||||
- notification-providers/ (individual notification logic)
|
||||
- routers/ (Express Routers)
|
||||
- socket-handler (Socket.io Handlers)
|
||||
- server.js (Server entry point)
|
||||
- uptime-kuma-server.js (UptimeKumaServer class, main logic should be here, but some still in `server.js`)
|
||||
- `jobs/` (Jobs that are running in another process)
|
||||
- `model/` (Object model, auto-mapping to the database table name)
|
||||
- `modules/` (Modified 3rd-party modules)
|
||||
- `monitor_types/` (Monitor Types)
|
||||
- `notification-providers/` (individual notification logic)
|
||||
- `routers/` (Express Routers)
|
||||
- `socket-handler/` (Socket.io Handlers)
|
||||
- `server.js` (Server entry point)
|
||||
- `uptime-kuma-server.js` (UptimeKumaServer class, main logic should be here, but some still in `server.js`)
|
||||
|
||||
## Frontend Dev Server
|
||||
|
||||
|
@ -212,14 +334,15 @@ npm test
|
|||
|
||||
## Dependencies
|
||||
|
||||
Both frontend and backend share the same package.json. However, the frontend dependencies are eventually not used in the production environment, because it is usually also baked into dist files. So:
|
||||
Both frontend and backend share the same `package.json`.
|
||||
However, the frontend dependencies are ultimately not used in the production environment, because they are baked into the `dist` files. So:
|
||||
|
||||
- Frontend dependencies = "devDependencies"
|
||||
- Examples: vue, chart.js
|
||||
- Examples: `vue`, `chart.js`
|
||||
- Backend dependencies = "dependencies"
|
||||
- Examples: socket.io, sqlite3
|
||||
- Examples: `socket.io`, `sqlite3`
|
||||
- Development dependencies = "devDependencies"
|
||||
- Examples: eslint, sass
|
||||
- Examples: `eslint`, `sass`
|
||||
|
||||
### Update Dependencies
|
||||
|
||||
|
@ -231,9 +354,9 @@ If for security / bug / other reasons, a library must be updated, breaking chang
|
|||
|
||||
## Translations
|
||||
|
||||
Please add **all** the strings which are translatable to `src/lang/en.json` (If translation keys are omitted, they can not be translated).
|
||||
Please add **all** the strings which are translatable to `src/lang/en.json` (if translation keys are omitted, they can not be translated.)
|
||||
|
||||
**Don't include any other languages in your initial Pull-Request** (even if this is your mother tongue), to avoid merge-conflicts between weblate and `master`.
|
||||
**Don't include any other languages in your initial pull request** (even if this is your mother tongue), to avoid merge-conflicts between weblate and `master`.
|
||||
The translations can then (after merging a PR into `master`) be translated by awesome people donating their language skills.
|
||||
|
||||
If you want to help by translating Uptime Kuma into your language, please visit the [instructions on how to translate using weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
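Adding the key to `src/lang/en.json` and referencing it via `$t(...)` is all that is needed to make a string translatable. A minimal sketch (the key name is hypothetical; in templates the `{{ $t("...") }}` form shown above is used):

```js
// Inside a component's <script setup>, using vue-i18n's composition API:
import { useI18n } from "vue-i18n";

const { t } = useI18n();
// "Example key" must also exist in src/lang/en.json so weblate can pick it up.
console.log(t("Example key"));
```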
|
||||
|
@ -245,7 +368,7 @@ My mother language is not English and my grammar is not that great.
|
|||
|
||||
## Wiki
|
||||
|
||||
Since there is no way to make a pull request to wiki's repo, I have set up another repo to do that.
|
||||
Since there is no way to make a pull request to the wiki, I have set up another repo to do that.
|
||||
|
||||
https://github.com/louislam/uptime-kuma-wiki
|
||||
|
||||
|
@ -290,7 +413,23 @@ https://github.com/louislam/uptime-kuma-wiki
|
|||
Check the latest issues and pull requests:
|
||||
https://github.com/louislam/uptime-kuma/issues?q=sort%3Aupdated-desc
|
||||
|
||||
### Release Procedures
|
||||
### What is a maintainer and what are their roles?
|
||||
|
||||
This project has multiple maintainers who specialise in different areas.
|
||||
Currently, there are 3 maintainers:
|
||||
|
||||
| Person | Role | Main Area |
|
||||
|-------------------|-------------------|------------------|
|
||||
| `@louislam` | senior maintainer | major features |
|
||||
| `@chakflying` | junior maintainer | fixing bugs |
|
||||
| `@commanderstorm` | junior maintainer | issue-management |
|
||||
|
||||
### Procedures
|
||||
|
||||
We have a few procedures we follow. These are documented here:
|
||||
|
||||
- <details><summary>Release</summary>
|
||||
<p>
|
||||
|
||||
1. Draft a release note
|
||||
2. Make sure the repo is cleared
|
||||
|
@ -298,28 +437,34 @@ https://github.com/louislam/uptime-kuma/issues?q=sort%3Aupdated-desc
|
|||
4. `npm run release-final` with env vars: `VERSION` and `GITHUB_TOKEN`
|
||||
5. Wait until the `Press any key to continue`
|
||||
6. `git push`
|
||||
7. Publish the release note as 1.X.X
|
||||
7. Publish the release note as `1.X.X`
|
||||
8. Press any key to continue
|
||||
9. Deploy to the demo server: `npm run deploy-demo-server`
|
||||
|
||||
Checking:
|
||||
These items need to be checked:
|
||||
|
||||
- Check all tags is fine on https://hub.docker.com/r/louislam/uptime-kuma/tags
|
||||
- Try the Docker image with tag 1.X.X (Clean install / amd64 / arm64 / armv7)
|
||||
- Try clean installation with Node.js
|
||||
- [ ] Check that all tags are fine on https://hub.docker.com/r/louislam/uptime-kuma/tags
|
||||
- [ ] Try the Docker image with tag 1.X.X (Clean install / amd64 / arm64 / armv7)
|
||||
- [ ] Try clean installation with Node.js
|
||||
|
||||
### Release Beta Procedures
|
||||
</p>
|
||||
</details>
|
||||
- <details><summary>Release Beta</summary>
|
||||
<p>
|
||||
|
||||
1. Draft a release note, check "This is a pre-release"
|
||||
1. Draft a release note, check `This is a pre-release`
|
||||
2. Make sure the repo is cleared
|
||||
3. `npm run release-beta` with env vars: `VERSION` and `GITHUB_TOKEN`
|
||||
4. Wait until the `Press any key to continue`
|
||||
5. Publish the release note as 1.X.X-beta.X
|
||||
5. Publish the release note as `1.X.X-beta.X`
|
||||
6. Press any key to continue
|
||||
|
||||
### Release Wiki
|
||||
</p>
|
||||
</details>
|
||||
- <details><summary>Release Wiki</summary>
|
||||
<p>
|
||||
|
||||
#### Setup Repo
|
||||
**Setup Repo**
|
||||
|
||||
```bash
|
||||
git clone https://github.com/louislam/uptime-kuma-wiki.git
|
||||
|
@ -327,17 +472,46 @@ cd uptime-kuma-wiki
|
|||
git remote add production https://github.com/louislam/uptime-kuma.wiki.git
|
||||
```
|
||||
|
||||
#### Push to Production Wiki
|
||||
**Push to Production Wiki**
|
||||
|
||||
```bash
|
||||
git pull
|
||||
git push production master
|
||||
```
|
||||
|
||||
## Useful Commands
|
||||
|
||||
Change the base of a pull request such as `master` to `1.23.X`
|
||||
</p>
|
||||
</details>
|
||||
- <details><summary>Change the base of a pull request such as <code>master</code> to <code>1.23.X</code></summary>
|
||||
<p>
|
||||
|
||||
```bash
|
||||
git rebase --onto <new parent> <old parent>
|
||||
```
|
||||
|
||||
</p>
|
||||
</details>
|
||||
|
||||
### Set up a Docker Builder
|
||||
|
||||
- amd64, armv7: built locally.
|
||||
- arm64: built on a remote arm64 CPU, as the emulator is too slow and can no longer pass the `npm ci` command.
|
||||
1. Add the public key to the remote server.
|
||||
2. Add the remote context. The remote machine must be arm64 and have Docker CE installed.
|
||||
```
|
||||
docker context create oracle-arm64-jp --docker "host=ssh://root@100.107.174.88"
|
||||
```
|
||||
3. Create a new builder.
|
||||
```
|
||||
docker buildx create --name kuma-builder --platform linux/amd64,linux/arm/v7
|
||||
docker buildx use kuma-builder
|
||||
docker buildx inspect --bootstrap
|
||||
```
|
||||
4. Append the remote context to the builder.
|
||||
```
|
||||
docker buildx create --append --name kuma-builder --platform linux/arm64 oracle-arm64-jp
|
||||
```
|
||||
5. Verify the builder and check if the builder is using `kuma-builder`.
|
||||
```
|
||||
docker buildx inspect kuma-builder
|
||||
docker buildx ls
|
||||
```
|
||||
|
|
README.md (62 changes)
|
@ -6,7 +6,7 @@
|
|||
|
||||
Uptime Kuma is an easy-to-use self-hosted monitoring tool.
|
||||
|
||||
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
|
||||
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma?style=flat" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
|
||||
[![GitHub Sponsors](https://img.shields.io/github/sponsors/louislam?label=GitHub%20Sponsors)](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/projects/uptime-kuma/uptime-kuma/">
|
||||
<img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" />
|
||||
</a>
|
||||
|
@ -17,9 +17,9 @@ Uptime Kuma is an easy-to-use self-hosted monitoring tool.
|
|||
|
||||
Try it!
|
||||
|
||||
- Tokyo Demo Server: https://demo.uptime.kuma.pet (Sponsored by [Uptime Kuma Sponsors](https://github.com/louislam/uptime-kuma#%EF%B8%8F-sponsors))
|
||||
Demo Server (Location: Frankfurt - Germany): https://demo.kuma.pet/start-demo
|
||||
|
||||
It is a temporary live demo, all data will be deleted after 10 minutes. Use the one that is closer to you, but I suggest that you should install and try it out for the best demo experience.
|
||||
It is a temporary live demo, all data will be deleted after 10 minutes. Sponsored by [Uptime Kuma Sponsors](https://github.com/louislam/uptime-kuma#%EF%B8%8F-sponsors).
|
||||
|
||||
## ⭐ Features
|
||||
|
||||
|
@ -43,9 +43,17 @@ It is a temporary live demo, all data will be deleted after 10 minutes. Use the
|
|||
docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
|
||||
```
|
||||
|
||||
⚠️ Please use a **local volume** only. Other types such as NFS are not supported.
|
||||
Uptime Kuma is now running on <http://0.0.0.0:3001>.
|
||||
|
||||
Uptime Kuma is now running on http://localhost:3001
|
||||
> [!WARNING]
|
||||
> File Systems like **NFS** (Network File System) are **NOT** supported. Please map to a local directory or volume.
|
||||
|
||||
> [!NOTE]
|
||||
> If you want to limit exposure to localhost (without exposing port for other users or to use a [reverse proxy](https://github.com/louislam/uptime-kuma/wiki/Reverse-Proxy)), you can expose the port like this:
|
||||
>
|
||||
> ```bash
|
||||
> docker run -d --restart=always -p 127.0.0.1:3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
|
||||
> ```
|
||||
|
||||
### 💪🏻 Non-Docker
|
||||
|
||||
|
@ -55,15 +63,12 @@ Requirements:
|
|||
- ✅ Major Linux distros such as Debian, Ubuntu, CentOS, Fedora and ArchLinux etc.
|
||||
- ✅ Windows 10 (x64), Windows Server 2012 R2 (x64) or higher
|
||||
- ❌ Replit / Heroku
|
||||
- [Node.js](https://nodejs.org/en/download/) 14 / 16 / 18 / 20.4
|
||||
- [npm](https://docs.npmjs.com/cli/) >= 7
|
||||
- [Node.js](https://nodejs.org/en/download/) 18 / 20.4
|
||||
- [npm](https://docs.npmjs.com/cli/) 9
|
||||
- [Git](https://git-scm.com/downloads)
|
||||
- [pm2](https://pm2.keymetrics.io/) - For running Uptime Kuma in the background
|
||||
|
||||
```bash
|
||||
# Update your npm
|
||||
npm install npm@9 -g
|
||||
|
||||
git clone https://github.com/louislam/uptime-kuma.git
|
||||
cd uptime-kuma
|
||||
npm run setup
|
||||
|
@ -91,10 +96,6 @@ pm2 monit
|
|||
pm2 save && pm2 startup
|
||||
```
|
||||
|
||||
### Windows Portable (x64)
|
||||
|
||||
https://github.com/louislam/uptime-kuma/releases/download/1.23.1/uptime-kuma-windows-x64-portable-1.23.1-2.zip
|
||||
|
||||
### Advanced Installation
|
||||
|
||||
If you need more options or need to browse via a reverse proxy, please read:
|
||||
|
@ -113,10 +114,6 @@ I will assign requests/issues to the next milestone.
|
|||
|
||||
https://github.com/louislam/uptime-kuma/milestones
|
||||
|
||||
Project Plan:
|
||||
|
||||
https://github.com/users/louislam/projects/4/views/1
|
||||
|
||||
## ❤️ Sponsors
|
||||
|
||||
Thank you so much! (GitHub Sponsors will be updated manually. OpenCollective sponsors will be updated automatically, the list will be cached by GitHub though. It may need some time to be updated)
|
||||
|
@ -143,28 +140,33 @@ Telegram Notification Sample:
|
|||
|
||||
## Motivation
|
||||
|
||||
- I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the close ones is statping. Unfortunately, it is not stable and no longer maintained.
|
||||
- Want to build a fancy UI.
|
||||
- I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the closest ones is statping. Unfortunately, it is not stable and no longer maintained.
|
||||
- Wanted to build a fancy UI.
|
||||
- Learn Vue 3 and vite.js.
|
||||
- Show the power of Bootstrap 5.
|
||||
- Try to use WebSocket with SPA instead of REST API.
|
||||
- Try to use WebSocket with SPA instead of a REST API.
|
||||
- Deploy my first Docker image to Docker Hub.
|
||||
|
||||
If you love this project, please consider giving me a ⭐.
|
||||
If you love this project, please consider giving it a ⭐.
|
||||
|
||||
## 🗣️ Discussion / Ask for Help
|
||||
|
||||
⚠️ For any general or technical questions, please don't send me an email, as I am unable to provide support in that manner. I will not respond if you asked such questions.
|
||||
⚠️ For any general or technical questions, please don't send me an email, as I am unable to provide support in that manner. I will not respond if you ask questions there.
|
||||
|
||||
I recommend using Google, GitHub Issues, or Uptime Kuma's Subreddit for finding answers to your question. If you cannot find the information you need, feel free to ask:
|
||||
I recommend using Google, GitHub Issues, or Uptime Kuma's subreddit for finding answers to your question. If you cannot find the information you need, feel free to ask:
|
||||
|
||||
- [GitHub Issues](https://github.com/louislam/uptime-kuma/issues)
|
||||
- [Subreddit r/Uptime kuma](https://www.reddit.com/r/UptimeKuma/)
|
||||
- [Subreddit (r/UptimeKuma)](https://www.reddit.com/r/UptimeKuma/)
|
||||
|
||||
My Reddit account: [u/louislamlam](https://reddit.com/u/louislamlam).
|
||||
You can mention me if you ask a question on Reddit.
|
||||
My Reddit account: [u/louislamlam](https://reddit.com/u/louislamlam)
|
||||
You can mention me if you ask a question on the subreddit.
|
||||
|
||||
## Contribute
|
||||
## Contributions
|
||||
|
||||
### Create Pull Requests
|
||||
|
||||
We DO NOT accept all types of pull requests and do not want to waste your time. Please be sure that you have read and follow pull request rules:
|
||||
[CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma](https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma)
|
||||
|
||||
### Test Pull Requests
|
||||
|
||||
|
@ -188,8 +190,6 @@ If you want to translate Uptime Kuma into your language, please visit [Weblate R
|
|||
### Spelling & Grammar
|
||||
|
||||
Feel free to correct the grammar in the documentation or code.
|
||||
My mother language is not english and my grammar is not that great.
|
||||
My mother language is not English and my grammar is not that great.
|
||||
|
||||
### Create Pull Requests
|
||||
|
||||
If you want to modify Uptime Kuma, please read this guide and follow the rules here: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
## Reporting a Vulnerability

1. Please report security issues to https://github.com/louislam/uptime-kuma/security/advisories/new.
1. Please also create an empty security issue to alert me, as GitHub Advisories do not send a notification, I probably will miss it without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md
2. Please also create an empty security issue to alert me, as GitHub Advisories do not send a notification, I probably will miss it without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md

Do not use the public issue tracker or discuss it in public as it will cause more damage.

@@ -20,7 +20,7 @@ You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` vers
### Upgradable Docker Tags

| Tag | Supported |
| ------- | ------------------ |
|-|-|
| 1 | :white_check_mark: |
| 1-debian | :white_check_mark: |
| latest | :white_check_mark: |
|
||||
|
|
|
@ -1,7 +0,0 @@
|
|||
const config = {};
|
||||
|
||||
if (process.env.TEST_FRONTEND) {
|
||||
config.presets = [ "@babel/preset-env" ];
|
||||
}
|
||||
|
||||
module.exports = config;
|
9
compose.yaml
Normal file
9
compose.yaml
Normal file
|
@ -0,0 +1,9 @@
|
|||
services:
|
||||
uptime-kuma:
|
||||
image: louislam/uptime-kuma:1
|
||||
volumes:
|
||||
- ./data:/app/data
|
||||
ports:
|
||||
# <Host Port>:<Container Port>
|
||||
- 3001:3001
|
||||
restart: unless-stopped
|
60
config/playwright.config.js
Normal file
60
config/playwright.config.js
Normal file
|
@ -0,0 +1,60 @@
|
|||
import { defineConfig, devices } from "@playwright/test";
|
||||
|
||||
const port = 30001;
|
||||
const url = `http://localhost:${port}`;
|
||||
|
||||
export default defineConfig({
|
||||
// Look for test files in the "tests" directory, relative to this configuration file.
|
||||
testDir: "../test/e2e",
|
||||
outputDir: "../private/playwright-test-results",
|
||||
fullyParallel: false,
|
||||
locale: "en-US",
|
||||
|
||||
// Fail the build on CI if you accidentally left test.only in the source code.
|
||||
forbidOnly: !!process.env.CI,
|
||||
|
||||
// Retry on CI only.
|
||||
retries: process.env.CI ? 2 : 0,
|
||||
|
||||
// Opt out of parallel tests on CI.
|
||||
workers: 1,
|
||||
|
||||
// Reporter to use
|
||||
reporter: [
|
||||
[
|
||||
"html", {
|
||||
outputFolder: "../private/playwright-report",
|
||||
open: "never",
|
||||
}
|
||||
],
|
||||
],
|
||||
|
||||
use: {
|
||||
// Base URL to use in actions like `await page.goto('/')`.
|
||||
baseURL: url,
|
||||
|
||||
// Collect trace when retrying the failed test.
|
||||
trace: "on-first-retry",
|
||||
},
|
||||
|
||||
// Configure projects for major browsers.
|
||||
projects: [
|
||||
{
|
||||
name: "chromium",
|
||||
use: { ...devices["Desktop Chrome"] },
|
||||
},
|
||||
/*
|
||||
{
|
||||
name: "firefox",
|
||||
use: { browserName: "firefox" }
|
||||
},*/
|
||||
],
|
||||
|
||||
// Run your local dev server before starting the tests.
|
||||
webServer: {
|
||||
command: `node extra/remove-playwright-test-data.js && node server/server.js --port=${port} --data-dir=./data/playwright-test`,
|
||||
url,
|
||||
reuseExistingServer: false,
|
||||
cwd: "../",
|
||||
},
|
||||
});
|
|
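The Playwright config above looks for specs in `test/e2e`, boots the server itself on port 30001 via `webServer`, and exposes that address as `baseURL`. A minimal spec that this config would pick up could look like the following sketch; the file name and title assertion are assumptions for illustration, not code from this diff.

// test/e2e/smoke.spec.js (hypothetical) - relies on baseURL from config/playwright.config.js
import { test, expect } from "@playwright/test";

test("server boots and serves the frontend", async ({ page }) => {
    // Relative path resolves against baseURL (http://localhost:30001)
    await page.goto("/");
    await expect(page).toHaveTitle(/Uptime Kuma/);
});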
@ -1,9 +1,8 @@
|
|||
import legacy from "@vitejs/plugin-legacy";
|
||||
import vue from "@vitejs/plugin-vue";
|
||||
import { defineConfig } from "vite";
|
||||
import visualizer from "rollup-plugin-visualizer";
|
||||
import viteCompression from "vite-plugin-compression";
|
||||
import commonjs from "vite-plugin-commonjs";
|
||||
import VueDevTools from "vite-plugin-vue-devtools";
|
||||
|
||||
const postCssScss = require("postcss-scss");
|
||||
const postcssRTLCSS = require("postcss-rtlcss");
|
||||
|
@ -22,11 +21,7 @@ export default defineConfig({
|
|||
"CODESPACE_NAME": JSON.stringify(process.env.CODESPACE_NAME),
|
||||
},
|
||||
plugins: [
|
||||
commonjs(),
|
||||
vue(),
|
||||
legacy({
|
||||
targets: [ "since 2015" ],
|
||||
}),
|
||||
visualizer({
|
||||
filename: "tmp/dist-stats.html"
|
||||
}),
|
||||
|
@ -38,6 +33,7 @@ export default defineConfig({
|
|||
algorithm: "brotliCompress",
|
||||
filter: viteCompressionFilter,
|
||||
}),
|
||||
VueDevTools(),
|
||||
],
|
||||
css: {
|
||||
postcss: {
|
||||
|
|
|
@ -275,7 +275,7 @@ async function createTables() {
|
|||
table.boolean("active").notNullable().defaultTo(true);
|
||||
table.integer("user_id").unsigned();
|
||||
table.boolean("is_default").notNullable().defaultTo(false);
|
||||
table.text("config");
|
||||
table.text("config", "longtext");
|
||||
});
|
||||
|
||||
// monitor_notification
|
||||
|
@ -318,7 +318,10 @@ async function createTables() {
|
|||
// monitor_tls_info
|
||||
await knex.schema.createTable("monitor_tls_info", (table) => {
|
||||
table.increments("id");
|
||||
table.integer("monitor_id").unsigned().notNullable(); //TODO: no fk ?
|
||||
table.integer("monitor_id").unsigned().notNullable()
|
||||
.references("id").inTable("monitor")
|
||||
.onDelete("CASCADE")
|
||||
.onUpdate("CASCADE");
|
||||
table.text("info_json");
|
||||
});
|
||||
|
||||
|
@ -493,8 +496,11 @@ ALTER TABLE monitor
|
|||
await knex.schema.table("monitor", function (table) {
|
||||
table.string("kafka_producer_topic", 255);
|
||||
table.text("kafka_producer_brokers");
|
||||
table.integer("kafka_producer_ssl");
|
||||
table.string("kafka_producer_allow_auto_topic_creation", 255);
|
||||
|
||||
// patch-fix-kafka-producer-booleans.sql
|
||||
table.boolean("kafka_producer_ssl").defaultTo(0).notNullable();
|
||||
table.boolean("kafka_producer_allow_auto_topic_creation").defaultTo(0).notNullable();
|
||||
|
||||
table.text("kafka_producer_sasl_options");
|
||||
table.text("kafka_producer_message");
|
||||
});
|
||||
|
|
15
db/knex_migrations/2023-09-29-0000-heartbeat-retires.js
Normal file
15
db/knex_migrations/2023-09-29-0000-heartbeat-retires.js
Normal file
|
@ -0,0 +1,15 @@
|
|||
exports.up = function (knex) {
|
||||
// Add new column heartbeat.retries
|
||||
return knex.schema
|
||||
.alterTable("heartbeat", function (table) {
|
||||
table.integer("retries").notNullable().defaultTo(0);
|
||||
});
|
||||
|
||||
};
|
||||
|
||||
exports.down = function (knex) {
|
||||
return knex.schema
|
||||
.alterTable("heartbeat", function (table) {
|
||||
table.dropColumn("retries");
|
||||
});
|
||||
};
|
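The migration above, and the ones added below, all follow the same Knex `up`/`down` shape: `up` applies the schema change and `down` reverses it. As a rough sketch of how such files are applied (the connection settings here are placeholders, not the server's real database bootstrap), Knex can run the whole directory programmatically:

const knex = require("knex")({
    client: "sqlite3",
    connection: { filename: "./data/kuma.db" }, // placeholder path, for illustration only
    useNullAsDefault: true,
});

// Apply every pending migration found in db/knex_migrations
knex.migrate.latest({ directory: "./db/knex_migrations" })
    .then(([ batchNo, applied ]) => console.log(`Batch ${batchNo} applied:`, applied))
    .catch((err) => console.error(err))
    .finally(() => knex.destroy());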
16
db/knex_migrations/2023-10-08-0000-mqtt-query.js
Normal file
16
db/knex_migrations/2023-10-08-0000-mqtt-query.js
Normal file
|
@ -0,0 +1,16 @@
|
|||
exports.up = function (knex) {
|
||||
// Add new column monitor.mqtt_check_type
|
||||
return knex.schema
|
||||
.alterTable("monitor", function (table) {
|
||||
table.string("mqtt_check_type", 255).notNullable().defaultTo("keyword");
|
||||
});
|
||||
|
||||
};
|
||||
|
||||
exports.down = function (knex) {
|
||||
// Drop column monitor.mqtt_check_type
|
||||
return knex.schema
|
||||
.alterTable("monitor", function (table) {
|
||||
table.dropColumn("mqtt_check_type");
|
||||
});
|
||||
};
|
14
db/knex_migrations/2023-10-11-1915-push-token-to-32.js
Normal file
14
db/knex_migrations/2023-10-11-1915-push-token-to-32.js
Normal file
|
@ -0,0 +1,14 @@
|
|||
exports.up = function (knex) {
|
||||
// update monitor.push_token to 32 length
|
||||
return knex.schema
|
||||
.alterTable("monitor", function (table) {
|
||||
table.string("push_token", 32).alter();
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = function (knex) {
|
||||
return knex.schema
|
||||
.alterTable("monitor", function (table) {
|
||||
table.string("push_token", 20).alter();
|
||||
});
|
||||
};
|
21
db/knex_migrations/2023-10-16-0000-create-remote-browsers.js
Normal file
21
db/knex_migrations/2023-10-16-0000-create-remote-browsers.js
Normal file
|
@ -0,0 +1,21 @@
|
|||
exports.up = function (knex) {
|
||||
return knex.schema
|
||||
.createTable("remote_browser", function (table) {
|
||||
table.increments("id");
|
||||
table.string("name", 255).notNullable();
|
||||
table.string("url", 255).notNullable();
|
||||
table.integer("user_id").unsigned();
|
||||
}).alterTable("monitor", function (table) {
|
||||
// Add new column monitor.remote_browser
|
||||
table.integer("remote_browser").nullable().defaultTo(null).unsigned()
|
||||
.index()
|
||||
.references("id")
|
||||
.inTable("remote_browser");
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = function (knex) {
|
||||
return knex.schema.dropTable("remote_browser").alterTable("monitor", function (table) {
|
||||
table.dropColumn("remote_browser");
|
||||
});
|
||||
};
|
12
db/knex_migrations/2023-12-20-0000-alter-status-page.js
Normal file
12
db/knex_migrations/2023-12-20-0000-alter-status-page.js
Normal file
|
@ -0,0 +1,12 @@
|
|||
exports.up = function (knex) {
|
||||
return knex.schema
|
||||
.alterTable("status_page", function (table) {
|
||||
table.integer("auto_refresh_interval").defaultTo(300).unsigned();
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = function (knex) {
|
||||
return knex.schema.alterTable("status_page", function (table) {
|
||||
table.dropColumn("auto_refresh_interval");
|
||||
});
|
||||
};
|
24
db/knex_migrations/2023-12-21-0000-stat-ping-min-max.js
Normal file
24
db/knex_migrations/2023-12-21-0000-stat-ping-min-max.js
Normal file
|
@ -0,0 +1,24 @@
|
|||
exports.up = function (knex) {
|
||||
return knex.schema
|
||||
.alterTable("stat_daily", function (table) {
|
||||
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
|
||||
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
|
||||
})
|
||||
.alterTable("stat_minutely", function (table) {
|
||||
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
|
||||
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
|
||||
});
|
||||
|
||||
};
|
||||
|
||||
exports.down = function (knex) {
|
||||
return knex.schema
|
||||
.alterTable("stat_daily", function (table) {
|
||||
table.dropColumn("ping_min");
|
||||
table.dropColumn("ping_max");
|
||||
})
|
||||
.alterTable("stat_minutely", function (table) {
|
||||
table.dropColumn("ping_min");
|
||||
table.dropColumn("ping_max");
|
||||
});
|
||||
};
|
26
db/knex_migrations/2023-12-22-0000-hourly-uptime.js
Normal file
26
db/knex_migrations/2023-12-22-0000-hourly-uptime.js
Normal file
|
@ -0,0 +1,26 @@
|
|||
exports.up = function (knex) {
|
||||
return knex.schema
|
||||
.createTable("stat_hourly", function (table) {
|
||||
table.increments("id");
|
||||
table.comment("This table contains the hourly aggregate statistics for each monitor");
|
||||
table.integer("monitor_id").unsigned().notNullable()
|
||||
.references("id").inTable("monitor")
|
||||
.onDelete("CASCADE")
|
||||
.onUpdate("CASCADE");
|
||||
table.integer("timestamp")
|
||||
.notNullable()
|
||||
.comment("Unix timestamp rounded down to the nearest hour");
|
||||
table.float("ping").notNullable().comment("Average ping in milliseconds");
|
||||
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
|
||||
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
|
||||
table.smallint("up").notNullable();
|
||||
table.smallint("down").notNullable();
|
||||
|
||||
table.unique([ "monitor_id", "timestamp" ]);
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = function (knex) {
|
||||
return knex.schema
|
||||
.dropTable("stat_hourly");
|
||||
};
|
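The `stat_hourly.timestamp` column above is documented as a Unix timestamp rounded down to the nearest hour, so every heartbeat within the same hour aggregates into one row per monitor. A tiny sketch of that bucketing (a hypothetical helper, not the server's aggregation code):

function hourlyKey(date) {
    // Strip minutes and seconds so all heartbeats in the same hour share one key
    const seconds = Math.floor(date.getTime() / 1000);
    return seconds - (seconds % 3600);
}

console.log(hourlyKey(new Date("2023-12-22T10:37:12Z"))); // 1703239200 = 2023-12-22T10:00:00Z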
26
db/knex_migrations/2024-01-22-0000-stats-extras.js
Normal file
26
db/knex_migrations/2024-01-22-0000-stats-extras.js
Normal file
|
@ -0,0 +1,26 @@
|
|||
exports.up = function (knex) {
|
||||
return knex.schema
|
||||
.alterTable("stat_daily", function (table) {
|
||||
table.text("extras").defaultTo(null).comment("Extra statistics during this time period");
|
||||
})
|
||||
.alterTable("stat_minutely", function (table) {
|
||||
table.text("extras").defaultTo(null).comment("Extra statistics during this time period");
|
||||
})
|
||||
.alterTable("stat_hourly", function (table) {
|
||||
table.text("extras").defaultTo(null).comment("Extra statistics during this time period");
|
||||
});
|
||||
|
||||
};
|
||||
|
||||
exports.down = function (knex) {
|
||||
return knex.schema
|
||||
.alterTable("stat_daily", function (table) {
|
||||
table.dropColumn("extras");
|
||||
})
|
||||
.alterTable("stat_minutely", function (table) {
|
||||
table.dropColumn("extras");
|
||||
})
|
||||
.alterTable("stat_hourly", function (table) {
|
||||
table.dropColumn("extras");
|
||||
});
|
||||
};
|
34
db/old_migrations/patch-fix-kafka-producer-booleans.sql
Normal file
34
db/old_migrations/patch-fix-kafka-producer-booleans.sql
Normal file
|
@ -0,0 +1,34 @@
|
|||
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- Rename COLUMNs to another one (suffixed by `_old`)
|
||||
ALTER TABLE monitor
|
||||
RENAME COLUMN kafka_producer_ssl TO kafka_producer_ssl_old;
|
||||
|
||||
ALTER TABLE monitor
|
||||
RENAME COLUMN kafka_producer_allow_auto_topic_creation TO kafka_producer_allow_auto_topic_creation_old;
|
||||
|
||||
-- Add correct COLUMNs
|
||||
ALTER TABLE monitor
|
||||
ADD COLUMN kafka_producer_ssl BOOLEAN default 0 NOT NULL;
|
||||
|
||||
ALTER TABLE monitor
|
||||
ADD COLUMN kafka_producer_allow_auto_topic_creation BOOLEAN default 0 NOT NULL;
|
||||
|
||||
-- This SQL is still not fully safe. See https://github.com/louislam/uptime-kuma/issues/4039.
|
||||
|
||||
-- Bring the old values from the `_old` COLUMNs over to the correct ones
|
||||
-- UPDATE monitor SET kafka_producer_allow_auto_topic_creation = monitor.kafka_producer_allow_auto_topic_creation_old
|
||||
-- WHERE monitor.kafka_producer_allow_auto_topic_creation_old IS NOT NULL;
|
||||
|
||||
-- UPDATE monitor SET kafka_producer_ssl = monitor.kafka_producer_ssl_old
|
||||
-- WHERE monitor.kafka_producer_ssl_old IS NOT NULL;
|
||||
|
||||
-- Remove old COLUMNs
|
||||
ALTER TABLE monitor
|
||||
DROP COLUMN kafka_producer_allow_auto_topic_creation_old;
|
||||
|
||||
ALTER TABLE monitor
|
||||
DROP COLUMN kafka_producer_ssl_old;
|
||||
|
||||
COMMIT;
|
18
db/old_migrations/patch-monitor-tls-info-add-fk.sql
Normal file
18
db/old_migrations/patch-monitor-tls-info-add-fk.sql
Normal file
|
@ -0,0 +1,18 @@
|
|||
BEGIN TRANSACTION;
|
||||
|
||||
PRAGMA writable_schema = TRUE;
|
||||
|
||||
UPDATE
|
||||
SQLITE_MASTER
|
||||
SET
|
||||
sql = replace(sql,
|
||||
'monitor_id INTEGER NOT NULL',
|
||||
'monitor_id INTEGER NOT NULL REFERENCES [monitor] ([id]) ON DELETE CASCADE ON UPDATE CASCADE'
|
||||
)
|
||||
WHERE
|
||||
name = 'monitor_tls_info'
|
||||
AND type = 'table';
|
||||
|
||||
PRAGMA writable_schema = RESET;
|
||||
|
||||
COMMIT;
|
10
db/old_migrations/patch-notification-config.sql
Normal file
10
db/old_migrations/patch-notification-config.sql
Normal file
|
@ -0,0 +1,10 @@
|
|||
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- SQLite: Change the data type of the column "config" from VARCHAR to TEXT
|
||||
ALTER TABLE notification RENAME COLUMN config TO config_old;
|
||||
ALTER TABLE notification ADD COLUMN config TEXT;
|
||||
UPDATE notification SET config = config_old;
|
||||
ALTER TABLE notification DROP COLUMN config_old;
|
||||
|
||||
COMMIT;
|
7
db/old_migrations/patch-timeout.sql
Normal file
7
db/old_migrations/patch-timeout.sql
Normal file
|
@ -0,0 +1,7 @@
|
|||
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
UPDATE monitor SET timeout = (interval * 0.8)
|
||||
WHERE timeout IS NULL OR timeout <= 0;
|
||||
|
||||
COMMIT;
|
18
db/patch-monitor-tls-info-add-fk.sql
Normal file
18
db/patch-monitor-tls-info-add-fk.sql
Normal file
|
@ -0,0 +1,18 @@
|
|||
BEGIN TRANSACTION;
|
||||
|
||||
PRAGMA writable_schema = TRUE;
|
||||
|
||||
UPDATE
|
||||
SQLITE_MASTER
|
||||
SET
|
||||
sql = replace(sql,
|
||||
'monitor_id INTEGER NOT NULL',
|
||||
'monitor_id INTEGER NOT NULL REFERENCES [monitor] ([id]) ON DELETE CASCADE ON UPDATE CASCADE'
|
||||
)
|
||||
WHERE
|
||||
name = 'monitor_tls_info'
|
||||
AND type = 'table';
|
||||
|
||||
PRAGMA writable_schema = RESET;
|
||||
|
||||
COMMIT;
|
|
@ -1,15 +0,0 @@
|
|||
version: '3.8'
|
||||
|
||||
services:
|
||||
uptime-kuma:
|
||||
image: louislam/uptime-kuma:1
|
||||
container_name: uptime-kuma
|
||||
volumes:
|
||||
- uptime-kuma:/app/data
|
||||
ports:
|
||||
- "3001:3001" # <Host Port>:<Container Port>
|
||||
restart: always
|
||||
|
||||
volumes:
|
||||
uptime-kuma:
|
||||
|
|
@ -42,13 +42,20 @@ HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD ext
|
|||
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
|
||||
CMD ["node", "server/server.js"]
|
||||
|
||||
############################################
|
||||
# Rootless Image
|
||||
############################################
|
||||
FROM release AS rootless
|
||||
|
||||
############################################
|
||||
# Mark as Nightly
|
||||
############################################
|
||||
FROM release AS nightly
|
||||
USER node
|
||||
RUN npm run mark-as-nightly
|
||||
|
||||
FROM nightly AS nightly-rootless
|
||||
USER node
|
||||
|
||||
############################################
|
||||
# Build an image for testing pr
|
||||
############################################
|
||||
|
|
|
@ -37,7 +37,7 @@ const github = require("@actions/github");
|
|||
owner: issue.owner,
|
||||
repo: issue.repo,
|
||||
issue_number: issue.number,
|
||||
body: `@${username}: Hello! :wave:\n\nThis issue is being automatically closed because it does not follow the issue template. Please DO NOT open a blank issue.`
|
||||
body: `@${username}: Hello! :wave:\n\nThis issue is being automatically closed because it does not follow the issue template. Please **DO NOT open blank issues; use our [issue-templates](https://github.com/louislam/uptime-kuma/issues/new/choose) instead**.\nBlank issues do not contain the context necessary for a good discussion.`
|
||||
});
|
||||
|
||||
// Close the issue
|
||||
|
|
|
@ -5,7 +5,7 @@ const fs = require("fs");
|
|||
* or the `recursive` property removing completely in the future Node.js version.
|
||||
* See the link below.
|
||||
* @todo Once we drop support for Node.js v14 (or at least versions before v14.14.0), we can safely replace this function with `fs.rmSync`, since `fs.rmSync` was added in Node.js v14.14.0, we currently still support Node.js v14 versions older than v14.14.0, and this function has almost the same signature as `fs.rmSync`.
|
||||
* @link https://nodejs.org/docs/latest-v16.x/api/deprecations.html#dep0147-fsrmdirpath--recursive-true- the deprecation infomation of `fs.rmdirSync`
|
||||
* @link https://nodejs.org/docs/latest-v16.x/api/deprecations.html#dep0147-fsrmdirpath--recursive-true- the deprecation information of `fs.rmdirSync`
|
||||
* @link https://nodejs.org/docs/latest-v16.x/api/fs.html#fsrmsyncpath-options the document of `fs.rmSync`
|
||||
* @param {fs.PathLike} path Valid types for path values in "fs".
|
||||
* @param {fs.RmDirOptions} options Options for `fs.rmdirSync`; if `fs.rmSync` is available and the `recursive` property is true, the `force` property will automatically be set to `true`.
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
* ⚠️ Deprecated: Changed to healthcheck.go; it will be deleted in the future.
|
||||
* This script should be run after a period of time (180s), because the server may need some time to prepare.
|
||||
*/
|
||||
const { FBSD } = require("../server/util-server");
|
||||
const FBSD = /^freebsd/.test(process.platform);
|
||||
|
||||
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
|
||||
|
||||
|
|
|
@ -1,276 +0,0 @@
|
|||
// install.sh is generated by ./extra/install.batsh, do not modify it directly.
|
||||
// "npm run compile-install-script" to compile install.sh
|
||||
// The command is working on Windows PowerShell and Docker for Windows only.
|
||||
|
||||
|
||||
// curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
|
||||
println("=====================");
|
||||
println("Uptime Kuma Install Script");
|
||||
println("=====================");
|
||||
println("Supported OS: Ubuntu >= 16.04, Debian and CentOS/RHEL 7/8");
|
||||
println("---------------------------------------");
|
||||
println("This script is designed for Linux and basic usage.");
|
||||
println("For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation");
|
||||
println("---------------------------------------");
|
||||
println("");
|
||||
println("Local - Install Uptime Kuma on your current machine with git, Node.js and pm2");
|
||||
println("Docker - Install Uptime Kuma Docker container");
|
||||
println("");
|
||||
|
||||
if ("$1" != "") {
|
||||
type = "$1";
|
||||
} else {
|
||||
call("read", "-p", "Which installation method do you prefer? [DOCKER/local]: ", "type");
|
||||
}
|
||||
|
||||
defaultPort = "3001";
|
||||
|
||||
function checkNode() {
|
||||
bash("nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')");
|
||||
println("Node Version: " ++ nodeVersion);
|
||||
|
||||
if (nodeVersion <= "12") {
|
||||
println("Error: Required Node.js 14");
|
||||
call("exit", "1");
|
||||
}
|
||||
}
|
||||
|
||||
function deb() {
|
||||
bash("nodeCheck=$(node -v)");
|
||||
bash("apt --yes update");
|
||||
|
||||
if (nodeCheck != "") {
|
||||
checkNode();
|
||||
} else {
|
||||
|
||||
// Old nodejs binary name is "nodejs"
|
||||
bash("check=$(nodejs --version)");
|
||||
if (check != "") {
|
||||
println("Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old.");
|
||||
bash("exit 1");
|
||||
}
|
||||
|
||||
bash("curlCheck=$(curl --version)");
|
||||
if (curlCheck == "") {
|
||||
println("Installing Curl");
|
||||
bash("apt --yes install curl");
|
||||
}
|
||||
|
||||
println("Installing Node.js 16");
|
||||
bash("curl -sL https://deb.nodesource.com/setup_16.x | bash - > log.txt");
|
||||
bash("apt --yes install nodejs");
|
||||
bash("node -v");
|
||||
|
||||
bash("nodeCheckAgain=$(node -v)");
|
||||
|
||||
if (nodeCheckAgain == "") {
|
||||
println("Error during Node.js installation");
|
||||
bash("exit 1");
|
||||
}
|
||||
}
|
||||
|
||||
bash("check=$(git --version)");
|
||||
if (check == "") {
|
||||
println("Installing Git");
|
||||
bash("apt --yes install git");
|
||||
}
|
||||
}
|
||||
|
||||
if (type == "local") {
|
||||
defaultInstallPath = "/opt/uptime-kuma";
|
||||
|
||||
if (exists("/etc/redhat-release")) {
|
||||
os = call("cat", "/etc/redhat-release");
|
||||
distribution = "rhel";
|
||||
|
||||
} else if (exists("/etc/issue")) {
|
||||
bash("os=$(head -n1 /etc/issue | cut -f 1 -d ' ')");
|
||||
if (os == "Ubuntu") {
|
||||
distribution = "ubuntu";
|
||||
|
||||
// Get ubuntu version
|
||||
bash(". /etc/lsb-release");
|
||||
version = DISTRIB_RELEASE;
|
||||
}
|
||||
if (os == "Debian") {
|
||||
distribution = "debian";
|
||||
}
|
||||
}
|
||||
|
||||
bash("arch=$(uname -i)");
|
||||
|
||||
println("Your OS: " ++ os);
|
||||
println("Distribution: " ++ distribution);
|
||||
println("Version: " ++ version);
|
||||
println("Arch: " ++ arch);
|
||||
|
||||
if ("$3" != "") {
|
||||
port = "$3";
|
||||
} else {
|
||||
call("read", "-p", "Listening Port [$defaultPort]: ", "port");
|
||||
|
||||
if (port == "") {
|
||||
port = defaultPort;
|
||||
}
|
||||
}
|
||||
|
||||
if ("$2" != "") {
|
||||
installPath = "$2";
|
||||
} else {
|
||||
call("read", "-p", "Installation Path [$defaultInstallPath]: ", "installPath");
|
||||
|
||||
if (installPath == "") {
|
||||
installPath = defaultInstallPath;
|
||||
}
|
||||
}
|
||||
|
||||
// CentOS
|
||||
if (distribution == "rhel") {
|
||||
bash("nodeCheck=$(node -v)");
|
||||
|
||||
if (nodeCheck != "") {
|
||||
checkNode();
|
||||
} else {
|
||||
|
||||
bash("dnfCheck=$(dnf --version)");
|
||||
|
||||
// Use yum
|
||||
if (dnfCheck == "") {
|
||||
bash("curlCheck=$(curl --version)");
|
||||
if (curlCheck == "") {
|
||||
println("Installing Curl");
|
||||
bash("yum -y -q install curl");
|
||||
}
|
||||
|
||||
println("Installing Node.js 16");
|
||||
bash("curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt");
|
||||
bash("yum install -y -q nodejs");
|
||||
} else {
|
||||
bash("curlCheck=$(curl --version)");
|
||||
if (curlCheck == "") {
|
||||
println("Installing Curl");
|
||||
bash("dnf -y install curl");
|
||||
}
|
||||
|
||||
println("Installing Node.js 16");
|
||||
bash("curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt");
|
||||
bash("dnf install -y nodejs");
|
||||
}
|
||||
|
||||
|
||||
bash("node -v");
|
||||
|
||||
bash("nodeCheckAgain=$(node -v)");
|
||||
|
||||
if (nodeCheckAgain == "") {
|
||||
println("Error during Node.js installation");
|
||||
bash("exit 1");
|
||||
}
|
||||
}
|
||||
|
||||
bash("check=$(git --version)");
|
||||
if (check == "") {
|
||||
println("Installing Git");
|
||||
bash("yum -y -q install git");
|
||||
}
|
||||
|
||||
// Ubuntu
|
||||
} else if (distribution == "ubuntu") {
|
||||
deb();
|
||||
|
||||
// Debian
|
||||
} else if (distribution == "debian") {
|
||||
deb();
|
||||
|
||||
} else {
|
||||
// Unknown distribution
|
||||
error = 0;
|
||||
|
||||
bash("check=$(git --version)");
|
||||
if (check == "") {
|
||||
error = 1;
|
||||
println("Error: git is not found!");
|
||||
println("help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git");
|
||||
}
|
||||
|
||||
bash("check=$(node -v)");
|
||||
if (check == "") {
|
||||
error = 1;
|
||||
println("Error: node is not found");
|
||||
println("help: an installation guide is available at https://nodejs.org/en/download");
|
||||
}
|
||||
|
||||
if (error > 0) {
|
||||
println("Please install above missing software");
|
||||
bash("exit 1");
|
||||
}
|
||||
}
|
||||
|
||||
bash("check=$(pm2 --version)");
|
||||
if (check == "") {
|
||||
println("Installing PM2");
|
||||
bash("npm install pm2 -g && pm2 install pm2-logrotate");
|
||||
bash("pm2 startup");
|
||||
}
|
||||
|
||||
|
||||
// Check again
|
||||
bash("check=$(pm2 --version)");
|
||||
if (check == "") {
|
||||
println("Error: pm2 is not found!");
|
||||
println("help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/");
|
||||
bash("exit 1");
|
||||
}
|
||||
|
||||
bash("mkdir -p $installPath");
|
||||
bash("cd $installPath");
|
||||
bash("git clone https://github.com/louislam/uptime-kuma.git .");
|
||||
bash("npm run setup");
|
||||
|
||||
bash("pm2 start server/server.js --name uptime-kuma -- --port=$port");
|
||||
|
||||
} else {
|
||||
defaultVolume = "uptime-kuma";
|
||||
|
||||
bash("check=$(docker -v)");
|
||||
if (check == "") {
|
||||
println("Error: docker is not found!");
|
||||
println("help: an installation guide is available at https://docs.docker.com/desktop/");
|
||||
bash("exit 1");
|
||||
}
|
||||
|
||||
bash("check=$(docker info)");
|
||||
|
||||
bash("if [[ \"$check\" == *\"Is the docker daemon running\"* ]]; then
|
||||
\"echo\" \"Error: docker is not running\"
|
||||
\"echo\" \"help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/\"
|
||||
\"exit\" \"1\"
|
||||
fi");
|
||||
|
||||
if ("$3" != "") {
|
||||
port = "$3";
|
||||
} else {
|
||||
call("read", "-p", "Expose Port [$defaultPort]: ", "port");
|
||||
|
||||
if (port == "") {
|
||||
port = defaultPort;
|
||||
}
|
||||
}
|
||||
|
||||
if ("$2" != "") {
|
||||
volume = "$2";
|
||||
} else {
|
||||
call("read", "-p", "Volume Name [$defaultVolume]: ", "volume");
|
||||
|
||||
if (volume == "") {
|
||||
volume = defaultVolume;
|
||||
}
|
||||
}
|
||||
|
||||
println("Port: $port");
|
||||
println("Volume: $volume");
|
||||
bash("docker volume create $volume");
|
||||
bash("docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1");
|
||||
}
|
||||
|
||||
println("http://localhost:$port");
|
44
extra/reformat-changelog.js
Normal file
44
extra/reformat-changelog.js
Normal file
|
@ -0,0 +1,44 @@
|
|||
// Generate on GitHub
|
||||
const input = `
|
||||
* Add Korean translation by @Alanimdeo in https://github.com/louislam/dockge/pull/86
|
||||
`;
|
||||
|
||||
const template = `
|
||||
### 🆕 New Features
|
||||
|
||||
### 💇♀️ Improvements
|
||||
|
||||
### 🐞 Bug Fixes
|
||||
|
||||
### ⬆️ Security Fixes
|
||||
|
||||
### 🦎 Translation Contributions
|
||||
|
||||
### Others
|
||||
- Other small changes, code refactoring and comment/doc updates in this repo:
|
||||
`;
|
||||
|
||||
const lines = input.split("\n").filter((line) => line.trim() !== "");
|
||||
|
||||
for (const line of lines) {
|
||||
// Split the last " by "
|
||||
const usernamePullRequestURL = line.split(" by ").pop();
|
||||
|
||||
if (!usernamePullRequestURL) {
|
||||
console.log("Unable to parse", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
const [ username, pullRequestURL ] = usernamePullRequestURL.split(" in ");
|
||||
const pullRequestID = "#" + pullRequestURL.split("/").pop();
|
||||
let message = line.split(" by ").shift();
|
||||
|
||||
if (!message) {
|
||||
console.log("Unable to parse", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
message = message.split("* ").pop();
|
||||
console.log("-", pullRequestID, message, `(Thanks ${username})`);
|
||||
}
|
||||
console.log(template);
|
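To make the intent of the string juggling above concrete, here is the same split logic run on the sample entry from the hard-coded `input` (a standalone sketch; nothing is fetched from GitHub):

const sampleLine = "* Add Korean translation by @Alanimdeo in https://github.com/louislam/dockge/pull/86";

const parts = sampleLine.split(" by ");
const [ username, pullRequestURL ] = parts.pop().split(" in ");
const pullRequestID = "#" + pullRequestURL.split("/").pop();
const message = parts.shift().split("* ").pop();

console.log("-", pullRequestID, message, `(Thanks ${username})`);
// Prints: - #86 Add Korean translation (Thanks @Alanimdeo)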
6
extra/remove-playwright-test-data.js
Normal file
6
extra/remove-playwright-test-data.js
Normal file
|
@ -0,0 +1,6 @@
|
|||
const fs = require("fs");
|
||||
|
||||
fs.rmSync("./data/playwright-test", {
|
||||
recursive: true,
|
||||
force: true,
|
||||
});
|
|
@ -5,18 +5,25 @@ const { R } = require("redbean-node");
|
|||
const readline = require("readline");
|
||||
const { initJWTSecret } = require("../server/util-server");
|
||||
const User = require("../server/model/user");
|
||||
const { io } = require("socket.io-client");
|
||||
const { localWebSocketURL } = require("../server/config");
|
||||
const args = require("args-parser")(process.argv);
|
||||
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout
|
||||
});
|
||||
|
||||
const main = async () => {
|
||||
if ("dry-run" in args) {
|
||||
console.log("Dry run mode, no changes will be made.");
|
||||
}
|
||||
|
||||
console.log("Connecting the database");
|
||||
Database.initDataDir(args);
|
||||
await Database.connect(false, false, true);
|
||||
|
||||
try {
|
||||
Database.initDataDir(args);
|
||||
await Database.connect(false, false, true);
|
||||
// No need to actually reset the password for testing; just make sure there is no connection problem. It is ok for now.
|
||||
if (!process.env.TEST_BACKEND) {
|
||||
const user = await R.findOne("user");
|
||||
|
@ -27,21 +34,36 @@ const main = async () => {
|
|||
console.log("Found user: " + user.username);
|
||||
|
||||
while (true) {
|
||||
let password = await question("New Password: ");
|
||||
let confirmPassword = await question("Confirm New Password: ");
|
||||
let password;
|
||||
let confirmPassword;
|
||||
|
||||
// When called with the "--new-password" argument for unattended modification (e.g. npm run reset-password -- --new-password=secret)
|
||||
if ("new-password" in args) {
|
||||
console.log("Using password from argument");
|
||||
console.warn("\x1b[31m%s\x1b[0m", "Warning: the password might be stored, in plain text, in your shell's history");
|
||||
password = confirmPassword = args["new-password"] + "";
|
||||
} else {
|
||||
password = await question("New Password: ");
|
||||
confirmPassword = await question("Confirm New Password: ");
|
||||
}
|
||||
|
||||
if (password === confirmPassword) {
|
||||
if (!("dry-run" in args)) {
|
||||
await User.resetPassword(user.id, password);
|
||||
|
||||
// Reset all sessions by reset jwt secret
|
||||
await initJWTSecret();
|
||||
|
||||
// Disconnect all other socket clients of the user
|
||||
await disconnectAllSocketClients(user.username, password);
|
||||
}
|
||||
break;
|
||||
} else {
|
||||
console.log("Passwords do not match, please try again.");
|
||||
}
|
||||
}
|
||||
console.log("Password reset successfully.");
|
||||
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("Error: " + e.message);
|
||||
|
@ -66,6 +88,50 @@ function question(question) {
|
|||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Disconnect all socket clients of the user
|
||||
* @param {string} username Username
|
||||
* @param {string} password Password
|
||||
* @returns {Promise<void>} Promise
|
||||
*/
|
||||
function disconnectAllSocketClients(username, password) {
|
||||
return new Promise((resolve) => {
|
||||
console.log("Connecting to " + localWebSocketURL + " to disconnect all other socket clients");
|
||||
|
||||
// Disconnect all socket connections
|
||||
const socket = io(localWebSocketURL, {
|
||||
reconnection: false,
|
||||
timeout: 5000,
|
||||
});
|
||||
socket.on("connect", () => {
|
||||
socket.emit("login", {
|
||||
username,
|
||||
password,
|
||||
}, (res) => {
|
||||
if (res.ok) {
|
||||
console.log("Logged in.");
|
||||
socket.emit("disconnectOtherSocketClients");
|
||||
} else {
|
||||
console.warn("Login failed.");
|
||||
console.warn("Please restart the server to disconnect all sessions.");
|
||||
}
|
||||
socket.close();
|
||||
});
|
||||
});
|
||||
|
||||
socket.on("connect_error", function () {
|
||||
// The localWebSocketURL is not guaranteed to be working for some complicated Uptime Kuma setup
|
||||
// Ask the user to restart the server manually
|
||||
console.warn("Failed to connect to " + localWebSocketURL);
|
||||
console.warn("Please restart the server to disconnect all sessions manually.");
|
||||
resolve();
|
||||
});
|
||||
socket.on("disconnect", () => {
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
if (!process.env.TEST_BACKEND) {
|
||||
main();
|
||||
}
|
||||
|
|
228
install.sh
228
install.sh
|
@ -1,228 +0,0 @@
|
|||
# install.sh is generated by ./extra/install.batsh, do not modify it directly.
|
||||
# "npm run compile-install-script" to compile install.sh
|
||||
# The command is working on Windows PowerShell and Docker for Windows only.
|
||||
# curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
|
||||
"echo" "-e" "====================="
|
||||
"echo" "-e" "Uptime Kuma Install Script"
|
||||
"echo" "-e" "====================="
|
||||
"echo" "-e" "Supported OS: Ubuntu >= 16.04, Debian and CentOS/RHEL 7/8"
|
||||
"echo" "-e" "---------------------------------------"
|
||||
"echo" "-e" "This script is designed for Linux and basic usage."
|
||||
"echo" "-e" "For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation"
|
||||
"echo" "-e" "---------------------------------------"
|
||||
"echo" "-e" ""
|
||||
"echo" "-e" "Local - Install Uptime Kuma on your current machine with git, Node.js and pm2"
|
||||
"echo" "-e" "Docker - Install Uptime Kuma Docker container"
|
||||
"echo" "-e" ""
|
||||
if [ "$1" != "" ]; then
|
||||
type="$1"
|
||||
else
|
||||
"read" "-p" "Which installation method do you prefer? [DOCKER/local]: " "type"
|
||||
fi
|
||||
defaultPort="3001"
|
||||
function checkNode {
|
||||
local _0
|
||||
nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')
|
||||
"echo" "-e" "Node Version: ""$nodeVersion"
|
||||
_0="12"
|
||||
if [ $(($nodeVersion <= $_0)) == 1 ]; then
|
||||
"echo" "-e" "Error: Required Node.js 14"
|
||||
"exit" "1"
|
||||
fi
|
||||
}
|
||||
function deb {
|
||||
nodeCheck=$(node -v)
|
||||
apt --yes update
|
||||
if [ "$nodeCheck" != "" ]; then
|
||||
"checkNode"
|
||||
else
|
||||
# Old nodejs binary name is "nodejs"
|
||||
check=$(nodejs --version)
|
||||
if [ "$check" != "" ]; then
|
||||
"echo" "-e" "Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old."
|
||||
exit 1
|
||||
fi
|
||||
curlCheck=$(curl --version)
|
||||
if [ "$curlCheck" == "" ]; then
|
||||
"echo" "-e" "Installing Curl"
|
||||
apt --yes install curl
|
||||
fi
|
||||
"echo" "-e" "Installing Node.js 16"
|
||||
curl -sL https://deb.nodesource.com/setup_16.x | bash - > log.txt
|
||||
apt --yes install nodejs
|
||||
node -v
|
||||
nodeCheckAgain=$(node -v)
|
||||
if [ "$nodeCheckAgain" == "" ]; then
|
||||
"echo" "-e" "Error during Node.js installation"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
check=$(git --version)
|
||||
if [ "$check" == "" ]; then
|
||||
"echo" "-e" "Installing Git"
|
||||
apt --yes install git
|
||||
fi
|
||||
}
|
||||
if [ "$type" == "local" ]; then
|
||||
defaultInstallPath="/opt/uptime-kuma"
|
||||
if [ -e "/etc/redhat-release" ]; then
|
||||
os=$("cat" "/etc/redhat-release")
|
||||
distribution="rhel"
|
||||
else
|
||||
if [ -e "/etc/issue" ]; then
|
||||
os=$(head -n1 /etc/issue | cut -f 1 -d ' ')
|
||||
if [ "$os" == "Ubuntu" ]; then
|
||||
distribution="ubuntu"
|
||||
# Get ubuntu version
|
||||
. /etc/lsb-release
|
||||
version="$DISTRIB_RELEASE"
|
||||
fi
|
||||
if [ "$os" == "Debian" ]; then
|
||||
distribution="debian"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
arch=$(uname -i)
|
||||
"echo" "-e" "Your OS: ""$os"
|
||||
"echo" "-e" "Distribution: ""$distribution"
|
||||
"echo" "-e" "Version: ""$version"
|
||||
"echo" "-e" "Arch: ""$arch"
|
||||
if [ "$3" != "" ]; then
|
||||
port="$3"
|
||||
else
|
||||
"read" "-p" "Listening Port [$defaultPort]: " "port"
|
||||
if [ "$port" == "" ]; then
|
||||
port="$defaultPort"
|
||||
fi
|
||||
fi
|
||||
if [ "$2" != "" ]; then
|
||||
installPath="$2"
|
||||
else
|
||||
"read" "-p" "Installation Path [$defaultInstallPath]: " "installPath"
|
||||
if [ "$installPath" == "" ]; then
|
||||
installPath="$defaultInstallPath"
|
||||
fi
|
||||
fi
|
||||
# CentOS
|
||||
if [ "$distribution" == "rhel" ]; then
|
||||
nodeCheck=$(node -v)
|
||||
if [ "$nodeCheck" != "" ]; then
|
||||
"checkNode"
|
||||
else
|
||||
dnfCheck=$(dnf --version)
|
||||
# Use yum
|
||||
if [ "$dnfCheck" == "" ]; then
|
||||
curlCheck=$(curl --version)
|
||||
if [ "$curlCheck" == "" ]; then
|
||||
"echo" "-e" "Installing Curl"
|
||||
yum -y -q install curl
|
||||
fi
|
||||
"echo" "-e" "Installing Node.js 16"
|
||||
curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt
|
||||
yum install -y -q nodejs
|
||||
else
|
||||
curlCheck=$(curl --version)
|
||||
if [ "$curlCheck" == "" ]; then
|
||||
"echo" "-e" "Installing Curl"
|
||||
dnf -y install curl
|
||||
fi
|
||||
"echo" "-e" "Installing Node.js 16"
|
||||
curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt
|
||||
dnf install -y nodejs
|
||||
fi
|
||||
node -v
|
||||
nodeCheckAgain=$(node -v)
|
||||
if [ "$nodeCheckAgain" == "" ]; then
|
||||
"echo" "-e" "Error during Node.js installation"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
check=$(git --version)
|
||||
if [ "$check" == "" ]; then
|
||||
"echo" "-e" "Installing Git"
|
||||
yum -y -q install git
|
||||
fi
|
||||
# Ubuntu
|
||||
else
|
||||
if [ "$distribution" == "ubuntu" ]; then
|
||||
"deb"
|
||||
# Debian
|
||||
else
|
||||
if [ "$distribution" == "debian" ]; then
|
||||
"deb"
|
||||
else
|
||||
# Unknown distribution
|
||||
error=$((0))
|
||||
check=$(git --version)
|
||||
if [ "$check" == "" ]; then
|
||||
error=$((1))
|
||||
"echo" "-e" "Error: git is not found!"
|
||||
"echo" "-e" "help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git"
|
||||
fi
|
||||
check=$(node -v)
|
||||
if [ "$check" == "" ]; then
|
||||
error=$((1))
|
||||
"echo" "-e" "Error: node is not found"
|
||||
"echo" "-e" "help: an installation guide is available at https://nodejs.org/en/download"
|
||||
fi
|
||||
if [ $(($error > 0)) == 1 ]; then
|
||||
"echo" "-e" "Please install above missing software"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
check=$(pm2 --version)
|
||||
if [ "$check" == "" ]; then
|
||||
"echo" "-e" "Installing PM2"
|
||||
npm install pm2 -g && pm2 install pm2-logrotate
|
||||
pm2 startup
|
||||
fi
|
||||
# Check again
|
||||
check=$(pm2 --version)
|
||||
if [ "$check" == "" ]; then
|
||||
"echo" "-e" "Error: pm2 is not found!"
|
||||
"echo" "-e" "help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/"
|
||||
exit 1
|
||||
fi
|
||||
mkdir -p $installPath
|
||||
cd $installPath
|
||||
git clone https://github.com/louislam/uptime-kuma.git .
|
||||
npm run setup
|
||||
pm2 start server/server.js --name uptime-kuma -- --port=$port
|
||||
else
|
||||
defaultVolume="uptime-kuma"
|
||||
check=$(docker -v)
|
||||
if [ "$check" == "" ]; then
|
||||
"echo" "-e" "Error: docker is not found!"
|
||||
"echo" "-e" "help: an installation guide is available at https://docs.docker.com/desktop/"
|
||||
exit 1
|
||||
fi
|
||||
check=$(docker info)
|
||||
if [[ "$check" == *"Is the docker daemon running"* ]]; then
|
||||
"echo" "Error: docker is not running"
|
||||
"echo" "help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/"
|
||||
"exit" "1"
|
||||
fi
|
||||
if [ "$3" != "" ]; then
|
||||
port="$3"
|
||||
else
|
||||
"read" "-p" "Expose Port [$defaultPort]: " "port"
|
||||
if [ "$port" == "" ]; then
|
||||
port="$defaultPort"
|
||||
fi
|
||||
fi
|
||||
if [ "$2" != "" ]; then
|
||||
volume="$2"
|
||||
else
|
||||
"read" "-p" "Volume Name [$defaultVolume]: " "volume"
|
||||
if [ "$volume" == "" ]; then
|
||||
volume="$defaultVolume"
|
||||
fi
|
||||
fi
|
||||
"echo" "-e" "Port: $port"
|
||||
"echo" "-e" "Volume: $volume"
|
||||
docker volume create $volume
|
||||
docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1
|
||||
fi
|
||||
"echo" "-e" "http://localhost:$port"
|
13090
package-lock.json
generated
13090
package-lock.json
generated
File diff suppressed because it is too large
93
package.json
93
package.json
|
@ -1,35 +1,39 @@
|
|||
{
|
||||
"name": "uptime-kuma",
|
||||
"version": "1.23.2",
|
||||
"version": "2.0.0-dev",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/louislam/uptime-kuma.git"
|
||||
},
|
||||
"engines": {
|
||||
"node": "14 || 16 || 18 || >= 20.4.0"
|
||||
"node": "18 || >= 20.4.0"
|
||||
},
|
||||
"scripts": {
|
||||
"install-legacy": "npm install",
|
||||
"update-legacy": "npm update",
|
||||
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
|
||||
"lint:js-prod": "npm run lint:js -- --max-warnings 0",
|
||||
"lint-fix:js": "eslint --ext \".js,.vue\" --fix --ignore-path .gitignore .",
|
||||
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
|
||||
"lint-fix:style": "stylelint \"**/*.{vue,css,scss}\" --fix --ignore-path .gitignore",
|
||||
"lint": "npm run lint:js && npm run lint:style",
|
||||
"lint:prod": "npm run lint:js-prod && npm run lint:style",
|
||||
"dev": "concurrently -k -r \"wait-on tcp:3000 && npm run start-server-dev \" \"npm run start-frontend-dev\"",
|
||||
"start-frontend-dev": "cross-env NODE_ENV=development vite --host --config ./config/vite.config.js",
|
||||
"start-frontend-devcontainer": "cross-env NODE_ENV=development DEVCONTAINER=1 vite --host --config ./config/vite.config.js",
|
||||
"start": "npm run start-server",
|
||||
"start-server": "node server/server.js",
|
||||
"start-server-dev": "cross-env NODE_ENV=development node server/server.js",
|
||||
"start-server-dev:watch": "cross-env NODE_ENV=development node --watch server/server.js",
|
||||
"build": "vite build --config ./config/vite.config.js",
|
||||
"test": "node test/prepare-test-server.js && npm run test-backend",
|
||||
"test": "npm run test-backend && npm run test-e2e",
|
||||
"test-with-build": "npm run build && npm test",
|
||||
"test-backend": "node test/backend-test-entry.js && npm run jest-backend",
|
||||
"test-backend": "node test/backend-test-entry.js",
|
||||
"test-backend:14": "cross-env TEST_BACKEND=1 NODE_OPTIONS=\"--experimental-abortcontroller --no-warnings\" node--test test/backend-test",
|
||||
"test-backend:18": "cross-env TEST_BACKEND=1 node --test test/backend-test",
|
||||
"jest-backend": "cross-env TEST_BACKEND=1 jest --runInBand --detectOpenHandles --forceExit --config=./config/jest-backend.config.js",
|
||||
"test-e2e": "playwright test --config ./config/playwright.config.js",
|
||||
"test-e2e-ui": "playwright test --config ./config/playwright.config.js --ui --ui-port=51063",
|
||||
"playwright-codegen": "playwright codegen localhost:3000 --save-storage=./private/e2e-auth.json",
|
||||
"playwright-show-report": "playwright show-report ./private/playwright-report",
|
||||
"tsc": "tsc",
|
||||
"vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
|
||||
"build-docker": "npm run build && npm run build-docker-full && npm run build-docker-slim",
|
||||
|
@ -39,52 +43,46 @@
|
|||
"build-docker-slim": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-slim -t louislam/uptime-kuma:$VERSION-slim --target release --build-arg BASE_IMAGE=louislam/uptime-kuma:base2-slim . --push",
|
||||
"build-docker-full": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2 -t louislam/uptime-kuma:$VERSION --target release . --push",
|
||||
"build-docker-nightly": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly2 --target nightly . --push",
|
||||
"build-docker-slim-rootless": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-slim-rootless -t louislam/uptime-kuma:$VERSION-slim-rootless --target rootless --build-arg BASE_IMAGE=louislam/uptime-kuma:base2-slim . --push",
|
||||
"build-docker-full-rootless": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-rootless -t louislam/uptime-kuma:$VERSION-rootless --target rootless . --push",
|
||||
"build-docker-nightly-rootless": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly2-rootless --target nightly-rootless . --push",
|
||||
"build-docker-nightly-local": "npm run build && docker build -f docker/dockerfile -t louislam/uptime-kuma:nightly2 --target nightly .",
|
||||
"build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test2 --target pr-test2 . --push",
|
||||
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
|
||||
"setup": "git checkout 1.23.2 && npm ci --production && npm run download-dist",
|
||||
"setup": "git checkout 1.23.13 && npm ci --production && npm run download-dist",
|
||||
"download-dist": "node extra/download-dist.js",
|
||||
"mark-as-nightly": "node extra/mark-as-nightly.js",
|
||||
"reset-password": "node extra/reset-password.js",
|
||||
"remove-2fa": "node extra/remove-2fa.js",
|
||||
"compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1",
|
||||
"test-install-script-rockylinux": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/rockylinux.dockerfile .",
|
||||
"test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
|
||||
"test-install-script-debian": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian.dockerfile .",
|
||||
"test-install-script-debian-buster": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian-buster.dockerfile .",
|
||||
"test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
|
||||
"test-install-script-ubuntu1804": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1804.dockerfile .",
|
||||
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
|
||||
"simple-dns-server": "node extra/simple-dns-server.js",
|
||||
"simple-mqtt-server": "node extra/simple-mqtt-server.js",
|
||||
"simple-mongo": "docker run --rm -p 27017:27017 mongo",
|
||||
"simple-postgres": "docker run --rm -p 5432:5432 -e POSTGRES_PASSWORD=postgres postgres",
|
||||
"simple-mariadb": "docker run --rm -p 3306:3306 -e MYSQL_ROOT_PASSWORD=mariadb# mariadb",
|
||||
"update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
|
||||
"release-final": "node ./extra/test-docker.js && node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
|
||||
"release-beta": "node ./extra/test-docker.js && node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
|
||||
"git-remove-tag": "git tag -d",
|
||||
"build-dist-and-restart": "npm run build && npm run start-server-dev",
|
||||
"start-pr-test": "node extra/checkout-pr.js && npm install && npm run dev",
|
||||
"cy:test": "node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/ --e2e",
|
||||
"cy:run": "npx cypress run --browser chrome --headless --config-file ./config/cypress.config.js",
|
||||
"cy:run:unit": "npx cypress run --browser chrome --headless --config-file ./config/cypress.frontend.config.js",
|
||||
"cypress-open": "concurrently -k -r \"node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/\" \"cypress open --config-file ./config/cypress.config.js\"",
|
||||
"build-healthcheck-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./extra/healthcheck-armv7 ./extra/healthcheck.go",
|
||||
"deploy-demo-server": "node extra/deploy-demo-server.js",
|
||||
"sort-contributors": "node extra/sort-contributors.js",
|
||||
"quick-run-nightly": "docker run --rm --env NODE_ENV=development -p 3001:3001 louislam/uptime-kuma:nightly2",
|
||||
"start-dev-container": "cd docker && docker-compose -f docker-compose-dev.yml up --force-recreate",
|
||||
"rebase-pr-to-1.23.X": "node extra/rebase-pr.js 1.23.X"
|
||||
"rebase-pr-to-1.23.X": "node extra/rebase-pr.js 1.23.X",
|
||||
"start-server-node14-win": "private\\node14\\node.exe server/server.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@grpc/grpc-js": "~1.7.3",
|
||||
"@louislam/ping": "~0.4.4-mod.1",
|
||||
"@louislam/sqlite3": "15.1.6",
|
||||
"@vvo/tzdb": "^6.125.0",
|
||||
"args-parser": "~1.3.0",
|
||||
"axios": "~0.27.0",
|
||||
"axios": "~0.28.1",
|
||||
"axios-ntlm": "1.3.0",
|
||||
"badge-maker": "~3.3.1",
|
||||
"bcryptjs": "~2.4.3",
|
||||
"cacheable-lookup": "~6.0.4",
|
||||
"chardet": "~1.4.0",
|
||||
"check-password-strength": "^2.0.5",
|
||||
"cheerio": "~1.0.0-rc.12",
|
||||
|
@ -95,11 +93,13 @@
|
|||
"croner": "~6.0.5",
|
||||
"dayjs": "~1.11.5",
|
||||
"dotenv": "~16.0.3",
|
||||
"express": "~4.17.3",
|
||||
"express": "~4.19.2",
|
||||
"express-basic-auth": "~1.2.1",
|
||||
"express-static-gzip": "~2.1.7",
|
||||
"form-data": "~4.0.0",
|
||||
"gamedig": "~4.0.5",
|
||||
"gamedig": "^4.2.0",
|
||||
"html-escaper": "^3.0.3",
|
||||
"http-cookie-agent": "~5.0.4",
|
||||
"http-graceful-shutdown": "~3.1.7",
|
||||
"http-proxy-agent": "~5.0.0",
|
||||
"https-proxy-agent": "~5.0.1",
|
||||
|
@ -117,20 +117,21 @@
|
|||
"mongodb": "~4.17.1",
|
||||
"mqtt": "~4.3.7",
|
||||
"mssql": "~8.1.4",
|
||||
"mysql2": "~2.3.3",
|
||||
"mysql2": "~3.9.6",
|
||||
"nanoid": "~3.3.4",
|
||||
"node-cloudflared-tunnel": "~1.0.9",
|
||||
"node-radius-client": "~1.0.0",
|
||||
"nodemailer": "~6.6.5",
|
||||
"nodemailer": "~6.9.13",
|
||||
"nostr-tools": "^1.13.1",
|
||||
"notp": "~2.0.3",
|
||||
"openid-client": "^5.4.2",
|
||||
"password-hash": "~1.2.2",
|
||||
"pg": "~8.8.0",
|
||||
"pg-connection-string": "~2.5.0",
|
||||
"playwright-core": "~1.35.1",
|
||||
"pg": "~8.11.3",
|
||||
"pg-connection-string": "~2.6.2",
|
||||
"playwright-core": "~1.39.0",
|
||||
"prom-client": "~13.2.0",
|
||||
"prometheus-api-metrics": "~3.2.1",
|
||||
"promisify-child-process": "~4.1.2",
|
||||
"protobufjs": "~7.2.4",
|
||||
"qs": "~6.10.4",
|
||||
"redbean-node": "~0.3.0",
|
||||
|
@ -139,24 +140,26 @@
|
|||
"socket.io": "~4.6.1",
|
||||
"socket.io-client": "~4.6.1",
|
||||
"socks-proxy-agent": "6.1.1",
|
||||
"tar": "~6.1.11",
|
||||
"tar": "~6.2.1",
|
||||
"tcp-ping": "~0.1.1",
|
||||
"thirty-two": "~1.0.2",
|
||||
"tough-cookie": "~4.1.3",
|
||||
"ws": "^8.13.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@actions/github": "~5.0.1",
|
||||
"@babel/eslint-parser": "^7.22.7",
|
||||
"@babel/preset-env": "^7.15.8",
|
||||
"@actions/github": "~5.1.1",
|
||||
"@fortawesome/fontawesome-svg-core": "~1.2.36",
|
||||
"@fortawesome/free-regular-svg-icons": "~5.15.4",
|
||||
"@fortawesome/free-solid-svg-icons": "~5.15.4",
|
||||
"@fortawesome/vue-fontawesome": "~3.0.0-5",
|
||||
"@playwright/test": "~1.39.0",
|
||||
"@popperjs/core": "~2.10.2",
|
||||
"@types/bootstrap": "~5.1.9",
|
||||
"@vitejs/plugin-legacy": "~4.1.0",
|
||||
"@vitejs/plugin-vue": "~4.2.3",
|
||||
"@vue/compiler-sfc": "~3.3.4",
|
||||
"@types/node": "^20.8.6",
|
||||
"@typescript-eslint/eslint-plugin": "^6.7.5",
|
||||
"@typescript-eslint/parser": "^6.7.5",
|
||||
"@vitejs/plugin-vue": "~5.0.1",
|
||||
"@vue/compiler-sfc": "~3.4.2",
|
||||
"@vuepic/vue-datepicker": "~3.4.8",
|
||||
"aedes": "^0.46.3",
|
||||
"bootstrap": "5.1.3",
|
||||
|
@ -166,15 +169,14 @@
|
|||
"core-js": "~3.26.1",
|
||||
"cronstrue": "~2.24.0",
|
||||
"cross-env": "~7.0.3",
|
||||
"cypress": "^13.2.0",
|
||||
"delay": "^5.0.0",
|
||||
"dns2": "~2.0.1",
|
||||
"dompurify": "~2.4.3",
|
||||
"dompurify": "~3.0.11",
|
||||
"eslint": "~8.14.0",
|
||||
"eslint-plugin-jsdoc": "~46.4.6",
|
||||
"eslint-plugin-vue": "~8.7.1",
|
||||
"favico.js": "~0.3.10",
|
||||
"jest": "~29.6.1",
|
||||
"get-port-please": "^3.1.1",
|
||||
"marked": "~4.2.5",
|
||||
"node-ssh": "~13.1.0",
|
||||
"postcss-html": "~1.5.0",
|
||||
|
@ -188,13 +190,12 @@
|
|||
"stylelint-config-standard": "~25.0.0",
|
||||
"terser": "~5.15.0",
|
||||
"test": "~3.3.0",
|
||||
"timezones-list": "~3.0.1",
|
||||
"typescript": "~4.4.4",
|
||||
"v-pagination-3": "~0.1.7",
|
||||
"vite": "~4.4.1",
|
||||
"vite-plugin-commonjs": "^0.8.0",
|
||||
"vite": "~5.2.8",
|
||||
"vite-plugin-compression": "^0.5.1",
|
||||
"vue": "~3.3.4",
|
||||
"vite-plugin-vue-devtools": "^7.0.15",
|
||||
"vue": "~3.4.2",
|
||||
"vue-chartjs": "~5.2.0",
|
||||
"vue-confirm-dialog": "~1.0.2",
|
||||
"vue-contenteditable": "~3.0.4",
|
||||
|
@ -203,10 +204,10 @@
|
|||
"vue-multiselect": "~3.0.0-alpha.2",
|
||||
"vue-prism-editor": "~2.0.0-alpha.2",
|
||||
"vue-qrcode": "~1.0.0",
|
||||
"vue-router": "~4.0.14",
|
||||
"vue-router": "~4.2.5",
|
||||
"vue-toastification": "~2.0.0-rc.5",
|
||||
"vuedraggable": "~4.1.0",
|
||||
"wait-on": "^6.0.1",
|
||||
"wait-on": "^7.2.0",
|
||||
"whatwg-url": "~12.0.1"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,10 +1,9 @@
|
|||
<svg width="640" height="640" viewBox="0 0 640 640" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M490.4 235.64C544.09 358.38 544.09 435.34 490.4 466.5C409.85 513.24 199.96 527.49 139.54 455.64C99.2601 407.74 99.2601 334.4 139.54 235.64C180.5 168.18 238.71 134.45 314.17 134.45C389.64 134.45 448.38 168.18 490.4 235.64Z" fill="url(#paint0_linear_381_799)"/>
|
||||
<path d="M490.4 235.64C544.09 358.38 544.09 435.34 490.4 466.5C409.85 513.24 199.96 527.49 139.54 455.64C99.2601 407.74 99.2601 334.4 139.54 235.64C180.5 168.18 238.71 134.45 314.17 134.45C389.64 134.45 448.38 168.18 490.4 235.64Z" stroke="#F2F2F2" stroke-opacity="0.51" stroke-width="200"/>
|
||||
<defs>
|
||||
<linearGradient id="paint0_linear_381_799" x1="259.78" y1="261.15" x2="463.85" y2="456.49" gradientUnits="userSpaceOnUse">
|
||||
<svg width="640" height="640" viewBox="0 0 640 640" xml:space="preserve" xmlns="http://www.w3.org/2000/svg">
|
||||
<g transform="matrix(1 0 0 1 320 320)">
|
||||
<linearGradient id="S3" gradientUnits="userSpaceOnUse" gradientTransform="matrix(1 0 0 1 -319.99875 -320.0001577393)" x1="259.78" y1="261.15" x2="463.85" y2="456.49">
|
||||
<stop stop-color="#5CDD8B"/>
|
||||
<stop offset="1" stop-color="#86E6A9"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<path style="stroke: rgb(242,242,242); stroke-opacity: 0.51; stroke-width: 200; stroke-dasharray: none; stroke-linecap: butt; stroke-dashoffset: 0; stroke-linejoin: miter; stroke-miterlimit: 4; fill: url(#S3); fill-rule: nonzero; opacity: 1;" transform=" translate(0, 0)" d="M 170.40125 -84.36016 C 224.09125 38.37984 224.09125 115.33984 170.40125 146.49984 C 89.85125000000001 193.23984000000002 -120.03875 207.48984000000002 -180.45875 135.63984 C -220.73875 87.73983999999999 -220.73875 14.399839999999998 -180.45875 -84.36016000000001 C -139.49875 -151.82016 -81.28875000000001 -185.55016 -5.828750000000014 -185.55016 C 69.64124999999999 -185.55016 128.38125 -151.82016000000002 170.40124999999998 -84.36016000000001 z" stroke-linecap="round" />
|
||||
</g>
|
||||
</svg>
|
||||
|
|
Before Width: | Height: | Size: 893 B After Width: | Height: | Size: 1.1 KiB |
|
@ -130,7 +130,7 @@ function userAuthorizer(username, password, callback) {
|
|||
* @param {express.Request} req Express request object
|
||||
* @param {express.Response} res Express response object
|
||||
* @param {express.NextFunction} next Next handler in chain
|
||||
* @returns {void}
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
exports.basicAuth = async function (req, res, next) {
|
||||
const middleware = basicAuth({
|
||||
|
@ -153,7 +153,7 @@ exports.basicAuth = async function (req, res, next) {
|
|||
* @param {express.Request} req Express request object
|
||||
* @param {express.Response} res Express response object
|
||||
* @param {express.NextFunction} next Next handler in chain
|
||||
* @returns {void}
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
exports.apiAuth = async function (req, res, next) {
|
||||
if (!await Settings.get("disableAuth")) {
|
||||
|
|
|
@ -1,88 +0,0 @@
|
|||
const https = require("https");
|
||||
const http = require("http");
|
||||
const CacheableLookup = require("cacheable-lookup");
|
||||
const { Settings } = require("./settings");
|
||||
const { log } = require("../src/util");
|
||||
|
||||
class CacheableDnsHttpAgent {
|
||||
|
||||
static cacheable = new CacheableLookup();
|
||||
|
||||
static httpAgentList = {};
|
||||
static httpsAgentList = {};
|
||||
|
||||
static enable = false;
|
||||
|
||||
/**
|
||||
* Register/Disable cacheable to global agents
|
||||
* @returns {void}
|
||||
*/
|
||||
static async update() {
|
||||
log.debug("CacheableDnsHttpAgent", "update");
|
||||
let isEnable = await Settings.get("dnsCache");
|
||||
|
||||
if (isEnable !== this.enable) {
|
||||
log.debug("CacheableDnsHttpAgent", "value changed");
|
||||
|
||||
if (isEnable) {
|
||||
log.debug("CacheableDnsHttpAgent", "enable");
|
||||
this.cacheable.install(http.globalAgent);
|
||||
this.cacheable.install(https.globalAgent);
|
||||
} else {
|
||||
log.debug("CacheableDnsHttpAgent", "disable");
|
||||
this.cacheable.uninstall(http.globalAgent);
|
||||
this.cacheable.uninstall(https.globalAgent);
|
||||
}
|
||||
}
|
||||
|
||||
this.enable = isEnable;
|
||||
}
|
||||
|
||||
/**
|
||||
* Attach cacheable to HTTP agent
|
||||
* @param {http.Agent} agent Agent to install
|
||||
* @returns {void}
|
||||
*/
|
||||
static install(agent) {
|
||||
this.cacheable.install(agent);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {https.AgentOptions} agentOptions Options to pass to HTTPS agent
|
||||
* @returns {https.Agent} The new HTTPS agent
|
||||
*/
|
||||
static getHttpsAgent(agentOptions) {
|
||||
if (!this.enable) {
|
||||
return new https.Agent(agentOptions);
|
||||
}
|
||||
|
||||
let key = JSON.stringify(agentOptions);
|
||||
if (!(key in this.httpsAgentList)) {
|
||||
this.httpsAgentList[key] = new https.Agent(agentOptions);
|
||||
this.cacheable.install(this.httpsAgentList[key]);
|
||||
}
|
||||
return this.httpsAgentList[key];
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {http.AgentOptions} agentOptions Options to pass to the HTTP agent
|
||||
* @returns {https.Agents} The new HTTP agent
|
||||
*/
|
||||
static getHttpAgent(agentOptions) {
|
||||
if (!this.enable) {
|
||||
return new http.Agent(agentOptions);
|
||||
}
|
||||
|
||||
let key = JSON.stringify(agentOptions);
|
||||
if (!(key in this.httpAgentList)) {
|
||||
this.httpAgentList[key] = new http.Agent(agentOptions);
|
||||
this.cacheable.install(this.httpAgentList[key]);
|
||||
}
|
||||
return this.httpAgentList[key];
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
CacheableDnsHttpAgent,
|
||||
};
|
|
@ -8,6 +8,7 @@ const server = UptimeKumaServer.getInstance();
|
|||
const io = server.io;
|
||||
const { setting } = require("./util-server");
|
||||
const checkVersion = require("./check-version");
|
||||
const Database = require("./database");
|
||||
|
||||
/**
|
||||
* Send list of notification providers to client
|
||||
|
@ -144,17 +145,20 @@ async function sendInfo(socket, hideVersion = false) {
|
|||
let version;
|
||||
let latestVersion;
|
||||
let isContainer;
|
||||
let dbType;
|
||||
|
||||
if (!hideVersion) {
|
||||
version = checkVersion.version;
|
||||
latestVersion = checkVersion.latestVersion;
|
||||
isContainer = (process.env.UPTIME_KUMA_IS_CONTAINER === "1");
|
||||
dbType = Database.dbConfig.type;
|
||||
}
|
||||
|
||||
socket.emit("info", {
|
||||
version,
|
||||
latestVersion,
|
||||
isContainer,
|
||||
dbType,
|
||||
primaryBaseURL: await setting("primaryBaseURL"),
|
||||
serverTimezone: await server.getTimezone(),
|
||||
serverTimezoneOffset: server.getTimezoneOffset(),
|
||||
|
@ -185,6 +189,30 @@ async function sendDockerHostList(socket) {
|
|||
return list;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send list of docker hosts to client
|
||||
* @param {Socket} socket Socket.io socket instance
|
||||
* @returns {Promise<Bean[]>} List of docker hosts
|
||||
*/
|
||||
async function sendRemoteBrowserList(socket) {
|
||||
const timeLogger = new TimeLogger();
|
||||
|
||||
let result = [];
|
||||
let list = await R.find("remote_browser", " user_id = ? ", [
|
||||
socket.userID,
|
||||
]);
|
||||
|
||||
for (let bean of list) {
|
||||
result.push(bean.toJSON());
|
||||
}
|
||||
|
||||
io.to(socket.userID).emit("remoteBrowserList", result);
|
||||
|
||||
timeLogger.print("Send Remote Browser List");
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
sendNotificationList,
|
||||
sendImportantHeartbeatList,
|
||||
|
@ -192,5 +220,6 @@ module.exports = {
|
|||
sendProxyList,
|
||||
sendAPIKeyList,
|
||||
sendInfo,
|
||||
sendDockerHostList
|
||||
sendDockerHostList,
|
||||
sendRemoteBrowserList,
|
||||
};
|
||||
|
|
|
@ -1,29 +1,46 @@
|
|||
const isFreeBSD = /^freebsd/.test(process.platform);
|
||||
|
||||
// Interop with browser
|
||||
const args = (typeof process !== "undefined") ? require("args-parser")(process.argv) : {};
|
||||
const demoMode = args["demo"] || false;
|
||||
|
||||
const badgeConstants = {
|
||||
naColor: "#999",
|
||||
defaultUpColor: "#66c20a",
|
||||
defaultWarnColor: "#eed202",
|
||||
defaultDownColor: "#c2290a",
|
||||
defaultPendingColor: "#f8a306",
|
||||
defaultMaintenanceColor: "#1747f5",
|
||||
defaultPingColor: "blue", // as defined by badge-maker / shields.io
|
||||
defaultStyle: "flat",
|
||||
defaultPingValueSuffix: "ms",
|
||||
defaultPingLabelSuffix: "h",
|
||||
defaultUptimeValueSuffix: "%",
|
||||
defaultUptimeLabelSuffix: "h",
|
||||
defaultCertExpValueSuffix: " days",
|
||||
defaultCertExpLabelSuffix: "h",
|
||||
// Values Come From Default Notification Times
|
||||
defaultCertExpireWarnDays: "14",
|
||||
defaultCertExpireDownDays: "7"
|
||||
};
|
||||
// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
|
||||
// Dual-stack support for (::)
|
||||
// Also read HOST if not FreeBSD, as HOST is a system environment variable in FreeBSD
|
||||
let hostEnv = isFreeBSD ? null : process.env.HOST;
|
||||
const hostname = args.host || process.env.UPTIME_KUMA_HOST || hostEnv;
|
||||
|
||||
const port = [ args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001 ]
|
||||
.map(portValue => parseInt(portValue))
|
||||
.find(portValue => !isNaN(portValue));
|
||||
|
||||
const sslKey = args["ssl-key"] || process.env.UPTIME_KUMA_SSL_KEY || process.env.SSL_KEY || undefined;
|
||||
const sslCert = args["ssl-cert"] || process.env.UPTIME_KUMA_SSL_CERT || process.env.SSL_CERT || undefined;
|
||||
const sslKeyPassphrase = args["ssl-key-passphrase"] || process.env.UPTIME_KUMA_SSL_KEY_PASSPHRASE || process.env.SSL_KEY_PASSPHRASE || undefined;
|
||||
|
||||
const isSSL = sslKey && sslCert;
|
||||
|
||||
/**
|
||||
* Get the local WebSocket URL
|
||||
* @returns {string} The local WebSocket URL
|
||||
*/
|
||||
function getLocalWebSocketURL() {
|
||||
const protocol = isSSL ? "wss" : "ws";
|
||||
const host = hostname || "localhost";
|
||||
return `${protocol}://${host}:${port}`;
|
||||
}
|
||||
|
||||
const localWebSocketURL = getLocalWebSocketURL();
|
||||
|
||||
const demoMode = args["demo"] || false;
|
||||
|
||||
module.exports = {
|
||||
args,
|
||||
hostname,
|
||||
port,
|
||||
sslKey,
|
||||
sslCert,
|
||||
sslKeyPassphrase,
|
||||
isSSL,
|
||||
localWebSocketURL,
|
||||
demoMode,
|
||||
badgeConstants,
|
||||
};
|
||||
|
|
|
@ -12,22 +12,40 @@ const mysql = require("mysql2/promise");
|
|||
*/
|
||||
class Database {
|
||||
|
||||
/**
|
||||
* Bootstrap database for SQLite
|
||||
* @type {string}
|
||||
*/
|
||||
static templatePath = "./db/kuma.db";
|
||||
|
||||
/**
|
||||
* Data Dir (Default: ./data)
|
||||
* @type {string}
|
||||
*/
|
||||
static dataDir;
|
||||
|
||||
/**
|
||||
* User Upload Dir (Default: ./data/upload)
|
||||
* @type {string}
|
||||
*/
|
||||
static uploadDir;
|
||||
|
||||
/**
|
||||
* Chrome Screenshot Dir (Default: ./data/screenshots)
|
||||
* @type {string}
|
||||
*/
|
||||
static screenshotDir;
|
||||
|
||||
/**
|
||||
* SQLite file path (Default: ./data/kuma.db)
|
||||
* @type {string}
|
||||
*/
|
||||
static sqlitePath;
|
||||
|
||||
/**
|
||||
* For storing Docker TLS certs (Default: ./data/docker-tls)
|
||||
* @type {string}
|
||||
*/
|
||||
static dockerTLSDir;
|
||||
|
||||
/**
|
||||
|
@ -84,7 +102,11 @@ class Database {
|
|||
"patch-add-certificate-expiry-status-page.sql": true,
|
||||
"patch-monitor-oauth-cc.sql": true,
|
||||
"patch-add-timeout-monitor.sql": true,
|
||||
"patch-add-gamedig-given-port.sql": true, // The last file so far converted to a knex migration file
|
||||
"patch-add-gamedig-given-port.sql": true,
|
||||
"patch-notification-config.sql": true,
|
||||
"patch-fix-kafka-producer-booleans.sql": true,
|
||||
"patch-timeout.sql": true,
|
||||
"patch-monitor-tls-info-add-fk.sql": true, // The last file so far converted to a knex migration file
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -130,7 +152,7 @@ class Database {
|
|||
fs.mkdirSync(Database.dockerTLSDir, { recursive: true });
|
||||
}
|
||||
|
||||
log.info("db", `Data Dir: ${Database.dataDir}`);
|
||||
log.info("server", `Data Dir: ${Database.dataDir}`);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -187,9 +209,9 @@ class Database {
|
|||
let config = {};
|
||||
|
||||
let mariadbPoolConfig = {
|
||||
afterCreate: function (conn, done) {
|
||||
|
||||
}
|
||||
min: 0,
|
||||
max: 10,
|
||||
idleTimeoutMillis: 30000,
|
||||
};
|
||||
|
||||
log.info("db", `Database Type: ${dbConfig.type}`);
|
||||
|
@ -242,7 +264,14 @@ class Database {
|
|||
user: dbConfig.username,
|
||||
password: dbConfig.password,
|
||||
database: dbConfig.dbName,
|
||||
timezone: "+00:00",
|
||||
timezone: "Z",
|
||||
typeCast: function (field, next) {
|
||||
if (field.type === "DATETIME") {
|
||||
// Do not perform timezone conversion
|
||||
return field.string();
|
||||
}
|
||||
return next();
|
||||
},
|
||||
},
|
||||
pool: mariadbPoolConfig,
|
||||
};
|
||||
|
@ -256,6 +285,14 @@ class Database {
|
|||
socketPath: embeddedMariaDB.socketPath,
|
||||
user: "node",
|
||||
database: "kuma",
|
||||
timezone: "Z",
|
||||
typeCast: function (field, next) {
|
||||
if (field.type === "DATETIME") {
|
||||
// Do not perform timezone conversion
|
||||
return field.string();
|
||||
}
|
||||
return next();
|
||||
},
|
||||
},
|
||||
pool: mariadbPoolConfig,
|
||||
};
|
||||
|
@ -317,10 +354,10 @@ class Database {
|
|||
await R.exec("PRAGMA synchronous = NORMAL");
|
||||
|
||||
if (!noLog) {
|
||||
log.info("db", "SQLite config:");
|
||||
log.info("db", await R.getAll("PRAGMA journal_mode"));
|
||||
log.info("db", await R.getAll("PRAGMA cache_size"));
|
||||
log.info("db", "SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
|
||||
log.debug("db", "SQLite config:");
|
||||
log.debug("db", await R.getAll("PRAGMA journal_mode"));
|
||||
log.debug("db", await R.getAll("PRAGMA cache_size"));
|
||||
log.debug("db", "SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -342,7 +379,7 @@ class Database {
|
|||
|
||||
/**
|
||||
* Patch the database
|
||||
* @returns {void}
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async patch() {
|
||||
// Still need to keep this for old versions of Uptime Kuma
|
||||
|
@ -389,13 +426,15 @@ class Database {
|
|||
version = 0;
|
||||
}
|
||||
|
||||
if (version !== this.latestVersion) {
|
||||
log.info("db", "Your database version: " + version);
|
||||
log.info("db", "Latest database version: " + this.latestVersion);
|
||||
}
|
||||
|
||||
if (version === this.latestVersion) {
|
||||
log.info("db", "Database patch not needed");
|
||||
log.debug("db", "Database patch not needed");
|
||||
} else if (version > this.latestVersion) {
|
||||
log.info("db", "Warning: Database version is newer than expected");
|
||||
log.warn("db", "Warning: Database version is newer than expected");
|
||||
} else {
|
||||
log.info("db", "Database patch is needed");
|
||||
|
||||
|
@ -431,7 +470,7 @@ class Database {
|
|||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async patchSqlite2() {
|
||||
log.info("db", "Database Patch 2.0 Process");
|
||||
log.debug("db", "Database Patch 2.0 Process");
|
||||
let databasePatchedFiles = await setting("databasePatchedFiles");
|
||||
|
||||
if (! databasePatchedFiles) {
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
const axios = require("axios");
|
||||
const { R } = require("redbean-node");
|
||||
const version = require("../package.json").version;
|
||||
const https = require("https");
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const Database = require("./database");
|
||||
const { axiosAbortSignal } = require("./util-server");
|
||||
|
||||
class DockerHost {
|
||||
|
||||
|
@ -65,15 +65,16 @@ class DockerHost {
|
|||
/**
|
||||
* Fetches the amount of containers on the Docker host
|
||||
* @param {object} dockerHost Docker host to check for
|
||||
* @returns {number} Total amount of containers on the host
|
||||
* @returns {Promise<number>} Total amount of containers on the host
|
||||
*/
|
||||
static async testDockerHost(dockerHost) {
|
||||
const options = {
|
||||
url: "/containers/json?all=true",
|
||||
timeout: 5000,
|
||||
headers: {
|
||||
"Accept": "*/*",
|
||||
"User-Agent": "Uptime-Kuma/" + version
|
||||
},
|
||||
signal: axiosAbortSignal(6000),
|
||||
};
|
||||
|
||||
if (dockerHost.dockerType === "socket") {
|
||||
|
@ -83,6 +84,7 @@ class DockerHost {
|
|||
options.httpsAgent = new https.Agent(DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL));
|
||||
}
|
||||
|
||||
try {
|
||||
let res = await axios.request(options);
|
||||
|
||||
if (Array.isArray(res.data)) {
|
||||
|
@ -102,7 +104,13 @@ class DockerHost {
|
|||
} else {
|
||||
throw new Error("Invalid Docker response, is it Docker really a daemon?");
|
||||
}
|
||||
|
||||
} catch (e) {
|
||||
if (e.code === "ECONNABORTED" || e.name === "CanceledError") {
|
||||
throw new Error("Connection to Docker daemon timed out.");
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
const jsesc = require("jsesc");
|
||||
const { escape } = require("html-escaper");
|
||||
|
||||
/**
|
||||
* Returns a string that represents the javascript that is required to insert the Google Analytics scripts
|
||||
|
@ -7,15 +8,18 @@ const jsesc = require("jsesc");
|
|||
* @returns {string} HTML script tags to inject into page
|
||||
*/
|
||||
function getGoogleAnalyticsScript(tagId) {
|
||||
let escapedTagId = jsesc(tagId, { isScriptContext: true });
|
||||
let escapedTagIdJS = jsesc(tagId, { isScriptContext: true });
|
||||
|
||||
if (escapedTagId) {
|
||||
escapedTagId = escapedTagId.trim();
|
||||
if (escapedTagIdJS) {
|
||||
escapedTagIdJS = escapedTagIdJS.trim();
|
||||
}
|
||||
|
||||
// Escape the tag ID for use in an HTML attribute.
|
||||
let escapedTagIdHTMLAttribute = escape(tagId);
|
||||
|
||||
return `
|
||||
<script async src="https://www.googletagmanager.com/gtag/js?id=${escapedTagId}"></script>
|
||||
<script>window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date());gtag('config', '${escapedTagId}'); </script>
|
||||
<script async src="https://www.googletagmanager.com/gtag/js?id=${escapedTagIdHTMLAttribute}"></script>
|
||||
<script>window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date());gtag('config', '${escapedTagIdJS}'); </script>
|
||||
`;
|
||||
}
|
||||
|
||||
|
|
|
@ -9,7 +9,7 @@ class Group extends BeanModel {
|
|||
* @param {boolean} showTags Should the JSON include monitor tags
|
||||
* @param {boolean} certExpiry Should JSON include info about
|
||||
* certificate expiry?
|
||||
* @returns {object} Object ready to parse
|
||||
* @returns {Promise<object>} Object ready to parse
|
||||
*/
|
||||
async toPublicJSON(showTags = false, certExpiry = false) {
|
||||
let monitorBeanList = await this.getMonitorList();
|
||||
|
@ -29,7 +29,7 @@ class Group extends BeanModel {
|
|||
|
||||
/**
|
||||
* Get all monitors
|
||||
* @returns {Bean[]} List of monitors
|
||||
* @returns {Promise<Bean[]>} List of monitors
|
||||
*/
|
||||
async getMonitorList() {
|
||||
return R.convertToBeans("monitor", await R.getAll(`
|
||||
|
|
|
@ -29,13 +29,14 @@ class Heartbeat extends BeanModel {
|
|||
*/
|
||||
toJSON() {
|
||||
return {
|
||||
monitorID: this.monitor_id,
|
||||
status: this.status,
|
||||
time: this.time,
|
||||
msg: this.msg,
|
||||
ping: this.ping,
|
||||
important: this.important,
|
||||
duration: this.duration,
|
||||
monitorID: this._monitorId,
|
||||
status: this._status,
|
||||
time: this._time,
|
||||
msg: this._msg,
|
||||
ping: this._ping,
|
||||
important: this._important,
|
||||
duration: this._duration,
|
||||
retries: this._retries,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -11,7 +11,7 @@ class Maintenance extends BeanModel {
|
|||
/**
|
||||
* Return an object that is ready to parse to JSON for the public
|
||||
* Only show necessary data to public
|
||||
* @returns {object} Object ready to parse
|
||||
* @returns {Promise<object>} Object ready to parse
|
||||
*/
|
||||
async toPublicJSON() {
|
||||
|
||||
|
@ -98,7 +98,7 @@ class Maintenance extends BeanModel {
|
|||
/**
|
||||
* Return an object that is ready to parse to JSON
|
||||
* @param {string} timezone If not specified, the timeRange will be in UTC
|
||||
* @returns {object} Object ready to parse
|
||||
* @returns {Promise<object>} Object ready to parse
|
||||
*/
|
||||
async toJSON(timezone = null) {
|
||||
return this.toPublicJSON(timezone);
|
||||
|
@ -143,7 +143,7 @@ class Maintenance extends BeanModel {
|
|||
* Convert data from socket to bean
|
||||
* @param {Bean} bean Bean to fill in
|
||||
* @param {object} obj Data to fill bean with
|
||||
* @returns {Bean} Filled bean
|
||||
* @returns {Promise<Bean>} Filled bean
|
||||
*/
|
||||
static async jsonToBean(bean, obj) {
|
||||
if (obj.id) {
|
||||
|
@ -189,9 +189,9 @@ class Maintenance extends BeanModel {
|
|||
/**
|
||||
* Throw error if cron is invalid
|
||||
* @param {string|Date} cron Pattern or date
|
||||
* @returns {Promise<void>}
|
||||
* @returns {void}
|
||||
*/
|
||||
static async validateCron(cron) {
|
||||
static validateCron(cron) {
|
||||
let job = new Cron(cron, () => {});
|
||||
job.stop();
|
||||
}
|
||||
|
@ -324,7 +324,7 @@ class Maintenance extends BeanModel {
|
|||
|
||||
/**
|
||||
* Is this maintenance currently active
|
||||
* @returns {boolean} The maintenance is active?
|
||||
* @returns {Promise<boolean>} The maintenance is active?
|
||||
*/
|
||||
async isUnderMaintenance() {
|
||||
return (await this.getStatus()) === "under-maintenance";
|
||||
|
@ -332,7 +332,7 @@ class Maintenance extends BeanModel {
|
|||
|
||||
/**
|
||||
* Get the timezone of the maintenance
|
||||
* @returns {string} timezone
|
||||
* @returns {Promise<string>} timezone
|
||||
*/
|
||||
async getTimezone() {
|
||||
if (!this.timezone || this.timezone === "SAME_AS_SERVER") {
|
||||
|
@ -343,7 +343,7 @@ class Maintenance extends BeanModel {
|
|||
|
||||
/**
|
||||
* Get offset for timezone
|
||||
* @returns {string} offset
|
||||
* @returns {Promise<string>} offset
|
||||
*/
|
||||
async getTimezoneOffset() {
|
||||
return dayjs.tz(dayjs(), await this.getTimezone()).format("Z");
|
||||
|
@ -351,7 +351,7 @@ class Maintenance extends BeanModel {
|
|||
|
||||
/**
|
||||
* Get the current status of the maintenance
|
||||
* @returns {string} Current status
|
||||
* @returns {Promise<string>} Current status
|
||||
*/
|
||||
async getStatus() {
|
||||
if (!this.active) {
|
||||
|
|
|
@ -1,12 +1,11 @@
|
|||
const https = require("https");
|
||||
const dayjs = require("dayjs");
|
||||
const axios = require("axios");
|
||||
const { Prometheus } = require("../prometheus");
|
||||
const { log, UP, DOWN, PENDING, MAINTENANCE, flipStatus, MAX_INTERVAL_SECOND, MIN_INTERVAL_SECOND,
|
||||
SQL_DATETIME_FORMAT
|
||||
} = require("../../src/util");
|
||||
const { tcping, ping, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mysqlQuery, mqttAsync, setSetting, httpNtlm, radius, grpcQuery,
|
||||
redisPingAsync, mongodbPing, kafkaProducerAsync, getOidcTokenClientCredentials,
|
||||
const { tcping, ping, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mysqlQuery, setSetting, httpNtlm, radius, grpcQuery,
|
||||
redisPingAsync, kafkaProducerAsync, getOidcTokenClientCredentials, rootCertificatesFingerprints, axiosAbortSignal
|
||||
} = require("../util-server");
|
||||
const { R } = require("redbean-node");
|
||||
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||
|
@ -16,12 +15,18 @@ const { demoMode } = require("../config");
|
|||
const version = require("../../package.json").version;
|
||||
const apicache = require("../modules/apicache");
|
||||
const { UptimeKumaServer } = require("../uptime-kuma-server");
|
||||
const { CacheableDnsHttpAgent } = require("../cacheable-dns-http-agent");
|
||||
const { DockerHost } = require("../docker");
|
||||
const Gamedig = require("gamedig");
|
||||
const jsonata = require("jsonata");
|
||||
const jwt = require("jsonwebtoken");
|
||||
const crypto = require("crypto");
|
||||
const { UptimeCalculator } = require("../uptime-calculator");
|
||||
const { CookieJar } = require("tough-cookie");
|
||||
const { HttpsCookieAgent } = require("http-cookie-agent/http");
|
||||
const https = require("https");
|
||||
const http = require("http");
|
||||
|
||||
const rootCertificates = rootCertificatesFingerprints();
|
||||
|
||||
/**
|
||||
* status:
|
||||
|
@ -38,7 +43,7 @@ class Monitor extends BeanModel {
|
|||
* @param {boolean} showTags Include tags in JSON
|
||||
* @param {boolean} certExpiry Include certificate expiry info in
|
||||
* JSON
|
||||
* @returns {object} Object ready to parse
|
||||
* @returns {Promise<object>} Object ready to parse
|
||||
*/
|
||||
async toPublicJSON(showTags = false, certExpiry = false) {
|
||||
let obj = {
|
||||
|
@ -56,7 +61,7 @@ class Monitor extends BeanModel {
|
|||
obj.tags = await this.getTags();
|
||||
}
|
||||
|
||||
if (certExpiry && this.type === "http" && this.getURLProtocol() === "https:") {
|
||||
if (certExpiry && (this.type === "http" || this.type === "keyword" || this.type === "json-query") && this.getURLProtocol() === "https:") {
|
||||
const { certExpiryDaysRemaining, validCert } = await this.getCertExpiry(this.id);
|
||||
obj.certExpiryDaysRemaining = certExpiryDaysRemaining;
|
||||
obj.validCert = validCert;
|
||||
|
@ -69,7 +74,7 @@ class Monitor extends BeanModel {
|
|||
* Return an object that is ready to parse to JSON
|
||||
* @param {boolean} includeSensitiveData Include sensitive data in
|
||||
* JSON
|
||||
* @returns {object} Object ready to parse
|
||||
* @returns {Promise<object>} Object ready to parse
|
||||
*/
|
||||
async toJSON(includeSensitiveData = true) {
|
||||
|
||||
|
@ -91,11 +96,15 @@ class Monitor extends BeanModel {
|
|||
screenshot = "/screenshots/" + jwt.sign(this.id, UptimeKumaServer.getInstance().jwtSecret) + ".png";
|
||||
}
|
||||
|
||||
const path = await this.getPath();
|
||||
const pathName = path.join(" / ");
|
||||
|
||||
let data = {
|
||||
id: this.id,
|
||||
name: this.name,
|
||||
description: this.description,
|
||||
pathName: await this.getPathName(),
|
||||
path,
|
||||
pathName,
|
||||
parent: this.parent,
|
||||
childrenIDs: await Monitor.getAllChildrenIDs(this.id),
|
||||
url: this.url,
|
||||
|
@ -130,6 +139,7 @@ class Monitor extends BeanModel {
|
|||
maintenance: await Monitor.isUnderMaintenance(this.id),
|
||||
mqttTopic: this.mqttTopic,
|
||||
mqttSuccessMessage: this.mqttSuccessMessage,
|
||||
mqttCheckType: this.mqttCheckType,
|
||||
databaseQuery: this.databaseQuery,
|
||||
authMethod: this.authMethod,
|
||||
grpcUrl: this.grpcUrl,
|
||||
|
@ -146,10 +156,11 @@ class Monitor extends BeanModel {
|
|||
expectedValue: this.expectedValue,
|
||||
kafkaProducerTopic: this.kafkaProducerTopic,
|
||||
kafkaProducerBrokers: JSON.parse(this.kafkaProducerBrokers),
|
||||
kafkaProducerSsl: this.kafkaProducerSsl === "1" && true || false,
|
||||
kafkaProducerAllowAutoTopicCreation: this.kafkaProducerAllowAutoTopicCreation === "1" && true || false,
|
||||
kafkaProducerSsl: this.getKafkaProducerSsl(),
|
||||
kafkaProducerAllowAutoTopicCreation: this.getKafkaProducerAllowAutoTopicCreation(),
|
||||
kafkaProducerMessage: this.kafkaProducerMessage,
|
||||
screenshot,
|
||||
remote_browser: this.remote_browser,
|
||||
};
|
||||
|
||||
if (includeSensitiveData) {
|
||||
|
@ -234,12 +245,12 @@ class Monitor extends BeanModel {
|
|||
/**
|
||||
* Encode user and password to Base64 encoding
|
||||
* for HTTP "basic" auth, as per RFC-7617
|
||||
* @param {string} user Username to encode
|
||||
* @param {string} pass Password to encode
|
||||
* @returns {string} Encoded username:password
|
||||
* @param {string|null} user - The username (nullable if not changed by a user)
|
||||
* @param {string|null} pass - The password (nullable if not changed by a user)
|
||||
* @returns {string} Encoded Base64 string
|
||||
*/
|
||||
encodeBase64(user, pass) {
|
||||
return Buffer.from(user + ":" + pass).toString("base64");
|
||||
return Buffer.from(`${user || ""}:${pass || ""}`).toString("base64");
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -298,12 +309,28 @@ class Monitor extends BeanModel {
|
|||
return Boolean(this.gamedigGivenPortOnly);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse to boolean
|
||||
* @returns {boolean} Kafka Producer Ssl enabled?
|
||||
*/
|
||||
getKafkaProducerSsl() {
|
||||
return Boolean(this.kafkaProducerSsl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse to boolean
|
||||
* @returns {boolean} Kafka Producer Allow Auto Topic Creation Enabled?
|
||||
*/
|
||||
getKafkaProducerAllowAutoTopicCreation() {
|
||||
return Boolean(this.kafkaProducerAllowAutoTopicCreation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Start monitor
|
||||
* @param {Server} io Socket server instance
|
||||
* @returns {void}
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
start(io) {
|
||||
async start(io) {
|
||||
let previousBeat = null;
|
||||
let retries = 0;
|
||||
|
||||
|
@ -332,6 +359,9 @@ class Monitor extends BeanModel {
|
|||
previousBeat = await R.findOne("heartbeat", " monitor_id = ? ORDER BY time DESC", [
|
||||
this.id,
|
||||
]);
|
||||
if (previousBeat) {
|
||||
retries = previousBeat.retries;
|
||||
}
|
||||
}
|
||||
|
||||
const isFirstBeat = !previousBeat;
|
||||
|
@ -346,6 +376,12 @@ class Monitor extends BeanModel {
|
|||
bean.status = flipStatus(bean.status);
|
||||
}
|
||||
|
||||
// Runtime patch timeout if it is 0
|
||||
// See https://github.com/louislam/uptime-kuma/pull/3961#issuecomment-1804149144
|
||||
if (!this.timeout || this.timeout <= 0) {
|
||||
this.timeout = this.interval * 1000 * 0.8;
|
||||
}
|
||||
|
||||
try {
|
||||
if (await Monitor.isUnderMaintenance(this.id)) {
|
||||
bean.msg = "Monitor under maintenance";
|
||||
|
@ -401,9 +437,7 @@ class Monitor extends BeanModel {
|
|||
if (this.auth_method === "oauth2-cc") {
|
||||
try {
|
||||
if (this.oauthAccessToken === undefined || new Date(this.oauthAccessToken.expires_at * 1000) <= new Date()) {
|
||||
log.debug("monitor", `[${this.name}] The oauth access-token undefined or expired. Requesting a new one`);
|
||||
this.oauthAccessToken = await getOidcTokenClientCredentials(this.oauth_token_url, this.oauth_client_id, this.oauth_client_secret, this.oauth_scopes, this.oauth_auth_method);
|
||||
log.debug("monitor", `[${this.name}] Obtained oauth access-token. Expires at ${new Date(this.oauthAccessToken.expires_at * 1000)}`);
|
||||
this.oauthAccessToken = await this.makeOidcTokenClientCredentialsRequest();
|
||||
}
|
||||
oauth2AuthHeader = {
|
||||
"Authorization": this.oauthAccessToken.token_type + " " + this.oauthAccessToken.access_token,
|
||||
|
@ -416,6 +450,7 @@ class Monitor extends BeanModel {
|
|||
const httpsAgentOptions = {
|
||||
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
|
||||
rejectUnauthorized: !this.getIgnoreTls(),
|
||||
secureOptions: crypto.constants.SSL_OP_LEGACY_SERVER_CONNECT,
|
||||
};
|
||||
|
||||
log.debug("monitor", `[${this.name}] Prepare Options for axios`);
|
||||
|
@ -447,7 +482,6 @@ class Monitor extends BeanModel {
|
|||
timeout: this.timeout * 1000,
|
||||
headers: {
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
|
||||
"User-Agent": "Uptime-Kuma/" + version,
|
||||
...(contentType ? { "Content-Type": contentType } : {}),
|
||||
...(basicAuthHeader),
|
||||
...(oauth2AuthHeader),
|
||||
|
@ -457,6 +491,7 @@ class Monitor extends BeanModel {
|
|||
validateStatus: (status) => {
|
||||
return checkStatusCode(status, this.getAcceptedStatuscodes());
|
||||
},
|
||||
signal: axiosAbortSignal((this.timeout + 10) * 1000),
|
||||
};
|
||||
|
||||
if (bodyValue) {
|
||||
|
@ -478,7 +513,12 @@ class Monitor extends BeanModel {
|
|||
}
|
||||
|
||||
if (!options.httpsAgent) {
|
||||
options.httpsAgent = new https.Agent(httpsAgentOptions);
|
||||
let jar = new CookieJar();
|
||||
let httpsCookieAgentOptions = {
|
||||
...httpsAgentOptions,
|
||||
cookies: { jar }
|
||||
};
|
||||
options.httpsAgent = new HttpsCookieAgent(httpsCookieAgentOptions);
|
||||
}
|
||||
|
||||
if (this.auth_method === "mtls") {
|
||||
|
@ -493,6 +533,18 @@ class Monitor extends BeanModel {
|
|||
}
|
||||
}
|
||||
|
||||
let tlsInfo = {};
|
||||
// Store tlsInfo when secureConnect event is emitted
|
||||
// The keylog event listener is a workaround to access the tlsSocket
|
||||
options.httpsAgent.once("keylog", async (line, tlsSocket) => {
|
||||
tlsSocket.once("secureConnect", async () => {
|
||||
tlsInfo = checkCertificate(tlsSocket);
|
||||
tlsInfo.valid = tlsSocket.authorized || false;
|
||||
|
||||
await this.handleTlsInfo(tlsInfo);
|
||||
});
|
||||
});
|
||||
|
||||
log.debug("monitor", `[${this.name}] Axios Options: ${JSON.stringify(options)}`);
|
||||
log.debug("monitor", `[${this.name}] Axios Request`);
|
||||
|
||||
|
@ -502,30 +554,18 @@ class Monitor extends BeanModel {
|
|||
bean.msg = `${res.status} - ${res.statusText}`;
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
|
||||
// Check certificate if https is used
|
||||
let certInfoStartTime = dayjs().valueOf();
|
||||
if (this.getUrl()?.protocol === "https:") {
|
||||
log.debug("monitor", `[${this.name}] Check cert`);
|
||||
try {
|
||||
let tlsInfoObject = checkCertificate(res);
|
||||
tlsInfo = await this.updateTlsInfo(tlsInfoObject);
|
||||
// fallback if the keylog event is not emitted, but we may still have tlsInfo,
|
||||
// e.g. if the connection is made through a proxy
|
||||
if (this.getUrl()?.protocol === "https:" && tlsInfo.valid === undefined) {
|
||||
const tlsSocket = res.request.res.socket;
|
||||
|
||||
if (!this.getIgnoreTls() && this.isEnabledExpiryNotification()) {
|
||||
log.debug("monitor", `[${this.name}] call checkCertExpiryNotifications`);
|
||||
await this.checkCertExpiryNotifications(tlsInfoObject);
|
||||
}
|
||||
if (tlsSocket) {
|
||||
tlsInfo = checkCertificate(tlsSocket);
|
||||
tlsInfo.valid = tlsSocket.authorized || false;
|
||||
|
||||
} catch (e) {
|
||||
if (e.message !== "No TLS certificate in response") {
|
||||
log.error("monitor", "Caught error");
|
||||
log.error("monitor", e.message);
|
||||
await this.handleTlsInfo(tlsInfo);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (process.env.TIMELOGGER === "1") {
|
||||
log.debug("monitor", "Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms");
|
||||
}
|
||||
|
||||
if (process.env.UPTIME_KUMA_LOG_RESPONSE_BODY_MONITOR_ID === this.id) {
|
||||
log.info("monitor", res.data);
|
||||
|
@ -559,8 +599,12 @@ class Monitor extends BeanModel {
|
|||
let data = res.data;
|
||||
|
||||
// convert data to object
|
||||
if (typeof data === "string") {
|
||||
if (typeof data === "string" && res.headers["content-type"] !== "application/json") {
|
||||
try {
|
||||
data = JSON.parse(data);
|
||||
} catch (_) {
|
||||
// Failed to parse as JSON, just process it as a string
|
||||
}
|
||||
}
|
||||
|
||||
let expression = jsonata(this.jsonPath);
|
||||
|
@ -596,6 +640,7 @@ class Monitor extends BeanModel {
|
|||
// If the previous beat was down or pending we use the regular
|
||||
// beatInterval/retryInterval in the setTimeout further below
|
||||
if (previousBeat.status !== (this.isUpsideDown() ? DOWN : UP) || msSinceLastBeat > beatInterval * 1000 + bufferTime) {
|
||||
bean.duration = Math.round(msSinceLastBeat / 1000);
|
||||
throw new Error("No heartbeat in the time window");
|
||||
} else {
|
||||
let timeout = beatInterval * 1000 - msSinceLastBeat;
|
||||
|
@ -611,6 +656,7 @@ class Monitor extends BeanModel {
|
|||
return;
|
||||
}
|
||||
} else {
|
||||
bean.duration = beatInterval;
|
||||
throw new Error("No heartbeat in the time window");
|
||||
}
|
||||
|
||||
|
@ -627,13 +673,13 @@ class Monitor extends BeanModel {
|
|||
timeout: this.timeout * 1000,
|
||||
headers: {
|
||||
"Accept": "*/*",
|
||||
"User-Agent": "Uptime-Kuma/" + version,
|
||||
},
|
||||
httpsAgent: CacheableDnsHttpAgent.getHttpsAgent({
|
||||
httpsAgent: new https.Agent({
|
||||
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
|
||||
rejectUnauthorized: !this.getIgnoreTls(),
|
||||
secureOptions: crypto.constants.SSL_OP_LEGACY_SERVER_CONNECT,
|
||||
}),
|
||||
httpAgent: CacheableDnsHttpAgent.getHttpAgent({
|
||||
httpAgent: new http.Agent({
|
||||
maxCachedSessions: 0,
|
||||
}),
|
||||
maxRedirects: this.maxredirects,
|
||||
|
@ -674,29 +720,33 @@ class Monitor extends BeanModel {
|
|||
} else if (this.type === "docker") {
|
||||
log.debug("monitor", `[${this.name}] Prepare Options for Axios`);
|
||||
|
||||
const dockerHost = await R.load("docker_host", this.docker_host);
|
||||
|
||||
const options = {
|
||||
url: `/containers/${this.docker_container}/json`,
|
||||
timeout: this.interval * 1000 * 0.8,
|
||||
headers: {
|
||||
"Accept": "*/*",
|
||||
"User-Agent": "Uptime-Kuma/" + version,
|
||||
},
|
||||
httpsAgent: CacheableDnsHttpAgent.getHttpsAgent({
|
||||
httpsAgent: new https.Agent({
|
||||
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
|
||||
rejectUnauthorized: !this.getIgnoreTls(),
|
||||
secureOptions: crypto.constants.SSL_OP_LEGACY_SERVER_CONNECT,
|
||||
}),
|
||||
httpAgent: CacheableDnsHttpAgent.getHttpAgent({
|
||||
httpAgent: new http.Agent({
|
||||
maxCachedSessions: 0,
|
||||
}),
|
||||
};
|
||||
|
||||
const dockerHost = await R.load("docker_host", this.docker_host);
|
||||
|
||||
if (!dockerHost) {
|
||||
throw new Error("Failed to load docker host config");
|
||||
}
|
||||
|
||||
if (dockerHost._dockerType === "socket") {
|
||||
options.socketPath = dockerHost._dockerDaemon;
|
||||
} else if (dockerHost._dockerType === "tcp") {
|
||||
options.baseURL = DockerHost.patchDockerURL(dockerHost._dockerDaemon);
|
||||
options.httpsAgent = CacheableDnsHttpAgent.getHttpsAgent(
|
||||
options.httpsAgent = new https.Agent(
|
||||
DockerHost.getHttpsAgentOptions(dockerHost._dockerType, options.baseURL)
|
||||
);
|
||||
}
|
||||
|
@ -715,18 +765,10 @@ class Monitor extends BeanModel {
|
|||
} else {
|
||||
throw Error("Container State is " + res.data.State.Status);
|
||||
}
|
||||
} else if (this.type === "mqtt") {
|
||||
bean.msg = await mqttAsync(this.hostname, this.mqttTopic, this.mqttSuccessMessage, {
|
||||
port: this.port,
|
||||
username: this.mqttUsername,
|
||||
password: this.mqttPassword,
|
||||
interval: this.interval,
|
||||
});
|
||||
bean.status = UP;
|
||||
} else if (this.type === "sqlserver") {
|
||||
let startTime = dayjs().valueOf();
|
||||
|
||||
await mssqlQuery(this.databaseConnectionString, this.databaseQuery);
|
||||
await mssqlQuery(this.databaseConnectionString, this.databaseQuery || "SELECT 1");
|
||||
|
||||
bean.msg = "";
|
||||
bean.status = UP;
|
||||
|
@ -765,7 +807,7 @@ class Monitor extends BeanModel {
|
|||
} else if (this.type === "postgres") {
|
||||
let startTime = dayjs().valueOf();
|
||||
|
||||
await postgresQuery(this.databaseConnectionString, this.databaseQuery);
|
||||
await postgresQuery(this.databaseConnectionString, this.databaseQuery || "SELECT 1");
|
||||
|
||||
bean.msg = "";
|
||||
bean.status = UP;
|
||||
|
@ -773,18 +815,13 @@ class Monitor extends BeanModel {
|
|||
} else if (this.type === "mysql") {
|
||||
let startTime = dayjs().valueOf();
|
||||
|
||||
bean.msg = await mysqlQuery(this.databaseConnectionString, this.databaseQuery);
|
||||
// Use `radius_password` as `password` field, since there are too many unnecessary fields
|
||||
// TODO: rename `radius_password` to `password` later for general use
|
||||
let mysqlPassword = this.radiusPassword;
|
||||
|
||||
bean.msg = await mysqlQuery(this.databaseConnectionString, this.databaseQuery || "SELECT 1", mysqlPassword);
|
||||
bean.status = UP;
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
} else if (this.type === "mongodb") {
|
||||
let startTime = dayjs().valueOf();
|
||||
|
||||
await mongodbPing(this.databaseConnectionString);
|
||||
|
||||
bean.msg = "";
|
||||
bean.status = UP;
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
|
||||
} else if (this.type === "radius") {
|
||||
let startTime = dayjs().valueOf();
|
||||
|
||||
|
@ -815,7 +852,7 @@ class Monitor extends BeanModel {
|
|||
} else if (this.type === "redis") {
|
||||
let startTime = dayjs().valueOf();
|
||||
|
||||
bean.msg = await redisPingAsync(this.databaseConnectionString);
|
||||
bean.msg = await redisPingAsync(this.databaseConnectionString, !this.ignoreTls);
|
||||
bean.status = UP;
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
|
||||
|
@ -861,7 +898,11 @@ class Monitor extends BeanModel {
|
|||
|
||||
} catch (error) {
|
||||
|
||||
if (error?.name === "CanceledError") {
|
||||
bean.msg = `timeout by AbortSignal (${this.timeout}s)`;
|
||||
} else {
|
||||
bean.msg = error.message;
|
||||
}
|
||||
|
||||
// If UP come in here, it must be upside down mode
|
||||
// Just reset the retries
|
||||
|
@ -871,9 +912,14 @@ class Monitor extends BeanModel {
|
|||
} else if ((this.maxretries > 0) && (retries < this.maxretries)) {
|
||||
retries++;
|
||||
bean.status = PENDING;
|
||||
} else {
|
||||
// Continue counting retries during DOWN
|
||||
retries++;
|
||||
}
|
||||
}
|
||||
|
||||
bean.retries = retries;
|
||||
|
||||
log.debug("monitor", `[${this.name}] Check isImportant`);
|
||||
let isImportant = Monitor.isImportantBeat(isFirstBeat, previousBeat?.status, bean.status);
|
||||
|
||||
|
@ -896,7 +942,7 @@ class Monitor extends BeanModel {
|
|||
log.debug("monitor", `[${this.name}] apicache clear`);
|
||||
apicache.clear();
|
||||
|
||||
UptimeKumaServer.getInstance().sendMaintenanceListByUserID(this.user_id);
|
||||
await UptimeKumaServer.getInstance().sendMaintenanceListByUserID(this.user_id);
|
||||
|
||||
} else {
|
||||
bean.important = false;
|
||||
|
@ -1016,27 +1062,44 @@ class Monitor extends BeanModel {
|
|||
}
|
||||
|
||||
return res;
|
||||
} catch (e) {
|
||||
} catch (error) {
|
||||
|
||||
/**
|
||||
* Make a single attempt to obtain a new access token in the event that
|
||||
* the recent API request failed for authentication purposes
|
||||
*/
|
||||
if (this.auth_method === "oauth2-cc" && error.response.status === 401 && !finalCall) {
|
||||
this.oauthAccessToken = await this.makeOidcTokenClientCredentialsRequest();
|
||||
let oauth2AuthHeader = {
|
||||
"Authorization": this.oauthAccessToken.token_type + " " + this.oauthAccessToken.access_token,
|
||||
};
|
||||
options.headers = { ...(options.headers),
|
||||
...(oauth2AuthHeader)
|
||||
};
|
||||
|
||||
return this.makeAxiosRequest(options, true);
|
||||
}
|
||||
|
||||
// Fix #2253
|
||||
// Read more: https://stackoverflow.com/questions/1759956/curl-error-18-transfer-closed-with-outstanding-read-data-remaining
|
||||
if (!finalCall && typeof e.message === "string" && e.message.includes("maxContentLength size of -1 exceeded")) {
|
||||
if (!finalCall && typeof error.message === "string" && error.message.includes("maxContentLength size of -1 exceeded")) {
|
||||
log.debug("monitor", "makeAxiosRequest with gzip");
|
||||
options.headers["Accept-Encoding"] = "gzip, deflate";
|
||||
return this.makeAxiosRequest(options, true);
|
||||
} else {
|
||||
if (typeof e.message === "string" && e.message.includes("maxContentLength size of -1 exceeded")) {
|
||||
e.message = "response timeout: incomplete response within a interval";
|
||||
if (typeof error.message === "string" && error.message.includes("maxContentLength size of -1 exceeded")) {
|
||||
error.message = "response timeout: incomplete response within a interval";
|
||||
}
|
||||
throw e;
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop monitor
|
||||
* @returns {void}
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
stop() {
|
||||
async stop() {
|
||||
clearTimeout(this.heartbeatInterval);
|
||||
this.isStop = true;
|
||||
|
||||
|
@ -1137,7 +1200,7 @@ class Monitor extends BeanModel {
|
|||
if (hasClients) {
|
||||
// Send 24 hour average ping
|
||||
let data24h = await uptimeCalculator.get24Hour();
|
||||
io.to(userID).emit("avgPing", monitorID, (data24h.avgPing) ? data24h.avgPing.toFixed(2) : null);
|
||||
io.to(userID).emit("avgPing", monitorID, (data24h.avgPing) ? Number(data24h.avgPing.toFixed(2)) : null);
|
||||
|
||||
// Send 24 hour uptime
|
||||
io.to(userID).emit("uptime", monitorID, 24, data24h.uptime);
|
||||
|
@ -1309,7 +1372,7 @@ class Monitor extends BeanModel {
|
|||
let notifyDays = await setting("tlsExpiryNotifyDays");
|
||||
if (notifyDays == null || !Array.isArray(notifyDays)) {
|
||||
// Reset Default
|
||||
setSetting("tlsExpiryNotifyDays", [ 7, 14, 21 ], "general");
|
||||
await setSetting("tlsExpiryNotifyDays", [ 7, 14, 21 ], "general");
|
||||
notifyDays = [ 7, 14, 21 ];
|
||||
}
|
||||
|
||||
|
@ -1318,7 +1381,10 @@ class Monitor extends BeanModel {
|
|||
let certInfo = tlsInfoObject.certInfo;
|
||||
while (certInfo) {
|
||||
let subjectCN = certInfo.subject["CN"];
|
||||
if (certInfo.daysRemaining > targetDays) {
|
||||
if (rootCertificates.has(certInfo.fingerprint256)) {
|
||||
log.debug("monitor", `Known root cert: ${certInfo.certType} certificate "${subjectCN}" (${certInfo.daysRemaining} days valid) on ${targetDays} deadline.`);
|
||||
break;
|
||||
} else if (certInfo.daysRemaining > targetDays) {
|
||||
log.debug("monitor", `No need to send cert notification for ${certInfo.certType} certificate "${subjectCN}" (${certInfo.daysRemaining} days valid) on ${targetDays} deadline.`);
|
||||
} else {
|
||||
log.debug("monitor", `call sendCertNotificationByTargetDays for ${targetDays} deadline on certificate ${subjectCN}.`);
|
||||
|
@ -1384,10 +1450,7 @@ class Monitor extends BeanModel {
|
|||
* @returns {Promise<LooseObject<any>>} Previous heartbeat
|
||||
*/
|
||||
static async getPreviousHeartbeat(monitorID) {
|
||||
return await R.getRow(`
|
||||
SELECT ping, status, time FROM heartbeat
|
||||
WHERE id = (select MAX(id) from heartbeat where monitor_id = ?)
|
||||
`, [
|
||||
return await R.findOne("heartbeat", " id = (select MAX(id) from heartbeat where monitor_id = ?)", [
|
||||
monitorID
|
||||
]);
|
||||
}
|
||||
|
@ -1463,11 +1526,11 @@ class Monitor extends BeanModel {
|
|||
}
|
||||
|
||||
/**
|
||||
* Gets Full Path-Name (Groups and Name)
|
||||
* @returns {Promise<string>} Full path name of this monitor
|
||||
* Gets the full path
|
||||
* @returns {Promise<string[]>} Full path (includes groups and the name) of the monitor
|
||||
*/
|
||||
async getPathName() {
|
||||
let path = this.name;
|
||||
async getPath() {
|
||||
const path = [ this.name ];
|
||||
|
||||
if (this.parent === null) {
|
||||
return path;
|
||||
|
@ -1475,7 +1538,7 @@ class Monitor extends BeanModel {
|
|||
|
||||
let parent = await Monitor.getParent(this.id);
|
||||
while (parent !== null) {
|
||||
path = `${parent.name} / ${path}`;
|
||||
path.unshift(parent.name);
|
||||
parent = await Monitor.getParent(parent.id);
|
||||
}
|
||||
|
||||
|
@ -1505,7 +1568,7 @@ class Monitor extends BeanModel {
|
|||
}
|
||||
|
||||
/**
|
||||
* Unlinks all children of the the group monitor
|
||||
* Unlinks all children of the group monitor
|
||||
* @param {number} groupID ID of group to remove children of
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
|
@ -1530,6 +1593,37 @@ class Monitor extends BeanModel {
|
|||
const parentActive = await Monitor.isParentActive(parent.id);
|
||||
return parent.active && parentActive;
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtains a new Oidc Token
|
||||
* @returns {Promise<object>} OAuthProvider client
|
||||
*/
|
||||
async makeOidcTokenClientCredentialsRequest() {
|
||||
log.debug("monitor", `[${this.name}] The oauth access-token undefined or expired. Requesting a new token`);
|
||||
const oAuthAccessToken = await getOidcTokenClientCredentials(this.oauth_token_url, this.oauth_client_id, this.oauth_client_secret, this.oauth_scopes, this.oauth_auth_method);
|
||||
if (this.oauthAccessToken?.expires_at) {
|
||||
log.debug("monitor", `[${this.name}] Obtained oauth access-token. Expires at ${new Date(this.oauthAccessToken?.expires_at * 1000)}`);
|
||||
} else {
|
||||
log.debug("monitor", `[${this.name}] Obtained oauth access-token. Time until expiry was not provided`);
|
||||
}
|
||||
|
||||
return oAuthAccessToken;
|
||||
}
|
||||
|
||||
/**
|
||||
* Store TLS certificate information and check for expiry
|
||||
* @param {object} tlsInfo Information about the TLS connection
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async handleTlsInfo(tlsInfo) {
|
||||
await this.updateTlsInfo(tlsInfo);
|
||||
this.prometheus?.update(null, tlsInfo);
|
||||
|
||||
if (!this.getIgnoreTls() && this.isEnabledExpiryNotification()) {
|
||||
log.debug("monitor", `[${this.name}] call checkCertExpiryNotifications`);
|
||||
await this.checkCertExpiryNotifications(tlsInfo);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Monitor;
|
||||
|
|
17
server/model/remote_browser.js
Normal file
|
@ -0,0 +1,17 @@
|
|||
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||
|
||||
class RemoteBrowser extends BeanModel {
|
||||
/**
|
||||
* Returns an object that is ready to parse to JSON
|
||||
* @returns {object} Object ready to parse
|
||||
*/
|
||||
toJSON() {
|
||||
return {
|
||||
id: this.id,
|
||||
url: this.url,
|
||||
name: this.name,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = RemoteBrowser;
|
|
@ -18,9 +18,15 @@ class StatusPage extends BeanModel {
|
|||
* @param {Response} response Response object
|
||||
* @param {string} indexHTML HTML to render
|
||||
* @param {string} slug Status page slug
|
||||
* @returns {void}
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async handleStatusPageResponse(response, indexHTML, slug) {
|
||||
// Handle url with trailing slash (http://localhost:3001/status/)
|
||||
// The slug comes from the route "/status/:slug". If the slug is empty, express converts it to "index.html"
|
||||
if (slug === "index.html") {
|
||||
slug = "default";
|
||||
}
|
||||
|
||||
let statusPage = await R.findOne("status_page", " slug = ? ", [
|
||||
slug
|
||||
]);
|
||||
|
@ -36,7 +42,7 @@ class StatusPage extends BeanModel {
|
|||
* SSR for status pages
|
||||
* @param {string} indexHTML HTML page to render
|
||||
* @param {StatusPage} statusPage Status page populate HTML with
|
||||
* @returns {void}
|
||||
* @returns {Promise<string>} the rendered html
|
||||
*/
|
||||
static async renderHTML(indexHTML, statusPage) {
|
||||
const $ = cheerio.load(indexHTML);
|
||||
|
@ -232,6 +238,7 @@ class StatusPage extends BeanModel {
|
|||
description: this.description,
|
||||
icon: this.getIcon(),
|
||||
theme: this.theme,
|
||||
autoRefreshInterval: this.autoRefreshInterval,
|
||||
published: !!this.published,
|
||||
showTags: !!this.show_tags,
|
||||
domainNameList: this.getDomainNameList(),
|
||||
|
@ -254,6 +261,7 @@ class StatusPage extends BeanModel {
|
|||
title: this.title,
|
||||
description: this.description,
|
||||
icon: this.getIcon(),
|
||||
autoRefreshInterval: this.autoRefreshInterval,
|
||||
theme: this.theme,
|
||||
published: !!this.published,
|
||||
showTags: !!this.show_tags,
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||
const passwordHash = require("../password-hash");
|
||||
const { R } = require("redbean-node");
|
||||
const jwt = require("jsonwebtoken");
|
||||
const { shake256, SHAKE256_LENGTH } = require("../util-server");
|
||||
|
||||
class User extends BeanModel {
|
||||
/**
|
||||
|
@ -23,8 +25,27 @@ class User extends BeanModel {
|
|||
* @returns {Promise<void>}
|
||||
*/
|
||||
async resetPassword(newPassword) {
|
||||
await User.resetPassword(this.id, newPassword);
|
||||
this.password = newPassword;
|
||||
const hashedPassword = passwordHash.generate(newPassword);
|
||||
|
||||
await R.exec("UPDATE `user` SET password = ? WHERE id = ? ", [
|
||||
hashedPassword,
|
||||
this.id
|
||||
]);
|
||||
|
||||
this.password = hashedPassword;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new JWT for a user
|
||||
* @param {User} user The User to create a JsonWebToken for
|
||||
* @param {string} jwtSecret The key used to sign the JsonWebToken
|
||||
* @returns {string} the JsonWebToken as a string
|
||||
*/
|
||||
static createJWT(user, jwtSecret) {
|
||||
return jwt.sign({
|
||||
username: user.username,
|
||||
h: shake256(user.password, SHAKE256_LENGTH),
|
||||
}, jwtSecret);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
65
server/monitor-types/mongodb.js
Normal file
|
@ -0,0 +1,65 @@
|
|||
const { MonitorType } = require("./monitor-type");
|
||||
const { UP } = require("../../src/util");
|
||||
const { MongoClient } = require("mongodb");
|
||||
const jsonata = require("jsonata");
|
||||
|
||||
class MongodbMonitorType extends MonitorType {
|
||||
|
||||
name = "mongodb";
|
||||
|
||||
/**
|
||||
* @inheritdoc
|
||||
*/
|
||||
async check(monitor, heartbeat, _server) {
|
||||
let command = { "ping": 1 };
|
||||
if (monitor.databaseQuery) {
|
||||
command = JSON.parse(monitor.databaseQuery);
|
||||
}
|
||||
|
||||
let result = await this.runMongodbCommand(monitor.databaseConnectionString, command);
|
||||
|
||||
if (result["ok"] !== 1) {
|
||||
throw new Error("MongoDB command failed");
|
||||
} else {
|
||||
heartbeat.msg = "Command executed successfully";
|
||||
}
|
||||
|
||||
if (monitor.jsonPath) {
|
||||
let expression = jsonata(monitor.jsonPath);
|
||||
result = await expression.evaluate(result);
|
||||
if (result) {
|
||||
heartbeat.msg = "Command executed successfully and the jsonata expression produces a result.";
|
||||
} else {
|
||||
throw new Error("Queried value not found.");
|
||||
}
|
||||
}
|
||||
|
||||
if (monitor.expectedValue) {
|
||||
if (result.toString() === monitor.expectedValue) {
|
||||
heartbeat.msg = "Command executed successfully and expected value was found";
|
||||
} else {
|
||||
throw new Error("Query executed, but value is not equal to expected value, value was: [" + JSON.stringify(result) + "]");
|
||||
}
|
||||
}
|
||||
|
||||
heartbeat.status = UP;
|
||||
}
|
||||
|
||||
/**
|
||||
* Connect to and run MongoDB command on a MongoDB database
|
||||
* @param {string} connectionString The database connection string
|
||||
* @param {object} command MongoDB command to run on the database
|
||||
* @returns {Promise<(string[] | object[] | object)>} Response from
|
||||
* server
|
||||
*/
|
||||
async runMongodbCommand(connectionString, command) {
|
||||
let client = await MongoClient.connect(connectionString);
|
||||
let result = await client.db().command(command);
|
||||
await client.close();
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
MongodbMonitorType,
|
||||
};
|
|
@ -1,5 +1,4 @@
|
|||
class MonitorType {
|
||||
|
||||
name = undefined;
|
||||
|
||||
/**
|
||||
|
|
122
server/monitor-types/mqtt.js
Normal file
|
@ -0,0 +1,122 @@
|
|||
const { MonitorType } = require("./monitor-type");
|
||||
const { log, UP } = require("../../src/util");
|
||||
const mqtt = require("mqtt");
|
||||
const jsonata = require("jsonata");
|
||||
|
||||
class MqttMonitorType extends MonitorType {
|
||||
|
||||
name = "mqtt";
|
||||
|
||||
/**
|
||||
* Run the monitoring check on the MQTT monitor
|
||||
* @param {Monitor} monitor Monitor to check
|
||||
* @param {Heartbeat} heartbeat Monitor heartbeat to update
|
||||
* @param {UptimeKumaServer} server Uptime Kuma server
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async check(monitor, heartbeat, server) {
|
||||
const receivedMessage = await this.mqttAsync(monitor.hostname, monitor.mqttTopic, {
|
||||
port: monitor.port,
|
||||
username: monitor.mqttUsername,
|
||||
password: monitor.mqttPassword,
|
||||
interval: monitor.interval,
|
||||
});
|
||||
|
||||
if (monitor.mqttCheckType == null || monitor.mqttCheckType === "") {
|
||||
// use old default
|
||||
monitor.mqttCheckType = "keyword";
|
||||
}
|
||||
|
||||
if (monitor.mqttCheckType === "keyword") {
|
||||
if (receivedMessage != null && receivedMessage.includes(monitor.mqttSuccessMessage)) {
|
||||
heartbeat.msg = `Topic: ${monitor.mqttTopic}; Message: ${receivedMessage}`;
|
||||
heartbeat.status = UP;
|
||||
} else {
|
||||
throw Error(`Message Mismatch - Topic: ${monitor.mqttTopic}; Message: ${receivedMessage}`);
|
||||
}
|
||||
} else if (monitor.mqttCheckType === "json-query") {
|
||||
const parsedMessage = JSON.parse(receivedMessage);
|
||||
|
||||
let expression = jsonata(monitor.jsonPath);
|
||||
|
||||
let result = await expression.evaluate(parsedMessage);
|
||||
|
||||
if (result?.toString() === monitor.expectedValue) {
|
||||
heartbeat.msg = "Message received, expected value is found";
|
||||
heartbeat.status = UP;
|
||||
} else {
|
||||
throw new Error("Message received but value is not equal to expected value, value was: [" + result + "]");
|
||||
}
|
||||
} else {
|
||||
throw Error("Unknown MQTT Check Type");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Connect to MQTT Broker, subscribe to topic and receive message as String
|
||||
* @param {string} hostname Hostname / address of machine to test
|
||||
* @param {string} topic MQTT topic
|
||||
* @param {object} options MQTT options. Contains port, username,
|
||||
* password and interval (interval defaults to 20)
|
||||
* @returns {Promise<string>} Received MQTT message
|
||||
*/
|
||||
mqttAsync(hostname, topic, options = {}) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const { port, username, password, interval = 20 } = options;
|
||||
|
||||
// Adds MQTT protocol to the hostname if not already present
|
||||
if (!/^(?:http|mqtt|ws)s?:\/\//.test(hostname)) {
|
||||
hostname = "mqtt://" + hostname;
|
||||
}
|
||||
|
||||
const timeoutID = setTimeout(() => {
|
||||
log.debug("mqtt", "MQTT timeout triggered");
|
||||
client.end();
|
||||
reject(new Error("Timeout, Message not received"));
|
||||
}, interval * 1000 * 0.8);
|
||||
|
||||
const mqttUrl = `${hostname}:${port}`;
|
||||
|
||||
log.debug("mqtt", `MQTT connecting to ${mqttUrl}`);
|
||||
|
||||
let client = mqtt.connect(mqttUrl, {
|
||||
username,
|
||||
password,
|
||||
clientId: "uptime-kuma_" + Math.random().toString(16).substr(2, 8)
|
||||
});
|
||||
|
||||
client.on("connect", () => {
|
||||
log.debug("mqtt", "MQTT connected");
|
||||
|
||||
try {
|
||||
client.subscribe(topic, () => {
|
||||
log.debug("mqtt", "MQTT subscribed to topic");
|
||||
});
|
||||
} catch (e) {
|
||||
client.end();
|
||||
clearTimeout(timeoutID);
|
||||
reject(new Error("Cannot subscribe topic"));
|
||||
}
|
||||
});
|
||||
|
||||
client.on("error", (error) => {
|
||||
client.end();
|
||||
clearTimeout(timeoutID);
|
||||
reject(error);
|
||||
});
|
||||
|
||||
client.on("message", (messageTopic, message) => {
|
||||
if (messageTopic === topic) {
|
||||
client.end();
|
||||
clearTimeout(timeoutID);
|
||||
resolve(message.toString("utf8"));
|
||||
}
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
MqttMonitorType,
|
||||
};
|
|
@ -8,7 +8,12 @@ const path = require("path");
const Database = require("../database");
const jwt = require("jsonwebtoken");
const config = require("../config");
const { RemoteBrowser } = require("../remote-browser");

/**
 * Cached instance of a browser
 * @type {import ("playwright-core").Browser}
 */
let browser = null;

let allowedList = [];

@ -24,6 +29,9 @@ if (process.platform === "win32") {
    allowedList.push(process.env.PROGRAMFILES + "\\Chromium\\Application\\chrome.exe");
    allowedList.push(process.env["ProgramFiles(x86)"] + "\\Chromium\\Application\\chrome.exe");

    // Allow MS Edge
    allowedList.push(process.env["ProgramFiles(x86)"] + "\\Microsoft\\Edge\\Application\\msedge.exe");

    // For Loop A to Z
    for (let i = 65; i <= 90; i++) {
        let drive = String.fromCharCode(i);

@ -40,17 +48,15 @@ if (process.platform === "win32") {
        "/usr/bin/chromium",
        "/usr/bin/chromium-browser",
        "/usr/bin/google-chrome",
        "/snap/bin/chromium", // Ubuntu
    ];
} else if (process.platform === "darwin") {
    // TODO: Generated by GitHub Copilot, but not sure if it's correct
    allowedList = [
        "/Applications/Google Chrome.app/Contents/MacOS/Google Chrome",
        "/Applications/Chromium.app/Contents/MacOS/Chromium",
    ];
}

log.debug("chrome", allowedList);

/**
 * Is the executable path allowed?
 * @param {string} executablePath Path to executable

@ -69,10 +75,12 @@ async function isAllowedChromeExecutable(executablePath) {
/**
 * Get the current instance of the browser. If there isn't one, create
 * it.
 * @returns {Promise<Browser>} The browser
 * @returns {Promise<import ("playwright-core").Browser>} The browser
 */
async function getBrowser() {
    if (!browser) {
    if (browser && browser.isConnected()) {
        return browser;
    } else {
        let executablePath = await Settings.get("chromeExecutable");

        executablePath = await prepareChromeExecutable(executablePath);

@ -81,7 +89,21 @@ async function getBrowser() {
            //headless: false,
            executablePath,
        });

        return browser;
    }
}

/**
 * Get the current instance of the browser. If there isn't one, create it
 * @param {integer} remoteBrowserID Path to executable
 * @param {integer} userId User ID
 * @returns {Promise<Browser>} The browser
 */
async function getRemoteBrowser(remoteBrowserID, userId) {
    let remoteBrowser = await RemoteBrowser.get(remoteBrowserID, userId);
    log.debug("MONITOR", `Using remote browser: ${remoteBrowser.name} (${remoteBrowser.id})`);
    browser = await chromium.connect(remoteBrowser.url);
    return browser;
}

@ -191,11 +213,21 @@ async function testChrome(executablePath) {
        throw new Error(e.message);
    }
}

// test remote browser
/**
 * TODO: connect remote browser? https://playwright.dev/docs/api/class-browsertype#browser-type-connect
 *
 * @param {string} remoteBrowserURL Remote Browser URL
 * @returns {Promise<boolean>} Returns if connection worked
 */
async function testRemoteBrowser(remoteBrowserURL) {
    try {
        const browser = await chromium.connect(remoteBrowserURL);
        browser.version();
        await browser.close();
        return true;
    } catch (e) {
        throw new Error(e.message);
    }
}
class RealBrowserMonitorType extends MonitorType {

    name = "real-browser";

@ -204,7 +236,7 @@ class RealBrowserMonitorType extends MonitorType {
     * @inheritdoc
     */
    async check(monitor, heartbeat, server) {
        const browser = await getBrowser();
        const browser = monitor.remote_browser ? await getRemoteBrowser(monitor.remote_browser, monitor.user_id) : await getBrowser();
        const context = await browser.newContext();
        const page = await context.newPage();

@ -237,4 +269,5 @@ module.exports = {
    RealBrowserMonitorType,
    testChrome,
    resetChrome,
    testRemoteBrowser,
};
@ -1,6 +1,6 @@
const { MonitorType } = require("./monitor-type");
const { UP, log } = require("../../src/util");
const exec = require("child_process").exec;
const { UP } = require("../../src/util");
const childProcessAsync = require("promisify-child-process");

/**
 * A TailscalePing class extends the MonitorType.

@ -16,14 +16,13 @@ class TailscalePing extends MonitorType {
     * @param {object} monitor The monitor object associated with the check.
     * @param {object} heartbeat The heartbeat object to update.
     * @returns {Promise<void>}
     * @throws Will throw an error if checking Tailscale ping encounters any error
     * @throws Error if checking Tailscale ping encounters any error
     */
    async check(monitor, heartbeat) {
        try {
            let tailscaleOutput = await this.runTailscalePing(monitor.hostname, monitor.interval);
            this.parseTailscaleOutput(tailscaleOutput, heartbeat);
        } catch (err) {
            log.debug("Tailscale", err);
            // trigger log function somewhere to display a notification or alert to the user (but how?)
            throw new Error(`Error checking Tailscale ping: ${err}`);
        }

@ -37,26 +36,19 @@ class TailscalePing extends MonitorType {
     * @throws Will throw an error if the command execution encounters any error.
     */
    async runTailscalePing(hostname, interval) {
        let cmd = `tailscale ping ${hostname}`;

        log.debug("Tailscale", cmd);

        return new Promise((resolve, reject) => {
        let timeout = interval * 1000 * 0.8;
            exec(cmd, { timeout: timeout }, (error, stdout, stderr) => {
                // we may need to handle more cases if tailscale reports an error that isn't necessarily an error (such as not-logged in or DERP health-related issues)
                if (error) {
                    reject(`Execution error: ${error.message}`);
                    return;
                }
                if (stderr) {
                    reject(`Error in output: ${stderr}`);
                    return;
                }

                resolve(stdout);
            });
        let res = await childProcessAsync.spawn("tailscale", [ "ping", "--c", "1", hostname ], {
            timeout: timeout,
            encoding: "utf8",
        });
        if (res.stderr && res.stderr.toString()) {
            throw new Error(`Error in output: ${res.stderr.toString()}`);
        }
        if (res.stdout && res.stdout.toString()) {
            return res.stdout.toString();
        } else {
            throw new Error("No output from Tailscale ping");
        }
    }

    /**

@ -74,7 +66,7 @@ class TailscalePing extends MonitorType {
                heartbeat.status = UP;
                let time = line.split(" in ")[1].split(" ")[0];
                heartbeat.ping = parseInt(time);
                heartbeat.msg = line;
                heartbeat.msg = "OK";
                break;
            } else if (line.includes("timed out")) {
                throw new Error(`Ping timed out: "${line}"`);
@ -3,17 +3,15 @@ const { DOWN, UP } = require("../../src/util");
const axios = require("axios");

class Alerta extends NotificationProvider {

    name = "alerta";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        const okMsg = "Sent Successfully.";

        try {
            let alertaUrl = `${notification.alertaApiEndpoint}`;
            let config = {
                headers: {
                    "Content-Type": "application/json;charset=UTF-8",

@ -40,7 +38,7 @@ class Alerta extends NotificationProvider {
                    resource: "Message",
                }, data);

                await axios.post(alertaUrl, postData, config);
                await axios.post(notification.alertaApiEndpoint, postData, config);
            } else {
                let datadup = Object.assign( {
                    correlate: [ "service_up", "service_down" ],

@ -52,11 +50,11 @@ class Alerta extends NotificationProvider {
                if (heartbeatJSON["status"] === DOWN) {
                    datadup.severity = notification.alertaAlertState; // critical
                    datadup.text = "Service " + monitorJSON["type"] + " is down.";
                    await axios.post(alertaUrl, datadup, config);
                    await axios.post(notification.alertaApiEndpoint, datadup, config);
                } else if (heartbeatJSON["status"] === UP) {
                    datadup.severity = notification.alertaRecoverState; // cleaned
                    datadup.text = "Service " + monitorJSON["type"] + " is up.";
                    await axios.post(alertaUrl, datadup, config);
                    await axios.post(notification.alertaApiEndpoint, datadup, config);
                }
            }
            return okMsg;
@ -4,14 +4,14 @@ const { setting } = require("../util-server");
const { getMonitorRelativeURL, UP, DOWN } = require("../../src/util");

class AlertNow extends NotificationProvider {

    name = "AlertNow";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        const okMsg = "Sent Successfully.";

        try {
            let textMsg = "";
            let status = "open";
@ -11,7 +11,7 @@ class AliyunSMS extends NotificationProvider {
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        const okMsg = "Sent Successfully.";

        try {
            if (heartbeatJSON != null) {

@ -44,7 +44,7 @@ class AliyunSMS extends NotificationProvider {
     * Send the SMS notification
     * @param {BeanModel} notification Notification details
     * @param {string} msgbody Message template
     * @returns {boolean} True if successful else false
     * @returns {Promise<boolean>} True if successful else false
     */
    async sendSms(notification, msgbody) {
        let params = {
@ -1,27 +1,30 @@
const NotificationProvider = require("./notification-provider");
const childProcess = require("child_process");
const childProcessAsync = require("promisify-child-process");

class Apprise extends NotificationProvider {

    name = "apprise";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        const okMsg = "Sent Successfully.";

        const args = [ "-vv", "-b", msg, notification.appriseURL ];
        if (notification.title) {
            args.push("-t");
            args.push(notification.title);
        }
        const s = childProcess.spawnSync("apprise", args);
        const s = await childProcessAsync.spawn("apprise", args, {
            encoding: "utf8",
        });

        const output = (s.stdout) ? s.stdout.toString() : "ERROR: maybe apprise not found";

        if (output) {

            if (! output.includes("ERROR")) {
                return "Sent Successfully";
                return okMsg;
            }

            throw new Error(output);
@ -46,29 +46,29 @@ class Bark extends NotificationProvider {
    }

    /**
     * Add additional parameter for Bark v1 endpoints
     * Add additional parameter for Bark v1 endpoints.
     * Leads to better on device styles (iOS 15 optimized)
     * @param {BeanModel} notification Notification to send
     * @param {string} postUrl URL to append parameters to
     * @returns {string} Additional URL parameters
     */
    appendAdditionalParameters(notification, postUrl) {
    additionalParameters(notification) {
        // set icon to uptime kuma icon, 11kb should be fine
        postUrl += "?icon=" + barkNotificationAvatar;
        let params = "?icon=" + barkNotificationAvatar;
        // grouping all our notifications
        if (notification.barkGroup != null) {
            postUrl += "&group=" + notification.barkGroup;
            params += "&group=" + notification.barkGroup;
        } else {
            // default name
            postUrl += "&group=" + "UptimeKuma";
            params += "&group=" + "UptimeKuma";
        }
        // picked a sound, this should follow system's mute status when arrival
        if (notification.barkSound != null) {
            postUrl += "&sound=" + notification.barkSound;
            params += "&sound=" + notification.barkSound;
        } else {
            // default sound
            postUrl += "&sound=" + "telegraph";
            params += "&sound=" + "telegraph";
        }
        return postUrl;
        return params;
    }

    /**

@ -92,7 +92,7 @@ class Bark extends NotificationProvider {
     * @param {string} title Message title
     * @param {string} subtitle Message
     * @param {string} endpoint Endpoint to send request to
     * @returns {string} Success message
     * @returns {Promise<string>} Success message
     */
    async postNotification(notification, title, subtitle, endpoint) {
        let result;

@ -100,9 +100,8 @@ class Bark extends NotificationProvider {
            // url encode title and subtitle
            title = encodeURIComponent(title);
            subtitle = encodeURIComponent(subtitle);
            let postUrl = endpoint + "/" + title + "/" + subtitle;
            postUrl = this.appendAdditionalParameters(notification, postUrl);
            result = await axios.get(postUrl);
            const params = this.additionalParameters(notification);
            result = await axios.get(`${endpoint}/${title}/${subtitle}${params}`);
        } else {
            result = await axios.post(`${endpoint}/push`, {
                title,
31
server/notification-providers/bitrix24.js
Normal file

@ -0,0 +1,31 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { UP } = require("../../src/util");

class Bitrix24 extends NotificationProvider {
    name = "Bitrix24";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        const okMsg = "Sent Successfully.";

        try {
            const params = {
                user_id: notification.bitrix24UserID,
                message: "[B]Uptime Kuma[/B]",
                "ATTACH[COLOR]": (heartbeatJSON ?? {})["status"] === UP ? "#b73419" : "#67b518",
                "ATTACH[BLOCKS][0][MESSAGE]": msg
            };

            await axios.get(`${notification.bitrix24WebhookURL}/im.notify.system.add.json`, { params });
            return okMsg;

        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = Bitrix24;
23
server/notification-providers/call-me-bot.js
Normal file

@ -0,0 +1,23 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class CallMeBot extends NotificationProvider {
    name = "CallMeBot";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        const okMsg = "Sent Successfully.";
        try {
            const url = new URL(notification.callMeBotEndpoint);
            url.searchParams.set("text", msg);
            await axios.get(url.toString());
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = CallMeBot;
39
server/notification-providers/cellsynt.js
Normal file

@ -0,0 +1,39 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class Cellsynt extends NotificationProvider {
    name = "Cellsynt";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        const okMsg = "Sent Successfully.";
        const data = {
            // docs at https://www.cellsynt.com/en/sms/api-integration
            params: {
                "username": notification.cellsyntLogin,
                "password": notification.cellsyntPassword,
                "destination": notification.cellsyntDestination,
                "text": msg.replace(/[^\x00-\x7F]/g, ""),
                "originatortype": notification.cellsyntOriginatortype,
                "originator": notification.cellsyntOriginator,
                "allowconcat": notification.cellsyntAllowLongSMS ? 6 : 1
            }
        };
        try {
            const resp = await axios.post("https://se-1.cellsynt.net/sms.php", null, data);
            if (resp.data == null ) {
                throw new Error("Could not connect to Cellsynt, please try again.");
            } else if (resp.data.includes("Error:")) {
                resp.data = resp.data.replaceAll("Error:", "");
                throw new Error(resp.data);
            }
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = Cellsynt;
@ -2,14 +2,15 @@ const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class ClickSendSMS extends NotificationProvider {

    name = "clicksendsms";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        const okMsg = "Sent Successfully.";
        const url = "https://rest.clicksend.com/v3/sms/send";

        try {
            let config = {
                headers: {

@ -28,7 +29,7 @@ class ClickSendSMS extends NotificationProvider {
                    }
                ]
            };
            let resp = await axios.post("https://rest.clicksend.com/v3/sms/send", data, config);
            let resp = await axios.post(url, data, config);
            if (resp.data.data.messages[0].status !== "SUCCESS") {
                let error = "Something gone wrong. Api returned " + resp.data.data.messages[0].status + ".";
                this.throwGeneralAxiosError(error);
@ -10,7 +10,7 @@ class DingDing extends NotificationProvider {
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        const okMsg = "Sent Successfully.";

        try {
            if (heartbeatJSON != null) {

@ -19,9 +19,12 @@ class DingDing extends NotificationProvider {
                    markdown: {
                        title: `[${this.statusToString(heartbeatJSON["status"])}] ${monitorJSON["name"]}`,
                        text: `## [${this.statusToString(heartbeatJSON["status"])}] ${monitorJSON["name"]} \n> ${heartbeatJSON["msg"]}\n> Time (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`,
                    },
                    "at": {
                        "isAtAll": notification.mentioning === "everyone"
                    }
                };
                if (this.sendToDingDing(notification, params)) {
                if (await this.sendToDingDing(notification, params)) {
                    return okMsg;
                }
            } else {

@ -31,7 +34,7 @@ class DingDing extends NotificationProvider {
                        content: msg
                    }
                };
                if (this.sendToDingDing(notification, params)) {
                if (await this.sendToDingDing(notification, params)) {
                    return okMsg;
                }
            }

@ -44,7 +47,7 @@ class DingDing extends NotificationProvider {
     * Send message to DingDing
     * @param {BeanModel} notification Notification to send
     * @param {object} params Parameters of message
     * @returns {boolean} True if successful else false
     * @returns {Promise<boolean>} True if successful else false
     */
    async sendToDingDing(notification, params) {
        let timestamp = Date.now();

@ -62,7 +65,7 @@ class DingDing extends NotificationProvider {
        if (result.data.errmsg === "ok") {
            return true;
        }
        return false;
        throw new Error(result.data.errmsg);
    }

    /**
@ -3,17 +3,20 @@ const axios = require("axios");
const { DOWN, UP } = require("../../src/util");

class Discord extends NotificationProvider {

    name = "discord";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        const okMsg = "Sent Successfully.";

        try {
            const discordDisplayName = notification.discordUsername || "Uptime Kuma";
            const webhookUrl = new URL(notification.discordWebhookUrl);
            if (notification.discordChannelType === "postToThread") {
                webhookUrl.searchParams.append("thread_id", notification.threadId);
            }

            // If heartbeatJSON is null, assume we're testing.
            if (heartbeatJSON == null) {

@ -21,7 +24,12 @@ class Discord extends NotificationProvider {
                    username: discordDisplayName,
                    content: msg,
                };
                await axios.post(notification.discordWebhookUrl, discordtestdata);

                if (notification.discordChannelType === "createNewForumPost") {
                    discordtestdata.thread_name = notification.postName;
                }

                await axios.post(webhookUrl.toString(), discordtestdata);
                return okMsg;
            }

@ -73,12 +81,14 @@ class Discord extends NotificationProvider {
                        ],
                    }],
                };

                if (notification.discordChannelType === "createNewForumPost") {
                    discorddowndata.thread_name = notification.postName;
                }
                if (notification.discordPrefixMessage) {
                    discorddowndata.content = notification.discordPrefixMessage;
                }

                await axios.post(notification.discordWebhookUrl, discorddowndata);
                await axios.post(webhookUrl.toString(), discorddowndata);
                return okMsg;

            } else if (heartbeatJSON["status"] === UP) {

@ -109,11 +119,15 @@ class Discord extends NotificationProvider {
                    }],
                };

                if (notification.discordChannelType === "createNewForumPost") {
                    discordupdata.thread_name = notification.postName;
                }

                if (notification.discordPrefixMessage) {
                    discordupdata.content = notification.discordPrefixMessage;
                }

                await axios.post(notification.discordWebhookUrl, discordupdata);
                await axios.post(webhookUrl.toString(), discordupdata);
                return okMsg;
            }
        } catch (error) {
@ -9,8 +9,7 @@ class Feishu extends NotificationProvider {
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        let feishuWebHookUrl = notification.feishuWebHookUrl;
        const okMsg = "Sent Successfully.";

        try {
            if (heartbeatJSON == null) {

@ -20,7 +19,7 @@ class Feishu extends NotificationProvider {
                        text: msg,
                    },
                };
                await axios.post(feishuWebHookUrl, testdata);
                await axios.post(notification.feishuWebHookUrl, testdata);
                return okMsg;
            }

@ -46,7 +45,7 @@ class Feishu extends NotificationProvider {
                        },
                    },
                };
                await axios.post(feishuWebHookUrl, downdata);
                await axios.post(notification.feishuWebHookUrl, downdata);
                return okMsg;
            }

@ -72,7 +71,7 @@ class Feishu extends NotificationProvider {
                        },
                    },
                };
                await axios.post(feishuWebHookUrl, updata);
                await axios.post(notification.feishuWebHookUrl, updata);
                return okMsg;
            }
        } catch (error) {
@ -62,6 +62,15 @@ class FlashDuty extends NotificationProvider {
     * @returns {string} Success message
     */
    async postNotification(notification, title, body, monitorInfo, eventStatus) {
        let labels = {
            resource: this.genMonitorUrl(monitorInfo),
            check: monitorInfo.name,
        };
        if (monitorInfo.tags && monitorInfo.tags.length > 0) {
            for (let tag of monitorInfo.tags) {
                labels[tag.name] = tag.value;
            }
        }
        const options = {
            method: "POST",
            url: "https://api.flashcat.cloud/event/push/alert/standard?integration_key=" + notification.flashdutyIntegrationKey,

@ -71,9 +80,7 @@ class FlashDuty extends NotificationProvider {
                title,
                event_status: eventStatus || "Info",
                alert_key: String(monitorInfo.id) || Math.random().toString(36).substring(7),
                labels: monitorInfo?.tags?.reduce((acc, item) => ({ ...acc,
                    [item.name]: item.value
                }), { resource: this.genMonitorUrl(monitorInfo) }),
                labels,
            }
        };

|
|||
const axios = require("axios");
|
||||
|
||||
class FreeMobile extends NotificationProvider {
|
||||
|
||||
name = "FreeMobile";
|
||||
|
||||
/**
|
||||
* @inheritdoc
|
||||
*/
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully.";
|
||||
const okMsg = "Sent Successfully.";
|
||||
|
||||
try {
|
||||
await axios.post(`https://smsapi.free-mobile.fr/sendmsg?msg=${encodeURIComponent(msg.replace("🔴", "⛔️"))}`, {
|
||||
"user": notification.freemobileUser,
|
||||
|
|
|
@ -3,21 +3,20 @@ const axios = require("axios");
|
|||
const { UP } = require("../../src/util");
|
||||
|
||||
class GoAlert extends NotificationProvider {
|
||||
|
||||
name = "GoAlert";
|
||||
|
||||
/**
|
||||
* @inheritdoc
|
||||
*/
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully.";
|
||||
const okMsg = "Sent Successfully.";
|
||||
|
||||
try {
|
||||
let closeAction = "close";
|
||||
let data = {
|
||||
summary: msg,
|
||||
};
|
||||
if (heartbeatJSON != null && heartbeatJSON["status"] === UP) {
|
||||
data["action"] = closeAction;
|
||||
data["action"] = "close";
|
||||
}
|
||||
let headers = {
|
||||
"Content-Type": "multipart/form-data",
|
||||
|
@ -27,7 +26,6 @@ class GoAlert extends NotificationProvider {
|
|||
};
|
||||
await axios.post(`${notification.goAlertBaseURL}/api/v2/generic/incoming?token=${notification.goAlertToken}`, data, config);
|
||||
return okMsg;
|
||||
|
||||
} catch (error) {
|
||||
let msg = (error.response.data) ? error.response.data : "Error without response";
|
||||
throw new Error(msg);
|
||||
|
|
|
@ -1,41 +1,85 @@
|
|||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
const { setting } = require("../util-server");
|
||||
const { getMonitorRelativeURL } = require("../../src/util");
|
||||
const { DOWN, UP } = require("../../src/util");
|
||||
const { getMonitorRelativeURL, UP } = require("../../src/util");
|
||||
|
||||
class GoogleChat extends NotificationProvider {
|
||||
|
||||
name = "GoogleChat";
|
||||
|
||||
/**
|
||||
* @inheritdoc
|
||||
*/
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully.";
|
||||
const okMsg = "Sent Successfully.";
|
||||
|
||||
try {
|
||||
// Google Chat message formatting: https://developers.google.com/chat/api/guides/message-formats/basic
|
||||
|
||||
let textMsg = "";
|
||||
if (heartbeatJSON && heartbeatJSON.status === UP) {
|
||||
textMsg = "✅ Application is back online\n";
|
||||
} else if (heartbeatJSON && heartbeatJSON.status === DOWN) {
|
||||
textMsg = "🔴 Application went down\n";
|
||||
let chatHeader = {
|
||||
title: "Uptime Kuma Alert",
|
||||
};
|
||||
|
||||
if (monitorJSON && heartbeatJSON) {
|
||||
chatHeader["title"] =
|
||||
heartbeatJSON["status"] === UP
|
||||
? `✅ ${monitorJSON["name"]} is back online`
|
||||
: `🔴 ${monitorJSON["name"]} went down`;
|
||||
}
|
||||
|
||||
if (monitorJSON && monitorJSON.name) {
|
||||
textMsg += `*${monitorJSON.name}*\n`;
|
||||
// always show msg
|
||||
let sectionWidgets = [
|
||||
{
|
||||
textParagraph: {
|
||||
text: `<b>Message:</b>\n${msg}`,
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
// add time if available
|
||||
if (heartbeatJSON) {
|
||||
sectionWidgets.push({
|
||||
textParagraph: {
|
||||
text: `<b>Time (${heartbeatJSON["timezone"]}):</b>\n${heartbeatJSON["localDateTime"]}`,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
textMsg += `${msg}`;
|
||||
|
||||
// add button for monitor link if available
|
||||
const baseURL = await setting("primaryBaseURL");
|
||||
if (baseURL && monitorJSON) {
|
||||
textMsg += `\n${baseURL + getMonitorRelativeURL(monitorJSON.id)}`;
|
||||
if (baseURL) {
|
||||
const urlPath = monitorJSON ? getMonitorRelativeURL(monitorJSON.id) : "/";
|
||||
sectionWidgets.push({
|
||||
buttonList: {
|
||||
buttons: [
|
||||
{
|
||||
text: "Visit Uptime Kuma",
|
||||
onClick: {
|
||||
openLink: {
|
||||
url: baseURL + urlPath,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
const data = {
|
||||
"text": textMsg,
|
||||
let chatSections = [
|
||||
{
|
||||
widgets: sectionWidgets,
|
||||
},
|
||||
];
|
||||
|
||||
// construct json data
|
||||
let data = {
|
||||
cardsV2: [
|
||||
{
|
||||
card: {
|
||||
header: chatHeader,
|
||||
sections: chatSections,
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
await axios.post(notification.googleChatWebhookURL, data);
|
||||
|
|
|
@ -2,14 +2,13 @@ const NotificationProvider = require("./notification-provider");
|
|||
const axios = require("axios");
|
||||
|
||||
class Gorush extends NotificationProvider {
|
||||
|
||||
name = "gorush";
|
||||
|
||||
/**
|
||||
* @inheritdoc
|
||||
*/
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully.";
|
||||
const okMsg = "Sent Successfully.";
|
||||
|
||||
let platformMapping = {
|
||||
"ios": 1,
|
||||
|
|
|
@ -2,14 +2,14 @@ const NotificationProvider = require("./notification-provider");
|
|||
const axios = require("axios");
|
||||
|
||||
class Gotify extends NotificationProvider {
|
||||
|
||||
name = "gotify";
|
||||
|
||||
/**
|
||||
* @inheritdoc
|
||||
*/
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully.";
|
||||
const okMsg = "Sent Successfully.";
|
||||
|
||||
try {
|
||||
if (notification.gotifyserverurl && notification.gotifyserverurl.endsWith("/")) {
|
||||
notification.gotifyserverurl = notification.gotifyserverurl.slice(0, -1);
|
||||
|
|
51
server/notification-providers/grafana-oncall.js
Normal file
51
server/notification-providers/grafana-oncall.js
Normal file
|
@ -0,0 +1,51 @@
|
|||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
const { DOWN, UP } = require("../../src/util");
|
||||
|
||||
class GrafanaOncall extends NotificationProvider {
|
||||
name = "GrafanaOncall";
|
||||
|
||||
/**
|
||||
* @inheritdoc
|
||||
*/
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
const okMsg = "Sent Successfully.";
|
||||
|
||||
if (!notification.GrafanaOncallURL) {
|
||||
throw new Error("GrafanaOncallURL cannot be empty");
|
||||
}
|
||||
|
||||
try {
|
||||
if (heartbeatJSON === null) {
|
||||
let grafanaupdata = {
|
||||
title: "General notification",
|
||||
message: msg,
|
||||
state: "alerting",
|
||||
};
|
||||
await axios.post(notification.GrafanaOncallURL, grafanaupdata);
|
||||
return okMsg;
|
||||
} else if (heartbeatJSON["status"] === DOWN) {
|
||||
let grafanadowndata = {
|
||||
title: monitorJSON["name"] + " is down",
|
||||
message: heartbeatJSON["msg"],
|
||||
state: "alerting",
|
||||
};
|
||||
await axios.post(notification.GrafanaOncallURL, grafanadowndata);
|
||||
return okMsg;
|
||||
} else if (heartbeatJSON["status"] === UP) {
|
||||
let grafanaupdata = {
|
||||
title: monitorJSON["name"] + " is up",
|
||||
message: heartbeatJSON["msg"],
|
||||
state: "ok",
|
||||
};
|
||||
await axios.post(notification.GrafanaOncallURL, grafanaupdata);
|
||||
return okMsg;
|
||||
}
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = GrafanaOncall;
|
33
server/notification-providers/gtx-messaging.js
Normal file

@ -0,0 +1,33 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class GtxMessaging extends NotificationProvider {
    name = "gtxmessaging";

    /**
     * @inheritDoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        const okMsg = "Sent Successfully.";

        // The UP/DOWN symbols will be replaced with `???` by gtx-messaging
        const text = msg.replaceAll("🔴 ", "").replaceAll("✅ ", "");

        try {
            const data = new URLSearchParams();
            data.append("from", notification.gtxMessagingFrom.trim());
            data.append("to", notification.gtxMessagingTo.trim());
            data.append("text", text);

            const url = `https://rest.gtx-messaging.net/smsc/sendsms/${notification.gtxMessagingApiKey}/json`;

            await axios.post(url, data);

            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = GtxMessaging;
52
server/notification-providers/heii-oncall.js
Normal file

@ -0,0 +1,52 @@
const { UP, DOWN, getMonitorRelativeURL } = require("../../src/util");
const { setting } = require("../util-server");

const NotificationProvider = require("./notification-provider");
const axios = require("axios");
class HeiiOnCall extends NotificationProvider {
    name = "HeiiOnCall";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        const okMsg = "Sent Successfully.";
        const payload = heartbeatJSON || {};

        const baseURL = await setting("primaryBaseURL");
        if (baseURL && monitorJSON) {
            payload["url"] = baseURL + getMonitorRelativeURL(monitorJSON.id);
        }

        const config = {
            headers: {
                Accept: "application/json",
                "Content-Type": "application/json",
                Authorization: "Bearer " + notification.heiiOnCallApiKey,
            },
        };
        const heiiUrl = `https://heiioncall.com/triggers/${notification.heiiOnCallTriggerId}/`;
        // docs https://heiioncall.com/docs#manual-triggers
        try {
            if (!heartbeatJSON) {
                // Testing or general notification like certificate expiry
                payload["msg"] = msg;
                await axios.post(heiiUrl + "alert", payload, config);
                return okMsg;
            }

            if (heartbeatJSON.status === DOWN) {
                await axios.post(heiiUrl + "alert", payload, config);
                return okMsg;
            }
            if (heartbeatJSON.status === UP) {
                await axios.post(heiiUrl + "resolve", payload, config);
                return okMsg;
            }
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = HeiiOnCall;
@ -9,7 +9,9 @@ class HomeAssistant extends NotificationProvider {
    /**
     * @inheritdoc
     */
    async send(notification, message, monitor = null, heartbeat = null) {
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        const okMsg = "Sent Successfully.";

        const notificationService = notification?.notificationService || defaultNotificationService;

        try {

@ -17,10 +19,12 @@ class HomeAssistant extends NotificationProvider {
                `${notification.homeAssistantUrl.trim().replace(/\/*$/, "")}/api/services/notify/${notificationService}`,
                {
                    title: "Uptime Kuma",
                    message,
                    message: msg,
                    ...(notificationService !== "persistent_notification" && { data: {
                        name: monitor?.name,
                        status: heartbeat?.status,
                        name: monitorJSON?.name,
                        status: heartbeatJSON?.status,
                        channel: "Uptime Kuma",
                        icon_url: "https://github.com/louislam/uptime-kuma/blob/master/public/icon.png?raw=true",
                    } }),
                },
                {

@ -31,7 +35,7 @@ class HomeAssistant extends NotificationProvider {
                }
            );

            return "Sent Successfully.";
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
42
server/notification-providers/keep.js
Normal file

@ -0,0 +1,42 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class Keep extends NotificationProvider {
    name = "Keep";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        const okMsg = "Sent Successfully.";

        try {
            let data = {
                heartbeat: heartbeatJSON,
                monitor: monitorJSON,
                msg,
            };
            let config = {
                headers: {
                    "x-api-key": notification.webhookAPIKey,
                    "content-type": "application/json",
                },
            };

            let url = notification.webhookURL;

            if (url.endsWith("/")) {
                url = url.slice(0, -1);
            }

            let webhookURL = url + "/alerts/event/uptimekuma";

            await axios.post(webhookURL, data, config);
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = Keep;
@ -2,15 +2,15 @@ const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class Kook extends NotificationProvider {

    name = "Kook";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        let url = "https://www.kookapp.cn/api/v3/message/create";
        const okMsg = "Sent Successfully.";
        const url = "https://www.kookapp.cn/api/v3/message/create";

        let data = {
            target_id: notification.kookGuildID,
            content: msg,
@ -3,16 +3,16 @@ const axios = require("axios");
const { DOWN, UP } = require("../../src/util");

class Line extends NotificationProvider {

    name = "line";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        const okMsg = "Sent Successfully.";
        const url = "https://api.line.me/v2/bot/message/push";

        try {
            let lineAPIUrl = "https://api.line.me/v2/bot/message/push";
            let config = {
                headers: {
                    "Content-Type": "application/json",

@ -29,7 +29,7 @@ class Line extends NotificationProvider {
                        }
                    ]
                };
                await axios.post(lineAPIUrl, testMessage, config);
                await axios.post(url, testMessage, config);
            } else if (heartbeatJSON["status"] === DOWN) {
                let downMessage = {
                    "to": notification.lineUserID,

@ -43,7 +43,7 @@ class Line extends NotificationProvider {
                        }
                    ]
                };
                await axios.post(lineAPIUrl, downMessage, config);
                await axios.post(url, downMessage, config);
            } else if (heartbeatJSON["status"] === UP) {
                let upMessage = {
                    "to": notification.lineUserID,

@ -57,7 +57,7 @@ class Line extends NotificationProvider {
                        }
                    ]
                };
                await axios.post(lineAPIUrl, upMessage, config);
                await axios.post(url, upMessage, config);
            }
            return okMsg;
        } catch (error) {
@ -4,16 +4,16 @@ const qs = require("qs");
const { DOWN, UP } = require("../../src/util");

class LineNotify extends NotificationProvider {

    name = "LineNotify";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        const okMsg = "Sent Successfully.";
        const url = "https://notify-api.line.me/api/notify";

        try {
            let lineAPIUrl = "https://notify-api.line.me/api/notify";
            let config = {
                headers: {
                    "Content-Type": "application/x-www-form-urlencoded",

@ -24,7 +24,7 @@ class LineNotify extends NotificationProvider {
                let testMessage = {
                    "message": msg,
                };
                await axios.post(lineAPIUrl, qs.stringify(testMessage), config);
                await axios.post(url, qs.stringify(testMessage), config);
            } else if (heartbeatJSON["status"] === DOWN) {
                let downMessage = {
                    "message": "\n[🔴 Down]\n" +

@ -32,7 +32,7 @@ class LineNotify extends NotificationProvider {
                        heartbeatJSON["msg"] + "\n" +
                        `Time (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`
                };
                await axios.post(lineAPIUrl, qs.stringify(downMessage), config);
                await axios.post(url, qs.stringify(downMessage), config);
            } else if (heartbeatJSON["status"] === UP) {
                let upMessage = {
                    "message": "\n[✅ Up]\n" +

@ -40,7 +40,7 @@ class LineNotify extends NotificationProvider {
                        heartbeatJSON["msg"] + "\n" +
                        `Time (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`
                };
                await axios.post(lineAPIUrl, qs.stringify(upMessage), config);
                await axios.post(url, qs.stringify(upMessage), config);
            }
            return okMsg;
        } catch (error) {
@ -3,28 +3,23 @@ const axios = require("axios");
const { DOWN, UP } = require("../../src/util");

class LunaSea extends NotificationProvider {

    name = "lunasea";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        let lunaseaurl = "";
        if (notification.lunaseaTarget === "user") {
            lunaseaurl = "https://notify.lunasea.app/v1/custom/user/" + notification.lunaseaUserID;
        } else {
            lunaseaurl = "https://notify.lunasea.app/v1/custom/device/" + notification.lunaseaDevice;
        }
        const okMsg = "Sent Successfully.";
        const url = "https://notify.lunasea.app/v1";

        try {
            const target = this.getTarget(notification);
            if (heartbeatJSON == null) {
                let testdata = {
                    "title": "Uptime Kuma Alert",
                    "body": msg,
                };
                await axios.post(lunaseaurl, testdata);
                await axios.post(`${url}/custom/${target}`, testdata);
                return okMsg;
            }

@ -35,7 +30,7 @@ class LunaSea extends NotificationProvider {
                        heartbeatJSON["msg"] +
                        `\nTime (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`
                };
                await axios.post(lunaseaurl, downdata);
                await axios.post(`${url}/custom/${target}`, downdata);
                return okMsg;
            }

@ -46,13 +41,25 @@ class LunaSea extends NotificationProvider {
                        heartbeatJSON["msg"] +
                        `\nTime (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`
                };
                await axios.post(lunaseaurl, updata);
                await axios.post(`${url}/custom/${target}`, updata);
                return okMsg;
            }

        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }

    /**
     * Generates the lunasea target to send the notification to
     * @param {BeanModel} notification Notification details
     * @returns {string} The target to send the notification to
     */
    getTarget(notification) {
        if (notification.lunaseaTarget === "user") {
            return "user/" + notification.lunaseaUserID;
        }
        return "device/" + notification.lunaseaDevice;

    }
}
@ -10,7 +10,7 @@ class Matrix extends NotificationProvider {
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        const okMsg = "Sent Successfully.";

        const size = 20;
        const randomString = encodeURIComponent(
@ -3,14 +3,14 @@ const axios = require("axios");
const { DOWN, UP } = require("../../src/util");

class Mattermost extends NotificationProvider {

    name = "mattermost";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        const okMsg = "Sent Successfully.";

        try {
            const mattermostUserName = notification.mattermostusername || "Uptime Kuma";
            // If heartbeatJSON is null, assume non monitoring notification (Certificate warning) or testing.

@ -78,12 +78,12 @@ class Mattermost extends NotificationProvider {
                {
                    fallback:
                        "Your " +
                        monitorJSON.name +
                        monitorJSON.pathName +
                        " service went " +
                        statusText,
                    color: color,
                    title:
                        monitorJSON.name +
                        monitorJSON.pathName +
                        " service went " +
                        statusText,
                    title_link: monitorJSON.url,

@ -98,10 +98,7 @@ class Mattermost extends NotificationProvider {
                    },
                ],
            };
            await axios.post(
                notification.mattermostWebhookUrl,
                mattermostdata
            );
            await axios.post(notification.mattermostWebhookUrl, mattermostdata);
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
@ -107,7 +107,7 @@ class Nostr extends NotificationProvider {
    /**
     * Get public keys for recipients
     * @param {string} recipients Newline delimited list of recipients
     * @returns {nip19.DecodeResult[]} Public keys
     * @returns {Promise<nip19.DecodeResult[]>} Public keys
     */
    async getPublicKeys(recipients) {
        const recipientsList = recipients.split("\n");
@ -3,14 +3,14 @@ const axios = require("axios");
const { DOWN, UP } = require("../../src/util");

class Ntfy extends NotificationProvider {

    name = "ntfy";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        const okMsg = "Sent Successfully.";

        try {
            let headers = {};
            if (notification.ntfyAuthenticationMethod === "usernamePassword") {

@ -31,7 +31,7 @@ class Ntfy extends NotificationProvider {
                    "priority": notification.ntfyPriority,
                    "tags": [ "test_tube" ],
                };
                await axios.post(`${notification.ntfyserverurl}`, ntfyTestData, { headers: headers });
                await axios.post(notification.ntfyserverurl, ntfyTestData, { headers: headers });
                return okMsg;
            }
            let tags = [];

@ -54,20 +54,23 @@ class Ntfy extends NotificationProvider {
                "priority": priority,
                "title": monitorJSON.name + " " + status + " [Uptime-Kuma]",
                "tags": tags,
                "actions": [
            };

            if (monitorJSON.url && monitorJSON.url !== "https://") {
                data.actions = [
                    {
                        "action": "view",
                        "label": "Open " + monitorJSON.name,
                        "url": monitorJSON.url,
                    },
                ];
            }
                ]
            };

            if (notification.ntfyIcon) {
                data.icon = notification.ntfyIcon;
            }

            await axios.post(`${notification.ntfyserverurl}`, data, { headers: headers });
            await axios.post(notification.ntfyserverurl, data, { headers: headers });

            return okMsg;

@ -2,14 +2,15 @@ const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class Octopush extends NotificationProvider {

    name = "octopush";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        const okMsg = "Sent Successfully.";
        const urlV2 = "https://api.octopush.com/v1/public/sms-campaign/send";
        const urlV1 = "https://www.octopush-dm.com/api/sms/json";

        try {
            // Default - V2

@ -33,7 +34,7 @@ class Octopush extends NotificationProvider {
                    "purpose": "alert",
                    "sender": notification.octopushSenderName
                };
                await axios.post("https://api.octopush.com/v1/public/sms-campaign/send", data, config);
                await axios.post(urlV2, data, config);
            } else if (notification.octopushVersion === "1") {
                let data = {
                    "user_login": notification.octopushDMLogin,

@ -55,7 +56,7 @@ class Octopush extends NotificationProvider {

                // V1 API returns 200 even on error so we must check
                // response data
                let response = await axios.post("https://www.octopush-dm.com/api/sms/json", {}, config);
                let response = await axios.post(urlV1, {}, config);
                if ("error_code" in response.data) {
                    if (response.data.error_code !== "000") {
                        this.throwGeneralAxiosError(`Octopush error ${JSON.stringify(response.data)}`);
@ -2,23 +2,23 @@ const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class OneBot extends NotificationProvider {

    name = "OneBot";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        const okMsg = "Sent Successfully.";

        try {
            let httpAddr = notification.httpAddr;
            if (!httpAddr.startsWith("http")) {
                httpAddr = "http://" + httpAddr;
            let url = notification.httpAddr;
            if (!url.startsWith("http")) {
                url = "http://" + url;
            }
            if (!httpAddr.endsWith("/")) {
                httpAddr += "/";
            if (!url.endsWith("/")) {
                url += "/";
            }
            let onebotAPIUrl = httpAddr + "send_msg";
            url += "send_msg";
            let config = {
                headers: {
                    "Content-Type": "application/json",

@ -37,7 +37,7 @@ class OneBot extends NotificationProvider {
                data["message_type"] = "private";
                data["user_id"] = notification.recieverId;
            }
            await axios.post(onebotAPIUrl, data, config);
            await axios.post(url, data, config);
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
@ -4,10 +4,9 @@ const { UP, DOWN } = require("../../src/util");

const opsgenieAlertsUrlEU = "https://api.eu.opsgenie.com/v2/alerts";
const opsgenieAlertsUrlUS = "https://api.opsgenie.com/v2/alerts";
let okMsg = "Sent Successfully.";
const okMsg = "Sent Successfully.";

class Opsgenie extends NotificationProvider {

    name = "Opsgenie";

    /**
Some files were not shown because too many files have changed in this diff.