Mirror of https://github.com/louislam/uptime-kuma.git (synced 2025-03-04 08:25:57 +00:00)

Merge branch 'master' into subdirectory

Commit 812b4ad0ff: 260 changed files with 13358 additions and 13000 deletions
@@ -14,7 +14,8 @@
 "vscode": {
 "extensions": [
 "streetsidesoftware.code-spell-checker",
-"dbaeumer.vscode-eslint"
+"dbaeumer.vscode-eslint",
+"GitHub.copilot-chat"
 ]
 }
 },
@@ -30,11 +30,15 @@ SECURITY.md
 tsconfig.json
 .env
 /tmp
-/babel.config.js
 /ecosystem.config.js
 /extra/healthcheck.exe
 /extra/healthcheck
-extra/exe-builder
+/extra/exe-builder
+/extra/push-examples
+/extra/uptime-kuma-push
+
+# Comment the following line if you want to rebuild the healthcheck binary
+/extra/healthcheck-armv7


 ### .gitignore content (commented rules are duplicated)
.eslintrc.js (35 changed lines)

@@ -19,12 +19,13 @@ module.exports = {
 ],
 parser: "vue-eslint-parser",
 parserOptions: {
-parser: "@babel/eslint-parser",
+parser: "@typescript-eslint/parser",
 sourceType: "module",
 requireConfigFile: false,
 },
 plugins: [
-"jsdoc"
+"jsdoc",
+"@typescript-eslint",
 ],
 rules: {
 "yoda": "error",
@@ -76,14 +77,14 @@ module.exports = {
 "no-var": "error",
 "key-spacing": "warn",
 "keyword-spacing": "warn",
-"space-infix-ops": "warn",
+"space-infix-ops": "error",
 "arrow-spacing": "warn",
 "no-trailing-spaces": "error",
 "no-constant-condition": [ "error", {
 "checkLoops": false,
 }],
 "space-before-blocks": "warn",
-//'no-console': 'warn',
+//"no-console": "warn",
 "no-extra-boolean-cast": "off",
 "no-multiple-empty-lines": [ "warn", {
 "max": 1,
@@ -95,7 +96,8 @@ module.exports = {
 "no-unneeded-ternary": "error",
 "array-bracket-newline": [ "error", "consistent" ],
 "eol-last": [ "error", "always" ],
-//'prefer-template': 'error',
+//"prefer-template": "error",
+"template-curly-spacing": [ "warn", "never" ],
 "comma-dangle": [ "warn", "only-multiline" ],
 "no-empty": [ "error", {
 "allowEmptyCatch": true
@@ -148,21 +150,20 @@ module.exports = {
 }
 },

-// Override for jest puppeteer
+// Override for TypeScript
 {
 "files": [
-"**/*.spec.js",
-"**/*.spec.jsx"
+"**/*.ts",
 ],
-env: {
-jest: true,
-},
-globals: {
-page: true,
-browser: true,
-context: true,
-jestPuppeteer: true,
-},
+extends: [
+"plugin:@typescript-eslint/recommended",
+],
+"rules": {
+"jsdoc/require-returns-type": "off",
+"jsdoc/require-param-type": "off",
+"@typescript-eslint/no-explicit-any": "off",
+"prefer-const": "off",
+}
 }
 ]
 };
.github/ISSUE_TEMPLATE/ask-for-help.yaml (33 changed lines, vendored)

@@ -6,7 +6,7 @@ body:
 - type: checkboxes
 id: no-duplicate-issues
 attributes:
-label: "⚠️ Please verify that this bug has NOT been raised before."
+label: "⚠️ Please verify that this question has NOT been raised before."
 description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
 options:
 - label: "I checked and didn't find similar issue"
@@ -24,7 +24,7 @@ body:
 required: true
 attributes:
 label: "📝 Describe your problem"
-description: "Please walk us through it step by step."
+description: "Please walk us through it step by step. Include all important details and add screenshots where appropriate"
 placeholder: "Describe what are you asking for..."
 - type: textarea
 id: error-msg
@@ -56,19 +56,20 @@ body:
 placeholder: "Ex. Google Chrome 95.0.4638.69"
 validations:
 required: true
-- type: input
-id: docker-version
+- type: textarea
+id: deployment-info
 attributes:
-label: "🐋 Docker Version"
-description: "If running with Docker, which version are you running?"
-placeholder: "Ex. Docker 20.10.9 / K8S / Podman"
+label: "🖥️ Deployment Environment"
+description: |
+examples:
+- **Runtime**: Docker 20.10.9 / nodejs 14.18.0 / K8S via ... v1.3.3 / ..
+- **Database**: sqlite/embedded mariadb/external mariadb
+- **Filesystem used to store the database on**: Windows/ZFS/btrfs/NFSv3 on a SSD/HDD/eMMC
+- **number of monitors**: 42
+value: |
+- Runtime:
+- Database:
+- Filesystem used to store the database on:
+- number of monitors:
 validations:
-required: false
+required: true
-- type: input
-id: nodejs-version
-attributes:
-label: "🟩 NodeJS Version"
-description: "If running with Node.js? which version are you running?"
-placeholder: "Ex. 14.18.0"
-validations:
-required: false
.github/ISSUE_TEMPLATE/bug_report.yaml (45 changed lines, vendored)

@@ -3,14 +3,14 @@ description: "Submit a bug report to help us improve"
 #title: "[Bug] "
 labels: [bug]
 body:
-- type: checkboxes
-id: no-duplicate-issues
-attributes:
-label: "⚠️ Please verify that this bug has NOT been raised before."
-description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
-options:
-- label: "I checked and didn't find similar issue"
+- type: textarea
+id: related-issues
+validations:
 required: true
+attributes:
+label: "📑 I have found these related issues/pull requests"
+description: "Search related issues by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=) and explain what the difference between them or explain that you are unable to find any related issues"
+placeholder: "Related to #1 by also touching the ... system. They should not be merged because ..."
 - type: checkboxes
 attributes:
 label: "🛡️ Security Policy"
@@ -31,7 +31,7 @@ body:
 required: true
 attributes:
 label: "👟 Reproduction steps"
-description: "How do you trigger this bug? Please walk us through it step by step."
+description: "How do you trigger this bug? Please walk us through it step by step. Include all important details and add screenshots where appropriate"
 placeholder: "..."
 - type: textarea
 id: expected-behavior
@@ -73,22 +73,23 @@ body:
 placeholder: "Ex. Google Chrome 95.0.4638.69"
 validations:
 required: true
-- type: input
-id: docker-version
+- type: textarea
+id: deployment-info
 attributes:
-label: "🐋 Docker Version"
-description: "If running with Docker, which version are you running?"
-placeholder: "Ex. Docker 20.10.9 / K8S / Podman"
+label: "🖥️ Deployment Environment"
+description: |
+examples:
+- **Runtime**: Docker 20.10.9 / nodejs 14.18.0 / K8S via ... v1.3.3 / ..
+- **Database**: sqlite/embedded mariadb/external mariadb
+- **Filesystem used to store the database on**: Windows/ZFS/btrfs/NFSv3 on a SSD/HDD/eMMC
+- **number of monitors**: 42
+value: |
+- Runtime:
+- Database:
+- Filesystem used to store the database on:
+- number of monitors:
 validations:
-required: false
+required: true
-- type: input
-id: nodejs-version
-attributes:
-label: "🟩 NodeJS Version"
-description: "If running with Node.js? which version are you running?"
-placeholder: "Ex. 14.18.0"
-validations:
-required: false
 - type: textarea
 id: logs
 attributes:
.github/ISSUE_TEMPLATE/feature_request.yaml (29 changed lines, vendored)

@@ -3,14 +3,14 @@ description: "Submit a proposal for a new feature"
 #title: "[Feature] "
 labels: [feature-request]
 body:
-- type: checkboxes
-id: no-duplicate-issues
-attributes:
-label: "⚠️ Please verify that this feature request has NOT been suggested before."
-description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
-options:
-- label: "I checked and didn't find similar feature request"
+- type: textarea
+id: related-issues
+validations:
 required: true
+attributes:
+label: "📑 I have found these related issues/pull requests"
+description: "Search related issues by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=) and explain what the difference between them or explain that you are unable to find any related issues"
+placeholder: "Related to #1 by also touching the ... system. They should not be merged because ..."
 - type: dropdown
 id: feature-area
 attributes:
@@ -18,10 +18,17 @@ body:
 description: "What kind of feature request is this?"
 multiple: true
 options:
-- API
-- New Notification
-- New Monitor
-- UI Feature
+- API / automation options
+- New notification-provider
+- Change to existing notification-provider
+- New monitor
+- Change to existing monitor
+- Dashboard
+- Status-page
+- Maintenance
+- Deployment
+- Certificate expiry
+- Settings
 - Other
 validations:
 required: true
.github/ISSUE_TEMPLATE/security.md (2 changed lines, vendored)

@@ -12,8 +12,6 @@ labels:

 DO NOT PROVIDE ANY DETAILS HERE. Please privately report to https://github.com/louislam/uptime-kuma/security/advisories/new.
-
-
 Why need this issue? It is because GitHub Advisory do not send a notification to @louislam, it is a workaround to do so.

 Your GitHub Advisory URL:

.github/PULL_REQUEST_TEMPLATE.md (7 changed lines, vendored)

@@ -15,7 +15,7 @@ Please delete any options that are not relevant.
 - Bug fix (non-breaking change which fixes an issue)
 - User interface (UI)
 - New feature (non-breaking change which adds functionality)
-- Breaking change (fix or feature that would cause existing functionality to not work as expected)
+- Breaking change (a fix or feature that would cause existing functionality to not work as expected)
 - Other
 - This change requires a documentation update

@@ -24,9 +24,8 @@ Please delete any options that are not relevant.
 - [ ] My code follows the style guidelines of this project
 - [ ] I ran ESLint and other linters for modified files
 - [ ] I have performed a self-review of my own code and tested it
-- [ ] I have commented my code, particularly in hard-to-understand areas
-(including JSDoc for methods)
-- [ ] My changes generate no new warnings
+- [ ] I have commented my code, particularly in hard-to-understand areas (including JSDoc for methods)
+- [ ] My changes generates no new warnings
 - [ ] My code needed automated testing. I have added them (this is optional task)

 ## Screenshots (if any)
.github/workflows/auto-test.yml (55 changed lines, vendored)

@@ -5,38 +5,38 @@ name: Auto Test

 on:
 push:
-branches: [ master ]
+branches: [ master, 1.23.X ]
 paths-ignore:
 - '*.md'
 pull_request:
-branches: [ master, 2.0.X ]
+branches: [ master, 1.23.X ]
 paths-ignore:
 - '*.md'

 jobs:
 auto-test:
-needs: [ check-linters ]
+needs: [ check-linters, e2e-test ]
 runs-on: ${{ matrix.os }}
 timeout-minutes: 15

 strategy:
 matrix:
 os: [macos-latest, ubuntu-latest, windows-latest, ARM64]
-node: [ 14, 20 ]
+node: [ 14, 20.5 ]
 # See supported Node.js release schedule at https://nodejs.org/en/about/releases/

 steps:
 - run: git config --global core.autocrlf false # Mainly for Windows
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

 - name: Use Node.js ${{ matrix.node }}
-uses: actions/setup-node@v3
+uses: actions/setup-node@v4
 with:
 node-version: ${{ matrix.node }}
 - run: npm install npm@9 -g
 - run: npm install
 - run: npm run build
-- run: npm test
+- run: npm run test-backend
 env:
 HEADLESS_TEST: 1
 JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}
@@ -55,10 +55,10 @@ jobs:

 steps:
 - run: git config --global core.autocrlf false # Mainly for Windows
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

 - name: Use Node.js ${{ matrix.node }}
-uses: actions/setup-node@v3
+uses: actions/setup-node@v4
 with:
 node-version: ${{ matrix.node }}
 - run: npm install npm@9 -g
@@ -69,42 +69,27 @@ jobs:

 steps:
 - run: git config --global core.autocrlf false # Mainly for Windows
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

 - name: Use Node.js 20
-uses: actions/setup-node@v3
+uses: actions/setup-node@v4
 with:
 node-version: 20
 - run: npm install
-- run: npm run lint
+- run: npm run lint:prod

-# TODO: Temporarily disable, as it cannot pass the test in 2.0.0 yet
-# e2e-tests:
-# needs: [ check-linters ]
-# runs-on: ubuntu-latest
-# steps:
-# - run: git config --global core.autocrlf false # Mainly for Windows
-# - uses: actions/checkout@v3
-#
-# - name: Use Node.js 14
-# uses: actions/setup-node@v3
-# with:
-# node-version: 14
-# - run: npm install
-# - run: npm run build
-# - run: npm run cy:test
-
-frontend-unit-tests:
+e2e-test:
 needs: [ check-linters ]
-runs-on: ubuntu-latest
+runs-on: ARM64
 steps:
 - run: git config --global core.autocrlf false # Mainly for Windows
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

-- name: Use Node.js 14
-uses: actions/setup-node@v3
+- name: Use Node.js 20
+uses: actions/setup-node@v4
 with:
-node-version: 14
+node-version: 20
 - run: npm install
+- run: npx playwright install
 - run: npm run build
-- run: npm run cy:run:unit
+- run: npm run test-e2e
.github/workflows/close-incorrect-issue.yml (4 changed lines, vendored)

@@ -14,10 +14,10 @@ jobs:
 node-version: [16]

 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

 - name: Use Node.js ${{ matrix.node-version }}
-uses: actions/setup-node@v3
+uses: actions/setup-node@v4
 with:
 node-version: ${{ matrix.node-version }}
 cache: 'npm'
.github/workflows/codeql-analysis.yml (43 changed lines, vendored, new file)

@@ -0,0 +1,43 @@
+name: "CodeQL"
+
+on:
+push:
+branches: [ "master", "1.23.X"]
+pull_request:
+branches: [ "master", "1.23.X"]
+schedule:
+- cron: '16 22 * * 0'
+
+jobs:
+analyze:
+name: Analyze
+runs-on: ubuntu-latest
+timeout-minutes: 360
+
+permissions:
+actions: read
+contents: read
+security-events: write
+
+strategy:
+fail-fast: false
+matrix:
+language: [ 'go', 'javascript-typescript' ]
+
+steps:
+- name: Checkout repository
+uses: actions/checkout@v3
+
+# Initializes the CodeQL tools for scanning.
+- name: Initialize CodeQL
+uses: github/codeql-action/init@v2
+with:
+languages: ${{ matrix.language }}
+
+- name: Autobuild
+uses: github/codeql-action/autobuild@v2
+
+- name: Perform CodeQL Analysis
+uses: github/codeql-action/analyze@v2
+with:
+category: "/language:${{matrix.language}}"
.github/workflows/json-yaml-validate.yml (6 changed lines, vendored)

@@ -6,7 +6,7 @@ on:
 pull_request:
 branches:
 - master
-- 2.0.X
+- 1.23.X
 workflow_dispatch:

 permissions:
@@ -17,11 +17,11 @@ jobs:
 json-yaml-validate:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

 - name: json-yaml-validate
 id: json-yaml-validate
-uses: GrantBirki/json-yaml-validate@v1.3.0
+uses: GrantBirki/json-yaml-validate@v2.4.0
 with:
 comment: "true" # enable comment mode
 exclude_file: ".github/config/exclude.txt" # gitignore style file for exclusions
.github/workflows/prevent-file-change.yml (17 changed lines, vendored, new file)

@@ -0,0 +1,17 @@
+name: prevent-file-change
+
+on:
+pull_request:
+
+jobs:
+check-file-changes:
+runs-on: ubuntu-latest
+steps:
+- name: Prevent file change
+uses: xalvarez/prevent-file-change-action@v1
+with:
+githubToken: ${{ secrets.GITHUB_TOKEN }}
+# Regex, /src/lang/*.json is not allowed to be changed, except for /src/lang/en.json
+pattern: '^(?!src/lang/en\.json$)src/lang/.*\.json$'
+trustedAuthors: UptimeKumaBot
+
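The pattern above is a negative lookahead: a pull request touching any JSON file under src/lang/ is flagged unless that file is src/lang/en.json, which matches the project's rule that other languages go through Weblate. As a quick illustrative check (not part of this changeset), the same pattern can be exercised in Node.js:

```js
// Illustrative sketch only: the pattern from prevent-file-change.yml tried against a few paths.
const pattern = /^(?!src\/lang\/en\.json$)src\/lang\/.*\.json$/;

console.log(pattern.test("src/lang/de-DE.json")); // true  -> such a change would be flagged
console.log(pattern.test("src/lang/en.json"));    // false -> en.json edits are allowed
console.log(pattern.test("src/pages/Login.vue")); // false -> files outside src/lang are ignored
```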
.github/workflows/stale-bot.yml (36 changed lines, vendored)

@@ -1,4 +1,4 @@
-name: 'Automatically close stale issues and PRs'
+name: 'Automatically close stale issues'
 on:
 workflow_dispatch:
 schedule:
@@ -9,14 +9,34 @@ jobs:
 stale:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/stale@v7
+- uses: actions/stale@v8
 with:
-stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 2 days.'
-close-issue-message: 'This issue was closed because it has been stalled for 2 days with no activity.'
-days-before-stale: 90
-days-before-close: 2
-days-before-pr-stale: 999999999
-days-before-pr-close: 1
+stale-issue-message: |-
+We are clearing up our old `help`-issues and your issue has been open for 60 days with no activity.
+If no comment is made and the stale label is not removed, this issue will be closed in 7 days.
+days-before-stale: 60
+days-before-close: 7
+days-before-pr-stale: -1
+days-before-pr-close: -1
 exempt-issue-labels: 'News,Medium,High,discussion,bug,doc,feature-request'
 exempt-issue-assignees: 'louislam'
 operations-per-run: 200
+- uses: actions/stale@v8
+with:
+stale-issue-message: |-
+This issue was marked as `cannot-reproduce` by a maintainer.
+If an issue is non-reproducible, we cannot fix it, as we do not know what the underlying issue is.
+If you have any ideas how we can reproduce this issue, we would love to hear them.
+
+We don't have a good way to deal with truely unreproducible issues and are going to close this issue in a month.
+If think there might be other differences in our environment or in how we tried to reproduce this, we would appreciate any ideas.
+close-issue-message: |-
+This issue will be closed as no way to reproduce it has been found.
+If you/somebody finds a way how to (semi-reliably) reproduce this, we can reopen this issue. ^^
+days-before-stale: 180
+days-before-close: 30
+days-before-pr-stale: -1
+days-before-pr-close: -1
+any-of-issue-labels: 'cannot-reproduce'
+operations-per-run: 200
.gitignore (3 changed lines, vendored)

@@ -15,9 +15,6 @@ dist-ssr
 /tmp
 .env

-cypress/videos
-cypress/screenshots
-
 /extra/healthcheck.exe
 /extra/healthcheck
 /extra/healthcheck-armv7
CONTRIBUTING.md (118 changed lines)

@@ -1,14 +1,14 @@
 # Project Info

-First of all, I want to thank everyone who made pull requests for Uptime Kuma. I never thought the GitHub Community would be so nice! Because of this, I also never thought that other people would actually read and edit my code. It is not very well structured or commented, sorry about that.
+First of all, I want to thank everyone who have made pull requests for Uptime Kuma. I never thought the GitHub community would be so nice! Because of this, I also never thought that other people would actually read and edit my code. It is not very well structured or commented, sorry about that.

-The project was created with vite.js (vue3). Then I created a subdirectory called "server" for the server part. Both frontend and backend share the same package.json.
+The project was created with vite.js (vue3). Then I created a subdirectory called "server" for the server part. Both frontend and backend share the same `package.json`.

 The frontend code builds into "dist" directory. The server (express.js) exposes the "dist" directory as the root of the endpoint. This is how production is working.

 ## Key Technical Skills

-- Node.js (You should know about promise, async/await and arrow function etc.)
+- Node.js (You should know about promises, async/await, arrow functions, etc.)
 - Socket.io
 - SCSS
 - Vue.js
@@ -34,18 +34,21 @@ Yes or no, it depends on what you will try to do. Since I don't want to waste your time,

 Here are some references:

-### ✅ Usually accepted:
+### ✅ Usually accepted

 - Bug fix
 - Security fix
 - Adding notification providers
 - Adding new language files (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
 - Adding new language keys: `$t("...")`

-### ⚠️ Discussion required:
+### ⚠️ Discussion required

 - Large pull requests
 - New features

-### ❌ Won't be merged:
+### ❌ Won't be merged

 - A dedicated PR for translating existing languages (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
 - Do not pass the auto-test
 - Any breaking changes
@@ -59,27 +62,25 @@ Here are some references:

 The above cases may not cover all possible situations.

-I (@louislam) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spend on it. Therefore, it is essential to have a discussion beforehand.
+I ([@louislam](https://github.com/louislam)) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spent on it. Therefore, it is essential to have a discussion beforehand.

 I will assign your pull request to a [milestone](https://github.com/louislam/uptime-kuma/milestones), if I plan to review and merge it.

 Also, please don't rush or ask for an ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.

-
 ### Recommended Pull Request Guideline

 Before deep into coding, discussion first is preferred. Creating an empty pull request for discussion would be recommended.

 1. Fork the project
-1. Clone your fork repo to local
-1. Create a new branch
-1. Create an empty commit
-`git commit -m "[empty commit] pull request for <YOUR TASK NAME>" --allow-empty`
-1. Push to your fork repo
-1. Create a pull request: https://github.com/louislam/uptime-kuma/compare
-1. Write a proper description
-1. Click "Change to draft"
-1. Discussion
+2. Clone your fork repo to local
+3. Create a new branch
+4. Create an empty commit: `git commit -m "<YOUR TASK NAME>" --allow-empty`
+5. Push to your fork repo
+6. Prepare a pull request: https://github.com/louislam/uptime-kuma/compare
+7. Write a proper description. You can mention @louislam in it, so @louislam will get the notification.
+8. Create your pull request as a Draft
+9. Wait for the discussion

 ## Project Styles

@@ -112,6 +113,18 @@ I personally do not like something that requires so many configurations before you
 - IDE that supports [`ESLint`](https://eslint.org/) and EditorConfig (I am using [`IntelliJ IDEA`](https://www.jetbrains.com/idea/))
 - A SQLite GUI tool (f.ex. [`SQLite Expert Personal`](https://www.sqliteexpert.com/download.html) or [`DBeaver Community`](https://dbeaver.io/download/))

+### GitHub Codespaces
+
+If you don't want to setup an local environment, you can now develop on GitHub Codespaces, read more:
+
+https://github.com/louislam/uptime-kuma/tree/master/.devcontainer
+
+## Git Branches
+
+- `master`: 2.X.X development. If you want to add a new feature, your pull request should base on this.
+- `1.23.X`: 1.23.X development. If you want to fix a bug for v1 and v2, your pull request should base on this.
+- All other branches are unused, outdated or for dev.
+
 ## Install Dependencies for Development

 ```bash
@@ -131,7 +144,8 @@ npm run dev
 ```

 But sometimes, you would like to restart the server, but not the frontend, you can run these commands in two terminals:
-```
+
+```bash
 npm run start-frontend-dev
 npm run start-server-dev
 ```
@@ -140,15 +154,14 @@ npm run start-server-dev

 It binds to `0.0.0.0:3001` by default.

-
 It is mainly a socket.io app + express.js.

 express.js is used for:

 - entry point such as redirecting to a status page or the dashboard
 - serving the frontend built files (index.html, .js and .css etc.)
 - serving internal APIs of the status page


 ### Structure in /server/

 - jobs/ (Jobs that are running in another process)
@@ -187,8 +200,7 @@ The data and socket logic are in `src/mixins/socket.js`.

 ## Database Migration

-1. Create `patch-{name}.sql` in `./db/`
-2. Add your patch filename in the `patchList` list in `./server/database.js`
+See: https://github.com/louislam/uptime-kuma/tree/master/db/knex_migrations

 ## Unit Test

@@ -218,9 +230,9 @@ If for security / bug / other reasons, a library must be updated, breaking changes

 ## Translations

-Please add **all** the strings which are translatable to `src/lang/en.json` (If translation keys are omitted, they can not be translated).
+Please add **all** the strings which are translatable to `src/lang/en.json` (if translation keys are omitted, they can not be translated.)

-**Don't include any other languages in your initial Pull-Request** (even if this is your mother tongue), to avoid merge-conflicts between weblate and `master`.
+**Don't include any other languages in your initial pull request** (even if this is your mother tongue), to avoid merge-conflicts between weblate and `master`.
 The translations can then (after merging a PR into `master`) be translated by awesome people donating their language skills.

 If you want to help by translating Uptime Kuma into your language, please visit the [instructions on how to translate using weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
@@ -232,10 +244,46 @@ My mother language is not English and my grammar is not that great.

 ## Wiki

-Since there is no way to make a pull request to wiki's repo, I have set up another repo to do that.
+Since there is no way to make a pull request to the wiki, I have set up another repo to do that.

 https://github.com/louislam/uptime-kuma-wiki

+## Docker
+
+### Arch
+
+- amd64
+- arm64
+- armv7
+
+### Docker Tags
+
+#### v2
+
+- `2`, `latest-2`: v2 with full features such as Chromium and bundled MariaDB
+- `2.x.x`
+- `2-slim`: v2 with basic features
+- `2.x.x-slim`
+- `beta2`: Latest beta build
+- `2.x.x-beta.x`
+- `nightly2`: Dev build
+- `base2`: Basic Debian setup without Uptime Kuma source code (Full features)
+- `base2-slim`: Basic Debian setup without Uptime Kuma source code
+- `pr-test2`: For testing pull request without setting up a local environment
+
+#### v1
+
+- `1`, `latest`, `1-debian`, `debian`: Latest version of v1
+- `1.x.x`, `1.x.x-debian`
+- `1.x.x-beta.x`: Beta build
+- `beta`: Latest beta build
+- `nightly`: Dev build
+- `base-debian`: Basic Debian setup without Uptime Kuma source code
+- `pr-test`: For testing pull request without setting up a local environment
+- `base-alpine`: (Deprecated) Basic Alpine setup without Uptime Kuma source code
+- `1-alpine`, `alpine`: (Deprecated)
+- `1.x.x-alpine`: (Deprecated)
+
 ## Maintainer

 Check the latest issues and pull requests:
@@ -246,12 +294,12 @@ https://github.com/louislam/uptime-kuma/issues?q=sort%3Aupdated-desc
 1. Draft a release note
 2. Make sure the repo is cleared
 3. If the healthcheck is updated, remember to re-compile it: `npm run build-docker-builder-go`
-3. `npm run release-final with env vars: `VERSION` and `GITHUB_TOKEN`
-4. Wait until the `Press any key to continue`
-5. `git push`
-6. Publish the release note as 1.X.X
-7. Press any key to continue
-8. Deploy to the demo server: `npm run deploy-demo-server`
+4. `npm run release-final` with env vars: `VERSION` and `GITHUB_TOKEN`
+5. Wait until the `Press any key to continue`
+6. `git push`
+7. Publish the release note as 1.X.X
+8. Press any key to continue
+9. Deploy to the demo server: `npm run deploy-demo-server`

 Checking:

@@ -284,3 +332,11 @@ git remote add production https://github.com/louislam/uptime-kuma.wiki.git
 git pull
 git push production master
 ```
+
+## Useful Commands
+
+Change the base of a pull request such as `master` to `1.23.X`
+
+```bash
+git rebase --onto <new parent> <old parent>
+```
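The Database Migration section above now points at db/knex_migrations instead of the old patch-*.sql list. As a rough orientation only, a Knex-style migration file generally follows the standard up/down shape shown below; the file name, table, and column here are invented for illustration and are not taken from the repository:

```js
// Hypothetical sketch of a Knex migration (e.g. db/knex_migrations/2023-08-12-0000-example.js).
// Table and column names below are illustrative only.
exports.up = function (knex) {
    return knex.schema.alterTable("monitor", (table) => {
        table.text("example_note"); // add an illustrative column
    });
};

exports.down = function (knex) {
    return knex.schema.alterTable("monitor", (table) => {
        table.dropColumn("example_note"); // undo the change
    });
};
```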
README.md (86 changed lines)

@@ -6,7 +6,7 @@

 Uptime Kuma is an easy-to-use self-hosted monitoring tool.

-<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
+<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma?style=flat" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
 [](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/projects/uptime-kuma/uptime-kuma/">
 <img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" />
 </a>
@@ -17,23 +17,23 @@ Uptime Kuma is an easy-to-use self-hosted monitoring tool.

 Try it!

-- Tokyo Demo Server: https://demo.uptime.kuma.pet (Sponsored by [Uptime Kuma Sponsors](https://github.com/louislam/uptime-kuma#%EF%B8%8F-sponsors))
+Demo Server (Location: Frankfurt - Germany): https://demo.kuma.pet/start-demo

-It is a temporary live demo, all data will be deleted after 10 minutes. Use the one that is closer to you, but I suggest that you should install and try it out for the best demo experience.
+It is a temporary live demo, all data will be deleted after 10 minutes. Sponsored by [Uptime Kuma Sponsors](https://github.com/louislam/uptime-kuma#%EF%B8%8F-sponsors).

 ## ⭐ Features

-* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / HTTP(s) Json Query / Ping / DNS Record / Push / Steam Game Server / Docker Containers
-* Fancy, Reactive, Fast UI/UX
-* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications)
-* 20-second intervals
-* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/lang)
-* Multiple status pages
-* Map status pages to specific domains
-* Ping chart
-* Certificate info
-* Proxy support
-* 2FA support
+- Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / HTTP(s) Json Query / Ping / DNS Record / Push / Steam Game Server / Docker Containers
+- Fancy, Reactive, Fast UI/UX
+- Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications)
+- 20-second intervals
+- [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/lang)
+- Multiple status pages
+- Map status pages to specific domains
+- Ping chart
+- Certificate info
+- Proxy support
+- 2FA support

 ## 🔧 How to Install

@@ -43,19 +43,21 @@ It is a temporary live demo, all data will be deleted after 10 minutes.
 docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
 ```

-⚠️ Please use a **local volume** only. Other types such as NFS are not supported.
-
 Uptime Kuma is now running on http://localhost:3001

+> [!WARNING]
+> File Systems like **NFS** (Network File System) are **NOT** supported. Please map to a local directory or volume.
+
 ### 💪🏻 Non-Docker

 Requirements:

 - Platform
 - ✅ Major Linux distros such as Debian, Ubuntu, CentOS, Fedora and ArchLinux etc.
 - ✅ Windows 10 (x64), Windows Server 2012 R2 (x64) or higher
 - ❌ Replit / Heroku
 - [Node.js](https://nodejs.org/en/download/) 14 / 16 / 18 / 20.4
-- [npm](https://docs.npmjs.com/cli/) >= 7
+- [npm](https://docs.npmjs.com/cli/) 9
 - [Git](https://git-scm.com/downloads)
 - [pm2](https://pm2.keymetrics.io/) - For running Uptime Kuma in the background

@@ -76,9 +78,8 @@ npm install pm2 -g && pm2 install pm2-logrotate

 # Start Server
 pm2 start server/server.js --name uptime-kuma
-
-
 ```

 Uptime Kuma is now running on http://localhost:3001

 More useful PM2 Commands
@@ -91,10 +92,6 @@ pm2 monit
 pm2 save && pm2 startup
 ```

-### Windows Portable (x64)
-
-https://github.com/louislam/uptime-kuma/releases/download/1.23.1/uptime-kuma-windows-x64-portable-1.23.1.zip
-
 ### Advanced Installation

 If you need more options or need to browse via a reverse proxy, please read:
@@ -113,10 +110,6 @@ I will assign requests/issues to the next milestone.

 https://github.com/louislam/uptime-kuma/milestones

-Project Plan:
-
-https://github.com/users/louislam/projects/4/views/1
-
 ## ❤️ Sponsors

 Thank you so much! (GitHub Sponsors will be updated manually. OpenCollective sponsors will be updated automatically, the list will be cached by GitHub though. It may need some time to be updated)
@@ -143,29 +136,33 @@ Telegram Notification Sample:

 ## Motivation

-* I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the close ones is statping. Unfortunately, it is not stable and no longer maintained.
-* Want to build a fancy UI.
-* Learn Vue 3 and vite.js.
-* Show the power of Bootstrap 5.
-* Try to use WebSocket with SPA instead of REST API.
-* Deploy my first Docker image to Docker Hub.
+- I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the closest ones is statping. Unfortunately, it is not stable and no longer maintained.
+- Wanted to build a fancy UI.
+- Learn Vue 3 and vite.js.
+- Show the power of Bootstrap 5.
+- Try to use WebSocket with SPA instead of a REST API.
+- Deploy my first Docker image to Docker Hub.

-If you love this project, please consider giving me a ⭐.
+If you love this project, please consider giving it a ⭐.

 ## 🗣️ Discussion / Ask for Help

-⚠️ For any general or technical questions, please don't send me an email, as I am unable to provide support in that manner. I will not response if you asked such questions.
+⚠️ For any general or technical questions, please don't send me an email, as I am unable to provide support in that manner. I will not respond if you ask questions there.

-I recommend using Google, GitHub Issues, or Uptime Kuma's Subreddit for finding answers to your question. If you cannot find the information you need, feel free to ask:
+I recommend using Google, GitHub Issues, or Uptime Kuma's subreddit for finding answers to your question. If you cannot find the information you need, feel free to ask:

 - [GitHub Issues](https://github.com/louislam/uptime-kuma/issues)
-- [Subreddit r/Uptime kuma](https://www.reddit.com/r/UptimeKuma/)
+- [Subreddit (r/UptimeKuma)](https://www.reddit.com/r/UptimeKuma/)

-My Reddit account: [u/louislamlam](https://reddit.com/u/louislamlam).
-You can mention me if you ask a question on Reddit.
+My Reddit account: [u/louislamlam](https://reddit.com/u/louislamlam)
+You can mention me if you ask a question on the subreddit.

+## Contributions
+
-## Contribute
+### Create Pull Requests

+We DO NOT accept all types of pull requests and do not want to waste your time. Please be sure that you have read and follow pull request rules:
+[CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma](https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma)
+
 ### Test Pull Requests

@@ -179,15 +176,16 @@ https://github.com/louislam/uptime-kuma/wiki/Test-Pull-Requests
 Check out the latest beta release here: https://github.com/louislam/uptime-kuma/releases

 ### Bug Reports / Feature Requests

 If you want to report a bug or request a new feature, feel free to open a [new issue](https://github.com/louislam/uptime-kuma/issues).

 ### Translations

 If you want to translate Uptime Kuma into your language, please visit [Weblate Readme](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).

-## Spelling & Grammar
+### Spelling & Grammar

 Feel free to correct the grammar in the documentation or code.
-My mother language is not english and my grammar is not that great.
+My mother language is not English and my grammar is not that great.

-
-### Create Pull Requests
-If you want to modify Uptime Kuma, please read this guide and follow the rules here: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md
@@ -3,7 +3,7 @@
 ## Reporting a Vulnerability

 1. Please report security issues to https://github.com/louislam/uptime-kuma/security/advisories/new.
-1. Please also create an empty security issue to alert me, as GitHub Advisories do not send a notification, I probably will miss it without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md
+2. Please also create an empty security issue to alert me, as GitHub Advisories do not send a notification, I probably will miss it without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md

 Do not use the public issue tracker or discuss it in public as it will cause more damage.

@@ -20,7 +20,7 @@ You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions
 ### Upgradable Docker Tags

 | Tag | Supported |
-| ------- | ------------------ |
+|-|-|
 | 1 | :white_check_mark: |
 | 1-debian | :white_check_mark: |
 | latest | :white_check_mark: |
@@ -1,7 +0,0 @@
-const config = {};
-
-if (process.env.TEST_FRONTEND) {
-config.presets = [ "@babel/preset-env" ];
-}
-
-module.exports = config;
compose.yaml (9 changed lines, new file)

@@ -0,0 +1,9 @@
+services:
+uptime-kuma:
+image: louislam/uptime-kuma:1
+volumes:
+- ./data:/app/data
+ports:
+# <Host Port>:<Container Port>
+- 3001:3001
+restart: unless-stopped
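Assuming a Docker Compose v2 CLI, this file would typically be brought up from the repository root with `docker compose up -d` and stopped with `docker compose down`; the `./data` bind mount keeps Uptime Kuma's data directory (including its SQLite database) on the host.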
config/playwright.config.js (60 changed lines, new file)

@@ -0,0 +1,60 @@
+import { defineConfig, devices } from "@playwright/test";
+
+const port = 30001;
+const url = `http://localhost:${port}`;
+
+export default defineConfig({
+// Look for test files in the "tests" directory, relative to this configuration file.
+testDir: "../test/e2e",
+outputDir: "../private/playwright-test-results",
+fullyParallel: false,
+locale: "en-US",
+
+// Fail the build on CI if you accidentally left test.only in the source code.
+forbidOnly: !!process.env.CI,
+
+// Retry on CI only.
+retries: process.env.CI ? 2 : 0,
+
+// Opt out of parallel tests on CI.
+workers: 1,
+
+// Reporter to use
+reporter: [
+[
+"html", {
+outputFolder: "../private/playwright-report",
+open: "never",
+}
+],
+],
+
+use: {
+// Base URL to use in actions like `await page.goto('/')`.
+baseURL: url,
+
+// Collect trace when retrying the failed test.
+trace: "on-first-retry",
+},
+
+// Configure projects for major browsers.
+projects: [
+{
+name: "chromium",
+use: { ...devices["Desktop Chrome"] },
+},
+/*
+{
+name: "firefox",
+use: { browserName: "firefox" }
+},*/
+],
+
+// Run your local dev server before starting the tests.
+webServer: {
+command: `node extra/remove-playwright-test-data.js && node server/server.js --port=${port} --data-dir=./data/playwright-test`,
+url,
+reuseExistingServer: false,
+cwd: "../",
+},
+});
@ -1,9 +1,7 @@
|
||||||
import legacy from "@vitejs/plugin-legacy";
|
|
||||||
import vue from "@vitejs/plugin-vue";
|
import vue from "@vitejs/plugin-vue";
|
||||||
import { defineConfig } from "vite";
|
import { defineConfig } from "vite";
|
||||||
import visualizer from "rollup-plugin-visualizer";
|
import visualizer from "rollup-plugin-visualizer";
|
||||||
import viteCompression from "vite-plugin-compression";
|
import viteCompression from "vite-plugin-compression";
|
||||||
import commonjs from "vite-plugin-commonjs";
|
|
||||||
|
|
||||||
const postCssScss = require("postcss-scss");
|
const postCssScss = require("postcss-scss");
|
||||||
const postcssRTLCSS = require("postcss-rtlcss");
|
const postcssRTLCSS = require("postcss-rtlcss");
|
||||||
|
@ -23,11 +21,7 @@ export default defineConfig({
|
||||||
"CODESPACE_NAME": JSON.stringify(process.env.CODESPACE_NAME),
|
"CODESPACE_NAME": JSON.stringify(process.env.CODESPACE_NAME),
|
||||||
},
|
},
|
||||||
plugins: [
|
plugins: [
|
||||||
commonjs(),
|
|
||||||
vue(),
|
vue(),
|
||||||
legacy({
|
|
||||||
targets: [ "since 2015" ],
|
|
||||||
}),
|
|
||||||
visualizer({
|
visualizer({
|
||||||
filename: "tmp/dist-stats.html"
|
filename: "tmp/dist-stats.html"
|
||||||
}),
|
}),
|
||||||
|
|
|
@ -272,10 +272,10 @@ async function createTables() {
|
||||||
await knex.schema.createTable("notification", (table) => {
|
await knex.schema.createTable("notification", (table) => {
|
||||||
table.increments("id");
|
table.increments("id");
|
||||||
table.string("name", 255);
|
table.string("name", 255);
|
||||||
table.string("config", 255); // TODO: should use TEXT!
|
|
||||||
table.boolean("active").notNullable().defaultTo(true);
|
table.boolean("active").notNullable().defaultTo(true);
|
||||||
table.integer("user_id").unsigned();
|
table.integer("user_id").unsigned();
|
||||||
table.boolean("is_default").notNullable().defaultTo(false);
|
table.boolean("is_default").notNullable().defaultTo(false);
|
||||||
|
table.text("config", "longtext");
|
||||||
});
|
});
|
||||||
|
|
||||||
// monitor_notification
|
// monitor_notification
|
||||||
|
@ -493,8 +493,11 @@ ALTER TABLE monitor
|
||||||
await knex.schema.table("monitor", function (table) {
|
await knex.schema.table("monitor", function (table) {
|
||||||
table.string("kafka_producer_topic", 255);
|
table.string("kafka_producer_topic", 255);
|
||||||
table.text("kafka_producer_brokers");
|
table.text("kafka_producer_brokers");
|
||||||
table.integer("kafka_producer_ssl");
|
|
||||||
table.string("kafka_producer_allow_auto_topic_creation", 255);
|
// patch-fix-kafka-producer-booleans.sql
|
||||||
|
table.boolean("kafka_producer_ssl").defaultTo(0).notNullable();
|
||||||
|
table.boolean("kafka_producer_allow_auto_topic_creation").defaultTo(0).notNullable();
|
||||||
|
|
||||||
table.text("kafka_producer_sasl_options");
|
table.text("kafka_producer_sasl_options");
|
||||||
table.text("kafka_producer_message");
|
table.text("kafka_producer_message");
|
||||||
});
|
});
|
||||||
|
|
15
db/knex_migrations/2023-09-29-0000-heartbeat-retires.js
Normal file
15
db/knex_migrations/2023-09-29-0000-heartbeat-retires.js
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
exports.up = function (knex) {
|
||||||
|
// Add new column heartbeat.retries
|
||||||
|
return knex.schema
|
||||||
|
.alterTable("heartbeat", function (table) {
|
||||||
|
table.integer("retries").notNullable().defaultTo(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.down = function (knex) {
|
||||||
|
return knex.schema
|
||||||
|
.alterTable("heartbeat", function (table) {
|
||||||
|
table.dropColumn("retries");
|
||||||
|
});
|
||||||
|
};
|
16
db/knex_migrations/2023-10-08-0000-mqtt-query.js
Normal file
16
db/knex_migrations/2023-10-08-0000-mqtt-query.js
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
exports.up = function (knex) {
|
||||||
|
// Add new column monitor.mqtt_check_type
|
||||||
|
return knex.schema
|
||||||
|
.alterTable("monitor", function (table) {
|
||||||
|
table.string("mqtt_check_type", 255).notNullable().defaultTo("keyword");
|
||||||
|
});
|
||||||
|
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.down = function (knex) {
|
||||||
|
// Drop column monitor.mqtt_check_type
|
||||||
|
return knex.schema
|
||||||
|
.alterTable("monitor", function (table) {
|
||||||
|
table.dropColumn("mqtt_check_type");
|
||||||
|
});
|
||||||
|
};
|
14
db/knex_migrations/2023-10-11-1915-push-token-to-32.js
Normal file
14
db/knex_migrations/2023-10-11-1915-push-token-to-32.js
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
exports.up = function (knex) {
|
||||||
|
// update monitor.push_token to 32 length
|
||||||
|
return knex.schema
|
||||||
|
.alterTable("monitor", function (table) {
|
||||||
|
table.string("push_token", 32).alter();
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.down = function (knex) {
|
||||||
|
return knex.schema
|
||||||
|
.alterTable("monitor", function (table) {
|
||||||
|
table.string("push_token", 20).alter();
|
||||||
|
});
|
||||||
|
};
|
21
db/knex_migrations/2023-10-16-0000-create-remote-browsers.js
Normal file
21
db/knex_migrations/2023-10-16-0000-create-remote-browsers.js
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
exports.up = function (knex) {
|
||||||
|
return knex.schema
|
||||||
|
.createTable("remote_browser", function (table) {
|
||||||
|
table.increments("id");
|
||||||
|
table.string("name", 255).notNullable();
|
||||||
|
table.string("url", 255).notNullable();
|
||||||
|
table.integer("user_id").unsigned();
|
||||||
|
}).alterTable("monitor", function (table) {
|
||||||
|
// Add new column monitor.remote_browser
|
||||||
|
table.integer("remote_browser").nullable().defaultTo(null).unsigned()
|
||||||
|
.index()
|
||||||
|
.references("id")
|
||||||
|
.inTable("remote_browser");
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.down = function (knex) {
|
||||||
|
return knex.schema.dropTable("remote_browser").alterTable("monitor", function (table) {
|
||||||
|
table.dropColumn("remote_browser");
|
||||||
|
});
|
||||||
|
};
|
24
db/knex_migrations/2023-12-21-0000-stat-ping-min-max.js
Normal file
24
db/knex_migrations/2023-12-21-0000-stat-ping-min-max.js
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
exports.up = function (knex) {
|
||||||
|
return knex.schema
|
||||||
|
.alterTable("stat_daily", function (table) {
|
||||||
|
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
|
||||||
|
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
|
||||||
|
})
|
||||||
|
.alterTable("stat_minutely", function (table) {
|
||||||
|
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
|
||||||
|
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
|
||||||
|
});
|
||||||
|
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.down = function (knex) {
|
||||||
|
return knex.schema
|
||||||
|
.alterTable("stat_daily", function (table) {
|
||||||
|
table.dropColumn("ping_min");
|
||||||
|
table.dropColumn("ping_max");
|
||||||
|
})
|
||||||
|
.alterTable("stat_minutely", function (table) {
|
||||||
|
table.dropColumn("ping_min");
|
||||||
|
table.dropColumn("ping_max");
|
||||||
|
});
|
||||||
|
};
|
26
db/knex_migrations/2023-12-22-0000-hourly-uptime.js
Normal file
26
db/knex_migrations/2023-12-22-0000-hourly-uptime.js
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
exports.up = function (knex) {
|
||||||
|
return knex.schema
|
||||||
|
.createTable("stat_hourly", function (table) {
|
||||||
|
table.increments("id");
|
||||||
|
table.comment("This table contains the hourly aggregate statistics for each monitor");
|
||||||
|
table.integer("monitor_id").unsigned().notNullable()
|
||||||
|
.references("id").inTable("monitor")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
table.integer("timestamp")
|
||||||
|
.notNullable()
|
||||||
|
.comment("Unix timestamp rounded down to the nearest hour");
|
||||||
|
table.float("ping").notNullable().comment("Average ping in milliseconds");
|
||||||
|
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
|
||||||
|
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
|
||||||
|
table.smallint("up").notNullable();
|
||||||
|
table.smallint("down").notNullable();
|
||||||
|
|
||||||
|
table.unique([ "monitor_id", "timestamp" ]);
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.down = function (knex) {
|
||||||
|
return knex.schema
|
||||||
|
.dropTable("stat_hourly");
|
||||||
|
};
|
|
@ -1,8 +1,9 @@
|
||||||
## Info
|
# Info
|
||||||
|
|
||||||
https://knexjs.org/guide/migrations.html#knexfile-in-other-languages
|
https://knexjs.org/guide/migrations.html#knexfile-in-other-languages
|
||||||
|
|
||||||
## Basic rules
|
## Basic rules
|
||||||
|
|
||||||
- All tables must have a primary key named `id`
|
- All tables must have a primary key named `id`
|
||||||
- Filename format: `YYYY-MM-DD-HHMM-patch-name.js`
|
- Filename format: `YYYY-MM-DD-HHMM-patch-name.js`
|
||||||
- Avoid native SQL syntax, use knex methods, because Uptime Kuma supports SQLite and MariaDB.
|
- Avoid native SQL syntax, use knex methods, because Uptime Kuma supports SQLite and MariaDB.
|
||||||
|
|
|
@ -1,5 +1,7 @@
|
||||||
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
BEGIN TRANSACTION;
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
ALTER TABLE monitor_group
|
ALTER TABLE monitor_group
|
||||||
ADD send_url BOOLEAN DEFAULT 0 NOT NULL;
|
ADD send_url BOOLEAN DEFAULT 0 NOT NULL;
|
||||||
|
|
||||||
COMMIT;
|
COMMIT;
|
||||||
|
|
|
@ -1,5 +1,7 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
BEGIN TRANSACTION;
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
ALTER TABLE monitor
|
ALTER TABLE monitor
|
||||||
ADD game VARCHAR(255);
|
ADD game VARCHAR(255);
|
||||||
COMMIT
|
|
||||||
|
COMMIT;
|
||||||
|
|
|
@ -1,4 +1,7 @@
|
||||||
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
BEGIN TRANSACTION;
|
BEGIN TRANSACTION;
|
||||||
ALTER TABLE status_page ADD google_analytics_tag_id VARCHAR;
|
|
||||||
|
ALTER TABLE status_page
|
||||||
|
ADD google_analytics_tag_id VARCHAR;
|
||||||
|
|
||||||
COMMIT;
|
COMMIT;
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
BEGIN TRANSACTION;
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
ALTER TABLE monitor
|
ALTER TABLE monitor
|
||||||
ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE;
|
ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE;
|
||||||
|
|
||||||
COMMIT
|
COMMIT;
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
BEGIN TRANSACTION;
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
ALTER TABLE monitor
|
ALTER TABLE monitor
|
||||||
|
@ -15,4 +16,4 @@ ALTER TABLE monitor
|
||||||
ALTER TABLE monitor
|
ALTER TABLE monitor
|
||||||
ADD radius_secret VARCHAR(255);
|
ADD radius_secret VARCHAR(255);
|
||||||
|
|
||||||
COMMIT
|
COMMIT;
|
||||||
|
|
|
@ -3,4 +3,5 @@ BEGIN TRANSACTION;
|
||||||
|
|
||||||
ALTER TABLE monitor
|
ALTER TABLE monitor
|
||||||
ADD timeout DOUBLE default 0 not null;
|
ADD timeout DOUBLE default 0 not null;
|
||||||
|
|
||||||
COMMIT;
|
COMMIT;
|
|
@ -1,5 +1,6 @@
|
||||||
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
BEGIN TRANSACTION;
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
CREATE TABLE [api_key] (
|
CREATE TABLE [api_key] (
|
||||||
[id] INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
|
[id] INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
|
||||||
[key] VARCHAR(255) NOT NULL,
|
[key] VARCHAR(255) NOT NULL,
|
||||||
|
@ -10,4 +11,5 @@ CREATE TABLE [api_key] (
|
||||||
[expires] DATETIME DEFAULT NULL,
|
[expires] DATETIME DEFAULT NULL,
|
||||||
CONSTRAINT FK_user FOREIGN KEY ([user_id]) REFERENCES [user]([id]) ON DELETE CASCADE ON UPDATE CASCADE
|
CONSTRAINT FK_user FOREIGN KEY ([user_id]) REFERENCES [user]([id]) ON DELETE CASCADE ON UPDATE CASCADE
|
||||||
);
|
);
|
||||||
|
|
||||||
COMMIT;
|
COMMIT;
|
||||||
|
|
34
db/old_migrations/patch-fix-kafka-producer-booleans.sql
Normal file
34
db/old_migrations/patch-fix-kafka-producer-booleans.sql
Normal file
|
@ -0,0 +1,34 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
|
-- Rename COLUMNs to another one (suffixed by `_old`)
|
||||||
|
ALTER TABLE monitor
|
||||||
|
RENAME COLUMN kafka_producer_ssl TO kafka_producer_ssl_old;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
RENAME COLUMN kafka_producer_allow_auto_topic_creation TO kafka_producer_allow_auto_topic_creation_old;
|
||||||
|
|
||||||
|
-- Add correct COLUMNs
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD COLUMN kafka_producer_ssl BOOLEAN default 0 NOT NULL;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD COLUMN kafka_producer_allow_auto_topic_creation BOOLEAN default 0 NOT NULL;
|
||||||
|
|
||||||
|
-- These SQL is still not fully safe. See https://github.com/louislam/uptime-kuma/issues/4039.
|
||||||
|
|
||||||
|
-- Set bring old values from `_old` COLUMNs to correct ones
|
||||||
|
-- UPDATE monitor SET kafka_producer_allow_auto_topic_creation = monitor.kafka_producer_allow_auto_topic_creation_old
|
||||||
|
-- WHERE monitor.kafka_producer_allow_auto_topic_creation_old IS NOT NULL;
|
||||||
|
|
||||||
|
-- UPDATE monitor SET kafka_producer_ssl = monitor.kafka_producer_ssl_old
|
||||||
|
-- WHERE monitor.kafka_producer_ssl_old IS NOT NULL;
|
||||||
|
|
||||||
|
-- Remove old COLUMNs
|
||||||
|
ALTER TABLE monitor
|
||||||
|
DROP COLUMN kafka_producer_allow_auto_topic_creation_old;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
DROP COLUMN kafka_producer_ssl_old;
|
||||||
|
|
||||||
|
COMMIT;
|
10
db/old_migrations/patch-notification-config.sql
Normal file
10
db/old_migrations/patch-notification-config.sql
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
|
-- SQLite: Change the data type of the column "config" from VARCHAR to TEXT
|
||||||
|
ALTER TABLE notification RENAME COLUMN config TO config_old;
|
||||||
|
ALTER TABLE notification ADD COLUMN config TEXT;
|
||||||
|
UPDATE notification SET config = config_old;
|
||||||
|
ALTER TABLE notification DROP COLUMN config_old;
|
||||||
|
|
||||||
|
COMMIT;
|
|
@ -1,5 +1,7 @@
|
||||||
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
BEGIN TRANSACTION;
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
ALTER TABLE monitor
|
ALTER TABLE monitor
|
||||||
ADD packet_size INTEGER DEFAULT 56 NOT NULL;
|
ADD packet_size INTEGER DEFAULT 56 NOT NULL;
|
||||||
|
|
||||||
COMMIT;
|
COMMIT;
|
||||||
|
|
|
@ -18,5 +18,4 @@ drop table setting;
|
||||||
|
|
||||||
alter table setting_dg_tmp rename to setting;
|
alter table setting_dg_tmp rename to setting;
|
||||||
|
|
||||||
|
|
||||||
COMMIT;
|
COMMIT;
|
||||||
|
|
|
@ -1,6 +1,11 @@
|
||||||
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
BEGIN TRANSACTION;
|
BEGIN TRANSACTION;
|
||||||
ALTER TABLE status_page ADD footer_text TEXT;
|
|
||||||
ALTER TABLE status_page ADD custom_css TEXT;
|
ALTER TABLE status_page
|
||||||
ALTER TABLE status_page ADD show_powered_by BOOLEAN NOT NULL DEFAULT 1;
|
ADD footer_text TEXT;
|
||||||
|
ALTER TABLE status_page
|
||||||
|
ADD custom_css TEXT;
|
||||||
|
ALTER TABLE status_page
|
||||||
|
ADD show_powered_by BOOLEAN NOT NULL DEFAULT 1;
|
||||||
|
|
||||||
COMMIT;
|
COMMIT;
|
||||||
|
|
7
db/old_migrations/patch-timeout.sql
Normal file
7
db/old_migrations/patch-timeout.sql
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
|
UPDATE monitor SET timeout = (interval * 0.8)
|
||||||
|
WHERE timeout IS NULL OR timeout <= 0;
|
||||||
|
|
||||||
|
COMMIT;
|
|
@ -1,3 +1,4 @@
|
||||||
|
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||||
BEGIN TRANSACTION;
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
CREATE TABLE monitor_tls_info (
|
CREATE TABLE monitor_tls_info (
|
||||||
|
|
|
@ -1,15 +0,0 @@
|
||||||
version: '3.8'
|
|
||||||
|
|
||||||
services:
|
|
||||||
uptime-kuma:
|
|
||||||
image: louislam/uptime-kuma:2
|
|
||||||
container_name: uptime-kuma
|
|
||||||
volumes:
|
|
||||||
- uptime-kuma:/app/data
|
|
||||||
ports:
|
|
||||||
- "3001:3001" # <Host Port>:<Container Port>
|
|
||||||
restart: always
|
|
||||||
|
|
||||||
volumes:
|
|
||||||
uptime-kuma:
|
|
||||||
|
|
|
@ -30,6 +30,8 @@ FROM $BASE_IMAGE AS release
|
||||||
USER node
|
USER node
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
|
LABEL org.opencontainers.image.source="https://github.com/louislam/uptime-kuma"
|
||||||
|
|
||||||
ENV UPTIME_KUMA_IS_CONTAINER=1
|
ENV UPTIME_KUMA_IS_CONTAINER=1
|
||||||
|
|
||||||
# Copy app files from build layer
|
# Copy app files from build layer
|
||||||
|
@ -40,13 +42,20 @@ HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD ext
|
||||||
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
|
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
|
||||||
CMD ["node", "server/server.js"]
|
CMD ["node", "server/server.js"]
|
||||||
|
|
||||||
|
############################################
|
||||||
|
# Rootless Image
|
||||||
|
############################################
|
||||||
|
FROM release AS rootless
|
||||||
|
|
||||||
############################################
|
############################################
|
||||||
# Mark as Nightly
|
# Mark as Nightly
|
||||||
############################################
|
############################################
|
||||||
FROM release AS nightly
|
FROM release AS nightly
|
||||||
USER node
|
|
||||||
RUN npm run mark-as-nightly
|
RUN npm run mark-as-nightly
|
||||||
|
|
||||||
|
FROM nightly AS nightly-rootless
|
||||||
|
USER node
|
||||||
|
|
||||||
############################################
|
############################################
|
||||||
# Build an image for testing pr
|
# Build an image for testing pr
|
||||||
############################################
|
############################################
|
||||||
|
|
|
@ -37,7 +37,7 @@ const github = require("@actions/github");
|
||||||
owner: issue.owner,
|
owner: issue.owner,
|
||||||
repo: issue.repo,
|
repo: issue.repo,
|
||||||
issue_number: issue.number,
|
issue_number: issue.number,
|
||||||
body: `@${username}: Hello! :wave:\n\nThis issue is being automatically closed because it does not follow the issue template. Please DO NOT open a blank issue.`
|
body: `@${username}: Hello! :wave:\n\nThis issue is being automatically closed because it does not follow the issue template. Please **DO NOT open blank issues and use our [issue-templates](https://github.com/louislam/uptime-kuma/issues/new/choose) instead**.\nBlank Issues do not contain the context nessesary for a good discussions.`
|
||||||
});
|
});
|
||||||
|
|
||||||
// Close the issue
|
// Close the issue
|
||||||
|
|
|
@ -5,7 +5,7 @@ const fs = require("fs");
|
||||||
* or the `recursive` property removing completely in the future Node.js version.
|
* or the `recursive` property removing completely in the future Node.js version.
|
||||||
* See the link below.
|
* See the link below.
|
||||||
* @todo Once we drop the support for Node.js v14 (or at least versions before v14.14.0), we can safely replace this function with `fs.rmSync`, since `fs.rmSync` was add in Node.js v14.14.0 and currently we supports all the Node.js v14 versions that include the versions before the v14.14.0, and this function have almost the same signature with `fs.rmSync`.
|
* @todo Once we drop the support for Node.js v14 (or at least versions before v14.14.0), we can safely replace this function with `fs.rmSync`, since `fs.rmSync` was add in Node.js v14.14.0 and currently we supports all the Node.js v14 versions that include the versions before the v14.14.0, and this function have almost the same signature with `fs.rmSync`.
|
||||||
* @link https://nodejs.org/docs/latest-v16.x/api/deprecations.html#dep0147-fsrmdirpath--recursive-true- the deprecation infomation of `fs.rmdirSync`
|
* @link https://nodejs.org/docs/latest-v16.x/api/deprecations.html#dep0147-fsrmdirpath--recursive-true- the deprecation information of `fs.rmdirSync`
|
||||||
* @link https://nodejs.org/docs/latest-v16.x/api/fs.html#fsrmsyncpath-options the document of `fs.rmSync`
|
* @link https://nodejs.org/docs/latest-v16.x/api/fs.html#fsrmsyncpath-options the document of `fs.rmSync`
|
||||||
* @param {fs.PathLike} path Valid types for path values in "fs".
|
* @param {fs.PathLike} path Valid types for path values in "fs".
|
||||||
* @param {fs.RmDirOptions} options options for `fs.rmdirSync`, if `fs.rmSync` is available and property `recursive` is true, it will automatically have property `force` with value `true`.
|
* @param {fs.RmDirOptions} options options for `fs.rmdirSync`, if `fs.rmSync` is available and property `recursive` is true, it will automatically have property `force` with value `true`.
|
||||||
|
|
|
@ -6,7 +6,7 @@
|
||||||
* ⚠️ Deprecated: Changed to healthcheck.go, it will be deleted in the future.
|
* ⚠️ Deprecated: Changed to healthcheck.go, it will be deleted in the future.
|
||||||
* This script should be run after a period of time (180s), because the server may need some time to prepare.
|
* This script should be run after a period of time (180s), because the server may need some time to prepare.
|
||||||
*/
|
*/
|
||||||
const { FBSD } = require("../server/util-server");
|
const FBSD = /^freebsd/.test(process.platform);
|
||||||
|
|
||||||
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
|
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
|
||||||
|
|
||||||
|
|
|
@ -1,276 +0,0 @@
|
||||||
// install.sh is generated by ./extra/install.batsh, do not modify it directly.
|
|
||||||
// "npm run compile-install-script" to compile install.sh
|
|
||||||
// The command is working on Windows PowerShell and Docker for Windows only.
|
|
||||||
|
|
||||||
|
|
||||||
// curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
|
|
||||||
println("=====================");
|
|
||||||
println("Uptime Kuma Install Script");
|
|
||||||
println("=====================");
|
|
||||||
println("Supported OS: Ubuntu >= 16.04, Debian and CentOS/RHEL 7/8");
|
|
||||||
println("---------------------------------------");
|
|
||||||
println("This script is designed for Linux and basic usage.");
|
|
||||||
println("For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation");
|
|
||||||
println("---------------------------------------");
|
|
||||||
println("");
|
|
||||||
println("Local - Install Uptime Kuma on your current machine with git, Node.js and pm2");
|
|
||||||
println("Docker - Install Uptime Kuma Docker container");
|
|
||||||
println("");
|
|
||||||
|
|
||||||
if ("$1" != "") {
|
|
||||||
type = "$1";
|
|
||||||
} else {
|
|
||||||
call("read", "-p", "Which installation method do you prefer? [DOCKER/local]: ", "type");
|
|
||||||
}
|
|
||||||
|
|
||||||
defaultPort = "3001";
|
|
||||||
|
|
||||||
function checkNode() {
|
|
||||||
bash("nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')");
|
|
||||||
println("Node Version: " ++ nodeVersion);
|
|
||||||
|
|
||||||
if (nodeVersion <= "12") {
|
|
||||||
println("Error: Required Node.js 14");
|
|
||||||
call("exit", "1");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function deb() {
|
|
||||||
bash("nodeCheck=$(node -v)");
|
|
||||||
bash("apt --yes update");
|
|
||||||
|
|
||||||
if (nodeCheck != "") {
|
|
||||||
checkNode();
|
|
||||||
} else {
|
|
||||||
|
|
||||||
// Old nodejs binary name is "nodejs"
|
|
||||||
bash("check=$(nodejs --version)");
|
|
||||||
if (check != "") {
|
|
||||||
println("Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old.");
|
|
||||||
bash("exit 1");
|
|
||||||
}
|
|
||||||
|
|
||||||
bash("curlCheck=$(curl --version)");
|
|
||||||
if (curlCheck == "") {
|
|
||||||
println("Installing Curl");
|
|
||||||
bash("apt --yes install curl");
|
|
||||||
}
|
|
||||||
|
|
||||||
println("Installing Node.js 16");
|
|
||||||
bash("curl -sL https://deb.nodesource.com/setup_16.x | bash - > log.txt");
|
|
||||||
bash("apt --yes install nodejs");
|
|
||||||
bash("node -v");
|
|
||||||
|
|
||||||
bash("nodeCheckAgain=$(node -v)");
|
|
||||||
|
|
||||||
if (nodeCheckAgain == "") {
|
|
||||||
println("Error during Node.js installation");
|
|
||||||
bash("exit 1");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
bash("check=$(git --version)");
|
|
||||||
if (check == "") {
|
|
||||||
println("Installing Git");
|
|
||||||
bash("apt --yes install git");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (type == "local") {
|
|
||||||
defaultInstallPath = "/opt/uptime-kuma";
|
|
||||||
|
|
||||||
if (exists("/etc/redhat-release")) {
|
|
||||||
os = call("cat", "/etc/redhat-release");
|
|
||||||
distribution = "rhel";
|
|
||||||
|
|
||||||
} else if (exists("/etc/issue")) {
|
|
||||||
bash("os=$(head -n1 /etc/issue | cut -f 1 -d ' ')");
|
|
||||||
if (os == "Ubuntu") {
|
|
||||||
distribution = "ubuntu";
|
|
||||||
|
|
||||||
// Get ubuntu version
|
|
||||||
bash(". /etc/lsb-release");
|
|
||||||
version = DISTRIB_RELEASE;
|
|
||||||
}
|
|
||||||
if (os == "Debian") {
|
|
||||||
distribution = "debian";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
bash("arch=$(uname -i)");
|
|
||||||
|
|
||||||
println("Your OS: " ++ os);
|
|
||||||
println("Distribution: " ++ distribution);
|
|
||||||
println("Version: " ++ version);
|
|
||||||
println("Arch: " ++ arch);
|
|
||||||
|
|
||||||
if ("$3" != "") {
|
|
||||||
port = "$3";
|
|
||||||
} else {
|
|
||||||
call("read", "-p", "Listening Port [$defaultPort]: ", "port");
|
|
||||||
|
|
||||||
if (port == "") {
|
|
||||||
port = defaultPort;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if ("$2" != "") {
|
|
||||||
installPath = "$2";
|
|
||||||
} else {
|
|
||||||
call("read", "-p", "Installation Path [$defaultInstallPath]: ", "installPath");
|
|
||||||
|
|
||||||
if (installPath == "") {
|
|
||||||
installPath = defaultInstallPath;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// CentOS
|
|
||||||
if (distribution == "rhel") {
|
|
||||||
bash("nodeCheck=$(node -v)");
|
|
||||||
|
|
||||||
if (nodeCheck != "") {
|
|
||||||
checkNode();
|
|
||||||
} else {
|
|
||||||
|
|
||||||
bash("dnfCheck=$(dnf --version)");
|
|
||||||
|
|
||||||
// Use yum
|
|
||||||
if (dnfCheck == "") {
|
|
||||||
bash("curlCheck=$(curl --version)");
|
|
||||||
if (curlCheck == "") {
|
|
||||||
println("Installing Curl");
|
|
||||||
bash("yum -y -q install curl");
|
|
||||||
}
|
|
||||||
|
|
||||||
println("Installing Node.js 16");
|
|
||||||
bash("curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt");
|
|
||||||
bash("yum install -y -q nodejs");
|
|
||||||
} else {
|
|
||||||
bash("curlCheck=$(curl --version)");
|
|
||||||
if (curlCheck == "") {
|
|
||||||
println("Installing Curl");
|
|
||||||
bash("dnf -y install curl");
|
|
||||||
}
|
|
||||||
|
|
||||||
println("Installing Node.js 16");
|
|
||||||
bash("curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt");
|
|
||||||
bash("dnf install -y nodejs");
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
bash("node -v");
|
|
||||||
|
|
||||||
bash("nodeCheckAgain=$(node -v)");
|
|
||||||
|
|
||||||
if (nodeCheckAgain == "") {
|
|
||||||
println("Error during Node.js installation");
|
|
||||||
bash("exit 1");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
bash("check=$(git --version)");
|
|
||||||
if (check == "") {
|
|
||||||
println("Installing Git");
|
|
||||||
bash("yum -y -q install git");
|
|
||||||
}
|
|
||||||
|
|
||||||
// Ubuntu
|
|
||||||
} else if (distribution == "ubuntu") {
|
|
||||||
deb();
|
|
||||||
|
|
||||||
// Debian
|
|
||||||
} else if (distribution == "debian") {
|
|
||||||
deb();
|
|
||||||
|
|
||||||
} else {
|
|
||||||
// Unknown distribution
|
|
||||||
error = 0;
|
|
||||||
|
|
||||||
bash("check=$(git --version)");
|
|
||||||
if (check == "") {
|
|
||||||
error = 1;
|
|
||||||
println("Error: git is not found!");
|
|
||||||
println("help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git");
|
|
||||||
}
|
|
||||||
|
|
||||||
bash("check=$(node -v)");
|
|
||||||
if (check == "") {
|
|
||||||
error = 1;
|
|
||||||
println("Error: node is not found");
|
|
||||||
println("help: an installation guide is available at https://nodejs.org/en/download");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (error > 0) {
|
|
||||||
println("Please install above missing software");
|
|
||||||
bash("exit 1");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
bash("check=$(pm2 --version)");
|
|
||||||
if (check == "") {
|
|
||||||
println("Installing PM2");
|
|
||||||
bash("npm install pm2 -g && pm2 install pm2-logrotate");
|
|
||||||
bash("pm2 startup");
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
// Check again
|
|
||||||
bash("check=$(pm2 --version)");
|
|
||||||
if (check == "") {
|
|
||||||
println("Error: pm2 is not found!");
|
|
||||||
println("help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/");
|
|
||||||
bash("exit 1");
|
|
||||||
}
|
|
||||||
|
|
||||||
bash("mkdir -p $installPath");
|
|
||||||
bash("cd $installPath");
|
|
||||||
bash("git clone https://github.com/louislam/uptime-kuma.git .");
|
|
||||||
bash("npm run setup");
|
|
||||||
|
|
||||||
bash("pm2 start server/server.js --name uptime-kuma -- --port=$port");
|
|
||||||
|
|
||||||
} else {
|
|
||||||
defaultVolume = "uptime-kuma";
|
|
||||||
|
|
||||||
bash("check=$(docker -v)");
|
|
||||||
if (check == "") {
|
|
||||||
println("Error: docker is not found!");
|
|
||||||
println("help: an installation guide is available at https://docs.docker.com/desktop/");
|
|
||||||
bash("exit 1");
|
|
||||||
}
|
|
||||||
|
|
||||||
bash("check=$(docker info)");
|
|
||||||
|
|
||||||
bash("if [[ \"$check\" == *\"Is the docker daemon running\"* ]]; then
|
|
||||||
\"echo\" \"Error: docker is not running\"
|
|
||||||
\"echo\" \"help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/\"
|
|
||||||
\"exit\" \"1\"
|
|
||||||
fi");
|
|
||||||
|
|
||||||
if ("$3" != "") {
|
|
||||||
port = "$3";
|
|
||||||
} else {
|
|
||||||
call("read", "-p", "Expose Port [$defaultPort]: ", "port");
|
|
||||||
|
|
||||||
if (port == "") {
|
|
||||||
port = defaultPort;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if ("$2" != "") {
|
|
||||||
volume = "$2";
|
|
||||||
} else {
|
|
||||||
call("read", "-p", "Volume Name [$defaultVolume]: ", "volume");
|
|
||||||
|
|
||||||
if (volume == "") {
|
|
||||||
volume = defaultVolume;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
println("Port: $port");
|
|
||||||
println("Volume: $volume");
|
|
||||||
bash("docker volume create $volume");
|
|
||||||
bash("docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1");
|
|
||||||
}
|
|
||||||
|
|
||||||
println("http://localhost:$port");
|
|
3
extra/push-examples/.gitignore
vendored
Normal file
3
extra/push-examples/.gitignore
vendored
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
java/Index.class
|
||||||
|
csharp/index.exe
|
||||||
|
typescript-fetch/index.js
|
10
extra/push-examples/bash-curl/index.sh
Normal file
10
extra/push-examples/bash-curl/index.sh
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
#!/bin/bash
|
||||||
|
# Filename: index.sh
|
||||||
|
PUSH_URL="https://example.com/api/push/key?status=up&msg=OK&ping="
|
||||||
|
INTERVAL=60
|
||||||
|
|
||||||
|
while true; do
|
||||||
|
curl -s -o /dev/null $PUSH_URL
|
||||||
|
echo "Pushed!"
|
||||||
|
sleep $INTERVAL
|
||||||
|
done
|
24
extra/push-examples/csharp/index.cs
Normal file
24
extra/push-examples/csharp/index.cs
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
using System;
|
||||||
|
using System.Net;
|
||||||
|
using System.Threading;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compile: C:\Windows\Microsoft.NET\Framework\v4.0.30319\csc.exe index.cs
|
||||||
|
* Run: index.exe
|
||||||
|
*/
|
||||||
|
class Index
|
||||||
|
{
|
||||||
|
const string PushURL = "https://example.com/api/push/key?status=up&msg=OK&ping=";
|
||||||
|
const int Interval = 60;
|
||||||
|
|
||||||
|
static void Main(string[] args)
|
||||||
|
{
|
||||||
|
while (true)
|
||||||
|
{
|
||||||
|
WebClient client = new WebClient();
|
||||||
|
client.DownloadString(PushURL);
|
||||||
|
Console.WriteLine("Pushed!");
|
||||||
|
Thread.Sleep(Interval * 1000);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
1
extra/push-examples/docker/index.sh
Normal file
1
extra/push-examples/docker/index.sh
Normal file
|
@ -0,0 +1 @@
|
||||||
|
docker run -d --restart=always --name uptime-kuma-push louislam/uptime-kuma:push "https://example.com/api/push/key?status=up&msg=OK&ping=" 60
|
20
extra/push-examples/go/index.go
Normal file
20
extra/push-examples/go/index.go
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
const PushURL = "https://example.com/api/push/key?status=up&msg=OK&ping="
|
||||||
|
const Interval = 60
|
||||||
|
|
||||||
|
for {
|
||||||
|
_, err := http.Get(PushURL)
|
||||||
|
if err == nil {
|
||||||
|
fmt.Println("Pushed!")
|
||||||
|
}
|
||||||
|
time.Sleep(Interval * time.Second)
|
||||||
|
}
|
||||||
|
}
|
32
extra/push-examples/java/index.java
Normal file
32
extra/push-examples/java/index.java
Normal file
|
@ -0,0 +1,32 @@
|
||||||
|
import java.net.HttpURLConnection;
|
||||||
|
import java.net.URL;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compile: javac index.java
|
||||||
|
* Run: java Index
|
||||||
|
*/
|
||||||
|
class Index {
|
||||||
|
|
||||||
|
public static final String PUSH_URL = "https://example.com/api/push/key?status=up&msg=OK&ping=";
|
||||||
|
public static final int INTERVAL = 60;
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
while (true) {
|
||||||
|
try {
|
||||||
|
URL url = new URL(PUSH_URL);
|
||||||
|
HttpURLConnection con = (HttpURLConnection) url.openConnection();
|
||||||
|
con.setRequestMethod("GET");
|
||||||
|
con.getResponseCode();
|
||||||
|
con.disconnect();
|
||||||
|
System.out.println("Pushed!");
|
||||||
|
} catch (Exception e) {
|
||||||
|
e.printStackTrace();
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
Thread.sleep(INTERVAL * 1000);
|
||||||
|
} catch (Exception e) {
|
||||||
|
e.printStackTrace();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
11
extra/push-examples/javascript-fetch/index.js
Normal file
11
extra/push-examples/javascript-fetch/index.js
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
// Supports: Node.js >= 18, Deno, Bun
|
||||||
|
const pushURL = "https://example.com/api/push/key?status=up&msg=OK&ping=";
|
||||||
|
const interval = 60;
|
||||||
|
|
||||||
|
const push = async () => {
|
||||||
|
await fetch(pushURL);
|
||||||
|
console.log("Pushed!");
|
||||||
|
};
|
||||||
|
|
||||||
|
push();
|
||||||
|
setInterval(push, interval * 1000);
|
5
extra/push-examples/javascript-fetch/package.json
Normal file
5
extra/push-examples/javascript-fetch/package.json
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
{
|
||||||
|
"scripts": {
|
||||||
|
"start": "node index.js"
|
||||||
|
}
|
||||||
|
}
|
13
extra/push-examples/php/index.php
Normal file
13
extra/push-examples/php/index.php
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
<?php
|
||||||
|
const PUSH_URL = "https://example.com/api/push/key?status=up&msg=OK&ping=";
|
||||||
|
const interval = 60;
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
$ch = curl_init();
|
||||||
|
curl_setopt($ch, CURLOPT_URL, PUSH_URL);
|
||||||
|
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
|
||||||
|
curl_exec($ch);
|
||||||
|
curl_close($ch);
|
||||||
|
echo "Pushed!\n";
|
||||||
|
sleep(interval);
|
||||||
|
}
|
9
extra/push-examples/powershell/index.ps1
Normal file
9
extra/push-examples/powershell/index.ps1
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
# Filename: index.ps1
|
||||||
|
$pushURL = "https://example.com/api/push/key?status=up&msg=OK&ping="
|
||||||
|
$interval = 60
|
||||||
|
|
||||||
|
while ($true) {
|
||||||
|
$res = Invoke-WebRequest -Uri $pushURL
|
||||||
|
Write-Host "Pushed!"
|
||||||
|
Start-Sleep -Seconds $interval
|
||||||
|
}
|
10
extra/push-examples/python/index.py
Normal file
10
extra/push-examples/python/index.py
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
import urllib.request
|
||||||
|
import time
|
||||||
|
|
||||||
|
push_url = "https://example.com/api/push/key?status=up&msg=OK&ping="
|
||||||
|
interval = 60
|
||||||
|
|
||||||
|
while True:
|
||||||
|
urllib.request.urlopen(push_url)
|
||||||
|
print("Pushed!\n")
|
||||||
|
time.sleep(interval)
|
19
extra/push-examples/typescript-fetch/README.md
Normal file
19
extra/push-examples/typescript-fetch/README.md
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
# How to run
|
||||||
|
|
||||||
|
Node.js (ts-node)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
ts-node index.ts
|
||||||
|
```
|
||||||
|
|
||||||
|
Deno
|
||||||
|
|
||||||
|
```bash
|
||||||
|
deno run --allow-net index.ts
|
||||||
|
```
|
||||||
|
|
||||||
|
Bun.js
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bun index.ts
|
||||||
|
```
|
11
extra/push-examples/typescript-fetch/index.ts
Normal file
11
extra/push-examples/typescript-fetch/index.ts
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
// Supports: Deno, Bun, Node.js >= 18 (ts-node)
|
||||||
|
const pushURL : string = "https://example.com/api/push/key?status=up&msg=OK&ping=";
|
||||||
|
const interval : number = 60;
|
||||||
|
|
||||||
|
const push = async () => {
|
||||||
|
await fetch(pushURL);
|
||||||
|
console.log("Pushed!");
|
||||||
|
};
|
||||||
|
|
||||||
|
push();
|
||||||
|
setInterval(push, interval * 1000);
|
13
extra/push-examples/typescript-fetch/package.json
Normal file
13
extra/push-examples/typescript-fetch/package.json
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
{
|
||||||
|
"scripts": {
|
||||||
|
"ts-node": "ts-node index.ts",
|
||||||
|
"deno": "deno run --allow-net index.ts",
|
||||||
|
"bun": "bun index.ts"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^20.6.0",
|
||||||
|
"ts-node": "^10.9.1",
|
||||||
|
"tslib": "^2.6.2",
|
||||||
|
"typescript": "^5.2.2"
|
||||||
|
}
|
||||||
|
}
|
40
extra/rebase-pr.js
Normal file
40
extra/rebase-pr.js
Normal file
|
@ -0,0 +1,40 @@
|
||||||
|
const { execSync } = require("child_process");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rebase a PR onto such as 1.23.X or master
|
||||||
|
* @returns {Promise<void>}
|
||||||
|
*/
|
||||||
|
async function main() {
|
||||||
|
const branch = process.argv[2];
|
||||||
|
|
||||||
|
// Use gh to get current branch's pr id
|
||||||
|
let currentBranchPRID = execSync("gh pr view --json number --jq \".number\"").toString().trim();
|
||||||
|
console.log("Pr ID: ", currentBranchPRID);
|
||||||
|
|
||||||
|
// Use gh commend to get pr commits
|
||||||
|
const prCommits = JSON.parse(execSync(`gh pr view ${currentBranchPRID} --json commits`).toString().trim()).commits;
|
||||||
|
|
||||||
|
console.log("Found commits: ", prCommits.length);
|
||||||
|
|
||||||
|
// Sort the commits by authoredDate
|
||||||
|
prCommits.sort((a, b) => {
|
||||||
|
return new Date(a.authoredDate) - new Date(b.authoredDate);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get the oldest commit id
|
||||||
|
const oldestCommitID = prCommits[0].oid;
|
||||||
|
console.log("Oldest commit id of this pr:", oldestCommitID);
|
||||||
|
|
||||||
|
// Get the latest commit id of the target branch
|
||||||
|
const latestCommitID = execSync(`git rev-parse origin/${branch}`).toString().trim();
|
||||||
|
console.log("Latest commit id of " + branch + ":", latestCommitID);
|
||||||
|
|
||||||
|
// Get the original parent commit id of the oldest commit
|
||||||
|
const originalParentCommitID = execSync(`git log --pretty=%P -n 1 "${oldestCommitID}"`).toString().trim();
|
||||||
|
console.log("Original parent commit id of the oldest commit:", originalParentCommitID);
|
||||||
|
|
||||||
|
// Rebase the pr onto the target branch
|
||||||
|
execSync(`git rebase --onto ${latestCommitID} ${originalParentCommitID}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
main();
|
44
extra/reformat-changelog.js
Normal file
44
extra/reformat-changelog.js
Normal file
|
@ -0,0 +1,44 @@
|
||||||
|
// Generate on GitHub
|
||||||
|
const input = `
|
||||||
|
* Add Korean translation by @Alanimdeo in https://github.com/louislam/dockge/pull/86
|
||||||
|
`;
|
||||||
|
|
||||||
|
const template = `
|
||||||
|
### 🆕 New Features
|
||||||
|
|
||||||
|
### 💇♀️ Improvements
|
||||||
|
|
||||||
|
### 🐞 Bug Fixes
|
||||||
|
|
||||||
|
### ⬆️ Security Fixes
|
||||||
|
|
||||||
|
### 🦎 Translation Contributions
|
||||||
|
|
||||||
|
### Others
|
||||||
|
- Other small changes, code refactoring and comment/doc updates in this repo:
|
||||||
|
`;
|
||||||
|
|
||||||
|
const lines = input.split("\n").filter((line) => line.trim() !== "");
|
||||||
|
|
||||||
|
for (const line of lines) {
|
||||||
|
// Split the last " by "
|
||||||
|
const usernamePullRequesURL = line.split(" by ").pop();
|
||||||
|
|
||||||
|
if (!usernamePullRequesURL) {
|
||||||
|
console.log("Unable to parse", line);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const [ username, pullRequestURL ] = usernamePullRequesURL.split(" in ");
|
||||||
|
const pullRequestID = "#" + pullRequestURL.split("/").pop();
|
||||||
|
let message = line.split(" by ").shift();
|
||||||
|
|
||||||
|
if (!message) {
|
||||||
|
console.log("Unable to parse", line);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
message = message.split("* ").pop();
|
||||||
|
console.log("-", pullRequestID, message, `(Thanks ${username})`);
|
||||||
|
}
|
||||||
|
console.log(template);
|
6
extra/remove-playwright-test-data.js
Normal file
6
extra/remove-playwright-test-data.js
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
const fs = require("fs");
|
||||||
|
|
||||||
|
fs.rmSync("./data/playwright-test", {
|
||||||
|
recursive: true,
|
||||||
|
force: true,
|
||||||
|
});
|
|
@ -5,6 +5,8 @@ const { R } = require("redbean-node");
|
||||||
const readline = require("readline");
|
const readline = require("readline");
|
||||||
const { initJWTSecret } = require("../server/util-server");
|
const { initJWTSecret } = require("../server/util-server");
|
||||||
const User = require("../server/model/user");
|
const User = require("../server/model/user");
|
||||||
|
const { io } = require("socket.io-client");
|
||||||
|
const { localWebSocketURL } = require("../server/config");
|
||||||
const args = require("args-parser")(process.argv);
|
const args = require("args-parser")(process.argv);
|
||||||
const rl = readline.createInterface({
|
const rl = readline.createInterface({
|
||||||
input: process.stdin,
|
input: process.stdin,
|
||||||
|
@ -12,6 +14,10 @@ const rl = readline.createInterface({
|
||||||
});
|
});
|
||||||
|
|
||||||
const main = async () => {
|
const main = async () => {
|
||||||
|
if ("dry-run" in args) {
|
||||||
|
console.log("Dry run mode, no changes will be made.");
|
||||||
|
}
|
||||||
|
|
||||||
console.log("Connecting the database");
|
console.log("Connecting the database");
|
||||||
Database.initDataDir(args);
|
Database.initDataDir(args);
|
||||||
await Database.connect(false, false, true);
|
await Database.connect(false, false, true);
|
||||||
|
@ -27,21 +33,36 @@ const main = async () => {
|
||||||
console.log("Found user: " + user.username);
|
console.log("Found user: " + user.username);
|
||||||
|
|
||||||
while (true) {
|
while (true) {
|
||||||
let password = await question("New Password: ");
|
let password;
|
||||||
let confirmPassword = await question("Confirm New Password: ");
|
let confirmPassword;
|
||||||
|
|
||||||
|
// When called with "--new-password" argument for unattended modification (e.g. npm run reset-password -- --new_password=secret)
|
||||||
|
if ("new-password" in args) {
|
||||||
|
console.log("Using password from argument");
|
||||||
|
console.warn("\x1b[31m%s\x1b[0m", "Warning: the password might be stored, in plain text, in your shell's history");
|
||||||
|
password = confirmPassword = args["new-password"] + "";
|
||||||
|
} else {
|
||||||
|
password = await question("New Password: ");
|
||||||
|
confirmPassword = await question("Confirm New Password: ");
|
||||||
|
}
|
||||||
|
|
||||||
if (password === confirmPassword) {
|
if (password === confirmPassword) {
|
||||||
|
if (!("dry-run" in args)) {
|
||||||
await User.resetPassword(user.id, password);
|
await User.resetPassword(user.id, password);
|
||||||
|
|
||||||
// Reset all sessions by reset jwt secret
|
// Reset all sessions by reset jwt secret
|
||||||
await initJWTSecret();
|
await initJWTSecret();
|
||||||
|
|
||||||
|
// Disconnect all other socket clients of the user
|
||||||
|
await disconnectAllSocketClients(user.username, password);
|
||||||
|
}
|
||||||
break;
|
break;
|
||||||
} else {
|
} else {
|
||||||
console.log("Passwords do not match, please try again.");
|
console.log("Passwords do not match, please try again.");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
console.log("Password reset successfully.");
|
console.log("Password reset successfully.");
|
||||||
|
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
console.error("Error: " + e.message);
|
console.error("Error: " + e.message);
|
||||||
|
@ -66,6 +87,50 @@ function question(question) {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Disconnect all socket clients of the user
|
||||||
|
* @param {string} username Username
|
||||||
|
* @param {string} password Password
|
||||||
|
* @returns {Promise<void>} Promise
|
||||||
|
*/
|
||||||
|
function disconnectAllSocketClients(username, password) {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
console.log("Connecting to " + localWebSocketURL + " to disconnect all other socket clients");
|
||||||
|
|
||||||
|
// Disconnect all socket connections
|
||||||
|
const socket = io(localWebSocketURL, {
|
||||||
|
reconnection: false,
|
||||||
|
timeout: 5000,
|
||||||
|
});
|
||||||
|
socket.on("connect", () => {
|
||||||
|
socket.emit("login", {
|
||||||
|
username,
|
||||||
|
password,
|
||||||
|
}, (res) => {
|
||||||
|
if (res.ok) {
|
||||||
|
console.log("Logged in.");
|
||||||
|
socket.emit("disconnectOtherSocketClients");
|
||||||
|
} else {
|
||||||
|
console.warn("Login failed.");
|
||||||
|
console.warn("Please restart the server to disconnect all sessions.");
|
||||||
|
}
|
||||||
|
socket.close();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
socket.on("connect_error", function () {
|
||||||
|
// The localWebSocketURL is not guaranteed to be working for some complicated Uptime Kuma setup
|
||||||
|
// Ask the user to restart the server manually
|
||||||
|
console.warn("Failed to connect to " + localWebSocketURL);
|
||||||
|
console.warn("Please restart the server to disconnect all sessions manually.");
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
socket.on("disconnect", () => {
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
if (!process.env.TEST_BACKEND) {
|
if (!process.env.TEST_BACKEND) {
|
||||||
main();
|
main();
|
||||||
}
|
}
|
||||||
|
|
1
extra/uptime-kuma-push/.gitignore
vendored
Normal file
1
extra/uptime-kuma-push/.gitignore
vendored
Normal file
|
@ -0,0 +1 @@
|
||||||
|
build/*
|
18
extra/uptime-kuma-push/Dockerfile
Normal file
18
extra/uptime-kuma-push/Dockerfile
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
FROM node AS build
|
||||||
|
RUN useradd --create-home kuma
|
||||||
|
USER kuma
|
||||||
|
WORKDIR /home/kuma
|
||||||
|
ARG TARGETPLATFORM
|
||||||
|
COPY --chown=kuma:kuma ./build/ ./build/
|
||||||
|
COPY --chown=kuma:kuma build.js build.js
|
||||||
|
RUN node build.js $TARGETPLATFORM
|
||||||
|
|
||||||
|
FROM debian:bookworm-slim AS release
|
||||||
|
RUN useradd --create-home kuma
|
||||||
|
USER kuma
|
||||||
|
WORKDIR /home/kuma
|
||||||
|
COPY --from=build /home/kuma/uptime-kuma-push ./uptime-kuma-push
|
||||||
|
|
||||||
|
ENTRYPOINT ["/home/kuma/uptime-kuma-push"]
|
||||||
|
|
||||||
|
|
48
extra/uptime-kuma-push/build.js
Normal file
48
extra/uptime-kuma-push/build.js
Normal file
|
@ -0,0 +1,48 @@
|
||||||
|
const fs = require("fs");
|
||||||
|
const platform = process.argv[2];
|
||||||
|
|
||||||
|
if (!platform) {
|
||||||
|
console.error("No platform??");
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const supportedPlatforms = [
|
||||||
|
{
|
||||||
|
name: "linux/amd64",
|
||||||
|
bin: "./build/uptime-kuma-push-amd64"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "linux/arm64",
|
||||||
|
bin: "./build/uptime-kuma-push-arm64"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "linux/arm/v7",
|
||||||
|
bin: "./build/uptime-kuma-push-armv7"
|
||||||
|
}
|
||||||
|
];
|
||||||
|
|
||||||
|
let platformObj = null;
|
||||||
|
|
||||||
|
// Check if the platform is supported
|
||||||
|
for (let i = 0; i < supportedPlatforms.length; i++) {
|
||||||
|
if (supportedPlatforms[i].name === platform) {
|
||||||
|
platformObj = supportedPlatforms[i];
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (platformObj) {
|
||||||
|
let filename = platformObj.bin;
|
||||||
|
|
||||||
|
if (!fs.existsSync(filename)) {
|
||||||
|
console.error(`prebuilt: ${filename} is not found, please build it first`);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
fs.renameSync(filename, "./uptime-kuma-push");
|
||||||
|
process.exit(0);
|
||||||
|
} else {
|
||||||
|
console.error("Unsupported platform: " + platform);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
13
extra/uptime-kuma-push/package.json
Normal file
13
extra/uptime-kuma-push/package.json
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
{
|
||||||
|
"scripts": {
|
||||||
|
"build-docker": "npm run build-all && docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:push . --push --target release",
|
||||||
|
"build-all": "npm run build-win && npm run build-linux-amd64 && npm run build-linux-arm64 && npm run build-linux-armv7 && npm run build-linux-armv6 && npm run build-linux-armv5 && npm run build-linux-riscv64",
|
||||||
|
"build-win": "cross-env GOOS=windows GOARCH=amd64 go build -x -o ./build/uptime-kuma-push.exe uptime-kuma-push.go",
|
||||||
|
"build-linux-amd64": "cross-env GOOS=linux GOARCH=amd64 go build -x -o ./build/uptime-kuma-push-amd64 uptime-kuma-push.go",
|
||||||
|
"build-linux-arm64": "cross-env GOOS=linux GOARCH=arm64 go build -x -o ./build/uptime-kuma-push-arm64 uptime-kuma-push.go",
|
||||||
|
"build-linux-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./build/uptime-kuma-push-armv7 uptime-kuma-push.go",
|
||||||
|
"build-linux-armv6": "cross-env GOOS=linux GOARCH=arm GOARM=6 go build -x -o ./build/uptime-kuma-push-armv6 uptime-kuma-push.go",
|
||||||
|
"build-linux-armv5": "cross-env GOOS=linux GOARCH=arm GOARM=5 go build -x -o ./build/uptime-kuma-push-armv5 uptime-kuma-push.go",
|
||||||
|
"build-linux-riscv64": "cross-env GOOS=linux GOARCH=riscv64 go build -x -o ./build/uptime-kuma-push-riscv64 uptime-kuma-push.go"
|
||||||
|
}
|
||||||
|
}
|
44
extra/uptime-kuma-push/uptime-kuma-push.go
Normal file
44
extra/uptime-kuma-push/uptime-kuma-push.go
Normal file
|
@ -0,0 +1,44 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
os "os"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
if len(os.Args) < 2 {
|
||||||
|
fmt.Fprintln(os.Stderr, "Usage: uptime-kuma-push <url> [<interval>]")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
pushURL := os.Args[1]
|
||||||
|
|
||||||
|
var interval time.Duration
|
||||||
|
|
||||||
|
if len(os.Args) >= 3 {
|
||||||
|
intervalString, err := time.ParseDuration(os.Args[2] + "s")
|
||||||
|
interval = intervalString
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintln(os.Stderr, "Error: Invalid interval", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
} else {
|
||||||
|
interval = 60 * time.Second
|
||||||
|
}
|
||||||
|
|
||||||
|
for {
|
||||||
|
_, err := http.Get(pushURL)
|
||||||
|
if err == nil {
|
||||||
|
fmt.Print("Pushed!")
|
||||||
|
} else {
|
||||||
|
fmt.Print("Error: ", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println(" Sleeping for", interval)
|
||||||
|
time.Sleep(interval)
|
||||||
|
}
|
||||||
|
}
|
228
install.sh
228
install.sh
|
@ -1,228 +0,0 @@
|
||||||
# install.sh is generated by ./extra/install.batsh, do not modify it directly.
# "npm run compile-install-script" to compile install.sh
# The command is working on Windows PowerShell and Docker for Windows only.
# curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh

"echo" "-e" "====================="
"echo" "-e" "Uptime Kuma Install Script"
"echo" "-e" "====================="
"echo" "-e" "Supported OS: Ubuntu >= 16.04, Debian and CentOS/RHEL 7/8"
"echo" "-e" "---------------------------------------"
"echo" "-e" "This script is designed for Linux and basic usage."
"echo" "-e" "For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation"
"echo" "-e" "---------------------------------------"
"echo" "-e" ""
"echo" "-e" "Local - Install Uptime Kuma on your current machine with git, Node.js and pm2"
"echo" "-e" "Docker - Install Uptime Kuma Docker container"
"echo" "-e" ""

if [ "$1" != "" ]; then
    type="$1"
else
    "read" "-p" "Which installation method do you prefer? [DOCKER/local]: " "type"
fi

defaultPort="3001"

function checkNode {
    local _0
    nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')
    "echo" "-e" "Node Version: ""$nodeVersion"
    _0="12"
    if [ $(($nodeVersion <= $_0)) == 1 ]; then
        "echo" "-e" "Error: Required Node.js 14"
        "exit" "1"
    fi
}

function deb {
    nodeCheck=$(node -v)
    apt --yes update
    if [ "$nodeCheck" != "" ]; then
        "checkNode"
    else
        # Old nodejs binary name is "nodejs"
        check=$(nodejs --version)
        if [ "$check" != "" ]; then
            "echo" "-e" "Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old."
            exit 1
        fi
        curlCheck=$(curl --version)
        if [ "$curlCheck" == "" ]; then
            "echo" "-e" "Installing Curl"
            apt --yes install curl
        fi
        "echo" "-e" "Installing Node.js 16"
        curl -sL https://deb.nodesource.com/setup_16.x | bash - > log.txt
        apt --yes install nodejs
        node -v
        nodeCheckAgain=$(node -v)
        if [ "$nodeCheckAgain" == "" ]; then
            "echo" "-e" "Error during Node.js installation"
            exit 1
        fi
    fi
    check=$(git --version)
    if [ "$check" == "" ]; then
        "echo" "-e" "Installing Git"
        apt --yes install git
    fi
}

if [ "$type" == "local" ]; then
    defaultInstallPath="/opt/uptime-kuma"
    if [ -e "/etc/redhat-release" ]; then
        os=$("cat" "/etc/redhat-release")
        distribution="rhel"
    else
        if [ -e "/etc/issue" ]; then
            os=$(head -n1 /etc/issue | cut -f 1 -d ' ')
            if [ "$os" == "Ubuntu" ]; then
                distribution="ubuntu"
                # Get ubuntu version
                . /etc/lsb-release
                version="$DISTRIB_RELEASE"
            fi
            if [ "$os" == "Debian" ]; then
                distribution="debian"
            fi
        fi
    fi

    arch=$(uname -i)
    "echo" "-e" "Your OS: ""$os"
    "echo" "-e" "Distribution: ""$distribution"
    "echo" "-e" "Version: ""$version"
    "echo" "-e" "Arch: ""$arch"

    if [ "$3" != "" ]; then
        port="$3"
    else
        "read" "-p" "Listening Port [$defaultPort]: " "port"
        if [ "$port" == "" ]; then
            port="$defaultPort"
        fi
    fi

    if [ "$2" != "" ]; then
        installPath="$2"
    else
        "read" "-p" "Installation Path [$defaultInstallPath]: " "installPath"
        if [ "$installPath" == "" ]; then
            installPath="$defaultInstallPath"
        fi
    fi

    # CentOS
    if [ "$distribution" == "rhel" ]; then
        nodeCheck=$(node -v)
        if [ "$nodeCheck" != "" ]; then
            "checkNode"
        else
            dnfCheck=$(dnf --version)
            # Use yum
            if [ "$dnfCheck" == "" ]; then
                curlCheck=$(curl --version)
                if [ "$curlCheck" == "" ]; then
                    "echo" "-e" "Installing Curl"
                    yum -y -q install curl
                fi
                "echo" "-e" "Installing Node.js 16"
                curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt
                yum install -y -q nodejs
            else
                curlCheck=$(curl --version)
                if [ "$curlCheck" == "" ]; then
                    "echo" "-e" "Installing Curl"
                    dnf -y install curl
                fi
                "echo" "-e" "Installing Node.js 16"
                curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt
                dnf install -y nodejs
            fi
            node -v
            nodeCheckAgain=$(node -v)
            if [ "$nodeCheckAgain" == "" ]; then
                "echo" "-e" "Error during Node.js installation"
                exit 1
            fi
        fi
        check=$(git --version)
        if [ "$check" == "" ]; then
            "echo" "-e" "Installing Git"
            yum -y -q install git
        fi
    # Ubuntu
    else
        if [ "$distribution" == "ubuntu" ]; then
            "deb"
        # Debian
        else
            if [ "$distribution" == "debian" ]; then
                "deb"
            else
                # Unknown distribution
                error=$((0))
                check=$(git --version)
                if [ "$check" == "" ]; then
                    error=$((1))
                    "echo" "-e" "Error: git is not found!"
                    "echo" "-e" "help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git"
                fi
                check=$(node -v)
                if [ "$check" == "" ]; then
                    error=$((1))
                    "echo" "-e" "Error: node is not found"
                    "echo" "-e" "help: an installation guide is available at https://nodejs.org/en/download"
                fi
                if [ $(($error > 0)) == 1 ]; then
                    "echo" "-e" "Please install above missing software"
                    exit 1
                fi
            fi
        fi
    fi

    check=$(pm2 --version)
    if [ "$check" == "" ]; then
        "echo" "-e" "Installing PM2"
        npm install pm2 -g && pm2 install pm2-logrotate
        pm2 startup
    fi
    # Check again
    check=$(pm2 --version)
    if [ "$check" == "" ]; then
        "echo" "-e" "Error: pm2 is not found!"
        "echo" "-e" "help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/"
        exit 1
    fi

    mkdir -p $installPath
    cd $installPath
    git clone https://github.com/louislam/uptime-kuma.git .
    npm run setup
    pm2 start server/server.js --name uptime-kuma -- --port=$port

else
    defaultVolume="uptime-kuma"
    check=$(docker -v)
    if [ "$check" == "" ]; then
        "echo" "-e" "Error: docker is not found!"
        "echo" "-e" "help: an installation guide is available at https://docs.docker.com/desktop/"
        exit 1
    fi
    check=$(docker info)
    if [[ "$check" == *"Is the docker daemon running"* ]]; then
        "echo" "Error: docker is not running"
        "echo" "help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/"
        "exit" "1"
    fi

    if [ "$3" != "" ]; then
        port="$3"
    else
        "read" "-p" "Expose Port [$defaultPort]: " "port"
        if [ "$port" == "" ]; then
            port="$defaultPort"
        fi
    fi

    if [ "$2" != "" ]; then
        volume="$2"
    else
        "read" "-p" "Volume Name [$defaultVolume]: " "volume"
        if [ "$volume" == "" ]; then
            volume="$defaultVolume"
        fi
    fi

    "echo" "-e" "Port: $port"
    "echo" "-e" "Volume: $volume"
    docker volume create $volume
    docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1
fi

"echo" "-e" "http://localhost:$port"
package-lock.json (generated, 12306 lines changed): diff suppressed because it is too large

package.json (83 lines changed)
@@ -1,6 +1,6 @@
 {
     "name": "uptime-kuma",
-    "version": "1.23.1",
+    "version": "2.0.0-dev",
     "license": "MIT",
     "repository": {
         "type": "git",
@@ -10,13 +10,13 @@
         "node": "14 || 16 || 18 || >= 20.4.0"
     },
     "scripts": {
-        "install-legacy": "npm install",
-        "update-legacy": "npm update",
         "lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
+        "lint:js-prod": "npm run lint:js -- --max-warnings 0",
         "lint-fix:js": "eslint --ext \".js,.vue\" --fix --ignore-path .gitignore .",
         "lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
         "lint-fix:style": "stylelint \"**/*.{vue,css,scss}\" --fix --ignore-path .gitignore",
         "lint": "npm run lint:js && npm run lint:style",
+        "lint:prod": "npm run lint:js-prod && npm run lint:style",
         "dev": "concurrently -k -r \"wait-on tcp:3000 && npm run start-server-dev \" \"npm run start-frontend-dev\"",
         "start-frontend-dev": "cross-env NODE_ENV=development vite --host --config ./config/vite.config.js",
         "start-frontend-devcontainer": "cross-env NODE_ENV=development DEVCONTAINER=1 vite --host --config ./config/vite.config.js",
@@ -24,13 +24,17 @@
         "start-server": "node server/server.js",
         "start-server-dev": "cross-env NODE_ENV=development node server/server.js",
         "start-server-dev:test-subdir": "cross-env NODE_ENV=development BASE_PATH=/my-kuma node server/server.js",
+        "start-server-dev:watch": "cross-env NODE_ENV=development node --watch server/server.js",
         "build": "vite build --config ./config/vite.config.js",
-        "test": "node test/prepare-test-server.js && npm run test-backend",
+        "test": "npm run test-backend && npm run test-e2e",
         "test-with-build": "npm run build && npm test",
-        "test-backend": "node test/backend-test-entry.js && npm run jest-backend",
+        "test-backend": "node test/backend-test-entry.js",
         "test-backend:14": "cross-env TEST_BACKEND=1 NODE_OPTIONS=\"--experimental-abortcontroller --no-warnings\" node--test test/backend-test",
         "test-backend:18": "cross-env TEST_BACKEND=1 node --test test/backend-test",
-        "jest-backend": "cross-env TEST_BACKEND=1 jest --runInBand --detectOpenHandles --forceExit --config=./config/jest-backend.config.js",
+        "test-e2e": "playwright test --config ./config/playwright.config.js",
+        "test-e2e-ui": "playwright test --config ./config/playwright.config.js --ui --ui-port=51063",
+        "playwright-codegen": "playwright codegen localhost:3000 --save-storage=./private/e2e-auth.json",
+        "playwright-show-report": "playwright show-report ./private/playwright-report",
         "tsc": "tsc",
         "vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
         "build-docker": "npm run build && npm run build-docker-full && npm run build-docker-slim",
@@ -40,39 +44,35 @@
         "build-docker-slim": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-slim -t louislam/uptime-kuma:$VERSION-slim --target release --build-arg BASE_IMAGE=louislam/uptime-kuma:base2-slim . --push",
         "build-docker-full": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2 -t louislam/uptime-kuma:$VERSION --target release . --push",
         "build-docker-nightly": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly2 --target nightly . --push",
+        "build-docker-slim-rootless": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-slim-rootless -t louislam/uptime-kuma:$VERSION-slim-rootless --target rootless --build-arg BASE_IMAGE=louislam/uptime-kuma:base2-slim . --push",
+        "build-docker-full-rootless": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-rootless -t louislam/uptime-kuma:$VERSION-rootless --target rootless . --push",
+        "build-docker-nightly-rootless": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly2-rootless --target nightly-rootless . --push",
         "build-docker-nightly-local": "npm run build && docker build -f docker/dockerfile -t louislam/uptime-kuma:nightly2 --target nightly .",
         "build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test2 --target pr-test2 . --push",
         "upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
-        "setup": "git checkout 1.23.1 && npm ci --production && npm run download-dist",
+        "setup": "git checkout 1.23.11 && npm ci --production && npm run download-dist",
         "download-dist": "node extra/download-dist.js",
         "mark-as-nightly": "node extra/mark-as-nightly.js",
         "reset-password": "node extra/reset-password.js",
         "remove-2fa": "node extra/remove-2fa.js",
-        "compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1",
-        "test-install-script-rockylinux": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/rockylinux.dockerfile .",
-        "test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
-        "test-install-script-debian": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian.dockerfile .",
-        "test-install-script-debian-buster": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian-buster.dockerfile .",
-        "test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
-        "test-install-script-ubuntu1804": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1804.dockerfile .",
-        "test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
         "simple-dns-server": "node extra/simple-dns-server.js",
         "simple-mqtt-server": "node extra/simple-mqtt-server.js",
+        "simple-mongo": "docker run --rm -p 27017:27017 mongo",
+        "simple-postgres": "docker run --rm -p 5432:5432 -e POSTGRES_PASSWORD=postgres postgres",
+        "simple-mariadb": "docker run --rm -p 3306:3306 -e MYSQL_ROOT_PASSWORD=mariadb# mariadb",
         "update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
         "release-final": "node ./extra/test-docker.js && node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
         "release-beta": "node ./extra/test-docker.js && node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
         "git-remove-tag": "git tag -d",
         "build-dist-and-restart": "npm run build && npm run start-server-dev",
         "start-pr-test": "node extra/checkout-pr.js && npm install && npm run dev",
-        "cy:test": "node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/ --e2e",
-        "cy:run": "npx cypress run --browser chrome --headless --config-file ./config/cypress.config.js",
-        "cy:run:unit": "npx cypress run --browser chrome --headless --config-file ./config/cypress.frontend.config.js",
-        "cypress-open": "concurrently -k -r \"node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/\" \"cypress open --config-file ./config/cypress.config.js\"",
         "build-healthcheck-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./extra/healthcheck-armv7 ./extra/healthcheck.go",
         "deploy-demo-server": "node extra/deploy-demo-server.js",
         "sort-contributors": "node extra/sort-contributors.js",
         "quick-run-nightly": "docker run --rm --env NODE_ENV=development -p 3001:3001 louislam/uptime-kuma:nightly2",
-        "start-dev-container": "cd docker && docker-compose -f docker-compose-dev.yml up --force-recreate"
+        "start-dev-container": "cd docker && docker-compose -f docker-compose-dev.yml up --force-recreate",
+        "rebase-pr-to-1.23.X": "node extra/rebase-pr.js 1.23.X",
+        "start-server-node14-win": "private\\node14\\node.exe server/server.js"
     },
     "dependencies": {
         "@grpc/grpc-js": "~1.7.3",
@@ -83,7 +83,6 @@
         "axios-ntlm": "1.3.0",
         "badge-maker": "~3.3.1",
         "bcryptjs": "~2.4.3",
-        "cacheable-lookup": "~6.0.4",
         "chardet": "~1.4.0",
         "check-password-strength": "^2.0.5",
         "cheerio": "~1.0.0-rc.12",
@@ -98,7 +97,9 @@
         "express-basic-auth": "~1.2.1",
         "express-static-gzip": "~2.1.7",
         "form-data": "~4.0.0",
-        "gamedig": "~4.0.5",
+        "gamedig": "^4.2.0",
+        "html-escaper": "^3.0.3",
+        "http-cookie-agent": "~5.0.4",
         "http-graceful-shutdown": "~3.1.7",
         "http-proxy-agent": "~5.0.0",
         "https-proxy-agent": "~5.0.1",
@@ -112,10 +113,11 @@
         "knex": "^2.4.2",
         "limiter": "~2.1.0",
         "liquidjs": "^10.7.0",
-        "mongodb": "~4.14.0",
+        "mitt": "~3.0.1",
+        "mongodb": "~4.17.1",
         "mqtt": "~4.3.7",
         "mssql": "~8.1.4",
-        "mysql2": "~2.3.3",
+        "mysql2": "~3.6.2",
         "nanoid": "~3.3.4",
         "node-cloudflared-tunnel": "~1.0.9",
         "node-radius-client": "~1.0.0",
@@ -124,11 +126,12 @@
         "notp": "~2.0.3",
         "openid-client": "^5.4.2",
         "password-hash": "~1.2.2",
-        "pg": "~8.8.0",
-        "pg-connection-string": "~2.5.0",
-        "playwright-core": "~1.35.1",
+        "pg": "~8.11.3",
+        "pg-connection-string": "~2.6.2",
+        "playwright-core": "~1.39.0",
         "prom-client": "~13.2.0",
         "prometheus-api-metrics": "~3.2.1",
+        "promisify-child-process": "~4.1.2",
         "protobufjs": "~7.2.4",
         "qs": "~6.10.4",
         "redbean-node": "~0.3.0",
@@ -140,21 +143,23 @@
         "tar": "~6.1.11",
         "tcp-ping": "~0.1.1",
         "thirty-two": "~1.0.2",
+        "tough-cookie": "~4.1.3",
         "ws": "^8.13.0"
     },
     "devDependencies": {
         "@actions/github": "~5.0.1",
-        "@babel/eslint-parser": "^7.22.7",
-        "@babel/preset-env": "^7.15.8",
         "@fortawesome/fontawesome-svg-core": "~1.2.36",
         "@fortawesome/free-regular-svg-icons": "~5.15.4",
         "@fortawesome/free-solid-svg-icons": "~5.15.4",
         "@fortawesome/vue-fontawesome": "~3.0.0-5",
+        "@playwright/test": "~1.39.0",
         "@popperjs/core": "~2.10.2",
         "@types/bootstrap": "~5.1.9",
-        "@vitejs/plugin-legacy": "~4.1.0",
-        "@vitejs/plugin-vue": "~4.2.3",
-        "@vue/compiler-sfc": "~3.3.4",
+        "@types/node": "^20.8.6",
+        "@typescript-eslint/eslint-plugin": "^6.7.5",
+        "@typescript-eslint/parser": "^6.7.5",
+        "@vitejs/plugin-vue": "~5.0.1",
+        "@vue/compiler-sfc": "~3.4.2",
         "@vuepic/vue-datepicker": "~3.4.8",
         "aedes": "^0.46.3",
         "bootstrap": "5.1.3",
@@ -164,15 +169,14 @@
         "core-js": "~3.26.1",
         "cronstrue": "~2.24.0",
         "cross-env": "~7.0.3",
-        "cypress": "^12.17.0",
         "delay": "^5.0.0",
         "dns2": "~2.0.1",
         "dompurify": "~2.4.3",
         "eslint": "~8.14.0",
-        "eslint-plugin-jsdoc": "^46.4.6",
+        "eslint-plugin-jsdoc": "~46.4.6",
         "eslint-plugin-vue": "~8.7.1",
         "favico.js": "~0.3.10",
-        "jest": "~29.6.1",
+        "get-port-please": "^3.1.1",
         "marked": "~4.2.5",
         "node-ssh": "~13.1.0",
         "postcss-html": "~1.5.0",
@@ -189,10 +193,9 @@
         "timezones-list": "~3.0.1",
         "typescript": "~4.4.4",
         "v-pagination-3": "~0.1.7",
-        "vite": "~4.4.1",
-        "vite-plugin-commonjs": "^0.8.0",
+        "vite": "~5.0.10",
         "vite-plugin-compression": "^0.5.1",
-        "vue": "~3.3.4",
+        "vue": "~3.4.2",
         "vue-chartjs": "~5.2.0",
         "vue-confirm-dialog": "~1.0.2",
         "vue-contenteditable": "~3.0.4",
@@ -201,10 +204,10 @@
         "vue-multiselect": "~3.0.0-alpha.2",
         "vue-prism-editor": "~2.0.0-alpha.2",
         "vue-qrcode": "~1.0.0",
-        "vue-router": "~4.0.14",
+        "vue-router": "~4.2.5",
         "vue-toastification": "~2.0.0-rc.5",
         "vuedraggable": "~4.1.0",
-        "wait-on": "^6.0.1",
+        "wait-on": "^7.2.0",
         "whatwg-url": "~12.0.1"
     }
 }
(SVG icon file; size before: 893 B, after: 1.1 KiB)
@@ -1,10 +1,9 @@
-<svg width="640" height="640" viewBox="0 0 640 640" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M490.4 235.64C544.09 358.38 544.09 435.34 490.4 466.5C409.85 513.24 199.96 527.49 139.54 455.64C99.2601 407.74 99.2601 334.4 139.54 235.64C180.5 168.18 238.71 134.45 314.17 134.45C389.64 134.45 448.38 168.18 490.4 235.64Z" fill="url(#paint0_linear_381_799)"/>
-<path d="M490.4 235.64C544.09 358.38 544.09 435.34 490.4 466.5C409.85 513.24 199.96 527.49 139.54 455.64C99.2601 407.74 99.2601 334.4 139.54 235.64C180.5 168.18 238.71 134.45 314.17 134.45C389.64 134.45 448.38 168.18 490.4 235.64Z" stroke="#F2F2F2" stroke-opacity="0.51" stroke-width="200"/>
-<defs>
-<linearGradient id="paint0_linear_381_799" x1="259.78" y1="261.15" x2="463.85" y2="456.49" gradientUnits="userSpaceOnUse">
+<svg width="640" height="640" viewBox="0 0 640 640" xml:space="preserve" xmlns="http://www.w3.org/2000/svg">
+<g transform="matrix(1 0 0 1 320 320)">
+<linearGradient id="S3" gradientUnits="userSpaceOnUse" gradientTransform="matrix(1 0 0 1 -319.99875 -320.0001577393)" x1="259.78" y1="261.15" x2="463.85" y2="456.49">
 <stop stop-color="#5CDD8B"/>
 <stop offset="1" stop-color="#86E6A9"/>
 </linearGradient>
-</defs>
+<path style="stroke: rgb(242,242,242); stroke-opacity: 0.51; stroke-width: 200; stroke-dasharray: none; stroke-linecap: butt; stroke-dashoffset: 0; stroke-linejoin: miter; stroke-miterlimit: 4; fill: url(#S3); fill-rule: nonzero; opacity: 1;" transform=" translate(0, 0)" d="M 170.40125 -84.36016 C 224.09125 38.37984 224.09125 115.33984 170.40125 146.49984 C 89.85125000000001 193.23984000000002 -120.03875 207.48984000000002 -180.45875 135.63984 C -220.73875 87.73983999999999 -220.73875 14.399839999999998 -180.45875 -84.36016000000001 C -139.49875 -151.82016 -81.28875000000001 -185.55016 -5.828750000000014 -185.55016 C 69.64124999999999 -185.55016 128.38125 -151.82016000000002 170.40124999999998 -84.36016000000001 z" stroke-linecap="round" />
+</g>
 </svg>
(deleted file, 88 lines)
@@ -1,88 +0,0 @@
const https = require("https");
const http = require("http");
const CacheableLookup = require("cacheable-lookup");
const { Settings } = require("./settings");
const { log } = require("../src/util");

class CacheableDnsHttpAgent {

    static cacheable = new CacheableLookup();

    static httpAgentList = {};
    static httpsAgentList = {};

    static enable = false;

    /**
     * Register/Disable cacheable to global agents
     * @returns {void}
     */
    static async update() {
        log.debug("CacheableDnsHttpAgent", "update");
        let isEnable = await Settings.get("dnsCache");

        if (isEnable !== this.enable) {
            log.debug("CacheableDnsHttpAgent", "value changed");

            if (isEnable) {
                log.debug("CacheableDnsHttpAgent", "enable");
                this.cacheable.install(http.globalAgent);
                this.cacheable.install(https.globalAgent);
            } else {
                log.debug("CacheableDnsHttpAgent", "disable");
                this.cacheable.uninstall(http.globalAgent);
                this.cacheable.uninstall(https.globalAgent);
            }
        }

        this.enable = isEnable;
    }

    /**
     * Attach cacheable to HTTP agent
     * @param {http.Agent} agent Agent to install
     * @returns {void}
     */
    static install(agent) {
        this.cacheable.install(agent);
    }

    /**
     * @param {https.AgentOptions} agentOptions Options to pass to HTTPS agent
     * @returns {https.Agent} The new HTTPS agent
     */
    static getHttpsAgent(agentOptions) {
        if (!this.enable) {
            return new https.Agent(agentOptions);
        }

        let key = JSON.stringify(agentOptions);
        if (!(key in this.httpsAgentList)) {
            this.httpsAgentList[key] = new https.Agent(agentOptions);
            this.cacheable.install(this.httpsAgentList[key]);
        }
        return this.httpsAgentList[key];
    }

    /**
     * @param {http.AgentOptions} agentOptions Options to pass to the HTTP agent
     * @returns {https.Agents} The new HTTP agent
     */
    static getHttpAgent(agentOptions) {
        if (!this.enable) {
            return new http.Agent(agentOptions);
        }

        let key = JSON.stringify(agentOptions);
        if (!(key in this.httpAgentList)) {
            this.httpAgentList[key] = new http.Agent(agentOptions);
            this.cacheable.install(this.httpAgentList[key]);
        }
        return this.httpAgentList[key];
    }

}

module.exports = {
    CacheableDnsHttpAgent,
};
@@ -185,6 +185,30 @@ async function sendDockerHostList(socket) {
     return list;
 }
 
+/**
+ * Send list of docker hosts to client
+ * @param {Socket} socket Socket.io socket instance
+ * @returns {Promise<Bean[]>} List of docker hosts
+ */
+async function sendRemoteBrowserList(socket) {
+    const timeLogger = new TimeLogger();
+
+    let result = [];
+    let list = await R.find("remote_browser", " user_id = ? ", [
+        socket.userID,
+    ]);
+
+    for (let bean of list) {
+        result.push(bean.toJSON());
+    }
+
+    io.to(socket.userID).emit("remoteBrowserList", result);
+
+    timeLogger.print("Send Remote Browser List");
+
+    return list;
+}
+
 module.exports = {
     sendNotificationList,
     sendImportantHeartbeatList,
@@ -192,5 +216,6 @@ module.exports = {
     sendProxyList,
     sendAPIKeyList,
     sendInfo,
-    sendDockerHostList
+    sendDockerHostList,
+    sendRemoteBrowserList,
 };
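For context, the new sendRemoteBrowserList() emits a "remoteBrowserList" event to the user's Socket.IO room. A minimal client-side listener sketch (hedged illustration only; "socket" is assumed to be an existing socket.io-client instance, and the event name is taken from the diff):

// Hypothetical frontend listener for the event emitted above.
socket.on("remoteBrowserList", (list) => {
    // "list" is the array of remote browser beans serialised with toJSON() on the server.
    console.log("Remote browsers:", list);
});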
@@ -1,29 +1,46 @@
+const isFreeBSD = /^freebsd/.test(process.platform);
+
 // Interop with browser
 const args = (typeof process !== "undefined") ? require("args-parser")(process.argv) : {};
-const demoMode = args["demo"] || false;
 
-const badgeConstants = {
-    naColor: "#999",
-    defaultUpColor: "#66c20a",
-    defaultWarnColor: "#eed202",
-    defaultDownColor: "#c2290a",
-    defaultPendingColor: "#f8a306",
-    defaultMaintenanceColor: "#1747f5",
-    defaultPingColor: "blue", // as defined by badge-maker / shields.io
-    defaultStyle: "flat",
-    defaultPingValueSuffix: "ms",
-    defaultPingLabelSuffix: "h",
-    defaultUptimeValueSuffix: "%",
-    defaultUptimeLabelSuffix: "h",
-    defaultCertExpValueSuffix: " days",
-    defaultCertExpLabelSuffix: "h",
-    // Values Come From Default Notification Times
-    defaultCertExpireWarnDays: "14",
-    defaultCertExpireDownDays: "7"
-};
+// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
+// Dual-stack support for (::)
+// Also read HOST if not FreeBSD, as HOST is a system environment variable in FreeBSD
+let hostEnv = isFreeBSD ? null : process.env.HOST;
+const hostname = args.host || process.env.UPTIME_KUMA_HOST || hostEnv;
+
+const port = [ args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001 ]
+    .map(portValue => parseInt(portValue))
+    .find(portValue => !isNaN(portValue));
+
+const sslKey = args["ssl-key"] || process.env.UPTIME_KUMA_SSL_KEY || process.env.SSL_KEY || undefined;
+const sslCert = args["ssl-cert"] || process.env.UPTIME_KUMA_SSL_CERT || process.env.SSL_CERT || undefined;
+const sslKeyPassphrase = args["ssl-key-passphrase"] || process.env.UPTIME_KUMA_SSL_KEY_PASSPHRASE || process.env.SSL_KEY_PASSPHRASE || undefined;
+
+const isSSL = sslKey && sslCert;
+
+/**
+ * Get the local WebSocket URL
+ * @returns {string} The local WebSocket URL
+ */
+function getLocalWebSocketURL() {
+    const protocol = isSSL ? "wss" : "ws";
+    const host = hostname || "localhost";
+    return `${protocol}://${host}:${port}`;
+}
+
+const localWebSocketURL = getLocalWebSocketURL();
+
+const demoMode = args["demo"] || false;
 
 module.exports = {
     args,
+    hostname,
+    port,
+    sslKey,
+    sslCert,
+    sslKeyPassphrase,
+    isSSL,
+    localWebSocketURL,
     demoMode,
-    badgeConstants,
 };
@@ -12,22 +12,40 @@ const mysql = require("mysql2/promise");
  */
 class Database {
 
+    /**
+     * Boostrap database for SQLite
+     * @type {string}
+     */
     static templatePath = "./db/kuma.db";
 
     /**
      * Data Dir (Default: ./data)
+     * @type {string}
      */
     static dataDir;
 
     /**
      * User Upload Dir (Default: ./data/upload)
+     * @type {string}
      */
     static uploadDir;
 
+    /**
+     * Chrome Screenshot Dir (Default: ./data/screenshots)
+     * @type {string}
+     */
     static screenshotDir;
 
+    /**
+     * SQLite file path (Default: ./data/kuma.db)
+     * @type {string}
+     */
     static sqlitePath;
 
+    /**
+     * For storing Docker TLS certs (Default: ./data/docker-tls)
+     * @type {string}
+     */
     static dockerTLSDir;
 
     /**
@@ -84,7 +102,10 @@ class Database {
         "patch-add-certificate-expiry-status-page.sql": true,
         "patch-monitor-oauth-cc.sql": true,
         "patch-add-timeout-monitor.sql": true,
-        "patch-add-gamedig-given-port.sql": true, // The last file so far converted to a knex migration file
+        "patch-add-gamedig-given-port.sql": true,
+        "patch-notification-config.sql": true,
+        "patch-fix-kafka-producer-booleans.sql": true,
+        "patch-timeout.sql": true, // The last file so far converted to a knex migration file
     };
 
     /**
@@ -130,11 +151,14 @@ class Database {
             fs.mkdirSync(Database.dockerTLSDir, { recursive: true });
         }
 
-        log.info("db", `Data Dir: ${Database.dataDir}`);
+        log.info("server", `Data Dir: ${Database.dataDir}`);
     }
 
     /**
-     *
+     * Read the database config
+     * @throws {Error} If the config is invalid
+     * @typedef {string|undefined} envString
+     * @returns {{type: "sqlite"} | {type:envString, hostname:envString, port:envString, database:envString, username:envString, password:envString}} Database config
      */
     static readDBConfig() {
         let dbConfig;
@@ -153,7 +177,9 @@ class Database {
     }
 
     /**
-     * @param dbConfig
+     * @typedef {string|undefined} envString
+     * @param {{type: "sqlite"} | {type:envString, hostname:envString, port:envString, database:envString, username:envString, password:envString}} dbConfig the database configuration that should be written
+     * @returns {void}
      */
     static writeDBConfig(dbConfig) {
         fs.writeFileSync(path.join(Database.dataDir, "db-config.json"), JSON.stringify(dbConfig, null, 4));
@@ -161,10 +187,8 @@ class Database {
 
     /**
      * Connect to the database
-     * @param {boolean} testMode Should the connection be
-     * started in test mode?
-     * @param {boolean} autoloadModels Should models be
-     * automatically loaded?
+     * @param {boolean} testMode Should the connection be started in test mode?
+     * @param {boolean} autoloadModels Should models be automatically loaded?
      * @param {boolean} noLog Should logs not be output?
      * @returns {Promise<void>}
      */
@@ -239,7 +263,14 @@ class Database {
                 user: dbConfig.username,
                 password: dbConfig.password,
                 database: dbConfig.dbName,
-                timezone: "UTC",
+                timezone: "Z",
+                typeCast: function (field, next) {
+                    if (field.type === "DATETIME") {
+                        // Do not perform timezone conversion
+                        return field.string();
+                    }
+                    return next();
+                },
             },
             pool: mariadbPoolConfig,
         };
@@ -253,6 +284,14 @@ class Database {
                 socketPath: embeddedMariaDB.socketPath,
                 user: "node",
                 database: "kuma",
+                timezone: "Z",
+                typeCast: function (field, next) {
+                    if (field.type === "DATETIME") {
+                        // Do not perform timezone conversion
+                        return field.string();
+                    }
+                    return next();
+                },
             },
             pool: mariadbPoolConfig,
         };
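The typeCast override above makes the mysql2 driver hand DATETIME columns back as plain strings instead of timezone-converted JavaScript Date objects. A small standalone sketch of the observable difference (hedged illustration only; the connection options are placeholders, not the project's real settings):

// Hypothetical demo: with this typeCast a DATETIME stays a literal
// "YYYY-MM-DD HH:mm:ss" string, exactly as stored, instead of becoming a Date.
const mysql = require("mysql2/promise");

async function demo() {
    const conn = await mysql.createConnection({
        host: "127.0.0.1",      // placeholder
        user: "node",           // placeholder
        database: "kuma",       // placeholder
        timezone: "Z",
        typeCast: (field, next) => field.type === "DATETIME" ? field.string() : next(),
    });
    const [ rows ] = await conn.query("SELECT NOW() AS now");
    console.log(typeof rows[0].now, rows[0].now); // "string", e.g. "2024-01-01 00:00:00"
    await conn.end();
}

demo();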
@@ -292,8 +331,9 @@ class Database {
     }
 
     /**
-     * @param testMode
-     * @param noLog
+     * @param {boolean} testMode Should the connection be started in test mode?
+     * @param {boolean} noLog Should logs not be output?
+     * @returns {Promise<void>}
      */
     static async initSQLite(testMode, noLog) {
         await R.exec("PRAGMA foreign_keys = ON");
@@ -313,15 +353,16 @@ class Database {
         await R.exec("PRAGMA synchronous = NORMAL");
 
         if (!noLog) {
-            log.info("db", "SQLite config:");
-            log.info("db", await R.getAll("PRAGMA journal_mode"));
-            log.info("db", await R.getAll("PRAGMA cache_size"));
-            log.info("db", "SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
+            log.debug("db", "SQLite config:");
+            log.debug("db", await R.getAll("PRAGMA journal_mode"));
+            log.debug("db", await R.getAll("PRAGMA cache_size"));
+            log.debug("db", "SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
         }
     }
 
     /**
-     *
+     * Initialize MariaDB
+     * @returns {Promise<void>}
      */
     static async initMariaDB() {
         log.debug("db", "Checking if MariaDB database exists...");
@@ -353,10 +394,16 @@ class Database {
                 directory: Database.knexMigrationsPath,
             });
         } catch (e) {
-            log.error("db", "Database migration failed");
-            throw e;
+            // Allow missing patch files for downgrade or testing pr.
+            if (e.message.includes("the following files are missing:")) {
+                log.warn("db", e.message);
+                log.warn("db", "Database migration failed, you may be downgrading Uptime Kuma.");
+            } else {
+                log.error("db", "Database migration failed");
+                throw e;
+            }
         }
     }
 
     /**
      * TODO
@@ -368,6 +415,7 @@ class Database {
 
     /**
      * Patch the database for SQLite
+     * @returns {Promise<void>}
      * @deprecated
      */
     static async patchSqlite() {
@@ -377,13 +425,15 @@ class Database {
             version = 0;
         }
 
+        if (version !== this.latestVersion) {
             log.info("db", "Your database version: " + version);
             log.info("db", "Latest database version: " + this.latestVersion);
+        }
 
         if (version === this.latestVersion) {
-            log.info("db", "Database patch not needed");
+            log.debug("db", "Database patch not needed");
         } else if (version > this.latestVersion) {
-            log.info("db", "Warning: Database version is newer than expected");
+            log.warn("db", "Warning: Database version is newer than expected");
         } else {
             log.info("db", "Database patch is needed");
 
@@ -419,7 +469,7 @@ class Database {
      * @returns {Promise<void>}
      */
     static async patchSqlite2() {
-        log.info("db", "Database Patch 2.0 Process");
+        log.debug("db", "Database Patch 2.0 Process");
         let databasePatchedFiles = await setting("databasePatchedFiles");
 
         if (! databasePatchedFiles) {
@@ -650,10 +700,10 @@ class Database {
     }
 
     /**
-     *
+     * @returns {string} Get the SQL for the current time plus a number of hours
     */
     static sqlHourOffset() {
-        if (this.dbConfig.client === "sqlite3") {
+        if (Database.dbConfig.type === "sqlite") {
             return "DATETIME('now', ? || ' hours')";
         } else {
             return "DATE_ADD(NOW(), INTERVAL ? HOUR)";
@@ -1,10 +1,10 @@
 const axios = require("axios");
 const { R } = require("redbean-node");
-const version = require("../package.json").version;
 const https = require("https");
 const fs = require("fs");
 const path = require("path");
 const Database = require("./database");
+const { axiosAbortSignal } = require("./util-server");
 
 class DockerHost {
 
@@ -70,10 +70,11 @@ class DockerHost {
     static async testDockerHost(dockerHost) {
         const options = {
             url: "/containers/json?all=true",
+            timeout: 5000,
             headers: {
                 "Accept": "*/*",
-                "User-Agent": "Uptime-Kuma/" + version
             },
+            signal: axiosAbortSignal(6000),
         };
 
         if (dockerHost.dockerType === "socket") {
@@ -83,6 +84,7 @@ class DockerHost {
             options.httpsAgent = new https.Agent(DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL));
         }
 
+        try {
             let res = await axios.request(options);
 
             if (Array.isArray(res.data)) {
@@ -102,7 +104,13 @@ class DockerHost {
             } else {
                 throw new Error("Invalid Docker response, is it Docker really a daemon?");
             }
+        } catch (e) {
+            if (e.code === "ECONNABORTED" || e.name === "CanceledError") {
+                throw new Error("Connection to Docker daemon timed out.");
+            } else {
+                throw e;
+            }
+        }
     }
 
     /**
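The change pairs axios' own timeout with an abort signal from the project's axiosAbortSignal() helper, since the timeout option alone does not bound every kind of stalled request. As a hedged illustration only (not the project's helper), a similar effect is possible with the standard AbortSignal.timeout() available in Node 17.3+:

// Hypothetical sketch: cancel an axios request that takes longer than 6 seconds overall.
const axios = require("axios");

async function probe(url) {
    try {
        const res = await axios.get(url, {
            timeout: 5000,                     // per-request idle timeout
            signal: AbortSignal.timeout(6000), // hard upper bound on the whole request
        });
        return res.status;
    } catch (e) {
        if (e.code === "ECONNABORTED" || e.name === "CanceledError") {
            throw new Error("Request timed out");
        }
        throw e;
    }
}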
@@ -18,13 +18,17 @@ class EmbeddedMariaDB {
 
     socketPath = this.runDir + "/mysqld.sock";
 
+    /**
+     * @type {ChildProcessWithoutNullStreams}
+     * @private
+     */
     childProcess = null;
     running = false;
 
     started = false;
 
     /**
-     * @returns {EmbeddedMariaDB}
+     * @returns {EmbeddedMariaDB} The singleton instance
     */
     static getInstance() {
         if (!EmbeddedMariaDB.instance) {
@@ -34,14 +38,15 @@ class EmbeddedMariaDB {
     }
 
     /**
-     *
+     * @returns {boolean} If the singleton instance is created
     */
     static hasInstance() {
         return !!EmbeddedMariaDB.instance;
     }
 
     /**
-     *
+     * Start the embedded MariaDB
+     * @returns {Promise<void>|void} A promise that resolves when the MariaDB is started or void if it is already started
     */
     start() {
         if (this.childProcess) {
@@ -103,7 +108,8 @@ class EmbeddedMariaDB {
     }
 
     /**
-     *
+     * Stop all the child processes
+     * @returns {void}
     */
     stop() {
         if (this.childProcess) {
@@ -113,7 +119,8 @@ class EmbeddedMariaDB {
     }
 
     /**
-     *
+     * Install MariaDB if it is not installed and make sure the `runDir` directory exists
+     * @returns {void}
     */
     initDB() {
         if (!fs.existsSync(this.mariadbDataDir)) {
@@ -146,7 +153,8 @@ class EmbeddedMariaDB {
     }
 
     /**
-     *
+     * Initialise the "kuma" database in mariadb if it does not exist
+     * @returns {Promise<void>}
     */
     async initDBAfterStarted() {
         const connection = mysql.createConnection({
@@ -1,4 +1,5 @@
 const jsesc = require("jsesc");
+const { escape } = require("html-escaper");
 
 /**
  * Returns a string that represents the javascript that is required to insert the Google Analytics scripts
@@ -7,15 +8,18 @@ const jsesc = require("jsesc");
  * @returns {string} HTML script tags to inject into page
  */
 function getGoogleAnalyticsScript(tagId) {
-    let escapedTagId = jsesc(tagId, { isScriptContext: true });
+    let escapedTagIdJS = jsesc(tagId, { isScriptContext: true });
 
-    if (escapedTagId) {
-        escapedTagId = escapedTagId.trim();
+    if (escapedTagIdJS) {
+        escapedTagIdJS = escapedTagIdJS.trim();
     }
 
+    // Escape the tag ID for use in an HTML attribute.
+    let escapedTagIdHTMLAttribute = escape(tagId);
+
     return `
-    <script async src="https://www.googletagmanager.com/gtag/js?id=${escapedTagId}"></script>
-    <script>window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date());gtag('config', '${escapedTagId}'); </script>
+    <script async src="https://www.googletagmanager.com/gtag/js?id=${escapedTagIdHTMLAttribute}"></script>
+    <script>window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date());gtag('config', '${escapedTagIdJS}'); </script>
     `;
 }
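The change splits the tag ID escaping by context: html-escaper's escape() for the HTML attribute in the script src URL, and jsesc for the inline gtag('config', ...) string. A small sketch of why the two escapers differ (hedged illustration; the malicious-looking tag ID is made up):

// Hypothetical illustration of the two escaping contexts used above.
const jsesc = require("jsesc");
const { escape } = require("html-escaper");

const tagId = `G-ABC"><script>alert(1)</script>`;

// Safe inside an HTML attribute value: &, <, > and " become entities.
console.log(escape(tagId));
// Safe inside a single-quoted JS string within a <script> block.
console.log(jsesc(tagId, { isScriptContext: true }));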
@@ -29,13 +29,14 @@ class Heartbeat extends BeanModel {
      */
     toJSON() {
         return {
-            monitorID: this.monitor_id,
-            status: this.status,
-            time: this.time,
-            msg: this.msg,
-            ping: this.ping,
-            important: this.important,
-            duration: this.duration,
+            monitorID: this._monitorId,
+            status: this._status,
+            time: this._time,
+            msg: this._msg,
+            ping: this._ping,
+            important: this._important,
+            duration: this._duration,
+            retries: this._retries,
         };
     }
 
@ -1,12 +1,11 @@
|
||||||
const https = require("https");
|
|
||||||
const dayjs = require("dayjs");
|
const dayjs = require("dayjs");
|
||||||
const axios = require("axios");
|
const axios = require("axios");
|
||||||
const { Prometheus } = require("../prometheus");
|
const { Prometheus } = require("../prometheus");
|
||||||
const { log, UP, DOWN, PENDING, MAINTENANCE, flipStatus, TimeLogger, MAX_INTERVAL_SECOND, MIN_INTERVAL_SECOND,
|
const { log, UP, DOWN, PENDING, MAINTENANCE, flipStatus, MAX_INTERVAL_SECOND, MIN_INTERVAL_SECOND,
|
||||||
SQL_DATETIME_FORMAT
|
SQL_DATETIME_FORMAT
|
||||||
} = require("../../src/util");
|
} = require("../../src/util");
|
||||||
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mysqlQuery, mqttAsync, setSetting, httpNtlm, radius, grpcQuery,
|
const { tcping, ping, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mysqlQuery, setSetting, httpNtlm, radius, grpcQuery,
|
||||||
redisPingAsync, mongodbPing, kafkaProducerAsync, getOidcTokenClientCredentials,
|
redisPingAsync, mongodbPing, kafkaProducerAsync, getOidcTokenClientCredentials, rootCertificatesFingerprints, axiosAbortSignal
|
||||||
} = require("../util-server");
|
} = require("../util-server");
|
||||||
const { R } = require("redbean-node");
|
const { R } = require("redbean-node");
|
||||||
const { BeanModel } = require("redbean-node/dist/bean-model");
|
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||||
|
@@ -16,12 +15,18 @@ const { demoMode } = require("../config");
 const version = require("../../package.json").version;
 const apicache = require("../modules/apicache");
 const { UptimeKumaServer } = require("../uptime-kuma-server");
-const { CacheableDnsHttpAgent } = require("../cacheable-dns-http-agent");
 const { DockerHost } = require("../docker");
 const Gamedig = require("gamedig");
 const jsonata = require("jsonata");
 const jwt = require("jsonwebtoken");
+const crypto = require("crypto");
 const { UptimeCalculator } = require("../uptime-calculator");
+const { CookieJar } = require("tough-cookie");
+const { HttpsCookieAgent } = require("http-cookie-agent/http");
+const https = require("https");
+const http = require("http");
+
+const rootCertificates = rootCertificatesFingerprints();

 /**
  * status:
@@ -56,7 +61,7 @@ class Monitor extends BeanModel {
            obj.tags = await this.getTags();
        }

-        if (certExpiry && this.type === "http") {
+        if (certExpiry && (this.type === "http" || this.type === "keyword" || this.type === "json-query") && this.getURLProtocol() === "https:") {
            const { certExpiryDaysRemaining, validCert } = await this.getCertExpiry(this.id);
            obj.certExpiryDaysRemaining = certExpiryDaysRemaining;
            obj.validCert = validCert;
@@ -130,6 +135,7 @@ class Monitor extends BeanModel {
            maintenance: await Monitor.isUnderMaintenance(this.id),
            mqttTopic: this.mqttTopic,
            mqttSuccessMessage: this.mqttSuccessMessage,
+            mqttCheckType: this.mqttCheckType,
            databaseQuery: this.databaseQuery,
            authMethod: this.authMethod,
            grpcUrl: this.grpcUrl,
@@ -146,10 +152,11 @@ class Monitor extends BeanModel {
            expectedValue: this.expectedValue,
            kafkaProducerTopic: this.kafkaProducerTopic,
            kafkaProducerBrokers: JSON.parse(this.kafkaProducerBrokers),
-            kafkaProducerSsl: this.kafkaProducerSsl === "1" && true || false,
-            kafkaProducerAllowAutoTopicCreation: this.kafkaProducerAllowAutoTopicCreation === "1" && true || false,
+            kafkaProducerSsl: this.getKafkaProducerSsl(),
+            kafkaProducerAllowAutoTopicCreation: this.getKafkaProducerAllowAutoTopicCreation(),
            kafkaProducerMessage: this.kafkaProducerMessage,
            screenshot,
+            remote_browser: this.remote_browser,
        };

        if (includeSensitiveData) {
@@ -291,12 +298,29 @@ class Monitor extends BeanModel {
    }

    /**
-     *
+     * Get if game dig should only use the port which was provided
+     * @returns {boolean} gamedig should only use the provided port
     */
    getGameDigGivenPortOnly() {
        return Boolean(this.gamedigGivenPortOnly);
    }

+    /**
+     * Parse to boolean
+     * @returns {boolean} Kafka Producer Ssl enabled?
+     */
+    getKafkaProducerSsl() {
+        return Boolean(this.kafkaProducerSsl);
+    }
+
+    /**
+     * Parse to boolean
+     * @returns {boolean} Kafka Producer Allow Auto Topic Creation Enabled?
+     */
+    getKafkaProducerAllowAutoTopicCreation() {
+        return Boolean(this.kafkaProducerAllowAutoTopicCreation);
+    }
+
    /**
     * Start monitor
     * @param {Server} io Socket server instance
@@ -331,6 +355,9 @@ class Monitor extends BeanModel {
            previousBeat = await R.findOne("heartbeat", " monitor_id = ? ORDER BY time DESC", [
                this.id,
            ]);
+            if (previousBeat) {
+                retries = previousBeat.retries;
+            }
        }

        const isFirstBeat = !previousBeat;
@@ -345,6 +372,12 @@ class Monitor extends BeanModel {
                bean.status = flipStatus(bean.status);
            }

+            // Runtime patch timeout if it is 0
+            // See https://github.com/louislam/uptime-kuma/pull/3961#issuecomment-1804149144
+            if (!this.timeout || this.timeout <= 0) {
+                this.timeout = this.interval * 1000 * 0.8;
+            }
+
            try {
                if (await Monitor.isUnderMaintenance(this.id)) {
                    bean.msg = "Monitor under maintenance";
@@ -400,9 +433,7 @@ class Monitor extends BeanModel {
                if (this.auth_method === "oauth2-cc") {
                    try {
                        if (this.oauthAccessToken === undefined || new Date(this.oauthAccessToken.expires_at * 1000) <= new Date()) {
-                            log.debug("monitor", `[${this.name}] The oauth access-token undefined or expired. Requesting a new one`);
-                            this.oauthAccessToken = await getOidcTokenClientCredentials(this.oauth_token_url, this.oauth_client_id, this.oauth_client_secret, this.oauth_scopes, this.oauth_auth_method);
-                            log.debug("monitor", `[${this.name}] Obtained oauth access-token. Expires at ${new Date(this.oauthAccessToken.expires_at * 1000)}`);
+                            this.oauthAccessToken = await this.makeOidcTokenClientCredentialsRequest();
                        }
                        oauth2AuthHeader = {
                            "Authorization": this.oauthAccessToken.token_type + " " + this.oauthAccessToken.access_token,
@@ -415,6 +446,7 @@ class Monitor extends BeanModel {
                    const httpsAgentOptions = {
                        maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
                        rejectUnauthorized: !this.getIgnoreTls(),
+                        secureOptions: crypto.constants.SSL_OP_LEGACY_SERVER_CONNECT,
                    };

                    log.debug("monitor", `[${this.name}] Prepare Options for axios`);
@@ -430,6 +462,9 @@ class Monitor extends BeanModel {
                        } catch (e) {
                            throw new Error("Your JSON body is invalid. " + e.message);
                        }
+                    } else if (this.httpBodyEncoding === "form") {
+                        bodyValue = this.body;
+                        contentType = "application/x-www-form-urlencoded";
                    } else if (this.httpBodyEncoding === "xml") {
                        bodyValue = this.body;
                        contentType = "text/xml; charset=utf-8";
@@ -443,7 +478,6 @@ class Monitor extends BeanModel {
                        timeout: this.timeout * 1000,
                        headers: {
                            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
-                            "User-Agent": "Uptime-Kuma/" + version,
                            ...(contentType ? { "Content-Type": contentType } : {}),
                            ...(basicAuthHeader),
                            ...(oauth2AuthHeader),
@@ -453,6 +487,7 @@ class Monitor extends BeanModel {
                        validateStatus: (status) => {
                            return checkStatusCode(status, this.getAcceptedStatuscodes());
                        },
+                        signal: axiosAbortSignal((this.timeout + 10) * 1000),
                    };

                    if (bodyValue) {
@@ -474,7 +509,12 @@ class Monitor extends BeanModel {
                    }

                    if (!options.httpsAgent) {
-                        options.httpsAgent = new https.Agent(httpsAgentOptions);
+                        let jar = new CookieJar();
+                        let httpsCookieAgentOptions = {
+                            ...httpsAgentOptions,
+                            cookies: { jar }
+                        };
+                        options.httpsAgent = new HttpsCookieAgent(httpsCookieAgentOptions);
                    }

                    if (this.auth_method === "mtls") {
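Replacing the bare https.Agent with HttpsCookieAgent means redirect chains share a cookie jar, which some login-protected endpoints require. A minimal sketch of that wiring outside the monitor (the URL is illustrative):

// Minimal sketch: axios + http-cookie-agent so cookies survive across redirects.
const axios = require("axios");
const { CookieJar } = require("tough-cookie");
const { HttpsCookieAgent } = require("http-cookie-agent/http");

async function fetchWithCookies(url) {
    const jar = new CookieJar();
    const httpsAgent = new HttpsCookieAgent({
        cookies: { jar },          // the shared jar
        maxCachedSessions: 0,      // mirror the monitor's no-session-reuse setting
    });
    return axios.get(url, { httpsAgent, maxRedirects: 10 });
}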
@@ -580,46 +620,6 @@ class Monitor extends BeanModel {
                    bean.ping = await ping(this.hostname, this.packetSize);
                    bean.msg = "";
                    bean.status = UP;
-                } else if (this.type === "dns") {
-                    let startTime = dayjs().valueOf();
-                    let dnsMessage = "";
-
-                    let dnsRes = await dnsResolve(this.hostname, this.dns_resolve_server, this.port, this.dns_resolve_type);
-                    bean.ping = dayjs().valueOf() - startTime;
-
-                    if (this.dns_resolve_type === "A" || this.dns_resolve_type === "AAAA" || this.dns_resolve_type === "TXT" || this.dns_resolve_type === "PTR") {
-                        dnsMessage += "Records: ";
-                        dnsMessage += dnsRes.join(" | ");
-                    } else if (this.dns_resolve_type === "CNAME") {
-                        dnsMessage += dnsRes[0];
-                    } else if (this.dns_resolve_type === "CAA") {
-                        dnsMessage += dnsRes[0].issue;
-                    } else if (this.dns_resolve_type === "MX") {
-                        dnsRes.forEach(record => {
-                            dnsMessage += `Hostname: ${record.exchange} - Priority: ${record.priority} | `;
-                        });
-                        dnsMessage = dnsMessage.slice(0, -2);
-                    } else if (this.dns_resolve_type === "NS") {
-                        dnsMessage += "Servers: ";
-                        dnsMessage += dnsRes.join(" | ");
-                    } else if (this.dns_resolve_type === "SOA") {
-                        dnsMessage += `NS-Name: ${dnsRes.nsname} | Hostmaster: ${dnsRes.hostmaster} | Serial: ${dnsRes.serial} | Refresh: ${dnsRes.refresh} | Retry: ${dnsRes.retry} | Expire: ${dnsRes.expire} | MinTTL: ${dnsRes.minttl}`;
-                    } else if (this.dns_resolve_type === "SRV") {
-                        dnsRes.forEach(record => {
-                            dnsMessage += `Name: ${record.name} | Port: ${record.port} | Priority: ${record.priority} | Weight: ${record.weight} | `;
-                        });
-                        dnsMessage = dnsMessage.slice(0, -2);
-                    }
-
-                    if (this.dnsLastResult !== dnsMessage && dnsMessage !== undefined) {
-                        R.exec("UPDATE `monitor` SET dns_last_result = ? WHERE id = ? ", [
-                            dnsMessage,
-                            this.id
-                        ]);
-                    }
-
-                    bean.msg = dnsMessage;
-                    bean.status = UP;
                } else if (this.type === "push") { // Type: Push
                    log.debug("monitor", `[${this.name}] Checking monitor at ${dayjs().format("YYYY-MM-DD HH:mm:ss.SSS")}`);
                    const bufferTime = 1000; // 1s buffer to accommodate clock differences
@@ -632,6 +632,7 @@ class Monitor extends BeanModel {
                    // If the previous beat was down or pending we use the regular
                    // beatInterval/retryInterval in the setTimeout further below
                    if (previousBeat.status !== (this.isUpsideDown() ? DOWN : UP) || msSinceLastBeat > beatInterval * 1000 + bufferTime) {
+                        bean.duration = Math.round(msSinceLastBeat / 1000);
                        throw new Error("No heartbeat in the time window");
                    } else {
                        let timeout = beatInterval * 1000 - msSinceLastBeat;
@@ -647,6 +648,7 @@ class Monitor extends BeanModel {
                            return;
                        }
                    } else {
+                        bean.duration = beatInterval;
                        throw new Error("No heartbeat in the time window");
                    }

@@ -663,13 +665,13 @@ class Monitor extends BeanModel {
                    timeout: this.timeout * 1000,
                    headers: {
                        "Accept": "*/*",
-                        "User-Agent": "Uptime-Kuma/" + version,
                    },
-                    httpsAgent: CacheableDnsHttpAgent.getHttpsAgent({
+                    httpsAgent: new https.Agent({
                        maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
                        rejectUnauthorized: !this.getIgnoreTls(),
+                        secureOptions: crypto.constants.SSL_OP_LEGACY_SERVER_CONNECT,
                    }),
-                    httpAgent: CacheableDnsHttpAgent.getHttpAgent({
+                    httpAgent: new http.Agent({
                        maxCachedSessions: 0,
                    }),
                    maxRedirects: this.maxredirects,
@ -710,29 +712,33 @@ class Monitor extends BeanModel {
|
||||||
} else if (this.type === "docker") {
|
} else if (this.type === "docker") {
|
||||||
log.debug("monitor", `[${this.name}] Prepare Options for Axios`);
|
log.debug("monitor", `[${this.name}] Prepare Options for Axios`);
|
||||||
|
|
||||||
const dockerHost = await R.load("docker_host", this.docker_host);
|
|
||||||
|
|
||||||
const options = {
|
const options = {
|
||||||
url: `/containers/${this.docker_container}/json`,
|
url: `/containers/${this.docker_container}/json`,
|
||||||
timeout: this.interval * 1000 * 0.8,
|
timeout: this.interval * 1000 * 0.8,
|
||||||
headers: {
|
headers: {
|
||||||
"Accept": "*/*",
|
"Accept": "*/*",
|
||||||
"User-Agent": "Uptime-Kuma/" + version,
|
|
||||||
},
|
},
|
||||||
httpsAgent: CacheableDnsHttpAgent.getHttpsAgent({
|
httpsAgent: new https.Agent({
|
||||||
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
|
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
|
||||||
rejectUnauthorized: !this.getIgnoreTls(),
|
rejectUnauthorized: !this.getIgnoreTls(),
|
||||||
|
secureOptions: crypto.constants.SSL_OP_LEGACY_SERVER_CONNECT,
|
||||||
}),
|
}),
|
||||||
httpAgent: CacheableDnsHttpAgent.getHttpAgent({
|
httpAgent: new http.Agent({
|
||||||
maxCachedSessions: 0,
|
maxCachedSessions: 0,
|
||||||
}),
|
}),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const dockerHost = await R.load("docker_host", this.docker_host);
|
||||||
|
|
||||||
|
if (!dockerHost) {
|
||||||
|
throw new Error("Failed to load docker host config");
|
||||||
|
}
|
||||||
|
|
||||||
if (dockerHost._dockerType === "socket") {
|
if (dockerHost._dockerType === "socket") {
|
||||||
options.socketPath = dockerHost._dockerDaemon;
|
options.socketPath = dockerHost._dockerDaemon;
|
||||||
} else if (dockerHost._dockerType === "tcp") {
|
} else if (dockerHost._dockerType === "tcp") {
|
||||||
options.baseURL = DockerHost.patchDockerURL(dockerHost._dockerDaemon);
|
options.baseURL = DockerHost.patchDockerURL(dockerHost._dockerDaemon);
|
||||||
options.httpsAgent = CacheableDnsHttpAgent.getHttpsAgent(
|
options.httpsAgent = new https.Agent(
|
||||||
DockerHost.getHttpsAgentOptions(dockerHost._dockerType, options.baseURL)
|
DockerHost.getHttpsAgentOptions(dockerHost._dockerType, options.baseURL)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@@ -751,18 +757,10 @@ class Monitor extends BeanModel {
                    } else {
                        throw Error("Container State is " + res.data.State.Status);
                    }
-                } else if (this.type === "mqtt") {
-                    bean.msg = await mqttAsync(this.hostname, this.mqttTopic, this.mqttSuccessMessage, {
-                        port: this.port,
-                        username: this.mqttUsername,
-                        password: this.mqttPassword,
-                        interval: this.interval,
-                    });
-                    bean.status = UP;
                } else if (this.type === "sqlserver") {
                    let startTime = dayjs().valueOf();

-                    await mssqlQuery(this.databaseConnectionString, this.databaseQuery);
+                    await mssqlQuery(this.databaseConnectionString, this.databaseQuery || "SELECT 1");

                    bean.msg = "";
                    bean.status = UP;
@@ -801,7 +799,7 @@ class Monitor extends BeanModel {
                } else if (this.type === "postgres") {
                    let startTime = dayjs().valueOf();

-                    await postgresQuery(this.databaseConnectionString, this.databaseQuery);
+                    await postgresQuery(this.databaseConnectionString, this.databaseQuery || "SELECT 1");

                    bean.msg = "";
                    bean.status = UP;
@@ -809,7 +807,11 @@ class Monitor extends BeanModel {
                } else if (this.type === "mysql") {
                    let startTime = dayjs().valueOf();

-                    bean.msg = await mysqlQuery(this.databaseConnectionString, this.databaseQuery);
+                    // Use `radius_password` as `password` field, since there are too many unnecessary fields
+                    // TODO: rename `radius_password` to `password` later for general use
+                    let mysqlPassword = this.radiusPassword;
+
+                    bean.msg = await mysqlQuery(this.databaseConnectionString, this.databaseQuery || "SELECT 1", mysqlPassword);
                    bean.status = UP;
                    bean.ping = dayjs().valueOf() - startTime;
                } else if (this.type === "mongodb") {
@@ -897,7 +899,11 @@ class Monitor extends BeanModel {

            } catch (error) {

+                if (error?.name === "CanceledError") {
+                    bean.msg = `timeout by AbortSignal (${this.timeout}s)`;
+                } else {
                    bean.msg = error.message;
+                }

                // If UP come in here, it must be upside down mode
                // Just reset the retries
|
||||||
} else if ((this.maxretries > 0) && (retries < this.maxretries)) {
|
} else if ((this.maxretries > 0) && (retries < this.maxretries)) {
|
||||||
retries++;
|
retries++;
|
||||||
bean.status = PENDING;
|
bean.status = PENDING;
|
||||||
|
} else {
|
||||||
|
// Continue counting retries during DOWN
|
||||||
|
retries++;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
bean.retries = retries;
|
||||||
|
|
||||||
log.debug("monitor", `[${this.name}] Check isImportant`);
|
log.debug("monitor", `[${this.name}] Check isImportant`);
|
||||||
let isImportant = Monitor.isImportantBeat(isFirstBeat, previousBeat?.status, bean.status);
|
let isImportant = Monitor.isImportantBeat(isFirstBeat, previousBeat?.status, bean.status);
|
||||||
|
|
||||||
|
@ -984,7 +995,15 @@ class Monitor extends BeanModel {
|
||||||
|
|
||||||
if (! this.isStop) {
|
if (! this.isStop) {
|
||||||
log.debug("monitor", `[${this.name}] SetTimeout for next check.`);
|
log.debug("monitor", `[${this.name}] SetTimeout for next check.`);
|
||||||
this.heartbeatInterval = setTimeout(safeBeat, beatInterval * 1000);
|
|
||||||
|
let intervalRemainingMs = Math.max(
|
||||||
|
1,
|
||||||
|
beatInterval * 1000 - dayjs().diff(dayjs.utc(bean.time))
|
||||||
|
);
|
||||||
|
|
||||||
|
log.debug("monitor", `[${this.name}] Next heartbeat in: ${intervalRemainingMs}ms`);
|
||||||
|
|
||||||
|
this.heartbeatInterval = setTimeout(safeBeat, intervalRemainingMs);
|
||||||
} else {
|
} else {
|
||||||
log.info("monitor", `[${this.name}] isStop = true, no next check.`);
|
log.info("monitor", `[${this.name}] isStop = true, no next check.`);
|
||||||
}
|
}
|
||||||
|
@ -1044,18 +1063,35 @@ class Monitor extends BeanModel {
|
||||||
}
|
}
|
||||||
|
|
||||||
return res;
|
return res;
|
||||||
} catch (e) {
|
} catch (error) {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make a single attempt to obtain an new access token in the event that
|
||||||
|
* the recent api request failed for authentication purposes
|
||||||
|
*/
|
||||||
|
if (this.auth_method === "oauth2-cc" && error.response.status === 401 && !finalCall) {
|
||||||
|
this.oauthAccessToken = await this.makeOidcTokenClientCredentialsRequest();
|
||||||
|
let oauth2AuthHeader = {
|
||||||
|
"Authorization": this.oauthAccessToken.token_type + " " + this.oauthAccessToken.access_token,
|
||||||
|
};
|
||||||
|
options.headers = { ...(options.headers),
|
||||||
|
...(oauth2AuthHeader)
|
||||||
|
};
|
||||||
|
|
||||||
|
return this.makeAxiosRequest(options, true);
|
||||||
|
}
|
||||||
|
|
||||||
// Fix #2253
|
// Fix #2253
|
||||||
// Read more: https://stackoverflow.com/questions/1759956/curl-error-18-transfer-closed-with-outstanding-read-data-remaining
|
// Read more: https://stackoverflow.com/questions/1759956/curl-error-18-transfer-closed-with-outstanding-read-data-remaining
|
||||||
if (!finalCall && typeof e.message === "string" && e.message.includes("maxContentLength size of -1 exceeded")) {
|
if (!finalCall && typeof error.message === "string" && error.message.includes("maxContentLength size of -1 exceeded")) {
|
||||||
log.debug("monitor", "makeAxiosRequest with gzip");
|
log.debug("monitor", "makeAxiosRequest with gzip");
|
||||||
options.headers["Accept-Encoding"] = "gzip, deflate";
|
options.headers["Accept-Encoding"] = "gzip, deflate";
|
||||||
return this.makeAxiosRequest(options, true);
|
return this.makeAxiosRequest(options, true);
|
||||||
} else {
|
} else {
|
||||||
if (typeof e.message === "string" && e.message.includes("maxContentLength size of -1 exceeded")) {
|
if (typeof error.message === "string" && error.message.includes("maxContentLength size of -1 exceeded")) {
|
||||||
e.message = "response timeout: incomplete response within a interval";
|
error.message = "response timeout: incomplete response within a interval";
|
||||||
}
|
}
|
||||||
throw e;
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1093,6 +1129,19 @@ class Monitor extends BeanModel {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Example: http: or https:
|
||||||
|
* @returns {(null|string)} URL's protocol
|
||||||
|
*/
|
||||||
|
getURLProtocol() {
|
||||||
|
const url = this.getUrl();
|
||||||
|
if (url) {
|
||||||
|
return this.getUrl().protocol;
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Store TLS info to database
|
* Store TLS info to database
|
||||||
* @param {object} checkCertificateResult Certificate to update
|
* @param {object} checkCertificateResult Certificate to update
|
||||||
|
@ -1152,7 +1201,7 @@ class Monitor extends BeanModel {
|
||||||
if (hasClients) {
|
if (hasClients) {
|
||||||
// Send 24 hour average ping
|
// Send 24 hour average ping
|
||||||
let data24h = await uptimeCalculator.get24Hour();
|
let data24h = await uptimeCalculator.get24Hour();
|
||||||
io.to(userID).emit("avgPing", monitorID, (data24h.avgPing) ? data24h.avgPing.toFixed(2) : null);
|
io.to(userID).emit("avgPing", monitorID, (data24h.avgPing) ? Number(data24h.avgPing.toFixed(2)) : null);
|
||||||
|
|
||||||
// Send 24 hour uptime
|
// Send 24 hour uptime
|
||||||
io.to(userID).emit("uptime", monitorID, 24, data24h.uptime);
|
io.to(userID).emit("uptime", monitorID, 24, data24h.uptime);
|
||||||
|
@ -1333,7 +1382,10 @@ class Monitor extends BeanModel {
|
||||||
let certInfo = tlsInfoObject.certInfo;
|
let certInfo = tlsInfoObject.certInfo;
|
||||||
while (certInfo) {
|
while (certInfo) {
|
||||||
let subjectCN = certInfo.subject["CN"];
|
let subjectCN = certInfo.subject["CN"];
|
||||||
if (certInfo.daysRemaining > targetDays) {
|
if (rootCertificates.has(certInfo.fingerprint256)) {
|
||||||
|
log.debug("monitor", `Known root cert: ${certInfo.certType} certificate "${subjectCN}" (${certInfo.daysRemaining} days valid) on ${targetDays} deadline.`);
|
||||||
|
break;
|
||||||
|
} else if (certInfo.daysRemaining > targetDays) {
|
||||||
log.debug("monitor", `No need to send cert notification for ${certInfo.certType} certificate "${subjectCN}" (${certInfo.daysRemaining} days valid) on ${targetDays} deadline.`);
|
log.debug("monitor", `No need to send cert notification for ${certInfo.certType} certificate "${subjectCN}" (${certInfo.daysRemaining} days valid) on ${targetDays} deadline.`);
|
||||||
} else {
|
} else {
|
||||||
log.debug("monitor", `call sendCertNotificationByTargetDays for ${targetDays} deadline on certificate ${subjectCN}.`);
|
log.debug("monitor", `call sendCertNotificationByTargetDays for ${targetDays} deadline on certificate ${subjectCN}.`);
|
||||||
|
@ -1399,10 +1451,7 @@ class Monitor extends BeanModel {
|
||||||
* @returns {Promise<LooseObject<any>>} Previous heartbeat
|
* @returns {Promise<LooseObject<any>>} Previous heartbeat
|
||||||
*/
|
*/
|
||||||
static async getPreviousHeartbeat(monitorID) {
|
static async getPreviousHeartbeat(monitorID) {
|
||||||
return await R.getRow(`
|
return await R.findOne("heartbeat", " id = (select MAX(id) from heartbeat where monitor_id = ?)", [
|
||||||
SELECT ping, status, time FROM heartbeat
|
|
||||||
WHERE id = (select MAX(id) from heartbeat where monitor_id = ?)
|
|
||||||
`, [
|
|
||||||
monitorID
|
monitorID
|
||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
|
@ -1545,6 +1594,23 @@ class Monitor extends BeanModel {
|
||||||
const parentActive = await Monitor.isParentActive(parent.id);
|
const parentActive = await Monitor.isParentActive(parent.id);
|
||||||
return parent.active && parentActive;
|
return parent.active && parentActive;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Obtains a new Oidc Token
|
||||||
|
* @returns {Promise<object>} OAuthProvider client
|
||||||
|
*/
|
||||||
|
async makeOidcTokenClientCredentialsRequest() {
|
||||||
|
log.debug("monitor", `[${this.name}] The oauth access-token undefined or expired. Requesting a new token`);
|
||||||
|
const oAuthAccessToken = await getOidcTokenClientCredentials(this.oauth_token_url, this.oauth_client_id, this.oauth_client_secret, this.oauth_scopes, this.oauth_auth_method);
|
||||||
|
if (this.oauthAccessToken?.expires_at) {
|
||||||
|
log.debug("monitor", `[${this.name}] Obtained oauth access-token. Expires at ${new Date(this.oauthAccessToken?.expires_at * 1000)}`);
|
||||||
|
} else {
|
||||||
|
log.debug("monitor", `[${this.name}] Obtained oauth access-token. Time until expiry was not provided`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return oAuthAccessToken;
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = Monitor;
|
module.exports = Monitor;
|
||||||
|
|
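The new makeOidcTokenClientCredentialsRequest() wrapper centralises the token refresh that the check loop previously did inline. For context, a minimal standalone sketch of the client-credentials grant it relies on; this is not the repo's getOidcTokenClientCredentials helper, and the expires_at bookkeeping and basic-auth choice are assumptions:

// Generic sketch of an OAuth2 client-credentials token request (illustrative).
const axios = require("axios");

async function fetchClientCredentialsToken(tokenUrl, clientId, clientSecret, scope) {
    const params = new URLSearchParams({ grant_type: "client_credentials" });
    if (scope) {
        params.append("scope", scope);
    }
    const res = await axios.post(tokenUrl, params.toString(), {
        auth: { username: clientId, password: clientSecret }, // client_secret_basic
        headers: { "Content-Type": "application/x-www-form-urlencoded" },
    });
    // expires_at lets the caller reuse the token until shortly before expiry
    return { ...res.data, expires_at: Math.floor(Date.now() / 1000) + res.data.expires_in };
}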
server/model/remote_browser.js (new file, 17 lines)
@@ -0,0 +1,17 @@
+const { BeanModel } = require("redbean-node/dist/bean-model");
+
+class RemoteBrowser extends BeanModel {
+    /**
+     * Returns an object that ready to parse to JSON
+     * @returns {object} Object ready to parse
+     */
+    toJSON() {
+        return {
+            id: this.id,
+            url: this.url,
+            name: this.name,
+        };
+    }
+}
+
+module.exports = RemoteBrowser;
@@ -21,6 +21,12 @@ class StatusPage extends BeanModel {
     * @returns {void}
     */
    static async handleStatusPageResponse(response, indexHTML, slug) {
+        // Handle url with trailing slash (http://localhost:3001/status/)
+        // The slug comes from the route "/status/:slug". If the slug is empty, express converts it to "index.html"
+        if (slug === "index.html") {
+            slug = "default";
+        }
+
        let statusPage = await R.findOne("status_page", " slug = ? ", [
            slug
        ]);
@@ -1,6 +1,8 @@
 const { BeanModel } = require("redbean-node/dist/bean-model");
 const passwordHash = require("../password-hash");
 const { R } = require("redbean-node");
+const jwt = require("jsonwebtoken");
+const { shake256, SHAKE256_LENGTH } = require("../util-server");

 class User extends BeanModel {
     /**
@@ -23,8 +25,27 @@ class User extends BeanModel {
     * @returns {Promise<void>}
     */
    async resetPassword(newPassword) {
-        await User.resetPassword(this.id, newPassword);
-        this.password = newPassword;
+        const hashedPassword = passwordHash.generate(newPassword);
+
+        await R.exec("UPDATE `user` SET password = ? WHERE id = ? ", [
+            hashedPassword,
+            this.id
+        ]);
+
+        this.password = hashedPassword;
+    }
+
+    /**
+     * Create a new JWT for a user
+     * @param {User} user The User to create a JsonWebToken for
+     * @param {string} jwtSecret The key used to sign the JsonWebToken
+     * @returns {string} the JsonWebToken as a string
+     */
+    static createJWT(user, jwtSecret) {
+        return jwt.sign({
+            username: user.username,
+            h: shake256(user.password, SHAKE256_LENGTH),
+        }, jwtSecret);
    }

}
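createJWT binds the token to a SHAKE256 digest of the current password hash, so changing the password invalidates previously issued logins. A hedged sketch of the matching verification step; the real check lives in the socket handlers, so the function below and its require path are illustrative only:

// Illustrative counterpart to User.createJWT (not the exact server-side handler).
const jwt = require("jsonwebtoken");
const { shake256, SHAKE256_LENGTH } = require("../util-server");

function verifyUserJWT(token, user, jwtSecret) {
    const decoded = jwt.verify(token, jwtSecret); // throws if the signature is bad
    return decoded.username === user.username
        && decoded.h === shake256(user.password, SHAKE256_LENGTH);
}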
server/monitor-types/dns.js (new file, 56 lines)
@@ -0,0 +1,56 @@
|
const { MonitorType } = require("./monitor-type");
|
||||||
|
const { UP } = require("../../src/util");
|
||||||
|
const dayjs = require("dayjs");
|
||||||
|
const { dnsResolve } = require("../util-server");
|
||||||
|
const { R } = require("redbean-node");
|
||||||
|
|
||||||
|
class DnsMonitorType extends MonitorType {
|
||||||
|
|
||||||
|
name = "dns";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @inheritdoc
|
||||||
|
*/
|
||||||
|
async check(monitor, heartbeat, _server) {
|
||||||
|
let startTime = dayjs().valueOf();
|
||||||
|
let dnsMessage = "";
|
||||||
|
|
||||||
|
let dnsRes = await dnsResolve(monitor.hostname, monitor.dns_resolve_server, monitor.port, monitor.dns_resolve_type);
|
||||||
|
heartbeat.ping = dayjs().valueOf() - startTime;
|
||||||
|
|
||||||
|
if (monitor.dns_resolve_type === "A" || monitor.dns_resolve_type === "AAAA" || monitor.dns_resolve_type === "TXT" || monitor.dns_resolve_type === "PTR") {
|
||||||
|
dnsMessage += "Records: ";
|
||||||
|
dnsMessage += dnsRes.join(" | ");
|
||||||
|
} else if (monitor.dns_resolve_type === "CNAME" || monitor.dns_resolve_type === "PTR") {
|
||||||
|
dnsMessage += dnsRes[0];
|
||||||
|
} else if (monitor.dns_resolve_type === "CAA") {
|
||||||
|
dnsMessage += dnsRes[0].issue;
|
||||||
|
} else if (monitor.dns_resolve_type === "MX") {
|
||||||
|
dnsRes.forEach(record => {
|
||||||
|
dnsMessage += `Hostname: ${record.exchange} - Priority: ${record.priority} | `;
|
||||||
|
});
|
||||||
|
dnsMessage = dnsMessage.slice(0, -2);
|
||||||
|
} else if (monitor.dns_resolve_type === "NS") {
|
||||||
|
dnsMessage += "Servers: ";
|
||||||
|
dnsMessage += dnsRes.join(" | ");
|
||||||
|
} else if (monitor.dns_resolve_type === "SOA") {
|
||||||
|
dnsMessage += `NS-Name: ${dnsRes.nsname} | Hostmaster: ${dnsRes.hostmaster} | Serial: ${dnsRes.serial} | Refresh: ${dnsRes.refresh} | Retry: ${dnsRes.retry} | Expire: ${dnsRes.expire} | MinTTL: ${dnsRes.minttl}`;
|
||||||
|
} else if (monitor.dns_resolve_type === "SRV") {
|
||||||
|
dnsRes.forEach(record => {
|
||||||
|
dnsMessage += `Name: ${record.name} | Port: ${record.port} | Priority: ${record.priority} | Weight: ${record.weight} | `;
|
||||||
|
});
|
||||||
|
dnsMessage = dnsMessage.slice(0, -2);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (monitor.dns_last_result !== dnsMessage && dnsMessage !== undefined) {
|
||||||
|
await R.exec("UPDATE `monitor` SET dns_last_result = ? WHERE id = ? ", [ dnsMessage, monitor.id ]);
|
||||||
|
}
|
||||||
|
|
||||||
|
heartbeat.msg = dnsMessage;
|
||||||
|
heartbeat.status = UP;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
DnsMonitorType,
|
||||||
|
};
|
server/monitor-types/mqtt.js (new file, 121 lines)
@@ -0,0 +1,121 @@
|
const { MonitorType } = require("./monitor-type");
|
||||||
|
const { log, UP } = require("../../src/util");
|
||||||
|
const mqtt = require("mqtt");
|
||||||
|
const jsonata = require("jsonata");
|
||||||
|
|
||||||
|
class MqttMonitorType extends MonitorType {
|
||||||
|
|
||||||
|
name = "mqtt";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run the monitoring check on the MQTT monitor
|
||||||
|
* @param {Monitor} monitor Monitor to check
|
||||||
|
* @param {Heartbeat} heartbeat Monitor heartbeat to update
|
||||||
|
* @param {UptimeKumaServer} server Uptime Kuma server
|
||||||
|
* @returns {Promise<void>}
|
||||||
|
*/
|
||||||
|
async check(monitor, heartbeat, server) {
|
||||||
|
const receivedMessage = await this.mqttAsync(monitor.hostname, monitor.mqttTopic, {
|
||||||
|
port: monitor.port,
|
||||||
|
username: monitor.mqttUsername,
|
||||||
|
password: monitor.mqttPassword,
|
||||||
|
interval: monitor.interval,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (monitor.mqttCheckType == null || monitor.mqttCheckType === "") {
|
||||||
|
// use old default
|
||||||
|
monitor.mqttCheckType = "keyword";
|
||||||
|
}
|
||||||
|
|
||||||
|
if (monitor.mqttCheckType === "keyword") {
|
||||||
|
if (receivedMessage != null && receivedMessage.includes(monitor.mqttSuccessMessage)) {
|
||||||
|
heartbeat.msg = `Topic: ${monitor.mqttTopic}; Message: ${receivedMessage}`;
|
||||||
|
heartbeat.status = UP;
|
||||||
|
} else {
|
||||||
|
throw Error(`Message Mismatch - Topic: ${monitor.mqttTopic}; Message: ${receivedMessage}`);
|
||||||
|
}
|
||||||
|
} else if (monitor.mqttCheckType === "json-query") {
|
||||||
|
const parsedMessage = JSON.parse(receivedMessage);
|
||||||
|
|
||||||
|
let expression = jsonata(monitor.jsonPath);
|
||||||
|
|
||||||
|
let result = await expression.evaluate(parsedMessage);
|
||||||
|
|
||||||
|
if (result?.toString() === monitor.expectedValue) {
|
||||||
|
heartbeat.msg = "Message received, expected value is found";
|
||||||
|
heartbeat.status = UP;
|
||||||
|
} else {
|
||||||
|
throw new Error("Message received but value is not equal to expected value, value was: [" + result + "]");
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
throw Error("Unknown MQTT Check Type");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Connect to MQTT Broker, subscribe to topic and receive message as String
|
||||||
|
* @param {string} hostname Hostname / address of machine to test
|
||||||
|
* @param {string} topic MQTT topic
|
||||||
|
* @param {object} options MQTT options. Contains port, username,
|
||||||
|
* password and interval (interval defaults to 20)
|
||||||
|
* @returns {Promise<string>} Received MQTT message
|
||||||
|
*/
|
||||||
|
mqttAsync(hostname, topic, options = {}) {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const { port, username, password, interval = 20 } = options;
|
||||||
|
|
||||||
|
// Adds MQTT protocol to the hostname if not already present
|
||||||
|
if (!/^(?:http|mqtt|ws)s?:\/\//.test(hostname)) {
|
||||||
|
hostname = "mqtt://" + hostname;
|
||||||
|
}
|
||||||
|
|
||||||
|
const timeoutID = setTimeout(() => {
|
||||||
|
log.debug("mqtt", "MQTT timeout triggered");
|
||||||
|
client.end();
|
||||||
|
reject(new Error("Timeout, Message not received"));
|
||||||
|
}, interval * 1000 * 0.8);
|
||||||
|
|
||||||
|
const mqttUrl = `${hostname}:${port}`;
|
||||||
|
|
||||||
|
log.debug("mqtt", `MQTT connecting to ${mqttUrl}`);
|
||||||
|
|
||||||
|
let client = mqtt.connect(mqttUrl, {
|
||||||
|
username,
|
||||||
|
password
|
||||||
|
});
|
||||||
|
|
||||||
|
client.on("connect", () => {
|
||||||
|
log.debug("mqtt", "MQTT connected");
|
||||||
|
|
||||||
|
try {
|
||||||
|
client.subscribe(topic, () => {
|
||||||
|
log.debug("mqtt", "MQTT subscribed to topic");
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
client.end();
|
||||||
|
clearTimeout(timeoutID);
|
||||||
|
reject(new Error("Cannot subscribe topic"));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
client.on("error", (error) => {
|
||||||
|
client.end();
|
||||||
|
clearTimeout(timeoutID);
|
||||||
|
reject(error);
|
||||||
|
});
|
||||||
|
|
||||||
|
client.on("message", (messageTopic, message) => {
|
||||||
|
if (messageTopic === topic) {
|
||||||
|
client.end();
|
||||||
|
clearTimeout(timeoutID);
|
||||||
|
resolve(message.toString("utf8"));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
MqttMonitorType,
|
||||||
|
};
|
|
@ -8,6 +8,7 @@ const path = require("path");
|
||||||
const Database = require("../database");
|
const Database = require("../database");
|
||||||
const jwt = require("jsonwebtoken");
|
const jwt = require("jsonwebtoken");
|
||||||
const config = require("../config");
|
const config = require("../config");
|
||||||
|
const { RemoteBrowser } = require("../remote-browser");
|
||||||
|
|
||||||
let browser = null;
|
let browser = null;
|
||||||
|
|
||||||
|
@ -24,6 +25,9 @@ if (process.platform === "win32") {
|
||||||
allowedList.push(process.env.PROGRAMFILES + "\\Chromium\\Application\\chrome.exe");
|
allowedList.push(process.env.PROGRAMFILES + "\\Chromium\\Application\\chrome.exe");
|
||||||
allowedList.push(process.env["ProgramFiles(x86)"] + "\\Chromium\\Application\\chrome.exe");
|
allowedList.push(process.env["ProgramFiles(x86)"] + "\\Chromium\\Application\\chrome.exe");
|
||||||
|
|
||||||
|
// Allow MS Edge
|
||||||
|
allowedList.push(process.env["ProgramFiles(x86)"] + "\\Microsoft\\Edge\\Application\\msedge.exe");
|
||||||
|
|
||||||
// For Loop A to Z
|
// For Loop A to Z
|
||||||
for (let i = 65; i <= 90; i++) {
|
for (let i = 65; i <= 90; i++) {
|
||||||
let drive = String.fromCharCode(i);
|
let drive = String.fromCharCode(i);
|
||||||
|
@ -40,17 +44,15 @@ if (process.platform === "win32") {
|
||||||
"/usr/bin/chromium",
|
"/usr/bin/chromium",
|
||||||
"/usr/bin/chromium-browser",
|
"/usr/bin/chromium-browser",
|
||||||
"/usr/bin/google-chrome",
|
"/usr/bin/google-chrome",
|
||||||
|
"/snap/bin/chromium", // Ubuntu
|
||||||
];
|
];
|
||||||
} else if (process.platform === "darwin") {
|
} else if (process.platform === "darwin") {
|
||||||
// TODO: Generated by GitHub Copilot, but not sure if it's correct
|
|
||||||
allowedList = [
|
allowedList = [
|
||||||
"/Applications/Google Chrome.app/Contents/MacOS/Google Chrome",
|
"/Applications/Google Chrome.app/Contents/MacOS/Google Chrome",
|
||||||
"/Applications/Chromium.app/Contents/MacOS/Chromium",
|
"/Applications/Chromium.app/Contents/MacOS/Chromium",
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
|
||||||
log.debug("chrome", allowedList);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Is the executable path allowed?
|
* Is the executable path allowed?
|
||||||
* @param {string} executablePath Path to executable
|
* @param {string} executablePath Path to executable
|
||||||
|
@ -85,6 +87,19 @@ async function getBrowser() {
|
||||||
return browser;
|
return browser;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the current instance of the browser. If there isn't one, create it
|
||||||
|
* @param {integer} remoteBrowserID Path to executable
|
||||||
|
* @param {integer} userId User ID
|
||||||
|
* @returns {Promise<Browser>} The browser
|
||||||
|
*/
|
||||||
|
async function getRemoteBrowser(remoteBrowserID, userId) {
|
||||||
|
let remoteBrowser = await RemoteBrowser.get(remoteBrowserID, userId);
|
||||||
|
log.debug("MONITOR", `Using remote browser: ${remoteBrowser.name} (${remoteBrowser.id})`);
|
||||||
|
browser = await chromium.connect(remoteBrowser.url);
|
||||||
|
return browser;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Prepare the chrome executable path
|
* Prepare the chrome executable path
|
||||||
* @param {string} executablePath Path to chrome executable
|
* @param {string} executablePath Path to chrome executable
|
||||||
|
@ -191,11 +206,21 @@ async function testChrome(executablePath) {
|
||||||
throw new Error(e.message);
|
throw new Error(e.message);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
// test remote browser
|
||||||
/**
|
/**
|
||||||
* TODO: connect remote browser? https://playwright.dev/docs/api/class-browsertype#browser-type-connect
|
* @param {string} remoteBrowserURL Remote Browser URL
|
||||||
*
|
* @returns {Promise<boolean>} Returns if connection worked
|
||||||
*/
|
*/
|
||||||
|
async function testRemoteBrowser(remoteBrowserURL) {
|
||||||
|
try {
|
||||||
|
const browser = await chromium.connect(remoteBrowserURL);
|
||||||
|
browser.version();
|
||||||
|
await browser.close();
|
||||||
|
return true;
|
||||||
|
} catch (e) {
|
||||||
|
throw new Error(e.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
class RealBrowserMonitorType extends MonitorType {
|
class RealBrowserMonitorType extends MonitorType {
|
||||||
|
|
||||||
name = "real-browser";
|
name = "real-browser";
|
||||||
|
@ -204,7 +229,7 @@ class RealBrowserMonitorType extends MonitorType {
|
||||||
* @inheritdoc
|
* @inheritdoc
|
||||||
*/
|
*/
|
||||||
async check(monitor, heartbeat, server) {
|
async check(monitor, heartbeat, server) {
|
||||||
const browser = await getBrowser();
|
const browser = monitor.remote_browser ? await getRemoteBrowser(monitor.remote_browser, monitor.user_id) : await getBrowser();
|
||||||
const context = await browser.newContext();
|
const context = await browser.newContext();
|
||||||
const page = await context.newPage();
|
const page = await context.newPage();
|
||||||
|
|
||||||
|
@ -237,4 +262,5 @@ module.exports = {
|
||||||
RealBrowserMonitorType,
|
RealBrowserMonitorType,
|
||||||
testChrome,
|
testChrome,
|
||||||
resetChrome,
|
resetChrome,
|
||||||
|
testRemoteBrowser,
|
||||||
};
|
};
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
const { MonitorType } = require("./monitor-type");
|
const { MonitorType } = require("./monitor-type");
|
||||||
const { UP, log } = require("../../src/util");
|
const { UP } = require("../../src/util");
|
||||||
const exec = require("child_process").exec;
|
const childProcessAsync = require("promisify-child-process");
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A TailscalePing class extends the MonitorType.
|
* A TailscalePing class extends the MonitorType.
|
||||||
|
@ -23,7 +23,6 @@ class TailscalePing extends MonitorType {
|
||||||
let tailscaleOutput = await this.runTailscalePing(monitor.hostname, monitor.interval);
|
let tailscaleOutput = await this.runTailscalePing(monitor.hostname, monitor.interval);
|
||||||
this.parseTailscaleOutput(tailscaleOutput, heartbeat);
|
this.parseTailscaleOutput(tailscaleOutput, heartbeat);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
log.debug("Tailscale", err);
|
|
||||||
// trigger log function somewhere to display a notification or alert to the user (but how?)
|
// trigger log function somewhere to display a notification or alert to the user (but how?)
|
||||||
throw new Error(`Error checking Tailscale ping: ${err}`);
|
throw new Error(`Error checking Tailscale ping: ${err}`);
|
||||||
}
|
}
|
||||||
|
@ -37,26 +36,19 @@ class TailscalePing extends MonitorType {
|
||||||
* @throws Will throw an error if the command execution encounters any error.
|
* @throws Will throw an error if the command execution encounters any error.
|
||||||
*/
|
*/
|
||||||
async runTailscalePing(hostname, interval) {
|
async runTailscalePing(hostname, interval) {
|
||||||
let cmd = `tailscale ping ${hostname}`;
|
|
||||||
|
|
||||||
log.debug("Tailscale", cmd);
|
|
||||||
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
let timeout = interval * 1000 * 0.8;
|
let timeout = interval * 1000 * 0.8;
|
||||||
exec(cmd, { timeout: timeout }, (error, stdout, stderr) => {
|
let res = await childProcessAsync.spawn("tailscale", [ "ping", "--c", "1", hostname ], {
|
||||||
// we may need to handle more cases if tailscale reports an error that isn't necessarily an error (such as not-logged in or DERP health-related issues)
|
timeout: timeout,
|
||||||
if (error) {
|
encoding: "utf8",
|
||||||
reject(`Execution error: ${error.message}`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (stderr) {
|
|
||||||
reject(`Error in output: ${stderr}`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
resolve(stdout);
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
if (res.stderr && res.stderr.toString()) {
|
||||||
|
throw new Error(`Error in output: ${res.stderr.toString()}`);
|
||||||
|
}
|
||||||
|
if (res.stdout && res.stdout.toString()) {
|
||||||
|
return res.stdout.toString();
|
||||||
|
} else {
|
||||||
|
throw new Error("No output from Tailscale ping");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -74,7 +66,7 @@ class TailscalePing extends MonitorType {
|
||||||
heartbeat.status = UP;
|
heartbeat.status = UP;
|
||||||
let time = line.split(" in ")[1].split(" ")[0];
|
let time = line.split(" in ")[1].split(" ")[0];
|
||||||
heartbeat.ping = parseInt(time);
|
heartbeat.ping = parseInt(time);
|
||||||
heartbeat.msg = line;
|
heartbeat.msg = "OK";
|
||||||
break;
|
break;
|
||||||
} else if (line.includes("timed out")) {
|
} else if (line.includes("timed out")) {
|
||||||
throw new Error(`Ping timed out: "${line}"`);
|
throw new Error(`Ping timed out: "${line}"`);
|
||||||
|
|
|
@@ -1,5 +1,5 @@
 const NotificationProvider = require("./notification-provider");
-const childProcess = require("child_process");
+const childProcessAsync = require("promisify-child-process");

 class Apprise extends NotificationProvider {

@@ -14,7 +14,9 @@ class Apprise extends NotificationProvider {
            args.push("-t");
            args.push(notification.title);
        }
-        const s = childProcess.spawnSync("apprise", args);
+        const s = await childProcessAsync.spawn("apprise", args, {
+            encoding: "utf8",
+        });

        const output = (s.stdout) ? s.stdout.toString() : "ERROR: maybe apprise not found";

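Switching from spawnSync to promisify-child-process keeps the Node event loop free while the Apprise CLI runs. A small usage sketch of the same call pattern; the notification target URL below is just an example:

// Small sketch of the async spawn pattern used above.
const childProcessAsync = require("promisify-child-process");

async function sendTestNotification() {
    const s = await childProcessAsync.spawn(
        "apprise",
        [ "-vv", "-b", "Uptime Kuma test message", "mailto://user:pass@example.com" ],
        { encoding: "utf8" }
    );
    return s.stdout ? s.stdout.toString() : "ERROR: maybe apprise not found";
}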
@ -46,8 +46,7 @@ class Bark extends NotificationProvider {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Add additional parameter for better on device styles (iOS 15
|
* Add additional parameter for Bark v1 endpoints
|
||||||
* optimized)
|
|
||||||
* @param {BeanModel} notification Notification to send
|
* @param {BeanModel} notification Notification to send
|
||||||
* @param {string} postUrl URL to append parameters to
|
* @param {string} postUrl URL to append parameters to
|
||||||
* @returns {string} Additional URL parameters
|
* @returns {string} Additional URL parameters
|
||||||
|
@ -96,12 +95,23 @@ class Bark extends NotificationProvider {
|
||||||
* @returns {string} Success message
|
* @returns {string} Success message
|
||||||
*/
|
*/
|
||||||
async postNotification(notification, title, subtitle, endpoint) {
|
async postNotification(notification, title, subtitle, endpoint) {
|
||||||
|
let result;
|
||||||
|
if (notification.apiVersion === "v1" || notification.apiVersion == null) {
|
||||||
// url encode title and subtitle
|
// url encode title and subtitle
|
||||||
title = encodeURIComponent(title);
|
title = encodeURIComponent(title);
|
||||||
subtitle = encodeURIComponent(subtitle);
|
subtitle = encodeURIComponent(subtitle);
|
||||||
let postUrl = endpoint + "/" + title + "/" + subtitle;
|
let postUrl = endpoint + "/" + title + "/" + subtitle;
|
||||||
postUrl = this.appendAdditionalParameters(notification, postUrl);
|
postUrl = this.appendAdditionalParameters(notification, postUrl);
|
||||||
let result = await axios.get(postUrl);
|
result = await axios.get(postUrl);
|
||||||
|
} else {
|
||||||
|
result = await axios.post(`${endpoint}/push`, {
|
||||||
|
title,
|
||||||
|
body: subtitle,
|
||||||
|
icon: barkNotificationAvatar,
|
||||||
|
sound: notification.barkSound || "telegraph", // default sound is telegraph
|
||||||
|
group: notification.barkGroup || "UptimeKuma", // default group is UptimeKuma
|
||||||
|
});
|
||||||
|
}
|
||||||
this.checkResult(result);
|
this.checkResult(result);
|
||||||
if (result.statusText != null) {
|
if (result.statusText != null) {
|
||||||
return "Bark notification succeed: " + result.statusText;
|
return "Bark notification succeed: " + result.statusText;
|
||||||
|
|
server/notification-providers/grafana-oncall.js (new file, 61 lines)
@@ -0,0 +1,61 @@
|
const NotificationProvider = require("./notification-provider");
|
||||||
|
const axios = require("axios");
|
||||||
|
const { DOWN, UP } = require("../../src/util");
|
||||||
|
|
||||||
|
class GrafanaOncall extends NotificationProvider {
|
||||||
|
|
||||||
|
name = "GrafanaOncall";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @inheritdoc
|
||||||
|
*/
|
||||||
|
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||||
|
|
||||||
|
if (!notification.GrafanaOncallURL) {
|
||||||
|
throw new Error("GrafanaOncallURL cannot be empty");
|
||||||
|
}
|
||||||
|
|
||||||
|
let okMsg = "Sent Successfully.";
|
||||||
|
try {
|
||||||
|
if (heartbeatJSON === null) {
|
||||||
|
let grafanaupdata = {
|
||||||
|
title: "General notification",
|
||||||
|
message: msg,
|
||||||
|
state: "alerting",
|
||||||
|
};
|
||||||
|
await axios.post(
|
||||||
|
notification.GrafanaOncallURL,
|
||||||
|
grafanaupdata
|
||||||
|
);
|
||||||
|
return okMsg;
|
||||||
|
} else if (heartbeatJSON["status"] === DOWN) {
|
||||||
|
let grafanadowndata = {
|
||||||
|
title: monitorJSON["name"] + " is down",
|
||||||
|
message: heartbeatJSON["msg"],
|
||||||
|
state: "alerting",
|
||||||
|
};
|
||||||
|
await axios.post(
|
||||||
|
notification.GrafanaOncallURL,
|
||||||
|
grafanadowndata
|
||||||
|
);
|
||||||
|
return okMsg;
|
||||||
|
} else if (heartbeatJSON["status"] === UP) {
|
||||||
|
let grafanaupdata = {
|
||||||
|
title: monitorJSON["name"] + " is up",
|
||||||
|
message: heartbeatJSON["msg"],
|
||||||
|
state: "ok",
|
||||||
|
};
|
||||||
|
await axios.post(
|
||||||
|
notification.GrafanaOncallURL,
|
||||||
|
grafanaupdata
|
||||||
|
);
|
||||||
|
return okMsg;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
this.throwGeneralAxiosError(error);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = GrafanaOncall;
|
|
@@ -78,12 +78,12 @@ class Mattermost extends NotificationProvider {
                {
                    fallback:
                        "Your " +
-                        monitorJSON.name +
+                        monitorJSON.pathName +
                        " service went " +
                        statusText,
                    color: color,
                    title:
-                        monitorJSON.name +
+                        monitorJSON.pathName +
                        " service went " +
                        statusText,
                    title_link: monitorJSON.url,
@ -1,6 +1,7 @@
|
||||||
const nodemailer = require("nodemailer");
|
const nodemailer = require("nodemailer");
|
 const NotificationProvider = require("./notification-provider");
 const { DOWN } = require("../../src/util");
+const { Liquid } = require("liquidjs");
 
 class SMTP extends NotificationProvider {
 
@@ -39,33 +40,56 @@ class SMTP extends NotificationProvider {
                 pass: notification.smtpPassword,
             };
         }
 
-        // Lets start with default subject and empty string for custom one
+        // default values in case the user does not want to template
         let subject = msg;
-
-        // Change the subject if:
-        // - The msg ends with "Testing" or
-        // - Actual Up/Down Notification
+        let body = msg;
+        if (heartbeatJSON) {
+            body = `${msg}\nTime (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`;
+        }
+        // subject and body are templated
         if ((monitorJSON && heartbeatJSON) || msg.endsWith("Testing")) {
-            let customSubject = "";
-
-            // Our subject cannot end with whitespace it's often raise spam score
-            // Once I got "Cannot read property 'trim' of undefined", better be safe than sorry
-            if (notification.customSubject) {
-                customSubject = notification.customSubject.trim();
-            }
+            // cannot end with whitespace as this often raises spam scores
+            const customSubject = notification.customSubject?.trim() || "";
+            const customBody = notification.customBody?.trim() || "";
+
+            const context = this.generateContext(msg, monitorJSON, heartbeatJSON);
+            const engine = new Liquid();
+            if (customSubject !== "") {
+                const tpl = engine.parse(customSubject);
+                subject = await engine.render(tpl, context);
+            }
+            if (customBody !== "") {
+                const tpl = engine.parse(customBody);
+                body = await engine.render(tpl, context);
+            }
         }
 
-            // If custom subject is not empty, change subject for notification
-            if (customSubject !== "") {
-
-                // Replace "MACROS" with corresponding variable
-                let replaceName = new RegExp("{{NAME}}", "g");
-                let replaceHostnameOrURL = new RegExp("{{HOSTNAME_OR_URL}}", "g");
-                let replaceStatus = new RegExp("{{STATUS}}", "g");
-
-                // Lets start with dummy values to simplify code
-                let monitorName = "Test";
+        // send mail with defined transport object
+        let transporter = nodemailer.createTransport(config);
+        await transporter.sendMail({
+            from: notification.smtpFrom,
+            cc: notification.smtpCC,
+            bcc: notification.smtpBCC,
+            to: notification.smtpTo,
+            subject: subject,
+            text: body,
+        });
+
+        return "Sent Successfully.";
+    }
+
+    /**
+     * Generate context for LiquidJS
+     * @param {string} msg the message that will be included in the context
+     * @param {?object} monitorJSON Monitor details (For Up/Down/Cert-Expiry only)
+     * @param {?object} heartbeatJSON Heartbeat details (For Up/Down only)
+     * @returns {{STATUS: string, status: string, HOSTNAME_OR_URL: string, hostnameOrUrl: string, NAME: string, name: string, monitorJSON: ?object, heartbeatJSON: ?object, msg: string}} the context
+     */
+    generateContext(msg, monitorJSON, heartbeatJSON) {
+        // Let's start with dummy values to simplify code
+        let monitorName = "Monitor Name not available";
         let monitorHostnameOrURL = "testing.hostname";
-                let serviceStatus = "⚠️ Test";
 
         if (monitorJSON !== null) {
             monitorName = monitorJSON["name"];
@@ -77,37 +101,24 @@ class SMTP extends NotificationProvider {
             }
         }
 
+        let serviceStatus = "⚠️ Test";
         if (heartbeatJSON !== null) {
             serviceStatus = (heartbeatJSON["status"] === DOWN) ? "🔴 Down" : "✅ Up";
         }
+        return {
+            // for v1 compatibility, to be removed in v3
+            "STATUS": serviceStatus,
+            "NAME": monitorName,
+            "HOSTNAME_OR_URL": monitorHostnameOrURL,
 
-                // Break replace to one by line for better readability
-                customSubject = customSubject.replace(replaceStatus, serviceStatus);
-                customSubject = customSubject.replace(replaceName, monitorName);
-                customSubject = customSubject.replace(replaceHostnameOrURL, monitorHostnameOrURL);
-
-                subject = customSubject;
-            }
-        }
-
-        let transporter = nodemailer.createTransport(config);
-
-        let bodyTextContent = msg;
-        if (heartbeatJSON) {
-            bodyTextContent = `${msg}\nTime (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`;
-        }
-
-        // send mail with defined transport object
-        await transporter.sendMail({
-            from: notification.smtpFrom,
-            cc: notification.smtpCC,
-            bcc: notification.smtpBCC,
-            to: notification.smtpTo,
-            subject: subject,
-            text: bodyTextContent,
-        });
-
-        return "Sent Successfully.";
+            // variables which are officially supported
+            "status": serviceStatus,
+            "name": monitorName,
+            "hostnameOrURL": monitorHostnameOrURL,
+            monitorJSON,
+            heartbeatJSON,
+            msg,
+        };
     }
 }
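Note on the change above: the commit replaces the old {{NAME}} / {{HOSTNAME_OR_URL}} / {{STATUS}} regex macros with LiquidJS rendering, so a custom subject or body can reference any key of the context built by generateContext(). The snippet below is a minimal standalone sketch of that rendering path, not part of the commit; the template string and monitor values are made-up placeholders.

// Minimal sketch of the LiquidJS rendering used above (hypothetical values).
const { Liquid } = require("liquidjs");

async function demo() {
    const engine = new Liquid();
    // A user-supplied custom subject, as it could be entered in the notification form
    const customSubject = "[{{ name }}] is {{ status }}";
    // Shaped like the object returned by generateContext()
    const context = {
        name: "My Website",
        status: "🔴 Down",
        hostnameOrURL: "example.com",
        msg: "My Website is down",
    };
    const tpl = engine.parse(customSubject);
    const subject = await engine.render(tpl, context);
    console.log(subject); // -> "[My Website] is 🔴 Down"
}

demo();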
server/notification-providers/telegram.js

@@ -28,11 +28,7 @@ class Telegram extends NotificationProvider {
             return okMsg;
 
         } catch (error) {
-            if (error.response && error.response.data && error.response.data.description) {
-                throw new Error(error.response.data.description);
-            } else {
-                throw new Error(error.message);
-            }
+            this.throwGeneralAxiosError(error);
         }
     }
 }
 
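The Telegram change above swaps the provider's hand-rolled error branching for the shared throwGeneralAxiosError() helper inherited from the base NotificationProvider. The sketch below shows the general shape of such a centralised axios error handler; it is an approximation of the pattern, not the repository's exact implementation.

// Sketch of a centralised axios error handler (assumed shape, illustrative message text).
function throwGeneralAxiosError(error) {
    let msg = "Error: " + error + " ";
    if (error.response && error.response.data) {
        if (typeof error.response.data === "string") {
            msg += error.response.data; // plain-text API error body
        } else {
            msg += JSON.stringify(error.response.data); // JSON error body, e.g. Telegram's description field
        }
    }
    throw new Error(msg);
}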
server/notification.js

@@ -14,6 +14,7 @@ const FreeMobile = require("./notification-providers/freemobile");
 const GoogleChat = require("./notification-providers/google-chat");
 const Gorush = require("./notification-providers/gorush");
 const Gotify = require("./notification-providers/gotify");
+const GrafanaOncall = require("./notification-providers/grafana-oncall");
 const HomeAssistant = require("./notification-providers/home-assistant");
 const Kook = require("./notification-providers/kook");
 const Line = require("./notification-providers/line");

@@ -65,7 +66,7 @@ class Notification {
      * @throws Duplicate notification providers in list
      */
     static init() {
-        log.info("notification", "Prepare Notification Providers");
+        log.debug("notification", "Prepare Notification Providers");
 
         this.providerList = {};
 
@@ -84,6 +85,7 @@ class Notification {
             new GoogleChat(),
             new Gorush(),
             new Gotify(),
+            new GrafanaOncall(),
             new HomeAssistant(),
             new Kook(),
             new Line(),
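The notification.js hunks above wire the new GrafanaOncall provider into the provider registry. For reference, the registration pattern that init() relies on looks roughly like the sketch below, under the assumption that each provider exposes a unique name property used as the registry key and that duplicates are rejected; the function and error text are illustrative, not copied from the repository.

// Sketch of the provider-registration pattern behind Notification.init().
const providerList = {};

function registerProviders(providers) {
    for (const provider of providers) {
        if (!provider.name) {
            throw new Error("Notification provider without a name");
        }
        if (provider.name in providerList) {
            throw new Error("Duplicate notification provider name: " + provider.name);
        }
        providerList[provider.name] = provider;
    }
}

// e.g. registerProviders([ new Gotify(), new GrafanaOncall(), new HomeAssistant() ]);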
Some files were not shown because too many files have changed in this diff.