Merge branch 'mariadb' into 2.0.X

# Conflicts:
#	package.json
Commit e06c3ee5d4 by Louis Lam, 2023-02-05 18:31:42 +08:00
259 changed files with 32579 additions and 19095 deletions


@ -31,6 +31,9 @@ tsconfig.json
/tmp
/babel.config.js
/ecosystem.config.js
/extra/healthcheck.exe
/extra/healthcheck
### .gitignore content (commented rules are duplicated)


@ -19,3 +19,6 @@ indent_size = 2
[*.vue]
trim_trailing_whitespace = false
[*.go]
indent_style = tab


@ -16,7 +16,6 @@ Please delete any options that are not relevant.
- User interface (UI)
- New feature (non-breaking change which adds functionality)
- Breaking change (fix or feature that would cause existing functionality to not work as expected)
- Translation update
- Other
- This change requires a documentation update


@ -6,8 +6,12 @@ name: Auto Test
on:
push:
branches: [ master ]
paths-ignore:
- '*.md'
pull_request:
branches: [ master ]
paths-ignore:
- '*.md'
jobs:
auto-test:
@ -18,7 +22,7 @@ jobs:
strategy:
matrix:
os: [macos-latest, ubuntu-latest, windows-latest]
node: [ 14, 16, 17, 18 ]
node: [ 14, 16, 18, 19 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps:
@ -36,6 +40,7 @@ jobs:
env:
HEADLESS_TEST: 1
JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}
check-linters:
runs-on: ubuntu-latest
@ -66,3 +71,19 @@ jobs:
- run: npm install
- run: npm run build
- run: npm run cy:test
frontend-unit-tests:
needs: [ check-linters ]
runs-on: ubuntu-latest
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3
- name: Use Node.js 14
uses: actions/setup-node@v3
with:
node-version: 14
cache: 'npm'
- run: npm install
- run: npm run build
- run: npm run cy:run:unit


@ -1,4 +1,3 @@
name: Close Incorrect Issue
on:
@ -12,13 +11,13 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest]
node-version: [16.x]
node-version: [16]
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'


@ -3,22 +3,20 @@ on:
workflow_dispatch:
schedule:
- cron: '0 */6 * * *'
#Run every 6 hours
#Run every 6 hours
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v5
- uses: actions/stale@v7
with:
stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 2 days.'
stale-pr-message: 'We are clearing up our old Pull Requests and yours has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 2 days.'
close-issue-message: 'This issue was closed because it has been stalled for 2 days with no activity.'
close-pr-message: 'This PR was closed because it has been stalled for 2 days with no activity.'
days-before-stale: 90
days-before-close: 2
days-before-pr-stale: 999999999
days-before-pr-close: 1
exempt-issue-labels: 'News,Medium,High,discussion,bug,doc,feature-request'
exempt-pr-labels: 'awaiting-approval,work-in-progress,enhancement,feature-request'
exempt-issue-assignees: 'louislam'
exempt-pr-assignees: 'louislam'
operations-per-run: 200

.gitignore (vendored)

@ -16,3 +16,7 @@ dist-ssr
cypress/videos
cypress/screenshots
/extra/healthcheck.exe
/extra/healthcheck
/extra/healthcheck-armv7


@ -1,6 +1,6 @@
# Project Info
First of all, thank you everyone who made pull requests for Uptime Kuma, I never thought GitHub Community can be that nice! And also because of this, I also never thought other people actually read my code and edit my code. It is not structured and commented so well, lol. Sorry about that.
First of all, I want to thank everyone who made pull requests for Uptime Kuma. I never thought the GitHub Community would be so nice! Because of this, I also never thought that other people would actually read and edit my code. It is not very well structured or commented, sorry about that.
The project was created with vite.js (vue3). Then I created a subdirectory called "server" for the server part. Both frontend and backend share the same package.json.
@ -17,8 +17,11 @@ The frontend code build into "dist" directory. The server (express.js) exposes t
## Directories
- config (dev config files)
- data (App data)
- db (Base database and migration scripts)
- dist (Frontend build)
- docker (Dockerfiles)
- extra (Extra useful scripts)
- public (Frontend resources for dev only)
- server (Server source code)
@ -27,20 +30,23 @@ The frontend code build into "dist" directory. The server (express.js) exposes t
## Can I create a pull request for Uptime Kuma?
Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can discuss first**. Especially for a large pull request or you don't know it will be merged or not.
Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request, or if you don't know whether it will be merged or not.
Here are some references:
✅ Usually Accept:
- Bug/Security fix
- Translations
- Bug fix
- Security fix
- Adding notification providers
- Adding new language files (You should go to https://weblate.kuma.pet for existing languages)
- Adding new language keys: `$t("...")`
⚠️ Discussion First
- Large pull requests
- New features
❌ Won't Merge
- A dedicated PR for translating existing languages (You can now translate on https://weblate.kuma.pet)
- Does not pass the auto test
- Any breaking changes
- Duplicated pull request
@ -48,8 +54,13 @@ Here are some references:
- UI/UX is not close to Uptime Kuma
- Existing logic is completely modified or deleted for no reason
- A function that is completely out of scope
- Convert existing code into other programming languages
- Unnecessarily large code changes (hard to review, causes conflicts with other pull requests)
The above cases cannot cover all situations.
I (@louislam) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spend on it. Therefore, it is essential to have a discussion beforehand.
I will mark your pull request in the [milestones](https://github.com/louislam/uptime-kuma/milestones) if I plan to review and merge it.
Also, please don't rush or ask for an ETA, because I have to understand the pull request, make sure there are no breaking changes, and check that it sticks to my vision of this project, especially for large pull requests.
@ -72,13 +83,13 @@ Before deep into coding, discussion first is preferred. Creating an empty pull r
## Project Styles
I personally do not like something need to learn so much and need to config so much before you can finally start the app.
I personally do not like something that requires so many configurations before you can finally start the app. I hope the Uptime Kuma installation can be as easy as installing a mobile app.
- Easy to install for non-Docker users, no native build dependency is needed (at least for x86_64), no extra config, no extra effort to get it run
- Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, no extra effort required to get it running
- Single container for Docker users, no overly complex docker-compose file. Just map the volume and expose the port, and you are good to go
- Settings should be configurable in the frontend. Environment variable is not encouraged, unless it is related to startup such as `DATA_DIR`.
- Settings should be configurable in the frontend. Environment variable is not encouraged, unless it is related to startup such as `DATA_DIR`
- Easy to use
- The web UI styling should be consistent and nice.
- The web UI styling should be consistent and nice
## Coding Styles
@ -87,7 +98,7 @@ I personally do not like something need to learn so much and need to config so m
- Follow ESLint
- Methods and functions should be documented with JSDoc
## Name convention
## Name Conventions
- Javascript/Typescript: camelCaseType
- SQLite: snake_case (Underscore)
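For example (a hedged sketch: the helper name `setGrpcUrl` is made up, while `redbean-node`, the `monitor` table, and its `grpc_url` column all appear elsewhere in this commit), the two conventions meet like this:

```js
// Illustrative sketch only: JavaScript identifiers stay camelCase, SQLite columns stay snake_case.
const { R } = require("redbean-node");

async function setGrpcUrl(monitorID, grpcUrl) {
    // "monitor" and its "grpc_url" column come from the SQL patches in this commit.
    const monitorBean = await R.findOne("monitor", " id = ? ", [ monitorID ]);
    monitorBean.grpc_url = grpcUrl; // the column keeps its snake_case name on the bean
    await R.store(monitorBean);
}
```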
@ -101,7 +112,7 @@ I personally do not like something need to learn so much and need to config so m
- IDE that supports ESLint and EditorConfig (I am using IntelliJ IDEA)
- A SQLite GUI tool (SQLite Expert Personal is suggested)
## Install dependencies
## Install Dependencies for Development
```bash
npm ci
@ -119,6 +130,12 @@ Port `3000` and port `3001` will be used.
npm run dev
```
But sometimes you may want to restart only the server, not the frontend. In that case, you can run these commands in two terminals:
```
npm run start-frontend-dev
npm run start-server-dev
```
## Backend Server
It binds to `0.0.0.0:3001` by default.
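As a rough sketch of how the host and port can be overridden (mirroring the environment variable fallbacks used by `extra/healthcheck.js` in this commit; whether `server/server.js` reads exactly the same variables is an assumption here):

```js
// Hedged sketch of the host/port resolution order, mirroring extra/healthcheck.js.
const hostname = process.env.UPTIME_KUMA_HOST || process.env.HOST || "0.0.0.0";
const port = parseInt(process.env.UPTIME_KUMA_PORT || process.env.PORT || "3001", 10);
console.log(`The server would listen on ${hostname}:${port}`);
```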
@ -134,12 +151,15 @@ express.js is used for:
### Structure in /server/
- jobs/ (Jobs that are running in another process)
- model/ (Object model, auto mapping to the database table name)
- modules/ (Modified 3rd-party modules)
- monitor_types (Monitor Types)
- notification-providers/ (individual notification logic)
- routers/ (Express Routers)
- socket-handler (Socket.io Handlers)
- server.js (Server entry point and main logic)
- server.js (Server entry point)
- uptime-kuma-server.js (UptimeKumaServer class, main logic should be here, but some still in `server.js`)
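As a small illustration of how these pieces fit together, other server modules grab the shared `UptimeKumaServer` instance through its singleton accessor, the same pattern the socket-handling code changed in this commit uses to reach the Socket.io instance (treat the relative require path as an assumption):

```js
// Minimal sketch; the relative require path is assumed.
const { UptimeKumaServer } = require("./uptime-kuma-server");

const server = UptimeKumaServer.getInstance();
const io = server.io; // shared Socket.io instance used by the socket handlers
```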
## Frontend Dev Server
@ -172,15 +192,11 @@ The data and socket logic are in `src/mixins/socket.js`.
## Unit Test
It is end-to-end testing using Jest and Puppeteer.
```bash
npm run build
npm test
```
By default, the Chromium window will show up during the test. Specify `HEADLESS_TEST=1` for terminal environments.
## Dependencies
Both frontend and backend share the same package.json. However, the frontend dependencies are not used in the production environment, because they are baked into the dist files. So:
@ -194,18 +210,12 @@ Both frontend and backend share the same package.json. However, the frontend dep
### Update Dependencies
Install `ncu`
https://github.com/raineorshine/npm-check-updates
```bash
ncu -u -t patch
npm install
```
Since updating Vite from 2.5.10 to 2.6.0 previously broke the application completely, from now on only patch release versions should be updated.
Patch release = the third digit ([Semantic Versioning](https://semver.org/))
If a library must be updated, for example for security reasons, then you must check whether there are any breaking changes.
## Translations
Please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages


@ -1,39 +1,39 @@
# Uptime Kuma
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
[![GitHub Sponsors](https://img.shields.io/github/sponsors/louislam?label=GitHub%20Sponsors)](https://github.com/sponsors/louislam)
[![GitHub Sponsors](https://img.shields.io/github/sponsors/louislam?label=GitHub%20Sponsors)](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/projects/uptime-kuma/uptime-kuma/">
<img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" />
</a>
<div align="center" width="100%">
<img src="./public/icon.svg" width="128" alt="" />
</div>
It is a self-hosted monitoring tool like "Uptime Robot".
Uptime Kuma is an easy-to-use self-hosted monitoring tool.
<img src="https://uptime.kuma.pet/img/dark.jpg" width="700" alt="" />
<img src="https://user-images.githubusercontent.com/1336778/212262296-e6205815-ad62-488c-83ec-a5b0d0689f7c.jpg" width="700" alt="" />
## 🥔 Live Demo
Try it!
https://demo.uptime.kuma.pet
- Tokyo Demo Server: https://demo.uptime.kuma.pet (Sponsored by [Uptime Kuma Sponsors](https://github.com/louislam/uptime-kuma#%EF%B8%8F-sponsors))
It is a temporary live demo, all data will be deleted after 10 minutes. The server is located in Tokyo, so if you live far from there, it may affect your experience. I suggest that you should install and try it out for the best demo experience.
VPS is sponsored by Uptime Kuma sponsors on [Open Collective](https://opencollective.com/uptime-kuma)! Thank you so much!
It is a temporary live demo; all data will be deleted after 10 minutes. Use the one that is closer to you, but I suggest that you install and try it out for the best demo experience.
## ⭐ Features
* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / Ping / DNS Record / Push / Steam Game Server / Docker Containers.
* Fancy, Reactive, Fast UI/UX.
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications).
* 20 second intervals.
* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/languages)
* Multiple Status Pages
* Map Status Page to Domain
* Ping Chart
* Certificate Info
* Proxy Support
* 2FA available
* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / Ping / DNS Record / Push / Steam Game Server / Docker Containers
* Fancy, Reactive, Fast UI/UX
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications)
* 20 second intervals
* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/lang)
* Multiple status pages
* Map status pages to specific domains
* Ping chart
* Certificate info
* Proxy support
* 2FA support
## 🔧 How to Install
@ -45,14 +45,14 @@ docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name upti
⚠️ Please use a **local volume** only. Other types such as NFS are not supported.
Browse to http://localhost:3001 after starting.
Uptime Kuma is now running on http://localhost:3001
### 💪🏻 Non-Docker
Required Tools:
- [Node.js](https://nodejs.org/en/download/) >= 14
- [Git](https://git-scm.com/downloads)
- [pm2](https://pm2.keymetrics.io/) - For run in background
- [pm2](https://pm2.keymetrics.io/) - For running Uptime Kuma in the background
```bash
# Update your npm to the latest version
@ -74,7 +74,7 @@ pm2 start server/server.js --name uptime-kuma
```
Browse to http://localhost:3001 after starting.
Uptime Kuma is now running on http://localhost:3001
More useful PM2 Commands
@ -172,7 +172,7 @@ Check out the latest beta release here: https://github.com/louislam/uptime-kuma/
If you want to report a bug or request a new feature, feel free to open a [new issue](https://github.com/louislam/uptime-kuma/issues).
### Translations
If you want to translate Uptime Kuma into your language, please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages
If you want to translate Uptime Kuma into your language, please visit [Weblate Readme](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
Feel free to correct my grammar in this README, source code, or wiki, as my mother language is not English and my grammar is not that great.


@ -2,9 +2,9 @@
## Reporting a Vulnerability
Please report security issues to uptime@kuma.pet.
Please report security issues to https://github.com/louislam/uptime-kuma/security/advisories/new.
Do not use the issue tracker or discuss it in the public as it will cause more damage.
Do not use the public issue tracker or discuss it in public, as it will cause more damage.
## Supported Versions

config/cypress.config.js (new file)

@ -0,0 +1,28 @@
const { defineConfig } = require("cypress");
module.exports = defineConfig({
projectId: "vyjuem",
e2e: {
experimentalStudio: true,
setupNodeEvents(on, config) {
},
fixturesFolder: "test/cypress/fixtures",
screenshotsFolder: "test/cypress/screenshots",
videosFolder: "test/cypress/videos",
downloadsFolder: "test/cypress/downloads",
supportFile: "test/cypress/support/e2e.js",
baseUrl: "http://localhost:3002",
defaultCommandTimeout: 10000,
pageLoadTimeout: 60000,
viewportWidth: 1920,
viewportHeight: 1080,
specPattern: [
"test/cypress/e2e/setup.cy.js",
"test/cypress/e2e/**/*.js"
],
},
env: {
baseUrl: "http://localhost:3002",
},
});
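To show what this configuration actually runs, here is a minimal, hypothetical spec that the `specPattern` above would pick up (the file name, test titles, and assertion are invented for the example; real specs live under `test/cypress/e2e/`):

```js
// test/cypress/e2e/example.cy.js (hypothetical spec, matching the specPattern above)
describe("Dashboard", () => {
    it("loads the root page", () => {
        // baseUrl is http://localhost:3002, so visit() takes a relative path
        cy.visit("/");
        cy.url().should("include", "localhost:3002");
    });
});
```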


@ -0,0 +1,10 @@
const { defineConfig } = require("cypress");
module.exports = defineConfig({
e2e: {
supportFile: false,
specPattern: [
"test/cypress/unit/**/*.js"
],
}
});


@ -1,33 +0,0 @@
const PuppeteerEnvironment = require("jest-environment-puppeteer");
const util = require("util");
class DebugEnv extends PuppeteerEnvironment {
async handleTestEvent(event, state) {
const ignoredEvents = [
"setup",
"add_hook",
"start_describe_definition",
"add_test",
"finish_describe_definition",
"run_start",
"run_describe_start",
"test_start",
"hook_start",
"hook_success",
"test_fn_start",
"test_fn_success",
"test_done",
"run_describe_finish",
"run_finish",
"teardown",
"test_fn_failure",
];
if (!ignoredEvents.includes(event.name)) {
console.log(
new Date().toString() + ` Unhandled event [${event.name}] ` + util.inspect(event)
);
}
}
}
module.exports = DebugEnv;


@ -1,5 +0,0 @@
module.exports = {
"rootDir": "..",
"testRegex": "./test/frontend.spec.js",
};


@ -1,20 +0,0 @@
module.exports = {
"launch": {
"dumpio": true,
"slowMo": 500,
"headless": process.env.HEADLESS_TEST || false,
"userDataDir": "./data/test-chrome-profile",
args: [
"--disable-setuid-sandbox",
"--disable-gpu",
"--disable-dev-shm-usage",
"--no-default-browser-check",
"--no-experiments",
"--no-first-run",
"--no-pings",
"--no-sandbox",
"--no-zygote",
"--single-process",
],
}
};


@ -1,12 +0,0 @@
module.exports = {
"verbose": true,
"preset": "jest-puppeteer",
"globals": {
"__DEV__": true
},
"testRegex": "./test/e2e.spec.js",
"testEnvironment": "./config/jest-debug-env.js",
"rootDir": "..",
"testTimeout": 30000,
};


@ -1,15 +0,0 @@
import { defineConfig } from "cypress";
export default defineConfig({
e2e: {
baseUrl: "http://localhost:3002",
defaultCommandTimeout: 10000,
pageLoadTimeout: 60000,
viewportWidth: 1920,
viewportHeight: 1080,
specPattern: ["cypress/e2e/setup.cy.ts", "cypress/e2e/**/*.ts"],
},
env: {
baseUrl: "http://localhost:3002",
},
});


@ -1,24 +0,0 @@
import { actor } from "../support/actors/actor";
import { DEFAULT_USER_DATA } from "../support/const/user-data";
import { DashboardPage } from "../support/pages/dasboard-page";
import { SetupPage } from "../support/pages/setup-page";
describe("user can create a new account on setup page", () => {
before(() => {
cy.visit("/setup");
});
it("user can create new account", () => {
cy.url().should("be.equal", SetupPage.url);
actor.setupTask.fillAndSubmitSetupForm(
DEFAULT_USER_DATA.username,
DEFAULT_USER_DATA.password,
DEFAULT_USER_DATA.password
);
cy.url().should("be.equal", DashboardPage.url);
cy.get('[role="alert"]')
.should("be.visible")
.and("contain.text", "Added Successfully.");
});
});


@ -1,8 +0,0 @@
import { SetupTask } from "../tasks/setup-task";
class Actor {
setupTask: SetupTask = new SetupTask();
}
const actor = new Actor();
export { actor };


@ -1 +0,0 @@
import "./commands";


@ -1,15 +0,0 @@
import { SetupPage } from "../pages/setup-page";
export class SetupTask {
fillAndSubmitSetupForm(
username: string,
password: string,
passwordRepeat: string
) {
cy.get(SetupPage.usernameInput).type(username);
cy.get(SetupPage.passWordInput).type(password);
cy.get(SetupPage.passwordRepeatInput).type(passwordRepeat);
cy.get(SetupPage.submitSetupForm).click();
}
}


@ -0,0 +1,5 @@
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD game VARCHAR(255);
COMMIT
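A new patch file like this only runs once it is registered in the server's patch list; the `Database` class diff later in this commit adds exactly that entry. A short sketch of the registration pattern (abridged, not the complete list):

```js
// Abridged sketch of the patch registration in the Database class.
class Database {
    static patchList = {
        // ...existing patches...
        "patch-ping-packet-size.sql": true,
        "patch-add-gamedig-monitor.sql": true, // registers the patch shown above
    };
}

console.log(Object.keys(Database.patchList));
```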

db/patch-grpc-monitor.sql (new file)

@ -0,0 +1,25 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD grpc_url VARCHAR(255) default null;
ALTER TABLE monitor
ADD grpc_protobuf TEXT default null;
ALTER TABLE monitor
ADD grpc_body TEXT default null;
ALTER TABLE monitor
ADD grpc_metadata TEXT default null;
ALTER TABLE monitor
ADD grpc_method VARCHAR(255) default null;
ALTER TABLE monitor
ADD grpc_service_name VARCHAR(255) default null;
ALTER TABLE monitor
ADD grpc_enable_tls BOOLEAN default 0 not null;
COMMIT;


@ -0,0 +1,83 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
-- Just for someone who tested maintenance before (patch-maintenance-table.sql)
DROP TABLE IF EXISTS maintenance_status_page;
DROP TABLE IF EXISTS monitor_maintenance;
DROP TABLE IF EXISTS maintenance;
DROP TABLE IF EXISTS maintenance_timeslot;
-- maintenance
CREATE TABLE [maintenance] (
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[title] VARCHAR(150) NOT NULL,
[description] TEXT NOT NULL,
[user_id] INTEGER REFERENCES [user]([id]) ON DELETE SET NULL ON UPDATE CASCADE,
[active] BOOLEAN NOT NULL DEFAULT 1,
[strategy] VARCHAR(50) NOT NULL DEFAULT 'single',
[start_date] DATETIME,
[end_date] DATETIME,
[start_time] TIME,
[end_time] TIME,
[weekdays] VARCHAR2(250) DEFAULT '[]',
[days_of_month] TEXT DEFAULT '[]',
[interval_day] INTEGER
);
CREATE INDEX [manual_active] ON [maintenance] (
[strategy],
[active]
);
CREATE INDEX [active] ON [maintenance] ([active]);
CREATE INDEX [maintenance_user_id] ON [maintenance] ([user_id]);
-- maintenance_status_page
CREATE TABLE maintenance_status_page (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
status_page_id INTEGER NOT NULL,
maintenance_id INTEGER NOT NULL,
CONSTRAINT FK_maintenance FOREIGN KEY (maintenance_id) REFERENCES maintenance (id) ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT FK_status_page FOREIGN KEY (status_page_id) REFERENCES status_page (id) ON DELETE CASCADE ON UPDATE CASCADE
);
CREATE INDEX [status_page_id_index]
ON [maintenance_status_page]([status_page_id]);
CREATE INDEX [maintenance_id_index]
ON [maintenance_status_page]([maintenance_id]);
-- maintenance_timeslot
CREATE TABLE [maintenance_timeslot] (
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[maintenance_id] INTEGER NOT NULL CONSTRAINT [FK_maintenance] REFERENCES [maintenance]([id]) ON DELETE CASCADE ON UPDATE CASCADE,
[start_date] DATETIME NOT NULL,
[end_date] DATETIME,
[generated_next] BOOLEAN DEFAULT 0
);
CREATE INDEX [maintenance_id] ON [maintenance_timeslot] ([maintenance_id] DESC);
CREATE INDEX [active_timeslot_index] ON [maintenance_timeslot] (
[maintenance_id] DESC,
[start_date] DESC,
[end_date] DESC
);
CREATE INDEX [generated_next_index] ON [maintenance_timeslot] ([generated_next]);
-- monitor_maintenance
CREATE TABLE monitor_maintenance (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
monitor_id INTEGER NOT NULL,
maintenance_id INTEGER NOT NULL,
CONSTRAINT FK_maintenance FOREIGN KEY (maintenance_id) REFERENCES maintenance (id) ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT FK_monitor FOREIGN KEY (monitor_id) REFERENCES monitor (id) ON DELETE CASCADE ON UPDATE CASCADE
);
CREATE INDEX [maintenance_id_index2] ON [monitor_maintenance]([maintenance_id]);
CREATE INDEX [monitor_id_index] ON [monitor_maintenance]([monitor_id]);
COMMIT;


@ -0,0 +1,5 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD packet_size INTEGER DEFAULT 56 NOT NULL;
COMMIT;


@ -0,0 +1,16 @@
############################################
# Build in Golang
# Run npm run build-healthcheck-armv7 on the host first, otherwise it will be super slow when building the armv7 healthcheck
############################################
FROM golang:1.19-buster
WORKDIR /app
ARG TARGETPLATFORM
COPY ./extra/ ./extra/
# Compile healthcheck.go
RUN apt update && \
apt --yes --no-install-recommends install curl && \
curl -sL https://deb.nodesource.com/setup_18.x | bash && \
apt --yes --no-install-recommends install nodejs && \
node ./extra/build-healthcheck.js $TARGETPLATFORM && \
apt --yes remove nodejs


@ -1,17 +1,15 @@
# DON'T UPDATE TO node:14-bullseye-slim, see #372.
# If the image changed, the second stage image should be changed too
FROM node:16-buster-slim
FROM node:18-buster-slim AS base2-slim
ARG TARGETPLATFORM
WORKDIR /app
# Install Curl
# Install Apprise, add sqlite3 cli for debugging in the future, iputils-ping for ping, util-linux for setpriv
# Stupid python3 and python3-pip actually install a lot of useless things into Debian, specify --no-install-recommends to skip them, make the base even smaller than alpine!
# Stupid python3 and python3-pip actually install a lot of useless things into Debian, specify --no-install-recommends to skip them.
RUN apt update && \
apt --yes --no-install-recommends install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
sqlite3 iputils-ping util-linux dumb-init && \
pip3 --no-cache-dir install apprise==1.0.0 && \
sqlite3 iputils-ping util-linux dumb-init git && \
pip3 --no-cache-dir install apprise==1.2.1 && \
rm -rf /var/lib/apt/lists/* && \
apt --yes autoremove
@ -26,3 +24,13 @@ RUN node ./extra/download-cloudflared.js $TARGETPLATFORM && \
rm -f cloudflared.deb && \
apt --yes autoremove
FROM base2-slim AS base2
RUN apt update && \
apt --yes --no-install-recommends install curl && \
curl -LsS https://r.mariadb.com/downloads/mariadb_repo_setup | bash -s -- --mariadb-server-version="mariadb-10.11" && \
apt --yes --no-install-recommends install mariadb-server && \
apt --yes remove curl && \
rm -rf /var/lib/apt/lists/* && \
apt --yes autoremove
RUN chown -R node:node /var/lib/mysql


@ -0,0 +1,13 @@
version: '3.8'
services:
uptime-kuma:
container_name: uptime-kuma-dev
image: louislam/uptime-kuma:nightly2
volumes:
- ./data:/app/data
- ../server:/app/server
ports:
- "3001:3001" # <Host Port>:<Container Port>
- "3307:3306"


@ -1,14 +1,15 @@
# Simple docker-compose.yml
# You can change your port or volume location
version: '3.3'
version: '3.8'
services:
uptime-kuma:
image: louislam/uptime-kuma:1
image: louislam/uptime-kuma:2
container_name: uptime-kuma
volumes:
- ./uptime-kuma-data:/app/data
- uptime-kuma:/app/data
ports:
- 3001:3001 # <Host Port>:<Container Port>
- "3001:3001" # <Host Port>:<Container Port>
restart: always
volumes:
uptime-kuma:


@ -1,34 +1,54 @@
FROM louislam/uptime-kuma:base-debian AS build
ARG BASE_IMAGE=louislam/uptime-kuma:base2
############################################
# Build in Golang
# Run npm run build-healthcheck-armv7 on the host first, otherwise it will be super slow when building the armv7 healthcheck
# Check file: builder-go.dockerfile
############################################
FROM louislam/uptime-kuma:builder-go AS build_healthcheck
############################################
# Build in Node.js
############################################
FROM louislam/uptime-kuma:base2 AS build
USER node
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
COPY --chown=node:node .npmrc .npmrc
COPY --chown=node:node package.json package.json
COPY --chown=node:node package-lock.json package-lock.json
RUN npm ci --omit=dev
COPY . .
RUN npm ci --production && \
chmod +x /app/extra/entrypoint.sh
COPY --chown=node:node --from=build_healthcheck /app/extra/healthcheck /app/extra/healthcheck
FROM louislam/uptime-kuma:base-debian AS release
############################################
# ⭐ Main Image
############################################
FROM $BASE_IMAGE AS release
USER node
WORKDIR /app
# Copy app files from build layer
COPY --from=build /app /app
COPY --chown=node:node --from=build /app /app
EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD extra/healthcheck
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
CMD ["node", "server/server.js"]
############################################
# Mark as Nightly
############################################
FROM release AS nightly
USER node
RUN npm run mark-as-nightly
############################################
# Build an image for testing pr
FROM louislam/uptime-kuma:base-debian AS pr-test
############################################
FROM louislam/uptime-kuma:base2 AS pr-test2
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
## Install Git
@ -50,13 +70,13 @@ RUN git clone https://github.com/louislam/uptime-kuma.git .
RUN npm ci
EXPOSE 3000 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
CMD ["npm", "run", "start-pr-test"]
############################################
# Upload the artifact to Github
FROM louislam/uptime-kuma:base-debian AS upload-artifact
############################################
FROM louislam/uptime-kuma:base2 AS upload-artifact
WORKDIR /
RUN apt update && \
apt --yes install curl file


@ -32,6 +32,10 @@ if (! exists) {
process.exit(1);
}
/**
* Commit updated files
* @param {string} version Version to update to
*/
function commit(version) {
let msg = "Update to " + version;
@ -47,6 +51,10 @@ function commit(version) {
console.log(res.stdout.toString().trim());
}
/**
* Create a tag with the specified version
* @param {string} version Tag to create
*/
function tag(version) {
let res = childProcess.spawnSync("git", [ "tag", version ]);
console.log(res.stdout.toString().trim());
@ -55,6 +63,11 @@ function tag(version) {
console.log(res.stdout.toString().trim());
}
/**
* Check if a tag exists for the specified version
* @param {string} version Version to check
* @returns {boolean} Does the tag already exist
*/
function tagExists(version) {
if (! version) {
throw new Error("invalid version");


@ -0,0 +1,27 @@
const childProcess = require("child_process");
const fs = require("fs");
const platform = process.argv[2];
if (!platform) {
console.error("No platform??");
process.exit(1);
}
if (platform === "linux/arm/v7") {
console.log("Arch: armv7");
if (fs.existsSync("./extra/healthcheck-armv7")) {
fs.renameSync("./extra/healthcheck-armv7", "./extra/healthcheck");
console.log("Already built in the host, skip.");
process.exit(0);
} else {
console.log("prebuilt not found, it will be slow! You should execute `npm run build-healthcheck-armv7` before build.");
}
} else {
if (fs.existsSync("./extra/healthcheck-armv7")) {
fs.rmSync("./extra/healthcheck-armv7");
}
}
const output = childProcess.execSync("go build -x -o ./extra/healthcheck ./extra/healthcheck.go").toString("utf8");
console.log(output);


@ -25,6 +25,10 @@ if (platform === "linux/amd64") {
const file = fs.createWriteStream("cloudflared.deb");
get("https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-" + arch + ".deb");
/**
* Download specified file
* @param {string} url URL to request
*/
function get(url) {
http.get(url, function (res) {
if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {


@ -1,21 +0,0 @@
#!/usr/bin/env sh
# set -e Exit the script if an error happens
set -e
PUID=${PUID=0}
PGID=${PGID=0}
files_ownership () {
# -h Changes the ownership of an encountered symbolic link and not that of the file or directory pointed to by the symbolic link.
# -R Recursively descends the specified directories
# -c Like verbose but report only when a change is made
chown -hRc "$PUID":"$PGID" /app/data
}
echo "==> Performing startup jobs and maintenance tasks"
files_ownership
echo "==> Starting application with user $PUID group $PGID"
# --clear-groups Clear supplementary groups.
exec setpriv --reuid "$PUID" --regid "$PGID" --clear-groups "$@"

extra/healthcheck.go (new file)

@ -0,0 +1,90 @@
/*
* If changed, have to run `npm run build-docker-builder-go`.
* This script should be run after a period of time (180s), because the server may need some time to prepare.
*/
package main
import (
"crypto/tls"
"io/ioutil"
"log"
"net/http"
"os"
"runtime"
"strings"
"time"
)
func main() {
isFreeBSD := runtime.GOOS == "freebsd"
// Is K8S + uptime-kuma as the container name
// See #2083
isK8s := strings.HasPrefix(os.Getenv("UPTIME_KUMA_PORT"), "tcp://")
// process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
http.DefaultTransport.(*http.Transport).TLSClientConfig = &tls.Config{
InsecureSkipVerify: true,
}
client := http.Client{
Timeout: 28 * time.Second,
}
sslKey := os.Getenv("UPTIME_KUMA_SSL_KEY")
if len(sslKey) == 0 {
sslKey = os.Getenv("SSL_KEY")
}
sslCert := os.Getenv("UPTIME_KUMA_SSL_CERT")
if len(sslCert) == 0 {
sslCert = os.Getenv("SSL_CERT")
}
hostname := os.Getenv("UPTIME_KUMA_HOST")
if len(hostname) == 0 && !isFreeBSD {
hostname = os.Getenv("HOST")
}
if len(hostname) == 0 {
hostname = "127.0.0.1"
}
port := ""
// UPTIME_KUMA_PORT is overridden by K8S unexpectedly,
if !isK8s {
port = os.Getenv("UPTIME_KUMA_PORT")
}
if len(port) == 0 {
port = os.Getenv("PORT")
}
if len(port) == 0 {
port = "3001"
}
protocol := ""
if len(sslKey) != 0 && len(sslCert) != 0 {
protocol = "https"
} else {
protocol = "http"
}
url := protocol + "://" + hostname + ":" + port
log.Println("Checking " + url)
resp, err := client.Get(url)
if err != nil {
log.Fatalln(err)
}
defer resp.Body.Close()
_, err = ioutil.ReadAll(resp.Body)
if err != nil {
log.Fatalln(err)
}
log.Printf("Health Check OK [Res Code: %d]\n", resp.StatusCode)
}


@ -1,4 +1,5 @@
/*
* Deprecated: Changed to healthcheck.go, it will be deleted in the future.
* This script should be run after a period of time (180s), because the server may need some time to prepare.
*/
const { FBSD } = require("../server/util-server");
@ -18,17 +19,17 @@ if (sslKey && sslCert) {
// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
// Dual-stack support for (::)
let hostname = process.env.UPTIME_KUMA_HOST;
let hostname = process.env.UPTIME_KUMA_SERVICE_HOST || process.env.UPTIME_KUMA_HOST || "::";
// Also read HOST if not *BSD, as HOST is a system environment variable in FreeBSD
if (!hostname && !FBSD) {
hostname = process.env.HOST;
}
const port = parseInt(process.env.UPTIME_KUMA_PORT || process.env.PORT || 3001);
const port = parseInt(process.env.UPTIME_KUMA_SERVICE_PORT || process.env.UPTIME_KUMA_PORT || process.env.PORT || 3001);
let options = {
host: hostname || "127.0.0.1",
host: hostname,
port: port,
timeout: 28 * 1000,
};


@ -1,11 +1,12 @@
const pkg = require("../package.json");
const fs = require("fs");
const util = require("../src/util");
const dayjs = require("dayjs");
util.polyfill();
const oldVersion = pkg.version;
const newVersion = oldVersion + "-nightly";
const newVersion = oldVersion + "-nightly-" + dayjs().format("YYYYMMDDHHmmss");
console.log("Old Version: " + oldVersion);
console.log("New Version: " + newVersion);


@ -43,6 +43,11 @@ const main = async () => {
console.log("Finished.");
};
/**
* Ask question of user
* @param {string} question Question to ask
* @returns {Promise<string>} User's response
*/
function question(question) {
return new Promise((resolve) => {
rl.question(question, (answer) => {


@ -53,6 +53,11 @@ const main = async () => {
console.log("Finished.");
};
/**
* Ask question of user
* @param {string} question Question to ask
* @returns {Promise<string>} User's response
*/
function question(question) {
return new Promise((resolve) => {
rl.question(question, (answer) => {


@ -135,6 +135,11 @@ server.listen({
udp: 5300
});
/**
* Get human readable request type from request code
* @param {number} code Request code to translate
* @returns {string} Human readable request type
*/
function type(code) {
for (let name in Packet.TYPE) {
if (Packet.TYPE[name] === code) {


@ -11,6 +11,7 @@ class SimpleMqttServer {
this.port = port;
}
/** Start the MQTT server */
start() {
this.server.listen(this.port, () => {
console.log("server started and listening on port ", this.port);


@ -1,51 +1,45 @@
// Need to use ES6 to read language files
import fs from "fs";
import path from "path";
import util from "util";
import rmSync from "../fs-rmSync.js";
// https://stackoverflow.com/questions/13786160/copy-folder-recursively-in-node-js
/**
* Look ma, it's cp -R.
* @param {string} src The path to the thing to copy.
* @param {string} dest The path to the new copy.
* Copy across the required language files
* Creates a local directory (./languages) and copies the required files
* into it.
* @param {string} langCode Code of language to update. A file will be
* created with this code if one does not already exist
* @param {string} baseLang The second base language file to copy. This
* will be ignored if set to "en" as en.js is copied by default
*/
const copyRecursiveSync = function (src, dest) {
let exists = fs.existsSync(src);
let stats = exists && fs.statSync(src);
let isDirectory = exists && stats.isDirectory();
function copyFiles(langCode, baseLang) {
if (fs.existsSync("./languages")) {
rmSync("./languages", { recursive: true });
}
fs.mkdirSync("./languages");
if (isDirectory) {
fs.mkdirSync(dest);
fs.readdirSync(src).forEach(function (childItemName) {
copyRecursiveSync(path.join(src, childItemName),
path.join(dest, childItemName));
});
if (!fs.existsSync(`../../src/languages/${langCode}.js`)) {
fs.closeSync(fs.openSync(`./languages/${langCode}.js`, "a"));
} else {
fs.copyFileSync(src, dest);
fs.copyFileSync(`../../src/languages/${langCode}.js`, `./languages/${langCode}.js`);
}
fs.copyFileSync("../../src/languages/en.js", "./languages/en.js");
if (baseLang !== "en") {
fs.copyFileSync(`../../src/languages/${baseLang}.js`, `./languages/${baseLang}.js`);
}
};
console.log("Arguments:", process.argv);
const baseLangCode = process.argv[2] || "en";
console.log("Base Lang: " + baseLangCode);
if (fs.existsSync("./languages")) {
rmSync("./languages", { recursive: true });
}
copyRecursiveSync("../../src/languages", "./languages");
const en = (await import("./languages/en.js")).default;
const baseLang = (await import(`./languages/${baseLangCode}.js`)).default;
const files = fs.readdirSync("./languages");
console.log("Files:", files);
for (const file of files) {
if (! file.endsWith(".js")) {
console.log("Skipping " + file);
continue;
}
/**
* Update the specified language file
* @param {string} langCode Language code to update
* @param {string} baseLangCode Second language to copy keys from
*/
async function updateLanguage(langCode, baseLangCode) {
const en = (await import("./languages/en.js")).default;
const baseLang = (await import(`./languages/${baseLangCode}.js`)).default;
let file = langCode + ".js";
console.log("Processing " + file);
const lang = await import("./languages/" + file);
@ -83,5 +77,20 @@ for (const file of files) {
fs.writeFileSync(`../../src/languages/${file}`, code);
}
// Get command line arguments
const baseLangCode = process.env.npm_config_baselang || "en";
const langCode = process.env.npm_config_language;
// We need the file to edit
if (langCode == null) {
throw new Error("Argument --language=<code> must be provided");
}
console.log("Base Lang: " + baseLangCode);
console.log("Updating: " + langCode);
copyFiles(langCode, baseLangCode);
await updateLanguage(langCode, baseLangCode);
rmSync("./languages", { recursive: true });
console.log("Done. Fixing formatting by ESLint...");


@ -36,10 +36,8 @@ if (! exists) {
}
/**
* Updates the version number in package.json and commits it to git.
* @param {string} version - The new version number
*
* Generated by Trelent
* Commit updated files
* @param {string} version Version to update to
*/
function commit(version) {
let msg = "Update to " + version;
@ -53,16 +51,19 @@ function commit(version) {
}
}
/**
* Create a tag with the specified version
* @param {string} version Tag to create
*/
function tag(version) {
let res = childProcess.spawnSync("git", [ "tag", version ]);
console.log(res.stdout.toString().trim());
}
/**
* Checks if a given version is already tagged in the git repository.
* @param {string} version - The version to check for.
*
* Generated by Trelent
* Check if a tag exists for the specified version
* @param {string} version Version to check
* @returns {boolean} Does the tag already exist
*/
function tagExists(version) {
if (! version) {


@ -10,6 +10,10 @@ if (!newVersion) {
updateWiki(newVersion);
/**
* Update the wiki with new version number
* @param {string} newVersion Version to update to
*/
function updateWiki(newVersion) {
const wikiDir = "./tmp/wiki";
const howToUpdateFilename = "./tmp/wiki/🆙-How-to-Update.md";
@ -39,6 +43,10 @@ function updateWiki(newVersion) {
safeDelete(wikiDir);
}
/**
* Check if a directory exists and then delete it
* @param {string} dir Directory to delete
*/
function safeDelete(dir) {
if (fs.existsSync(dir)) {
fs.rm(dir, {

package-lock.json (generated)

File diff suppressed because it is too large.

@ -1,6 +1,6 @@
{
"name": "uptime-kuma",
"version": "1.18.2",
"version": "1.20.0-beta.0",
"license": "MIT",
"repository": {
"type": "git",
@ -23,21 +23,22 @@
"start-server": "node server/server.js",
"start-server-dev": "cross-env NODE_ENV=development node server/server.js",
"build": "vite build --config ./config/vite.config.js",
"test": "node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/ --test",
"test": "node test/prepare-test-server.js && npm run jest-backend",
"test-with-build": "npm run build && npm test",
"jest": "node test/prepare-jest.js && npm run jest-frontend && npm run jest-backend",
"jest-frontend": "cross-env TEST_FRONTEND=1 jest --config=./config/jest-frontend.config.js",
"jest-backend": "cross-env TEST_BACKEND=1 jest --config=./config/jest-backend.config.js",
"jest-backend": "cross-env TEST_BACKEND=1 jest --runInBand --detectOpenHandles --forceExit --config=./config/jest-backend.config.js",
"tsc": "tsc",
"vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
"build-docker": "npm run build && npm run build-docker-debian",
"build-docker-debian-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-debian . --push",
"build-docker-debian": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:$VERSION-debian --target release . --push",
"build-docker-nightly": "npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
"build-docker-nightly-amd64": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
"build-docker": "npm run build && npm run build-docker-full && npm run build-docker-slim",
"build-docker-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base2 --target base2 . --push",
"build-docker-base-slim": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base2-slim --target base2-slim . --push",
"build-docker-builder-go": "docker buildx build -f docker/builder-go.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:builder-go . --push",
"build-docker-slim": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-slim -t louislam/uptime-kuma:$VERSION-slim --target release --build-arg BASE_IMAGE=louislam/uptime-kuma:base2-slim . --push",
"build-docker-full": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2 -t louislam/uptime-kuma:$VERSION --target release . --push",
"build-docker-nightly": "npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly2 --target nightly --build-arg . --push",
"build-docker-nightly-local": "docker build -f docker/dockerfile -t louislam/uptime-kuma:nightly2 --target nightly .",
"build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test --target pr-test . --push",
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
"setup": "git checkout 1.18.2 && npm ci --production && npm run download-dist",
"setup": "git checkout 1.19.6 && npm ci --production && npm run download-dist",
"download-dist": "node extra/download-dist.js",
"mark-as-nightly": "node extra/mark-as-nightly.js",
"reset-password": "node extra/reset-password.js",
@ -46,64 +47,75 @@
"test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
"test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
"test-nodejs16": "docker build --progress plain -f test/ubuntu-nodejs16.dockerfile .",
"simple-dns-server": "node extra/simple-dns-server.js",
"simple-mqtt-server": "node extra/simple-mqtt-server.js",
"update-language-files-with-base-lang": "cd extra/update-language-files && node index.js %npm_config_base_lang% && eslint ../../src/languages/**.js --fix",
"update-language-files": "cd extra/update-language-files && node index.js && eslint ../../src/languages/**.js --fix",
"ncu-patch": "npm-check-updates -u -t patch",
"update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
"release-final": "node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
"release-beta": "node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
"git-remove-tag": "git tag -d",
"build-dist-and-restart": "npm run build && npm run start-server-dev",
"start-pr-test": "node extra/checkout-pr.js && npm install && npm run dev",
"cy:test": "node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/ --e2e",
"cy:run": "npx cypress run --browser chrome --headless"
"cy:run": "npx cypress run --browser chrome --headless --config-file ./config/cypress.config.js",
"cy:run:unit": "npx cypress run --browser chrome --headless --config-file ./config/cypress.frontend.config.js",
"cypress-open": "concurrently -k -r \"node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/\" \"cypress open --config-file ./config/cypress.config.js\"",
"build-healthcheck-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./extra/healthcheck-armv7 ./extra/healthcheck.go",
"quick-run-nightly": "docker run --rm --env NODE_ENV=development -p 3001:3001 louislam/uptime-kuma:nightly2",
"start-dev-container": "cd docker && docker-compose -f docker-compose-dev.yml up"
},
"dependencies": {
"@louislam/sqlite3": "~15.0.6",
"@grpc/grpc-js": "~1.7.3",
"@louislam/ping": "~0.4.2-mod.1",
"@louislam/sqlite3": "15.1.2",
"args-parser": "~1.3.0",
"axios": "~0.27.0",
"axios-ntlm": "^1.3.0",
"badge-maker": "^3.3.1",
"axios-ntlm": "1.3.0",
"badge-maker": "~3.3.1",
"bcryptjs": "~2.4.3",
"bree": "~7.1.5",
"cacheable-lookup": "~6.0.4",
"chardet": "^1.3.0",
"chardet": "~1.4.0",
"check-password-strength": "^2.0.5",
"cheerio": "^1.0.0-rc.10",
"chroma-js": "^2.1.2",
"cheerio": "~1.0.0-rc.12",
"chroma-js": "~2.4.2",
"command-exists": "~1.2.9",
"compare-versions": "~3.6.0",
"compression": "^1.7.4",
"dayjs": "^1.11.0",
"compression": "~1.7.4",
"dayjs": "~1.11.5",
"dotenv": "~16.0.3",
"express": "~4.17.3",
"express-basic-auth": "~1.2.1",
"express-static-gzip": "^2.1.7",
"express-static-gzip": "~2.1.7",
"form-data": "~4.0.0",
"gamedig": "^4.0.5",
"http-graceful-shutdown": "~3.1.7",
"http-proxy-agent": "^5.0.0",
"https-proxy-agent": "^5.0.0",
"iconv-lite": "^0.6.3",
"jsonwebtoken": "~8.5.1",
"jwt-decode": "^3.1.2",
"limiter": "^2.1.0",
"mqtt": "^4.2.8",
"mssql": "^8.1.0",
"http-proxy-agent": "~5.0.0",
"https-proxy-agent": "~5.0.1",
"iconv-lite": "~0.6.3",
"jsesc": "~3.0.2",
"jsonwebtoken": "~9.0.0",
"jwt-decode": "~3.1.2",
"limiter": "~2.1.0",
"mongodb": "~4.13.0",
"mqtt": "~4.3.7",
"mssql": "~8.1.4",
"mysql2": "~2.3.3",
"node-cloudflared-tunnel": "~1.0.9",
"node-radius-client": "^1.0.0",
"node-radius-client": "~1.0.0",
"nodemailer": "~6.6.5",
"notp": "~2.0.3",
"password-hash": "~1.2.2",
"pg": "^8.7.3",
"pg-connection-string": "^2.5.0",
"pg": "~8.8.0",
"pg-connection-string": "~2.5.0",
"prom-client": "~13.2.0",
"prometheus-api-metrics": "~3.2.1",
"redbean-node": "0.1.4",
"socket.io": "~4.4.1",
"socket.io-client": "~4.4.1",
"protobufjs": "~7.1.1",
"redbean-node": "~0.2.0",
"redis": "~4.5.1",
"socket.io": "~4.5.3",
"socket.io-client": "~4.5.3",
"socks-proxy-agent": "6.1.1",
"tar": "^6.1.11",
"tar": "~6.1.11",
"tcp-ping": "~0.1.1",
"thirty-two": "~1.0.2"
},
@ -120,33 +132,34 @@
"@vitejs/plugin-legacy": "~2.1.0",
"@vitejs/plugin-vue": "~3.1.0",
"@vue/compiler-sfc": "~3.2.36",
"@vuepic/vue-datepicker": "~3.4.8",
"aedes": "^0.46.3",
"babel-plugin-rewire": "~1.2.0",
"bootstrap": "5.1.3",
"chart.js": "~3.6.2",
"chartjs-adapter-dayjs": "~1.0.0",
"concurrently": "^7.1.0",
"core-js": "~3.18.3",
"core-js": "~3.26.1",
"cross-env": "~7.0.3",
"cypress": "^10.1.0",
"delay": "^5.0.0",
"dns2": "~2.0.1",
"dompurify": "~2.4.3",
"eslint": "~8.14.0",
"eslint-plugin-vue": "~8.7.1",
"favico.js": "^0.3.10",
"favico.js": "~0.3.10",
"marked": "~4.2.5",
"jest": "~27.2.5",
"jest-puppeteer": "~6.0.3",
"postcss-html": "~1.5.0",
"postcss-rtlcss": "~3.7.2",
"postcss-scss": "~4.0.4",
"prismjs": "^1.27.0",
"puppeteer": "~13.1.3",
"prismjs": "~1.29.0",
"qrcode": "~1.5.0",
"rollup-plugin-visualizer": "^5.6.0",
"sass": "~1.42.1",
"stylelint": "~14.7.1",
"stylelint-config-standard": "~25.0.0",
"terser": "^5.15.0",
"terser": "~5.15.0",
"timezones-list": "~3.0.1",
"typescript": "~4.4.4",
"v-pagination-3": "~0.1.7",
@ -156,10 +169,10 @@
"vue-chart-3": "3.0.9",
"vue-confirm-dialog": "~1.0.2",
"vue-contenteditable": "~3.0.4",
"vue-i18n": "~9.1.9",
"vue-i18n": "~9.2.2",
"vue-image-crop-upload": "~3.0.3",
"vue-multiselect": "~3.0.0-alpha.2",
"vue-prism-editor": "^2.0.0-alpha.2",
"vue-prism-editor": "~2.0.0-alpha.2",
"vue-qrcode": "~1.0.0",
"vue-router": "~4.0.14",
"vue-toastification": "~2.0.0-rc.5",


@ -63,6 +63,12 @@ function myAuthorizer(username, password, callback) {
});
}
/**
* Use basic auth if auth is not disabled
* @param {express.Request} req Express request object
* @param {express.Response} res Express response object
* @param {express.NextFunction} next
*/
exports.basicAuth = async function (req, res, next) {
const middleware = basicAuth({
authorizer: myAuthorizer,


@ -1,6 +1,8 @@
const https = require("https");
const http = require("http");
const CacheableLookup = require("cacheable-lookup");
const { Settings } = require("./settings");
const { log } = require("../src/util");
class CacheableDnsHttpAgent {
@ -9,14 +11,36 @@ class CacheableDnsHttpAgent {
static httpAgentList = {};
static httpsAgentList = {};
static enable = false;
/**
* Register cacheable to global agents
* Register/Disable cacheable to global agents
*/
static registerGlobalAgent() {
this.cacheable.install(http.globalAgent);
this.cacheable.install(https.globalAgent);
static async update() {
log.debug("CacheableDnsHttpAgent", "update");
let isEnable = await Settings.get("dnsCache");
if (isEnable !== this.enable) {
log.debug("CacheableDnsHttpAgent", "value changed");
if (isEnable) {
log.debug("CacheableDnsHttpAgent", "enable");
this.cacheable.install(http.globalAgent);
this.cacheable.install(https.globalAgent);
} else {
log.debug("CacheableDnsHttpAgent", "disable");
this.cacheable.uninstall(http.globalAgent);
this.cacheable.uninstall(https.globalAgent);
}
}
this.enable = isEnable;
}
/**
* Attach cacheable to HTTP agent
* @param {http.Agent} agent Agent to install
*/
static install(agent) {
this.cacheable.install(agent);
}
@ -26,6 +50,10 @@ class CacheableDnsHttpAgent {
* @return {https.Agent}
*/
static getHttpsAgent(agentOptions) {
if (!this.enable) {
return new https.Agent(agentOptions);
}
let key = JSON.stringify(agentOptions);
if (!(key in this.httpsAgentList)) {
this.httpsAgentList[key] = new https.Agent(agentOptions);
@ -39,6 +67,10 @@ class CacheableDnsHttpAgent {
* @return {https.Agents}
*/
static getHttpAgent(agentOptions) {
if (!this.enable) {
return new http.Agent(agentOptions);
}
let key = JSON.stringify(agentOptions);
if (!(key in this.httpAgentList)) {
this.httpAgentList[key] = new http.Agent(agentOptions);
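A brief, hedged usage sketch of the agent cache above: callers ask this class for an agent instead of constructing one, and when the `dnsCache` setting is off they get a plain Node agent back. The require path, export name, and the option value are assumptions for the example:

```js
// Hedged usage sketch of CacheableDnsHttpAgent (require path and export name assumed).
const { CacheableDnsHttpAgent } = require("./cacheable-dns-http-agent");

async function buildAgent() {
    await CacheableDnsHttpAgent.update(); // re-reads the "dnsCache" setting
    return CacheableDnsHttpAgent.getHttpsAgent({
        rejectUnauthorized: false, // illustrative option only
    });
}
```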


@ -25,7 +25,7 @@ exports.startInterval = () => {
let checkBeta = await setting("checkBeta");
if (checkBeta && res.data.beta) {
if (compareVersions.compare(res.data.beta, res.data.beta, ">")) {
if (compareVersions.compare(res.data.beta, res.data.slow, ">")) {
exports.latestVersion = res.data.beta;
return;
}


@ -4,7 +4,8 @@
const { TimeLogger } = require("../src/util");
const { R } = require("redbean-node");
const { UptimeKumaServer } = require("./uptime-kuma-server");
const io = UptimeKumaServer.getInstance().io;
const server = UptimeKumaServer.getInstance();
const io = server.io;
const { setting } = require("./util-server");
const checkVersion = require("./check-version");
@ -121,7 +122,9 @@ async function sendInfo(socket) {
socket.emit("info", {
version: checkVersion.version,
latestVersion: checkVersion.latestVersion,
primaryBaseURL: await setting("primaryBaseURL")
primaryBaseURL: await setting("primaryBaseURL"),
serverTimezone: await server.getTimezone(),
serverTimezoneOffset: server.getTimezoneOffset(),
});
}


@ -4,13 +4,21 @@ const demoMode = args["demo"] || false;
const badgeConstants = {
naColor: "#999",
defaultUpColor: "#66c20a",
defaultWarnColor: "#eed202",
defaultDownColor: "#c2290a",
defaultPendingColor: "#f8a306",
defaultMaintenanceColor: "#1747f5",
defaultPingColor: "blue", // as defined by badge-maker / shields.io
defaultStyle: "flat",
defaultPingValueSuffix: "ms",
defaultPingLabelSuffix: "h",
defaultUptimeValueSuffix: "%",
defaultUptimeLabelSuffix: "h",
defaultCertExpValueSuffix: " days",
defaultCertExpLabelSuffix: "h",
// Values Come From Default Notification Times
defaultCertExpireWarnDays: "14",
defaultCertExpireDownDays: "7"
};
module.exports = {


@ -4,6 +4,9 @@ const { setSetting, setting } = require("./util-server");
const { log, sleep } = require("../src/util");
const dayjs = require("dayjs");
const knex = require("knex");
const { PluginsManager } = require("./plugins-manager");
const path = require("path");
const { EmbeddedMariaDB } = require("./embedded-mariadb");
/**
* Database & App Data Folder
@ -62,8 +65,12 @@ class Database {
"patch-add-clickable-status-page-link.sql": true,
"patch-add-sqlserver-monitor.sql": true,
"patch-add-other-auth.sql": { parents: [ "patch-monitor-basic-auth.sql" ] },
"patch-grpc-monitor.sql": true,
"patch-add-radius-monitor.sql": true,
"patch-monitor-add-resend-interval.sql": true,
"patch-ping-packet-size.sql": true,
"patch-maintenance-table2.sql": true,
"patch-add-gamedig-monitor.sql": true,
};
/**
@ -81,6 +88,13 @@ class Database {
static init(args) {
// Data Directory (must be end with "/")
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
// The plugin feature only works when dataDir is the default "./data/"
if (Database.dataDir !== "./data/") {
log.warn("PLUGIN", "Warning: In order to enable plugin feature, you need to use the default data directory: ./data/");
PluginsManager.disable = true;
}
Database.path = Database.dataDir + "kuma.db";
if (! fs.existsSync(Database.dataDir)) {
fs.mkdirSync(Database.dataDir, { recursive: true });
@ -107,24 +121,64 @@ class Database {
static async connect(testMode = false, autoloadModels = true, noLog = false) {
const acquireConnectionTimeout = 120 * 1000;
const Dialect = require("knex/lib/dialects/sqlite3/index.js");
Dialect.prototype._driver = () => require("@louislam/sqlite3");
let dbConfig;
const knexInstance = knex({
client: Dialect,
connection: {
filename: Database.path,
acquireConnectionTimeout: acquireConnectionTimeout,
},
useNullAsDefault: true,
pool: {
min: 1,
max: 1,
idleTimeoutMillis: 120 * 1000,
propagateCreateError: false,
acquireTimeoutMillis: acquireConnectionTimeout,
try {
let dbConfigString = fs.readFileSync(path.join(Database.dataDir, "db-config.json")).toString("utf-8");
dbConfig = JSON.parse(dbConfigString);
if (typeof dbConfig !== "object") {
throw new Error("Invalid db-config.json, it must be an object");
}
});
if (typeof dbConfig.type !== "string") {
throw new Error("Invalid db-config.json, type must be a string");
}
} catch (_) {
dbConfig = {
//type: "sqlite",
type: "embedded-mariadb",
};
}
let config = {};
if (dbConfig.type === "sqlite") {
const Dialect = require("knex/lib/dialects/sqlite3/index.js");
Dialect.prototype._driver = () => require("@louislam/sqlite3");
config = {
client: Dialect,
connection: {
filename: Database.path,
acquireConnectionTimeout: acquireConnectionTimeout,
},
useNullAsDefault: true,
pool: {
min: 1,
max: 1,
idleTimeoutMillis: 120 * 1000,
propagateCreateError: false,
acquireTimeoutMillis: acquireConnectionTimeout,
}
};
} else if (dbConfig.type === "embedded-mariadb") {
let embeddedMariaDB = EmbeddedMariaDB.getInstance();
await embeddedMariaDB.start();
log.info("mariadb", "Embedded MariaDB started");
config = {
client: "mysql2",
connection: {
socketPath: embeddedMariaDB.socketPath,
user: "node",
database: "kuma"
}
};
} else {
throw new Error("Unknown Database type: " + dbConfig.type);
}
const knexInstance = knex(config);
R.setup(knexInstance);
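// Sketch: a minimal db-config.json that the try/catch above would read, assuming the
// default data directory used by Database.init(). "type" is the only required key and
// must be "sqlite" or "embedded-mariadb"; everything else here is illustrative.
const exampleDbConfigPath = path.join(process.env.DATA_DIR || "./data/", "db-config.json");
const exampleDbConfig = { type: "sqlite" };   // or { type: "embedded-mariadb" } for the bundled MariaDB
// fs.writeFileSync(exampleDbConfigPath, JSON.stringify(exampleDbConfig, null, 4));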
@ -150,9 +204,6 @@ class Database {
await R.exec("PRAGMA cache_size = -12000");
await R.exec("PRAGMA auto_vacuum = FULL");
// Avoid error "SQLITE_BUSY: database is locked" by allowing SQLITE to wait up to 5 seconds to do a write
await R.exec("PRAGMA busy_timeout = 5000");
// This ensures that an operating system crash or power failure will not corrupt the database.
// FULL synchronous is very safe, but it is also slower.
// Read more: https://sqlite.org/pragma.html#pragma_synchronous

157
server/embedded-mariadb.js Normal file
View File

@ -0,0 +1,157 @@
const { log } = require("../src/util");
const childProcess = require("child_process");
const fs = require("fs");
const mysql = require("mysql2");
/**
* It is only used inside the docker container
*/
class EmbeddedMariaDB {
static instance = null;
exec = "mariadbd";
mariadbDataDir = "/app/data/mariadb";
runDir = "/app/data/run/mariadb";
socketPath = this.runDir + "/mysqld.sock";
childProcess = null;
running = false;
started = false;
/**
* Get the singleton instance
* @returns {EmbeddedMariaDB}
*/
static getInstance() {
if (!EmbeddedMariaDB.instance) {
EmbeddedMariaDB.instance = new EmbeddedMariaDB();
}
return EmbeddedMariaDB.instance;
}
static hasInstance() {
return !!EmbeddedMariaDB.instance;
}
/**
* Start the embedded MariaDB server
*/
start() {
if (this.childProcess) {
log.info("mariadb", "Already started");
return;
}
this.initDB();
this.running = true;
log.info("mariadb", "Starting Embedded MariaDB");
this.childProcess = childProcess.spawn(this.exec, [
"--user=node",
"--datadir=" + this.mariadbDataDir,
`--socket=${this.socketPath}`,
`--pid-file=${this.runDir}/mysqld.pid`,
]);
this.childProcess.on("close", (code) => {
this.running = false;
this.childProcess = null;
this.started = false;
log.info("mariadb", "Stopped Embedded MariaDB: " + code);
if (code !== 0) {
log.info("mariadb", "Try to restart Embedded MariaDB as it is not stopped by user");
this.start();
}
});
this.childProcess.on("error", (err) => {
if (err.code === "ENOENT") {
log.error("mariadb", `Embedded MariaDB: ${this.exec} is not found`);
} else {
log.error("mariadb", err);
}
});
let handler = (data) => {
log.debug("mariadb", data.toString("utf-8"));
if (data.toString("utf-8").includes("ready for connections")) {
this.initDBAfterStarted();
}
};
this.childProcess.stdout.on("data", handler);
this.childProcess.stderr.on("data", handler);
return new Promise((resolve) => {
let interval = setInterval(() => {
if (this.started) {
clearInterval(interval);
resolve();
} else {
log.info("mariadb", "Waiting for Embedded MariaDB to start...");
}
}, 1000);
});
}
stop() {
if (this.childProcess) {
this.childProcess.kill("SIGINT");
this.childProcess = null;
}
}
initDB() {
if (!fs.existsSync(this.mariadbDataDir)) {
log.info("mariadb", `Embedded MariaDB: ${this.mariadbDataDir} is not found, create one now.`);
fs.mkdirSync(this.mariadbDataDir, {
recursive: true,
});
let result = childProcess.spawnSync("mysql_install_db", [
"--user=node",
"--ldata=" + this.mariadbDataDir,
]);
if (result.status !== 0) {
let error = result.stderr.toString("utf-8");
log.error("mariadb", error);
return;
} else {
log.info("mariadb", "Embedded MariaDB: mysql_install_db done:" + result.stdout.toString("utf-8"));
}
}
if (!fs.existsSync(this.runDir)) {
log.info("mariadb", `Embedded MariaDB: ${this.runDir} is not found, create one now.`);
fs.mkdirSync(this.runDir, {
recursive: true,
});
}
}
async initDBAfterStarted() {
const connection = mysql.createConnection({
socketPath: this.socketPath,
user: "node",
});
let result = await connection.execute("CREATE DATABASE IF NOT EXISTS `kuma`");
log.debug("mariadb", "CREATE DATABASE: " + JSON.stringify(result));
log.info("mariadb", "Embedded MariaDB is ready for connections");
this.started = true;
}
}
module.exports = {
EmbeddedMariaDB,
};
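// A usage sketch of the class above, assuming it runs inside the Docker image where
// mariadbd and mysql_install_db are available; the mysql2/promise import is an assumption
// (the class itself only uses the callback-style mysql2 API).
const { EmbeddedMariaDB } = require("./embedded-mariadb");
const mysqlPromise = require("mysql2/promise");

async function exampleConnect() {
    const mariadb = EmbeddedMariaDB.getInstance();
    await mariadb.start();   // resolves once the "kuma" database exists and the server is ready
    const connection = await mysqlPromise.createConnection({
        socketPath: mariadb.socketPath,   // /app/data/run/mariadb/mysqld.sock
        user: "node",
        database: "kuma",
    });
    const [ rows ] = await connection.query("SELECT 1 AS ok");
    console.log(rows);
    await connection.end();
    mariadb.stop();          // sends SIGINT to the mariadbd child process
}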

24
server/git.js Normal file
View File

@ -0,0 +1,24 @@
const childProcess = require("child_process");
class Git {
static clone(repoURL, cwd, targetDir = ".") {
let result = childProcess.spawnSync("git", [
"clone",
repoURL,
targetDir,
], {
cwd: cwd,
});
if (result.status !== 0) {
throw new Error(result.stderr.toString("utf-8"));
} else {
return result.stdout.toString("utf-8") + result.stderr.toString("utf-8");
}
}
}
module.exports = {
Git,
};
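// Usage sketch for the helper above; the URL and directories are placeholders.
const { Git } = require("./git");
// Clones <repoURL> into <cwd>/<targetDir> and returns git's combined output,
// or throws with git's stderr if the clone fails.
const cloneOutput = Git.clone("https://example.com/some-plugin.git", "./data/plugins", "some-plugin");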

View File

@ -32,6 +32,7 @@ const initBackgroundJobs = function (args) {
return bree;
};
/** Stop all background jobs if running */
const stopBackgroundJobs = function () {
if (bree) {
bree.stop();

View File

@ -25,15 +25,20 @@ const DEFAULT_KEEP_PERIOD = 180;
parsedPeriod = DEFAULT_KEEP_PERIOD;
}
log(`Clearing Data older than ${parsedPeriod} days...`);
if (parsedPeriod < 1) {
log(`Data deletion has been disabled as period is less than 1. Period is ${parsedPeriod} days.`);
} else {
try {
await R.exec(
"DELETE FROM heartbeat WHERE time < DATETIME('now', '-' || ? || ' days') ",
[ parsedPeriod ]
);
} catch (e) {
log(`Failed to clear old data: ${e.message}`);
log(`Clearing Data older than ${parsedPeriod} days...`);
try {
await R.exec(
"DELETE FROM heartbeat WHERE time < DATETIME('now', '-' || ? || ' days') ",
[ parsedPeriod ]
);
} catch (e) {
log(`Failed to clear old data: ${e.message}`);
}
}
exit();

View File

@ -1,8 +1,3 @@
const dayjs = require("dayjs");
const utc = require("dayjs/plugin/utc");
let timezone = require("dayjs/plugin/timezone");
dayjs.extend(utc);
dayjs.extend(timezone);
const { BeanModel } = require("redbean-node/dist/bean-model");
/**
@ -10,6 +5,7 @@ const { BeanModel } = require("redbean-node/dist/bean-model");
* 0 = DOWN
* 1 = UP
* 2 = PENDING
* 3 = MAINTENANCE
*/
class Heartbeat extends BeanModel {

240
server/model/maintenance.js Normal file
View File

@ -0,0 +1,240 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const { parseTimeObject, parseTimeFromTimeObject, utcToLocal, localToUTC, log } = require("../../src/util");
const { timeObjectToUTC, timeObjectToLocal } = require("../util-server");
const { R } = require("redbean-node");
const dayjs = require("dayjs");
class Maintenance extends BeanModel {
/**
* Return an object that is ready to be parsed to JSON for the public
* Only show necessary data to the public
* @returns {Object}
*/
async toPublicJSON() {
let dateRange = [];
if (this.start_date) {
dateRange.push(utcToLocal(this.start_date));
if (this.end_date) {
dateRange.push(utcToLocal(this.end_date));
}
}
let timeRange = [];
let startTime = timeObjectToLocal(parseTimeObject(this.start_time));
timeRange.push(startTime);
let endTime = timeObjectToLocal(parseTimeObject(this.end_time));
timeRange.push(endTime);
let obj = {
id: this.id,
title: this.title,
description: this.description,
strategy: this.strategy,
intervalDay: this.interval_day,
active: !!this.active,
dateRange: dateRange,
timeRange: timeRange,
weekdays: (this.weekdays) ? JSON.parse(this.weekdays) : [],
daysOfMonth: (this.days_of_month) ? JSON.parse(this.days_of_month) : [],
timeslotList: [],
};
const timeslotList = await this.getTimeslotList();
for (let timeslot of timeslotList) {
obj.timeslotList.push(await timeslot.toPublicJSON());
}
if (!Array.isArray(obj.weekdays)) {
obj.weekdays = [];
}
if (!Array.isArray(obj.daysOfMonth)) {
obj.daysOfMonth = [];
}
// Maintenance Status
if (!obj.active) {
obj.status = "inactive";
} else if (obj.strategy === "manual") {
obj.status = "under-maintenance";
} else if (obj.timeslotList.length > 0) {
let currentTimestamp = dayjs().unix();
for (let timeslot of obj.timeslotList) {
if (dayjs.utc(timeslot.startDate).unix() <= currentTimestamp && dayjs.utc(timeslot.endDate).unix() >= currentTimestamp) {
log.debug("timeslot", "Timeslot ID: " + timeslot.id);
log.debug("timeslot", "currentTimestamp:" + currentTimestamp);
log.debug("timeslot", "timeslot.start_date:" + dayjs.utc(timeslot.startDate).unix());
log.debug("timeslot", "timeslot.end_date:" + dayjs.utc(timeslot.endDate).unix());
obj.status = "under-maintenance";
break;
}
}
if (!obj.status) {
obj.status = "scheduled";
}
} else if (obj.timeslotList.length === 0) {
obj.status = "ended";
} else {
obj.status = "unknown";
}
return obj;
}
/**
* Get only future or current timeslots
* @returns {Promise<[]>}
*/
async getTimeslotList() {
return R.convertToBeans("maintenance_timeslot", await R.getAll(`
SELECT maintenance_timeslot.*
FROM maintenance_timeslot, maintenance
WHERE maintenance_timeslot.maintenance_id = maintenance.id
AND maintenance.id = ?
AND ${Maintenance.getActiveAndFutureMaintenanceSQLCondition()}
`, [
this.id
]));
}
/**
* Return an object that is ready to be parsed to JSON
* @param {string} timezone If not specified, the timeRange will be in UTC
* @returns {Object}
*/
async toJSON(timezone = null) {
return this.toPublicJSON(timezone);
}
/**
* Get a list of weekdays that the maintenance is active for
* Monday=1, Tuesday=2 etc.
* @returns {number[]} Array of active weekdays
*/
getDayOfWeekList() {
log.debug("timeslot", "List: " + this.weekdays);
return JSON.parse(this.weekdays).sort(function (a, b) {
return a - b;
});
}
/**
* Get a list of days in month that maintenance is active for
* @returns {number[]} Array of active days in month
*/
getDayOfMonthList() {
return JSON.parse(this.days_of_month).sort(function (a, b) {
return a - b;
});
}
/**
* Get the start date and time for maintenance
* @returns {dayjs.Dayjs} Start date and time
*/
getStartDateTime() {
let startOfTheDay = dayjs.utc(this.start_date).format("HH:mm");
log.debug("timeslot", "startOfTheDay: " + startOfTheDay);
// Start Time
let startTimeSecond = dayjs.utc(this.start_time, "HH:mm").diff(dayjs.utc(startOfTheDay, "HH:mm"), "second");
log.debug("timeslot", "startTime: " + startTimeSecond);
// Bake StartDate + StartTime = Start DateTime
return dayjs.utc(this.start_date).add(startTimeSecond, "second");
}
/**
* Get the duration of the maintenance in seconds
* @returns {number} Duration of maintenance
*/
getDuration() {
let duration = dayjs.utc(this.end_time, "HH:mm").diff(dayjs.utc(this.start_time, "HH:mm"), "second");
// Add 24 hours if the time range crosses midnight
if (duration < 0) {
duration += 24 * 3600;
}
return duration;
}
/**
* Convert data from socket to bean
* @param {Bean} bean Bean to fill in
* @param {Object} obj Data to fill bean with
* @returns {Bean} Filled bean
*/
static jsonToBean(bean, obj) {
if (obj.id) {
bean.id = obj.id;
}
// Apply the timezone offset to timeRange, as it cannot be applied automatically.
if (obj.timeRange[0]) {
timeObjectToUTC(obj.timeRange[0]);
if (obj.timeRange[1]) {
timeObjectToUTC(obj.timeRange[1]);
}
}
bean.title = obj.title;
bean.description = obj.description;
bean.strategy = obj.strategy;
bean.interval_day = obj.intervalDay;
bean.active = obj.active;
if (obj.dateRange[0]) {
bean.start_date = localToUTC(obj.dateRange[0]);
if (obj.dateRange[1]) {
bean.end_date = localToUTC(obj.dateRange[1]);
}
}
bean.start_time = parseTimeFromTimeObject(obj.timeRange[0]);
bean.end_time = parseTimeFromTimeObject(obj.timeRange[1]);
bean.weekdays = JSON.stringify(obj.weekdays);
bean.days_of_month = JSON.stringify(obj.daysOfMonth);
return bean;
}
/**
* SQL conditions for active maintenance
* @returns {string}
*/
static getActiveMaintenanceSQLCondition() {
return `
(
(maintenance_timeslot.start_date <= DATETIME('now')
AND maintenance_timeslot.end_date >= DATETIME('now')
AND maintenance.active = 1)
OR
(maintenance.strategy = 'manual' AND active = 1)
)
`;
}
/**
* SQL conditions for active and future maintenance
* @returns {string}
*/
static getActiveAndFutureMaintenanceSQLCondition() {
return `
(
((maintenance_timeslot.end_date >= DATETIME('now')
AND maintenance.active = 1)
OR
(maintenance.strategy = 'manual' AND active = 1))
)
`;
}
}
module.exports = Maintenance;
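// A sketch of the payload shape jsonToBean() expects from the socket layer, inferred from
// the property accesses above; values are examples, and the { hours, minutes } time-object
// shape is an assumption about what parseTimeObject()/timeObjectToUTC() handle.
const exampleMaintenancePayload = {
    title: "Weekly DB upgrade",
    description: "Primary database maintenance window",
    strategy: "recurring-weekday",   // "manual" | "single" | "recurring-interval" | "recurring-weekday" | "recurring-day-of-month"
    active: true,
    intervalDay: 1,
    dateRange: [ "2023-02-01 00:00:00", "2023-12-31 00:00:00" ],
    timeRange: [ { hours: 2, minutes: 0 }, { hours: 3, minutes: 30 } ],
    weekdays: [ 1, 3, 5 ],           // Monday = 1, per getDayOfWeekList()
    daysOfMonth: [ ],                // or e.g. [ 1, 15, "lastDay1" ]
};
// Maintenance.jsonToBean(R.dispense("maintenance"), exampleMaintenancePayload);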

View File

@ -0,0 +1,198 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const { R } = require("redbean-node");
const dayjs = require("dayjs");
const { log, utcToLocal, SQL_DATETIME_FORMAT_WITHOUT_SECOND, localToUTC } = require("../../src/util");
const { UptimeKumaServer } = require("../uptime-kuma-server");
class MaintenanceTimeslot extends BeanModel {
/**
* Return an object that is ready to be parsed to JSON for the public
* Only show necessary data to the public
* @returns {Object}
*/
async toPublicJSON() {
const serverTimezoneOffset = UptimeKumaServer.getInstance().getTimezoneOffset();
const obj = {
id: this.id,
startDate: this.start_date,
endDate: this.end_date,
startDateServerTimezone: utcToLocal(this.start_date, SQL_DATETIME_FORMAT_WITHOUT_SECOND),
endDateServerTimezone: utcToLocal(this.end_date, SQL_DATETIME_FORMAT_WITHOUT_SECOND),
serverTimezoneOffset,
};
return obj;
}
/**
* Return an object that is ready to be parsed to JSON
* @returns {Object}
*/
async toJSON() {
return await this.toPublicJSON();
}
/**
* @param {Maintenance} maintenance
* @param {dayjs} minDate (For recurring type only) Generate a next timeslot from this date.
* @param {boolean} removeExist Remove existing timeslot before create
* @returns {Promise<MaintenanceTimeslot>}
*/
static async generateTimeslot(maintenance, minDate = null, removeExist = false) {
if (removeExist) {
await R.exec("DELETE FROM maintenance_timeslot WHERE maintenance_id = ? ", [
maintenance.id
]);
}
if (maintenance.strategy === "manual") {
log.debug("maintenance", "No need to generate timeslot for manual type");
} else if (maintenance.strategy === "single") {
let bean = R.dispense("maintenance_timeslot");
bean.maintenance_id = maintenance.id;
bean.start_date = maintenance.start_date;
bean.end_date = maintenance.end_date;
bean.generated_next = true;
return await R.store(bean);
} else if (maintenance.strategy === "recurring-interval") {
// Prevent an infinite loop in case interval_day is not set
if (!maintenance.interval_day || maintenance.interval_day <= 0) {
maintenance.interval_day = 1;
}
return await this.handleRecurringType(maintenance, minDate, (startDateTime) => {
return startDateTime.add(maintenance.interval_day, "day");
}, () => {
return true;
});
} else if (maintenance.strategy === "recurring-weekday") {
let dayOfWeekList = maintenance.getDayOfWeekList();
log.debug("timeslot", dayOfWeekList);
if (dayOfWeekList.length <= 0) {
log.debug("timeslot", "No weekdays selected?");
return null;
}
const isValid = (startDateTime) => {
log.debug("timeslot", "nextDateTime: " + startDateTime);
let day = startDateTime.local().day();
log.debug("timeslot", "nextDateTime.day(): " + day);
return dayOfWeekList.includes(day);
};
return await this.handleRecurringType(maintenance, minDate, (startDateTime) => {
while (true) {
startDateTime = startDateTime.add(1, "day");
if (isValid(startDateTime)) {
return startDateTime;
}
}
}, isValid);
} else if (maintenance.strategy === "recurring-day-of-month") {
let dayOfMonthList = maintenance.getDayOfMonthList();
if (dayOfMonthList.length <= 0) {
log.debug("timeslot", "No day selected?");
return null;
}
const isValid = (startDateTime) => {
let day = parseInt(startDateTime.local().format("D"));
log.debug("timeslot", "day: " + day);
// Check 1-31
if (dayOfMonthList.includes(day)) {
return startDateTime;
}
// Check "lastDay1","lastDay2"...
let daysInMonth = startDateTime.daysInMonth();
let lastDayList = [];
// Smallest first, e.g. 28, 29, 30, 31
for (let i = 4; i >= 1; i--) {
if (dayOfMonthList.includes("lastDay" + i)) {
lastDayList.push(daysInMonth - i + 1);
}
}
log.debug("timeslot", lastDayList);
return lastDayList.includes(day);
};
return await this.handleRecurringType(maintenance, minDate, (startDateTime) => {
while (true) {
startDateTime = startDateTime.add(1, "day");
if (isValid(startDateTime)) {
return startDateTime;
}
}
}, isValid);
} else {
throw new Error("Unknown maintenance strategy");
}
}
/**
* Generate the next timeslot for all recurring types
* @param {Maintenance} maintenance
* @param {dayjs} minDate
* @param {function} nextDayCallback The logic for getting the next possible day
* @param {function} isValidCallback Checks whether the day matches the current strategy
* @returns {Promise<null|MaintenanceTimeslot>}
*/
static async handleRecurringType(maintenance, minDate, nextDayCallback, isValidCallback) {
let bean = R.dispense("maintenance_timeslot");
let duration = maintenance.getDuration();
let startDateTime = maintenance.getStartDateTime();
let endDateTime;
// Keep generating from the first possible date, until it is ok
while (true) {
log.debug("timeslot", "startDateTime: " + startDateTime.format());
// Handling out of effective date range
if (startDateTime.diff(dayjs.utc(maintenance.end_date)) > 0) {
log.debug("timeslot", "Out of effective date range");
return null;
}
endDateTime = startDateTime.add(duration, "second");
// If endDateTime is out of effective date range, use the end datetime from effective date range
if (endDateTime.diff(dayjs.utc(maintenance.end_date)) > 0) {
endDateTime = dayjs.utc(maintenance.end_date);
}
// If minDate is set, the endDateTime must be later than it.
// And the endDateTime must be later than the current time
// Is valid under current recurring strategy
if (
(!minDate || endDateTime.diff(minDate) > 0) &&
endDateTime.diff(dayjs()) > 0 &&
isValidCallback(startDateTime)
) {
break;
}
startDateTime = nextDayCallback(startDateTime);
}
bean.maintenance_id = maintenance.id;
bean.start_date = localToUTC(startDateTime);
bean.end_date = localToUTC(endDateTime);
bean.generated_next = false;
return await R.store(bean);
}
}
module.exports = MaintenanceTimeslot;
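// A simplified sketch of what handleRecurringType() does for the "recurring-interval"
// strategy (ignoring minDate and the effective-date-range clipping): keep advancing by
// interval_day days until the slot ends in the future. Dates and durations are examples.
const exampleDuration = 90 * 60;          // e.g. getDuration() result, in seconds
const exampleIntervalDay = 7;             // e.g. maintenance.interval_day
let exampleStart = dayjs.utc("2023-02-01 02:00:00");   // e.g. getStartDateTime() result
while (exampleStart.add(exampleDuration, "second").diff(dayjs()) <= 0) {
    exampleStart = exampleStart.add(exampleIntervalDay, "day");
}
// exampleStart .. exampleStart.add(exampleDuration, "second") is the next timeslot to store.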

View File

@ -1,13 +1,11 @@
const https = require("https");
const dayjs = require("dayjs");
const utc = require("dayjs/plugin/utc");
let timezone = require("dayjs/plugin/timezone");
dayjs.extend(utc);
dayjs.extend(timezone);
const axios = require("axios");
const { Prometheus } = require("../prometheus");
const { log, UP, DOWN, PENDING, flipStatus, TimeLogger } = require("../../src/util");
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mqttAsync, setSetting, httpNtlm, radius } = require("../util-server");
const { log, UP, DOWN, PENDING, MAINTENANCE, flipStatus, TimeLogger, MAX_INTERVAL_SECOND, MIN_INTERVAL_SECOND } = require("../../src/util");
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mysqlQuery, mqttAsync, setSetting, httpNtlm, radius, grpcQuery,
redisPingAsync, mongodbPing,
} = require("../util-server");
const { R } = require("redbean-node");
const { BeanModel } = require("redbean-node/dist/bean-model");
const { Notification } = require("../notification");
@ -18,12 +16,16 @@ const apicache = require("../modules/apicache");
const { UptimeKumaServer } = require("../uptime-kuma-server");
const { CacheableDnsHttpAgent } = require("../cacheable-dns-http-agent");
const { DockerHost } = require("../docker");
const Maintenance = require("./maintenance");
const { UptimeCacheList } = require("../uptime-cache-list");
const Gamedig = require("gamedig");
/**
* status:
* 0 = DOWN
* 1 = UP
* 2 = PENDING
* 3 = MAINTENANCE
*/
class Monitor extends BeanModel {
@ -85,31 +87,30 @@ class Monitor extends BeanModel {
expiryNotification: this.isEnabledExpiryNotification(),
ignoreTls: this.getIgnoreTls(),
upsideDown: this.isUpsideDown(),
packetSize: this.packetSize,
maxredirects: this.maxredirects,
accepted_statuscodes: this.getAcceptedStatuscodes(),
dns_resolve_type: this.dns_resolve_type,
dns_resolve_server: this.dns_resolve_server,
dns_last_result: this.dns_last_result,
pushToken: this.pushToken,
docker_container: this.docker_container,
docker_host: this.docker_host,
proxyId: this.proxy_id,
notificationIDList,
tags: tags,
mqttUsername: this.mqttUsername,
mqttPassword: this.mqttPassword,
maintenance: await Monitor.isUnderMaintenance(this.id),
mqttTopic: this.mqttTopic,
mqttSuccessMessage: this.mqttSuccessMessage,
databaseConnectionString: this.databaseConnectionString,
databaseQuery: this.databaseQuery,
authMethod: this.authMethod,
authWorkstation: this.authWorkstation,
authDomain: this.authDomain,
radiusUsername: this.radiusUsername,
radiusPassword: this.radiusPassword,
grpcUrl: this.grpcUrl,
grpcProtobuf: this.grpcProtobuf,
grpcMethod: this.grpcMethod,
grpcServiceName: this.grpcServiceName,
grpcEnableTls: this.getGrpcEnableTls(),
radiusCalledStationId: this.radiusCalledStationId,
radiusCallingStationId: this.radiusCallingStationId,
radiusSecret: this.radiusSecret,
game: this.game,
};
if (includeSensitiveData) {
@ -117,12 +118,23 @@ class Monitor extends BeanModel {
...data,
headers: this.headers,
body: this.body,
grpcBody: this.grpcBody,
grpcMetadata: this.grpcMetadata,
basic_auth_user: this.basic_auth_user,
basic_auth_pass: this.basic_auth_pass,
pushToken: this.pushToken,
databaseConnectionString: this.databaseConnectionString,
radiusUsername: this.radiusUsername,
radiusPassword: this.radiusPassword,
radiusSecret: this.radiusSecret,
mqttUsername: this.mqttUsername,
mqttPassword: this.mqttPassword,
authWorkstation: this.authWorkstation,
authDomain: this.authDomain,
};
}
data.includeSensitiveData = includeSensitiveData;
return data;
}
@ -167,6 +179,14 @@ class Monitor extends BeanModel {
return Boolean(this.upsideDown);
}
/**
* Parse to boolean
* @returns {boolean}
*/
getGrpcEnableTls() {
return Boolean(this.grpcEnableTls);
}
/**
* Get accepted status codes
* @returns {Object}
@ -230,7 +250,10 @@ class Monitor extends BeanModel {
}
try {
if (this.type === "http" || this.type === "keyword") {
if (await Monitor.isUnderMaintenance(this.id)) {
bean.msg = "Monitor under maintenance";
bean.status = MAINTENANCE;
} else if (this.type === "http" || this.type === "keyword") {
// Do not run any queries or other heavy operations before "bean.ping" is measured
let startTime = dayjs().valueOf();
@ -249,6 +272,7 @@ class Monitor extends BeanModel {
log.debug("monitor", `[${this.name}] Prepare Options for axios`);
// Axios Options
const options = {
url: this.url,
method: (this.method || "get").toLowerCase(),
@ -287,20 +311,8 @@ class Monitor extends BeanModel {
log.debug("monitor", `[${this.name}] Axios Options: ${JSON.stringify(options)}`);
log.debug("monitor", `[${this.name}] Axios Request`);
let res;
if (this.auth_method === "ntlm") {
options.httpsAgent.keepAlive = true;
res = await httpNtlm(options, {
username: this.basic_auth_user,
password: this.basic_auth_pass,
domain: this.authDomain,
workstation: this.authWorkstation ? this.authWorkstation : undefined
});
} else {
res = await axios.request(options);
}
// Make Request
let res = await this.makeAxiosRequest(options);
bean.msg = `${res.status} - ${res.statusText}`;
bean.ping = dayjs().valueOf() - startTime;
@ -364,7 +376,7 @@ class Monitor extends BeanModel {
bean.status = UP;
} else if (this.type === "ping") {
bean.ping = await ping(this.hostname);
bean.ping = await ping(this.hostname, this.packetSize);
bean.msg = "";
bean.status = UP;
} else if (this.type === "dns") {
@ -474,25 +486,44 @@ class Monitor extends BeanModel {
bean.msg = res.data.response.servers[0].name;
try {
bean.ping = await ping(this.hostname);
bean.ping = await ping(this.hostname, this.packetSize);
} catch (_) { }
} else {
throw new Error("Server not found on Steam");
}
} else if (this.type === "gamedig") {
try {
const state = await Gamedig.query({
type: this.game,
host: this.hostname,
port: this.port,
givenPortOnly: true,
});
bean.msg = state.name;
bean.status = UP;
bean.ping = state.ping;
} catch (e) {
throw new Error(e.message);
}
} else if (this.type === "docker") {
log.debug(`[${this.name}] Prepare Options for Axios`);
log.debug("monitor", `[${this.name}] Prepare Options for Axios`);
const dockerHost = await R.load("docker_host", this.docker_host);
const options = {
url: `/containers/${this.docker_container}/json`,
timeout: this.interval * 1000 * 0.8,
headers: {
"Accept": "*/*",
"User-Agent": "Uptime-Kuma/" + version,
},
httpsAgent: new https.Agent({
httpsAgent: CacheableDnsHttpAgent.getHttpsAgent({
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
rejectUnauthorized: ! this.getIgnoreTls(),
rejectUnauthorized: !this.getIgnoreTls(),
}),
httpAgent: CacheableDnsHttpAgent.getHttpAgent({
maxCachedSessions: 0,
}),
};
@ -502,11 +533,13 @@ class Monitor extends BeanModel {
options.baseURL = DockerHost.patchDockerURL(dockerHost._dockerDaemon);
}
log.debug(`[${this.name}] Axios Request`);
log.debug("monitor", `[${this.name}] Axios Request`);
let res = await axios.request(options);
if (res.data.State.Running) {
bean.status = UP;
bean.msg = "";
bean.msg = res.data.State.Status;
} else {
throw Error("Container State is " + res.data.State.Status);
}
} else if (this.type === "mqtt") {
bean.msg = await mqttAsync(this.hostname, this.mqttTopic, this.mqttSuccessMessage, {
@ -524,6 +557,37 @@ class Monitor extends BeanModel {
bean.msg = "";
bean.status = UP;
bean.ping = dayjs().valueOf() - startTime;
} else if (this.type === "grpc-keyword") {
let startTime = dayjs().valueOf();
const options = {
grpcUrl: this.grpcUrl,
grpcProtobufData: this.grpcProtobuf,
grpcServiceName: this.grpcServiceName,
grpcEnableTls: this.grpcEnableTls,
grpcMethod: this.grpcMethod,
grpcBody: this.grpcBody,
keyword: this.keyword
};
const response = await grpcQuery(options);
bean.ping = dayjs().valueOf() - startTime;
log.debug("monitor:", `gRPC response: ${JSON.stringify(response)}`);
let responseData = response.data;
if (responseData.length > 50) {
responseData = responseData.toString().substring(0, 47) + "...";
}
if (response.code !== 1) {
bean.status = DOWN;
bean.msg = `Error sending gRPC request: ${response.code} ${response.errorMessage}`;
} else {
if (response.data.toString().includes(this.keyword)) {
bean.status = UP;
bean.msg = `${responseData}, keyword [${this.keyword}] is found`;
} else {
log.debug("monitor:", `GRPC response [${response.data}] + ", but keyword [${this.keyword}] is not in [" + ${response.data} + "]"`);
bean.status = DOWN;
bean.msg = `, but keyword [${this.keyword}] is not in [" + ${responseData} + "]`;
}
}
} else if (this.type === "postgres") {
let startTime = dayjs().valueOf();
@ -532,8 +596,36 @@ class Monitor extends BeanModel {
bean.msg = "";
bean.status = UP;
bean.ping = dayjs().valueOf() - startTime;
} else if (this.type === "mysql") {
let startTime = dayjs().valueOf();
await mysqlQuery(this.databaseConnectionString, this.databaseQuery);
bean.msg = "";
bean.status = UP;
bean.ping = dayjs().valueOf() - startTime;
} else if (this.type === "mongodb") {
let startTime = dayjs().valueOf();
await mongodbPing(this.databaseConnectionString);
bean.msg = "";
bean.status = UP;
bean.ping = dayjs().valueOf() - startTime;
} else if (this.type === "radius") {
let startTime = dayjs().valueOf();
// Handle monitors that were created before the
// update and as such don't have a value for
// this.port.
let port;
if (this.port == null) {
port = 1812;
} else {
port = this.port;
}
try {
const resp = await radius(
this.hostname,
@ -541,7 +633,8 @@ class Monitor extends BeanModel {
this.radiusPassword,
this.radiusCalledStationId,
this.radiusCallingStationId,
this.radiusSecret
this.radiusSecret,
port
);
if (resp.code) {
bean.msg = resp.code;
@ -556,9 +649,23 @@ class Monitor extends BeanModel {
}
}
bean.ping = dayjs().valueOf() - startTime;
} else if (this.type === "redis") {
let startTime = dayjs().valueOf();
bean.msg = await redisPingAsync(this.databaseConnectionString);
bean.status = UP;
bean.ping = dayjs().valueOf() - startTime;
} else if (this.type in UptimeKumaServer.monitorTypeList) {
let startTime = dayjs().valueOf();
const monitorType = UptimeKumaServer.monitorTypeList[this.type];
await monitorType.check(this, bean);
if (!bean.ping) {
bean.ping = dayjs().valueOf() - startTime;
}
} else {
bean.msg = "Unknown Monitor Type";
bean.status = PENDING;
throw new Error("Unknown Monitor Type");
}
if (this.isUpsideDown()) {
@ -594,8 +701,12 @@ class Monitor extends BeanModel {
if (isImportant) {
bean.important = true;
log.debug("monitor", `[${this.name}] sendNotification`);
await Monitor.sendNotification(isFirstBeat, this, bean);
if (Monitor.isImportantForNotification(isFirstBeat, previousBeat?.status, bean.status)) {
log.debug("monitor", `[${this.name}] sendNotification`);
await Monitor.sendNotification(isFirstBeat, this, bean);
} else {
log.debug("monitor", `[${this.name}] will not sendNotification because it is (or was) under maintenance`);
}
// Reset down count
bean.downCount = 0;
@ -604,6 +715,8 @@ class Monitor extends BeanModel {
log.debug("monitor", `[${this.name}] apicache clear`);
apicache.clear();
UptimeKumaServer.getInstance().sendMaintenanceListByUserID(this.user_id);
} else {
bean.important = false;
@ -627,11 +740,14 @@ class Monitor extends BeanModel {
beatInterval = this.retryInterval;
}
log.warn("monitor", `Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Retry: ${retries} | Retry Interval: ${beatInterval} seconds | Type: ${this.type}`);
} else if (bean.status === MAINTENANCE) {
log.warn("monitor", `Monitor #${this.id} '${this.name}': Under Maintenance | Type: ${this.type}`);
} else {
log.warn("monitor", `Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type} | Down Count: ${bean.downCount} | Resend Interval: ${this.resendInterval}`);
}
log.debug("monitor", `[${this.name}] Send to socket`);
UptimeCacheList.clearCache(this.id);
io.to(this.user_id).emit("heartbeat", bean.toJSON());
Monitor.sendStats(io, this.id, this.user_id);
@ -678,6 +794,47 @@ class Monitor extends BeanModel {
}
}
/**
* Make a request using axios
* @param {Object} options Options for Axios
* @param {boolean} finalCall Should this be the final call, i.e.
* don't retry on failure
* @returns {Object} Axios response
*/
async makeAxiosRequest(options, finalCall = false) {
try {
let res;
if (this.auth_method === "ntlm") {
options.httpsAgent.keepAlive = true;
res = await httpNtlm(options, {
username: this.basic_auth_user,
password: this.basic_auth_pass,
domain: this.authDomain,
workstation: this.authWorkstation ? this.authWorkstation : undefined
});
} else {
res = await axios.request(options);
}
return res;
} catch (e) {
// Fix #2253
// Read more: https://stackoverflow.com/questions/1759956/curl-error-18-transfer-closed-with-outstanding-read-data-remaining
if (!finalCall && typeof e.message === "string" && e.message.includes("maxContentLength size of -1 exceeded")) {
log.debug("monitor", "makeAxiosRequest with gzip");
options.headers["Accept-Encoding"] = "gzip, deflate";
return this.makeAxiosRequest(options, true);
} else {
if (typeof e.message === "string" && e.message.includes("maxContentLength size of -1 exceeded")) {
e.message = "response timeout: incomplete response within a interval";
}
throw e;
}
}
}
/** Stop monitor */
stop() {
clearTimeout(this.heartbeatInterval);
@ -816,7 +973,15 @@ class Monitor extends BeanModel {
* @param {number} duration Hours
* @param {number} monitorID ID of monitor to calculate
* @param {boolean} forceNoCache Recalculate even if a cached uptime value exists
*/
static async calcUptime(duration, monitorID) {
static async calcUptime(duration, monitorID, forceNoCache = false) {
if (!forceNoCache) {
let cachedUptime = UptimeCacheList.getUptime(monitorID, duration);
if (cachedUptime != null) {
return cachedUptime;
}
}
const timeLogger = new TimeLogger();
const startTime = R.isoDateTime(dayjs.utc().subtract(duration, "hour"));
@ -837,7 +1002,7 @@ class Monitor extends BeanModel {
-- SUM all uptime duration, also trim off the beat out of time window
SUM(
CASE
WHEN (status = 1)
WHEN (status = 1 OR status = 3)
THEN
CASE
WHEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400 < duration
@ -875,6 +1040,9 @@ class Monitor extends BeanModel {
}
}
// Cache
UptimeCacheList.addUptime(monitorID, duration, uptime);
return uptime;
}
@ -908,11 +1076,49 @@ class Monitor extends BeanModel {
// DOWN -> PENDING = this case not exists
// DOWN -> DOWN = not important
// * DOWN -> UP = important
let isImportant = isFirstBeat ||
// MAINTENANCE -> MAINTENANCE = not important
// * MAINTENANCE -> UP = important
// * MAINTENANCE -> DOWN = important
// * DOWN -> MAINTENANCE = important
// * UP -> MAINTENANCE = important
return isFirstBeat ||
(previousBeatStatus === DOWN && currentBeatStatus === MAINTENANCE) ||
(previousBeatStatus === UP && currentBeatStatus === MAINTENANCE) ||
(previousBeatStatus === MAINTENANCE && currentBeatStatus === DOWN) ||
(previousBeatStatus === MAINTENANCE && currentBeatStatus === UP) ||
(previousBeatStatus === UP && currentBeatStatus === DOWN) ||
(previousBeatStatus === DOWN && currentBeatStatus === UP) ||
(previousBeatStatus === PENDING && currentBeatStatus === DOWN);
}
/**
* Is this beat important for notifications?
* @param {boolean} isFirstBeat Is this the first beat of this monitor?
* @param {const} previousBeatStatus Status of the previous beat
* @param {const} currentBeatStatus Status of the current beat
* @returns {boolean} True if is an important beat else false
*/
static isImportantForNotification(isFirstBeat, previousBeatStatus, currentBeatStatus) {
// * ? -> ANY STATUS = important [isFirstBeat]
// UP -> PENDING = not important
// * UP -> DOWN = important
// UP -> UP = not important
// PENDING -> PENDING = not important
// * PENDING -> DOWN = important
// PENDING -> UP = not important
// DOWN -> PENDING = this case not exists
// DOWN -> DOWN = not important
// * DOWN -> UP = important
// MAINTENANCE -> MAINTENANCE = not important
// MAINTENANCE -> UP = not important
// * MAINTENANCE -> DOWN = important
// DOWN -> MAINTENANCE = not important
// UP -> MAINTENANCE = not important
return isFirstBeat ||
(previousBeatStatus === MAINTENANCE && currentBeatStatus === DOWN) ||
(previousBeatStatus === UP && currentBeatStatus === DOWN) ||
(previousBeatStatus === DOWN && currentBeatStatus === UP) ||
(previousBeatStatus === PENDING && currentBeatStatus === DOWN);
return isImportant;
}
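// A quick sketch of the difference between the two checks above, using the status
// constants from src/util (first argument "false" = not the first beat):
//     Monitor.isImportantBeat(false, MAINTENANCE, UP);            // true  -> the beat is marked important
//     Monitor.isImportantForNotification(false, MAINTENANCE, UP); // false -> no notification is sent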
/**
@ -936,7 +1142,13 @@ class Monitor extends BeanModel {
for (let notification of notificationList) {
try {
await Notification.send(JSON.parse(notification.config), msg, await monitor.toJSON(false), bean.toJSON());
// If the msg is undefined, notifications such as Discord cannot send it out, so fall back to "N/A".
const heartbeatJSON = bean.toJSON();
if (!heartbeatJSON["msg"]) {
heartbeatJSON["msg"] = "N/A";
}
await Notification.send(JSON.parse(notification.config), msg, await monitor.toJSON(false), heartbeatJSON);
} catch (e) {
log.error("monitor", "Cannot send notification to " + notification.name);
log.error("monitor", e);
@ -1043,12 +1255,42 @@ class Monitor extends BeanModel {
*/
static async getPreviousHeartbeat(monitorID) {
return await R.getRow(`
SELECT status, time FROM heartbeat
SELECT ping, status, time FROM heartbeat
WHERE id = (select MAX(id) from heartbeat where monitor_id = ?)
`, [
monitorID
]);
}
/**
* Check if monitor is under maintenance
* @param {number} monitorID ID of monitor to check
* @returns {Promise<boolean>}
*/
static async isUnderMaintenance(monitorID) {
let activeCondition = Maintenance.getActiveMaintenanceSQLCondition();
const maintenance = await R.getRow(`
SELECT COUNT(*) AS count
FROM monitor_maintenance mm
JOIN maintenance
ON mm.maintenance_id = maintenance.id
AND mm.monitor_id = ?
LEFT JOIN maintenance_timeslot
ON maintenance_timeslot.maintenance_id = maintenance.id
WHERE ${activeCondition}
LIMIT 1`, [ monitorID ]);
return maintenance.count !== 0;
}
/** Make sure monitor interval is between bounds */
validate() {
if (this.interval > MAX_INTERVAL_SECOND) {
throw new Error(`Interval cannot be more than ${MAX_INTERVAL_SECOND} seconds`);
}
if (this.interval < MIN_INTERVAL_SECOND) {
throw new Error(`Interval cannot be less than ${MIN_INTERVAL_SECOND} seconds`);
}
}
}
module.exports = Monitor;

View File

@ -2,6 +2,8 @@ const { BeanModel } = require("redbean-node/dist/bean-model");
const { R } = require("redbean-node");
const cheerio = require("cheerio");
const { UptimeKumaServer } = require("../uptime-kuma-server");
const jsesc = require("jsesc");
const Maintenance = require("./maintenance");
class StatusPage extends BeanModel {
@ -36,7 +38,7 @@ class StatusPage extends BeanModel {
*/
static async renderHTML(indexHTML, statusPage) {
const $ = cheerio.load(indexHTML);
const description155 = statusPage.description?.substring(0, 155);
const description155 = statusPage.description?.substring(0, 155) ?? "";
$("title").text(statusPage.title);
$("meta[name=description]").attr("content", description155);
@ -56,13 +58,19 @@ class StatusPage extends BeanModel {
head.append(`<meta property="og:description" content="${description155}" />`);
// Preload data
const json = JSON.stringify(await StatusPage.getStatusPageData(statusPage));
head.append(`
<script>
window.preloadData = ${json}
// Add jsesc, fix https://github.com/louislam/uptime-kuma/issues/2186
const escapedJSONObject = jsesc(await StatusPage.getStatusPageData(statusPage), {
"isScriptContext": true
});
const script = $(`
<script id="preload-data" data-json="{}">
window.preloadData = ${escapedJSONObject};
</script>
`);
head.append(script);
// manifest.json
$("link[rel=manifest]").attr("href", `/api/status-page/${statusPage.slug}/manifest.json`);
@ -83,6 +91,8 @@ class StatusPage extends BeanModel {
incident = incident.toPublicJSON();
}
let maintenanceList = await StatusPage.getMaintenanceList(statusPage.id);
// Public Group List
const publicGroupList = [];
const showTags = !!statusPage.show_tags;
@ -100,7 +110,8 @@ class StatusPage extends BeanModel {
return {
config: await statusPage.toPublicJSON(),
incident,
publicGroupList
publicGroupList,
maintenanceList,
};
}
@ -259,6 +270,38 @@ class StatusPage extends BeanModel {
}
}
/**
* Get list of maintenances
* @param {number} statusPageId ID of status page to get maintenance for
* @returns {Object} Object representing maintenances sanitized for public
*/
static async getMaintenanceList(statusPageId) {
try {
const publicMaintenanceList = [];
let activeCondition = Maintenance.getActiveMaintenanceSQLCondition();
let maintenanceBeanList = R.convertToBeans("maintenance", await R.getAll(`
SELECT DISTINCT maintenance.*
FROM maintenance
JOIN maintenance_status_page
ON maintenance_status_page.maintenance_id = maintenance.id
AND maintenance_status_page.status_page_id = ?
LEFT JOIN maintenance_timeslot
ON maintenance_timeslot.maintenance_id = maintenance.id
WHERE ${activeCondition}
ORDER BY maintenance.end_date
`, [ statusPageId ]));
for (const bean of maintenanceBeanList) {
publicMaintenanceList.push(await bean.toPublicJSON());
}
return publicMaintenanceList;
} catch (error) {
return [];
}
}
}
module.exports = StatusPage;

View File

@ -0,0 +1,20 @@
import { PluginFunc, ConfigType } from 'dayjs'
declare const plugin: PluginFunc
export = plugin
declare module 'dayjs' {
interface Dayjs {
tz(timezone?: string, keepLocalTime?: boolean): Dayjs
offsetName(type?: 'short' | 'long'): string | undefined
}
interface DayjsTimezone {
(date: ConfigType, timezone?: string): Dayjs
(date: ConfigType, format: string, timezone?: string): Dayjs
guess(): string
setDefault(timezone?: string): void
}
const tz: DayjsTimezone
}

View File

@ -0,0 +1,115 @@
/**
* Copy from node_modules/dayjs/plugin/timezone.js
* Try to fix https://github.com/louislam/uptime-kuma/issues/2318
* Source: https://github.com/iamkun/dayjs/tree/dev/src/plugin/utc
* License: MIT
*/
!function (t, e) {
// eslint-disable-next-line no-undef
typeof exports == "object" && typeof module != "undefined" ? module.exports = e() : typeof define == "function" && define.amd ? define(e) : (t = typeof globalThis != "undefined" ? globalThis : t || self).dayjs_plugin_timezone = e();
}(this, (function () {
"use strict";
let t = {
year: 0,
month: 1,
day: 2,
hour: 3,
minute: 4,
second: 5
};
let e = {};
return function (n, i, o) {
let r;
let a = function (t, n, i) {
void 0 === i && (i = {});
let o = new Date(t);
let r = function (t, n) {
void 0 === n && (n = {});
let i = n.timeZoneName || "short";
let o = t + "|" + i;
let r = e[o];
return r || (r = new Intl.DateTimeFormat("en-US", {
hour12: !1,
timeZone: t,
year: "numeric",
month: "2-digit",
day: "2-digit",
hour: "2-digit",
minute: "2-digit",
second: "2-digit",
timeZoneName: i
}), e[o] = r), r;
}(n, i);
return r.formatToParts(o);
};
let u = function (e, n) {
let i = a(e, n);
let r = [];
let u = 0;
for (; u < i.length; u += 1) {
let f = i[u];
let s = f.type;
let m = f.value;
let c = t[s];
c >= 0 && (r[c] = parseInt(m, 10));
}
let d = r[3];
let l = d === 24 ? 0 : d;
let v = r[0] + "-" + r[1] + "-" + r[2] + " " + l + ":" + r[4] + ":" + r[5] + ":000";
let h = +e;
return (o.utc(v).valueOf() - (h -= h % 1e3)) / 6e4;
};
let f = i.prototype;
f.tz = function (t, e) {
void 0 === t && (t = r);
let n = this.utcOffset();
let i = this.toDate();
let a = i.toLocaleString("en-US", { timeZone: t }).replace("\u202f", " ");
let u = Math.round((i - new Date(a)) / 1e3 / 60);
let f = o(a).$set("millisecond", this.$ms).utcOffset(15 * -Math.round(i.getTimezoneOffset() / 15) - u, !0);
if (e) {
let s = f.utcOffset();
f = f.add(n - s, "minute");
}
return f.$x.$timezone = t, f;
}, f.offsetName = function (t) {
let e = this.$x.$timezone || o.tz.guess();
let n = a(this.valueOf(), e, { timeZoneName: t }).find((function (t) {
return t.type.toLowerCase() === "timezonename";
}));
return n && n.value;
};
let s = f.startOf;
f.startOf = function (t, e) {
if (!this.$x || !this.$x.$timezone) {
return s.call(this, t, e);
}
let n = o(this.format("YYYY-MM-DD HH:mm:ss:SSS"));
return s.call(n, t, e).tz(this.$x.$timezone, !0);
}, o.tz = function (t, e, n) {
let i = n && e;
let a = n || e || r;
let f = u(+o(), a);
if (typeof t != "string") {
return o(t).tz(a);
}
let s = function (t, e, n) {
let i = t - 60 * e * 1e3;
let o = u(i, n);
if (e === o) {
return [ i, e ];
}
let r = u(i -= 60 * (o - e) * 1e3, n);
return o === r ? [ i, o ] : [ t - 60 * Math.min(o, r) * 1e3, Math.max(o, r) ];
}(o.utc(t, i).valueOf(), f, a);
let m = s[0];
let c = s[1];
let d = o(m).utcOffset(c);
return d.$x.$timezone = a, d;
}, o.tz.guess = function () {
return Intl.DateTimeFormat().resolvedOptions().timeZone;
}, o.tz.setDefault = function (t) {
r = t;
};
};
}));
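// Usage sketch for the patched plugin above; the require path for this copy is an
// assumption (dayjs/plugin/utc is the stock plugin).
const dayjs = require("dayjs");
dayjs.extend(require("dayjs/plugin/utc"));
dayjs.extend(require("./timezone"));   // this patched copy instead of dayjs/plugin/timezone
console.log(dayjs.tz.guess());                                   // e.g. "Asia/Hong_Kong"
console.log(dayjs.utc("2023-02-05 18:31").tz("UTC").format());   // convert into a named timezone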

View File

@ -0,0 +1,19 @@
class MonitorType {
name = undefined;
/**
* Perform the check for the given monitor and store the result on the heartbeat bean
* @param {Monitor} monitor
* @param {Heartbeat} heartbeat
* @returns {Promise<void>}
*/
async check(monitor, heartbeat) {
throw new Error("You need to override check()");
}
}
module.exports = {
MonitorType,
};
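// A hypothetical subclass sketch showing how the dispatch in server/model/monitor.js
// ("this.type in UptimeKumaServer.monitorTypeList") is meant to consume this base class.
// The "example" type name and the require path are assumptions, not existing code.
const { MonitorType } = require("./monitor-type");

class ExampleMonitorType extends MonitorType {
    name = "example";

    async check(monitor, heartbeat) {
        // Probe the target here, then report the result on the heartbeat bean using the
        // same fields the built-in types set: msg, status and (optionally) ping.
        heartbeat.msg = "example check passed";
        heartbeat.status = 1;   // UP, per the status constants in src/util
    }
}

module.exports = { ExampleMonitorType };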

View File

@ -8,7 +8,6 @@ class ClickSendSMS extends NotificationProvider {
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
console.log({ notification });
let config = {
headers: {
"Content-Type": "application/json",

View File

@ -64,7 +64,7 @@ class Discord extends NotificationProvider {
},
{
name: "Error",
value: heartbeatJSON["msg"],
value: heartbeatJSON["msg"] == null ? "N/A" : heartbeatJSON["msg"],
},
],
}],
@ -91,7 +91,7 @@ class Discord extends NotificationProvider {
},
{
name: monitorJSON["type"] === "push" ? "Service Type" : "Service URL",
value: monitorJSON["type"] === "push" ? "Heartbeat" : address.startsWith("http") ? "[Visit Service](" + address + ")" : address,
value: monitorJSON["type"] === "push" ? "Heartbeat" : address,
},
{
name: "Time (UTC)",

View File

@ -0,0 +1,24 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
class FreeMobile extends NotificationProvider {
name = "FreeMobile";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
await axios.post(`https://smsapi.free-mobile.fr/sendmsg?msg=${encodeURIComponent(msg.replace("🔴", "⛔️"))}`, {
"user": notification.freemobileUser,
"pass": notification.freemobilePass,
});
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = FreeMobile;

View File

@ -0,0 +1,31 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
class Kook extends NotificationProvider {
name = "Kook";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
let url = "https://www.kookapp.cn/api/v3/message/create";
let data = {
target_id: notification.kookGuildID,
content: msg,
};
let config = {
headers: {
"Authorization": "Bot " + notification.kookBotToken,
"Content-Type": "application/json",
},
};
try {
await axios.post(url, data, config);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = Kook;

View File

@ -9,7 +9,7 @@ class Ntfy extends NotificationProvider {
let okMsg = "Sent Successfully.";
try {
let headers = {};
if (notification.ntfyusername.length > 0) {
if (notification.ntfyusername) {
headers = {
"Authorization": "Basic " + Buffer.from(notification.ntfyusername + ":" + notification.ntfypassword).toString("base64"),
};
@ -20,6 +20,11 @@ class Ntfy extends NotificationProvider {
"priority": notification.ntfyPriority || 4,
"title": "Uptime-Kuma",
};
if (notification.ntfyIcon) {
data.icon = notification.ntfyIcon;
}
await axios.post(`${notification.ntfyserverurl}`, data, { headers: headers });
return okMsg;

View File

@ -8,6 +8,14 @@ class PromoSMS extends NotificationProvider {
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
if (notification.promosmsAllowLongSMS === undefined) {
notification.promosmsAllowLongSMS = false;
}
//TODO: Add an option for enabling special characters. It will decrease the maximum message length from 160 to 70 chars.
//Let's remove non-ASCII characters
let cleanMsg = msg.replace(/[^\x00-\x7F]/g, "");
try {
let config = {
headers: {
@ -18,8 +26,9 @@ class PromoSMS extends NotificationProvider {
};
let data = {
"recipients": [ notification.promosmsPhoneNumber ],
//Lets remove non ascii char
"text": msg.replace(/[^\x00-\x7F]/g, ""),
//Trim the message to the maximum length of 1 SMS, or 4 SMS if long messages are allowed
"text": notification.promosmsAllowLongSMS ? cleanMsg.substring(0, 639) : cleanMsg.substring(0, 159),
"long-sms": notification.promosmsAllowLongSMS,
"type": Number(notification.promosmsSMSType),
"sender": notification.promosmsSenderName
};

View File

@ -19,26 +19,26 @@ class Pushbullet extends NotificationProvider {
}
};
if (heartbeatJSON == null) {
let testdata = {
let data = {
"type": "note",
"title": "Uptime Kuma Alert",
"body": "Testing Successful.",
"body": msg,
};
await axios.post(pushbulletUrl, testdata, config);
await axios.post(pushbulletUrl, data, config);
} else if (heartbeatJSON["status"] === DOWN) {
let downdata = {
let downData = {
"type": "note",
"title": "UptimeKuma Alert: " + monitorJSON["name"],
"body": "[🔴 Down] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
};
await axios.post(pushbulletUrl, downdata, config);
await axios.post(pushbulletUrl, downData, config);
} else if (heartbeatJSON["status"] === UP) {
let updata = {
let upData = {
"type": "note",
"title": "UptimeKuma Alert: " + monitorJSON["name"],
"body": "[✅ Up] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
};
await axios.post(pushbulletUrl, updata, config);
await axios.post(pushbulletUrl, upData, config);
}
return okMsg;
} catch (error) {

View File

@ -10,7 +10,7 @@ class Pushover extends NotificationProvider {
let pushoverlink = "https://api.pushover.net/1/messages.json";
let data = {
"message": "<b>Uptime Kuma Alert</b>\n\n<b>Message</b>:" + msg,
"message": msg,
"user": notification.pushoveruserkey,
"token": notification.pushoverapptoken,
"sound": notification.pushoversounds,

View File

@ -21,6 +21,12 @@ class ServerChan extends NotificationProvider {
}
}
/**
* Get the formatted title for message
* @param {?Object} monitorJSON Monitor details (For Up/Down only)
* @param {?Object} heartbeatJSON Heartbeat details (For Up/Down only)
* @returns {string} Formatted title
*/
checkStatus(heartbeatJSON, monitorJSON) {
let title = "UptimeKuma Message";
if (heartbeatJSON != null && heartbeatJSON["status"] === UP) {

View File

@ -1,7 +1,7 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { setSettings, setting } = require("../util-server");
const { getMonitorRelativeURL } = require("../../src/util");
const { getMonitorRelativeURL, UP } = require("../../src/util");
class Slack extends NotificationProvider {
@ -46,24 +46,31 @@ class Slack extends NotificationProvider {
"channel": notification.slackchannel,
"username": notification.slackusername,
"icon_emoji": notification.slackiconemo,
"blocks": [{
"type": "header",
"text": {
"type": "plain_text",
"text": "Uptime Kuma Alert",
},
},
{
"type": "section",
"fields": [{
"type": "mrkdwn",
"text": "*Message*\n" + msg,
},
"attachments": [
{
"type": "mrkdwn",
"text": "*Time (UTC)*\n" + time,
}],
}],
"color": (heartbeatJSON["status"] === UP) ? "#2eb886" : "#e01e5a",
"blocks": [
{
"type": "header",
"text": {
"type": "plain_text",
"text": "Uptime Kuma Alert",
},
},
{
"type": "section",
"fields": [{
"type": "mrkdwn",
"text": "*Message*\n" + msg,
},
{
"type": "mrkdwn",
"text": "*Time (UTC)*\n" + time,
}],
}
],
}
]
};
if (notification.slackbutton) {
@ -74,17 +81,19 @@ class Slack extends NotificationProvider {
// Button
if (baseURL) {
data.blocks.push({
"type": "actions",
"elements": [{
"type": "button",
"text": {
"type": "plain_text",
"text": "Visit Uptime Kuma",
},
"value": "Uptime-Kuma",
"url": baseURL + getMonitorRelativeURL(monitorJSON.id),
}],
data.attachments.forEach(element => {
element.blocks.push({
"type": "actions",
"elements": [{
"type": "button",
"text": {
"type": "plain_text",
"text": "Visit Uptime Kuma",
},
"value": "Uptime-Kuma",
"url": baseURL + getMonitorRelativeURL(monitorJSON.id),
}],
});
});
}

View File

@ -0,0 +1,71 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
class SMSEagle extends NotificationProvider {
name = "SMSEagle";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
let config = {
headers: {
"Content-Type": "application/json",
}
};
let postData;
let sendMethod;
let recipientType;
let encoding = (notification.smseagleEncoding) ? "1" : "0";
let priority = (notification.smseaglePriority) ? notification.smseaglePriority : "0";
if (notification.smseagleRecipientType === "smseagle-contact") {
recipientType = "contactname";
sendMethod = "sms.send_tocontact";
}
if (notification.smseagleRecipientType === "smseagle-group") {
recipientType = "groupname";
sendMethod = "sms.send_togroup";
}
if (notification.smseagleRecipientType === "smseagle-to") {
recipientType = "to";
sendMethod = "sms.send_sms";
}
let params = {
access_token: notification.smseagleToken,
[recipientType]: notification.smseagleRecipient,
message: msg,
responsetype: "extended",
unicode: encoding,
highpriority: priority
};
postData = {
method: sendMethod,
params: params
};
let resp = await axios.post(notification.smseagleUrl + "/jsonrpc/sms", postData, config);
if ((JSON.stringify(resp.data)).indexOf("message_id") === -1) {
let error = "";
if (resp.data.result && resp.data.result.error_text) {
error = `SMSEagle API returned error: ${JSON.stringify(resp.data.result.error_text)}`;
} else {
error = "SMSEagle API returned an unexpected response";
}
throw new Error(error);
}
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = SMSEagle;

View File

@ -0,0 +1,113 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { UP, DOWN, getMonitorRelativeURL } = require("../../src/util");
const { setting } = require("../util-server");
let successMessage = "Sent Successfully.";
class Splunk extends NotificationProvider {
name = "Splunk";
/**
* @inheritdoc
*/
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
try {
if (heartbeatJSON == null) {
const title = "Uptime Kuma Alert";
const monitor = {
type: "ping",
url: "Uptime Kuma Test Button",
};
return this.postNotification(notification, title, msg, monitor, "trigger");
}
if (heartbeatJSON.status === UP) {
const title = "Uptime Kuma Monitor ✅ Up";
return this.postNotification(notification, title, heartbeatJSON.msg, monitorJSON, "recovery");
}
if (heartbeatJSON.status === DOWN) {
const title = "Uptime Kuma Monitor 🔴 Down";
return this.postNotification(notification, title, heartbeatJSON.msg, monitorJSON, "trigger");
}
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
/**
* Check if result is successful, result code should be in range 2xx
* @param {Object} result Axios response object
* @throws {Error} The status code is not in range 2xx
*/
checkResult(result) {
if (result.status == null) {
throw new Error("Splunk notification failed with invalid response!");
}
if (result.status < 200 || result.status >= 300) {
throw new Error("Splunk notification failed with status code " + result.status);
}
}
/**
* Send the message
* @param {BeanModel} notification Notification to send
* @param {string} title Message title
* @param {string} body Message
* @param {Object} monitorInfo Monitor details (For Up/Down only)
* @param {?string} eventAction Action event for Splunk (trigger, recovery)
* @returns {string}
*/
async postNotification(notification, title, body, monitorInfo, eventAction = "trigger") {
let monitorUrl;
if (monitorInfo.type === "port") {
monitorUrl = monitorInfo.hostname;
if (monitorInfo.port) {
monitorUrl += ":" + monitorInfo.port;
}
} else if (monitorInfo.hostname != null) {
monitorUrl = monitorInfo.hostname;
} else {
monitorUrl = monitorInfo.url;
}
if (eventAction === "recovery") {
if (notification.splunkAutoResolve === "0") {
return "No action required";
}
eventAction = notification.splunkAutoResolve;
} else {
eventAction = notification.splunkSeverity;
}
const options = {
method: "POST",
url: notification.splunkRestURL,
headers: { "Content-Type": "application/json" },
data: {
message_type: eventAction,
state_message: `[${title}] [${monitorUrl}] ${body}`,
entity_display_name: "Uptime Kuma Alert: " + monitorInfo.name,
routing_key: notification.pagerdutyIntegrationKey,
entity_id: "Uptime Kuma/" + monitorInfo.id,
}
};
const baseURL = await setting("primaryBaseURL");
if (baseURL && monitorInfo) {
options.client = "Uptime Kuma";
options.client_url = baseURL + getMonitorRelativeURL(monitorInfo.id);
}
let result = await axios.request(options);
this.checkResult(result);
if (result.statusText != null) {
return "Splunk notification succeed: " + result.statusText;
}
return successMessage;
}
}
module.exports = Splunk;

View File

@ -16,20 +16,29 @@ class Webhook extends NotificationProvider {
msg,
};
let finalData;
let config = {};
let config = {
headers: {}
};
if (notification.webhookContentType === "form-data") {
finalData = new FormData();
finalData.append("data", JSON.stringify(data));
config = {
headers: finalData.getHeaders(),
};
config.headers = finalData.getHeaders();
} else {
finalData = data;
}
if (notification.webhookAdditionalHeaders) {
try {
config.headers = {
...config.headers,
...JSON.parse(notification.webhookAdditionalHeaders)
};
} catch (err) {
throw new Error("Additional Headers is not valid JSON");
}
}
await axios.post(notification.webhookURL, finalData, config);
return okMsg;

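As a minimal sketch of how the header merge above plays out (the URL, token, and payload are hypothetical): the parsed additional headers are spread over whatever the form-data encoder already set, so custom entries win on name collisions.

const axios = require("axios");
const FormData = require("form-data");

async function demo() {
    // Hypothetical notification settings.
    const notification = {
        webhookURL: "https://hooks.example.com/uptime-kuma",
        webhookContentType: "form-data",
        webhookAdditionalHeaders: "{\"Authorization\": \"Bearer abc123\"}",
    };

    const finalData = new FormData();
    finalData.append("data", JSON.stringify({ heartbeatJSON: null, monitorJSON: null, msg: "Test" }));

    let config = { headers: finalData.getHeaders() };                 // multipart boundary header
    config.headers = { ...config.headers, ...JSON.parse(notification.webhookAdditionalHeaders) };

    await axios.post(notification.webhookURL, finalData, config);     // extra headers ride along
}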
View File

@ -0,0 +1,116 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { DOWN, UP } = require("../../src/util");
class ZohoCliq extends NotificationProvider {
name = "ZohoCliq";
/**
* Generate the message to send
* @param {number} status The status constant (UP or DOWN)
* @param {string} monitorName Name of monitor
* @returns {string}
*/
_statusMessageFactory = (status, monitorName) => {
if (status === DOWN) {
return `🔴 Application [${monitorName}] went down\n`;
} else if (status === UP) {
return `✅ Application [${monitorName}] is back online\n`;
}
return "Notification\n";
};
/**
* Send the notification
* @param {string} webhookUrl URL to send the request to
* @param {Array} payload Payload generated by _notificationPayloadFactory
*/
_sendNotification = async (webhookUrl, payload) => {
await axios.post(webhookUrl, { text: payload.join("\n") });
};
/**
* Generate payload for notification
* @param {number} status The status of the monitor
* @param {string} monitorMessage Message to send
* @param {string} monitorName Name of monitor affected
* @param {string} monitorUrl URL of monitor affected
* @returns {Array}
*/
_notificationPayloadFactory = ({
status,
monitorMessage,
monitorName,
monitorUrl,
}) => {
const payload = [];
payload.push("### Uptime Kuma\n");
payload.push(this._statusMessageFactory(status, monitorName));
payload.push(`*Description:* ${monitorMessage}`);
if (monitorName) {
payload.push(`*Monitor:* ${monitorName}`);
}
if (monitorUrl && monitorUrl !== "https://") {
payload.push(`*URL:* [${monitorUrl}](${monitorUrl})`);
}
return payload;
};
/**
* Send a general notification
* @param {string} webhookUrl URL to send request to
* @param {string} msg Message to send
* @returns {Promise<void>}
*/
_handleGeneralNotification = (webhookUrl, msg) => {
const payload = this._notificationPayloadFactory({
monitorMessage: msg
});
return this._sendNotification(webhookUrl, payload);
};
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
if (heartbeatJSON == null) {
await this._handleGeneralNotification(notification.webhookUrl, msg);
return okMsg;
}
let url;
switch (monitorJSON["type"]) {
case "http":
case "keyword":
url = monitorJSON["url"];
break;
case "docker":
url = monitorJSON["docker_host"];
break;
default:
url = monitorJSON["hostname"];
break;
}
const payload = this._notificationPayloadFactory({
monitorMessage: heartbeatJSON.msg,
monitorName: monitorJSON.name,
monitorUrl: url,
status: heartbeatJSON.status
});
await this._sendNotification(notification.webhookUrl, payload);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = ZohoCliq;
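For a DOWN heartbeat on a hypothetical HTTP monitor, the payload factory above yields lines like the following, which _sendNotification() joins with newlines and posts as { text: ... }:

// Illustrative output of _notificationPayloadFactory() (values are placeholders).
const examplePayload = [
    "### Uptime Kuma\n",
    "🔴 Application [Example API] went down\n",
    "*Description:* Request failed with status code 503",
    "*Monitor:* Example API",
    "*URL:* [https://api.example.com/health](https://api.example.com/health)"
];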

View File

@ -9,10 +9,12 @@ const ClickSendSMS = require("./notification-providers/clicksendsms");
const DingDing = require("./notification-providers/dingding");
const Discord = require("./notification-providers/discord");
const Feishu = require("./notification-providers/feishu");
const FreeMobile = require("./notification-providers/freemobile");
const GoogleChat = require("./notification-providers/google-chat");
const Gorush = require("./notification-providers/gorush");
const Gotify = require("./notification-providers/gotify");
const HomeAssistant = require("./notification-providers/home-assistant");
const Kook = require("./notification-providers/kook");
const Line = require("./notification-providers/line");
const LineNotify = require("./notification-providers/linenotify");
const LunaSea = require("./notification-providers/lunasea");
@ -31,17 +33,20 @@ const RocketChat = require("./notification-providers/rocket-chat");
const SerwerSMS = require("./notification-providers/serwersms");
const Signal = require("./notification-providers/signal");
const Slack = require("./notification-providers/slack");
const SMSEagle = require("./notification-providers/smseagle");
const SMTP = require("./notification-providers/smtp");
const Squadcast = require("./notification-providers/squadcast");
const Stackfield = require("./notification-providers/stackfield");
const Teams = require("./notification-providers/teams");
const TechulusPush = require("./notification-providers/techulus-push");
const Telegram = require("./notification-providers/telegram");
const Splunk = require("./notification-providers/splunk");
const Webhook = require("./notification-providers/webhook");
const WeCom = require("./notification-providers/wecom");
const GoAlert = require("./notification-providers/goalert");
const SMSManager = require("./notification-providers/smsmanager");
const ServerChan = require("./notification-providers/serverchan");
const ZohoCliq = require("./notification-providers/zoho-cliq");
class Notification {
@ -63,10 +68,12 @@ class Notification {
new DingDing(),
new Discord(),
new Feishu(),
new FreeMobile(),
new GoogleChat(),
new Gorush(),
new Gotify(),
new HomeAssistant(),
new Kook(),
new Line(),
new LineNotify(),
new LunaSea(),
@ -87,15 +94,18 @@ class Notification {
new Signal(),
new SMSManager(),
new Slack(),
new SMSEagle(),
new SMTP(),
new Squadcast(),
new Stackfield(),
new Teams(),
new TechulusPush(),
new Telegram(),
new Splunk(),
new Webhook(),
new WeCom(),
new GoAlert(),
new ZohoCliq()
];
for (let item of list) {

View File

@ -1,199 +0,0 @@
// https://github.com/ben-bradley/ping-lite/blob/master/ping-lite.js
// Fixed on Windows
const net = require("net");
const spawn = require("child_process").spawn;
const events = require("events");
const fs = require("fs");
const util = require("./util-server");
module.exports = Ping;
/**
* Constructor for ping class
* @param {string} host Host to ping
* @param {object} [options] Options for the ping command
* @param {array|string} [options.args] - Arguments to pass to the ping command
*/
function Ping(host, options) {
if (!host) {
throw new Error("You must specify a host to ping!");
}
this._host = host;
this._options = options = (options || {});
events.EventEmitter.call(this);
const timeout = 10;
if (util.WIN) {
this._bin = "c:/windows/system32/ping.exe";
this._args = (options.args) ? options.args : [ "-n", "1", "-w", timeout * 1000, host ];
this._regmatch = /[><=]([0-9.]+?)ms/;
} else if (util.LIN) {
this._bin = "/bin/ping";
const defaultArgs = [ "-n", "-w", timeout, "-c", "1", host ];
if (net.isIPv6(host) || options.ipv6) {
defaultArgs.unshift("-6");
}
this._args = (options.args) ? options.args : defaultArgs;
this._regmatch = /=([0-9.]+?) ms/;
} else if (util.MAC) {
if (net.isIPv6(host) || options.ipv6) {
this._bin = "/sbin/ping6";
} else {
this._bin = "/sbin/ping";
}
this._args = (options.args) ? options.args : [ "-n", "-t", timeout, "-c", "1", host ];
this._regmatch = /=([0-9.]+?) ms/;
} else if (util.BSD) {
this._bin = "/sbin/ping";
const defaultArgs = [ "-n", "-t", timeout, "-c", "1", host ];
if (net.isIPv6(host) || options.ipv6) {
defaultArgs.unshift("-6");
}
this._args = (options.args) ? options.args : defaultArgs;
this._regmatch = /=([0-9.]+?) ms/;
} else {
throw new Error("Could not detect your ping binary.");
}
if (!fs.existsSync(this._bin)) {
throw new Error("Could not detect " + this._bin + " on your system");
}
this._i = 0;
return this;
}
Ping.prototype.__proto__ = events.EventEmitter.prototype;
/**
* Callback for send
* @callback pingCB
* @param {any} err Any error encountered
* @param {number} ms Ping time in ms
*/
/**
* Send a ping
* @param {pingCB} callback Callback to call with results
*/
Ping.prototype.send = function (callback) {
let self = this;
callback = callback || function (err, ms) {
if (err) {
return self.emit("error", err);
}
return self.emit("result", ms);
};
let _ended;
let _exited;
let _errored;
this._ping = spawn(this._bin, this._args); // spawn the binary
this._ping.on("error", function (err) { // handle binary errors
_errored = true;
callback(err);
});
this._ping.stdout.on("data", function (data) { // log stdout
if (util.WIN) {
data = convertOutput(data);
}
this._stdout = (this._stdout || "") + data;
});
this._ping.stdout.on("end", function () {
_ended = true;
if (_exited && !_errored) {
onEnd.call(self._ping);
}
});
this._ping.stderr.on("data", function (data) { // log stderr
if (util.WIN) {
data = convertOutput(data);
}
this._stderr = (this._stderr || "") + data;
});
this._ping.on("exit", function (code) { // handle complete
_exited = true;
if (_ended && !_errored) {
onEnd.call(self._ping);
}
});
/**
* @param {Function} callback
*
* Generated by Trelent
*/
function onEnd() {
let stdout = this.stdout._stdout;
let stderr = this.stderr._stderr;
let ms;
if (stderr) {
return callback(new Error(stderr));
}
if (!stdout) {
return callback(new Error("No stdout detected"));
}
ms = stdout.match(self._regmatch); // parse out the ##ms response
ms = (ms && ms[1]) ? Number(ms[1]) : ms;
callback(null, ms, stdout);
}
};
/**
* Ping every interval
* @param {pingCB} callback Callback to call with results
*/
Ping.prototype.start = function (callback) {
let self = this;
this._i = setInterval(function () {
self.send(callback);
}, (self._options.interval || 5000));
self.send(callback);
};
/** Stop sending pings */
Ping.prototype.stop = function () {
clearInterval(this._i);
};
/**
* Try to convert to UTF-8 for Windows, as the ping's output on Windows is not UTF-8 and could be in other languages
* Thank @pemassi
* https://github.com/louislam/uptime-kuma/issues/570#issuecomment-941984094
* @param {any} data
* @returns {string}
*/
function convertOutput(data) {
if (util.WIN) {
if (data) {
return util.convertToUTF8(data);
}
}
return data;
}

13
server/plugin.js Normal file
View File

@ -0,0 +1,13 @@
class Plugin {
async load() {
}
async unload() {
}
}
module.exports = {
Plugin,
};
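To make the contract concrete, here is a hypothetical plugin entry point (directory and class names are made up). The wrapper in plugins-manager.js below only requires that index.js export a new-able class whose instances provide load() and unload(); extending the Plugin base class above is the conventional way to get those hooks.

// Hypothetical ./data/plugins/example-plugin/index.js (sketch only).
class ExamplePlugin {
    /**
     * @param {UptimeKumaServer} server Server instance handed in by PluginWrapper
     */
    constructor(server) {
        this.server = server;
    }

    async load() {
        // Register monitor types, start timers, open connections, ...
    }

    async unload() {
        // Undo everything done in load().
    }
}

module.exports = ExamplePlugin;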

256
server/plugins-manager.js Normal file
View File

@ -0,0 +1,256 @@
const fs = require("fs");
const { log } = require("../src/util");
const path = require("path");
const axios = require("axios");
const { Git } = require("./git");
const childProcess = require("child_process");
class PluginsManager {
static disable = false;
/**
* Plugin List
* @type {PluginWrapper[]}
*/
pluginList = [];
/**
* Plugins Dir
*/
pluginsDir;
server;
/**
*
* @param {UptimeKumaServer} server
*/
constructor(server) {
this.server = server;
if (!PluginsManager.disable) {
this.pluginsDir = "./data/plugins/";
if (! fs.existsSync(this.pluginsDir)) {
fs.mkdirSync(this.pluginsDir, { recursive: true });
}
log.debug("plugin", "Scanning plugin directory");
let list = fs.readdirSync(this.pluginsDir);
this.pluginList = [];
for (let item of list) {
this.loadPlugin(item);
}
} else {
log.warn("PLUGIN", "Skip scanning plugin directory");
}
}
/**
* Load a plugin
* @param {string} name Directory name of the plugin
*/
async loadPlugin(name) {
log.info("plugin", "Load " + name);
let plugin = new PluginWrapper(this.server, this.pluginsDir + name);
try {
await plugin.load();
this.pluginList.push(plugin);
} catch (e) {
log.error("plugin", "Failed to load plugin: " + this.pluginsDir + name);
log.error("plugin", "Reason: " + e.message);
}
}
/**
* Download a Plugin
* @param {string} repoURL Git repo url
* @param {string} name Directory name, also known as plugin unique name
*/
downloadPlugin(repoURL, name) {
if (fs.existsSync(this.pluginsDir + name)) {
log.info("plugin", "Plugin folder already exists? Removing...");
fs.rmSync(this.pluginsDir + name, {
recursive: true
});
}
log.info("plugin", "Installing plugin: " + name + " " + repoURL);
let result = Git.clone(repoURL, this.pluginsDir, name);
log.info("plugin", "Install result: " + result);
}
/**
* Remove a plugin
* @param {string} name
*/
async removePlugin(name) {
log.info("plugin", "Removing plugin: " + name);
for (let plugin of this.pluginList) {
if (plugin.info.name === name) {
await plugin.unload();
// Delete the plugin directory
fs.rmSync(this.pluginsDir + name, {
recursive: true
});
this.pluginList.splice(this.pluginList.indexOf(plugin), 1);
return;
}
}
log.warn("plugin", "Plugin not found: " + name);
throw new Error("Plugin not found: " + name);
}
/**
* TODO: Update a plugin
* Only available for plugins which were downloaded from the official list
* @param pluginID
*/
updatePlugin(pluginID) {
}
/**
* Get the plugin list from server + local installed plugin list
* Item will be merged if the `name` is the same.
* @returns {Promise<[]>}
*/
async fetchPluginList() {
let remotePluginList;
try {
const res = await axios.get("https://uptime.kuma.pet/c/plugins.json");
remotePluginList = res.data.pluginList;
} catch (e) {
log.error("plugin", "Failed to fetch plugin list: " + e.message);
remotePluginList = [];
}
for (let plugin of this.pluginList) {
let find = false;
// Try to merge
for (let remotePlugin of remotePluginList) {
if (remotePlugin.name === plugin.info.name) {
find = true;
remotePlugin.installed = true;
remotePlugin.name = plugin.info.name;
remotePlugin.fullName = plugin.info.fullName;
remotePlugin.description = plugin.info.description;
remotePlugin.version = plugin.info.version;
break;
}
}
// Local plugin
if (!find) {
plugin.info.local = true;
remotePluginList.push(plugin.info);
}
}
// Sort Installed first, then sort by name
return remotePluginList.sort((a, b) => {
if (a.installed === b.installed) {
if (a.fullName < b.fullName) {
return -1;
}
if (a.fullName > b.fullName) {
return 1;
}
return 0;
} else if (a.installed) {
return -1;
} else {
return 1;
}
});
}
}
class PluginWrapper {
server = undefined;
pluginDir = undefined;
/**
* Must be a `new-able` class.
* @type {function}
*/
pluginClass = undefined;
/**
*
* @type {Plugin}
*/
object = undefined;
info = {};
/**
*
* @param {UptimeKumaServer} server
* @param {string} pluginDir
*/
constructor(server, pluginDir) {
this.server = server;
this.pluginDir = pluginDir;
}
async load() {
let indexFile = this.pluginDir + "/index.js";
let packageJSON = this.pluginDir + "/package.json";
log.info("plugin", "Installing dependencies");
if (fs.existsSync(indexFile)) {
// Install dependencies
let result = childProcess.spawnSync("npm", [ "install" ], {
cwd: this.pluginDir,
env: {
...process.env,
PLAYWRIGHT_BROWSERS_PATH: "../../browsers", // Special handling for read-browser-monitor
}
});
if (result.stdout) {
log.info("plugin", "Install dependencies result: " + result.stdout.toString("utf-8"));
} else {
log.warn("plugin", "Install dependencies result: no output");
}
this.pluginClass = require(path.join(process.cwd(), indexFile));
let pluginClassType = typeof this.pluginClass;
if (pluginClassType === "function") {
this.object = new this.pluginClass(this.server);
await this.object.load();
} else {
throw new Error("Invalid plugin, it does not export a class");
}
if (fs.existsSync(packageJSON)) {
this.info = require(path.join(process.cwd(), packageJSON));
} else {
this.info.fullName = this.pluginDir;
this.info.name = "[unknown]";
this.info.version = "[unknown-version]";
}
this.info.installed = true;
log.info("plugin", `${this.info.fullName} v${this.info.version} loaded`);
}
}
async unload() {
await this.object.unload();
}
}
module.exports = {
PluginsManager,
PluginWrapper
};

View File

@ -99,6 +99,7 @@ class Prometheus {
}
}
/** Remove monitor from prometheus */
remove() {
try {
monitorCertDaysRemaining.remove(this.monitorLabelValues);

View File

@ -7,7 +7,7 @@ const { UptimeKumaServer } = require("./uptime-kuma-server");
class Proxy {
static SUPPORTED_PROXY_PROTOCOLS = [ "http", "https", "socks", "socks5", "socks4" ];
static SUPPORTED_PROXY_PROTOCOLS = [ "http", "https", "socks", "socks5", "socks5h", "socks4" ];
/**
* Saves and updates given proxy entity
@ -126,6 +126,7 @@ class Proxy {
break;
case "socks":
case "socks5":
case "socks5h":
case "socks4":
agent = new SocksProxyAgent({
...httpAgentOptions,

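The practical difference behind the new entry: with socks5 the monitored hostname is resolved locally before the request, while socks5h hands DNS resolution to the proxy, which matters when the target is only resolvable from the proxy's network. A rough sketch, assuming the URI form of socks-proxy-agent and a hypothetical proxy address:

const { SocksProxyAgent } = require("socks-proxy-agent");

// socks5h:// defers DNS resolution to the proxy; socks5:// resolves locally first.
const agent = new SocksProxyAgent("socks5h://127.0.0.1:1080");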
View File

@ -4,7 +4,7 @@ const { R } = require("redbean-node");
const apicache = require("../modules/apicache");
const Monitor = require("../model/monitor");
const dayjs = require("dayjs");
const { UP, DOWN, flipStatus, log } = require("../../src/util");
const { UP, MAINTENANCE, DOWN, PENDING, flipStatus, log } = require("../../src/util");
const StatusPage = require("../model/status_page");
const { UptimeKumaServer } = require("../uptime-kuma-server");
const { makeBadge } = require("badge-maker");
@ -67,6 +67,11 @@ router.get("/api/push/:pushToken", async (request, response) => {
duration = dayjs(bean.time).diff(dayjs(previousHeartbeat.time), "second");
}
if (await Monitor.isUnderMaintenance(monitor.id)) {
msg = "Monitor under maintenance";
status = MAINTENANCE;
}
log.debug("router", `/api/push/ called at ${dayjs().format("YYYY-MM-DD HH:mm:ss.SSS")}`);
log.debug("router", "PreviousStatus: " + previousStatus);
log.debug("router", "Current Status: " + status);
@ -87,7 +92,7 @@ router.get("/api/push/:pushToken", async (request, response) => {
ok: true,
});
if (bean.important) {
if (Monitor.isImportantForNotification(isFirstBeat, previousStatus, status)) {
await Monitor.sendNotification(isFirstBeat, monitor, bean);
}
@ -106,8 +111,12 @@ router.get("/api/badge/:id/status", cache("5 minutes"), async (request, response
label,
upLabel = "Up",
downLabel = "Down",
pendingLabel = "Pending",
maintenanceLabel = "Maintenance",
upColor = badgeConstants.defaultUpColor,
downColor = badgeConstants.defaultDownColor,
pendingColor = badgeConstants.defaultPendingColor,
maintenanceColor = badgeConstants.defaultMaintenanceColor,
style = badgeConstants.defaultStyle,
value, // for demo purpose only
} = request.query;
@ -134,11 +143,30 @@ router.get("/api/badge/:id/status", cache("5 minutes"), async (request, response
badgeValues.color = badgeConstants.naColor;
} else {
const heartbeat = await Monitor.getPreviousHeartbeat(requestedMonitorId);
const state = overrideValue !== undefined ? overrideValue : heartbeat.status === 1;
const state = overrideValue !== undefined ? overrideValue : heartbeat.status;
badgeValues.label = label ? label : "";
badgeValues.color = state ? upColor : downColor;
badgeValues.message = label ?? state ? upLabel : downLabel;
badgeValues.label = label ?? "Status";
switch (state) {
case DOWN:
badgeValues.color = downColor;
badgeValues.message = downLabel;
break;
case UP:
badgeValues.color = upColor;
badgeValues.message = upLabel;
break;
case PENDING:
badgeValues.color = pendingColor;
badgeValues.message = pendingLabel;
break;
case MAINTENANCE:
badgeValues.color = maintenanceColor;
badgeValues.message = maintenanceLabel;
break;
default:
badgeValues.color = badgeConstants.naColor;
badgeValues.message = "N/A";
}
}
// build the svg based on given values
@ -184,7 +212,7 @@ router.get("/api/badge/:id/uptime/:duration?", cache("5 minutes"), async (reques
const badgeValues = { style };
if (!publicMonitor) {
// return a "N/A" badge in naColor (grey), if monitor is not public / not available / non exsitant
// return a "N/A" badge in naColor (grey), if monitor is not public / not available / non existent
badgeValues.message = "N/A";
badgeValues.color = badgeConstants.naColor;
} else {
@ -200,8 +228,11 @@ router.get("/api/badge/:id/uptime/:duration?", cache("5 minutes"), async (reques
badgeValues.color = color ?? percentageToColor(uptime);
// use a given, custom labelColor or use the default badge label color (defined by badge-maker)
badgeValues.labelColor = labelColor ?? "";
// build a lable string. If a custom label is given, override the default one (requestedDuration)
badgeValues.label = filterAndJoin([ labelPrefix, label ?? requestedDuration, labelSuffix ]);
// build a label string. If a custom label is given, override the default one (requestedDuration)
badgeValues.label = filterAndJoin([
labelPrefix,
label ?? `Uptime (${requestedDuration}${labelSuffix})`,
]);
badgeValues.message = filterAndJoin([ prefix, `${cleanUptime * 100}`, suffix ]);
}
@ -262,7 +293,7 @@ router.get("/api/badge/:id/ping/:duration?", cache("5 minutes"), async (request,
// use a given, custom labelColor or use the default badge label color (defined by badge-maker)
badgeValues.labelColor = labelColor ?? "";
// build a label string. If a custom label is given, override the default one (requestedDuration)
badgeValues.label = filterAndJoin([ labelPrefix, label ?? requestedDuration, labelSuffix ]);
badgeValues.label = filterAndJoin([ labelPrefix, label ?? `Avg. Ping (${requestedDuration}${labelSuffix})` ]);
badgeValues.message = filterAndJoin([ prefix, avgPing, suffix ]);
}
@ -276,4 +307,237 @@ router.get("/api/badge/:id/ping/:duration?", cache("5 minutes"), async (request,
}
});
router.get("/api/badge/:id/avg-response/:duration?", cache("5 minutes"), async (request, response) => {
allowAllOrigin(response);
const {
label,
labelPrefix,
labelSuffix,
prefix,
suffix = badgeConstants.defaultPingValueSuffix,
color = badgeConstants.defaultPingColor,
labelColor,
style = badgeConstants.defaultStyle,
value, // for demo purpose only
} = request.query;
try {
const requestedMonitorId = parseInt(request.params.id, 10);
// Default duration is 24 (h) if not defined in queryParam, limited to 720h (30d)
const requestedDuration = Math.min(
request.params.duration
? parseInt(request.params.duration, 10)
: 24,
720
);
const overrideValue = value && parseFloat(value);
const publicAvgPing = parseInt(await R.getCell(`
SELECT AVG(ping) FROM monitor_group, \`group\`, heartbeat
WHERE monitor_group.group_id = \`group\`.id
AND heartbeat.time > DATETIME('now', ? || ' hours')
AND heartbeat.ping IS NOT NULL
AND public = 1
AND heartbeat.monitor_id = ?
`,
[ -requestedDuration, requestedMonitorId ]
));
const badgeValues = { style };
if (!publicAvgPing) {
// return a "N/A" badge in naColor (grey), if monitor is not public / not available / non existent
badgeValues.message = "N/A";
badgeValues.color = badgeConstants.naColor;
} else {
const avgPing = parseInt(overrideValue ?? publicAvgPing);
badgeValues.color = color;
// use a given, custom labelColor or use the default badge label color (defined by badge-maker)
badgeValues.labelColor = labelColor ?? "";
// build a label string. If a custom label is given, override the default one (requestedDuration)
badgeValues.label = filterAndJoin([
labelPrefix,
label ?? `Avg. Response (${requestedDuration}h)`,
labelSuffix,
]);
badgeValues.message = filterAndJoin([ prefix, avgPing, suffix ]);
}
// build the SVG based on given values
const svg = makeBadge(badgeValues);
response.type("image/svg+xml");
response.send(svg);
} catch (error) {
send403(response, error.message);
}
});
router.get("/api/badge/:id/cert-exp", cache("5 minutes"), async (request, response) => {
allowAllOrigin(response);
const date = request.query.date;
const {
label,
labelPrefix,
labelSuffix,
prefix,
suffix = date ? "" : badgeConstants.defaultCertExpValueSuffix,
upColor = badgeConstants.defaultUpColor,
warnColor = badgeConstants.defaultWarnColor,
downColor = badgeConstants.defaultDownColor,
warnDays = badgeConstants.defaultCertExpireWarnDays,
downDays = badgeConstants.defaultCertExpireDownDays,
labelColor,
style = badgeConstants.defaultStyle,
value, // for demo purpose only
} = request.query;
try {
const requestedMonitorId = parseInt(request.params.id, 10);
const overrideValue = value && parseFloat(value);
let publicMonitor = await R.getRow(`
SELECT monitor_group.monitor_id FROM monitor_group, \`group\`
WHERE monitor_group.group_id = \`group\`.id
AND monitor_group.monitor_id = ?
AND public = 1
`,
[ requestedMonitorId ]
);
const badgeValues = { style };
if (!publicMonitor) {
// return a "N/A" badge in naColor (grey), if monitor is not public / not available / non existent
badgeValues.message = "N/A";
badgeValues.color = badgeConstants.naColor;
} else {
const tlsInfoBean = await R.findOne("monitor_tls_info", "monitor_id = ?", [
requestedMonitorId,
]);
if (!tlsInfoBean) {
// return a "No/Bad Cert" badge in naColor (grey), if no cert saved (does not save bad certs?)
badgeValues.message = "No/Bad Cert";
badgeValues.color = badgeConstants.naColor;
} else {
const tlsInfo = JSON.parse(tlsInfoBean.info_json);
if (!tlsInfo.valid) {
// return a "Bad Cert" badge in downColor (red), when the cert is not valid
badgeValues.message = "Bad Cert";
badgeValues.color = badgeConstants.downColor;
} else {
const daysRemaining = parseInt(overrideValue ?? tlsInfo.certInfo.daysRemaining);
if (daysRemaining > warnDays) {
badgeValues.color = upColor;
} else if (daysRemaining > downDays) {
badgeValues.color = warnColor;
} else {
badgeValues.color = downColor;
}
// use a given, custom labelColor or use the default badge label color (defined by badge-maker)
badgeValues.labelColor = labelColor ?? "";
// build a label string. If a custom label is given, override the default one
badgeValues.label = filterAndJoin([
labelPrefix,
label ?? "Cert Exp.",
labelSuffix,
]);
badgeValues.message = filterAndJoin([ prefix, date ? tlsInfo.certInfo.validTo : daysRemaining, suffix ]);
}
}
}
// build the SVG based on given values
const svg = makeBadge(badgeValues);
response.type("image/svg+xml");
response.send(svg);
} catch (error) {
send403(response, error.message);
}
});
router.get("/api/badge/:id/response", cache("5 minutes"), async (request, response) => {
allowAllOrigin(response);
const {
label,
labelPrefix,
labelSuffix,
prefix,
suffix = badgeConstants.defaultPingValueSuffix,
color = badgeConstants.defaultPingColor,
labelColor,
style = badgeConstants.defaultStyle,
value, // for demo purpose only
} = request.query;
try {
const requestedMonitorId = parseInt(request.params.id, 10);
const overrideValue = value && parseFloat(value);
let publicMonitor = await R.getRow(`
SELECT monitor_group.monitor_id FROM monitor_group, \`group\`
WHERE monitor_group.group_id = \`group\`.id
AND monitor_group.monitor_id = ?
AND public = 1
`,
[ requestedMonitorId ]
);
const badgeValues = { style };
if (!publicMonitor) {
// return a "N/A" badge in naColor (grey), if monitor is not public / not available / non existent
badgeValues.message = "N/A";
badgeValues.color = badgeConstants.naColor;
} else {
const heartbeat = await Monitor.getPreviousHeartbeat(
requestedMonitorId
);
if (!heartbeat.ping) {
// return a "N/A" badge in naColor (grey), if previous heartbeat has no ping
badgeValues.message = "N/A";
badgeValues.color = badgeConstants.naColor;
} else {
const ping = parseInt(overrideValue ?? heartbeat.ping);
badgeValues.color = color;
// use a given, custom labelColor or use the default badge label color (defined by badge-maker)
badgeValues.labelColor = labelColor ?? "";
// build a label string. If a custom label is given, override the default one
badgeValues.label = filterAndJoin([
labelPrefix,
label ?? "Response",
labelSuffix,
]);
badgeValues.message = filterAndJoin([ prefix, ping, suffix ]);
}
}
// build the SVG based on given values
const svg = makeBadge(badgeValues);
response.type("image/svg+xml");
response.send(svg);
} catch (error) {
send403(response, error.message);
}
});
module.exports = router;
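By way of example, the new endpoints can be embedded like the existing badges; the host, monitor ID, and durations below are hypothetical, and the query parameters are the ones destructured above (durations are clamped to 720 hours):

https://uptime.example.com/api/badge/1/avg-response/720?label=Avg.%20Response&labelColor=grey
https://uptime.example.com/api/badge/1/cert-exp?warnDays=30&downDays=7
https://uptime.example.com/api/badge/1/cert-exp?date=1
https://uptime.example.com/api/badge/1/response?prefix=~&suffix=ms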

View File

@ -5,6 +5,15 @@
*/
console.log("Welcome to Uptime Kuma");
// As the log function need to use dayjs, it should be very top
const dayjs = require("dayjs");
dayjs.extend(require("dayjs/plugin/utc"));
dayjs.extend(require("./modules/dayjs/plugin/timezone"));
dayjs.extend(require("dayjs/plugin/customParseFormat"));
// Load environment variables from `.env`
require("dotenv").config();
// Check Node.js Version
const nodeVersion = parseInt(process.versions.node.split(".")[0]);
const requiredVersion = 14;
@ -33,6 +42,7 @@ log.info("server", "Importing Node libraries");
const fs = require("fs");
log.info("server", "Importing 3rd-party libraries");
log.debug("server", "Importing express");
const express = require("express");
const expressStaticGzip = require("express-static-gzip");
@ -127,6 +137,12 @@ const StatusPage = require("./model/status_page");
const { cloudflaredSocketHandler, autoStart: cloudflaredAutoStart, stop: cloudflaredStop } = require("./socket-handlers/cloudflared-socket-handler");
const { proxySocketHandler } = require("./socket-handlers/proxy-socket-handler");
const { dockerSocketHandler } = require("./socket-handlers/docker-socket-handler");
const { maintenanceSocketHandler } = require("./socket-handlers/maintenance-socket-handler");
const { generalSocketHandler } = require("./socket-handlers/general-socket-handler");
const { Settings } = require("./settings");
const { CacheableDnsHttpAgent } = require("./cacheable-dns-http-agent");
const { pluginsHandler } = require("./socket-handlers/plugins-handler");
const { EmbeddedMariaDB } = require("./embedded-mariadb");
app.use(express.json());
@ -154,8 +170,9 @@ let needSetup = false;
(async () => {
Database.init(args);
await initDatabase(testMode);
exports.entryPage = await setting("entryPage");
await server.initAfterDatabaseReady();
server.loadPlugins();
server.entryPage = await Settings.get("entryPage");
await StatusPage.loadDomainMappingList();
log.info("server", "Adding route");
@ -176,14 +193,15 @@ let needSetup = false;
log.debug("entry", `Request Domain: ${hostname}`);
const uptimeKumaEntryPage = server.entryPage;
if (hostname in StatusPage.domainMappingList) {
log.debug("entry", "This is a status page domain");
let slug = StatusPage.domainMappingList[hostname];
await StatusPage.handleStatusPageResponse(response, server.indexHTML, slug);
} else if (exports.entryPage && exports.entryPage.startsWith("statusPage-")) {
response.redirect("/status/" + exports.entryPage.replace("statusPage-", ""));
} else if (uptimeKumaEntryPage && uptimeKumaEntryPage.startsWith("statusPage-")) {
response.redirect("/status/" + uptimeKumaEntryPage.replace("statusPage-", ""));
} else {
response.redirect("/dashboard");
@ -192,6 +210,7 @@ let needSetup = false;
if (isDev) {
app.post("/test-webhook", async (request, response) => {
log.debug("test", request.headers);
log.debug("test", request.body);
response.send("OK");
});
@ -200,7 +219,7 @@ let needSetup = false;
// Robots.txt
app.get("/robots.txt", async (_request, response) => {
let txt = "User-agent: *\nDisallow:";
if (! await setting("searchEngineIndex")) {
if (!await setting("searchEngineIndex")) {
txt += " /";
}
response.setHeader("Content-Type", "text/plain");
@ -560,7 +579,6 @@ let needSetup = false;
});
}
} catch (error) {
console.log(error);
callback({
ok: false,
msg: error.message,
@ -620,6 +638,9 @@ let needSetup = false;
bean.import(monitor);
bean.user_id = socket.userID;
bean.validate();
await R.store(bean);
await updateMonitorNotification(bean.id, notificationIDList);
@ -672,12 +693,14 @@ let needSetup = false;
bean.retryInterval = monitor.retryInterval;
bean.resendInterval = monitor.resendInterval;
bean.hostname = monitor.hostname;
bean.game = monitor.game;
bean.maxretries = monitor.maxretries;
bean.port = parseInt(monitor.port);
bean.keyword = monitor.keyword;
bean.ignoreTls = monitor.ignoreTls;
bean.expiryNotification = monitor.expiryNotification;
bean.upsideDown = monitor.upsideDown;
bean.packetSize = monitor.packetSize;
bean.maxredirects = monitor.maxredirects;
bean.accepted_statuscodes_json = JSON.stringify(monitor.accepted_statuscodes);
bean.dns_resolve_type = monitor.dns_resolve_type;
@ -695,12 +718,21 @@ let needSetup = false;
bean.authMethod = monitor.authMethod;
bean.authWorkstation = monitor.authWorkstation;
bean.authDomain = monitor.authDomain;
bean.grpcUrl = monitor.grpcUrl;
bean.grpcProtobuf = monitor.grpcProtobuf;
bean.grpcServiceName = monitor.grpcServiceName;
bean.grpcMethod = monitor.grpcMethod;
bean.grpcBody = monitor.grpcBody;
bean.grpcMetadata = monitor.grpcMetadata;
bean.grpcEnableTls = monitor.grpcEnableTls;
bean.radiusUsername = monitor.radiusUsername;
bean.radiusPassword = monitor.radiusPassword;
bean.radiusCalledStationId = monitor.radiusCalledStationId;
bean.radiusCallingStationId = monitor.radiusCallingStationId;
bean.radiusSecret = monitor.radiusSecret;
bean.validate();
await R.store(bean);
await updateMonitorNotification(bean.id, monitor.notificationIDList);
@ -915,13 +947,21 @@ let needSetup = false;
try {
checkLogin(socket);
let bean = await R.findOne("monitor", " id = ? ", [ tag.id ]);
let bean = await R.findOne("tag", " id = ? ", [ tag.id ]);
if (bean == null) {
callback({
ok: false,
msg: "Tag not found",
});
return;
}
bean.name = tag.name;
bean.color = tag.color;
await R.store(bean);
callback({
ok: true,
msg: "Saved",
tag: await bean.toJSON(),
});
@ -1055,10 +1095,15 @@ let needSetup = false;
socket.on("getSettings", async (callback) => {
try {
checkLogin(socket);
const data = await getSettings("general");
if (!data.serverTimezone) {
data.serverTimezone = await server.getTimezone();
}
callback({
ok: true,
data: await getSettings("general"),
data: data,
});
} catch (e) {
@ -1084,7 +1129,14 @@ let needSetup = false;
}
await setSettings("general", data);
exports.entryPage = data.entryPage;
server.entryPage = data.entryPage;
await CacheableDnsHttpAgent.update();
// Also need to apply timezone globally
if (data.serverTimezone) {
await server.setTimezone(data.serverTimezone);
}
callback({
ok: true,
@ -1092,6 +1144,7 @@ let needSetup = false;
});
sendInfo(socket);
server.sendMaintenanceList(socket);
} catch (e) {
callback({
@ -1450,6 +1503,9 @@ let needSetup = false;
databaseSocketHandler(socket);
proxySocketHandler(socket);
dockerSocketHandler(socket);
maintenanceSocketHandler(socket);
generalSocketHandler(socket, server);
pluginsHandler(socket, server);
log.debug("server", "added all socket handlers");
@ -1552,6 +1608,7 @@ async function afterLogin(socket, user) {
socket.join(user.id);
let monitorList = await server.sendMonitorList(socket);
server.sendMaintenanceList(socket);
sendNotificationList(socket);
sendProxyList(socket);
sendDockerHostList(socket);
@ -1571,6 +1628,13 @@ async function afterLogin(socket, user) {
for (let monitorID in monitorList) {
await Monitor.sendStats(io, monitorID, user.id);
}
// Set server timezone from client browser if not set
// It should be run once only
if (! await Settings.get("initServerTimezone")) {
log.debug("server", "emit initServerTimezone");
socket.emit("initServerTimezone");
}
}
/**
@ -1697,6 +1761,8 @@ async function shutdownFunction(signal) {
log.info("server", "Shutdown requested");
log.info("server", "Called signal: " + signal);
await server.stop();
log.info("server", "Stopping all monitors");
for (let id in server.monitorList) {
let monitor = server.monitorList[id];
@ -1705,8 +1771,13 @@ async function shutdownFunction(signal) {
await sleep(2000);
await Database.close();
if (EmbeddedMariaDB.hasInstance()) {
EmbeddedMariaDB.getInstance().stop();
}
stopBackgroundJobs();
await cloudflaredStop();
Settings.stopCacheCleaner();
}
/** Final function called before application exits */

View File

@ -158,6 +158,13 @@ class Settings {
delete Settings.cacheList[key];
}
}
static stopCacheCleaner() {
if (Settings.cacheCleaner) {
clearInterval(Settings.cacheCleaner);
Settings.cacheCleaner = null;
}
}
}
module.exports = {

View File

@ -1,6 +1,7 @@
const { checkLogin, setSetting, setting, doubleCheckPassword } = require("../util-server");
const { CloudflaredTunnel } = require("node-cloudflared-tunnel");
const { UptimeKumaServer } = require("../uptime-kuma-server");
const { log } = require("../../src/util");
const io = UptimeKumaServer.getInstance().io;
const prefix = "cloudflared_";
@ -107,7 +108,7 @@ module.exports.autoStart = async (token) => {
/** Stop cloudflared */
module.exports.stop = async () => {
console.log("Stop cloudflared");
log.info("cloudflared", "Stop cloudflared");
if (cloudflared) {
cloudflared.stop();
}

View File

@ -0,0 +1,51 @@
const { log } = require("../../src/util");
const { Settings } = require("../settings");
const { sendInfo } = require("../client");
const { checkLogin } = require("../util-server");
const GameResolver = require("gamedig/lib/GameResolver");
let gameResolver = new GameResolver();
let gameList = null;
/**
* Get a game list via GameDig
* @returns {any[]}
*/
function getGameList() {
if (!gameList) {
gameList = gameResolver._readGames().games.sort((a, b) => {
if ( a.pretty < b.pretty ) {
return -1;
}
if ( a.pretty > b.pretty ) {
return 1;
}
return 0;
});
}
return gameList;
}
module.exports.generalSocketHandler = (socket, server) => {
socket.on("initServerTimezone", async (timezone) => {
try {
checkLogin(socket);
log.debug("generalSocketHandler", "Timezone: " + timezone);
await Settings.set("initServerTimezone", true);
await server.setTimezone(timezone);
await sendInfo(socket);
} catch (e) {
log.warn("initServerTimezone", e.message);
}
});
socket.on("getGameList", async (callback) => {
callback({
ok: true,
gameList: getGameList(),
});
});
};
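A rough sketch of the client-side counterpart (the socket and the dayjs timezone plugin are assumed to be set up as in the frontend): the server emits initServerTimezone once after login, and the browser answers with its guessed timezone; getGameList is a plain request/acknowledge.

// Hypothetical client-side handlers (sketch only).
socket.on("initServerTimezone", () => {
    socket.emit("initServerTimezone", dayjs.tz.guess());   // e.g. "Europe/Berlin"
});

socket.emit("getGameList", (res) => {
    if (res.ok) {
        console.log(res.gameList.map((game) => game.pretty));
    }
});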

View File

@ -0,0 +1,317 @@
const { checkLogin } = require("../util-server");
const { log } = require("../../src/util");
const { R } = require("redbean-node");
const apicache = require("../modules/apicache");
const { UptimeKumaServer } = require("../uptime-kuma-server");
const Maintenance = require("../model/maintenance");
const server = UptimeKumaServer.getInstance();
const MaintenanceTimeslot = require("../model/maintenance_timeslot");
/**
* Handlers for Maintenance
* @param {Socket} socket Socket.io instance
*/
module.exports.maintenanceSocketHandler = (socket) => {
// Add a new maintenance
socket.on("addMaintenance", async (maintenance, callback) => {
try {
checkLogin(socket);
log.debug("maintenance", maintenance);
let bean = Maintenance.jsonToBean(R.dispense("maintenance"), maintenance);
bean.user_id = socket.userID;
let maintenanceID = await R.store(bean);
await MaintenanceTimeslot.generateTimeslot(bean);
await server.sendMaintenanceList(socket);
callback({
ok: true,
msg: "Added Successfully.",
maintenanceID,
});
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
// Edit a maintenance
socket.on("editMaintenance", async (maintenance, callback) => {
try {
checkLogin(socket);
let bean = await R.findOne("maintenance", " id = ? ", [ maintenance.id ]);
if (bean.user_id !== socket.userID) {
throw new Error("Permission denied.");
}
Maintenance.jsonToBean(bean, maintenance);
await R.store(bean);
await MaintenanceTimeslot.generateTimeslot(bean, null, true);
await server.sendMaintenanceList(socket);
callback({
ok: true,
msg: "Saved.",
maintenanceID: bean.id,
});
} catch (e) {
console.error(e);
callback({
ok: false,
msg: e.message,
});
}
});
// Add a new monitor_maintenance
socket.on("addMonitorMaintenance", async (maintenanceID, monitors, callback) => {
try {
checkLogin(socket);
await R.exec("DELETE FROM monitor_maintenance WHERE maintenance_id = ?", [
maintenanceID
]);
for await (const monitor of monitors) {
let bean = R.dispense("monitor_maintenance");
bean.import({
monitor_id: monitor.id,
maintenance_id: maintenanceID
});
await R.store(bean);
}
apicache.clear();
callback({
ok: true,
msg: "Added Successfully.",
});
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
// Add a new monitor_maintenance
socket.on("addMaintenanceStatusPage", async (maintenanceID, statusPages, callback) => {
try {
checkLogin(socket);
await R.exec("DELETE FROM maintenance_status_page WHERE maintenance_id = ?", [
maintenanceID
]);
for await (const statusPage of statusPages) {
let bean = R.dispense("maintenance_status_page");
bean.import({
status_page_id: statusPage.id,
maintenance_id: maintenanceID
});
await R.store(bean);
}
apicache.clear();
callback({
ok: true,
msg: "Added Successfully.",
});
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("getMaintenance", async (maintenanceID, callback) => {
try {
checkLogin(socket);
log.debug("maintenance", `Get Maintenance: ${maintenanceID} User ID: ${socket.userID}`);
let bean = await R.findOne("maintenance", " id = ? AND user_id = ? ", [
maintenanceID,
socket.userID,
]);
callback({
ok: true,
maintenance: await bean.toJSON(),
});
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("getMaintenanceList", async (callback) => {
try {
checkLogin(socket);
await server.sendMaintenanceList(socket);
callback({
ok: true,
});
} catch (e) {
console.error(e);
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("getMonitorMaintenance", async (maintenanceID, callback) => {
try {
checkLogin(socket);
log.debug("maintenance", `Get Monitors for Maintenance: ${maintenanceID} User ID: ${socket.userID}`);
let monitors = await R.getAll("SELECT monitor.id, monitor.name FROM monitor_maintenance mm JOIN monitor ON mm.monitor_id = monitor.id WHERE mm.maintenance_id = ? ", [
maintenanceID,
]);
callback({
ok: true,
monitors,
});
} catch (e) {
console.error(e);
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("getMaintenanceStatusPage", async (maintenanceID, callback) => {
try {
checkLogin(socket);
log.debug("maintenance", `Get Status Pages for Maintenance: ${maintenanceID} User ID: ${socket.userID}`);
let statusPages = await R.getAll("SELECT status_page.id, status_page.title FROM maintenance_status_page msp JOIN status_page ON msp.status_page_id = status_page.id WHERE msp.maintenance_id = ? ", [
maintenanceID,
]);
callback({
ok: true,
statusPages,
});
} catch (e) {
console.error(e);
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("deleteMaintenance", async (maintenanceID, callback) => {
try {
checkLogin(socket);
log.debug("maintenance", `Delete Maintenance: ${maintenanceID} User ID: ${socket.userID}`);
if (maintenanceID in server.maintenanceList) {
delete server.maintenanceList[maintenanceID];
}
await R.exec("DELETE FROM maintenance WHERE id = ? AND user_id = ? ", [
maintenanceID,
socket.userID,
]);
apicache.clear();
callback({
ok: true,
msg: "Deleted Successfully.",
});
await server.sendMaintenanceList(socket);
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("pauseMaintenance", async (maintenanceID, callback) => {
try {
checkLogin(socket);
log.debug("maintenance", `Pause Maintenance: ${maintenanceID} User ID: ${socket.userID}`);
await R.exec("UPDATE maintenance SET active = 0 WHERE id = ? ", [
maintenanceID,
]);
apicache.clear();
callback({
ok: true,
msg: "Paused Successfully.",
});
await server.sendMaintenanceList(socket);
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("resumeMaintenance", async (maintenanceID, callback) => {
try {
checkLogin(socket);
log.debug("maintenance", `Resume Maintenance: ${maintenanceID} User ID: ${socket.userID}`);
await R.exec("UPDATE maintenance SET active = 1 WHERE id = ? ", [
maintenanceID,
]);
apicache.clear();
callback({
ok: true,
msg: "Resumed Successfully.",
});
await server.sendMaintenanceList(socket);
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
};
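For orientation, a client-side sketch of the add flow (the shape of the maintenance object accepted by Maintenance.jsonToBean is not reproduced here; IDs are placeholders):

// Hypothetical client-side usage (sketch only).
socket.emit("addMaintenance", maintenanceFields, (res) => {
    if (!res.ok) {
        return console.error(res.msg);
    }
    // Attach monitors and status pages to the new maintenance window.
    socket.emit("addMonitorMaintenance", res.maintenanceID, [ { id: 1 }, { id: 2 } ], () => {});
    socket.emit("addMaintenanceStatusPage", res.maintenanceID, [ { id: 1 } ], () => {});
});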

View File

@ -0,0 +1,69 @@
const { checkLogin } = require("../util-server");
const { PluginsManager } = require("../plugins-manager");
const { log } = require("../../src/util.js");
/**
* Handlers for plugins
* @param {Socket} socket Socket.io instance
* @param {UptimeKumaServer} server
*/
module.exports.pluginsHandler = (socket, server) => {
const pluginManager = server.getPluginManager();
// Get Plugin List
socket.on("getPluginList", async (callback) => {
try {
checkLogin(socket);
log.debug("plugin", "PluginManager.disable: " + PluginsManager.disable);
if (PluginsManager.disable) {
throw new Error("Plugin Disabled: In order to enable the plugin feature, you need to use the default data directory: ./data/");
}
let pluginList = await pluginManager.fetchPluginList();
callback({
ok: true,
pluginList,
});
} catch (error) {
log.warn("plugin", "Error: " + error.message);
callback({
ok: false,
msg: error.message,
});
}
});
socket.on("installPlugin", async (repoURL, name, callback) => {
try {
checkLogin(socket);
pluginManager.downloadPlugin(repoURL, name);
await pluginManager.loadPlugin(name);
callback({
ok: true,
});
} catch (error) {
callback({
ok: false,
msg: error.message,
});
}
});
socket.on("uninstallPlugin", async (name, callback) => {
try {
checkLogin(socket);
await pluginManager.removePlugin(name);
callback({
ok: true,
});
} catch (error) {
callback({
ok: false,
msg: error.message,
});
}
});
};
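A client-side sketch of these events (the repository URL and plugin name are placeholders):

// Hypothetical client-side usage (sketch only).
socket.emit("getPluginList", (res) => {
    if (res.ok) {
        console.table(res.pluginList);
    }
});

socket.emit("installPlugin", "https://github.com/example/uptime-kuma-example-plugin", "example-plugin", (res) => {
    console.log(res.ok ? "Installed" : res.msg);
});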

View File

@ -0,0 +1,49 @@
const { log } = require("../src/util");
class UptimeCacheList {
/**
* list[monitorID][duration]
*/
static list = {};
/**
* Get the uptime for a specific period
* @param {number} monitorID
* @param {number} duration
* @return {number}
*/
static getUptime(monitorID, duration) {
if (UptimeCacheList.list[monitorID] && UptimeCacheList.list[monitorID][duration]) {
log.debug("UptimeCacheList", "getUptime: " + monitorID + " " + duration);
return UptimeCacheList.list[monitorID][duration];
} else {
return null;
}
}
/**
* Add uptime for specified monitor
* @param {number} monitorID
* @param {number} duration
* @param {number} uptime Uptime to add
*/
static addUptime(monitorID, duration, uptime) {
log.debug("UptimeCacheList", "addUptime: " + monitorID + " " + duration);
if (!UptimeCacheList.list[monitorID]) {
UptimeCacheList.list[monitorID] = {};
}
UptimeCacheList.list[monitorID][duration] = uptime;
}
/**
* Clear cache for specified monitor
* @param {number} monitorID
*/
static clearCache(monitorID) {
log.debug("UptimeCacheList", "clearCache: " + monitorID);
delete UptimeCacheList.list[monitorID];
}
}
module.exports = {
UptimeCacheList,
};
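A usage sketch (monitor ID, duration, and uptime value are illustrative, and the require path assumes the file lives alongside the other server modules): callers check the cache first, fall back to the expensive calculation, and clear the entry whenever a new heartbeat arrives.

const { UptimeCacheList } = require("./uptime-cache-list");

let uptime = UptimeCacheList.getUptime(1, 24);
if (uptime === null) {
    uptime = 0.9987;                         // stand-in for the real uptime calculation
    UptimeCacheList.addUptime(1, 24, uptime);
}

// On a new heartbeat for monitor 1, invalidate all of its cached durations:
UptimeCacheList.clearCache(1);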

View File

@ -9,6 +9,9 @@ const Database = require("./database");
const util = require("util");
const { CacheableDnsHttpAgent } = require("./cacheable-dns-http-agent");
const { Settings } = require("./settings");
const dayjs = require("dayjs");
const { PluginsManager } = require("./plugins-manager");
// DO NOT IMPORT HERE IF THE MODULES USE `UptimeKumaServer.getInstance()`
/**
* `module.exports` (alias: `server`) should be inside this class, in order to avoid circular dependency issue.
@ -26,6 +29,13 @@ class UptimeKumaServer {
* @type {{}}
*/
monitorList = {};
/**
* Main maintenance list
* @type {{}}
*/
maintenanceList = {};
entryPage = "dashboard";
app = undefined;
httpServer = undefined;
@ -37,6 +47,22 @@ class UptimeKumaServer {
*/
indexHTML = "";
generateMaintenanceTimeslotsInterval = undefined;
/**
* Plugins Manager
* @type {PluginsManager}
*/
pluginsManager = null;
/**
*
* @type {{}}
*/
static monitorTypeList = {
};
static getInstance(args) {
if (UptimeKumaServer.instance == null) {
UptimeKumaServer.instance = new UptimeKumaServer(args);
@ -72,11 +98,27 @@ class UptimeKumaServer {
}
}
CacheableDnsHttpAgent.registerGlobalAgent();
this.io = new Server(this.httpServer);
}
/** Initialise app after the database has been set up */
async initAfterDatabaseReady() {
await CacheableDnsHttpAgent.update();
process.env.TZ = await this.getTimezone();
dayjs.tz.setDefault(process.env.TZ);
log.debug("DEBUG", "Timezone: " + process.env.TZ);
log.debug("DEBUG", "Current Time: " + dayjs.tz().format());
await this.generateMaintenanceTimeslots();
this.generateMaintenanceTimeslotsInterval = setInterval(this.generateMaintenanceTimeslots, 60 * 1000);
}
/**
* Send list of monitors to client
* @param {Socket} socket
* @returns {Object} List of monitors
*/
async sendMonitorList(socket) {
let list = await this.getMonitorJSONList(socket.userID);
this.io.to(socket.userID).emit("monitorList", list);
@ -104,6 +146,45 @@ class UptimeKumaServer {
return result;
}
/**
* Send maintenance list to client
* @param {Socket} socket Socket.io instance to send to
* @returns {Object}
*/
async sendMaintenanceList(socket) {
return await this.sendMaintenanceListByUserID(socket.userID);
}
/**
* Send list of maintenances to user
* @param {number} userID
* @returns {Object}
*/
async sendMaintenanceListByUserID(userID) {
let list = await this.getMaintenanceJSONList(userID);
this.io.to(userID).emit("maintenanceList", list);
return list;
}
/**
* Get a list of maintenances for the given user.
* @param {string} userID - The ID of the user to get maintenances for.
* @returns {Promise<Object>} A promise that resolves to an object with maintenance IDs as keys and maintenances objects as values.
*/
async getMaintenanceJSONList(userID) {
let result = {};
let maintenanceList = await R.find("maintenance", " user_id = ? ORDER BY end_date DESC, title", [
userID,
]);
for (let maintenance of maintenanceList) {
result[maintenance.id] = await maintenance.toJSON();
}
return result;
}
/**
* Write error to log file
* @param {any} error The error to write
@ -130,6 +211,11 @@ class UptimeKumaServer {
errorLogStream.end();
}
/**
* Get the IP of the client connected to the socket
* @param {Socket} socket
* @returns {string}
*/
async getClientIP(socket) {
let clientIP = socket.client.conn.remoteAddress;
@ -138,15 +224,115 @@ class UptimeKumaServer {
}
if (await Settings.get("trustProxy")) {
return socket.client.conn.request.headers["x-forwarded-for"]
const forwardedFor = socket.client.conn.request.headers["x-forwarded-for"];
return (typeof forwardedFor === "string" ? forwardedFor.split(",")[0].trim() : null)
|| socket.client.conn.request.headers["x-real-ip"]
|| clientIP.replace(/^.*:/, "");
} else {
return clientIP.replace(/^.*:/, "");
}
}
/**
* Attempt to get the current server timezone
* If this fails, fall back to environment variables and then make a
* guess.
* @returns {string}
*/
async getTimezone() {
let timezone = await Settings.get("serverTimezone");
if (timezone) {
return timezone;
} else if (process.env.TZ) {
return process.env.TZ;
} else {
return dayjs.tz.guess();
}
}
/**
* Get the current offset
* @returns {string}
*/
getTimezoneOffset() {
return dayjs().format("Z");
}
/**
* Set the current server timezone and environment variables
* @param {string} timezone
*/
async setTimezone(timezone) {
await Settings.set("serverTimezone", timezone, "general");
process.env.TZ = timezone;
dayjs.tz.setDefault(timezone);
}
/** Load the timeslots for maintenance */
async generateMaintenanceTimeslots() {
let list = await R.find("maintenance_timeslot", " generated_next = 0 AND start_date <= DATETIME('now') ");
for (let maintenanceTimeslot of list) {
let maintenance = await maintenanceTimeslot.maintenance;
await MaintenanceTimeslot.generateTimeslot(maintenance, maintenanceTimeslot.end_date, false);
maintenanceTimeslot.generated_next = true;
await R.store(maintenanceTimeslot);
}
}
/** Stop the server */
async stop() {
clearTimeout(this.generateMaintenanceTimeslotsInterval);
}
loadPlugins() {
this.pluginsManager = new PluginsManager(this);
}
/**
*
* Get the plugin manager
*/
getPluginManager() {
return this.pluginsManager;
}
/**
* Register a monitor type provided by a plugin
* @param {MonitorType} monitorType
*/
addMonitorType(monitorType) {
if (monitorType instanceof MonitorType && monitorType.name) {
if (monitorType.name in UptimeKumaServer.monitorTypeList) {
log.error("", "Conflicting monitor type name: " + monitorType.name);
}
UptimeKumaServer.monitorTypeList[monitorType.name] = monitorType;
} else {
log.error("", "Invalid Monitor Type: " + monitorType.name);
}
}
/**
* Unregister a monitor type
* @param {MonitorType} monitorType
*/
removeMonitorType(monitorType) {
if (UptimeKumaServer.monitorTypeList[monitorType.name] === monitorType) {
delete UptimeKumaServer.monitorTypeList[monitorType.name];
} else {
log.error("", "Remove MonitorType failed: " + monitorType.name);
}
}
}
module.exports = {
UptimeKumaServer
};
// Must be at the end
const MaintenanceTimeslot = require("./model/maintenance_timeslot");
const { MonitorType } = require("./monitor-types/monitor-type");

View File

@ -1,5 +1,5 @@
const tcpp = require("tcp-ping");
const Ping = require("./ping-lite");
const ping = require("@louislam/ping");
const { R } = require("redbean-node");
const { log, genSecret } = require("../src/util");
const passwordHash = require("./password-hash");
@ -13,21 +13,22 @@ const { badgeConstants } = require("./config");
const mssql = require("mssql");
const { Client } = require("pg");
const postgresConParse = require("pg-connection-string").parse;
const mysql = require("mysql2");
const { MongoClient } = require("mongodb");
const { NtlmClient } = require("axios-ntlm");
const { Settings } = require("./settings");
const grpc = require("@grpc/grpc-js");
const protojs = require("protobufjs");
const radiusClient = require("node-radius-client");
const redis = require("redis");
const {
dictionaries: {
rfc2865: { file, attributes },
},
} = require("node-radius-utils");
const dayjs = require("dayjs");
// From ping-lite
exports.WIN = /^win/.test(process.platform);
exports.LIN = /^linux/.test(process.platform);
exports.MAC = /^darwin/.test(process.platform);
exports.FBSD = /^freebsd/.test(process.platform);
exports.BSD = /bsd$/.test(process.platform);
const isWindows = /^win/.test(process.platform);
/**
* Init or reset JWT secret
@ -78,15 +79,16 @@ exports.tcping = function (hostname, port) {
/**
* Ping the specified machine
* @param {string} hostname Hostname / address of machine
* @param {number} [size=56] Size of packet to send
* @returns {Promise<number>} Time for ping in ms rounded to nearest integer
*/
exports.ping = async (hostname) => {
exports.ping = async (hostname, size = 56) => {
try {
return await exports.pingAsync(hostname);
return await exports.pingAsync(hostname, false, size);
} catch (e) {
// If the host cannot be resolved, try again with ipv6
if (e.message.includes("service not known")) {
return await exports.pingAsync(hostname, true);
return await exports.pingAsync(hostname, true, size);
} else {
throw e;
}
@ -97,22 +99,29 @@ exports.ping = async (hostname) => {
* Ping the specified machine
* @param {string} hostname Hostname / address of machine to ping
* @param {boolean} ipv6 Should IPv6 be used?
* @param {number} [size = 56] Size of ping packet to send
* @returns {Promise<number>} Time for ping in ms rounded to nearest integer
*/
exports.pingAsync = function (hostname, ipv6 = false) {
exports.pingAsync = function (hostname, ipv6 = false, size = 56) {
return new Promise((resolve, reject) => {
const ping = new Ping(hostname, {
ipv6
});
ping.send(function (err, ms, stdout) {
if (err) {
reject(err);
} else if (ms === null) {
reject(new Error(stdout));
ping.promise.probe(hostname, {
v6: ipv6,
min_reply: 1,
deadline: 10,
packetSize: size,
}).then((res) => {
// If ping failed, it will set field to unknown
if (res.alive) {
resolve(res.time);
} else {
resolve(Math.round(ms));
if (isWindows) {
reject(new Error(exports.convertToUTF8(res.output)));
} else {
reject(new Error(res.output));
}
}
}).catch((err) => {
reject(err);
});
});
};
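Usage sketch (hostname and require path are hypothetical): the optional second argument is the ICMP payload size, which ends up as packetSize in the probe options above.

const { ping } = require("./util-server");

ping("example.com", 64)
    .then((ms) => console.log(`example.com answered in ${ms} ms`))
    .catch((err) => console.error(err.message));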
@ -131,7 +140,7 @@ exports.mqttAsync = function (hostname, topic, okMessage, options = {}) {
const { port, username, password, interval = 20 } = options;
// Adds MQTT protocol to the hostname if not already present
if (!/^(?:http|mqtt)s?:\/\//.test(hostname)) {
if (!/^(?:http|mqtt|ws)s?:\/\//.test(hostname)) {
hostname = "mqtt://" + hostname;
}
@ -141,10 +150,11 @@ exports.mqttAsync = function (hostname, topic, okMessage, options = {}) {
reject(new Error("Timeout"));
}, interval * 1000 * 0.8);
log.debug("mqtt", "MQTT connecting");
const mqttUrl = `${hostname}:${port}`;
let client = mqtt.connect(hostname, {
port,
log.debug("mqtt", `MQTT connecting to ${mqttUrl}`);
let client = mqtt.connect(mqttUrl, {
username,
password
});
@ -244,19 +254,19 @@ exports.dnsResolve = function (hostname, resolverServer, resolverPort, rrtype) {
* @param {string} query The query to validate the database with
* @returns {Promise<(string[]|Object[]|Object)>}
*/
exports.mssqlQuery = function (connectionString, query) {
return new Promise((resolve, reject) => {
mssql.connect(connectionString).then(pool => {
return pool.request()
.query(query);
}).then(result => {
resolve(result);
}).catch(err => {
reject(err);
}).finally(() => {
mssql.close();
});
});
exports.mssqlQuery = async function (connectionString, query) {
let pool;
try {
pool = new mssql.ConnectionPool(connectionString);
await pool.connect();
await pool.request().query(query);
pool.close();
} catch (e) {
if (pool) {
pool.close();
}
throw e;
}
};
/**
@ -276,9 +286,36 @@ exports.postgresQuery = function (connectionString, query) {
const client = new Client({ connectionString });
client.connect();
client.connect((err) => {
if (err) {
reject(err);
client.end();
} else {
// Connected here
client.query(query, (err, res) => {
if (err) {
reject(err);
} else {
resolve(res);
}
client.end();
});
}
});
return client.query(query)
});
};
/**
* Run a query on MySQL/MariaDB
* @param {string} connectionString The database connection string
* @param {string} query The query to validate the database with
* @returns {Promise<(string[]|Object[]|Object)>}
*/
exports.mysqlQuery = function (connectionString, query) {
return new Promise((resolve, reject) => {
const connection = mysql.createConnection(connectionString);
connection.promise().query(query)
.then(res => {
resolve(res);
})
@ -286,11 +323,39 @@ exports.postgresQuery = function (connectionString, query) {
reject(err);
})
.finally(() => {
client.end();
connection.end();
});
});
};
/**
* Connect to and Ping a MongoDB database
* @param {string} connectionString The database connection string
* @returns {Promise<(string[]|Object[]|Object)>}
*/
exports.mongodbPing = async function (connectionString) {
let client = await MongoClient.connect(connectionString);
let dbPing = await client.db().command({ ping: 1 });
await client.close();
if (dbPing["ok"] === 1) {
return "UP";
} else {
throw Error("failed");
}
};
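A usage sketch for mongodbPing with a hypothetical connection string; it resolves with "UP" when the ping command succeeds:

// Usage sketch (hypothetical connection string)
const { mongodbPing } = require("./util-server");

mongodbPing("mongodb://localhost:27017")
    .then((status) => console.log("MongoDB status:", status))
    .catch((err) => console.error("MongoDB ping failed:", err.message));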
/**
* Query radius server
* @param {string} hostname Hostname of radius server
* @param {string} username Username to use
* @param {string} password Password to use
* @param {string} calledStationId ID of called station
* @param {string} callingStationId ID of calling station
* @param {string} secret Secret to use
* @param {number} [port=1812] Port to contact radius server on
* @returns {Promise<any>}
*/
exports.radius = function (
hostname,
username,
@ -298,9 +363,11 @@ exports.radius = function (
calledStationId,
callingStationId,
secret,
port = 1812,
) {
const client = new radiusClient({
host: hostname,
hostPort: port,
dictionaries: [ file ],
});
@ -315,6 +382,30 @@ exports.radius = function (
});
};
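A usage sketch for the radius check with the new optional port argument; every value below is a placeholder:

// Usage sketch (placeholder values)
const { radius } = require("./util-server");

radius("radius.example", "monitor-user", "monitor-pass", "00-11-22-33-44-55", "66-77-88-99-AA-BB", "shared-secret", 1812)
    .then((res) => console.log("RADIUS response:", res))
    .catch((err) => console.error("RADIUS check failed:", err.message));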
/**
* Redis server ping
* @param {string} dsn The Redis connection string
* @returns {Promise<any>} Response of the Redis PING command
*/
exports.redisPingAsync = function (dsn) {
return new Promise((resolve, reject) => {
const client = redis.createClient({
url: dsn,
});
client.on("error", (err) => {
reject(err);
});
client.connect().then(() => {
client.ping().then((res, err) => {
if (err) {
reject(err);
} else {
resolve(res);
}
});
});
});
};
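A usage sketch for redisPingAsync with a hypothetical DSN; the resolved value is the server's reply to PING, typically "PONG":

// Usage sketch (hypothetical DSN)
const { redisPingAsync } = require("./util-server");

redisPingAsync("redis://localhost:6379")
    .then((res) => console.log("Redis replied:", res))
    .catch((err) => console.error("Redis ping failed:", err.message));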
/**
* Retrieve value of setting based on key
* @param {string} key Key of setting to retrieve
@ -431,6 +522,10 @@ const parseCertificateInfo = function (info) {
* @returns {Object} Object containing certificate information
*/
exports.checkCertificate = function (res) {
if (!res.request.res.socket) {
throw new Error("No socket found");
}
const info = res.request.res.socket.getPeerCertificate(true);
const valid = res.request.res.socket.authorized || false;
@ -557,7 +652,7 @@ exports.doubleCheckPassword = async (socket, currentPassword) => {
exports.startUnitTest = async () => {
console.log("Starting unit test...");
const npm = /^win/.test(process.platform) ? "npm.cmd" : "npm";
const child = childProcess.spawn(npm, [ "run", "jest" ]);
const child = childProcess.spawn(npm, [ "run", "jest-backend" ]);
child.stdout.on("data", (data) => {
console.log(data.toString());
@ -645,3 +740,121 @@ module.exports.send403 = (res, msg = "") => {
"msg": msg,
});
};
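/**
* Shift a time object's hours and minutes by the UTC offset of the given timezone
* @param {object} obj Object with `hours` and `minutes` fields
* @param {string} timezone IANA timezone name; defaults to the server's local timezone
* @param {boolean} timeObjectToUTC true converts local time to UTC, false converts UTC to local time
* @returns {object} The adjusted time object
*/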
function timeObjectConvertTimezone(obj, timezone, timeObjectToUTC = true) {
let offsetString;
if (timezone) {
offsetString = dayjs().tz(timezone).format("Z");
} else {
offsetString = dayjs().format("Z");
}
let hours = parseInt(offsetString.substring(1, 3));
let minutes = parseInt(offsetString.substring(4, 6));
if (
(timeObjectToUTC && offsetString.startsWith("+")) ||
(!timeObjectToUTC && offsetString.startsWith("-"))
) {
hours *= -1;
minutes *= -1;
}
obj.hours += hours;
obj.minutes += minutes;
// Handle out of bound
if (obj.minutes < 0) {
obj.minutes += 60;
obj.hours--;
} else if (obj.minutes >= 60) {
obj.minutes -= 60;
obj.hours++;
}
if (obj.hours < 0) {
obj.hours += 24;
} else if (obj.hours >= 24) {
obj.hours -= 24;
}
return obj;
}
/**
* Convert a time object from the given timezone to UTC
* @param {object} obj Object with `hours` and `minutes` fields
* @param {string} timezone IANA timezone name; defaults to the server's local timezone
* @returns {object} The converted time object
*/
module.exports.timeObjectToUTC = (obj, timezone = undefined) => {
return timeObjectConvertTimezone(obj, timezone, true);
};
/**
* Convert a time object from UTC to the given timezone
* @param {object} obj Object with `hours` and `minutes` fields
* @param {string} timezone IANA timezone name; defaults to the server's local timezone
* @returns {object} The converted time object
*/
module.exports.timeObjectToLocal = (obj, timezone = undefined) => {
return timeObjectConvertTimezone(obj, timezone, false);
};
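A quick worked example of the two helpers above, assuming the timezone currently resolves to a +08:00 offset (for example Asia/Shanghai):

// Worked example (assumes a +08:00 offset)
const { timeObjectToUTC, timeObjectToLocal } = require("./util-server");

// 10:30 local time at +08:00 becomes 02:30 UTC ...
console.log(timeObjectToUTC({ hours: 10, minutes: 30 }, "Asia/Shanghai")); // { hours: 2, minutes: 30 }

// ... and converting back restores the local time
console.log(timeObjectToLocal({ hours: 2, minutes: 30 }, "Asia/Shanghai")); // { hours: 10, minutes: 30 }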
/**
* Create a gRPC client stub and run the configured unary call
* @param {Object} options Options for the gRPC check (URL, protobuf definition, service name, method, TLS flag, request body)
* @returns {Promise<Object>} Object with `code`, `errorMessage` and `data` fields
*/
module.exports.grpcQuery = async (options) => {
const { grpcUrl, grpcProtobufData, grpcServiceName, grpcEnableTls, grpcMethod, grpcBody } = options;
const protocObject = protojs.parse(grpcProtobufData);
const protoServiceObject = protocObject.root.lookupService(grpcServiceName);
const Client = grpc.makeGenericClientConstructor({});
const credentials = grpcEnableTls ? grpc.credentials.createSsl() : grpc.credentials.createInsecure();
const client = new Client(
grpcUrl,
credentials
);
const grpcService = protoServiceObject.create(function (method, requestData, cb) {
const fullServiceName = method.fullName;
const serviceFQDN = fullServiceName.split(".");
const serviceMethod = serviceFQDN.pop();
const serviceMethodClientImpl = `/${serviceFQDN.slice(1).join(".")}/${serviceMethod}`;
log.debug("monitor", `gRPC method ${serviceMethodClientImpl}`);
client.makeUnaryRequest(
serviceMethodClientImpl,
arg => arg,
arg => arg,
requestData,
cb);
}, false, false);
return new Promise((resolve, _) => {
try {
return grpcService[`${grpcMethod}`](JSON.parse(grpcBody), function (err, response) {
const responseData = JSON.stringify(response);
if (err) {
return resolve({
code: err.code,
errorMessage: err.details,
data: ""
});
} else {
log.debug("monitor:", `gRPC response: ${JSON.stringify(response)}`);
return resolve({
code: 1,
errorMessage: "",
data: responseData
});
}
});
} catch (err) {
return resolve({
code: -1,
errorMessage: `Error ${err}. Please review your gRPC configuration options. The service name must not include the package name, and the method name must be in camelCase format`,
data: ""
});
}
});
};
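A sketch of the options object grpcQuery expects, using placeholder values; note the constraints called out in the error message above (service name without the package prefix, camelCase method name):

// Options sketch (placeholder proto definition and server address)
const { grpcQuery } = require("./util-server");

const protoDefinition = `
syntax = "proto3";
package health;
service Health {
    rpc Check (Req) returns (Res);
}
message Req {}
message Res {
    string status = 1;
}
`;

grpcQuery({
    grpcUrl: "localhost:50051",
    grpcProtobufData: protoDefinition,
    grpcServiceName: "Health", // service name only, without the package prefix
    grpcMethod: "check",       // camelCase method name
    grpcEnableTls: false,
    grpcBody: "{}",
}).then((res) => console.log(res.code, res.errorMessage, res.data));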

View File

@ -22,6 +22,24 @@ textarea.form-control {
width: 10px;
}
.bg-maintenance {
color: white !important;
background-color: $maintenance !important;
}
.bg-dark {
color: white;
}
.text-maintenance {
color: $maintenance !important;
}
.incident a,
.bg-maintenance a {
color: inherit;
}
.list-group {
border-radius: 0.75rem;
@ -107,6 +125,19 @@ optgroup {
}
}
.btn-normal {
$bg-color: #F5F5F5;
background-color: $bg-color;
border-color: $bg-color;
&:hover {
$hover-color: darken($bg-color, 3%);
background-color: $hover-color;
border-color: $hover-color;
}
}
.btn-warning {
color: white;
@ -222,6 +253,11 @@ optgroup {
}
}
.incident a,
.bg-maintenance a {
color: inherit;
}
.form-control,
.form-control:focus,
.form-select,
@ -256,6 +292,20 @@ optgroup {
color: white;
}
.btn-normal {
$bg-color: $dark-header-bg;
color: $dark-font-color;
background-color: $bg-color;
border-color: $bg-color;
&:hover {
$hover-color: darken($bg-color, 3%);
background-color: $hover-color;
border-color: $hover-color;
}
}
.btn-warning {
color: $dark-font-color2;
@ -323,6 +373,7 @@ optgroup {
&.bg-info,
&.bg-warning,
&.bg-danger,
&.bg-maintenance,
&.bg-light {
color: $dark-font-color2;
}

View File

@ -2,4 +2,8 @@ html[lang='fa'] {
#app {
font-family: 'IRANSans', 'Iranian Sans','B Nazanin', 'Tahoma', ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, segoe ui, Roboto, helvetica neue, Arial, noto sans, sans-serif, apple color emoji, segoe ui emoji, segoe ui symbol, noto color emoji;
}
}
}
ul.multiselect__content {
padding-left: 0 !important;
}

View File

@ -1,6 +1,7 @@
$primary: #5cdd8b;
$danger: #dc3545;
$warning: #f8a306;
$maintenance: #1747f5;
$link-color: #111;
$border-radius: 50rem;

View File

@ -0,0 +1,39 @@
@import "@vuepic/vue-datepicker/dist/main.css";
@import "vars.scss";
// Must use #{ }
// Remark: https://stackoverflow.com/questions/50202991/unable-to-set-scss-variable-to-css-variable
.dp__theme_dark {
--dp-background-color: #{$dark-bg2};
--dp-text-color: #{$dark-font-color};
--dp-hover-color: #484848;
--dp-hover-text-color: #ffffff;
--dp-hover-icon-color: #959595;
--dp-primary-color: #{#5cdd8b};
--dp-primary-text-color: #ffffff;
--dp-secondary-color: #494949;
--dp-border-color: #{$dark-border-color};
--dp-menu-border-color: #2d2d2d;
--dp-border-color-hover: #{$dark-border-color};
--dp-disabled-color: #212121;
--dp-scroll-bar-background: #212121;
--dp-scroll-bar-color: #484848;
--dp-success-color: #{$primary};
--dp-success-color-disabled: #428f59;
--dp-icon-color: #959595;
--dp-danger-color: #e53935;
--dp-highlight-color: rgba(0, 92, 178, 0.2);
}
.dp__input {
border-radius: $border-radius;
}
// Fix: Full width of text input when using "inline textInput inlineWithInput" mode
.dp__main > div[aria-label="Datepicker input"] {
width: 100%;
}
.dp__main > div[aria-label="Datepicker menu"]:nth-child(2) {
margin-top: 20px;
}

View File

@ -3,14 +3,6 @@
</template>
<script>
import dayjs from "dayjs";
import relativeTime from "dayjs/plugin/relativeTime";
import timezone from "dayjs/plugin/timezone"; // dependent on utc plugin
import utc from "dayjs/plugin/utc";
dayjs.extend(utc);
dayjs.extend(timezone);
dayjs.extend(relativeTime);
export default {
props: {
/** Value of date time */

View File

@ -73,7 +73,7 @@ export default {
emits: [ "added" ],
data() {
return {
model: null,
modal: null,
processing: false,
id: null,
connectionTypes: [ "socket", "tcp" ],
@ -91,11 +91,16 @@ export default {
},
methods: {
/** Confirm deletion of docker host */
deleteConfirm() {
this.modal.hide();
this.$refs.confirmDelete.show();
},
/**
* Show specified docker host
* @param {number} dockerHostID
*/
show(dockerHostID) {
if (dockerHostID) {
let found = false;
@ -126,6 +131,7 @@ export default {
this.modal.show();
},
/** Add docker host */
submit() {
this.processing = true;
this.$root.getSocket().emit("addDockerHost", this.dockerHost, this.id, (res) => {
@ -144,6 +150,7 @@ export default {
});
},
/** Test the docker host */
test() {
this.processing = true;
this.$root.getSocket().emit("testDockerHost", this.dockerHost, (res) => {
@ -152,6 +159,7 @@ export default {
});
},
/** Delete this docker host */
deleteDockerHost() {
this.processing = true;
this.$root.getSocket().emit("deleteDockerHost", this.id, (res) => {

Some files were not shown because too many files have changed in this diff.