Merge branch '1.23.X'

# Conflicts:
#	package-lock.json
#	server/database.js
#	server/server.js
#	server/util-server.js
This commit is contained in:
Louis Lam 2023-10-09 21:28:01 +08:00
commit 852b3fa61b
12 changed files with 100 additions and 33 deletions

View File

@ -38,6 +38,10 @@ tsconfig.json
/extra/push-examples /extra/push-examples
/extra/uptime-kuma-push /extra/uptime-kuma-push
# Comment the following line if you want to rebuild the healthcheck binary
/extra/healthcheck-armv7
### .gitignore content (commented rules are duplicated) ### .gitignore content (commented rules are duplicated)
#node_modules #node_modules

View File

@ -5,11 +5,11 @@ name: Auto Test
on: on:
push: push:
branches: [ master ] branches: [ master, 1.23.X ]
paths-ignore: paths-ignore:
- '*.md' - '*.md'
pull_request: pull_request:
branches: [ master, 2.0.X ] branches: [ master, 1.23.X ]
paths-ignore: paths-ignore:
- '*.md' - '*.md'

View File

@ -0,0 +1,10 @@
-- You should not modify this if it has already been pushed to GitHub, unless there is something seriously wrong with the db.
BEGIN TRANSACTION;
-- SQLite: Change the data type of the column "config" from VARCHAR to TEXT
ALTER TABLE notification RENAME COLUMN config TO config_old;
ALTER TABLE notification ADD COLUMN config TEXT;
UPDATE notification SET config = config_old;
ALTER TABLE notification DROP COLUMN config_old;
COMMIT;

View File

@ -1,6 +1,6 @@
{ {
"name": "uptime-kuma", "name": "uptime-kuma",
"version": "1.23.2", "version": "1.23.3",
"license": "MIT", "license": "MIT",
"repository": { "repository": {
"type": "git", "type": "git",
@ -42,7 +42,7 @@
"build-docker-nightly-local": "npm run build && docker build -f docker/dockerfile -t louislam/uptime-kuma:nightly2 --target nightly .", "build-docker-nightly-local": "npm run build && docker build -f docker/dockerfile -t louislam/uptime-kuma:nightly2 --target nightly .",
"build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test2 --target pr-test2 . --push", "build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test2 --target pr-test2 . --push",
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain", "upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
"setup": "git checkout 1.23.2 && npm ci --production && npm run download-dist", "setup": "git checkout 1.23.3 && npm ci --production && npm run download-dist",
"download-dist": "node extra/download-dist.js", "download-dist": "node extra/download-dist.js",
"mark-as-nightly": "node extra/mark-as-nightly.js", "mark-as-nightly": "node extra/mark-as-nightly.js",
"reset-password": "node extra/reset-password.js", "reset-password": "node extra/reset-password.js",
@ -99,7 +99,7 @@
"express-basic-auth": "~1.2.1", "express-basic-auth": "~1.2.1",
"express-static-gzip": "~2.1.7", "express-static-gzip": "~2.1.7",
"form-data": "~4.0.0", "form-data": "~4.0.0",
"gamedig": "~4.0.5", "gamedig": "~4.1.0",
"http-graceful-shutdown": "~3.1.7", "http-graceful-shutdown": "~3.1.7",
"http-proxy-agent": "~5.0.0", "http-proxy-agent": "~5.0.0",
"https-proxy-agent": "~5.0.1", "https-proxy-agent": "~5.0.1",

View File

@ -85,6 +85,7 @@ class Database {
"patch-monitor-oauth-cc.sql": true, "patch-monitor-oauth-cc.sql": true,
"patch-add-timeout-monitor.sql": true, "patch-add-timeout-monitor.sql": true,
"patch-add-gamedig-given-port.sql": true, // The last file so far converted to a knex migration file "patch-add-gamedig-given-port.sql": true, // The last file so far converted to a knex migration file
"patch-notification-config.sql": true,
}; };
/** /**

View File

@ -56,7 +56,7 @@ class Monitor extends BeanModel {
obj.tags = await this.getTags(); obj.tags = await this.getTags();
} }
if (certExpiry && this.type === "http" && this.getURLProtocol() === "https:") { if (certExpiry && (this.type === "http" || this.type === "keyword" || this.type === "json-query") && this.getURLProtocol() === "https:") {
const { certExpiryDaysRemaining, validCert } = await this.getCertExpiry(this.id); const { certExpiryDaysRemaining, validCert } = await this.getCertExpiry(this.id);
obj.certExpiryDaysRemaining = certExpiryDaysRemaining; obj.certExpiryDaysRemaining = certExpiryDaysRemaining;
obj.validCert = validCert; obj.validCert = validCert;

View File

@ -1,6 +1,8 @@
const { BeanModel } = require("redbean-node/dist/bean-model"); const { BeanModel } = require("redbean-node/dist/bean-model");
const passwordHash = require("../password-hash"); const passwordHash = require("../password-hash");
const { R } = require("redbean-node"); const { R } = require("redbean-node");
const jwt = require("jsonwebtoken");
const { shake256, SHAKE256_LENGTH } = require("../util-server");
class User extends BeanModel { class User extends BeanModel {
/** /**
@ -27,6 +29,19 @@ class User extends BeanModel {
this.password = newPassword; this.password = newPassword;
} }
/**
* Create a new JWT for a user
* @param {User} user
* @param {string} jwtSecret
* @return {string}
*/
static createJWT(user, jwtSecret) {
return jwt.sign({
username: user.username,
h: shake256(user.password, SHAKE256_LENGTH),
}, jwtSecret);
}
} }
module.exports = User; module.exports = User;

View File

@ -1,5 +1,12 @@
let express = require("express"); let express = require("express");
const { allowDevAllOrigin, allowAllOrigin, percentageToColor, filterAndJoin, sendHttpError } = require("../util-server"); const {
setting,
allowDevAllOrigin,
allowAllOrigin,
percentageToColor,
filterAndJoin,
sendHttpError,
} = require("../util-server");
const { R } = require("redbean-node"); const { R } = require("redbean-node");
const apicache = require("../modules/apicache"); const apicache = require("../modules/apicache");
const Monitor = require("../model/monitor"); const Monitor = require("../model/monitor");
@ -23,10 +30,14 @@ router.get("/api/entry-page", async (request, response) => {
allowDevAllOrigin(response); allowDevAllOrigin(response);
let result = { }; let result = { };
let hostname = request.hostname;
if ((await setting("trustProxy")) && request.headers["x-forwarded-host"]) {
hostname = request.headers["x-forwarded-host"];
}
if (request.hostname in StatusPage.domainMappingList) { if (hostname in StatusPage.domainMappingList) {
result.type = "statusPageMatchedDomain"; result.type = "statusPageMatchedDomain";
result.statusPageSlug = StatusPage.domainMappingList[request.hostname]; result.statusPageSlug = StatusPage.domainMappingList[hostname];
} else { } else {
result.type = "entryPage"; result.type = "entryPage";
result.entryPage = server.entryPage; result.entryPage = server.entryPage;

View File

@ -78,9 +78,10 @@ const app = server.app;
log.info("server", "Importing this project modules"); log.info("server", "Importing this project modules");
log.debug("server", "Importing Monitor"); log.debug("server", "Importing Monitor");
const Monitor = require("./model/monitor"); const Monitor = require("./model/monitor");
const User = require("./model/user");
log.debug("server", "Importing Settings"); log.debug("server", "Importing Settings");
const { getSettings, setSettings, setting, initJWTSecret, checkLogin, FBSD, doubleCheckPassword, startE2eTests, const { getSettings, setSettings, setting, initJWTSecret, checkLogin, startUnitTest, FBSD, doubleCheckPassword, startE2eTests, shake256, SHAKE256_LENGTH, allowDevAllOrigin,
allowDevAllOrigin
} = require("./util-server"); } = require("./util-server");
log.debug("server", "Importing Notification"); log.debug("server", "Importing Notification");
@ -326,6 +327,11 @@ let needSetup = false;
decoded.username, decoded.username,
]); ]);
// Check if the password changed
if (decoded.h !== shake256(user.password, SHAKE256_LENGTH)) {
throw new Error("The token is invalid due to password change or old token");
}
if (user) { if (user) {
log.debug("auth", "afterLogin"); log.debug("auth", "afterLogin");
afterLogin(socket, user); afterLogin(socket, user);
@ -347,9 +353,10 @@ let needSetup = false;
}); });
} }
} catch (error) { } catch (error) {
log.error("auth", `Invalid token. IP=${clientIP}`); log.error("auth", `Invalid token. IP=${clientIP}`);
if (error.message) {
log.error("auth", error.message, `IP=${clientIP}`);
}
callback({ callback({
ok: false, ok: false,
msg: "authInvalidToken", msg: "authInvalidToken",
@ -389,9 +396,7 @@ let needSetup = false;
callback({ callback({
ok: true, ok: true,
token: jwt.sign({ token: User.createJWT(user, server.jwtSecret),
username: data.username,
}, server.jwtSecret),
}); });
} }
@ -419,9 +424,7 @@ let needSetup = false;
callback({ callback({
ok: true, ok: true,
token: jwt.sign({ token: User.createJWT(user, server.jwtSecret),
username: data.username,
}, server.jwtSecret),
}); });
} else { } else {

View File

@ -36,6 +36,7 @@ const rl = readline.createInterface({ input: process.stdin,
// SASLOptions used in JSDoc // SASLOptions used in JSDoc
// eslint-disable-next-line no-unused-vars // eslint-disable-next-line no-unused-vars
const { Kafka, SASLOptions } = require("kafkajs"); const { Kafka, SASLOptions } = require("kafkajs");
const crypto = require("crypto");
const isWindows = process.platform === /^win/.test(process.platform); const isWindows = process.platform === /^win/.test(process.platform);
/** /**
@ -290,22 +291,22 @@ exports.kafkaProducerAsync = function (brokers, topic, message, options = {}, sa
producer.connect().then( producer.connect().then(
() => { () => {
try { producer.send({
producer.send({ topic: topic,
topic: topic, messages: [{
messages: [{ value: message,
value: message, }],
}], }).then((_) => {
});
connectedToKafka = true;
clearTimeout(timeoutID);
resolve("Message sent successfully"); resolve("Message sent successfully");
} catch (e) { }).catch((e) => {
connectedToKafka = true; connectedToKafka = true;
producer.disconnect(); producer.disconnect();
clearTimeout(timeoutID); clearTimeout(timeoutID);
reject(new Error("Error sending message: " + e.message)); reject(new Error("Error sending message: " + e.message));
} }).finally(() => {
connectedToKafka = true;
clearTimeout(timeoutID);
});
} }
).catch( ).catch(
(e) => { (e) => {
@ -317,8 +318,10 @@ exports.kafkaProducerAsync = function (brokers, topic, message, options = {}, sa
); );
producer.on("producer.network.request_timeout", (_) => { producer.on("producer.network.request_timeout", (_) => {
clearTimeout(timeoutID); if (!connectedToKafka) {
reject(new Error("producer.network.request_timeout")); clearTimeout(timeoutID);
reject(new Error("producer.network.request_timeout"));
}
}); });
producer.on("producer.disconnect", (_) => { producer.on("producer.disconnect", (_) => {
@ -1060,6 +1063,23 @@ module.exports.grpcQuery = async (options) => {
}); });
}; };
module.exports.SHAKE256_LENGTH = 16;
/**
*
* @param {string} data
* @param {number} len
* @return {string}
*/
module.exports.shake256 = (data, len) => {
if (!data) {
return "";
}
return crypto.createHash("shake256", { outputLength: len })
.update(data)
.digest("hex");
};
module.exports.prompt = (query) => new Promise((resolve) => rl.question(query, resolve)); module.exports.prompt = (query) => new Promise((resolve) => rl.question(query, resolve));
// For unit test, export functions // For unit test, export functions

View File

@ -881,6 +881,7 @@ const monitorDefaults = {
kafkaProducerSaslOptions: { kafkaProducerSaslOptions: {
mechanism: "None", mechanism: "None",
}, },
kafkaProducerSsl: false,
gamedigGivenPortOnly: true, gamedigGivenPortOnly: true,
}; };

View File

@ -438,7 +438,7 @@ export default {
lastUpdateTime: dayjs(), lastUpdateTime: dayjs(),
updateCountdown: null, updateCountdown: null,
updateCountdownText: null, updateCountdownText: null,
loading: false, loading: true,
}; };
}, },
computed: { computed: {
@ -702,6 +702,8 @@ export default {
this.incident = res.data.incident; this.incident = res.data.incident;
this.maintenanceList = res.data.maintenanceList; this.maintenanceList = res.data.maintenanceList;
this.$root.publicGroupList = res.data.publicGroupList; this.$root.publicGroupList = res.data.publicGroupList;
this.loading = false;
}).catch( function (error) { }).catch( function (error) {
if (error.response.status === 404) { if (error.response.status === 404) {
location.href = "/page-not-found"; location.href = "/page-not-found";