Mirror of https://github.com/louislam/uptime-kuma.git (synced 2024-12-22 05:55:16 -05:00)
Merge branch '1.23.X'
# Conflicts:
#	package-lock.json
#	server/database.js
#	server/server.js
#	server/util-server.js
Commit 852b3fa61b
@@ -38,6 +38,10 @@ tsconfig.json
 /extra/push-examples
 /extra/uptime-kuma-push
+
+# Comment the following line if you want to rebuild the healthcheck binary
+/extra/healthcheck-armv7
+
 
 ### .gitignore content (commented rules are duplicated)
 
 #node_modules
.github/workflows/auto-test.yml (4 changes)
@@ -5,11 +5,11 @@ name: Auto Test
 
 on:
   push:
-    branches: [ master ]
+    branches: [ master, 1.23.X ]
     paths-ignore:
       - '*.md'
   pull_request:
-    branches: [ master, 2.0.X ]
+    branches: [ master, 1.23.X ]
     paths-ignore:
       - '*.md'
 
db/patch-notification-config.sql (new file, 10 additions)
@@ -0,0 +1,10 @@
+-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
+BEGIN TRANSACTION;
+
+-- SQLite: Change the data type of the column "config" from VARCHAR to TEXT
+ALTER TABLE notification RENAME COLUMN config TO config_old;
+ALTER TABLE notification ADD COLUMN config TEXT;
+UPDATE notification SET config = config_old;
+ALTER TABLE notification DROP COLUMN config_old;
+
+COMMIT;
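Note: SQLite has no ALTER TABLE ... ALTER COLUMN, so the patch changes the column type by renaming the existing config column, adding a fresh TEXT column, copying the rows across, and dropping the renamed column, all inside one transaction.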
package.json
@@ -1,6 +1,6 @@
 {
     "name": "uptime-kuma",
-    "version": "1.23.2",
+    "version": "1.23.3",
     "license": "MIT",
     "repository": {
         "type": "git",
@@ -42,7 +42,7 @@
         "build-docker-nightly-local": "npm run build && docker build -f docker/dockerfile -t louislam/uptime-kuma:nightly2 --target nightly .",
         "build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test2 --target pr-test2 . --push",
         "upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
-        "setup": "git checkout 1.23.2 && npm ci --production && npm run download-dist",
+        "setup": "git checkout 1.23.3 && npm ci --production && npm run download-dist",
         "download-dist": "node extra/download-dist.js",
         "mark-as-nightly": "node extra/mark-as-nightly.js",
         "reset-password": "node extra/reset-password.js",
@@ -99,7 +99,7 @@
         "express-basic-auth": "~1.2.1",
         "express-static-gzip": "~2.1.7",
         "form-data": "~4.0.0",
-        "gamedig": "~4.0.5",
+        "gamedig": "~4.1.0",
         "http-graceful-shutdown": "~3.1.7",
         "http-proxy-agent": "~5.0.0",
         "https-proxy-agent": "~5.0.1",
server/database.js
@@ -85,6 +85,7 @@ class Database {
         "patch-monitor-oauth-cc.sql": true,
         "patch-add-timeout-monitor.sql": true,
         "patch-add-gamedig-given-port.sql": true, // The last file so far converted to a knex migration file
+        "patch-notification-config.sql": true,
     };
 
     /**
server/model/monitor.js
@@ -56,7 +56,7 @@ class Monitor extends BeanModel {
             obj.tags = await this.getTags();
         }
 
-        if (certExpiry && this.type === "http" && this.getURLProtocol() === "https:") {
+        if (certExpiry && (this.type === "http" || this.type === "keyword" || this.type === "json-query") && this.getURLProtocol() === "https:") {
             const { certExpiryDaysRemaining, validCert } = await this.getCertExpiry(this.id);
             obj.certExpiryDaysRemaining = certExpiryDaysRemaining;
             obj.validCert = validCert;
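Note: keyword and json-query monitors are HTTP checks as well, so with this change they also report certificate expiry and validity for https targets instead of leaving those fields unset.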
server/model/user.js
@@ -1,6 +1,8 @@
 const { BeanModel } = require("redbean-node/dist/bean-model");
 const passwordHash = require("../password-hash");
 const { R } = require("redbean-node");
+const jwt = require("jsonwebtoken");
+const { shake256, SHAKE256_LENGTH } = require("../util-server");
 
 class User extends BeanModel {
     /**
@@ -27,6 +29,19 @@ class User extends BeanModel {
         this.password = newPassword;
     }
 
+    /**
+     * Create a new JWT for a user
+     * @param {User} user
+     * @param {string} jwtSecret
+     * @return {string}
+     */
+    static createJWT(user, jwtSecret) {
+        return jwt.sign({
+            username: user.username,
+            h: shake256(user.password, SHAKE256_LENGTH),
+        }, jwtSecret);
+    }
+
 }
 
 module.exports = User;
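The new h claim ties each token to a SHAKE256 digest of the user's current password hash, so changing the password invalidates previously issued tokens. A minimal sketch of the verifying side (verifyUserToken is an illustrative helper, not part of this diff; the password-change check itself mirrors the server.js hunk further down):

    const jwt = require("jsonwebtoken");
    const { shake256, SHAKE256_LENGTH } = require("../util-server");

    // Sketch: verify a token produced by User.createJWT and reject it when the
    // stored password hash no longer matches the digest embedded in the token.
    function verifyUserToken(token, jwtSecret, user) {
        const decoded = jwt.verify(token, jwtSecret);
        if (decoded.h !== shake256(user.password, SHAKE256_LENGTH)) {
            throw new Error("The token is invalid due to password change or old token");
        }
        return decoded;
    }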
server/routers/api-router.js
@@ -1,5 +1,12 @@
 let express = require("express");
-const { allowDevAllOrigin, allowAllOrigin, percentageToColor, filterAndJoin, sendHttpError } = require("../util-server");
+const {
+    setting,
+    allowDevAllOrigin,
+    allowAllOrigin,
+    percentageToColor,
+    filterAndJoin,
+    sendHttpError,
+} = require("../util-server");
 const { R } = require("redbean-node");
 const apicache = require("../modules/apicache");
 const Monitor = require("../model/monitor");
@@ -23,10 +30,14 @@ router.get("/api/entry-page", async (request, response) => {
     allowDevAllOrigin(response);
 
     let result = { };
+    let hostname = request.hostname;
+    if ((await setting("trustProxy")) && request.headers["x-forwarded-host"]) {
+        hostname = request.headers["x-forwarded-host"];
+    }
 
-    if (request.hostname in StatusPage.domainMappingList) {
+    if (hostname in StatusPage.domainMappingList) {
         result.type = "statusPageMatchedDomain";
-        result.statusPageSlug = StatusPage.domainMappingList[request.hostname];
+        result.statusPageSlug = StatusPage.domainMappingList[hostname];
     } else {
         result.type = "entryPage";
         result.entryPage = server.entryPage;
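Note: the forwarded host is only honoured when the trustProxy setting is enabled; with it on, the status-page domain mapping keys off the X-Forwarded-Host header supplied by the reverse proxy instead of the hostname the proxy used to reach Uptime Kuma.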
server/server.js
@@ -78,9 +78,10 @@ const app = server.app;
 log.info("server", "Importing this project modules");
 log.debug("server", "Importing Monitor");
 const Monitor = require("./model/monitor");
+const User = require("./model/user");
 
 log.debug("server", "Importing Settings");
-const { getSettings, setSettings, setting, initJWTSecret, checkLogin, FBSD, doubleCheckPassword, startE2eTests,
-    allowDevAllOrigin
+const { getSettings, setSettings, setting, initJWTSecret, checkLogin, startUnitTest, FBSD, doubleCheckPassword, startE2eTests, shake256, SHAKE256_LENGTH, allowDevAllOrigin,
 } = require("./util-server");
 
 log.debug("server", "Importing Notification");
@@ -326,6 +327,11 @@ let needSetup = false;
                     decoded.username,
                 ]);
 
+                // Check if the password changed
+                if (decoded.h !== shake256(user.password, SHAKE256_LENGTH)) {
+                    throw new Error("The token is invalid due to password change or old token");
+                }
+
                 if (user) {
                     log.debug("auth", "afterLogin");
                     afterLogin(socket, user);
@@ -347,9 +353,10 @@ let needSetup = false;
                     });
                 }
             } catch (error) {
 
                 log.error("auth", `Invalid token. IP=${clientIP}`);
 
                 if (error.message) {
                     log.error("auth", error.message, `IP=${clientIP}`);
                 }
                 callback({
                     ok: false,
                     msg: "authInvalidToken",
@@ -389,9 +396,7 @@ let needSetup = false;
 
                 callback({
                     ok: true,
-                    token: jwt.sign({
-                        username: data.username,
-                    }, server.jwtSecret),
+                    token: User.createJWT(user, server.jwtSecret),
                 });
             }
 
@@ -419,9 +424,7 @@ let needSetup = false;
 
                 callback({
                     ok: true,
-                    token: jwt.sign({
-                        username: data.username,
-                    }, server.jwtSecret),
+                    token: User.createJWT(user, server.jwtSecret),
                 });
             } else {
 
server/util-server.js
@@ -36,6 +36,7 @@ const rl = readline.createInterface({ input: process.stdin,
 // SASLOptions used in JSDoc
 // eslint-disable-next-line no-unused-vars
 const { Kafka, SASLOptions } = require("kafkajs");
+const crypto = require("crypto");
 
 const isWindows = process.platform === /^win/.test(process.platform);
 /**
@@ -290,22 +291,22 @@ exports.kafkaProducerAsync = function (brokers, topic, message, options = {}, sa
 
         producer.connect().then(
             () => {
-                try {
-                    producer.send({
-                        topic: topic,
-                        messages: [{
-                            value: message,
-                        }],
-                    });
-                    connectedToKafka = true;
-                    clearTimeout(timeoutID);
+                producer.send({
+                    topic: topic,
+                    messages: [{
+                        value: message,
+                    }],
+                }).then((_) => {
                     resolve("Message sent successfully");
-                } catch (e) {
+                }).catch((e) => {
                     connectedToKafka = true;
                     producer.disconnect();
                     clearTimeout(timeoutID);
                     reject(new Error("Error sending message: " + e.message));
-                }
+                }).finally(() => {
+                    connectedToKafka = true;
+                    clearTimeout(timeoutID);
+                });
             }
         ).catch(
             (e) => {
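Context for this change: producer.send() returns a promise, so the old try/catch could only catch synchronous throws; a rejected send never reached the catch block and the surrounding promise had to wait for the timeout. A standalone sketch of the two patterns (sendThatRejects is a stand-in for producer.send(), not kafkajs code):

    // Stand-in for producer.send(): an async operation that fails.
    const sendThatRejects = () => Promise.reject(new Error("broker unavailable"));

    // Broken pattern: try/catch only sees synchronous throws, so the rejection
    // from the returned promise is never caught here.
    function buggy() {
        try {
            return sendThatRejects();
        } catch (e) {
            console.log("never reached:", e.message);
            return Promise.resolve();
        }
    }

    // Fixed pattern: handle the rejection on the promise chain itself, the same
    // .then()/.catch()/.finally() shape now used for producer.send() above.
    function fixed() {
        return sendThatRejects()
            .then(() => console.log("sent"))
            .catch((e) => console.log("caught:", e.message)) // -> caught: broker unavailable
            .finally(() => console.log("cleanup"));
    }

    buggy().catch(() => { /* swallow so the demo exits cleanly */ });
    fixed();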
@@ -317,8 +318,10 @@ exports.kafkaProducerAsync = function (brokers, topic, message, options = {}, sa
         );
 
         producer.on("producer.network.request_timeout", (_) => {
-            clearTimeout(timeoutID);
-            reject(new Error("producer.network.request_timeout"));
+            if (!connectedToKafka) {
+                clearTimeout(timeoutID);
+                reject(new Error("producer.network.request_timeout"));
+            }
         });
 
         producer.on("producer.disconnect", (_) => {
@@ -1060,6 +1063,23 @@ module.exports.grpcQuery = async (options) => {
     });
 };
 
+module.exports.SHAKE256_LENGTH = 16;
+
+/**
+ *
+ * @param {string} data
+ * @param {number} len
+ * @return {string}
+ */
+module.exports.shake256 = (data, len) => {
+    if (!data) {
+        return "";
+    }
+    return crypto.createHash("shake256", { outputLength: len })
+        .update(data)
+        .digest("hex");
+};
+
 module.exports.prompt = (query) => new Promise((resolve) => rl.question(query, resolve));
 
 // For unit test, export functions
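For reference, Node's crypto module supports SHAKE256 with a configurable output length, so with SHAKE256_LENGTH = 16 the helper returns a 32-character hex digest, and the empty-input guard returns an empty string. A quick standalone usage sketch (same construction as the export above; the input string is made up):

    const crypto = require("crypto");

    const shake256 = (data, len) =>
        data ? crypto.createHash("shake256", { outputLength: len }).update(data).digest("hex") : "";

    console.log(shake256("example-password-hash", 16));        // 32 hex characters (16 bytes)
    console.log(shake256("example-password-hash", 16).length); // 32
    console.log(shake256("", 16));                             // "" - empty input short-circuits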
@@ -881,6 +881,7 @@ const monitorDefaults = {
     kafkaProducerSaslOptions: {
         mechanism: "None",
     },
     kafkaProducerSsl: false,
+    gamedigGivenPortOnly: true,
 };
 
@@ -438,7 +438,7 @@ export default {
             lastUpdateTime: dayjs(),
             updateCountdown: null,
             updateCountdownText: null,
-            loading: false,
+            loading: true,
         };
     },
     computed: {
@@ -702,6 +702,8 @@ export default {
             this.incident = res.data.incident;
             this.maintenanceList = res.data.maintenanceList;
             this.$root.publicGroupList = res.data.publicGroupList;
+
+            this.loading = false;
         }).catch( function (error) {
             if (error.response.status === 404) {
                 location.href = "/page-not-found";
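Note: the loading flag now starts as true and is only cleared once the status-page data has arrived, so the page shows its loading state instead of briefly rendering an empty monitor list.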