uptime-kuma: added

parent 68c8c26857
commit 46d50954f7

5 changed files with 107 additions and 0 deletions

@@ -26,6 +26,8 @@
     ./gerd/services/element.nix
     ./gerd/services/matrix-synapse.nix
+
+    ./gerd/services/uptime-kuma.nix
   ];

   networking.hostName = "gerd";

@@ -36,12 +38,14 @@
   disks = {
     disk = "/dev/sda";
     pools.rpool.datasets = {
+      # zfs create -o quota=1G rpool/safe/svcs/uptime-kuma
       "safe/svcs/forgejo" = { mountpoint = "/srv/forgejo"; extra.options.quota = "5G"; };
       "safe/svcs/hedgedoc" = { mountpoint = "/srv/hedgedoc"; extra.options.quota = "5G"; };
       "safe/svcs/nextcloud" = { mountpoint = "/srv/nextcloud"; extra.options.quota = "5G"; };
       "safe/svcs/stalwart" = { mountpoint = "/srv/stalwart"; extra.options.quota = "5G"; };
       "safe/svcs/synapse" = { mountpoint = "/srv/synapse"; extra.options.quota = "5G"; };
       "safe/svcs/wger" = { mountpoint = "/srv/wger"; extra.options.quota = "5G"; };
+      "safe/svcs/uptime-kuma" = { mountpoint = "/srv/uptime-kuma"; extra.options.quota = "1G"; };
       "safe/svcs/postgresql" = { mountpoint = "/srv/postgresql"; extra.options.quota = "5G"; };
       "backup/postgresql" = { mountpoint = "/media/backup/postgresqlbackup"; extra.options.quota = "5G"; };
     };

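The uptime-kuma module below resolves this dataset's mountpoint through `config.mine.zfsMounts`. That helper is not part of this commit; a minimal sketch of what it could look like, assuming it simply maps dataset names to mountpoints (the repo's real module may instead derive the mapping from the disk layout above):

  { lib, ... }: {
    # Hypothetical helper (not in this commit): expose each dataset's
    # mountpoint so service modules can look up state directories by
    # dataset name, e.g.
    #   config.mine.zfsMounts."rpool/safe/svcs/uptime-kuma" == "/srv/uptime-kuma"
    options.mine.zfsMounts = lib.mkOption {
      type = lib.types.attrsOf lib.types.str;
      default = { };
      description = "Mapping from ZFS dataset name to its mountpoint.";
    };

    config.mine.zfsMounts."rpool/safe/svcs/uptime-kuma" = "/srv/uptime-kuma";
  }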

machines/gerd/services/uptime-kuma.nix (new file, 97 lines)
@@ -0,0 +1,97 @@
+{ config, lib, pkgs, ... }:
+
+let
+  svc_domain = "uptime-kuma.${config.mine.shared.settings.domain}";
+
+  stateDir = config.mine.zfsMounts."rpool/safe/svcs/uptime-kuma";
+in {
+  services.uptime-kuma = {
+    enable = true;
+    appriseSupport = true;
+
+    settings = {
+      DATA_DIR = lib.mkForce stateDir;
+    };
+
+    package = pkgs.uptime-kuma.overrideAttrs (old: rec {
+      pname = "uptime-kuma";
+      version = "2.0.0-dev";
+      src = pkgs.fetchFromGitHub {
+        owner = "M1CK431";
+        repo = "uptime-kuma";
+        rev = "5a16af40fdddcaa61d197242840344804a246d01";
+        hash = "sha256-W7ieVrfm/SZU/MNB7dJW3V3vq0RBrAJVqv0gK7H4Xik=";
+      };
+      npmDepsHash = "sha256-Q2u6ClG6g8yoGvSJ/LGlKTL4XkJGWY+DAojpM1xBwQ0=";
+      npmDeps = pkgs.fetchNpmDeps {
+        inherit src;
+        name = "${pname}-${version}-npm-deps";
+        hash = npmDepsHash;
+      };
+      patches = [
+        (pkgs.writeText "authelia.patch" ''
+          diff --git a/server/database.js b/server/database.js
+          index 3374aff9..9e890d28 100644
+          --- a/server/database.js
+          +++ b/server/database.js
+          @@ -221,6 +221,7 @@ class Database {
+                   if (! fs.existsSync(Database.sqlitePath)) {
+                       log.info("server", "Copying Database");
+                       fs.copyFileSync(Database.templatePath, Database.sqlitePath);
+          +            fs.chmodSync(Database.path, 0o640);
+                   }
+
+                   const Dialect = require("knex/lib/dialects/sqlite3/index.js");
+        '')
+      ];
+    });
+  };
+
+  # setup state dir
+  systemd.services.uptime-kuma.serviceConfig = {
+    ExecStartPre = [
+      "+${pkgs.coreutils}/bin/chown %u:%g -R ${stateDir}"
+      "+${pkgs.coreutils}/bin/chmod 777 -R ${stateDir}"
+    ];
+    ReadWritePaths = [ stateDir ];
+    BindPaths = [ stateDir ];
+  };
+
+
+  # TODO: Could maybe use this instead?
+  # environment.persistence.root.directories = [
+  #   { directory = "/var/lib/private/lldap"; mode = "0700"; }
+  # ];
+
+
+  # setup ldap user for email
+  services.lldap.provision.users = config.mine.shared.lib.ldap.mkScope (lconfig: llib: {
+    uptime-kuma = llib.mkProvisionUserSystem "uptime-kuma" config.age.secrets.uptime-kuma-ldap-pass.path;
+  });
+
+  # nginx
+  services.nginx.virtualHosts."${svc_domain}" = config.mine.shared.lib.authelia.mkProtectedWebsite {
+    forceSSL = true;
+    enableACME = true;
+
+    locations."/" = config.mine.shared.lib.authelia.mkProtectedLocation {
+      proxyPass = "http://localhost:${builtins.toString config.services.uptime-kuma.settings.PORT}";
+    };
+  };
+
+  mine.shared.meta.uptime-kuma = {
+    name = "Uptime Kuma";
+    description = ''Fancy self-hosted monitoring tool which supports various methods of monitoring as well as notifications. Multiple users are not officially supported, so reach out to the admins and they will create a user for you. Abuse will NOT be tolerated.'';
+    url = svc_domain;
+
+    package = let
+      pkg = config.services.uptime-kuma.package;
+    in {
+      name = pkg.pname;
+      version = pkg.version;
+      meta = pkg.meta;
+    };
+  };
+
+
+}
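Because the service pins a fork of upstream at a fixed rev, `hash` and `npmDepsHash` both go stale whenever `rev` is bumped. One common workflow, an assumption rather than anything this commit prescribes, is to invalidate both with `lib.fakeHash` and copy the correct values out of the resulting hash-mismatch errors:

  package = pkgs.uptime-kuma.overrideAttrs (old: rec {
    # ... as above ...
    src = pkgs.fetchFromGitHub {
      owner = "M1CK431";
      repo = "uptime-kuma";
      rev = "<new rev>";        # hypothetical placeholder for the bumped commit
      hash = lib.fakeHash;      # rebuild once; Nix reports the real hash to paste in
    };
    npmDepsHash = lib.fakeHash; # same trick for the vendored npm dependency tree
    # ... as above ...
  });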

@@ -51,6 +51,9 @@
     # searx
     searx-env.file = ./searx/env.age;
+
+    # uptime-kuma
+    uptime-kuma-ldap-pass.file = ./uptime-kuma/ldap-pass.age;
   };

   users.groups.secrets-lldap-bind-user-pass = {};

@@ -61,4 +61,7 @@ in
   # searx
   "searx/env.age".publicKeys = defaultAccess;
+
+  # uptime-kuma
+  "uptime-kuma/ldap-pass.age".publicKeys = defaultAccess;
 }
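These two hunks only declare the new secret and grant decryption access; the consumer is the lldap provisioning block in uptime-kuma.nix above. Side by side, and assuming the first hunk sits inside the standard agenix `age.secrets` attribute set, the three pieces connect like this:

  # secrets module: register the encrypted file; agenix decrypts it at activation
  age.secrets.uptime-kuma-ldap-pass.file = ./uptime-kuma/ldap-pass.age;

  # secrets.nix: allow the default key set to (re-)encrypt the file
  "uptime-kuma/ldap-pass.age".publicKeys = defaultAccess;

  # uptime-kuma.nix: hand the decrypted path to the LDAP provisioning helper
  uptime-kuma = llib.mkProvisionUserSystem "uptime-kuma"
    config.age.secrets.uptime-kuma-ldap-pass.path;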

BIN secrets/uptime-kuma/ldap-pass.age (new file)
Binary file not shown.