Merge staging-next into staging

This commit is contained in:
github-actions[bot] 2022-10-23 12:01:47 +00:00 committed by GitHub
commit 1a1df3b37d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
49 changed files with 36497 additions and 673 deletions

View File

@ -437,6 +437,13 @@
<link xlink:href="options.html#opt-services.listmonk.enable">services.listmonk</link>.
</para>
</listitem>
<listitem>
<para>
<link xlink:href="https://uptime.kuma.pet/">Uptime
Kuma</link>, a fancy self-hosted monitoring tool. Available as
<link linkend="opt-services.uptime-kuma.enable">services.uptime-kuma</link>.
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="sec-release-22.11-incompatibilities">
@ -866,6 +873,34 @@
has been hardened.
</para>
</listitem>
<listitem>
<para>
The <literal>services.grafana</literal> options were converted
to an
<link xlink:href="https://github.com/NixOS/rfcs/blob/master/rfcs/0042-config-option.md">RFC
0042</link> configuration.
</para>
</listitem>
<listitem>
<para>
The <literal>services.grafana.provision.datasources</literal>
and <literal>services.grafana.provision.dashboards</literal>
options were converted to an
<link xlink:href="https://github.com/NixOS/rfcs/blob/master/rfcs/0042-config-option.md">RFC
0042</link> configuration. They also now support specifying
the provisioning YAML file with the <literal>path</literal>
option.
</para>
</listitem>
<listitem>
<para>
The <literal>services.grafana.provision.alerting</literal>
option was added. It includes suboptions for every
alerting-related object (with the exception of
<literal>notifiers</literal>), which means it's now possible
to configure modern Grafana alerting declaratively.
</para>
</listitem>
<listitem>
<para>
Matrix Synapse now requires entries in the

View File

@ -148,6 +148,8 @@ Available as [services.patroni](options.html#opt-services.patroni.enable).
- [Listmonk](https://listmonk.app), a self-hosted newsletter manager. Enable using [services.listmonk](options.html#opt-services.listmonk.enable).
- [Uptime Kuma](https://uptime.kuma.pet/), a fancy self-hosted monitoring tool. Available as [services.uptime-kuma](#opt-services.uptime-kuma.enable).
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
## Backward Incompatibilities {#sec-release-22.11-incompatibilities}
@ -280,6 +282,12 @@ Available as [services.patroni](options.html#opt-services.patroni.enable).
- The `services.matrix-synapse` systemd unit has been hardened.
- The `services.grafana` options were converted to an [RFC 0042](https://github.com/NixOS/rfcs/blob/master/rfcs/0042-config-option.md) configuration.
- The `services.grafana.provision.datasources` and `services.grafana.provision.dashboards` options were converted to an [RFC 0042](https://github.com/NixOS/rfcs/blob/master/rfcs/0042-config-option.md) configuration. They also now support specifying the provisioning YAML file with the `path` option.
- The `services.grafana.provision.alerting` option was added. It includes suboptions for every alerting-related object (with the exception of `notifiers`), which means it's now possible to configure modern Grafana alerting declaratively (a short sketch follows this list).
- Matrix Synapse now requires entries in the `state_group_edges` table to be unique, in order to prevent accidentally introducing duplicate information (for example, because a database backup was restored multiple times). If your Synapse database already has duplicate rows in this table, this could fail with an error and require manual remediation.
- The `diamond` package has been updated from 0.8.36 to 2.0.15. See the [upstream release notes](https://github.com/bbuchfink/diamond/releases) for more details.
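
A minimal sketch of the new provisioning format, pieced together from the test configuration that appears later in this diff (the datasource values are just the test fixtures, not recommendations):

```nix
{
  services.grafana = {
    enable = true;
    provision = {
      enable = true;
      # RFC 0042 style: provisioning settings are written as Nix values...
      datasources.settings = {
        apiVersion = 1;
        datasources = [{
          name = "Test Datasource";
          type = "testdata";
          access = "proxy";
          uid = "test_datasource";
        }];
      };
      # ...or an existing provisioning YAML file can be referenced via `path`.
      dashboards.path = ./dashboards.yaml;
    };
  };
}
```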

View File

@ -718,6 +718,7 @@
./services/monitoring/ups.nix
./services/monitoring/uptime.nix
./services/monitoring/vmagent.nix
./services/monitoring/uptime-kuma.nix
./services/monitoring/vnstat.nix
./services/monitoring/zabbix-agent.nix
./services/monitoring/zabbix-proxy.nix

View File

@ -106,9 +106,9 @@ in {
}
];
services.grafana.extraOptions = mkIf cfg.provisionGrafana {
RENDERING_SERVER_URL = "http://localhost:${toString cfg.settings.service.port}/render";
RENDERING_CALLBACK_URL = "http://localhost:${toString config.services.grafana.port}";
services.grafana.settings.rendering = mkIf cfg.provisionGrafana {
url = "http://localhost:${toString cfg.settings.service.port}/render";
callback_url = "http://localhost:${toString config.services.grafana.port}";
};
services.grafana-image-renderer.chromium = mkDefault pkgs.chromium;

File diff suppressed because it is too large

View File

@ -0,0 +1,76 @@
{ config, pkgs, lib, ... }:
with lib;
let
cfg = config.services.uptime-kuma;
in
{
options = {
services.uptime-kuma = {
enable = mkEnableOption (mdDoc "Uptime Kuma; this module assumes a reverse proxy is set up in front of it.");
package = mkOption {
type = types.package;
example = literalExpression "pkgs.uptime-kuma";
default = pkgs.uptime-kuma;
defaultText = "pkgs.uptime-kuma";
description = lib.mdDoc "Uptime Kuma package to use.";
};
settings = lib.mkOption {
type =
lib.types.submodule { freeformType = with lib.types; attrsOf str; };
default = { };
example = {
PORT = "4000";
NODE_EXTRA_CA_CERTS = "/etc/ssl/certs/ca-certificates.crt";
};
description = lib.mdDoc ''
Additional configuration for Uptime Kuma, see
<https://github.com/louislam/uptime-kuma/wiki/Environment-Variables>
for supported values.
'';
};
};
};
config = mkIf cfg.enable {
services.uptime-kuma.settings = {
DATA_DIR = "/var/lib/uptime-kuma/";
NODE_ENV = mkDefault "production";
};
systemd.services.uptime-kuma = {
description = "Uptime Kuma";
after = [ "network.target" ];
wantedBy = [ "multi-user.target" ];
environment = cfg.settings;
serviceConfig = {
Type = "simple";
StateDirectory = "uptime-kuma";
DynamicUser = true;
ExecStart = "${cfg.package}/bin/uptime-kuma-server";
Restart = "on-failure";
ProtectHome = true;
ProtectSystem = "strict";
PrivateTmp = true;
PrivateDevices = true;
ProtectHostname = true;
ProtectClock = true;
ProtectKernelTunables = true;
ProtectKernelModules = true;
ProtectKernelLogs = true;
ProtectControlGroups = true;
NoNewPrivileges = true;
RestrictRealtime = true;
RestrictSUIDSGID = true;
RemoveIPC = true;
PrivateMounts = true;
};
};
};
}
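
A minimal usage sketch for the module above; the `settings` values are the ones from the option's own example, and without `PORT` the service listens on 3001, which is what the NixOS test later in this diff checks:

```nix
{
  services.uptime-kuma = {
    enable = true;
    # entries in `settings` are passed verbatim to the service as environment variables
    settings = {
      PORT = "4000";
      NODE_EXTRA_CA_CERTS = "/etc/ssl/certs/ca-certificates.crt";
    };
  };
}
```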

View File

@ -15,7 +15,7 @@
<para>
Litestream service is managed by a dedicated user named <literal>litestream</literal>
which needs permission to access the database file. Here's an example config which gives the
required permissions to access <link linkend="opt-services.grafana.database.path">
required permissions to access <link linkend="opt-services.grafana.settings.database.path">
grafana database</link>:
<programlisting>
{ pkgs, ... }:

View File

@ -231,7 +231,7 @@ in {
gollum = handleTest ./gollum.nix {};
google-oslogin = handleTest ./google-oslogin {};
gotify-server = handleTest ./gotify-server.nix {};
grafana = handleTest ./grafana.nix {};
grafana = handleTest ./grafana {};
grafana-agent = handleTest ./grafana-agent.nix {};
graphite = handleTest ./graphite.nix {};
graylog = handleTest ./graylog.nix {};
@ -529,6 +529,7 @@ in {
pulseaudio = discoverTests (import ./pulseaudio.nix);
qboot = handleTestOn ["x86_64-linux" "i686-linux"] ./qboot.nix {};
quorum = handleTest ./quorum.nix {};
quake3 = handleTest ./quake3.nix {};
rabbitmq = handleTest ./rabbitmq.nix {};
radarr = handleTest ./radarr.nix {};
radicale = handleTest ./radicale.nix {};
@ -658,6 +659,7 @@ in {
unit-php = handleTest ./web-servers/unit-php.nix {};
upnp = handleTest ./upnp.nix {};
uptermd = handleTest ./uptermd.nix {};
uptime-kuma = handleTest ./uptime-kuma.nix {};
usbguard = handleTest ./usbguard.nix {};
user-activation-scripts = handleTest ./user-activation-scripts.nix {};
user-home-mode = handleTest ./user-home-mode.nix {};

View File

@ -1,4 +1,4 @@
import ./make-test-python.nix ({ lib, pkgs, ... }:
import ../make-test-python.nix ({ lib, pkgs, ... }:
let
inherit (lib) mkMerge nameValuePair maintainers;
@ -17,6 +17,8 @@ let
};
extraNodeConfs = {
sqlite = {};
declarativePlugins = {
services.grafana.declarativePlugins = [ pkgs.grafanaPlugins.grafana-clock-panel ];
};
@ -52,14 +54,9 @@ let
};
};
nodes = builtins.listToAttrs (map (dbName:
nameValuePair dbName (mkMerge [
baseGrafanaConf
(extraNodeConfs.${dbName} or {})
])) [ "sqlite" "declarativePlugins" "postgresql" "mysql" ]);
nodes = builtins.mapAttrs (_: val: mkMerge [ val baseGrafanaConf ]) extraNodeConfs;
in {
name = "grafana";
name = "grafana-basic";
meta = with maintainers; {
maintainers = [ willibutz ];

View File

@ -0,0 +1,9 @@
{ system ? builtins.currentSystem
, config ? { }
, pkgs ? import ../../.. { inherit system config; }
}:
{
basic = import ./basic.nix { inherit system pkgs; };
provision = import ./provision { inherit system pkgs; };
}

View File

@ -0,0 +1,9 @@
apiVersion: 1
contactPoints:
- name: "Test Contact Point"
receivers:
- uid: "test_contact_point"
type: prometheus-alertmanager
settings:
url: http://localhost:9000

View File

@ -0,0 +1,6 @@
apiVersion: 1
providers:
- name: 'default'
options:
path: /var/lib/grafana/dashboards

View File

@ -0,0 +1,7 @@
apiVersion: 1
datasources:
- name: 'Test Datasource'
type: 'testdata'
access: 'proxy'
uid: 'test_datasource'

View File

@ -0,0 +1,223 @@
import ../../make-test-python.nix ({ lib, pkgs, ... }:
let
inherit (lib) mkMerge nameValuePair maintainers;
baseGrafanaConf = {
services.grafana = {
enable = true;
addr = "localhost";
analytics.reporting.enable = false;
domain = "localhost";
security = {
adminUser = "testadmin";
adminPassword = "snakeoilpwd";
};
provision.enable = true;
};
systemd.tmpfiles.rules = [
"L /var/lib/grafana/dashboards/test.json 0700 grafana grafana - ${pkgs.writeText "test.json" (builtins.readFile ./test_dashboard.json)}"
];
};
extraNodeConfs = {
provisionOld = {
services.grafana.provision = {
datasources = [{
name = "Test Datasource";
type = "testdata";
access = "proxy";
uid = "test_datasource";
}];
dashboards = [{ options.path = "/var/lib/grafana/dashboards"; }];
notifiers = [{
uid = "test_notifiers";
name = "Test Notifiers";
type = "email";
settings = {
singleEmail = true;
addresses = "test@test.com";
};
}];
};
};
provisionNix = {
services.grafana.provision = {
datasources.settings = {
apiVersion = 1;
datasources = [{
name = "Test Datasource";
type = "testdata";
access = "proxy";
uid = "test_datasource";
}];
};
dashboards.settings = {
apiVersion = 1;
providers = [{
name = "default";
options.path = "/var/lib/grafana/dashboards";
}];
};
alerting = {
rules.settings = {
groups = [{
name = "test_rule_group";
folder = "test_folder";
interval = "60s";
rules = [{
uid = "test_rule";
title = "Test Rule";
condition = "A";
data = [{
refId = "A";
datasourceUid = "-100";
model = {
conditions = [{
evaluator = {
params = [ 3 ];
type = "git";
};
operator.type = "and";
query.params = [ "A" ];
reducer.type = "last";
type = "query";
}];
datasource = {
type = "__expr__";
uid = "-100";
};
expression = "1==0";
intervalMs = 1000;
maxDataPoints = 43200;
refId = "A";
type = "math";
};
}];
for = "60s";
}];
}];
};
contactPoints.settings = {
contactPoints = [{
name = "Test Contact Point";
receivers = [{
uid = "test_contact_point";
type = "prometheus-alertmanager";
settings.url = "http://localhost:9000";
}];
}];
};
policies.settings = {
policies = [{
receiver = "Test Contact Point";
}];
};
templates.settings = {
templates = [{
name = "Test Template";
template = "Test message";
}];
};
muteTimings.settings = {
muteTimes = [{
name = "Test Mute Timing";
}];
};
};
};
};
provisionYaml = {
services.grafana.provision = {
datasources.path = ./datasources.yaml;
dashboards.path = ./dashboards.yaml;
alerting = {
rules.path = ./rules.yaml;
contactPoints.path = ./contact-points.yaml;
policies.path = ./policies.yaml;
templates.path = ./templates.yaml;
muteTimings.path = ./mute-timings.yaml;
};
};
};
};
nodes = builtins.mapAttrs (_: val: mkMerge [ val baseGrafanaConf ]) extraNodeConfs;
in {
name = "grafana-provision";
meta = with maintainers; {
maintainers = [ kfears willibutz ];
};
inherit nodes;
testScript = ''
start_all()
nodeOld = ("Nix (old format)", provisionOld)
nodeNix = ("Nix (new format)", provisionNix)
nodeYaml = ("Nix (YAML)", provisionYaml)
for nodeInfo in [nodeOld, nodeNix, nodeYaml]:
with subtest(f"Should start provision node: {nodeInfo[0]}"):
nodeInfo[1].wait_for_unit("grafana.service")
nodeInfo[1].wait_for_open_port(3000)
with subtest(f"Successful datasource provision with {nodeInfo[0]}"):
nodeInfo[1].succeed(
"curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/datasources/uid/test_datasource | grep Test\ Datasource"
)
with subtest(f"Successful dashboard provision with {nodeInfo[0]}"):
nodeInfo[1].succeed(
"curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/dashboards/uid/test_dashboard | grep Test\ Dashboard"
)
with subtest(f"Successful notifiers provision with {nodeOld[0]}"):
nodeOld[1].succeed(
"curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/alert-notifications/uid/test_notifiers | grep Test\ Notifiers"
)
for nodeInfo in [nodeNix, nodeYaml]:
with subtest(f"Successful rule provision with {nodeInfo[0]}"):
nodeInfo[1].succeed(
"curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/v1/provisioning/alert-rules/test_rule | grep Test\ Rule"
)
with subtest(f"Successful contact point provision with {nodeInfo[0]}"):
nodeInfo[1].succeed(
"curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/v1/provisioning/contact-points | grep Test\ Contact\ Point"
)
with subtest(f"Successful policy provision with {nodeInfo[0]}"):
nodeInfo[1].succeed(
"curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/v1/provisioning/policies | grep Test\ Contact\ Point"
)
with subtest(f"Successful template provision with {nodeInfo[0]}"):
nodeInfo[1].succeed(
"curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/v1/provisioning/templates | grep Test\ Template"
)
with subtest("Successful mute timings provision with {nodeInfo[0]}"):
nodeInfo[1].succeed(
"curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/v1/provisioning/mute-timings | grep Test\ Mute\ Timing"
)
'';
})

View File

@ -0,0 +1,4 @@
apiVersion: 1
muteTimes:
- name: "Test Mute Timing"

View File

@ -0,0 +1,4 @@
apiVersion: 1
policies:
- receiver: "Test Contact Point"

View File

@ -0,0 +1,36 @@
apiVersion: 1
groups:
- name: "test_rule_group"
folder: "test_group"
interval: 60s
rules:
- uid: "test_rule"
title: "Test Rule"
condition: A
data:
- refId: A
datasourceUid: '-100'
model:
conditions:
- evaluator:
params:
- 3
type: gt
operator:
type: and
query:
params:
- A
reducer:
type: last
type: query
datasource:
type: __expr__
uid: '-100'
expression: 1==0
intervalMs: 1000
maxDataPoints: 43200
refId: A
type: math
for: 60s

View File

@ -0,0 +1,5 @@
apiVersion: 1
templates:
- name: "Test Template"
template: "Test message"

View File

@ -0,0 +1,47 @@
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": 28,
"links": [],
"liveNow": false,
"panels": [],
"schemaVersion": 37,
"style": "dark",
"tags": [],
"templating": {
"list": []
},
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "Test Dashboard",
"uid": "test_dashboard",
"version": 1,
"weekStart": ""
}

nixos/tests/quake3.nix Normal file
View File

@ -0,0 +1,95 @@
import ./make-test-python.nix ({ pkgs, ...} :
let
# Build Quake with coverage instrumentation.
overrides = pkgs:
{
quake3game = pkgs.quake3game.override (args: {
stdenv = pkgs.stdenvAdapters.addCoverageInstrumentation args.stdenv;
});
};
# Only allow the demo data to be used (only if it's unfreeRedistributable).
unfreePredicate = pkg: with pkgs.lib; let
allowPackageNames = [ "quake3-demodata" "quake3-pointrelease" ];
allowLicenses = [ pkgs.lib.licenses.unfreeRedistributable ];
in elem pkg.pname allowPackageNames &&
elem (pkg.meta.license or null) allowLicenses;
client =
{ pkgs, ... }:
{ imports = [ ./common/x11.nix ];
hardware.opengl.driSupport = true;
environment.systemPackages = [ pkgs.quake3demo ];
nixpkgs.config.packageOverrides = overrides;
nixpkgs.config.allowUnfreePredicate = unfreePredicate;
};
in
rec {
name = "quake3";
meta = with pkgs.lib.maintainers; {
maintainers = [ domenkozar eelco ];
};
# TODO: lcov doesn't work atm
#makeCoverageReport = true;
nodes =
{ server =
{ pkgs, ... }:
{ systemd.services.quake3-server =
{ wantedBy = [ "multi-user.target" ];
script =
"${pkgs.quake3demo}/bin/quake3-server +set g_gametype 0 " +
"+map q3dm7 +addbot grunt +addbot daemia 2> /tmp/log";
};
nixpkgs.config.packageOverrides = overrides;
nixpkgs.config.allowUnfreePredicate = unfreePredicate;
networking.firewall.allowedUDPPorts = [ 27960 ];
};
client1 = client;
client2 = client;
};
testScript =
''
start_all()
server.wait_for_unit("quake3-server")
client1.wait_for_x()
client2.wait_for_x()
client1.execute("quake3 +set r_fullscreen 0 +set name Foo +connect server &")
client2.execute("quake3 +set r_fullscreen 0 +set name Bar +connect server &")
server.wait_until_succeeds("grep -q 'Foo.*entered the game' /tmp/log")
server.wait_until_succeeds("grep -q 'Bar.*entered the game' /tmp/log")
server.sleep(10) # wait for a while to get a nice screenshot
client1.block()
server.sleep(20)
client1.screenshot("screen1")
client2.screenshot("screen2")
client1.unblock()
server.sleep(10)
client1.screenshot("screen3")
client2.screenshot("screen4")
client1.shutdown()
client2.shutdown()
server.stop_job("quake3-server")
'';
})

View File

@ -0,0 +1,19 @@
import ./make-test-python.nix ({ lib, ... }:
with lib;
{
name = "uptime-kuma";
meta.maintainers = with maintainers; [ julienmalka ];
nodes.machine =
{ pkgs, ... }:
{ services.uptime-kuma.enable = true; };
testScript = ''
machine.start()
machine.wait_for_unit("uptime-kuma.service")
machine.wait_for_open_port(3001)
machine.succeed("curl --fail http://localhost:3001/")
'';
})

View File

@ -19,9 +19,9 @@
}
},
"beta": {
"version": "107.0.5304.36",
"sha256": "1jr5jncc44jqryhg90zc7pnp590qwqdvbc9nkd28418vs0dx98r4",
"sha256bin64": "0lczdihl955vcabr8f46ncglgis4ci8rnjga7dv7wxs4vlyxkhv8",
"version": "107.0.5304.62",
"sha256": "1w77id89gszichqxsrqgkb3v0kf26fic5p3y1ndgrw86d8kilgpj",
"sha256bin64": "0lc08mc1540w4chscq813pmiavvx32qfb581w7lh9ayby15j2nkk",
"deps": {
"gn": {
"version": "2022-09-14",
@ -32,9 +32,9 @@
}
},
"dev": {
"version": "108.0.5355.0",
"sha256": "185mj5sm6q2ahf0im52vkys9pcf0zxx849yrnghix3k487z959na",
"sha256bin64": "11gns3f7k1qj3asy5skrc8z3pb6var8lbqqra47c9gs1jby60d6l",
"version": "108.0.5359.10",
"sha256": "1wmb3qxd126j3466h8wb2fsfy92218rv812sxxqdbpyf2z66m8pj",
"sha256bin64": "0vv64hqx5wf8qsm3z518wv9phipb7nbg6kj5mghx55rn20qmfpbr",
"deps": {
"gn": {
"version": "2022-10-05",

View File

@ -17,6 +17,7 @@
, glib
, networkmanager
, libpulseaudio
, pipewire
}:
stdenv.mkDerivation rec {
@ -59,6 +60,8 @@ stdenv.mkDerivation rec {
gst_all_1.gst-plugins-bad
gst_all_1.gst-plugins-ugly
gst_all_1.gst-rtsp-server
gst_all_1.gst-vaapi
pipewire
networkmanager
libpulseaudio
];

View File

@ -1,6 +1,7 @@
{ stdenv
, lib
, fetchFromGitHub
, fetchurl
, cmake
, pkg-config
, openssl
@ -45,6 +46,14 @@ in stdenv.mkDerivation {
fetchSubmodules = true;
};
patches = [
# fix build with openssl 3.0
(fetchurl {
url = "https://salsa.debian.org/debian/transmission/-/raw/debian/3.00-2.1/debian/patches/openssl3-compat.patch";
hash = "sha256-v+SDTW/lCtc8B3TuhQB1pmjW/QRAGLtYncaImNNwpes=";
})
];
outputs = [ "out" "apparmor" ];
cmakeFlags =

View File

@ -61,7 +61,7 @@ buildGoPackage rec {
goPackagePath = "code.gitea.io/gitea";
passthru.tests.gitea = nixosTests.gitea;
passthru.tests = nixosTests.gitea;
meta = with lib; {
description = "Git with a cup of tea";

View File

@ -54,6 +54,7 @@ stdenv.mkDerivation rec {
libadwaita
gst_all_1.gstreamer
gst_all_1.gst-plugins-base
gst_all_1.gst-plugins-good # for scaletempo and webm
gst_all_1.gst-plugins-bad
];

View File

@ -1,6 +1,6 @@
{ lib, stdenv, fetchFromGitHub, fetchurl, fetchpatch
, ocaml, findlib, ocamlbuild, ocaml_oasis
, bitstring, camlzip, cmdliner, core_kernel, ezjsonm, fileutils, ocaml_lwt, ocamlgraph, ocurl, re, uri, zarith, piqi, piqi-ocaml, uuidm, llvm, frontc, ounit, ppx_jane, parsexp
, bitstring, camlzip, cmdliner, core_kernel, ezjsonm, fileutils, mmap, lwt, ocamlgraph, ocurl, re, uri, zarith, piqi, piqi-ocaml, uuidm, llvm, frontc, ounit, ppx_jane, parsexp
, utop, libxml2, ncurses
, linenoise
, ppx_bap
@ -44,7 +44,7 @@ stdenv.mkDerivation rec {
z3
utop libxml2 ncurses ];
propagatedBuildInputs = [ bitstring camlzip cmdliner ppx_bap core_kernel ezjsonm fileutils ocaml_lwt ocamlgraph ocurl re uri zarith piqi parsexp
propagatedBuildInputs = [ bitstring camlzip cmdliner ppx_bap core_kernel ezjsonm fileutils mmap lwt ocamlgraph ocurl re uri zarith piqi parsexp
piqi-ocaml uuidm frontc yojson ];
installPhase = ''

View File

@ -1,29 +1,25 @@
{ lib, fetchFromGitHub, pkg-config, ncurses, libev, buildDunePackage, ocaml
, cppo, dune-configurator, ocplib-endian, result
, mmap, seq
, ocaml-syntax-shims
{ lib, fetchFromGitHub, libev, buildDunePackage
, cppo, dune-configurator, ocplib-endian
}:
let inherit (lib) optional versionOlder; in
buildDunePackage rec {
pname = "lwt";
version = "5.5.0";
version = "5.6.1";
minimalOCamlVersion = "4.08";
src = fetchFromGitHub {
owner = "ocsigen";
repo = "lwt";
rev = version;
sha256 = "sha256:1jbjz2rsz3j56k8vh5qlmm87hhkr250bs2m3dvpy9vsri8rkzj9z";
sha256 = "sha256-XstKs0tMwliCyXnP0Vzi5WC27HKJGnATUYtbbQmH1TE=";
};
strictDeps = true;
nativeBuildInputs = [ pkg-config cppo ]
++ optional (versionOlder ocaml.version "4.08") ocaml-syntax-shims;
buildInputs = [ dune-configurator ]
++ optional (versionOlder ocaml.version "4.07") ncurses;
propagatedBuildInputs = [ libev mmap ocplib-endian seq result ];
nativeBuildInputs = [ cppo ];
buildInputs = [ dune-configurator ];
propagatedBuildInputs = [ libev ocplib-endian ];
meta = {
homepage = "https://ocsigen.org/lwt/";

View File

@ -1,45 +0,0 @@
{ buildDunePackage
, lib
, fetchFromGitHub
, ocaml
, cmdliner
, spacetime_lib
, yojson
, cohttp
, ocaml_lwt
, cohttp-lwt-unix
, lambda-term
, stdlib-shims
}:
buildDunePackage rec {
pname = "prof_spacetime";
version = "0.3.0";
useDune2 = true;
src = fetchFromGitHub {
owner = "lpw25";
repo = pname;
rev = version;
sha256 = "1s88gf6x5almmyi58zx4q23w89mvahfjwhvyfg29ya5s1pjbc9hi";
};
buildInputs = [
cmdliner
spacetime_lib
yojson
cohttp
ocaml_lwt
cohttp-lwt-unix
lambda-term
stdlib-shims
];
meta = {
description = "A viewer for OCaml spacetime profiles";
license = lib.licenses.mit;
maintainers = [ lib.maintainers.symphorien ];
broken = true; # 2022-10-20, doesn't work with updated lambda-term
inherit (src.meta) homepage;
};
}

View File

@ -3,8 +3,8 @@
buildPecl {
pname = "grpc";
version = "1.48.1";
sha256 = "sha256-dOIvjq+DPmBecu933z1DK8bZlkffUy2XLxYYdAU4WeA=";
version = "1.50.0";
sha256 = "sha256-Lgvrw1HZywfvHTaF88T5dtKXu/lGR5xeS+TsqqNQCSc=";
doCheck = true;
checkTarget = "test";

View File

@ -294,6 +294,18 @@ buildPythonPackage rec {
./update-providers.py
'';
# Note on testing the web UI:
# You can (manually) test the web UI as follows:
#
# nix shell .#python3Packages.apache-airflow
# airflow db init
# airflow reset -y # WARNING: this will wipe any existing db state you might have!
# airflow standalone
#
# Then navigate to the localhost URL using the credentials printed, try
# triggering the 'example_bash_operator' DAG and see if it reports
# success.
meta = with lib; {
description = "Programmatically author, schedule and monitor data pipelines";
homepage = "https://airflow.apache.org/";

View File

@ -9,7 +9,7 @@
buildPythonPackage rec {
pname = "heatzypy";
version = "2.0.4";
version = "2.0.6";
format = "setuptools";
disabled = pythonOlder "3.8";
@ -18,7 +18,7 @@ buildPythonPackage rec {
owner = "Cyr-ius";
repo = pname;
rev = version;
sha256 = "sha256-i5tGV9nJrLRqZwJZ3y5c65MHykz34bnr3yz+OdaQEoM=";
sha256 = "sha256-sD32zP3HWEq9FCM9PFywHaLEU+MJOvro+FpjrlM4dG4=";
};
propagatedBuildInputs = [

View File

@ -9,14 +9,14 @@
buildPythonPackage rec {
pname = "jsbeautifier";
version = "1.14.6";
version = "1.14.7";
format = "setuptools";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
hash = "sha256-DVJEhRFE3Ec7HRBEvj3WxW9h/Wnr3B+TuBPYIkJy8G8=";
hash = "sha256-d5kyVNsf9vhOtuHXXjtrcsui7yCBOlhbLYHo5ePHE8Y=";
};
propagatedBuildInputs = [

View File

@ -8,7 +8,7 @@
buildPythonPackage rec {
pname = "life360";
version = "5.2.0";
version = "5.2.1";
format = "setuptools";
disabled = pythonOlder "3.8";
@ -17,7 +17,7 @@ buildPythonPackage rec {
owner = "pnbruckner";
repo = pname;
rev = "refs/tags/v${version}";
hash = "sha256-FLYqTuH/r56mbeOsgXgcLbKtQMiHnRpccDcdDiB0YMo=";
hash = "sha256-yhOqiLozeqPjl5ZBgPaMuZ2fJeOwhI460p9x7i1hVuM=";
};
propagatedBuildInputs = [

View File

@ -9,7 +9,7 @@
buildPythonPackage rec {
pname = "snapcast";
version = "2.2.0";
version = "2.3.0";
format = "setuptools";
disabled = pythonOlder "3.7";
@ -18,7 +18,7 @@ buildPythonPackage rec {
owner = "happyleavesaoc";
repo = "python-snapcast";
rev = "refs/tags/${version}";
hash = "sha256-H41X5bfRRu+uE7eUsmUkONm6hugNs43+O7MvVPH0e+8=";
hash = "sha256-De/dxZLNTJT6DuTa/piavNsLhr6lsPKqR5rQNrvwXGQ=";
};
propagatedBuildInputs = [
@ -34,6 +34,12 @@ buildPythonPackage rec {
"snapcast"
];
disabledTests = [
# AssertionError and TypeError
"test_stream_setmeta"
"est_stream_setproperty"
];
meta = with lib; {
description = "Control Snapcast, a multi-room synchronous audio solution";
homepage = "https://github.com/happyleavesaoc/python-snapcast/";

View File

@ -2,7 +2,7 @@
buildGoModule rec {
pname = "grafana";
version = "9.2.0";
version = "9.2.1";
excludedPackages = [ "alert_webhook_listener" "clean-swagger" "release_publisher" "slow_proxy" "slow_proxy_mac" "macaron" "devenv" ];
@ -10,15 +10,15 @@ buildGoModule rec {
rev = "v${version}";
owner = "grafana";
repo = "grafana";
sha256 = "sha256-cfm+BfzSMtkDMkiDH7rsoh/tEofmqWhuUz1slk+FaOI=";
sha256 = "sha256-0TMvSILkT29Ebm/P3PK1NKNs+TbE+874aDRybahhMGg=";
};
srcStatic = fetchurl {
url = "https://dl.grafana.com/oss/release/grafana-${version}.linux-amd64.tar.gz";
sha256 = "sha256-qJnqIog5DQXI8MAZtb2USkb5UwY7c05nDBt2mf13BJ8=";
sha256 = "sha256-yL6qyAOZT47eiPkdxeBARkChP0L4vj1y7LDvrPUBmQQ=";
};
vendorSha256 = "sha256-SYDkKB/D+uWHoeGAcYJmYxLhMOw458vkmFJlLbcrf2k=";
vendorSha256 = "sha256-021b+Jdk1VUGNSVNef89KLbWLdy4XhhEry4S2S0AhRg=";
nativeBuildInputs = [ wire ];

View File

@ -0,0 +1 @@
legacy-peer-deps=true

View File

@ -0,0 +1,17 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-16_x"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}

View File

@ -0,0 +1,68 @@
{ pkgs, lib, stdenv, fetchFromGitHub, fetchzip, substituteAll, nixosTests, iputils }:
let
deps = import ./composition.nix { inherit pkgs; };
in
stdenv.mkDerivation (finalAttrs: {
pname = "uptime-kuma";
version = "1.18.5";
src = fetchFromGitHub {
owner = "louislam";
repo = "uptime-kuma";
rev = finalAttrs.version;
sha256 = "sha256-4RLOY8OqhbcnSPa0VpAdMT3E1M0/ev/sSAmbQUQxqbw=";
};
uiSha256 = "sha256-0KbxagFh4bxNrnekUHx0DGr3urfUUz33zn4EtJIZBps=";
patches = [
# Fixes the permissions of the database being not set correctly
# See https://github.com/louislam/uptime-kuma/pull/2119
./fix-database-permissions.patch
];
postPatch = ''
substituteInPlace server/ping-lite.js \
--replace "/bin/ping" "${iputils}/bin/ping" \
--replace "/sbin/ping6" "${iputils}/bin/ping" \
--replace "/sbin/ping" "${iputils}/bin/ping"
'';
buildInputs = [ pkgs.makeWrapper ];
installPhase = ''
mkdir -p $out/share/
cp -r server $out/share/
cp -r db $out/share/
cp -r src $out/share/
cp package.json $out/share/
ln -s ${deps.package}/lib/node_modules/uptime-kuma/node_modules/ $out/share/
ln -s ${finalAttrs.passthru.ui} $out/share/dist
'';
postFixup = ''
makeWrapper ${pkgs.nodejs}/bin/node $out/bin/uptime-kuma-server \
--add-flags $out/share/server/server.js \
--chdir $out/share/
'';
passthru = {
tests.uptime-kuma = nixosTests.uptime-kuma;
updateScript = ./update.sh;
ui = fetchzip {
name = "uptime-kuma-dist-${finalAttrs.version}";
url = "https://github.com/louislam/uptime-kuma/releases/download/${finalAttrs.version}/dist.tar.gz";
sha256 = finalAttrs.uiSha256;
};
};
meta = with lib; {
description = "A fancy self-hosted monitoring tool";
homepage = "https://github.com/louislam/uptime-kuma";
license = licenses.mit;
maintainers = with maintainers; [ julienmalka ];
};
})

View File

@ -0,0 +1,12 @@
diff --git a/server/server.js b/server/server.js
index 0c9a45e6..cec31c7c 100644
--- a/server/server.js
+++ b/server/server.js
@@ -1583,6 +1583,7 @@ async function initDatabase(testMode = false) {
if (! fs.existsSync(Database.path)) {
log.info("server", "Copying Database");
fs.copyFileSync(Database.templatePath, Database.path);
+ fs.chmodSync(Database.path, 0o640);
}
log.info("server", "Connecting to the Database");

View File

@ -0,0 +1,598 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.packageName}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
replaceDependencies(packageObj.optionalDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 1,
requires: true,
dependencies: {}
};
function augmentPackageJSON(filePath, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, dependencies);
}
});
} else {
augmentPackageJSON(filePath, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
fi
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version ? null
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
# Patch the shebang lines of all the executables
ls $out/bin/* | while read i
do
file="$(readlink -f "$i")"
chmod u+rwx "$file"
patchShebangs "$file"
done
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
chmod 644 package-lock.json
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
# Builds a development shell
buildNodeShell =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
nodeDependencies = buildNodeDependencies args;
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation ({
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
} // extraArgs);
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -0,0 +1,170 @@
{
"name": "uptime-kuma",
"version": "1.18.5",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/louislam/uptime-kuma.git"
},
"engines": {
"node": "14.* || >=16.*"
},
"scripts": {
"install-legacy": "npm install",
"update-legacy": "npm update",
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
"lint-fix:js": "eslint --ext \".js,.vue\" --fix --ignore-path .gitignore .",
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
"lint-fix:style": "stylelint \"**/*.{vue,css,scss}\" --fix --ignore-path .gitignore",
"lint": "npm run lint:js && npm run lint:style",
"dev": "concurrently -k -r \"wait-on tcp:3000 && npm run start-server-dev \" \"npm run start-frontend-dev\"",
"start-frontend-dev": "cross-env NODE_ENV=development vite --host --config ./config/vite.config.js",
"start": "npm run start-server",
"start-server": "node server/server.js",
"start-server-dev": "cross-env NODE_ENV=development node server/server.js",
"build": "vite build --config ./config/vite.config.js",
"test": "node test/prepare-test-server.js && npm run jest-backend",
"test-with-build": "npm run build && npm test",
"jest-backend": "cross-env TEST_BACKEND=1 jest --runInBand --detectOpenHandles --forceExit --config=./config/jest-backend.config.js",
"tsc": "tsc",
"vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
"build-docker": "npm run build && npm run build-docker-debian && npm run build-docker-alpine",
"build-docker-alpine-base": "docker buildx build -f docker/alpine-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-alpine . --push",
"build-docker-debian-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-debian . --push",
"build-docker-alpine": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:$VERSION-alpine --target release . --push",
"build-docker-debian": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:$VERSION-debian --target release . --push",
"build-docker-nightly": "npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
"build-docker-nightly-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly-alpine --target nightly . --push",
"build-docker-nightly-amd64": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
"build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test --target pr-test . --push",
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
"setup": "git checkout 1.18.5 && npm ci --production && npm run download-dist",
"download-dist": "node extra/download-dist.js",
"mark-as-nightly": "node extra/mark-as-nightly.js",
"reset-password": "node extra/reset-password.js",
"remove-2fa": "node extra/remove-2fa.js",
"compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1",
"test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
"test-install-script-alpine3": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/alpine3.dockerfile .",
"test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
"test-nodejs16": "docker build --progress plain -f test/ubuntu-nodejs16.dockerfile .",
"simple-dns-server": "node extra/simple-dns-server.js",
"simple-mqtt-server": "node extra/simple-mqtt-server.js",
"update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
"ncu-patch": "npm-check-updates -u -t patch",
"release-final": "node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
"release-beta": "node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
"git-remove-tag": "git tag -d",
"build-dist-and-restart": "npm run build && npm run start-server-dev",
"start-pr-test": "node extra/checkout-pr.js && npm install && npm run dev",
"cy:test": "node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/ --e2e",
"cy:run": "npx cypress run --browser chrome --headless --config-file ./config/cypress.config.js",
"cypress-open": "concurrently -k -r \"node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/\" \"cypress open --config-file ./config/cypress.config.js\""
},
"dependencies": {
"@louislam/sqlite3": "~15.0.6",
"args-parser": "~1.3.0",
"axios": "~0.27.0",
"axios-ntlm": "~1.3.0",
"badge-maker": "~3.3.1",
"bcryptjs": "~2.4.3",
"bree": "~7.1.5",
"cacheable-lookup": "~6.0.4",
"chardet": "~1.4.0",
"check-password-strength": "^2.0.5",
"cheerio": "~1.0.0-rc.12",
"chroma-js": "~2.4.2",
"command-exists": "~1.2.9",
"compare-versions": "~3.6.0",
"compression": "~1.7.4",
"dayjs": "~1.11.5",
"express": "~4.17.3",
"express-basic-auth": "~1.2.1",
"express-static-gzip": "~2.1.7",
"form-data": "~4.0.0",
"http-graceful-shutdown": "~3.1.7",
"http-proxy-agent": "~5.0.0",
"https-proxy-agent": "~5.0.1",
"iconv-lite": "~0.6.3",
"jsesc": "~3.0.2",
"jsonwebtoken": "~8.5.1",
"jwt-decode": "~3.1.2",
"limiter": "~2.1.0",
"mqtt": "~4.3.7",
"mssql": "~8.1.4",
"node-cloudflared-tunnel": "~1.0.9",
"node-radius-client": "~1.0.0",
"nodemailer": "~6.6.5",
"notp": "~2.0.3",
"password-hash": "~1.2.2",
"pg": "~8.8.0",
"pg-connection-string": "~2.5.0",
"prom-client": "~13.2.0",
"prometheus-api-metrics": "~3.2.1",
"redbean-node": "0.1.4",
"socket.io": "~4.4.1",
"socket.io-client": "~4.4.1",
"socks-proxy-agent": "6.1.1",
"tar": "~6.1.11",
"tcp-ping": "~0.1.1",
"thirty-two": "~1.0.2"
},
"devDependencies": {
"@actions/github": "~5.0.1",
"@babel/eslint-parser": "~7.17.0",
"@babel/preset-env": "^7.15.8",
"@fortawesome/fontawesome-svg-core": "~1.2.36",
"@fortawesome/free-regular-svg-icons": "~5.15.4",
"@fortawesome/free-solid-svg-icons": "~5.15.4",
"@fortawesome/vue-fontawesome": "~3.0.0-5",
"@popperjs/core": "~2.10.2",
"@types/bootstrap": "~5.1.9",
"@vitejs/plugin-legacy": "~2.1.0",
"@vitejs/plugin-vue": "~3.1.0",
"@vue/compiler-sfc": "~3.2.36",
"aedes": "^0.46.3",
"babel-plugin-rewire": "~1.2.0",
"bootstrap": "5.1.3",
"chart.js": "~3.6.2",
"chartjs-adapter-dayjs": "~1.0.0",
"concurrently": "^7.1.0",
"core-js": "~3.18.3",
"cross-env": "~7.0.3",
"cypress": "^10.1.0",
"delay": "^5.0.0",
"dns2": "~2.0.1",
"eslint": "~8.14.0",
"eslint-plugin-vue": "~8.7.1",
"favico.js": "~0.3.10",
"jest": "~27.2.5",
"postcss-html": "~1.5.0",
"postcss-rtlcss": "~3.7.2",
"postcss-scss": "~4.0.4",
"prismjs": "~1.29.0",
"qrcode": "~1.5.0",
"rollup-plugin-visualizer": "^5.6.0",
"sass": "~1.42.1",
"stylelint": "~14.7.1",
"stylelint-config-standard": "~25.0.0",
"terser": "~5.15.0",
"timezones-list": "~3.0.1",
"typescript": "~4.4.4",
"v-pagination-3": "~0.1.7",
"vite": "~3.1.0",
"vite-plugin-compression": "^0.5.1",
"vue": "next",
"vue-chart-3": "3.0.9",
"vue-confirm-dialog": "~1.0.2",
"vue-contenteditable": "~3.0.4",
"vue-i18n": "~9.2.2",
"vue-image-crop-upload": "~3.0.3",
"vue-multiselect": "~3.0.0-alpha.2",
"vue-prism-editor": "~2.0.0-alpha.2",
"vue-qrcode": "~1.0.0",
"vue-router": "~4.0.14",
"vue-toastification": "~2.0.0-rc.5",
"vuedraggable": "~4.1.0",
"wait-on": "^6.0.1"
}
}

View File

@ -0,0 +1,29 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p curl common-updater-scripts nodePackages.node2nix gnused nix coreutils jq
set -euo pipefail
latestVersion="$(curl -s ${GITHUB_TOKEN:+"-u \":$GITHUB_TOKEN\""} "https://api.github.com/repos/louislam/uptime-kuma/releases?per_page=1" | jq -r ".[0].tag_name" | sed 's/^v//')"
currentVersion=$(nix-instantiate --eval -E "with import ./. {}; uptime-kuma.version or (lib.getVersion uptime-kuma)" | tr -d '"')
if [[ "$currentVersion" == "$latestVersion" ]]; then
echo "uptime-kuma is up-to-date: $currentVersion"
exit 0
fi
update-source-version uptime-kuma 0 0000000000000000000000000000000000000000000000000000000000000000
update-source-version uptime-kuma "$latestVersion"
# use patched source
store_src="$(nix-build . -A uptime-kuma.src --no-out-link)"
cd "$(dirname "${BASH_SOURCE[0]}")"
node2nix \
--nodejs-16 \
--node-env ./node-env.nix \
--output ./node-packages.nix \
--lock "$store_src/package-lock.json" \
--composition ./composition.nix

View File

@ -9,24 +9,16 @@
}:
stdenv.mkDerivation rec {
version = "2.67";
version = "2.83";
pname = "asymptote";
src = fetchFromGitHub {
owner = "vectorgraphics";
repo = pname;
rev = version;
hash = "sha256-dMgsKBg6YQ3mdx3jFqjX4vZeizaier8+ZQUl4J6QXNE=";
hash = "sha256-Kz1uh3fMbADd39seunfL5O2Q31VLGKhu/ZuKi9/kuEc=";
};
patches =
(lib.optional (lib.versionOlder version "2.68")
(fetchpatch {
url = "https://github.com/vectorgraphics/asymptote/commit/3361214340d58235f4dbb8f24017d0cd5d94da72.patch";
hash = "sha256-1RYMZcwbjBAM7aAXFBbwst0eozWYFtJ8HcicjXogS/w=";
}))
;
nativeBuildInputs = [
autoreconfHook
bison
@ -80,7 +72,6 @@ stdenv.mkDerivation rec {
description = "A tool for programming graphics intended to replace Metapost";
license = licenses.gpl3Plus;
maintainers = [ maintainers.raskin ];
broken = stdenv.isDarwin; # https://github.com/vectorgraphics/asymptote/issues/69
platforms = platforms.linux ++ platforms.darwin;
};
}

View File

@ -1,6 +1,7 @@
{ lib
, buildGoModule
, fetchFromGitHub
, stdenv
}:
buildGoModule rec {
@ -27,5 +28,6 @@ buildGoModule rec {
homepage = "https://github.com/davrodpin/mole";
license = with licenses; [ mit ];
maintainers = with maintainers; [ fab ];
broken = stdenv.isDarwin; # build fails with go > 1.17
};
}

View File

@ -2154,7 +2154,7 @@ with pkgs;
astc-encoder = callPackage ../tools/graphics/astc-encoder { };
asymptote = callPackage ../tools/graphics/asymptote {
texLive = texlive.combine { inherit (texlive) scheme-small epsf cm-super texinfo; };
texLive = texlive.combine { inherit (texlive) scheme-small epsf cm-super texinfo media9 ocgx2; };
gsl = gsl_1;
};
@ -7152,7 +7152,9 @@ with pkgs;
git-latexdiff = callPackage ../tools/typesetting/git-latexdiff { };
gitea = callPackage ../applications/version-management/gitea { };
gitea = callPackage ../applications/version-management/gitea {
buildGoPackage = buildGo118Package; # nixosTests.gitea fails with 1.19
};
gokart = callPackage ../development/tools/gokart { };
@ -9259,10 +9261,7 @@ with pkgs;
docbook-xsl = docbook_xsl;
};
mole = callPackage ../tools/networking/mole {
# pinned due to build failure or vendoring problems. When unpinning double check with: nix-build -A $name.go-modules --rebuild
buildGoModule = buildGo117Module;
};
mole = callPackage ../tools/networking/mole { };
morgen = callPackage ../applications/office/morgen {
electron = electron_15;
@ -12659,6 +12658,8 @@ with pkgs;
unrar-wrapper = python3Packages.callPackage ../tools/archivers/unrar-wrapper { };
uptime-kuma = callPackage ../servers/monitoring/uptime-kuma { };
vul = callPackage ../applications/misc/vul { };
xar = callPackage ../tools/compression/xar { };
@ -26850,10 +26851,7 @@ with pkgs;
vegur = callPackage ../data/fonts/vegur { };
vegeta = callPackage ../tools/networking/vegeta {
# pinned due to build failure or vendoring problems. When unpinning double check with: nix-build -A $name.go-modules --rebuild
buildGoModule = buildGo117Module;
};
vegeta = callPackage ../tools/networking/vegeta { };
venta = callPackage ../data/themes/venta { };
@ -28007,10 +28005,7 @@ with pkgs;
exaile = callPackage ../applications/audio/exaile { };
exercism = callPackage ../applications/misc/exercism {
# pinned due to build failure or vendoring problems. When unpinning double check with: nix-build -A $name.go-modules --rebuild
buildGoModule = buildGo117Module;
};
exercism = callPackage ../applications/misc/exercism { };
expenses = callPackage ../applications/misc/expenses { };
@ -29985,10 +29980,7 @@ with pkgs;
mapmap = libsForQt5.callPackage ../applications/video/mapmap { };
marathonctl = callPackage ../tools/virtualization/marathonctl {
# pinned due to build failure or vendoring problems. When unpinning double check with: nix-build -A $name.go-modules --rebuild
buildGoModule = buildGo117Module;
};
marathonctl = callPackage ../tools/virtualization/marathonctl { };
markdown-pp = callPackage ../tools/text/markdown-pp { };
@ -32217,9 +32209,7 @@ with pkgs;
transcribe = callPackage ../applications/audio/transcribe { };
transmission = callPackage ../applications/networking/p2p/transmission {
openssl = openssl_1_1;
};
transmission = callPackage ../applications/networking/p2p/transmission { };
libtransmission = transmission.override {
installLib = true;
enableDaemon = false;

View File

@ -1326,8 +1326,6 @@ let
process = callPackage ../development/ocaml-modules/process { };
prof_spacetime = callPackage ../development/ocaml-modules/prof_spacetime { };
progress = callPackage ../development/ocaml-modules/progress { };
promise_jsoo = callPackage ../development/ocaml-modules/promise_jsoo { };