Merge master into haskell-updates

This commit is contained in:
github-actions[bot] 2023-05-23 00:12:35 +00:00 committed by GitHub
commit 5fcbc12a29
338 changed files with 13461 additions and 37878 deletions

View File

@ -38,6 +38,10 @@ jobs:
into: staging-next-22.11
- from: staging-next-22.11
into: staging-22.11
- from: release-23.05
into: staging-next-23.05
- from: staging-next-23.05
into: staging-23.05
name: ${{ matrix.pairs.from }} → ${{ matrix.pairs.into }}
steps:
- uses: actions/checkout@v3

View File

@ -1 +1 @@
23.05
23.11

View File

@ -1,9 +1,12 @@
# buildFHSEnv {#sec-fhs-environments}
`buildFHSEnv` provides a way to build and run FHS-compatible lightweight sandboxes. It creates an isolated root with bound `/nix/store`, so its footprint in terms of disk space needed is quite small. This allows one to run software which is hard or unfeasible to patch for NixOS -- 3rd-party source trees with FHS assumptions, games distributed as tarballs, software with integrity checking and/or external self-updated binaries. It uses Linux namespaces feature to create temporary lightweight environments which are destroyed after all child processes exit, without root user rights requirement. Accepted arguments are:
`buildFHSEnv` provides a way to build and run FHS-compatible lightweight sandboxes. It creates an isolated root filesystem with the host's `/nix/store`, so its footprint in terms of disk space is quite small. This allows you to run software which is hard or infeasible to patch for NixOS: for instance, 3rd-party source trees with FHS assumptions, games distributed as tarballs, and software with integrity checking and/or external self-updating binaries.
It uses the Linux namespaces feature to create temporary lightweight environments which are destroyed after all child processes exit, without requiring elevated privileges. It works similarly to containerisation technology such as Docker or Flatpak, but provides no security-relevant separation from the host system.
Accepted arguments are:
- `name`
Environment name.
The name of the environment and the wrapper executable.
- `targetPkgs`
Packages to be installed for the main host's architecture (i.e. x86_64 on x86_64 installations). Along with libraries, binaries are also installed.
- `multiPkgs`
@ -17,33 +20,35 @@
- `extraInstallCommands`
Additional commands to be executed for finalizing the derivation with the runner script.
- `runScript`
A command that would be executed inside the sandbox and passed all the command line arguments. It defaults to `bash`.
A shell command to be executed inside the sandbox. It defaults to `bash`. Command line arguments passed to the resulting wrapper are appended to this command by default.
This command must be escaped; e.g. `"foo app" --do-stuff --with "some file"`. See `lib.escapeShellArgs` and the sketch after this list.
- `profile`
Optional script for `/etc/profile` within the sandbox.
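As a hedged illustration of the escaping requirement (the environment name and command are placeholders taken from the example above), `lib.escapeShellArgs` can build a correctly quoted `runScript` from a list of arguments:
```nix
{ pkgs ? import <nixpkgs> {} }:

pkgs.buildFHSEnv {
  name = "escaped-run-env";
  targetPkgs = pkgs: [ pkgs.coreutils ];
  # Each argument is individually quoted, so the embedded spaces in
  # "foo app" and "some file" survive word splitting inside the sandbox.
  runScript = pkgs.lib.escapeShellArgs [ "foo app" "--do-stuff" "--with" "some file" ];
}
```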
One can create a simple environment using a `shell.nix` like that:
You can create a simple environment using a `shell.nix` like this:
```nix
{ pkgs ? import <nixpkgs> {} }:
(pkgs.buildFHSEnv {
name = "simple-x11-env";
targetPkgs = pkgs: (with pkgs;
[ udev
alsa-lib
]) ++ (with pkgs.xorg;
[ libX11
libXcursor
libXrandr
]);
multiPkgs = pkgs: (with pkgs;
[ udev
alsa-lib
]);
targetPkgs = pkgs: (with pkgs; [
udev
alsa-lib
]) ++ (with pkgs.xorg; [
libX11
libXcursor
libXrandr
]);
multiPkgs = pkgs: (with pkgs; [
udev
alsa-lib
]);
runScript = "bash";
}).env
```
Running `nix-shell` would then drop you into a shell with these libraries and binaries available. You can use this to run closed-source applications which expect FHS structure without hassles: simply change `runScript` to the application path, e.g. `./bin/start.sh` -- relative paths are supported.
Running `nix-shell` on it would drop you into a shell inside an FHS env where those libraries and binaries are available in FHS-compliant paths. Applications that expect an FHS structure (e.g. proprietary binaries) can run inside this environment without modification.
You can build a wrapper by running your binary in `runScript`, e.g. `./bin/start.sh`. Relative paths work as expected.
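For instance, a hedged sketch of wrapping a third-party application whose start script assumes an FHS layout (the name, dependencies, and script path are placeholders):
```nix
{ pkgs ? import <nixpkgs> {} }:

pkgs.buildFHSEnv {
  name = "some-game";
  targetPkgs = pkgs: [ pkgs.libGL pkgs.alsa-lib ];
  # Relative paths are resolved from the directory the wrapper is invoked in.
  runScript = "./bin/start.sh";
}
```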
Additionally, the FHS builder links all relocated gsettings-schemas (the glib setup-hook moves them to `share/gsettings-schemas/${name}/glib-2.0/schemas`) to their standard FHS location. This means you don't need to wrap binaries with `wrapGAppsHook`.

View File

@ -202,6 +202,7 @@ in mkLicense lset) ({
fullName = "Business Source License 1.1";
url = "https://mariadb.com/bsl11";
free = false;
redistributable = true;
};
caossl = {
@ -618,6 +619,12 @@ in mkLicense lset) ({
fullName = "Licence Art Libre 1.3";
};
lens = {
fullName = "Lens Terms of Service Agreement";
url = "https://k8slens.dev/licenses/tos";
free = false;
};
lgpl2Only = {
spdxId = "LGPL-2.0-only";
fullName = "GNU Library General Public License v2 only";

View File

@ -155,6 +155,8 @@ rec {
# Name for the package, shown in option description
name:
{
# Whether the package can be null, for example to disable installing a package altogether.
nullable ? false,
# The attribute path where the default package is located (may be omitted)
default ? name,
# A string or an attribute path to use as an example (may be omitted)
@ -164,19 +166,24 @@ rec {
}:
let
name' = if isList name then last name else name;
in mkOption ({
type = with lib.types; (if nullable then nullOr else lib.id) package;
description = "The ${name'} package to use."
+ (if extraDescription == "" then "" else " ") + extraDescription;
} // (if default != null then let
default' = if isList default then default else [ default ];
defaultPath = concatStringsSep "." default';
defaultValue = attrByPath default'
(throw "${defaultPath} cannot be found in pkgs") pkgs;
in mkOption {
in {
default = defaultValue;
defaultText = literalExpression ("pkgs." + defaultPath);
type = lib.types.package;
description = "The ${name'} package to use."
+ (if extraDescription == "" then "" else " ") + extraDescription;
${if default != null then "default" else null} = defaultValue;
${if example != null then "example" else null} = literalExpression
} else if nullable then {
default = null;
} else { }) // lib.optionalAttrs (example != null) {
example = literalExpression
(if isList example then "pkgs." + concatStringsSep "." example else example);
};
});
/* Like mkPackageOption, but emit an mdDoc description instead of DocBook. */
mkPackageOptionMD = pkgs: name: extra:

View File

@ -182,6 +182,11 @@ checkConfigOutput '^true$' config.enableAlias ./alias-with-priority.nix
checkConfigOutput '^false$' config.enable ./alias-with-priority-can-override.nix
checkConfigOutput '^false$' config.enableAlias ./alias-with-priority-can-override.nix
# Check mkPackageOption
checkConfigOutput '^"hello"$' config.package.pname ./declare-mkPackageOption.nix
checkConfigError 'The option .undefinedPackage. is used but not defined' config.undefinedPackage ./declare-mkPackageOption.nix
checkConfigOutput '^null$' config.nullablePackage ./declare-mkPackageOption.nix
# submoduleWith
## specialArgs should work

View File

@ -0,0 +1,19 @@
{ lib, ... }: let
pkgs.hello = {
type = "derivation";
pname = "hello";
};
in {
options = {
package = lib.mkPackageOption pkgs "hello" { };
undefinedPackage = lib.mkPackageOption pkgs "hello" {
default = null;
};
nullablePackage = lib.mkPackageOption pkgs "hello" {
nullable = true;
default = null;
};
};
}
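A hedged sketch of how a hypothetical NixOS module could consume the new `nullable` argument; setting the option to `null` then disables the package entirely. `services.example` is illustrative only and mirrors the `declare-mkPackageOption.nix` test above:
```nix
{ config, lib, pkgs, ... }: {
  options.services.example.package = lib.mkPackageOption pkgs "hello" {
    nullable = true;
  };

  config = lib.mkIf (config.services.example.package != null) {
    environment.systemPackages = [ config.services.example.package ];
  };
}
```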

View File

@ -195,7 +195,7 @@ rec {
On each release the first letter is bumped and a new animal is chosen
starting with that new letter.
*/
codeName = "Stoat";
codeName = "Tapir";
/* Returns the current nixpkgs version suffix as string. */
versionSuffix =

View File

@ -1951,6 +1951,12 @@
githubId = 75972;
name = "Ben Booth";
};
benwis = {
name = "Ben Wishovich";
email = "ben@benw.is";
github = "benwis";
githubId = 6953353;
};
berberman = {
email = "berberman@yandex.com";
matrix = "@berberman:mozilla.org";
@ -2676,6 +2682,12 @@
}
];
};
Ch1keen = {
email = "gihoong7@gmail.com";
github = "Ch1keen";
githubId = 40013212;
name = "Han Jeongjun";
};
chaduffy = {
email = "charles@dyfis.net";
github = "charles-dyfis-net";
@ -4617,7 +4629,7 @@
};
emilytrau = {
name = "Emily Trau";
email = "nix@angus.ws";
email = "emily+nix@downunderctf.com";
github = "emilytrau";
githubId = 13267947;
};
@ -12485,6 +12497,12 @@
githubId = 3737;
name = "Peter Jones";
};
pjrm = {
email = "pedrojrmagalhaes@gmail.com";
github = "pjrm";
githubId = 4622652;
name = "Pedro Magalhães";
};
pkharvey = {
email = "kayharvey@protonmail.com";
github = "pkharvey";
@ -14217,6 +14235,13 @@
github = "sei40kr";
githubId = 11665236;
};
seirl = {
name = "Antoine Pietri";
email = "antoine.pietri1@gmail.com";
github = "seirl";
githubId = 4927883;
matrix = "@seirl:matrix.org";
};
sellout = {
email = "greg@technomadic.org";
github = "sellout";

View File

@ -3,6 +3,7 @@
This section lists the release notes for each stable version of NixOS and current unstable revision.
```{=include=} sections
rl-2311.section.md
rl-2305.section.md
rl-2211.section.md
rl-2205.section.md

View File

@ -54,6 +54,8 @@ In addition to numerous new and upgraded packages, this release has the followin
- [systemd-repart](https://www.freedesktop.org/software/systemd/man/systemd-repart.service.html), a tool to grow and add partitions to a partition table. Available as [systemd.repart](options.html#opt-systemd.repart) and [boot.initrd.systemd.repart](options.html#opt-boot.initrd.systemd.repart)
- [frigate](https://frigate.video), an open source NVR built around real-time AI object detection. Available as [services.frigate](#opt-services.frigate.enable).
- [fzf](https://github.com/junegunn/fzf), a command-line fuzzy finder. Available as [programs.fzf](#opt-programs.fzf.fuzzyCompletion).
- [readarr](https://github.com/Readarr/Readarr), Book Manager and Automation (Sonarr for Ebooks). Available as [services.readarr](options.html#opt-services.readarr.enable).
@ -64,6 +66,8 @@ In addition to numerous new and upgraded packages, this release has the followin
- [gmediarender](https://github.com/hzeller/gmrender-resurrect), a simple, headless UPnP/DLNA renderer. Available as [services.gmediarender](options.html#opt-services.gmediarender.enable).
- [go2rtc](https://github.com/AlexxIT/go2rtc), a camera streaming application with support for RTSP, WebRTC, HomeKit, FFmpeg, RTMP and other protocols. Available as [services.go2rtc](options.html#opt-services.go2rtc.enable).
- [harmonia](https://github.com/nix-community/harmonia/), a Nix binary cache implemented in Rust using libnix-store. Available as [services.harmonia](options.html#opt-services.harmonia.enable).
- [hyprland](https://github.com/hyprwm/hyprland), a dynamic tiling Wayland compositor that doesn't sacrifice on its looks. Available as [programs.hyprland](#opt-programs.hyprland.enable).

View File

@ -0,0 +1,12 @@
# Release 23.11 (“Tapir”, 2023.11/??) {#sec-release-23.11}
## Highlights {#sec-release-23.11-highlights}
## New Services {#sec-release-23.11-new-services}
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
## Backward Incompatibilities {#sec-release-23.11-incompatibilities}
## Other Notable Changes {#sec-release-23.11-notable-changes}

View File

@ -52,7 +52,7 @@ let
buildMenuAdditionalParamsGrub2 = additional:
let
finalCfg = {
name = "${config.system.nixos.distroName} ${config.system.nixos.label}${config.isoImage.appendToMenuLabel}";
name = "${config.isoImage.prependToMenuLabel}${config.system.nixos.distroName} ${config.system.nixos.label}${config.isoImage.appendToMenuLabel}";
params = "init=${config.system.build.toplevel}/init ${additional} ${toString config.boot.kernelParams}";
image = "/boot/${config.system.boot.loader.kernelFile}";
initrd = "/boot/initrd";
@ -109,35 +109,35 @@ let
DEFAULT boot
LABEL boot
MENU LABEL ${config.system.nixos.distroName} ${config.system.nixos.label}${config.isoImage.appendToMenuLabel}
MENU LABEL ${config.isoImage.prependToMenuLabel}${config.system.nixos.distroName} ${config.system.nixos.label}${config.isoImage.appendToMenuLabel}
LINUX /boot/${config.system.boot.loader.kernelFile}
APPEND init=${config.system.build.toplevel}/init ${toString config.boot.kernelParams}
INITRD /boot/${config.system.boot.loader.initrdFile}
# A variant to boot with 'nomodeset'
LABEL boot-nomodeset
MENU LABEL ${config.system.nixos.distroName} ${config.system.nixos.label}${config.isoImage.appendToMenuLabel} (nomodeset)
MENU LABEL ${config.isoImage.prependToMenuLabel}${config.system.nixos.distroName} ${config.system.nixos.label}${config.isoImage.appendToMenuLabel} (nomodeset)
LINUX /boot/${config.system.boot.loader.kernelFile}
APPEND init=${config.system.build.toplevel}/init ${toString config.boot.kernelParams} nomodeset
INITRD /boot/${config.system.boot.loader.initrdFile}
# A variant to boot with 'copytoram'
LABEL boot-copytoram
MENU LABEL ${config.system.nixos.distroName} ${config.system.nixos.label}${config.isoImage.appendToMenuLabel} (copytoram)
MENU LABEL ${config.isoImage.prependToMenuLabel}${config.system.nixos.distroName} ${config.system.nixos.label}${config.isoImage.appendToMenuLabel} (copytoram)
LINUX /boot/${config.system.boot.loader.kernelFile}
APPEND init=${config.system.build.toplevel}/init ${toString config.boot.kernelParams} copytoram
INITRD /boot/${config.system.boot.loader.initrdFile}
# A variant to boot with verbose logging to the console
LABEL boot-debug
MENU LABEL ${config.system.nixos.distroName} ${config.system.nixos.label}${config.isoImage.appendToMenuLabel} (debug)
MENU LABEL ${config.isoImage.prependToMenuLabel}${config.system.nixos.distroName} ${config.system.nixos.label}${config.isoImage.appendToMenuLabel} (debug)
LINUX /boot/${config.system.boot.loader.kernelFile}
APPEND init=${config.system.build.toplevel}/init ${toString config.boot.kernelParams} loglevel=7
INITRD /boot/${config.system.boot.loader.initrdFile}
# A variant to boot with a serial console enabled
LABEL boot-serial
MENU LABEL ${config.system.nixos.distroName} ${config.system.nixos.label}${config.isoImage.appendToMenuLabel} (serial console=ttyS0,115200n8)
MENU LABEL ${config.isoImage.prependToMenuLabel}${config.system.nixos.distroName} ${config.system.nixos.label}${config.isoImage.appendToMenuLabel} (serial console=ttyS0,115200n8)
LINUX /boot/${config.system.boot.loader.kernelFile}
APPEND init=${config.system.build.toplevel}/init ${toString config.boot.kernelParams} console=ttyS0,115200n8
INITRD /boot/${config.system.boot.loader.initrdFile}
@ -452,6 +452,7 @@ in
isoImage.isoName = mkOption {
default = "${config.isoImage.isoBaseName}.iso";
type = lib.types.str;
description = lib.mdDoc ''
Name of the generated ISO image file.
'';
@ -459,6 +460,7 @@ in
isoImage.isoBaseName = mkOption {
default = config.system.nixos.distroId;
type = lib.types.str;
description = lib.mdDoc ''
Prefix of the name of the generated ISO image file.
'';
@ -466,6 +468,7 @@ in
isoImage.compressImage = mkOption {
default = false;
type = lib.types.bool;
description = lib.mdDoc ''
Whether the ISO image should be compressed using
{command}`zstd`.
@ -479,15 +482,16 @@ in
+ lib.optionalString isAarch "-Xbcj arm"
+ lib.optionalString (isPower && is32bit && isBigEndian) "-Xbcj powerpc"
+ lib.optionalString (isSparc) "-Xbcj sparc";
type = lib.types.str;
description = lib.mdDoc ''
Compression settings to use for the squashfs nix store.
'';
example = "zstd -Xcompression-level 6";
type = types.str;
};
isoImage.edition = mkOption {
default = "";
type = lib.types.str;
description = lib.mdDoc ''
Specifies which edition string to use in the volume ID of the generated
ISO image.
@ -497,6 +501,7 @@ in
isoImage.volumeID = mkOption {
# nixos-$EDITION-$RELEASE-$ARCH
default = "nixos${optionalString (config.isoImage.edition != "") "-${config.isoImage.edition}"}-${config.system.nixos.release}-${pkgs.stdenv.hostPlatform.uname.processor}";
type = lib.types.str;
description = lib.mdDoc ''
Specifies the label or volume ID of the generated ISO image.
Note that the label is used by stage 1 of the boot process to
@ -527,6 +532,7 @@ in
isoImage.includeSystemBuildDependencies = mkOption {
default = false;
type = lib.types.bool;
description = lib.mdDoc ''
Set this option to include all the needed sources etc in the
image. It significantly increases image size. Use that when
@ -538,6 +544,7 @@ in
isoImage.makeBiosBootable = mkOption {
default = false;
type = lib.types.bool;
description = lib.mdDoc ''
Whether the ISO image should be a BIOS-bootable disk.
'';
@ -545,6 +552,7 @@ in
isoImage.makeEfiBootable = mkOption {
default = false;
type = lib.types.bool;
description = lib.mdDoc ''
Whether the ISO image should be an EFI-bootable volume.
'';
@ -552,6 +560,7 @@ in
isoImage.makeUsbBootable = mkOption {
default = false;
type = lib.types.bool;
description = lib.mdDoc ''
Whether the ISO image should be bootable from CD as well as USB.
'';
@ -616,8 +625,22 @@ in
'';
};
isoImage.prependToMenuLabel = mkOption {
default = "";
type = types.str;
example = "Install ";
description = lib.mdDoc ''
The string to prepend before the menu label for the NixOS system.
This will be directly prepended (without whitespace) to the NixOS version
string. For example, if it is set to `XXX`, the menu label becomes:
`XXXNixOS 99.99-pre666`
'';
};
isoImage.appendToMenuLabel = mkOption {
default = " Installer";
type = types.str;
example = " Live System";
description = lib.mdDoc ''
The string to append after the menu label for the NixOS system.

View File

@ -1150,6 +1150,8 @@
./services/ttys/gpm.nix
./services/ttys/kmscon.nix
./services/video/epgstation/default.nix
./services/video/go2rtc/default.nix
./services/video/frigate.nix
./services/video/mirakurun.nix
./services/video/replay-sorcery.nix
./services/video/mediamtx.nix

View File

@ -107,8 +107,9 @@ in
ldap = {
package = mkOption {
type = types.package;
default = pkgs.openldap;
defaultText = lib.literalExpression "pkgs.openldap";
# needs openldap built with a libxcrypt that supports crypt sha256 until https://github.com/majewsky/portunus/issues/2 is solved
default = pkgs.openldap.override { libxcrypt = pkgs.libxcrypt-legacy; };
defaultText = lib.literalExpression "pkgs.openldap.override { libxcrypt = pkgs.libxcrypt-legacy; }";
description = lib.mdDoc "The OpenLDAP package to use.";
};

View File

@ -7,26 +7,25 @@ let
opt = options.services.syncthing;
defaultUser = "syncthing";
defaultGroup = defaultUser;
settingsFormat = pkgs.formats.json { };
devices = mapAttrsToList (_: device: device // {
devices = mapAttrsToList (name: device: {
deviceID = device.id;
}) cfg.settings.devices;
inherit (device) name addresses introducer autoAcceptFolders;
}) cfg.devices;
folders = mapAttrsToList (_: folder: folder //
throwIf (folder?rescanInterval || folder?watch || folder?watchDelay) ''
The options services.syncthing.settings.folders.<name>.{rescanInterval,watch,watchDelay}
were removed. Please use, respectively, {rescanIntervalS,fsWatcherEnabled,fsWatcherDelayS} instead.
'' {
devices = map (device:
if builtins.isString device then
{ deviceId = cfg.settings.devices.${device}.id; }
else
device
) folder.devices;
}) (filterAttrs (_: folder:
folders = mapAttrsToList ( _: folder: {
inherit (folder) path id label type;
devices = map (device: { deviceId = cfg.devices.${device}.id; }) folder.devices;
rescanIntervalS = folder.rescanInterval;
fsWatcherEnabled = folder.watch;
fsWatcherDelayS = folder.watchDelay;
ignorePerms = folder.ignorePerms;
ignoreDelete = folder.ignoreDelete;
versioning = folder.versioning;
}) (filterAttrs (
_: folder:
folder.enable
) cfg.settings.folders);
) cfg.folders);
updateConfig = pkgs.writers.writeDash "merge-syncthing-config" ''
set -efu
@ -55,10 +54,10 @@ let
old_cfg=$(curl ${cfg.guiAddress}/rest/config)
# generate the new config by merging with the NixOS config options
new_cfg=$(printf '%s\n' "$old_cfg" | ${pkgs.jq}/bin/jq -c ${escapeShellArg ''. * ${builtins.toJSON cfg.settings} * {
"devices": (${builtins.toJSON devices}${optionalString (cfg.settings.devices == {} || ! cfg.overrideDevices) " + .devices"}),
"folders": (${builtins.toJSON folders}${optionalString (cfg.settings.folders == {} || ! cfg.overrideFolders) " + .folders"})
}''})
new_cfg=$(printf '%s\n' "$old_cfg" | ${pkgs.jq}/bin/jq -c '. * {
"devices": (${builtins.toJSON devices}${optionalString (cfg.devices == {} || ! cfg.overrideDevices) " + .devices"}),
"folders": (${builtins.toJSON folders}${optionalString (cfg.folders == {} || ! cfg.overrideFolders) " + .folders"})
} * ${builtins.toJSON cfg.extraOptions}')
# send the new config
curl -X PUT -d "$new_cfg" ${cfg.guiAddress}/rest/config
@ -100,282 +99,287 @@ in {
default = true;
description = mdDoc ''
Whether to delete the devices which are not configured via the
[devices](#opt-services.syncthing.settings.devices) option.
[devices](#opt-services.syncthing.devices) option.
If set to `false`, devices added via the web
interface will persist and will have to be deleted manually.
'';
};
devices = mkOption {
default = {};
description = mdDoc ''
Peers/devices which Syncthing should communicate with.
Note that you can still add devices manually, but those changes
will be reverted on restart if [overrideDevices](#opt-services.syncthing.overrideDevices)
is enabled.
'';
example = {
bigbox = {
id = "7CFNTQM-IMTJBHJ-3UWRDIU-ZGQJFR6-VCXZ3NB-XUH3KZO-N52ITXR-LAIYUAU";
addresses = [ "tcp://192.168.0.10:51820" ];
};
};
type = types.attrsOf (types.submodule ({ name, ... }: {
options = {
name = mkOption {
type = types.str;
default = name;
description = lib.mdDoc ''
The name of the device.
'';
};
addresses = mkOption {
type = types.listOf types.str;
default = [];
description = lib.mdDoc ''
The addresses used to connect to the device.
If this is left empty, dynamic configuration is attempted.
'';
};
id = mkOption {
type = types.str;
description = mdDoc ''
The device ID. See <https://docs.syncthing.net/dev/device-ids.html>.
'';
};
introducer = mkOption {
type = types.bool;
default = false;
description = mdDoc ''
Whether the device should act as an introducer and be allowed
to add folders on this computer.
See <https://docs.syncthing.net/users/introducer.html>.
'';
};
autoAcceptFolders = mkOption {
type = types.bool;
default = false;
description = mdDoc ''
Automatically create or share folders that this device advertises at the default path.
See <https://docs.syncthing.net/users/config.html?highlight=autoaccept#config-file-format>.
'';
};
};
}));
};
overrideFolders = mkOption {
type = types.bool;
default = true;
description = mdDoc ''
Whether to delete the folders which are not configured via the
[folders](#opt-services.syncthing.settings.folders) option.
[folders](#opt-services.syncthing.folders) option.
If set to `false`, folders added via the web
interface will persist and will have to be deleted manually.
'';
};
settings = mkOption {
type = types.submodule {
freeformType = settingsFormat.type;
folders = mkOption {
default = {};
description = mdDoc ''
Folders which should be shared by Syncthing.
Note that you can still add folders manually, but those changes
will be reverted on restart if [overrideFolders](#opt-services.syncthing.overrideFolders)
is enabled.
'';
example = literalExpression ''
{
"/home/user/sync" = {
id = "syncme";
devices = [ "bigbox" ];
};
}
'';
type = types.attrsOf (types.submodule ({ name, ... }: {
options = {
# global options
options = mkOption {
default = {};
description = mdDoc ''
The options element contains all other global configuration options
enable = mkOption {
type = types.bool;
default = true;
description = lib.mdDoc ''
Whether to share this folder.
This option is useful when you want to define all folders
in one place, but not every machine should share all folders.
'';
type = types.submodule ({ name, ... }: {
freeformType = settingsFormat.type;
};
path = mkOption {
# TODO for release 23.05: allow relative paths again and set
# working directory to cfg.dataDir
type = types.str // {
check = x: types.str.check x && (substring 0 1 x == "/" || substring 0 2 x == "~/");
description = types.str.description + " starting with / or ~/";
};
default = name;
description = lib.mdDoc ''
The path to the folder which should be shared.
Only absolute paths (starting with `/`) and paths relative to
the [user](#opt-services.syncthing.user)'s home directory
(starting with `~/`) are allowed.
'';
};
id = mkOption {
type = types.str;
default = name;
description = lib.mdDoc ''
The ID of the folder. Must be the same on all devices.
'';
};
label = mkOption {
type = types.str;
default = name;
description = lib.mdDoc ''
The label of the folder.
'';
};
devices = mkOption {
type = types.listOf types.str;
default = [];
description = mdDoc ''
The devices this folder should be shared with. Each device must
be defined in the [devices](#opt-services.syncthing.devices) option.
'';
};
versioning = mkOption {
default = null;
description = mdDoc ''
How to keep changed/deleted files with Syncthing.
There are 4 different types of versioning with different parameters.
See <https://docs.syncthing.net/users/versioning.html>.
'';
example = literalExpression ''
[
{
versioning = {
type = "simple";
params.keep = "10";
};
}
{
versioning = {
type = "trashcan";
params.cleanoutDays = "1000";
};
}
{
versioning = {
type = "staggered";
fsPath = "/syncthing/backup";
params = {
cleanInterval = "3600";
maxAge = "31536000";
};
};
}
{
versioning = {
type = "external";
params.versionsPath = pkgs.writers.writeBash "backup" '''
folderpath="$1"
filepath="$2"
rm -rf "$folderpath/$filepath"
''';
};
}
]
'';
type = with types; nullOr (submodule {
options = {
localAnnounceEnabled = mkOption {
type = types.bool;
default = true;
description = lib.mdDoc ''
Whether to send announcements to the local LAN, also use such announcements to find other devices.
type = mkOption {
type = enum [ "external" "simple" "staggered" "trashcan" ];
description = mdDoc ''
The type of versioning.
See <https://docs.syncthing.net/users/versioning.html>.
'';
};
localAnnouncePort = mkOption {
type = types.int;
default = 21027;
description = lib.mdDoc ''
The port on which to listen and send IPv4 broadcast announcements to.
fsPath = mkOption {
default = "";
type = either str path;
description = mdDoc ''
Path to the versioning folder.
See <https://docs.syncthing.net/users/versioning.html>.
'';
};
relaysEnabled = mkOption {
type = types.bool;
default = true;
description = lib.mdDoc ''
When true, relays will be connected to and potentially used for device to device connections.
'';
};
urAccepted = mkOption {
type = types.int;
default = 0;
description = lib.mdDoc ''
Whether the user has accepted to submit anonymous usage data.
The default, 0, means the user has not made a choice, and Syncthing will ask at some point in the future.
"-1" means no, a number above zero means that that version of usage reporting has been accepted.
'';
};
limitBandwidthInLan = mkOption {
type = types.bool;
default = false;
description = lib.mdDoc ''
Whether to apply bandwidth limits to devices in the same broadcast domain as the local device.
'';
};
maxFolderConcurrency = mkOption {
type = types.int;
default = 0;
description = lib.mdDoc ''
This option controls how many folders may concurrently be in I/O-intensive operations such as syncing or scanning.
The mechanism is described in detail in a [separate chapter](https://docs.syncthing.net/advanced/option-max-concurrency.html).
params = mkOption {
type = attrsOf (either str path);
description = mdDoc ''
The parameters for versioning. Structure depends on
[versioning.type](#opt-services.syncthing.folders._name_.versioning.type).
See <https://docs.syncthing.net/users/versioning.html>.
'';
};
};
});
};
# device settings
devices = mkOption {
default = {};
description = mdDoc ''
Peers/devices which Syncthing should communicate with.
Note that you can still add devices manually, but those changes
will be reverted on restart if [overrideDevices](#opt-services.syncthing.overrideDevices)
is enabled.
rescanInterval = mkOption {
type = types.int;
default = 3600;
description = lib.mdDoc ''
How often the folder should be rescanned for changes.
'';
example = {
bigbox = {
id = "7CFNTQM-IMTJBHJ-3UWRDIU-ZGQJFR6-VCXZ3NB-XUH3KZO-N52ITXR-LAIYUAU";
addresses = [ "tcp://192.168.0.10:51820" ];
};
};
type = types.attrsOf (types.submodule ({ name, ... }: {
freeformType = settingsFormat.type;
options = {
name = mkOption {
type = types.str;
default = name;
description = lib.mdDoc ''
The name of the device.
'';
};
id = mkOption {
type = types.str;
description = mdDoc ''
The device ID. See <https://docs.syncthing.net/dev/device-ids.html>.
'';
};
autoAcceptFolders = mkOption {
type = types.bool;
default = false;
description = mdDoc ''
Automatically create or share folders that this device advertises at the default path.
See <https://docs.syncthing.net/users/config.html?highlight=autoaccept#config-file-format>.
'';
};
};
}));
};
# folder settings
folders = mkOption {
default = {};
description = mdDoc ''
Folders which should be shared by Syncthing.
Note that you can still add folders manually, but those changes
will be reverted on restart if [overrideFolders](#opt-services.syncthing.overrideFolders)
is enabled.
type = mkOption {
type = types.enum [ "sendreceive" "sendonly" "receiveonly" "receiveencrypted" ];
default = "sendreceive";
description = lib.mdDoc ''
Whether to only send changes for this folder, only receive them
or both. `receiveencrypted` can be used for untrusted devices. See
<https://docs.syncthing.net/users/untrusted.html> for reference.
'';
example = literalExpression ''
{
"/home/user/sync" = {
id = "syncme";
devices = [ "bigbox" ];
};
}
'';
type = types.attrsOf (types.submodule ({ name, ... }: {
freeformType = settingsFormat.type;
options = {
enable = mkOption {
type = types.bool;
default = true;
description = lib.mdDoc ''
Whether to share this folder.
This option is useful when you want to define all folders
in one place, but not every machine should share all folders.
'';
};
path = mkOption {
# TODO for release 23.05: allow relative paths again and set
# working directory to cfg.dataDir
type = types.str // {
check = x: types.str.check x && (substring 0 1 x == "/" || substring 0 2 x == "~/");
description = types.str.description + " starting with / or ~/";
};
default = name;
description = lib.mdDoc ''
The path to the folder which should be shared.
Only absolute paths (starting with `/`) and paths relative to
the [user](#opt-services.syncthing.user)'s home directory
(starting with `~/`) are allowed.
'';
};
id = mkOption {
type = types.str;
default = name;
description = lib.mdDoc ''
The ID of the folder. Must be the same on all devices.
'';
};
label = mkOption {
type = types.str;
default = name;
description = lib.mdDoc ''
The label of the folder.
'';
};
devices = mkOption {
type = types.listOf types.str;
default = [];
description = mdDoc ''
The devices this folder should be shared with. Each device must
be defined in the [devices](#opt-services.syncthing.settings.devices) option.
'';
};
versioning = mkOption {
default = null;
description = mdDoc ''
How to keep changed/deleted files with Syncthing.
There are 4 different types of versioning with different parameters.
See <https://docs.syncthing.net/users/versioning.html>.
'';
example = literalExpression ''
[
{
versioning = {
type = "simple";
params.keep = "10";
};
}
{
versioning = {
type = "trashcan";
params.cleanoutDays = "1000";
};
}
{
versioning = {
type = "staggered";
fsPath = "/syncthing/backup";
params = {
cleanInterval = "3600";
maxAge = "31536000";
};
};
}
{
versioning = {
type = "external";
params.versionsPath = pkgs.writers.writeBash "backup" '''
folderpath="$1"
filepath="$2"
rm -rf "$folderpath/$filepath"
''';
};
}
]
'';
type = with types; nullOr (submodule {
freeformType = settingsFormat.type;
options = {
type = mkOption {
type = enum [ "external" "simple" "staggered" "trashcan" ];
description = mdDoc ''
The type of versioning.
See <https://docs.syncthing.net/users/versioning.html>.
'';
};
};
});
};
copyOwnershipFromParent = mkOption {
type = types.bool;
default = false;
description = mdDoc ''
On Unix systems, tries to copy file/folder ownership from the parent directory (the directory it's located in).
Requires running Syncthing as a privileged user, or granting it additional capabilities (e.g. CAP_CHOWN on Linux).
'';
};
};
}));
};
watch = mkOption {
type = types.bool;
default = true;
description = lib.mdDoc ''
Whether the folder should be watched for changes by inotify.
'';
};
watchDelay = mkOption {
type = types.int;
default = 10;
description = lib.mdDoc ''
The delay after an inotify event is triggered.
'';
};
ignorePerms = mkOption {
type = types.bool;
default = true;
description = lib.mdDoc ''
Whether to ignore permission changes.
'';
};
ignoreDelete = mkOption {
type = types.bool;
default = false;
description = mdDoc ''
Whether to skip deleting files that are deleted by peers.
See <https://docs.syncthing.net/advanced/folder-ignoredelete.html>.
'';
};
};
};
}));
};
extraOptions = mkOption {
type = types.addCheck (pkgs.formats.json {}).type isAttrs;
default = {};
description = mdDoc ''
Extra configuration options for Syncthing.
@ -526,10 +530,6 @@ in {
This option was removed because Syncthing now has the inotify functionality included under the name "fswatcher".
It can be enabled on a per-folder basis through the web interface.
'')
(mkRenamedOptionModule [ "services" "syncthing" "extraOptions" ] [ "services" "syncthing" "settings" ])
(mkRenamedOptionModule [ "services" "syncthing" "folders" ] [ "services" "syncthing" "settings" "folders" ])
(mkRenamedOptionModule [ "services" "syncthing" "devices" ] [ "services" "syncthing" "settings" "devices" ])
(mkRenamedOptionModule [ "services" "syncthing" "options" ] [ "services" "syncthing" "settings" "options" ])
] ++ map (o:
mkRenamedOptionModule [ "services" "syncthing" "declarative" o ] [ "services" "syncthing" o ]
) [ "cert" "key" "devices" "folders" "overrideDevices" "overrideFolders" "extraOptions"];
@ -615,7 +615,9 @@ in {
];
};
};
syncthing-init = mkIf (cfg.settings != {}) {
syncthing-init = mkIf (
cfg.devices != {} || cfg.folders != {} || cfg.extraOptions != {}
) {
description = "Syncthing configuration updater";
requisite = [ "syncthing.service" ];
after = [ "syncthing.service" ];
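Taken together, a hedged sketch of a configuration using these top-level options (the device ID, address, and folder path are the placeholder values from the option examples above):
```nix
{
  services.syncthing = {
    enable = true;
    overrideDevices = true;   # revert manual web-UI device changes on restart
    overrideFolders = true;
    devices.bigbox = {
      id = "7CFNTQM-IMTJBHJ-3UWRDIU-ZGQJFR6-VCXZ3NB-XUH3KZO-N52ITXR-LAIYUAU";
      addresses = [ "tcp://192.168.0.10:51820" ];
    };
    folders."/home/user/sync" = {
      id = "syncme";
      devices = [ "bigbox" ];
    };
  };
}
```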

View File

@ -80,11 +80,11 @@ in
options.services.epgstation = {
enable = lib.mkEnableOption (lib.mdDoc description);
package = lib.mkOption {
default = pkgs.epgstation;
type = lib.types.package;
defaultText = lib.literalExpression "pkgs.epgstation";
description = lib.mdDoc "epgstation package to use";
package = lib.mkPackageOptionMD pkgs "epgstation" { };
ffmpeg = lib.mkPackageOptionMD pkgs "ffmpeg" {
default = [ "ffmpeg-headless" ];
example = "pkgs.ffmpeg-full";
};
usePreconfiguredStreaming = lib.mkOption {
@ -278,6 +278,8 @@ in
package = lib.mkDefault pkgs.mariadb;
ensureDatabases = [ cfg.database.name ];
# FIXME: enable once mysqljs supports auth_socket
# https://github.com/mysqljs/mysql/issues/1507
#
# ensureUsers = [ {
# name = username;
# ensurePermissions = { "${cfg.database.name}.*" = "ALL PRIVILEGES"; };
@ -295,8 +297,8 @@ in
database = cfg.database.name;
};
ffmpeg = lib.mkDefault "${pkgs.ffmpeg-full}/bin/ffmpeg";
ffprobe = lib.mkDefault "${pkgs.ffmpeg-full}/bin/ffprobe";
ffmpeg = lib.mkDefault "${cfg.ffmpeg}/bin/ffmpeg";
ffprobe = lib.mkDefault "${cfg.ffmpeg}/bin/ffprobe";
# for disambiguation with TypeScript files
recordedFileExtension = lib.mkDefault ".m2ts";
@ -308,9 +310,15 @@ in
];
systemd.tmpfiles.rules = [
"d '/var/lib/epgstation/key' - ${username} ${groupname} - -"
"d '/var/lib/epgstation/streamfiles' - ${username} ${groupname} - -"
"d '/var/lib/epgstation/drop' - ${username} ${groupname} - -"
"d '/var/lib/epgstation/recorded' - ${username} ${groupname} - -"
"d '/var/lib/epgstation/thumbnail' - ${username} ${groupname} - -"
"d '/var/lib/epgstation/db/subscribers' - ${username} ${groupname} - -"
"d '/var/lib/epgstation/db/migrations/mysql' - ${username} ${groupname} - -"
"d '/var/lib/epgstation/db/migrations/postgres' - ${username} ${groupname} - -"
"d '/var/lib/epgstation/db/migrations/sqlite' - ${username} ${groupname} - -"
];
systemd.services.epgstation = {

View File

@ -0,0 +1,368 @@
{ config
, lib
, pkgs
, ...
}:
let
inherit (lib)
literalExpression
mkDefault
mdDoc
mkEnableOption
mkIf
mkOption
types;
cfg = config.services.frigate;
format = pkgs.formats.yaml {};
filteredConfig = lib.converge (lib.filterAttrsRecursive (_: v: ! lib.elem v [ null ])) cfg.settings;
cameraFormat = with types; submodule {
freeformType = format.type;
options = {
ffmpeg = {
inputs = mkOption {
description = mdDoc ''
List of inputs for this camera.
'';
type = listOf (submodule {
freeformType = format.type;
options = {
path = mkOption {
type = str;
example = "rtsp://192.0.2.1:554/rtsp";
description = mdDoc ''
Stream URL
'';
};
roles = mkOption {
type = listOf (enum [ "detect" "record" "rtmp" ]);
example = literalExpression ''
[ "detect" "rtmp" ]
'';
description = mdDoc ''
List of roles for this stream
'';
};
};
});
};
};
};
};
in
{
meta.buildDocsInSandbox = false;
options.services.frigate = with types; {
enable = mkEnableOption (mdDoc "Frigate NVR");
package = mkOption {
type = package;
default = pkgs.frigate;
description = mdDoc ''
The frigate package to use.
'';
};
hostname = mkOption {
type = str;
example = "frigate.exampe.com";
description = mdDoc ''
Hostname of the nginx vhost to configure.
Only nginx is supported by upstream for direct reverse proxying.
'';
};
settings = mkOption {
type = submodule {
freeformType = format.type;
options = {
cameras = mkOption {
type = attrsOf cameraFormat;
description = mdDoc ''
Attribute set of camera configurations.
https://docs.frigate.video/configuration/cameras
'';
};
database = {
path = mkOption {
type = path;
default = "/var/lib/frigate/frigate.db";
description = mdDoc ''
Path to the SQLite database used
'';
};
};
mqtt = {
enabled = mkEnableOption (mdDoc "MQTT support");
host = mkOption {
type = nullOr str;
default = null;
example = "mqtt.example.com";
description = mdDoc ''
MQTT server hostname
'';
};
};
};
};
default = {};
description = mdDoc ''
Frigate configuration as a nix attribute set.
See the project documentation for how to configure frigate.
- [Creating a config file](https://docs.frigate.video/guides/getting_started)
- [Configuration reference](https://docs.frigate.video/configuration/index)
'';
};
};
config = mkIf cfg.enable {
services.nginx = {
enable = true;
additionalModules = with pkgs.nginxModules; [
secure-token
rtmp
vod
];
recommendedProxySettings = mkDefault true;
recommendedGzipSettings = mkDefault true;
upstreams = {
frigate-api.servers = {
"127.0.0.1:5001" = {};
};
frigate-mqtt-ws.servers = {
"127.0.0.1:5002" = {};
};
frigate-jsmpeg.servers = {
"127.0.0.1:8082" = {};
};
frigate-go2rtc.servers = {
"127.0.0.1:1984" = {};
};
};
# Based on https://github.com/blakeblackshear/frigate/blob/v0.12.0/docker/rootfs/usr/local/nginx/conf/nginx.conf
virtualHosts."${cfg.hostname}" = {
locations = {
"/api/" = {
proxyPass = "http://frigate-api/";
};
"~* /api/.*\.(jpg|jpeg|png)$" = {
proxyPass = "http://frigate-api";
extraConfig = ''
add_header 'Access-Control-Allow-Origin' '*';
add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS';
rewrite ^/api/(.*)$ $1 break;
'';
};
"/vod/" = {
extraConfig = ''
aio threads;
vod hls;
secure_token $args;
secure_token_types application/vnd.apple.mpegurl;
add_header Access-Control-Allow-Headers '*';
add_header Access-Control-Expose-Headers 'Server,range,Content-Length,Content-Range';
add_header Access-Control-Allow-Methods 'GET, HEAD, OPTIONS';
add_header Access-Control-Allow-Origin '*';
add_header Cache-Control "no-store";
expires off;
'';
};
"/stream/" = {
# TODO
};
"/ws" = {
proxyPass = "http://frigate-mqtt-ws/";
proxyWebsockets = true;
};
"/live/jsmpeg" = {
proxyPass = "http://frigate-jsmpeg/";
proxyWebsockets = true;
};
"/live/mse/" = {
proxyPass = "http://frigate-go2rtc/";
proxyWebsockets = true;
};
"/live/webrtc/" = {
proxyPass = "http://frigate-go2rtc/";
proxyWebsockets = true;
};
"/cache/" = {
alias = "/var/cache/frigate/";
};
"/clips/" = {
root = "/var/lib/frigate";
extraConfig = ''
add_header 'Access-Control-Allow-Origin' "$http_origin" always;
add_header 'Access-Control-Allow-Credentials' 'true';
add_header 'Access-Control-Expose-Headers' 'Content-Length';
if ($request_method = 'OPTIONS') {
add_header 'Access-Control-Allow-Origin' "$http_origin";
add_header 'Access-Control-Max-Age' 1728000;
add_header 'Content-Type' 'text/plain charset=UTF-8';
add_header 'Content-Length' 0;
return 204;
}
types {
video/mp4 mp4;
image/jpeg jpg;
}
autoindex on;
'';
};
"/recordings/" = {
root = "/var/lib/frigate";
extraConfig = ''
add_header 'Access-Control-Allow-Origin' "$http_origin" always;
add_header 'Access-Control-Allow-Credentials' 'true';
add_header 'Access-Control-Expose-Headers' 'Content-Length';
if ($request_method = 'OPTIONS') {
add_header 'Access-Control-Allow-Origin' "$http_origin";
add_header 'Access-Control-Max-Age' 1728000;
add_header 'Content-Type' 'text/plain charset=UTF-8';
add_header 'Content-Length' 0;
return 204;
}
types {
video/mp4 mp4;
}
autoindex on;
autoindex_format json;
'';
};
"/assets/" = {
root = cfg.package.web;
extraConfig = ''
access_log off;
expires 1y;
add_header Cache-Control "public";
'';
};
"/" = {
root = cfg.package.web;
tryFiles = "$uri $uri/ /index.html";
extraConfig = ''
add_header Cache-Control "no-store";
expires off;
sub_filter 'href="/BASE_PATH/' 'href="$http_x_ingress_path/';
sub_filter 'url(/BASE_PATH/' 'url($http_x_ingress_path/';
sub_filter '"/BASE_PATH/dist/' '"$http_x_ingress_path/dist/';
sub_filter '"/BASE_PATH/js/' '"$http_x_ingress_path/js/';
sub_filter '"/BASE_PATH/assets/' '"$http_x_ingress_path/assets/';
sub_filter '"/BASE_PATH/monacoeditorwork/' '"$http_x_ingress_path/assets/';
sub_filter 'return"/BASE_PATH/"' 'return window.baseUrl';
sub_filter '<body>' '<body><script>window.baseUrl="$http_x_ingress_path/";</script>';
sub_filter_types text/css application/javascript;
sub_filter_once off;
'';
};
};
extraConfig = ''
# vod settings
vod_base_url "";
vod_segments_base_url "";
vod_mode mapped;
vod_max_mapping_response_size 1m;
vod_upstream_location /api;
vod_align_segments_to_key_frames on;
vod_manifest_segment_durations_mode accurate;
vod_ignore_edit_list on;
vod_segment_duration 10000;
vod_hls_mpegts_align_frames off;
vod_hls_mpegts_interleave_frames on;
# file handle caching / aio
open_file_cache max=1000 inactive=5m;
open_file_cache_valid 2m;
open_file_cache_min_uses 1;
open_file_cache_errors on;
aio on;
# https://github.com/kaltura/nginx-vod-module#vod_open_file_thread_pool
vod_open_file_thread_pool default;
# vod caches
vod_metadata_cache metadata_cache 512m;
vod_mapping_cache mapping_cache 5m 10m;
# gzip manifest
gzip_types application/vnd.apple.mpegurl;
'';
};
appendConfig = ''
rtmp {
server {
listen 1935;
chunk_size 4096;
allow publish 127.0.0.1;
deny publish all;
allow play all;
application live {
live on;
record off;
meta copy;
}
}
}
'';
};
systemd.services.frigate = {
after = [
"go2rtc.service"
"network.target"
];
wantedBy = [
"multi-user.target"
];
environment = {
CONFIG_FILE = format.generate "frigate.yml" filteredConfig;
HOME = "/var/lib/frigate";
PYTHONPATH = cfg.package.pythonPath;
};
path = with pkgs; [
# unfree:
# config.boot.kernelPackages.nvidiaPackages.latest.bin
ffmpeg_5-headless
libva-utils
procps
radeontop
] ++ lib.optionals (!stdenv.isAarch64) [
# not available on aarch64-linux
intel-gpu-tools
];
serviceConfig = {
ExecStart = "${cfg.package.python.interpreter} -m frigate";
DynamicUser = true;
User = "frigate";
StateDirectory = "frigate";
UMask = "0077";
# Caches
PrivateTmp = true;
CacheDirectory = "frigate";
BindPaths = [
"/migrations:${cfg.package}/share/frigate/migrations:ro"
];
};
};
};
}
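A hedged, minimal `services.frigate` sketch (the hostname and stream URL are placeholders; the roles are those documented in the camera options above):
```nix
{
  services.frigate = {
    enable = true;
    hostname = "frigate.example.com";
    settings = {
      mqtt.enabled = false;
      cameras.front_door.ffmpeg.inputs = [
        {
          path = "rtsp://192.0.2.1:554/rtsp";
          roles = [ "detect" "record" ];
        }
      ];
    };
  };
}
```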

View File

@ -0,0 +1,111 @@
{ lib
, config
, options
, pkgs
, ...
}:
let
inherit (lib)
literalExpression
mdDoc
mkEnableOption
mkOption
mkPackageOptionMD
types
;
cfg = config.services.go2rtc;
opt = options.services.go2rtc;
format = pkgs.formats.yaml {};
configFile = format.generate "go2rtc.yaml" cfg.settings;
in
{
meta.buildDocsInSandbox = false;
options.services.go2rtc = with types; {
enable = mkEnableOption (mdDoc "go2rtc streaming server");
package = mkPackageOptionMD pkgs "go2rtc" { };
settings = mkOption {
default = {};
description = mdDoc ''
go2rtc configuration as a Nix attribute set.
See the [wiki](https://github.com/AlexxIT/go2rtc/wiki/Configuration) for possible configuration options.
'';
type = submodule {
freeformType = format.type;
options = {
# https://github.com/AlexxIT/go2rtc/blob/v1.5.0/README.md#module-api
api = {
listen = mkOption {
type = str;
default = ":1984";
example = "127.0.0.1:1984";
description = mdDoc ''
API listen address, conforming to a Go address string.
'';
};
};
# https://github.com/AlexxIT/go2rtc/blob/v1.5.0/README.md#source-ffmpeg
ffmpeg = {
bin = mkOption {
type = path;
default = "${lib.getBin pkgs.ffmpeg_6-headless}/bin/ffmpeg";
defaultText = literalExpression "\${lib.getBin pkgs.ffmpeg_6-headless}/bin/ffmpeg";
description = mdDoc ''
The ffmpeg package to use for transcoding.
'';
};
};
# TODO: https://github.com/AlexxIT/go2rtc/blob/v1.5.0/README.md#module-rtsp
rtsp = {
};
streams = mkOption {
type = attrsOf (either str (listOf str));
default = {};
example = literalExpression ''
{
cam1 = "onvif://admin:password@192.168.1.123:2020";
cam2 = "tcp://192.168.1.123:12345";
}
'';
description = mdDoc ''
Stream source configuration. Multiple source types are supported.
Check the [configuration reference](https://github.com/AlexxIT/go2rtc/blob/v${cfg.package.version}/README.md#module-streams) for possible options.
'';
};
# TODO: https://github.com/AlexxIT/go2rtc/blob/v1.5.0/README.md#module-webrtc
webrtc = {
};
};
};
};
};
config = lib.mkIf cfg.enable {
systemd.services.go2rtc = {
after = [
"network-online.target"
];
wantedBy = [
"multi-user.target"
];
serviceConfig = {
DynamicUser = true;
User = "go2rtc";
StateDirectory = "go2rtc";
ExecStart = "${cfg.package}/bin/go2rtc -config ${configFile}";
};
};
};
}
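A hedged, minimal `services.go2rtc` sketch (the stream source is the placeholder from the `streams` example above):
```nix
{
  services.go2rtc = {
    enable = true;
    settings = {
      api.listen = "127.0.0.1:1984";   # keep the API on localhost
      streams.cam1 = "onvif://admin:password@192.168.1.123:2020";
    };
  };
}
```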

View File

@ -78,7 +78,8 @@ in
environment.PYTHONPATH = pkgs.powerdns-admin.pythonPath;
serviceConfig = {
ExecStart = "${pkgs.powerdns-admin}/bin/powerdns-admin --pid /run/powerdns-admin/pid ${escapeShellArgs cfg.extraArgs}";
ExecStartPre = "${pkgs.coreutils}/bin/env FLASK_APP=${pkgs.powerdns-admin}/share/powerdnsadmin/__init__.py ${pkgs.python3Packages.flask}/bin/flask db upgrade -d ${pkgs.powerdns-admin}/share/migrations";
# Set environment variables only for starting flask database upgrade
ExecStartPre = "${pkgs.coreutils}/bin/env FLASK_APP=${pkgs.powerdns-admin}/share/powerdnsadmin/__init__.py SESSION_TYPE= ${pkgs.python3Packages.flask}/bin/flask db upgrade -d ${pkgs.powerdns-admin}/share/migrations";
ExecReload = "${pkgs.coreutils}/bin/kill -HUP $MAINPID";
ExecStop = "${pkgs.coreutils}/bin/kill -TERM $MAINPID";
PIDFile = "/run/powerdns-admin/pid";

View File

@ -1,4 +1,4 @@
{ config, pkgs, lib, ... }:
{ config, pkgs, lib, utils, ... }:
let
cfg = config.systemd.repart;
@ -26,14 +26,29 @@ let
in
{
options = {
boot.initrd.systemd.repart.enable = lib.mkEnableOption (lib.mdDoc "systemd-repart") // {
description = lib.mdDoc ''
Grow and add partitions to a partition table at boot time in the initrd.
systemd-repart only works with GPT partition tables.
boot.initrd.systemd.repart = {
enable = lib.mkEnableOption (lib.mdDoc "systemd-repart") // {
description = lib.mdDoc ''
Grow and add partitions to a partition table at boot time in the initrd.
systemd-repart only works with GPT partition tables.
To run systemd-repart after the initrd, see
`options.systemd.repart.enable`.
'';
To run systemd-repart after the initrd, see
`options.systemd.repart.enable`.
'';
};
device = lib.mkOption {
type = with lib.types; nullOr str;
description = lib.mdDoc ''
The device to operate on.
If `device == null`, systemd-repart will operate on the device
backing the root partition. So in order to dynamically *create* the
root partition in the initrd you need to set a device.
'';
default = null;
example = "/dev/vda";
};
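# A hedged illustration of creating the root partition from the initrd
# (the device value is a placeholder, cf. the systemd-repart tests):
#
#   boot.initrd.systemd.repart.enable = true;
#   boot.initrd.systemd.repart.device = "/dev/vda";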
};
systemd.repart = {
@ -84,31 +99,42 @@ in
contents."/etc/repart.d".source = definitionsDirectory;
# Override defaults in upstream unit.
services.systemd-repart = {
# systemd-repart tries to create directories in /var/tmp by default to
# store large temporary files that benefit from persistence on disk. In
# the initrd, however, /var/tmp does not provide more persistence than
# /tmp, so we re-use it here.
environment."TMPDIR" = "/tmp";
serviceConfig = {
ExecStart = [
" " # required to unset the previous value.
# When running in the initrd, systemd-repart by default searches
# for definition files in /sysroot or /sysusr. We tell it to look
# in the initrd itself.
''${config.boot.initrd.systemd.package}/bin/systemd-repart \
services.systemd-repart =
let
deviceUnit = "${utils.escapeSystemdPath initrdCfg.device}.device";
in
{
# systemd-repart tries to create directories in /var/tmp by default to
# store large temporary files that benefit from persistence on disk. In
# the initrd, however, /var/tmp does not provide more persistence than
# /tmp, so we re-use it here.
environment."TMPDIR" = "/tmp";
serviceConfig = {
ExecStart = [
" " # required to unset the previous value.
# When running in the initrd, systemd-repart by default searches
# for definition files in /sysroot or /sysusr. We tell it to look
# in the initrd itself.
''${config.boot.initrd.systemd.package}/bin/systemd-repart \
--definitions=/etc/repart.d \
--dry-run=no
''
];
--dry-run=no ${lib.optionalString (initrdCfg.device != null) initrdCfg.device}
''
];
};
# systemd-repart needs to run after /sysroot (or /sysuser, but we
# don't have it) has been mounted because otherwise it cannot
# determine the device (i.e disk) to operate on. If you want to run
# systemd-repart without /sysroot (i.e. to create the root
# partition), you have to explicitly tell it which device to operate
# on. The service then needs to be ordered to run after this device
# is available.
requires = lib.mkIf (initrdCfg.device != null) [ deviceUnit ];
after =
if initrdCfg.device == null then
[ "sysroot.mount" ]
else
[ deviceUnit ];
};
# systemd-repart needs to run after /sysroot (or /sysuser, but we don't
# have it) has been mounted because otherwise it cannot determine the
# device (i.e disk) to operate on. If you want to run systemd-repart
# without /sysroot, you have to explicitly tell it which device to
# operate on.
after = [ "sysroot.mount" ];
};
};
environment.etc = lib.mkIf cfg.enable {

View File

@ -273,6 +273,7 @@ in {
freeswitch = handleTest ./freeswitch.nix {};
freshrss-sqlite = handleTest ./freshrss-sqlite.nix {};
freshrss-pgsql = handleTest ./freshrss-pgsql.nix {};
frigate = handleTest ./frigate.nix {};
frr = handleTest ./frr.nix {};
fsck = handleTest ./fsck.nix {};
fsck-systemd-stage-1 = handleTest ./fsck.nix { systemdStage1 = true; };
@ -603,6 +604,7 @@ in {
podman-tls-ghostunnel = handleTestOn ["aarch64-linux" "x86_64-linux"] ./podman/tls-ghostunnel.nix {};
polaris = handleTest ./polaris.nix {};
pomerium = handleTestOn ["x86_64-linux"] ./pomerium.nix {};
portunus = handleTest ./portunus.nix { };
postfix = handleTest ./postfix.nix {};
postfix-raise-smtpd-tls-security-level = handleTest ./postfix-raise-smtpd-tls-security-level.nix {};
postfixadmin = handleTest ./postfixadmin.nix {};
@ -792,6 +794,7 @@ in {
v2ray = handleTest ./v2ray.nix {};
varnish60 = handleTest ./varnish.nix { package = pkgs.varnish60; };
varnish72 = handleTest ./varnish.nix { package = pkgs.varnish72; };
varnish73 = handleTest ./varnish.nix { package = pkgs.varnish73; };
vault = handleTest ./vault.nix {};
vault-agent = handleTest ./vault-agent.nix {};
vault-dev = handleTest ./vault-dev.nix {};

View File

@ -48,5 +48,18 @@ import ./make-test-python.nix ({ pkgs, ... }: {
"umount /tmp/mnt",
"apfsck /dev/vdb",
)
with subtest("Snapshots"):
machine.succeed(
"mkapfs /dev/vdb",
"mount -o cknodes,readwrite /dev/vdb /tmp/mnt",
"echo 'Hello World' > /tmp/mnt/test.txt",
"apfs-snap /tmp/mnt snap-1",
"rm /tmp/mnt/test.txt",
"umount /tmp/mnt",
"mount -o cknodes,readwrite,snap=snap-1 /dev/vdb /tmp/mnt",
"echo 'Hello World' | diff - /tmp/mnt/test.txt",
"umount /tmp/mnt",
"apfsck /dev/vdb",
)
'';
})

nixos/tests/frigate.nix Normal file
View File

@ -0,0 +1,60 @@
import ./make-test-python.nix ({ pkgs, lib, ...} :
{
name = "frigate";
meta.maintainers = with lib.maintainers; [ hexa ];
nodes = {
machine = { config, ... }: {
services.frigate = {
enable = true;
hostname = "localhost";
settings = {
mqtt.enabled = false;
cameras.test = {
ffmpeg = {
input_args = "-fflags nobuffer -strict experimental -fflags +genpts+discardcorrupt -r 10 -use_wallclock_as_timestamps 1";
inputs = [ {
path = "http://127.0.0.1:8080";
roles = [
"record"
];
} ];
};
};
record.enabled = true;
};
};
systemd.services.video-stream = {
description = "Start a test stream that frigate can capture";
before = [
"frigate.service"
];
wantedBy = [
"multi-user.target"
];
serviceConfig = {
DynamicUser = true;
ExecStart = "${lib.getBin pkgs.ffmpeg-headless}/bin/ffmpeg -re -f lavfi -i smptebars=size=800x600:rate=10 -f mpegts -listen 1 http://0.0.0.0:8080";
};
};
};
};
testScript = ''
start_all()
machine.wait_for_unit("frigate.service")
machine.wait_for_open_port(5001)
machine.succeed("curl http://localhost:5001")
machine.wait_for_file("/var/cache/frigate/test-*.mp4")
'';
})

nixos/tests/portunus.nix Normal file
View File

@ -0,0 +1,18 @@
import ./make-test-python.nix ({ lib, ... }:
{
name = "portunus";
meta.maintainers = with lib.maintainers; [ SuperSandro2000 ];
nodes.machine = _: {
services.portunus = {
enable = true;
ldap.suffix = "dc=example,dc=org";
};
};
testScript = ''
machine.wait_for_unit("portunus.service")
machine.succeed("curl --fail -vvv http://localhost:8080/")
'';
})

View File

@ -10,6 +10,7 @@ let
defaultConfig = ''
BIND_ADDRESS = '127.0.0.1'
PORT = 8000
CAPTCHA_ENABLE = False
'';
makeAppTest = name: configs: makeTest {
@ -98,7 +99,30 @@ let
tcp = {
services.powerdns-admin.extraArgs = [ "-b" "127.0.0.1:8000" ];
system.build.testScript = ''
set -euxo pipefail
curl -sSf http://127.0.0.1:8000/
# Create account to check that the database migrations ran
csrf_token="$(curl -sSfc session http://127.0.0.1:8000/register | grep _csrf_token | cut -d\" -f6)"
# Outputs 'Redirecting' if successful
curl -sSfb session http://127.0.0.1:8000/register \
-F "_csrf_token=$csrf_token" \
-F "firstname=first" \
-F "lastname=last" \
-F "email=a@example.com" \
-F "username=user" \
-F "password=password" \
-F "rpassword=password" | grep Redirecting
# Login
# Outputs 'Redirecting' if successful
curl -sSfb session http://127.0.0.1:8000/login \
-F "_csrf_token=$csrf_token" \
-F "username=user" \
-F "password=password" | grep Redirecting
# Check that we are logged in, this redirects to /admin/setting/pdns if we are
curl -sSfb session http://127.0.0.1:8000/dashboard/ | grep /admin/setting
'';
};
unix = {

View File

@ -9,21 +9,14 @@ in {
nodes.machine = {
services.syncthing = {
enable = true;
settings = {
options.crashReportingEnabled = false;
devices.testDevice = {
id = testId;
};
folders.testFolder = {
path = "/tmp/test";
devices = [ "testDevice" ];
versioning = {
type = "simple";
params.keep = "10";
};
};
gui.user = "guiUser";
devices.testDevice = {
id = testId;
};
folders.testFolder = {
path = "/tmp/test";
devices = [ "testDevice" ];
};
extraOptions.gui.user = "guiUser";
};
};

View File

@ -56,8 +56,8 @@ let
# however, creates separate filesystem images without a partition table, so
# we have to create a disk image manually.
#
# This creates two partitions, an ESP mounted on /dev/vda1 and the root
# partition mounted on /dev/vda2
# This creates two partitions, an ESP available as /dev/vda1 and the root
# partition available as /dev/vda2.
system.build.diskImage = import ../lib/make-disk-image.nix {
inherit config pkgs lib;
# Use a raw format disk so that it can be resized before starting the
@ -131,4 +131,62 @@ in
assert "Growing existing partition 1." in systemd_repart_logs
'';
};
create-root = makeTest {
name = "systemd-repart-create-root";
meta.maintainers = with maintainers; [ nikstur ];
nodes.machine = { config, lib, pkgs, ... }: {
virtualisation.useDefaultFilesystems = false;
virtualisation.fileSystems = {
"/" = {
device = "/dev/disk/by-partlabel/created-root";
fsType = "ext4";
};
"/nix/store" = {
device = "/dev/vda2";
fsType = "ext4";
};
};
# Create an image containing only the Nix store. This enables creating
# the root partition with systemd-repart and then successfully booting
# into a working system.
#
# This creates two partitions, an ESP available as /dev/vda1 and the Nix
# store available as /dev/vda2.
system.build.diskImage = import ../lib/make-disk-image.nix {
inherit config pkgs lib;
onlyNixStore = true;
format = "raw";
bootSize = "32M";
additionalSpace = "0M";
partitionTableType = "efi";
installBootLoader = false;
copyChannel = false;
};
boot.initrd.systemd.enable = true;
boot.initrd.systemd.repart.enable = true;
boot.initrd.systemd.repart.device = "/dev/vda";
systemd.repart.partitions = {
"10-root" = {
Type = "root";
Label = "created-root";
Format = "ext4";
};
};
};
testScript = { nodes, ... }: ''
${useDiskImage nodes.machine}
machine.start()
machine.wait_for_unit("multi-user.target")
systemd_repart_logs = machine.succeed("journalctl --boot --unit systemd-repart.service")
assert "Adding new partition 2 to partition table." in systemd_repart_logs
'';
};
}

View File

@ -0,0 +1,74 @@
{ lib
, stdenv
, fetchFromGitHub
, autoreconfHook
, gtk-doc
, intltool
, itstool
, libtool
, pkg-config
, wrapGAppsHook
, yelp-tools
, clutter-gtk
, gst_all_1
, glib
, gtk2
, libgsf
, libxml2
, fluidsynth
, orc
}:
stdenv.mkDerivation {
pname = "buzztrax";
version = "unstable-2022-01-26";
src = fetchFromGitHub {
owner = "Buzztrax";
repo = "buzztrax";
rev = "833287c6a06bddc922cd346d6f0fcec7a882aee5";
hash = "sha256-iI6m+zBWDDBjmeuU9Nm4aIbEKfaPe36APPktdjznQpU=";
};
postPatch = ''
touch AUTHORS
'';
nativeBuildInputs = [
autoreconfHook
gtk-doc
intltool
itstool
libtool
pkg-config
wrapGAppsHook
yelp-tools
];
buildInputs = [
clutter-gtk
gst_all_1.gstreamer
gst_all_1.gst-plugins-base
gst_all_1.gst-plugins-good
glib
gtk2
libgsf
libxml2
# optional packages
fluidsynth
gst_all_1.gst-plugins-bad
gst_all_1.gst-plugins-ugly
orc
];
# 'g_memdup' is deprecated: Use 'g_memdup2' instead
env.NIX_CFLAGS_COMPILE = "-Wno-error=deprecated-declarations";
meta = with lib; {
description = "Buzztrax is a modular music composer for Linux.";
homepage = "https://www.buzztrax.org/";
license = licenses.lgpl21Plus;
maintainers = [ maintainers.bendlas ];
platforms = platforms.unix;
};
}

View File

@ -31,7 +31,9 @@ stdenv.mkDerivation rec {
};
cmakeFlags = [ "-DBUILD_CSOUND_AC=0" ] # fails to find Score.hpp
++ lib.optional stdenv.isDarwin "-DCS_FRAMEWORK_DEST=${placeholder "out"}/lib";
++ lib.optional stdenv.isDarwin "-DCS_FRAMEWORK_DEST=${placeholder "out"}/lib"
# Ignore gettext in CMAKE_PREFIX_PATH on cross to prevent find_program picking up the wrong gettext
++ lib.optional (stdenv.hostPlatform != stdenv.buildPlatform) "-DCMAKE_IGNORE_PATH=${lib.getBin gettext}/bin";
nativeBuildInputs = [ cmake flex bison gettext ];
buildInputs = [ libsndfile libsamplerate boost ]

View File

@ -27,7 +27,6 @@
, pkg-config
, rnnoise
, rubberband
, speex
, speexdsp
, tbb
, wrapGAppsHook4
@ -43,7 +42,7 @@ stdenv.mkDerivation rec {
owner = "wwmm";
repo = "easyeffects";
rev = "v${version}";
sha256 = "sha256-JaqwzCWVnvFzzGHnmzYwe3occ9iw7s9xCH54eVKEuOs=";
hash = "sha256-JaqwzCWVnvFzzGHnmzYwe3occ9iw7s9xCH54eVKEuOs=";
};
nativeBuildInputs = [
@ -74,7 +73,6 @@ stdenv.mkDerivation rec {
pipewire
rnnoise
rubberband
speex
speexdsp
tbb
zita-convolver

View File

@ -1,4 +1,5 @@
{ lib, stdenv
{ lib
, stdenv
, coreutils
, fetchFromGitHub
, makeWrapper
@ -14,19 +15,21 @@
, p11-kit
, vim
, which
, ncurses
, fetchpatch
}:
with lib.strings;
let
version = "2.54.9";
version = "2.59.6";
src = fetchFromGitHub {
owner = "grame-cncm";
repo = "faust";
rev = version;
sha256 = "sha256-7eSZUsZ0h0vWJIpZWXaS+SHV6N2i9nv6Gr6a9cuu4Fg=";
sha256 = "sha256-m6dimBxI9C3KDhUxbJAn2Pf9z+LRahjrzD34W/bf1XA=";
fetchSubmodules = true;
};
@ -38,80 +41,97 @@ let
maintainers = with maintainers; [ magnetophon pmahoney ];
};
faust = stdenv.mkDerivation {
faust =
let ncurses_static = ncurses.override { enableStatic = true; };
in stdenv.mkDerivation {
pname = "faust";
inherit version;
pname = "faust";
inherit version;
inherit src;
inherit src;
nativeBuildInputs = [ makeWrapper pkg-config cmake vim which ];
buildInputs = [ llvm emscripten openssl libsndfile libmicrohttpd gnutls libtasn1 p11-kit ];
nativeBuildInputs = [ makeWrapper pkg-config cmake vim which ];
buildInputs = [
llvm
emscripten
openssl
libsndfile
libmicrohttpd
gnutls
libtasn1
p11-kit
ncurses_static
];
patches = [
# make preset management thread safe
# needed for magnetophonDSP.VoiceOfFaust
# see: https://github.com/grame-cncm/faust/issues/899
(fetchpatch {
url = "https://github.com/grame-cncm/faust/commit/a1c3a515abbcafea0a6e4e2ec7ecb0f092de5349.patch";
hash = "sha256-1Ndm+CgxvGEbS6TKGggeu9hW7N3pC+d1kluT2vhGzL8=";
})
];
passthru = {
inherit wrap wrapWithBuildEnv faust2ApplBase;
};
passthru = { inherit wrap wrapWithBuildEnv faust2ApplBase; };
preConfigure = ''
cd build
'';
cmakeFlags = [
"-C../backends/all.cmake"
"-C../targets/all.cmake"
];
postInstall = ''
# syntax error when eval'd directly
pattern="faust2!(*@(atomsnippets|graph|graphviewer|md|plot|sig|sigviewer|svg))"
(shopt -s extglob; rm "$out"/bin/$pattern)
'';
postFixup = ''
# The 'faustoptflags' is 'source'd into other faust scripts and
# not used as an executable, so patch 'uname' usage directly
# rather than use makeWrapper.
substituteInPlace "$out"/bin/faustoptflags \
--replace uname "${coreutils}/bin/uname"
# wrapper for scripts that don't need faust.wrap*
for script in "$out"/bin/faust2*; do
wrapProgram "$script" \
--prefix PATH : "$out"/bin
done
'';
meta = meta // {
description = "A functional programming language for realtime audio signal processing";
longDescription = ''
FAUST (Functional Audio Stream) is a functional programming
language specifically designed for real-time signal processing
and synthesis. FAUST targets high-performance signal processing
applications and audio plug-ins for a variety of platforms and
standards.
The Faust compiler translates DSP specifications into very
efficient C++ code. Thanks to the notion of architecture,
FAUST programs can be easily deployed on a large variety of
audio platforms and plugin formats (jack, alsa, ladspa, maxmsp,
puredata, csound, supercollider, pure, vst, coreaudio) without
any change to the FAUST code.
This package has just the compiler, libraries, and headers.
Install faust2* for specific faust2appl scripts.
preConfigure = ''
cd build
sed -i 's@LIBNCURSES_PATH ?= .*@LIBNCURSES_PATH ?= ${ncurses_static}/lib/libncurses.a@' Make.llvm.static
substituteInPlace Make.llvm.static \
--replace 'mkdir -p $@ && cd $@ && ar -x ../../$<' 'mkdir -p $@ && cd $@ && ar -x ../source/build/lib/libfaust.a && cd ../source/build/'
substituteInPlace Make.llvm.static \
--replace 'rm -rf $(TMP)' ' '
'';
};
};
cmakeFlags = [ "-C../backends/all.cmake" "-C../targets/all.cmake" ];
postInstall = ''
# syntax error when eval'd directly
pattern="faust2!(*@(atomsnippets|graph|graphviewer|md|plot|sig|sigviewer|svg))"
(shopt -s extglob; rm "$out"/bin/$pattern)
'';
postFixup = ''
# The 'faustoptflags' is 'source'd into other faust scripts and
# not used as an executable, so patch 'uname' usage directly
# rather than use makeWrapper.
substituteInPlace "$out"/bin/faustoptflags \
--replace uname "${coreutils}/bin/uname"
# wrapper for scripts that don't need faust.wrap*
for script in "$out"/bin/faust2*; do
wrapProgram "$script" \
--prefix PATH : "$out"/bin
done
'';
meta = meta // {
description =
"A functional programming language for realtime audio signal processing";
longDescription = ''
FAUST (Functional Audio Stream) is a functional programming
language specifically designed for real-time signal processing
and synthesis. FAUST targets high-performance signal processing
applications and audio plug-ins for a variety of platforms and
standards.
The Faust compiler translates DSP specifications into very
efficient C++ code. Thanks to the notion of architecture,
FAUST programs can be easily deployed on a large variety of
audio platforms and plugin formats (jack, alsa, ladspa, maxmsp,
puredata, csound, supercollider, pure, vst, coreaudio) without
any change to the FAUST code.
This package has just the compiler, libraries, and headers.
Install faust2* for specific faust2appl scripts.
'';
};
};
# Default values for faust2appl.
faust2ApplBase =
{ baseName
, dir ? "tools/faust2appls"
, scripts ? [ baseName ]
, ...
}@args:
{ baseName, dir ? "tools/faust2appls", scripts ? [ baseName ], ... }@args:
args // {
name = "${baseName}-${version}";
@ -141,7 +161,8 @@ let
'';
meta = meta // {
description = "The ${baseName} script, part of faust functional programming language for realtime audio signal processing";
description =
"The ${baseName} script, part of faust functional programming language for realtime audio signal processing";
};
};
@ -161,11 +182,7 @@ let
#
# The build input 'faust' is automatically added to the
# propagatedBuildInputs.
wrapWithBuildEnv =
{ baseName
, propagatedBuildInputs ? [ ]
, ...
}@args:
wrapWithBuildEnv = { baseName, propagatedBuildInputs ? [ ], ... }@args:
stdenv.mkDerivation ((faust2ApplBase args) // {
@ -205,26 +222,25 @@ let
# simply need to be wrapped with some dependencies on PATH.
#
# The build input 'faust' is automatically added to the PATH.
wrap =
{ baseName
, runtimeInputs ? [ ]
, ...
}@args:
wrap = { baseName, runtimeInputs ? [ ], ... }@args:
let
runtimePath = concatStringsSep ":" (map (p: "${p}/bin") ([ faust ] ++ runtimeInputs));
runtimePath =
concatStringsSep ":" (map (p: "${p}/bin") ([ faust ] ++ runtimeInputs));
in stdenv.mkDerivation ((faust2ApplBase args) // {
in
stdenv.mkDerivation ((faust2ApplBase args) // {
nativeBuildInputs = [ makeWrapper ];
nativeBuildInputs = [ makeWrapper ];
postFixup = ''
postFixup = ''
for script in "$out"/bin/*; do
wrapProgram "$script" --prefix PATH : "${runtimePath}"
done
'';
});
});
in faust
in
faust

View File

@ -1,13 +1,13 @@
{ stdenv, lib, fetchFromGitHub, faust2jaqt, faust2lv2 }:
stdenv.mkDerivation rec {
pname = "faustPhysicalModeling";
version = "2.54.9";
version = "2.59.6";
src = fetchFromGitHub {
owner = "grame-cncm";
repo = "faust";
rev = version;
sha256 = "sha256-1ZS7SVTWI1vNOGycZIDyKLgwfNooIGDa8Wmr6qfFSkU=";
sha256 = "sha256-Z/hAq6JlhlWBzWlodwQW/k9AkozVeMXmbVhkicNZ5os=";
};
buildInputs = [ faust2jaqt faust2lv2 ];

View File

@ -1,13 +1,14 @@
{ lib
, stdenv
, fetchFromGitHub
, alsa-lib
, flac
, libmad
, libpulseaudio
, libvorbis
, mpg123
, audioBackend ? "alsa"
, audioBackend ? if stdenv.isLinux then "alsa" else "portaudio"
, alsaSupport ? stdenv.isLinux
, alsa-lib
, dsdSupport ? true
, faad2Support ? true
, faad2
@ -19,10 +20,18 @@
, soxr
, sslSupport ? true
, openssl
, portaudioSupport ? stdenv.isDarwin
, portaudio
, AudioToolbox
, AudioUnit
, Carbon
, CoreAudio
, CoreVideo
, VideoDecodeAcceleration
}:
let
inherit (lib) optional optionalString;
inherit (lib) optional optionals optionalString;
pulseSupport = audioBackend == "pulse";
@ -44,7 +53,10 @@ stdenv.mkDerivation {
};
buildInputs = [ flac libmad libvorbis mpg123 ]
++ lib.singleton (if pulseSupport then libpulseaudio else alsa-lib)
++ optional pulseSupport libpulseaudio
++ optional alsaSupport alsa-lib
++ optional portaudioSupport portaudio
++ optionals stdenv.isDarwin [ CoreVideo VideoDecodeAcceleration CoreAudio AudioToolbox AudioUnit Carbon ]
++ optional faad2Support faad2
++ optional ffmpegSupport ffmpeg
++ optional opusSupport opusfile
@ -65,10 +77,15 @@ stdenv.mkDerivation {
++ optional (!faad2Support) "-DNO_FAAD"
++ optional ffmpegSupport "-DFFMPEG"
++ optional opusSupport "-DOPUS"
++ optional portaudioSupport "-DPORTAUDIO"
++ optional pulseSupport "-DPULSEAUDIO"
++ optional resampleSupport "-DRESAMPLE"
++ optional sslSupport "-DUSE_SSL";
env = lib.optionalAttrs stdenv.isDarwin {
LDADD = "-lportaudio -lpthread";
};
installPhase = ''
runHook preInstall
@ -85,6 +102,6 @@ stdenv.mkDerivation {
homepage = "https://github.com/ralph-irving/squeezelite";
license = with licenses; [ gpl3Plus ] ++ optional dsdSupport bsd2;
maintainers = with maintainers; [ adamcstephens ];
platforms = platforms.linux;
platforms = if (audioBackend == "pulse") then platforms.linux else platforms.linux ++ platforms.darwin;
};
}

View File

@ -13,11 +13,11 @@ let
in
stdenv.mkDerivation rec {
pname = "SunVox";
version = "2.0e";
version = "2.1c";
src = fetchurl {
url = "https://www.warmplace.ru/soft/sunvox/sunvox-${version}.zip";
sha256 = "sha256-v4dQnRr7pusOAHX8ytDChKixYxEIjg30vOTD6uA/S0o=";
sha256 = "sha256-yPVcbtlAVbO9uMsFlfZ51T408hA1VPJAI+R+Jdjcyjw=";
};
nativeBuildInputs = [ unzip ];

View File

@ -39,7 +39,6 @@ python3Packages.buildPythonApplication rec {
--replace pytest-runner ""
substituteInPlace src/vorta/assets/metadata/com.borgbase.Vorta.desktop \
--replace Exec=vorta "Exec=$out/bin/vorta" \
--replace com.borgbase.Vorta "com.borgbase.Vorta-symbolic"
'';

View File

@ -6,13 +6,13 @@
buildDotnetModule rec {
pname = "nbxplorer";
version = "2.3.62";
version = "2.3.63";
src = fetchFromGitHub {
owner = "dgarage";
repo = "NBXplorer";
rev = "v${version}";
sha256 = "sha256-FpAMkVgvl0SxJ59FjL4H3Fvqb1LKsET2I+A01TQlvFA=";
sha256 = "sha256-K3dlXwzKNzwJstp1DW5T5s5Gs0ebPNWXtzqr3rw5294=";
};
projectFile = "NBXplorer/NBXplorer.csproj";

View File

@ -47,6 +47,6 @@ in rustPlatform.buildRustPackage {
homepage = "https://helix-editor.com";
license = licenses.mpl20;
mainProgram = "hx";
maintainers = with maintainers; [ danth yusdacra ];
maintainers = with maintainers; [ danth yusdacra zowoq ];
};
}

View File

@ -65,12 +65,12 @@ final: prev:
Coqtail = buildVimPluginFrom2Nix {
pname = "Coqtail";
version = "2023-04-23";
version = "2023-05-20";
src = fetchFromGitHub {
owner = "whonore";
repo = "Coqtail";
rev = "916bd5c97242d806ed8a05f1691e27042fd189fb";
sha256 = "13xi801cjrar813ad0bm9s6h482zs0gjs0j8202p2mz82g6saif9";
rev = "ec80f3d48dcbf19209ef51d6020838cda5a1d46e";
sha256 = "0ahhs6ffdffap566k6p0f1yncziacwnygq3ndaarngqz29w6wl62";
};
meta.homepage = "https://github.com/whonore/Coqtail/";
};
@ -173,12 +173,12 @@ final: prev:
LazyVim = buildVimPluginFrom2Nix {
pname = "LazyVim";
version = "2023-05-19";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "LazyVim";
repo = "LazyVim";
rev = "b227d9727a7ae1f0ad48504d613fb099dc9c461b";
sha256 = "02zk79ln1rp9mhf74vwyzkzzzh0mwki0ak4fwy5261p53z6l09m9";
rev = "7a7c024bf6488ce73610d181d989bb15a016fd0c";
sha256 = "1ls87rxwlk8ywswyq5k0hhls7cdhwpg8safd709a0210bbdypk5g";
};
meta.homepage = "https://github.com/LazyVim/LazyVim/";
};
@ -365,12 +365,12 @@ final: prev:
SpaceVim = buildVimPluginFrom2Nix {
pname = "SpaceVim";
version = "2023-05-07";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "SpaceVim";
repo = "SpaceVim";
rev = "cdc4762c489c9159f375dda7bef4dd721472cb54";
sha256 = "1530x0xy2mpah4vqxfr6jnvghg1d16zj4j3mks7n47xzv7ss6h0h";
rev = "fdd617e2a39f5f1d654cba4329638d730165b952";
sha256 = "0b5zn67pwaxcm4qgd3zhsx3l04skgvh6m0yh12b5h30nkmbaqzyi";
};
meta.homepage = "https://github.com/SpaceVim/SpaceVim/";
};
@ -486,12 +486,12 @@ final: prev:
aerial-nvim = buildVimPluginFrom2Nix {
pname = "aerial.nvim";
version = "2023-05-17";
version = "2023-05-22";
src = fetchFromGitHub {
owner = "stevearc";
repo = "aerial.nvim";
rev = "3a17406d9d8f01f46d207f42d8849eb924eb0755";
sha256 = "00186chkr4y8k4yv5crlnkn1v2smw151hc4ga4swikvnkclddg2c";
rev = "189bf4cce7f029ca8b3684441dd9d8ca5e6925a4";
sha256 = "0k2fjgbymh159j8hkc9609q23rapblgj5jcjifr4szyfkjy5fp1l";
fetchSubmodules = true;
};
meta.homepage = "https://github.com/stevearc/aerial.nvim/";
@ -547,12 +547,12 @@ final: prev:
ale = buildVimPluginFrom2Nix {
pname = "ale";
version = "2023-05-06";
version = "2023-05-22";
src = fetchFromGitHub {
owner = "dense-analysis";
repo = "ale";
rev = "9fe9f115213d7e7bf52d06ebdc69c6df38b1120b";
sha256 = "16fwvb6x50i40qyz09v4mfgskbkyq0rk8z7srnrpghyza575nz1p";
rev = "a46121a532b2baaa339016ab910c59f1cded46e5";
sha256 = "0pl4hbx5zickipfhw2qnbayclkjxdmffbhlarxsvkggvb8ay1d1x";
};
meta.homepage = "https://github.com/dense-analysis/ale/";
};
@ -919,12 +919,12 @@ final: prev:
barbar-nvim = buildVimPluginFrom2Nix {
pname = "barbar.nvim";
version = "2023-05-18";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "romgrk";
repo = "barbar.nvim";
rev = "cf3cae24e762f9d63de1a85a1a83c8cdeafeb344";
sha256 = "1i0nq5s9pwfpj4cksw4pgj2kmcvyh7zlxd7f2alygp94hw0kbzwb";
rev = "cfa1168203e5f69e7ff2eeee9c7966191907da1a";
sha256 = "0s1kyip884hgxvgaywxb94g5cdigrvnrfymc778wyxh7jk1ypm3k";
};
meta.homepage = "https://github.com/romgrk/barbar.nvim/";
};
@ -1135,12 +1135,12 @@ final: prev:
bufferline-nvim = buildVimPluginFrom2Nix {
pname = "bufferline.nvim";
version = "2023-05-14";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "akinsho";
repo = "bufferline.nvim";
rev = "1952c33e425ede785d26aa9e250addfe304a8510";
sha256 = "0rcr2aj38ln07riil516c40fakaqzv72c2d1as7b6n8qikz9by7a";
rev = "32d74d5d044f7cc89892d4781a83d55ee4ed552a";
sha256 = "00bw1br0p9gvwrirrp3byazmr51klzbl512aqcp2j4x2g9s3zksq";
};
meta.homepage = "https://github.com/akinsho/bufferline.nvim/";
};
@ -2263,24 +2263,24 @@ final: prev:
coq-artifacts = buildVimPluginFrom2Nix {
pname = "coq.artifacts";
version = "2023-05-14";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "ms-jpq";
repo = "coq.artifacts";
rev = "1ded9a1b8cd80616d5909a3ec5859feda66a7f2e";
sha256 = "1nr65pz6xiajyh9k9s5p95bd2v9br8g1r0gk3s9lz9hqyb2fpzx9";
rev = "18bb8e1b12b8ea6f153f18ce8849e8d007af5721";
sha256 = "0xxbplalsyhsjfji2h8mrgfpswwywfldijkpnf2zaavb8nvb2kbq";
};
meta.homepage = "https://github.com/ms-jpq/coq.artifacts/";
};
coq-thirdparty = buildVimPluginFrom2Nix {
pname = "coq.thirdparty";
version = "2023-05-14";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "ms-jpq";
repo = "coq.thirdparty";
rev = "0c8d7e2f8652ee0a327df8e4e027e20d4d7ba616";
sha256 = "1isv1v4aj6qfngn4pdqd2lvb22j0n5iaa4f9v80rz6614cgsvwl1";
rev = "f4821b21e6a304ae929a9851f7f427d2f6d4322a";
sha256 = "1r6fkx4ghsqzpnm08yrrjsjisgkh2vylrpflji2g42rrvb750zv7";
};
meta.homepage = "https://github.com/ms-jpq/coq.thirdparty/";
};
@ -2299,12 +2299,12 @@ final: prev:
coq_nvim = buildVimPluginFrom2Nix {
pname = "coq_nvim";
version = "2023-05-18";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "ms-jpq";
repo = "coq_nvim";
rev = "b0eda162151cdd912ef2ccfc2d1afac6eb448d31";
sha256 = "04wwf92wrp5i17fwpbcj58d5i8dakqzw2d71wjg0lagqya8lbm4k";
rev = "d0523c2241f45323e2b48d45ad3b095a9f95df65";
sha256 = "07jbjqfxjcbx5x1pfi2v37yjva07wwn72njhcvhy01acbvc7pc91";
};
meta.homepage = "https://github.com/ms-jpq/coq_nvim/";
};
@ -2551,24 +2551,24 @@ final: prev:
denops-vim = buildVimPluginFrom2Nix {
pname = "denops.vim";
version = "2023-01-20";
version = "2023-05-08";
src = fetchFromGitHub {
owner = "vim-denops";
repo = "denops.vim";
rev = "44baa0666e69976dd82311a67a220c7f71273368";
sha256 = "1wdl2sq6vl4lmyqrhnp0mspxbfaza3h8pgkgdhf27jdkavg793ij";
rev = "6b29032365f82fc1ef78d206e4019fd510ea51fb";
sha256 = "0dzx5b5ckq3dfvg28cgy6iii0q7sh24klpm6idg1yj1r3038q2jf";
};
meta.homepage = "https://github.com/vim-denops/denops.vim/";
};
deol-nvim = buildVimPluginFrom2Nix {
pname = "deol.nvim";
version = "2023-05-19";
version = "2023-05-22";
src = fetchFromGitHub {
owner = "Shougo";
repo = "deol.nvim";
rev = "632237abbc64118f1b11c896f3ee6f3bb3dd0c8e";
sha256 = "12zcvzf6qshql8adq55fh9w6g81sb1jsvnzgph4ys4y7wyn8y0kz";
rev = "850f83e39067889408c096bbf87788bb0e848311";
sha256 = "0z57klvid0g6nk4zixh07xvyjlk5a0wqiq6i1kb7497jp3sp2dzs";
};
meta.homepage = "https://github.com/Shougo/deol.nvim/";
};
@ -2913,12 +2913,12 @@ final: prev:
dressing-nvim = buildVimPluginFrom2Nix {
pname = "dressing.nvim";
version = "2023-05-12";
version = "2023-05-22";
src = fetchFromGitHub {
owner = "stevearc";
repo = "dressing.nvim";
rev = "66e4990240f92e31b0d5e4df6deb6bb0160ae832";
sha256 = "0rdf5ci84ddx96i68bwh3nxg3ij196mjgb6phqlb9qz0a0jqsrfj";
rev = "2f17eee4d7709dacfad2a28f35e2acfe9a6cb09d";
sha256 = "15hvqr7s58dh4k6yxipyg44sq8zrfpj6nhpr3vhqjjn642yrchwg";
};
meta.homepage = "https://github.com/stevearc/dressing.nvim/";
};
@ -3312,11 +3312,11 @@ final: prev:
friendly-snippets = buildVimPluginFrom2Nix {
pname = "friendly-snippets";
version = "2023-05-12";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "rafamadriz";
repo = "friendly-snippets";
rev = "1d0dac346de7c6895ac72528df3276386c6b149b";
rev = "2bb3958e1fe0a613e028f3c6fd2d2923fc23bd0c";
sha256 = "1nlbm7ji73ggg48pzvlbb32jfscnj71bgfzg4q1p3zfvssisz85c";
};
meta.homepage = "https://github.com/rafamadriz/friendly-snippets/";
@ -3420,12 +3420,12 @@ final: prev:
fzf-lua = buildVimPluginFrom2Nix {
pname = "fzf-lua";
version = "2023-05-19";
version = "2023-05-20";
src = fetchFromGitHub {
owner = "ibhagwan";
repo = "fzf-lua";
rev = "7160a2062fa516fd3e526187e0c669aa6b770a5f";
sha256 = "11dwx32fcskpz65q1yh9fa2lr0ys5xv1g0k0zb2zz0shsp6p07dv";
rev = "29d71212486554314e4f357a86f6174fb893bcc7";
sha256 = "1v857a2zlzf4ciz08zzl891pqylmb00arhj2kzd5v5kx07zl8rxi";
};
meta.homepage = "https://github.com/ibhagwan/fzf-lua/";
};
@ -3516,12 +3516,12 @@ final: prev:
git-blame-nvim = buildVimPluginFrom2Nix {
pname = "git-blame.nvim";
version = "2023-05-16";
version = "2023-05-20";
src = fetchFromGitHub {
owner = "f-person";
repo = "git-blame.nvim";
rev = "c165cde611f1e171c843eeac07bdf139d7aae2d3";
sha256 = "1ljc2w51sm5ynj2n7jvggzig17b2qjizid6hkm7khgl6ps4r35gl";
rev = "b8a23393827a0478dbf606f5397c328895bd4f0e";
sha256 = "11kh480lx51zymv6i0gyxwrm1h8qdmwfrbq0lz1jkzks3y475amb";
};
meta.homepage = "https://github.com/f-person/git-blame.nvim/";
};
@ -3636,12 +3636,12 @@ final: prev:
go-nvim = buildVimPluginFrom2Nix {
pname = "go.nvim";
version = "2023-05-08";
version = "2023-05-20";
src = fetchFromGitHub {
owner = "ray-x";
repo = "go.nvim";
rev = "b119217e8324f13a2be12935f5d2d15a1df09b09";
sha256 = "10wfmf2m5g22blks6fgi3x6mmmirhd452pi6r91jzxqliwf41pzc";
rev = "1a2530d3e9b29e9e1279a67d27304a05663537fd";
sha256 = "06xj50bbbmvb8rxdlgp7fn6jdr0swbifiy14bnsnmq3fddvzsi2z";
};
meta.homepage = "https://github.com/ray-x/go.nvim/";
};
@ -3816,11 +3816,11 @@ final: prev:
hare-vim = buildVimPluginFrom2Nix {
pname = "hare.vim";
version = "2023-05-10";
version = "2023-05-22";
src = fetchgit {
url = "https://git.sr.ht/~sircmpwn/hare.vim";
rev = "5451dcb380668bc691d20ea91cf258dfe777609d";
sha256 = "141l8l7jlk4khqp45yr0whm8xf1r1374h36cf35bl2rahdgfr15q";
rev = "1134cd8a50689a5f2bd98c195221e236e7c2a716";
sha256 = "14biq4v52pavpag87g2y8ygb74qril55lf7s4xbbr4wg37fbji7q";
};
meta.homepage = "https://git.sr.ht/~sircmpwn/hare.vim";
};
@ -3839,12 +3839,12 @@ final: prev:
haskell-tools-nvim = buildNeovimPluginFrom2Nix {
pname = "haskell-tools.nvim";
version = "2023-05-14";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "MrcJkb";
repo = "haskell-tools.nvim";
rev = "f75225d30e1a122c70e713f7c4aa4da4c44b278f";
sha256 = "0bnqx0d6kjsva0b1sk44lk067lwa9dh402xcinzpc3yd1xff08ga";
rev = "3b68740a9ea2eed4fafc3a05c81601ae5191892d";
sha256 = "0hfnz4gl62g385ahldn82n2s0ki1lknxqv6w9qam9jn75nrmyvjs";
};
meta.homepage = "https://github.com/MrcJkb/haskell-tools.nvim/";
};
@ -4439,24 +4439,24 @@ final: prev:
lazy-nvim = buildVimPluginFrom2Nix {
pname = "lazy.nvim";
version = "2023-05-19";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "folke";
repo = "lazy.nvim";
rev = "91564cb0a6d038d7e0eeaf68d505ed2627de625b";
sha256 = "0haj80ic1v6aam3bahfijracjvxb38m7q0p9idzgcp52lk8ab2rm";
rev = "b382495d512fbc6dadbeac8b806efc72db7130e8";
sha256 = "0zsj1zi4p65naqylphv4kpzv0wgin0pfnrc2kbmv4qcrk1aa5qm6";
};
meta.homepage = "https://github.com/folke/lazy.nvim/";
};
lazygit-nvim = buildVimPluginFrom2Nix {
pname = "lazygit.nvim";
version = "2022-11-05";
version = "2023-05-20";
src = fetchFromGitHub {
owner = "kdheepak";
repo = "lazygit.nvim";
rev = "32bffdebe273e571588f25c8a708ca7297928617";
sha256 = "1iw9297di1jdwf1kqj14fpfw0w9gdh5mr0i1s41gl7j16pbpn9f6";
rev = "883c25d13eafd8b89b3cc116c712b19c9d2a755e";
sha256 = "0cdkr8ndyycg3yk4dqgff0faw856jwlnc9qyqd2l2x4nxjv7vrar";
};
meta.homepage = "https://github.com/kdheepak/lazygit.nvim/";
};
@ -4607,12 +4607,12 @@ final: prev:
lightline-bufferline = buildVimPluginFrom2Nix {
pname = "lightline-bufferline";
version = "2022-12-17";
version = "2023-05-20";
src = fetchFromGitHub {
owner = "mengelbrecht";
repo = "lightline-bufferline";
rev = "c0199a7027da92d9770d1e2a9f4bf6257c7ec7ef";
sha256 = "0q0s9gbakmlyd13mb33gq41brkf0qs2isjaz7fd5xpgakqsldl3b";
rev = "8a2e7ab946dd995d693e30744665d15eabe4c369";
sha256 = "1ld4z3smmw4s1inbk1ry04k5bpqp48xksbpbq2abx58x3brhg782";
};
meta.homepage = "https://github.com/mengelbrecht/lightline-bufferline/";
};
@ -4966,12 +4966,12 @@ final: prev:
luasnip = buildVimPluginFrom2Nix {
pname = "luasnip";
version = "2023-05-19";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "l3mon4d3";
repo = "luasnip";
rev = "ec7fba1d119fb5090a901eb616145450ffb95e31";
sha256 = "197frj7iinil9drqs95dz6ddzgghq9fx18d2ky3pivnqh3j5wik8";
rev = "fcdaa4313305fe20d928551134c1ec2266e7af2a";
sha256 = "0pmbbc8099xdwry8s75rijnh3w814n7blmnpvymjl06ffxgp5h42";
fetchSubmodules = true;
};
meta.homepage = "https://github.com/l3mon4d3/luasnip/";
@ -5567,12 +5567,12 @@ final: prev:
neodev-nvim = buildVimPluginFrom2Nix {
pname = "neodev.nvim";
version = "2023-05-15";
version = "2023-05-22";
src = fetchFromGitHub {
owner = "folke";
repo = "neodev.nvim";
rev = "0c5d6c2ac2fadebedf08282d9403ef6c3dc31896";
sha256 = "03nh76d9ks6cpmdh6saa6wgb4920didnapp0ww6w6vw7wpqkajng";
rev = "a9c1324ab00bf4d18bc3b157429535b65ac62a36";
sha256 = "1xyf07chbdcs4rfln4fh5jyqjdmxi73vg5j0c7s1q5axr2g3w6ci";
};
meta.homepage = "https://github.com/folke/neodev.nvim/";
};
@ -5663,12 +5663,12 @@ final: prev:
neorg = buildVimPluginFrom2Nix {
pname = "neorg";
version = "2023-05-18";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "nvim-neorg";
repo = "neorg";
rev = "238152ab40ec1fb293fae75744942146876ed08f";
sha256 = "1ysfdfwfi85391v3drkzqq4cfwi7axcpysw2vdavns3gcbdy4a04";
rev = "c23310083857d30b27db821a41fbeeba0a7bd5c1";
sha256 = "029mkxkzvmp54gljrry4y7fc01364gq45wqg8dzx471ady3dd7hw";
};
meta.homepage = "https://github.com/nvim-neorg/neorg/";
};
@ -5795,12 +5795,12 @@ final: prev:
neotest-haskell = buildVimPluginFrom2Nix {
pname = "neotest-haskell";
version = "2023-05-14";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "MrcJkb";
repo = "neotest-haskell";
rev = "15970b4fbabb74ba97022f0cc35dbf72fabc4c59";
sha256 = "1s1l6bahba1xywav6fr4517i85sbx4fzdxl1xpj1nzr6pqmhxjd4";
rev = "bfd7fa4790028e4e54825dc39e88708b210bebd8";
sha256 = "18m87cmcrb5c8qx7fyl8y6js544pddri91cyjv38s84w7wwb1sii";
};
meta.homepage = "https://github.com/MrcJkb/neotest-haskell/";
};
@ -6095,12 +6095,12 @@ final: prev:
nlsp-settings-nvim = buildVimPluginFrom2Nix {
pname = "nlsp-settings.nvim";
version = "2023-05-18";
version = "2023-05-20";
src = fetchFromGitHub {
owner = "tamago324";
repo = "nlsp-settings.nvim";
rev = "3b16ff8ef300aad3a1f93f0abc08b6ec0873af96";
sha256 = "0rjf3sb3jki44ll3mnwppvrdqxiyrv8yg7c1gsvazld9rqichd6s";
rev = "033db358dba8714384db966db29503122b14b8fd";
sha256 = "18vy7pccz16sbix5raalm4x21vhxmhykqdp8l3sm8gh541kis44y";
};
meta.homepage = "https://github.com/tamago324/nlsp-settings.nvim/";
};
@ -6143,12 +6143,12 @@ final: prev:
noice-nvim = buildVimPluginFrom2Nix {
pname = "noice.nvim";
version = "2023-05-10";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "folke";
repo = "noice.nvim";
rev = "c2a745a26ae562f1faecbf6177ac53377d2658d5";
sha256 = "01y0nh8cdjrwcagcpw99sw55yvylyia4jphpprbyswrggg4r83fw";
rev = "b4ff29c2492fbdc897866a4e3fdc413b27b48554";
sha256 = "0yq7xjlqwcr0fknfrhjw9792lp52qs4sjsgdrbs8qd4bl1c7n2na";
};
meta.homepage = "https://github.com/folke/noice.nvim/";
};
@ -6503,12 +6503,12 @@ final: prev:
nvim-dap-virtual-text = buildVimPluginFrom2Nix {
pname = "nvim-dap-virtual-text";
version = "2023-04-26";
version = "2023-05-20";
src = fetchFromGitHub {
owner = "theHamsta";
repo = "nvim-dap-virtual-text";
rev = "ab988dbb7d20cdaebf9b3ef7554a89f6895de042";
sha256 = "0mchrwd49hjl9sgspnwqbl5pplp88g3jnaiphzj5ri44cdgvajym";
rev = "10f858c402eb985739627b7e69a38f5b2d79076b";
sha256 = "17yr0d7djvwv7rdjqhzki0ibj2i3anjg2pw6jfjz96lfcci4c17m";
};
meta.homepage = "https://github.com/theHamsta/nvim-dap-virtual-text/";
};
@ -6587,12 +6587,12 @@ final: prev:
nvim-highlite = buildVimPluginFrom2Nix {
pname = "nvim-highlite";
version = "2023-05-19";
version = "2023-05-20";
src = fetchFromGitHub {
owner = "Iron-E";
repo = "nvim-highlite";
rev = "21cc97ea72b5271f9ec9779c765dcbdaa7ea6a83";
sha256 = "04347wdx93igdgkg2kz5qy3b7yyhcl3qyfvdcbgb9832brsmhfmh";
rev = "bf5c32f430fceb9c4a6dbc6f8b62f5e111ce0128";
sha256 = "03g6z2raq16rrrf2sbygf8wwhx4p06jrfq58ra1hy7wzhdsvb1y2";
};
meta.homepage = "https://github.com/Iron-E/nvim-highlite/";
};
@ -6707,12 +6707,12 @@ final: prev:
nvim-lspconfig = buildVimPluginFrom2Nix {
pname = "nvim-lspconfig";
version = "2023-05-19";
version = "2023-05-22";
src = fetchFromGitHub {
owner = "neovim";
repo = "nvim-lspconfig";
rev = "6f1d124bbcf03c4c410c093143a86415f46d16a0";
sha256 = "0ks7565sh91ydc8w5n2b2dlikmxfr0q5jmf6gn776b3knflsqk19";
rev = "10142ff298f5fc3f8bf9d277c806593401b0fd06";
sha256 = "13g2ml4hvq9db9bw1ahlx1xaz88ql4ryr4z916z153mmk53avs0g";
};
meta.homepage = "https://github.com/neovim/nvim-lspconfig/";
};
@ -6767,12 +6767,12 @@ final: prev:
nvim-metals = buildVimPluginFrom2Nix {
pname = "nvim-metals";
version = "2023-05-19";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "scalameta";
repo = "nvim-metals";
rev = "fe5a6294efc9aadab905413486995bc8226f98fd";
sha256 = "0iyjj4gcxd077qdw6g8y5iy4z9hii0ddwz2y5p560jxpzzab54dd";
rev = "51cd9fabe8ba7ff9abafd0c1ec4292f3a2eb1a31";
sha256 = "001zjqgiqf61c7x3r1np8za7naxph4qizilngckxahxa8myams44";
};
meta.homepage = "https://github.com/scalameta/nvim-metals/";
};
@ -6995,24 +6995,24 @@ final: prev:
nvim-tree-lua = buildVimPluginFrom2Nix {
pname = "nvim-tree.lua";
version = "2023-05-15";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "nvim-tree";
repo = "nvim-tree.lua";
rev = "736c7ff59065275f0483af4b7f07a9bc41449ad0";
sha256 = "0g9zl6lqnf06r0fkp2b2f63l806vgplmb64n608rcbc9j3iflhw3";
rev = "b1e074d2b52d45c8327b5b43a498b3d7e6c93b97";
sha256 = "0j6ccnp39caip3gg66sqg6aan3qbwm59kx7k98c8c6l6xqav86kf";
};
meta.homepage = "https://github.com/nvim-tree/nvim-tree.lua/";
};
nvim-treesitter = buildVimPluginFrom2Nix {
pname = "nvim-treesitter";
version = "2023-05-19";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "nvim-treesitter";
repo = "nvim-treesitter";
rev = "dad1b7cd6606ffaa5c283ba73d707b4741a5f445";
sha256 = "1zvwwgyid84nl37504kl0rj6ihzq8rpy86d0gq6pxzdn65z3xfay";
rev = "f2778bd1a28b74adf5b1aa51aa57da85adfa3d16";
sha256 = "1jn0dhzp9yjy6f4qgf6khv8xwpzvns30q5g69jb5bpxg900szjr1";
};
meta.homepage = "https://github.com/nvim-treesitter/nvim-treesitter/";
};
@ -7138,12 +7138,12 @@ final: prev:
nvim-web-devicons = buildVimPluginFrom2Nix {
pname = "nvim-web-devicons";
version = "2023-05-07";
version = "2023-05-22";
src = fetchFromGitHub {
owner = "nvim-tree";
repo = "nvim-web-devicons";
rev = "986875b7364095d6535e28bd4aac3a9357e91bbe";
sha256 = "1p67hzk8aj8gzh4l6znizfbp0kwa2d0ba9qlnvgsxmvfw3iz8hgr";
rev = "e283ab937e0197b37ec5d8013e49495193407324";
sha256 = "1njshr9y24915zqj8msv9drfc1hicwry5hsrxh0yjk9hdwianq94";
};
meta.homepage = "https://github.com/nvim-tree/nvim-web-devicons/";
};
@ -7246,12 +7246,12 @@ final: prev:
oil-nvim = buildVimPluginFrom2Nix {
pname = "oil.nvim";
version = "2023-05-17";
version = "2023-05-22";
src = fetchFromGitHub {
owner = "stevearc";
repo = "oil.nvim";
rev = "19563c365800ab519e46a08a0aa59d5677b329b6";
sha256 = "0ymbd7cyix0yhbzsb3k7509f22sy04jkcbzkclpyf63a971vh87q";
rev = "d27bfa1f370e8caddf65890364989b76f9794afb";
sha256 = "0hypz0qjv0qwff78qcpi6zlvifrhnq2f52f1mdvvfr0kqms862ap";
fetchSubmodules = true;
};
meta.homepage = "https://github.com/stevearc/oil.nvim/";
@ -8873,12 +8873,12 @@ final: prev:
telescope-file-browser-nvim = buildVimPluginFrom2Nix {
pname = "telescope-file-browser.nvim";
version = "2023-05-16";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "nvim-telescope";
repo = "telescope-file-browser.nvim";
rev = "1aa7f12ce797bb5b548c96f38b2c93911e97c543";
sha256 = "1br20pnkr0nvi7xkx5r5ffz9dr3r8y3qwhr3bpsbm8cw2s4z2kx4";
rev = "6cf29d5139601993343c4e70ee2d1f67959d9cc5";
sha256 = "1vwwgjzgnsbvpi4jzqbg66cw6v2wv8axwwimfhc79ag5s1g02z8n";
};
meta.homepage = "https://github.com/nvim-telescope/telescope-file-browser.nvim/";
};
@ -9368,12 +9368,12 @@ final: prev:
tokyonight-nvim = buildVimPluginFrom2Nix {
pname = "tokyonight.nvim";
version = "2023-05-07";
version = "2023-05-20";
src = fetchFromGitHub {
owner = "folke";
repo = "tokyonight.nvim";
rev = "df13e3268a44f142999fa166572fe95a650a0b37";
sha256 = "1l9pdgj3kqb21j3bhp1qy6ivv3wshlv47b4i3d1jnl0kg70wy8w9";
rev = "029dca9a40260788d495929d8cc83a9a8ebb6763";
sha256 = "05jqdcdzf0hv5jxry315byn862i2yx0m5lc1vxwym18j4sajd1dm";
};
meta.homepage = "https://github.com/folke/tokyonight.nvim/";
};
@ -9990,6 +9990,18 @@ final: prev:
meta.homepage = "https://github.com/junegunn/vim-after-object/";
};
vim-agda = buildVimPluginFrom2Nix {
pname = "vim-agda";
version = "2022-03-01";
src = fetchFromGitHub {
owner = "msuperdock";
repo = "vim-agda";
rev = "1695060850b5991e8aded0861fae0c31877950a7";
sha256 = "0mr21s3x2c7qxay3a92iwhwwyir97s6g3vz8r3m40zxm91xdm7y6";
};
meta.homepage = "https://github.com/msuperdock/vim-agda/";
};
vim-airline = buildVimPluginFrom2Nix {
pname = "vim-airline";
version = "2023-04-17";
@ -10652,12 +10664,12 @@ final: prev:
vim-dadbod = buildVimPluginFrom2Nix {
pname = "vim-dadbod";
version = "2023-04-03";
version = "2023-05-19";
src = fetchFromGitHub {
owner = "tpope";
repo = "vim-dadbod";
rev = "3f57c0cd41523423fd781422dfc833820095a3e8";
sha256 = "0zz640kkh8gfs00x1r4867xch7075kxadldi97gb0h78hp6pkd56";
rev = "15c0832a547d5ef85048a2d709173aab350f0afb";
sha256 = "0c9jg5hbvy7zk9njavxdmrh5w4lwmmfca2py3yrvbf3x81vcvzv9";
};
meta.homepage = "https://github.com/tpope/vim-dadbod/";
};
@ -14906,12 +14918,12 @@ final: prev:
zk-nvim = buildVimPluginFrom2Nix {
pname = "zk-nvim";
version = "2023-04-15";
version = "2023-05-20";
src = fetchFromGitHub {
owner = "mickael-menu";
repo = "zk-nvim";
rev = "275578853dc76d282ee5b31f86cd3a4f02d91f2f";
sha256 = "0931qp0i0p8cqvz7a5i5b7ijdrd40qr4vriqmxl4hycjag8wcwq6";
rev = "5ddb53688035d115f941f0c8255f6e6618e608ac";
sha256 = "1zy6ar1cw4q8l4yk9lfsl56xk8jsv61d6p7s6pfrjvgrksh5jk5m";
};
meta.homepage = "https://github.com/mickael-menu/zk-nvim/";
};
@ -14930,24 +14942,24 @@ final: prev:
zoxide-vim = buildVimPluginFrom2Nix {
pname = "zoxide.vim";
version = "2023-04-20";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "nanotee";
repo = "zoxide.vim";
rev = "7582d5441f68c46b8fbd42a8721cde0c0dfe344b";
sha256 = "0gbnwf535mk29yhn39sdy9bifzk2y8x6ldkybn8l3pwjrlr42cks";
rev = "cc5b702cacbcbb4192b2a619c0f9cf6ab6e76936";
sha256 = "0dc1mg9x8flda59vl8d89m1ri1n0jl72q4jhmxj7mg9gp2r39l3m";
};
meta.homepage = "https://github.com/nanotee/zoxide.vim/";
};
catppuccin-nvim = buildVimPluginFrom2Nix {
pname = "catppuccin-nvim";
version = "2023-05-18";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "catppuccin";
repo = "nvim";
rev = "8338b02e9a8ffcb999520de7f15943712618760f";
sha256 = "0yxachyinpnj145wzy74ahy995pkv0ik5h47dh25fs33sivabdq0";
rev = "cc517bdcb66a0f8dee90bab10ccdd651fa967bbe";
sha256 = "1k1ql3zr7gx4iw0rs3qdzmr4jrn8xq1ydgq983jyc9rwzizg3fxk";
};
meta.homepage = "https://github.com/catppuccin/nvim/";
};
@ -14966,12 +14978,12 @@ final: prev:
chad = buildVimPluginFrom2Nix {
pname = "chad";
version = "2023-05-14";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "ms-jpq";
repo = "chadtree";
rev = "e38e4a51399d50c757572125813fbbcb2f1d1813";
sha256 = "0bica77v6vs6kqdj26hr2z206267k4qdp1ywg150d5gbn6mr65xs";
rev = "08dec64248acfb9d79ef5079fb634f36cfc26cbd";
sha256 = "0a9s212592ldbs5by0rv8rsbjrcs5i6m673npfhgk43l1grgw7m8";
};
meta.homepage = "https://github.com/ms-jpq/chadtree/";
};
@ -15050,24 +15062,24 @@ final: prev:
nvchad-extensions = buildVimPluginFrom2Nix {
pname = "nvchad-extensions";
version = "2023-05-14";
version = "2023-05-21";
src = fetchFromGitHub {
owner = "nvchad";
repo = "extensions";
rev = "6025bdbbac5c14b96ba4734e61eaf28db2742676";
sha256 = "1dfj4a3vh8djgylcc4f7bg7hq2mmg8imizglzbqr0my74v4shd1w";
rev = "fc1168f1d281d21dfb1b2baf47520f3258f6909f";
sha256 = "0riflj6kaq58dcanh7785f8n6zq3bz0rv8v1gyl2b74pfyipw7xw";
};
meta.homepage = "https://github.com/nvchad/extensions/";
};
nvchad-ui = buildVimPluginFrom2Nix {
pname = "nvchad-ui";
version = "2023-05-18";
version = "2023-05-20";
src = fetchFromGitHub {
owner = "nvchad";
repo = "ui";
rev = "168ca134ae186ad977872bff3301378c0af5be71";
sha256 = "0xwvgbv7xj1ja7fgw14vnm083hab6q19rihv8nky93wj5v5xjkya";
rev = "f9414e4837589f8b2baa9cec806ed26bd34cb0d1";
sha256 = "1a1fbxyl7fjnkaa4dnqisb7ap857lykbminvzhds4mvri19s0xzz";
};
meta.homepage = "https://github.com/nvchad/ui/";
};

View File

@ -126,12 +126,12 @@
};
c = buildGrammar {
language = "c";
version = "0.0.0+rev=424d014";
version = "0.0.0+rev=cac392a";
src = fetchFromGitHub {
owner = "tree-sitter";
repo = "tree-sitter-c";
rev = "424d0145efb0a87927269ab47709f98a564f8c4f";
hash = "sha256-cj8aEcdO5rsie9CqT8GLfvJm6O7yqBQPtn5aDe/lVpI=";
rev = "cac392ac3d7d365c469971b117e92a0df3bc8305";
hash = "sha256-ck6OEjljRReUl10W6yLu1dxa8ln8n8GMUz01BDj/kFk=";
};
meta.homepage = "https://github.com/tree-sitter/tree-sitter-c";
};
@ -258,12 +258,12 @@
};
cuda = buildGrammar {
language = "cuda";
version = "0.0.0+rev=a27cb7b";
version = "0.0.0+rev=7f6b482";
src = fetchFromGitHub {
owner = "theHamsta";
repo = "tree-sitter-cuda";
rev = "a27cb7b9d105c43205fa899f49bc0cc4cf399484";
hash = "sha256-OXm/urF5AIxod1J5i951C6kcf6pZRj+Q+ObwD0yj55Q=";
rev = "7f6b48249b8500d506bd424cfa8e4c9d83e17754";
hash = "sha256-A9AI3S/wToFvkj0Oe4UQ/B30r1a/tdgqRuObxazZlHs=";
};
meta.homepage = "https://github.com/theHamsta/tree-sitter-cuda";
};
@ -579,12 +579,12 @@
};
gitcommit = buildGrammar {
language = "gitcommit";
version = "0.0.0+rev=6c14f8b";
version = "0.0.0+rev=9d8c81e";
src = fetchFromGitHub {
owner = "gbprod";
repo = "tree-sitter-gitcommit";
rev = "6c14f8b63767cf6264c5c43ec71cc9351420e831";
hash = "sha256-x5FNXZ+/AexfXn92xSRIdKQGAepvAnMmAz/013x1Q7U=";
rev = "9d8c81e8e64b3b4f4c36de1425c5fc841097d7f4";
hash = "sha256-L3v+dQZhwC+kBOHf3YVbZjuCU+idbUDByEdUBmeGAlo=";
};
meta.homepage = "https://github.com/gbprod/tree-sitter-gitcommit";
};
@ -623,12 +623,12 @@
};
glsl = buildGrammar {
language = "glsl";
version = "0.0.0+rev=7a00509";
version = "0.0.0+rev=190c86e";
src = fetchFromGitHub {
owner = "theHamsta";
repo = "tree-sitter-glsl";
rev = "7a005091d3896dab80f34d8dba58935ad7ad6353";
hash = "sha256-L8FbCXea2cQ9Gyh8xtETynRKCt03TAXH0yM3XJTrGMY=";
rev = "190c86e633e6a6dfdb8a96f8b8460e347ff93f1c";
hash = "sha256-cwSidS+gzQKrvnmDihay0OfatTsBpEo/HEUIcRng5yk=";
};
meta.homepage = "https://github.com/theHamsta/tree-sitter-glsl";
};
@ -766,12 +766,12 @@
};
hlsl = buildGrammar {
language = "hlsl";
version = "0.0.0+rev=cad6130";
version = "0.0.0+rev=cd7a53e";
src = fetchFromGitHub {
owner = "theHamsta";
repo = "tree-sitter-hlsl";
rev = "cad6130182be8793ca5ef00a8581508e2f12f642";
hash = "sha256-gTek2joY9bhkFYXOxE7ZJ5PnyBbnPTLvUeZmFHjWXlU=";
rev = "cd7a53e9f82a2612cae0115692f964e9d0c416e2";
hash = "sha256-fx+9mpZkMxzuLjGOmT5wZZW2oUHfhLYOlTsL7N5QlEk=";
};
meta.homepage = "https://github.com/theHamsta/tree-sitter-hlsl";
};
@ -830,6 +830,17 @@
};
meta.homepage = "https://github.com/justinmk/tree-sitter-ini";
};
ispc = buildGrammar {
language = "ispc";
version = "0.0.0+rev=a5c2fd4";
src = fetchFromGitHub {
owner = "fab4100";
repo = "tree-sitter-ispc";
rev = "a5c2fd44a6a7fe4230c72d651c1b9b2d28fc20fe";
hash = "sha256-IctYjlsb2lPITj6aD22ovORd7O4Cxxe3mSo8kLPBHlo=";
};
meta.homepage = "https://github.com/fab4100/tree-sitter-ispc";
};
janet_simple = buildGrammar {
language = "janet_simple";
version = "0.0.0+rev=bd9cbaf";
@ -1198,35 +1209,35 @@
};
objc = buildGrammar {
language = "objc";
version = "0.0.0+rev=90773a7";
version = "0.0.0+rev=0612ff9";
src = fetchFromGitHub {
owner = "amaanq";
repo = "tree-sitter-objc";
rev = "90773a72d84d3c9a6eb8e373980e9b6b0bb665a0";
hash = "sha256-E0vRMAVWLCRmwqW9KqEWpQkRi8PX/XvjoE4U9Fy7wSc=";
rev = "0612ff9874bf376db4ae92b9f46ddcd4bdec1342";
hash = "sha256-t6+voOjZe2//bH80AIzzDi+giA8cT68PxvR9/SdshFI=";
};
meta.homepage = "https://github.com/amaanq/tree-sitter-objc";
};
ocaml = buildGrammar {
language = "ocaml";
version = "0.0.0+rev=f1106bf";
version = "0.0.0+rev=a09c63f";
src = fetchFromGitHub {
owner = "tree-sitter";
repo = "tree-sitter-ocaml";
rev = "f1106bf834703f1f2f795da1a3b5f8f40174ffcc";
hash = "sha256-5X2c2Deb8xNlp0LPQKFWIT3jwxKuuKdFlp9b3iA818Y=";
rev = "a09c63f4d754d2d2dffb7265f6e6f39c9e6e6db1";
hash = "sha256-6Zz/7XRmiBoXzAt41vCMvaV2LmT7co0Gsbt0nTz+0nA=";
};
location = "ocaml";
meta.homepage = "https://github.com/tree-sitter/tree-sitter-ocaml";
};
ocaml_interface = buildGrammar {
language = "ocaml_interface";
version = "0.0.0+rev=f1106bf";
version = "0.0.0+rev=a09c63f";
src = fetchFromGitHub {
owner = "tree-sitter";
repo = "tree-sitter-ocaml";
rev = "f1106bf834703f1f2f795da1a3b5f8f40174ffcc";
hash = "sha256-5X2c2Deb8xNlp0LPQKFWIT3jwxKuuKdFlp9b3iA818Y=";
rev = "a09c63f4d754d2d2dffb7265f6e6f39c9e6e6db1";
hash = "sha256-6Zz/7XRmiBoXzAt41vCMvaV2LmT7co0Gsbt0nTz+0nA=";
};
location = "interface";
meta.homepage = "https://github.com/tree-sitter/tree-sitter-ocaml";
@ -1586,12 +1597,12 @@
};
scala = buildGrammar {
language = "scala";
version = "0.0.0+rev=7d348f5";
version = "0.0.0+rev=78ae129";
src = fetchFromGitHub {
owner = "tree-sitter";
repo = "tree-sitter-scala";
rev = "7d348f51e442563f4ab2b6c3e136dac658649f93";
hash = "sha256-jIbVw4jKMJYbKeeai3u7J+xKRfo2YNoL3ZcW1NLc9fg=";
rev = "78ae129292990224bcae025e7d3f4873a88f772d";
hash = "sha256-g9jx06MvdMdAk12dK0yFwTP0gkqsd+efQbPAxD47pnU=";
};
meta.homepage = "https://github.com/tree-sitter/tree-sitter-scala";
};
@ -1674,12 +1685,12 @@
};
sql = buildGrammar {
language = "sql";
version = "0.0.0+rev=0f774f4";
version = "0.0.0+rev=721087c";
src = fetchFromGitHub {
owner = "derekstride";
repo = "tree-sitter-sql";
rev = "0f774f4ce1fbc7aa6df6202301e0b08b8c844ae4";
hash = "sha256-2NkcmwBlDxsvgxRYlZzDcNMw2GZmOIWOCziSPBMrRw4=";
rev = "721087c8819cda10ca37f974e914ab9be46b290f";
hash = "sha256-R23co3mAH6ToFzfgnq9PWyX/uu15vbnMAB+dRVB00oI=";
};
meta.homepage = "https://github.com/derekstride/tree-sitter-sql";
};

View File

@ -96,6 +96,9 @@
, openssl
, pkg-config
# vim-agda dependencies
, agda
# vim-go dependencies
, asmfmt
, delve
@ -1227,6 +1230,13 @@ self: super: {
dependencies = with self; [ webapi-vim vim-addon-mw-utils vim-addon-signs vim-addon-async ];
});
vim-agda = super.vim-agda.overrideAttrs (old: {
preFixup = ''
substituteInPlace "$out"/autoload/agda.vim \
--replace "jobstart(['agda'" "jobstart(['${agda}/bin/agda'"
'';
});
vim-bazel = super.vim-bazel.overrideAttrs (old: {
dependencies = with self; [ vim-maktaba ];
});

View File

@ -843,6 +843,7 @@ https://github.com/MarcWeber/vim-addon-syntax-checker/,,
https://github.com/MarcWeber/vim-addon-toggle-buffer/,,
https://github.com/MarcWeber/vim-addon-xdebug/,,
https://github.com/junegunn/vim-after-object/,,
https://github.com/msuperdock/vim-agda/,HEAD,
https://github.com/vim-airline/vim-airline/,,
https://github.com/enricobacis/vim-airline-clock/,,
https://github.com/vim-airline/vim-airline-themes/,,

View File

@ -1,17 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_14"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}

View File

@ -1,598 +0,0 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.packageName}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
replaceDependencies(packageObj.optionalDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 1,
requires: true,
dependencies: {}
};
function augmentPackageJSON(filePath, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, dependencies);
}
});
} else {
augmentPackageJSON(filePath, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
fi
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version ? null
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
# Patch the shebang lines of all the executables
ls $out/bin/* | while read i
do
file="$(readlink -f "$i")"
chmod u+rwx "$file"
patchShebangs "$file"
done
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
chmod 644 package-lock.json
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
# Builds a development shell
buildNodeShell =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
nodeDependencies = buildNodeDependencies args;
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation ({
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
} // extraArgs);
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

View File

@ -1,11 +1,11 @@
{ pkgs, lib, stdenv, fetchFromGitHub, runCommand, rustPlatform, makeWrapper, llvmPackages
, nodePackages, cmake, nodejs, unzip, python3, pkg-config, libsecret
, buildNpmPackage, cmake, nodejs, unzip, python3, pkg-config, libsecret, darwin
}:
assert lib.versionAtLeast python3.version "3.5";
let
publisher = "vadimcn";
pname = "vscode-lldb";
version = "1.8.1";
version = "1.9.1";
vscodeExtUniqueId = "${publisher}.${pname}";
vscodeExtPublisher = publisher;
@ -15,7 +15,7 @@ let
owner = "vadimcn";
repo = "vscode-lldb";
rev = "v${version}";
sha256 = "sha256-5wrw8LNH14WAyIKIRGFbvrISb5RUXeD5Uh/weja9p4Q=";
sha256 = "sha256-DqxdZtSW8TZaOFGXOZQ7a4tmgRj6iAWDppCNomdfVxY=";
};
# need to build a custom version of lldb and llvm for enhanced rust support
@ -25,7 +25,7 @@ let
pname = "${pname}-adapter";
inherit version src;
cargoSha256 = "sha256-Lpo2jaDMaZGwSrpQBvBCscVbWi2Db1Cx1Tv84v1H4Es=";
cargoSha256 = "sha256-+hfNkr9cZbOcWdWKUWUqDj9a0PKjKeApFXYZzS1XokE=";
nativeBuildInputs = [ makeWrapper ];
@ -42,15 +42,35 @@ let
doCheck = false;
};
nodeDeps = ((import ./build-deps/default.nix {
inherit pkgs nodejs;
inherit (stdenv.hostPlatform) system;
}).nodeDependencies.override (old: {
inherit src version;
nativeBuildInputs = [ pkg-config ];
buildInputs = [libsecret];
dontNpmInstall = true;
}));
nodeDeps = buildNpmPackage {
pname = "${pname}-node-deps";
inherit version src;
npmDepsHash = "sha256-Cdlq1jxHSCfPjXhasClc6XzEUp3vlLgkStbhYtCyc7E=";
nativeBuildInputs = [
python3
pkg-config
];
buildInputs = [
libsecret
] ++ lib.optionals stdenv.isDarwin (with darwin.apple_sdk.frameworks; [
Security
AppKit
]);
dontNpmBuild = true;
installPhase = ''
runHook preInstall
mkdir -p $out/lib
cp -r node_modules $out/lib
runHook postInstall
'';
};
in stdenv.mkDerivation {
pname = "vscode-extension-${publisher}-${pname}";
@ -62,8 +82,14 @@ in stdenv.mkDerivation {
patches = [ ./cmake-build-extension-only.patch ];
postPatch = ''
# temporary patch for forgotten version updates
substituteInPlace CMakeLists.txt \
--replace "1.9.0" ${version}
'';
postConfigure = ''
cp -r ${nodeDeps}/lib/{node_modules,package-lock.json} .
cp -r ${nodeDeps}/lib/node_modules .
'';
cmakeFlags = [
@ -72,6 +98,10 @@ in stdenv.mkDerivation {
];
makeFlags = [ "vsix_bootstrap" ];
preBuild = lib.optionalString stdenv.isDarwin ''
export HOME=$TMPDIR
'';
installPhase = ''
ext=$out/$installPrefix
runHook preInstall

View File

@ -1,5 +1,5 @@
#! /usr/bin/env nix-shell
#! nix-shell ../../update-shell.nix -i bash
#! nix-shell ../../update-shell.nix -i bash -p wget prefetch-npm-deps
set -eo pipefail
cd "$(dirname "${BASH_SOURCE[0]}")"
@ -40,6 +40,9 @@ sed -E 's#\bsha256 = ".*?"#sha256 = "'$srcHash'"#' --in-place "$nixFile"
cargoHash=$(nix-prefetch "{ sha256 }: (import $nixpkgs {}).vscode-extensions.vadimcn.vscode-lldb.adapter.cargoDeps.overrideAttrs (_: { outputHash = sha256; })")
sed -E 's#\bcargoSha256 = ".*?"#cargoSha256 = "'$cargoHash'"#' --in-place "$nixFile"
# update node dependencies
src="$(nix-build $nixpkgs -A vscode-extensions.vadimcn.vscode-lldb.src --no-out-link)"
nix-shell -p node2nix -I nixpkgs=$nixpkgs --run "cd build-deps && ls -R && node2nix -14 -d -i \"$src/package.json\" -l \"$src/package-lock.json\""
pushd $TMPDIR
wget https://raw.githubusercontent.com/$owner/$repo/v${version}/package-lock.json
npmDepsHash=$(prefetch-npm-deps ./package-lock.json)
popd
sed -E 's#\bnpmDepsHash = ".*?"#npmDepsHash = "'$npmDepsHash'"#' --in-place "$nixFile"

View File

@ -16,6 +16,9 @@
# sourceExecutableName is the name of the binary in the source archive, over
# which we have no control
, sourceExecutableName ? executableName
, useVSCodeRipgrep ? false
, ripgrep
}:
let
@ -131,10 +134,17 @@ let
# and the window immediately closes which renders VSCode unusable
# see https://github.com/NixOS/nixpkgs/issues/152939 for full log
ln -rs "$unpacked" "$packed"
# this fixes bundled ripgrep
chmod +x resources/app/node_modules/@vscode/ripgrep/bin/rg
'';
'' + (let
vscodeRipgrep = if stdenv.isDarwin then
"Contents/Resources/app/node_modules.asar.unpacked/@vscode/ripgrep/bin/rg"
else
"resources/app/node_modules/@vscode/ripgrep/bin/rg";
in if !useVSCodeRipgrep then ''
rm ${vscodeRipgrep}
ln -s ${ripgrep}/bin/rg ${vscodeRipgrep}
'' else ''
chmod +x ${vscodeRipgrep}
'');
inherit meta;
};

View File

@ -1,6 +1,7 @@
{ stdenv, lib, callPackage, fetchurl
, isInsiders ? false
, commandLineArgs ? ""
, useVSCodeRipgrep ? false
}:
let
@ -34,7 +35,7 @@ in
executableName = "code" + lib.optionalString isInsiders "-insiders";
longName = "Visual Studio Code" + lib.optionalString isInsiders " - Insiders";
shortName = "Code" + lib.optionalString isInsiders " - Insiders";
inherit commandLineArgs;
inherit commandLineArgs useVSCodeRipgrep;
src = fetchurl {
name = "VSCode_${version}_${plat}.${archive_fmt}";

View File

@ -1,4 +1,4 @@
{ lib, stdenv, callPackage, fetchurl, nixosTests, commandLineArgs ? "" }:
{ lib, stdenv, callPackage, fetchurl, nixosTests, commandLineArgs ? "", useVSCodeRipgrep ? false }:
let
inherit (stdenv.hostPlatform) system;
@ -24,7 +24,7 @@ let
sourceRoot = if stdenv.isDarwin then "" else ".";
in
callPackage ./generic.nix rec {
inherit sourceRoot commandLineArgs;
inherit sourceRoot commandLineArgs useVSCodeRipgrep;
# Please backport all compatible updates to the stable release.
# This is important for the extension ecosystem.

View File

@ -1,19 +1,19 @@
# Generated by ./update.sh - do not update manually!
# Last updated: 2023-05-13
# Last updated: 2023-05-18
{
compatList = {
rev = "b0dd7ed48e5544d9f458a5e832b111fba3571e26";
rev = "5f812033d64da3b70973463b8b160b7fa8aff61d";
hash = "sha256:1hdsza3wf9a0yvj6h55gsl7xqvhafvbz1i8paz9kg7l49b0gnlh1";
};
mainline = {
version = "1430";
hash = "sha256:0q5z078gnl92rh5md8pqcikkr63scapzak2ngsqff635m6qk99lb";
version = "1437";
hash = "sha256:1yhr4kh4dq78cx2r655xrzb1mr7s85vcmwy731rng8q7v6w8j76p";
};
ea = {
version = "3588";
distHash = "sha256:1fi61vs3hry23d3631a39pda0wzjp7bmy57y9zmf7qrqp8appvza";
fullHash = "sha256:0wx3p6mbfd3swiiw6f8j0yd4h23kjsa2xkd8garycbx0wsqmjg84";
version = "3596";
distHash = "sha256:0wi0rk7i7xdh52sawr52pkzhq2k63alk1xan1pkwgy5ybcqymr78";
fullHash = "sha256:1x374y17hniada2hbs04295crb0wxxvl9lmy3h9cwwbx1jjax8y8";
};
}

View File

@ -7,16 +7,16 @@
buildGoModule rec {
pname = "lf";
version = "29";
version = "30";
src = fetchFromGitHub {
owner = "gokcehan";
repo = "lf";
rev = "r${version}";
hash = "sha256-kch+FQAO/Xn3GFXOzBTV1VUeJ+0CnDj/GmzxPUO5dlo=";
hash = "sha256-hlhmnkPm1x7uJMwUM/B02rXLffsXFbkxXYITKD3BERY=";
};
vendorHash = "sha256-z34WN4z9reBbwITLm7igQscmIVuoRpdAvZ4QMNGAPaE=";
vendorHash = "sha256-DYReTxH4SHnJERbiE6rOp5XqzN3NRbICt5iNeX8Jgt8=";
nativeBuildInputs = [ installShellFiles ];

View File

@ -47,13 +47,13 @@ in
stdenv.mkDerivation (finalAttrs: {
pname = "imagemagick";
version = "7.1.1-9";
version = "7.1.1-10";
src = fetchFromGitHub {
owner = "ImageMagick";
repo = "ImageMagick";
rev = finalAttrs.version;
hash = "sha256-xTrkQpd4UYHEmHosb7ZLoqnDFSutrikjXPmuU5R9jQo=";
hash = "sha256-W7s/j4R4C22hYVhye8TjDYHtDq/1Q4GGu/9lxyjk6sA=";
};
outputs = [ "out" "dev" "doc" ]; # bin/ isn't really big

View File

@ -88,7 +88,7 @@ mkDerivation rec {
"-DPLUGIN_STANDARD_QM3C2=ON"
"-DPLUGIN_STANDARD_QMPLANE=ON"
"-DPLUGIN_STANDARD_QPOISSON_RECON=ON"
"-DPLUGIN_STANDARD_QRANSAC_SD=ON"
"-DPLUGIN_STANDARD_QRANSAC_SD=OFF" # not compatible with GPL, broken on non-x86
"-DPLUGIN_STANDARD_QSRA=ON"
"-DPLUGIN_STANDARD_QCLOUDLAYERS=ON"
];

View File

@ -183,6 +183,8 @@ stdenv.mkDerivation rec {
done
'';
passthru = { inherit python; };
meta = with lib; {
description = "3D Creation/Animation/Publishing System";
homepage = "https://www.blender.org";

View File

@ -19,11 +19,11 @@
stdenv.mkDerivation rec {
pname = "crow-translate";
version = "2.10.4";
version = "2.10.5";
src = fetchzip {
url = "https://github.com/${pname}/${pname}/releases/download/${version}/${pname}-${version}-source.tar.gz";
hash = "sha256-M2vAH1YAvNOhDsz+BWxvteR8YX89FHtbUcQZr1uVoCs=";
hash = "sha256-sAjgG2f0rAWakPd2cZNGXkooIxQQM5OPHm11ahyY1WU=";
};
patches = [

View File

@ -2,12 +2,12 @@
stdenvNoCC.mkDerivation rec {
pname = "fluidd";
version = "1.23.5";
version = "1.24.0";
src = fetchurl {
name = "fluidd-v${version}.zip";
url = "https://github.com/cadriel/fluidd/releases/download/v${version}/fluidd.zip";
sha256 = "sha256-od/RoxFjnOuyz7+D+avQJyJzpqpovzs+g4ErfyDJQpY=";
sha256 = "sha256-2J5SVEtlLhZhDzqakOh/gt8XTkSaM9KBa0zCYM4UZAQ=";
};
nativeBuildInputs = [ unzip ];

View File

@ -3,40 +3,36 @@
let
esbuild' = buildPackages.esbuild.override {
buildGoModule = args: buildPackages.buildGoModule (args // rec {
version = "0.16.15";
version = "0.17.19";
src = fetchFromGitHub {
owner = "evanw";
repo = "esbuild";
rev = "v${version}";
hash = "sha256-iTAtPHjrBvHweSIiAbkkbBLgjF3v68jipJEzc0I4G04=";
hash = "sha256-PLC7OJLSOiDq4OjvrdfCawZPfbfuZix4Waopzrj8qsU=";
};
vendorHash = "sha256-+BfxCyg0KkDQpHt/wycy/8CTG6YBA/VJvJFhhzUnSiQ=";
});
};
in buildNpmPackage rec {
pname = "kaufkauflist";
version = "2.0.0";
version = "2.2.0";
src = fetchFromGitea {
domain = "codeberg.org";
owner = "annaaurora";
repo = "kaufkauflist";
rev = "v${version}";
hash = "sha256-oXrb6n1oD27bHt/zPWP0REQyCyZXI8BB57pdR/q42gY=";
hash = "sha256-a7C4yHTHPhL5/p1/XsrMA0PnbIzer6FShDiwUMOg69Y=";
};
npmDepsHash = "sha256-lSnGLK7+ac/wEpAxlpkZS/kgr9F+8WK+nRjCzkrPJt0=";
npmDepsHash = "sha256-uQ4XoaR3JjvPm8EQ2pnDM+x4zjVn4PEHq7BRqVbvFyw=";
ESBUILD_BINARY_PATH = "${lib.getExe esbuild'}";
installPhase = ''
runHook preInstall
postInstall = ''
mkdir -p $out/share/kaufkauflist $out/share/pocketbase
cp -vr build/* $out/share/kaufkauflist/
cp -v pb_schema.json $out/share/pocketbase/
runHook postInstall
'';
# Uncomment this when nix-update-script supports Gitea.

View File

@ -0,0 +1,36 @@
{ lib
, stdenv
, fetchFromGitHub
, freetype
}:
stdenv.mkDerivation rec {
pname = "otf2bdf";
version = "3.1";
  # The original site http://sofia.nmsu.edu/~mleisher/Software/otf2bdf/ is unreachable;
  # this repository is a mirror.
src = fetchFromGitHub {
owner = "jirutka";
repo = "otf2bdf";
rev = "v${version}";
hash = "sha256-HK9ZrnwKhhYcBvSl+3RwFD7m/WSaPkGKX6utXnk5k+A=";
};
buildInputs = [ freetype ];
installPhase = ''
mkdir -p $out/bin $out/share/man/man1
install otf2bdf $out/bin
cp otf2bdf.man $out/share/man/man1/otf2bdf.1
'';
meta = with lib; {
#homepage = "http://sofia.nmsu.edu/~mleisher/Software/otf2bdf/"; # timeout
homepage = "https://github.com/jirutka/otf2bdf";
description = "OpenType to BDF font converter";
license = licenses.mit0;
platforms = platforms.all;
maintainers = with maintainers; [ hzeller ];
};
}

View File

@ -170,6 +170,10 @@ let
deprecated = (super.deprecated.override {
sphinxHook = null;
}).overridePythonAttrs dropDocOutput;
wrapt = (super.wrapt.override {
sphinxHook = null;
sphinx-rtd-theme = null;
}).overridePythonAttrs dropDocOutput;
};
};
in

View File

@ -37,13 +37,13 @@
stdenv.mkDerivation rec {
pname = "synergy";
version = "1.14.5.22";
version = "1.14.6.19-stable";
src = fetchFromGitHub {
owner = "symless";
repo = "synergy-core";
rev = version;
sha256 = "sha256-rqQ4n8P8pZSWRCxaQLa2PuduXMt2XeaFs051qcT3/o8=";
sha256 = "sha256-0QqklfSsvcXh7I2jaHk82k0nY8gQOj9haA4WOjGqBqY=";
fetchSubmodules = true;
};

View File

@ -0,0 +1,61 @@
{ appstream-glib
, desktop-file-utils
, fetchFromGitHub
, gettext
, glib-networking
, gobject-introspection
, gtk4
, gtksourceview5
, lib
, libadwaita
, libsoup_3
, meson
, ninja
, pkg-config
, python3
, stdenv
, wrapGAppsHook4
}:
stdenv.mkDerivation (finalAttrs: {
pname = "webfont-kit-generator";
version = "1.0.3";
src = fetchFromGitHub {
owner = "rafaelmardojai";
repo = "webfont-kit-generator";
rev = finalAttrs.version;
hash = "sha256-aD/1moWIiU4zpLTW+VHH9n/sj10vCZ8UzB2ey3mR0/k=";
};
nativeBuildInputs = [
appstream-glib
desktop-file-utils
gettext
gobject-introspection
gtk4 # For gtk4-update-icon-cache
meson
ninja
pkg-config
wrapGAppsHook4
];
buildInputs = [
glib-networking
gtk4
gtksourceview5
libadwaita
libsoup_3
(python3.withPackages (ps: with ps; [
fonttools
pygobject3
]))
];
meta = with lib; {
description = "Webfont Kit Generator is a simple utility that allows you to generate woff, woff2 and the necessary CSS boilerplate from non-web font formats (otf & ttf)";
homepage = "https://apps.gnome.org/app/com.rafaelmardojai.WebfontKitGenerator";
license = licenses.gpl3Plus;
maintainers = with maintainers; [ benediktbroich ];
platforms = platforms.unix;
};
})

View File

@ -4,6 +4,8 @@
, dpkg
, wrapGAppsHook
, autoPatchelfHook
, clash
, clash-meta
, openssl
, webkitgtk
, udev
@ -12,15 +14,13 @@
stdenv.mkDerivation rec {
pname = "clash-verge";
version = "1.3.1";
version = "1.3.2";
src = fetchurl {
url = "https://github.com/zzzgydi/clash-verge/releases/download/v${version}/clash-verge_${version}_amd64.deb";
hash = "sha256-AEOFMKxrkPditf5ks++tII6zeuH72Fxw/TVtZeXS3v4=";
hash = "sha256-46+7P9WH85fC3m+5LQHpvZX2ggeH6djoO53fQxQJdYk=";
};
unpackPhase = "dpkg-deb -x $src .";
nativeBuildInputs = [
dpkg
wrapGAppsHook
@ -43,10 +43,16 @@ stdenv.mkDerivation rec {
mkdir -p $out/bin
mv usr/* $out
rm $out/bin/{clash,clash-meta}
runHook postInstall
'';
postFixup = ''
ln -s ${lib.getExe clash} $out/bin/clash
ln -s ${lib.getExe clash-meta} $out/bin/clash-meta
'';
meta = with lib; {
description = "A Clash GUI based on tauri";
homepage = "https://github.com/zzzgydi/clash-verge";

View File

@ -0,0 +1,73 @@
# k3s versions
K3s, Kubernetes, and other clustered software have the property of not being able to update atomically. Most software in nixpkgs, for example bash, can be updated as part of a "nixos-rebuild switch" without having to worry about the old and the new bash interacting in some way.
K3s/Kubernetes, on the other hand, is typically run across several NixOS machines, and each NixOS machine is updated independently. As such, different versions of the package and NixOS module must maintain compatibility with each other through temporary version skew during updates.
The upstream Kubernetes project [documents this in their version-skew policy](https://kubernetes.io/releases/version-skew-policy/#supported-component-upgrade-order).
Within nixpkgs, we strive to maintain a valid "upgrade path" that does not run
afoul of the upstream version skew policy.
## Upstream release cadence and support
K3s is built on top of K8s and typically provides a similar release cadence and support window (simply by cherry-picking k8s patches). As such, we assume k3s's support lifecycle is identical to upstream K8s.
This is documented upstream [here](https://kubernetes.io/releases/patch-releases/#support-period).
In short, a new Kubernetes version is released roughly every 4 months, and each release is supported for a little over 1 year.
Any version that is not supported by upstream should be dropped from nixpkgs.
## Versions in NixOS releases
NixOS releases should avoid having deprecated software, or making major version upgrades, wherever possible.
As such, we would like to have only the newest K3s version in each NixOS
release at the time the release branch is branched off, which will ensure the
K3s version in that release will receive updates for the longest duration
possible.
However, this conflicts with another desire: we would like people to be able to upgrade between NixOS stable releases without needing to make a large enough k3s version jump that they violate the Kubernetes version skew policy.
To give an example, we may have the following timeline for k8s releases:
(Note: the exact versions and dates may be wrong; this is an illustrative example and reality may differ.)
```mermaid
gitGraph
branch k8s
commit
branch "k8s-1.24"
checkout "k8s-1.24"
commit id: "1.24.0" tag: "2022-05-03"
branch "k8s-1.25"
checkout "k8s-1.25"
commit id: "1.25.0" tag: "2022-08-23"
branch "k8s-1.26"
checkout "k8s-1.26"
commit id: "1.26.0" tag: "2022-12-08"
checkout k8s-1.24
commit id: "1.24-EOL" tag: "2023-07-28"
checkout k8s-1.25
commit id: "1.25-EOL" tag: "2023-10-27"
checkout k8s-1.26
commit id: "1.26-EOL" tag: "2024-02-28"
```
(Note: the above graph will render if you view this markdown on GitHub, or when using [mermaid](https://mermaid.js.org/))
In this scenario, even though k3s 1.24 is still technically supported when the NixOS 23.05
release is cut, it goes EOL before the NixOS 23.11 release is made, so we would
not want to include it. Similarly, k3s 1.25 would go EOL before NixOS 23.11.
As such, we should only include k3s 1.26 in the 23.05 release.
We can then make a similar argument when NixOS 23.11 comes around to not
include k3s 1.26 or 1.27. However, that means someone upgrading from the NixOS
23.05 release to the NixOS 23.11 release would not have a supported upgrade path.
In order to resolve this issue, we propose backporting not just new patch releases to older NixOS releases, but also new k3s versions, up to one version before the first version that is included in the next NixOS release.
In the above example, where NixOS 23.05 included k3s 1.26, and 23.11 included k3s 1.28, that means we would backport 1.27 to the NixOS 23.05 release, and backport all patches for 1.26 and 1.27.
This would allow someone to upgrade between those NixOS releases in a supported configuration.
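As a rough sketch of what this could look like on a release branch (the attribute and path names here are illustrative, not the actual nixpkgs layout), each supported minor version could be exposed as its own attribute, so a backport only adds an attribute instead of replacing the version users are already running:

```nix
# Hypothetical package-set excerpt: one attribute per supported k3s minor
# version. A backport adds a new attribute; it never removes the old one.
{ callPackage }:

rec {
  k3s_1_26 = callPackage ./1_26 { };   # minor shipped when the release was cut
  k3s_1_27 = callPackage ./1_27 { };   # backported later in the release's lifetime

  # Default alias; stays on the original minor so users are not upgraded implicitly.
  k3s = k3s_1_26;
}
```

With a layout like this, backporting k3s 1.27 to the 23.05 branch would only mean adding the `k3s_1_27` attribute (plus ongoing patch updates for 1.26 and 1.27), giving users on that release an intermediate version to step through before moving to the k3s shipped with 23.11.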

View File

@ -22,7 +22,7 @@ stdenv.mkDerivation rec {
meta = with lib; {
description = "The Kubernetes IDE";
homepage = "https://k8slens.dev/";
license = licenses.mit;
license = licenses.lens;
maintainers = with maintainers; [ dbirks ];
platforms = [ "aarch64-darwin" ];
};

View File

@ -37,7 +37,7 @@ appimageTools.wrapType2 {
meta = with lib; {
description = "The Kubernetes IDE";
homepage = "https://k8slens.dev/";
license = licenses.mit;
license = licenses.lens;
maintainers = with maintainers; [ dbirks RossComputerGuy ];
platforms = [ "x86_64-linux" ];
};

View File

@ -13,13 +13,13 @@
}:
let
spark = { pname, version, sha256, extraMeta ? {} }:
spark = { pname, version, hash, extraMeta ? {} }:
stdenv.mkDerivation rec {
inherit pname version;
jdk = if hadoopSupport then hadoop.jdk else jdk8;
src = fetchzip {
url = "mirror://apache/spark/${pname}-${version}/${pname}-${version}-bin-without-hadoop.tgz";
sha256 = sha256;
inherit hash;
};
nativeBuildInputs = [ makeWrapper ];
buildInputs = [ jdk python3Packages.python ]
@ -74,6 +74,18 @@ in
spark_3_4 = spark rec {
pname = "spark";
version = "3.4.0";
sha256 = "sha256-0y80dRYzb6Ceu6MlGQHtpMdzOob/TBg6kf8dtF6KyCk=";
hash = "sha256-0y80dRYzb6Ceu6MlGQHtpMdzOob/TBg6kf8dtF6KyCk=";
};
spark_3_3 = spark rec {
pname = "spark";
version = "3.3.2";
hash = "sha256-AeKe2QN+mhUJgZRSIgbi/DttAWlDgwC1kl9p7syEvbo=";
extraMeta.knownVulnerabilities = [ "CVE-2023-22946" ];
};
spark_3_2 = spark rec {
pname = "spark";
version = "3.2.4";
hash = "sha256-xL4W+dTWbvmmncq3/8iXmhp24rp5SftvoRfkTyxCI8E=";
extraMeta.knownVulnerabilities = [ "CVE-2023-22946" ];
};
}

View File

@ -46,11 +46,11 @@
"vendorHash": "sha256-nwl8GvS/hc07xSzM+wEwOAkT9oQcAuguHaEcM1nWjwg="
},
"alicloud": {
"hash": "sha256-qacbj6xquC87cLAuKlI4NzPtqfR8gbLrTfeVN5VSuMI=",
"hash": "sha256-mwYwZObU2WadA1X3EiCVh5T1iHYfPzluEHSUZtrMz98=",
"homepage": "https://registry.terraform.io/providers/aliyun/alicloud",
"owner": "aliyun",
"repo": "terraform-provider-alicloud",
"rev": "v1.204.1",
"rev": "v1.205.0",
"spdx": "MPL-2.0",
"vendorHash": null
},

View File

@ -0,0 +1,38 @@
{ lib
, buildGoModule
, fetchFromGitHub
, testers
, tf-summarize
}:
buildGoModule rec {
pname = "tf-summarize";
version = "0.3.2";
src = fetchFromGitHub {
owner = "dineshba";
repo = "tf-summarize";
rev = "v${version}";
sha256 = "0c6fcz0n22mq8bqr82h9lfxx4n1bk9gjlc7d131lpf14yiacih3p";
};
vendorSha256 = "cnybdZth7qlP2BHK8uvLCoqJtggMIkvaL2+YugiUZRE=";
ldflags = [
"-s"
"-w"
"-X main.version=${version}"
];
passthru.tests.version = testers.testVersion {
package = tf-summarize;
command = "tf-summarize -v";
inherit version;
};
meta = with lib; {
description = "Command-line utility to print the summary of the terraform plan";
homepage = "https://github.com/dineshba/tf-summarize";
license = licenses.mit;
maintainers = with maintainers; [ pjrm ];
};
}

View File

@ -10,16 +10,16 @@
buildGoModule rec {
pname = "werf";
version = "1.2.233";
version = "1.2.235";
src = fetchFromGitHub {
owner = "werf";
repo = "werf";
rev = "v${version}";
hash = "sha256-3T8LbMbM9ZcG8uLspZSNvM7LYuDAOIWweZsYPTC/DOY=";
hash = "sha256-fEo/hHVV+xv60VNe9AqjrP4JGzGXcM8yr/KjhRfOAhk=";
};
vendorHash = "sha256-AKoK/WAelzMhu9W1zoRe4p5Cx/3NzktLGJEG7gPXekQ=";
vendorHash = "sha256-1r32uT98I/pd6lxCb3bKy6uxJJodsaslwy9wynE4Pmg=";
proxyVendor = true;
@ -64,10 +64,6 @@ buildGoModule rec {
integration/suites \
pkg/true_git/*test.go \
test/e2e
# Remove failing tests.
rm -rf \
cmd/werf/docs/replacers/kubectl/kubectl_test.go
'' + lib.optionalString (CGO_ENABLED == 0) ''
# A workaround for osusergo.
export USER=nixbld

File diff suppressed because it is too large

View File

@ -16,23 +16,23 @@
stdenv.mkDerivation rec {
pname = "flare";
version = "0.6.0";
version = "0.8.0";
src = fetchFromGitLab {
domain = "gitlab.com";
owner = "Schmiddiii";
repo = pname;
rev = version;
hash = "sha256-wY95sXWGDjEy8vvP79XliJOn5GQkAvDmOXKmRz0TPEw=";
hash = "sha256-w4WaWcUsjKiWfNe5StwRcPlcXqWz0427It96L1NsR0U=";
};
cargoDeps = rustPlatform.importCargoLock {
lockFile = ./Cargo.lock;
outputHashes = {
"curve25519-dalek-3.2.1" = "sha256-T/NGZddFQWq32eRu6FYfgdPqU8Y4Shi1NpMaX4GeQ54=";
"libsignal-protocol-0.1.0" = "sha256-gapAurbs/BdsfPlVvWWF7Ai1nXZcxCW8qc5gQdbnthM=";
"libsignal-service-0.1.0" = "sha256-AXWCR1maqgIPk8H/IKR22BvMToqJrtlaOelFAnMJ6kI=";
"presage-0.4.0" = "sha256-HtqSNEaQXgvgrs9xvm76W1v7PLmdsJ5M3fbqH2Dpw8A=";
"curve25519-dalek-3.2.1" = "sha256-0hFRhn920tLBpo6ZNCl6DYtTMHMXY/EiDvuhOPVjvC0=";
"libsignal-protocol-0.1.0" = "sha256-IBhmd3WzkICiADO24WLjDJ8pFILGwWNUHLXKpt+Y0IY=";
"libsignal-service-0.1.0" = "sha256-art5O06X4lhp9PoAd23mi6F1wRWkUcyON7AK8uBDoK8=";
"presage-0.6.0-dev" = "sha256-DVImXySYL0zlGkwss/5DnQ3skTaBa7l55VWIGCd6kQU=";
};
};

View File

@ -1,20 +0,0 @@
# This file has been generated by node2nix 1.7.0. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_14"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) lib stdenv python2 util-linux runCommand writeTextFile;
inherit nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
locpkgs = import ./node-packages.nix {
inherit (pkgs) fetchurl fetchgit;
inherit nodeEnv;
globalBuildInputs = [
locpkgs.node-pre-gyp
];
};
in locpkgs

View File

@ -1,27 +0,0 @@
{ lib, stdenv, pkgs }:
(import ./composition.nix {
inherit pkgs;
inherit (stdenv.hostPlatform) system;
})."package".override {
postInstall = ''
mkdir "$out/bin"
echo '#!/bin/sh' >> "$out/bin/matrix-recorder"
echo "'${pkgs.nodejs_14}/bin/node'" \
"'$out/lib/node_modules/matrix-recorder/matrix-recorder.js'" \
'"$@"' >> "$out/bin/matrix-recorder"
echo '#!/bin/sh' >> "$out/bin/matrix-recorder-to-html"
echo 'cd "$1"' >> "$out/bin/matrix-recorder-to-html"
echo "test -d templates/ || ln -sfT '$out/lib/node_modules/matrix-recorder/templates' templates" >> "$out/bin/matrix-recorder-to-html"
echo "'${pkgs.nodejs_14}/bin/node'" \
"'$out/lib/node_modules/matrix-recorder/recorder-to-html.js'" \
'.' >> "$out/bin/matrix-recorder-to-html"
chmod a+x "$out/bin/matrix-recorder"
chmod a+x "$out/bin/matrix-recorder-to-html"
'';
meta = {
description = "Matrix message recorder";
homepage = "https://gitlab.com/argit/matrix-recorder/";
license = lib.licenses.mit;
maintainers = [ lib.maintainers.raskin ];
};
}

View File

@ -1,540 +0,0 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, util-linux, libtool, runCommand, writeTextFile}:
let
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
includeDependencies = {dependencies}:
lib.optionalString (dependencies != [])
(lib.concatMapStrings (dependency:
''
# Bundle the dependencies of the package
mkdir -p node_modules
cd node_modules
# Only include dependencies if they don't exist. They may also be bundled in the package.
if [ ! -e "${dependency.name}" ]
then
${composePackage dependency}
fi
cd ..
''
) dependencies);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
''
DIR=$(pwd)
cd $TMPDIR
unpackFile ${src}
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/${packageName}")"
if [ -f "${src}" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/${packageName}"
elif [ -d "${src}" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash ${src})"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/${packageName}"
fi
# Unset the stripped name to not confuse the next unpack step
unset strippedName
# Include the dependencies of the package
cd "$DIR/${packageName}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
replaceDependencies(packageObj.optionalDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(packageLock.lockfileVersion !== 1) {
process.stderr.write("Sorry, I only understand lock file version 1!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 1,
requires: true,
dependencies: {}
};
function augmentPackageJSON(filePath, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, dependencies);
}
});
} else {
augmentPackageJSON(filePath, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
if [ "$dontNpmInstall" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
fi
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation ({
name = "node_${name}-${version}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) util-linux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
} // extraArgs);
# Builds a development shell
buildNodeShell =
{ name
, packageName
, version
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
nodeDependencies = stdenv.mkDerivation ({
name = "node-dependencies-${name}-${version}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) util-linux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
in
stdenv.mkDerivation {
name = "node-shell-${name}-${version}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) util-linux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=$nodeDependencies/lib/node_modules
export PATH="$nodeDependencies/bin:$PATH"
'';
};
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

View File

@ -1,27 +0,0 @@
{
"name": "matrix-recorder",
"version": "0.0.6",
"description": "A recorder that can record Matrix rooms you are a member of (including E2E-encrypted rooms).",
"author": "Hello Matrix <hello@hello-matrix.net>",
"main": "matrix-recorder.js",
"scripts": {
"start": "node matrix-recorder.js"
},
"repository": {
"type": "git",
"url": "https://gitlab.com/argit/matrix-recorder.git"
},
"dependencies": {
"marked": "^0.6.2",
"matrix-js-sdk": "^0.7.13",
"mime-types": "^2.1.14",
"mustache": "^2.3.0",
"node-fetch": "^1.6.3",
"node-localstorage": "^1.3.0",
"sqlite3": "^4.0.7",
"olm": "https://packages.matrix.org/npm/olm/olm-2.3.0.tgz"
},
"license": "MIT",
"optionalDependencies": {
}
}

View File

@ -14,17 +14,17 @@
let
pname = "mattermost-desktop";
version = "5.1.0";
version = "5.3.1";
srcs = {
"x86_64-linux" = {
url = "https://releases.mattermost.com/desktop/${version}/${pname}-${version}-linux-x64.tar.gz";
hash = "sha256-KmtQUqg2ODbZ6zJjsnwlvB+vhR1xbK2X9qqmZpyTR78=";
hash = "sha256-rw+SYCFmN2W4t5iIWEpV9VHxcvwTLOckMV58WRa5dZE=";
};
"i686-linux" = {
url = "https://releases.mattermost.com/desktop/${version}/${pname}-${version}-linux-ia32.tar.gz";
hash = "sha256-X8Zrthw1hZOqmcYidt72l2vonh31iiA3EDGmCQr7e4c=";
"aarch64-linux" = {
url = "https://releases.mattermost.com/desktop/${version}/${pname}-${version}-linux-arm64.tar.gz";
hash = "sha256-FEIldkb3FbUfVAYRkjs7oPRJDHdsIGDW5iaC2Qz1dpc=";
};
};
@ -86,7 +86,7 @@ stdenv.mkDerivation {
homepage = "https://about.mattermost.com/";
sourceProvenance = with sourceTypes; [ binaryNativeCode ];
license = licenses.asl20;
platforms = [ "x86_64-linux" "i686-linux" ];
platforms = [ "x86_64-linux" "aarch64-linux" ];
maintainers = [ maintainers.joko ];
};
}

View File

@ -2,13 +2,13 @@
(if stdenv.isDarwin then darwin.apple_sdk_11_0.clang14Stdenv else stdenv).mkDerivation rec {
pname = "signalbackup-tools";
version = "20230510";
version = "20230518";
src = fetchFromGitHub {
owner = "bepaald";
repo = pname;
rev = version;
hash = "sha256-EsFF9fPpHfVmbLm2hRpcJBmwfovfK4CV3LukrG9nP3U=";
hash = "sha256-wtCCQtYYYR+aFpNLS/pABEyYrTEW0W0Fh4kDClJn0dg=";
};
postPatch = ''

View File

@ -17,13 +17,13 @@
stdenv.mkDerivation rec {
pname = "teams-for-linux";
version = "1.0.92";
version = "1.0.93";
src = fetchFromGitHub {
owner = "IsmaelMartinez";
repo = pname;
rev = "v${version}";
sha256 = "sha256-wRgXb0yzrpRlZkZ6RHMU2wdR11lwR5n6tTUbCEURvDQ=";
sha256 = "sha256-mWLjGednrKnEIvrL2iHQP3xoCb6SxptzbE40aJ5wH1U=";
};
offlineCache = fetchYarnDeps {

View File

@ -23,13 +23,13 @@ let
pname = "wire-desktop";
version = {
x86_64-darwin = "3.30.4506";
x86_64-linux = "3.30.3018";
x86_64-darwin = "3.31.4556";
x86_64-linux = "3.31.3060";
}.${system} or throwSystem;
hash = {
x86_64-darwin = "sha256-+htDeNIuucB4qzNBNYoSUH1DbfgouZS08G5hxPtIuzY=";
x86_64-linux = "sha256-46WjFA+E9M7RfTOM/Xoho+9ooToSgQiZaMlcZ3lJvBQ=";
x86_64-darwin = "sha256-qRRdt/TvSvQ3RiO/I36HT+C88+ev3gFcj+JaEG38BfU=";
x86_64-linux = "sha256-9LdTsBOE1IJH0OM+Ag7GJADsFRgYMjbPXBH6roY7Msg=";
}.${system} or throwSystem;
meta = with lib; {

View File

@ -7,14 +7,14 @@
python3.pkgs.buildPythonApplication rec {
pname = "maestral-qt";
version = "1.7.1";
version = "1.7.2";
disabled = python3.pythonOlder "3.7";
src = fetchFromGitHub {
owner = "SamSchott";
repo = "maestral-qt";
rev = "refs/tags/v${version}";
hash = "sha256-YYlH9s3iNEIacs8izEnIU32j+2lruQ5JJrjvDIzQjRE=";
hash = "sha256-o2KuqKiy+8fbzogR3ATPVkK60JAKYXQ9dogU/g6CS/M=";
};
format = "pyproject";

View File

@ -6,14 +6,14 @@
}:
stdenv.mkDerivation rec {
version = "20230512";
version = "20230517";
pname = "neomutt";
src = fetchFromGitHub {
owner = "neomutt";
repo = "neomutt";
rev = version;
sha256 = "sha256-/NeY9WrPXg6sSM1jnjgQKL7vSn8dTrAnvj229KcEEro=";
sha256 = "sha256-1i0STaJulJP0LWdNfLLIEKVapfkcguYRnbc+psWlVE4=";
};
patches = [

View File

@ -13,7 +13,7 @@
cd "$(dirname $(readlink -f $0))"
node2nix \
--nodejs-16 \
--nodejs-18 \
--strip-optional-dependencies \
--node-env node-env.nix \
--input package.json \

View File

@ -2,7 +2,7 @@
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_16"}:
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_18"}:
let
nodeEnv = import ./node-env.nix {

View File

@ -85,40 +85,40 @@ let
sha512 = "hzrjPNQcJoSPe0oS20V5i98oiEZSM3mKNiR6P3xHTHTPI/F23lyjGZ+/CSkCmJbSWfGZ5sHZZcU6AWuS7xBdTw==";
};
};
"@aws-sdk/client-cognito-identity-3.332.0" = {
"@aws-sdk/client-cognito-identity-3.335.0" = {
name = "_at_aws-sdk_slash_client-cognito-identity";
packageName = "@aws-sdk/client-cognito-identity";
version = "3.332.0";
version = "3.335.0";
src = fetchurl {
url = "https://registry.npmjs.org/@aws-sdk/client-cognito-identity/-/client-cognito-identity-3.332.0.tgz";
sha512 = "o2G3+w0Qm+jd5fnmG6+FF5KRu90PIv2Kd0mmMJIFmACVd+VtuWqsk85capX21YLcxizKe+okqaaD8/9vV7nvfw==";
url = "https://registry.npmjs.org/@aws-sdk/client-cognito-identity/-/client-cognito-identity-3.335.0.tgz";
sha512 = "ghsAzb1K/CR70tQgJHiDzqy39az1zVmCW0AFwnUWIaK1sY+1pSQZ0Ey9BkywmzRIcBfbBxftexDdDX5nHg7oMA==";
};
};
"@aws-sdk/client-sso-3.332.0" = {
"@aws-sdk/client-sso-3.335.0" = {
name = "_at_aws-sdk_slash_client-sso";
packageName = "@aws-sdk/client-sso";
version = "3.332.0";
version = "3.335.0";
src = fetchurl {
url = "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.332.0.tgz";
sha512 = "4q1Nko8M6YVANdEiLYvdv1qb00j4xN4ppE/6d4xpGp7DxHYlm0GA762h0/TR2dun+2I+SMnwj4Fv6BxOmzBaEw==";
url = "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.335.0.tgz";
sha512 = "tMvOq366QeMzcrRTDhMwuCFirntANX25qi4U32NDl//ny/7V6+7WK8Hf8lRAHvWnY9eT4RdNklXESo2yxlPyUg==";
};
};
"@aws-sdk/client-sso-oidc-3.332.0" = {
"@aws-sdk/client-sso-oidc-3.335.0" = {
name = "_at_aws-sdk_slash_client-sso-oidc";
packageName = "@aws-sdk/client-sso-oidc";
version = "3.332.0";
version = "3.335.0";
src = fetchurl {
url = "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.332.0.tgz";
sha512 = "tz8k8Yqm4TScIfit0Tum2zWAq1md+gZKr747CSixd4Zwcp7Vwh75cRoL7Rz1ZHSEn1Yo983MWREevVez3SubLw==";
url = "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.335.0.tgz";
sha512 = "szaMq6tDznGy4EuidxPqhZKqEnfGJfoPWUpoFlhXsgZXinZY/vJlJ4G5l6nikhnS3omq3C3WPGJXMKF1ejVXKg==";
};
};
"@aws-sdk/client-sts-3.332.0" = {
"@aws-sdk/client-sts-3.335.0" = {
name = "_at_aws-sdk_slash_client-sts";
packageName = "@aws-sdk/client-sts";
version = "3.332.0";
version = "3.335.0";
src = fetchurl {
url = "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.332.0.tgz";
sha512 = "uVobnXIzMcEhwBDyk6iOt36N/TRNI8hwq7MQugjYGj7Inma9g4vnR09hXJ24HxyKCoVUoIgMbEguQ43+/+uvDQ==";
url = "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.335.0.tgz";
sha512 = "W+LW1b/3auvGg3EmFeJiraMyH/nxX7qIEBEPPWlJKphGSJAt0l08o8glL2O8s+o2oYWCB2DmgdWyOt1D6YRldQ==";
};
};
"@aws-sdk/config-resolver-3.329.0" = {
@ -130,13 +130,13 @@ let
sha512 = "Oj6eiT3q+Jn685yvUrfRi8PhB3fb81hasJqdrsEivA8IP8qAgnVUTJzXsh8O2UX8UM2MF6A1gTgToSgneJuw2Q==";
};
};
"@aws-sdk/credential-provider-cognito-identity-3.332.0" = {
"@aws-sdk/credential-provider-cognito-identity-3.335.0" = {
name = "_at_aws-sdk_slash_credential-provider-cognito-identity";
packageName = "@aws-sdk/credential-provider-cognito-identity";
version = "3.332.0";
version = "3.335.0";
src = fetchurl {
url = "https://registry.npmjs.org/@aws-sdk/credential-provider-cognito-identity/-/credential-provider-cognito-identity-3.332.0.tgz";
sha512 = "FJI936QVSFd49PWOgTlW7e8rKO/6Y8sMnkvTJ/APQ1K8em+jWkaAMFBl15NrpOo/jlZCzhkkQDatDHAlbSUXGw==";
url = "https://registry.npmjs.org/@aws-sdk/credential-provider-cognito-identity/-/credential-provider-cognito-identity-3.335.0.tgz";
sha512 = "WMR9buxEbEMcghVITk/buVm1ev4rrlUCY8MR9Gg0QI6hUdDUSP6QfWz2Hn++Tfe96v6maHFANvkRLk9NNZQBeg==";
};
};
"@aws-sdk/credential-provider-env-3.329.0" = {
@ -157,22 +157,22 @@ let
sha512 = "ggPlnd7QROPTid0CwT01TYYGvstRRTpzTGsQ/B31wkh30IrRXE81W3S4xrOYuqQD3u0RnflSxnvhs+EayJEYjg==";
};
};
"@aws-sdk/credential-provider-ini-3.332.0" = {
"@aws-sdk/credential-provider-ini-3.335.0" = {
name = "_at_aws-sdk_slash_credential-provider-ini";
packageName = "@aws-sdk/credential-provider-ini";
version = "3.332.0";
version = "3.335.0";
src = fetchurl {
url = "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.332.0.tgz";
sha512 = "DTW6d6rcqizPVyvcIrwvxecQ7e5GONtVc5Wyf0RTfqf41sDOVZYmn6G+zEFSpBLW0975uZbJS0lyLWtJe2VujQ==";
url = "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.335.0.tgz";
sha512 = "3AsKlpAnddLYGEZkfT8ZsAB+1WySSzbLA2eoJTW80nKWVUnvYV6gq/sNXEY43i7T2rOXmblJHbTuMAWA1ruMFg==";
};
};
"@aws-sdk/credential-provider-node-3.332.0" = {
"@aws-sdk/credential-provider-node-3.335.0" = {
name = "_at_aws-sdk_slash_credential-provider-node";
packageName = "@aws-sdk/credential-provider-node";
version = "3.332.0";
version = "3.335.0";
src = fetchurl {
url = "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.332.0.tgz";
sha512 = "KkBayS9k4WyJTvC86ngeRM+RmWxNCS1BHvudkR6PLXfnsNPDzxySDVY0UgxVhbNYDYsO561fXZt9ccpKyVWjgg==";
url = "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.335.0.tgz";
sha512 = "aIelF8GBTbXuVntpeEdnbcajYtkO01OfSmXb08JxvtQ0tPCWY6SbLpNHUAIfBW1OVkm5E7SX+Hc1tawxq9IKAA==";
};
};
"@aws-sdk/credential-provider-process-3.329.0" = {
@ -184,13 +184,13 @@ let
sha512 = "5oO220qoFc2pMdZDQa6XN/mVhp669I3+LqMbbscGtX/UgLJPSOb7YzPld9Wjv12L5rf+sD3G1PF3LZXO0vKLFA==";
};
};
"@aws-sdk/credential-provider-sso-3.332.0" = {
"@aws-sdk/credential-provider-sso-3.335.0" = {
name = "_at_aws-sdk_slash_credential-provider-sso";
packageName = "@aws-sdk/credential-provider-sso";
version = "3.332.0";
version = "3.335.0";
src = fetchurl {
url = "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.332.0.tgz";
sha512 = "SaKXl48af3n6LRitcaEqbeg1YDXwQ0A5QziC1xQyYPraEIj3IZ/GyTjx04Lo2jxNYHuEOE8u4aTw1+IK1GDKbg==";
url = "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.335.0.tgz";
sha512 = "omEF3m9Vy18QfuGuGx/48MaiKDOdvMZKZI9FKyQxFIwfqRyhmF2jzQ7070FD/E9YakscOZ0hSeYEPJ7nkJa8ww==";
};
};
"@aws-sdk/credential-provider-web-identity-3.329.0" = {
@ -202,13 +202,13 @@ let
sha512 = "lcEibZD7AlutCacpQ6DyNUqElZJDq+ylaIo5a8MH9jGh7Pg2WpDg0Sy+B6FbGCkVn4eIjdHxeX54JM245nhESg==";
};
};
"@aws-sdk/credential-providers-3.332.0" = {
"@aws-sdk/credential-providers-3.335.0" = {
name = "_at_aws-sdk_slash_credential-providers";
packageName = "@aws-sdk/credential-providers";
version = "3.332.0";
version = "3.335.0";
src = fetchurl {
url = "https://registry.npmjs.org/@aws-sdk/credential-providers/-/credential-providers-3.332.0.tgz";
sha512 = "UZM8hCJqBBI4yEopVnfQ7HgUCuiYuWJziPFovQpbwvZKadibzo332/n6e5IsQbJxPjymqFLgTn3PQds/+1FOlQ==";
url = "https://registry.npmjs.org/@aws-sdk/credential-providers/-/credential-providers-3.335.0.tgz";
sha512 = "KWZL+B+6BDj1PfP7+Bb3/A6yFWxYtjYR7vi2UgD6QrmB09iUQtheiwObZY3f30OAq10O03gOmhxC2N1o6+i0sQ==";
};
};
"@aws-sdk/fetch-http-handler-3.329.0" = {
@ -436,13 +436,13 @@ let
sha512 = "7E0fGpBKxwFqHHAOqNbgNsHSEmCZLuvmU9yvG9DXKVzrS4P48O/PfOro123WpcFZs3STyOVgH8wjUPftHAVKmg==";
};
};
"@aws-sdk/token-providers-3.332.0" = {
"@aws-sdk/token-providers-3.335.0" = {
name = "_at_aws-sdk_slash_token-providers";
packageName = "@aws-sdk/token-providers";
version = "3.332.0";
version = "3.335.0";
src = fetchurl {
url = "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.332.0.tgz";
sha512 = "fccbg6OSl0l658pxl2p1MoU9gEePo5B361+JNaN0zfRMu7c5HBXCpdl4djlFxAHjltrX9f1+BKqfGHYgI3h8SQ==";
url = "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.335.0.tgz";
sha512 = "2Hu62xH4/6V+N5JWsPuvxCCmaf/QUnxtz48ClpxzBKM/whrTTkLku8W2fh2MmnzGzAHtT+N97jkIsy2B+onqIg==";
};
};
"@aws-sdk/types-3.329.0" = {
@ -859,13 +859,13 @@ let
sha512 = "ztJ+5lk0yWf4E7sQQqsidPYJa0a/511Ln/IaI3A+fGv6z0SrGDG0Lu6SAehczcehrhgNwMhPlerJMeXw7vZs2g==";
};
};
"@codemirror/language-6.6.0" = {
"@codemirror/language-6.7.0" = {
name = "_at_codemirror_slash_language";
packageName = "@codemirror/language";
version = "6.6.0";
version = "6.7.0";
src = fetchurl {
url = "https://registry.npmjs.org/@codemirror/language/-/language-6.6.0.tgz";
sha512 = "cwUd6lzt3MfNYOobdjf14ZkLbJcnv4WtndYaoBkbor/vF+rCNguMPK0IRtvZJG4dsWiaWPcK8x1VijhvSxnstg==";
url = "https://registry.npmjs.org/@codemirror/language/-/language-6.7.0.tgz";
sha512 = "4SMwe6Fwn57klCUsVN0y4/h/iWT+XIXFEmop2lIHHuWO0ubjCrF3suqSZLyOQlznxkNnNbOOfKe5HQbQGCAmTg==";
};
};
"@codemirror/lint-6.2.1" = {
@ -886,13 +886,13 @@ let
sha512 = "69QXtcrsc3RYtOtd+GsvczJ319udtBf1PTrr2KbLWM/e2CXUPnh0Nz9AUo8WfhSQ7GeL8dPVNUmhQVgpmuaNGA==";
};
};
"@codemirror/view-6.11.3" = {
"@codemirror/view-6.12.0" = {
name = "_at_codemirror_slash_view";
packageName = "@codemirror/view";
version = "6.11.3";
version = "6.12.0";
src = fetchurl {
url = "https://registry.npmjs.org/@codemirror/view/-/view-6.11.3.tgz";
sha512 = "JInirTUhmwDOEZZHcsx4/wfnBgJk0q3vnDZh1i2k7W+t1SqMugBCO/+J5zgfjJ5rXYFjnpBG9Dkz/ZMSn4bNzg==";
url = "https://registry.npmjs.org/@codemirror/view/-/view-6.12.0.tgz";
sha512 = "xNHvbJBc2v8JuEcIGOck6EUGShpP+TYGCEMVEVQMYxbFXfMhYnoF3znxB/2GgeKR0nrxBs+nhBupiTYQqCp2kw==";
};
};
"@colors/colors-1.5.0" = {
@ -1417,13 +1417,13 @@ let
sha512 = "1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==";
};
};
"@oclif/command-1.8.25" = {
"@oclif/command-1.8.26" = {
name = "_at_oclif_slash_command";
packageName = "@oclif/command";
version = "1.8.25";
version = "1.8.26";
src = fetchurl {
url = "https://registry.npmjs.org/@oclif/command/-/command-1.8.25.tgz";
sha512 = "teCfKH6GNF46fiCn/P5EMHX93RE3KJAW4i0sq3X9phrzs6807WRauhythdc8OKINxd+LpqwQ1i5bnaCKvLZRcQ==";
url = "https://registry.npmjs.org/@oclif/command/-/command-1.8.26.tgz";
sha512 = "IT9kOLFRMc3s6KJ1FymsNjbHShI211eVgAg+JMiDVl8LXwOJxYe8ybesgL1kpV9IUFByOBwZKNG2mmrVeNBHPg==";
};
};
"@oclif/config-1.18.6" = {
@ -1435,13 +1435,13 @@ let
sha512 = "OWhCpdu4QqggOPX1YPZ4XVmLLRX+lhGjXV6RNA7sogOwLqlEmSslnN/lhR5dkhcWZbKWBQH29YCrB3LDPRu/IA==";
};
};
"@oclif/config-1.18.8" = {
"@oclif/config-1.18.9" = {
name = "_at_oclif_slash_config";
packageName = "@oclif/config";
version = "1.18.8";
version = "1.18.9";
src = fetchurl {
url = "https://registry.npmjs.org/@oclif/config/-/config-1.18.8.tgz";
sha512 = "FetS52+emaZQui0roFSdbBP8ddBkIezEoH2NcjLJRjqkMGdE9Z1V+jsISVqTYXk2KJ1gAI0CHDXFjJlNBYbJBg==";
url = "https://registry.npmjs.org/@oclif/config/-/config-1.18.9.tgz";
sha512 = "CGABvY60IbzK3kecDekCQS4T7fvpraBHV3nvYDtehrqljbMxtTeeJkFJVLbBnZnwzD2u1ApQX/Zggja3lyCoJA==";
};
};
"@oclif/core-1.26.2" = {
@ -1480,13 +1480,13 @@ let
sha512 = "Ups2dShK52xXa8w6iBWLgcjPJWjais6KPJQq3gQ/88AY6BXoTX+MIGFPrWQO1KLMiQfoTpcLnUwloN4brrVUHw==";
};
};
"@oclif/parser-3.8.10" = {
"@oclif/parser-3.8.11" = {
name = "_at_oclif_slash_parser";
packageName = "@oclif/parser";
version = "3.8.10";
version = "3.8.11";
src = fetchurl {
url = "https://registry.npmjs.org/@oclif/parser/-/parser-3.8.10.tgz";
sha512 = "J4l/NcnfbIU84+NNdy6bxq9yJt4joFWNvpk59hq+uaQPUNtjmNJDVGuRvf6GUOxHNgRsVK1JRmd/Ez+v7Z9GqQ==";
url = "https://registry.npmjs.org/@oclif/parser/-/parser-3.8.11.tgz";
sha512 = "B3NweRn1yZw2g7xaF10Zh/zwlqTJJINfU+CRkqll+LaTisSNvZbW0RR9WGan26EqqLp4qzNjzX/e90Ew8l9NLw==";
};
};
"@oclif/screen-3.0.4" = {
@ -1750,6 +1750,24 @@ let
sha512 = "sBSO19KzdrJCM3gdx6eIxV8M9Gxfgg6iDQmH5TIAGaUu+X9VDdsINXJOnoiZ1Kx3TrHdH4bt5UVglkjsEGBcvw==";
};
};
"@smithy/protocol-http-1.0.1" = {
name = "_at_smithy_slash_protocol-http";
packageName = "@smithy/protocol-http";
version = "1.0.1";
src = fetchurl {
url = "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-1.0.1.tgz";
sha512 = "9OrEn0WfOVtBNYJUjUAn9AOiJ4lzERCJJ/JeZs8E6yajTGxBaFRxUnNBHiNqoDJVg076hY36UmEnPx7xXrvUSg==";
};
};
"@smithy/types-1.0.0" = {
name = "_at_smithy_slash_types";
packageName = "@smithy/types";
version = "1.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/@smithy/types/-/types-1.0.0.tgz";
sha512 = "kc1m5wPBHQCTixwuaOh9vnak/iJm21DrSf9UK6yDE5S3mQQ4u11pqAUiKWnlrZnYkeLfAI9UEHj9OaMT1v5Umg==";
};
};
"@sqltools/formatter-1.2.5" = {
name = "_at_sqltools_slash_formatter";
packageName = "@sqltools/formatter";
@ -1759,13 +1777,13 @@ let
sha512 = "Uy0+khmZqUrUGm5dmMqVlnvufZRSK0FbYzVgp0UMstm+F5+W2/jnEEQyc9vo1ZR/E5ZI/B1WjjoTqBqwJL6Krw==";
};
};
"@swc/core-1.3.58" = {
"@swc/core-1.3.59" = {
name = "_at_swc_slash_core";
packageName = "@swc/core";
version = "1.3.58";
version = "1.3.59";
src = fetchurl {
url = "https://registry.npmjs.org/@swc/core/-/core-1.3.58.tgz";
sha512 = "tSDcHXMBQIo2ohQ/0ryZnUA+0mBrVhe49+cR+QsFru+XEhCok1BLqdE6cZ2a+sgZ1I+Dmw8aTxYm8Ox64PSKPQ==";
url = "https://registry.npmjs.org/@swc/core/-/core-1.3.59.tgz";
sha512 = "ZBw31zd2E5SXiodwGvjQdx5ZC90b2uyX/i2LeMMs8LKfXD86pfOfQac+JVrnyEKDhASXj9icgsF9NXBhaMr3Kw==";
};
};
"@swc/helpers-0.5.1" = {
@ -1777,13 +1795,13 @@ let
sha512 = "sJ902EfIzn1Fa+qYmjdQqh8tPsoxyBz+8yBKC2HKUxyezKJFwPGOn7pv4WY6QuQW//ySQi5lJjA/ZT9sNWWNTg==";
};
};
"@swc/wasm-1.3.58" = {
"@swc/wasm-1.3.59" = {
name = "_at_swc_slash_wasm";
packageName = "@swc/wasm";
version = "1.3.58";
version = "1.3.59";
src = fetchurl {
url = "https://registry.npmjs.org/@swc/wasm/-/wasm-1.3.58.tgz";
sha512 = "u85cAPJuLn7RBd2JllVpQtF3ngeEeg3oNWzlPD76nw13Z1EZflWBbFCXE5JYfB78NfboEJgMGQF4pYou/7yIdA==";
url = "https://registry.npmjs.org/@swc/wasm/-/wasm-1.3.59.tgz";
sha512 = "HMC6y2rqtomrspvHwEZZuQb8kzw1GZSmaZ8fbbjSRvvvtOHLbIetuFWGVJ6dgthkt10YII21AMZcvxvrTC6H/Q==";
};
};
"@techteamer/ocsp-1.0.0" = {
@ -2002,13 +2020,13 @@ let
sha512 = "/SNsDidUFCvqqcWDwxv2feww/yqhNeTRL5CVoL3jU4Goc4kKEL10T7Eye65ZqPNi4HRx8sAEX59pV1aEH7drNA==";
};
};
"@types/node-20.1.7" = {
"@types/node-20.2.1" = {
name = "_at_types_slash_node";
packageName = "@types/node";
version = "20.1.7";
version = "20.2.1";
src = fetchurl {
url = "https://registry.npmjs.org/@types/node/-/node-20.1.7.tgz";
sha512 = "WCuw/o4GSwDGMoonES8rcvwsig77dGCMbZDrZr2x4ZZiNW4P/gcoZXe/0twgtobcTkmg9TuKflxYL/DuwDyJzg==";
url = "https://registry.npmjs.org/@types/node/-/node-20.2.1.tgz";
sha512 = "DqJociPbZP1lbZ5SQPk4oag6W7AyaGMO6gSfRwq3PWl4PXTwJpRQJhDq4W0kzrg3w6tJ1SwlvGZ5uKFHY13LIg==";
};
};
"@types/node-fetch-2.6.4" = {
@ -2695,13 +2713,13 @@ let
sha512 = "p/ImGq8duQ4sOqRzB5dJ81T2jto2LcfxEgSHuSHH9+pzOyHdkoMLAgMtVfX9bt9m4i0EP/20jLM2ydID8G7Umw==";
};
};
"aws-sdk-2.1379.0" = {
"aws-sdk-2.1381.0" = {
name = "aws-sdk";
packageName = "aws-sdk";
version = "2.1379.0";
version = "2.1381.0";
src = fetchurl {
url = "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1379.0.tgz";
sha512 = "kziOtAtJxdgYJwhzY+uhNi/AGPrDEMHd0dEz46YR1AB5bVqjS9/SjOZHemB88QfpW11IVB/FoiIusXlGEvgq9Q==";
url = "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1381.0.tgz";
sha512 = "4dzE/zZZCG/MIYntKs61F0h0zJa3S5aXX90B2ZnNOf1zvPEJ/gMpv7u3C3FV7UMPyfg00JWAu3g9i+G2j8Wujw==";
};
};
"aws-sign2-0.7.0" = {
@ -2749,13 +2767,13 @@ let
sha512 = "t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==";
};
};
"axios-retry-3.4.0" = {
"axios-retry-3.5.0" = {
name = "axios-retry";
packageName = "axios-retry";
version = "3.4.0";
version = "3.5.0";
src = fetchurl {
url = "https://registry.npmjs.org/axios-retry/-/axios-retry-3.4.0.tgz";
sha512 = "VdgaP+gHH4iQYCCNUWF2pcqeciVOdGrBBAYUfTY+wPcO5Ltvp/37MLFNCmJKo7Gj3SHvCSdL8ouI1qLYJN3liA==";
url = "https://registry.npmjs.org/axios-retry/-/axios-retry-3.5.0.tgz";
sha512 = "g48qNrLX30VU6ECWltpFCPegKK6dWzMDYv2o83W2zUL/Zh/SLXbT6ksGoKqYZHtghzqeeXhZBcSXJkO1fPbCcw==";
};
};
"babel-helper-vue-jsx-merge-props-2.0.3" = {
@ -2830,13 +2848,13 @@ let
sha512 = "rQdKZHTWok2uC3wHyGwoV6mOxhnOyp07iHhyWQlS+U5zkYyhOEOT6Ri4Q0vPThTqCYs6RCbtAfTbPG+lUZkocw==";
};
};
"better-sqlite3-8.3.0" = {
"better-sqlite3-8.4.0" = {
name = "better-sqlite3";
packageName = "better-sqlite3";
version = "8.3.0";
version = "8.4.0";
src = fetchurl {
url = "https://registry.npmjs.org/better-sqlite3/-/better-sqlite3-8.3.0.tgz";
sha512 = "JTmvBZL/JLTc+3Msbvq6gK6elbU9/wVMqiudplHrVJpr7sVMR9KJrNhZAbW+RhXKlpMcuEhYkdcHa3TXKNXQ1w==";
url = "https://registry.npmjs.org/better-sqlite3/-/better-sqlite3-8.4.0.tgz";
sha512 = "NmsNW1CQvqMszu/CFAJ3pLct6NEFlNfuGM6vw72KHkjOD1UDnL96XNN1BMQc1hiHo8vE2GbOWQYIpZ+YM5wrZw==";
};
};
"big-integer-1.6.51" = {
@ -5602,13 +5620,13 @@ let
sha512 = "SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==";
};
};
"glob-10.2.4" = {
"glob-10.2.6" = {
name = "glob";
packageName = "glob";
version = "10.2.4";
version = "10.2.6";
src = fetchurl {
url = "https://registry.npmjs.org/glob/-/glob-10.2.4.tgz";
sha512 = "fDboBse/sl1oXSLhIp0FcCJgzW9KmhC/q8ULTKC82zc+DL3TL7FNb8qlt5qqXN53MsKEUSIcb+7DLmEygOE5Yw==";
url = "https://registry.npmjs.org/glob/-/glob-10.2.6.tgz";
sha512 = "U/rnDpXJGF414QQQZv5uVsabTVxMSwzS5CH0p3DRCIV6ownl4f7PzGnkGmvlum2wB+9RlJWJZ6ACU1INnBqiPA==";
};
};
"glob-7.2.3" = {
@ -6790,13 +6808,13 @@ let
sha512 = "Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==";
};
};
"jackspeak-2.2.0" = {
"jackspeak-2.2.1" = {
name = "jackspeak";
packageName = "jackspeak";
version = "2.2.0";
version = "2.2.1";
src = fetchurl {
url = "https://registry.npmjs.org/jackspeak/-/jackspeak-2.2.0.tgz";
sha512 = "r5XBrqIJfwRIjRt/Xr5fv9Wh09qyhHfKnYddDlpM+ibRR20qrYActpCAgU6U+d53EOEjzkvxPMVHSlgR7leXrQ==";
url = "https://registry.npmjs.org/jackspeak/-/jackspeak-2.2.1.tgz";
sha512 = "MXbxovZ/Pm42f6cDIDkl3xpwv1AGwObKwfmjs2nQePiy85tP3fatofl3FC1aBsOtP/6fq5SbtgHwWcMsLP+bDw==";
};
};
"jake-10.8.6" = {
@ -8221,13 +8239,13 @@ let
sha512 = "lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==";
};
};
"minimatch-9.0.0" = {
"minimatch-9.0.1" = {
name = "minimatch";
packageName = "minimatch";
version = "9.0.0";
version = "9.0.1";
src = fetchurl {
url = "https://registry.npmjs.org/minimatch/-/minimatch-9.0.0.tgz";
sha512 = "0jJj8AvgKqWN05mrwuqi8QYKx1WmYSUoKSxu5Qhs9prezTz10sxAHGNZe9J9cqIJzta8DWsleh2KaVaLl6Ru2w==";
url = "https://registry.npmjs.org/minimatch/-/minimatch-9.0.1.tgz";
sha512 = "0jWhJpD/MdhPXwPuiRkCbfYfSKp2qnn2eOc279qI7f+osl/l+prKSrvhg157zSYvx/1nmgn2NqdT6k2Z7zSH9w==";
};
};
"minimist-1.2.8" = {
@ -8266,13 +8284,13 @@ let
sha512 = "3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==";
};
};
"minipass-6.0.1" = {
"minipass-6.0.2" = {
name = "minipass";
packageName = "minipass";
version = "6.0.1";
version = "6.0.2";
src = fetchurl {
url = "https://registry.npmjs.org/minipass/-/minipass-6.0.1.tgz";
sha512 = "Tenl5QPpgozlOGBiveNYHg2f6y+VpxsXRoIHFUVJuSmTonXRAE6q9b8Mp/O46762/2AlW4ye4Nkyvx0fgWDKbw==";
url = "https://registry.npmjs.org/minipass/-/minipass-6.0.2.tgz";
sha512 = "MzWSV5nYVT7mVyWCwn2o7JH13w2TBRmmSqSRCKzTw+lmft9X4z+3wjvs06Tzijo5z4W/kahUCDpRXTF+ZrmF/w==";
};
};
"minipass-collect-1.0.2" = {
@ -9499,13 +9517,13 @@ let
sha512 = "LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==";
};
};
"path-scurry-1.9.1" = {
"path-scurry-1.9.2" = {
name = "path-scurry";
packageName = "path-scurry";
version = "1.9.1";
version = "1.9.2";
src = fetchurl {
url = "https://registry.npmjs.org/path-scurry/-/path-scurry-1.9.1.tgz";
sha512 = "UgmoiySyjFxP6tscZDgWGEAgsW5ok8W3F5CJDnnH2pozwSTGE6eH7vwTotMwATWA2r5xqdkKdxYPkwlJjAI/3g==";
url = "https://registry.npmjs.org/path-scurry/-/path-scurry-1.9.2.tgz";
sha512 = "qSDLy2aGFPm8i4rsbHd4MNyTcrzHFsLQykrtbuGRknZZCBBVXSv2tSCDN2Cg6Rt/GFRw8GoW9y9Ecw5rIPG1sg==";
};
};
"path-to-regexp-0.1.7" = {
@ -9733,13 +9751,13 @@ let
sha512 = "JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==";
};
};
"pinia-2.1.1" = {
"pinia-2.1.3" = {
name = "pinia";
packageName = "pinia";
version = "2.1.1";
version = "2.1.3";
src = fetchurl {
url = "https://registry.npmjs.org/pinia/-/pinia-2.1.1.tgz";
sha512 = "Y2CgpcUtD8Ogdvo5LW5g20ykSZgnVDMgTSZFr40EvO6HB8axQk+0lHa1UrRah9wworFaxjovwRlY/wRICWj/KQ==";
url = "https://registry.npmjs.org/pinia/-/pinia-2.1.3.tgz";
sha512 = "XNA/z/ye4P5rU1pieVmh0g/hSuDO98/a5UC8oSP0DNdvt6YtetJNHTrXwpwsQuflkGT34qKxAEcp7lSxXNjf/A==";
};
};
"popsicle-12.1.0" = {
@ -12046,13 +12064,13 @@ let
sha512 = "Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==";
};
};
"tslib-2.5.0" = {
"tslib-2.5.2" = {
name = "tslib";
packageName = "tslib";
version = "2.5.0";
version = "2.5.2";
src = fetchurl {
url = "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz";
sha512 = "336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==";
url = "https://registry.npmjs.org/tslib/-/tslib-2.5.2.tgz";
sha512 = "5svOrSA2w3iGFDs1HibEVBGbDrAY82bFQ3HZ3ixB+88nsbsWQoKqDRb5UBYAUPEzbBn6dAp5gRNXglySbx1MlA==";
};
};
"tsscmp-1.0.6" = {
@ -12577,13 +12595,13 @@ let
sha512 = "BoLCEHisXi2QgwlhZBg9UepvzZZmi4176vbr+31Shen5WWZwSLVgdScEPcB+yrAtuHAz42309C0A4+WiL9lNBw==";
};
};
"vue-demi-0.14.3" = {
"vue-demi-0.14.5" = {
name = "vue-demi";
packageName = "vue-demi";
version = "0.14.3";
version = "0.14.5";
src = fetchurl {
url = "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.3.tgz";
sha512 = "aknytzARm7U20nMhvOdfa5IRiS+oyATtd55s3fICsT7wEtN/qoOiOINsNsNJjeZCOsPNOGS4p1yDOwH9cTxgjg==";
url = "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.5.tgz";
sha512 = "o9NUVpl/YlsGJ7t+xuqJKx8EBGf1quRhCiT6D/J0pfwmk9zUwYkC7yrF4SZCe6fETvSM3UNL2edcbYrSyc4QHA==";
};
};
"vue-fragment-1.5.1" = {
@ -13195,20 +13213,20 @@ in
];
})
sources."@aws-sdk/abort-controller-3.329.0"
sources."@aws-sdk/client-cognito-identity-3.332.0"
sources."@aws-sdk/client-sso-3.332.0"
sources."@aws-sdk/client-sso-oidc-3.332.0"
sources."@aws-sdk/client-sts-3.332.0"
sources."@aws-sdk/client-cognito-identity-3.335.0"
sources."@aws-sdk/client-sso-3.335.0"
sources."@aws-sdk/client-sso-oidc-3.335.0"
sources."@aws-sdk/client-sts-3.335.0"
sources."@aws-sdk/config-resolver-3.329.0"
sources."@aws-sdk/credential-provider-cognito-identity-3.332.0"
sources."@aws-sdk/credential-provider-cognito-identity-3.335.0"
sources."@aws-sdk/credential-provider-env-3.329.0"
sources."@aws-sdk/credential-provider-imds-3.329.0"
sources."@aws-sdk/credential-provider-ini-3.332.0"
sources."@aws-sdk/credential-provider-node-3.332.0"
sources."@aws-sdk/credential-provider-ini-3.335.0"
sources."@aws-sdk/credential-provider-node-3.335.0"
sources."@aws-sdk/credential-provider-process-3.329.0"
sources."@aws-sdk/credential-provider-sso-3.332.0"
sources."@aws-sdk/credential-provider-sso-3.335.0"
sources."@aws-sdk/credential-provider-web-identity-3.329.0"
sources."@aws-sdk/credential-providers-3.332.0"
sources."@aws-sdk/credential-providers-3.335.0"
sources."@aws-sdk/fetch-http-handler-3.329.0"
sources."@aws-sdk/hash-node-3.329.0"
sources."@aws-sdk/invalid-dependency-3.329.0"
@ -13234,7 +13252,7 @@ in
sources."@aws-sdk/shared-ini-file-loader-3.329.0"
sources."@aws-sdk/signature-v4-3.329.0"
sources."@aws-sdk/smithy-client-3.329.0"
sources."@aws-sdk/token-providers-3.332.0"
sources."@aws-sdk/token-providers-3.335.0"
sources."@aws-sdk/types-3.329.0"
sources."@aws-sdk/url-parser-3.329.0"
sources."@aws-sdk/util-base64-3.310.0"
@ -13306,10 +13324,10 @@ in
sources."@codemirror/lang-javascript-6.1.8"
sources."@codemirror/lang-json-6.0.1"
sources."@codemirror/lang-sql-6.5.0"
sources."@codemirror/language-6.6.0"
sources."@codemirror/language-6.7.0"
sources."@codemirror/lint-6.2.1"
sources."@codemirror/state-6.2.0"
sources."@codemirror/view-6.11.3"
sources."@codemirror/view-6.12.0"
sources."@colors/colors-1.5.0"
sources."@cspotcode/source-map-support-0.8.1"
sources."@curlconverter/yargs-0.0.2"
@ -13431,8 +13449,8 @@ in
sources."mkdirp-1.0.4"
];
})
sources."@oclif/command-1.8.25"
sources."@oclif/config-1.18.8"
sources."@oclif/command-1.8.26"
sources."@oclif/config-1.18.9"
(sources."@oclif/core-1.26.2" // {
dependencies = [
sources."supports-color-8.1.1"
@ -13453,7 +13471,7 @@ in
];
})
sources."@oclif/linewrap-1.0.0"
sources."@oclif/parser-3.8.10"
sources."@oclif/parser-3.8.11"
sources."@oclif/screen-3.0.4"
sources."@opentelemetry/api-1.4.1"
sources."@protobufjs/aspromise-1.1.2"
@ -13517,10 +13535,12 @@ in
];
})
sources."@servie/events-1.0.0"
sources."@smithy/protocol-http-1.0.1"
sources."@smithy/types-1.0.0"
sources."@sqltools/formatter-1.2.5"
sources."@swc/core-1.3.58"
sources."@swc/core-1.3.59"
sources."@swc/helpers-0.5.1"
sources."@swc/wasm-1.3.58"
sources."@swc/wasm-1.3.59"
sources."@techteamer/ocsp-1.0.0"
sources."@tediousjs/connection-string-0.4.2"
sources."@tokenizer/token-0.3.0"
@ -13543,7 +13563,7 @@ in
sources."@types/long-4.0.2"
sources."@types/mime-1.3.2"
sources."@types/multer-1.4.7"
sources."@types/node-20.1.7"
sources."@types/node-20.2.1"
(sources."@types/node-fetch-2.6.4" // {
dependencies = [
sources."form-data-3.0.1"
@ -13645,7 +13665,7 @@ in
sources."axios-0.24.0"
];
})
(sources."aws-sdk-2.1379.0" // {
(sources."aws-sdk-2.1381.0" // {
dependencies = [
sources."buffer-4.9.2"
sources."events-1.1.1"
@ -13658,7 +13678,7 @@ in
sources."aws-sign2-0.7.0"
sources."aws4-1.12.0"
sources."axios-0.21.4"
sources."axios-retry-3.4.0"
sources."axios-retry-3.5.0"
sources."babel-helper-vue-jsx-merge-props-2.0.3"
(sources."babel-runtime-6.26.0" // {
dependencies = [
@ -13676,7 +13696,7 @@ in
sources."bcrypt-pbkdf-1.0.2"
sources."bcryptjs-2.4.3"
sources."better-eval-1.3.0"
sources."better-sqlite3-8.3.0"
sources."better-sqlite3-8.4.0"
sources."big-integer-1.6.51"
sources."big.js-6.2.1"
sources."bignumber.js-9.1.1"
@ -14107,9 +14127,9 @@ in
})
sources."getpass-0.1.7"
sources."github-from-package-0.0.0"
(sources."glob-10.2.4" // {
(sources."glob-10.2.6" // {
dependencies = [
sources."minimatch-9.0.0"
sources."minimatch-9.0.1"
];
})
sources."glob-parent-5.1.2"
@ -14277,7 +14297,7 @@ in
sources."iso-639-1-2.1.15"
sources."isomorphic-ws-4.0.1"
sources."isstream-0.1.2"
sources."jackspeak-2.2.0"
sources."jackspeak-2.2.1"
sources."jake-10.8.6"
sources."jmespath-0.16.0"
sources."join-component-1.1.0"
@ -14465,7 +14485,7 @@ in
];
})
sources."minimist-1.2.8"
sources."minipass-6.0.1"
sources."minipass-6.0.2"
(sources."minipass-collect-1.0.2" // {
dependencies = [
sources."minipass-3.3.6"
@ -14733,7 +14753,7 @@ in
sources."path-is-absolute-1.0.1"
sources."path-key-2.0.1"
sources."path-parse-1.0.7"
sources."path-scurry-1.9.1"
sources."path-scurry-1.9.2"
sources."path-to-regexp-0.1.7"
sources."path-type-4.0.0"
sources."pause-0.0.1"
@ -14772,7 +14792,7 @@ in
sources."pgpass-1.0.5"
sources."picocolors-1.0.0"
sources."picomatch-2.3.1"
sources."pinia-2.1.1"
sources."pinia-2.1.3"
sources."popsicle-12.1.0"
sources."popsicle-content-encoding-1.0.0"
sources."popsicle-cookie-jar-1.0.0"
@ -15155,7 +15175,7 @@ in
sources."triple-beam-1.3.0"
sources."ts-expect-1.3.0"
sources."ts-node-10.9.1"
sources."tslib-2.5.0"
sources."tslib-2.5.2"
sources."tsscmp-1.0.6"
sources."tunnel-0.0.6"
sources."tunnel-agent-0.6.0"
@ -15250,7 +15270,7 @@ in
sources."vue-2.7.14"
sources."vue-agile-2.0.0"
sources."vue-color-2.8.1"
sources."vue-demi-0.14.3"
sources."vue-demi-0.14.5"
sources."vue-fragment-1.5.1"
sources."vue-i18n-8.28.2"
sources."vue-infinite-loading-2.4.5"

View File

@ -1,53 +1,55 @@
{ lib, stdenv, fetchFromGitHub, mkYarnPackage, nixosTests, writeText, python3 }:
let
version = "0.3.0";
version = "0.4.1";
src = fetchFromGitHub {
owner = "ngoduykhanh";
owner = "PowerDNS-Admin";
repo = "PowerDNS-Admin";
rev = "v${version}";
hash = "sha256-e11u0jdJr+2TDXvBAPlDfnuuDwSfBq+JtvnDUTNKp/c=";
hash = "sha256-AwqEcAPD1SF1Ma3wtH03mXlTywM0Q19hciCmTtlr3gk=";
};
python = python3;
pythonDeps = with python.pkgs; [
flask flask_assets flask-login flask-sqlalchemy flask_migrate flask-seasurf flask_mail flask-session flask-sslify
flask flask_assets flask-login flask-sqlalchemy flask_migrate flask-seasurf flask_mail flask-session flask-session-captcha flask-sslify
mysqlclient psycopg2 sqlalchemy
cffi configobj cryptography bcrypt requests python-ldap pyotp qrcode dnspython
gunicorn python3-saml pytz cssmin rjsmin authlib bravado-core
lima pytimeparse pyyaml jinja2 itsdangerous werkzeug
certifi cffi configobj cryptography bcrypt requests python-ldap pyotp qrcode dnspython
gunicorn itsdangerous python3-saml pytz rcssmin rjsmin authlib bravado-core
lima lxml passlib pyasn1 pytimeparse pyyaml jinja2 itsdangerous webcolors werkzeug zipp zxcvbn
];
assets = mkYarnPackage {
inherit src version;
packageJSON = ./package.json;
yarnNix = ./yarndeps.nix;
# Copied from package.json, see also
# https://github.com/NixOS/nixpkgs/pull/214952
packageResolutions = {
"@fortawesome/fontawesome-free" = "6.3.0";
};
nativeBuildInputs = pythonDeps;
patchPhase = ''
sed -i -r -e "s|'cssmin',\s?'cssrewrite'|'cssmin'|g" powerdnsadmin/assets.py
sed -i -r -e "s|'rcssmin',\s?'cssrewrite'|'rcssmin'|g" powerdnsadmin/assets.py
'';
buildPhase = ''
# The build process expects the directory to be writable
# with node_modules at a specific path
# https://github.com/ngoduykhanh/PowerDNS-Admin/blob/master/.yarnrc
# https://github.com/PowerDNS-Admin/PowerDNS-Admin/blob/master/.yarnrc
approot=deps/powerdns-admin-assets
ln -s $node_modules $approot/powerdnsadmin/static/node_modules
FLASK_APP=$approot/powerdnsadmin/__init__.py flask assets build
SESSION_TYPE=filesystem FLASK_APP=$approot/powerdnsadmin/__init__.py flask assets build
'';
installPhase = ''
# https://github.com/ngoduykhanh/PowerDNS-Admin/blob/54b257768f600c5548a1c7e50eac49c40df49f92/docker/Dockerfile#L43
# https://github.com/PowerDNS-Admin/PowerDNS-Admin/blob/54b257768f600c5548a1c7e50eac49c40df49f92/docker/Dockerfile#L43
mkdir $out
cp -r $approot/powerdnsadmin/static/{generated,assets,img} $out
find $node_modules -name webfonts -exec cp -r {} $out \;
find $node_modules -name fonts -exec cp -r {} $out \;
find $node_modules/icheck/skins/square -name '*.png' -exec cp {} $out/generated \;
mkdir $out/fonts
cp $node_modules/ionicons/dist/fonts/* $out/fonts
cp $node_modules/bootstrap/dist/fonts/* $out/fonts
cp $node_modules/font-awesome/fonts/* $out/fonts
'';
distPhase = "true";
};
@ -61,7 +63,7 @@ let
assets.register('js_main', 'generated/main.js')
assets.register('css_main', 'generated/main.css')
'';
in stdenv.mkDerivation rec {
in stdenv.mkDerivation {
pname = "powerdns-admin";
inherit src version;
@ -81,7 +83,13 @@ in stdenv.mkDerivation rec {
postPatch = ''
rm -r powerdnsadmin/static powerdnsadmin/assets.py
sed -i "s/id:/'id':/" migrations/versions/787bdba9e147_init_db.py
# flask-migrate 4.0 compatibility: https://github.com/PowerDNS-Admin/PowerDNS-Admin/issues/1376
substituteInPlace migrations/env.py --replace "render_as_batch=config.get_main_option('sqlalchemy.url').startswith('sqlite:')," ""
# flask-session and powerdns-admin both try to add sqlalchemy to flask.
# Reuse the database for flask-session
substituteInPlace powerdnsadmin/__init__.py --replace "sess = Session(app)" "app.config['SESSION_SQLALCHEMY'] = models.base.db; sess = Session(app)"
# Routes creates session database tables, so it needs a context
substituteInPlace powerdnsadmin/__init__.py --replace "routes.init_app(app)" "with app.app_context(): routes.init_app(app)"
'';
installPhase = ''
@ -113,7 +121,7 @@ in stdenv.mkDerivation rec {
meta = with lib; {
description = "A PowerDNS web interface with advanced features";
homepage = "https://github.com/ngoduykhanh/PowerDNS-Admin";
homepage = "https://github.com/PowerDNS-Admin/PowerDNS-Admin";
license = licenses.mit;
maintainers = with maintainers; [ Flakebi zhaofengli ];
};
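The `postPatch` hunk above leans on `substituteInPlace` to adjust upstream Python sources (the flask-migrate and flask-session fixes) instead of carrying separate patch files. Below is a minimal sketch of that pattern; the package name, file path, hash, and replacement strings are hypothetical and not taken from PowerDNS-Admin:

```nix
{ lib, stdenv, fetchFromGitHub }:

stdenv.mkDerivation {
  pname = "example-flask-app";   # hypothetical package
  version = "1.0";

  src = fetchFromGitHub {
    owner = "example";
    repo = "example-flask-app";
    rev = "v1.0";
    hash = lib.fakeHash;         # placeholder, replace with the real hash
  };

  # Rewrite a single line of the upstream source during the patch phase
  # rather than maintaining a .patch file for a one-line change.
  postPatch = ''
    substituteInPlace exampleapp/__init__.py \
      --replace "sess = Session(app)" \
                "app.config['SESSION_TYPE'] = 'filesystem'; sess = Session(app)"
  '';

  installPhase = ''
    runHook preInstall
    mkdir -p $out
    cp -r . $out/
    runHook postInstall
  '';
}
```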

View File

@ -1,17 +1,24 @@
{
"dependencies": {
"admin-lte": "2.4.9",
"bootstrap": "^3.4.1",
"bootstrap-datepicker": "^1.8.0",
"@fortawesome/fontawesome-free": "6.3.0",
"admin-lte": "3.2.0",
"bootstrap": "4.6.2",
"bootstrap-datepicker": "^1.9.0",
"bootstrap-validator": "^0.11.9",
"datatables.net-plugins": "^1.10.19",
"datatables.net-plugins": "^1.13.1",
"icheck": "^1.0.2",
"jquery-slimscroll": "^1.3.8",
"jquery-ui-dist": "^1.12.1",
"jquery-sparkline": "^2.4.0",
"jquery-ui-dist": "^1.13.2",
"jquery.quicksearch": "^2.4.0",
"jtimeout": "^3.1.0",
"jquery-validation": "^1.19.5",
"jtimeout": "^3.2.0",
"knockout": "^3.5.1",
"multiselect": "^0.9.12"
},
"resolutions": {
"admin-lte/@fortawesome/fontawesome-free": "6.3.0"
},
"name": "powerdns-admin-assets",
"version": "0.3.0"
"version": "0.4.1"
}

File diff suppressed because it is too large

View File

@ -13,13 +13,13 @@
stdenv.mkDerivation rec {
pname = "savvycan";
version = "208";
version = "213";
src = fetchFromGitHub {
owner = "collin80";
repo = "SavvyCAN";
rev = "V${version}";
hash = "sha256-agvCl8c7LqGyIKe0K3PdzuBUqTJZtUr434134olbUMw=";
hash = "sha256-duITY6s/uadeBCFuG42JbLCaq7yaYv1qB8Q3GA8UJ0A=";
};
buildInputs = [ qtbase qttools qtserialbus qtserialport qtdeclarative ];

View File

@ -2,19 +2,20 @@
, stdenv
, fetchurl
, dpkg
, wrapGAppsHook
, wrapQtAppsHook
, autoPatchelfHook
, alsa-lib
, at-spi2-core
, libtool
, libxkbcommon
, nspr
, mesa
, libtiff
, cups
, udev
, gtk3
, qtbase
, xorg
, cups
, pango
, makeWrapper
, useChineseVersion ? false
}:
@ -33,35 +34,43 @@ stdenv.mkDerivation rec {
unpackCmd = "dpkg -x $src .";
sourceRoot = ".";
postUnpack = ''
# distribution is missing libkappessframework.so, so we should not let
# autoPatchelfHook fail on the following dead libraries
rm -r opt/kingsoft/wps-office/office6/addons/pdfbatchcompression
# Remove the following libraries because they depend on qt4
rm -r opt/kingsoft/wps-office/office6/{librpcetapi.so,librpcwpsapi.so,librpcwppapi.so,libavdevice.so.58.10.100,libmediacoder.so}
rm -r opt/kingsoft/wps-office/office6/addons/wppcapturer/libwppcapturer.so
rm -r opt/kingsoft/wps-office/office6/addons/wppencoder/libwppencoder.so
'';
nativeBuildInputs = [ dpkg wrapGAppsHook wrapQtAppsHook makeWrapper autoPatchelfHook ];
nativeBuildInputs = [
dpkg
autoPatchelfHook
];
buildInputs = [
alsa-lib
xorg.libXdamage
xorg.libXtst
at-spi2-core
libtool
libxkbcommon
nspr
mesa
libtiff
udev
gtk3
qtbase
xorg.libXdamage
xorg.libXtst
xorg.libXv
];
dontWrapQtApps = true;
runtimeDependencies = map lib.getLib [
cups
pango
];
autoPatchelfIgnoreMissingDeps = [
# distribution is missing libkappessframework.so
"libkappessframework.so"
# qt4 support is deprecated
"libQtCore.so.4"
"libQtNetwork.so.4"
"libQtXml.so.4"
];
installPhase = ''
runHook preInstall
prefix=$out/opt/kingsoft/wps-office
@ -79,25 +88,13 @@ stdenv.mkDerivation rec {
runHook postInstall
'';
dontWrapQtApps = true;
dontWrapGApps = true;
preFixup = ''
# The following libraries need libtiff.so.5, but nixpkgs provides libtiff.so.6
patchelf --replace-needed libtiff.so.5 libtiff.so $out/opt/kingsoft/wps-office/office6/{libpdfmain.so,libqpdfpaint.so,qt/plugins/imageformats/libqtiff.so}
patchelf --replace-needed libtiff.so.5 libtiff.so $out/opt/kingsoft/wps-office/office6/{libpdfmain.so,libqpdfpaint.so,qt/plugins/imageformats/libqtiff.so,addons/pdfbatchcompression/libpdfbatchcompressionapp.so}
# dlopen dependency
patchelf --add-needed libudev.so.1 $out/opt/kingsoft/wps-office/office6/addons/cef/libcef.so
'';
postFixup = ''
for f in "$out"/bin/*; do
echo "Wrapping $f"
wrapProgram "$f" \
"''${gappsWrapperArgs[@]}" \
"''${qtWrapperArgs[@]}"
done
'';
meta = with lib; {
description = "Office suite, formerly Kingsoft Office";
homepage = "https://www.wps.com";

View File

@ -5,28 +5,36 @@
, boost
, eigen
, zlib
, llvmPackages
}:
stdenv.mkDerivation rec {
pname = "iqtree";
version = "2.2.0.4";
version = "2.2.2.4";
src = fetchFromGitHub {
owner = "iqtree";
repo = "iqtree2";
rev = "v${version}";
sha256 = "sha256:0ickw1ldpvv2m66yzbvqfhn8k07qdkhbjrlqjs6vcf3s42j5c6pq";
hash = "sha256-5NF0Ej3M19Vd08xfmOHRhZkM1YGQ/ZlFj0HsSw1sw1w=";
fetchSubmodules = true;
};
nativeBuildInputs = [ cmake ];
buildInputs = [ boost eigen zlib ];
buildInputs = [
boost
eigen
zlib
] ++ lib.optionals stdenv.isDarwin [
llvmPackages.openmp
];
meta = with lib; {
homepage = "http://www.iqtree.org/";
description = "Efficient and versatile phylogenomic software by maximum likelihood";
license = licenses.lgpl2;
maintainers = with maintainers; [ bzizou ];
platforms = [ "x86_64-linux" "x86_64-darwin" ];
};
}

View File

@ -0,0 +1,101 @@
{ lib
, stdenv
, fetchFromGitHub
, cmake
, blas
, superlu
, suitesparse
, python3
, libintl
, libiconv
}:
let
# this is a fork of FETK (http://www.fetk.org/)
# maintained by the APBS team
fetk = stdenv.mkDerivation (finalAttrs: {
pname = "fetk";
version = "1.9.3";
src = fetchFromGitHub {
owner = "Electrostatics";
repo = "fetk";
rev = "refs/tags/${finalAttrs.version}";
hash = "sha256-uFA1JRR05cNcUGaJj9IyGNONB2hU9IOBPzOj/HucNH4=";
};
nativeBuildInputs = [
cmake
];
cmakeFlags = [
"-DBLAS_LIBRARIES=${blas}/lib"
"-DBLA_STATIC=OFF"
"-DBUILD_SUPERLU=OFF"
];
buildInputs = [
blas
superlu
suitesparse
];
meta = with lib; {
description = "Fork of the Finite Element ToolKit from fetk.org";
homepage = "https://github.com/Electrostatics/FETK";
changelog = "https://github.com/Electrostatics/FETK/releases/tag/${finalAttrs.version}";
license = licenses.lgpl21Plus;
maintainers = with maintainers; [ natsukium ];
platforms = platforms.unix;
};
});
in
stdenv.mkDerivation (finalAttrs: {
pname = "apbs";
version = "3.4.1";
src = fetchFromGitHub {
owner = "Electrostatics";
repo = "apbs";
rev = "refs/tags/v${finalAttrs.version}";
hash = "sha256-2DnHU9hMDl4OJBaTtcRiB+6R7gAeFcuOUy7aI63A3gQ=";
};
postPatch = ''
# ImportFETK.cmake downloads source and builds fetk
substituteInPlace CMakeLists.txt \
--replace "include(ImportFETK)" "" \
--replace 'import_fetk(''${FETK_VERSION})' ""
'';
nativeBuildInputs = [
cmake
];
buildInputs = [
fetk
suitesparse
blas
python3
] ++ lib.optionals stdenv.isDarwin [
libintl
libiconv
];
cmakeFlags = [
"-DPYTHON_VERSION=${python3.version}"
"-DAPBS_LIBS=mc;maloc"
"-DCMAKE_MODULE_PATH=${fetk}/share/fetk/cmake;"
"-DENABLE_TESTS=1"
];
doCheck = true;
meta = with lib; {
description = "Software for biomolecular electrostatics and solvation calculations";
homepage = "https://www.poissonboltzmann.org/";
changelog = "https://github.com/Electrostatics/apbs/releases/tag/v${finalAttrs.version}";
license = licenses.bsd3;
maintainers = with maintainers; [ natsukium ];
platforms = platforms.unix;
};
})

View File

@ -24,6 +24,7 @@
, with3d ? true
, withI18n ? true
, srcs ? { }
, symlinkJoin
}:
# The `srcs` parameter can be used to override the kicad source code
@ -136,6 +137,17 @@ stdenv.mkDerivation rec {
++ optionals (withScripting)
[ python.pkgs.wrapPython ];
# KICAD7_TEMPLATE_DIR only works with a single path (it does not handle : separated paths)
# but it's used to find both the templates and the symbol/footprint library tables
# https://gitlab.com/kicad/code/kicad/-/issues/14792
template_dir = symlinkJoin {
name = "KiCad_template_dir";
paths = with passthru.libraries; [
"${templates}/share/kicad/template"
"${footprints}/share/kicad/template"
"${symbols}/share/kicad/template"
];
};
# We are emulating wrapGAppsHook and also passing other variables to the wrapper
makeWrapperArgs = with passthru.libraries; [
"--prefix XDG_DATA_DIRS : ${base}/share"
@ -150,9 +162,7 @@ stdenv.mkDerivation rec {
"--set-default MOZ_DBUS_REMOTE 1"
"--set-default KICAD7_FOOTPRINT_DIR ${footprints}/share/kicad/footprints"
"--set-default KICAD7_SYMBOL_DIR ${symbols}/share/kicad/symbols"
"--set-default KICAD7_TEMPLATE_DIR ${templates}/share/kicad/template"
"--prefix KICAD7_TEMPLATE_DIR : ${symbols}/share/kicad/template"
"--prefix KICAD7_TEMPLATE_DIR : ${footprints}/share/kicad/template"
"--set-default KICAD7_TEMPLATE_DIR ${template_dir}"
]
++ optionals (with3d)
[

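The KiCad hunk above works around `KICAD7_TEMPLATE_DIR` accepting only a single directory by merging the template directories of several library packages with `symlinkJoin`. A minimal sketch of what that helper produces, with hypothetical package names and paths:

```nix
{ symlinkJoin, templatesPkg, footprintsPkg, symbolsPkg }:

# symlinkJoin builds one store path whose entries are symlinks into each of
# the listed paths, so a variable that only accepts a single directory can
# still see files coming from several packages.
symlinkJoin {
  name = "merged-template-dir";   # hypothetical name
  paths = [
    "${templatesPkg}/share/kicad/template"
    "${footprintsPkg}/share/kicad/template"
    "${symbolsPkg}/share/kicad/template"
  ];
}
```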
View File

@ -2,7 +2,7 @@
stdenv.mkDerivation rec {
pname = "lean";
version = "3.50.3";
version = "3.51.0";
src = fetchFromGitHub {
owner = "leanprover-community";
@ -11,8 +11,8 @@ stdenv.mkDerivation rec {
# from. this is then used to check whether an olean file should be
# rebuilt. don't use a tag as rev because this will get replaced into
# src/githash.h.in in preConfigure.
rev = "855e5b74e3a52a40552e8f067169d747d48743fd";
sha256 = "sha256-RH4w7PpzC+fhqCHikXQO2pUUvWD2qrA0mVMUGxpauwE=";
rev = "9fc1dee97a72a3e34d658aefb4b8a95ecd3d477c";
hash = "sha256-Vcsph4dTNLafeaTtVwJS8tWoWCgcP6pxF0ssZDE/YfM=";
};
nativeBuildInputs = [ cmake ];

View File

@ -50,21 +50,21 @@ let
srcs = {
toolbox = fetchFromGitHub {
owner = pname + "-toolbox";
repo = pname;
rev = pname + "_" + version;
sha256 = "05s9dclmk7x5d7wnnj4qr6r6c827m72a44gizcv09lxr28pr9inz";
owner = "shogun-toolbox";
repo = "shogun";
rev = "shogun_${version}";
sha256 = "sha256-38aULxK50wQ2+/ERosSpRyBmssmYSGv5aaWfWSlrSRc=";
fetchSubmodules = true;
};
# The CMake external projects expect the packed archives
rxcpp = fetchurl {
url = "https://github.com/Reactive-Extensions/RxCpp/archive/v${rxcppVersion}.tar.gz";
sha256 = "0y2isr8dy2n1yjr9c5570kpc9lvdlch6jv0jvw000amwn5d3krsh";
sha256 = "sha256-UOc5WrG8KgAA3xJsaSCjbdPE7gSnFJay9MEK31DWUXg=";
};
gtest = fetchurl {
url = "https://github.com/google/googletest/archive/release-${gtestVersion}.tar.gz";
sha256 = "1n5p1m2m3fjrjdj752lf92f9wq3pl5cbsfrb49jqbg52ghkz99jq";
sha256 = "sha256-WKb0J3yivIVlIis7vVihd2CenEiOinJkk1m6UUUNt9g=";
};
};
in
@ -77,13 +77,6 @@ stdenv.mkDerivation rec {
src = srcs.toolbox;
patches = [
# Fix compile errors with json-c
# https://github.com/shogun-toolbox/shogun/pull/4104
(fetchpatch {
url = "https://github.com/shogun-toolbox/shogun/commit/365ce4c4c700736d2eec8ba6c975327a5ac2cd9b.patch";
sha256 = "158hqv4xzw648pmjbwrhxjp7qcppqa7kvriif87gn3zdn711c49s";
})
# Fix compile errors with GCC 9+
# https://github.com/shogun-toolbox/shogun/pull/4811
(fetchpatch {
@ -95,6 +88,20 @@ stdenv.mkDerivation rec {
sha256 = "sha256-AgJJKQA8vc5oKaTQDqMdwBR4hT4sn9+uW0jLe7GteJw=";
})
# Fix virtual destruction
(fetchpatch {
url = "https://github.com/shogun-toolbox/shogun/commit/ef0e4dc1cc4a33c9e6b17a108fa38a436de2d7ee.patch";
sha256 = "sha256-a9Rm0ytqkSAgC3dguv8m3SwOSipb+VByBHHdmV0d63w=";
})
./fix-virtual-destruction.patch
# Fix compile errors with json-c
# https://github.com/shogun-toolbox/shogun/pull/4104
(fetchpatch {
url = "https://github.com/shogun-toolbox/shogun/commit/365ce4c4c700736d2eec8ba6c975327a5ac2cd9b.patch";
sha256 = "sha256-OhEWwrHtD/sOcjHmPY/C9zJ8ruww8yXrRcTw38nGEJU=";
})
# Fix compile errors with Eigen 3.4
./eigen-3.4.patch
@ -126,6 +133,16 @@ stdenv.mkDerivation rec {
cmakeFlags = let
enableIf = cond: if cond then "ON" else "OFF";
excludeTestsRegex = lib.concatStringsSep "|" [
# sporadic segfault
"TrainedModelSerialization"
# broken by openblas 0.3.21
"mathematics_lapack"
# these take too long on CI
"evaluation_cross_validation"
"modelselection_combined_kernel"
"modelselection_grid_search"
];
in [
"-DBUILD_META_EXAMPLES=ON"
"-DCMAKE_DISABLE_FIND_PACKAGE_ARPACK=ON"
@ -134,7 +151,7 @@ stdenv.mkDerivation rec {
"-DCMAKE_DISABLE_FIND_PACKAGE_Mosek=ON"
"-DCMAKE_DISABLE_FIND_PACKAGE_TFLogger=ON"
"-DCMAKE_DISABLE_FIND_PACKAGE_ViennaCL=ON"
"-DCMAKE_CTEST_ARGUMENTS='--exclude-regex;TrainedModelSerialization'" # Sporadic segfault
"-DCMAKE_CTEST_ARGUMENTS=--exclude-regex;'${excludeTestsRegex}'"
"-DENABLE_TESTING=${enableIf doCheck}"
"-DDISABLE_META_INTEGRATION_TESTS=ON"
"-DTRAVIS_DISABLE_META_CPP=ON"

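The shogun hunk above replaces a single hard-coded ctest exclusion with a pattern assembled from a commented list via `lib.concatStringsSep`. A minimal sketch of the same idiom with hypothetical test names:

```nix
{ lib }:

let
  # One entry per excluded test, so each can carry its own justification.
  excludedTests = [
    "FlakyNetworkTest"    # hypothetical: needs network access
    "VeryLongBenchmark"   # hypothetical: too slow for CI
  ];
  excludeRegex = lib.concatStringsSep "|" excludedTests;
in
{
  cmakeFlags = [
    # ctest is eventually invoked with --exclude-regex '<pattern>'
    "-DCMAKE_CTEST_ARGUMENTS=--exclude-regex;'${excludeRegex}'"
  ];
}
```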
View File

@ -0,0 +1,20 @@
From: Sebastián Mancilla <smancill@smancill.dev>
Subject: Fix virtual destruction
---
src/shogun/solver/LDASolver.h | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/shogun/solver/LDASolver.h b/src/shogun/solver/LDASolver.h
index 9300a85c5..d500eca5d 100644
--- a/src/shogun/solver/LDASolver.h
+++ b/src/shogun/solver/LDASolver.h
@@ -87,7 +87,7 @@ namespace shogun
compute_within_cov();
}
- ~LDASolver()
+ virtual ~LDASolver()
{
SG_UNREF(m_features)
SG_UNREF(m_labels)

View File

@ -1,13 +1,13 @@
{ lib, stdenv, fetchFromGitHub, pkg-config, coin-utils, zlib, osi }:
stdenv.mkDerivation rec {
version = "1.17.7";
version = "1.17.8";
pname = "clp";
src = fetchFromGitHub {
owner = "coin-or";
repo = "Clp";
rev = "releases/${version}";
hash = "sha256-CfAK/UbGaWvyk2ZxKEgziVruzZfz7WMJVi/YvdR/UNA=";
hash = "sha256-3Z6ysoCcDVB8UePiwbZNqvO/o/jgPcv6XFkpJZBK+Os=";
};
nativeBuildInputs = [ pkg-config ];

View File

@ -9,11 +9,11 @@ assert (!blas.isILP64) && (!lapack.isILP64);
stdenv.mkDerivation rec {
pname = "giac${lib.optionalString enableGUI "-with-xcas"}";
version = "1.9.0-29"; # TODO try to remove preCheck phase on upgrade
version = "1.9.0-43"; # TODO try to remove preCheck phase on upgrade
src = fetchurl {
url = "https://www-fourier.ujf-grenoble.fr/~parisse/debian/dists/stable/main/source/giac_${version}.tar.gz";
sha256 = "sha256-9jUVcsrV8jMfqrmnymZ4vIaWlabF9ppCuq7VDlZ5Cw4=";
sha256 = "sha256-466jB8ZRqHkU5XCY+j0Fh7Dq/mMaOu10rHECKbtNGrs=";
};
patches = [
@ -27,16 +27,12 @@ stdenv.mkDerivation rec {
# the compiler rightfully warns about (with an error nowadays).
(fetchpatch {
name = "fix-string-compiler-error.patch";
url = "https://salsa.debian.org/science-team/giac/-/raw/08cb807ef41f5216b712928886ebf74f69d5ddf6/debian/patches/fix-string-compiler-error.patch";
sha256 = "sha256-K4KAJY1F9Y4DTZFmVEOCXTnxBmHo4//3A10UR3Wlliw=";
url = "https://salsa.debian.org/science-team/giac/-/raw/9ca8dbf4bb16d9d96948aa4024326d32485d7917/debian/patches/fix-string-compiler-error.patch";
sha256 = "sha256-r+M+9MRPRqhHcdhYWI6inxyNvWbXUbBcPCeDY7aulvk=";
})
# increase pari stack size for test chk_fhan4
(fetchpatch {
name = "increase-pari-stack-size.patch";
url = "https://salsa.debian.org/science-team/giac/-/raw/08cb807ef41f5216b712928886ebf74f69d5ddf6/debian/patches/increase-pari-size.patch";
sha256 = "sha256-764P0IJ7ndURap7hotOmYJK0wAhYdqMbQNOnhJxVNt0=";
})
# increase pari stack size for test chk_fhan{4,6}
./increase-pari-stack-size.patch
] ++ lib.optionals (!enableGUI) [
# when enableGui is false, giac is compiled without fltk. That
# means some outputs differ in the make check. Patch around this:

View File

@ -0,0 +1,18 @@
diff -ur a/check/chk_fhan4 b/check/chk_fhan4
--- a/check/chk_fhan4 2018-03-13 19:27:11.000000000 +0100
+++ b/check/chk_fhan4 2023-05-20 16:31:30.349063063 +0200
@@ -1,4 +1,5 @@
#! /bin/sh
unset LANG
+export PARI_SIZE=2048000
../src/icas TP04-sol.cas > TP04.tst
diff TP04.tst TP04-sol.cas.out1
diff -ur a/check/chk_fhan6 b/check/chk_fhan6
--- a/check/chk_fhan6 2018-03-13 19:27:21.000000000 +0100
+++ b/check/chk_fhan6 2023-05-20 16:32:04.199407065 +0200
@@ -1,4 +1,5 @@
#! /bin/sh
unset LANG
+export PARI_SIZE=2048000
../src/icas TP06-sol.cas > TP06.tst
diff TP06.tst TP06-sol.cas.out1

Some files were not shown because too many files have changed in this diff