Merge master into haskell-updates

github-actions[bot] 2021-10-19 00:07:13 +00:00 committed by GitHub
commit fce8110371
99 changed files with 19610 additions and 6197 deletions


@ -4421,6 +4421,16 @@
githubId = 54728477;
name = "Happy River";
};
hardselius = {
email = "martin@hardselius.dev";
github = "hardselius";
githubId = 1422583;
name = "Martin Hardselius";
keys = [{
longkeyid = "rsa4096/0x03A6E6F786936619";
fingerprint = "3F35 E4CA CBF4 2DE1 2E90 53E5 03A6 E6F7 8693 6619";
}];
};
haslersn = {
email = "haslersn@fius.informatik.uni-stuttgart.de";
github = "haslersn";


@ -33,8 +33,7 @@ TMP_FILE="$(mktemp)"
GENERATED_NIXFILE="pkgs/development/lua-modules/generated-packages.nix"
LUAROCKS_CONFIG="$NIXPKGS_PATH/maintainers/scripts/luarocks-config.lua"
HEADER = """
/* {GENERATED_NIXFILE} is an auto-generated file -- DO NOT EDIT!
HEADER = """/* {GENERATED_NIXFILE} is an auto-generated file -- DO NOT EDIT!
Regenerate it with:
nixpkgs$ ./maintainers/scripts/update-luarocks-packages
@ -99,9 +98,8 @@ class LuaEditor(Editor):
header2 = textwrap.dedent(
# header2 = inspect.cleandoc(
"""
{ self, stdenv, lib, fetchurl, fetchgit, ... } @ args:
self: super:
with self;
{ self, stdenv, lib, fetchurl, fetchgit, callPackage, ... } @ args:
final: prev:
{
""")
f.write(header2)
@ -199,6 +197,7 @@ def generate_pkg_nix(plug: LuaPlugin):
log.debug("running %s", ' '.join(cmd))
output = subprocess.check_output(cmd, text=True)
output = "callPackage(" + output.strip() + ") {};\n\n"
return (plug, output)
def main():


@ -1554,6 +1554,47 @@ Superuser created successfully.
encapsulation.
</para>
</listitem>
<listitem>
<para>
Changing systemd <literal>.socket</literal> units now restarts
them and stops the service that is activated by them.
Additionally, services with
<literal>stopIfChanged = false</literal> don't break anymore
when they are socket-activated.
</para>
</listitem>
<listitem>
<para>
The <literal>virtualisation.libvirtd</literal> module has been
refactored and updated with new options:
</para>
<itemizedlist spacing="compact">
<listitem>
<para>
<literal>virtualisation.libvirtd.qemu*</literal> options
(e.g.:
<literal>virtualisation.libvirtd.qemuRunAsRoot</literal>)
were moved to
<link xlink:href="options.html#opt-virtualisation.libvirtd.qemu"><literal>virtualisation.libvirtd.qemu</literal></link>
submodule,
</para>
</listitem>
<listitem>
<para>
software TPM1/TPM2 support (e.g.: Windows 11 guests)
(<link xlink:href="options.html#opt-virtualisation.libvirtd.qemu.swtpm"><literal>virtualisation.libvirtd.qemu.swtpm</literal></link>),
</para>
</listitem>
<listitem>
<para>
custom OVMF package (e.g.:
<literal>pkgs.OVMFFull</literal> with HTTP, CSM and Secure
Boot support)
(<link xlink:href="options.html#opt-virtualisation.libvirtd.qemu.ovmf.package"><literal>virtualisation.libvirtd.qemu.ovmf.package</literal></link>).
</para>
</listitem>
</itemizedlist>
</listitem>
</itemizedlist>
</section>
</section>


@ -449,3 +449,10 @@ In addition to numerous new and upgraded packages, this release has the followin
- The `networking` module has a new `networking.fooOverUDP` option to configure Foo-over-UDP encapsulations.
- `networking.sits` now supports Foo-over-UDP encapsulation.
- Changing systemd `.socket` units now restarts them and stops the service that is activated by them. Additionally, services with `stopIfChanged = false` don't break anymore when they are socket-activated.
- The `virtualisation.libvirtd` module has been refactored and updated with new options:
- `virtualisation.libvirtd.qemu*` options (e.g.: `virtualisation.libvirtd.qemuRunAsRoot`) were moved to [`virtualisation.libvirtd.qemu`](options.html#opt-virtualisation.libvirtd.qemu) submodule,
- software TPM1/TPM2 support (e.g.: Windows 11 guests) ([`virtualisation.libvirtd.qemu.swtpm`](options.html#opt-virtualisation.libvirtd.qemu.swtpm)),
- custom OVMF package (e.g.: `pkgs.OVMFFull` with HTTP, CSM and Secure Boot support) ([`virtualisation.libvirtd.qemu.ovmf.package`](options.html#opt-virtualisation.libvirtd.qemu.ovmf.package)).
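
A minimal sketch of a socket-activated service as it might appear in a NixOS configuration (the unit name and paths are placeholders, adapted loosely from the switch test added alongside this change):

```nix
{
  systemd.sockets.my-daemon = {                  # hypothetical unit name
    wantedBy = [ "sockets.target" ];
    listenStreams = [ "/run/my-daemon.sock" ];   # placeholder socket path
  };
  systemd.services.my-daemon = {
    # Started on the first connection to the socket; changing the .socket
    # unit now restarts the socket and stops this service, so the new
    # version is activated by the socket again.
    serviceConfig.ExecStart = "/path/to/my-daemon";  # placeholder path
    stopIfChanged = false;  # no longer breaks socket activation
  };
}
```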
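For reference, a minimal sketch of a configuration using the new option tree (the values are illustrative, not defaults):

```nix
{ pkgs, ... }:
{
  virtualisation.libvirtd = {
    enable = true;
    qemu = {
      package = pkgs.qemu_kvm;       # formerly virtualisation.libvirtd.qemuPackage
      runAsRoot = false;             # formerly virtualisation.libvirtd.qemuRunAsRoot
      swtpm.enable = true;           # software TPM, e.g. for Windows 11 guests
      ovmf.package = pkgs.OVMFFull;  # OVMF with HTTP, CSM and Secure Boot support
    };
  };
}
```

The old `virtualisation.libvirtd.qemu*` names are still accepted through `mkRenamedOptionModule` aliases, so existing configurations keep evaluating with a rename warning.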


@ -68,9 +68,8 @@ rec {
prefixLength = 24;
} ];
});
in
{ key = "ip-address";
config =
networkConfig =
{ networking.hostName = mkDefault m.fst;
networking.interfaces = listToAttrs interfaces;
@ -96,7 +95,15 @@ rec {
in flip concatMap interfacesNumbered
({ fst, snd }: qemu-common.qemuNICFlags snd fst m.snd);
};
}
in
{ key = "ip-address";
config = networkConfig // {
# Expose the networkConfig items for tests like nixops
# that need to recreate the network config.
system.build.networkConfig = networkConfig;
};
}
)
(getAttr m.fst nodes)
] );


@ -83,10 +83,13 @@ let
optionsListVisible = lib.filter (opt: opt.visible && !opt.internal) (lib.optionAttrSetToDocList options);
# Custom-sort the option list for the man page.
# Always ensure that the sort order matches sortXML.py!
optionsList = lib.sort optionLess optionsListDesc;
# Convert the list of options into an XML file.
optionsXML = builtins.toFile "options.xml" (builtins.toXML optionsList);
# This file is *not* sorted to save on eval time, since the docbook XML
# and the manpage depend on it and thus we evaluate this on every system rebuild.
optionsXML = builtins.toFile "options.xml" (builtins.toXML optionsListDesc);
optionsNix = builtins.listToAttrs (map (o: { name = o.name; value = removeAttrs o ["name" "visible" "internal"]; }) optionsList);
@ -185,9 +188,10 @@ in {
exit 1
fi
${pkgs.python3Minimal}/bin/python ${./sortXML.py} $optionsXML sorted.xml
${pkgs.libxslt.bin}/bin/xsltproc \
--stringparam revision '${revision}' \
-o intermediate.xml ${./options-to-docbook.xsl} $optionsXML
-o intermediate.xml ${./options-to-docbook.xsl} sorted.xml
${pkgs.libxslt.bin}/bin/xsltproc \
-o "$out" ${./postprocess-option-descriptions.xsl} intermediate.xml
'';


@ -0,0 +1,28 @@
import xml.etree.ElementTree as ET
import sys

tree = ET.parse(sys.argv[1])
# the xml tree is of the form
# <expr><list> {all options, each an attrs} </list></expr>
options = list(tree.getroot().find('list'))

def sortKey(opt):
    def order(s):
        if s.startswith("enable"):
            return 0
        if s.startswith("package"):
            return 1
        return 2
    return [
        (order(p.attrib['value']), p.attrib['value'])
        for p in opt.findall('attr[@name="loc"]/list/string')
    ]

# always ensure that the sort order matches the order used in the nix expression!
options.sort(key=sortKey)

doc = ET.Element("expr")
newOptions = ET.SubElement(doc, "list")
newOptions.extend(options)
ET.ElementTree(doc).write(sys.argv[2], encoding='utf-8')


@ -1,6 +1,6 @@
let
pkgs = (import ../../../../../../default.nix {});
machine = import "${pkgs.path}/nixos/lib/eval-config.nix" {
machine = import (pkgs.path + "/nixos/lib/eval-config.nix") {
system = "x86_64-linux";
modules = [
({config, ...}: { imports = [ ./system.nix ]; })


@ -109,7 +109,7 @@ let cfg = config.services.subsonic; in {
after = [ "network.target" ];
wantedBy = [ "multi-user.target" ];
script = ''
${pkgs.jre}/bin/java -Xmx${toString cfg.maxMemory}m \
${pkgs.jre8}/bin/java -Xmx${toString cfg.maxMemory}m \
-Dsubsonic.home=${cfg.home} \
-Dsubsonic.host=${cfg.listenAddress} \
-Dsubsonic.port=${toString cfg.port} \


@ -272,7 +272,7 @@ in
(mkIf cfg.ldap-proxy.enable {
systemd.services.privacyidea-ldap-proxy = let
ldap-proxy-env = pkgs.python2.withPackages (ps: [ ps.privacyidea-ldap-proxy ]);
ldap-proxy-env = pkgs.python3.withPackages (ps: [ ps.privacyidea-ldap-proxy ]);
in {
description = "privacyIDEA LDAP proxy";
wantedBy = [ "multi-user.target" ];


@ -11,7 +11,6 @@ use Cwd 'abs_path';
my $out = "@out@";
# FIXME: maybe we should use /proc/1/exe to get the current systemd.
my $curSystemd = abs_path("/run/current-system/sw/bin");
# To be robust against interruption, record what units need to be started etc.
@ -19,13 +18,16 @@ my $startListFile = "/run/nixos/start-list";
my $restartListFile = "/run/nixos/restart-list";
my $reloadListFile = "/run/nixos/reload-list";
# Parse restart/reload requests by the activation script
# Parse restart/reload requests by the activation script.
# Activation scripts may write newline-separated units to this
# file and switch-to-configuration will handle them. While
# `stopIfChanged = true` is ignored, switch-to-configuration will
# handle `restartIfChanged = false` and `reloadIfChanged = true`.
# This also works for socket-activated units.
my $restartByActivationFile = "/run/nixos/activation-restart-list";
my $reloadByActivationFile = "/run/nixos/activation-reload-list";
my $dryRestartByActivationFile = "/run/nixos/dry-activation-restart-list";
my $dryReloadByActivationFile = "/run/nixos/dry-activation-reload-list";
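# For illustration only (the unit names here are hypothetical): an activation
# script may append newline-separated unit names such as
#
#     my-app.service
#     my-app.socket
#
# to one of the files above; switch-to-configuration then treats each listed
# unit as modified and decides whether to restart, reload or stop it.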
make_path("/run/nixos", { mode => 0755 });
make_path("/run/nixos", { mode => oct(755) });
my $action = shift @ARGV;
@ -147,6 +149,92 @@ sub fingerprintUnit {
return abs_path($s) . (-f "${s}.d/overrides.conf" ? " " . abs_path "${s}.d/overrides.conf" : "");
}
sub handleModifiedUnit {
my ($unit, $baseName, $newUnitFile, $activePrev, $unitsToStop, $unitsToStart, $unitsToReload, $unitsToRestart, $unitsToSkip) = @_;
if ($unit eq "sysinit.target" || $unit eq "basic.target" || $unit eq "multi-user.target" || $unit eq "graphical.target" || $unit =~ /\.slice$/ || $unit =~ /\.path$/) {
# Do nothing. These cannot be restarted directly.
# Slices and Paths don't have to be restarted since
# properties (resource limits and inotify watches)
# seem to get applied on daemon-reload.
} elsif ($unit =~ /\.mount$/) {
# Reload the changed mount unit to force a remount.
$unitsToReload->{$unit} = 1;
recordUnit($reloadListFile, $unit);
} else {
my $unitInfo = parseUnit($newUnitFile);
if (boolIsTrue($unitInfo->{'X-ReloadIfChanged'} // "no")) {
$unitsToReload->{$unit} = 1;
recordUnit($reloadListFile, $unit);
}
elsif (!boolIsTrue($unitInfo->{'X-RestartIfChanged'} // "yes") || boolIsTrue($unitInfo->{'RefuseManualStop'} // "no") || boolIsTrue($unitInfo->{'X-OnlyManualStart'} // "no")) {
$unitsToSkip->{$unit} = 1;
} else {
# If this unit is socket-activated, then stop it instead
# of restarting it to make sure the new version of it is
# socket-activated.
my $socketActivated = 0;
if ($unit =~ /\.service$/) {
my @sockets = split / /, ($unitInfo->{Sockets} // "");
if (scalar @sockets == 0) {
@sockets = ("$baseName.socket");
}
foreach my $socket (@sockets) {
if (-e "$out/etc/systemd/system/$socket") {
$socketActivated = 1;
$unitsToStop->{$unit} = 1;
# If the socket was not running previously,
# start it now.
if (not defined $activePrev->{$socket}) {
$unitsToStart->{$socket} = 1;
}
}
}
}
# Don't do the rest of this for socket-activated units
# because we handled these above where we stop the unit.
# Since only services can be socket-activated, the
# following condition always evaluates to `true` for
# non-service units.
if ($socketActivated) {
return;
}
# If we are restarting a socket, also stop the corresponding
# service. This is required because restarting a socket
# when the service is already activated fails.
if ($unit =~ /\.socket$/) {
my $service = $unitInfo->{Service} // "";
if ($service eq "") {
$service = "$baseName.service";
}
if (defined $activePrev->{$service}) {
$unitsToStop->{$service} = 1;
}
$unitsToRestart->{$unit} = 1;
recordUnit($restartListFile, $unit);
} else {
# Always restart non-services instead of stopping and starting them
# because it doesn't make sense to stop them with a config from
# the old evaluation.
if (!boolIsTrue($unitInfo->{'X-StopIfChanged'} // "yes") || $unit !~ /\.service$/) {
# This unit should be restarted instead of
# stopped and started.
$unitsToRestart->{$unit} = 1;
recordUnit($restartListFile, $unit);
} else {
# We write to a file to ensure that the
# service gets restarted if we're interrupted.
$unitsToStart->{$unit} = 1;
recordUnit($startListFile, $unit);
$unitsToStop->{$unit} = 1;
}
}
}
}
}
# Figure out what units need to be stopped, started, restarted or reloaded.
my (%unitsToStop, %unitsToSkip, %unitsToStart, %unitsToRestart, %unitsToReload);
@ -219,65 +307,7 @@ while (my ($unit, $state) = each %{$activePrev}) {
}
elsif (fingerprintUnit($prevUnitFile) ne fingerprintUnit($newUnitFile)) {
if ($unit eq "sysinit.target" || $unit eq "basic.target" || $unit eq "multi-user.target" || $unit eq "graphical.target") {
# Do nothing. These cannot be restarted directly.
} elsif ($unit =~ /\.mount$/) {
# Reload the changed mount unit to force a remount.
$unitsToReload{$unit} = 1;
recordUnit($reloadListFile, $unit);
} elsif ($unit =~ /\.socket$/ || $unit =~ /\.path$/ || $unit =~ /\.slice$/) {
# FIXME: do something?
} else {
my $unitInfo = parseUnit($newUnitFile);
if (boolIsTrue($unitInfo->{'X-ReloadIfChanged'} // "no")) {
$unitsToReload{$unit} = 1;
recordUnit($reloadListFile, $unit);
}
elsif (!boolIsTrue($unitInfo->{'X-RestartIfChanged'} // "yes") || boolIsTrue($unitInfo->{'RefuseManualStop'} // "no") || boolIsTrue($unitInfo->{'X-OnlyManualStart'} // "no")) {
$unitsToSkip{$unit} = 1;
} else {
if (!boolIsTrue($unitInfo->{'X-StopIfChanged'} // "yes")) {
# This unit should be restarted instead of
# stopped and started.
$unitsToRestart{$unit} = 1;
recordUnit($restartListFile, $unit);
} else {
# If this unit is socket-activated, then stop the
# socket unit(s) as well, and restart the
# socket(s) instead of the service.
my $socketActivated = 0;
if ($unit =~ /\.service$/) {
my @sockets = split / /, ($unitInfo->{Sockets} // "");
if (scalar @sockets == 0) {
@sockets = ("$baseName.socket");
}
foreach my $socket (@sockets) {
if (defined $activePrev->{$socket}) {
$unitsToStop{$socket} = 1;
# Only restart sockets that actually
# exist in new configuration:
if (-e "$out/etc/systemd/system/$socket") {
$unitsToStart{$socket} = 1;
recordUnit($startListFile, $socket);
$socketActivated = 1;
}
}
}
}
# If the unit is not socket-activated, record
# that this unit needs to be started below.
# We write this to a file to ensure that the
# service gets restarted if we're interrupted.
if (!$socketActivated) {
$unitsToStart{$unit} = 1;
recordUnit($startListFile, $unit);
}
$unitsToStop{$unit} = 1;
}
}
}
handleModifiedUnit($unit, $baseName, $newUnitFile, $activePrev, \%unitsToStop, \%unitsToStart, \%unitsToReload, \%unitsToRestart, \%unitsToSkip);
}
}
}
@ -362,8 +392,6 @@ sub filterUnits {
}
my @unitsToStopFiltered = filterUnits(\%unitsToStop);
my @unitsToStartFiltered = filterUnits(\%unitsToStart);
# Show dry-run actions.
if ($action eq "dry-activate") {
@ -375,21 +403,44 @@ if ($action eq "dry-activate") {
print STDERR "would activate the configuration...\n";
system("$out/dry-activate", "$out");
$unitsToRestart{$_} = 1 foreach
split('\n', read_file($dryRestartByActivationFile, err_mode => 'quiet') // "");
# Handle the activation script requesting the restart or reload of a unit.
my %unitsToAlsoStop;
my %unitsToAlsoSkip;
foreach (split('\n', read_file($dryRestartByActivationFile, err_mode => 'quiet') // "")) {
my $unit = $_;
my $baseUnit = $unit;
my $newUnitFile = "$out/etc/systemd/system/$baseUnit";
$unitsToReload{$_} = 1 foreach
split('\n', read_file($dryReloadByActivationFile, err_mode => 'quiet') // "");
# Detect template instances.
if (!-e $newUnitFile && $unit =~ /^(.*)@[^\.]*\.(.*)$/) {
$baseUnit = "$1\@.$2";
$newUnitFile = "$out/etc/systemd/system/$baseUnit";
}
my $baseName = $baseUnit;
$baseName =~ s/\.[a-z]*$//;
handleModifiedUnit($unit, $baseName, $newUnitFile, $activePrev, \%unitsToAlsoStop, \%unitsToStart, \%unitsToReload, \%unitsToRestart, \%unitsToAlsoSkip);
}
unlink($dryRestartByActivationFile);
my @unitsToAlsoStopFiltered = filterUnits(\%unitsToAlsoStop);
if (scalar(keys %unitsToAlsoStop) > 0) {
print STDERR "would stop the following units as well: ", join(", ", @unitsToAlsoStopFiltered), "\n"
if scalar @unitsToAlsoStopFiltered;
}
print STDERR "would NOT restart the following changed units as well: ", join(", ", sort(keys %unitsToAlsoSkip)), "\n"
if scalar(keys %unitsToAlsoSkip) > 0;
print STDERR "would restart systemd\n" if $restartSystemd;
print STDERR "would restart the following units: ", join(", ", sort(keys %unitsToRestart)), "\n"
if scalar(keys %unitsToRestart) > 0;
print STDERR "would start the following units: ", join(", ", @unitsToStartFiltered), "\n"
if scalar @unitsToStartFiltered;
print STDERR "would reload the following units: ", join(", ", sort(keys %unitsToReload)), "\n"
if scalar(keys %unitsToReload) > 0;
unlink($dryRestartByActivationFile);
unlink($dryReloadByActivationFile);
print STDERR "would restart the following units: ", join(", ", sort(keys %unitsToRestart)), "\n"
if scalar(keys %unitsToRestart) > 0;
my @unitsToStartFiltered = filterUnits(\%unitsToStart);
print STDERR "would start the following units: ", join(", ", @unitsToStartFiltered), "\n"
if scalar @unitsToStartFiltered;
exit 0;
}
@ -400,7 +451,7 @@ if (scalar (keys %unitsToStop) > 0) {
print STDERR "stopping the following units: ", join(", ", @unitsToStopFiltered), "\n"
if scalar @unitsToStopFiltered;
# Use current version of systemctl binary before daemon is reexeced.
system("$curSystemd/systemctl", "stop", "--", sort(keys %unitsToStop)); # FIXME: ignore errors?
system("$curSystemd/systemctl", "stop", "--", sort(keys %unitsToStop));
}
print STDERR "NOT restarting the following changed units: ", join(", ", sort(keys %unitsToSkip)), "\n"
@ -414,12 +465,38 @@ system("$out/activate", "$out") == 0 or $res = 2;
# Handle the activation script requesting the restart or reload of a unit.
# We can only restart and reload (not stop/start) because the units to be
# stopped are already stopped before the activation script is run.
$unitsToRestart{$_} = 1 foreach
split('\n', read_file($restartByActivationFile, err_mode => 'quiet') // "");
# stopped are already stopped before the activation script is run. We do however
# make an exception for services that are socket-activated and that have to be stopped
# instead of being restarted.
my %unitsToAlsoStop;
my %unitsToAlsoSkip;
foreach (split('\n', read_file($restartByActivationFile, err_mode => 'quiet') // "")) {
my $unit = $_;
my $baseUnit = $unit;
my $newUnitFile = "$out/etc/systemd/system/$baseUnit";
$unitsToReload{$_} = 1 foreach
split('\n', read_file($reloadByActivationFile, err_mode => 'quiet') // "");
# Detect template instances.
if (!-e $newUnitFile && $unit =~ /^(.*)@[^\.]*\.(.*)$/) {
$baseUnit = "$1\@.$2";
$newUnitFile = "$out/etc/systemd/system/$baseUnit";
}
my $baseName = $baseUnit;
$baseName =~ s/\.[a-z]*$//;
handleModifiedUnit($unit, $baseName, $newUnitFile, $activePrev, \%unitsToAlsoStop, \%unitsToStart, \%unitsToReload, \%unitsToRestart, \%unitsToAlsoSkip);
}
unlink($restartByActivationFile);
my @unitsToAlsoStopFiltered = filterUnits(\%unitsToAlsoStop);
if (scalar(keys %unitsToAlsoStop) > 0) {
print STDERR "stopping the following units as well: ", join(", ", @unitsToAlsoStopFiltered), "\n"
if scalar @unitsToAlsoStopFiltered;
system("$curSystemd/systemctl", "stop", "--", sort(keys %unitsToAlsoStop));
}
print STDERR "NOT restarting the following changed units as well: ", join(", ", sort(keys %unitsToAlsoSkip)), "\n"
if scalar(keys %unitsToAlsoSkip) > 0;
# Restart systemd if necessary. Note that this is done using the
# current version of systemd, just in case the new one has trouble
@ -460,14 +537,40 @@ if (scalar(keys %unitsToReload) > 0) {
print STDERR "reloading the following units: ", join(", ", sort(keys %unitsToReload)), "\n";
system("@systemd@/bin/systemctl", "reload", "--", sort(keys %unitsToReload)) == 0 or $res = 4;
unlink($reloadListFile);
unlink($reloadByActivationFile);
}
# Restart changed services (those that have to be restarted rather
# than stopped and started).
if (scalar(keys %unitsToRestart) > 0) {
print STDERR "restarting the following units: ", join(", ", sort(keys %unitsToRestart)), "\n";
system("@systemd@/bin/systemctl", "restart", "--", sort(keys %unitsToRestart)) == 0 or $res = 4;
# We split the units to be restarted into sockets and non-sockets.
# This is because restarting sockets may fail which is not bad by
# itself but which will prevent changes on the sockets. We usually
# restart the socket and stop the service before that. Restarting
# the socket will fail however when the service was re-activated
# in the meantime. There is no proper way to prevent that from happening.
my @unitsWithErrorHandling = grep { $_ !~ /\.socket$/ } sort(keys %unitsToRestart);
my @unitsWithoutErrorHandling = grep { $_ =~ /\.socket$/ } sort(keys %unitsToRestart);
if (scalar(@unitsWithErrorHandling) > 0) {
system("@systemd@/bin/systemctl", "restart", "--", @unitsWithErrorHandling) == 0 or $res = 4;
}
if (scalar(@unitsWithoutErrorHandling) > 0) {
# Don't print warnings from systemctl
no warnings 'once';
open(OLDERR, ">&", \*STDERR);
close(STDERR);
my $ret = system("@systemd@/bin/systemctl", "restart", "--", @unitsWithoutErrorHandling);
# Print stderr again
open(STDERR, ">&OLDERR");
if ($ret ne 0) {
print STDERR "warning: some sockets failed to restart. Please check your journal (journalctl -eb) and act accordingly.\n";
}
}
unlink($restartListFile);
unlink($restartByActivationFile);
}
@ -478,6 +581,7 @@ if (scalar(keys %unitsToRestart) > 0) {
# that are symlinks to other units. We shouldn't start both at the
# same time because we'll get a "Failed to add path to set" error from
# systemd.
my @unitsToStartFiltered = filterUnits(\%unitsToStart);
print STDERR "starting the following units: ", join(", ", @unitsToStartFiltered), "\n"
if scalar @unitsToStartFiltered;
system("@systemd@/bin/systemctl", "start", "--", sort(keys %unitsToStart)) == 0 or $res = 4;
@ -485,7 +589,7 @@ unlink($startListFile);
# Print failed and new units.
my (@failed, @new, @restarting);
my (@failed, @new);
my $activeNew = getActiveUnits;
while (my ($unit, $state) = each %{$activeNew}) {
if ($state->{state} eq "failed") {
@ -501,7 +605,9 @@ while (my ($unit, $state) = each %{$activeNew}) {
push @failed, $unit;
}
}
elsif ($state->{state} ne "failed" && !defined $activePrev->{$unit}) {
# Ignore scopes since they are not managed by this script but rather
# created and managed by third-party services via the systemd dbus API.
elsif ($state->{state} ne "failed" && !defined $activePrev->{$unit} && $unit !~ /\.scope$/) {
push @new, $unit;
}
}


@ -84,6 +84,13 @@ let
export localeArchive="${config.i18n.glibcLocales}/lib/locale/locale-archive"
substituteAll ${./switch-to-configuration.pl} $out/bin/switch-to-configuration
chmod +x $out/bin/switch-to-configuration
${optionalString (pkgs.stdenv.hostPlatform == pkgs.stdenv.buildPlatform) ''
if ! output=$($perl/bin/perl -c $out/bin/switch-to-configuration 2>&1); then
echo "switch-to-configuration syntax is not valid:"
echo "$output"
exit 1
fi
''}
echo -n "${toString config.system.extraDependencies}" > $out/extra-dependencies


@ -34,7 +34,7 @@ in {
initrd.availableKernelModules = [ "hyperv_keyboard" ];
kernelParams = [
"video=hyperv_fb:${cfg.videoMode} elevator=noop"
"video=hyperv_fb:${cfg.videoMode}" "elevator=noop"
];
};


@ -13,23 +13,140 @@ let
'';
ovmfFilePrefix = if pkgs.stdenv.isAarch64 then "AAVMF" else "OVMF";
qemuConfigFile = pkgs.writeText "qemu.conf" ''
${optionalString cfg.qemuOvmf ''
${optionalString cfg.qemu.ovmf.enable ''
nvram = [ "/run/libvirt/nix-ovmf/${ovmfFilePrefix}_CODE.fd:/run/libvirt/nix-ovmf/${ovmfFilePrefix}_VARS.fd" ]
''}
${optionalString (!cfg.qemuRunAsRoot) ''
${optionalString (!cfg.qemu.runAsRoot) ''
user = "qemu-libvirtd"
group = "qemu-libvirtd"
''}
${cfg.qemuVerbatimConfig}
${cfg.qemu.verbatimConfig}
'';
dirName = "libvirt";
subDirs = list: [ dirName ] ++ map (e: "${dirName}/${e}") list;
in {
ovmfModule = types.submodule {
options = {
enable = mkOption {
type = types.bool;
default = true;
description = ''
Allows libvirtd to take advantage of OVMF when creating new
QEMU VMs with UEFI boot.
'';
};
package = mkOption {
type = types.package;
default = pkgs.OVMF;
defaultText = literalExpression "pkgs.OVMF";
example = literalExpression "pkgs.OVMFFull";
description = ''
OVMF package to use.
'';
};
};
};
swtpmModule = types.submodule {
options = {
enable = mkOption {
type = types.bool;
default = false;
description = ''
Allows libvirtd to use swtpm to create an emulated TPM.
'';
};
package = mkOption {
type = types.package;
default = pkgs.swtpm;
defaultText = literalExpression "pkgs.swtpm";
description = ''
swtpm package to use.
'';
};
};
};
qemuModule = types.submodule {
options = {
package = mkOption {
type = types.package;
default = pkgs.qemu;
defaultText = literalExpression "pkgs.qemu";
description = ''
Qemu package to use with libvirt.
`pkgs.qemu` can emulate alien architectures (e.g. aarch64 on x86)
`pkgs.qemu_kvm` saves disk space allowing to emulate only host architectures.
'';
};
runAsRoot = mkOption {
type = types.bool;
default = true;
description = ''
If true, libvirtd runs qemu as root.
If false, libvirtd runs qemu as unprivileged user qemu-libvirtd.
Changing this option to false may cause file permission issues
for existing guests. To fix these, manually change ownership
of affected files in /var/lib/libvirt/qemu to qemu-libvirtd.
'';
};
verbatimConfig = mkOption {
type = types.lines;
default = ''
namespaces = []
'';
description = ''
Contents written to the qemu configuration file, qemu.conf.
Make sure to include a proper namespace configuration when
supplying custom configuration.
'';
};
ovmf = mkOption {
type = ovmfModule;
default = { };
description = ''
QEMU's OVMF options.
'';
};
swtpm = mkOption {
type = swtpmModule;
default = { };
description = ''
QEMU's swtpm options.
'';
};
};
};
in
{
imports = [
(mkRemovedOptionModule [ "virtualisation" "libvirtd" "enableKVM" ]
"Set the option `virtualisation.libvirtd.qemuPackage' instead.")
"Set the option `virtualisation.libvirtd.qemu.package' instead.")
(mkRenamedOptionModule
[ "virtualisation" "libvirtd" "qemuPackage" ]
[ "virtualisation" "libvirtd" "qemu" "package" ])
(mkRenamedOptionModule
[ "virtualisation" "libvirtd" "qemuRunAsRoot" ]
[ "virtualisation" "libvirtd" "qemu" "runAsRoot" ])
(mkRenamedOptionModule
[ "virtualisation" "libvirtd" "qemuVerbatimConfig" ]
[ "virtualisation" "libvirtd" "qemu" "verbatimConfig" ])
(mkRenamedOptionModule
[ "virtualisation" "libvirtd" "qemuOvmf" ]
[ "virtualisation" "libvirtd" "qemu" "ovmf" "enable" ])
(mkRenamedOptionModule
[ "virtualisation" "libvirtd" "qemuOvmfPackage" ]
[ "virtualisation" "libvirtd" "qemu" "ovmf" "package" ])
(mkRenamedOptionModule
[ "virtualisation" "libvirtd" "qemuSwtpm" ]
[ "virtualisation" "libvirtd" "qemu" "swtpm" "enable" ])
];
###### interface
@ -56,17 +173,6 @@ in {
'';
};
qemuPackage = mkOption {
type = types.package;
default = pkgs.qemu;
defaultText = literalExpression "pkgs.qemu";
description = ''
Qemu package to use with libvirt.
`pkgs.qemu` can emulate alien architectures (e.g. aarch64 on x86)
`pkgs.qemu_kvm` saves disk space allowing to emulate only host architectures.
'';
};
extraConfig = mkOption {
type = types.lines;
default = "";
@ -76,56 +182,6 @@ in {
'';
};
qemuRunAsRoot = mkOption {
type = types.bool;
default = true;
description = ''
If true, libvirtd runs qemu as root.
If false, libvirtd runs qemu as unprivileged user qemu-libvirtd.
Changing this option to false may cause file permission issues
for existing guests. To fix these, manually change ownership
of affected files in /var/lib/libvirt/qemu to qemu-libvirtd.
'';
};
qemuVerbatimConfig = mkOption {
type = types.lines;
default = ''
namespaces = []
'';
description = ''
Contents written to the qemu configuration file, qemu.conf.
Make sure to include a proper namespace configuration when
supplying custom configuration.
'';
};
qemuOvmf = mkOption {
type = types.bool;
default = true;
description = ''
Allows libvirtd to take advantage of OVMF when creating new
QEMU VMs with UEFI boot.
'';
};
qemuOvmfPackage = mkOption {
type = types.package;
default = pkgs.OVMF;
defaultText = literalExpression "pkgs.OVMF";
example = literalExpression "pkgs.OVMFFull";
description = ''
OVMF package to use.
'';
};
qemuSwtpm = mkOption {
type = types.bool;
default = false;
description = ''
Allows libvirtd to use swtpm to create an emulated TPM.
'';
};
extraOptions = mkOption {
type = types.listOf types.str;
default = [ ];
@ -136,7 +192,7 @@ in {
};
onBoot = mkOption {
type = types.enum ["start" "ignore" ];
type = types.enum [ "start" "ignore" ];
default = "start";
description = ''
Specifies the action to be done to / on the guests when the host boots.
@ -148,7 +204,7 @@ in {
};
onShutdown = mkOption {
type = types.enum ["shutdown" "suspend" ];
type = types.enum [ "shutdown" "suspend" ];
default = "suspend";
description = ''
When shutting down / restarting the host what method should
@ -166,6 +222,13 @@ in {
'';
};
qemu = mkOption {
type = qemuModule;
default = { };
description = ''
QEMU related options.
'';
};
};
@ -179,16 +242,18 @@ in {
message = "The libvirtd module currently requires Polkit to be enabled ('security.polkit.enable = true').";
}
{
assertion = builtins.elem "fd" cfg.qemuOvmfPackage.outputs;
assertion = builtins.elem "fd" cfg.qemu.ovmf.package.outputs;
message = "The option 'virtualisation.libvirtd.qemuOvmfPackage' needs a package that has an 'fd' output.";
}
];
environment = {
# this file is expected in /etc/qemu and not sysconfdir (/var/lib)
etc."qemu/bridge.conf".text = lib.concatMapStringsSep "\n" (e:
"allow ${e}") cfg.allowedBridges;
systemPackages = with pkgs; [ libressl.nc iptables cfg.package cfg.qemuPackage ];
etc."qemu/bridge.conf".text = lib.concatMapStringsSep "\n"
(e:
"allow ${e}")
cfg.allowedBridges;
systemPackages = with pkgs; [ libressl.nc iptables cfg.package cfg.qemu.package ];
etc.ethertypes.source = "${pkgs.ebtables}/etc/ethertypes";
};
@ -230,17 +295,17 @@ in {
cp -f ${qemuConfigFile} /var/lib/${dirName}/qemu.conf
# stable (not GC'able as in /nix/store) paths for using in <emulator> section of xml configs
for emulator in ${cfg.package}/libexec/libvirt_lxc ${cfg.qemuPackage}/bin/qemu-kvm ${cfg.qemuPackage}/bin/qemu-system-*; do
for emulator in ${cfg.package}/libexec/libvirt_lxc ${cfg.qemu.package}/bin/qemu-kvm ${cfg.qemu.package}/bin/qemu-system-*; do
ln -s --force "$emulator" /run/${dirName}/nix-emulators/
done
for helper in libexec/qemu-bridge-helper bin/qemu-pr-helper; do
ln -s --force ${cfg.qemuPackage}/$helper /run/${dirName}/nix-helpers/
ln -s --force ${cfg.qemu.package}/$helper /run/${dirName}/nix-helpers/
done
${optionalString cfg.qemuOvmf ''
ln -s --force ${cfg.qemuOvmfPackage.fd}/FV/${ovmfFilePrefix}_CODE.fd /run/${dirName}/nix-ovmf/
ln -s --force ${cfg.qemuOvmfPackage.fd}/FV/${ovmfFilePrefix}_VARS.fd /run/${dirName}/nix-ovmf/
${optionalString cfg.qemu.ovmf.enable ''
ln -s --force ${cfg.qemu.ovmf.package.fd}/FV/${ovmfFilePrefix}_CODE.fd /run/${dirName}/nix-ovmf/
ln -s --force ${cfg.qemu.ovmf.package.fd}/FV/${ovmfFilePrefix}_VARS.fd /run/${dirName}/nix-ovmf/
''}
'';
@ -256,16 +321,20 @@ in {
systemd.services.libvirtd = {
requires = [ "libvirtd-config.service" ];
after = [ "libvirtd-config.service" ]
++ optional vswitch.enable "ovs-vswitchd.service";
++ optional vswitch.enable "ovs-vswitchd.service";
environment.LIBVIRTD_ARGS = escapeShellArgs (
[ "--config" configFile
"--timeout" "120" # from ${libvirt}/var/lib/sysconfig/libvirtd
] ++ cfg.extraOptions);
[
"--config"
configFile
"--timeout"
"120" # from ${libvirt}/var/lib/sysconfig/libvirtd
] ++ cfg.extraOptions
);
path = [ cfg.qemuPackage ] # libvirtd requires qemu-img to manage disk images
++ optional vswitch.enable vswitch.package
++ optional cfg.qemuSwtpm pkgs.swtpm;
path = [ cfg.qemu.package ] # libvirtd requires qemu-img to manage disk images
++ optional vswitch.enable vswitch.package
++ optional cfg.qemu.swtpm.enable cfg.qemu.swtpm.package;
serviceConfig = {
Type = "notify";


@ -311,6 +311,7 @@ in
nitter = handleTest ./nitter.nix {};
nix-serve = handleTest ./nix-ssh-serve.nix {};
nix-ssh-serve = handleTest ./nix-ssh-serve.nix {};
nixops = handleTest ./nixops/default.nix {};
nixos-generate-config = handleTest ./nixos-generate-config.nix {};
node-red = handleTest ./node-red.nix {};
nomad = handleTest ./nomad.nix {};


@ -0,0 +1,115 @@
{ pkgs, ... }:
let
inherit (pkgs) lib;
tests = {
# TODO: uncomment stable
# - Blocked on https://github.com/NixOS/nixpkgs/issues/138584 which has a
# PR in staging: https://github.com/NixOS/nixpkgs/pull/139986
# - Alternatively, blocked on a NixOps 2 release
# https://github.com/NixOS/nixops/issues/1242
# stable = testsLegacyNetwork { nixopsPkg = pkgs.nixops; };
unstable = testsForPackage { nixopsPkg = pkgs.nixopsUnstable; };
# inherit testsForPackage;
};
testsForPackage = lib.makeOverridable (args: lib.recurseIntoAttrs {
legacyNetwork = testLegacyNetwork args;
});
testLegacyNetwork = { nixopsPkg }: pkgs.nixosTest ({
nodes = {
deployer = { config, lib, nodes, pkgs, ... }: {
imports = [ ../../modules/installer/cd-dvd/channel.nix ];
environment.systemPackages = [ nixopsPkg ];
nix.binaryCaches = lib.mkForce [ ];
users.users.person.isNormalUser = true;
virtualisation.writableStore = true;
virtualisation.memorySize = 1024 /*MiB*/;
virtualisation.pathsInNixDB = [
pkgs.hello
pkgs.figlet
# This includes build dependencies all the way down. Not efficient,
# but we do need build deps to an *arbitrary* depth, which is hard to
# determine.
(allDrvOutputs nodes.server.config.system.build.toplevel)
];
};
server = { lib, ... }: {
imports = [ ./legacy/base-configuration.nix ];
};
};
testScript = { nodes }:
let
deployerSetup = pkgs.writeScript "deployerSetup" ''
#!${pkgs.runtimeShell}
set -eux -o pipefail
cp --no-preserve=mode -r ${./legacy} unicorn
cp --no-preserve=mode ${../ssh-keys.nix} unicorn/ssh-keys.nix
mkdir -p ~/.ssh
cp ${snakeOilPrivateKey} ~/.ssh/id_ed25519
chmod 0400 ~/.ssh/id_ed25519
'';
serverNetworkJSON = pkgs.writeText "server-network.json"
(builtins.toJSON nodes.server.config.system.build.networkConfig);
in
''
import shlex
def deployer_do(cmd):
    cmd = shlex.quote(cmd)
    return deployer.succeed(f"su person -l -c {cmd} &>/dev/console")
start_all()
deployer_do("cat /etc/hosts")
deployer_do("${deployerSetup}")
deployer_do("cp ${serverNetworkJSON} unicorn/server-network.json")
# Establish that ssh works, regardless of nixops
# Easy way to accept the server host key too.
server.wait_for_open_port(22)
deployer.wait_for_unit("network.target")
# Put newlines on console, to flush the console reader's line buffer
# in case nixops' last output did not end in a newline, as is the case
# with a status line (if implemented?)
deployer.succeed("while sleep 60s; do echo [60s passed] >/dev/console; done &")
deployer_do("cd ~/unicorn; ssh -oStrictHostKeyChecking=accept-new root@server echo hi")
# Create and deploy
deployer_do("cd ~/unicorn; nixops create")
deployer_do("cd ~/unicorn; nixops deploy --confirm")
deployer_do("cd ~/unicorn; nixops ssh server 'hello | figlet'")
'';
});
inherit (import ../ssh-keys.nix pkgs) snakeOilPrivateKey snakeOilPublicKey;
/*
Return a store path with a closure containing everything including
derivations and all build dependency outputs, all the way down.
*/
allDrvOutputs = pkg:
let name = lib.strings.sanitizeDerivationName "allDrvOutputs-${pkg.pname or pkg.name or "unknown"}";
in
pkgs.runCommand name { refs = pkgs.writeReferencesToFile pkg.drvPath; } ''
touch $out
while read ref; do
case $ref in
*.drv)
cat $ref >>$out
;;
esac
done <$refs
'';
in
tests


@ -0,0 +1,31 @@
{ lib, modulesPath, pkgs, ... }:
let
ssh-keys =
if builtins.pathExists ../../ssh-keys.nix
then # Outside sandbox
../../ssh-keys.nix
else # In sandbox
./ssh-keys.nix;
inherit (import ssh-keys pkgs)
snakeOilPrivateKey snakeOilPublicKey;
in
{
imports = [
(modulesPath + "/virtualisation/qemu-vm.nix")
(modulesPath + "/testing/test-instrumentation.nix")
];
virtualisation.writableStore = true;
nix.binaryCaches = lib.mkForce [ ];
virtualisation.graphics = false;
documentation.enable = false;
services.qemuGuest.enable = true;
boot.loader.grub.enable = false;
services.openssh.enable = true;
users.users.root.openssh.authorizedKeys.keys = [
snakeOilPublicKey
];
security.pam.services.sshd.limits =
[{ domain = "*"; item = "memlock"; type = "-"; value = 1024; }];
}


@ -0,0 +1,15 @@
{
network = {
description = "Legacy Network using <nixpkgs> and legacy state.";
# NB this is not really what makes it a legacy network; lack of flakes is.
storage.legacy = { };
};
server = { lib, pkgs, ... }: {
deployment.targetEnv = "none";
imports = [
./base-configuration.nix
(lib.modules.importJSON ./server-network.json)
];
environment.systemPackages = [ pkgs.hello pkgs.figlet ];
};
}


@ -7,15 +7,224 @@ import ./make-test-python.nix ({ pkgs, ...} : {
};
nodes = {
machine = { ... }: {
machine = { config, pkgs, lib, ... }: {
environment.systemPackages = [ pkgs.socat ]; # for the socket activation stuff
users.mutableUsers = false;
specialisation = {
# A system with a simple socket-activated unit
simple-socket.configuration = {
systemd.services.socket-activated.serviceConfig = {
ExecStart = pkgs.writeScript "socket-test.py" /* python */ ''
#!${pkgs.python3}/bin/python3
from socketserver import TCPServer, StreamRequestHandler
import socket

class Handler(StreamRequestHandler):
    def handle(self):
        self.wfile.write("hello".encode("utf-8"))

class Server(TCPServer):
    def __init__(self, server_address, handler_cls):
        # Invoke base but omit bind/listen steps (performed by systemd activation!)
        TCPServer.__init__(
            self, server_address, handler_cls, bind_and_activate=False)
        # Override socket
        self.socket = socket.fromfd(3, self.address_family, self.socket_type)

if __name__ == "__main__":
    server = Server(("localhost", 1234), Handler)
    server.serve_forever()
'';
};
systemd.sockets.socket-activated = {
wantedBy = [ "sockets.target" ];
listenStreams = [ "/run/test.sock" ];
socketConfig.SocketMode = lib.mkDefault "0777";
};
};
# The same system but the socket is modified
modified-socket.configuration = {
imports = [ config.specialisation.simple-socket.configuration ];
systemd.sockets.socket-activated.socketConfig.SocketMode = "0666";
};
# The same system but the service is modified
modified-service.configuration = {
imports = [ config.specialisation.simple-socket.configuration ];
systemd.services.socket-activated.serviceConfig.X-Test = "test";
};
# The same system but both service and socket are modified
modified-service-and-socket.configuration = {
imports = [ config.specialisation.simple-socket.configuration ];
systemd.services.socket-activated.serviceConfig.X-Test = "some_value";
systemd.sockets.socket-activated.socketConfig.SocketMode = "0444";
};
# A system with a socket-activated service and some simple services
service-and-socket.configuration = {
imports = [ config.specialisation.simple-socket.configuration ];
systemd.services.simple-service = {
wantedBy = [ "multi-user.target" ];
serviceConfig = {
Type = "oneshot";
RemainAfterExit = true;
ExecStart = "${pkgs.coreutils}/bin/true";
};
};
systemd.services.simple-restart-service = {
stopIfChanged = false;
wantedBy = [ "multi-user.target" ];
serviceConfig = {
Type = "oneshot";
RemainAfterExit = true;
ExecStart = "${pkgs.coreutils}/bin/true";
};
};
systemd.services.simple-reload-service = {
reloadIfChanged = true;
wantedBy = [ "multi-user.target" ];
serviceConfig = {
Type = "oneshot";
RemainAfterExit = true;
ExecStart = "${pkgs.coreutils}/bin/true";
ExecReload = "${pkgs.coreutils}/bin/true";
};
};
systemd.services.no-restart-service = {
restartIfChanged = false;
wantedBy = [ "multi-user.target" ];
serviceConfig = {
Type = "oneshot";
RemainAfterExit = true;
ExecStart = "${pkgs.coreutils}/bin/true";
};
};
};
# The same system but with an activation script that restarts all services
restart-and-reload-by-activation-script.configuration = {
imports = [ config.specialisation.service-and-socket.configuration ];
system.activationScripts.restart-and-reload-test = {
supportsDryActivation = true;
deps = [];
text = ''
if [ "$NIXOS_ACTION" = dry-activate ]; then
f=/run/nixos/dry-activation-restart-list
else
f=/run/nixos/activation-restart-list
fi
cat <<EOF >> "$f"
simple-service.service
simple-restart-service.service
simple-reload-service.service
no-restart-service.service
socket-activated.service
EOF
'';
};
};
# A system with a timer
with-timer.configuration = {
systemd.timers.test-timer = {
wantedBy = [ "timers.target" ];
timerConfig.OnCalendar = "@1395716396"; # chosen by fair dice roll
};
systemd.services.test-timer = {
serviceConfig = {
Type = "oneshot";
ExecStart = "${pkgs.coreutils}/bin/true";
};
};
};
# The same system but with another time
with-timer-modified.configuration = {
imports = [ config.specialisation.with-timer.configuration ];
systemd.timers.test-timer.timerConfig.OnCalendar = lib.mkForce "Fri 2012-11-23 16:00:00";
};
# A system with a systemd mount
with-mount.configuration = {
systemd.mounts = [
{
description = "Testmount";
what = "tmpfs";
type = "tmpfs";
where = "/testmount";
options = "size=1M";
wantedBy = [ "local-fs.target" ];
}
];
};
# The same system but with a modified mount option
with-mount-modified.configuration = {
systemd.mounts = [
{
description = "Testmount";
what = "tmpfs";
type = "tmpfs";
where = "/testmount";
options = "size=10M";
wantedBy = [ "local-fs.target" ];
}
];
};
# A system with a path unit
with-path.configuration = {
systemd.paths.test-watch = {
wantedBy = [ "paths.target" ];
pathConfig.PathExists = "/testpath";
};
systemd.services.test-watch = {
serviceConfig = {
Type = "oneshot";
ExecStart = "${pkgs.coreutils}/bin/touch /testpath-modified";
};
};
};
# The same system but watching another file
with-path-modified.configuration = {
imports = [ config.specialisation.with-path.configuration ];
systemd.paths.test-watch.pathConfig.PathExists = lib.mkForce "/testpath2";
};
# A system with a slice
with-slice.configuration = {
systemd.slices.testslice.sliceConfig.MemoryMax = "1"; # don't allow memory allocation
systemd.services.testservice = {
serviceConfig = {
Type = "oneshot";
RemainAfterExit = true;
ExecStart = "${pkgs.coreutils}/bin/true";
Slice = "testslice.slice";
};
};
};
# The same system but the slice allows to allocate memory
with-slice-non-crashing.configuration = {
imports = [ config.specialisation.with-slice.configuration ];
systemd.slices.testslice.sliceConfig.MemoryMax = lib.mkForce null;
};
};
};
other = { ... }: {
users.mutableUsers = true;
};
};
testScript = {nodes, ...}: let
testScript = { nodes, ... }: let
originalSystem = nodes.machine.config.system.build.toplevel;
otherSystem = nodes.other.config.system.build.toplevel;
@ -27,12 +236,182 @@ import ./make-test-python.nix ({ pkgs, ...} : {
set -o pipefail
exec env -i "$@" | tee /dev/stderr
'';
in ''
in /* python */ ''
def switch_to_specialisation(name, action="test"):
    out = machine.succeed(f"${originalSystem}/specialisation/{name}/bin/switch-to-configuration {action} 2>&1")
    assert_lacks(out, "switch-to-configuration line")  # Perl warnings
    return out

def assert_contains(haystack, needle):
    if needle not in haystack:
        print("The haystack that will cause the following exception is:")
        print("---")
        print(haystack)
        print("---")
        raise Exception(f"Expected string '{needle}' was not found")

def assert_lacks(haystack, needle):
    if needle in haystack:
        print("The haystack that will cause the following exception is:")
        print("---")
        print(haystack, end="")
        print("---")
        raise Exception(f"Unexpected string '{needle}' was found")
machine.succeed(
"${stderrRunner} ${originalSystem}/bin/switch-to-configuration test"
)
machine.succeed(
"${stderrRunner} ${otherSystem}/bin/switch-to-configuration test"
)
with subtest("systemd sockets"):
machine.succeed("${originalSystem}/bin/switch-to-configuration test")
# Simple socket is created
out = switch_to_specialisation("simple-socket")
assert_lacks(out, "stopping the following units:")
# not checking for reload because dbus gets reloaded
assert_lacks(out, "restarting the following units:")
assert_lacks(out, "\nstarting the following units:")
assert_contains(out, "the following new units were started: socket-activated.socket\n")
assert_lacks(out, "as well:")
machine.succeed("[ $(stat -c%a /run/test.sock) = 777 ]")
# Changing the socket restarts it
out = switch_to_specialisation("modified-socket")
assert_lacks(out, "stopping the following units:")
#assert_lacks(out, "reloading the following units:")
assert_contains(out, "restarting the following units: socket-activated.socket\n")
assert_lacks(out, "\nstarting the following units:")
assert_lacks(out, "the following new units were started:")
assert_lacks(out, "as well:")
machine.succeed("[ $(stat -c%a /run/test.sock) = 666 ]") # change was applied
# The unit is properly activated when the socket is accessed
if machine.succeed("socat - UNIX-CONNECT:/run/test.sock") != "hello":
raise Exception("Socket was not properly activated")
# Changing the socket restarts it and ignores the active service
out = switch_to_specialisation("simple-socket")
assert_contains(out, "stopping the following units: socket-activated.service\n")
assert_lacks(out, "reloading the following units:")
assert_contains(out, "restarting the following units: socket-activated.socket\n")
assert_lacks(out, "\nstarting the following units:")
assert_lacks(out, "the following new units were started:")
assert_lacks(out, "as well:")
machine.succeed("[ $(stat -c%a /run/test.sock) = 777 ]") # change was applied
# Changing the service does nothing when the service is not active
out = switch_to_specialisation("modified-service")
assert_lacks(out, "stopping the following units:")
assert_lacks(out, "reloading the following units:")
assert_lacks(out, "restarting the following units:")
assert_lacks(out, "\nstarting the following units:")
assert_lacks(out, "the following new units were started:")
assert_lacks(out, "as well:")
# Activating the service and modifying it stops it but leaves the socket untouched
machine.succeed("socat - UNIX-CONNECT:/run/test.sock")
out = switch_to_specialisation("simple-socket")
assert_contains(out, "stopping the following units: socket-activated.service\n")
assert_lacks(out, "reloading the following units:")
assert_lacks(out, "restarting the following units:")
assert_lacks(out, "\nstarting the following units:")
assert_lacks(out, "the following new units were started:")
assert_lacks(out, "as well:")
# Activating the service and both the service and the socket stops the service and restarts the socket
machine.succeed("socat - UNIX-CONNECT:/run/test.sock")
out = switch_to_specialisation("modified-service-and-socket")
assert_contains(out, "stopping the following units: socket-activated.service\n")
assert_lacks(out, "reloading the following units:")
assert_contains(out, "restarting the following units: socket-activated.socket\n")
assert_lacks(out, "\nstarting the following units:")
assert_lacks(out, "the following new units were started:")
assert_lacks(out, "as well:")
with subtest("restart and reload by activation file"):
out = switch_to_specialisation("service-and-socket")
# Switch to a system where the example services get restarted
# by the activation script
out = switch_to_specialisation("restart-and-reload-by-activation-script")
assert_lacks(out, "stopping the following units:")
assert_contains(out, "stopping the following units as well: simple-service.service, socket-activated.service\n")
assert_contains(out, "reloading the following units: simple-reload-service.service\n")
assert_contains(out, "restarting the following units: simple-restart-service.service\n")
assert_contains(out, "\nstarting the following units: simple-service.service")
# The same, but in dry mode
switch_to_specialisation("service-and-socket")
out = switch_to_specialisation("restart-and-reload-by-activation-script", action="dry-activate")
assert_lacks(out, "would stop the following units:")
assert_contains(out, "would stop the following units as well: simple-service.service, socket-activated.service\n")
assert_contains(out, "would reload the following units: simple-reload-service.service\n")
assert_contains(out, "would restart the following units: simple-restart-service.service\n")
assert_contains(out, "\nwould start the following units: simple-service.service")
with subtest("mounts"):
switch_to_specialisation("with-mount")
out = machine.succeed("mount | grep 'on /testmount'")
assert_contains(out, "size=1024k")
out = switch_to_specialisation("with-mount-modified")
assert_lacks(out, "stopping the following units:")
assert_contains(out, "reloading the following units: testmount.mount\n")
assert_lacks(out, "restarting the following units:")
assert_lacks(out, "\nstarting the following units:")
assert_lacks(out, "the following new units were started:")
assert_lacks(out, "as well:")
# It changed
out = machine.succeed("mount | grep 'on /testmount'")
assert_contains(out, "size=10240k")
with subtest("timers"):
switch_to_specialisation("with-timer")
out = machine.succeed("systemctl show test-timer.timer")
assert_contains(out, "OnCalendar=2014-03-25 02:59:56 UTC")
out = switch_to_specialisation("with-timer-modified")
assert_lacks(out, "stopping the following units:")
assert_lacks(out, "reloading the following units:")
assert_contains(out, "restarting the following units: test-timer.timer\n")
assert_lacks(out, "\nstarting the following units:")
assert_lacks(out, "the following new units were started:")
assert_lacks(out, "as well:")
# It changed
out = machine.succeed("systemctl show test-timer.timer")
assert_contains(out, "OnCalendar=Fri 2012-11-23 16:00:00")
with subtest("paths"):
switch_to_specialisation("with-path")
machine.fail("test -f /testpath-modified")
# touch the file, unit should be triggered
machine.succeed("touch /testpath")
machine.wait_until_succeeds("test -f /testpath-modified")
machine.succeed("rm /testpath /testpath-modified")
switch_to_specialisation("with-path-modified")
machine.succeed("touch /testpath")
machine.fail("test -f /testpath-modified")
machine.succeed("touch /testpath2")
machine.wait_until_succeeds("test -f /testpath-modified")
# This test ensures that changes to slice configuration get applied.
# We test this by having a slice that allows no memory allocation at
# all and starting a service within it. If the service crashes, the slice
# is applied and if we modify the slice to allow memory allocation, the
# service should successfully start.
with subtest("slices"):
machine.succeed("echo 0 > /proc/sys/vm/panic_on_oom") # allow OOMing
out = switch_to_specialisation("with-slice")
machine.fail("systemctl start testservice.service")
out = switch_to_specialisation("with-slice-non-crashing")
machine.succeed("systemctl start testservice.service")
machine.succeed("echo 1 > /proc/sys/vm/panic_on_oom") # disallow OOMing
'';
})


@ -13,13 +13,13 @@
mkDerivation rec {
pname = "ptcollab";
version = "0.4.3";
version = "0.5.0";
src = fetchFromGitHub {
owner = "yuxshao";
repo = "ptcollab";
rev = "v${version}";
sha256 = "sha256-bFFWPl7yaTwCKz7/f9Vk6mg0roUnig0dFERS4IE4R7g=";
sha256 = "sha256-sN3O8m+ib6Chb/RXTFbNWW6PnrolCHpmC/avRX93AH4=";
};
nativeBuildInputs = [ qmake pkg-config ];


@ -1,6 +1,7 @@
{ lib
, mkDerivation
, fetchurl
, fetchpatch
, poppler_utils
, pkg-config
, libpng
@ -26,18 +27,21 @@
mkDerivation rec {
pname = "calibre";
version = "5.24.0";
version = "5.29.0";
src = fetchurl {
url = "https://download.calibre-ebook.com/${version}/${pname}-${version}.tar.xz";
hash = "sha256:18dr577nv7ijw3ar6mrk2xrc54mlrqkaj5jrc6s5sirl0710fdfg";
sha256 = "sha256-9ymHEpTHDUM3NAGoeSETzKRLKgJLRY4eEli6N5lbZug=";
};
# https://sources.debian.org/patches/calibre/5.29.0+dfsg-1
patches = [
# Plugin installation (very insecure) disabled (from Debian)
./disable_plugins.patch
# Automatic version update disabled by default (from Debian)
./no_updates_dialog.patch
# allow for plugin update check, but no calibre version check
(fetchpatch {
name = "0001_only_plugin_update.patch";
url = "https://sources.debian.org/data/main/c/calibre/5.29.0%2Bdfsg-1/debian/patches/0001-only-plugin-update.patch";
sha256 = "sha256-aGT8rJ/eQKAkmyHBWdY0ouZuWvDwtLVJU5xY6d3hY3k=";
})
]
++ lib.optional (!unrarSupport) ./dont_build_unrar_plugin.patch;


@ -1,17 +0,0 @@
Description: Disable plugin dialog. It uses a totally non-authenticated and non-trusted way of installing arbitrary code.
Author: Martin Pitt <mpitt@debian.org>
Bug-Debian: http://bugs.debian.org/640026
Index: calibre-0.8.29+dfsg/src/calibre/gui2/actions/preferences.py
===================================================================
--- calibre-0.8.29+dfsg.orig/src/calibre/gui2/actions/preferences.py 2011-12-16 05:49:14.000000000 +0100
+++ calibre-0.8.29+dfsg/src/calibre/gui2/actions/preferences.py 2011-12-20 19:29:04.798468930 +0100
@@ -28,8 +28,6 @@
pm.addAction(QIcon(I('config.png')), _('Preferences'), self.do_config)
cm('welcome wizard', _('Run welcome wizard'),
icon='wizard.png', triggered=self.gui.run_wizard)
- cm('plugin updater', _('Get plugins to enhance calibre'),
- icon='plugins/plugin_updater.png', triggered=self.get_plugins)
if not DEBUG:
pm.addSeparator()
cm('restart', _('Restart in debug mode'), icon='debug.png',


@ -1,15 +0,0 @@
diff -burN calibre-2.9.0.orig/src/calibre/gui2/main.py calibre-2.9.0/src/calibre/gui2/main.py
--- calibre-2.9.0.orig/src/calibre/gui2/main.py 2014-11-09 20:09:54.081231882 +0800
+++ calibre-2.9.0/src/calibre/gui2/main.py 2014-11-09 20:15:48.193033844 +0800
@@ -37,8 +37,9 @@
help=_('Start minimized to system tray.'))
parser.add_option('-v', '--verbose', default=0, action='count',
help=_('Ignored, do not use. Present only for legacy reasons'))
- parser.add_option('--no-update-check', default=False, action='store_true',
- help=_('Do not check for updates'))
+ parser.add_option('--update-check', dest='no_update_check', default=True,
+ action='store_false',
+ help=_('Check for updates'))
parser.add_option('--ignore-plugins', default=False, action='store_true',
help=_('Ignore custom plugins, useful if you installed a plugin'
' that is preventing calibre from starting'))


@ -1,4 +1,5 @@
{ pkgs
{ nixosTests
, pkgs
, poetry2nix
, lib
, overrides ? (self: super: {})
@ -59,10 +60,17 @@ let
}
).python;
in interpreter.pkgs.nixops.withPlugins(ps: [
ps.nixops-encrypted-links
ps.nixops-virtd
ps.nixops-aws
ps.nixops-gcp
ps.nixopsvbox
])
pkg = interpreter.pkgs.nixops.withPlugins(ps: [
ps.nixops-encrypted-links
ps.nixops-virtd
ps.nixops-aws
ps.nixops-gcp
ps.nixopsvbox
]) // rec {
# Workaround for https://github.com/NixOS/nixpkgs/issues/119407
# TODO after #119407: Use .overrideAttrs(pkg: old: { passthru.tests = .....; })
tests = nixosTests.nixops.unstable.override { nixopsPkg = pkg; };
# Not strictly necessary, but probably expected somewhere; part of the workaround:
passthru.tests = tests;
};
in pkg


@ -5,13 +5,13 @@
mkDerivation rec {
pname = "qownnotes";
version = "21.9.2";
version = "21.10.9";
src = fetchurl {
url = "https://download.tuxfamily.org/${pname}/src/${pname}-${version}.tar.xz";
# Fetch the checksum of current version with curl:
# curl https://download.tuxfamily.org/qownnotes/src/qownnotes-<version>.tar.xz.sha256
sha256 = "sha256-R+aXPnQ2Ns2D8PBTvaeh8ht3juZZhZJIb52A8CVRtFI=";
sha256 = "2c86d66ae427bdcd16d706b982cedaa669a27340f7819fc97a8e2b24c709e74f";
};
nativeBuildInputs = [ qmake qttools ];


@ -2,13 +2,13 @@
stdenv.mkDerivation rec {
pname = "fast-export";
version = "200213";
version = "210917";
src = fetchFromGitHub {
owner = "frej";
repo = pname;
rev = "v${version}";
sha256 = "0hzyh66rlawxip4n2pvz7pbs0cq82clqv1d6c7hf60v1drjxw287";
sha256 = "0xg8r9rbqv7mriraqxdks2mgj7j4c9gap3kc05y1kxi3nniywyd3";
};
nativeBuildInputs = [ makeWrapper ];


@ -1,68 +0,0 @@
{ lib, stdenv, fetchurl, python2Packages, makeWrapper
, guiSupport ? false, tk ? null
, ApplicationServices
}:
let
inherit (python2Packages) docutils hg-git dulwich python;
in python2Packages.buildPythonApplication rec {
pname = "mercurial";
version = "4.9.1";
src = fetchurl {
url = "https://mercurial-scm.org/release/mercurial-${version}.tar.gz";
sha256 = "0iybbkd9add066729zg01kwz5hhc1s6lhp9rrnsmzq6ihyxj3p8v";
};
format = "other";
inherit python; # pass it so that the same version can be used in hg2git
nativeBuildInputs = [ makeWrapper ];
buildInputs = [ docutils ]
++ lib.optionals stdenv.isDarwin [ ApplicationServices ];
propagatedBuildInputs = [ hg-git dulwich ];
makeFlags = [ "PREFIX=$(out)" ];
postInstall = (lib.optionalString guiSupport
''
mkdir -p $out/etc/mercurial
cp contrib/hgk $out/bin
cat >> $out/etc/mercurial/hgrc << EOF
[extensions]
hgk=$out/lib/${python.libPrefix}/site-packages/hgext/hgk.py
EOF
# setting HG so that hgk can be run itself as well (not only hg view)
WRAP_TK=" --set TK_LIBRARY ${tk}/lib/${tk.libPrefix}
--set HG $out/bin/hg
--prefix PATH : ${tk}/bin "
'') +
''
for i in $(cd $out/bin && ls); do
wrapProgram $out/bin/$i \
$WRAP_TK
done
# copy hgweb.cgi to allow use in apache
mkdir -p $out/share/cgi-bin
cp -v hgweb.cgi contrib/hgweb.wsgi $out/share/cgi-bin
chmod u+x $out/share/cgi-bin/hgweb.cgi
# install bash/zsh completions
install -v -m644 -D contrib/bash_completion $out/share/bash-completion/completions/_hg
install -v -m644 -D contrib/zsh_completion $out/share/zsh/site-functions/_hg
'';
meta = {
description = "A fast, lightweight SCM system for very large distributed projects";
homepage = "https://www.mercurial-scm.org";
downloadPage = "https://www.mercurial-scm.org/release/";
license = lib.licenses.gpl2;
maintainers = [ lib.maintainers.eraserhd ];
updateWalker = true;
platforms = lib.platforms.unix;
};
}

View File

@ -2,11 +2,11 @@
stdenv.mkDerivation rec {
pname = "freetube";
version = "0.14.0";
version = "0.15.0";
src = fetchurl {
url = "https://github.com/FreeTubeApp/FreeTube/releases/download/v${version}-beta/freetube_${version}_amd64.AppImage";
sha256 = "sha256:0qaghj70ffc90wck1i4217ky5d6cryrmgna2ipsc4v8dcvbyc1lh";
sha256 = "sha256-52cVY3SBT048tErydk3l27yBvM/FMVpEMf5miAeInDM=";
};
appimageContents = appimageTools.extractType2 {

View File

@ -0,0 +1,26 @@
{ stdenvNoCC, lib, fetchFromGitHub }:
stdenvNoCC.mkDerivation {
pname = "bront_fonts";
version = "unstable-2015-06-28";
src = fetchFromGitHub {
owner = "chrismwendt";
repo = "bront";
rev = "aef23d9a11416655a8351230edb3c2377061c077";
sha256 = "1sx2gv19pgdyccb38sx3qnwszksmva7pqa1c8m35s6cipgjhhgb4";
};
installPhase = ''
install -m444 -Dt $out/share/fonts/truetype *Bront.ttf
'';
meta = with lib; {
description = "Bront Fonts";
longDescription = "Ubuntu Mono Bront and DejaVu Sans Mono Bront fonts.";
homepage = "https://github.com/chrismwendt/bront";
license = licenses.free;
platforms = platforms.all;
maintainers = [ maintainers.grburst ];
};
}

View File

@ -15,13 +15,13 @@
stdenv.mkDerivation rec {
pname = "arc-theme";
version = "20210412";
version = "20211018";
src = fetchFromGitHub {
owner = "jnsh";
repo = pname;
rev = version;
sha256 = "sha256-BNJirtBtdWsIzQfsJsZzg1zFbJEzZPq1j2qZ+1QjRH8=";
sha256 = "1rrxm5b7l8kq1h0lm08ck54xljzm8w573mxx904n3rhdg3ri9d63";
};
nativeBuildInputs = [

View File

@ -16,6 +16,6 @@ in {
graalvm11-ce = mkGraal rec {
version = "21.2.0";
javaVersion = "11";
platforms = ["x86_64-linux" "x86_64-darwin"];
platforms = ["x86_64-linux" "aarch64-linux" "x86_64-darwin"];
};
}

View File

@ -10,17 +10,26 @@
, gtkSupport ? true, cairo, glib, gtk3 }:
let
platform = if stdenv.isDarwin then "darwin-amd64" else "linux-amd64";
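# map the Nix system string to the platform identifier GraalVM uses in its
# release artifact names (combined with javaVersion into javaVersionPlatform below)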
platform = {
aarch64-linux = "linux-aarch64";
x86_64-linux = "linux-amd64";
x86_64-darwin = "darwin-amd64";
}.${stdenv.system} or (throw "Unsupported system: ${stdenv.system}");
runtimeDependencies = [ cups ]
++ lib.optionals gtkSupport [ cairo glib gtk3 ];
runtimeLibraryPath = lib.makeLibraryPath runtimeDependencies;
javaVersionPlatform = "${javaVersion}-${platform}";
graalvmXXX-ce = stdenv.mkDerivation rec {
name = "graalvm${javaVersion}-ce";
srcs = [
(fetchurl {
sha256 = {
"8-linux-amd64" = "01gyxjmfp7wpcyn7x8b184fn0lp3xryfw619bqch120pzvr6z88f";
"11-linux-aarch64" = "sha256-u9841eaHH347JHCrm5u3YGZ9RSTuKiDq368TY2otAYw=";
"11-linux-amd64" = "0w7lhvxm4nggqdcl4xrhdd3y6dqw9jhyca9adjkp508n4lqf1lxv";
"11-darwin-amd64" = "0dnahicdl0vhrbiml9z9nbb7k75hbsjj8rs246i1lwril12dqb7n";
}.${javaVersionPlatform};
@ -29,6 +38,7 @@ let
(fetchurl {
sha256 = {
"8-linux-amd64" = "1jlvrxdlbsmlk3ia43h9m29kmmdn83h6zdlnf8qb7bm38c84nhsc";
"11-linux-aarch64" = "sha256-7W5gkhj2kON2ocrGpyH/OL/phOyHkjNDId2CtyUAEWY=";
"11-linux-amd64" = "1ybd7a6ii6582skr0nkxx7bccsa7gkg0yriql2h1lcz0rfzcdi3g";
"11-darwin-amd64" = "1jdy845vanmz05zx5b9227gb1msh9wdrz2kf3fx9z54ssd9qgdhm";
}.${javaVersionPlatform};
@ -37,11 +47,23 @@ let
(fetchurl {
sha256 = {
"8-linux-amd64" = "18ip0ay06q1pryqs8ja988mvk9vw475c0nfjcznnsd1zp296p6jc";
"11-linux-aarch64" = "sha256-i9ysgqbI52PiXofZQ5AnPSzs2TeR8An5CIYzcwhx28o=";
"11-linux-amd64" = "1jszz97mkqavxzyhx5jxhi43kqjxk9c36j5l5hy3kn8sdfmbplm4";
"11-darwin-amd64" = "1767ryhv2cn5anlys63ysax1p8ag79bykac1xfrjfan8yv6d8ybl";
}.${javaVersionPlatform};
url = "https://github.com/oracle/truffleruby/releases/download/vm-${version}/ruby-installable-svm-java${javaVersionPlatform}-${version}.jar";
})
(fetchurl {
sha256 = {
"8-linux-amd64" = "08s36rjy5irg25b7lqx0m4v2wpywin3cqyhdrywhvq14f7zshsd5";
"11-linux-aarch64" = "sha256-Lkc/mq1w18+PQ5McvLGyQBSOz/TMSUgwioRZ0Dtyhm4=";
"11-linux-amd64" = "1ybjaknmbsdg8qzb986x39fq0h7fyiymdcigc7y86swk8dd916hv";
"11-darwin-amd64" = "02dwlb62kqr4rjjmvkhn2xk9l1p47ahg9xyyfkw7im1jwlqmqnzf";
}.${javaVersionPlatform};
url = "https://github.com/graalvm/graalvm-ce-builds/releases/download/vm-${version}/wasm-installable-svm-java${javaVersionPlatform}-${version}.jar";
})
] ++ lib.optionals (platform == "amd64") [
# graalpython is not available on aarch64 platforms yet
(fetchurl {
sha256 = {
"8-linux-amd64" = "0il15438qnikqsxdsl7fcdg0c8zs3cbm4ry7pys7fxxr1ckd8szq";
@ -50,14 +72,6 @@ let
}.${javaVersionPlatform};
url = "https://github.com/graalvm/graalpython/releases/download/vm-${version}/python-installable-svm-java${javaVersionPlatform}-${version}.jar";
})
(fetchurl {
sha256 = {
"8-linux-amd64" = "08s36rjy5irg25b7lqx0m4v2wpywin3cqyhdrywhvq14f7zshsd5";
"11-linux-amd64" = "1ybjaknmbsdg8qzb986x39fq0h7fyiymdcigc7y86swk8dd916hv";
"11-darwin-amd64" = "02dwlb62kqr4rjjmvkhn2xk9l1p47ahg9xyyfkw7im1jwlqmqnzf";
}.${javaVersionPlatform};
url = "https://github.com/graalvm/graalvm-ce-builds/releases/download/vm-${version}/wasm-installable-svm-java${javaVersionPlatform}-${version}.jar";
})
];
buildInputs = lib.optionals stdenv.isLinux [
@ -124,10 +138,9 @@ let
exit 1
fi
unpack_jar ''${arr[1]}
unpack_jar ''${arr[2]}
unpack_jar ''${arr[3]}
unpack_jar ''${arr[4]}
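# unpack each installable component jar; element 0 of arr is assumed to be
# the base GraalVM archive, which is handled separately above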
for jar in "''${arr[@]:1}"; do
unpack_jar "$jar"
done
'';
outputs = [ "out" "lib" ];
@ -145,7 +158,6 @@ let
ln -s $f ${basepath}/${platform}/$(basename $f)
done
'';
copyClibrariesToLib = ''
# add those libraries to $lib output too, so we can use them with
# `native-image -H:CLibraryPath=''${graalvm11-ce.lib}/lib ...` and reduce
@ -173,6 +185,13 @@ let
${copyClibrariesToLib}
'';
"11-linux-aarch64" = ''
${nativePRNGWorkaround "$out/conf/security/java.security"}
${copyClibrariesToOut "$out/lib/svm/clibraries"}
${copyClibrariesToLib}
'';
"11-darwin-amd64" = ''
# create empty $lib/lib to avoid breaking builds
mkdir -p $lib/lib
@ -219,53 +238,60 @@ let
doInstallCheck = true;
installCheckPhase = ''
echo ${
lib.escapeShellArg ''
public class HelloWorld {
public static void main(String[] args) {
System.out.println("Hello World");
}
echo ${
lib.escapeShellArg ''
public class HelloWorld {
public static void main(String[] args) {
System.out.println("Hello World");
}
''
} > HelloWorld.java
$out/bin/javac HelloWorld.java
}
''
} > HelloWorld.java
$out/bin/javac HelloWorld.java
# run on JVM with Graal Compiler
$out/bin/java -XX:+UnlockExperimentalVMOptions -XX:+EnableJVMCI -XX:+UseJVMCICompiler HelloWorld | fgrep 'Hello World'
# run on JVM with Graal Compiler
$out/bin/java -XX:+UnlockExperimentalVMOptions -XX:+EnableJVMCI -XX:+UseJVMCICompiler HelloWorld | fgrep 'Hello World'
# Ahead-Of-Time compilation
$out/bin/native-image -H:-CheckToolchain -H:+ReportExceptionStackTraces --no-server HelloWorld
./helloworld | fgrep 'Hello World'
# Ahead-Of-Time compilation
$out/bin/native-image -H:-CheckToolchain -H:+ReportExceptionStackTraces --no-server HelloWorld
./helloworld | fgrep 'Hello World'
${
lib.optionalString stdenv.isLinux ''
# Ahead-Of-Time compilation with --static
# --static flag doesn't work for darwin
$out/bin/native-image --no-server --static HelloWorld
./helloworld | fgrep 'Hello World'
''
}
${
lib.optionalString stdenv.isLinux ''
# Ahead-Of-Time compilation with --static
# --static flag doesn't work for darwin
$out/bin/native-image --no-server --static HelloWorld
./helloworld | fgrep 'Hello World'
''
}
echo "Testing interpreted languages"
$out/bin/graalpython -c 'print(1 + 1)'
$out/bin/ruby -e 'puts(1 + 1)'
${
lib.optionalString (platform == "amd64") ''
echo "Testing interpreted languages"
$out/bin/graalpython -c 'print(1 + 1)'
$out/bin/ruby -e 'puts(1 + 1)'
echo '1 + 1' | $out/bin/graalpython
echo '1 + 1' | $out/bin/graalpython
''
}
${
lib.optionalString stdenv.isLinux ''
# TODO: `irb` on MacOS gives an error saying "Could not find OpenSSL
# headers, install via Homebrew or MacPorts or set OPENSSL_PREFIX", even
# though `openssl` is in `propagatedBuildInputs`. For more details see:
# https://github.com/NixOS/nixpkgs/pull/105815
echo '1 + 1' | $out/bin/irb
''
}
${# TODO: `irb` on MacOS gives an error saying "Could not find OpenSSL
# headers, install via Homebrew or MacPorts or set OPENSSL_PREFIX", even
# though `openssl` is in `propagatedBuildInputs`. For more details see:
# https://github.com/NixOS/nixpkgs/pull/105815
# TODO: "truffleruby: an internal exception escaped out of the interpreter"
# error on linux-aarch64
lib.optionalString (platform == "linux-amd64") ''
echo '1 + 1' | $out/bin/irb
''
}
${lib.optionalString (javaVersion == "11" && stdenv.isLinux) ''
# Doesn't work on MacOS, we have this error: "Launching JShell execution engine threw: Operation not permitted (Bind failed)"
echo '1 + 1' | $out/bin/jshell
''}'';
${# TODO: Doesn't work on MacOS, we have this error:
# "Launching JShell execution engine threw: Operation not permitted (Bind failed)"
lib.optionalString (javaVersion == "11" && stdenv.isLinux) ''
echo '1 + 1' | $out/bin/jshell
''
}'';
passthru.home = graalvmXXX-ce;
@ -280,6 +306,7 @@ let
glittershark
babariviere
ericdallo
thiagokokada
];
platforms = platforms;
};

View File

@ -1,5 +1,5 @@
{ lib, stdenv, fetchFromGitHub, cmake
, boost, python3, eigen
, boost, python3, eigen, python3Packages
, icestorm, trellis
, llvmPackages
@ -39,17 +39,18 @@ stdenv.mkDerivation rec {
= [ cmake ]
++ (lib.optional enableGui wrapQtAppsHook);
buildInputs
= [ boostPython python3 eigen ]
= [ boostPython python3 eigen python3Packages.apycula ]
++ (lib.optional enableGui qtbase)
++ (lib.optional stdenv.cc.isClang llvmPackages.openmp);
cmakeFlags =
[ "-DCURRENT_GIT_VERSION=${lib.substring 0 7 (lib.elemAt srcs 0).rev}"
"-DARCH=generic;ice40;ecp5"
"-DARCH=generic;ice40;ecp5;gowin"
"-DBUILD_TESTS=ON"
"-DICESTORM_INSTALL_PREFIX=${icestorm}"
"-DTRELLIS_INSTALL_PREFIX=${trellis}"
"-DTRELLIS_LIBDIR=${trellis}/lib/trellis"
"-DGOWIN_BBA_EXECUTABLE=${python3Packages.apycula}/bin/gowin_bba"
"-DUSE_OPENMP=ON"
# warning: high RAM usage
"-DSERIALIZE_CHIPDBS=OFF"
@ -74,6 +75,7 @@ stdenv.mkDerivation rec {
wrapQtApp $out/bin/nextpnr-generic
wrapQtApp $out/bin/nextpnr-ice40
wrapQtApp $out/bin/nextpnr-ecp5
wrapQtApp $out/bin/nextpnr-gowin
'';
meta = with lib; {

View File

@ -25,7 +25,6 @@ pname
# propagate build dependencies so in case we have A -> B -> C,
# C can import package A propagated by B
, propagatedBuildInputs ? []
, propagatedNativeBuildInputs ? []
# used to disable derivation, useful for specific lua versions
# TODO move from this setting meta.broken to a 'disabled' attribute on the
@ -50,7 +49,7 @@ pname
# The latter is used to work-around luarocks having a problem with
# multiple-output derivations as external deps:
# https://github.com/luarocks/luarocks/issues/766
, externalDeps ? lib.unique (lib.filter (drv: !drv ? luaModule) (propagatedBuildInputs ++ buildInputs))
, externalDeps ? []
# Appended to the generated luarocks config
, extraConfig ? ""
@ -74,7 +73,6 @@ pname
let
generatedRockspecFilename = "${rockspecDir}/${pname}-${version}.rockspec";
# TODO fix warnings "Couldn't load rockspec for ..." during manifest
# construction -- from initial investigation, appears it will require
# upstream luarocks changes to fix cleanly (during manifest construction,
@ -83,7 +81,7 @@ let
luarocks_config = "luarocks-config.lua";
luarocks_content = let
generatedConfig = lua.pkgs.lib.generateLuarocksConfig {
inherit externalDeps;
externalDeps = externalDeps ++ externalDepsGenerated;
inherit extraVariables;
inherit rocksSubdir;
inherit requiredLuaRocks;
@ -99,12 +97,13 @@ let
# Filter out the lua derivation itself from the Lua module dependency
# closure, as it doesn't have a rock tree :)
requiredLuaRocks = lib.filter (d: d ? luaModule)
(lua.pkgs.requiredLuaModules propagatedBuildInputs);
(lua.pkgs.requiredLuaModules luarocksDrv.propagatedBuildInputs);
# example externalDeps': [ { name = "CRYPTO"; dep = pkgs.openssl; } ]
externalDepsGenerated = lib.unique (lib.filter (drv: !drv ? luaModule) (luarocksDrv.propagatedBuildInputs ++ luarocksDrv.buildInputs));
externalDeps' = lib.filter (dep: !lib.isDerivation dep) externalDeps;
in
toLuaModule ( lua.stdenv.mkDerivation (
luarocksDrv = toLuaModule ( lua.stdenv.mkDerivation (
builtins.removeAttrs attrs ["disabled" "checkInputs" "externalDeps" "extraVariables"] // {
name = namePrefix + pname + "-" + version;
@ -146,13 +145,12 @@ builtins.removeAttrs attrs ["disabled" "checkInputs" "externalDeps" "extraVariab
runHook postConfigure
'';
# TODO could be moved to configurePhase
buildPhase = ''
runHook preBuild
nix_debug "Using LUAROCKS_CONFIG=$LUAROCKS_CONFIG"
LUAROCKS=luarocks
LUAROCKS=${lua.pkgs.luarocks}/bin/luarocks
if (( ''${NIX_DEBUG:-0} >= 1 )); then
LUAROCKS="$LUAROCKS --verbose"
fi
@ -195,6 +193,7 @@ builtins.removeAttrs attrs ["disabled" "checkInputs" "externalDeps" "extraVariab
passthru = {
inherit lua; # The lua interpreter
inherit externalDeps;
inherit luarocks_content;
} // passthru;
meta = {
@ -203,4 +202,6 @@ builtins.removeAttrs attrs ["disabled" "checkInputs" "externalDeps" "extraVariab
maintainers = (meta.maintainers or []) ++ [ ];
broken = disabled;
} // meta;
}))
}));
in
luarocksDrv

View File

@ -1,12 +1,15 @@
{ lib, stdenv, fetchurl, capnproto, cmake }:
{ lib, stdenv, fetchFromGitHub, capnproto, cmake }:
stdenv.mkDerivation rec {
pname = "capnproto";
version = "0.9.0";
src = fetchurl {
url = "https://capnproto.org/capnproto-c++-${version}.tar.gz";
sha256 = "sha256-soBUp6K/6kK/w5LI0AljDZTXLozoaiOtbxi15yV0Bk8=";
# release tarballs are missing some ekam rules
src = fetchFromGitHub {
owner = "capnproto";
repo = "capnproto";
rev = "v${version}";
sha256 = "038i40apywn8sg95kwld4mg9p9m08izcw5xj7mwkmshycmqw65na";
};
nativeBuildInputs = [ cmake ]

View File

@ -15,7 +15,7 @@ let
overridenPackages = import ./overrides.nix { inherit pkgs; };
generatedPackages = if (builtins.pathExists ./generated-packages.nix) then
pkgs.callPackage ./generated-packages.nix { } else (final: prev: {});
(final: prev: pkgs.callPackage ./generated-packages.nix { inherit (final) callPackage; } final prev) else (final: prev: {});
extensible-self = lib.makeExtensible
(extends overrides

File diff suppressed because it is too large

View File

@ -5,8 +5,20 @@ let
in unique ([lua] ++ modules ++ concatLists (catAttrs "requiredLuaModules" modules));
# Check whether a derivation provides a lua module.
hasLuaModule = drv: drv ? luaModule;
/*
Use this to override the arguments passed to buildLuarocksPackage
*/
overrideLuarocks = drv: f: (drv.override (args: args // {
buildLuarocksPackage = drv: (args.buildLuarocksPackage drv).override f;
})) // {
overrideScope = scope: overrideLuarocks (drv.overrideScope scope) f;
};
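# A minimal usage sketch (the package choice is only illustrative, mirroring
# the overrides.nix changes in this commit); within the lua package set the
# helper is reachable as lib.overrideLuarocks:
#
#   lib.overrideLuarocks prev.cyrussasl (drv: {
#     externalDeps = [ { name = "LIBSASL"; dep = pkgs.cyrus_sasl; } ];
#   })
#
# Changes that are not buildLuarocksPackage arguments (patches, phases, ...)
# still go through overrideAttrs.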
in
rec {
inherit overrideLuarocks;
inherit hasLuaModule requiredLuaModules;
luaPathList = [

View File

@ -1,11 +1,11 @@
{ pkgs, ... }:
self: super:
with super;
{ pkgs }:
final: prev:
with prev;
{
###########################################
#### manual fixes for generated packages
###########################################
bit32 = super.bit32.override({
bit32 = prev.bit32.overrideAttrs(oa: {
# Small patch in order to no longer redefine a Lua 5.2 function that Luajit
# 2.1 also provides, see https://github.com/LuaJIT/LuaJIT/issues/325 for
# more
@ -14,7 +14,7 @@ with super;
];
});
busted = super.busted.override({
busted = prev.busted.overrideAttrs(oa: {
postConfigure = ''
substituteInPlace ''${rockspecFilename} \
--replace "'lua_cliargs = 3.0-1'," "'lua_cliargs >= 3.0-1',"
@ -25,14 +25,7 @@ with super;
'';
});
cqueues = super.cqueues.override(rec {
# Parse out a version number without the Lua version inserted
version = with pkgs.lib; let
version' = super.cqueues.version;
rel = splitVersion version';
date = head rel;
rev = last (splitString "-" (last rel));
in "${date}-${rev}";
cqueues = (prev.lib.overrideLuarocks prev.cqueues (drv: {
nativeBuildInputs = [
pkgs.gnum4
];
@ -41,9 +34,17 @@ with super;
{ name = "OPENSSL"; dep = pkgs.openssl; }
];
disabled = luaOlder "5.1" || luaAtLeast "5.4";
})).overrideAttrs(oa: rec {
# Parse out a version number without the Lua version inserted
version = with pkgs.lib; let
version' = prev.cqueues.version;
rel = splitVersion version';
date = head rel;
rev = last (splitString "-" (last rel));
in "${date}-${rev}";
# Upstream rockspec is pointlessly broken into separate rockspecs, per Lua
# version, which doesn't work well for us, so modify it
postConfigure = let inherit (super.cqueues) pname; in ''
postConfigure = let inherit (prev.cqueues) pname; in ''
# 'all' target auto-detects correct Lua version, which is fine for us as
# we only have the right one available :)
sed -Ei ''${rockspecFilename} \
@ -56,13 +57,13 @@ with super;
'';
});
cyrussasl = super.cyrussasl.override({
cyrussasl = prev.lib.overrideLuarocks prev.cyrussasl (drv: {
externalDeps = [
{ name = "LIBSASL"; dep = pkgs.cyrus_sasl; }
];
});
http = super.http.override({
http = prev.http.overrideAttrs(oa: {
patches = [
(pkgs.fetchpatch {
name = "invalid-state-progression.patch";
@ -76,7 +77,7 @@ with super;
*/
});
ldbus = super.ldbus.override({
ldbus = prev.lib.overrideLuarocks prev.ldbus (drv: {
extraVariables = {
DBUS_DIR="${pkgs.dbus.lib}";
DBUS_ARCH_INCDIR="${pkgs.dbus.lib}/lib/dbus-1.0/include";
@ -87,7 +88,7 @@ with super;
];
});
ljsyscall = super.ljsyscall.override(rec {
ljsyscall = prev.ljsyscall.overrideAttrs(oa: rec {
version = "unstable-20180515";
# package hasn't seen any release for a long time
src = pkgs.fetchFromGitHub {
@ -106,7 +107,7 @@ with super;
propagatedBuildInputs = with pkgs.lib; optional (!isLuaJIT) luaffi;
});
lgi = super.lgi.override({
lgi = prev.lib.overrideLuarocks prev.lgi (drv: {
nativeBuildInputs = [
pkgs.pkg-config
];
@ -128,31 +129,31 @@ with super;
'';
});
lrexlib-gnu = super.lrexlib-gnu.override({
lrexlib-gnu = prev.lib.overrideLuarocks prev.lrexlib-gnu (drv: {
buildInputs = [
pkgs.gnulib
];
});
lrexlib-pcre = super.lrexlib-pcre.override({
lrexlib-pcre = prev.lib.overrideLuarocks prev.lrexlib-pcre (drv: {
externalDeps = [
{ name = "PCRE"; dep = pkgs.pcre; }
];
});
lrexlib-posix = super.lrexlib-posix.override({
lrexlib-posix = prev.lib.overrideLuarocks prev.lrexlib-posix (drv: {
buildInputs = [
pkgs.glibc.dev
];
});
lua-iconv = super.lua-iconv.override({
lua-iconv = prev.lib.overrideLuarocks prev.lua-iconv (drv: {
buildInputs = [
pkgs.libiconv
];
});
lua-lsp = super.lua-lsp.override({
lua-lsp = prev.lua-lsp.overrideAttrs(oa: {
# until Alloyed/lua-lsp#28
postConfigure = ''
substituteInPlace ''${rockspecFilename} \
@ -160,14 +161,14 @@ with super;
'';
});
lua-zlib = super.lua-zlib.override({
lua-zlib = prev.lib.overrideLuarocks prev.lua-zlib (drv: {
buildInputs = [
pkgs.zlib.dev
];
disabled = luaOlder "5.1" || luaAtLeast "5.4";
});
luadbi-mysql = super.luadbi-mysql.override({
luadbi-mysql = prev.lib.overrideLuarocks prev.luadbi-mysql (drv: {
extraVariables = {
# Can't just be /include and /lib, unfortunately needs the trailing 'mysql'
MYSQL_INCDIR="${pkgs.libmysqlclient.dev}/include/mysql";
@ -179,19 +180,19 @@ with super;
];
});
luadbi-postgresql = super.luadbi-postgresql.override({
luadbi-postgresql = prev.lib.overrideLuarocks prev.luadbi-postgresql (drv: {
buildInputs = [
pkgs.postgresql
];
});
luadbi-sqlite3 = super.luadbi-sqlite3.override({
luadbi-sqlite3 = prev.lib.overrideLuarocks prev.luadbi-sqlite3 (drv: {
externalDeps = [
{ name = "SQLITE"; dep = pkgs.sqlite; }
];
});
luaevent = super.luaevent.override({
luaevent = prev.lib.overrideLuarocks prev.luaevent (drv: {
propagatedBuildInputs = [
luasocket
];
@ -201,7 +202,7 @@ with super;
disabled = luaOlder "5.1" || luaAtLeast "5.4";
});
luaexpat = super.luaexpat.override({
luaexpat = prev.lib.overrideLuarocks prev.luaexpat (drv: {
externalDeps = [
{ name = "EXPAT"; dep = pkgs.expat; }
];
@ -212,59 +213,57 @@ with super;
# TODO Somehow automatically amend buildInputs for things that need luaffi
# but are in luajitPackages?
luaffi = super.luaffi.override({
luaffi = prev.lib.overrideLuarocks prev.luaffi (drv: {
# The packaged .src.rock version is pretty old, and doesn't work with Lua 5.3
src = pkgs.fetchFromGitHub {
owner = "facebook"; repo = "luaffifb";
rev = "532c757e51c86f546a85730b71c9fef15ffa633d";
sha256 = "1nwx6sh56zfq99rcs7sph0296jf6a9z72mxknn0ysw9fd7m1r8ig";
};
knownRockspec = with super.luaffi; "${pname}-${version}.rockspec";
knownRockspec = with prev.luaffi; "${pname}-${version}.rockspec";
disabled = luaOlder "5.1" || luaAtLeast "5.4" || isLuaJIT;
});
luaossl = super.luaossl.override({
luaossl = prev.lib.overrideLuarocks prev.luaossl (drv: {
externalDeps = [
{ name = "CRYPTO"; dep = pkgs.openssl; }
{ name = "OPENSSL"; dep = pkgs.openssl; }
];
});
luasec = super.luasec.override({
luasec = prev.lib.overrideLuarocks prev.luasec (drv: {
externalDeps = [
{ name = "OPENSSL"; dep = pkgs.openssl; }
];
});
luasql-sqlite3 = super.luasql-sqlite3.override({
luasql-sqlite3 = prev.lib.overrideLuarocks prev.luasql-sqlite3 (drv: {
externalDeps = [
{ name = "SQLITE"; dep = pkgs.sqlite; }
];
});
luasystem = super.luasystem.override({
buildInputs = pkgs.lib.optionals pkgs.stdenv.isLinux [
pkgs.glibc
];
});
luasystem = prev.lib.overrideLuarocks prev.luasystem (drv: { buildInputs = [ pkgs.glibc.out ]; });
luazip = super.luazip.override({
luazip = prev.lib.overrideLuarocks prev.luazip (drv: {
buildInputs = [
pkgs.zziplib
];
});
lua-yajl = super.lua-yajl.override({
lua-yajl = prev.lib.overrideLuarocks prev.lua-yajl (drv: {
buildInputs = [
pkgs.yajl
];
});
luuid = super.luuid.override(old: {
luuid = (prev.lib.overrideLuarocks prev.luuid (drv: {
externalDeps = [
{ name = "LIBUUID"; dep = pkgs.libuuid; }
];
meta = old.meta // {
disabled = luaOlder "5.1" || (luaAtLeast "5.4");
})).overrideAttrs(oa: {
meta = oa.meta // {
platforms = pkgs.lib.platforms.linux;
};
# Trivial patch to make it work in both 5.1 and 5.2. Basically just the
@ -276,13 +275,12 @@ with super;
patches = [
./luuid.patch
];
postConfigure = let inherit (super.luuid) version pname; in ''
postConfigure = let inherit (prev.luuid) version pname; in ''
sed -Ei ''${rockspecFilename} -e 's|lua >= 5.2|lua >= 5.1,|'
'';
disabled = luaOlder "5.1" || (luaAtLeast "5.4");
});
luv = super.luv.override({
luv = prev.lib.overrideLuarocks prev.luv (drv: {
# Use system libuv instead of building local and statically linking
# This is a hacky way to specify -DWITH_SHARED_LIBUV=ON which
# is not possible with luarocks and the current luv rockspec
@ -296,8 +294,8 @@ with super;
buildInputs = [ pkgs.libuv ];
passthru = {
libluv = self.luv.override ({
preBuild = self.luv.preBuild + ''
libluv = final.luv.overrideAttrs (oa: {
preBuild = final.luv.preBuild + ''
sed -i 's,\(option(BUILD_MODULE.*\)ON,\1OFF,' CMakeLists.txt
sed -i 's,\(option(BUILD_SHARED_LIBS.*\)OFF,\1ON,' CMakeLists.txt
sed -i 's,${"\${.*INSTALL_INC_DIR}"},${placeholder "out"}/include/luv,' CMakeLists.txt
@ -312,32 +310,32 @@ with super;
};
});
lyaml = super.lyaml.override({
lyaml = prev.lib.overrideLuarocks prev.lyaml (oa: {
buildInputs = [
pkgs.libyaml
];
});
mpack = super.mpack.override({
mpack = prev.lib.overrideLuarocks prev.mpack (drv: {
buildInputs = [ pkgs.libmpack ];
# the rockspec doesn't use the makefile so you may need to export more flags
USE_SYSTEM_LUA = "yes";
USE_SYSTEM_MPACK = "yes";
});
rapidjson = super.rapidjson.override({
rapidjson = prev.rapidjson.overrideAttrs(oa: {
preBuild = ''
sed -i '/set(CMAKE_CXX_FLAGS/d' CMakeLists.txt
sed -i '/set(CMAKE_C_FLAGS/d' CMakeLists.txt
'';
});
readline = (super.readline.override ({
readline = (prev.lib.overrideLuarocks prev.readline (drv: {
unpackCmd = ''
unzip "$curSrc"
tar xf *.tar.gz
'';
propagatedBuildInputs = super.readline.propagatedBuildInputs ++ [ pkgs.readline ];
propagatedBuildInputs = prev.readline.propagatedBuildInputs ++ [ pkgs.readline.out ];
extraVariables = rec {
READLINE_INCDIR = "${pkgs.readline.dev}/include";
HISTORY_INCDIR = READLINE_INCDIR;
@ -349,14 +347,14 @@ with super;
'';
});
std-_debug = super.std-_debug.overrideAttrs(oa: {
std-_debug = prev.std-_debug.overrideAttrs(oa: {
# run make to generate lib/std/_debug/version.lua
preConfigure = ''
make all
'';
});
std-normalize = super.std-normalize.overrideAttrs(oa: {
std-normalize = prev.std-normalize.overrideAttrs(oa: {
# run make to generate lib/std/_debug/version.lua
preConfigure = ''
make all
@ -365,10 +363,10 @@ with super;
# TODO just while testing, remove afterwards
# toVimPlugin should do it instead
gitsigns-nvim = super.gitsigns-nvim.overrideAttrs(oa: {
gitsigns-nvim = prev.gitsigns-nvim.overrideAttrs(oa: {
nativeBuildInputs = oa.nativeBuildInputs or [] ++ [ pkgs.vimUtils.vimGenDocHook ];
});
# aliases
cjson = super.lua-cjson;
cjson = prev.lua-cjson;
}

View File

@ -91,6 +91,22 @@ let
'';
};
mdctl-cli = super."@medable/mdctl-cli".override {
nativeBuildInputs = with pkgs; with darwin.apple_sdk.frameworks; [
glib
libsecret
pkg-config
] ++ lib.optionals stdenv.isDarwin [
AppKit
Security
];
buildInputs = with pkgs; [
nodePackages.node-gyp-build
nodePackages.node-pre-gyp
nodejs
];
};
coc-imselect = super.coc-imselect.override {
meta.broken = since "10";
};
@ -319,14 +335,6 @@ let
'';
};
netlify-cli =
super.netlify-cli.override {
preRebuild = ''
export ESBUILD_BINARY_PATH="${pkgs.esbuild_netlify}/bin/esbuild"
'';
meta.maintainers = with lib.maintainers; [ roberth ];
};
ssb-server = super.ssb-server.override {
buildInputs = [ pkgs.automake pkgs.autoconf self.node-gyp-build ];
meta.broken = since "10";

View File

@ -6,6 +6,7 @@
, "@commitlint/cli"
, "@commitlint/config-conventional"
, "@hyperspace/cli"
, "@medable/mdctl-cli"
, "@nerdwallet/shepherd"
, "@nestjs/cli"
, "@squoosh/cli"
@ -184,7 +185,6 @@
, "multi-file-swagger"
, "musescore-downloader"
, "neovim"
, "netlify-cli"
, "nijs"
, "node-gyp"
, "node-gyp-build"

File diff suppressed because it is too large

View File

@ -1,22 +1,23 @@
{ lib, fetchurl, buildDunePackage
, dune-configurator
, ppx_sexp_conv
, bos, ctypes, fmt, logs, rresult, sexplib
, bos, ctypes, fmt, logs, rresult
, mdx, alcotest, crowbar, junit_alcotest, ezjsonm
}:
buildDunePackage rec {
pname = "yaml";
version = "2.1.0";
version = "3.0.0";
useDune2 = true;
src = fetchurl {
url = "https://github.com/avsm/ocaml-yaml/releases/download/v${version}/yaml-v${version}.tbz";
sha256 = "03g8vsh5jgi1cm5q78v15slgnzifp91fp7n4v1i7pa8yk0bkh585";
sha256 = "1iws6lbnrrd5hhmm7lczfvqp0aidx5xn7jlqk2s5rjfmj9qf4j2c";
};
buildInputs = [ dune-configurator ];
propagatedBuildInputs = [ bos ctypes fmt logs ppx_sexp_conv rresult sexplib ];
propagatedBuildInputs = [ bos ctypes rresult ];
checkInputs = [ fmt logs mdx alcotest crowbar junit_alcotest ezjsonm ];
meta = {
description = "Parse and generate YAML 1.1 files";

View File

@ -0,0 +1,13 @@
{ lib, fetchurl, buildDunePackage, yaml, dune-configurator, ppx_sexp_conv, sexplib }:
buildDunePackage rec {
pname = "yaml-sexp";
inherit (yaml) version src useDune2;
propagatedBuildInputs = [ yaml ppx_sexp_conv sexplib ];
meta = yaml.meta // {
description = "ocaml-yaml with sexp support";
};
}

View File

@ -9,11 +9,11 @@
buildPythonPackage rec {
pname = "auth0-python";
version = "3.18.0";
version = "3.19.0";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-jitJF+puXaLv3qyJOjLFetzxRpnlbi4BKS0TzDmCRe8=";
sha256 = "ed33557f252cf8b022b788ebd2b851c681979f200171498acde2b92d760db026";
};
propagatedBuildInputs = [
@ -23,7 +23,6 @@ buildPythonPackage rec {
checkInputs = [
mock
pyjwt
pytestCheckHook
];

View File

@ -8,14 +8,14 @@
}:
buildPythonPackage rec {
version = "0.15.4";
version = "0.15.5";
pname = "authlib";
src = fetchFromGitHub {
owner = "lepture";
repo = "authlib";
rev = "v${version}";
sha256 = "1jc7rssi1y6brkwjplj8qmi4q5w9h9wz03fbhg01c0y5bmy0g1nj";
sha256 = "1893mkzrlfxpxrgv10y134y8c3ni5hb0qvb0wsc76d2k4mci5j3n";
};
propagatedBuildInputs = [ cryptography requests ];

View File

@ -11,12 +11,12 @@
buildPythonPackage rec {
pname = "azure-mgmt-containerservice";
version = "16.2.0";
version = "16.3.0";
src = fetchPypi {
inherit pname version;
extension = "zip";
sha256 = "ee40fa76d84b82e37e1d8a364649de25bb1e0c6c07f182f145a3ade44ec43711";
sha256 = "87f06525c6cc47de86d7886ed35f04140ddaf1b0c59482ba578fdb3f5d143b1c";
};
propagatedBuildInputs = [
@ -31,6 +31,8 @@ buildPythonPackage rec {
# has no tests
doCheck = false;
pythonImportsCheck = [ "azure.mgmt.containerservice" ];
meta = with lib; {
description = "This is the Microsoft Azure Container Service Management Client Library";
homepage = "https://github.com/Azure/azure-sdk-for-python";

View File

@ -2,11 +2,11 @@
buildPythonPackage rec {
pname = "bracex";
version = "2.1.1";
version = "2.2";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-AfcVzQ7XpiLsizIyLnFYE/dXTeUx8Jtw9vOywQ9oJCU=";
sha256 = "8230f3a03f1f76c192a7844377124300fbaec83870a728b629dfabd9be9e83d0";
};
checkInputs = [ pytestCheckHook ];

View File

@ -7,11 +7,11 @@
buildPythonPackage rec {
pname = "python-debian";
version = "0.1.40";
version = "0.1.42";
src = fetchPypi {
inherit pname version;
sha256 = "385dfb965eca75164d256486c7cf9bae772d24144249fd18b9d15d3cffb70eea";
sha256 = "a794f4c4ee2318ae7260c2e32dac252b833bdaf6686efc2a1afbc6ecf3f0931f";
};
propagatedBuildInputs = [ chardet six ];

View File

@ -4,26 +4,26 @@
, fetchFromGitHub
, substituteAll
, gdb
, django
, flask
, gevent
, psutil
, pytest-timeout
, pytest-xdist
, pytestCheckHook
, requests
, isPy27
, django
, gevent
, isPy3k
}:
buildPythonPackage rec {
pname = "debugpy";
version = "1.4.3";
version = "1.5.0";
src = fetchFromGitHub {
owner = "Microsoft";
repo = pname;
rev = "v${version}";
hash = "sha256-ULxVoZuMNDL0Win/+55RnbkCPZ8OI8nhSKshvJOMFQ4=";
sha256 = "sha256-xgxKyqtSqKITwze7DKDdkxZlq1mWM+x4C/eJlUJmYuk=";
};
patches = [
@ -65,30 +65,21 @@ buildPythonPackage rec {
}.${stdenv.hostPlatform.system} or (throw "Unsupported system: ${stdenv.hostPlatform.system}")}
)'';
doCheck = isPy3k;
checkInputs = [
django
flask
gevent
psutil
pytest-timeout
pytest-xdist
pytestCheckHook
requests
] ++ lib.optionals (!isPy27) [
django
gevent
];
# Override default arguments in pytest.ini
pytestFlagsArray = [ "--timeout=0" "-n=$NIX_BUILD_CORES" ];
disabledTests = lib.optionals isPy27 [
# django 1.11 is the last version to support Python 2.7
# and is no longer built in nixpkgs
"django"
# gevent fails to import zope.interface with Python 2.7
"gevent"
];
pythonImportsCheck = [ "debugpy" ];
meta = with lib; {

View File

@ -6,11 +6,11 @@
buildPythonPackage rec {
pname = "ecoaliface";
version = "0.4.0";
version = "0.5.0";
src = fetchPypi {
inherit pname version;
sha256 = "1hzx0r3311f952jik3pgmrg74xp5m6w9c5v6snfrb8w2m19vs6qy";
sha256 = "f17b3452cfd31bb8c3509d59b846889c81db5fb85082c061c32703162cbe9083";
};
propagatedBuildInputs = [

View File

@ -2,13 +2,13 @@
buildPythonPackage rec {
pname = "flufl.bounce";
version = "3.0.2";
version = "4.0";
buildInputs = [ nose2 ];
propagatedBuildInputs = [ atpublic zope_interface ];
src = fetchPypi {
inherit pname version;
sha256 = "5014b23248fce72b13143c32da30073e6abc655b963e7739575608280c52c9a7";
sha256 = "25504aeb976ec0fe5a19cd6c413a3410cb514fdcdbdca9f9b5d8d343a8603831";
};
}

View File

@ -2,7 +2,7 @@
buildPythonPackage rec {
pname = "flufl.i18n";
version = "3.1.5";
version = "3.2";
propagatedBuildInputs = [ atpublic ];
@ -12,6 +12,6 @@ buildPythonPackage rec {
src = fetchPypi {
inherit pname version;
sha256 = "b7016000a4d9d366866e776947b777dbe2b1532c990efeef58922b3e7bcab36e";
sha256 = "c35c8f8eab66adb7fd64a1420860105066d2b36cb655b33ffb14afe8e223ed62";
};
}

View File

@ -4,11 +4,11 @@
buildPythonPackage rec {
pname = "flufl.lock";
version = "5.1";
version = "6.0";
src = fetchPypi {
inherit pname version;
sha256 = "1ylahys50vaj7iw6dhlx3gg7vs8cms9sbyvscd06clnk6lqk78ad";
sha256 = "fc748ee609ec864b4838ef649dbd1170fa79deb0c213c2fd51151bee6a7fc242";
};
propagatedBuildInputs = [ atpublic psutil ];

View File

@ -7,13 +7,13 @@
buildPythonPackage rec {
pname = "hatasmota";
version = "0.2.20";
version = "0.2.21";
src = fetchFromGitHub {
owner = "emontnemery";
repo = pname;
rev = version;
sha256 = "1qdvm1bnn7x2mf4fq997gvq6a5901ndhd2s75h92zsgmlcp7rc77";
sha256 = "sha256-mtn/r6pvHeGMLkvUP4w6CT+2+viLna4Vvn9RFMEmqts=";
};
propagatedBuildInputs = [

View File

@ -1,30 +1,24 @@
{ lib, buildPythonPackage, isPy3k, fetchFromGitHub, twisted, ldaptor, configobj }:
{ lib, buildPythonPackage, fetchFromGitHub, twisted, ldaptor, configobj }:
buildPythonPackage rec {
pname = "privacyidea-ldap-proxy";
version = "0.6.1";
# https://github.com/privacyidea/privacyidea-ldap-proxy/issues/50
disabled = isPy3k;
version = "0.6.2";
src = fetchFromGitHub {
owner = "privacyidea";
repo = pname;
rev = "v${version}";
sha256 = "1kc1n9wr1a66xd5zvl6dq78xnkqkn5574jpzashc99pvm62dr24j";
sha256 = "sha256-U2lg4zDQKn9FQ7O0zSLaijIkXKVjg8wi2ItueF4ACDU=";
};
propagatedBuildInputs = [ twisted ldaptor configobj ];
# python 2 zope.interface test import path issues
doCheck = false;
pythonImportsCheck = [ "pi_ldapproxy" ];
meta = with lib; {
description = "LDAP Proxy to intercept LDAP binds and authenticate against privacyIDEA";
homepage = "https://github.com/privacyidea/privacyidea-ldap-proxy";
license = licenses.agpl3;
license = licenses.agpl3Only;
maintainers = [ maintainers.globin ];
};
}

View File

@ -8,7 +8,7 @@
buildPythonPackage rec {
pname = "pycontrol4";
version = "0.3.0";
version = "0.3.1";
disabled = pythonOlder "3.6";
@ -16,7 +16,7 @@ buildPythonPackage rec {
owner = "lawtancool";
repo = "pyControl4";
rev = "v${version}";
sha256 = "sha256-z7MDz9fGwZY4JcqabeYFGZ9nsRU2qa5LYnNQx/ae/4Y=";
sha256 = "068iiyi17ndv6cv124r5dzvififblbi2zw7jgnzb5xi0q093czkj";
};
propagatedBuildInputs = [

View File

@ -0,0 +1,67 @@
{ lib, stdenv, fetchFromGitHub, capnproto }:
stdenv.mkDerivation {
pname = "ekam";
version = "unstable-2021-09-18";
src = fetchFromGitHub {
owner = "capnproto";
repo = "ekam";
rev = "77c338f8bd8f4a2ce1e6199b2a52363f1fccf388";
sha256 = "0q4bizlb1ykzdp4ca0kld6xm5ml9q866xrj3ijffcnyiyqr51qr8";
};
# The capnproto *source* is required to build ekam.
# https://github.com/capnproto/ekam/issues/5
#
# Specifically, the git version of the source is required, as
# capnproto release tarballs do not include ekam rule files.
postUnpack = ''
mkdir -p $sourceRoot/deps
cp -r ${capnproto.src} $sourceRoot/deps/capnproto
'';
postPatch = ''
# A single capnproto test file expects to be able to write to
# /var/tmp. We change it to use /tmp because /var is not available
# under nix-build.
substituteInPlace deps/capnproto/c++/src/kj/filesystem-disk-test.c++ \
--replace "/var/tmp" "/tmp"
'';
# NIX_ENFORCE_PURITY prevents ld from linking against anything outside
# of the nix store -- but ekam builds capnp locally and links against it,
# so that causes the build to fail. So, we turn this off.
#
# See: https://nixos.wiki/wiki/Development_environment_with_nix-shell#Troubleshooting
preBuild = ''
unset NIX_ENFORCE_PURITY
'';
makeFlags = [
"PARALLEL=$(NIX_BUILD_CORES)"
];
installPhase = ''
mkdir $out
cp -r bin $out
# Remove capnproto tools; there's a separate nix package for that.
rm $out/bin/capnp*
# Don't distribute ekam-bootstrap, which is not needed outside this build.
rm $out/bin/ekam-bootstrap
'';
meta = with lib; {
description = ''Build system ("make" in reverse)'';
longDescription = ''
Ekam ("make" spelled backwards) is a build system which automatically
figures out what to build and how to build it purely based on the
source code. No separate "makefile" is needed.
'';
homepage = "https://github.com/capnproto/ekam";
license = licenses.asl20;
platforms = platforms.linux;
maintainers = [ maintainers.garrison ];
};
}

View File

@ -21,6 +21,7 @@ buildGoModule rec {
};
meta = with lib; {
mainProgram = "drone";
maintainers = with maintainers; [ ];
license = licenses.asl20;
description = "Command line client for the Drone continuous integration server";

View File

@ -2,16 +2,16 @@
buildGoModule rec {
pname = "kustomize-sops";
version = "2.6.0";
version = "3.0.1";
src = fetchFromGitHub {
owner = "viaduct-ai";
repo = pname;
rev = "v${version}";
sha256 = "sha256-3dSWIDPIT4crsJuaB1TDfrUzobn8RfRlFAhqMXzZbKI=";
sha256 = "sha256-ZCEgv+2WC1XLDFdRtigkiWu81zLMHvmB8vvIBWN2UYY=";
};
vendorSha256 = "sha256-+MVViFwaApGZZxCyTwLzIEWTZDbr7WSx7e/yGbJ309Y=";
vendorSha256 = "sha256-LFa0s2FBkw97P0CV+9JBmUAjaKVO+RzCX+iWGPUD9iA=";
installPhase = ''
mkdir -p $out/lib/viaduct.ai/v1/ksops-exec/

View File

@ -5,8 +5,8 @@ luarocks.overrideAttrs(old: {
src = fetchFromGitHub {
owner = "nix-community";
repo = "luarocks-nix";
rev = "test-speedup";
sha256 = "sha256-WfzLSpIp0V7Ib4sjYvoJHF+/vHaieccvfVAr5W47QsQ=";
rev = "standalone";
sha256 = "sha256-53Zi+GTayO9EQTCIVrzPeRRHeIkHLqy0mHyBDzbcQQk=";
};
patches = [];

File diff suppressed because it is too large

View File

@ -1,8 +1,9 @@
{ rustPlatform
, fetchFromGitHub
, lib
, openssl
{ lib
, rustPlatform
, fetchCrate
, nodejs
, pkg-config
, openssl
, stdenv
, curl
, Security
@ -13,32 +14,27 @@ rustPlatform.buildRustPackage rec {
pname = "wasm-bindgen-cli";
version = "0.2.78";
src =
let
tarball = fetchFromGitHub {
owner = "rustwasm";
repo = "wasm-bindgen";
rev = version;
hash = "sha256-1Z5d4gjZUic6Yrd+O8oLWYpJqAYGcByZYP0H1iInXHA=";
};
in
runCommand "source" { } ''
cp -R ${tarball} $out
chmod -R +w $out
cp ${./Cargo.lock} $out/Cargo.lock
'';
src = fetchCrate {
inherit pname version;
sha256 = "sha256-5s+HidnVfDV0AXA+/YcXNGVjv/E9JeK0Ttng4mCVX8M=";
};
cargoSha256 = "sha256-CbtjUFwowP/QqyAMCzmUiSib4EpRhQAmO4ekX00xYGE=";
buildInputs = [ openssl ] ++ lib.optionals stdenv.isDarwin [ Security curl ];
nativeBuildInputs = [ pkg-config ];
cargoHash = "sha256-RixIEat7EzGzgSQTnPennePpiucmAatrDGhbFSfTajo=";
cargoBuildFlags = [ "-p" pname ];
buildInputs = [ openssl ] ++ lib.optionals stdenv.isDarwin [ curl Security ];
checkInputs = [ nodejs ];
# other tests require it to be run in the wasm-bindgen monorepo
cargoTestFlags = [ "--test=interface-types" ];
meta = with lib; {
homepage = "https://rustwasm.github.io/docs/wasm-bindgen/";
license = licenses.asl20;
license = with licenses; [ asl20 /* or */ mit ];
description = "Facilitating high-level interactions between wasm modules and JavaScript";
maintainers = with maintainers; [ ma27 nitsky rizary ];
platforms = platforms.unix;
mainProgram = "wasm-bindgen";
};
}

View File

@ -0,0 +1,17 @@
# This file has been generated by node2nix 1.9.0. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-14_x"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}

View File

@ -0,0 +1,14 @@
{ callPackage, fetchFromGitHub, lib, pkgs }:
let
nodePackages = import ./composition.nix { inherit pkgs; };
in
nodePackages.package.override {
preRebuild = ''
export ESBUILD_BINARY_PATH="${pkgs.esbuild_netlify}/bin/esbuild"
'';
src = fetchFromGitHub (builtins.fromJSON (builtins.readFile ./netlify-cli.json));
bypassCache = true;
reconstructLock = true;
passthru.tests.test = callPackage ./test.nix { };
meta.maintainers = with lib.maintainers; [ roberth ];
}

View File

@ -0,0 +1,14 @@
#!/usr/bin/env bash
set -eu -o pipefail
cd "$( dirname "${BASH_SOURCE[0]}" )"
rm -f ./node-env.nix
src="$(nix-build --expr '(import ../../../.. {}).fetchFromGitHub (builtins.fromJSON (builtins.readFile ./netlify-cli.json))')"
echo $src
node2nix \
--input $src/package.json \
--lock $src/npm-shrinkwrap.json \
--output node-packages.nix \
--composition composition.nix \
--node-env node-env.nix \
--nodejs-14 \
;

View File

@ -0,0 +1,7 @@
{
"owner": "netlify",
"repo": "cli",
"rev": "a50e410fddda92d3f3f256321eddefb8cb8ba6e1",
"sha256": "sisX58I5UxxEPGCh5JGtQHw72A4+pLuENpBB9WKRTZc=",
"fetchSubmodules": false
}

View File

@ -0,0 +1,573 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
includeDependencies = {dependencies}:
lib.optionalString (dependencies != [])
(lib.concatMapStrings (dependency:
''
# Bundle the dependencies of the package
mkdir -p node_modules
cd node_modules
# Only include dependencies if they don't exist. They may also be bundled in the package.
if [ ! -e "${dependency.name}" ]
then
${composePackage dependency}
fi
cd ..
''
) dependencies);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
DIR=$(pwd)
cd $TMPDIR
unpackFile ${src}
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/${packageName}")"
if [ -f "${src}" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/${packageName}"
elif [ -d "${src}" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash ${src})"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/${packageName}"
fi
# Unset the stripped name to not confuse the next unpack step
unset strippedName
# Include the dependencies of the package
cd "$DIR/${packageName}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
replaceDependencies(packageObj.optionalDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 1,
requires: true,
dependencies: {}
};
function augmentPackageJSON(filePath, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, dependencies);
}
});
} else {
augmentPackageJSON(filePath, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
fi
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}-${version}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
{ name
, packageName
, version
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}-${version}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
# Builds a development shell
buildNodeShell =
{ name
, packageName
, version
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
nodeDependencies = buildNodeDependencies args;
in
stdenv.mkDerivation {
name = "node-shell-${name}-${version}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
};
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

File diff suppressed because it is too large

View File

@ -0,0 +1,7 @@
{ pkgs ? import ../../../.. {} }:
pkgs.mkShell {
nativeBuildInputs = [
pkgs.nodePackages.node2nix
pkgs.nix-prefetch-github
];
}

View File

@ -0,0 +1,37 @@
{
curl,
netlify-cli,
runCommand,
}:
runCommand "netlify-cli-test" {
nativeBuildInputs = [
netlify-cli
curl
];
meta.timeout = 600;
} ''
mkdir home
export HOME=$PWD/home
# Create a simple site
echo '<h1>hi</h1>' >index.html
echo '/with-redirect /' >_redirects
# Start a local server and wait for it to respond
netlify dev --offline --port 8888 2>&1 | tee log &
sleep 0.1 || true
for (( i=0; i<300; i++ )); do
if grep --ignore-case 'Server now ready' <log; then
break
else
sleep 1
fi
done
# Test the local server
curl -L http://localhost:8888/with-redirect | grep '<h1>hi</h1>'
# Success
touch $out
''
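A test derivation like this is conventionally referenced from its package via passthru.tests so it can be built on demand; the wiring below is a hypothetical sketch (attribute names and the ./test.nix path are assumptions, not shown in this diff):

  # Inside the netlify-cli package expression (illustrative placement):
  passthru.tests = {
    basic = callPackage ./test.nix { };
  };
  # which would then be buildable as, e.g.:
  #   nix-build -A netlify-cli.tests.basic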

View File

@ -0,0 +1,11 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash
set -euo pipefail
mv netlify-cli.json{,.old}
nix-prefetch-github-latest-release netlify cli >netlify-cli.json
if ! diff -U3 netlify-cli.json{.old,}; then
echo New version detected\; generating expressions...
./generate.sh
fi
rm -f netlify-cli.json.old

View File

@ -0,0 +1,103 @@
{ stdenv, lib, buildFHSUserEnv, dpkg, glibc, gcc-unwrapped, autoPatchelfHook, fetchurl, wrapGAppsHook
, gnome2, xorg
, libSM, libXxf86vm, libX11, glib, pango, cairo, gtk2-x11, zlib, openssl
, libpulseaudio
, SDL2, xorg_sys_opengl, libGL
}:
let
runescape = stdenv.mkDerivation rec {
pname = "runescape-launcher";
version = "2.2.9";
src = fetchurl {
url = "https://content.runescape.com/downloads/ubuntu/pool/non-free/r/${pname}/${pname}_${version}_amd64.deb";
sha256 = "0r5v1pwh0aas31b1d3pkrc8lqmqz9b4fml2b4kxmg5xzp677h271";
};
nativeBuildInputs = [
autoPatchelfHook
wrapGAppsHook
dpkg
];
buildInputs = [
glibc
gcc-unwrapped
libSM
libXxf86vm
libX11
glib
pango
cairo
gtk2-x11
zlib
openssl
];
runtimeDependencies = [
libpulseaudio
libGL
SDL2
xorg_sys_opengl
openssl
zlib
];
dontUnpack = true;
preBuild = ''
export DH_VERBOSE=1
'';
envVarsWithXmodifiers = ''
export MESA_GLSL_CACHE_DIR=~/Jagex
export GDK_SCALE=2
unset XMODIFIERS
'';
installPhase = ''
mkdir -p $out/bin $out/share
dpkg -x $src $out
patchShebangs $out/usr/bin/runescape-launcher
substituteInPlace $out/usr/bin/runescape-launcher \
--replace "unset XMODIFIERS" "$envVarsWithXmodifiers" \
--replace "/usr/share/games/runescape-launcher/runescape" "$out/share/games/runescape-launcher/runescape"
cp -r $out/usr/bin $out/
cp -r $out/usr/share $out/
rm -r $out/usr
'';
meta = with lib; {
description = "Launcher for RuneScape 3, the current main RuneScape";
homepage = "https://www.runescape.com/";
license = licenses.unfree;
maintainers = with lib.maintainers; [ grburst ];
platforms = [ "x86_64-linux" ];
};
};
in
/*
* We can patch the runescape launcher, but it downloads a client at runtime and checks it for changes.
* For that we need to use buildFHSUserEnv.
* An FHS environment simulates a classic Linux filesystem layout, so the launcher finds its runtime files where it expects them.
*/
buildFHSUserEnv {
name = "RuneScape";
targetPkgs = pkgs: [
runescape
dpkg glibc gcc-unwrapped
libSM libXxf86vm libX11 glib pango cairo gtk2-x11 zlib openssl
libpulseaudio
xorg.libX11
SDL2 xorg_sys_opengl libGL
];
multiPkgs = pkgs: [ libGL ];
runScript = "runescape-launcher";
}

View File

@ -329,12 +329,12 @@ final: prev:
better-escape-nvim = buildVimPluginFrom2Nix {
pname = "better-escape.nvim";
version = "2021-10-09";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "max397574";
repo = "better-escape.nvim";
rev = "a644df43e4607df2e645cb7b6f50ede79fffe477";
sha256 = "1kjc1h35xfqgsqbg00hsvg52wkjskfilxxsxnnlimcsv9v8iadmh";
rev = "bbb8b0e3d1b1088a1db6b5ece776a41709929128";
sha256 = "1xiaywjy12rj25qdq5ys8ayrwlw9p9frpjzzagx1p6zgp03jpq63";
};
meta.homepage = "https://github.com/max397574/better-escape.nvim/";
};
@ -461,12 +461,12 @@ final: prev:
chadtree = buildVimPluginFrom2Nix {
pname = "chadtree";
version = "2021-10-17";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "ms-jpq";
repo = "chadtree";
rev = "63259154cae7c01a877cd493c7ff53d8bbaf36a8";
sha256 = "0072yqwcs23j1q5akybc9q933lg3davrbh10rjqs690b8p69rz0p";
rev = "47071cacfb9bcd5ac86dddb1e3ef272ca7ac254d";
sha256 = "1b2dx5j44441xgfk3dj8f135kim38fnp2s8rpf098q8r2gn3zv8d";
};
meta.homepage = "https://github.com/ms-jpq/chadtree/";
};
@ -870,12 +870,12 @@ final: prev:
comment-nvim = buildVimPluginFrom2Nix {
pname = "comment.nvim";
version = "2021-10-17";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "numtostr";
repo = "comment.nvim";
rev = "a86384dc153e86a9a3cbec1436a8acfa4b07690d";
sha256 = "043nczs67sa7i2nmmadhrd52d3yzjr1kcm3bgb4q5zig1a7r4dcj";
rev = "5365cc7f1fc2522ffa1b560830d3125372928d6a";
sha256 = "0lvcl3pykcry35r6c9fxmjklzvzz7lpxfchbz1qgadpq45pidyir";
};
meta.homepage = "https://github.com/numtostr/comment.nvim/";
};
@ -1014,12 +1014,12 @@ final: prev:
conflict-marker-vim = buildVimPluginFrom2Nix {
pname = "conflict-marker.vim";
version = "2020-09-23";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "rhysd";
repo = "conflict-marker.vim";
rev = "6a9b8f92a57ea8a90cbf62c960db9e5894be2d7a";
sha256 = "0vw5kvnmwwia65gni97vk42b9s47r3p5bglrhpcxsvs3f4s250vq";
rev = "22b6133116795ea8fb6705ddca981aa8faecedda";
sha256 = "0rh3c1sl145hwyh6idwgyqbrgnwvd91spxc5qs2hfr1xsh53ssx2";
};
meta.homepage = "https://github.com/rhysd/conflict-marker.vim/";
};
@ -1907,12 +1907,12 @@ final: prev:
floobits-neovim = buildVimPluginFrom2Nix {
pname = "floobits-neovim";
version = "2018-08-01";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "floobits";
repo = "floobits-neovim";
rev = "29ab2ed4bd5c879df0bd6df313a776155eb98ad8";
sha256 = "0bnncn3waw9birpd51j27hrzlriz8dk4naxdajmbwznwcnbkkgwx";
rev = "dbfa051e4f097dfa3f46997a2019556a62861258";
sha256 = "1zsr1536qf7zqdskpshf366m333w66hfjrfdw3ws5yz2l7kq5bcm";
};
meta.homepage = "https://github.com/floobits/floobits-neovim/";
};
@ -2159,12 +2159,12 @@ final: prev:
gitlinker-nvim = buildVimPluginFrom2Nix {
pname = "gitlinker.nvim";
version = "2021-10-12";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "ruifm";
repo = "gitlinker.nvim";
rev = "a727080a527cb0f01843b59e9c445d7c4dce0f12";
sha256 = "0mbnbiikavl6p0jsaal7vmxafrzdcn59xngwszx16qw8f00l76hw";
rev = "a6fb6d1ec2746fc18f81433648416edfb1a96d43";
sha256 = "0c417li0jx970h8qkgrww7ifzjjknfi26i7n13qx96c5axjxady5";
};
meta.homepage = "https://github.com/ruifm/gitlinker.nvim/";
};
@ -2964,12 +2964,12 @@ final: prev:
lightspeed-nvim = buildVimPluginFrom2Nix {
pname = "lightspeed.nvim";
version = "2021-10-09";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "ggandor";
repo = "lightspeed.nvim";
rev = "0836af9ad0c4bb913c8e00595bb25620c894fb97";
sha256 = "0vj84656vifg5c47m2pj7sfp9gz5ikpq9n0p57047d8r1arw50xg";
rev = "d2d47534b00d6fcd16cabab8ec8a6cd15c40ebf3";
sha256 = "13yb0srx7g9yf6rrr0bycxr4kac1ip1a1nzz27hamfkq3l9rcgn5";
};
meta.homepage = "https://github.com/ggandor/lightspeed.nvim/";
};
@ -3900,12 +3900,12 @@ final: prev:
null-ls-nvim = buildVimPluginFrom2Nix {
pname = "null-ls.nvim";
version = "2021-10-15";
version = "2021-10-17";
src = fetchFromGitHub {
owner = "jose-elias-alvarez";
repo = "null-ls.nvim";
rev = "f9d3acd1a4e3b7e6d84951754fbdaadb196ae0fd";
sha256 = "15jk540qb2zwpq3vh31skdl1gn9v6y28vqv9jrw78fjmhrs99lld";
rev = "9534bda66ec8e919bace771bec74678b50b87a88";
sha256 = "0n61rf8qg8kqxa9hmf7jvnrj36xqi5ml9h3kfwszzbjmq89533kw";
};
meta.homepage = "https://github.com/jose-elias-alvarez/null-ls.nvim/";
};
@ -4008,12 +4008,12 @@ final: prev:
nvim-cmp = buildVimPluginFrom2Nix {
pname = "nvim-cmp";
version = "2021-10-16";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "hrsh7th";
repo = "nvim-cmp";
rev = "4ecf2a24265626a2c00427394deb4747c7b9d5dc";
sha256 = "1p134wb9g2ki0gn7aqhsh64frcx5n00npkcj1lgnzcc4cc95p7d4";
rev = "a6a98856c3986de675bc40c9c98b7458fb19e95c";
sha256 = "0x0hzymvna939iscz0llm64ic28iinh4bn6xihv8afgm693j3jbi";
};
meta.homepage = "https://github.com/hrsh7th/nvim-cmp/";
};
@ -4212,12 +4212,12 @@ final: prev:
nvim-lsp-ts-utils = buildVimPluginFrom2Nix {
pname = "nvim-lsp-ts-utils";
version = "2021-10-03";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "jose-elias-alvarez";
repo = "nvim-lsp-ts-utils";
rev = "efa321ad03fbffeca699bc04ca1a59db0c54d16f";
sha256 = "1bxj37jfcq6vrxpl5cslzmg03aqf2i13i71birvcvlw6n4p3kbbm";
rev = "cae4c06308c1ba4f2fdde31836fd98de3fc3e2b5";
sha256 = "1s2jbl4wpylvqfc4mrycd211xsi1p97r8r579fccjxpswvsm4bbk";
};
meta.homepage = "https://github.com/jose-elias-alvarez/nvim-lsp-ts-utils/";
};
@ -4228,8 +4228,8 @@ final: prev:
src = fetchFromGitHub {
owner = "neovim";
repo = "nvim-lspconfig";
rev = "9661d267f64a872a451dab0453b2b951e55cef27";
sha256 = "0hv77ridsxi5n2qibrp9a66kbmlw1c9yl8gpakr1iz2hhazq58x9";
rev = "7f902f952944aa708c78138f6536c0dc55aec3a2";
sha256 = "1n8srlrfliak2587r30la87x3jgl9iq1x8jdxlhrx7i874ha3ykp";
};
meta.homepage = "https://github.com/neovim/nvim-lspconfig/";
};
@ -4704,12 +4704,12 @@ final: prev:
plenary-nvim = buildVimPluginFrom2Nix {
pname = "plenary.nvim";
version = "2021-10-06";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "nvim-lua";
repo = "plenary.nvim";
rev = "80bb2b9bb74bdca38a46480b6f2e15af990406e4";
sha256 = "11akcpxcp4m997a2y76ajknnmsifac2hj4nq9i4a8b1j08bxinim";
rev = "901b96d37a30be0504c97cc2c05d3a99b4cca842";
sha256 = "14nkpj4x9213waqsy93sdgnll42s4dxxpq5kv6g8w015drjrbwhv";
};
meta.homepage = "https://github.com/nvim-lua/plenary.nvim/";
};
@ -4897,12 +4897,12 @@ final: prev:
rainbow = buildVimPluginFrom2Nix {
pname = "rainbow";
version = "2020-05-28";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "luochen1990";
repo = "rainbow";
rev = "4d15633cdaf61602e1d9fd216a77fc02e0881b2d";
sha256 = "168mbdf2h3zhkqrdyyhh0pbkjdvxwida80rdwk8ml97mxxii8ziw";
rev = "e96f502eb0e88968858a2cb0348c8e2253637bcc";
sha256 = "02lj86yrqjpcs83z4qfh3nhiy4nsq1fdrrcy3x56701zsiml511i";
};
meta.homepage = "https://github.com/luochen1990/rainbow/";
};
@ -4993,12 +4993,12 @@ final: prev:
registers-nvim = buildVimPluginFrom2Nix {
pname = "registers.nvim";
version = "2021-10-16";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "tversteeg";
repo = "registers.nvim";
rev = "6445f9131d872d6bf2236ea301e5d400a96f961d";
sha256 = "0g60vks084jcajv1rsjnkn4idcwvzfbwqwnpkcw0xh009p71wyjd";
rev = "35227ec930cfa836f9a82bfdc3afc302b68a372f";
sha256 = "1pcc5bhacs6h0bxr3ksr6bwdgl75irqwmiwk4l3dwifdj1arhvq7";
};
meta.homepage = "https://github.com/tversteeg/registers.nvim/";
};
@ -5595,12 +5595,12 @@ final: prev:
syntastic = buildVimPluginFrom2Nix {
pname = "syntastic";
version = "2021-09-06";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "vim-syntastic";
repo = "syntastic";
rev = "97bf9ec720662af51ae403b6dfe720d4a24bfcbc";
sha256 = "0rcivwwvb6hmd420jkgy8gnzyv78z0bb8gw6232qrwf9m2lskzad";
rev = "d086f49d389e1c2d58211b1f92cf20c9f63dc325";
sha256 = "0cnd2m1dnx69657dpckiiy1slx2fpnpggm3qs0nzm5rm3qpzx185";
};
meta.homepage = "https://github.com/vim-syntastic/syntastic/";
};
@ -7638,12 +7638,12 @@ final: prev:
vim-fugitive = buildVimPluginFrom2Nix {
pname = "vim-fugitive";
version = "2021-10-17";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "tpope";
repo = "vim-fugitive";
rev = "93f41ace7dc068cf89314bcea23bdf8da686407a";
sha256 = "0hbif3hndmn7jll4w1kci6x3j7379wmpp9rnxg8rlbbnyxl2fpxj";
rev = "4d29c1d6a0def18923b4762c8f85ca3ee5ae6c83";
sha256 = "1m8qw6pqgyvfnbph8xwpsvgwdyapsg2abxbpqvsjhcg6ylbxfx17";
};
meta.homepage = "https://github.com/tpope/vim-fugitive/";
};
@ -7770,12 +7770,12 @@ final: prev:
vim-go = buildVimPluginFrom2Nix {
pname = "vim-go";
version = "2021-10-11";
version = "2021-10-17";
src = fetchFromGitHub {
owner = "fatih";
repo = "vim-go";
rev = "f75762b6252fcdcfe6167e895e8171fb1ecfcd34";
sha256 = "0xgzl4ms1x9gp6pa5krasks106958cr69kza1ybqy56kbngghzq0";
rev = "66ce1595569513a23e3e0dc7aeb8bcacec3b220c";
sha256 = "0bcbrbyqbn993g1i5py2ix8rnsvcxzyhn9fbk7c08257l2i7cc6x";
};
meta.homepage = "https://github.com/fatih/vim-go/";
};
@ -8504,12 +8504,12 @@ final: prev:
vim-lsp-cxx-highlight = buildVimPluginFrom2Nix {
pname = "vim-lsp-cxx-highlight";
version = "2021-04-29";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "jackguo380";
repo = "vim-lsp-cxx-highlight";
rev = "9e42350272a413a51ce9a93b00c98682815ff7c1";
sha256 = "1nsac8f2c0lj42a77wxcv3k6i8sbpm5ghip6nx7yz0dj7zd4xm10";
rev = "679db721db12c2a1c3ae7addf2cc17ae9a26cf08";
sha256 = "064sqa11hnnxj6fnnp9dvd7m367ywg6vzwvqxawqv3cwvgr7123w";
};
meta.homepage = "https://github.com/jackguo380/vim-lsp-cxx-highlight/";
};
@ -9717,12 +9717,12 @@ final: prev:
vim-sneak = buildVimPluginFrom2Nix {
pname = "vim-sneak";
version = "2021-07-11";
version = "2021-10-17";
src = fetchFromGitHub {
owner = "justinmk";
repo = "vim-sneak";
rev = "95374ad3e4b5ef902854e8f4bcfa9a7a31a91d71";
sha256 = "0ns80kjirk72l5lapl7m32ybyr5q71p2mr8a45ihh1k2dlc2wv06";
rev = "94c2de47ab301d476a2baec9ffda07367046bec9";
sha256 = "110f06rf1m6p0asr5h4sr80wpwji3krwna5vdn6aakvcr8a7qqdi";
};
meta.homepage = "https://github.com/justinmk/vim-sneak/";
};
@ -9982,12 +9982,12 @@ final: prev:
vim-test = buildVimPluginFrom2Nix {
pname = "vim-test";
version = "2021-10-15";
version = "2021-10-17";
src = fetchFromGitHub {
owner = "vim-test";
repo = "vim-test";
rev = "5fe8ada99a3c711c8db65eb341c3f631043c4a26";
sha256 = "1vz28llgmjybg2hgxlv6d9q69f7lhj32id531p2hlnhxl49s9jbn";
rev = "2052bd926c37b5c828dd07c47ac676102ca34a0e";
sha256 = "1lyx57jryxx5r9s60hg3v6y0kx5p2qc408nl8zhffqhzzmxvkgp9";
};
meta.homepage = "https://github.com/vim-test/vim-test/";
};
@ -10222,12 +10222,12 @@ final: prev:
vim-ultest = buildVimPluginFrom2Nix {
pname = "vim-ultest";
version = "2021-10-05";
version = "2021-10-17";
src = fetchFromGitHub {
owner = "rcarriga";
repo = "vim-ultest";
rev = "7861d1925baef8fe3fa313affdfbdcaa6b2af26f";
sha256 = "165klmixdch1nc9cxdldl5yg4q79q58riw0mg0mahqvvr5m1yrw3";
rev = "71290da8930cddb946758e108f01a9546c5dda75";
sha256 = "0jjb8b20dy65rjfmhhxq4jlcpdfqvmi4ianc6h2m6n08h5a4iw61";
};
meta.homepage = "https://github.com/rcarriga/vim-ultest/";
};
@ -10246,12 +10246,12 @@ final: prev:
vim-unimpaired = buildVimPluginFrom2Nix {
pname = "vim-unimpaired";
version = "2021-09-24";
version = "2021-10-18";
src = fetchFromGitHub {
owner = "tpope";
repo = "vim-unimpaired";
rev = "39f195d7e66141d7f1fa683927547026501e9961";
sha256 = "0bbiv32brznns82v8s0s2fylcn4j5d3vw4x2kp5h6zb4lqgya30q";
rev = "e4006d68cd4f390efef935bc09be0ce3bd022e72";
sha256 = "048n8p7bjpcwdk924glqkwkp10fl813ffrjsagwwzsnakax3da5f";
};
meta.homepage = "https://github.com/tpope/vim-unimpaired/";
};
@ -10655,12 +10655,12 @@ final: prev:
vimtex = buildVimPluginFrom2Nix {
pname = "vimtex";
version = "2021-10-13";
version = "2021-10-17";
src = fetchFromGitHub {
owner = "lervag";
repo = "vimtex";
rev = "4b3ceb9eb7f0e12c239d7ef4262728491aee89ba";
sha256 = "1q3ygk39pbm2bhq463c0rc7zqm5zzg2ln2k1qka1v2i4ghngaszi";
rev = "0824ade4187472fcdc1634f462da84b3cfc5931f";
sha256 = "0p7308x3yy9n43jhpggqb1vmz39k00ckx3svpxbckwh9y21hjxnc";
};
meta.homepage = "https://github.com/lervag/vimtex/";
};

View File

@ -3,7 +3,7 @@
with lib;
buildLinux (args // rec {
version = "4.14.250";
version = "4.14.251";
# modDirVersion needs to be x.y.z, will automatically add .0 if needed
modDirVersion = if (modDirVersionArg == null) then concatStringsSep "." (take 3 (splitVersion "${version}.0")) else modDirVersionArg;
@ -13,6 +13,6 @@ buildLinux (args // rec {
src = fetchurl {
url = "mirror://kernel/linux/kernel/v4.x/linux-${version}.tar.xz";
sha256 = "0zbf6gxm8iwf7cra310gfd0zyj8l9ckdahky9w6wb9wlznwkvd54";
sha256 = "08g1i6wylwi50rns2grdi9f0m8np64qvfqb28drafy772m2klinp";
};
} // (args.argsOverride or {}))

View File

@ -3,7 +3,7 @@
with lib;
buildLinux (args // rec {
version = "4.19.211";
version = "4.19.212";
# modDirVersion needs to be x.y.z, will automatically add .0 if needed
modDirVersion = if (modDirVersionArg == null) then concatStringsSep "." (take 3 (splitVersion "${version}.0")) else modDirVersionArg;
@ -13,6 +13,6 @@ buildLinux (args // rec {
src = fetchurl {
url = "mirror://kernel/linux/kernel/v4.x/linux-${version}.tar.xz";
sha256 = "1m3y5gsf5s8bb4jxkri20dlxi8aiqabzaijj2h1svz4r19ca8j7v";
sha256 = "0hxn3mzxh4hpnrkyjv9gipb81k6p0zd07a3xvb6fb6njvmwdpvsw";
};
} // (args.argsOverride or {}))

View File

@ -1,12 +1,12 @@
{ buildPackages, fetchurl, perl, buildLinux, nixosTests, stdenv, ... } @ args:
buildLinux (args // rec {
version = "4.4.288";
version = "4.4.289";
extraMeta.branch = "4.4";
extraMeta.broken = stdenv.isAarch64;
src = fetchurl {
url = "mirror://kernel/linux/kernel/v4.x/linux-${version}.tar.xz";
sha256 = "028w3yzmzymy7l1nf3n1pfbg95hfi4ilphkyl00ddgj8sfm542c7";
sha256 = "1g77kf0yzwvpdxs3kw7wdvb07mmk3zm6ydjcw5dnsza8q2inl69k";
};
} // (args.argsOverride or {}))

View File

@ -1,12 +1,12 @@
{ buildPackages, fetchurl, perl, buildLinux, nixosTests, stdenv, ... } @ args:
buildLinux (args // rec {
version = "4.9.286";
version = "4.9.287";
extraMeta.branch = "4.9";
extraMeta.broken = stdenv.isAarch64;
src = fetchurl {
url = "mirror://kernel/linux/kernel/v4.x/linux-${version}.tar.xz";
sha256 = "0id62p43dmkl9ir278p0dsfg3q7m1d8rlxv0c9qi1pzalkygv8p2";
sha256 = "1hlpxnlz0y5vxcmkavsirk2kfb2l34fcvmhlcb99slh28xkjhhfl";
};
} // (args.argsOverride or {}))

View File

@ -3,7 +3,7 @@
with lib;
buildLinux (args // rec {
version = "5.10.73";
version = "5.10.74";
# modDirVersion needs to be x.y.z, will automatically add .0 if needed
modDirVersion = if (modDirVersionArg == null) then concatStringsSep "." (take 3 (splitVersion "${version}.0")) else modDirVersionArg;
@ -13,6 +13,6 @@ buildLinux (args // rec {
src = fetchurl {
url = "mirror://kernel/linux/kernel/v5.x/linux-${version}.tar.xz";
sha256 = "0xhf0g5pra27hnavpy0y3mn05m5hqn5rd3d6fx0a3vr35c1jicpd";
sha256 = "1c717mn47mg43k7xfwydygwv14r67aksn1c24c99hf8qf14acmap";
};
} // (args.argsOverride or {}))

View File

@ -3,7 +3,7 @@
with lib;
buildLinux (args // rec {
version = "5.14.12";
version = "5.14.13";
# modDirVersion needs to be x.y.z, will automatically add .0 if needed
modDirVersion = if (modDirVersionArg == null) then concatStringsSep "." (take 3 (splitVersion "${version}.0")) else modDirVersionArg;
@ -13,6 +13,6 @@ buildLinux (args // rec {
src = fetchurl {
url = "mirror://kernel/linux/kernel/v5.x/linux-${version}.tar.xz";
sha256 = "0dswxf1qk70lms5lph15i7nz3ybwiia58v8zzrmi71ajviwjc9wd";
sha256 = "0kcn9g5jyd043f75wk3k34j430callzhw5jh1if9zacqq2s7haw3";
};
} // (args.argsOverride or { }))

View File

@ -3,7 +3,7 @@
with lib;
buildLinux (args // rec {
version = "5.4.153";
version = "5.4.154";
# modDirVersion needs to be x.y.z, will automatically add .0 if needed
modDirVersion = if (modDirVersionArg == null) then concatStringsSep "." (take 3 (splitVersion "${version}.0")) else modDirVersionArg;
@ -13,6 +13,6 @@ buildLinux (args // rec {
src = fetchurl {
url = "mirror://kernel/linux/kernel/v5.x/linux-${version}.tar.xz";
sha256 = "0jaz57sd51xqc7w8k3f43bfc5mdsh1413mdngqqsgr8isv4hg7vd";
sha256 = "01iwbz1ncakw90yykdw3cx04wnclwf1qa8nmlis08svbcvs99285";
};
} // (args.argsOverride or {}))

View File

@ -6,7 +6,7 @@
, ... } @ args:
let
version = "5.10.65-rt53"; # updated by ./update-rt.sh
version = "5.10.73-rt54"; # updated by ./update-rt.sh
branch = lib.versions.majorMinor version;
kversion = builtins.elemAt (lib.splitString "-" version) 0;
in buildLinux (args // {
@ -18,14 +18,14 @@ in buildLinux (args // {
src = fetchurl {
url = "mirror://kernel/linux/kernel/v5.x/linux-${kversion}.tar.xz";
sha256 = "0riyq1gdm18642fpnhpcw8hspcjqzprzhqnygjxabjjvrvdxxlzd";
sha256 = "0xhf0g5pra27hnavpy0y3mn05m5hqn5rd3d6fx0a3vr35c1jicpd";
};
kernelPatches = let rt-patch = {
name = "rt";
patch = fetchurl {
url = "mirror://kernel/linux/kernel/projects/rt/${branch}/older/patch-${version}.patch.xz";
sha256 = "1sxyic3895dch3x7cabiip5lxv9wqypn22hcy02jg9825260cmd3";
sha256 = "0szqm9f939p9z701i5hj881nf5bhfa0a6037bbcz974y0x8g9nmk";
};
}; in [ rt-patch ] ++ kernelPatches;

View File

@ -5,11 +5,11 @@
stdenv.mkDerivation rec {
pname = "knot-dns";
version = "3.1.2";
version = "3.1.3";
src = fetchurl {
url = "https://secure.nic.cz/files/knot-dns/knot-${version}.tar.xz";
sha256 = "580087695df350898b2da8a5c2bdf1dc5eb262ed5ff2cb1538cee480a50fa094";
sha256 = "a3fc448cbce3209575f93a3cf1224fa37802fc6606f7c7d4bb3aa6dbeaed2c64";
};
outputs = [ "bin" "out" "dev" ];

View File

@ -509,10 +509,10 @@ in
name = "vod";
owner = "kaltura";
repo = "nginx-vod-module";
rev = "e46079f51282d5a378e6911714b5f3a533bb7700";
sha256 = "0pzzq4xcq7jg8mxwnz7srj1nczg9ajd1b8q58qlm03lny8nd2hr5";
rev = "1.29";
sha256 = "1z0ka0cwqbgh3fv2d5yva395sf90626rdzx7lyfrgs89gy4h9nrr";
};
inputs = [ pkgs.ffmpeg_3 pkgs.fdk_aac pkgs.openssl pkgs.libxml2 pkgs.libiconv ];
inputs = with pkgs; [ ffmpeg fdk_aac openssl libxml2 libiconv ];
};
vts = {

View File

@ -1,23 +1,24 @@
{ lib, stdenv, fetchurl, fetchFromGitHub, buildGoPackage, buildEnv }:
{ lib, stdenv, fetchurl, fetchFromGitHub, buildGoModule, buildEnv }:
let
version = "5.32.1";
version = "5.37.2";
mattermost-server = buildGoPackage rec {
mattermost-server = buildGoModule rec {
pname = "mattermost-server";
inherit version;
src = fetchFromGitHub {
owner = "mattermost";
repo = "mattermost-server";
repo = pname;
rev = "v${version}";
sha256 = "BssrTfkIxUbXYXIfz9i+5b4rEYSzBim+/riK78m8Bxo=";
sha256 = "sha256-kO5wSj/ApPhS2k9a9VjS3Qk55azNZeiFmevAxSkdGe0=";
};
goPackagePath = "github.com/mattermost/mattermost-server";
vendorSha256 = null;
doCheck = false;
ldflags = [
"-X ${goPackagePath}/model.BuildNumber=nixpkgs-${version}"
"-s" "-w" "-X github.com/mattermost/mattermost-server/v${lib.versions.major version}/model.BuildNumber=${version}"
];
};
@ -28,7 +29,7 @@ let
src = fetchurl {
url = "https://releases.mattermost.com/${version}/mattermost-${version}-linux-amd64.tar.gz";
sha256 = "kRerl3fYRTrotj86AIFSor3GpjhABkCmego1ms9HmkQ=";
sha256 = "sha256-BzQVkOPo/f6O2ncQ0taS3cZkglOL+D+zBcfNYrpMgTM=";
};
installPhase = ''

View File

@ -2,25 +2,24 @@
buildGoModule rec {
pname = "hilbish";
version = "0.5.1";
version = "0.6.0";
src = fetchFromGitHub {
owner = "Rosettea";
repo = "Hilbish";
rev = "v${version}";
sha256 = "sha256-557Je9KeBpkZxVAxcjWAhybIJJYlzhtbnIyZh0rCRUc=";
sha256 = "sha256-ACHHHGT3VGnvZVi1UZb57+g/slcld5e3bh+DDhUVVpQ=";
fetchSubmodules = true;
};
vendorSha256 = "sha256-8l+Kb1ADMLwv0Hf/ikok8eUcEEST07rhk8BjHxJI0lc=";
vendorSha256 = "sha256-SVGPMFpQjVOWCfiPpEmqhp6MEO0wqeyAZVyeNmTuXl0=";
buildInputs = [ readline ];
ldflags = [ "-s" "-w" ];
postPatch = ''
# in master vars.go is called vars_linux.go
substituteInPlace vars.go \
substituteInPlace vars_linux.go \
--replace "/usr/share" "${placeholder "out"}/share/"
'';
@ -28,8 +27,13 @@ buildGoModule rec {
mkdir -p "$out/share/hilbish"
cp .hilbishrc.lua $out/share/hilbish/
cp -r docs -t $out/share/hilbish
cp -r libs -t $out/share/hilbish/
cp preload.lua $out/share/hilbish/
# segfaults and it's already been generated upstream
# we copy the docs over with the above cp command
rm $out/bin/docgen
'';
meta = with lib; {

View File

@ -13,7 +13,7 @@ let
pythonEnv = python.withPackages (p: with p; [
cffi
cryptography
pyopenssl
openssl
crcmod
] ++ lib.optional (with-gce) google-compute-engine);
@ -21,33 +21,33 @@ let
sources = name: system: {
x86_64-darwin = {
url = "${baseUrl}/${name}-darwin-x86_64.tar.gz";
sha256 = "1a17bbvimdqq4k25lprqk9cq3lpfchd65hzjf23ha4imndpbjgqr";
sha256 = "0cjy6znhpv90mj7463lghmzhivwhaxa7q9da37wdpwh53h7kf05r";
};
aarch64-darwin = {
url = "${baseUrl}/${name}-darwin-arm.tar.gz";
sha256 = "184k1kv10g4zzzxgmwpakvg5ffxhz01dd01kb5h32mf1j5fid1zh";
sha256 = "0phby3s9375zyphjwk1hrpr8fiybik1ag3yfnpmi7msq54lf4h3x";
};
x86_64-linux = {
url = "${baseUrl}/${name}-linux-x86_64.tar.gz";
sha256 = "0hhaq5hf5nvaah06h6v8q2hpn8hc815ihsi74dpwg6pmg9h266pr";
sha256 = "0j1n8mzck3sizjslm12x4lgxklw1xvbxp2186xnxm4pmj4kwp4k1";
};
i686-linux = {
url = "${baseUrl}/${name}-linux-x86.tar.gz";
sha256 = "14z1nzwc0j3qhbw2ldrskd8zjsslwgsw7pxxq3v0ypc1rjibsql5";
sha256 = "1sll47bhd4x5r0z65325ak0wbbky07qbzqkf7w97nilv7wz5dgxa";
};
aarch64-linux = {
url = "${baseUrl}/${name}-linux-arm.tar.gz";
sha256 = "0f6xrij2wbx57s4897bi12l9fz3flj1wyibbk7jjg0l5332h4yhr";
sha256 = "1jk17fn3q1i625q1cdyxlvv58rw9ma7lwvngc04jqrccczsl1jqr";
};
}.${system};
}.${system} or (throw "Unsupported system: ${system}");
in stdenv.mkDerivation rec {
pname = "google-cloud-sdk";
version = "351.0.0";
version = "360.0.0";
src = fetchurl (sources "${pname}-${version}" stdenv.hostPlatform.system);
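The hunk above also adds an `or` fallback to the per-system source table, so selecting sources for an unsupported platform now throws a readable error instead of a bare missing-attribute failure. A self-contained sketch of the same pattern (values are illustrative only):

  let
    pick = system:
      {
        x86_64-linux  = "linux-x86_64.tar.gz";
        aarch64-linux = "linux-arm.tar.gz";
      }.${system} or (throw "Unsupported system: ${system}");
  in pick "x86_64-linux"  # -> "linux-x86_64.tar.gz"; any other system throws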

View File

@ -33,15 +33,14 @@ diff --git a/lib/googlecloudsdk/api_lib/container/kubeconfig.py b/lib/googleclou
index 4330988d6..37424b841 100644
--- a/lib/googlecloudsdk/api_lib/container/kubeconfig.py
+++ b/lib/googlecloudsdk/api_lib/container/kubeconfig.py
@@ -255,7 +255,7 @@ def _AuthProvider(name='gcp'):
raise Error(SDK_BIN_PATH_NOT_FOUND)
@@ -352,7 +352,7 @@ def _AuthProvider(name='gcp',
if sdk_bin_path is None:
log.error(SDK_BIN_PATH_NOT_FOUND)
raise Error(SDK_BIN_PATH_NOT_FOUND)
- cmd_path = os.path.join(sdk_bin_path, bin_name)
+ cmd_path = bin_name
cfg = {
# Command for gcloud credential helper
- 'cmd-path': os.path.join(sdk_bin_path, bin_name),
+ 'cmd-path': bin_name,
# Args for gcloud credential helper
'cmd-args': 'config config-helper --format=json',
# JSONpath to the field that is the raw access token
--
2.21.0

View File

@ -2,7 +2,7 @@ diff --git a/platform/gsutil/gslib/command_runner.py b/platform/gsutil/gslib/com
index f490bb3..dc6bbcc 100644
--- a/platform/gsutil/gslib/command_runner.py
+++ b/platform/gsutil/gslib/command_runner.py
@@ -330,17 +330,6 @@ class CommandRunner(object):
@@ -330,18 +330,6 @@ class CommandRunner(object):
Returns:
Return value(s) from Command that was run.
"""
@ -17,10 +17,11 @@ index f490bb3..dc6bbcc 100644
- if system_util.IsRunningInteractively() and collect_analytics:
- metrics.CheckAndMaybePromptForAnalyticsEnabling()
-
- self.MaybePromptForPythonUpdate(command_name)
if not args:
args = []
@@ -413,18 +402,10 @@ class CommandRunner(object):
@@ -415,15 +403,6 @@ class CommandRunner(object):
ShutDownGsutil()
if GetFailureCount() > 0:
return_code = 1
@ -35,6 +36,10 @@ index f490bb3..dc6bbcc 100644
- )))
return return_code
def SkipUpdateCheck(self):
@@ -467,6 +446,7 @@ class CommandRunner(object):
return True
def MaybeCheckForAndOfferSoftwareUpdate(self, command_name, debug):
+ return False
"""Checks the last time we checked for an update and offers one if needed.

View File

@ -6,11 +6,11 @@ let
in
stdenv.mkDerivation rec {
pname = "bandwidth";
version = "1.10.1";
version = "1.10.4";
src = fetchurl {
url = "https://zsmith.co/archives/${pname}-${version}.tar.gz";
sha256 = "sha256-trya+/cBNIittQAc5tcykZbImeISqIolO/Y8uOI0jGk=";
sha256 = "sha256-e/eP2rA7ElFrh2Z4qTzRGR/cxY1UI6s+LQ9Og1x46/I=";
};
postPatch = ''

View File

@ -0,0 +1,40 @@
{ lib, buildGoModule, fetchFromGitHub, installShellFiles }:
buildGoModule rec {
pname = "steampipe";
version = "0.8.5";
src = fetchFromGitHub {
owner = "turbot";
repo = "steampipe";
rev = "v${version}";
sha256 = "sha256-3vetSUJwCeaBzKj+635siskfcDPs/kkgCH954cg/REA=";
};
vendorSha256 = "sha256-TGDFNHWWbEy1cD7b2yPqAN7rRrLvL0ZX/R3BWGRWjjw=";
# tests are failing for no obvious reasons
doCheck = false;
nativeBuildInputs = [ installShellFiles ];
ldflags = [
"-s"
"-w"
];
postInstall = ''
INSTALL_DIR=$(mktemp -d)
installShellCompletion --cmd steampipe \
--bash <($out/bin/steampipe --install-dir $INSTALL_DIR completion bash) \
--fish <($out/bin/steampipe --install-dir $INSTALL_DIR completion fish) \
--zsh <($out/bin/steampipe --install-dir $INSTALL_DIR completion zsh)
'';
meta = with lib; {
homepage = "https://steampipe.io/";
description = "select * from cloud;";
license = licenses.agpl3;
maintainers = with maintainers; [ hardselius ];
};
}

View File

@ -9,16 +9,16 @@
rustPlatform.buildRustPackage rec {
pname = "synth";
version = "0.5.6";
version = "0.6.0";
src = fetchFromGitHub {
owner = "getsynth";
repo = pname;
rev = "v${version}";
sha256 = "06kgzaja04553gaxrfz6d1rqi3xwa6ijl0q6425fg0mqq9ifv7xk";
sha256 = "sha256-i5X2HUOCgY2znH4rDzhFpsPXsFeM7GR4soAO/rFDjjo=";
};
cargoSha256 = "sha256-bjda4uE5K+cJkS2TsTv7FN3H6q3cElRr674FTKaIexA=";
cargoSha256 = "sha256-47i46Y6JjTGWC7mfMd2x2k8v0SY1o2UHdEU4rF0VrsY=";
nativeBuildInputs = [ pkg-config ];

View File

@ -2,16 +2,16 @@
rustPlatform.buildRustPackage rec {
pname = "tidy-viewer";
version = "0.0.21";
version = "0.0.22";
src = fetchFromGitHub {
owner = "alexhallam";
repo = "tv";
rev = version;
sha256 = "1zjnc2b9y2f6x114svp31r1lzkak4xfn71qrxch30mq2aj4yzd2l";
sha256 = "sha256-OuNpp9EKiU+fQSZZO7zpI36nCiZBXK6OznJGY4KMftU=";
};
cargoSha256 = "sha256-M6HInLevKvF4zBNe+Sg8fQK6koefRaO0l5AcrFhH+vI=";
cargoSha256 = "sha256-LRoNGDOrbCjmZJRe+EUKxkDJjC92bQRRwP48jqDp9PU=";
# this test parses command line arguments
# error: Found argument '--test-threads' which wasn't expected, or isn't valid in this context

View File

@ -1,36 +1,38 @@
{ lib, nimPackages, fetchFromGitLab
{ lib
, nimPackages
, fetchFromGitLab
, enableShells ? [ "bash" "zsh" "fish" "sh" "posh" ]
}:
nimPackages.buildNimPackage rec{
nimPackages.buildNimPackage {
name = "swaycwd";
version = "0.0.1";
name = "swaycwd";
version = "0.0.2";
src = fetchFromGitLab {
owner = "cab404";
repo = "swaycwd";
rev = "aca81695ec2102b9bca6f5bae364f69a8b9d399f";
hash = "sha256-MkyY3wWByQo0l0J28xKDfGtxfazVPRyZHCObl9Fszh4=";
};
src = fetchFromGitLab {
owner = "cab404";
repo = name;
rev = "v${version}";
hash = "sha256-OZWOPtOqcX+fVQCxWntrn98EzFu70WH55rfYCPDMSKk=";
};
preConfigure = ''
{
echo 'let enabledShells: seq[string] = @${builtins.toJSON enableShells}'
echo 'export enabledShells'
} > shells.nim
cat << EOF > swaycwd.nimble
srcDir = "."
bin = "swaycwd"
EOF
'';
preConfigure = ''
{
echo 'let enabledShells: seq[string] = @${builtins.toJSON enableShells}'
echo 'export enabledShells'
} > shells.nim
cat << EOF > swaycwd.nimble
srcDir = "."
bin = "swaycwd"
EOF
'';
nimFlags = [ "--opt:speed" ];
nimFlags = [ "--opt:speed" ];
meta = with lib; {
homepage = "https://gitlab.com/cab404/swaycwd";
description = "Returns cwd for shell in currently focused sway window, or home directory if cannot find shell";
maintainers = with maintainers; [ cab404 ];
platforms = platforms.linux;
license = licenses.gpl3Only;
};
meta = with lib; {
homepage = "https://gitlab.com/cab404/swaycwd";
description = "Returns cwd for shell in currently focused sway window, or home directory if cannot find shell";
maintainers = with maintainers; [ cab404 ];
platforms = platforms.linux;
license = licenses.gpl3Only;
};
}

View File

@ -548,6 +548,7 @@ mapAliases ({
mopidy-local-sqlite = throw "mopidy-local-sqlite has been removed as it's unmaintained. Its functionality has been merged into the mopidy-local extension."; # added 2020-10-18
mysql-client = hiPrio mariadb.client;
memtest86 = memtest86plus; # added 2019-05-08
mercurial_4 = throw "mercurial_4 has been removed as it's unmaintained"; # added 2021-10-18
mesa_noglu = mesa; # added 2019-05-28
# NOTE: 2018-07-12: legacy alias:
# grsecurity business is done: https://www.theregister.co.uk/2018/02/08/bruce_perens_grsecurity_anti_slapp/

View File

@ -2751,6 +2751,8 @@ with pkgs;
eggdrop = callPackage ../tools/networking/eggdrop { };
ekam = callPackage ../development/tools/build-managers/ekam { };
eksctl = callPackage ../tools/admin/eksctl { };
electronplayer = callPackage ../applications/video/electronplayer/electronplayer.nix { };
@ -3452,6 +3454,8 @@ with pkgs;
statserial = callPackage ../tools/misc/statserial { };
steampipe = callPackage ../tools/misc/steampipe { };
step-ca = callPackage ../tools/security/step-ca {
inherit (darwin.apple_sdk.frameworks) PCSC;
};
@ -5467,7 +5471,7 @@ with pkgs;
git-fame = callPackage ../applications/version-management/git-and-tools/git-fame {};
git-fast-export = callPackage ../applications/version-management/git-and-tools/fast-export { mercurial = mercurial_4; };
git-fast-export = callPackage ../applications/version-management/git-and-tools/fast-export { };
git-filter-repo = callPackage ../applications/version-management/git-and-tools/git-filter-repo {
pythonPackages = python3Packages;
@ -7655,7 +7659,7 @@ with pkgs;
netkittftp = callPackage ../tools/networking/netkit/tftp { };
netlify-cli = nodePackages.netlify-cli;
netlify-cli = callPackage ../development/web/netlify-cli { };
netpbm = callPackage ../tools/graphics/netpbm { };
@ -8996,6 +9000,8 @@ with pkgs;
runelite = callPackage ../games/runelite { };
runescape = callPackage ../games/runescape-launcher { };
runningx = callPackage ../tools/X11/runningx { };
rund = callPackage ../development/tools/rund { };
@ -10311,6 +10317,7 @@ with pkgs;
wasm-bindgen-cli = callPackage ../development/tools/wasm-bindgen-cli {
inherit (darwin.apple_sdk.frameworks) Security;
nodejs = nodejs_latest;
};
welkin = callPackage ../tools/graphics/welkin {};
@ -23405,6 +23412,8 @@ with pkgs;
ucs-fonts = callPackage ../data/fonts/ucs-fonts
{ inherit (buildPackages.xorg) fonttosfnt mkfontscale; };
bront_fonts = callPackage ../data/fonts/bront { };
ultimate-oldschool-pc-font-pack = callPackage ../data/fonts/ultimate-oldschool-pc-font-pack { };
ultralist = callPackage ../applications/misc/ultralist { };
@ -26255,9 +26264,6 @@ with pkgs;
inherit (xorg) libX11 libXrandr;
};
mercurial_4 = callPackage ../applications/version-management/mercurial/4.9.nix {
inherit (darwin.apple_sdk.frameworks) ApplicationServices;
};
mercurial = callPackage ../applications/version-management/mercurial {
inherit (darwin.apple_sdk.frameworks) ApplicationServices;
};

View File

@ -1408,6 +1408,8 @@ let
yaml = callPackage ../development/ocaml-modules/yaml { };
yaml-sexp = callPackage ../development/ocaml-modules/yaml/yaml-sexp.nix { };
yojson = callPackage ../development/ocaml-modules/yojson { };
z3 = callPackage ../development/ocaml-modules/z3 {

View File

@ -5872,6 +5872,8 @@ in {
prison = callPackage ../development/python-modules/prison { };
privacyidea-ldap-proxy = callPackage ../development/python-modules/privacyidea-ldap-proxy { };
pyjwt1 = callPackage ../development/python-modules/pyjwt/1.nix { };
proboscis = callPackage ../development/python-modules/proboscis { };

View File

@ -407,8 +407,6 @@ with self; with super; {
prettytable = callPackage ../development/python-modules/prettytable/1.nix { };
privacyidea-ldap-proxy = callPackage ../development/python-modules/privacyidea/ldap-proxy.nix { };
ldaptor = callPackage ../development/python-modules/ldaptor/19.nix { };
progressbar231 = callPackage ../development/python-modules/progressbar231 { };