Merge master into staging-next

Commit 2d65a9d98d by github-actions[bot], 2024-10-17 00:14:14 +00:00, committed by GitHub.
114 changed files with 9050 additions and 1662 deletions

View File

@ -99,7 +99,16 @@ class Driver:
with self.logger.nested("cleanup"):
self.race_timer.cancel()
for machine in self.machines:
machine.release()
try:
machine.release()
except Exception as e:
self.logger.error(f"Error during cleanup of {machine.name}: {e}")
for vlan in self.vlans:
try:
vlan.stop()
except Exception as e:
self.logger.error(f"Error during cleanup of vlan{vlan.nr}: {e}")
def subtest(self, name: str) -> Iterator[None]:
"""Group logs under a given test name"""

View File

@ -1234,6 +1234,9 @@ class Machine:
self.monitor.close()
self.serial_thread.join()
if self.qmp_client:
self.qmp_client.close()
def run_callbacks(self) -> None:
for callback in self.callbacks:
callback()

View File

@ -49,7 +49,7 @@ class QMPSession:
sock.connect(str(path))
return cls(sock)
def __del__(self) -> None:
def close(self) -> None:
self.sock.close()
def _wait_for_new_result(self) -> dict[str, str]:

View File

@ -59,7 +59,7 @@ class VLan:
self.logger.info(f"running vlan (pid {self.pid}; ctl {self.socket_dir})")
def __del__(self) -> None:
def stop(self) -> None:
self.logger.info(f"kill vlan (pid {self.pid})")
self.fd.close()
self.process.terminate()
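
These four hunks together replace implicit __del__ finalizers with explicit close()/stop() methods that the driver invokes from its cleanup loop, wrapping each call in its own try/except so that one failing machine or VLAN cannot abort teardown of the rest. A minimal standalone sketch of that pattern in Python (the class and logger names below are illustrative, not the actual NixOS test-driver API):

import logging

logger = logging.getLogger("driver")

class Resource:
    """Stand-in for a Machine or VLan: released via an explicit close(), not __del__."""

    def __init__(self, name: str) -> None:
        self.name = name

    def close(self) -> None:
        # Release sockets, child processes, temporary files, ...
        logger.info("closing %s", self.name)

def cleanup(resources: list[Resource]) -> None:
    # Each resource is released independently; a failure is logged and the
    # loop continues, so one broken VM cannot leave the others running.
    for resource in resources:
        try:
            resource.close()
        except Exception as exc:
            logger.error("error during cleanup of %s: %s", resource.name, exc)

cleanup([Resource("machine1"), Resource("vlan1")])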

View File

@ -16,7 +16,7 @@ in
config = lib.mkIf cfg.enable {
services.udev.packages = [
pkgs.steamPackages.steam
pkgs.steam-unwrapped
];
# The uinput module needs to be loaded in order to trigger the udev rules

View File

@ -1,7 +1,4 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.mysql;
@ -9,7 +6,7 @@ let
isMariaDB = lib.getName cfg.package == lib.getName pkgs.mariadb;
isOracle = lib.getName cfg.package == lib.getName pkgs.mysql80;
# Oracle MySQL has supported "notify" service type since 8.0
hasNotify = isMariaDB || (isOracle && versionAtLeast cfg.package.version "8.0");
hasNotify = isMariaDB || (isOracle && lib.versionAtLeast cfg.package.version "8.0");
mysqldOptions =
"--user=${cfg.user} --datadir=${cfg.dataDir} --basedir=${cfg.package}";
@ -21,11 +18,11 @@ in
{
imports = [
(mkRemovedOptionModule [ "services" "mysql" "pidDir" ] "Don't wait for pidfiles, describe dependencies through systemd.")
(mkRemovedOptionModule [ "services" "mysql" "rootPassword" ] "Use socket authentication or set the password outside of the nix store.")
(mkRemovedOptionModule [ "services" "mysql" "extraOptions" ] "Use services.mysql.settings.mysqld instead.")
(mkRemovedOptionModule [ "services" "mysql" "bind" ] "Use services.mysql.settings.mysqld.bind-address instead.")
(mkRemovedOptionModule [ "services" "mysql" "port" ] "Use services.mysql.settings.mysqld.port instead.")
(lib.mkRemovedOptionModule [ "services" "mysql" "pidDir" ] "Don't wait for pidfiles, describe dependencies through systemd.")
(lib.mkRemovedOptionModule [ "services" "mysql" "rootPassword" ] "Use socket authentication or set the password outside of the nix store.")
(lib.mkRemovedOptionModule [ "services" "mysql" "extraOptions" ] "Use services.mysql.settings.mysqld instead.")
(lib.mkRemovedOptionModule [ "services" "mysql" "bind" ] "Use services.mysql.settings.mysqld.bind-address instead.")
(lib.mkRemovedOptionModule [ "services" "mysql" "port" ] "Use services.mysql.settings.mysqld.port instead.")
];
###### interface
@ -34,18 +31,18 @@ in
services.mysql = {
enable = mkEnableOption "MySQL server";
enable = lib.mkEnableOption "MySQL server";
package = mkOption {
type = types.package;
example = literalExpression "pkgs.mariadb";
package = lib.mkOption {
type = lib.types.package;
example = lib.literalExpression "pkgs.mariadb";
description = ''
Which MySQL derivation to use. MariaDB packages are supported too.
'';
};
user = mkOption {
type = types.str;
user = lib.mkOption {
type = lib.types.str;
default = "mysql";
description = ''
User account under which MySQL runs.
@ -58,8 +55,8 @@ in
'';
};
group = mkOption {
type = types.str;
group = lib.mkOption {
type = lib.types.str;
default = "mysql";
description = ''
Group account under which MySQL runs.
@ -72,8 +69,8 @@ in
'';
};
dataDir = mkOption {
type = types.path;
dataDir = lib.mkOption {
type = lib.types.path;
example = "/var/lib/mysql";
description = ''
The data directory for MySQL.
@ -85,8 +82,8 @@ in
'';
};
configFile = mkOption {
type = types.path;
configFile = lib.mkOption {
type = lib.types.path;
default = configFile;
defaultText = ''
A configuration file automatically generated by NixOS.
@ -95,7 +92,7 @@ in
Override the configuration file used by MySQL. By default,
NixOS generates one automatically from {option}`services.mysql.settings`.
'';
example = literalExpression ''
example = lib.literalExpression ''
pkgs.writeText "my.cnf" '''
[mysqld]
datadir = /var/lib/mysql
@ -107,7 +104,7 @@ in
'';
};
settings = mkOption {
settings = lib.mkOption {
type = format.type;
default = {};
description = ''
@ -123,7 +120,7 @@ in
`1`, or `0`. See the provided example below.
:::
'';
example = literalExpression ''
example = lib.literalExpression ''
{
mysqld = {
key_buffer_size = "6G";
@ -139,17 +136,17 @@ in
'';
};
initialDatabases = mkOption {
type = types.listOf (types.submodule {
initialDatabases = lib.mkOption {
type = lib.types.listOf (lib.types.submodule {
options = {
name = mkOption {
type = types.str;
name = lib.mkOption {
type = lib.types.str;
description = ''
The name of the database to create.
'';
};
schema = mkOption {
type = types.nullOr types.path;
schema = lib.mkOption {
type = lib.types.nullOr lib.types.path;
default = null;
description = ''
The initial schema of the database; if null (the default),
@ -163,7 +160,7 @@ in
List of database names and their initial schemas that should be used to create databases on the first startup
of MySQL. The schema attribute is optional: If not specified, an empty database is created.
'';
example = literalExpression ''
example = lib.literalExpression ''
[
{ name = "foodatabase"; schema = ./foodatabase.sql; }
{ name = "bardatabase"; }
@ -171,14 +168,14 @@ in
'';
};
initialScript = mkOption {
type = types.nullOr types.path;
initialScript = lib.mkOption {
type = lib.types.nullOr lib.types.path;
default = null;
description = "A file containing SQL statements to be executed on the first startup. Can be used for granting certain permissions on the database.";
};
ensureDatabases = mkOption {
type = types.listOf types.str;
ensureDatabases = lib.mkOption {
type = lib.types.listOf lib.types.str;
default = [];
description = ''
Ensures that the specified databases exist.
@ -192,17 +189,17 @@ in
];
};
ensureUsers = mkOption {
type = types.listOf (types.submodule {
ensureUsers = lib.mkOption {
type = lib.types.listOf (lib.types.submodule {
options = {
name = mkOption {
type = types.str;
name = lib.mkOption {
type = lib.types.str;
description = ''
Name of the user to ensure.
'';
};
ensurePermissions = mkOption {
type = types.attrsOf types.str;
ensurePermissions = lib.mkOption {
type = lib.types.attrsOf lib.types.str;
default = {};
description = ''
Permissions to ensure for the user, specified as attribute set.
@ -216,7 +213,7 @@ in
[GRANT syntax](https://mariadb.com/kb/en/library/grant/).
The attributes are used as `GRANT ''${attrName} ON ''${attrValue}`.
'';
example = literalExpression ''
example = lib.literalExpression ''
{
"database.*" = "ALL PRIVILEGES";
"*.*" = "SELECT, LOCK TABLES";
@ -234,7 +231,7 @@ in
option is changed. This means that users created and permissions assigned once through this option or
otherwise have to be removed manually.
'';
example = literalExpression ''
example = lib.literalExpression ''
[
{
name = "nextcloud";
@ -253,40 +250,40 @@ in
};
replication = {
role = mkOption {
type = types.enum [ "master" "slave" "none" ];
role = lib.mkOption {
type = lib.types.enum [ "master" "slave" "none" ];
default = "none";
description = "Role of the MySQL server instance.";
};
serverId = mkOption {
type = types.int;
serverId = lib.mkOption {
type = lib.types.int;
default = 1;
description = "Id of the MySQL server instance. This number must be unique for each instance.";
};
masterHost = mkOption {
type = types.str;
masterHost = lib.mkOption {
type = lib.types.str;
description = "Hostname of the MySQL master server.";
};
slaveHost = mkOption {
type = types.str;
slaveHost = lib.mkOption {
type = lib.types.str;
description = "Hostname of the MySQL slave server.";
};
masterUser = mkOption {
type = types.str;
masterUser = lib.mkOption {
type = lib.types.str;
description = "Username of the MySQL replication user.";
};
masterPassword = mkOption {
type = types.str;
masterPassword = lib.mkOption {
type = lib.types.str;
description = "Password of the MySQL replication user.";
};
masterPort = mkOption {
type = types.port;
masterPort = lib.mkOption {
type = lib.types.port;
default = 3306;
description = "Port number on which the MySQL master server runs.";
};
@ -298,30 +295,30 @@ in
###### implementation
config = mkIf cfg.enable {
config = lib.mkIf cfg.enable {
services.mysql.dataDir =
mkDefault (if versionAtLeast config.system.stateVersion "17.09" then "/var/lib/mysql"
lib.mkDefault (if lib.versionAtLeast config.system.stateVersion "17.09" then "/var/lib/mysql"
else "/var/mysql");
services.mysql.settings.mysqld = mkMerge [
services.mysql.settings.mysqld = lib.mkMerge [
{
datadir = cfg.dataDir;
port = mkDefault 3306;
port = lib.mkDefault 3306;
}
(mkIf (cfg.replication.role == "master" || cfg.replication.role == "slave") {
(lib.mkIf (cfg.replication.role == "master" || cfg.replication.role == "slave") {
log-bin = "mysql-bin-${toString cfg.replication.serverId}";
log-bin-index = "mysql-bin-${toString cfg.replication.serverId}.index";
relay-log = "mysql-relay-bin";
server-id = cfg.replication.serverId;
binlog-ignore-db = [ "information_schema" "performance_schema" "mysql" ];
})
(mkIf (!isMariaDB) {
(lib.mkIf (!isMariaDB) {
plugin-load-add = "auth_socket.so";
})
];
users.users = optionalAttrs (cfg.user == "mysql") {
users.users = lib.optionalAttrs (cfg.user == "mysql") {
mysql = {
description = "MySQL server user";
group = cfg.group;
@ -329,7 +326,7 @@ in
};
};
users.groups = optionalAttrs (cfg.group == "mysql") {
users.groups = lib.optionalAttrs (cfg.group == "mysql") {
mysql.gid = config.ids.gids.mysql;
};
@ -380,7 +377,7 @@ in
# The super user account to use on *first* run of MySQL server
superUser = if isMariaDB then cfg.user else "root";
in ''
${optionalString (!hasNotify) ''
${lib.optionalString (!hasNotify) ''
# Wait until the MySQL server is available for use
while [ ! -e /run/mysqld/mysqld.sock ]
do
@ -397,13 +394,13 @@ in
echo "GRANT ALL PRIVILEGES ON *.* TO '${cfg.user}'@'localhost' WITH GRANT OPTION;"
) | ${cfg.package}/bin/mysql -u ${superUser} -N
${concatMapStrings (database: ''
${lib.concatMapStrings (database: ''
# Create initial databases
if ! test -e "${cfg.dataDir}/${database.name}"; then
echo "Creating initial database: ${database.name}"
( echo 'create database `${database.name}`;'
${optionalString (database.schema != null) ''
${lib.optionalString (database.schema != null) ''
echo 'use `${database.name}`;'
# TODO: this silently falls through if database.schema does not exist,
@ -420,7 +417,7 @@ in
fi
'') cfg.initialDatabases}
${optionalString (cfg.replication.role == "master")
${lib.optionalString (cfg.replication.role == "master")
''
# Set up the replication master
@ -431,7 +428,7 @@ in
) | ${cfg.package}/bin/mysql -u ${superUser} -N
''}
${optionalString (cfg.replication.role == "slave")
${lib.optionalString (cfg.replication.role == "slave")
''
# Set up the replication slave
@ -441,7 +438,7 @@ in
) | ${cfg.package}/bin/mysql -u ${superUser} -N
''}
${optionalString (cfg.initialScript != null)
${lib.optionalString (cfg.initialScript != null)
''
# Execute initial script
# using toString to avoid copying the file to nix store if given as path instead of string,
@ -452,25 +449,25 @@ in
rm ${cfg.dataDir}/mysql_init
fi
${optionalString (cfg.ensureDatabases != []) ''
${lib.optionalString (cfg.ensureDatabases != []) ''
(
${concatMapStrings (database: ''
${lib.concatMapStrings (database: ''
echo "CREATE DATABASE IF NOT EXISTS \`${database}\`;"
'') cfg.ensureDatabases}
) | ${cfg.package}/bin/mysql -N
''}
${concatMapStrings (user:
${lib.concatMapStrings (user:
''
( echo "CREATE USER IF NOT EXISTS '${user.name}'@'localhost' IDENTIFIED WITH ${if isMariaDB then "unix_socket" else "auth_socket"};"
${concatStringsSep "\n" (mapAttrsToList (database: permission: ''
${lib.concatStringsSep "\n" (lib.mapAttrsToList (database: permission: ''
echo "GRANT ${permission} ON ${database} TO '${user.name}'@'localhost';"
'') user.ensurePermissions)}
) | ${cfg.package}/bin/mysql -N
'') cfg.ensureUsers}
'';
serviceConfig = mkMerge [
serviceConfig = lib.mkMerge [
{
Type = if hasNotify then "notify" else "simple";
Restart = "on-abort";
@ -506,7 +503,7 @@ in
# System Call Filtering
SystemCallArchitectures = "native";
}
(mkIf (cfg.dataDir == "/var/lib/mysql") {
(lib.mkIf (cfg.dataDir == "/var/lib/mysql") {
StateDirectory = "mysql";
StateDirectoryMode = "0700";
})

View File

@ -91,7 +91,7 @@ in
};
port = mkOption {
type = types.port;
default = 3001;
default = 2283;
description = "The port that immich will listen on.";
};
openFirewall = mkOption {

View File

@ -20,6 +20,12 @@ let
''}
ln -s ${config.system.build.etc}/etc $out/etc
${lib.optionalString config.system.etc.overlay.enable ''
ln -s ${config.system.build.etcMetadataImage} $out/etc-metadata-image
ln -s ${config.system.build.etcBasedir} $out/etc-basedir
''}
ln -s ${config.system.path} $out/sw
ln -s "$systemd" $out/systemd

View File

@ -507,12 +507,20 @@ in {
in nameValuePair "${n}.automount" (automountToUnit v)) cfg.automounts);
services.initrd-nixos-activation = {
after = [ "initrd-fs.target" ];
services.initrd-find-nixos-closure = {
description = "Find NixOS closure";
unitConfig = {
RequiresMountsFor = "/sysroot/nix/store";
DefaultDependencies = false;
};
before = [ "shutdown.target" ];
conflicts = [ "shutdown.target" ];
requiredBy = [ "initrd.target" ];
unitConfig.AssertPathExists = "/etc/initrd-release";
serviceConfig.Type = "oneshot";
description = "NixOS Activation";
serviceConfig = {
Type = "oneshot";
RemainAfterExit = true;
};
script = /* bash */ ''
set -uo pipefail
@ -542,6 +550,8 @@ in {
# Assume the directory containing the init script is the closure.
closure="$(dirname "$closure")"
ln --symbolic "$closure" /nixos-closure
# If we are not booting a NixOS closure (e.g. init=/bin/sh),
# we don't know what root to prepare so we don't do anything
if ! [ -x "/sysroot$(readlink "/sysroot$closure/prepare-root" || echo "$closure/prepare-root")" ]; then
@ -550,12 +560,48 @@ in {
exit 0
fi
echo 'NEW_INIT=' > /etc/switch-root.conf
'';
};
# We need to propagate /run for things like /run/booted-system
# and /run/current-system.
mounts = [
{
where = "/sysroot/run";
what = "/run";
options = "bind";
unitConfig = {
# See the comment on the mount unit for /run/etc-metadata
DefaultDependencies = false;
};
requiredBy = [ "initrd-fs.target" ];
before = [ "initrd-fs.target" ];
}
];
# We need to propagate /run for things like /run/booted-system
# and /run/current-system.
mkdir -p /sysroot/run
mount --bind /run /sysroot/run
services.initrd-nixos-activation = {
requires = [
config.boot.initrd.systemd.services.initrd-find-nixos-closure.name
];
after = [
"initrd-fs.target"
config.boot.initrd.systemd.services.initrd-find-nixos-closure.name
];
requiredBy = [ "initrd.target" ];
unitConfig = {
AssertPathExists = "/etc/initrd-release";
RequiresMountsFor = [
"/sysroot/run"
];
};
serviceConfig.Type = "oneshot";
description = "NixOS Activation";
script = /* bash */ ''
set -uo pipefail
export PATH="/bin:${cfg.package.util-linux}/bin"
closure="$(realpath /nixos-closure)"
# Initialize the system
export IN_NIXOS_SYSTEMD_STAGE1=true

View File

@ -1,4 +1,4 @@
{ config, lib, ... }:
{ config, lib, pkgs, ... }:
{
@ -34,12 +34,30 @@
mounts = [
{
where = "/run/etc-metadata";
what = "/sysroot${config.system.build.etcMetadataImage}";
what = "/etc-metadata-image";
type = "erofs";
options = "loop";
unitConfig.RequiresMountsFor = [
"/sysroot/nix/store"
unitConfig = {
# Since this unit depends on the nix store being mounted, it cannot
# be a dependency of local-fs.target, because if it did, we'd have
# local-fs.target ordered after the nix store mount which would cause
# things like network.target to only become active after the nix store
# has been mounted.
# This breaks for instance setups where sshd needs to be up before
# any encrypted disks can be mounted.
DefaultDependencies = false;
RequiresMountsFor = [
"/sysroot/nix/store"
];
};
requires = [
config.boot.initrd.systemd.services.initrd-find-etc.name
];
after = [
config.boot.initrd.systemd.services.initrd-find-etc.name
];
requiredBy = [ "initrd-fs.target" ];
before = [ "initrd-fs.target" ];
}
{
where = "/sysroot/etc";
@ -49,7 +67,7 @@
"relatime"
"redirect_dir=on"
"metacopy=on"
"lowerdir=/run/etc-metadata::/sysroot${config.system.build.etcBasedir}"
"lowerdir=/run/etc-metadata::/etc-basedir"
] ++ lib.optionals config.system.etc.overlay.mutable [
"rw"
"upperdir=/sysroot/.rw-etc/upper"
@ -59,28 +77,77 @@
]);
requiredBy = [ "initrd-fs.target" ];
before = [ "initrd-fs.target" ];
requires = lib.mkIf config.system.etc.overlay.mutable [ "rw-etc.service" ];
after = lib.mkIf config.system.etc.overlay.mutable [ "rw-etc.service" ];
unitConfig.RequiresMountsFor = [
"/sysroot/nix/store"
"/run/etc-metadata"
requires = [
config.boot.initrd.systemd.services.initrd-find-etc.name
] ++ lib.optionals config.system.etc.overlay.mutable [
config.boot.initrd.systemd.services."rw-etc".name
];
after = [
config.boot.initrd.systemd.services.initrd-find-etc.name
] ++ lib.optionals config.system.etc.overlay.mutable [
config.boot.initrd.systemd.services."rw-etc".name
];
unitConfig = {
RequiresMountsFor = [
"/sysroot/nix/store"
"/run/etc-metadata"
];
DefaultDependencies = false;
};
}
];
services = lib.mkIf config.system.etc.overlay.mutable {
rw-etc = {
unitConfig = {
DefaultDependencies = false;
RequiresMountsFor = "/sysroot";
services = lib.mkMerge [
(lib.mkIf config.system.etc.overlay.mutable {
rw-etc = {
requiredBy = [ "initrd-fs.target" ];
before = [ "initrd-fs.target" ];
unitConfig = {
DefaultDependencies = false;
RequiresMountsFor = "/sysroot";
};
serviceConfig = {
Type = "oneshot";
ExecStart = ''
/bin/mkdir -p -m 0755 /sysroot/.rw-etc/upper /sysroot/.rw-etc/work
'';
};
};
serviceConfig = {
Type = "oneshot";
ExecStart = ''
/bin/mkdir -p -m 0755 /sysroot/.rw-etc/upper /sysroot/.rw-etc/work
})
{
initrd-find-etc = {
description = "Find the path to the etc metadata image and based dir";
requires = [
config.boot.initrd.systemd.services.initrd-find-nixos-closure.name
];
after = [
config.boot.initrd.systemd.services.initrd-find-nixos-closure.name
];
before = [ "shutdown.target" ];
conflicts = [ "shutdown.target" ];
requiredBy = [ "initrd.target" ];
unitConfig = {
DefaultDependencies = false;
RequiresMountsFor = "/sysroot/nix/store";
};
serviceConfig = {
Type = "oneshot";
RemainAfterExit = true;
};
script = /* bash */ ''
set -uo pipefail
closure="$(realpath /nixos-closure)"
metadata_image="$(chroot /sysroot ${lib.getExe' pkgs.coreutils "realpath"} "$closure/etc-metadata-image")"
ln -s "/sysroot$metadata_image" /etc-metadata-image
basedir="$(chroot /sysroot ${lib.getExe' pkgs.coreutils "realpath"} "$closure/etc-basedir")"
ln -s "/sysroot$basedir" /etc-basedir
'';
};
};
};
}
];
};
})

View File

@ -15,6 +15,10 @@
boot.kernelPackages = pkgs.linuxPackages_latest;
time.timeZone = "Utc";
# The standard resolvconf service tries to write to /etc and crashes,
# which makes nixos-rebuild exit uncleanly when switching into the new generation
services.resolved.enable = true;
environment.etc = {
"mountpoint/.keep".text = "keep";
"filemount".text = "keep";
@ -26,6 +30,13 @@
};
testScript = ''
with subtest("/run/etc-metadata/ is mounted"):
print(machine.succeed("mountpoint /run/etc-metadata"))
with subtest("No temporary files leaked into stage 2"):
machine.succeed("[ ! -e /etc-metadata-image ]")
machine.succeed("[ ! -e /etc-basedir ]")
with subtest("/etc is mounted as an overlay"):
machine.succeed("findmnt --kernel --type overlay /etc")
@ -50,6 +61,9 @@
with subtest("switching to the same generation"):
machine.succeed("/run/current-system/bin/switch-to-configuration test")
with subtest("the initrd didn't get rebuilt"):
machine.succeed("test /run/current-system/initrd -ef /run/current-system/specialisation/new-generation/initrd")
with subtest("switching to a new generation"):
machine.fail("stat /etc/newgen")

View File

@ -18,12 +18,22 @@
};
testScript = ''
with subtest("/run/etc-metadata/ is mounted"):
print(machine.succeed("mountpoint /run/etc-metadata"))
with subtest("No temporary files leaked into stage 2"):
machine.succeed("[ ! -e /etc-metadata-image ]")
machine.succeed("[ ! -e /etc-basedir ]")
with subtest("/etc is mounted as an overlay"):
machine.succeed("findmnt --kernel --type overlay /etc")
with subtest("switching to the same generation"):
machine.succeed("/run/current-system/bin/switch-to-configuration test")
with subtest("the initrd didn't get rebuilt"):
machine.succeed("test /run/current-system/initrd -ef /run/current-system/specialisation/new-generation/initrd")
with subtest("switching to a new generation"):
machine.fail("stat /etc/newgen")
machine.succeed("echo -n 'mutable' > /etc/mutable")

View File

@ -29,6 +29,8 @@ import ./make-test-python.nix ({ lib, pkgs, ... }: {
machine.succeed("[ -e /dev/shm ]") # /dev/shm
machine.succeed("[ -e /dev/pts/ptmx ]") # /dev/pts
machine.succeed("[ -e /run/keys ]") # /run/keys
# /nixos-closure didn't leak into stage-2
machine.succeed("[ ! -e /nixos-closure ]")
with subtest("groups work"):
machine.fail("journalctl -b 0 | grep 'systemd-udevd.*Unknown group.*ignoring'")

View File

@ -83,7 +83,7 @@ let
'';
};
applicablePostgresqlVersions = filterAttrs (_: value: versionAtLeast value.version "12") postgresql-versions;
applicablePostgresqlVersions = filterAttrs (_: value: versionAtLeast value.version "14") postgresql-versions;
in
mapAttrs'
(name: package: {

View File

@ -26,24 +26,24 @@ import ../make-test-python.nix (
machine.wait_for_unit("immich-server.service")
machine.wait_for_open_port(3001) # Server
machine.wait_for_open_port(2283) # Server
machine.wait_for_open_port(3003) # Machine learning
machine.succeed("curl --fail http://localhost:3001/")
machine.succeed("curl --fail http://localhost:2283/")
machine.succeed("""
curl -H 'Content-Type: application/json' --data '{ "email": "test@example.com", "name": "Admin", "password": "admin" }' -X POST http://localhost:3001/api/auth/admin-sign-up
curl -H 'Content-Type: application/json' --data '{ "email": "test@example.com", "name": "Admin", "password": "admin" }' -X POST http://localhost:2283/api/auth/admin-sign-up
""")
res = machine.succeed("""
curl -H 'Content-Type: application/json' --data '{ "email": "test@example.com", "password": "admin" }' -X POST http://localhost:3001/api/auth/login
curl -H 'Content-Type: application/json' --data '{ "email": "test@example.com", "password": "admin" }' -X POST http://localhost:2283/api/auth/login
""")
token = json.loads(res)['accessToken']
res = machine.succeed("""
curl -H 'Content-Type: application/json' -H 'Cookie: immich_access_token=%s' --data '{ "name": "API Key", "permissions": ["all"] }' -X POST http://localhost:3001/api/api-keys
curl -H 'Content-Type: application/json' -H 'Cookie: immich_access_token=%s' --data '{ "name": "API Key", "permissions": ["all"] }' -X POST http://localhost:2283/api/api-keys
""" % token)
key = json.loads(res)['secret']
machine.succeed(f"immich login http://localhost:3001/api {key}")
machine.succeed(f"immich login http://localhost:2283/api {key}")
res = machine.succeed("immich server-info")
print(res)
'';

View File

@ -1,35 +1,38 @@
{ lib, buildGoModule, fetchFromGitHub, installShellFiles }:
{
lib,
buildGoModule,
cilium-cli,
fetchFromGitHub,
installShellFiles,
testers,
}:
buildGoModule rec {
pname = "cilium-cli";
version = "0.16.15";
version = "0.16.19";
src = fetchFromGitHub {
owner = "cilium";
repo = pname;
rev = "v${version}";
hash = "sha256-5LqRHa0ytprwAAIl7iNZQ9zKnn5wNtFubQdvLuX9qGM=";
repo = "cilium-cli";
rev = "refs/tags/v${version}";
hash = "sha256-I5HC1H517oCizZf2mcHOKmgJqnvPjkNVfDy2/9Kkw44=";
};
nativeBuildInputs = [ installShellFiles ];
vendorHash = null;
subPackages = [ "cmd/cilium" ];
ldflags = [
"-s" "-w"
"-X github.com/cilium/cilium-cli/defaults.CLIVersion=${version}"
"-X=github.com/cilium/cilium-cli/defaults.CLIVersion=${version}"
];
# Required to workaround install check error:
# 2022/06/25 10:36:22 Unable to start gops: mkdir /homeless-shelter: permission denied
HOME = "$TMPDIR";
doInstallCheck = true;
installCheckPhase = ''
$out/bin/cilium version --client | grep ${version} > /dev/null
'';
nativeBuildInputs = [ installShellFiles ];
postInstall = ''
installShellCompletion --cmd cilium \
--bash <($out/bin/cilium completion bash) \
@ -37,11 +40,17 @@ buildGoModule rec {
--zsh <($out/bin/cilium completion zsh)
'';
passthru.tests.version = testers.testVersion {
package = cilium-cli;
command = "cilium version --client";
version = "${version}";
};
meta = {
changelog = "https://github.com/cilium/cilium-cli/releases/tag/v${version}";
description = "CLI to install, manage & troubleshoot Kubernetes clusters running Cilium";
license = lib.licenses.asl20;
homepage = "https://www.cilium.io/";
changelog = "https://github.com/cilium/cilium-cli/releases/tag/v${version}";
license = lib.licenses.asl20;
maintainers = with lib.maintainers; [ bryanasdev000 humancalico qjoly ];
mainProgram = "cilium";
};

View File

@ -165,8 +165,8 @@ rec {
mkTerraform = attrs: pluggable (generic attrs);
terraform_1 = mkTerraform {
version = "1.9.7";
hash = "sha256-L0F0u96et18IlqAUsc0HK+cLeav2OqN4kxs58hPNMIM=";
version = "1.9.8";
hash = "sha256-0xBhOdaIbw1fLmbI4KDvQoHD4BmVZoiMT/zv9MnwuD4=";
vendorHash = "sha256-tH9KQF4oHcQh34ikB9Bx6fij/iLZN+waxv5ZilqGGlU=";
patches = [ ./provider-path-0_15.patch ];
passthru = {

View File

@ -1,138 +0,0 @@
{ lib
, fetchPypi
, python3
, stress
}:
python3.pkgs.buildPythonApplication rec {
pname = "snakemake";
version = "8.20.1";
format = "setuptools";
src = fetchPypi {
inherit pname version;
hash = "sha256-adNwIA1z/TwWsa0gQb4hAsUvHInjd30sm1dYKXvvXy8=";
};
postPatch = ''
patchShebangs --build tests/
substituteInPlace tests/common.py \
--replace-fail 'os.environ["PYTHONPATH"] = os.getcwd()' "pass" \
--replace-fail 'del os.environ["PYTHONPATH"]' "pass"
substituteInPlace snakemake/unit_tests/__init__.py \
--replace-fail '"unit_tests/templates"' '"'"$PWD"'/snakemake/unit_tests/templates"'
'';
propagatedBuildInputs = with python3.pkgs; [
appdirs
configargparse
connection-pool
datrie
docutils
gitpython
humanfriendly
immutables
jinja2
jsonschema
nbformat
psutil
pulp
pygments
pyyaml
requests
reretry
smart-open
snakemake-interface-executor-plugins
snakemake-interface-common
snakemake-interface-storage-plugins
snakemake-interface-report-plugins
stopit
tabulate
throttler
toposort
wrapt
yte
];
# See
# https://github.com/snakemake/snakemake/blob/main/.github/workflows/main.yml#L99
# for the current basic test suite. Slurm, Tibanna and Tes require extra
# setup.
nativeCheckInputs = with python3.pkgs; [
numpy
pandas
pytestCheckHook
pytest-mock
requests-mock
snakemake-executor-plugin-cluster-generic
snakemake-storage-plugin-fs
stress
];
pytestFlagsArray = [
"tests/tests.py"
"tests/test_expand.py"
"tests/test_io.py"
"tests/test_schema.py"
"tests/test_executor_test_suite.py"
"tests/test_api.py"
];
# Some will be disabled via https://github.com/snakemake/snakemake/pull/3074
disabledTests = [
# requires graphviz
"test_filegraph"
# requires s3
"test_storage"
"test_default_storage"
"test_output_file_cache_storage"
# requires peppy and eido
"test_pep"
"test_modules_peppy"
# requires perl
"test_shadow"
# requires snakemake-storage-plugin-http
"test_ancient"
"test_modules_prefix"
# requires snakemake-storage-plugin-s3
"test_deploy_sources"
# requires modules
"test_env_modules"
# issue with locating template file
"test_generate_unit_tests"
# weird
"test_strict_mode"
"test_issue1256"
"test_issue2574"
"test_github_issue1384"
# future-proofing
"conda"
"singularity"
"apptainer"
"container"
];
pythonImportsCheck = [
"snakemake"
];
preCheck = ''
export HOME="$(mktemp -d)"
'';
meta = with lib; {
homepage = "https://snakemake.github.io";
license = licenses.mit;
description = "Python-based execution environment for make-like workflows";
mainProgram = "snakemake";
longDescription = ''
Snakemake is a workflow management system that aims to reduce the complexity of
creating workflows by providing a fast and comfortable execution environment,
together with a clean and readable specification language in Python style. Snakemake
workflows are essentially Python scripts extended by declarative code to define
rules. Rules describe how to create output files from input files.
'';
maintainers = with maintainers; [ helkafen renatoGarcia veprbl ];
};
}

View File

@ -6,13 +6,13 @@
stdenv.mkDerivation (finalAttrs: {
pname = "abcmidi";
version = "2024.08.13";
version = "2024.10.10";
src = fetchFromGitHub {
owner = "sshlien";
repo = "abcmidi";
rev = "refs/tags/${finalAttrs.version}";
hash = "sha256-+X7ZPjZtqxEq2GSzdhLA48aqHfWFimST1GCfZ/NLjeU=";
hash = "sha256-dAxr1RJrYppt/Gw6ZF3fL0lDhwJNG5v75M6VA1okrtw=";
};
meta = {

View File

@ -10,13 +10,13 @@
stdenv.mkDerivation rec {
pname = "ayatana-ido";
version = "0.10.3";
version = "0.10.4";
src = fetchFromGitHub {
owner = "AyatanaIndicators";
repo = pname;
rev = version;
sha256 = "sha256-WEPW9BstDv2k/5dTEDQza3eOQ9bd6CEVvmd817sEPAs=";
sha256 = "sha256-KeErrT2umMaIVfLDr4CcQCmFrMb8/h6pNYbunuC/JtI=";
};
nativeBuildInputs = [

View File

@ -0,0 +1,38 @@
{
lib,
rustPlatform,
fetchFromGitHub,
pkg-config,
dbus,
}:
rustPlatform.buildRustPackage rec {
pname = "bluetui";
version = "0.5.1";
src = fetchFromGitHub {
owner = "pythops";
repo = "bluetui";
rev = "v${version}";
hash = "sha256-9svPIZzKuI4XBlxBsKucGLdX2dkfAy9ERT5oj8Su9TM=";
};
cargoHash = "sha256-w6rrZQNu5kLKEWSXFa/vSqwm76zWZug/ZqztMDY7buE=";
nativeBuildInputs = [
pkg-config
];
buildInputs = [
dbus
];
meta = {
description = "TUI for managing bluetooth on Linux";
homepage = "https://github.com/pythops/bluetui";
license = lib.licenses.gpl3Only;
maintainers = with lib.maintainers; [ donovanglover ];
mainProgram = "bluetui";
platforms = lib.platforms.linux;
};
}

View File

@ -13,16 +13,16 @@
stdenv.mkDerivation rec {
pname = "ccid";
version = "1.5.5";
version = "1.6.1";
src = fetchurl {
url = "https://ccid.apdu.fr/files/${pname}-${version}.tar.bz2";
hash = "sha256-GUcI91/jadRd18Feiz6Kfbi0nPxVV1dMoqLnbvEsoMo=";
url = "https://ccid.apdu.fr/files/${pname}-${version}.tar.xz";
hash = "sha256-LsqPsH6P58DTna6sp7l81zxA7Ztyc4okrT3L38kY4eo=";
};
postPatch = ''
patchShebangs .
substituteInPlace src/Makefile.in --replace-fail /bin/echo echo
substituteInPlace src/Makefile.am --replace-fail /bin/echo echo
'';
configureFlags = [

View File

@ -20,18 +20,18 @@
rustPlatform.buildRustPackage rec {
pname = "cinny-desktop";
# We have to be using the same version as cinny-web or this isn't going to work.
version = "4.2.1";
version = "4.2.2";
src = fetchFromGitHub {
owner = "cinnyapp";
repo = "cinny-desktop";
rev = "refs/tags/v${version}";
hash = "sha256-W73ma8ScF3LGv45yhZCV80zhh7URLuWhbi+JumyTp+4=";
hash = "sha256-W8WSnfUqWTtyb6x0Kmej5sAxsi1Kh/uDkIx6SZhgSvw=";
};
sourceRoot = "${src.name}/src-tauri";
cargoHash = "sha256-ved2W4+Dt7pN9j9vIaDlAkaY517nBEgPKgu8ArcHXsM=";
cargoHash = "sha256-rg4NdxyJfnEPmFjb2wKJcF7ga7t5WNX/LB0haOvGbXU=";
postPatch =
let

View File

@ -14,16 +14,16 @@
buildNpmPackage rec {
pname = "cinny-unwrapped";
version = "4.2.1";
version = "4.2.2";
src = fetchFromGitHub {
owner = "cinnyapp";
repo = "cinny";
rev = "v${version}";
hash = "sha256-+sJQosQMji2iLGgOMRykSJm0zIhghsOsROJZvTQk2zQ=";
hash = "sha256-S8vOydjQLL2JK5g8B/PBaDRd+Er3JEKrsYSkDrOdi2k=";
};
npmDepsHash = "sha256-VSTpe1CA6lv5MoqXyk1iZSwzRc6Axy5cM8PmqPOyheA=";
npmDepsHash = "sha256-W3XXrhg7BblS0w4jI6oQDNggt7G56AzHQKC9tD0TrvU=";
# Fix error: no member named 'aligned_alloc' in the global namespace
env.NIX_CFLAGS_COMPILE = lib.optionalString (

View File

@ -539,16 +539,16 @@ dependencies = [
[[package]]
name = "calloop"
version = "0.14.0"
version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c58a38167d6fba8c67cce63c4a91f2a73ca42cbdaf6fb9ba164f1e07b43ecc10"
checksum = "a1ead1e1514bce44c0f40e027899fbc595907fc112635bed21b3b5d975c0a5e7"
dependencies = [
"async-task",
"bitflags 2.6.0",
"log",
"polling",
"rustix",
"slab",
"tracing",
]
[[package]]
@ -823,7 +823,7 @@ dependencies = [
"anyhow",
"bitflags 2.6.0",
"bytemuck",
"calloop 0.14.0",
"calloop 0.14.1",
"cosmic-comp-config",
"cosmic-config",
"cosmic-protocols",
@ -847,6 +847,7 @@ dependencies = [
"ordered-float",
"png",
"profiling",
"rand",
"regex",
"ron",
"rust-embed",
@ -885,10 +886,10 @@ dependencies = [
[[package]]
name = "cosmic-config"
version = "0.1.0"
source = "git+https://github.com/pop-os/libcosmic/#b40839638ab0e1d96de3f817eded647e6952db40"
source = "git+https://github.com/pop-os/libcosmic/#af68a3f660402b850dfd00041372d964d3b098d7"
dependencies = [
"atomicwrites",
"calloop 0.14.0",
"calloop 0.14.1",
"cosmic-config-derive",
"dirs",
"iced_futures",
@ -904,7 +905,7 @@ dependencies = [
[[package]]
name = "cosmic-config-derive"
version = "0.1.0"
source = "git+https://github.com/pop-os/libcosmic/#b40839638ab0e1d96de3f817eded647e6952db40"
source = "git+https://github.com/pop-os/libcosmic/#af68a3f660402b850dfd00041372d964d3b098d7"
dependencies = [
"quote",
"syn 1.0.109",
@ -913,7 +914,7 @@ dependencies = [
[[package]]
name = "cosmic-protocols"
version = "0.1.0"
source = "git+https://github.com/pop-os/cosmic-protocols?branch=main#de2fead49d6af3a221db153642e4d7c2235aafc4"
source = "git+https://github.com/pop-os/cosmic-protocols?branch=main#91aeb55052a8e6e15a7ddd53e039a9350f16fa69"
dependencies = [
"bitflags 2.6.0",
"wayland-backend",
@ -926,7 +927,7 @@ dependencies = [
[[package]]
name = "cosmic-settings-config"
version = "0.1.0"
source = "git+https://github.com/pop-os/cosmic-settings-daemon#362c77f9faaeb7f1b9e4aa79a7d5588001f04874"
source = "git+https://github.com/pop-os/cosmic-settings-daemon#1ed68808e85ce681da882446ec572d44c68a6866"
dependencies = [
"cosmic-config",
"serde",
@ -939,7 +940,7 @@ dependencies = [
[[package]]
name = "cosmic-text"
version = "0.12.1"
source = "git+https://github.com/pop-os/cosmic-text.git#e16b39f29c84773a05457fe39577a602de27855c"
source = "git+https://github.com/pop-os/cosmic-text.git#e8f567cf5b456dfab749a575c257acaa36f622d9"
dependencies = [
"bitflags 2.6.0",
"fontdb",
@ -949,6 +950,7 @@ dependencies = [
"rustc-hash",
"rustybuzz 0.14.1",
"self_cell 1.0.4",
"smol_str",
"swash",
"sys-locale",
"ttf-parser 0.21.1",
@ -961,7 +963,7 @@ dependencies = [
[[package]]
name = "cosmic-theme"
version = "0.1.0"
source = "git+https://github.com/pop-os/libcosmic/#b40839638ab0e1d96de3f817eded647e6952db40"
source = "git+https://github.com/pop-os/libcosmic/#af68a3f660402b850dfd00041372d964d3b098d7"
dependencies = [
"almost",
"cosmic-config",
@ -1077,7 +1079,7 @@ version = "0.19.0"
source = "git+https://github.com/gfx-rs/wgpu?rev=20fda69#20fda698341efbdc870b8027d6d49f5bf3f36109"
dependencies = [
"bitflags 2.6.0",
"libloading 0.8.5",
"libloading 0.7.4",
"winapi",
]
@ -1218,7 +1220,7 @@ version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "330c60081dcc4c72131f8eb70510f1ac07223e5d4163db481a04a0befcffa412"
dependencies = [
"libloading 0.8.5",
"libloading 0.7.4",
]
[[package]]
@ -2155,7 +2157,7 @@ dependencies = [
"bitflags 2.6.0",
"com",
"libc",
"libloading 0.8.5",
"libloading 0.7.4",
"thiserror",
"widestring",
"winapi",
@ -2305,7 +2307,7 @@ dependencies = [
[[package]]
name = "iced"
version = "0.12.0"
source = "git+https://github.com/pop-os/libcosmic/#b40839638ab0e1d96de3f817eded647e6952db40"
source = "git+https://github.com/pop-os/libcosmic/#a962865230f3b9ecba40c0c09e9c279e832c9f10"
dependencies = [
"dnd",
"iced_core",
@ -2321,7 +2323,7 @@ dependencies = [
[[package]]
name = "iced_core"
version = "0.12.0"
source = "git+https://github.com/pop-os/libcosmic/#b40839638ab0e1d96de3f817eded647e6952db40"
source = "git+https://github.com/pop-os/libcosmic/#a962865230f3b9ecba40c0c09e9c279e832c9f10"
dependencies = [
"bitflags 2.6.0",
"dnd",
@ -2341,7 +2343,7 @@ dependencies = [
[[package]]
name = "iced_futures"
version = "0.12.0"
source = "git+https://github.com/pop-os/libcosmic/#b40839638ab0e1d96de3f817eded647e6952db40"
source = "git+https://github.com/pop-os/libcosmic/#a962865230f3b9ecba40c0c09e9c279e832c9f10"
dependencies = [
"futures",
"iced_core",
@ -2353,7 +2355,7 @@ dependencies = [
[[package]]
name = "iced_graphics"
version = "0.12.0"
source = "git+https://github.com/pop-os/libcosmic/#b40839638ab0e1d96de3f817eded647e6952db40"
source = "git+https://github.com/pop-os/libcosmic/#a962865230f3b9ecba40c0c09e9c279e832c9f10"
dependencies = [
"bitflags 2.6.0",
"bytemuck",
@ -2377,7 +2379,7 @@ dependencies = [
[[package]]
name = "iced_renderer"
version = "0.12.0"
source = "git+https://github.com/pop-os/libcosmic/#b40839638ab0e1d96de3f817eded647e6952db40"
source = "git+https://github.com/pop-os/libcosmic/#a962865230f3b9ecba40c0c09e9c279e832c9f10"
dependencies = [
"iced_graphics",
"iced_tiny_skia",
@ -2389,7 +2391,7 @@ dependencies = [
[[package]]
name = "iced_runtime"
version = "0.12.0"
source = "git+https://github.com/pop-os/libcosmic/#b40839638ab0e1d96de3f817eded647e6952db40"
source = "git+https://github.com/pop-os/libcosmic/#a962865230f3b9ecba40c0c09e9c279e832c9f10"
dependencies = [
"dnd",
"iced_core",
@ -2401,7 +2403,7 @@ dependencies = [
[[package]]
name = "iced_style"
version = "0.12.0"
source = "git+https://github.com/pop-os/libcosmic/#b40839638ab0e1d96de3f817eded647e6952db40"
source = "git+https://github.com/pop-os/libcosmic/#a962865230f3b9ecba40c0c09e9c279e832c9f10"
dependencies = [
"iced_core",
"once_cell",
@ -2411,7 +2413,7 @@ dependencies = [
[[package]]
name = "iced_tiny_skia"
version = "0.12.0"
source = "git+https://github.com/pop-os/libcosmic/#b40839638ab0e1d96de3f817eded647e6952db40"
source = "git+https://github.com/pop-os/libcosmic/#a962865230f3b9ecba40c0c09e9c279e832c9f10"
dependencies = [
"bytemuck",
"cosmic-text",
@ -2428,7 +2430,7 @@ dependencies = [
[[package]]
name = "iced_wgpu"
version = "0.12.0"
source = "git+https://github.com/pop-os/libcosmic/#b40839638ab0e1d96de3f817eded647e6952db40"
source = "git+https://github.com/pop-os/libcosmic/#a962865230f3b9ecba40c0c09e9c279e832c9f10"
dependencies = [
"as-raw-xcb-connection",
"bitflags 2.6.0",
@ -2457,7 +2459,7 @@ dependencies = [
[[package]]
name = "iced_widget"
version = "0.12.0"
source = "git+https://github.com/pop-os/libcosmic/#b40839638ab0e1d96de3f817eded647e6952db40"
source = "git+https://github.com/pop-os/libcosmic/#a962865230f3b9ecba40c0c09e9c279e832c9f10"
dependencies = [
"dnd",
"iced_renderer",
@ -2790,7 +2792,7 @@ checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
[[package]]
name = "libcosmic"
version = "0.1.0"
source = "git+https://github.com/pop-os/libcosmic/#b40839638ab0e1d96de3f817eded647e6952db40"
source = "git+https://github.com/pop-os/libcosmic/#af68a3f660402b850dfd00041372d964d3b098d7"
dependencies = [
"apply",
"chrono",
@ -2836,7 +2838,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
dependencies = [
"cfg-if",
"windows-targets 0.52.6",
"windows-targets 0.48.5",
]
[[package]]
@ -4086,9 +4088,9 @@ dependencies = [
[[package]]
name = "quick-xml"
version = "0.34.0"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f24d770aeca0eacb81ac29dfbc55ebcc09312fdd1f8bbecdc7e4a84e000e3b4"
checksum = "96a05e2e8efddfa51a84ca47cec303fac86c8541b686d37cac5efc0e094417bc"
dependencies = [
"memchr",
]
@ -4480,12 +4482,6 @@ dependencies = [
"regex",
]
[[package]]
name = "scan_fmt"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b53b0a5db882a8e2fdaae0a43f7b39e7e9082389e978398bdf223a55b581248"
[[package]]
name = "scoped-tls"
version = "1.0.1"
@ -4703,12 +4699,12 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
[[package]]
name = "smithay"
version = "0.3.0"
source = "git+https://github.com/smithay//smithay?rev=e7f0857#e7f08570bceab6107863267ae168d0afb018e8f5"
source = "git+https://github.com/smithay//smithay?rev=08d31e1#08d31e17ea4ac47cddeb56e2ac18ee50b331911b"
dependencies = [
"appendlist",
"ash 0.38.0+1.3.281",
"bitflags 2.6.0",
"calloop 0.14.0",
"calloop 0.14.1",
"cc",
"cgmath",
"cursor-icon",
@ -4723,7 +4719,6 @@ dependencies = [
"glow 0.12.3",
"indexmap 2.3.0",
"input",
"lazy_static",
"libc",
"libloading 0.8.5",
"libseat",
@ -4733,7 +4728,6 @@ dependencies = [
"profiling",
"rand",
"rustix",
"scan_fmt",
"scopeguard",
"smallvec",
"tempfile",
@ -4794,7 +4788,7 @@ dependencies = [
[[package]]
name = "smithay-egui"
version = "0.1.0"
source = "git+https://github.com/Smithay/smithay-egui.git?rev=cdc652e0#cdc652e0d4823b16a5bd9badd288e38512789dc5"
source = "git+https://github.com/Smithay/smithay-egui.git?rev=0d0b4ca0#0d0b4ca01a851b97cd27bdc94cce1c1f52723ad5"
dependencies = [
"cgmath",
"egui",
@ -5233,6 +5227,7 @@ version = "0.1.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
dependencies = [
"log",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
@ -5702,9 +5697,9 @@ dependencies = [
[[package]]
name = "wayland-backend"
version = "0.3.6"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f90e11ce2ca99c97b940ee83edbae9da2d56a08f9ea8158550fd77fa31722993"
checksum = "056535ced7a150d45159d3a8dc30f91a2e2d588ca0b23f70e56033622b8016f6"
dependencies = [
"cc",
"downcast-rs",
@ -5716,9 +5711,9 @@ dependencies = [
[[package]]
name = "wayland-client"
version = "0.31.5"
version = "0.31.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e321577a0a165911bdcfb39cf029302479d7527b517ee58ab0f6ad09edf0943"
checksum = "e3f45d1222915ef1fd2057220c1d9d9624b7654443ea35c3877f7a52bd0a5a2d"
dependencies = [
"bitflags 2.6.0",
"rustix",
@ -5760,9 +5755,9 @@ dependencies = [
[[package]]
name = "wayland-protocols"
version = "0.32.3"
version = "0.32.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62989625a776e827cc0f15d41444a3cea5205b963c3a25be48ae1b52d6b4daaa"
checksum = "2b5755d77ae9040bb872a25026555ce4cb0ae75fd923e90d25fba07d81057de0"
dependencies = [
"bitflags 2.6.0",
"wayland-backend",
@ -5813,9 +5808,9 @@ dependencies = [
[[package]]
name = "wayland-scanner"
version = "0.31.4"
version = "0.31.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7b56f89937f1cf2ee1f1259cf2936a17a1f45d8f0aa1019fae6d470d304cfa6"
checksum = "597f2001b2e5fc1121e3d5b9791d3e78f05ba6bfa4641053846248e3a13661c3"
dependencies = [
"proc-macro2",
"quick-xml",
@ -5824,9 +5819,9 @@ dependencies = [
[[package]]
name = "wayland-server"
version = "0.31.4"
version = "0.31.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f0a4bab6d420ee4a609b63ef4d5f9b5d309c6b93a029fccab70f2594c0cb3ae"
checksum = "0f18d47038c0b10479e695d99ed073e400ccd9bdbb60e6e503c96f62adcb12b6"
dependencies = [
"bitflags 2.6.0",
"downcast-rs",
@ -5838,9 +5833,9 @@ dependencies = [
[[package]]
name = "wayland-sys"
version = "0.31.4"
version = "0.31.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43676fe2daf68754ecf1d72026e4e6c15483198b5d24e888b74d3f22f887a148"
checksum = "efa8ac0d8e8ed3e3b5c9fc92c7881406a268e11555abe36493efabe649a29e09"
dependencies = [
"dlib",
"log",
@ -5956,7 +5951,7 @@ dependencies = [
"js-sys",
"khronos-egl",
"libc",
"libloading 0.8.5",
"libloading 0.7.4",
"log",
"metal",
"naga",

View File

@ -21,13 +21,13 @@
rustPlatform.buildRustPackage rec {
pname = "cosmic-comp";
version = "1.0.0-alpha.1";
version = "1.0.0-alpha.2";
src = fetchFromGitHub {
owner = "pop-os";
repo = "cosmic-comp";
rev = "epoch-${version}";
hash = "sha256-4NAIpyaITFNaTDBcsleIwKPq8nHNa77C7y+5hCIYXZE=";
hash = "sha256-IbGMp+4nRg4v5yRvp3ujGx7+nJ6wJmly6dZBXbQAnr8=";
};
cargoLock = {
@ -35,16 +35,17 @@ rustPlatform.buildRustPackage rec {
outputHashes = {
"atomicwrites-0.4.2" = "sha256-QZSuGPrJXh+svMeFWqAXoqZQxLq/WfIiamqvjJNVhxA=";
"clipboard_macos-0.1.0" = "sha256-cG5vnkiyDlQnbEfV2sPbmBYKv1hd3pjJrymfZb8ziKk=";
"cosmic-config-0.1.0" = "sha256-nZCefRCq40K0Mcsav+akZbX89kHnliqAkB7vKx5WIwY=";
"cosmic-protocols-0.1.0" = "sha256-qgo8FMKo/uCbhUjfykRRN8KSavbyhZpu82M8npLcIPI=";
"cosmic-settings-config-0.1.0" = "sha256-/Qav6r4VQ8ZDSs/tqHeutxYH3u4HiTBFWTfAYUSl2HQ=";
"cosmic-text-0.12.1" = "sha256-x0XTxzbmtE2d4XCG/Nuq3DzBpz15BbnjRRlirfNJEiU=";
"cosmic-config-0.1.0" = "sha256-MZLjSIhPz+cpaSHA1R1S+9FD60ys+tHaJ+2Cz+2B/uE=";
"cosmic-protocols-0.1.0" = "sha256-6XM6kcM2CEGAziCkal4uO0EL1nEWOKb3rFs7hFh6r7Y=";
"cosmic-settings-config-0.1.0" = "sha256-j4tAclYoenNM+iBwk8iHOj4baIXc4wkclPl5RZsADGI=";
"cosmic-text-0.12.1" = "sha256-3opGta6Co8l+hIQRVGkfSy6IqJXq/N8ZzqF+YGQADmI=";
"d3d12-0.19.0" = "sha256-usrxQXWLGJDjmIdw1LBXtBvX+CchZDvE8fHC0LjvhD4=";
"glyphon-0.5.0" = "sha256-j1HrbEpUBqazWqNfJhpyjWuxYAxkvbXzRKeSouUoPWg=";
"iced-0.12.0" = "sha256-1RSl5Zd6pkSdAD0zkjL8mzgBbCuc0AE564uI8zrNCyc=";
"id_tree-1.8.0" = "sha256-uKdKHRfPGt3vagOjhnri3aYY5ar7O3rp2/ivTfM2jT0=";
"smithay-0.3.0" = "sha256-puo6xbWRTIco8luz3Jz83VXoRMkyb0ZH7kKHGlTzS5Q=";
"smithay-0.3.0" = "sha256-vep0/Hv1E5YvnHFV91+4Y3CTxOYCAndEnguw/XJ3sNM=";
"smithay-clipboard-0.8.0" = "sha256-4InFXm0ahrqFrtNLeqIuE3yeOpxKZJZx+Bc0yQDtv34=";
"smithay-egui-0.1.0" = "sha256-FcSoKCwYk3okwQURiQlDUcfk9m/Ne6pSblGAzHDaVHg=";
"smithay-egui-0.1.0" = "sha256-i8Rlo221v8G7QUAVVBtBNdOtQv1Drv2oj+EhTBak25g=";
"softbuffer-0.4.1" = "sha256-a0bUFz6O8CWRweNt/OxTvflnPYwO5nm6vsyc/WcXyNg=";
"taffy-0.3.11" = "sha256-SCx9GEIJjWdoNVyq+RZAGn0N71qraKZxf9ZWhvyzLaI=";
};
@ -77,6 +78,14 @@ rustPlatform.buildRustPackage rec {
"-Wl,--pop-state"
];
makeFlags = [
"prefix=$(out)"
"CARGO_TARGET_DIR=target/${stdenv.hostPlatform.rust.cargoShortTarget}"
];
# Use default stdenv installPhase, not the buildRustPackage one.
installPhase = "installPhase";
# These libraries are only used by the X11 backend, which will not
# be the common case, so just make them available, don't link them.
postInstall = ''

File diff suppressed because it is too large.

View File

@ -1,25 +1,51 @@
{ lib
, fetchFromGitHub
, rustPlatform
, pkg-config
, udev
{
lib,
fetchFromGitHub,
rustPlatform,
pkg-config,
libinput,
udev,
}:
rustPlatform.buildRustPackage rec {
pname = "cosmic-settings-daemon";
version = "unstable-2023-12-29";
version = "1.0.0-alpha.2";
src = fetchFromGitHub {
owner = "pop-os";
repo = pname;
rev = "f7183b68c6ca3f68054b5dd6457b1d5798a75a48";
hash = "sha256-Wck0NY6CUjD16gxi74stayiahs4UiqS7iQCkbOXCgKE=";
repo = "cosmic-settings-daemon";
rev = "epoch-${version}";
hash = "sha256-mtnMqG3aUSgtN3+Blj3w90UsX8NUu/QlzYgr64KPE9s=";
};
cargoHash = "sha256-vCs20RdGhsI1+f78KEau7ohtoGTrGP9QH91wooQlgOE=";
cargoLock = {
lockFile = ./Cargo.lock;
outputHashes = {
"accesskit-0.12.2" = "sha256-1UwgRyUe0PQrZrpS7574oNLi13fg5HpgILtZGW6JNtQ=";
"atomicwrites-0.4.2" = "sha256-QZSuGPrJXh+svMeFWqAXoqZQxLq/WfIiamqvjJNVhxA=";
"clipboard_macos-0.1.0" = "sha256-cG5vnkiyDlQnbEfV2sPbmBYKv1hd3pjJrymfZb8ziKk=";
"cosmic-comp-config-0.1.0" = "sha256-224Z6/KF6x0mOOe81Ny+9RTjHt+Y63UZ+4+mQ8Y7aqU=";
"cosmic-config-0.1.0" = "sha256-S7/SZgOCeiuFkKNoPfG5YizAs3cGdjb7XIiMbHZ56ss=";
"cosmic-text-0.12.0" = "sha256-VUUCcW5XnkmCB8cQ5t2xT70wVD5WKXEOPNgNd2xod2A=";
"d3d12-0.19.0" = "sha256-usrxQXWLGJDjmIdw1LBXtBvX+CchZDvE8fHC0LjvhD4=";
"geoclue2-0.1.0" = "sha256-+1XB7r45Uc71fLnNR4U0DUF2EB8uzKeE4HIrdvKhFXo=";
"glyphon-0.5.0" = "sha256-j1HrbEpUBqazWqNfJhpyjWuxYAxkvbXzRKeSouUoPWg=";
"smithay-clipboard-0.8.0" = "sha256-pBQZ+UXo9hZ907mfpcZk+a+8pKrIWdczVvPkjT3TS8U=";
"softbuffer-0.4.1" = "sha256-a0bUFz6O8CWRweNt/OxTvflnPYwO5nm6vsyc/WcXyNg=";
"taffy-0.3.11" = "sha256-SCx9GEIJjWdoNVyq+RZAGn0N71qraKZxf9ZWhvyzLaI=";
};
};
nativeBuildInputs = [ pkg-config ];
buildInputs = [ udev ];
buildInputs = [
libinput
udev
];
postInstall = ''
mkdir -p $out/share/polkit-1/rules.d
cp data/polkit-1/rules.d/*.rules $out/share/polkit-1/rules.d/
'';
meta = with lib; {
homepage = "https://github.com/pop-os/cosmic-settings-daemon";

View File

@ -0,0 +1,34 @@
{
lib,
python3,
fetchFromGitHub,
}:
python3.pkgs.buildPythonApplication rec {
pname = "gitxray";
version = "1.0.15-unstable-2024-09-20";
pyproject = true;
src = fetchFromGitHub {
owner = "kulkansecurity";
repo = "gitxray";
# https://github.com/kulkansecurity/gitxray/issues/1
rev = "7e02f8c789f1c8bf3f4df6c1c301d1a666cedd1c";
hash = "sha256-ucXHfclvaAbSi2HtrhkR2iW0r7jWq9yHqROwRAowOhA=";
};
build-system = with python3.pkgs; [ setuptools ];
dependencies = with python3.pkgs; [ requests ];
pythonImportsCheck = [ "gitxray" ];
meta = {
description = "Tool which leverages Public GitHub REST APIs for various tasks";
homepage = "https://github.com/kulkansecurity/gitxray";
changelog = "https://github.com/kulkansecurity/gitxray/blob/${src.rev}/CHANGELOG.md";
license = lib.licenses.agpl3Only;
maintainers = with lib.maintainers; [ fab ];
mainProgram = "gitxray";
};
}

View File

@ -0,0 +1,35 @@
{
lib,
buildGoModule,
fetchFromGitHub,
}:
buildGoModule rec {
pname = "grimoire";
version = "0.1.0";
src = fetchFromGitHub {
owner = "DataDog";
repo = "grimoire";
rev = "refs/tags/v${version}";
hash = "sha256-V6j6PBoZqTvGfYSbpxd0vOyTb/i2EV8pDVSuZeq1s5o=";
};
vendorHash = "sha256-K1kVXSfIjBpuJ7TyTCtaWj6jWRXPQdBvUlf5LC60tj0=";
subPackages = [ "cmd/grimoire/" ];
ldflags = [
"-s"
"-w"
];
meta = {
description = "Tool to generate datasets of cloud audit logs for common attacks";
homepage = "https://github.com/DataDog/grimoire";
changelog = "https://github.com/DataDog/grimoire/releases/tag/v${version}";
license = lib.licenses.asl20;
maintainers = with lib.maintainers; [ fab ];
mainProgram = "grimoire";
};
}

View File

@ -8,27 +8,6 @@
let
python = python3.override {
self = python;
packageOverrides = self: super: {
pydantic = super.pydantic_1;
versioningit = super.versioningit.overridePythonAttrs (_: {
doCheck = false;
});
albumentations = super.albumentations.overridePythonAttrs (old: rec {
version = "1.4.3";
src = fetchFromGitHub {
owner = "albumentations-team";
repo = "albumentations";
rev = version;
hash = "sha256-JIBwjYaUP4Sc1bVM/zlj45cz9OWpb/LOBsIqk1m+sQA=";
};
dependencies = old.dependencies ++ [
self.scikit-learn
];
});
};
};
in
python.pkgs.buildPythonApplication rec {
@ -44,7 +23,10 @@ python.pkgs.buildPythonApplication rec {
substituteInPlace app/test_main.py --replace-fail ": cv2.Mat" ""
'';
pythonRelaxDeps = [ "setuptools" ];
pythonRelaxDeps = [
"pydantic-settings"
"setuptools"
];
pythonRemoveDeps = [ "opencv-python-headless" ];
build-system = with python.pkgs; [
@ -60,6 +42,8 @@ python.pkgs.buildPythonApplication rec {
pillow
fastapi
uvicorn
pydantic
pydantic-settings
aiocache
rich
ftfy
@ -69,7 +53,6 @@ python.pkgs.buildPythonApplication rec {
gunicorn
huggingface-hub
tokenizers
pydantic
]
++ uvicorn.optional-dependencies.standard;

View File

@ -225,7 +225,12 @@ buildNpmPackage' {
description = "Self-hosted photo and video backup solution";
homepage = "https://immich.app/";
license = lib.licenses.agpl3Only;
maintainers = with lib.maintainers; [ jvanbruegge ];
maintainers = with lib.maintainers; [
dotlambda
jvanbruegge
Scrumplex
titaniumtown
];
platforms = lib.platforms.linux;
mainProgram = "server";
};

View File

@ -1,22 +1,22 @@
{
"version": "1.117.0",
"hash": "sha256-v4TxKL+NaaAFxlJx/AG/5JxWnPK9uO6GjM4aoW53nzQ=",
"version": "1.118.1",
"hash": "sha256-rWBW0EwehuWnKk6qEte+dPd9l7FbLzwdkCSKMm22Orw=",
"components": {
"cli": {
"npmDepsHash": "sha256-ARjrBHx4aOiNy2PbHWS7kP9Z8QiNyTeyImSxIsXwPnU=",
"version": "2.2.23"
"npmDepsHash": "sha256-0je82BtDH6cUzoMrmeIS0jLmWPbmkdIQJ/SnmbAMtbw=",
"version": "2.2.25"
},
"server": {
"npmDepsHash": "sha256-RjaTRqfZpDhI8lMVvsgICUn8g4NFnqcPptem/AwRr38=",
"version": "1.117.0"
"npmDepsHash": "sha256-Jxb47Y4x9A6s4zGODIp6rze7iQ/w8Gvt31NHSATLYCM=",
"version": "1.118.1"
},
"web": {
"npmDepsHash": "sha256-TZnpbLJbTNFwI2Kvng88z0T1jFf4Tj2xwR0X0wCLaD0=",
"version": "1.117.0"
"npmDepsHash": "sha256-BUgkdsC6raURkyy6eN31uCMKmBbL+fCbGabfHJgJn8g=",
"version": "1.118.1"
},
"open-api/typescript-sdk": {
"npmDepsHash": "sha256-G+iivJ0jibRCw/RChv5heVwY7c7oY/EG4bL+kpjoADQ=",
"version": "1.117.0"
"npmDepsHash": "sha256-Ga/aU5hojd3SgtoiM5QLsmzS5k7CRvh13a4lkC0BZA8=",
"version": "1.118.1"
}
}
}

View File

@ -7,7 +7,7 @@
buildGoModule rec {
version = "photos-v0.9.35";
version = "photos-v0.9.46";
pname = "museum";
src = fetchFromGitHub {
@ -15,7 +15,7 @@ buildGoModule rec {
repo = "ente";
sparseCheckout = [ "server" ];
rev = version;
hash = "sha256-A/M2OhDzzOMGXnaqFFV9Z8bn/3HeZc50p2mIv++Q0uE=";
hash = "sha256-dJCZxQLnKb+mFG0iaYNrXyDSaslqKdPTXMK4KwvqBd8=";
};
sourceRoot = "${src.name}/server";

View File

@ -58,13 +58,13 @@ let
in
{
pname = "nanopb";
version = "0.4.8";
version = "0.4.9";
src = fetchFromGitHub {
owner = "nanopb";
repo = "nanopb";
rev = self.version;
hash = "sha256-LfARVItT+7dczg2u08RlXZLrLR7ScvC44tgmcy/Zv48=";
hash = "sha256-zXhUEajCZ24VA/S0pSFewz096s8rmhKARSWbSC5TdAg=";
};
dontPatch = true;

View File

@ -13,13 +13,13 @@
stdenv.mkDerivation (finalAttrs: {
pname = "prisma";
version = "5.18.0";
version = "5.21.0";
src = fetchFromGitHub {
owner = "prisma";
repo = "prisma";
rev = finalAttrs.version;
hash = "sha256-BLD2nKryigXr03BCgGwb3PnCcBLMyDfSFb9Snj0VPKI=";
hash = "sha256-i37Hiawmu/06Mv56FtYkvFGOtqW3x4Q2H1C0JW6/0pI=";
};
nativeBuildInputs = [
@ -32,7 +32,7 @@ stdenv.mkDerivation (finalAttrs: {
pnpmDeps = pnpm_8.fetchDeps {
inherit (finalAttrs) pname version src;
hash = "sha256-lgdJk7HCfX3cAvdEI8xG/IVBiLWezdUN0q+e/0LtVUQ=";
hash = "sha256-o6m9Lxg+oqq15CtdA9RQRukdJWPPGtw/SwRyHDUf91A=";
};
patchPhase = ''

View File

@ -0,0 +1,36 @@
{
lib,
stdenv,
fetchFromGitHub,
rustPlatform,
python3,
}:
rustPlatform.buildRustPackage rec {
pname = "proton-vpn-local-agent";
version = "0-unstable-2024-10-10";
cargoHash = "sha256-yAeqx9zo4xz4g/klo10vMEcecc8npIUY8tkV/nq11WA=";
src = fetchFromGitHub {
owner = "ProtonVPN";
repo = "python-proton-vpn-local-agent";
rev = "01332194d217d91a514ecaebcdfbfa3d21ccd1ed";
hash = "sha256-I+tbVQzD4xJUsoRF8TU/2EMldVqtfxY3E7PQN3ks0mA=";
};
sourceRoot = "${src.name}/python-proton-vpn-local-agent";
installPhase = ''
# manually install the python binding
mkdir -p $out/${python3.sitePackages}/proton/vpn/
cp ./target/${stdenv.hostPlatform.rust.cargoShortTarget}/release/libpython_proton_vpn_local_agent.so $out/${python3.sitePackages}/proton/vpn/local_agent.so
'';
meta = {
description = "Proton VPN local agent written in Rust with Python bindings";
homepage = "https://github.com/ProtonVPN/python-proton-vpn-local-agent";
license = lib.licenses.gpl3Only;
platforms = lib.platforms.linux;
maintainers = with lib.maintainers; [ sebtm ];
};
}

View File

@ -5,11 +5,11 @@
stdenv.mkDerivation (finalAttrs: {
pname = "pv";
version = "1.8.12";
version = "1.8.14";
src = fetchurl {
url = "https://www.ivarch.com/programs/sources/pv-${finalAttrs.version}.tar.gz";
hash = "sha256-lof53u2wnQ3ADYDDBpHwyRKCwNXY+n1qKghch0LCzXw=";
hash = "sha256-DMGIEaSAmlh9SxHUdpG7wK2DpdldLCYGr3Tqe0pnR1Y=";
};
meta = {

View File

@ -0,0 +1,170 @@
{
lib,
stdenv,
fetchPypi,
python3Packages,
stress,
versionCheckHook,
}:
python3Packages.buildPythonApplication rec {
pname = "snakemake";
version = "8.23.0";
pyproject = true;
src = fetchPypi {
inherit pname version;
hash = "sha256-XENI9VJW62KyrxDGSwQiygggYZOu9yW2QSNyp4BO9Us=";
};
postPatch = ''
patchShebangs --build tests/
substituteInPlace tests/common.py \
--replace-fail 'os.environ["PYTHONPATH"] = os.getcwd()' "pass" \
--replace-fail 'del os.environ["PYTHONPATH"]' "pass"
substituteInPlace snakemake/unit_tests/__init__.py \
--replace-fail '"unit_tests/templates"' '"'"$PWD"'/snakemake/unit_tests/templates"'
'';
build-system = with python3Packages; [
setuptools
];
dependencies = with python3Packages; [
appdirs
conda-inject
configargparse
connection-pool
datrie
docutils
gitpython
humanfriendly
immutables
jinja2
jsonschema
nbformat
psutil
pulp
pygments
pyyaml
requests
reretry
smart-open
snakemake-interface-executor-plugins
snakemake-interface-common
snakemake-interface-storage-plugins
snakemake-interface-report-plugins
stopit
tabulate
throttler
toposort
wrapt
yte
];
# See
# https://github.com/snakemake/snakemake/blob/main/.github/workflows/main.yml#L99
# for the current basic test suite. Slurm, Tibanna and Tes require extra
# setup.
nativeCheckInputs = with python3Packages; [
numpy
pandas
pytestCheckHook
pytest-mock
requests-mock
snakemake-executor-plugin-cluster-generic
snakemake-storage-plugin-fs
stress
versionCheckHook
];
versionCheckProgramArg = [ "--version" ];
pytestFlagsArray = [
"tests/tests.py"
"tests/test_expand.py"
"tests/test_io.py"
"tests/test_schema.py"
"tests/test_executor_test_suite.py"
"tests/test_api.py"
];
disabledTests =
[
# FAILED tests/tests.py::test_env_modules - AssertionError: expected successful execution
"test_ancient"
"test_conda_create_envs_only"
"test_env_modules"
"test_generate_unit_tests"
"test_modules_prefix"
"test_strict_mode"
# Requires perl
"test_shadow"
# Require peppy and eido
"test_peppy"
"test_modules_peppy"
"test_pep_pathlib"
# CalledProcessError
"test_filegraph" # requires graphviz
"test_github_issue1384"
# AssertionError: assert 127 == 1
"test_issue1256"
"test_issue2574"
# Require `snakemake-storage-plugin-fs` (circular dependency)
"test_default_storage"
"test_default_storage_local_job"
"test_deploy_sources"
"test_output_file_cache_storage"
"test_storage"
]
++ lib.optionals stdenv.isDarwin [
# Unclear failure:
# AssertionError: expected successful execution
# `__darwinAllowLocalNetworking` doesn't help
"test_excluded_resources_not_submitted_to_cluster"
"test_group_job_resources_with_pipe"
"test_group_jobs_resources"
"test_group_jobs_resources_with_limited_resources"
"test_group_jobs_resources_with_max_threads"
"test_issue850"
"test_issue860"
"test_multicomp_group_jobs"
"test_queue_input"
"test_queue_input_dryrun"
"test_queue_input_forceall"
"test_resources_submitted_to_cluster"
"test_scopes_submitted_to_cluster"
];
pythonImportsCheck = [
"snakemake"
];
preCheck = ''
export HOME="$(mktemp -d)"
'';
meta = {
homepage = "https://snakemake.github.io";
license = lib.licenses.mit;
description = "Python-based execution environment for make-like workflows";
changelog = "https://github.com/snakemake/snakemake/blob/v${version}/CHANGELOG.md";
mainProgram = "snakemake";
longDescription = ''
Snakemake is a workflow management system that aims to reduce the complexity of
creating workflows by providing a fast and comfortable execution environment,
together with a clean and readable specification language in Python style. Snakemake
workflows are essentially Python scripts extended by declarative code to define
rules. Rules describe how to create output files from input files.
'';
maintainers = with lib.maintainers; [
helkafen
renatoGarcia
veprbl
];
};
}

View File

@ -1,7 +1,7 @@
{ lib, stdenv, fetchurl, runtimeShell, traceDeps ? false, bash }:
{ lib, stdenv, fetchurl, bash }:
stdenv.mkDerivation (finalAttrs: {
pname = "steam-original";
pname = "steam-unwrapped";
version = "1.0.0.81";
src = fetchurl {
@ -12,20 +12,8 @@ stdenv.mkDerivation (finalAttrs: {
makeFlags = [ "DESTDIR=$(out)" "PREFIX=" ];
postInstall =
let
traceLog = "/tmp/steam-trace-dependencies.log";
in ''
postInstall = ''
rm $out/bin/steamdeps
${lib.optionalString traceDeps ''
cat > $out/bin/steamdeps <<EOF
#!${runtimeShell}
echo \$1 >> ${traceLog}
cat \$1 >> ${traceLog}
echo >> ${traceLog}
EOF
chmod +x $out/bin/steamdeps
''}
# install udev rules
mkdir -p $out/etc/udev/rules.d/
@ -38,7 +26,7 @@ stdenv.mkDerivation (finalAttrs: {
sed -e 's,/usr/bin/steam,steam,g' steam.desktop > $out/share/applications/steam.desktop
'';
passthru.updateScript = ./update-bootstrap.py;
passthru.updateScript = ./update.py;
meta = with lib; {
description = "Digital distribution platform";
@ -49,7 +37,7 @@ stdenv.mkDerivation (finalAttrs: {
'';
homepage = "https://store.steampowered.com/";
license = licenses.unfreeRedistributable;
maintainers = with maintainers; [ jagajaga ];
maintainers = lib.teams.steam.members ++ [ lib.maintainers.jagajaga ];
mainProgram = "steam";
};
})

View File

@ -27,5 +27,5 @@ if len(found_versions) == 0:
sys.exit(1)
found_versions.sort()
subprocess.run(["nix-update", "--version", found_versions[-1], "steamPackages.steam"])
subprocess.run(["nix-update", "--version", found_versions[-1], "steam-unwrapped"])
found_versions[0]

View File

@ -1,6 +1,6 @@
{
lib,
steam,
steam-unwrapped,
buildFHSEnv,
writeShellScript,
extraPkgs ? pkgs: [ ], # extra packages to add to targetPkgs
@ -21,7 +21,7 @@ let
# https://gitlab.steamos.cloud/steamrt/steam-runtime-tools/-/blob/main/docs/distro-assumptions.md#command-line-tools
targetPkgs = pkgs: with pkgs; [
steam
steam-unwrapped
bash
coreutils
@ -63,8 +63,8 @@ let
libcap # not documented, required by srt-bwrap
] ++ extraLibraries pkgs;
extraInstallCommands = lib.optionalString (steam != null) ''
ln -s ${steam}/share $out/share
extraInstallCommands = lib.optionalString (steam-unwrapped != null) ''
ln -s ${steam-unwrapped}/share $out/share
'';
profile = ''
@ -124,7 +124,7 @@ in steamEnv {
exec "$@"
'';
meta = (steam.meta or {}) // {
meta = (steam-unwrapped.meta or {}) // {
description = "Run commands in the same FHS environment that is used for Steam";
mainProgram = "steam-run";
name = "steam-run";
@ -135,7 +135,7 @@ in steamEnv {
};
};
meta = (steam.meta or {}) // {
meta = (steam-unwrapped.meta or {}) // {
description = "Steam dependencies (dummy package, do not use)";
};
}

View File

@ -1,6 +1,7 @@
{ lib
, stdenv
, fetchFromGitHub
, fetchpatch
, pkgsBuildBuild
, pkg-config
, cmake
@ -11,6 +12,7 @@
, libedit
, libffi
, libpfm
, lit
, mpfr
, zlib
, ncurses
@ -45,7 +47,7 @@ let
isNative = stdenv.hostPlatform == stdenv.buildPlatform;
in stdenv.mkDerivation (finalAttrs: {
pname = "triton-llvm";
version = "17.0.0-c5dede880d17";
version = "19.1.0-rc1"; # One of the tags at https://github.com/llvm/llvm-project/commit/10dc3a8e916d73291269e5e2b82dd22681489aa1
outputs = [
"out"
@ -60,9 +62,18 @@ in stdenv.mkDerivation (finalAttrs: {
src = fetchFromGitHub {
owner = "llvm";
repo = "llvm-project";
rev = "c5dede880d175f7229c9b2923f4753e12702305d";
hash = "sha256-v4r3+7XVFK+Dzxt/rErZNJ9REqFO3JmGN4X4vZ+77ew=";
rev = "10dc3a8e916d73291269e5e2b82dd22681489aa1";
hash = "sha256-9DPvcFmhzw6MipQeCQnr35LktW0uxtEL8axMMPXIfWw=";
};
patches = [
# glibc-2.40 support
# [llvm-exegesis] Use correct rseq struct size #100804
# https://github.com/llvm/llvm-project/issues/100791
(fetchpatch {
url = "https://github.com/llvm/llvm-project//commit/84837e3cc1cf17ed71580e3ea38299ed2bfaa5f6.patch";
hash = "sha256-QKa+kyXjjGXwTQTEpmKZx5yYjOyBX8A8NQoIYUaGcIw=";
})
];
nativeBuildInputs = [
pkg-config
@ -74,6 +85,7 @@ in stdenv.mkDerivation (finalAttrs: {
doxygen
sphinx
python3Packages.recommonmark
python3Packages.myst-parser
];
buildInputs = [
@ -90,7 +102,9 @@ in stdenv.mkDerivation (finalAttrs: {
ncurses
];
sourceRoot = "${finalAttrs.src.name}/llvm";
preConfigure = ''
cd llvm
'';
cmakeFlags = [
(lib.cmakeFeature "LLVM_TARGETS_TO_BUILD" (lib.concatStringsSep ";" llvmTargetsToBuild'))
@ -140,23 +154,25 @@ in stdenv.mkDerivation (finalAttrs: {
postPatch = ''
# `CMake Error: cannot write to file "/build/source/llvm/build/lib/cmake/mlir/MLIRTargets.cmake": Permission denied`
chmod +w -R ../mlir
patchShebangs ../mlir/test/mlir-reduce
chmod +w -R ./mlir
patchShebangs ./mlir/test/mlir-reduce
# FileSystem permissions tests fail with various special bits
rm test/tools/llvm-objcopy/ELF/mirror-permissions-unix.test
rm unittests/Support/Path.cpp
rm llvm/test/tools/llvm-objcopy/ELF/mirror-permissions-unix.test
rm llvm/unittests/Support/Path.cpp
substituteInPlace unittests/Support/CMakeLists.txt \
substituteInPlace llvm/unittests/Support/CMakeLists.txt \
--replace "Path.cpp" ""
'' + lib.optionalString stdenv.hostPlatform.isAarch64 ''
# Not sure why this fails
rm test/tools/llvm-exegesis/AArch64/latency-by-opcode-name.s
rm llvm/test/tools/llvm-exegesis/AArch64/latency-by-opcode-name.s
'';
postInstall = lib.optionalString (!isNative) ''
postInstall = ''
cp ${lib.getExe lit} $out/bin/llvm-lit
'' + (lib.optionalString (!isNative) ''
cp -a NATIVE/bin/llvm-config $out/bin/llvm-config-native
'';
'');
doCheck = buildTests;

File diff suppressed because it is too large

View File

@ -12,16 +12,16 @@
rustPlatform.buildRustPackage rec {
pname = "xdg-desktop-portal-cosmic";
version = "1.0.0-alpha.1";
version = "1.0.0-alpha.2";
src = fetchFromGitHub {
owner = "pop-os";
repo = pname;
rev = "epoch-${version}";
hash = "sha256-HjQ8VttWjWcMfVBXyeiju27nyZziY/5V1csUEstqTtE=";
hash = "sha256-MbcktIXkiH3uxQLduXF76ZGn2aoTd/D6xKeUM4M/btM=";
};
env.VERGEN_GIT_COMMIT_DATE = "2024-08-02";
env.VERGEN_GIT_COMMIT_DATE = "2024-09-24";
env.VERGEN_GIT_SHA = src.rev;
cargoLock = {
@ -30,18 +30,20 @@ rustPlatform.buildRustPackage rec {
"accesskit-0.12.2" = "sha256-1UwgRyUe0PQrZrpS7574oNLi13fg5HpgILtZGW6JNtQ=";
"atomicwrites-0.4.2" = "sha256-QZSuGPrJXh+svMeFWqAXoqZQxLq/WfIiamqvjJNVhxA=";
"clipboard_macos-0.1.0" = "sha256-cG5vnkiyDlQnbEfV2sPbmBYKv1hd3pjJrymfZb8ziKk=";
"cosmic-bg-config-0.1.0" = "sha256-e195Hp0LD0bvHRi3AQvtQ9vccgWBqYwna6g+4U8rWdI=";
"cosmic-bg-config-0.1.0" = "sha256-lAFAZBo5FnXgJV3MrZhaYmBxqtH1E7+Huj53ho/hPik=";
"cosmic-client-toolkit-0.1.0" = "sha256-1XtyEvednEMN4MApxTQid4eed19dEN5ZBDt/XRjuda0=";
"cosmic-config-0.1.0" = "sha256-l4LKJ19/5UOMm8oWhhVFvoN4Kbar/EMwBKaiA8RZ7VU=";
"cosmic-files-0.1.0" = "sha256-ZEAWOvT8rlM5dke5pYeGu1MO8umPu0LQmUkNq4BGPsQ=";
"cosmic-settings-daemon-0.1.0" = "sha256-+1XB7r45Uc71fLnNR4U0DUF2EB8uzKeE4HIrdvKhFXo=";
"cosmic-text-0.12.0" = "sha256-x7UMzlzYkWySFgSQTO1rRn+pyPG9tXKpJ7gzx/wpm8U=";
"cosmic-config-0.1.0" = "sha256-gXrMEoAN+7nYAEcs4w6wROhQTjMCxkGn+muJutktLyk=";
"cosmic-files-0.1.0" = "sha256-rBR6IPpMgOltyaRPPZ5V8tYH/xtQphgrPWci/kvlgEg=";
"cosmic-settings-daemon-0.1.0" = "sha256-6cEgFfkBxEpIo8LsvKDR2khMdhEz/dp2oYJXXBiC9zg=";
"cosmic-text-0.12.1" = "sha256-u2Tw+XhpIKeFg8Wgru/sjGw6GUZ2m50ZDmRBJ1IM66w=";
"d3d12-0.19.0" = "sha256-usrxQXWLGJDjmIdw1LBXtBvX+CchZDvE8fHC0LjvhD4=";
"fs_extra-1.3.0" = "sha256-ftg5oanoqhipPnbUsqnA4aZcyHqn9XsINJdrStIPLoE=";
"glyphon-0.5.0" = "sha256-j1HrbEpUBqazWqNfJhpyjWuxYAxkvbXzRKeSouUoPWg=";
"libspa-0.8.0" = "sha256-iOT9y8hppY9hisHdbMRAhkRIAB/wzNnjWzAgT2Vf6eY=";
"smithay-clipboard-0.8.0" = "sha256-pBQZ+UXo9hZ907mfpcZk+a+8pKrIWdczVvPkjT3TS8U=";
"libspa-0.8.0" = "sha256-kp5x5QhmgEqCrt7xDRfMFGoTK5IXOuvW2yOW02B8Ftk=";
"smithay-clipboard-0.8.0" = "sha256-4InFXm0ahrqFrtNLeqIuE3yeOpxKZJZx+Bc0yQDtv34=";
"softbuffer-0.4.1" = "sha256-a0bUFz6O8CWRweNt/OxTvflnPYwO5nm6vsyc/WcXyNg=";
"taffy-0.3.11" = "sha256-SCx9GEIJjWdoNVyq+RZAGn0N71qraKZxf9ZWhvyzLaI=";
"trash-5.1.1" = "sha256-So8rQ8gLF5o79Az396/CQY/veNo4ticxYpYZPfMJyjQ=";
"winit-0.29.10" = "sha256-ScTII2AzK3SC8MVeASZ9jhVWsEaGrSQ2BnApTxgfxK4=";
};
};
@ -63,8 +65,10 @@ rustPlatform.buildRustPackage rec {
];
postInstall = ''
mkdir -p $out/share/{dbus-1/services,xdg-desktop-portal/portals}
mkdir -p $out/share/{dbus-1/services,icons,xdg-desktop-portal/portals}
cp -r data/icons $out/share/icons/hicolor
cp data/*.service $out/share/dbus-1/services/
cp data/cosmic-portals.conf $out/share/xdg-desktop-portal/
cp data/cosmic.portal $out/share/xdg-desktop-portal/portals/
'';

View File

@ -0,0 +1,47 @@
{
lib,
buildPythonPackage,
fetchFromGitHub,
# build-system
poetry-core,
# dependencies
pyyaml,
}:
buildPythonPackage rec {
pname = "conda-inject";
version = "1.3.2";
pyproject = true;
src = fetchFromGitHub {
owner = "koesterlab";
repo = "conda-inject";
rev = "refs/tags/v${version}";
hash = "sha256-M4+bz7ZuHlcF8tF5kSCUjjkIHG75eCCW1IJxcwxNL6o=";
};
build-system = [
poetry-core
];
dependencies = [
pyyaml
];
pythonImportsCheck = [
"conda_inject"
];
# no tests
doCheck = false;
meta = {
description = "Helper functions for injecting a conda environment into the current python environment";
homepage = "https://github.com/koesterlab/conda-inject";
changelog = "https://github.com/koesterlab/conda-inject/blob/${src.rev}/CHANGELOG.md";
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ GaetanLepage ];
};
}

View File

@ -9,7 +9,7 @@
buildPythonPackage rec {
pname = "findimports";
version = "2.5.0";
version = "2.5.1";
pyproject = true;
disabled = pythonOlder "3.7";
@ -18,10 +18,10 @@ buildPythonPackage rec {
owner = "mgedmin";
repo = "findimports";
rev = "refs/tags/${version}";
hash = "sha256-kHm0TiLe7zvUnU6+MR1M0xOt0gpMDJ5FJ5+HgY0LPeo=";
hash = "sha256-0HD5n9kxlXB86w8zkti6MkVZxEgGRrXzM6f+g0H/jrs=";
};
nativeBuildInputs = [ setuptools ];
build-system = [ setuptools ];
pythonImportsCheck = [ "findimports" ];
@ -36,7 +36,6 @@ buildPythonPackage rec {
meta = with lib; {
description = "Module for the analysis of Python import statements";
mainProgram = "findimports";
homepage = "https://github.com/mgedmin/findimports";
changelog = "https://github.com/mgedmin/findimports/blob/${version}/CHANGES.rst";
license = with licenses; [
@ -44,5 +43,6 @@ buildPythonPackage rec {
gpl3Only
];
maintainers = with maintainers; [ fab ];
mainProgram = "findimports";
};
}

View File

@ -2,7 +2,6 @@
lib,
stdenv,
buildPythonPackage,
pythonOlder,
fetchFromGitHub,
fetchpatch,
isPyPy,
@ -33,34 +32,22 @@
buildPythonPackage rec {
pname = "imageio";
version = "2.35.1";
version = "2.36.0";
pyproject = true;
disabled = pythonOlder "3.8";
src = fetchFromGitHub {
owner = "imageio";
repo = "imageio";
rev = "refs/tags/v${version}";
hash = "sha256-WeoZE2TPBAhzBBcZNQqoiqvribMCLSZWk/XpdMydvCQ=";
hash = "sha256-dQrAVPXtDdibaxxfqW29qY7j5LyegvmI0Y7/btXmsyY=";
};
patches =
[
# Fix tests failing with new enough ffmpeg
# Upstream PR: https://github.com/imageio/imageio/pull/1101
# FIXME: remove when merged
(fetchpatch {
url = "https://github.com/imageio/imageio/commit/8d1bea4b560f3aa10ed2d250e483173f488f50fe.patch";
hash = "sha256-68CzSoJzbr21N97gWu5qVYh6QeBS9zon8XmytcVK89c=";
})
]
++ lib.optionals (!stdenv.hostPlatform.isDarwin) [
(substituteAll {
src = ./libgl-path.patch;
libgl = "${libGL.out}/lib/libGL${stdenv.hostPlatform.extensions.sharedLibrary}";
})
];
patches = lib.optionals (!stdenv.hostPlatform.isDarwin) [
(substituteAll {
src = ./libgl-path.patch;
libgl = "${libGL.out}/lib/libGL${stdenv.hostPlatform.extensions.sharedLibrary}";
})
];
build-system = [ setuptools ];

View File

@ -1,7 +1,6 @@
{
lib,
buildPythonPackage,
pythonOlder,
fetchFromGitHub,
# build-system
@ -9,35 +8,38 @@
poetry-core,
# dependencies
async-timeout,
asgi-logger,
cloudevents,
fastapi,
grpcio,
httpx,
azure-identity,
kubernetes,
numpy,
orjson,
pandas,
prometheus-client,
protobuf,
requests,
psutil,
uvicorn,
# optional-dependencies
azure-identity,
azure-storage-blob,
azure-storage-file-share,
boto3,
google-cloud-storage,
huggingface-hub,
asgi-logger,
ray,
prometheus-client,
protobuf,
requests,
psutil,
pydantic,
python-dateutil,
pyyaml,
ray,
six,
tabulate,
timing-asgi,
uvicorn,
# checks
# tests
avro,
grpcio-testing,
pytest-asyncio,
@ -47,16 +49,14 @@
buildPythonPackage rec {
pname = "kserve";
version = "0.13.1";
version = "0.14.0";
pyproject = true;
disabled = pythonOlder "3.8";
src = fetchFromGitHub {
owner = "kserve";
repo = "kserve";
rev = "refs/tags/v${version}";
hash = "sha256-wGS001PK+k21oCOaQCiAtytTDjfe0aiTVJ9spyOucYA=";
hash = "sha256-N/IgiTiyBNw7WQWxcUJlXU+Q9o3UUaduD9ZBKwu0uRE=";
};
sourceRoot = "${src.name}/python/kserve";
@ -66,7 +66,6 @@ buildPythonPackage rec {
"httpx"
"prometheus-client"
"protobuf"
"ray"
"uvicorn"
"psutil"
];
@ -77,7 +76,6 @@ buildPythonPackage rec {
];
dependencies = [
async-timeout
cloudevents
fastapi
grpcio
@ -92,12 +90,11 @@ buildPythonPackage rec {
pydantic
python-dateutil
pyyaml
ray
six
tabulate
timing-asgi
uvicorn
] ++ ray.optional-dependencies.serve-deps;
];
optional-dependencies = {
storage = [
@ -105,6 +102,7 @@ buildPythonPackage rec {
azure-storage-blob
azure-storage-file-share
boto3
huggingface-hub
google-cloud-storage
requests
];
@ -129,11 +127,11 @@ buildPythonPackage rec {
disabledTests = [
# Require network access
"test_health_handler"
"test_infer"
"test_infer_v2"
# Assertion error due to HTTP response code
"test_unload"
"test_infer_graph_endpoint"
"test_infer_path_based_routing"
# Tries to access `/tmp` (hardcoded)
"test_local_path_with_out_dir_exist"
];
meta = {

View File

@ -1,7 +1,6 @@
{
lib,
buildPythonPackage,
pythonOlder,
fetchFromGitHub,
# build
@ -19,14 +18,14 @@
buildPythonPackage rec {
pname = "lightning-utilities";
version = "0.11.7";
version = "0.11.8";
pyproject = true;
src = fetchFromGitHub {
owner = "Lightning-AI";
repo = "utilities";
rev = "refs/tags/v${version}";
hash = "sha256-0XxBDe9OGQLfl4viuUm5Hx8WvZhSj+J0FoDqD/JOiZM=";
hash = "sha256-1npXzPqasgtI5KLq791hfneKFO5GrSiRdqfRd13//6M=";
};
postPatch = ''

View File

@ -11,6 +11,7 @@
huggingface-hub,
protobuf,
regex,
safetensors,
sentencepiece,
timm,
torch,
@ -28,14 +29,14 @@
}:
buildPythonPackage rec {
pname = "open-clip-torch";
version = "2.26.1";
version = "2.27.0";
pyproject = true;
src = fetchFromGitHub {
owner = "mlfoundations";
repo = "open_clip";
rev = "refs/tags/v${version}";
hash = "sha256-XjPOsGet8VNzwEwzz14f1nF3XOgpkb4OERIc6VrDDZ8=";
hash = "sha256-1LdxgRl72fDYdM9tZKMnHTvAY5QsWYiQSDWEGrngaOo=";
};
build-system = [ pdm-backend ];
@ -45,6 +46,7 @@ buildPythonPackage rec {
huggingface-hub
protobuf
regex
safetensors
sentencepiece
timm
torch

View File

@ -3,12 +3,13 @@
buildPythonPackage,
fetchFromGitHub,
pythonOlder,
unittestCheckHook,
setuptools,
unittestCheckHook,
}:
buildPythonPackage rec {
pname = "python-ipware";
version = "2.0.0";
version = "3.0.0";
pyproject = true;
disabled = pythonOlder "3.7";
@ -16,16 +17,16 @@ buildPythonPackage rec {
src = fetchFromGitHub {
owner = "un33k";
repo = "python-ipware";
rev = "v${version}";
hash = "sha256-j43uAcb1dyKe/SHQLLR+QJS6hKGB5qxjb9NiJaUPj8Y=";
rev = "refs/tags/v${version}";
hash = "sha256-S8/HbRztYGzrpLQRTHcvO7Zv3mNn/0+y5PNBYLpd++E=";
};
nativeBuildInputs = [ setuptools ];
pythonImportsCheck = [ "python_ipware" ];
build-system = [ setuptools ];
nativeCheckInputs = [ unittestCheckHook ];
pythonImportsCheck = [ "python_ipware" ];
meta = with lib; {
description = "Python package for server applications to retrieve client's IP address";
homepage = "https://github.com/un33k/python-ipware";

View File

@ -3,22 +3,27 @@
buildPythonPackage,
docutils,
fetchFromGitHub,
isPy27,
pytestCheckHook,
pythonOlder,
setuptools,
}:
buildPythonPackage rec {
version = "1.0.10";
pname = "python_toolbox";
disabled = isPy27;
pname = "python-toolbox";
version = "1.0.11";
pyproject = true;
disabled = pythonOlder "3.9";
src = fetchFromGitHub {
owner = "cool-RR";
repo = pname;
rev = version;
sha256 = "1hpls1hwisdjx1g15cq052bdn9fvh43r120llws8bvgvj9ivnaha";
repo = "python_toolbox";
rev = "refs/tags/${version}";
hash = "sha256-Y9RmVndgsBESrUCEORUwAdaFYBiunY3kWArhB9d7bw4=";
};
build-system = [ setuptools ];
nativeCheckInputs = [
docutils
pytestCheckHook
@ -30,9 +35,15 @@ buildPythonPackage rec {
"test_python_toolbox/test_cute_profile/test_cute_profile.py"
];
disabledTests = [
# AssertionError
"test_repr"
];
meta = with lib; {
description = "Tools for testing PySnooper";
homepage = "https://github.com/cool-RR/python_toolbox";
changelog = "https://github.com/cool-RR/python_toolbox/releases/tag/${version}";
license = licenses.mit;
maintainers = with maintainers; [ seqizz ];
};

View File

@ -11,12 +11,12 @@
buildPythonPackage rec {
pname = "shiv";
version = "1.0.6";
version = "1.0.7";
format = "pyproject";
src = fetchPypi {
inherit pname version;
hash = "sha256-4iJ2gTWXe+vftcDRp9/qKVV8VmtY0wDVuMJTXvIj13Y=";
hash = "sha256-lHdX/iY4OuntoMV288uiRN+jcV7S9Jk1RLdYJF9xqxU=";
};
propagatedBuildInputs = [

View File

@ -10,19 +10,19 @@
buildPythonPackage rec {
pname = "snakemake-interface-executor-plugins";
version = "9.2.0";
format = "pyproject";
version = "9.3.2";
pyproject = true;
src = fetchFromGitHub {
owner = "snakemake";
repo = pname;
repo = "snakemake-interface-executor-plugins";
rev = "refs/tags/v${version}";
hash = "sha256-WMbJP17YnDzFVcr6YepT5Ltw+Jo6PPn7ayIrjx2k+go=";
hash = "sha256-3XdsEnL+kuYhNOeAxkAsjTJ2R6NOtq97zPhQg9kdFkI=";
};
nativeBuildInputs = [ poetry-core ];
build-system = [ poetry-core ];
propagatedBuildInputs = [
dependencies = [
argparse-dataclass
throttler
snakemake-interface-common
@ -30,10 +30,11 @@ buildPythonPackage rec {
pythonImportsCheck = [ "snakemake_interface_executor_plugins" ];
meta = with lib; {
meta = {
description = "This package provides a stable interface for interactions between Snakemake and its executor plugins";
homepage = "https://github.com/snakemake/snakemake-interface-executor-plugins";
license = licenses.mit;
maintainers = with maintainers; [ veprbl ];
changelog = "https://github.com/snakemake/snakemake-interface-executor-plugins/blob/${src.rev}/CHANGELOG.md";
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ veprbl ];
};
}

View File

@ -0,0 +1,29 @@
From c5d4087519eae6f41c80bbd8ffbcc9390db44c7f Mon Sep 17 00:00:00 2001
From: SomeoneSerge <else+aalto@someonex.net>
Date: Thu, 10 Oct 2024 19:19:18 +0000
Subject: [PATCH] cmake.py: propagate cmakeFlags from environment
---
tools/setup_helpers/cmake.py | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/tools/setup_helpers/cmake.py b/tools/setup_helpers/cmake.py
index 4b605fe5975..ea1d6a1ef46 100644
--- a/tools/setup_helpers/cmake.py
+++ b/tools/setup_helpers/cmake.py
@@ -332,6 +332,12 @@ class CMake:
file=sys.stderr,
)
print(e, file=sys.stderr)
+
+ # Nixpkgs compat:
+ if "cmakeFlags" in os.environ:
+ import shlex
+ args.extend(shlex.split(os.environ["cmakeFlags"]))
+
# According to the CMake manual, we should pass the arguments first,
# and put the directory as the last element. Otherwise, these flags
# may not be passed correctly.
--
2.46.0
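The hunk above splices the space-separated cmakeFlags variable that Nixpkgs exports into the argument list that torch's setup helper hands to CMake. A minimal standalone sketch of the same idea, assuming only that the flags arrive as one environment string (the function name is illustrative, not part of the patch):

import os
import shlex

def cmake_args_with_nix_flags(base_args):
    """Extend a CMake argument list with flags taken from $cmakeFlags."""
    args = list(base_args)
    flags = os.environ.get("cmakeFlags")
    if flags:
        # shlex.split respects shell quoting, so -DFOO="a b" stays one argument.
        args.extend(shlex.split(flags))
    return args

if __name__ == "__main__":
    os.environ.setdefault("cmakeFlags", "-DUSE_SYSTEM_LIBS=ON -DCMAKE_BUILD_TYPE=Release")
    print(cmake_args_with_nix_flags(["-GNinja"]))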

View File

@ -35,10 +35,8 @@
removeReferencesTo,
# Build inputs
darwin,
numactl,
Accelerate,
CoreServices,
libobjc,
# Propagated build inputs
astunparse,
@ -56,6 +54,17 @@
tritonSupport ? (!stdenv.hostPlatform.isDarwin),
triton,
# TODO: 1. callPackage needs to learn to distinguish between the task
# of "asking for an attribute from the parent scope" and
# the task of "exposing a formal parameter in .override".
# TODO: 2. We should probably abandon attributes such as `torchWithCuda` (etc.)
# as they routinely end up consuming the wrong arguments\
# (dependencies without cuda support).
# Instead we should rely on overlays and nixpkgsFun.
# (@SomeoneSerge)
_tritonEffective ? if cudaSupport then triton-cuda else triton,
triton-cuda,
# Unit tests
hypothesis,
psutil,
@ -95,6 +104,8 @@ let
;
inherit (cudaPackages) cudaFlags cudnn nccl;
triton = throw "python3Packages.torch: use _tritonEffective instead of triton to avoid divergence";
rocmPackages = rocmPackages_5;
setBool = v: if v then "1" else "0";
@ -240,6 +251,7 @@ buildPythonPackage rec {
# Allow setting PYTHON_LIB_REL_PATH with an environment variable.
# https://github.com/pytorch/pytorch/pull/128419
./passthrough-python-lib-rel-path.patch
./0001-cmake.py-propagate-cmakeFlags-from-environment.patch
]
++ lib.optionals cudaSupport [ ./fix-cmake-cuda-toolkit.patch ]
++ lib.optionals (stdenv.hostPlatform.isDarwin && stdenv.hostPlatform.isx86_64) [
@ -257,7 +269,18 @@ buildPythonPackage rec {
];
postPatch =
lib.optionalString rocmSupport ''
''
substituteInPlace cmake/public/cuda.cmake \
--replace-fail \
'message(FATAL_ERROR "Found two conflicting CUDA' \
'message(WARNING "Found two conflicting CUDA' \
--replace-warn \
"set(CUDAToolkit_ROOT" \
"# Upstream: set(CUDAToolkit_ROOT"
substituteInPlace third_party/gloo/cmake/Cuda.cmake \
--replace-warn "find_package(CUDAToolkit 7.0" "find_package(CUDAToolkit"
''
+ lib.optionalString rocmSupport ''
# https://github.com/facebookincubator/gloo/pull/297
substituteInPlace third_party/gloo/cmake/Hipify.cmake \
--replace "\''${HIPIFY_COMMAND}" "python \''${HIPIFY_COMMAND}"
@ -351,6 +374,17 @@ buildPythonPackage rec {
# NB technical debt: building without NNPACK as workaround for missing `six`
USE_NNPACK = 0;
cmakeFlags =
[
# (lib.cmakeBool "CMAKE_FIND_DEBUG_MODE" true)
(lib.cmakeFeature "CUDAToolkit_VERSION" cudaPackages.cudaVersion)
]
++ lib.optionals cudaSupport [
# Unbreaks version discovery in enable_language(CUDA) when wrapping nvcc with ccache
# Cf. https://gitlab.kitware.com/cmake/cmake/-/issues/26363
(lib.cmakeFeature "CMAKE_CUDA_COMPILER_TOOLKIT_VERSION" cudaPackages.cudaVersion)
];
preBuild = ''
export MAX_JOBS=$NIX_BUILD_CORES
${python.pythonOnBuildForHost.interpreter} setup.py build --cmake-only
@ -495,11 +529,11 @@ buildPythonPackage rec {
++ lib.optionals (cudaSupport || rocmSupport) [ effectiveMagma ]
++ lib.optionals stdenv.hostPlatform.isLinux [ numactl ]
++ lib.optionals stdenv.hostPlatform.isDarwin [
Accelerate
CoreServices
libobjc
darwin.apple_sdk.frameworks.Accelerate
darwin.apple_sdk.frameworks.CoreServices
darwin.libobjc
]
++ lib.optionals tritonSupport [ triton ]
++ lib.optionals tritonSupport [ _tritonEffective ]
++ lib.optionals MPISupport [ mpi ]
++ lib.optionals rocmSupport [ rocmtoolkit_joined ];
@ -527,7 +561,7 @@ buildPythonPackage rec {
# torch/csrc requires `pybind11` at runtime
pybind11
] ++ lib.optionals tritonSupport [ triton ];
] ++ lib.optionals tritonSupport [ _tritonEffective ];
propagatedCxxBuildInputs =
[ ] ++ lib.optionals MPISupport [ mpi ] ++ lib.optionals rocmSupport [ rocmtoolkit_joined ];
@ -662,7 +696,9 @@ buildPythonPackage rec {
thoughtpolice
tscholak
]; # tscholak esp. for darwin-related builds
platforms = with lib.platforms; linux ++ lib.optionals (!cudaSupport && !rocmSupport) darwin;
platforms =
lib.platforms.linux
++ lib.optionals (!cudaSupport && !rocmSupport) lib.platforms.darwin;
broken = builtins.any trivial.id (builtins.attrValues brokenConditions);
};
}

View File

@ -1,15 +0,0 @@
diff --git a/python/setup.py b/python/setup.py
index 18764ec13..b3bb5b60a 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -269,10 +269,6 @@ class CMakeBuild(build_ext):
subprocess.check_call(["cmake", self.base_dir] + cmake_args, cwd=cmake_dir, env=env)
subprocess.check_call(["cmake", "--build", "."] + build_args, cwd=cmake_dir)
-
-download_and_copy_ptxas()
-
-
setup(
name="triton",
version="2.1.0",

View File

@ -0,0 +1,35 @@
From 2751c5de5c61c90b56e3e392a41847f4c47258fd Mon Sep 17 00:00:00 2001
From: SomeoneSerge <else+aalto@someonex.net>
Date: Sun, 13 Oct 2024 14:16:48 +0000
Subject: [PATCH 1/3] _build: allow extra cc flags
---
python/triton/runtime/build.py | 10 +++++++++-
1 file changed, 9 insertions(+), 1 deletion(-)
diff --git a/python/triton/runtime/build.py b/python/triton/runtime/build.py
index d7baeb286..d334dce77 100644
--- a/python/triton/runtime/build.py
+++ b/python/triton/runtime/build.py
@@ -42,9 +42,17 @@ def _build(name, src, srcdir, library_dirs, include_dirs, libraries):
py_include_dir = sysconfig.get_paths(scheme=scheme)["include"]
include_dirs = include_dirs + [srcdir, py_include_dir]
cc_cmd = [cc, src, "-O3", "-shared", "-fPIC", "-o", so]
+
+ # Nixpkgs support branch
+ # Allows passing e.g. extra -Wl,-rpath
+ cc_cmd_extra_flags = "@ccCmdExtraFlags@"
+ if cc_cmd_extra_flags != ("@" + "ccCmdExtraFlags@"): # substituteAll hack
+ import shlex
+ cc_cmd.extend(shlex.split(cc_cmd_extra_flags))
+
cc_cmd += [f'-l{lib}' for lib in libraries]
cc_cmd += [f"-L{dir}" for dir in library_dirs]
- cc_cmd += [f"-I{dir}" for dir in include_dirs]
+ cc_cmd += [f"-I{dir}" for dir in include_dirs if dir is not None]
ret = subprocess.check_call(cc_cmd)
if ret == 0:
return so
--
2.46.0
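The @ccCmdExtraFlags@ marker above is filled in by Nix's substituteAll; the patch reassembles the literal marker at runtime so that an unsubstituted copy of the file leaves the compiler command untouched. A hedged sketch of that idiom (the flag values are examples, not the real substitution):

import shlex

def extend_cc_cmd(cc_cmd, substituted_value):
    # "@" + "ccCmdExtraFlags@" is split so substituteAll cannot rewrite
    # this comparison string itself.
    if substituted_value != ("@" + "ccCmdExtraFlags@"):
        cc_cmd.extend(shlex.split(substituted_value))
    return cc_cmd

print(extend_cc_cmd(["cc", "-O3"], "@" + "ccCmdExtraFlags@"))             # no substitution: unchanged
print(extend_cc_cmd(["cc", "-O3"], "-Wl,-rpath,/run/opengl-driver/lib"))  # substituted: flags appended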

View File

@ -1,27 +0,0 @@
From 10f3d49aa6084d1b9b9624017cce7df106b9fb7e Mon Sep 17 00:00:00 2001
From: Yaroslav Bolyukin <iam@lach.pw>
Date: Tue, 6 Feb 2024 13:51:28 +0100
Subject: [PATCH] ptxas: disable version key for non-cuda targets
---
python/triton/runtime/jit.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/python/triton/runtime/jit.py b/python/triton/runtime/jit.py
index d55972b4b..bd875a701 100644
--- a/python/triton/runtime/jit.py
+++ b/python/triton/runtime/jit.py
@@ -117,8 +117,8 @@ def version_key():
with open(lib.module_finder.find_spec(lib.name).origin, "rb") as f:
contents += [hashlib.md5(f.read()).hexdigest()]
# ptxas version
- ptxas = path_to_ptxas()[0]
- ptxas_version = hashlib.md5(subprocess.check_output([ptxas, "--version"])).hexdigest()
+ # ptxas = path_to_ptxas()[0]
+ ptxas_version = "noptxas"
return '-'.join(TRITON_VERSION) + '-' + ptxas_version + '-' + '-'.join(contents)
--
2.43.0

View File

@ -0,0 +1,64 @@
From 587d1f3428eca63544238802f19e0be670d03244 Mon Sep 17 00:00:00 2001
From: SomeoneSerge <else@someonex.net>
Date: Mon, 29 Jul 2024 14:31:11 +0000
Subject: [PATCH] setup.py: introduce TRITON_OFFLINE_BUILD
To prevent any vendoring whatsoever
---
python/setup.py | 26 ++++++++++++++++++++++++--
1 file changed, 24 insertions(+), 2 deletions(-)
diff --git a/python/setup.py b/python/setup.py
index 73800ec40..4e5b04de4 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -112,6 +112,20 @@ def get_env_with_keys(key: list):
return os.environ[k]
return ""
+def is_offline_build() -> bool:
+ """
+ Downstream projects and distributions which bootstrap their own dependencies from scratch
+ and run builds in offline sandboxes
+ may set `TRITON_OFFLINE_BUILD` in the build environment to prevent any attempts at downloading
+ pinned dependencies from the internet or at using dependencies vendored in-tree.
+
+ Dependencies must be defined using respective search paths (cf. `syspath_var_name` in `Package`).
+ Missing dependencies lead to an early abortion.
+ Dependencies' compatibility is not verified.
+
+ Note that this flag isn't tested by the CI and does not provide any guarantees.
+ """
+ return os.environ.get("TRITON_OFFLINE_BUILD", "") != ""
# --- third party packages -----
@@ -220,8 +234,14 @@ def get_thirdparty_packages(packages: list):
if os.environ.get(p.syspath_var_name):
package_dir = os.environ[p.syspath_var_name]
version_file_path = os.path.join(package_dir, "version.txt")
- if p.syspath_var_name not in os.environ and\
- (not os.path.exists(version_file_path) or Path(version_file_path).read_text() != p.url):
+
+ input_defined = p.syspath_var_name not in os.environ
+ input_exists = input_defined and os.path.exists(version_file_path)
+ input_compatible = input_exists and Path(version_file_path).read_text() == p.url
+
+ if is_offline_build() and not input_defined:
+ raise RuntimeError(f"Requested an offline build but {p.syspath_var_name} is not set")
+ if not is_offline_build() and not input_compatible:
with contextlib.suppress(Exception):
shutil.rmtree(package_root_dir)
os.makedirs(package_root_dir, exist_ok=True)
@@ -245,6 +265,8 @@ def get_thirdparty_packages(packages: list):
def download_and_copy(name, src_path, variable, version, url_func):
+ if is_offline_build():
+ return
triton_cache_path = get_triton_cache_path()
if variable in os.environ:
return
--
2.45.1
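As the docstring above explains, an offline build must point every third-party dependency at an existing path through its search-path variable instead of letting setup.py download or vendor it. A condensed sketch of the resulting control flow, assuming a simple package object with a syspath_var_name attribute as in the patch:

import os

def is_offline_build() -> bool:
    return os.environ.get("TRITON_OFFLINE_BUILD", "") != ""

def resolve_thirdparty(p):
    """Return the dependency directory, or fail fast in an offline sandbox."""
    package_dir = os.environ.get(p.syspath_var_name)
    if is_offline_build():
        if not package_dir:
            raise RuntimeError(
                f"Requested an offline build but {p.syspath_var_name} is not set")
        return package_dir  # compatibility with the pinned URL is not verified
    return package_dir      # online builds may still fall back to downloading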

View File

@ -0,0 +1,70 @@
From 7407cb03eec82768e333909d87b7668b633bfe86 Mon Sep 17 00:00:00 2001
From: SomeoneSerge <else+aalto@someonex.net>
Date: Sun, 13 Oct 2024 14:28:48 +0000
Subject: [PATCH 2/3] {nvidia,amd}/driver: short-circuit before ldconfig
---
python/triton/runtime/build.py | 6 +++---
third_party/amd/backend/driver.py | 7 +++++++
third_party/nvidia/backend/driver.py | 3 +++
3 files changed, 13 insertions(+), 3 deletions(-)
diff --git a/python/triton/runtime/build.py b/python/triton/runtime/build.py
index d334dce77..a64e98da0 100644
--- a/python/triton/runtime/build.py
+++ b/python/triton/runtime/build.py
@@ -42,6 +42,9 @@ def _build(name, src, srcdir, library_dirs, include_dirs, libraries):
py_include_dir = sysconfig.get_paths(scheme=scheme)["include"]
include_dirs = include_dirs + [srcdir, py_include_dir]
cc_cmd = [cc, src, "-O3", "-shared", "-fPIC", "-o", so]
+ cc_cmd += [f'-l{lib}' for lib in libraries]
+ cc_cmd += [f"-L{dir}" for dir in library_dirs]
+ cc_cmd += [f"-I{dir}" for dir in include_dirs if dir is not None]
# Nixpkgs support branch
# Allows passing e.g. extra -Wl,-rpath
@@ -50,9 +53,6 @@ def _build(name, src, srcdir, library_dirs, include_dirs, libraries):
import shlex
cc_cmd.extend(shlex.split(cc_cmd_extra_flags))
- cc_cmd += [f'-l{lib}' for lib in libraries]
- cc_cmd += [f"-L{dir}" for dir in library_dirs]
- cc_cmd += [f"-I{dir}" for dir in include_dirs if dir is not None]
ret = subprocess.check_call(cc_cmd)
if ret == 0:
return so
diff --git a/third_party/amd/backend/driver.py b/third_party/amd/backend/driver.py
index 0a8cd7bed..aab8805f6 100644
--- a/third_party/amd/backend/driver.py
+++ b/third_party/amd/backend/driver.py
@@ -24,6 +24,13 @@ def _get_path_to_hip_runtime_dylib():
return env_libhip_path
raise RuntimeError(f"TRITON_LIBHIP_PATH '{env_libhip_path}' does not point to a valid {lib_name}")
+ # ...on release/3.1.x:
+ # return mmapped_path
+ # raise RuntimeError(f"memory mapped '{mmapped_path}' in process does not point to a valid {lib_name}")
+
+ if os.path.isdir("@libhipDir@"):
+ return ["@libhipDir@"]
+
paths = []
import site
diff --git a/third_party/nvidia/backend/driver.py b/third_party/nvidia/backend/driver.py
index 90f71138b..30fbadb2a 100644
--- a/third_party/nvidia/backend/driver.py
+++ b/third_party/nvidia/backend/driver.py
@@ -21,6 +21,9 @@ def libcuda_dirs():
if env_libcuda_path:
return [env_libcuda_path]
+ if os.path.exists("@libcudaStubsDir@"):
+ return ["@libcudaStubsDir@"]
+
libs = subprocess.check_output(["/sbin/ldconfig", "-p"]).decode()
# each line looks like the following:
# libcuda.so.1 (libc6,x86-64) => /lib/x86_64-linux-gnu/libcuda.so.1
--
2.46.0
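The two driver patches above reorder the library lookup so that an explicit environment variable wins, then the path substituted in from the Nix store, and only then the original /sbin/ldconfig scan. A rough sketch of that precedence for the CUDA side; TRITON_LIBCUDA_PATH and the ldconfig fallback come from upstream, the parsing below is simplified, and "@libcudaStubsDir@" stands for the substituted store path:

import os
import subprocess

def libcuda_dirs():
    env_path = os.environ.get("TRITON_LIBCUDA_PATH")
    if env_path:
        return [env_path]                        # 1. explicit override
    if os.path.exists("@libcudaStubsDir@"):
        return ["@libcudaStubsDir@"]             # 2. Nix-substituted stub dir
    out = subprocess.check_output(["/sbin/ldconfig", "-p"]).decode()
    return sorted({os.path.dirname(line.split()[-1])      # 3. system scan
                   for line in out.splitlines() if "libcuda.so" in line})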

View File

@ -0,0 +1,46 @@
From 6f92d54e5a544bc34bb07f2808d554a71cc0e4c3 Mon Sep 17 00:00:00 2001
From: SomeoneSerge <else+aalto@someonex.net>
Date: Sun, 13 Oct 2024 14:30:19 +0000
Subject: [PATCH 3/3] nvidia: cudart a systempath
---
third_party/nvidia/backend/driver.c | 2 +-
third_party/nvidia/backend/driver.py | 5 +++--
2 files changed, 4 insertions(+), 3 deletions(-)
diff --git a/third_party/nvidia/backend/driver.c b/third_party/nvidia/backend/driver.c
index 44524da27..fbdf0d156 100644
--- a/third_party/nvidia/backend/driver.c
+++ b/third_party/nvidia/backend/driver.c
@@ -1,4 +1,4 @@
-#include "cuda.h"
+#include <cuda.h>
#include <dlfcn.h>
#include <stdbool.h>
#define PY_SSIZE_T_CLEAN
diff --git a/third_party/nvidia/backend/driver.py b/third_party/nvidia/backend/driver.py
index 30fbadb2a..65c0562ed 100644
--- a/third_party/nvidia/backend/driver.py
+++ b/third_party/nvidia/backend/driver.py
@@ -10,7 +10,8 @@ from triton.backends.compiler import GPUTarget
from triton.backends.driver import GPUDriver
dirname = os.path.dirname(os.path.realpath(__file__))
-include_dir = [os.path.join(dirname, "include")]
+import shlex
+include_dir = [*shlex.split("@cudaToolkitIncludeDirs@"), os.path.join(dirname, "include")]
libdevice_dir = os.path.join(dirname, "lib")
libraries = ['cuda']
@@ -149,7 +150,7 @@ def make_launcher(constants, signature, ids):
# generate glue code
params = [i for i in signature.keys() if i not in constants]
src = f"""
-#include \"cuda.h\"
+#include <cuda.h>
#include <stdbool.h>
#include <Python.h>
#include <dlfcn.h>
--
2.46.0

View File

@ -0,0 +1,26 @@
From e503e572b6d444cd27f1cdf124aaf553aa3a8665 Mon Sep 17 00:00:00 2001
From: SomeoneSerge <else+aalto@someonex.net>
Date: Mon, 14 Oct 2024 00:12:05 +0000
Subject: [PATCH 4/4] nvidia: allow static ptxas path
---
third_party/nvidia/backend/compiler.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/third_party/nvidia/backend/compiler.py b/third_party/nvidia/backend/compiler.py
index 6d7994923..6720e8f97 100644
--- a/third_party/nvidia/backend/compiler.py
+++ b/third_party/nvidia/backend/compiler.py
@@ -20,6 +20,9 @@ def _path_to_binary(binary: str):
os.path.join(os.path.dirname(__file__), "bin", binary),
]
+ import shlex
+ paths.extend(shlex.split("@nixpkgsExtraBinaryPaths@"))
+
for bin in paths:
if os.path.exists(bin) and os.path.isfile(bin):
result = subprocess.check_output([bin, "--version"], stderr=subprocess.STDOUT)
--
2.46.0
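On the Nix side (see the triton expression later in this commit) nixpkgsExtraBinaryPaths is produced with lib.escapeShellArgs, and the patched compiler.py recovers the list with shlex.split, so the pair round-trips arbitrary paths safely. A tiny illustration with made-up paths:

import shlex

# Roughly what lib.escapeShellArgs [ ".../bin/ptxas" ... ] might produce
# (single-quoted words), and how shlex.split recovers the list.
substituted = "'/nix/store/abc-cuda_nvcc-12.4/bin/ptxas' '/opt/odd path/ptxas'"
print(shlex.split(substituted))
# -> ['/nix/store/abc-cuda_nvcc-12.4/bin/ptxas', '/opt/odd path/ptxas']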

View File

@ -5,11 +5,8 @@
cudaPackages,
buildPythonPackage,
fetchurl,
isPy38,
isPy39,
isPy310,
isPy311,
python,
pythonOlder,
autoPatchelfHook,
filelock,
lit,
@ -29,7 +26,7 @@ buildPythonPackage rec {
in
fetchurl srcs;
disabled = !(isPy38 || isPy39 || isPy310 || isPy311);
disabled = pythonOlder "3.8";
pythonRemoveDeps = [
"cmake"

View File

@ -1,111 +1,92 @@
{
lib,
config,
addDriverRunpath,
buildPythonPackage,
fetchFromGitHub,
fetchpatch,
setuptools,
cmake,
ninja,
pybind11,
config,
cudaPackages,
fetchFromGitHub,
filelock,
gtest,
zlib,
ncurses,
libxml2,
lit,
llvm,
filelock,
torchWithRocm,
ncurses,
ninja,
pybind11,
python,
runCommand,
cudaPackages,
pytestCheckHook,
stdenv,
substituteAll,
setuptools,
torchWithRocm,
zlib,
cudaSupport ? config.cudaSupport,
rocmSupport ? config.rocmSupport,
rocmPackages,
triton,
}:
let
ptxas = lib.getExe' cudaPackages.cuda_nvcc "ptxas"; # Make sure cudaPackages is the right version each update (See python/setup.py)
in
buildPythonPackage rec {
buildPythonPackage {
pname = "triton";
version = "2.1.0";
version = "3.0.0";
pyproject = true;
src = fetchFromGitHub {
owner = "openai";
repo = pname;
rev = "v${version}";
hash = "sha256-8UTUwLH+SriiJnpejdrzz9qIquP2zBp1/uwLdHmv0XQ=";
owner = "triton-lang";
repo = "triton";
# latest branch commit from https://github.com/triton-lang/triton/commits/release/3.0.x/
rev = "91f24d87e50cb748b121a6c24e65a01187699c22";
hash = "sha256-L5KqiR+TgSyKjEBlkE0yOU1pemMHFk2PhEmxLdbbxUU=";
};
patches =
[
# fix overflow error
(fetchpatch {
url = "https://github.com/openai/triton/commit/52c146f66b79b6079bcd28c55312fc6ea1852519.patch";
hash = "sha256-098/TCQrzvrBAbQiaVGCMaF3o5Yc3yWDxzwSkzIuAtY=";
./0001-setup.py-introduce-TRITON_OFFLINE_BUILD.patch
(substituteAll {
src = ./0001-_build-allow-extra-cc-flags.patch;
ccCmdExtraFlags = "-Wl,-rpath,${addDriverRunpath.driverLink}/lib";
})
# Upstream startded pinning CUDA version and falling back to downloading from Conda
# in https://github.com/triton-lang/triton/pull/1574/files#diff-eb8b42d9346d0a5d371facf21a8bfa2d16fb49e213ae7c21f03863accebe0fcfR120-R123
./0000-dont-download-ptxas.patch
(substituteAll (
{
src = ./0002-nvidia-amd-driver-short-circuit-before-ldconfig.patch;
}
// lib.optionalAttrs rocmSupport { libhipDir = "${lib.getLib rocmPackages.clr}/lib"; }
// lib.optionalAttrs cudaSupport {
libcudaStubsDir = "${lib.getLib cudaPackages.cuda_cudart}/lib/stubs";
ccCmdExtraFlags = "-Wl,-rpath,${addDriverRunpath.driverLink}/lib";
}
))
]
++ lib.optionals (!cudaSupport) [
# triton wants to get ptxas version even if ptxas is not
# used, resulting in ptxas not found error.
./0001-ptxas-disable-version-key-for-non-cuda-targets.patch
++ lib.optionals cudaSupport [
(substituteAll {
src = ./0003-nvidia-cudart-a-systempath.patch;
cudaToolkitIncludeDirs = "${lib.getInclude cudaPackages.cuda_cudart}/include";
})
(substituteAll {
src = ./0004-nvidia-allow-static-ptxas-path.patch;
nixpkgsExtraBinaryPaths = lib.escapeShellArgs [ (lib.getExe' cudaPackages.cuda_nvcc "ptxas") ];
})
];
postPatch =
let
quote = x: ''"${x}"'';
subs.ldFlags =
let
# Bash was getting weird without linting,
# but basically upstream contains [cc, ..., "-lcuda", ...]
# and we replace it with [..., "-lcuda", "-L/run/opengl-driver/lib", "-L$stubs", ...]
old = [ "-lcuda" ];
new = [
"-lcuda"
"-L${addDriverRunpath.driverLink}"
"-L${cudaPackages.cuda_cudart}/lib/stubs/"
];
in
{
oldStr = lib.concatMapStringsSep ", " quote old;
newStr = lib.concatMapStringsSep ", " quote new;
};
in
''
# Use our `cmakeFlags` instead and avoid downloading dependencies
substituteInPlace python/setup.py \
--replace "= get_thirdparty_packages(triton_cache_path)" "= os.environ[\"cmakeFlags\"].split()"
postPatch = ''
# Use our `cmakeFlags` instead and avoid downloading dependencies
# remove any downloads
substituteInPlace python/setup.py \
--replace-fail "get_json_package_info(), get_pybind11_package_info()" ""\
--replace-fail "get_pybind11_package_info(), get_llvm_package_info()" ""\
--replace-fail 'packages += ["triton/profiler"]' ""\
--replace-fail "curr_version != version" "False"
# Already defined in llvm, when built with -DLLVM_INSTALL_UTILS
substituteInPlace bin/CMakeLists.txt \
--replace "add_subdirectory(FileCheck)" ""
# Don't fetch googletest
substituteInPlace unittest/CMakeLists.txt \
--replace-fail "include (\''${CMAKE_CURRENT_SOURCE_DIR}/googletest.cmake)" ""\
--replace-fail "include(GoogleTest)" "find_package(GTest REQUIRED)"
'';
# Don't fetch googletest
substituteInPlace unittest/CMakeLists.txt \
--replace "include (\''${CMAKE_CURRENT_SOURCE_DIR}/googletest.cmake)" ""\
--replace "include(GoogleTest)" "find_package(GTest REQUIRED)"
cat << \EOF >> python/triton/common/build.py
def libcuda_dirs():
return [ "${addDriverRunpath.driverLink}/lib" ]
EOF
''
+ lib.optionalString cudaSupport ''
# Use our linker flags
substituteInPlace python/triton/common/build.py \
--replace '${subs.ldFlags.oldStr}' '${subs.ldFlags.newStr}'
'';
build-system = [ setuptools ];
nativeBuildInputs = [
setuptools
# pytestCheckHook # Requires torch (circular dependency) and probably needs GPUs:
cmake
ninja
@ -125,7 +106,7 @@ buildPythonPackage rec {
zlib
];
propagatedBuildInputs = [
dependencies = [
filelock
# triton uses setuptools at runtime:
# https://github.com/NixOS/nixpkgs/pull/286763/#discussion_r1480392652
@ -139,76 +120,42 @@ buildPythonPackage rec {
];
# Avoid GLIBCXX mismatch with other cuda-enabled python packages
preConfigure =
''
# Ensure that the build process uses the requested number of cores
export MAX_JOBS="$NIX_BUILD_CORES"
preConfigure = ''
# Ensure that the build process uses the requested number of cores
export MAX_JOBS="$NIX_BUILD_CORES"
# Upstream's setup.py tries to write cache somewhere in ~/
export HOME=$(mktemp -d)
# Upstream's setup.py tries to write cache somewhere in ~/
export HOME=$(mktemp -d)
# Upstream's github actions patch setup.cfg to write base-dir. May be redundant
echo "
[build_ext]
base-dir=$PWD" >> python/setup.cfg
# Upstream's github actions patch setup.cfg to write base-dir. May be redundant
echo "
[build_ext]
base-dir=$PWD" >> python/setup.cfg
# The rest (including buildPhase) is relative to ./python/
cd python
''
+ lib.optionalString cudaSupport ''
export CC=${cudaPackages.backendStdenv.cc}/bin/cc;
export CXX=${cudaPackages.backendStdenv.cc}/bin/c++;
# Work around download_and_copy_ptxas()
mkdir -p $PWD/triton/third_party/cuda/bin
ln -s ${ptxas} $PWD/triton/third_party/cuda/bin
'';
# CMake is run by setup.py instead
dontUseCmakeConfigure = true;
# Setuptools (?) strips runpath and +x flags. Let's just restore the symlink
postFixup = lib.optionalString cudaSupport ''
rm -f $out/${python.sitePackages}/triton/third_party/cuda/bin/ptxas
ln -s ${ptxas} $out/${python.sitePackages}/triton/third_party/cuda/bin/ptxas
# The rest (including buildPhase) is relative to ./python/
cd python
'';
checkInputs = [ cmake ]; # ctest
dontUseSetuptoolsCheck = true;
env =
{
TRITON_BUILD_PROTON = "OFF";
TRITON_OFFLINE_BUILD = true;
}
// lib.optionalAttrs cudaSupport {
CC = lib.getExe' cudaPackages.backendStdenv.cc "cc";
CXX = lib.getExe' cudaPackages.backendStdenv.cc "c++";
preCheck = ''
# build/temp* refers to build_ext.build_temp (looked up in the build logs)
(cd ./build/temp* ; ctest)
# For pytestCheckHook
cd test/unit
'';
# Circular dependency on torch
# pythonImportsCheck = [
# "triton"
# "triton.language"
# ];
# Ultimately, torch is our test suite:
passthru.tests = {
inherit torchWithRocm;
# Implemented as alternative to pythonImportsCheck, in case if circular dependency on torch occurs again,
# and pythonImportsCheck is commented back.
import-triton =
runCommand "import-triton"
{ nativeBuildInputs = [ (python.withPackages (ps: [ ps.triton ])) ]; }
''
python << \EOF
import triton
import triton.language
EOF
touch "$out"
'';
};
# TODO: Unused because of how TRITON_OFFLINE_BUILD currently works (subject to change)
TRITON_PTXAS_PATH = lib.getExe' cudaPackages.cuda_nvcc "ptxas"; # Make sure cudaPackages is the right version each update (See python/setup.py)
TRITON_CUOBJDUMP_PATH = lib.getExe' cudaPackages.cuda_cuobjdump "cuobjdump";
TRITON_NVDISASM_PATH = lib.getExe' cudaPackages.cuda_nvdisasm "nvdisasm";
TRITON_CUDACRT_PATH = lib.getInclude cudaPackages.cuda_nvcc;
TRITON_CUDART_PATH = lib.getInclude cudaPackages.cuda_cudart;
TRITON_CUPTI_PATH = cudaPackages.cuda_cupti;
};
pythonRemoveDeps = [
# Circular dependency, cf. https://github.com/openai/triton/issues/1374
# Circular dependency, cf. https://github.com/triton-lang/triton/issues/1374
"torch"
# CLI tools without dist-info
@ -216,14 +163,117 @@ buildPythonPackage rec {
"lit"
];
# CMake is run by setup.py instead
dontUseCmakeConfigure = true;
nativeCheckInputs = [ cmake ];
preCheck = ''
# build/temp* refers to build_ext.build_temp (looked up in the build logs)
(cd ./build/temp* ; ctest)
'';
pythonImportsCheck = [
"triton"
"triton.language"
];
passthru.gpuCheck = stdenv.mkDerivation {
pname = "triton-pytest";
inherit (triton) version src;
requiredSystemFeatures = [ "cuda" ];
nativeBuildInputs = [
(python.withPackages (ps: [
ps.scipy
ps.torchWithCuda
ps.triton-cuda
]))
];
dontBuild = true;
nativeCheckInputs = [ pytestCheckHook ];
doCheck = true;
preCheck = ''
cd python/test/unit
export HOME=$TMPDIR
'';
checkPhase = "pytestCheckPhase";
installPhase = "touch $out";
};
passthru.tests = {
# Ultimately, torch is our test suite:
inherit torchWithRocm;
# Test as `nix run -f "<nixpkgs>" python3Packages.triton.tests.axpy-cuda`
# or, using `programs.nix-required-mounts`, as `nix build -f "<nixpkgs>" python3Packages.triton.tests.axpy-cuda.gpuCheck`
axpy-cuda =
cudaPackages.writeGpuTestPython
{
libraries = ps: [
ps.triton
ps.torch-no-triton
];
}
''
# Adopted from Philippe Tillet https://triton-lang.org/main/getting-started/tutorials/01-vector-add.html
import triton
import triton.language as tl
import torch
import os
@triton.jit
def axpy_kernel(n, a: tl.constexpr, x_ptr, y_ptr, out, BLOCK_SIZE: tl.constexpr):
pid = tl.program_id(axis=0)
block_start = pid * BLOCK_SIZE
offsets = block_start + tl.arange(0, BLOCK_SIZE)
mask = offsets < n
x = tl.load(x_ptr + offsets, mask=mask)
y = tl.load(y_ptr + offsets, mask=mask)
output = a * x + y
tl.store(out + offsets, output, mask=mask)
def axpy(a, x, y):
output = torch.empty_like(x)
assert x.is_cuda and y.is_cuda and output.is_cuda
n_elements = output.numel()
def grid(meta):
return (triton.cdiv(n_elements, meta['BLOCK_SIZE']), )
axpy_kernel[grid](n_elements, a, x, y, output, BLOCK_SIZE=1024)
return output
if __name__ == "__main__":
if os.environ.get("HOME", None) == "/homeless-shelter":
os.environ["HOME"] = os.environ.get("TMPDIR", "/tmp")
if "CC" not in os.environ:
os.environ["CC"] = "${lib.getExe' cudaPackages.backendStdenv.cc "cc"}"
torch.manual_seed(0)
size = 12345
x = torch.rand(size, device='cuda')
y = torch.rand(size, device='cuda')
output_torch = 3.14 * x + y
output_triton = axpy(3.14, x, y)
assert output_torch.sub(output_triton).abs().max().item() < 1e-6
print("Triton axpy: OK")
'';
};
meta = with lib; {
description = "Language and compiler for writing highly efficient custom Deep-Learning primitives";
homepage = "https://github.com/openai/triton";
homepage = "https://github.com/triton-lang/triton";
platforms = platforms.linux;
license = licenses.mit;
maintainers = with maintainers; [
SomeoneSerge
Madouura
derdennisop
];
};
}

View File

@ -1,40 +0,0 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p nix-prefetch-scripts
set -eou pipefail
version=$1
linux_bucket="https://download.pytorch.org/whl"
url_and_key_list=(
"x86_64-linux-38 $linux_bucket/triton-${version}-0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl triton-${version}-cp38-cp38-linux_x86_64.whl"
"x86_64-linux-39 $linux_bucket/triton-${version}-0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl triton-${version}-cp39-cp39-linux_x86_64.whl"
"x86_64-linux-310 $linux_bucket/triton-${version}-0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl triton-${version}-cp310-cp310-linux_x86_64.whl"
"x86_64-linux-311 $linux_bucket/triton-${version}-0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl triton-${version}-cp311-cp311-linux_x86_64.whl"
)
hashfile=binary-hashes-"$version".nix
echo " \"$version\" = {" >> $hashfile
for url_and_key in "${url_and_key_list[@]}"; do
key=$(echo "$url_and_key" | cut -d' ' -f1)
url=$(echo "$url_and_key" | cut -d' ' -f2)
name=$(echo "$url_and_key" | cut -d' ' -f3)
echo "prefetching ${url}..."
hash=$(nix hash to-sri --type sha256 `nix-prefetch-url "$url" --name "$name"`)
cat << EOF >> $hashfile
$key = {
name = "$name";
url = "$url";
hash = "$hash";
};
EOF
echo
done
echo " };" >> $hashfile
echo "done."

File diff suppressed because it is too large

View File

@ -2,7 +2,6 @@
, lib
, Security
, openssl
, git
, pkg-config
, protobuf
, rustPlatform
@ -14,13 +13,13 @@
# function correctly.
rustPlatform.buildRustPackage rec {
pname = "prisma-engines";
version = "5.18.0";
version = "5.21.0";
src = fetchFromGitHub {
owner = "prisma";
repo = "prisma-engines";
rev = version;
hash = "sha256-ucAOz00dBgX2Bb63ueaBbyu1XtVQD+96EncUyo7STwA=";
hash = "sha256-X5aBrnyZ/tMykJFifyY1LeR/nShBlxm9HazVE0L7RJk=";
};
# Use system openssl.
@ -33,21 +32,23 @@ rustPlatform.buildRustPackage rec {
"cuid-1.3.2" = "sha256-qBu1k/dJiA6rWBwk4nOOqouIneD9h2TTBT8tvs0TDfA=";
"graphql-parser-0.3.0" = "sha256-0ZAsj2mW6fCLhwTETucjbu4rPNzfbNiHu2wVTBlTNe4=";
"mysql_async-0.31.3" = "sha256-2wOupQ/LFV9pUifqBLwTvA0tySv+XWbxHiqs7iTzvvg=";
"postgres-native-tls-0.5.0" = "sha256-UYPsxhCkXXWk8yPbqjNS0illwjS5mVm3Z/jFwpVwqfw=";
"postgres-native-tls-0.5.0" = "sha256-4CftieImsG2mBqpoJFfyq0R2yd2EyQX4oddAwyXMDZc=";
"mongodb-3.0.0" = "sha256-1WQgY0zSZhFjt1nrLYTUBrpqBxpCCgKRSeGJLtkE6pw=";
};
};
nativeBuildInputs = [ pkg-config git ];
nativeBuildInputs = [ pkg-config ];
buildInputs = [
openssl
protobuf
] ++ lib.optionals stdenv.hostPlatform.isDarwin [ Security ];
# FIXME: Workaround Rust 1.80 support by updating time to 0.3.36
# https://github.com/prisma/prisma-engines/issues/4989
# FIXME: fix this upstream and remove this patch with the next version update.
postPatch = ''
ln -sfn ${./Cargo.lock} Cargo.lock
file=libs/user-facing-errors/src/schema_engine.rs
echo "#![allow(dead_code)]" | cat - $file > $file.new
mv $file.new $file
'';
preBuild = ''
@ -59,6 +60,8 @@ rustPlatform.buildRustPackage rec {
export SQLITE_MAX_VARIABLE_NUMBER=250000
export SQLITE_MAX_EXPR_DEPTH=10000
export GIT_HASH=0000000000000000000000000000000000000000
'';
cargoBuildFlags = [

View File

@ -5,16 +5,16 @@
buildNpmPackage rec {
pname = "jake";
version = "10.8.7";
version = "10.9.1";
src = fetchFromGitHub {
owner = "jakejs";
repo = "jake";
rev = "v${version}";
hash = "sha256-Qado9huQx9MVTFp8t7szB+IUVNWQqT/ni62JnURQqeM=";
hash = "sha256-rYWr/ACr14/WE88Gk6Kpyl2pq1XRHSfZGXHrwbGC8hQ=";
};
npmDepsHash = "sha256-3pOFrH/em/HMTswrZLAeqPAb9U0/odcZPt4AkQkMhZM=";
npmDepsHash = "sha256-BwOfPRiVMpFo9tG9oY2r82w2g3y/7sL3PD5epd2igmI=";
dontNpmBuild = true;

View File

@ -3,6 +3,8 @@
, fetchCrate
, pkg-config
, udev
, avrdude
, makeBinaryWrapper
, nix-update-script
, testers
, ravedude
@ -19,10 +21,14 @@ rustPlatform.buildRustPackage rec {
cargoHash = "sha256-HeFmQsgr6uHrWi6s5sMQ6n63a44Msarb5p0+wUzKFkE=";
nativeBuildInputs = [ pkg-config ];
nativeBuildInputs = [ pkg-config makeBinaryWrapper ];
buildInputs = [ udev ];
postInstall = ''
wrapProgram $out/bin/ravedude --suffix PATH : ${lib.makeBinPath [ avrdude ]}
'';
passthru = {
updateScript = nix-update-script { };
tests.version = testers.testVersion {
@ -36,7 +42,7 @@ rustPlatform.buildRustPackage rec {
homepage = "https://crates.io/crates/ravedude";
license = with licenses; [ mit /* or */ asl20 ];
platforms = platforms.linux;
maintainers = with maintainers; [ rvarago ];
maintainers = with maintainers; [ rvarago liff ];
mainProgram = "ravedude";
};
}

View File

@ -1,47 +0,0 @@
source $stdenv/setup
outp=$out/lib/steam-runtime
buildDir() {
paths="$1"
pkgs="$2"
for pkg in $pkgs; do
echo "adding package $pkg"
for path in $paths; do
if [ -d $pkg/$path ]; then
cd $pkg/$path
for file in *; do
found=""
for i in $paths; do
if [ -e "$outp/$i/$file" ]; then
found=1
break
fi
done
if [ -z "$found" ]; then
mkdir -p $outp/$path
ln -s "$pkg/$path/$file" $outp/$path
sovers=$(echo $file | perl -ne 'print if s/.*?\.so\.(.*)/\1/')
if [ ! -z "$sovers" ]; then
fname=''${file%.''${sovers}}
for ver in ''${sovers//./ }; do
found=""
for i in $paths; do
if [ -e "$outp/$i/$fname" ]; then
found=1
break
fi
done
[ -n "$found" ] || ln -s "$pkg/$path/$file" "$outp/$path/$fname"
fname="$fname.$ver"
done
fi
fi
done
fi
done
done
}
eval "$installPhase"

View File

@ -1,25 +0,0 @@
{ makeScopeWithSplicing', generateSplicesForMkScope
, stdenv
}:
let
steamPackagesFun = self: let
inherit (self) callPackage;
in rec {
steamArch = if stdenv.hostPlatform.system == "x86_64-linux" then "amd64"
else if stdenv.hostPlatform.system == "i686-linux" then "i386"
else throw "Unsupported platform: ${stdenv.hostPlatform.system}";
steam = callPackage ./steam.nix { };
steam-fhsenv = callPackage ./fhsenv.nix {};
# This has to exist so Hydra tries to build all of Steam's dependencies.
# FIXME: Maybe we should expose it as something more generic?
steam-fhsenv-without-steam = steam-fhsenv.override { steam = null; };
steamcmd = callPackage ./steamcmd.nix { };
};
in makeScopeWithSplicing' {
otherSplices = generateSplicesForMkScope "steamPackages";
f = steamPackagesFun;
}

View File

@ -46,6 +46,7 @@ stdenv.mkDerivation rec {
] ++ (with xorg; [
libX11
libXi
libXcursor
]));
desktopItems = [

View File

@ -130,9 +130,9 @@
"hash": "sha256-/L3qETy8AqWynHrVApoLxwXctp+3TTojZDfUebwrV2c="
},
"kwin": {
"version": "6.2.1",
"url": "mirror://kde/stable/plasma/6.2.1/kwin-6.2.1.tar.xz",
"hash": "sha256-R4pl0v2xeMHx4kzwy5spDirjfFeP2JwHjDORwz+JU50="
"version": "6.2.1.1",
"url": "mirror://kde/stable/plasma/6.2.1/kwin-6.2.1.1.tar.xz",
"hash": "sha256-Qqc6q2yExt/NdhNoajmkkDIMadbVk6PEwkV4xioZIeg="
},
"kwrited": {
"version": "6.2.1",
@ -270,9 +270,9 @@
"hash": "sha256-oKvYKyCe27g7Qx+GN2R4nGtS4WoXp3fA+Oz0rtWRFPw="
},
"plasma-workspace": {
"version": "6.2.1",
"url": "mirror://kde/stable/plasma/6.2.1/plasma-workspace-6.2.1.tar.xz",
"hash": "sha256-eKrVJIqTcaYHYLyz/HMsGWKqjGNLQkhHZJaJn4FUwi4="
"version": "6.2.1.1",
"url": "mirror://kde/stable/plasma/6.2.1/plasma-workspace-6.2.1.1.tar.xz",
"hash": "sha256-9tzQxRlttLYRsUPQUNwNYqMFxIlO/sEFzRz+jNnQ1QY="
},
"plasma-workspace-wallpapers": {
"version": "6.2.1",

View File

@ -135,13 +135,13 @@ index 4d31c6f408..17418b1ff7 100644
}
return p;
diff --git a/startkde/systemd/plasma-ksplash-ready.service.in b/startkde/systemd/plasma-ksplash-ready.service.in
index 0bd88e6c92..eb1e304d37 100644
index 1e903130a9..1d807a8526 100644
--- a/startkde/systemd/plasma-ksplash-ready.service.in
+++ b/startkde/systemd/plasma-ksplash-ready.service.in
@@ -6,5 +6,5 @@ PartOf=graphical-session.target
[Service]
Type=oneshot
-ExecStart=dbus-send --session --reply-timeout=1 --dest=org.kde.KSplash /KSplash org.kde.KSplash.setStage string:ready
+ExecStart=@dbus-send@ --session --reply-timeout=1 --dest=org.kde.KSplash /KSplash org.kde.KSplash.setStage string:ready
-ExecStart=dbus-send --session --reply-timeout=1 --type=method_call --dest=org.kde.KSplash /KSplash org.kde.KSplash.setStage string:ready
+ExecStart=@dbus-send@ --session --reply-timeout=1 --type=method_call --dest=org.kde.KSplash /KSplash org.kde.KSplash.setStage string:ready
Slice=session.slice

View File

@ -0,0 +1,13 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "bsull-console-datasource";
version = "1.0.1";
zipHash = "sha256-V6D/VIdwwQvG21nVMXD/xF86Uy8WRecL2RjyDTZr1wQ=";
meta = with lib; {
description = "This is a streaming Grafana data source which can connect to the Tokio console subscriber.";
license = licenses.asl20;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -0,0 +1,13 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "fetzerch-sunandmoon-datasource";
version = "0.3.3";
zipHash = "sha256-IJe1OiPt9MxqqPymuH0K27jToSb92M0P4XGZXvk0paE=";
meta = with lib; {
description = "SunAndMoon is a Datasource Plugin for Grafana that calculates the position of Sun and Moon as well as the Moon illumination using SunCalc.";
license = licenses.mit;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -0,0 +1,13 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "frser-sqlite-datasource";
version = "3.5.0";
zipHash = "sha256-BwAurFpMyyR318HMzVXCnOEQWM8W2vPPisXhhklFLBY=";
meta = with lib; {
description = "This is a Grafana backend plugin to allow using an SQLite database as a data source. The SQLite database needs to be accessible to the filesystem of the device where Grafana itself is running.";
license = licenses.asl20;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -2,12 +2,12 @@
grafanaPlugin rec {
pname = "grafana-clickhouse-datasource";
version = "3.3.0";
version = "4.4.0";
zipHash = {
x86_64-linux = "sha256-FkOX/2vPmLtxe/oOISldlVhayy7AwfFxLeiwJ5TNgYY=";
aarch64-linux = "sha256-4rCj+NaKPZbuVohlKmSf1M6n5ng9HZMrwzBCgLPdiok=";
x86_64-darwin = "sha256-bpey6EwwAqXgxjvjJ6ou4rinidHCpUr+Z89YpAZK7z8=";
aarch64-darwin = "sha256-u/U2lu4szf9JFt/zfhGmWKH2OUqpJDNaSI69EDdi1+w=";
x86_64-linux = "sha256-rh+oTJrW7WxLHG7jSkT1Pog+/tqhE+j/0jdbgaHu1a4=";
aarch64-linux = "sha256-uV+WKh3/jBgOwX2lrwC3Q7TGr3/BH83QZhwmtL4G3qo=";
x86_64-darwin = "sha256-Y6Xp4HCYF+Nkw8CNrfEMOtpNgKunMI/4oVqD8Wq5VEI=";
aarch64-darwin = "sha256-x/Z5BA9N5sZurQ5K1NQCYXQPZ/yF1p/372GPIeVU0ps=";
};
meta = with lib; {
description = "Connects Grafana to ClickHouse";

View File

@ -2,8 +2,8 @@
grafanaPlugin rec {
pname = "grafana-clock-panel";
version = "2.1.3";
zipHash = "sha256-ZedeV/SQsBu55jAxFyyXQefir4hEl1/TQDmaTJN9bag=";
version = "2.1.8";
zipHash = "sha256-QLvq2CSlJuEaYAazn8MoY3XCiXeRILj4dTp/aqrHL/k=";
meta = with lib; {
description = "Clock panel for Grafana";
license = licenses.mit;

View File

@ -0,0 +1,13 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "grafana-discourse-datasource";
version = "2.0.2";
zipHash = "sha256-0MTxPe7RJHMA0SwjOcFlbi4VkhlLUFP+5r2DsHAaffc=";
meta = with lib; {
description = "The Discourse data source plugin allows users to search and view topics, posts, users, tags, categories, and reports on a given Discourse forum.";
license = licenses.asl20;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -0,0 +1,18 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "grafana-github-datasource";
version = "1.9.0";
zipHash = {
x86_64-linux = "sha256-DQKb8VKa41bL6D9DN8OpL3sqBIlRCa1zgIjduD6AcQc=";
aarch64-linux = "sha256-RHFURMnBF14QCZhVxZQO3JJ3OP6JXD2Hfef8IyVOgBs=";
x86_64-darwin = "sha256-UBwc8CZRRHsEKpzTgn5PNXjxLzETyWKGsDFtXZnkRW4=";
aarch64-darwin = "sha256-xgQ7s3QP7Sq8ni0n54NE/nYlyALIESfXNKncruAWty0=";
};
meta = with lib; {
description = "The GitHub datasource allows GitHub API data to be visually represented in Grafana dashboards.";
license = licenses.asl20;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -0,0 +1,18 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "grafana-googlesheets-datasource";
version = "1.2.14";
zipHash = {
x86_64-linux = "sha256-N4JZ/aWpvezR9daJKU559GXd+FNGmDA4P9CrlC4RFmQ=";
aarch64-linux = "sha256-HZhyg6NhptFib/3JJ8AnSywF+eaZOwiCij3TlMB0YG8=";
x86_64-darwin = "sha256-EwE6w67ARVp/2GE9pSqaD5TuBnsgwsDLZCrEXPfRfUE=";
aarch64-darwin = "sha256-3UGd/t1k6aZsKsQCplLV9klmjQAga19VaopHx330xUs=";
};
meta = with lib; {
description = "The Grafana JSON Datasource plugin empowers you to seamlessly integrate JSON data into Grafana.";
license = licenses.asl20;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -0,0 +1,18 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "grafana-mqtt-datasource";
version = "1.1.0-beta.2";
zipHash = {
x86_64-linux = "sha256-QYv+6zDLSYiB767A3ODgZ1HzPd7Hpa90elKDV1+dNx8=";
aarch64-linux = "sha256-cquaTD3e40vj7PuQDHvODHOpXeWx3AaN6Mv+Vu+ikbI=";
x86_64-darwin = "sha256-PZmUkghYawU5aKA536u3/LCzsvkIFVJIzl1FVWcrKTI=";
aarch64-darwin = "sha256-9FP7UbNI4q4nqRTzlNKcEPnJ9mdqzOL4E0nuEAdFNJw=";
};
meta = with lib; {
description = "The MQTT data source plugin allows you to visualize streaming MQTT data from within Grafana.";
license = licenses.asl20;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -2,9 +2,8 @@
grafanaPlugin {
pname = "grafana-oncall-app";
versionPrefix = "v";
version = "1.8.5";
zipHash = "sha256-HuZYHPTWm0EPKQbmapALK2j+PzM+J7gcWM9w8vU2yI0=";
version = "1.10.2";
zipHash = "sha256-wRgzdPKSA24O4kSDhaO/09uOG6lIoJGWUGOgX1vdjlU=";
meta = with lib; {
description = "Developer-friendly incident response for Grafana";
license = licenses.agpl3Only;

View File

@ -0,0 +1,18 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "grafana-opensearch-datasource";
version = "2.19.0";
zipHash = {
x86_64-linux = "sha256-jTeiIbaM2wPBTxFyXPQhBXxxzgRZbaXkqeN9+tHgWPc=";
aarch64-linux = "sha256-8ti5CibWbycAO9o3Wse/CuE07JjwV1Quhy/Vm6BDmyM=";
x86_64-darwin = "sha256-6rqdTsYcqjqcXtM20ekJguT42w5dr4EUHvNuRDIU6k0=";
aarch64-darwin = "sha256-Z4ISwwkFJXXdVcLOspAK8euI4yor4Ii08K7zZffY9tM=";
};
meta = with lib; {
description = "The Grafana JSON Datasource plugin empowers you to seamlessly integrate JSON data into Grafana.";
license = licenses.asl20;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -2,8 +2,8 @@
grafanaPlugin rec {
pname = "grafana-polystat-panel";
version = "2.1.4";
zipHash = "sha256-15mi5NzbbWXJ/69VEwUS058atQ+z2g4C3T9/b+/Exwk=";
version = "2.1.13";
zipHash = "sha256-O8YOSVLhJ1hDNbBHKwkikNBOjQTrGofGklVTalgDH4I=";
meta = with lib; {
description = "Hexagonal multi-stat panel for Grafana";
license = licenses.asl20;

View File

@ -0,0 +1,13 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "marcusolsson-calendar-panel";
version = "3.7.0";
zipHash = "sha256-O8EvkS+lWq2qaIj1HJzPagRGhrEENvY1YDBusvUejM0=";
meta = with lib; {
description = "Calendar Panel is a Grafana plugin that displays events from various data sources.";
license = licenses.asl20;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -0,0 +1,18 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "marcusolsson-csv-datasource";
version = "0.6.19";
zipHash = {
x86_64-linux = "sha256-HCwh8v9UeO7eeESZ78Hj6uvLext/x7bPfACe1u2BqTM=";
aarch64-linux = "sha256-2Qtwe34fe8KlIye3RuuNLjlWWgXGJvAmwWUnZD8LdWE=";
x86_64-darwin = "sha256-6sGA06INQbiRCd23ykdtUWAR+oA3YFh57KBT7zWUP44=";
aarch64-darwin = "sha256-gzQRcPeRqLvl27SB18hTTtcHx/namT2V0NOgX5J1mbs=";
};
meta = with lib; {
description = "The Grafana CSV Datasource plugin is designed to load CSV data into Grafana, expanding your capabilities to visualize and analyze data stored in CSV (Comma-Separated Values) format.";
license = licenses.asl20;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -0,0 +1,13 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "marcusolsson-json-datasource";
version = "1.3.17";
zipHash = "sha256-L1G5s9fEEuvNs5AWXlT00f+dU2/2Rtjm4R3kpFc4NRg=";
meta = with lib; {
description = "The Grafana JSON Datasource plugin empowers you to seamlessly integrate JSON data into Grafana.";
license = licenses.asl20;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -4,16 +4,33 @@
grafanaPlugin = callPackage ./grafana-plugin.nix { };
bsull-console-datasource = callPackage ./bsull-console-datasource { };
doitintl-bigquery-datasource = callPackage ./doitintl-bigquery-datasource { };
fetzerch-sunandmoon-datasource = callPackage ./fetzerch-sunandmoon-datasource { };
frser-sqlite-datasource = callPackage ./frser-sqlite-datasource { };
grafadruid-druid-datasource = callPackage ./grafadruid-druid-datasource { };
grafana-clickhouse-datasource = callPackage ./grafana-clickhouse-datasource { };
grafana-clock-panel = callPackage ./grafana-clock-panel { };
grafana-discourse-datasource = callPackage ./grafana-discourse-datasource { };
grafana-github-datasource = callPackage ./grafana-github-datasource { };
grafana-googlesheets-datasource = callPackage ./grafana-googlesheets-datasource { };
grafana-mqtt-datasource = callPackage ./grafana-mqtt-datasource { };
grafana-oncall-app = callPackage ./grafana-oncall-app { };
grafana-opensearch-datasource = callPackage ./grafana-opensearch-datasource { };
grafana-piechart-panel = callPackage ./grafana-piechart-panel { };
grafana-polystat-panel = callPackage ./grafana-polystat-panel { };
grafana-worldmap-panel = callPackage ./grafana-worldmap-panel { };
marcusolsson-calendar-panel = callPackage ./marcusolsson-calendar-panel { };
marcusolsson-csv-datasource = callPackage ./marcusolsson-csv-datasource { };
marcusolsson-dynamictext-panel = callPackage ./marcusolsson-dynamictext-panel { };
marcusolsson-json-datasource = callPackage ./marcusolsson-json-datasource { };
redis-app = callPackage ./redis-app { };
redis-datasource = callPackage ./redis-datasource { };
redis-explorer-app = callPackage ./redis-explorer-app { };
ventura-psychrometric-panel = callPackage ./ventura-psychrometric-panel { };
volkovlabs-echarts-panel = callPackage ./volkovlabs-echarts-panel { };
volkovlabs-form-panel = callPackage ./volkovlabs-form-panel { };
volkovlabs-rss-datasource = callPackage ./volkovlabs-rss-datasource { };
volkovlabs-variable-panel = callPackage ./volkovlabs-variable-panel { };
yesoreyeram-infinity-datasource = callPackage ./yesoreyeram-infinity-datasource { };
}
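
As a quick sanity check that a newly wired-up attribute evaluates and builds, a plugin can be built directly from a nixpkgs checkout. This is a minimal sketch, assuming the command is run from the repository root and using an attribute name from the list above; in a NixOS configuration these packages are usually passed to the Grafana module's declarativePlugins option instead of being built by hand.

# Build a single plugin attribute from the root of a nixpkgs checkout.
nix-build -A grafanaPlugins.grafana-clock-panel

# Inspect the build output.
ls ./result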

View File

@ -1,8 +1,46 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p curl jq common-updater-scripts
#!nix-shell -i bash -p nix curl jq common-updater-scripts
set -eu -o pipefail
cd "${0%/*}"/../../../../../
readonly plugin_name="$1"
readonly latest_version="$(curl "https://grafana.com/api/plugins/${plugin_name}" | jq -r .version)"
update-source-version "grafanaPlugins.${plugin_name}" "$latest_version"
readonly api_response="$(curl --silent "https://grafana.com/api/plugins/${plugin_name}")"
readonly latest_version="$(echo "$api_response" | jq -r .version)"
update() {
local system="${2:+--system=$2}"
local pkg="$(echo "$api_response" | jq -e .packages.\""$1"\")"
if echo "$pkg" | jq -er .sha256 > /dev/null; then
local hash="$(echo "$pkg" | jq -er .sha256)"
else
# Some packages only have an md5 hash. Download the file for hash computation.
local urlPath="$(echo "$pkg" | jq -er .downloadUrl)"
local hash="$(nix-prefetch-url --type sha256 --name "$plugin_name" "https://grafana.com$urlPath")"
fi
hash="$(nix --extra-experimental-features nix-command hash to-sri --type sha256 "$hash")"
# Set version number to a random number first to force update to happen.
#
# `update-source-version` will exit early if it considers the version number to be the same.
# However we have already downloaded the file and computed the hash, so it makes sense to set
# the newly computed information unconditionally.
#
# As an example of a workflow that this made more complicated than strictly necessary: while
# improving this script, I spent multiple hours investigating why it would refuse to update a
# hash that I had intentionally malformed (in order to test the operation of this script).
update-source-version $system "grafanaPlugins.${plugin_name}" $RANDOM "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
update-source-version $system "grafanaPlugins.${plugin_name}" "$latest_version" "$hash"
}
if echo "$api_response" | jq -e .packages.any > /dev/null; then
# the package contains an "any" package, so there should be only one zipHash.
update "any"
else
update "linux-amd64" "x86_64-linux"
update "linux-arm64" "aarch64-linux"
update "darwin-amd64" "x86_64-darwin"
update "darwin-arm64" "aarch64-darwin"
fi
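
For reference, a sketch of how the updater is invoked. The script path below is an assumption inferred from the cd at the top of the script (five directories below the nixpkgs root), and the plugin slugs are taken from packages touched in this commit.

# Path assumed; plugin shipped as a single "any" archive (one zipHash string):
./pkgs/servers/monitoring/grafana/plugins/update.sh grafana-clock-panel

# Plugin shipped per platform (zipHash attribute set, one hash per system):
./pkgs/servers/monitoring/grafana/plugins/update.sh grafana-clickhouse-datasource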

View File

@ -0,0 +1,18 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "ventura-psychrometric-panel";
version = "4.5.1";
zipHash = "sha256-Y/Eh3eWZkPS8Q1eha7sEJ3wTMI7QxOr7MEbPc25fnGg=";
meta = with lib; {
description = "Grafana plugin to display air conditions on a psychrometric chart.";
license = licenses.bsd3 // {
spdxId = "BSD-3-Clause-LBNL";
url = "https://spdx.org/licenses/BSD-3-Clause-LBNL.html";
fullName = "Lawrence Berkeley National Labs BSD variant license";
shortName = "lbnl-bsd3";
};
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -0,0 +1,13 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "volkovlabs-echarts-panel";
version = "6.4.1";
zipHash = "sha256-RHOfFKplZs0gbD/esvrpXkkPKPfo5R4zjCUJWPpkDNU=";
meta = with lib; {
description = "The Apache ECharts plugin is a visualization panel for Grafana that allows you to incorporate the popular Apache ECharts library into your Grafana dashboard.";
license = licenses.asl20;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -0,0 +1,13 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "volkovlabs-form-panel";
version = "4.6.0";
zipHash = "sha256-ne2dfCr+PBodeaxGfZL0VrAxHLYEAaeQfuZQf2F3s0s=";
meta = with lib; {
description = "The Data Manipulation Panel is the first plugin that allows inserting and updating application data, as well as modifying configuration directly from your Grafana dashboard.";
license = licenses.asl20;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -0,0 +1,13 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "volkovlabs-rss-datasource";
version = "4.2.0";
zipHash = "sha256-+3tgvpH6xlJORqN4Sx7qwzsiQZoLwdarzhx6kHvtOoY=";
meta = with lib; {
description = "The RSS/Atom data source is a plugin for Grafana that retrieves RSS/Atom feeds and allows visualizing them using Dynamic Text and other panels.";
license = licenses.asl20;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

View File

@ -0,0 +1,13 @@
{ grafanaPlugin, lib }:
grafanaPlugin rec {
pname = "volkovlabs-variable-panel";
version = "3.5.0";
zipHash = "sha256-SqMTCdB+8OUo94zJ3eS5NoCeyjc7sdMCR0CTvVe/L1g=";
meta = with lib; {
description = "The Variable panel allows you to have dashboard filters in a separate panel which you can place anywhere on the dashboard.";
license = licenses.asl20;
maintainers = with maintainers; [ nagisa ];
platforms = platforms.unix;
};
}

Some files were not shown because too many files have changed in this diff.