Merge staging-next into staging

github-actions[bot] authored 2023-07-29 00:02:36 +00:00; committed by GitHub
commit 85536e3a09
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
55 changed files with 1206 additions and 12275 deletions


@ -16,6 +16,8 @@
- [river](https://github.com/riverwm/river), a dynamic tiling Wayland compositor. Available as [programs.river](#opt-programs.river.enable).
- [wayfire](https://wayfire.org), a modular and extensible Wayland compositor. Available as [programs.wayfire](#opt-programs.wayfire.enable).
- [GoToSocial](https://gotosocial.org/), an ActivityPub social network server, written in Golang. Available as [services.gotosocial](#opt-services.gotosocial.enable).
- [Typesense](https://github.com/typesense/typesense), a fast, typo-tolerant search engine for building delightful search experiences. Available as [services.typesense](#opt-services.typesense.enable).

File diff suppressed because it is too large.


@ -9,13 +9,13 @@
}:
rustPlatform.buildRustPackage rec {
pname = "polkadot";
version = "0.9.43";
version = "1.0.0";
src = fetchFromGitHub {
owner = "paritytech";
repo = "polkadot";
rev = "v${version}";
hash = "sha256-h+9b+KQgdYowHYGr0nPsqibcwOPmBVo9tKi/uEbLhqo=";
hash = "sha256-izm0rpLzwlhpp3dciQ1zj1boWxhgGnNMG5ceZoZQGEE=";
# the build process of polkadot requires a .git folder in order to determine
# the git commit hash that is being built and add it to the version string.
@ -34,11 +34,24 @@ rustPlatform.buildRustPackage rec {
cargoLock = {
lockFile = ./Cargo.lock;
outputHashes = {
"binary-merkle-tree-4.0.0-dev" = "sha256-/8bGqnM/yqtCgVWkIaVEySZSV3XGYuiA3JuyHYTp2lw=";
"binary-merkle-tree-4.0.0-dev" = "sha256-J09SHQVOLGStMGONdreI5QZlk+uNNKzWRZpGiNJ+lrk=";
"sub-tokens-0.1.0" = "sha256-GvhgZhOIX39zF+TbQWtTCgahDec4lQjH+NqamLFLUxM=";
};
};
# NOTE: the build process currently tries to read some files to generate
# documentation from hardcoded paths that aren't compatible with the cargo
# vendoring strategy, so we need to manually put them in their expected place.
# this should be fixed with the next polkadot release that includes
# https://github.com/paritytech/substrate/pull/14570.
postPatch = ''
FAST_UNSTAKE_DIR=$PWD/../cargo-vendor-dir/pallet-fast-unstake-4.0.0-dev
FAST_UNSTAKE_DOCIFY_DIR=$FAST_UNSTAKE_DIR/frame/fast-unstake
mkdir -p $FAST_UNSTAKE_DOCIFY_DIR
cp -r $FAST_UNSTAKE_DIR/src $FAST_UNSTAKE_DOCIFY_DIR
'';
buildInputs = lib.optionals stdenv.isDarwin [ Security SystemConfiguration ];
nativeBuildInputs = [ rustPlatform.bindgenHook ];


@ -2,6 +2,7 @@
, unzip, libsecret, libXScrnSaver, libxshmfence, buildPackages
, atomEnv, at-spi2-atk, autoPatchelfHook
, systemd, fontconfig, libdbusmenu, glib, buildFHSEnv, wayland
, libglvnd
# Populate passthru.tests
, tests
@ -113,6 +114,9 @@ let
# The credentials should be stored in a secure keychain already, so the benefit of this is questionable
# in the first place.
rm -rf $out/lib/vscode/resources/app/node_modules/vscode-encrypt
# Unbundle libglvnd as VSCode doesn't include libGLESv2.so.2 which is necessary for GPU acceleration
rm -rf $out/lib/vscode/libGLESv2.so
'') + ''
runHook postInstall
'';
@ -121,6 +125,7 @@ let
gappsWrapperArgs+=(
# Add gio to PATH so that moving files to the trash works when not using a desktop environment
--prefix PATH : ${glib.bin}/bin
--prefix LD_LIBRARY_PATH : ${lib.makeLibraryPath [ libglvnd ]}
--add-flags "\''${NIXOS_OZONE_WL:+\''${WAYLAND_DISPLAY:+--ozone-platform-hint=auto --enable-features=WaylandWindowDecorations}}"
--add-flags ${lib.escapeShellArg commandLineArgs}
)


@ -0,0 +1,91 @@
{ stdenv
, lib
, fetchFromGitHub
, crystal
, wrapGAppsHook4
, gobject-introspection
, desktopToDarwinBundle
, webkitgtk_6_0
, sqlite
, gi-crystal
, libadwaita
, gtk4
, pango
}:
let
gtk4' = gtk4.override { x11Support = true; };
pango' = pango.override { withIntrospection = true; };
in
crystal.buildCrystalPackage rec {
pname = "rtfm";
version = "0.2.2";
src = fetchFromGitHub {
owner = "hugopl";
repo = "rtfm";
rev = "v${version}";
name = "rtfm";
hash = "sha256-SmQq3hG94oV346dHtqTHC0xE4cWB3rspD3XXu+mSI8Q=";
};
patches = [
# 1) fixed gi-crystal binding generator command
# 2) removed `-v` arg to `cp` command to prevent build failure due to stdout buffer overflow
# 3) added commands to build gschemas and update icon-cache
./patches/make.patch
# fixed docset path and gi libs directory names
./patches/friendly-docs-path.patch
# added chmod +w for copied docs to prevent error:
# `Error opening file with mode 'wb': '.../style.css': Permission denied`
./patches/enable-write-permissions.patch
];
postPatch = ''
substituteInPlace Makefile \
--replace "crystal run src/create_crystal_docset.cr" "crystal src/create_crystal_docset.cr ${crystal}/share/doc/crystal/api/" \
--replace "crystal run src/create_gtk_docset.cr" "crystal src/create_gtk_docset.cr gtk-doc/"
'';
shardsFile = ./shards.nix;
nativeBuildInputs = [
wrapGAppsHook4
gobject-introspection
gi-crystal
] ++ lib.optionals stdenv.isDarwin [ desktopToDarwinBundle ];
buildInputs = [
webkitgtk_6_0
sqlite
libadwaita
gtk4'
pango'
];
buildTargets = [ "configure" "rtfm" "docsets" ];
preBuild = ''
mkdir gtk-doc/
for file in "${gtk4'.devdoc}"/share/doc/*; do
ln -s "$file" "gtk-doc/$(basename "$file")"
done
for file in "${pango'.devdoc}"/share/doc/*; do
ln -s "$file" "gtk-doc/$(basename "$file")"
done
for file in "${libadwaita.devdoc}"/share/doc/*; do
ln -s "$file" "gtk-doc/$(basename "$file")"
done
'';
meta = with lib; {
description = "Dash/docset reader with built in documentation for Crystal and GTK APIs";
homepage = "https://github.com/hugopl/rtfm/";
license = licenses.mit;
maintainers = with maintainers; [ sund3RRR ];
};
}


@ -0,0 +1,10 @@
--- a/src/doc2dash/doc_set_builder.cr 2023-07-19 14:00:06.864770147 +0300
+++ b/src/doc2dash/doc_set_builder.cr 2023-07-19 13:59:35.440707740 +0300
@@ -44,6 +44,7 @@
real_dest = @html_dest.join(dest || source)
Dir.mkdir_p(Path.new(real_dest).dirname)
File.copy(original, real_dest)
+ File.chmod(real_dest, 0o600)
dest || source
end


@ -0,0 +1,11 @@
--- a/src/create_gtk_docset.cr 2023-07-17 14:28:04.882620660 +0300
+++ b/src/create_gtk_docset.cr 2023-07-17 14:27:09.660643747 +0300
@@ -136,7 +136,7 @@
end
def find_modules : Array(Path)
- basedir = Path.new("/usr/share/doc")
+ basedir = Path.new(ARGV[0]? || "gtk-docs")
MODULES.compact_map do |mod|
print "#{mod.ljust(20, '.')}"
mod_dir = basedir.join(mod)


@ -0,0 +1,30 @@
--- a/Makefile 2023-07-17 17:18:28.000000000 +0300
+++ b/Makefile 2023-07-19 12:13:44.627168135 +0300
@@ -4,8 +4,7 @@
all: configure .WAIT rtfm docsets
configure:
- shards install
- ./bin/gi-crystal
+ gi-crystal
rtfm:
shards build --release -s rtfm
@@ -29,13 +28,15 @@
install -D -m644 data/io.github.hugopl.rtfm.gschema.xml $(DESTDIR)$(PREFIX)/share/glib-2.0/schemas/io.github.hugopl.rtfm.gschema.xml
# docsets
mkdir -p $(DESTDIR)$(PREFIX)/share/rtfm/docsets/
- cp -rv data/Crystal.docset $(DESTDIR)$(PREFIX)/share/rtfm/docsets/
- cp -rv data/Gtk4.docset $(DESTDIR)$(PREFIX)/share/rtfm/docsets/
+ cp -r data/Crystal.docset $(DESTDIR)$(PREFIX)/share/rtfm/docsets/
+ cp -r data/Gtk4.docset $(DESTDIR)$(PREFIX)/share/rtfm/docsets/
# License
install -D -m0644 LICENSE $(DESTDIR)$(PREFIX)/share/licenses/rtfm/LICENSE
# Changelog
install -D -m0644 CHANGELOG.md $(DESTDIR)$(PREFIX)/share/doc/rtfm/CHANGELOG.md
gzip -9fn $(DESTDIR)$(PREFIX)/share/doc/rtfm/CHANGELOG.md
+ gtk4-update-icon-cache --ignore-theme-index $(PREFIX)/share/icons/hicolor
+ glib-compile-schemas $(DESTDIR)$(PREFIX)/share/glib-2.0/schemas
uninstall:
rm -f $(DESTDIR)$(PREFIX)/bin/rtfm


@ -0,0 +1,42 @@
{
db = {
url = "https://github.com/crystal-lang/crystal-db.git";
rev = "v0.12.0";
sha256 = "1in8w2dz7nlhqgc9l6b3pi6f944m29nhbg3p5j40qzvsrr8lqaj7";
};
fzy = {
url = "https://github.com/hugopl/fzy.git";
rev = "v0.5.5";
sha256 = "1zk95m43ymx9ilwr6iw9l44nkmp4sas28ib0dkr07hkhgrkw68sv";
};
gio = {
url = "https://github.com/hugopl/gio.cr.git";
rev = "v0.1.0";
sha256 = "0vj35bi64d4hni18nrl8fmms306a0gl4zlxpf3aq08lh0sbwzhd8";
};
gtk4 = {
url = "https://github.com/hugopl/gtk4.cr.git";
rev = "v0.15.0";
sha256 = "100j5k4sfc2dpj3nplzjcaxw1bwy3hsy5cw93asg00kda9h8dbb1";
};
harfbuzz = {
url = "https://github.com/hugopl/harfbuzz.cr.git";
rev = "v0.2.0";
sha256 = "06wgqxwyib5416yp53j2iwcbr3bl4jjxb1flm7z103l365par694";
};
libadwaita = {
url = "https://github.com/geopjr/libadwaita.cr.git";
rev = "23ce21d6400af7563ede0b53deea6d1f77436985";
sha256 = "09jz6r0yp4qsm47qcknzgkjxavr5j3dkxf2yjbw0jkaz1an58pfw";
};
pango = {
url = "https://github.com/hugopl/pango.cr.git";
rev = "v0.2.0";
sha256 = "0dl3qrhi2ybylmvzx1x5gsznp2pcdkc50waxrljxwnf5avn8ixsf";
};
sqlite3 = {
url = "https://github.com/crystal-lang/crystal-sqlite3.git";
rev = "v0.20.0";
sha256 = "0mqy6rc26i0sf2fdllbbzdhbd1d35npmpqqjz0b1n1vrzrm6fg05";
};
}


@ -45,9 +45,9 @@
}
},
"ungoogled-chromium": {
"version": "115.0.5790.102",
"sha256": "0sxhhsrn4cg9akpnb2qpn7kkgp286rh8y2mmypm2409s5grf1xh6",
"sha256bin64": "18n7xqbvcdd68856wmbrxx1f5lqj61g9cyiir9dzlfmf0a9wxvml",
"version": "115.0.5790.110",
"sha256": "0wgp44qnvmdqf2kk870ndm51rcvar36li2qq632ay4n8gfpbrm79",
"sha256bin64": "1w2jl92x78s4vxv4p1imkz7qaq51yvs0wiz2bclbjz0hjlw9akr3",
"deps": {
"gn": {
"version": "2023-05-19",
@ -56,8 +56,8 @@
"sha256": "0y07c18xskq4mclqiz3a63fz8jicz2kqridnvdhqdf75lhp61f8a"
},
"ungoogled-patches": {
"rev": "115.0.5790.102-1",
"sha256": "0g3igkca75d4h1ydzhh2xsp4lw6i6420pvhv71f92msppmsz83n8"
"rev": "115.0.5790.110-1",
"sha256": "1jahy4jl5bnnzl6433hln0dj3b39v5zqd90n8zf7ss45wqrff91b"
}
}
}


@ -7,7 +7,7 @@
, gnome/*.gnome-shell*/
, browserpass, gnome-browser-connector, uget-integrator, plasma5Packages, bukubrow, pipewire
, tridactyl-native
, fx_cast_bridge
, fx-cast-bridge
, udev
, libkrb5
, libva
@ -69,7 +69,7 @@ let
++ lib.optional (cfg.enableGnomeExtensions or false) gnome-browser-connector
++ lib.optional (cfg.enableUgetIntegrator or false) uget-integrator
++ lib.optional (cfg.enablePlasmaBrowserIntegration or false) plasma5Packages.plasma-browser-integration
++ lib.optional (cfg.enableFXCastBridge or false) fx_cast_bridge
++ lib.optional (cfg.enableFXCastBridge or false) fx-cast-bridge
++ extraNativeMessagingHosts
;
libs = lib.optionals stdenv.isLinux [ udev libva mesa libnotify xorg.libXScrnSaver cups pciutils ]


@ -5,6 +5,6 @@
# Example: nix-shell ./maintainers/scripts/update.nix --argstr package cacert
import ./generic.nix {
version = "3.91";
hash = "sha256-hL1GN23xcRjFX21z0w/ZOgryEpbGbnaQRxVH5YmPxLM=";
version = "3.92";
hash = "sha256-PbGS1uiCA5rwKufq8yF+0RS7etg0FMZGdyq4Ah4kolQ=";
}


@ -1,20 +0,0 @@
{ lib, buildPythonPackage, fetchPypi, future }:
buildPythonPackage rec {
pname = "backports.csv";
version = "1.0.7";
src = fetchPypi {
inherit pname version;
sha256 = "0vdx5jlhs91iizc8j8l8811nqprwvdx39pgkdc82w2qkfgzxyxqj";
};
propagatedBuildInputs = [ future ];
meta = with lib; {
description = "Backport of Python 3 csv module";
homepage = "https://github.com/ryanhiebert";
license = licenses.psfl;
};
}


@ -1,32 +0,0 @@
{ lib
, buildPythonPackage
, fetchPypi
, setuptools-scm
, isPy3k
, pytestCheckHook
}:
buildPythonPackage rec {
pname = "backports.functools_lru_cache";
version = "1.6.4";
src = fetchPypi {
inherit pname version;
sha256 = "d5ed2169378b67d3c545e5600d363a923b09c456dab1593914935a68ad478271";
};
nativeBuildInputs = [ setuptools-scm ];
nativeCheckInputs = [ pytestCheckHook ];
# Tests fail on Python 2
doCheck = isPy3k;
pythonNamespaces = [ "backports" ];
meta = {
description = "Backport of functools.lru_cache";
homepage = "https://github.com/jaraco/backports.functools_lru_cache";
license = lib.licenses.mit;
};
}


@ -1,34 +0,0 @@
{ lib
, unittestCheckHook
, buildPythonPackage
, fetchPypi
, setuptools-scm
, backports_weakref
}:
buildPythonPackage rec {
pname = "backports.tempfile";
version = "1.0";
src = fetchPypi {
inherit pname version;
sha256 = "1c648c452e8770d759bdc5a5e2431209be70d25484e1be24876cf2168722c762";
};
buildInputs = [ setuptools-scm ];
propagatedBuildInputs = [ backports_weakref ];
# requires https://pypi.org/project/backports.test.support
doCheck = false;
nativeCheckInputs = [ unittestCheckHook ];
unittestFlagsArray = [ "-s" "tests" ];
meta = {
description = "Backport of new features in Python's tempfile module";
license = lib.licenses.psfl;
homepage = "https://github.com/pjdelport/backports.tempfile";
};
}


@ -1,26 +0,0 @@
{ lib, buildPythonPackage, fetchPypi, setuptools-scm, mock }:
buildPythonPackage rec {
pname = "backports.unittest_mock";
version = "1.5";
src = fetchPypi {
inherit pname version;
sha256 = "eff58e53de8fdeb27a1c87a9d57e7b91d15d1bc3854e85344b1a2e69f31ecda7";
};
propagatedBuildInputs = [ mock ];
buildInputs = [ setuptools-scm ];
# does not contain tests
doCheck = false;
pythonImportsCheck = [ "backports.unittest_mock" ];
meta = with lib; {
description = "Provides a function install() which makes the mock module";
homepage = "https://github.com/jaraco/backports.unittest_mock";
license = licenses.mit;
maintainers = with maintainers; [ ];
};
}


@ -1,32 +0,0 @@
{ lib
, buildPythonPackage
, fetchPypi
, setuptools-scm
# , backports
, unittestCheckHook
}:
buildPythonPackage rec {
pname = "backports.weakref";
version = "1.0.post1";
src = fetchPypi {
inherit pname version;
sha256 = "bc4170a29915f8b22c9e7c4939701859650f2eb84184aee80da329ac0b9825c2";
};
buildInputs = [ setuptools-scm ];
# nativeCheckInputs = [ backports ];
# Requires backports package
doCheck = false;
nativeCheckInputs = [ unittestCheckHook ];
unittestFlagsArray = [ "tests" ];
meta = with lib; {
description = "Backports of new features in Pythons weakref module";
license = licenses.psfl;
maintainers = with maintainers; [ jyp ];
};
}


@ -15,7 +15,7 @@
, pytz
, requests
, six
, zope_component
, zope-component
, zope_interface
, dialog
, gnureadline
@ -50,7 +50,7 @@ buildPythonPackage rec {
pytz
requests
six
zope_component
zope-component
zope_interface
];


@ -2,7 +2,6 @@
, buildPythonPackage
, fetchPypi
, pythonOlder
, backports_csv
, configobj
, mock
, pytestCheckHook


@ -8,14 +8,14 @@
buildPythonPackage rec {
pname = "dtlssocket";
version = "0.1.15";
version = "0.1.16";
format = "pyproject";
src = fetchPypi {
pname = "DTLSSocket";
inherit version;
hash = "sha256-RWscUxJsmLkI2GPjnpS1oJVPsJ+xbqPAKk4Q1G7ISu4=";
hash = "sha256-MLEIrkX84cAz4+9sLd1+dBgGKuN0Io46f6lpslQ2ajk=";
};
nativeBuildInputs = [


@ -8,16 +8,19 @@
, numpy
, pytestCheckHook
, pytest-mpl
, pythonOlder
}:
buildPythonPackage rec {
pname = "hist";
version = "2.6.3";
version = "2.7.1";
format = "pyproject";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
sha256 = "dede097733d50b273af9f67386e6dcccaab77e900ae702e1a9408a856e217ce9";
hash = "sha256-/74xTCvQPDQrnxaNznFa2PNigesjFyoAlwiCqTRP6Yg=";
};
buildInputs = [
@ -38,7 +41,8 @@ buildPythonPackage rec {
meta = with lib; {
description = "Histogramming for analysis powered by boost-histogram";
homepage = "https://hist.readthedocs.io/en/latest/";
homepage = "https://hist.readthedocs.io/";
changelog = "https://github.com/scikit-hep/hist/releases/tag/v${version}";
license = licenses.bsd3;
maintainers = with maintainers; [ veprbl ];
};


@ -2,7 +2,7 @@
, buildPythonPackage
, fetchPypi
, webtest
, zope_component
, zope-component
, hupper
, pastedeploy
, plaster
@ -43,7 +43,7 @@ buildPythonPackage rec {
nativeCheckInputs = [
webtest
zope_component
zope-component
];
pythonImportsCheck = [


@ -27,6 +27,13 @@ buildPythonPackage rec {
hash = "sha256-IJ+ovLQv6/UU1oepmUQjzaWBG3Rdd3xvui7FjK85Urs=";
};
patches = [
# https://github.com/sopel-irc/sopel/issues/2401
# https://github.com/sopel-irc/sopel/commit/596adc44330939519784389cbb927435305ef758.patch
# the patch is rewritten here because applying the upstream patch directly would require too many prerequisite patches.
./python311-support.patch
];
propagatedBuildInputs = [
dnspython
geoip2


@ -0,0 +1,96 @@
diff --git a/sopel/plugins/handlers.py b/sopel/plugins/handlers.py
index 76902aa0..05f0279d 100644
--- a/sopel/plugins/handlers.py
+++ b/sopel/plugins/handlers.py
@@ -46,20 +46,15 @@ from __future__ import absolute_import, division, print_function, unicode_litera
import imp
import importlib
+import importlib.util
import inspect
import itertools
import os
+import sys
from sopel import loader
from . import exceptions
-try:
- reload = importlib.reload
-except AttributeError:
- # py2: no reload function
- # TODO: imp is deprecated, to be removed when py2 support is dropped
- reload = imp.reload
-
class AbstractPluginHandler(object):
"""Base class for plugin handlers.
@@ -301,7 +296,7 @@ class PyModulePlugin(AbstractPluginHandler):
This method assumes the plugin is already loaded.
"""
- self._module = reload(self._module)
+ self._module = importlib.reload(self._module)
def is_loaded(self):
return self._module is not None
@@ -402,45 +397,31 @@ class PyFilePlugin(PyModulePlugin):
if good_file:
name = os.path.basename(filename)[:-3]
- module_type = imp.PY_SOURCE
+ spec = importlib.util.spec_from_file_location(
+ name,
+ filename,
+ )
elif good_dir:
name = os.path.basename(filename)
- module_type = imp.PKG_DIRECTORY
+ spec = importlib.util.spec_from_file_location(
+ name,
+ os.path.join(filename, '__init__.py'),
+ submodule_search_locations=filename,
+ )
else:
raise exceptions.PluginError('Invalid Sopel plugin: %s' % filename)
self.filename = filename
self.path = filename
- self.module_type = module_type
+ self.module_spec = spec
super(PyFilePlugin, self).__init__(name)
def _load(self):
- # The current implementation uses `imp.load_module` to perform the
- # load action, which also reloads the module. However, `imp` is
- # deprecated in Python 3, so that might need to be changed when the
- # support for Python 2 is dropped.
- #
- # However, the solution for Python 3 is non-trivial, since the
- # `importlib` built-in module does not have a similar function,
- # therefore requires to dive into its public internals
- # (``importlib.machinery`` and ``importlib.util``).
- #
- # All of that is doable, but represents a lot of work. As long as
- # Python 2 is supported, we can keep it for now.
- #
- # TODO: switch to ``importlib`` when Python2 support is dropped.
- if self.module_type == imp.PY_SOURCE:
- with open(self.path) as mod:
- description = ('.py', 'U', self.module_type)
- mod = imp.load_module(self.name, mod, self.path, description)
- elif self.module_type == imp.PKG_DIRECTORY:
- description = ('', '', self.module_type)
- mod = imp.load_module(self.name, None, self.path, description)
- else:
- raise TypeError('Unsupported module type')
-
- return mod
+ module = importlib.util.module_from_spec(self.module_spec)
+ sys.modules[self.name] = module
+ self.module_spec.loader.exec_module(module)
+ return module
def get_meta_description(self):
"""Retrieve a meta description for the plugin.


@ -3,7 +3,6 @@
, fetchPypi
, hatchling
, isPy3k
, backports_functools_lru_cache
}:
buildPythonPackage rec {


@ -1,5 +1,4 @@
{ lib
, backports_unittest-mock
, buildPythonPackage
, fetchPypi
, pytestCheckHook
@ -26,7 +25,6 @@ buildPythonPackage rec {
];
nativeCheckInputs = [
backports_unittest-mock
pytestCheckHook
];


@ -16,12 +16,12 @@
buildPythonPackage rec {
pname = "steamship";
version = "2.17.11";
version = "2.17.18";
format = "pyproject";
src = fetchPypi {
inherit pname version;
hash = "sha256-Jy7ORAMnrBSeDZob3KcAnqhLBI1az/g6s30BYPA0bTE=";
hash = "sha256-1pWSP+s1jjtuRWWoPD5CcYZzt9JSiGHPNxxkLXP+pkc=";
};
pythonRelaxDeps = [


@ -18,7 +18,6 @@
, wheel
, jax
, opt-einsum
, backports_weakref
, tensorflow-estimator-bin
, tensorboard
, config
@ -83,8 +82,7 @@ in buildPythonPackage {
keras-applications
keras-preprocessing
h5py
] ++ lib.optional (!isPy3k) mock
++ lib.optionals (pythonOlder "3.4") [ backports_weakref ];
] ++ lib.optional (!isPy3k) mock;
nativeBuildInputs = [ wheel ] ++ lib.optionals cudaSupport [ addOpenGLRunpath ];


@ -7,7 +7,7 @@
, pythonOlder
, sybil
, twisted
, zope_component
, zope-component
}:
buildPythonPackage rec {
@ -33,7 +33,7 @@ buildPythonPackage rec {
pytestCheckHook
sybil
twisted
zope_component
zope-component
];
disabledTestPaths = [


@ -1,6 +1,4 @@
{ lib, fetchPypi, buildPythonPackage, pytestCheckHook
, isPy3k
, backports_functools_lru_cache
, setuptools
}:
@ -15,9 +13,7 @@ buildPythonPackage rec {
nativeCheckInputs = [ pytestCheckHook ];
propagatedBuildInputs = [ setuptools ] ++ lib.optionals (!isPy3k) [
backports_functools_lru_cache
];
propagatedBuildInputs = [ setuptools ];
# To prevent infinite recursion with pytest
doCheck = false;


@ -1,35 +1,47 @@
{ lib
, buildPythonPackage
, fetchPypi
, zope_configuration
, zope-deferredimport
, zope_deprecation
, zope_event
, zope-hookable
, zope_interface
, zope_configuration
, zope_i18nmessageid
, zope_interface
}:
buildPythonPackage rec {
pname = "zope.component";
version = "5.1.0";
pname = "zope-component";
version = "6.0";
format = "setuptools";
src = fetchPypi {
inherit pname version;
hash = "sha256-pQj5/vG29ShkYtM0DNif+rXHiZ3KBAEzcjnLa6fGuwo=";
pname = "zope.component";
inherit version;
hash = "sha256-mgoEcq0gG5S0/mdBzprCwwuLsixRYHe/A2kt7E37aQY=";
};
propagatedBuildInputs = [
zope-deferredimport zope_deprecation zope_event zope-hookable zope_interface
zope_configuration zope_i18nmessageid
zope_configuration
zope-deferredimport
zope_deprecation
zope_event
zope-hookable
zope_i18nmessageid
zope_interface
];
# ignore tests because of a circular dependency on zope_security
doCheck = false;
pythonImportsCheck = [
"zope.component"
];
meta = with lib; {
homepage = "https://github.com/zopefoundation/zope.component";
description = "Zope Component Architecture";
changelog = "https://github.com/zopefoundation/zope.component/blob/${version}/CHANGES.rst";
license = licenses.zpl20;
maintainers = with maintainers; [ goibhniu ];
};


@ -3,7 +3,7 @@
, fetchPypi
, isPy3k
, zope_event
, zope_component
, zope-component
}:
buildPythonPackage rec {
@ -15,7 +15,7 @@ buildPythonPackage rec {
hash = "sha256-9ahU6J/5fe6ke/vqN4u77yeJ0uDMkKHB2lfZChzmfLU=";
};
propagatedBuildInputs = [ zope_event zope_component ];
propagatedBuildInputs = [ zope_event zope-component ];
# namespace collides with local directory
doCheck = false;


@ -223,7 +223,7 @@ stdenv.mkDerivation rec {
# Additional tests that check bazel's functionality. Execute
#
# nix-build . -A bazel_5.tests
# nix-build . -A bazel_6.tests
#
# in the nixpkgs checkout root to exercise them locally.
passthru.tests =


@ -127,9 +127,9 @@ rec {
# https://docs.gradle.org/current/userguide/compatibility.html
gradle_8 = gen {
version = "8.0.1";
version = "8.2.1";
nativeVersion = "0.22-milestone-24";
sha256 = "02g9i1mrpdydj8d6395cv6a4ny9fw3z7sjzr7n6l6a9zx65masqv";
sha256 = "1lasx96qgh1pjmjjk8a5a772ppgqmp33mp6axyfsjalg71nigv03";
defaultJava = jdk17;
};


@ -2963,9 +2963,7 @@ lib.composeManyExtensions [
});
wcwidth = super.wcwidth.overridePythonAttrs (old: {
propagatedBuildInputs = (old.propagatedBuildInputs or [ ]) ++
lib.optional self.isPy27 (self.backports-functools-lru-cache or self.backports_functools_lru_cache)
;
propagatedBuildInputs = (old.propagatedBuildInputs or [ ]);
});
wtforms = super.wtforms.overridePythonAttrs (old: {


@ -10,13 +10,13 @@
stdenv.mkDerivation (finalAttrs: {
pname = "doomrunner";
version = "1.7.3";
version = "1.8.0";
src = fetchFromGitHub {
owner = "Youda008";
repo = "DoomRunner";
rev = "v${finalAttrs.version}";
hash = "sha256-8355WuVF3OQ2xl1VCvMZYDRRhHaTd8rdll5e4YzrYLc=";
hash = "sha256-twiykuUhp4+TMgUhezgelldJBbtlqA32Ah3DalFsvPo=";
};
buildInputs = [ qtbase ];


@ -283,18 +283,30 @@ let
};
wireless = {
CFG80211_WEXT = option yes; # Without it, ipw2200 drivers don't build
IPW2100_MONITOR = option yes; # support promiscuous mode
IPW2200_MONITOR = option yes; # support promiscuous mode
HOSTAP_FIRMWARE = option yes; # Support downloading firmware images with Host AP driver
HOSTAP_FIRMWARE_NVRAM = option yes;
ATH9K_PCI = option yes; # Detect Atheros AR9xxx cards on PCI(e) bus
ATH9K_AHB = option yes; # Ditto, AHB bus
B43_PHY_HT = option yes;
BCMA_HOST_PCI = option yes;
RTW88 = whenAtLeast "5.2" module;
RTW88_8822BE = mkMerge [ (whenBetween "5.2" "5.8" yes) (whenAtLeast "5.8" module) ];
RTW88_8822CE = mkMerge [ (whenBetween "5.2" "5.8" yes) (whenAtLeast "5.8" module) ];
CFG80211_WEXT = option yes; # Without it, ipw2200 drivers don't build
IPW2100_MONITOR = option yes; # support promiscuous mode
IPW2200_MONITOR = option yes; # support promiscuous mode
HOSTAP_FIRMWARE = option yes; # Support downloading firmware images with Host AP driver
HOSTAP_FIRMWARE_NVRAM = option yes;
ATH9K_PCI = option yes; # Detect Atheros AR9xxx cards on PCI(e) bus
ATH9K_AHB = option yes; # Ditto, AHB bus
# The description of this option makes it sound dangerous or even illegal,
# but OpenWRT enables it by default: https://github.com/openwrt/openwrt/blob/master/package/kernel/mac80211/Makefile#L55
# At the time of writing (25-06-2023) this is only used in a "correct" way by ath drivers for initiating DFS radiation
# for "certified devices".
EXPERT = option yes; # this is needed for offering the certification option
CFG80211_CERTIFICATION_ONUS = option yes;
# DFS: "Dynamic Frequency Selection" is a spectrum-sharing mechanism that allows
# you to use certain interesting frequency when your local regulatory domain mandates it.
# ATH drivers hides the feature behind this option and makes hostapd works with DFS frequencies.
# OpenWRT enables it too: https://github.com/openwrt/openwrt/blob/master/package/kernel/mac80211/ath.mk#L42
ATH9K_DFS_CERTIFIED = option yes;
ATH10K_DFS_CERTIFIED = option yes;
B43_PHY_HT = option yes;
BCMA_HOST_PCI = option yes;
RTW88 = whenAtLeast "5.2" module;
RTW88_8822BE = mkMerge [ (whenBetween "5.2" "5.8" yes) (whenAtLeast "5.8" module) ];
RTW88_8822CE = mkMerge [ (whenBetween "5.2" "5.8" yes) (whenAtLeast "5.8" module) ];
};
fb = {


@ -9,11 +9,11 @@
stdenv.mkDerivation rec {
pname = "bind";
version = "9.18.16";
version = "9.18.17";
src = fetchurl {
url = "https://downloads.isc.org/isc/bind9/${version}/${pname}-${version}.tar.xz";
sha256 = "sha256-yII0/gfudcPIqeWRUv7mS3FGQ96OIs+Y2j200LV+B3U=";
hash = "sha256-veHFAXuB0decaeuPU38uUDL9NiOs3V7oMNT3S8JINFg=";
};
outputs = [ "out" "lib" "dev" "man" "dnsutils" "host" ];


@ -36,7 +36,7 @@ buildPythonPackage rec {
passlib
requests
sqlalchemy
zope_component
zope-component
zope_configuration
];


@ -1,17 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_12"}:
let
nodeEnv = import ../../development/node-packages/node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}

File diff suppressed because it is too large.


@ -1,31 +0,0 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p common-updater-scripts nodePackages.node2nix curl jq gnused nix coreutils
set -euo pipefail
pushd .
cd "$(dirname "${BASH_SOURCE[0]}")"/../../..
latestVersion="$(curl -s "https://api.github.com/repos/agersant/polaris-web/releases?per_page=1" | jq -r ".[0].tag_name")"
currentVersion=$(nix-instantiate --eval -E "with import ./. {}; polaris-web.version or (lib.getVersion polaris-web)" | tr -d '"')
if [[ "$currentVersion" == "$latestVersion" ]]; then
echo "polaris-web is up-to-date: $currentVersion"
exit 0
fi
update-source-version polaris-web "$latestVersion"
store_src="$(nix-build . -A polaris-web.src --no-out-link)"
popd
cd "$(dirname "${BASH_SOURCE[0]}")"
node2nix \
--nodejs-12 \
--development \
--node-env ../../development/node-packages/node-env.nix \
--input "$store_src"/package.json \
--lock "$store_src"/package-lock.json \
--output ./node-packages.nix \
--composition ./node-composition.nix


@ -1,65 +1,24 @@
{ lib
, stdenv
, pkgs
, buildNpmPackage
, fetchFromGitHub
, nodejs
, cypress
}:
stdenv.mkDerivation rec {
buildNpmPackage rec {
pname = "polaris-web";
version = "build-55";
version = "67";
src = fetchFromGitHub {
owner = "agersant";
repo = "polaris-web";
rev = version;
sha256 = "2XqU4sExF7Or7RxpOK2XU9APtBujfPhM/VkOLKVDvF4=";
rev = "build-${version}";
hash = "sha256-mhrgHNbqxLhhLWP4eu1A3ytrx9Q3X0EESL2LuTfgsBE=";
};
nativeBuildInputs = [
nodejs
];
npmDepsHash = "sha256-lScXbxkJiRq5LLFkoz5oZsmKz8I/t1rZJVonfct9r+0=";
buildPhase =
let
nodeDependencies = (import ./node-composition.nix {
inherit pkgs nodejs;
inherit (stdenv.hostPlatform) system;
}).nodeDependencies.override (old: {
# access to path '/nix/store/...-source' is forbidden in restricted mode
src = src;
dontNpmInstall = true;
# ERROR: .../.bin/node-gyp-build: /usr/bin/env: bad interpreter: No such file or directory
# https://github.com/svanderburg/node2nix/issues/275
# There are multiple instances of it, hence the globstar
preRebuild = ''
shopt -s globstar
sed -i -e "s|#!/usr/bin/env node|#! ${pkgs.nodejs}/bin/node|" \
node_modules/**/node-gyp-build/bin.js \
'';
buildInputs = [ cypress ];
# prevent downloading cypress, use the executable in path instead
CYPRESS_INSTALL_BINARY = "0";
});
in
''
runHook preBuild
export PATH="${nodeDependencies}/bin:${nodejs}/bin:$PATH"
# https://github.com/parcel-bundler/parcel/issues/8005
export NODE_OPTIONS=--no-experimental-fetch
ln -s ${nodeDependencies}/lib/node_modules .
npm run production
runHook postBuild
'';
env.CYPRESS_INSTALL_BINARY = "0";
npmBuildScript = "production";
installPhase = ''
runHook preInstall
@ -70,13 +29,10 @@ stdenv.mkDerivation rec {
runHook postInstall
'';
passthru.updateScript = ./update-web.sh;
meta = with lib; {
description = "Web client for Polaris";
homepage = "https://github.com/agersant/polaris-web";
license = licenses.mit;
maintainers = with maintainers; [ pbsds ];
platforms = platforms.unix;
};
}


@ -0,0 +1,55 @@
{ lib, buildNpmPackage, fetchFromGitHub, avahi-compat, nodejs, python3 }:
buildNpmPackage rec {
pname = "fx-cast-bridge";
version = "0.3.1";
src = fetchFromGitHub {
owner = "hensm";
repo = "fx_cast";
rev = "v${version}";
hash = "sha256-hB4NVJW2exHoKsMp0CKzHerYgj8aR77rV+ZsCoWA1Dg=";
};
sourceRoot = "source/app";
npmDepsHash = "sha256-GLrDRZqKcX1PDGREx+MLZ1TEjr88r9nz4TvZ9nvo40g=";
nativeBuildInputs = [ python3 ];
buildInputs = [ avahi-compat ];
postPatch = ''
substituteInPlace bin/lib/paths.js \
--replace "../../../" "../../"
'';
dontNpmInstall = true;
installPhase = ''
runHook preInstall
mkdir -p $out/{bin,lib/mozilla/native-messaging-hosts}
substituteInPlace dist/app/fx_cast_bridge.json \
--replace "$(realpath dist/app/fx_cast_bridge.sh)" "$out/bin/fx_cast_bridge"
mv dist/app/fx_cast_bridge.json $out/lib/mozilla/native-messaging-hosts
rm dist/app/fx_cast_bridge.sh
mv dist/app $out/lib/fx_cast_bridge
mv node_modules $out/lib/fx_cast_bridge/node_modules
echo "#! /bin/sh
NODE_PATH=\"$out/lib/node_modules\" \\
exec ${nodejs}/bin/node \\
$out/lib/fx_cast_bridge/src/main.js \\
--_name fx_cast_bridge \"\$@\"
" >$out/bin/fx_cast_bridge
chmod +x $out/bin/fx_cast_bridge
runHook postInstall
'';
meta = with lib; {
description = "Implementation of the Chrome Sender API (Chromecast) within Firefox";
homepage = "https://hensm.github.io/fx_cast/";
license = licenses.mit;
maintainers = with maintainers; [ dtzWill pedrohlc ];
};
}


@ -1,66 +0,0 @@
# How to generate a new version:
#
# Update version and hash as usual.
#
# ```
# git clone https://github.com/hensm/fx_cast.git
# cd fx_cast/app
# # Add `"name": "fx_cast_bridge", "version": "...",` to package.json and package-lock.json
# nix run nixpkgs#nodePackages.node2nix -- -c node2nix -l package-lock.json -d
# cp -v node-*.nix package*.json ${nixpkgs_path:?}/pkgs/tools/misc/fx_cast/
# ```
{ pkgs, stdenv }: let
nodeEnv = import ./node-env.nix {
inherit (pkgs) nodejs stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs;
libtool = if stdenv.isDarwin then pkgs.darwin.cctools else null;
};
nodePackages = import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
globalBuildInputs = [pkgs.avahi-compat];
};
in
stdenv.mkDerivation rec {
pname = "fx_cast_bridge";
version = "0.3.1";
src = pkgs.fetchFromGitHub {
owner = "hensm";
repo = "fx_cast";
rev = "v${version}";
hash = "sha256-hB4NVJW2exHoKsMp0CKzHerYgj8aR77rV+ZsCoWA1Dg=";
};
buildInputs = with pkgs; [
nodejs
];
buildPhase = ''
ln -vs ${nodePackages.nodeDependencies}/lib/node_modules app/node_modules
# The temporary home solves the "failed with exit code 243" error
HOME="$(mktemp -d)" npm run build:app
'';
installPhase = ''
mkdir -p $out/bin $out/lib/mozilla/native-messaging-hosts $out/opt
substituteInPlace dist/app/fx_cast_bridge.json \
--replace "$(realpath dist/app/fx_cast_bridge.sh)" "$out/bin/fx_cast_bridge"
mv dist/app/fx_cast_bridge.json $out/lib/mozilla/native-messaging-hosts
echo "#! /bin/sh
NODE_PATH=${nodePackages.nodeDependencies}/lib/node_modules exec ${pkgs.nodejs}/bin/node $out/opt/fx_cast_bridge/src/main.js --_name fx_cast_bridge \"\$@\"
" >$out/bin/fx_cast_bridge
chmod +x $out/bin/fx_cast_bridge
mv dist/app $out/opt/fx_cast_bridge
'';
meta = with pkgs.lib; {
description = "Implementation of the Chrome Sender API (Chromecast) within Firefox";
homepage = "https://hensm.github.io/fx_cast/";
license = licenses.mit;
maintainers = with maintainers; [ dtzWill kevincox ];
};
}


@ -1,686 +0,0 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.packageName}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
else {
packageObj.devDependencies = {};
}
replaceDependencies(packageObj.optionalDependencies);
replaceDependencies(packageObj.peerDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "reconstructpackagelock.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 2,
requires: true,
packages: {
"": {
name: packageObj.name,
version: packageObj.version,
license: packageObj.license,
bin: packageObj.bin,
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
}
},
dependencies: {}
};
function augmentPackageJSON(filePath, packages, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
packages[filePath] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
};
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, packages, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, packages, dependencies);
}
});
} else {
augmentPackageJSON(filePath, packages, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.packages, lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
# Script that links bins defined in package.json to the node_modules bin directory
# NPM does not do this for top-level packages itself anymore as of v7
linkBinsScript = writeTextFile {
name = "linkbins.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);
if(packageObj.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
if(typeof packageObj.bin == "object") {
Object.keys(packageObj.bin).forEach(function(exe) {
if(fs.existsSync(packageObj.bin[exe])) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin[exe]),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
else {
if(fs.existsSync(packageObj.bin)) {
console.log("linking bin '" + packageObj.bin + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin),
path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
);
}
else {
console.log("skipping non-existent bin '" + packageObj.bin + "'");
}
}
}
else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.directories.bin, exe),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
runHook postRebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
fi
# Link executables defined in package.json
node ${linkBinsScript}
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version ? null
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
# Patch the shebang lines of all the executables
ls $out/bin/* | while read i
do
file="$(readlink -f "$i")"
chmod u+rwx "$file"
patchShebangs "$file"
done
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
chmod 644 package-lock.json
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
# Builds a development shell
buildNodeShell =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
nodeDependencies = buildNodeDependencies args;
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation ({
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
} // extraArgs);
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@ -1,37 +0,0 @@
{
"name": "fx_cast_bridge",
"version": "0.3.1",
"type": "module",
"scripts": {
"build": "node bin/build.js",
"package": "node bin/build.js --package",
"install-manifest": "node bin/install-manifest.js",
"remove-manifest": "node bin/install-manifest.js --remove"
},
"dependencies": {
"bplist-creator": "^0.1.0",
"bplist-parser": "^0.3.1",
"castv2": "^0.1.10",
"fast-srp-hap": "^2.0.4",
"mdns": "^2.7.2",
"mime-types": "^2.1.35",
"node-fetch": "^3.2.3",
"tweetnacl": "^1.0.3",
"ws": "^8.5.0",
"yargs": "^17.5.1"
},
"devDependencies": {
"@types/mdns": "^0.0.34",
"@types/mime-types": "^2.1.1",
"@types/minimist": "^1.2.2",
"@types/node": "^17.0.26",
"@types/node-fetch": "^2.6.1",
"@types/ws": "^8.5.3",
"@types/yargs": "^17.0.11",
"fs-extra": "^10.1.0",
"mustache": "^4.2.0",
"pkg": "^5.6.0",
"tiny-typed-emitter": "^2.1.0",
"typescript": "^4.6.3"
}
}


@ -4,11 +4,11 @@ let inherit (lib) getDev; in
mkDerivation rec {
pname = "qt5ct";
version = "1.5";
version = "1.7";
src = fetchurl {
url = "mirror://sourceforge/${pname}/${pname}-${version}.tar.bz2";
sha256 = "sha256-1j0M4W4CQnIH2GUx9wpxxbnIUARN1bLcsihVMfQW5JA=";
sha256 = "sha256-7VhUam5MUN/tG5/2oUjUpGj+m017WycnuWUB3ilVuNc=";
};
nativeBuildInputs = [ qmake qttools ];
@ -18,6 +18,7 @@ mkDerivation rec {
qmakeFlags = [
"LRELEASE_EXECUTABLE=${getDev qttools}/bin/lrelease"
"PLUGINDIR=${placeholder "out"}/${qtbase.qtPluginPrefix}"
"LIBDIR=${placeholder "out"}/lib"
];
meta = with lib; {


@ -1,4 +1,4 @@
{ stdenv, fetchurl, lib
{ stdenv, fetchurl, lib, fetchpatch
, pandoc, pkg-config, makeWrapper, curl, openssl, tpm2-tss, libuuid
, abrmdSupport ? true, tpm2-abrmd ? null }:
@ -11,6 +11,14 @@ stdenv.mkDerivation rec {
sha256 = "sha256-H9tJxzBTe/2u0IiISIGmHjv9Eh6VfsC9zu7AJhI2wSM=";
};
patches = [
# https://github.com/tpm2-software/tpm2-tools/pull/3271
(fetchpatch {
url = "https://github.com/tpm2-software/tpm2-tools/commit/b98be08f6f88b0cca9e0667760c4e1e5eb417fbd.patch";
sha256 = "sha256-2sEam9i4gwscJhLwraX2EAjVM8Dh1vmNnG3zYsOF0fc=";
})
];
nativeBuildInputs = [ pandoc pkg-config makeWrapper ];
buildInputs = [
curl openssl tpm2-tss libuuid


@ -556,6 +556,7 @@ mapAliases ({
fuse_exfat = throw "'fuse_exfat' has been renamed to/replaced by 'exfat'"; # Converted to throw 2022-02-22
fuseki = throw "'fuseki' has been renamed to/replaced by 'apache-jena-fuseki'"; # Converted to throw 2022-02-22
fuse2fs = if stdenv.isLinux then e2fsprogs.fuse2fs else null; # Added 2022-03-27 preserve, reason: convenience, arch has a package named fuse2fs too.
fx_cast_bridge = fx-cast-bridge; # added 2023-07-26
fwupdate = throw "fwupdate was merged into fwupd"; # Added 2020-05-19
fcitx = throw "fcitx is deprecated, please use fcitx5 instead."; # Added 2023-03-13
@ -1803,6 +1804,13 @@ mapAliases ({
wavesurfer = throw "wavesurfer has been removed: depended on snack which has been removed"; # Added 2022-04-21
way-cooler = throw "way-cooler is abandoned by its author: https://way-cooler.org/blog/2020/01/09/way-cooler-post-mortem.html"; # Added 2020-01-13
wayfireApplications-unwrapped = throw ''
'wayfireApplications-unwrapped.wayfire' has been renamed to/replaced by 'wayfire'
'wayfireApplications-unwrapped.wayfirePlugins' has been renamed to/replaced by 'wayfirePlugins'
'wayfireApplications-unwrapped.wcm' has been renamed to/replaced by 'wayfirePlugins.wcm'
'wayfireApplications-unwrapped.wlroots' has been removed
''; # Added 2023-07-29
wcm = throw "'wcm' has been renamed to/replaced by 'wayfirePlugins.wcm'"; # Added 2023-07-29
webbrowser = throw "webbrowser was removed because it's unmaintained upstream and was marked as broken in nixpkgs for over a year"; # Added 2022-03-21
webkit = throw "'webkit' has been renamed to/replaced by 'webkitgtk'"; # Converted to throw 2022-02-22
webkitgtk_5_0 = throw "'webkitgtk_5_0' has been superseded by 'webkitgtk_6_0'"; # Added 2023-02-25

View File

@ -5284,7 +5284,7 @@ with pkgs;
inherit (darwin.apple_sdk.frameworks) DiskArbitration;
};
fx_cast_bridge = callPackage ../tools/misc/fx_cast { };
fx-cast-bridge = callPackage ../tools/misc/fx-cast-bridge { };
fzf = callPackage ../tools/misc/fzf { };
@ -15583,145 +15583,41 @@ with pkgs;
extraPackages = [];
};
gcc48 = lowPrio (wrapCC (callPackage ../development/compilers/gcc/4.8 {
inherit noSysDirs;
reproducibleBuild = true;
profiledCompiler = false;
libcCross = if stdenv.targetPlatform != stdenv.buildPlatform then libcCross else null;
threadsCross = lib.optionalAttrs (stdenv.targetPlatform != stdenv.buildPlatform) (threadsCrossFor "4.8");
isl = if !stdenv.isDarwin then isl_0_14 else null;
cloog = if !stdenv.isDarwin then cloog else null;
texinfo = texinfo5; # doesn't validate since 6.1 -> 6.3 bump
}));
gcc49 = lowPrio (wrapCC (callPackage ../development/compilers/gcc/4.9 {
inherit noSysDirs;
reproducibleBuild = true;
profiledCompiler = false;
libcCross = if stdenv.targetPlatform != stdenv.buildPlatform then libcCross else null;
threadsCross = lib.optionalAttrs (stdenv.targetPlatform != stdenv.buildPlatform) (threadsCrossFor "4.9");
isl = if !stdenv.isDarwin then isl_0_11 else null;
cloog = if !stdenv.isDarwin then cloog_0_18_0 else null;
# Build fails on Darwin with clang
stdenv = if stdenv.isDarwin then gccStdenv else stdenv;
}));
gcc6 = lowPrio (wrapCC (callPackage ../development/compilers/gcc/6 {
inherit noSysDirs;
reproducibleBuild = true;
profiledCompiler = false;
libcCross = if stdenv.targetPlatform != stdenv.buildPlatform then libcCross else null;
threadsCross = lib.optionalAttrs (stdenv.targetPlatform != stdenv.buildPlatform) (threadsCrossFor "6");
# gcc 10 is too strict to cross compile gcc <= 8
stdenv = if (stdenv.targetPlatform != stdenv.buildPlatform) && stdenv.cc.isGNU then gcc7Stdenv else stdenv;
isl = if stdenv.isDarwin
then null
else if stdenv.targetPlatform.isRedox
then isl_0_17
else isl_0_14;
}));
gcc7 = lowPrio (wrapCC (callPackage ../development/compilers/gcc/7 {
inherit noSysDirs;
reproducibleBuild = true;
profiledCompiler = false;
libcCross = if stdenv.targetPlatform != stdenv.buildPlatform then libcCross else null;
threadsCross = lib.optionalAttrs (stdenv.targetPlatform != stdenv.buildPlatform) (threadsCrossFor "7");
# gcc 10 is too strict to cross compile gcc <= 8
stdenv = if (stdenv.targetPlatform != stdenv.buildPlatform) && stdenv.cc.isGNU then gcc7Stdenv else stdenv;
isl = if !stdenv.isDarwin then isl_0_17 else null;
}));
gcc8 = lowPrio (wrapCC (callPackage ../development/compilers/gcc/8 {
inherit noSysDirs;
reproducibleBuild = true;
profiledCompiler = false;
libcCross = if stdenv.targetPlatform != stdenv.buildPlatform then libcCross else null;
threadsCross = lib.optionalAttrs (stdenv.targetPlatform != stdenv.buildPlatform) (threadsCrossFor "8");
# gcc 10 is too strict to cross compile gcc <= 8
stdenv = if (stdenv.targetPlatform != stdenv.buildPlatform) && stdenv.cc.isGNU then gcc7Stdenv else stdenv;
isl = if !stdenv.isDarwin then isl_0_17 else null;
}));
gcc9 = lowPrio (wrapCC (callPackage ../development/compilers/gcc/9 {
inherit noSysDirs;
reproducibleBuild = true;
profiledCompiler = false;
libcCross = if stdenv.targetPlatform != stdenv.buildPlatform then libcCross else null;
threadsCross = lib.optionalAttrs (stdenv.targetPlatform != stdenv.buildPlatform) (threadsCrossFor "9");
isl = if !stdenv.isDarwin then isl_0_20 else null;
}));
gcc10 = lowPrio (wrapCC (callPackage ../development/compilers/gcc/10 {
inherit noSysDirs;
reproducibleBuild = true;
profiledCompiler = false;
libcCross = if stdenv.targetPlatform != stdenv.buildPlatform then libcCross else null;
threadsCross = lib.optionalAttrs (stdenv.targetPlatform != stdenv.buildPlatform) (threadsCrossFor "10");
isl = if !stdenv.isDarwin then isl_0_20 else null;
}));
gcc11 = lowPrio (wrapCC (callPackage ../development/compilers/gcc/11 {
inherit noSysDirs;
reproducibleBuild = true;
profiledCompiler = false;
libcCross = if stdenv.targetPlatform != stdenv.buildPlatform then libcCross else null;
threadsCross = lib.optionalAttrs (stdenv.targetPlatform != stdenv.buildPlatform) (threadsCrossFor "11");
isl = if !stdenv.isDarwin then isl_0_20 else null;
}));
gcc12 = lowPrio (wrapCC (callPackage ../development/compilers/gcc/12 {
inherit noSysDirs;
reproducibleBuild = true;
profiledCompiler = false;
libcCross = if stdenv.targetPlatform != stdenv.buildPlatform then libcCross else null;
threadsCross = lib.optionalAttrs (stdenv.targetPlatform != stdenv.buildPlatform) (threadsCrossFor "12");
isl = if !stdenv.isDarwin then isl_0_20 else null;
}));
gcc13 = lowPrio (wrapCC (callPackage ../development/compilers/gcc/13 {
inherit noSysDirs;
reproducibleBuild = true;
profiledCompiler = false;
libcCross = if stdenv.targetPlatform != stdenv.buildPlatform then libcCross else null;
threadsCross = if stdenv.targetPlatform != stdenv.buildPlatform then threadsCrossFor "13" else { };
isl = if !stdenv.isDarwin then isl_0_20 else null;
}));
# This expression will be pushed into pkgs/development/compilers/gcc/common
# once the top-level gcc/${version}/default.nix files are deduplicated.
inherit
(lib.listToAttrs (map (version:
let atLeast = lib.versionAtLeast version;
attrName = "gcc${lib.replaceStrings ["."] [""] version}";
pkg = lowPrio (wrapCC (callPackage (../development/compilers/gcc + "/${version}") ({
inherit noSysDirs;
reproducibleBuild = true;
profiledCompiler = false;
libcCross = if stdenv.targetPlatform != stdenv.buildPlatform then libcCross else null;
threadsCross = if stdenv.targetPlatform != stdenv.buildPlatform then threadsCrossFor version else { };
isl = if stdenv.isDarwin then null
else if atLeast "9" then isl_0_20
else if atLeast "7" then isl_0_17
else if atLeast "6" then (if stdenv.targetPlatform.isRedox then isl_0_17 else isl_0_14)
else if atLeast "4.9" then isl_0_11
else /* "4.8" */ isl_0_14;
} // lib.optionalAttrs (version == "4.8") {
texinfo = texinfo5; # doesn't validate since 6.1 -> 6.3 bump
} // lib.optionalAttrs (version == "4.9") {
# Build fails on Darwin with clang
stdenv = if stdenv.isDarwin then gccStdenv else stdenv;
} // lib.optionalAttrs (!(atLeast "6")) {
cloog = if stdenv.isDarwin
then null
else if atLeast "4.9" then cloog_0_18_0
else /* 4.8 */ cloog;
} // lib.optionalAttrs (atLeast "6" && !(atLeast "9")) {
# gcc 10 is too strict to cross compile gcc <= 8
stdenv = if (stdenv.targetPlatform != stdenv.buildPlatform) && stdenv.cc.isGNU then gcc7Stdenv else stdenv;
})));
in lib.nameValuePair attrName pkg
) [ "4.8" "4.9" "6" "7" "8" "9" "10" "11" "12" "13" ]))
gcc48 gcc49 gcc6 gcc7 gcc8 gcc9 gcc10 gcc11 gcc12 gcc13;
gcc_latest = gcc13;
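
Aside (an editor's sketch, not part of the upstream diff): the deduplicated expression above hinges on lib.listToAttrs and lib.nameValuePair turning a list of version strings into an attribute set of compilers. A minimal evaluable illustration of that pattern, with a placeholder builder standing in for the real lowPrio/wrapCC/callPackage machinery:

  let
    lib = import <nixpkgs/lib>;
    # stand-in for the real compiler derivation
    mkGcc = version: "gcc ${version} (placeholder)";
    attrNameFor = version: "gcc${lib.replaceStrings ["."] [""] version}";
  in
    lib.listToAttrs (map
      (version: lib.nameValuePair (attrNameFor version) (mkGcc version))
      [ "4.8" "12" "13" ])
    # => { gcc48 = "gcc 4.8 (placeholder)"; gcc12 = ...; gcc13 = ...; }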
@ -34802,6 +34698,8 @@ with pkgs;
rsync = callPackage ../applications/networking/sync/rsync (config.rsync or {});
rrsync = callPackage ../applications/networking/sync/rsync/rrsync.nix { };
rtfm = callPackage ../applications/misc/rtfm { };
rtl_433 = callPackage ../applications/radio/rtl_433 { };
rtl-ais = callPackage ../applications/radio/rtl-ais { };

View File

@ -48,6 +48,11 @@ mapAliases ({
asyncio-nats-client = nats-py; # added 2022-02-08
awkward0 = throw "awkward0 has been removed, use awkward instead"; # added 2022-12-13
Babel = babel; # added 2022-05-06
backports_csv = throw "backports_csv has been removed, since we no longer need to backport to python2"; # added 2023-07-28
backports_functools_lru_cache = throw "backports_functools_lru_cache has been removed, since we no longer need to backport to python3.2"; # added 2023-07-28
backports_tempfile = throw "backports_tempfile has been removed, since we no longer need to backport to python3.3"; # added 2023-07-28
backports_unittest-mock = throw "backports_unittest-mock has been removed, since we no longer need to backport to python3.2"; # added 2023-07-28
backports_weakref = throw "backports_weakref has been removed, since we no longer need to backport to python3.3"; # added 2023-07-28
bedup = throw "bedup was removed because it was broken and abandoned upstream"; # added 2023-02-04
bitcoin-price-api = throw "bitcoin-price-api has been removed; it used the setuptools 2to3 translation feature, which was removed in setuptools 58"; # added 2022-02-15
BlinkStick = blinkstick; # added 2023-02-19
@ -378,4 +383,5 @@ mapAliases ({
zc-buildout221 = zc-buildout; # added 2021-07-21
zc_buildout_nix = throw "zc_buildout_nix was pinned to a version no longer compatible with other modules";
zope_broken = throw "zope_broken has been removed because it is obsolete and not needed in zodb>=3.10"; # added 2023-07-26
zope_component = zope-component; # added 2023-07-28
})
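
Aside (an editor's sketch, not part of the upstream diff): entries in these alias files follow one pattern: a rename assigns the old attribute to the new value, while a removal assigns a throw that, thanks to Nix's laziness, fires only when the alias is actually evaluated. A simplified stand-in for the internal mapAliases helper makes the semantics visible:

  let
    # simplified stand-in for the internal mapAliases helper
    mapAliases = aliases: aliases;
    newPackage = "the replacement package";
    aliases = mapAliases {
      oldName = newPackage;                    # rename: old attribute forwards to the new one
      gone = throw "'gone' has been removed";  # removal: aborts only if evaluated
    };
  in
    aliases.oldName  # => "the replacement package"; touching aliases.gone would throw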

View File

@ -1201,24 +1201,14 @@ self: super: with self; {
backports-cached-property = callPackage ../development/python-modules/backports-cached-property { };
backports_csv = callPackage ../development/python-modules/backports_csv { };
backports-datetime-fromisoformat = callPackage ../development/python-modules/backports-datetime-fromisoformat { };
backports-entry-points-selectable = callPackage ../development/python-modules/backports-entry-points-selectable { };
backports_functools_lru_cache = callPackage ../development/python-modules/backports_functools_lru_cache { };
backports_shutil_get_terminal_size = callPackage ../development/python-modules/backports_shutil_get_terminal_size { };
backports-shutil-which = callPackage ../development/python-modules/backports-shutil-which { };
backports_tempfile = callPackage ../development/python-modules/backports_tempfile { };
backports_unittest-mock = callPackage ../development/python-modules/backports_unittest-mock { };
backports_weakref = callPackage ../development/python-modules/backports_weakref { };
backports-zoneinfo = callPackage ../development/python-modules/backports-zoneinfo { };
bacpypes = callPackage ../development/python-modules/bacpypes { };
@ -13916,7 +13906,7 @@ self: super: with self; {
zope-cachedescriptors = callPackage ../development/python-modules/zope-cachedescriptors { };
zope_component = callPackage ../development/python-modules/zope_component { };
zope-component = callPackage ../development/python-modules/zope-component { };
zope_configuration = callPackage ../development/python-modules/zope_configuration { };