Merge master into staging-next
commit e061b757fe
@@ -4,6 +4,12 @@
In this document and the related Nix expressions, we use the term _BEAM_ to describe the environment. BEAM is the name of the Erlang virtual machine and, from a packaging perspective, all languages that run on the BEAM are interchangeable. Anything that varies, such as the build system, is transparent to users of a given BEAM package, so we make no distinction.

## Available versions and deprecation schedule

### Elixir

nixpkgs follows the [official Elixir deprecation schedule](https://hexdocs.pm/elixir/compatibility-and-deprecations.html) and keeps the last five released versions of Elixir available.

## Structure {#beam-structure}

All BEAM-related expressions are available via the top-level `beam` attribute, which includes:
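The hunk ends before the list itself; as a minimal sketch of how the `beam` attribute is typically used (the path `beam.packages.erlangR23.elixir` appears in a comment elsewhere in this commit, and the NixOS module wrapper around it is only illustrative):

```nix
# Sketch only: installs Elixir built against the Erlang/OTP 23 package set.
{ pkgs, ... }:

{
  environment.systemPackages = [
    pkgs.beam.packages.erlangR23.elixir
  ];
}
```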
5
lib/flake.nix
Normal file
@@ -0,0 +1,5 @@
{
  description = "Library of low-level helper functions for nix expressions.";

  outputs = { self }: { lib = import ./lib; };
}
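A hedged sketch of how the new `lib` flake above could be consumed from another flake; the `?dir=lib` subdirectory reference and the attribute names of the consuming flake are assumptions, not part of this commit:

```nix
# Sketch only: uses the `lib` output exposed by lib/flake.nix.
{
  inputs.nixpkgs-lib.url = "github:NixOS/nixpkgs?dir=lib";  # assumed flake reference

  outputs = { self, nixpkgs-lib }: {
    # Functions from the nixpkgs library are reachable via `nixpkgs-lib.lib`.
    example = nixpkgs-lib.lib.concatStringsSep " " [ "hello" "world" ];
  };
}
```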
@@ -48,6 +48,7 @@ let
    "node"
    "openldap"
    "openvpn"
    "pihole"
    "postfix"
    "postgres"
    "py-air-control"
@@ -0,0 +1,74 @@
{ config, lib, pkgs, options }:

with lib;

let
  cfg = config.services.prometheus.exporters.pihole;
in
{
  port = 9617;
  extraOpts = {
    apiToken = mkOption {
      type = types.str;
      default = "";
      example = "580a770cb40511eb85290242ac130003580a770cb40511eb85290242ac130003";
      description = ''
        pi-hole API token that can be used instead of a password
      '';
    };
    interval = mkOption {
      type = types.str;
      default = "10s";
      example = "30s";
      description = ''
        How often to scrape new data
      '';
    };
    password = mkOption {
      type = types.str;
      default = "";
      example = "password";
      description = ''
        The password used to log in to pihole. An API token can be used instead.
      '';
    };
    piholeHostname = mkOption {
      type = types.str;
      default = "pihole";
      example = "127.0.0.1";
      description = ''
        Hostname or address where the pihole web interface can be found
      '';
    };
    piholePort = mkOption {
      type = types.port;
      default = 80;
      example = 443;
      description = ''
        The port the pihole web interface is reachable on
      '';
    };
    protocol = mkOption {
      type = types.enum [ "http" "https" ];
      default = "http";
      example = "https";
      description = ''
        The protocol used to connect to pihole
      '';
    };
  };
  serviceOpts = {
    serviceConfig = {
      ExecStart = ''
        ${pkgs.bash}/bin/bash -c "${pkgs.prometheus-pihole-exporter}/bin/pihole-exporter \
          -interval ${cfg.interval} \
          ${optionalString (cfg.apiToken != "") "-pihole_api_token ${cfg.apiToken}"} \
          -pihole_hostname ${cfg.piholeHostname} \
          ${optionalString (cfg.password != "") "-pihole_password ${cfg.password}"} \
          -pihole_port ${toString cfg.piholePort} \
          -pihole_protocol ${cfg.protocol} \
          -port ${toString cfg.port}"
      '';
    };
  };
}
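A hedged usage sketch for the exporter module above: the `enable` switch comes from the shared Prometheus exporters framework rather than from this file, and the host, port, and token values are placeholders.

```nix
# Sketch only: enables the pihole exporter with illustrative settings.
{
  services.prometheus.exporters.pihole = {
    enable = true;                      # provided by the generic exporters module
    piholeHostname = "127.0.0.1";
    piholePort = 80;
    protocol = "http";
    apiToken = "<pihole-api-token>";    # placeholder, not a real token
    interval = "30s";
  };
}
```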
@@ -121,7 +121,6 @@ in {
      EnvironmentFile = [ configFile ] ++ optional (cfg.environmentFile != null) cfg.environmentFile;
      ExecStart = "${bitwarden_rs}/bin/bitwarden_rs";
      LimitNOFILE = "1048576";
      LimitNPROC = "64";
      PrivateTmp = "true";
      PrivateDevices = "true";
      ProtectHome = "true";
@@ -1,41 +1,46 @@
{ lib, stdenv, fetchFromGitHub, cmake, git, pkg-config, python3
{ stdenv, lib, fetchurl, fetchFromGitHub, cmake, git, pkg-config, python3
, cairo, libsndfile, libxcb, libxkbcommon, xcbutil, xcbutilcursor, xcbutilkeysyms, zenity
, curl, rsync
}:

stdenv.mkDerivation rec {
  pname = "surge";
  version = "1.8.1";
  version = "1.9.0";

  src = fetchFromGitHub {
    owner = "surge-synthesizer";
    repo = pname;
    rev = "release_${version}";
    sha256 = "0lla860g7zgn9n1zgy14g4j72d5n5y7isyxz2w5xy2fzdpdg24ql";
    leaveDotGit = true; # for SURGE_VERSION
    fetchSubmodules = true;
  src = fetchurl {
    url = "https://github.com/surge-synthesizer/releases/releases/download/${version}/SurgeSrc_${version}.tgz";
    sha256 = "00af4lfcipl0rn0dn4gfipx7nbk8ym1mrmji8v0ar98frsrpxg4k";
  };

  extraContent = fetchFromGitHub {
    owner = "surge-synthesizer";
    repo = "surge-extra-content";
    # rev from: https://github.com/surge-synthesizer/surge/blob/release_1.8.1/cmake/stage-extra-content.cmake#L6
    # or: https://github.com/surge-synthesizer/surge/blob/main/cmake/stage-extra-content.cmake
    # SURGE_EXTRA_CONTENT_HASH
    rev = "afc591cc06d9adc3dc8dc515a55c66873fa10296";
    sha256 = "1wqv86l70nwlrb10n47rib80f47a96j9qqg8w5dv46ys1sq2nz7z";
  };
  nativeBuildInputs = [ cmake git pkg-config python3 ];
  buildInputs = [ cairo libsndfile libxcb libxkbcommon xcbutil xcbutilcursor xcbutilkeysyms zenity ];
  buildInputs = [ cairo libsndfile libxcb libxkbcommon xcbutil xcbutilcursor xcbutilkeysyms zenity curl rsync ];

  postPatch = ''
    substituteInPlace src/common/SurgeStorage.cpp --replace "/usr/share/Surge" "$out/share/surge"
    substituteInPlace src/linux/UserInteractionsLinux.cpp --replace '"zenity' '"${zenity}/bin/zenity'
    substituteInPlace vstgui.surge/vstgui/lib/platform/linux/x11fileselector.cpp --replace /usr/bin/zenity ${zenity}/bin/zenity
    patchShebangs scripts/linux/emit-vector-piggy
    patchShebangs scripts/linux/generate-lv2-ttl
    patchShebangs scripts/linux/
    cp -r $extraContent/Skins/ resources/data/skins
  '';

  installPhase = ''
    mkdir -p $out/lib/lv2 $out/lib/vst3 $out/share/surge
    cp -r surge_products/Surge.lv2 $out/lib/lv2/
    cp -r surge_products/Surge.vst3 $out/lib/vst3/
    cp -r ../resources/data/* $out/share/surge/
    cd ..
    cmake --build build --config Release --target install-everything-global
  '';

  doInstallCheck = true;
  installCheckPhase = ''
    cd ..
    export HOME=$(mktemp -d)
    export SURGE_DISABLE_NETWORK_TESTS=TRUE
    build/surge-headless
  '';
@@ -40,7 +40,12 @@ let
  erlang-ls = callPackage ./erlang-ls { };

  # BEAM-based languages.
  elixir = elixir_1_11;
  elixir = elixir_1_12;

  elixir_1_12 = lib'.callElixir ../interpreters/elixir/1.12.nix {
    inherit erlang;
    debugInfo = true;
  };

  elixir_1_11 = lib'.callElixir ../interpreters/elixir/1.11.nix {
    inherit erlang;
@@ -62,6 +67,8 @@ let
    debugInfo = true;
  };

  # Remove old versions of Elixir when support for them fades out:
  # https://hexdocs.pm/elixir/compatibility-and-deprecations.html
  elixir_1_7 = lib'.callElixir ../interpreters/elixir/1.7.nix {
    inherit erlang;
    debugInfo = true;
@@ -69,9 +76,6 @@

  elixir_ls = callPackage ./elixir_ls.nix { inherit elixir fetchMixDeps mixRelease; };

  # Remove old versions of Elixir when support for them fades out:
  # https://hexdocs.pm/elixir/compatibility-and-deprecations.html

  lfe = lfe_1_3;
  lfe_1_2 = lib'.callLFE ../interpreters/lfe/1.2.nix { inherit erlang buildRebar3 buildHex; };
  lfe_1_3 = lib'.callLFE ../interpreters/lfe/1.3.nix { inherit erlang buildRebar3 buildHex; };
9
pkgs/development/interpreters/elixir/1.12.nix
Normal file
@@ -0,0 +1,9 @@
{ mkDerivation }:

# How to obtain `sha256`:
# nix-prefetch-url --unpack https://github.com/elixir-lang/elixir/archive/v${version}.tar.gz
mkDerivation {
  version = "1.12.0";
  sha256 = "sha256-Jnxi0vFYMnwEgTqkPncZbj+cR57hjvH77RCseJdUoFs=";
  minimumOTPVersion = "22";
}
@@ -1,5 +1,14 @@
{ pkgs, lib, stdenv, fetchFromGitHub, erlang, makeWrapper,
  coreutils, curl, bash, debugInfo ? false }:
{ pkgs
, lib
, stdenv
, fetchFromGitHub
, erlang
, makeWrapper
, coreutils
, curl
, bash
, debugInfo ? false
}:

{ baseName ? "elixir"
, version
@@ -13,62 +22,58 @@ let
  inherit (lib) getVersion versionAtLeast optional;

in
assert versionAtLeast (getVersion erlang) minimumOTPVersion;
assert versionAtLeast (getVersion erlang) minimumOTPVersion;

stdenv.mkDerivation ({
  name = "${baseName}-${version}";
stdenv.mkDerivation ({
  name = "${baseName}-${version}";

  inherit src version;
  inherit src version debugInfo;

  nativeBuildInputs = [ makeWrapper ];
  buildInputs = [ erlang ];
  nativeBuildInputs = [ makeWrapper ];
  buildInputs = [ erlang ];

  LANG = "C.UTF-8";
  LC_TYPE = "C.UTF-8";
  LANG = "C.UTF-8";
  LC_TYPE = "C.UTF-8";

  setupHook = ./setup-hook.sh;
  buildFlags = optional debugInfo "ERL_COMPILER_OPTIONS=debug_info";

  inherit debugInfo;
  preBuild = ''
    patchShebangs lib/elixir/generate_app.escript || true

  buildFlags = optional debugInfo "ERL_COMPILER_OPTIONS=debug_info";
    substituteInPlace Makefile \
      --replace "/usr/local" $out
  '';

  preBuild = ''
    patchShebangs lib/elixir/generate_app.escript || true
  postFixup = ''
    # Elixir binaries are shell scripts which run erl. Add some stuff
    # to PATH so the scripts can run without problems.

    substituteInPlace Makefile \
      --replace "/usr/local" $out
    for f in $out/bin/*; do
      b=$(basename $f)
      if [ "$b" = mix ]; then continue; fi
      wrapProgram $f \
        --prefix PATH ":" "${lib.makeBinPath [ erlang coreutils curl bash ]}"
    done

    substituteInPlace $out/bin/mix \
      --replace "/usr/bin/env elixir" "${coreutils}/bin/env elixir"
  '';

  pos = builtins.unsafeGetAttrPos "sha256" args;
  meta = with lib; {
    homepage = "https://elixir-lang.org/";
    description = "A functional, meta-programming aware language built on top of the Erlang VM";

    longDescription = ''
      Elixir is a functional, meta-programming aware language built on
      top of the Erlang VM. It is a dynamic language with flexible
      syntax and macro support that leverages Erlang's abilities to
      build concurrent, distributed and fault-tolerant applications
      with hot code upgrades.
    '';

  postFixup = ''
    # Elixir binaries are shell scripts which run erl. Add some stuff
    # to PATH so the scripts can run without problems.

    for f in $out/bin/*; do
      b=$(basename $f)
      if [ "$b" = mix ]; then continue; fi
      wrapProgram $f \
        --prefix PATH ":" "${lib.makeBinPath [ erlang coreutils curl bash ]}"
    done

    substituteInPlace $out/bin/mix \
      --replace "/usr/bin/env elixir" "${coreutils}/bin/env elixir"
  '';

  pos = builtins.unsafeGetAttrPos "sha256" args;
  meta = with lib; {
    homepage = "https://elixir-lang.org/";
    description = "A functional, meta-programming aware language built on top of the Erlang VM";

    longDescription = ''
      Elixir is a functional, meta-programming aware language built on
      top of the Erlang VM. It is a dynamic language with flexible
      syntax and macro support that leverages Erlang's abilities to
      build concurrent, distributed and fault-tolerant applications
      with hot code upgrades.
    '';

    license = licenses.epl10;
    platforms = platforms.unix;
    maintainers = teams.beam.members;
  };
})
    license = licenses.epl10;
    platforms = platforms.unix;
    maintainers = teams.beam.members;
  };
})
@@ -1,5 +0,0 @@
addErlLibPath() {
  addToSearchPath ERL_LIBS $1/lib/elixir/lib
}

addEnvHooks "$hostOffset" addErlLibPath
@@ -80,6 +80,7 @@ buildPythonPackage rec {
    "test_retry_dns_error"
    "test_custom_asyncio_loop_enabled_true"
    "test_custom_loop_asyncio"
    "FileFeedStoragePreFeedOptionsTest" # https://github.com/scrapy/scrapy/issues/5157
  ] ++ lib.optionals stdenv.isDarwin [
    "test_xmliter_encoding"
    "test_download"
@@ -30,6 +30,10 @@
, keras-applications
, keras-preprocessing
, addOpenGLRunpath
, astunparse
, flatbuffers
, h5py
, typing-extensions
}:

# We keep this binary build for two reasons:
@@ -48,13 +52,14 @@ let

  variant = if cudaSupport then "-gpu" else "";
  pname = "tensorflow${variant}";

  metadataPatch = ./relax-dependencies-metadata.patch;
  patch = ./relax-dependencies.patch;
in buildPythonPackage {
  inherit pname;
  inherit (packages) version;
  format = "wheel";

  disabled = pythonAtLeast "3.8";
  disabled = pythonAtLeast "3.9";

  src = let
    pyVerNoDot = lib.strings.stringAsChars (x: if x == "." then "" else x) python.pythonVersion;
@@ -64,6 +69,9 @@ in buildPythonPackage {
  in fetchurl packages.${key};

  propagatedBuildInputs = [
    astunparse
    flatbuffers
    typing-extensions
    protobuf
    numpy
    scipy
@@ -80,6 +88,7 @@ in buildPythonPackage {
    tensorflow-tensorboard_2
    keras-applications
    keras-preprocessing
    h5py
  ] ++ lib.optional (!isPy3k) mock
    ++ lib.optionals (pythonOlder "3.4") [ backports_weakref ];

@@ -93,24 +102,15 @@ in buildPythonPackage {

    pushd dist

    # Unpack the wheel file.
    wheel unpack --dest unpacked ./*.whl

    # Tensorflow wheels tightly constrain the versions of gast, tensorflow-estimator and scipy.
    # This code relaxes these requirements:
    substituteInPlace ./unpacked/tensorflow*/tensorflow_core/tools/pip_package/setup.py \
      --replace "tensorflow_estimator >= 2.1.0rc0, < 2.2.0" "tensorflow_estimator" \
      --replace "tensorboard >= 2.1.0, < 2.2.0" "tensorboard" \
      --replace "gast == 0.2.2" "gast" \
      --replace "scipy == 1.2.2" "scipy"

    substituteInPlace ./unpacked/tensorflow*/tensorflow*.dist-info/METADATA \
      --replace "gast (==0.2.2)" "gast" \
      --replace "tensorflow-estimator (<2.2.0,>=2.1.0rc0)" "tensorflow_estimator" \
      --replace "tensorboard (<2.2.0,>=2.1.0)" "tensorboard" \
      --replace "scipy (==1.4.1)" "scipy"

    # Pack the wheel file back up.
    (
      cd unpacked/tensorflow*
      # relax too strict versions in setup.py
      patch -p 1 < ${patch}
      cd *.dist-info
      # relax too strict versions in *.dist-info/METADATA
      patch -p 3 < ${metadataPatch}
    )
    wheel pack ./unpacked/tensorflow*

    popd
@@ -143,14 +143,19 @@ in buildPythonPackage {
    # TODO: Create this list programmatically, and remove paths that aren't
    # actually needed.
    rrPathArr=(
      "$out/${python.sitePackages}/tensorflow_core/"
      "$out/${python.sitePackages}/tensorflow_core/compiler/tf2tensorrt/"
      "$out/${python.sitePackages}/tensorflow_core/compiler/tf2xla/ops/"
      "$out/${python.sitePackages}/tensorflow_core/lite/experimental/microfrontend/python/ops/"
      "$out/${python.sitePackages}/tensorflow_core/lite/python/interpreter_wrapper/"
      "$out/${python.sitePackages}/tensorflow_core/lite/python/optimize/"
      "$out/${python.sitePackages}/tensorflow_core/python/"
      "$out/${python.sitePackages}/tensorflow_core/python/framework/"
      "$out/${python.sitePackages}/tensorflow/"
      "$out/${python.sitePackages}/tensorflow/core/kernels"
      "$out/${python.sitePackages}/tensorflow/compiler/tf2tensorrt/"
      "$out/${python.sitePackages}/tensorflow/compiler/tf2xla/ops/"
      "$out/${python.sitePackages}/tensorflow/lite/experimental/microfrontend/python/ops/"
      "$out/${python.sitePackages}/tensorflow/lite/python/interpreter_wrapper/"
      "$out/${python.sitePackages}/tensorflow/lite/python/optimize/"
      "$out/${python.sitePackages}/tensorflow/python/"
      "$out/${python.sitePackages}/tensorflow/python/framework/"
      "$out/${python.sitePackages}/tensorflow/python/autograph/impl/testing"
      "$out/${python.sitePackages}/tensorflow/python/data/experimental/service"
      "$out/${python.sitePackages}/tensorflow/python/framework"
      "$out/${python.sitePackages}/tensorflow/python/profiler/internal"
      "${rpath}"
    )

@@ -189,8 +194,5 @@ in buildPythonPackage {
    license = licenses.asl20;
    maintainers = with maintainers; [ jyp abbradar cdepillabout ];
    platforms = [ "x86_64-linux" "x86_64-darwin" ];
    # Python 2.7 build uses different string encoding.
    # See https://github.com/NixOS/nixpkgs/pull/37044#issuecomment-373452253
    broken = stdenv.isDarwin && !isPy3k;
  };
}
@@ -1,51 +1,39 @@
{
  version = "2.1.0";
  linux_py_27_gpu = {
    url = "https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-2.1.0-cp27-cp27mu-manylinux2010_x86_64.whl";
    sha256 = "17lnhr7vdrls68c79n3sah5rpd0q1x2v5m84azvlyxxh2wpypfmb";
  };
  linux_py_27_cpu = {
    url = "https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow_cpu-2.1.0-cp27-cp27mu-manylinux2010_x86_64.whl";
    sha256 = "10lz3i4pcpgqrcbjmxm0n7k1gsqlpna3kdid902j2fy060cpi93z";
  };
  linux_py_35_gpu = {
    url = "https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-2.1.0-cp35-cp35m-manylinux2010_x86_64.whl";
    sha256 = "09s081n08dpmflwgir3zwzfijfpmahbh2gy5fn5bv5ll86g1szsy";
  };
  linux_py_35_cpu = {
    url = "https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow_cpu-2.1.0-cp35-cp35m-manylinux2010_x86_64.whl";
    sha256 = "1aa7v9fnvx03hqvhl3x3xcn41qy6qxw5xybg54ifjvvicp455c8l";
  };
  linux_py_36_gpu = {
    url = "https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-2.1.0-cp36-cp36m-manylinux2010_x86_64.whl";
    sha256 = "1dqp080ljbl9v3115vjp63ls0fimiwym6zxyanyhrlk8kwsq20zc";
  };
  version = "2.4.0";
  linux_py_36_cpu = {
    url = "https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow_cpu-2.1.0-cp36-cp36m-manylinux2010_x86_64.whl";
    sha256 = "133z8anx7xm9rr5i9s9dwnp1wf06nr6s7q1lbs4lxpk6kn9nl480";
  };
  linux_py_37_gpu = {
    url = "https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-2.1.0-cp37-cp37m-manylinux2010_x86_64.whl";
    sha256 = "0yabl3xmcpr67w0zksqs3qc68nl9ax0vcd7w7b35nq8f65xl0ghy";
    url = "https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow_cpu-2.4.0-cp36-cp36m-manylinux2010_x86_64.whl";
    sha256 = "0pn0cjf50q0xsv6k0vihrz22kr392anznvdhxv80gk52c6lcgmsc";
  };
  linux_py_37_cpu = {
    url = "https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow_cpu-2.1.0-cp37-cp37m-manylinux2010_x86_64.whl";
    sha256 = "04gngbngyg7p1gwx1q89my0cl8j7lq4kknqh51s2ynrix71zvsy6";
    url = "https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow_cpu-2.4.0-cp37-cp37m-manylinux2010_x86_64.whl";
    sha256 = "0mdd83c0invqfy58qmpa3hk4yml5ic7wlwggyd5wpikadlv8vq89";
  };
  mac_py_27_cpu = {
    url = "https://storage.googleapis.com/tensorflow/mac/cpu/tensorflow-2.1.0-cp27-cp27m-macosx_10_9_x86_64.whl";
    sha256 = "1mprp72w5kk0lyjm2mh4lf57827xk3wsg28c4gizwm00ydfgacg6";
  linux_py_38_cpu = {
    url = "https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow_cpu-2.4.0-cp38-cp38-manylinux2010_x86_64.whl";
    sha256 = "1mm1yz9aj3v6fxfpxh7wy37rvsncr0b5y6glqlcxmhr6mqfp8k6d";
  };
  mac_py_35_cpu = {
    url = "https://storage.googleapis.com/tensorflow/mac/cpu/tensorflow-2.1.0-cp35-cp35m-macosx_10_6_intel.whl";
    sha256 = "1as7brf5ai6r7v1di9646jfrbnirpk2b0d1g29mn3shavb62kw8w";
  linux_py_36_gpu = {
    url = "https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-2.4.0-cp36-cp36m-manylinux2010_x86_64.whl";
    sha256 = "113iygiq2kmj97g0glhcqng6rhl3rrj1iqw5xj1d5hla8xjy8cfv";
  };
  linux_py_37_gpu = {
    url = "https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-2.4.0-cp37-cp37m-manylinux2010_x86_64.whl";
    sha256 = "19ap8xx2j5nbmnqv5rzf1ryfvw2fbs6bm0fxjqrvhc3jxys6yqqs";
  };
  linux_py_38_gpu = {
    url = "https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-2.4.0-cp38-cp38-manylinux2010_x86_64.whl";
    sha256 = "0ly3cinzj6j3b547sw8bd3p774khn3b14cgrj7nvfrz668d3f89x";
  };
  mac_py_36_cpu = {
    url = "https://storage.googleapis.com/tensorflow/mac/cpu/tensorflow-2.1.0-cp36-cp36m-macosx_10_9_x86_64.whl";
    sha256 = "1v1rw9kjrskhcq1yas4ly2yfnzf2i1pjh6qg6zixfbkpkw7sw3wc";
    url = "https://storage.googleapis.com/tensorflow/mac/cpu/tensorflow-2.4.0-cp36-cp36m-macosx_10_14_x86_64.whl";
    sha256 = "1b5ld1wj48l1i5s3vk8db5m578zdg4xfl0m1lc8w5lx1vi4cwsjp";
  };
  mac_py_37_cpu = {
    url = "https://storage.googleapis.com/tensorflow/mac/cpu/tensorflow-2.1.0-cp37-cp37m-macosx_10_9_x86_64.whl";
    sha256 = "1hh4n0d97mrq35cmmsrnlmcv9vlswsyjy368lj3pda3y9dvck3rf";
    url = "https://storage.googleapis.com/tensorflow/mac/cpu/tensorflow-2.4.0-cp37-cp37m-macosx_10_14_x86_64.whl";
    sha256 = "1mgfyjgcwvx5jzawrpfnbch5sqw7kpnzp35rfxj22kwdsl28r47r";
  };
  mac_py_38_cpu = {
    url = "https://storage.googleapis.com/tensorflow/mac/cpu/tensorflow-2.4.0-cp38-cp38-macosx_10_14_x86_64.whl";
    sha256 = "034qh0nk786wxzma58179g086x479c3pd9vi8v8p26grs6f2fm0p";
  };
}
@@ -1,23 +1,22 @@
#!/usr/bin/env bash

version=2.1.0
version="2.4.0"

bucket="https://storage.googleapis.com/tensorflow"

# List of binary wheels for Tensorflow. The most recent versions can be found
# on the following page:
# https://www.tensorflow.org/install/pip?lang=python3#package-location
url_and_key_list=(
  "linux_py_27_gpu https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-${version}-cp27-cp27mu-manylinux2010_x86_64.whl"
  "linux_py_27_cpu https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow_cpu-${version}-cp27-cp27mu-manylinux2010_x86_64.whl"
  "linux_py_35_gpu https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-${version}-cp35-cp35m-manylinux2010_x86_64.whl"
  "linux_py_35_cpu https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow_cpu-${version}-cp35-cp35m-manylinux2010_x86_64.whl"
  "linux_py_36_gpu https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-${version}-cp36-cp36m-manylinux2010_x86_64.whl"
  "linux_py_36_cpu https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow_cpu-${version}-cp36-cp36m-manylinux2010_x86_64.whl"
  "linux_py_37_gpu https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-${version}-cp37-cp37m-manylinux2010_x86_64.whl"
  "linux_py_37_cpu https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow_cpu-${version}-cp37-cp37m-manylinux2010_x86_64.whl"
  "mac_py_27_cpu https://storage.googleapis.com/tensorflow/mac/cpu/tensorflow-${version}-cp27-cp27m-macosx_10_9_x86_64.whl"
  "mac_py_35_cpu https://storage.googleapis.com/tensorflow/mac/cpu/tensorflow-${version}-cp35-cp35m-macosx_10_6_intel.whl"
  "mac_py_36_cpu https://storage.googleapis.com/tensorflow/mac/cpu/tensorflow-${version}-cp36-cp36m-macosx_10_9_x86_64.whl"
  "mac_py_37_cpu https://storage.googleapis.com/tensorflow/mac/cpu/tensorflow-${version}-cp37-cp37m-macosx_10_9_x86_64.whl"
  "linux_py_36_cpu $bucket/linux/cpu/tensorflow_cpu-${version}-cp36-cp36m-manylinux2010_x86_64.whl"
  "linux_py_37_cpu $bucket/linux/cpu/tensorflow_cpu-${version}-cp37-cp37m-manylinux2010_x86_64.whl"
  "linux_py_38_cpu $bucket/linux/cpu/tensorflow_cpu-${version}-cp38-cp38-manylinux2010_x86_64.whl"
  "linux_py_36_gpu $bucket/linux/gpu/tensorflow_gpu-${version}-cp36-cp36m-manylinux2010_x86_64.whl"
  "linux_py_37_gpu $bucket/linux/gpu/tensorflow_gpu-${version}-cp37-cp37m-manylinux2010_x86_64.whl"
  "linux_py_38_gpu $bucket/linux/gpu/tensorflow_gpu-${version}-cp38-cp38-manylinux2010_x86_64.whl"
  "mac_py_36_cpu $bucket/mac/cpu/tensorflow-${version}-cp36-cp36m-macosx_10_14_x86_64.whl"
  "mac_py_37_cpu $bucket/mac/cpu/tensorflow-${version}-cp37-cp37m-macosx_10_14_x86_64.whl"
  "mac_py_38_cpu $bucket/mac/cpu/tensorflow-${version}-cp38-cp38-macosx_10_14_x86_64.whl"
)

hashfile=binary-hashes.nix
@@ -0,0 +1,46 @@
diff -ur unpacked/tensorflow-2.4.0/tensorflow-2.4.0.dist-info/METADATA unpacked.new/tensorflow-2.4.0/tensorflow-2.4.0.dist-info/METADATA
--- unpacked/tensorflow-2.4.0/tensorflow-2.4.0.dist-info/METADATA	2021-05-06 23:51:40.298995191 -0700
+++ unpacked.new/tensorflow-2.4.0/tensorflow-2.4.0.dist-info/METADATA	2021-05-07 00:03:49.856882153 -0700
@@ -27,24 +27,24 @@
 Classifier: Topic :: Software Development :: Libraries
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Description-Content-Type: text/markdown
-Requires-Dist: absl-py (~=0.10)
-Requires-Dist: astunparse (~=1.6.3)
-Requires-Dist: flatbuffers (~=1.12.0)
-Requires-Dist: google-pasta (~=0.2)
-Requires-Dist: h5py (~=2.10.0)
-Requires-Dist: keras-preprocessing (~=1.1.2)
-Requires-Dist: numpy (~=1.19.2)
-Requires-Dist: opt-einsum (~=3.3.0)
-Requires-Dist: protobuf (>=3.9.2)
-Requires-Dist: six (~=1.15.0)
-Requires-Dist: termcolor (~=1.1.0)
-Requires-Dist: typing-extensions (~=3.7.4)
-Requires-Dist: wheel (~=0.35)
-Requires-Dist: wrapt (~=1.12.1)
-Requires-Dist: gast (==0.3.3)
-Requires-Dist: tensorboard (~=2.4)
-Requires-Dist: tensorflow-estimator (<2.5.0,>=2.4.0rc0)
-Requires-Dist: grpcio (~=1.32.0)
+Requires-Dist: absl-py
+Requires-Dist: astunparse
+Requires-Dist: flatbuffers
+Requires-Dist: google-pasta
+Requires-Dist: h5py
+Requires-Dist: keras-preprocessing
+Requires-Dist: numpy
+Requires-Dist: opt-einsum
+Requires-Dist: protobuf
+Requires-Dist: six
+Requires-Dist: termcolor
+Requires-Dist: typing-extensions
+Requires-Dist: wheel
+Requires-Dist: wrapt
+Requires-Dist: gast
+Requires-Dist: tensorboard
+Requires-Dist: tensorflow-estimator
+Requires-Dist: grpcio

 [](https://badge.fury.io/py/tensorflow)
 [](https://badge.fury.io/py/tensorflow)
22
pkgs/servers/monitoring/prometheus/pihole-exporter.nix
Normal file
@@ -0,0 +1,22 @@
{ lib, buildGoModule, fetchFromGitHub }:

buildGoModule rec {
  pname = "pihole-exporter";
  version = "0.0.11";

  src = fetchFromGitHub {
    owner = "eko";
    repo = pname;
    rev = "v${version}";
    sha256 = "sha256-SojEq6pedoq08wo/3zPHex7ex1QqSVIzZpBd49tLOjI=";
  };

  vendorSha256 = "sha256-LXgI9ioJgyhUiOCqRku0Q4enZF7q6MB0hYhPJlLusdc=";

  meta = with lib; {
    description = "Prometheus exporter for PI-Hole's Raspberry PI ad blocker";
    homepage = "https://github.com/eko/pihole-exporter";
    license = licenses.mit;
    maintainers = with maintainers; [ SuperSandro2000 ];
  };
}
@@ -12040,7 +12040,7 @@ in
  inherit (beam.interpreters)
    erlang erlangR24 erlangR23 erlangR22 erlangR21 erlangR20 erlangR19 erlangR18
    erlang_odbc erlang_javac erlang_odbc_javac erlang_basho_R16B02
    elixir elixir_1_11 elixir_1_10 elixir_1_9 elixir_1_8 elixir_1_7
    elixir elixir_1_12 elixir_1_11 elixir_1_10 elixir_1_9 elixir_1_8 elixir_1_7
    elixir_ls;

  erlang_nox = beam_nox.interpreters.erlang;
@@ -19559,6 +19559,7 @@ in
  prometheus-gitlab-ci-pipelines-exporter = callPackage ../servers/monitoring/prometheus/gitlab-ci-pipelines-exporter.nix { };
  prometheus-haproxy-exporter = callPackage ../servers/monitoring/prometheus/haproxy-exporter.nix { };
  prometheus-jitsi-exporter = callPackage ../servers/monitoring/prometheus/jitsi-exporter.nix { };
  prometheus-jmx-httpserver = callPackage ../servers/monitoring/prometheus/jmx-httpserver.nix { };
  prometheus-json-exporter = callPackage ../servers/monitoring/prometheus/json-exporter.nix { };
  prometheus-kea-exporter = callPackage ../servers/monitoring/prometheus/kea-exporter.nix { };
  prometheus-keylight-exporter = callPackage ../servers/monitoring/prometheus/keylight-exporter.nix { };
@@ -19576,6 +19577,7 @@ in
  prometheus-node-exporter = callPackage ../servers/monitoring/prometheus/node-exporter.nix { };
  prometheus-openldap-exporter = callPackage ../servers/monitoring/prometheus/openldap-exporter.nix { };
  prometheus-openvpn-exporter = callPackage ../servers/monitoring/prometheus/openvpn-exporter.nix { };
  prometheus-pihole-exporter = callPackage ../servers/monitoring/prometheus/pihole-exporter.nix { };
  prometheus-postfix-exporter = callPackage ../servers/monitoring/prometheus/postfix-exporter.nix { };
  prometheus-postgres-exporter = callPackage ../servers/monitoring/prometheus/postgres-exporter.nix { };
  prometheus-process-exporter = callPackage ../servers/monitoring/prometheus/process-exporter.nix { };
@@ -19585,17 +19587,16 @@ in
  prometheus-rtl_433-exporter = callPackage ../servers/monitoring/prometheus/rtl_433-exporter.nix { };
  prometheus-smokeping-prober = callPackage ../servers/monitoring/prometheus/smokeping-prober.nix { };
  prometheus-snmp-exporter = callPackage ../servers/monitoring/prometheus/snmp-exporter.nix { };
  prometheus-statsd-exporter = callPackage ../servers/monitoring/prometheus/statsd-exporter.nix { };
  prometheus-surfboard-exporter = callPackage ../servers/monitoring/prometheus/surfboard-exporter.nix { };
  prometheus-sql-exporter = callPackage ../servers/monitoring/prometheus/sql-exporter.nix { };
  prometheus-systemd-exporter = callPackage ../servers/monitoring/prometheus/systemd-exporter.nix { };
  prometheus-tor-exporter = callPackage ../servers/monitoring/prometheus/tor-exporter.nix { };
  prometheus-statsd-exporter = callPackage ../servers/monitoring/prometheus/statsd-exporter.nix { };
  prometheus-surfboard-exporter = callPackage ../servers/monitoring/prometheus/surfboard-exporter.nix { };
  prometheus-unbound-exporter = callPackage ../servers/monitoring/prometheus/unbound-exporter.nix {
    inherit (darwin.apple_sdk.frameworks) Security;
  };
  prometheus-unifi-exporter = callPackage ../servers/monitoring/prometheus/unifi-exporter { };
  prometheus-varnish-exporter = callPackage ../servers/monitoring/prometheus/varnish-exporter.nix { };
  prometheus-jmx-httpserver = callPackage ../servers/monitoring/prometheus/jmx-httpserver.nix { };
  prometheus-wireguard-exporter = callPackage ../servers/monitoring/prometheus/wireguard-exporter.nix {
    inherit (darwin.apple_sdk.frameworks) Security;
  };
@@ -126,7 +126,7 @@ rec {
    # access for example elixir built with different version of Erlang, use
    # `beam.packages.erlangR23.elixir`.
    inherit (packages.erlang)
      elixir elixir_1_11 elixir_1_10 elixir_1_9 elixir_1_8 elixir_1_7 elixir_ls;
      elixir elixir_1_12 elixir_1_11 elixir_1_10 elixir_1_9 elixir_1_8 elixir_1_7 elixir_ls;

    inherit (packages.erlang) lfe lfe_1_2 lfe_1_3;
  };
@@ -235,7 +235,7 @@ lib.makeScope pkgs.newScope (self: with self; {
        (dep: "mkdir -p ext; ln -s ${dep.dev}/include ext/${dep.extensionName}")
        internalDeps}
      '';
      checkPhase = "echo n | make test";
      checkPhase = "runHook preCheck; echo n | make test; runHook postCheck";
      outputs = [ "out" "dev" ];
      installPhase = ''
        mkdir -p $out/lib/php/extensions
@@ -270,6 +270,20 @@ lib.makeScope pkgs.newScope (self: with self; {
    { name = "dba"; }
    { name = "dom";
      buildInputs = [ libxml2 ];
      patches = [
        # https://github.com/php/php-src/pull/7030
        (fetchpatch {
          url = "https://github.com/php/php-src/commit/4cc261aa6afca2190b1b74de39c3caa462ec6f0b.patch";
          sha256 = "11qsdiwj1zmpfc2pgh6nr0sn7qa1nyjg4jwf69cgwnd57qfjcy4k";
          excludes = [ "ext/dom/tests/bug43364.phpt" "ext/dom/tests/bug80268.phpt" ];
        })
      ];
      # For some reason `patch` fails to remove these files correctly.
      # Since `postPatch` is already used in `mkExtension`, we have to do it here.
      preCheck = ''
        rm tests/bug43364.phpt
        rm tests/bug80268.phpt
      '';
      configureFlags = [ "--enable-dom" ]
        # Required to build on darwin.
        ++ lib.optionals (lib.versionOlder php.version "7.4") [ "--with-libxml-dir=${libxml2.dev}" ]; }