Merge staging-next into staging

commit ff00aed7c8 by github-actions[bot], 2024-02-10 06:01:46 +00:00, committed by GitHub
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
36 changed files with 923 additions and 134 deletions

.github/CODEOWNERS vendored

@ -52,6 +52,7 @@
/pkgs/pkgs-lib @infinisil
## Format generators/serializers
/pkgs/pkgs-lib/formats/libconfig @ckiee @h7x4
/pkgs/pkgs-lib/formats/hocon @h7x4
# pkgs/by-name
/pkgs/test/nixpkgs-check-by-name @infinisil


@ -254,13 +254,31 @@ rec {
else if all isInt list && all (x: x == head list) list then head list
else throw "Cannot merge definitions of `${showOption loc}'. Definition values:${showDefs defs}";
/*
Require a single definition.
WARNING: Does not perform nested checks, as this does not run the merge function!
*/
mergeOneOption = mergeUniqueOption { message = ""; };
mergeUniqueOption = { message }: loc: defs:
if length defs == 1
then (head defs).value
else assert length defs > 1;
throw "The option `${showOption loc}' is defined multiple times while it's expected to be unique.\n${message}\nDefinition values:${showDefs defs}\n${prioritySuggestion}";
/*
Require a single definition.
NOTE: When the type is not checked completely by check, pass a merge function for further checking (of sub-attributes, etc).
*/
mergeUniqueOption = args@{
message,
# WARNING: the default merge function assumes that the definition is a valid (option) value. You MUST pass a merge function if
# - the value needs to be type checked beyond what .check does (which should be very little; only the head of the value, not attribute values, etc.),
# - you want attribute values or list items to be checked, or
# - you want coercedTo-like behavior to work.
merge ? loc: defs: (head defs).value }:
loc: defs:
if length defs == 1
then merge loc defs
else
assert length defs > 1;
throw "The option `${showOption loc}' is defined multiple times while it's expected to be unique.\n${message}\nDefinition values:${showDefs defs}\n${prioritySuggestion}";
/* "Merge" option definitions by checking that they all have the same value. */
mergeEqualOption = loc: defs:

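A minimal sketch of how the refactored mergeUniqueOption can be used (the message and wrapped type are illustrative): with the default merge the single definition's value is returned unchecked, while passing the wrapped type's merge lets nested values be verified on demand.

let
  lib = import <nixpkgs/lib>;
  inherit (lib) types;
in
  # Illustrative only: reuse a wrapped type's merge for on-demand checking of values.
  lib.options.mergeUniqueOption {
    message = "Define this option in exactly one module.";
    merge = (types.attrsOf types.str).merge;
  }
  # The result is a merge function `loc: defs: ...`:
  #  - one definition: merged (and lazily checked) by attrsOf str
  #  - several definitions: throws "defined multiple times while it's expected to be unique"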

@ -407,6 +407,16 @@ checkConfigOutput "{}" config.submodule.a ./emptyValues.nix
checkConfigError 'The option .int.a. is used but not defined' config.int.a ./emptyValues.nix
checkConfigError 'The option .nonEmptyList.a. is used but not defined' config.nonEmptyList.a ./emptyValues.nix
# types.unique
# requires a single definition
checkConfigError 'The option .examples\.merged. is defined multiple times while it.s expected to be unique' config.examples.merged.a ./types-unique.nix
# user message is printed
checkConfigError 'We require a single definition, because seeing the whole value at once helps us maintain critical invariants of our system.' config.examples.merged.a ./types-unique.nix
# let the inner merge function check the values (on demand)
checkConfigError 'A definition for option .examples\.badLazyType\.a. is not of type .string.' config.examples.badLazyType.a ./types-unique.nix
# overriding still works (unlike option uniqueness)
checkConfigOutput '^"bee"$' config.examples.override.b ./types-unique.nix
## types.raw
checkConfigOutput '^true$' config.unprocessedNestingEvaluates.success ./raw.nix
checkConfigOutput "10" config.processedToplevel ./raw.nix


@ -0,0 +1,27 @@
{ lib, ... }:
let
inherit (lib) mkOption types;
in
{
options.examples = mkOption {
type = types.lazyAttrsOf
(types.unique
{ message = "We require a single definition, because seeing the whole value at once helps us maintain critical invariants of our system."; }
(types.attrsOf types.str));
};
imports = [
{ examples.merged = { b = "bee"; }; }
{ examples.override = lib.mkForce { b = "bee"; }; }
];
config.examples = {
merged = {
a = "aye";
};
override = {
a = "aye";
};
badLazyType = {
a = true;
};
};
}


@ -614,23 +614,12 @@ rec {
nestedTypes.elemType = elemType;
};
# Value of given type but with no merging (i.e. `uniq list`s are not concatenated).
uniq = elemType: mkOptionType rec {
name = "uniq";
inherit (elemType) description descriptionClass check;
merge = mergeOneOption;
emptyValue = elemType.emptyValue;
getSubOptions = elemType.getSubOptions;
getSubModules = elemType.getSubModules;
substSubModules = m: uniq (elemType.substSubModules m);
functor = (defaultFunctor name) // { wrapped = elemType; };
nestedTypes.elemType = elemType;
};
uniq = unique { message = ""; };
unique = { message }: type: mkOptionType rec {
name = "unique";
inherit (type) description descriptionClass check;
merge = mergeUniqueOption { inherit message; };
merge = mergeUniqueOption { inherit message; inherit (type) merge; };
emptyValue = type.emptyValue;
getSubOptions = type.getSubOptions;
getSubModules = type.getSubModules;


@ -326,7 +326,7 @@ Composed types are types that take a type as parameter. `listOf
`types.uniq` *`t`*
: Ensures that type *`t`* cannot be merged. It is used to ensure option
definitions are declared only once.
definitions are provided only once.
`types.unique` `{ message = m }` *`t`*

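For illustration, a minimal module sketch (the option name and message are made up) of how `types.unique` is declared; it mirrors the `types-unique.nix` test shown earlier:

{ lib, ... }: {
  options.example = lib.mkOption {
    # types.uniq t is now equivalent to types.unique { message = ""; } t
    type = lib.types.unique
      { message = "Only one module may define this option."; }
      (lib.types.attrsOf lib.types.str);
  };
  # Defining config.example in two modules (without mkForce / mkOverride) fails with:
  #   The option `example' is defined multiple times while it's expected to be unique.
  #   Only one module may define this option.
}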

@ -5,12 +5,7 @@ with lib;
let
cfg = config.services.jibri;
# Copied from the jitsi-videobridge.nix file.
toHOCON = x:
if isAttrs x && x ? __hocon_envvar then ("\${" + x.__hocon_envvar + "}")
else if isAttrs x then "{${ concatStringsSep "," (mapAttrsToList (k: v: ''"${k}":${toHOCON v}'') x) }}"
else if isList x then "[${ concatMapStringsSep "," toHOCON x }]"
else builtins.toJSON x;
format = pkgs.formats.hocon { };
# We're passing passwords in environment variables that have names generated
# from an attribute name, which may not be a valid bash identifier.
@ -38,13 +33,13 @@ let
control-login = {
domain = env.control.login.domain;
username = env.control.login.username;
password.__hocon_envvar = toVarName "${name}_control";
password = format.lib.mkSubstitution (toVarName "${name}_control");
};
call-login = {
domain = env.call.login.domain;
username = env.call.login.username;
password.__hocon_envvar = toVarName "${name}_call";
password = format.lib.mkSubstitution (toVarName "${name}_call");
};
strip-from-room-domain = env.stripFromRoomDomain;
@ -85,13 +80,13 @@ let
};
# Allow overriding leaves of the default config despite types.attrs not doing any merging.
jibriConfig = recursiveUpdate defaultJibriConfig cfg.config;
configFile = pkgs.writeText "jibri.conf" (toHOCON { jibri = jibriConfig; });
configFile = format.generate "jibri.conf" { jibri = jibriConfig; };
in
{
options.services.jibri = with types; {
enable = mkEnableOption (lib.mdDoc "Jitsi BRoadcasting Infrastructure. Currently Jibri must be run on a host that is also running {option}`services.jitsi-meet.enable`, so for most use cases it will be simpler to run {option}`services.jitsi-meet.jibri.enable`");
config = mkOption {
type = attrs;
type = format.type;
default = { };
description = lib.mdDoc ''
Jibri configuration.

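A sketch of the substitution migration used above (the variable name is illustrative); per the format's test fixtures further down, substitutions render in the optional `${?VAR}` form:

# Old, ad-hoc encoding (still accepted by the new generator, with a deprecation warning):
password = { __hocon_envvar = "JIBRI_EXAMPLE_PASS"; };  # variable name is illustrative

# New, via pkgs.formats.hocon:
password = format.lib.mkSubstitution "JIBRI_EXAMPLE_PASS";

# Both end up in the generated jibri.conf as a HOCON substitution, e.g.
#   "password" = ${?JIBRI_EXAMPLE_PASS}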

@ -5,14 +5,9 @@ with lib;
let
cfg = config.services.jicofo;
# HOCON is a JSON superset that some jitsi-meet components use for configuration
toHOCON = x: if isAttrs x && x ? __hocon_envvar then ("\${" + x.__hocon_envvar + "}")
else if isAttrs x && x ? __hocon_unquoted_string then x.__hocon_unquoted_string
else if isAttrs x then "{${ concatStringsSep "," (mapAttrsToList (k: v: ''"${k}":${toHOCON v}'') x) }}"
else if isList x then "[${ concatMapStringsSep "," toHOCON x }]"
else builtins.toJSON x;
format = pkgs.formats.hocon { };
configFile = pkgs.writeText "jicofo.conf" (toHOCON cfg.config);
configFile = format.generate "jicofo.conf" cfg.config;
in
{
options.services.jicofo = with types; {
@ -77,7 +72,7 @@ in
};
config = mkOption {
type = (pkgs.formats.json {}).type;
type = format.type;
default = { };
example = literalExpression ''
{
@ -99,7 +94,7 @@ in
hostname = cfg.xmppHost;
username = cfg.userName;
domain = cfg.userDomain;
password = { __hocon_envvar = "JICOFO_AUTH_PASS"; };
password = format.lib.mkSubstitution "JICOFO_AUTH_PASS";
xmpp-domain = if cfg.xmppDomain == null then cfg.xmppHost else cfg.xmppDomain;
};
service = client;


@ -6,16 +6,7 @@ let
cfg = config.services.jitsi-videobridge;
attrsToArgs = a: concatStringsSep " " (mapAttrsToList (k: v: "${k}=${toString v}") a);
# HOCON is a JSON superset that videobridge2 uses for configuration.
# It can substitute environment variables which we use for passwords here.
# https://github.com/lightbend/config/blob/master/README.md
#
# Substitution for environment variable FOO is represented as attribute set
# { __hocon_envvar = "FOO"; }
toHOCON = x: if isAttrs x && x ? __hocon_envvar then ("\${" + x.__hocon_envvar + "}")
else if isAttrs x then "{${ concatStringsSep "," (mapAttrsToList (k: v: ''"${k}":${toHOCON v}'') x) }}"
else if isList x then "[${ concatMapStringsSep "," toHOCON x }]"
else builtins.toJSON x;
format = pkgs.formats.hocon { };
# We're passing passwords in environment variables that have names generated
# from an attribute name, which may not be a valid bash identifier.
@ -38,7 +29,7 @@ let
hostname = xmppConfig.hostName;
domain = xmppConfig.domain;
username = xmppConfig.userName;
password = { __hocon_envvar = toVarName name; };
password = format.lib.mkSubstitution (toVarName name);
muc_jids = xmppConfig.mucJids;
muc_nickname = xmppConfig.mucNickname;
disable_certificate_verification = xmppConfig.disableCertificateVerification;
@ -221,7 +212,7 @@ in
"-Dnet.java.sip.communicator.SC_HOME_DIR_LOCATION" = "/etc/jitsi";
"-Dnet.java.sip.communicator.SC_HOME_DIR_NAME" = "videobridge";
"-Djava.util.logging.config.file" = "/etc/jitsi/videobridge/logging.properties";
"-Dconfig.file" = pkgs.writeText "jvb.conf" (toHOCON jvbConfig);
"-Dconfig.file" = format.generate "jvb.conf" jvbConfig;
# Mitigate CVE-2021-44228
"-Dlog4j2.formatMsgNoLookups" = true;
} // (mapAttrs' (k: v: nameValuePair "-D${k}" v) cfg.extraProperties);


@ -326,6 +326,29 @@ in
RuntimeDirectoryMode = "0700";
User = "murmur";
Group = "murmur";
# service hardening
AmbientCapabilities = "CAP_NET_BIND_SERVICE";
CapabilityBoundingSet = "CAP_NET_BIND_SERVICE";
LockPersonality = true;
MemoryDenyWriteExecute = true;
NoNewPrivileges = true;
PrivateDevices = true;
PrivateTmp = true;
ProtectClock = true;
ProtectControlGroups = true;
ProtectHome = true;
ProtectHostname = true;
ProtectKernelLogs = true;
ProtectKernelModules = true;
ProtectKernelTunables = true;
ProtectSystem = "full";
RestrictAddressFamilies = "~AF_PACKET AF_NETLINK";
RestrictNamespaces = true;
RestrictSUIDSGID = true;
RestrictRealtime = true;
SystemCallArchitectures = "native";
SystemCallFilter = "@system-service";
};
};


@ -3,6 +3,8 @@
let
cfg = config.services.suwayomi-server;
inherit (lib) mkOption mdDoc mkEnableOption mkIf types;
format = pkgs.formats.hocon { };
in
{
options = {
@ -48,19 +50,7 @@ in
settings = mkOption {
type = types.submodule {
freeformType =
let
recursiveAttrsType = with types; attrsOf (nullOr (oneOf [
str
path
int
float
bool
(listOf str)
(recursiveAttrsType // { description = "instances of this type recursively"; })
]));
in
recursiveAttrsType;
freeformType = format.type;
options = {
server = {
ip = mkOption {
@ -180,38 +170,7 @@ in
systemd.services.suwayomi-server =
let
flattenConfig = prefix: config:
lib.foldl'
lib.mergeAttrs
{ }
(lib.attrValues
(lib.mapAttrs
(k: v:
if !(lib.isAttrs v)
then { "${prefix}${k}" = v; }
else flattenConfig "${prefix}${k}." v
)
config
)
);
# HOCON is a JSON superset that suwayomi-server uses for configuration
toHOCON = attr:
let
attrType = builtins.typeOf attr;
in
if builtins.elem attrType [ "string" "path" "int" "float" ]
then ''"${toString attr}"''
else if attrType == "bool"
then lib.boolToString attr
else if attrType == "list"
then "[\n${lib.concatMapStringsSep ",\n" toHOCON attr}\n]"
else # attrs, lambda, null
throw ''
[suwayomi-server]: invalid config value type '${attrType}'.
'';
configFile = pkgs.writeText "server.conf" (lib.pipe cfg.settings [
configFile = format.generate "server.conf" (lib.pipe cfg.settings [
(settings: lib.recursiveUpdate settings {
server.basicAuthPasswordFile = null;
server.basicAuthPassword =
@ -219,12 +178,8 @@ in
then "$TACHIDESK_SERVER_BASIC_AUTH_PASSWORD"
else null;
})
(flattenConfig "")
(lib.filterAttrs (_: x: x != null))
(lib.mapAttrsToList (name: value: ''${name} = ${toHOCON value}''))
lib.concatLines
(lib.filterAttrsRecursive (_: x: x != null))
]);
in
{
description = "A free and open source manga reader server that runs extensions built for Tachiyomi.";

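A sketch of the effect of this change (values are illustrative): nested settings no longer need to be flattened by hand, since the HOCON generator emits nested objects directly.

# Illustrative settings only; option values are examples, not defaults.
services.suwayomi-server.settings.server = {
  ip = "0.0.0.0";
  port = 4567;
};
# ...are rendered by format.generate roughly as nested HOCON objects:
#   "server" = {
#     "ip" = "0.0.0.0"
#     "port" = 4567
#   }
# instead of the previously hand-flattened `server.ip = ...` lines.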

@ -384,7 +384,13 @@ All versions of a package _must_ be included in `all-packages.nix` to make sure
* `meta.license` must be set and match the upstream license.
* If there is no upstream license, `meta.license` should default to `lib.licenses.unfree`.
* If in doubt, try to contact the upstream developers for clarification.
* `meta.mainProgram` must be set when appropriate.
* `meta.mainProgram` must be set to the name of the executable which facilitates the primary function or purpose of the package, if there is such an executable in `$bin/bin/` (or `$out/bin/`, if there is no `"bin"` output).
* Packages that only have a single executable in the applicable directory above should set `meta.mainProgram`. For example, the package `ripgrep` only has a single executable `rg` under `$out/bin/`, so `ripgrep.meta.mainProgram` is set to `"rg"` (see the sketch after this list).
* Packages like `polkit_gnome` that have no executables in the applicable directory should not set `meta.mainProgram`.
* Packages like `e2fsprogs` that have multiple executables, none of which can be considered the main program, should not set `meta.mainProgram`.
* Packages which are not primarily used for a single executable do not need to set `meta.mainProgram`.
* Always prefer using a hardcoded string (don't use `pname`, for example).
* When in doubt, ask for reviewer input.
* `meta.maintainers` must be set for new packages.
See the Nixpkgs manual for more details on [standard meta-attributes](https://nixos.org/nixpkgs/manual/#sec-standard-meta-attributes).

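A minimal sketch of the `ripgrep` case mentioned in the list (abridged; not the actual package expression):

# Abridged sketch, not the real ripgrep derivation:
stdenv.mkDerivation {
  pname = "ripgrep";
  # ...
  meta = {
    description = "...";
    mainProgram = "rg";  # hardcoded string, not derived from pname
  };
}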

@ -1,8 +1,8 @@
{ config, stdenv, fetchurl, lib, acpica-tools, dev86, pam, libxslt, libxml2, wrapQtAppsHook
, libX11, xorgproto, libXext, libXcursor, libXmu, libIDL, SDL2, libcap, libGL, libGLU
, libpng, glib, lvm2, libXrandr, libXinerama, libopus, qtbase, qtx11extras
, libpng, glib, lvm2, libXrandr, libXinerama, libopus, libtpms, qtbase, qtx11extras
, qttools, qtsvg, qtwayland, pkg-config, which, docbook_xsl, docbook_xml_dtd_43
, alsa-lib, curl, libvpx, nettools, dbus, substituteAll, gsoap, zlib
, alsa-lib, curl, libvpx, nettools, dbus, substituteAll, gsoap, zlib, xz
, yasm, glslang
, linuxPackages
# If open-watcom-bin is not passed, VirtualBox will fall back to use
@ -17,6 +17,7 @@
, headless ? false
, enable32bitGuests ? true
, enableWebService ? false
, extraConfigureFlags ? ""
}:
with lib;
@ -46,7 +47,7 @@ in stdenv.mkDerivation {
buildInputs = [
acpica-tools dev86 libxslt libxml2 xorgproto libX11 libXext libXcursor libIDL
libcap glib lvm2 alsa-lib curl libvpx pam makeself perl
libXmu libXrandr libpng libopus python3 ]
libXmu libXrandr libpng libopus libtpms python3 xz ]
++ optional javaBindings jdk
++ optional pythonBindings python3 # Python is needed even when not building bindings
++ optional pulseSupport libpulseaudio
@ -158,6 +159,7 @@ in stdenv.mkDerivation {
${optionalString (!enable32bitGuests) "--disable-vmmraw"} \
${optionalString enableWebService "--enable-webservice"} \
${optionalString (open-watcom-bin != null) "--with-ow-dir=${open-watcom-bin}"} \
${extraConfigureFlags} \
--disable-kmods
sed -e 's@PKG_CONFIG_PATH=.*@PKG_CONFIG_PATH=${libIDL}/lib/pkgconfig:${glib.dev}/lib/pkgconfig ${libIDL}/bin/libIDL-config-2@' \
-i AutoConfig.kmk

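The new `extraConfigureFlags` argument can be supplied through an override; a sketch (the flag shown is only an example, not necessarily a supported VirtualBox configure flag):

# Hypothetical override using the new argument:
virtualbox.override {
  extraConfigureFlags = "--enable-vnc";
}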

@ -15,13 +15,13 @@
stdenv.mkDerivation (finalAttrs: {
pname = "ccache";
version = "4.9";
version = "4.9.1";
src = fetchFromGitHub {
owner = "ccache";
repo = "ccache";
rev = "refs/tags/v${finalAttrs.version}";
sha256 = "sha256-/R9ReX1l3okUuVD93IdomoaBTYdKvuIuggyk0sJoYmg=";
sha256 = "sha256-n0MTq8x6KNkgwhJQG7F+e3iCOS644nLkMsiRztJe8QU=";
};
outputs = [ "out" "man" ];


@ -14,9 +14,9 @@
}:
stdenv.mkDerivation (self: {
pname = "srm-cuarzo";
version = "0.5.0-1";
version = "0.5.1-1";
rev = "v${self.version}";
hash = "sha256-q3pMWryiBR8BEPHvZ/g/jK2hIBTd15RxyU7uocSJsZ8=";
hash = "sha256-+Qn/obgYHWceQN0T3mbGjs/psj+lg43gm/cCBoMnRUk=";
src = fetchFromGitHub {
inherit (self) rev hash;


@ -18,7 +18,7 @@ stdenv.mkDerivation rec {
owner = "matinlotfali";
repo = "KDE-Rounded-Corners";
rev = "v${version}";
hash = "sha256-S6Z0j61LQHmZTYiLEpwG77JH9Nd32lF5Azb0U0+rdNg=";
hash = "sha256-DE3XTu3CQY9mGuOpehWno/4yFyLjHuh4RxdUh+aTU7M=";
};
postConfigure = ''


@ -41,6 +41,13 @@ let
dontUseCmakeConfigure = true;
postPatch = ''
# Add missing includes for gcc-13 for webkit build:
sed -e '1i #include <cstdio>' \
-i modules/javafx.web/src/main/native/Source/bmalloc/bmalloc/Heap.cpp \
modules/javafx.web/src/main/native/Source/bmalloc/bmalloc/IsoSharedPageInlines.h
'';
config = writeText "gradle.properties" (''
CONF = Release
JDK_HOME = ${openjdk17_headless.home}


@ -58,6 +58,8 @@ mkDerivation rec {
env.NIX_CFLAGS_COMPILE = "-Wno-error=deprecated-declarations -Wno-error=type-limits";
meta = with lib; {
# Does not build against gcc-13; the repository is archived upstream.
broken = true;
description = "Interactive, thoroughly customizable maps in native Android, iOS, macOS, Node.js, and Qt applications, powered by vector tiles and OpenGL";
homepage = "https://mapbox.com/mobile";
license = licenses.bsd2;


@ -5,14 +5,14 @@
, cmake
, nasm
# NUMA support enabled by default on NUMA platforms:
# NUMA support enabled by default on NUMA platforms:
, numaSupport ? (stdenv.hostPlatform.isLinux && (stdenv.hostPlatform.isx86 || stdenv.hostPlatform.isAarch64))
, numactl
# Multi bit-depth support (8bit+10bit+12bit):
# Multi bit-depth support (8bit+10bit+12bit):
, multibitdepthSupport ? (stdenv.is64bit && !(stdenv.isAarch64 && stdenv.isLinux))
# Other options:
# Other options:
, cliSupport ? true # Build standalone CLI application
, custatsSupport ? false # Internal profiling of encoder work
, debugSupport ? false # Run-time sanity checks (debugging)
@ -72,6 +72,12 @@ stdenv.mkDerivation rec {
substituteInPlace cmake/Version.cmake \
--replace "unknown" "${version}" \
--replace "0.0" "${version}"
''
# There is broken and complicated logic when setting X265_LATEST_TAG for
# mingwW64 builds. This bypasses the logic by setting it at the end of the
# file
+ lib.optionalString stdenv.hostPlatform.isMinGW ''
echo 'set(X265_LATEST_TAG "${version}")' >> ./cmake/Version.cmake
'';
nativeBuildInputs = [ cmake nasm ] ++ lib.optionals (numaSupport) [ numactl ];
@ -137,10 +143,10 @@ stdenv.mkDerivation rec {
meta = with lib; {
description = "Library for encoding H.265/HEVC video streams";
homepage = "https://www.x265.org/";
changelog = "https://x265.readthedocs.io/en/master/releasenotes.html#version-${lib.strings.replaceStrings ["."] ["-"] version}";
license = licenses.gpl2Plus;
homepage = "https://www.x265.org/";
changelog = "https://x265.readthedocs.io/en/master/releasenotes.html#version-${lib.strings.replaceStrings ["."] ["-"] version}";
license = licenses.gpl2Plus;
maintainers = with maintainers; [ codyopel ];
platforms = platforms.all;
platforms = platforms.all;
};
}


@ -60,11 +60,11 @@ assert (versionAtLeast version "4.9");
PAGE_POISONING_ZERO = whenOlder "5.11" yes;
# Enable init_on_alloc and init_on_free by default
INIT_ON_ALLOC_DEFAULT_ON = yes;
INIT_ON_FREE_DEFAULT_ON = yes;
INIT_ON_ALLOC_DEFAULT_ON = whenAtLeast "5.3" yes;
INIT_ON_FREE_DEFAULT_ON = whenAtLeast "5.3" yes;
# Wipe all caller-used registers on exit from a function
ZERO_CALL_USED_REGS = yes;
ZERO_CALL_USED_REGS = whenAtLeast "5.15" yes;
# Enable the SafeSetId LSM
SECURITY_SAFESETID = whenAtLeast "5.1" yes;
@ -86,8 +86,8 @@ assert (versionAtLeast version "4.9");
# https://www.kernel.org/doc/html/latest/dev-tools/ubsan.html
# https://developers.redhat.com/blog/2014/10/16/gcc-undefined-behavior-sanitizer-ubsan
UBSAN = yes;
UBSAN_TRAP = yes;
UBSAN_BOUNDS = yes;
UBSAN_TRAP = whenAtLeast "5.7" yes;
UBSAN_BOUNDS = whenAtLeast "5.7" yes;
UBSAN_SANITIZE_ALL = yes;
UBSAN_LOCAL_BOUNDS = option yes; # clang only
CFI_CLANG = option yes; # clang only Control Flow Integrity since 6.1

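For context, the version guards used above behave roughly like mkIf on the kernel version; an approximation, not the exact upstream definitions:

# Approximate semantics of the helpers (sketch only):
whenAtLeast = ver: lib.mkIf (lib.versionAtLeast version ver);
whenOlder   = ver: lib.mkIf (lib.versionOlder   version ver);
# e.g. ZERO_CALL_USED_REGS = whenAtLeast "5.15" yes;
# only enables the option on kernels where it exists (>= 5.15).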

@ -41,6 +41,8 @@ rec {
libconfig = (import ./formats/libconfig/default.nix { inherit lib pkgs; }).format;
hocon = (import ./formats/hocon/default.nix { inherit lib pkgs; }).format;
json = {}: {
type = with lib.types; let


@ -0,0 +1,189 @@
{ lib
, pkgs
}:
let
inherit (pkgs) buildPackages callPackage;
hocon-generator = buildPackages.rustPlatform.buildRustPackage {
name = "hocon-generator";
version = "0.1.0";
src = ./src;
passthru.updateScript = ./update.sh;
cargoLock.lockFile = ./src/Cargo.lock;
};
hocon-validator = pkgs.writers.writePython3Bin "hocon-validator" {
libraries = [ pkgs.python3Packages.pyhocon ];
} ''
from sys import argv, exit
from pyhocon import ConfigFactory

if len(argv) != 2:
    print("USAGE: hocon-validator <file>")
    exit(1)

ConfigFactory.parse_file(argv[1])
'';
in
{
# https://github.com/lightbend/config/blob/main/HOCON.md
format = {
generator ? hocon-generator
, validator ? hocon-validator
# `include classpath("")` is not implemented in pyhocon.
# If you need this functionality,
# you will have to disable pyhocon validation.
, doCheck ? true
}: let
hoconLib = {
mkInclude = value: let
includeStatement = if lib.isAttrs value && !(lib.isDerivation value) then {
required = false;
type = null;
_type = "include";
} // value else {
value = toString value;
required = false;
type = null;
_type = "include";
};
in
assert lib.assertMsg (lib.elem includeStatement.type [ "file" "url" "classpath" null ]) ''
Type of HOCON mkInclude is not of type 'file', 'url' or 'classpath':
${(lib.generators.toPretty {}) includeStatement}
'';
includeStatement;
mkAppend = value: {
inherit value;
_type = "append";
};
mkSubstitution = value:
if lib.isString value
then
{
inherit value;
optional = false;
_type = "substitution";
}
else
assert lib.assertMsg (lib.isAttrs value) ''
Value of invalid type provided to `hocon.lib.mkSubstitution`: ${lib.typeOf value}
'';
assert lib.assertMsg (value ? "value") ''
Argument to `hocon.lib.mkSubstitution` is missing a `value`:
${builtins.toJSON value}
'';
{
value = value.value;
optional = value.optional or false;
_type = "substitution";
};
};
in {
type = let
type' = with lib.types; let
atomType = nullOr (oneOf [
bool
float
int
path
str
]);
in (oneOf [
atomType
(listOf atomType)
(attrsOf type')
]) // {
description = "HOCON value";
};
in type';
lib = hoconLib;
generate = name: value:
let
# TODO: remove in 24.11
# Backwards compatibility for generators in the following locations:
# - nixos/modules/services/networking/jibri/default.nix (__hocon_envvar)
# - nixos/modules/services/networking/jicofo.nix (__hocon_envvar, __hocon_unquoted_string)
# - nixos/modules/services/networking/jitsi-videobridge.nix (__hocon_envvar)
replaceOldIndicators = value:
if lib.isAttrs value then
(if value ? "__hocon_envvar"
then
lib.warn ''
Use of `__hocon_envvar` has been deprecated, and will
be removed in the future.
Please use `(pkgs.formats.hocon {}).lib.mkSubstitution` instead.
''
(hoconLib.mkSubstitution value.__hocon_envvar)
else if value ? "__hocon_unquoted_string"
then
lib.warn ''
Use of `__hocon_unquoted_string` has been deprecated, and will
be removed in the future.
Please make use of the freeform options of
`(pkgs.formats.hocon {}).format` instead.
''
{
value = value.__hocon_unquoted_string;
_type = "unquoted_string";
}
else lib.mapAttrs (_: replaceOldIndicators) value)
else if lib.isList value
then map replaceOldIndicators value
else value;
finalValue = replaceOldIndicators value;
in
callPackage
({
stdenvNoCC
, hocon-generator
, hocon-validator
, writeText
}:
stdenvNoCC.mkDerivation rec {
inherit name;
dontUnpack = true;
json = builtins.toJSON finalValue;
passAsFile = [ "json" ];
strictDeps = true;
nativeBuildInputs = [ hocon-generator ];
buildPhase = ''
runHook preBuild
hocon-generator < $jsonPath > output.conf
runHook postBuild
'';
inherit doCheck;
nativeCheckInputs = [ hocon-validator ];
checkPhase = ''
runHook preCheck
hocon-validator output.conf
runHook postCheck
'';
installPhase = ''
runHook preInstall
mv output.conf $out
runHook postInstall
'';
passthru.json = writeText "${name}.json" json;
})
{
hocon-generator = generator;
hocon-validator = validator;
};
};
}

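Putting the pieces together, a usage sketch of the new format (option and file names are illustrative; see the test fixtures below for the exact rendering):

{ pkgs, ... }:
let
  format = pkgs.formats.hocon { };
  settings = {
    server.port = 8080;                                    # plain nested attrs
    password = format.lib.mkSubstitution "APP_PASSWORD";   # -> ${?APP_PASSWORD}
    flags = format.lib.mkAppend [ "-v" ];                  # -> "flags" += [ ... ]
    _includes = [ (format.lib.mkInclude "/etc/app/local.conf") ];
  };
in
  # Validated with pyhocon unless the format is instantiated with `doCheck = false`.
  format.generate "app.conf" settings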

@ -0,0 +1 @@
target


@ -0,0 +1,89 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "hocon-generator"
version = "0.1.0"
dependencies = [
"serde",
"serde_json",
]
[[package]]
name = "itoa"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
[[package]]
name = "proc-macro2"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [
"proc-macro2",
]
[[package]]
name = "ryu"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741"
[[package]]
name = "serde"
version = "1.0.190"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91d3c334ca1ee894a2c6f6ad698fe8c435b76d504b13d436f0685d648d6d96f7"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.190"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67c5609f394e5c2bd7fc51efda478004ea80ef42fee983d5c67a65e34f32c0e3"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "syn"
version = "2.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"


@ -0,0 +1,10 @@
[package]
name = "hocon-generator"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
serde = "1.0.178"
serde_json = "1.0.104"


@ -0,0 +1,237 @@
use serde_json::{value, Map, Value};
#[derive(Debug)]
enum HOCONValue {
Null,
Append(Box<HOCONValue>),
Bool(bool),
Number(value::Number),
String(String),
List(Vec<HOCONValue>),
Substitution(String, bool),
Object(Vec<HOCONInclude>, Vec<(String, HOCONValue)>),
Literal(String),
}
#[derive(Debug)]
enum HOCONInclude {
Heuristic(String, bool),
Url(String, bool),
File(String, bool),
ClassPath(String, bool),
}
impl HOCONInclude {
fn map_fst(&self, f: &dyn Fn(&String) -> String) -> HOCONInclude {
match self {
HOCONInclude::Heuristic(s, r) => HOCONInclude::Heuristic(f(s), *r),
HOCONInclude::Url(s, r) => HOCONInclude::Url(f(s), *r),
HOCONInclude::File(s, r) => HOCONInclude::File(f(s), *r),
HOCONInclude::ClassPath(s, r) => HOCONInclude::ClassPath(f(s), *r),
}
}
}
fn parse_include(o: &Map<String, Value>) -> HOCONInclude {
let value = o
.get("value")
.expect("Missing field 'value' for include statement")
.as_str()
.expect("Field 'value' is not a string in include statement")
.to_string();
let required = o
.get("required")
.expect("Missing field 'required' for include statement")
.as_bool()
.expect("Field 'required' is not a bool in include statement");
let include_type = match o
.get("type")
.expect("Missing field 'type' for include statement")
{
Value::Null => None,
Value::String(s) => Some(s.as_str()),
t => panic!("Field 'type' is not a string in include statement: {:?}", t),
};
// Assert that this was an intentional include
debug_assert!(o.get("_type").and_then(|t| t.as_str()) == Some("include"));
match include_type {
None => HOCONInclude::Heuristic(value, required),
Some("url") => HOCONInclude::Url(value, required),
Some("file") => HOCONInclude::File(value, required),
Some("classpath") => HOCONInclude::ClassPath(value, required),
_ => panic!(
"Could not recognize type for include statement: {}",
include_type.unwrap()
),
}
}
fn parse_special_types(o: &Map<String, Value>) -> Option<HOCONValue> {
o.get("_type")
.and_then(|r#type| r#type.as_str())
.map(|r#type| match r#type {
"substitution" => {
let value = o
.get("value")
.expect("Missing value for substitution")
.as_str()
.unwrap_or_else(|| panic!("Substitution value is not a string: {:?}", o));
let required = o
.get("required")
.unwrap_or(&Value::Bool(false))
.as_bool()
.unwrap_or_else(|| panic!("Substitution 'required' is not a bool: {:?}", o));
debug_assert!(!value.contains('}'));
HOCONValue::Substitution(value.to_string(), required)
}
"append" => {
let value = o.get("value").expect("Missing value for append");
HOCONValue::Append(Box::new(json_to_hocon(value)))
}
"unquoted_string" => {
let value = o
.get("value")
.expect("Missing value for unquoted_string")
.as_str()
.unwrap_or_else(|| panic!("Unquoted string value is not a string: {:?}", o));
HOCONValue::Literal(value.to_string())
}
_ => panic!(
"\
Attribute set contained special element '_type',\
but its value is not recognized:\n{}",
r#type
),
})
}
fn json_to_hocon(v: &Value) -> HOCONValue {
match v {
Value::Null => HOCONValue::Null,
Value::Bool(b) => HOCONValue::Bool(*b),
Value::Number(n) => HOCONValue::Number(n.clone()),
Value::String(s) => HOCONValue::String(s.clone()),
Value::Array(a) => {
let items = a.iter().map(json_to_hocon).collect::<Vec<HOCONValue>>();
HOCONValue::List(items)
}
Value::Object(o) => {
if let Some(result) = parse_special_types(o) {
return result;
}
let mut items = o
.iter()
.filter(|(key, _)| key.as_str() != "_includes")
.map(|(key, value)| (key.clone(), json_to_hocon(value)))
.collect::<Vec<(String, HOCONValue)>>();
items.sort_by(|(a, _), (b, _)| a.partial_cmp(b).unwrap());
let includes = o
.get("_includes")
.map(|x| {
x.as_array()
.expect("_includes is not an array")
.iter()
.map(|x| {
x.as_object()
.unwrap_or_else(|| panic!("Include is not an object: {}", x))
})
.map(parse_include)
.collect::<Vec<HOCONInclude>>()
})
.unwrap_or(vec![]);
HOCONValue::Object(includes, items)
}
}
}
impl ToString for HOCONValue {
fn to_string(&self) -> String {
match self {
HOCONValue::Null => "null".to_string(),
HOCONValue::Bool(b) => b.to_string(),
HOCONValue::Number(n) => n.to_string(),
HOCONValue::String(s) => serde_json::to_string(&Value::String(s.clone())).unwrap(),
HOCONValue::Substitution(v, required) => {
format!("${{{}{}}}", if *required { "" } else { "?" }, v)
}
HOCONValue::List(l) => {
let items = l
.iter()
.map(|item| item.to_string())
.collect::<Vec<String>>()
.join(",\n")
.split('\n')
.map(|s| " ".to_owned() + s)
.collect::<Vec<String>>()
.join("\n");
format!("[\n{}\n]", items)
}
HOCONValue::Object(i, o) => {
let includes = i
.iter()
.map(|x| {
x.map_fst(&|s| serde_json::to_string(&Value::String(s.clone())).unwrap())
})
.map(|x| match x {
HOCONInclude::Heuristic(s, r) => (s.to_string(), r),
HOCONInclude::Url(s, r) => (format!("url({})", s), r),
HOCONInclude::File(s, r) => (format!("file({})", s), r),
HOCONInclude::ClassPath(s, r) => (format!("classpath({})", s), r),
})
.map(|(i, r)| if r { format!("required({})", i) } else { i })
.map(|s| format!("include {}", s))
.collect::<Vec<String>>()
.join("\n");
let items = o
.iter()
.map(|(key, value)| {
(
serde_json::to_string(&Value::String(key.clone())).unwrap(),
value,
)
})
.map(|(key, value)| match value {
HOCONValue::Append(v) => format!("{} += {}", key, v.to_string()),
v => format!("{} = {}", key, v.to_string()),
})
.collect::<Vec<String>>()
.join("\n");
let content = (if includes.is_empty() {
items
} else {
format!("{}{}", includes, items)
})
.split('\n')
.map(|s| format!(" {}", s))
.collect::<Vec<String>>()
.join("\n");
format!("{{\n{}\n}}", content)
}
HOCONValue::Append(_) => panic!("Append should not be present at this point"),
Self::Literal(s) => s.to_string(),
}
}
}
fn main() {
let stdin = std::io::stdin().lock();
let json = serde_json::Deserializer::from_reader(stdin)
.into_iter::<Value>()
.next()
.expect("Could not read content from stdin")
.expect("Could not parse JSON from stdin");
print!("{}\n\n", json_to_hocon(&json).to_string());
}


@ -0,0 +1,65 @@
{ lib, formats, stdenvNoCC, writeText, ... }:
let
hocon = formats.hocon { };
expression = {
substitution = { __hocon_envvar = "PATH"; };
literal = {
__hocon_unquoted_string = ''
[
1,
"a",
]'';
};
nested = {
substitution = { __hocon_envvar = "PATH"; };
literal = {
__hocon_unquoted_string = ''
[
1,
"a",
]'';
};
};
nested_in_array = [
{ __hocon_envvar = "PATH"; }
{
__hocon_unquoted_string = ''
[
1,
"a",
]'';
}
];
};
hocon-test-conf = hocon.generate "hocon-test.conf" expression;
in
stdenvNoCC.mkDerivation {
name = "pkgs.formats.hocon-test-backwards-compatibility";
dontUnpack = true;
dontBuild = true;
doCheck = true;
checkPhase = ''
runHook preCheck
diff -U3 ${./expected.txt} ${hocon-test-conf}
runHook postCheck
'';
installPhase = ''
runHook preInstall
mkdir $out
cp ${./expected.txt} $out/expected.txt
cp ${hocon-test-conf} $out/hocon-test.conf
cp ${hocon-test-conf.passthru.json} $out/hocon-test.json
runHook postInstall
'';
}


@ -0,0 +1,22 @@
{
"literal" = [
1,
"a",
]
"nested" = {
"literal" = [
1,
"a",
]
"substitution" = ${?PATH}
}
"nested_in_array" = [
${?PATH},
[
1,
"a",
]
]
"substitution" = ${?PATH}
}


@ -0,0 +1,83 @@
{ lib, formats, stdenvNoCC, writeText, ... }:
let
hocon = formats.hocon { };
include_file = (writeText "hocon-test-include.conf" ''
"val" = 1
'').overrideAttrs (_: _: {
outputHashAlgo = "sha256";
outputHashMode = "flat";
outputHash = "sha256-UhkJLhT3bD6znq+IdDjs/ahP19mLzrLCy/R14pVrfew=";
});
expression = {
simple_top_level_attr = "1.0";
nested.attrset.has.a.integer.value = 100;
some_floaty = 29.95;
array2d = [
[ 1 2 "a" ]
[ 2 1 "b" ]
];
nasty_string = "\"@\n\\\t^*\b\f\n\0\";'''$";
"misc attrs" = {
x = 1;
y = hocon.lib.mkAppend { a = 1; };
};
"cursed \" .attrs \" " = {
"a" = 1;
"a b" = hocon.lib.mkSubstitution "a";
"a b c" = hocon.lib.mkSubstitution {
value = "a b";
required = false;
};
};
to_include = {
_includes = [
(hocon.lib.mkInclude include_file)
(hocon.lib.mkInclude "https://example.com")
(hocon.lib.mkInclude {
required = true;
type = "file";
value = include_file;
})
(hocon.lib.mkInclude { value = include_file; })
(hocon.lib.mkInclude {
value = "https://example.com";
type = "url";
})
];
};
};
hocon-test-conf = hocon.generate "hocon-test.conf" expression;
in
stdenvNoCC.mkDerivation {
name = "pkgs.formats.hocon-test-comprehensive";
dontUnpack = true;
dontBuild = true;
doCheck = true;
checkPhase = ''
runHook preCheck
diff -U3 ${./expected.txt} ${hocon-test-conf}
runHook postCheck
'';
installPhase = ''
runHook preInstall
mkdir $out
cp ${./expected.txt} $out/expected.txt
cp ${hocon-test-conf} $out/hocon-test.conf
cp ${hocon-test-conf.passthru.json} $out/hocon-test.json
runHook postInstall
'';
}


@ -0,0 +1,47 @@
{
"array2d" = [
[
1,
2,
"a"
],
[
2,
1,
"b"
]
]
"cursed \" .attrs \" " = {
"a" = 1
"a b" = ${?a}
"a b c" = ${?a b}
}
"misc attrs" = {
"x" = 1
"y" += {
"a" = 1
}
}
"nasty_string" = "\"@\n\\\t^*bf\n0\";'''$"
"nested" = {
"attrset" = {
"has" = {
"a" = {
"integer" = {
"value" = 100
}
}
}
}
}
"simple_top_level_attr" = "1.0"
"some_floaty" = 29.95
"to_include" = {
include "/nix/store/ccnzr53dpipdacxgci3ii3bqacvb5hxm-hocon-test-include.conf"
include "https://example.com"
include required(file("/nix/store/ccnzr53dpipdacxgci3ii3bqacvb5hxm-hocon-test-include.conf"))
include "/nix/store/ccnzr53dpipdacxgci3ii3bqacvb5hxm-hocon-test-include.conf"
include url("https://example.com")
}
}


@ -0,0 +1,15 @@
{ pkgs, ... }:
{
comprehensive = pkgs.callPackage ./comprehensive { };
backwards-compatibility =
let
pkgsNoWarn = pkgs.extend (final: prev: {
lib = prev.lib.extend (libFinal: libPrev: {
warn = msg: v: v;
trivial = libPrev.trivial // {
warn = msg: v: v;
};
});
});
in pkgsNoWarn.callPackage ./backwards-compatibility { };
}


@ -0,0 +1,4 @@
#!/usr/bin/env nix-shell
#!nix-shell -p cargo -i bash
cd "$(dirname "$0")"
cargo update


@ -17,7 +17,10 @@ let
jdk11 = pkgs.callPackage ../formats/java-properties/test { jdk = pkgs.jdk11_headless; };
jdk17 = pkgs.callPackage ../formats/java-properties/test { jdk = pkgs.jdk17_headless; };
};
libconfig = recurseIntoAttrs (import ../formats/libconfig/test { inherit pkgs; });
hocon = recurseIntoAttrs (import ../formats/hocon/test { inherit pkgs; });
};
flatten = prefix: as:


@ -15,13 +15,6 @@ python3.pkgs.buildPythonApplication rec {
hash = "sha256-0fcCON/M9JklE7X9aRfzTkEFG4ckJqLoQlYCSrWHHGQ=";
};
# Per <https://github.com/srstevenson/xdg-base-dirs/tree/6.0.0#xdg-base-dirs>, the package is
# renamed from `xdg` to `xdg_base_dirs`, but upstream isn't amenable to performing that rename.
# See <https://github.com/Textualize/frogmouth/pull/59>. So this is a minimal fix.
postUnpack = ''
sed -i -e "s,from xdg import,from xdg_base_dirs import," $sourceRoot/frogmouth/data/{config,data_directory}.py
'';
nativeBuildInputs = [
python3.pkgs.poetry-core
python3.pkgs.pythonRelaxDepsHook
@ -31,13 +24,12 @@ python3.pkgs.buildPythonApplication rec {
httpx
textual
typing-extensions
xdg-base-dirs
xdg
];
pythonRelaxDeps = [
"httpx"
"textual"
"xdg-base-dirs"
];
pythonImportsCheck = [ "frogmouth" ];


@ -1471,7 +1471,12 @@ with pkgs;
writers = callPackage ../build-support/writers { };
# lib functions depending on pkgs
inherit (import ../pkgs-lib { inherit lib pkgs; }) formats;
inherit (import ../pkgs-lib {
# The `lib` variable in this scope doesn't include any applied lib overlays,
# whereas `pkgs.lib` does.
inherit (pkgs) lib;
inherit pkgs;
}) formats;
testers = callPackage ../build-support/testers { };
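A sketch of the distinction drawn by the comment above (the overlay is hypothetical): a `lib` attribute added via a nixpkgs overlay shows up on `pkgs.lib`, but not on the plain `lib` argument of this file.

# Hypothetical overlay: pkgs.lib.greeting exists afterwards,
# while the `lib` binding inside all-packages.nix still lacks it.
import <nixpkgs> {
  overlays = [
    (final: prev: {
      lib = prev.lib.extend (libFinal: libPrev: { greeting = "hello"; });
    })
  ];
}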
@ -16362,7 +16367,7 @@ with pkgs;
hugs = callPackage ../development/interpreters/hugs { };
inherit (javaPackages) openjfx11 openjfx15 openjfx17 openjfx19 openjfx20 openjfx21;
openjfx = pin-to-gcc12-if-gcc13 (openjfx17.override { });
openjfx = openjfx17;
openjdk8-bootstrap = javaPackages.compiler.openjdk8-bootstrap;
openjdk8 = javaPackages.compiler.openjdk8;
@ -18662,8 +18667,6 @@ with pkgs;
cc-tool = callPackage ../development/embedded/cc-tool { };
ccache = callPackage ../development/tools/misc/ccache { };
# Wrapper that works as gcc or g++
# It can be used by setting in nixpkgs config like this, for example:
# replaceStdenv = { pkgs }: pkgs.ccacheStdenv;