Merge staging-next into staging
commit 5f5513224f
@@ -285,11 +285,11 @@ specifying an interpreter version), like this:

```nix
{ lib
-, python3
+, python3Packages
, fetchPypi
}:

-python3.pkgs.buildPythonApplication rec {
+python3Packages.buildPythonApplication rec {
  pname = "luigi";
  version = "2.7.9";
  pyproject = true;

@@ -300,13 +300,13 @@ python3.pkgs.buildPythonApplication rec {
  };

  nativeBuildInputs = [
-    python3.pkgs.setuptools
-    python3.pkgs.wheel
+    python3Packages.setuptools
+    python3Packages.wheel
  ];

-  propagatedBuildInputs = with python3.pkgs; [
-    tornado
-    python-daemon
+  propagatedBuildInputs = [
+    python3Packages.tornado
+    python3Packages.python-daemon
  ];

  meta = with lib; {
@@ -244,5 +244,4 @@ Here's a list of places in the library that need to be updated in the future:
- > The file set library is currently somewhat limited but is being expanded to include more functions over time.

  in [the manual](../../doc/functions/fileset.section.md)
- If/Once a function to convert `lib.sources` values into file sets exists, the `_coerce` and `toSource` functions should be updated to mention that function in the error when such a value is passed
- If/Once a function exists that can optionally include a path depending on whether it exists, the error message for the path not existing in `_coerce` should mention the new function
@@ -3,8 +3,10 @@ let

  inherit (import ./internal.nix { inherit lib; })
    _coerce
+    _singleton
    _coerceMany
    _toSourceFilter
+    _fromSourceFilter
    _unionMany
    _fileFilter
    _printFileset
@@ -152,7 +154,12 @@ in {
      sourceFilter = _toSourceFilter fileset;
    in
    if ! isPath root then
-      if isStringLike root then
+      if root ? _isLibCleanSourceWith then
+        throw ''
+          lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
+              To use a `lib.sources`-based value, convert it to a file set using `lib.fileset.fromSource` and pass it as `fileset`.
+              Note that this only works for sources created from paths.''
+      else if isStringLike root then
        throw ''
          lib.fileset.toSource: `root` (${toString root}) is a string-like value, but it should be a path instead.
              Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
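For illustration, the migration that this new error message points to looks roughly like the following sketch, mirroring the `fromSource` example documented further down in this file (the `./.` paths are placeholders):

```nix
# Sketch only: convert a lib.sources-based value into a file set and pass it as `fileset`.
lib.fileset.toSource {
  root = ./.;
  fileset = lib.fileset.fromSource (lib.sources.cleanSource ./.);
}
```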
@@ -188,6 +195,75 @@ in {
      filter = sourceFilter;
    };

  /*
    Create a file set with the same files as a `lib.sources`-based value.
    This does not import any of the files into the store.

    This can be used to gradually migrate from `lib.sources`-based filtering to `lib.fileset`.

    A file set can be turned back into a source using [`toSource`](#function-library-lib.fileset.toSource).

    :::{.note}
    File sets cannot represent empty directories.
    Turning the result of this function back into a source using `toSource` will therefore not preserve empty directories.
    :::

    Type:
      fromSource :: SourceLike -> FileSet

    Example:
      # There's no cleanSource-like function for file sets yet,
      # but we can just convert cleanSource to a file set and use it that way
      toSource {
        root = ./.;
        fileset = fromSource (lib.sources.cleanSource ./.);
      }

      # Keeping a previous sourceByRegex (which could be migrated to `lib.fileset.unions`),
      # but removing a subdirectory using file set functions
      difference
        (fromSource (lib.sources.sourceByRegex ./. [
          "^README\.md$"
          # This regex includes everything in ./doc
          "^doc(/.*)?$"
        ])
        ./doc/generated

      # Use cleanSource, but limit it to only include ./Makefile and files under ./src
      intersection
        (fromSource (lib.sources.cleanSource ./.))
        (unions [
          ./Makefile
          ./src
        ]);
  */
  fromSource = source:
    let
      # This function uses `._isLibCleanSourceWith`, `.origSrc` and `.filter`,
      # which are technically internal to lib.sources,
      # but we'll allow this since both libraries are in the same code base
      # and this function is a bridge between them.
      isFiltered = source ? _isLibCleanSourceWith;
      path = if isFiltered then source.origSrc else source;
    in
    # We can only support sources created from paths
    if ! isPath path then
      if isStringLike path then
        throw ''
          lib.fileset.fromSource: The source origin of the argument is a string-like value ("${toString path}"), but it should be a path instead.
              Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.''
      else
        throw ''
          lib.fileset.fromSource: The source origin of the argument is of type ${typeOf path}, but it should be a path instead.''
    else if ! pathExists path then
      throw ''
        lib.fileset.fromSource: The source origin (${toString path}) of the argument does not exist.''
    else if isFiltered then
      _fromSourceFilter path source.filter
    else
      # If there's no filter, no need to run the expensive conversion, all subpaths will be included
      _singleton path;

  /*
    The file set containing all files that are in either of two given file sets.
    This is the same as [`unions`](#function-library-lib.fileset.unions),
@@ -167,7 +167,12 @@ rec {
        else
          value
    else if ! isPath value then
-      if isStringLike value then
+      if value ? _isLibCleanSourceWith then
+        throw ''
+          ${context} is a `lib.sources`-based value, but it should be a file set or a path instead.
+              To convert a `lib.sources`-based value to a file set you can use `lib.fileset.fromSource`.
+              Note that this only works for sources created from paths.''
+      else if isStringLike value then
        throw ''
          ${context} ("${toString value}") is a string-like value, but it should be a file set or a path instead.
              Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
@@ -470,6 +475,59 @@ rec {
      else
        nonEmpty;

  # Turn a builtins.filterSource-based source filter on a root path into a file set
  # containing only files included by the filter.
  # The filter is lazily called as necessary to determine whether paths are included
  # Type: Path -> (String -> String -> Bool) -> fileset
  _fromSourceFilter = root: sourceFilter:
    let
      # During the recursion we need to track both:
      # - The path value such that we can safely call `readDir` on it
      # - The path string value such that we can correctly call the `filter` with it
      #
      # While we could just recurse with the path value,
      # this would then require converting it to a path string for every path,
      # which is a fairly expensive operation

      # Create a file set from a directory entry
      fromDirEntry = path: pathString: type:
        # The filter needs to run on the path as a string
        if ! sourceFilter pathString type then
          null
        else if type == "directory" then
          fromDir path pathString
        else
          type;

      # Create a file set from a directory
      fromDir = path: pathString:
        mapAttrs
          # This looks a bit funny, but we need both the path-based and the path string-based values
          (name: fromDirEntry (path + "/${name}") (pathString + "/${name}"))
          # We need to readDir on the path value, because reading on a path string
          # would be unspecified if there are multiple filesystem roots
          (readDir path);

      rootPathType = pathType root;

      # We need to convert the path to a string to imitate what builtins.path calls the filter function with.
      # We don't want to rely on `toString` for this though because it's not very well defined, see ../path/README.md
      # So instead we use `lib.path.splitRoot` to safely deconstruct the path into its filesystem root and subpath
      # We don't need the filesystem root though, builtins.path doesn't expose that in any way to the filter.
      # So we only need the components, which we then turn into a string as one would expect.
      rootString = "/" + concatStringsSep "/" (components (splitRoot root).subpath);
    in
    if rootPathType == "directory" then
      # We imitate builtins.path not calling the filter on the root path
      _create root (fromDir root rootString)
    else
      # Direct files are always included by builtins.path without calling the filter
      # But we need to lift up the base path to its parent to satisfy the base path invariant
      _create (dirOf root)
        {
          ${baseNameOf root} = rootPathType;
        };

  # Transforms the filesetTree of a file set to a shorter base path, e.g.
  # _shortenTreeBase [ "foo" ] (_create /foo/bar null)
  # => { bar = null; }
@ -1,5 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC2016
|
||||
# shellcheck disable=SC2317
|
||||
# shellcheck disable=SC2192
|
||||
|
||||
# Tests lib.fileset
|
||||
# Run:
|
||||
@ -224,23 +226,17 @@ withFileMonitor() {
|
||||
fi
|
||||
}
|
||||
|
||||
# Check whether a file set includes/excludes declared paths as expected, usage:
|
||||
|
||||
# Create the tree structure declared in the tree variable, usage:
|
||||
#
|
||||
# tree=(
|
||||
# [a/b] =1 # Declare that file a/b should exist and expect it to be included in the store path
|
||||
# [c/a] = # Declare that file c/a should exist and expect it to be excluded in the store path
|
||||
# [c/d/]= # Declare that directory c/d/ should exist and expect it to be excluded in the store path
|
||||
# [a/b] = # Declare that file a/b should exist
|
||||
# [c/a] = # Declare that file c/a should exist
|
||||
# [c/d/]= # Declare that directory c/d/ should exist
|
||||
# )
|
||||
# checkFileset './a' # Pass the fileset as the argument
|
||||
# createTree
|
||||
declare -A tree
|
||||
checkFileset() {
|
||||
# New subshell so that we can have a separate trap handler, see `trap` below
|
||||
local fileset=$1
|
||||
|
||||
# Process the tree into separate arrays for included paths, excluded paths and excluded files.
|
||||
local -a included=()
|
||||
local -a excluded=()
|
||||
local -a excludedFiles=()
|
||||
createTree() {
|
||||
# Track which paths need to be created
|
||||
local -a dirsToCreate=()
|
||||
local -a filesToCreate=()
|
||||
@ -248,24 +244,9 @@ checkFileset() {
|
||||
# If keys end with a `/` we treat them as directories, otherwise files
|
||||
if [[ "$p" =~ /$ ]]; then
|
||||
dirsToCreate+=("$p")
|
||||
isFile=
|
||||
else
|
||||
filesToCreate+=("$p")
|
||||
isFile=1
|
||||
fi
|
||||
case "${tree[$p]}" in
|
||||
1)
|
||||
included+=("$p")
|
||||
;;
|
||||
0)
|
||||
excluded+=("$p")
|
||||
if [[ -n "$isFile" ]]; then
|
||||
excludedFiles+=("$p")
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
die "Unsupported tree value: ${tree[$p]}"
|
||||
esac
|
||||
done
|
||||
|
||||
# Create all the necessary paths.
|
||||
@ -280,6 +261,43 @@ checkFileset() {
|
||||
mkdir -p "${parentsToCreate[@]}"
|
||||
touch "${filesToCreate[@]}"
|
||||
fi
|
||||
}
|
||||
|
||||
# Check whether a file set includes/excludes declared paths as expected, usage:
|
||||
#
|
||||
# tree=(
|
||||
# [a/b] =1 # Declare that file a/b should exist and expect it to be included in the store path
|
||||
# [c/a] = # Declare that file c/a should exist and expect it to be excluded in the store path
|
||||
# [c/d/]= # Declare that directory c/d/ should exist and expect it to be excluded in the store path
|
||||
# )
|
||||
# checkFileset './a' # Pass the fileset as the argument
|
||||
checkFileset() {
|
||||
# New subshell so that we can have a separate trap handler, see `trap` below
|
||||
local fileset=$1
|
||||
|
||||
# Create the tree
|
||||
createTree
|
||||
|
||||
# Process the tree into separate arrays for included paths, excluded paths and excluded files.
|
||||
local -a included=()
|
||||
local -a excluded=()
|
||||
local -a excludedFiles=()
|
||||
for p in "${!tree[@]}"; do
|
||||
case "${tree[$p]}" in
|
||||
1)
|
||||
included+=("$p")
|
||||
;;
|
||||
0)
|
||||
excluded+=("$p")
|
||||
# If keys end with a `/` we treat them as directories, otherwise files
|
||||
if [[ ! "$p" =~ /$ ]]; then
|
||||
excludedFiles+=("$p")
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
die "Unsupported tree value: ${tree[$p]}"
|
||||
esac
|
||||
done
|
||||
|
||||
expression="toSource { root = ./.; fileset = $fileset; }"
|
||||
|
||||
@ -321,6 +339,10 @@ checkFileset() {
|
||||
expectFailure 'toSource { root = "/nix/store/foobar"; fileset = ./.; }' 'lib.fileset.toSource: `root` \(/nix/store/foobar\) is a string-like value, but it should be a path instead.
|
||||
\s*Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
|
||||
|
||||
expectFailure 'toSource { root = cleanSourceWith { src = ./.; }; fileset = ./.; }' 'lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
|
||||
\s*To use a `lib.sources`-based value, convert it to a file set using `lib.fileset.fromSource` and pass it as `fileset`.
|
||||
\s*Note that this only works for sources created from paths.'
|
||||
|
||||
# Only paths are accepted as `root`
|
||||
expectFailure 'toSource { root = 10; fileset = ./.; }' 'lib.fileset.toSource: `root` is of type int, but it should be a path instead.'
|
||||
|
||||
@ -365,6 +387,9 @@ rm -rf -- *
|
||||
expectFailure 'toSource { root = ./.; fileset = 10; }' 'lib.fileset.toSource: `fileset` is of type int, but it should be a file set or a path instead.'
|
||||
expectFailure 'toSource { root = ./.; fileset = "/some/path"; }' 'lib.fileset.toSource: `fileset` \("/some/path"\) is a string-like value, but it should be a file set or a path instead.
|
||||
\s*Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
|
||||
expectFailure 'toSource { root = ./.; fileset = cleanSourceWith { src = ./.; }; }' 'lib.fileset.toSource: `fileset` is a `lib.sources`-based value, but it should be a file set or a path instead.
|
||||
\s*To convert a `lib.sources`-based value to a file set you can use `lib.fileset.fromSource`.
|
||||
\s*Note that this only works for sources created from paths.'
|
||||
|
||||
# Path coercion errors for non-existent paths
|
||||
expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` \('"$work"'/a\) is a path that does not exist.'
|
||||
@ -995,6 +1020,217 @@ touch 0 "${filesToCreate[@]}"
|
||||
expectTrace 'unions (mapAttrsToList (n: _: ./. + "/${n}") (removeAttrs (builtins.readDir ./.) [ "0" ]))' "$expectedTrace"
|
||||
rm -rf -- *
|
||||
|
||||
## lib.fileset.fromSource
|
||||
|
||||
# Check error messages
|
||||
expectFailure 'fromSource null' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
|
||||
|
||||
expectFailure 'fromSource (lib.cleanSource "")' 'lib.fileset.fromSource: The source origin of the argument is a string-like value \(""\), but it should be a path instead.
|
||||
\s*Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.'
|
||||
|
||||
expectFailure 'fromSource (lib.cleanSource null)' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
|
||||
|
||||
# fromSource on a path works and is the same as coercing that path
|
||||
mkdir a
|
||||
touch a/b c
|
||||
expectEqual 'trace (fromSource ./.) null' 'trace ./. null'
|
||||
rm -rf -- *
|
||||
|
||||
# Check that converting to a file set doesn't read the included files
|
||||
mkdir a
|
||||
touch a/b
|
||||
run() {
|
||||
expectEqual "trace (fromSource (lib.cleanSourceWith { src = ./a; })) null" "builtins.trace \"$work/a (all files in directory)\" null"
|
||||
rm a/b
|
||||
}
|
||||
withFileMonitor run a/b
|
||||
rm -rf -- *
|
||||
|
||||
# Check that converting to a file set doesn't read entries for directories that are filtered out
|
||||
mkdir -p a/b
|
||||
touch a/b/c
|
||||
run() {
|
||||
expectEqual "trace (fromSource (lib.cleanSourceWith {
|
||||
src = ./a;
|
||||
filter = pathString: type: false;
|
||||
})) null" "builtins.trace \"(empty)\" null"
|
||||
rm a/b/c
|
||||
rmdir a/b
|
||||
}
|
||||
withFileMonitor run a/b
|
||||
rm -rf -- *
|
||||
|
||||
# The filter is not needed on empty directories
|
||||
expectEqual 'trace (fromSource (lib.cleanSourceWith {
|
||||
src = ./.;
|
||||
filter = abort "filter should not be needed";
|
||||
})) null' 'trace _emptyWithoutBase null'
|
||||
|
||||
# Single files also work
|
||||
touch a b
|
||||
expectEqual 'trace (fromSource (cleanSourceWith { src = ./a; })) null' 'trace ./a null'
|
||||
rm -rf -- *
|
||||
|
||||
# For a tree assigning each subpath true/false,
|
||||
# check whether a source filter with those results includes the same files
|
||||
# as a file set created using fromSource. Usage:
|
||||
#
|
||||
# tree=(
|
||||
# [a]=1 # ./a is a file and the filter should return true for it
|
||||
# [b/]=0 # ./b is a directory and the filter should return false for it
|
||||
# )
|
||||
# checkSource
|
||||
checkSource() {
|
||||
createTree
|
||||
|
||||
# Serialise the tree as JSON (there's only minimal savings with jq,
|
||||
# and we don't need to handle escapes)
|
||||
{
|
||||
echo "{"
|
||||
first=1
|
||||
for p in "${!tree[@]}"; do
|
||||
if [[ -z "$first" ]]; then
|
||||
echo ","
|
||||
else
|
||||
first=
|
||||
fi
|
||||
echo "\"$p\":"
|
||||
case "${tree[$p]}" in
|
||||
1)
|
||||
echo "true"
|
||||
;;
|
||||
0)
|
||||
echo "false"
|
||||
;;
|
||||
*)
|
||||
die "Unsupported tree value: ${tree[$p]}"
|
||||
esac
|
||||
done
|
||||
echo "}"
|
||||
} > "$tmp/tree.json"
|
||||
|
||||
# An expression to create a source value with a filter matching the tree
|
||||
sourceExpr='
|
||||
let
|
||||
tree = importJSON '"$tmp"'/tree.json;
|
||||
in
|
||||
cleanSourceWith {
|
||||
src = ./.;
|
||||
filter =
|
||||
pathString: type:
|
||||
let
|
||||
stripped = removePrefix (toString ./. + "/") pathString;
|
||||
key = stripped + optionalString (type == "directory") "/";
|
||||
in
|
||||
tree.${key} or
|
||||
(throw "tree key ${key} missing");
|
||||
}
|
||||
'
|
||||
|
||||
filesetExpr='
|
||||
toSource {
|
||||
root = ./.;
|
||||
fileset = fromSource ('"$sourceExpr"');
|
||||
}
|
||||
'
|
||||
|
||||
# Turn both into store paths
|
||||
sourceStorePath=$(expectStorePath "$sourceExpr")
|
||||
filesetStorePath=$(expectStorePath "$filesetExpr")
|
||||
|
||||
# Loop through each path in the tree
|
||||
while IFS= read -r -d $'\0' subpath; do
|
||||
if [[ ! -e "$sourceStorePath"/"$subpath" ]]; then
|
||||
# If it's not in the source store path, it's also not in the file set store path
|
||||
if [[ -e "$filesetStorePath"/"$subpath" ]]; then
|
||||
die "The store path $sourceStorePath created by $expr doesn't contain $subpath, but the corresponding store path $filesetStorePath created via fromSource does contain $subpath"
|
||||
fi
|
||||
elif [[ -z "$(find "$sourceStorePath"/"$subpath" -type f)" ]]; then
|
||||
# If it's an empty directory in the source store path, it shouldn't be in the file set store path
|
||||
if [[ -e "$filesetStorePath"/"$subpath" ]]; then
|
||||
die "The store path $sourceStorePath created by $expr contains the path $subpath without any files, but the corresponding store path $filesetStorePath created via fromSource didn't omit it"
|
||||
fi
|
||||
else
|
||||
# If it's non-empty directory or a file, it should be in the file set store path
|
||||
if [[ ! -e "$filesetStorePath"/"$subpath" ]]; then
|
||||
die "The store path $sourceStorePath created by $expr contains the non-empty path $subpath, but the corresponding store path $filesetStorePath created via fromSource doesn't include it"
|
||||
fi
|
||||
fi
|
||||
done < <(find . -mindepth 1 -print0)
|
||||
|
||||
rm -rf -- *
|
||||
}
|
||||
|
||||
# Check whether the filter is evaluated correctly
|
||||
tree=(
|
||||
[a]=
|
||||
[b/]=
|
||||
[b/c]=
|
||||
[b/d]=
|
||||
[e/]=
|
||||
[e/e/]=
|
||||
)
|
||||
# We fill out the above tree values with all possible combinations of 0 and 1
|
||||
# Then check whether a filter based on those return values gets turned into the corresponding file set
|
||||
for i in $(seq 0 $((2 ** ${#tree[@]} - 1 ))); do
|
||||
for p in "${!tree[@]}"; do
|
||||
tree[$p]=$(( i % 2 ))
|
||||
(( i /= 2 )) || true
|
||||
done
|
||||
checkSource
|
||||
done
|
||||
|
||||
# The filter is called with the same arguments in the same order
|
||||
mkdir a e
|
||||
touch a/b a/c d e
|
||||
expectEqual '
|
||||
trace (fromSource (cleanSourceWith {
|
||||
src = ./.;
|
||||
filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
|
||||
})) null
|
||||
' '
|
||||
builtins.seq (cleanSourceWith {
|
||||
src = ./.;
|
||||
filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
|
||||
}).outPath
|
||||
builtins.trace "'"$work"' (all files in directory)"
|
||||
null
|
||||
'
|
||||
rm -rf -- *
|
||||
|
||||
# Test that if a directory is not included, the filter isn't called on its contents
|
||||
mkdir a b
|
||||
touch a/c b/d
|
||||
expectEqual 'trace (fromSource (cleanSourceWith {
|
||||
src = ./.;
|
||||
filter = pathString: type:
|
||||
if pathString == toString ./a then
|
||||
false
|
||||
else if pathString == toString ./b then
|
||||
true
|
||||
else if pathString == toString ./b/d then
|
||||
true
|
||||
else
|
||||
abort "This filter should not be called with path ${pathString}";
|
||||
})) null' 'trace (_create ./. { b = "directory"; }) null'
|
||||
rm -rf -- *
|
||||
|
||||
# The filter is called lazily:
|
||||
# If a later say intersection removes a part of the tree, the filter won't run on it
|
||||
mkdir a d
|
||||
touch a/{b,c} d/e
|
||||
expectEqual 'trace (intersection ./a (fromSource (lib.cleanSourceWith {
|
||||
src = ./.;
|
||||
filter = pathString: type:
|
||||
if pathString == toString ./a || pathString == toString ./a/b then
|
||||
true
|
||||
else if pathString == toString ./a/c then
|
||||
false
|
||||
else
|
||||
abort "filter should not be called on ${pathString}";
|
||||
}))) null' 'trace ./a/b null'
|
||||
rm -rf -- *
|
||||
|
||||
# TODO: Once we have combinators and a property testing library, derive property tests from https://en.wikipedia.org/wiki/Algebra_of_sets
|
||||
|
||||
echo >&2 tests ok
|
||||
|
@@ -57,6 +57,22 @@ using:
Once the connection is established, you can enter commands in the socat terminal
where socat is running.

## Port forwarding to NixOS test VMs {#sec-nixos-test-port-forwarding}

If your test has only a single VM, you may use e.g.

```ShellSession
$ QEMU_NET_OPTS="hostfwd=tcp:127.0.0.1:2222-127.0.0.1:22" ./result/bin/nixos-test-driver
```

to port-forward a port in the VM (here `22`) to the host machine (here port `2222`).

This naturally does not work when multiple machines are involved,
since a single port on the host cannot forward to multiple VMs.

If the test defines multiple machines, you may opt to _temporarily_ set
`virtualisation.forwardPorts` in the test definition for debugging.
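For illustration, such a temporary override could look roughly like the sketch below; this is a hedged example, and the machine name and port numbers are placeholders:

```nix
# Sketch: forward host port 2222 to guest port 22 for one machine while debugging.
machine = { ... }: {
  virtualisation.forwardPorts = [
    { from = "host"; host.port = 2222; guest.port = 22; }
  ];
};
```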

## Reuse VM state {#sec-nixos-test-reuse-vm-state}

You can re-use the VM states coming from a previous run by setting the
@@ -89,7 +89,7 @@ guest. For instance, the following will forward host port 2222 to guest
port 22 (SSH):

```ShellSession
-$ QEMU_NET_OPTS="hostfwd=tcp::2222-:22" ./result/bin/run-*-vm
+$ QEMU_NET_OPTS="hostfwd=tcp:127.0.0.1:2222-127.0.0.1:22" ./result/bin/run-*-vm
```

allowing you to log in via SSH (assuming you have set the appropriate
@@ -33,6 +33,8 @@
- All [ROCm](https://rocm.docs.amd.com/en/latest/) packages have been updated to 5.7.0.
- [ROCm](https://rocm.docs.amd.com/en/latest/) package attribute sets are versioned: `rocmPackages` -> `rocmPackages_5`.

- `yarn-berry` has been updated to 4.0.1. This means that NodeJS versions less than `18.12` are no longer supported by it. More details at the [upstream changelog](https://github.com/yarnpkg/berry/blob/master/CHANGELOG.md).

- If the user has a custom shell enabled via `users.users.${USERNAME}.shell = ${CUSTOMSHELL}`, the
  assertion will require them to also set `programs.${CUSTOMSHELL}.enable =
  true`. This is generally safe behavior, but for anyone needing to opt out from
@@ -373,6 +375,8 @@

- The `junicode` font package has been updated to [major version 2](https://github.com/psb1558/Junicode-font/releases/tag/v2.001), which is now a font family. In particular, plain `Junicode.ttf` no longer exists. In addition, TrueType font files are now placed in `font/truetype` instead of `font/junicode-ttf`; this change does not affect use via the `fonts.packages` NixOS option.

- The `prayer` package as well as `services.prayer` have been removed because it's been unmaintained for several years and the author's website has vanished.

## Other Notable Changes {#sec-release-23.11-notable-changes}

- A new option `system.switch.enable` was added. By default, this option is
@@ -525,6 +529,8 @@ The module update takes care of the new config syntax and the data itself (user

- `services.bitcoind` now properly respects the `enable` option.

- The Home Assistant module now offers support for installing custom components and lovelace modules. Available at [`services.home-assistant.customComponents`](#opt-services.home-assistant.customComponents) and [`services.home-assistant.customLovelaceModules`](#opt-services.home-assistant.customLovelaceModules).
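As an illustration, using these new options might look like the sketch below; the component and card names are taken from the option examples elsewhere in this changeset, so treat it as a hedged sketch rather than canonical documentation:

```nix
# Sketch: install one custom component and one custom lovelace card.
services.home-assistant = {
  enable = true;
  customComponents = with pkgs.home-assistant-custom-components; [
    prometheus-sensor
  ];
  customLovelaceModules = with pkgs.home-assistant-custom-lovelace-modules; [
    mini-graph-card
  ];
};
```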

## Nixpkgs internals {#sec-release-23.11-nixpkgs-internals}

- The use of `sourceRoot = "source";`, `sourceRoot = "source/subdir";`, and similar lines in package derivations using the default `unpackPhase` is deprecated as it requires `unpackPhase` to always produce a directory named "source". Use `sourceRoot = src.name`, `sourceRoot = "${src.name}/subdir";`, or `setSourceRoot = "sourceRoot=$(echo */subdir)";` or similar instead.
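To illustrate the replacement this note asks for, here is a minimal before/after sketch inside a derivation, assuming the source unpacks into a directory containing a `subdir` subdirectory:

```nix
# Before (deprecated): depends on unpackPhase producing a directory literally named "source"
sourceRoot = "source/subdir";

# After: derive the directory name from the src attribute itself
sourceRoot = "${src.name}/subdir";
```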
@@ -86,7 +86,7 @@ in
    #rtkit = 45; # dynamically allocated 2021-09-03
    dovecot2 = 46;
    dovenull2 = 47;
-    prayer = 49;
+    # prayer = 49; # dropped in 23.11
    mpd = 50;
    clamav = 51;
    #fprot = 52; # unused

@@ -411,7 +411,7 @@ in
    #rtkit = 45; # unused
    dovecot2 = 46;
    dovenull2 = 47;
-    prayer = 49;
+    # prayer = 49; # dropped in 23.11
    mpd = 50;
    clamav = 51;
    #fprot = 52; # unused
@@ -1041,7 +1041,6 @@
  ./services/networking/powerdns.nix
  ./services/networking/pppd.nix
  ./services/networking/pptpd.nix
-  ./services/networking/prayer.nix
  ./services/networking/privoxy.nix
  ./services/networking/prosody.nix
  ./services/networking/quassel.nix
@@ -111,6 +111,7 @@ in
    (mkRemovedOptionModule [ "services" "riak" ] "The corresponding package was removed from nixpkgs.")
    (mkRemovedOptionModule [ "services" "cryptpad" ] "The corresponding package was removed from nixpkgs.")
    (mkRemovedOptionModule [ "services" "rtsp-simple-server" ] "Package has been completely rebranded by upstream as mediamtx, and thus the service and the package were renamed in NixOS as well.")
+    (mkRemovedOptionModule [ "services" "prayer" ] "The corresponding package was removed from nixpkgs.")

    (mkRemovedOptionModule [ "i18n" "inputMethod" "fcitx" ] "The fcitx module has been removed. Please use fcitx5 instead")
    (mkRemovedOptionModule [ "services" "dhcpd4" ] ''
@@ -51,7 +51,7 @@ with lib;
    })

    (mkIf (!config.services.gnome.at-spi2-core.enable) {
-      environment.variables = {
+      environment.sessionVariables = {
        NO_AT_BRIDGE = "1";
        GTK_A11Y = "none";
      };
@@ -16,7 +16,8 @@ let
    cp ${format.generate "configuration.yaml" filteredConfig} $out
    sed -i -e "s/'\!\([a-z_]\+\) \(.*\)'/\!\1 \2/;s/^\!\!/\!/;" $out
  '';
-  lovelaceConfig = cfg.lovelaceConfig or {};
+  lovelaceConfig = if (cfg.lovelaceConfig == null) then {}
+    else (lib.recursiveUpdate customLovelaceModulesResources cfg.lovelaceConfig);
  lovelaceConfigFile = format.generate "ui-lovelace.yaml" lovelaceConfig;

  # Components advertised by the home-assistant package
@@ -62,8 +63,24 @@ let
    # Respect overrides that already exist in the passed package and
    # concat it with values passed via the module.
    extraComponents = oldArgs.extraComponents or [] ++ extraComponents;
-    extraPackages = ps: (oldArgs.extraPackages or (_: []) ps) ++ (cfg.extraPackages ps);
+    extraPackages = ps: (oldArgs.extraPackages or (_: []) ps)
+      ++ (cfg.extraPackages ps)
+      ++ (lib.concatMap (component: component.propagatedBuildInputs or []) cfg.customComponents);
  }));

  # Create a directory that holds all lovelace modules
  customLovelaceModulesDir = pkgs.buildEnv {
    name = "home-assistant-custom-lovelace-modules";
    paths = cfg.customLovelaceModules;
  };

  # Create parts of the lovelace config that reference lovelace modules as resources
  customLovelaceModulesResources = {
    lovelace.resources = map (card: {
      url = "/local/nixos-lovelace-modules/${card.entrypoint or card.pname}.js?${card.version}";
      type = "module";
    }) cfg.customLovelaceModules;
  };
in {
  imports = [
    # Migrations in NixOS 22.05
@@ -137,6 +154,41 @@ in {
      '';
    };

    customComponents = mkOption {
      type = types.listOf types.package;
      default = [];
      example = literalExpression ''
        with pkgs.home-assistant-custom-components; [
          prometheus-sensor
        ];
      '';
      description = lib.mdDoc ''
        List of custom component packages to install.

        Available components can be found below `pkgs.home-assistant-custom-components`.
      '';
    };

    customLovelaceModules = mkOption {
      type = types.listOf types.package;
      default = [];
      example = literalExpression ''
        with pkgs.home-assistant-custom-lovelace-modules; [
          mini-graph-card
          mini-media-player
        ];
      '';
      description = lib.mdDoc ''
        List of custom lovelace card packages to load as lovelace resources.

        Available cards can be found below `pkgs.home-assistant-custom-lovelace-modules`.

        ::: {.note}
        Automatic loading only works with lovelace in `yaml` mode.
        :::
      '';
    };

    config = mkOption {
      type = types.nullOr (types.submodule {
        freeformType = format.type;
@@ -408,9 +460,35 @@ in {
        rm -f "${cfg.configDir}/ui-lovelace.yaml"
        ln -s /etc/home-assistant/ui-lovelace.yaml "${cfg.configDir}/ui-lovelace.yaml"
      '';
      copyCustomLovelaceModules = if cfg.customLovelaceModules != [] then ''
        mkdir -p "${cfg.configDir}/www"
        ln -fns ${customLovelaceModulesDir} "${cfg.configDir}/www/nixos-lovelace-modules"
      '' else ''
        rm -f "${cfg.configDir}/www/nixos-lovelace-modules"
      '';
      copyCustomComponents = ''
        mkdir -p "${cfg.configDir}/custom_components"

        # remove components symlinked in from below the /nix/store
        components="$(find "${cfg.configDir}/custom_components" -maxdepth 1 -type l)"
        for component in "$components"; do
          if [[ "$(readlink "$component")" =~ ^${escapeShellArg builtins.storeDir} ]]; then
            rm "$component"
          fi
        done

        # recreate symlinks for desired components
        declare -a components=(${escapeShellArgs cfg.customComponents})
        for component in "''${components[@]}"; do
          path="$(dirname $(find "$component" -name "manifest.json"))"
          ln -fns "$path" "${cfg.configDir}/custom_components/"
        done
      '';
    in
      (optionalString (cfg.config != null) copyConfig) +
-      (optionalString (cfg.lovelaceConfig != null) copyLovelaceConfig)
+      (optionalString (cfg.lovelaceConfig != null) copyLovelaceConfig) +
+      copyCustomLovelaceModules +
+      copyCustomComponents
+    ;
    environment.PYTHONPATH = package.pythonPath;
    serviceConfig = let
@ -1,90 +0,0 @@
|
||||
{ config, lib, pkgs, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
|
||||
inherit (pkgs) prayer;
|
||||
|
||||
cfg = config.services.prayer;
|
||||
|
||||
stateDir = "/var/lib/prayer";
|
||||
|
||||
prayerUser = "prayer";
|
||||
prayerGroup = "prayer";
|
||||
|
||||
prayerExtraCfg = pkgs.writeText "extraprayer.cf" ''
|
||||
prefix = "${prayer}"
|
||||
var_prefix = "${stateDir}"
|
||||
prayer_user = "${prayerUser}"
|
||||
prayer_group = "${prayerGroup}"
|
||||
sendmail_path = "/run/wrappers/bin/sendmail"
|
||||
|
||||
use_http_port ${cfg.port}
|
||||
|
||||
${cfg.extraConfig}
|
||||
'';
|
||||
|
||||
prayerCfg = pkgs.runCommand "prayer.cf" { preferLocalBuild = true; } ''
|
||||
# We have to remove the http_port 80, or it will start a server there
|
||||
cat ${prayer}/etc/prayer.cf | grep -v http_port > $out
|
||||
cat ${prayerExtraCfg} >> $out
|
||||
'';
|
||||
|
||||
in
|
||||
|
||||
{
|
||||
|
||||
###### interface
|
||||
|
||||
options = {
|
||||
|
||||
services.prayer = {
|
||||
|
||||
enable = mkEnableOption (lib.mdDoc "the prayer webmail http server");
|
||||
|
||||
port = mkOption {
|
||||
default = 2080;
|
||||
type = types.port;
|
||||
description = lib.mdDoc ''
|
||||
Port the prayer http server is listening to.
|
||||
'';
|
||||
};
|
||||
|
||||
extraConfig = mkOption {
|
||||
type = types.lines;
|
||||
default = "" ;
|
||||
description = lib.mdDoc ''
|
||||
Extra configuration. Contents will be added verbatim to the configuration file.
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
};
|
||||
|
||||
|
||||
###### implementation
|
||||
|
||||
config = mkIf config.services.prayer.enable {
|
||||
environment.systemPackages = [ prayer ];
|
||||
|
||||
users.users.${prayerUser} =
|
||||
{ uid = config.ids.uids.prayer;
|
||||
description = "Prayer daemon user";
|
||||
home = stateDir;
|
||||
};
|
||||
|
||||
users.groups.${prayerGroup} =
|
||||
{ gid = config.ids.gids.prayer; };
|
||||
|
||||
systemd.services.prayer = {
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
serviceConfig.Type = "forking";
|
||||
preStart = ''
|
||||
mkdir -m 0755 -p ${stateDir}
|
||||
chown ${prayerUser}:${prayerGroup} ${stateDir}
|
||||
'';
|
||||
script = "${prayer}/sbin/prayer --config-file=${prayerCfg}";
|
||||
};
|
||||
};
|
||||
}
|
@ -43,6 +43,16 @@ in {
|
||||
psycopg2
|
||||
];
|
||||
|
||||
# test loading custom components
|
||||
customComponents = with pkgs.home-assistant-custom-components; [
|
||||
prometheus-sensor
|
||||
];
|
||||
|
||||
# test loading lovelace modules
|
||||
customLovelaceModules = with pkgs.home-assistant-custom-lovelace-modules; [
|
||||
mini-graph-card
|
||||
];
|
||||
|
||||
config = {
|
||||
homeassistant = {
|
||||
name = "Home";
|
||||
@ -114,6 +124,14 @@ in {
|
||||
inheritParentConfig = true;
|
||||
configuration.services.home-assistant.config.backup = {};
|
||||
};
|
||||
|
||||
specialisation.removeCustomThings = {
|
||||
inheritParentConfig = true;
|
||||
configuration.services.home-assistant = {
|
||||
customComponents = lib.mkForce [];
|
||||
customLovelaceModules = lib.mkForce [];
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
testScript = { nodes, ... }: let
|
||||
@ -161,6 +179,14 @@ in {
|
||||
hass.wait_for_open_port(8123)
|
||||
hass.succeed("curl --fail http://localhost:8123/lovelace")
|
||||
|
||||
with subtest("Check that custom components get installed"):
|
||||
hass.succeed("test -f ${configDir}/custom_components/prometheus_sensor/manifest.json")
|
||||
hass.wait_until_succeeds("journalctl -u home-assistant.service | grep -q 'We found a custom integration prometheus_sensor which has not been tested by Home Assistant'")
|
||||
|
||||
with subtest("Check that lovelace modules are referenced and fetchable"):
|
||||
hass.succeed("grep -q 'mini-graph-card-bundle.js' '${configDir}/ui-lovelace.yaml'")
|
||||
hass.succeed("curl --fail http://localhost:8123/local/nixos-lovelace-modules/mini-graph-card-bundle.js")
|
||||
|
||||
with subtest("Check that optional dependencies are in the PYTHONPATH"):
|
||||
env = get_unit_property("Environment")
|
||||
python_path = env.split("PYTHONPATH=")[1].split()[0]
|
||||
@ -200,6 +226,13 @@ in {
|
||||
for domain in ["backup"]:
|
||||
assert f"Setup of domain {domain} took" in journal, f"{domain} setup missing"
|
||||
|
||||
with subtest("Check custom components and custom lovelace modules get removed"):
|
||||
cursor = get_journal_cursor()
|
||||
hass.succeed("${system}/specialisation/removeCustomThings/bin/switch-to-configuration test")
|
||||
hass.fail("grep -q 'mini-graph-card-bundle.js' '${configDir}/ui-lovelace.yaml'")
|
||||
hass.fail("test -f ${configDir}/custom_components/prometheus_sensor/manifest.json")
|
||||
wait_for_homeassistant(cursor)
|
||||
|
||||
with subtest("Check that no errors were logged"):
|
||||
hass.fail("journalctl -u home-assistant -o cat | grep -q ERROR")
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
{ lib, stdenv, fetchurl, libogg, libvorbis, libao, pkg-config, curl
|
||||
{ lib, stdenv, fetchurl, fetchpatch, libogg, libvorbis, libao, pkg-config, curl, libiconv
|
||||
, speex, flac
|
||||
, autoreconfHook }:
|
||||
|
||||
@ -11,12 +11,18 @@ stdenv.mkDerivation rec {
|
||||
sha256 = "1c7h4ivgfdyygz2hyh6nfibxlkz8kdk868a576qkkjgj5gn78xyv";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [ autoreconfHook pkg-config ];
|
||||
buildInputs = [ libogg libvorbis libao curl speex flac ];
|
||||
patches = lib.optionals stdenv.cc.isClang [
|
||||
# Fixes a call to undeclared function `utf8_decode`.
|
||||
# https://github.com/xiph/vorbis-tools/pull/33
|
||||
(fetchpatch {
|
||||
url = "https://github.com/xiph/vorbis-tools/commit/8a645f78b45ae7e370c0dc2a52d0f2612aa6110b.patch";
|
||||
hash = "sha256-RkT9Xa0pRu/oO9E9qhDa17L0luWgYHI2yINIkPZanmI=";
|
||||
})
|
||||
];
|
||||
|
||||
env = lib.optionalAttrs stdenv.cc.isClang {
|
||||
NIX_CFLAGS_COMPILE = "-Wno-error=implicit-function-declaration";
|
||||
};
|
||||
nativeBuildInputs = [ autoreconfHook pkg-config ];
|
||||
buildInputs = [ libogg libvorbis libao curl speex flac ]
|
||||
++ lib.optionals stdenv.isDarwin [ libiconv ];
|
||||
|
||||
meta = with lib; {
|
||||
description = "Extra tools for Ogg-Vorbis audio codec";
|
||||
|
@ -1575,8 +1575,8 @@ let
|
||||
mktplcRef = {
|
||||
publisher = "github";
|
||||
name = "copilot";
|
||||
version = "1.126.493";
|
||||
sha256 = "1an7z8z3xz2piw2xz1hdrs6l5rhpyvnjmb650ff2m4k24n01svfy";
|
||||
version = "1.135.544";
|
||||
sha256 = "sha256-OeG1nkQbQAfu8NuDEA+iaWy0ioFyXPe7Qm/CZIKPiX8=";
|
||||
};
|
||||
|
||||
meta = {
|
||||
@ -1592,8 +1592,8 @@ let
|
||||
mktplcRef = {
|
||||
publisher = "github";
|
||||
name = "copilot-chat";
|
||||
version = "0.3.2023061502";
|
||||
sha256 = "sha256-sUoKwlPDMz+iQbmIsD2JhyDwmUQzOyCHXaXCUaizQ7k=";
|
||||
version = "0.11.2023111001";
|
||||
sha256 = "sha256-sBDvqqyq0R0ZyS81G61fI9Vd860RIjhNzCqY0bdz1mg=";
|
||||
};
|
||||
meta = {
|
||||
description = "GitHub Copilot Chat is a companion extension to GitHub Copilot that houses experimental chat features";
|
||||
@ -3554,8 +3554,8 @@ let
|
||||
mktplcRef = {
|
||||
name = "uiua-vscode";
|
||||
publisher = "uiua-lang";
|
||||
version = "0.0.22";
|
||||
sha256 = "sha256-fJcSJwwRVofduWEEMa5f2VrSfyONKPkFl9OW+++lSRw=";
|
||||
version = "0.0.23";
|
||||
sha256 = "sha256-NauXoYTAka8qXNPYlW5g7r6NNX1x8cnvDRbEGkRsMoY=";
|
||||
};
|
||||
meta = {
|
||||
description = "VSCode language extension for Uiua";
|
||||
|
@ -6,35 +6,24 @@
|
||||
, makeWrapper
|
||||
, alsa-lib
|
||||
, curl
|
||||
, egl-wayland
|
||||
, libao
|
||||
, libdecor
|
||||
, libevdev
|
||||
, libffi
|
||||
, libGL
|
||||
, libpulseaudio
|
||||
, libX11
|
||||
, libXext
|
||||
, libxkbcommon
|
||||
, libzip
|
||||
, mesa
|
||||
, lua
|
||||
, miniupnpc
|
||||
, udev
|
||||
, vulkan-headers
|
||||
, SDL2
|
||||
, vulkan-loader
|
||||
, wayland
|
||||
, zlib
|
||||
}:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
pname = "flycast";
|
||||
version = "2.1";
|
||||
version = "2.2";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "flyinghead";
|
||||
repo = "flycast";
|
||||
rev = "V${version}";
|
||||
sha256 = "sha256-PRInOqg9OpaUVLwSj1lOxDtjpVaYehkRsp0jLrVKPyY=";
|
||||
rev = "v${version}";
|
||||
sha256 = "sha256-eQMKaUaZ1b0oXre4Ouli4qIyNaG64KntyRGk3/YIopc=";
|
||||
fetchSubmodules = true;
|
||||
};
|
||||
|
||||
@ -47,23 +36,16 @@ stdenv.mkDerivation rec {
|
||||
buildInputs = [
|
||||
alsa-lib
|
||||
curl
|
||||
egl-wayland
|
||||
libao
|
||||
libdecor
|
||||
libevdev
|
||||
libffi
|
||||
libGL
|
||||
libpulseaudio
|
||||
libX11
|
||||
libXext
|
||||
libxkbcommon
|
||||
libzip
|
||||
mesa # for libgbm
|
||||
lua
|
||||
miniupnpc
|
||||
udev
|
||||
vulkan-headers
|
||||
wayland
|
||||
zlib
|
||||
SDL2
|
||||
];
|
||||
|
||||
cmakeFlags = [
|
||||
"-DUSE_HOST_SDL=ON"
|
||||
];
|
||||
|
||||
postFixup = ''
|
||||
|
21
pkgs/applications/gis/grass/clang-integer-conversion.patch
Normal file
21
pkgs/applications/gis/grass/clang-integer-conversion.patch
Normal file
@ -0,0 +1,21 @@
|
||||
diff -ur a/db/drivers/mysql/db.c b/db/drivers/mysql/db.c
|
||||
--- a/db/drivers/mysql/db.c 1969-12-31 19:00:01.000000000 -0500
|
||||
+++ b/db/drivers/mysql/db.c 2023-11-09 23:26:25.329700495 -0500
|
||||
@@ -52,9 +52,16 @@
|
||||
|
||||
db_get_login2("mysql", name, &user, &password, &host, &port);
|
||||
|
||||
+ const char* errstr;
|
||||
+ unsigned int port_number = (unsigned int)strtonum(port, 0, 65536, &errstr);
|
||||
+ if (errstr != NULL) {
|
||||
+ db_d_append_error("%s", errstr);
|
||||
+ return DB_FAILED;
|
||||
+ }
|
||||
+
|
||||
connection = mysql_init(NULL);
|
||||
res = mysql_real_connect(connection, host, user, password,
|
||||
- connpar.dbname, port, NULL, 0);
|
||||
+ connpar.dbname, port_number, NULL, 0);
|
||||
|
||||
if (res == NULL) {
|
||||
db_d_append_error("%s\n%s", _("Connection failed."),
|
@ -81,12 +81,13 @@ stdenv.mkDerivation (finalAttrs: {
|
||||
|
||||
strictDeps = true;
|
||||
|
||||
# On Darwin the installer tries to symlink the help files into a system
|
||||
# directory
|
||||
patches = [ ./no_symbolic_links.patch ];
|
||||
patches = lib.optionals stdenv.isDarwin [
|
||||
# Fix conversion of const char* to unsigned int.
|
||||
./clang-integer-conversion.patch
|
||||
];
|
||||
|
||||
# Correct mysql_config query
|
||||
patchPhase = ''
|
||||
postPatch = ''
|
||||
substituteInPlace configure --replace "--libmysqld-libs" "--libs"
|
||||
'';
|
||||
|
||||
|
@ -1,37 +0,0 @@
|
||||
diff --git a/include/Make/Install.make b/include/Make/Install.make
|
||||
index 0aba138..8ba74bc 100644
|
||||
--- a/include/Make/Install.make
|
||||
+++ b/include/Make/Install.make
|
||||
@@ -116,11 +116,6 @@ real-install: | $(INST_DIR) $(UNIX_BIN)
|
||||
-$(INSTALL) config.status $(INST_DIR)/config.status
|
||||
-$(CHMOD) -R a+rX $(INST_DIR) 2>/dev/null
|
||||
|
||||
-ifneq ($(findstring darwin,$(ARCH)),)
|
||||
- @# enable OSX Help Viewer
|
||||
- @/bin/ln -sfh "$(INST_DIR)/docs/html" /Library/Documentation/Help/GRASS-$(GRASS_VERSION_MAJOR).$(GRASS_VERSION_MINOR)
|
||||
-endif
|
||||
-
|
||||
$(INST_DIR) $(UNIX_BIN):
|
||||
$(MAKE_DIR_CMD) $@
|
||||
|
||||
diff --git a/macosx/app/build_html_user_index.sh b/macosx/app/build_html_user_index.sh
|
||||
index 04e63eb..c9d9c2c 100755
|
||||
--- a/macosx/app/build_html_user_index.sh
|
||||
+++ b/macosx/app/build_html_user_index.sh
|
||||
@@ -140,7 +140,6 @@ else
|
||||
# echo "<tr><td valign=\"top\"><a href=\"$HTMLDIRG/$i\">$BASENAME</a></td> <td>$SHORTDESC</td></tr>" >> $FULLINDEX
|
||||
# make them local to user to simplify page links
|
||||
echo "<tr><td valign=\"top\"><a href=\"global_$i\">$BASENAME</a></td> <td>$SHORTDESC</td></tr>" >> $FULLINDEX
|
||||
- ln -sf "$HTMLDIRG/$i" global_$i
|
||||
done
|
||||
done
|
||||
fi
|
||||
@@ -183,8 +182,3 @@ echo "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\">
|
||||
</html>" > $i.html
|
||||
done
|
||||
|
||||
-# add Help Viewer links in user docs folder
|
||||
-
|
||||
-mkdir -p $HOME/Library/Documentation/Help/
|
||||
-ln -sfh ../../GRASS/$GRASS_MMVER/Modules/docs/html $HOME/Library/Documentation/Help/GRASS-$GRASS_MMVER-addon
|
||||
-ln -sfh $GISBASE/docs/html $HOME/Library/Documentation/Help/GRASS-$GRASS_MMVER
|
@ -9,11 +9,11 @@
|
||||
|
||||
stdenvNoCC.mkDerivation rec {
|
||||
pname = "camunda-modeler";
|
||||
version = "5.16.0";
|
||||
version = "5.17.0";
|
||||
|
||||
src = fetchurl {
|
||||
url = "https://github.com/camunda/camunda-modeler/releases/download/v${version}/camunda-modeler-${version}-linux-x64.tar.gz";
|
||||
hash = "sha256-Y+v/r5bhtgXBjRQic0s5FA+KMWx5R7DOK+qZ9Izdnb0=";
|
||||
hash = "sha256-yxph3Aor5nZOhu2PY4MGcfScaz9w24JXqXbhT+QKlNI=";
|
||||
};
|
||||
sourceRoot = "camunda-modeler-${version}-linux-x64";
|
||||
|
||||
|
@ -10,11 +10,11 @@
|
||||
}:
|
||||
let
|
||||
pname = "jetbrains-toolbox";
|
||||
version = "2.0.5.17700";
|
||||
version = "2.1.0.18144";
|
||||
|
||||
src = fetchzip {
|
||||
url = "https://download.jetbrains.com/toolbox/jetbrains-toolbox-${version}.tar.gz";
|
||||
sha256 = "sha256-BO9W9miQUltsg1tCyTl9j5xRCJUCsO02hUKDCYt7hd8=";
|
||||
sha256 = "sha256-K65naW+RWAy4uxQq2GQmL0kwCH+G73ez1kgTtnTwjEw=";
|
||||
stripRoot = false;
|
||||
};
|
||||
|
||||
|
@ -1,38 +0,0 @@
|
||||
diff --git a/CMakeLists.txt b/CMakeLists.txt
|
||||
index ce78a9d..3cd51e0 100644
|
||||
--- a/CMakeLists.txt
|
||||
+++ b/CMakeLists.txt
|
||||
@@ -8,18 +8,21 @@ list(APPEND CMAKE_PREFIX_PATH ${CMAKE_BINARY_DIR})
|
||||
# Common configuration
|
||||
set(CMAKE_CXX_STANDARD 20)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
-
|
||||
-# Setup Conan
|
||||
-if(NOT EXISTS "${CMAKE_BINARY_DIR}/conan.cmake")
|
||||
- message(STATUS "Downloading conan.cmake from https://github.com/conan-io/cmake-conan")
|
||||
- file(DOWNLOAD "https://raw.githubusercontent.com/conan-io/cmake-conan/0.18.1/conan.cmake"
|
||||
- "${CMAKE_BINARY_DIR}/conan.cmake"
|
||||
- TLS_VERIFY ON)
|
||||
-endif()
|
||||
-include(${CMAKE_BINARY_DIR}/conan.cmake)
|
||||
-
|
||||
-conan_cmake_autodetect(settings)
|
||||
-conan_cmake_install(PATH_OR_REFERENCE ${CMAKE_SOURCE_DIR} BUILD missing SETTINGS ${settings})
|
||||
+set(USE_CONAN ON CACHE BOOL "Use conan for dependency managment")
|
||||
+
|
||||
+if(USE_CONAN)
|
||||
+ # Setup Conan
|
||||
+ if(NOT EXISTS "${CMAKE_BINARY_DIR}/conan.cmake")
|
||||
+ message(STATUS "Downloading conan.cmake from https://github.com/conan-io/cmake-conan")
|
||||
+ file(DOWNLOAD "https://raw.githubusercontent.com/conan-io/cmake-conan/0.18.1/conan.cmake"
|
||||
+ "${CMAKE_BINARY_DIR}/conan.cmake"
|
||||
+ TLS_VERIFY ON)
|
||||
+ endif()
|
||||
+ include(${CMAKE_BINARY_DIR}/conan.cmake)
|
||||
+
|
||||
+ conan_cmake_autodetect(settings)
|
||||
+ conan_cmake_install(PATH_OR_REFERENCE ${CMAKE_SOURCE_DIR} BUILD missing SETTINGS ${settings})
|
||||
+endif ()
|
||||
|
||||
# Setup Qt
|
||||
set(CMAKE_AUTOMOC ON)
|
@ -1,13 +1,16 @@
|
||||
{ lib
|
||||
, stdenv
|
||||
, fetchFromGitHub
|
||||
, fetchpatch
|
||||
, cmake
|
||||
, magic-enum
|
||||
, range-v3
|
||||
, spdlog
|
||||
, qtbase
|
||||
, qtconnectivity
|
||||
, qttools
|
||||
, qtlanguageserver
|
||||
, qtwayland
|
||||
, wrapQtAppsHook
|
||||
, libXScrnSaver
|
||||
, nix-update-script
|
||||
@ -15,15 +18,24 @@
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
pname = "kemai";
|
||||
version = "0.9.2";
|
||||
version = "0.10.0";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "AlexandrePTJ";
|
||||
repo = "kemai";
|
||||
rev = version;
|
||||
hash = "sha256-PDjNO2iMPK0J3TSHVZ/DW3W0GkdB8yNZYoTGEd2snac=";
|
||||
hash = "sha256-wclBAgeDyAIw/nGF6lzIwbwdoZMBTu+tjxsnIxIkODM=";
|
||||
};
|
||||
|
||||
patches = [
|
||||
# Backport the fix for an issue where LICENSE.txt ends up in /bin
|
||||
# Remove in next release
|
||||
(fetchpatch {
|
||||
url = "https://github.com/AlexandrePTJ/kemai/commit/e279679dd7308efebe004252d168d7308f3b99ce.patch";
|
||||
hash = "sha256-5cmRRMVATf4ul4HhaQKiE0yTN2qd+MfNFQzGTLLpOyg=";
|
||||
})
|
||||
];
|
||||
|
||||
buildInputs = [
|
||||
qtbase
|
||||
qtconnectivity
|
||||
@ -31,10 +43,14 @@ stdenv.mkDerivation rec {
|
||||
qtlanguageserver
|
||||
libXScrnSaver
|
||||
magic-enum
|
||||
range-v3
|
||||
spdlog
|
||||
] ++ lib.optional stdenv.hostPlatform.isLinux qtwayland;
|
||||
cmakeFlags = [
|
||||
"-DFETCHCONTENT_FULLY_DISCONNECTED=ON"
|
||||
"-DFETCHCONTENT_QUIET=OFF"
|
||||
"-DFETCHCONTENT_TRY_FIND_PACKAGE_MODE=ALWAYS"
|
||||
];
|
||||
cmakeFlags = [ "-DUSE_CONAN=OFF" ];
|
||||
patches = [ ./000-cmake-disable-conan.diff ];
|
||||
|
||||
nativeBuildInputs = [ cmake wrapQtAppsHook ];
|
||||
|
||||
@ -48,5 +64,7 @@ stdenv.mkDerivation rec {
|
||||
license = licenses.mit;
|
||||
maintainers = with maintainers; [ poelzi ];
|
||||
platforms = platforms.unix;
|
||||
broken = stdenv.isDarwin;
|
||||
mainProgram = "Kemai";
|
||||
};
|
||||
}
|
||||
|
@ -503,6 +503,9 @@ buildStdenv.mkDerivation {
|
||||
|
||||
preBuild = ''
|
||||
cd mozobj
|
||||
'' + lib.optionalString (lib.versionAtLeast version "120") ''
|
||||
# https://bugzilla.mozilla.org/show_bug.cgi?id=1864083
|
||||
export NIX_CFLAGS_COMPILE="$NIX_CFLAGS_COMPILE $(pkg-config dbus-1 --cflags)"
|
||||
'';
|
||||
|
||||
postBuild = ''
|
||||
|
@ -30,11 +30,11 @@
|
||||
|
||||
firefox-beta = buildMozillaMach rec {
|
||||
pname = "firefox-beta";
|
||||
version = "119.0b9";
|
||||
version = "120.0b9";
|
||||
applicationName = "Mozilla Firefox Beta";
|
||||
src = fetchurl {
|
||||
url = "mirror://mozilla/firefox/releases/${version}/source/firefox-${version}.source.tar.xz";
|
||||
sha512 = "11d07474e3ca72a4e2f60053882e09a215e0d29d6830d0cd41447bb67370118356090af7adcbacd7703ad9fcdda83c9f909419c86b8f3bf2eacd9ca3d3aa3f54";
|
||||
sha512 = "7ac5562ce393ea84663eac5c6ee1a0ca527ff4a8a9ec6aaaef37213ff071076846949e80af21d95ec8e32d3cbc740b772a9d7cc54965b7bbc8e015da22ae927f";
|
||||
};
|
||||
|
||||
meta = {
|
||||
@ -58,12 +58,12 @@
|
||||
|
||||
firefox-devedition = (buildMozillaMach rec {
|
||||
pname = "firefox-devedition";
|
||||
version = "119.0b9";
|
||||
version = "120.0b9";
|
||||
applicationName = "Mozilla Firefox Developer Edition";
|
||||
branding = "browser/branding/aurora";
|
||||
src = fetchurl {
|
||||
url = "mirror://mozilla/devedition/releases/${version}/source/firefox-${version}.source.tar.xz";
|
||||
sha512 = "ce3e2adb3171aa05c7af3b7a4ea25eaafbc109c522b90e26aad577192a0902000fb7d705fa5707a9a7d0be2ab1c0cddc5a98abbe6549e1377c0a1d765bda62eb";
|
||||
sha512 = "07bf1a58550e70c683719adef55fa3d1ee06876e0cb086c28242879c683269c4aa784b1dce639218b3ad24a546192088fe5224a52e13a0086f205ec5470e2428";
|
||||
};
|
||||
|
||||
meta = {
|
||||
|
@ -18,7 +18,7 @@
|
||||
|
||||
stdenv.mkDerivation (finalAttrs: {
|
||||
pname = "palemoon-bin";
|
||||
version = "32.4.1";
|
||||
version = "32.5.0";
|
||||
|
||||
src = fetchzip {
|
||||
urls = [
|
||||
@ -26,9 +26,9 @@ stdenv.mkDerivation (finalAttrs: {
|
||||
"https://rm-us.palemoon.org/release/palemoon-${finalAttrs.version}.linux-x86_64-gtk${if withGTK3 then "3" else "2"}.tar.xz"
|
||||
];
|
||||
hash = if withGTK3 then
|
||||
"sha256-c/rfnMpiLWqlNZppqPRNWXsgAQ1FofAdel5EFnK+mrY="
|
||||
"sha256-1MJ5K9Zc/BHeQwwlq3XyUV8XTFEpPytNyTnsDpE1tBI="
|
||||
else
|
||||
"sha256-27njFdqq2DUctlz/UOtH5tlOduQNpoapuCYS+48K9dk=";
|
||||
"sha256-xXunZTqoc2A+ilosRUUluxDwewD3xwITF5nb5Lbyv7Y=";
|
||||
};
|
||||
|
||||
preferLocalBuild = true;
|
||||
|
@ -1,4 +1,5 @@
|
||||
{ lib
|
||||
, stdenv
|
||||
, mkDerivationWith
|
||||
, fetchFromGitHub
|
||||
, python3Packages
|
||||
@ -6,6 +7,8 @@
|
||||
}:
|
||||
|
||||
mkDerivationWith python3Packages.buildPythonApplication rec {
|
||||
inherit stdenv;
|
||||
|
||||
pname = "webmacs";
|
||||
version = "0.8";
|
||||
|
||||
|
@ -95,6 +95,7 @@ let
|
||||
removed = name: date: throw "the ${name} terraform provider removed from nixpkgs on ${date}";
|
||||
in
|
||||
lib.optionalAttrs config.allowAliases {
|
||||
fly = archived "fly" "2023/10";
|
||||
ksyun = removed "ksyun" "2023/04";
|
||||
};
|
||||
|
||||
|
@ -425,15 +425,6 @@
|
||||
"spdx": "MPL-2.0",
|
||||
"vendorHash": "sha256-RqYzqKPzb5GcrzHnEDZC7GaBt1zP8g28Wo3WNAe07Ck="
|
||||
},
|
||||
"fly": {
|
||||
"hash": "sha256-9QB2fbggCKcJz8tkSYgq/X8r+MB2M76VCWXgsHARTkU=",
|
||||
"homepage": "https://registry.terraform.io/providers/fly-apps/fly",
|
||||
"owner": "fly-apps",
|
||||
"repo": "terraform-provider-fly",
|
||||
"rev": "v0.0.23",
|
||||
"spdx": "BSD-3-Clause",
|
||||
"vendorHash": "sha256-f+Z6Y2WPxqJoHoCwuK6sgFa8nUnkW/WwrD55dtU0wtM="
|
||||
},
|
||||
"fortios": {
|
||||
"hash": "sha256-RpcKMndbO3wbkHmrINkbsQ+UeFsZrQ7x02dv8ZpFMec=",
|
||||
"homepage": "https://registry.terraform.io/providers/fortinetdev/fortios",
|
||||
|
pkgs/by-name/ro/rockyou/package.nix (new file, 20 lines)
@@ -0,0 +1,20 @@
{ seclists
, stdenvNoCC
}:
stdenvNoCC.mkDerivation {
  pname = "rockyou";
  inherit (seclists) version src;

  installPhase = ''
    runHook preInstall

    mkdir -p $out/share/wordlists/
    tar -xvzf ${seclists}/share/wordlists/seclists/Passwords/Leaked-Databases/rockyou.txt.tar.gz -C $out/share/wordlists/

    runHook postInstall
  '';

  meta = seclists.meta // {
    description = "A famous wordlist often used for brute force attacks";
  };
}
pkgs/by-name/se/seclists/package.nix (new file, 34 lines)
@@ -0,0 +1,34 @@
{ lib
, fetchFromGitHub
, stdenvNoCC
}:

stdenvNoCC.mkDerivation {
  pname = "seclists";
  version = "2023.2";

  src = fetchFromGitHub {
    owner = "danielmiessler";
    repo = "SecLists";
    rev = "2023.2";
    hash = "sha256-yVxb5GaQDuCsyjIV+oZzNUEFoq6gMPeaIeQviwGdAgY=";
  };

  installPhase = ''
    runHook preInstall

    mkdir -p $out/share/wordlists/seclists
    find . -maxdepth 1 -type d -regextype posix-extended -regex '^./[A-Z].*' -exec cp -R {} $out/share/wordlists/seclists \;
    find $out/share/wordlists/seclists -name "*.md" -delete

    runHook postInstall
  '';

  meta = with lib; {
    description = "A collection of multiple types of lists used during security assessments, collected in one place";
    homepage = "https://github.com/danielmiessler/seclists";
    license = licenses.mit;
    maintainers = with maintainers; [ tochiaha janik pamplemousse ];
  };
}
@ -14,16 +14,16 @@
|
||||
|
||||
rustPlatform.buildRustPackage rec {
|
||||
pname = "uiua";
|
||||
version = "0.1.0";
|
||||
version = "0.2.0";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "uiua-lang";
|
||||
repo = "uiua";
|
||||
rev = version;
|
||||
hash = "sha256-ZoiT7Yf8Mdwh2vBkRCDxhkbvTkekhTopFNWjUnyoPUQ=";
|
||||
hash = "sha256-RAMQC9weEvTV44nAXjwMYv+4O5aSNNM5UOf/xBb4SBE=";
|
||||
};
|
||||
|
||||
cargoHash = "sha256-My/15zNfEqt+a0jganS6LfFiEXENUaPTcyz6SBL0oKo=";
|
||||
cargoHash = "sha256-ZBedAIHwbRiR9i6w0CWIiE+OJvTkmxiEihn7zLAV/Dg=";
|
||||
|
||||
nativeBuildInputs = lib.optionals stdenv.isDarwin [
|
||||
rustPlatform.bindgenHook
|
||||
|
pkgs/by-name/wo/wordlists/package.nix (new file, 70 lines)
@@ -0,0 +1,70 @@
{ lib
, callPackage
, nmap
, rockyou
, runtimeShell
, seclists
, symlinkJoin
, tree
, wfuzz
, lists ? [
    nmap
    rockyou
    seclists
    wfuzz
  ]
}:

symlinkJoin rec {
  pname = "wordlists";
  version = "unstable-2023-10-10";

  name = "${pname}-${version}";
  paths = lists;

  postBuild = ''
    mkdir -p $out/bin

    # Create a command to show the location of the links.
    cat >> $out/bin/wordlists << __EOF__
    #!${runtimeShell}
    ${tree}/bin/tree ${placeholder "out"}/share/wordlists
    __EOF__
    chmod +x $out/bin/wordlists

    # Create a handy command for easy access to the wordlists.
    # e.g.: `cat "$(wordlists_path)/rockyou.txt"`, or `ls "$(wordlists_path)/dirbuster"`
    cat >> $out/bin/wordlists_path << __EOF__
    #!${runtimeShell}
    printf "${placeholder "out"}/share/wordlists\n"
    __EOF__
    chmod +x $out/bin/wordlists_path
  '';

  meta = with lib; {
    description = "A collection of wordlists useful for security testing";
    longDescription = ''
      The `wordlists` package provides two scripts. One is called {command}`wordlists`,
      and it will list a tree of all the wordlists installed. The other one is
      called {command}`wordlists_path`, which will print the path to the Nix store
      location of the lists. You can for example do
      {command}`$(wordlists_path)/rockyou.txt` to get the location of the
      [rockyou](https://en.wikipedia.org/wiki/RockYou#Data_breach)
      wordlist. If you want to modify the available wordlists, you can override
      the `lists` attribute. In your NixOS configuration this would look
      similar to this:

      ```nix
      environment.systemPackages = [
        (pkgs.wordlists.override { lists = with pkgs; [ rockyou ]; })
      ];
      ```

      You can use this with nix-shell by doing:
      {command}`nix-shell -p 'wordlists.override { lists = with (import <nixpkgs> {}); [ nmap ]; }'`
      If you want to add a new package that provides wordlists, the convention
      is to copy it to {file}`$out/share/wordlists/myNewWordlist`.
    '';
    maintainers = with maintainers; [ janik pamplemousse ];
  };
}
@ -7,16 +7,16 @@ let
|
||||
arch = if stdenv.isAarch64 then "arm64" else "x86_64";
|
||||
hashes =
|
||||
{
|
||||
"x86_64-linux" = "b13110bacc3f71c2a3e12c52172a821a85cc13243a95249ca18c8beb296c0ce8";
|
||||
"aarch64-linux" = "afbc71f0570b86215942d1b4207fe3de0299e6fdfd2e6caac78bf688c81b9bd1";
|
||||
"x86_64-darwin" = "50a3df09b02b34e1653beb1507c6de0f332674e088ded7c66af4e5987753304e";
|
||||
"aarch64-darwin" = "174a5bfec355361c4f030861405513818be25fd7e4325f7221aa71ebd27475d3";
|
||||
"x86_64-linux" = "a51d5b9a011c54b0001ff3273cee027774686e233adadb20b1978d2cabfe32a6";
|
||||
"aarch64-linux" = "8904ce928f60e06df1f06b3af5ee5eb320c388922aa38b698d823df1d73e8e49";
|
||||
"x86_64-darwin" = "b4d1bb5ddc3503862750e5b241f74c22dc013792bc4f410dd914a5216e20ed2f";
|
||||
"aarch64-darwin" = "6d20e384dae90bb994c3f1e866c964124c7e8a51e9e08bad0e90a2b560bb5a18";
|
||||
};
|
||||
in
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
pname = "lamdera";
|
||||
version = "1.2.0";
|
||||
version = "1.2.1";
|
||||
|
||||
src = fetchurl {
|
||||
url = "https://static.lamdera.com/bin/lamdera-${version}-${os}-${arch}";
|
||||
|
@ -6,8 +6,8 @@
|
||||
, pkg-config
|
||||
, openssl
|
||||
, Security
|
||||
, libiconv
|
||||
, nix-update-script
|
||||
, SystemConfiguration
|
||||
}:
|
||||
|
||||
rustPlatform.buildRustPackage rec {
|
||||
@ -24,7 +24,7 @@ rustPlatform.buildRustPackage rec {
|
||||
nativeBuildInputs = [ git pkg-config ];
|
||||
|
||||
buildInputs = [ openssl ] ++
|
||||
lib.optionals stdenv.isDarwin [ Security libiconv ];
|
||||
lib.optionals stdenv.isDarwin [ Security SystemConfiguration ];
|
||||
|
||||
cargoHash = "sha256-ffnDTGg+m0NUhG2BYjsXb2fWHeQmtDcBGqQDLqwZMWI=";
|
||||
|
||||
|
@ -14,6 +14,10 @@ stdenv.mkDerivation (finalAttrs: {
|
||||
hash= "sha256-T4feegblOeG+NU+c+PAobf8HT8KDSfcINkRAa1hNpkY=";
|
||||
};
|
||||
|
||||
patches = [
|
||||
./readlink.patch
|
||||
];
|
||||
|
||||
configureFlags = [ "--enable-mcpplib" ];
|
||||
|
||||
meta = with lib; {
|
||||
|
24
pkgs/development/compilers/mcpp/readlink.patch
Normal file
@ -0,0 +1,24 @@
From 1c4b0f26614bff331eb8a9f2b514309af6f31fd0 Mon Sep 17 00:00:00 2001
From: Jose <pepone@users.noreply.github.com>
Date: Mon, 26 Jun 2023 16:43:43 +0200
Subject: [PATCH] Add 'unistd' header for readlink (#8)

---
 src/system.c | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/system.c b/src/system.c
index a3501f9..646caf6 100644
--- a/src/system.c
+++ b/src/system.c
@@ -37,6 +37,11 @@
  * 2. append the system-dependent routines in this file.
  */
+
+#ifndef _MSC_VER
+# include <unistd.h> // For readlink()
+#endif
+
 #if PREPROCESSED
 #include "mcpp.H"
 #else
@ -7,15 +7,13 @@
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
pname = "boost-sml";
|
||||
# This is first commit since 1.1.6 that passes all tests (test_policies_logging is commented out)
|
||||
version = "1.1.6";
|
||||
working_tests = "24d762d1901f4f6afaa5c5e0d1b7b77537964694";
|
||||
version = "1.1.9";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "boost-ext";
|
||||
repo = "sml";
|
||||
rev = "${working_tests}";
|
||||
hash = "sha256-ZhIfyYdzrzPTAYevOz5I6tAcUiLRMV8HENKX9jychEY=";
|
||||
rev = "v${version}";
|
||||
hash = "sha256-RYgSpnsmgZybpkJALIzxpkDRfe9QF2FHG+nA3msFaK0=";
|
||||
};
|
||||
|
||||
buildInputs = [ boost ];
|
||||
|
@ -1,6 +1,6 @@
let version = "2.9.11"; in
{ stdenv, lib, buildPackages, fetchurl, zlib, gettext
, wordlists ? [ (fetchurl {
, lists ? [ (fetchurl {
    url = "https://github.com/cracklib/cracklib/releases/download/v${version}/cracklib-words-${version}.gz";
    hash = "sha256-popxGjE1c517Z+nzYLM/DU7M+b1/rE0XwNXkVqkcUXo=";
  }) ]
@ -23,7 +23,7 @@ stdenv.mkDerivation rec {
    patchShebangs util

  '' + ''
    ln -vs ${toString wordlists} dicts/
    ln -vs ${toString lists} dicts/
  '';

  postInstall = ''
@ -16,7 +16,7 @@
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
pname = "intel-media-driver";
|
||||
version = "23.1.6";
|
||||
version = "23.3.5";
|
||||
|
||||
outputs = [ "out" "dev" ];
|
||||
|
||||
@ -24,14 +24,14 @@ stdenv.mkDerivation rec {
|
||||
owner = "intel";
|
||||
repo = "media-driver";
|
||||
rev = "intel-media-${version}";
|
||||
sha256 = "sha256-Z1xBU+4SdwknXpYUS8EwEURNIsg2+R/U0CcW3FW325M=";
|
||||
hash = "sha256-7OdLpqO2evNeyxceOtHEI7sJCVybqvrcM1ZZx8bI4xw=";
|
||||
};
|
||||
|
||||
patches = [
|
||||
# fix platform detection
|
||||
(fetchpatch {
|
||||
url = "https://salsa.debian.org/multimedia-team/intel-media-driver-non-free/-/raw/04ffb03f744780a55aba311c612d708b00584bb7/debian/patches/0002-Remove-settings-based-on-ARCH.patch";
|
||||
sha256 = "sha256-o/Pg0S53SYh3O7L+AwxOPl1Bx4TS6iKB8ql8GhhHI/o=";
|
||||
url = "https://salsa.debian.org/multimedia-team/intel-media-driver-non-free/-/raw/7376a99f060c26d6be8e56674da52a61662617b9/debian/patches/0002-Remove-settings-based-on-ARCH.patch";
|
||||
hash = "sha256-57yePuHWYb3XXrB4MjYO2h6jbqfs4SGTLlLG91el8M4=";
|
||||
})
|
||||
];
|
||||
|
||||
|
@ -10,6 +10,7 @@
|
||||
, libpng
|
||||
, boost
|
||||
, guile
|
||||
, python
|
||||
, qtbase
|
||||
, darwin
|
||||
}:
|
||||
@ -25,8 +26,8 @@ stdenv.mkDerivation {
|
||||
hash = "sha256-OITy3fJx+Z6856V3D/KpSQRJztvOdJdqUv1c65wNgCc=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [ wrapQtAppsHook cmake ninja pkg-config ];
|
||||
buildInputs = [ eigen zlib libpng boost guile qtbase ]
|
||||
nativeBuildInputs = [ wrapQtAppsHook cmake ninja pkg-config python.pkgs.pythonImportsCheckHook ];
|
||||
buildInputs = [ eigen zlib libpng boost guile python qtbase ]
|
||||
++ lib.optionals stdenv.isDarwin [ darwin.apple_sdk_11_0.frameworks.Cocoa ];
|
||||
|
||||
preConfigure = ''
|
||||
@ -42,6 +43,14 @@ stdenv.mkDerivation {
|
||||
--replace "LIBFIVE_STDLIB_DIR=$<TARGET_FILE_DIR:libfive-stdlib>" \
|
||||
"LIBFIVE_STDLIB_DIR=$out/lib"
|
||||
|
||||
substituteInPlace libfive/bind/python/CMakeLists.txt \
|
||||
--replace ' ''${PYTHON_SITE_PACKAGES_DIR}' \
|
||||
" $out/${python.sitePackages}" \
|
||||
|
||||
substituteInPlace libfive/bind/python/libfive/ffi.py \
|
||||
--replace "os.path.join('libfive', folder)" \
|
||||
"os.path.join('$out/${python.sitePackages}/libfive', folder)" \
|
||||
|
||||
export XDG_CACHE_HOME=$(mktemp -d)/.cache
|
||||
'';
|
||||
|
||||
@ -63,12 +72,29 @@ stdenv.mkDerivation {
|
||||
'' + ''
|
||||
# Link "Studio" binary to "libfive-studio" to be more obvious:
|
||||
ln -s "$out/bin/Studio" "$out/bin/libfive-studio"
|
||||
|
||||
# Create links since libfive looks for the library in a specific path.
|
||||
mkdir -p "$out/${python.sitePackages}/libfive/src"
|
||||
ln -s "$out"/lib/libfive.* "$out/${python.sitePackages}/libfive/src/"
|
||||
mkdir -p "$out/${python.sitePackages}/libfive/stdlib"
|
||||
ln -s "$out"/lib/libfive-stdlib.* "$out/${python.sitePackages}/libfive/stdlib/"
|
||||
|
||||
# Create links so Studio can find the bindings.
|
||||
mkdir -p "$out/libfive/bind"
|
||||
ln -s "$out/${python.sitePackages}" "$out/libfive/bind/python"
|
||||
'';
|
||||
|
||||
pythonImportsCheck = [
|
||||
"libfive"
|
||||
"libfive.runner"
|
||||
"libfive.shape"
|
||||
"libfive.stdlib"
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
description = "Infrastructure for solid modeling with F-Reps in C, C++, and Guile";
|
||||
homepage = "https://libfive.com/";
|
||||
maintainers = with maintainers; [ hodapp kovirobi ];
|
||||
maintainers = with maintainers; [ hodapp kovirobi wulfsta ];
|
||||
license = with licenses; [ mpl20 gpl2Plus ];
|
||||
platforms = with platforms; all;
|
||||
};
|
||||
|
@ -1,4 +1,4 @@
|
||||
{ lib, stdenv, fetchFromGitHub, pkg-config }:
|
||||
{ lib, stdenv, fetchFromGitHub, fetchpatch }:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
version = "2.11.10";
|
||||
@ -11,6 +11,15 @@ stdenv.mkDerivation rec {
|
||||
hash = "sha256-Rbm45HRbRKQ6Cdup+gvKJ1xkK1HKG3irR5AIjhLer7g=";
|
||||
};
|
||||
|
||||
patches = [
|
||||
(fetchpatch {
|
||||
url = "https://github.com/coin-or/CoinUtils/commit/1700ed92c2bc1562aabe65dee3b4885bd5c87fb9.patch";
|
||||
stripLen = 1;
|
||||
extraPrefix = "CoinUtils/";
|
||||
hash = "sha256-8S6XteZvoJlL+5MWiOrW7HXsdcnzpuEFTyzX9qg7OUY=";
|
||||
})
|
||||
];
|
||||
|
||||
doCheck = true;
|
||||
|
||||
meta = with lib; {
|
||||
|
@ -2,11 +2,11 @@

stdenv.mkDerivation rec {
  pname = "maestro";
  version = "1.34.0";
  version = "1.34.1";

  src = fetchurl {
    url = "https://github.com/mobile-dev-inc/maestro/releases/download/cli-${version}/maestro.zip";
    sha256 = "1qbva38lcy1rm5k6r207hk3nqrr07h7x9sdppz4w5f37q0ll986r";
    sha256 = "0whnhcf7a3j01693254qqwfk9d3xa4icv4kyqkn4ihxyibznb91d";
  };

  dontUnpack = true;
@ -1,5 +1,6 @@
|
||||
{ lib
|
||||
, fetchFromGitHub
|
||||
, fetchpatch
|
||||
, buildPythonPackage
|
||||
, packaging
|
||||
, setuptools
|
||||
@ -23,6 +24,14 @@ buildPythonPackage rec {
|
||||
hash = "sha256-2yajhuRyQ7BqghbSgPClW3inpw4TW2DhgQbomcRFx94=";
|
||||
};
|
||||
|
||||
patches = [
|
||||
# Removes `register` storage class specifier, which is not allowed in C++17.
|
||||
(fetchpatch {
|
||||
url = "https://github.com/pytroll/aggdraw/commit/157ed49803567e8c3eeb7dfeff4c116db35747f7.patch";
|
||||
hash = "sha256-QSzpO90u5oSBWUzehRFbXgZ1ApEfLlfp11MUx6w11aI=";
|
||||
})
|
||||
];
|
||||
|
||||
nativeBuildInputs = [
|
||||
packaging
|
||||
setuptools
|
||||
|
@ -22,7 +22,7 @@
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "aioesphomeapi";
|
||||
version = "18.2.1";
|
||||
version = "18.2.4";
|
||||
pyproject = true;
|
||||
|
||||
disabled = pythonOlder "3.9";
|
||||
@ -31,7 +31,7 @@ buildPythonPackage rec {
|
||||
owner = "esphome";
|
||||
repo = pname;
|
||||
rev = "refs/tags/v${version}";
|
||||
hash = "sha256-PW3/V4PTm+UxTsfSSvOEX+FGcuF4m+mDOz6Z/AzB2qk=";
|
||||
hash = "sha256-m82UfhcmAFBDfSVmia6nhBB2qyQjSZJbXtzD/sGeqk4=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
|
@ -11,7 +11,7 @@
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "bluetooth-data-tools";
|
||||
version = "1.13.0";
|
||||
version = "1.14.0";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.9";
|
||||
@ -20,7 +20,7 @@ buildPythonPackage rec {
|
||||
owner = "Bluetooth-Devices";
|
||||
repo = pname;
|
||||
rev = "refs/tags/v${version}";
|
||||
hash = "sha256-qvr4CYOMgyTEFONpe6KA176H56+w6RHThAyUthIzszE=";
|
||||
hash = "sha256-eO17EuZ9K6tLAyEGmTaxw1Cxfz3XPPwNCcIwZ2/uHug=";
|
||||
};
|
||||
|
||||
# The project can build both an optimized cython version and an unoptimized
|
||||
|
@ -1,12 +1,12 @@
|
||||
{ lib
|
||||
, buildPythonPackage
|
||||
, fetchPypi
|
||||
, fetchFromGitHub
|
||||
, packaging
|
||||
, setuptools
|
||||
, setuptools-scm
|
||||
, shapely
|
||||
, sqlalchemy
|
||||
, alembic
|
||||
, psycopg2
|
||||
, pytestCheckHook
|
||||
, pythonOlder
|
||||
}:
|
||||
@ -14,37 +14,35 @@
|
||||
buildPythonPackage rec {
|
||||
pname = "geoalchemy2";
|
||||
version = "0.14.2";
|
||||
format = "setuptools";
|
||||
pyproject = true;
|
||||
|
||||
disabled = pythonOlder "3.7";
|
||||
|
||||
src = fetchPypi {
|
||||
pname = "GeoAlchemy2";
|
||||
inherit version;
|
||||
hash = "sha256-jKAj3LmjbG0xLztK7mMdZjhSZOL8n+sKsPRG61YJQH0=";
|
||||
src = fetchFromGitHub {
|
||||
owner = "geoalchemy";
|
||||
repo = "geoalchemy2";
|
||||
rev = "refs/tags/${version}";
|
||||
hash = "sha256-C/F1hpL2DnzC4UPAGGFntlQlULCx5Ufzkw7EIrzRV7I=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
setuptools
|
||||
setuptools-scm
|
||||
];
|
||||
|
||||
propagatedBuildInputs = [
|
||||
packaging
|
||||
shapely
|
||||
sqlalchemy
|
||||
packaging
|
||||
];
|
||||
|
||||
nativeCheckInputs = [
|
||||
alembic
|
||||
psycopg2
|
||||
pytestCheckHook
|
||||
];
|
||||
] ++ passthru.optional-dependencies.shapely;
|
||||
|
||||
pytestFlagsArray = [
|
||||
# tests require live postgis database
|
||||
"--deselect=tests/test_pickle.py::TestPickle::test_pickle_unpickle"
|
||||
"--deselect=tests/gallery/test_specific_compilation.py::test_specific_compilation"
|
||||
];
|
||||
env = {
|
||||
SETUPTOOLS_SCM_PRETEND_VERSION = version;
|
||||
};
|
||||
|
||||
disabledTestPaths = [
|
||||
# tests require live databases
|
||||
@ -52,23 +50,29 @@ buildPythonPackage rec {
|
||||
"tests/gallery/test_length_at_insert.py"
|
||||
"tests/gallery/test_insert_raster.py"
|
||||
"tests/gallery/test_orm_mapped_v2.py"
|
||||
"tests/gallery/test_specific_compilation.py"
|
||||
"tests/gallery/test_summarystatsagg.py"
|
||||
"tests/gallery/test_type_decorator.py"
|
||||
"tests/test_functional.py"
|
||||
"tests/test_functional_postgresql.py"
|
||||
"tests/test_functional_mysql.py"
|
||||
"tests/test_alembic_migrations.py"
|
||||
"tests/test_pickle.py"
|
||||
];
|
||||
|
||||
pythonImportsCheck = [
|
||||
"geoalchemy2"
|
||||
];
|
||||
|
||||
passthru.optional-dependencies = {
|
||||
shapely = [ shapely ];
|
||||
};
|
||||
|
||||
meta = with lib; {
|
||||
description = "Toolkit for working with spatial databases";
|
||||
homepage = "https://geoalchemy-2.readthedocs.io/";
|
||||
homepage = "https://geoalchemy-2.readthedocs.io/";
|
||||
changelog = "https://github.com/geoalchemy/geoalchemy2/releases/tag/${version}";
|
||||
license = licenses.mit;
|
||||
maintainers = with maintainers; [ ];
|
||||
maintainers = with maintainers; [ nickcao ];
|
||||
};
|
||||
}
|
||||
|
@ -11,7 +11,7 @@
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "home-assistant-bluetooth";
|
||||
version = "1.10.3";
|
||||
version = "1.10.4";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.9";
|
||||
@ -20,7 +20,7 @@ buildPythonPackage rec {
|
||||
owner = "home-assistant-libs";
|
||||
repo = pname;
|
||||
rev = "refs/tags/v${version}";
|
||||
hash = "sha256-77RrqmoCftPc48fFtuuFo0KqGX3n+6aDx2RFkwGCNzQ=";
|
||||
hash = "sha256-7gkesxQI6QBxyQpHlSSh1w6MDeid0dSdXn+jnxvafD0=";
|
||||
};
|
||||
|
||||
postPatch = ''
|
||||
|
@ -15,14 +15,15 @@
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "jupyter-cache";
|
||||
version = "0.6.1";
|
||||
format = "pyproject";
|
||||
version = "1.0.0";
|
||||
pyproject = true;
|
||||
|
||||
disabled = pythonOlder "3.7";
|
||||
disabled = pythonOlder "3.9";
|
||||
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
sha256 = "sha256-Jvg5ARQ+30ry8/9akeLSrSmORuLO4DyAcdN6I6Y8y/w=";
|
||||
inherit version;
|
||||
pname = "jupyter_cache";
|
||||
hash = "sha256-0Pp9dTPNV5gZjYiJMYJpqME4LtOyL2IsCak1ZSH0hoc=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
@ -45,6 +46,7 @@ buildPythonPackage rec {
|
||||
meta = with lib; {
|
||||
description = "A defined interface for working with a cache of jupyter notebooks";
|
||||
homepage = "https://github.com/executablebooks/jupyter-cache";
|
||||
changelog = "https://github.com/executablebooks/jupyter-cache/blob/v${version}/CHANGELOG.md";
|
||||
license = licenses.mit;
|
||||
maintainers = with maintainers; [ marsam ];
|
||||
};
|
||||
|
@ -1,33 +0,0 @@
|
||||
From 75baa1751973378cb96fb204b0a18a74e5caa2d1 Mon Sep 17 00:00:00 2001
|
||||
From: Rouven Czerwinski <r.czerwinski@pengutronix.de>
|
||||
Date: Wed, 17 Feb 2021 14:03:20 +0100
|
||||
Subject: [PATCH] serialdriver: remove pyserial version check
|
||||
|
||||
This check isn't required on NixOS, since pyserial within NixOS already
|
||||
contains the patches.
|
||||
|
||||
Signed-off-by: Rouven Czerwinski <r.czerwinski@pengutronix.de>
|
||||
---
|
||||
labgrid/driver/serialdriver.py | 6 ------
|
||||
1 file changed, 6 deletions(-)
|
||||
|
||||
diff --git a/labgrid/driver/serialdriver.py b/labgrid/driver/serialdriver.py
|
||||
index 126f674e..59a92269 100644
|
||||
--- a/labgrid/driver/serialdriver.py
|
||||
+++ b/labgrid/driver/serialdriver.py
|
||||
@@ -27,12 +27,6 @@ class SerialDriver(ConsoleExpectMixin, Driver, ConsoleProtocol):
|
||||
bindings = {"port": "SerialPort", }
|
||||
else:
|
||||
bindings = {"port": {"SerialPort", "NetworkSerialPort"}, }
|
||||
- if version.parse(serial.__version__) != version.Version('3.4.0.1'):
|
||||
- message = ("The installed pyserial version does not contain important RFC2217 fixes.\n"
|
||||
- "You can install the labgrid fork via:\n"
|
||||
- "pip uninstall pyserial\n"
|
||||
- "pip install https://github.com/labgrid-project/pyserial/archive/v3.4.0.1.zip#egg=pyserial\n") # pylint: disable=line-too-long
|
||||
- warnings.warn(message)
|
||||
|
||||
txdelay = attr.ib(default=0.0, validator=attr.validators.instance_of(float))
|
||||
timeout = attr.ib(default=3.0, validator=attr.validators.instance_of(float))
|
||||
--
|
||||
2.30.0
|
||||
|
@ -17,7 +17,9 @@
|
||||
, pyusb
|
||||
, pyyaml
|
||||
, requests
|
||||
, setuptools
|
||||
, setuptools-scm
|
||||
, wheel
|
||||
, xmodem
|
||||
}:
|
||||
|
||||
@ -32,13 +34,13 @@ buildPythonPackage rec {
|
||||
sha256 = "sha256-yhlBqqCLOt6liw4iv8itG6E4QfIa7cW76QJqefUM5dw=";
|
||||
};
|
||||
|
||||
patches = [
|
||||
# Pyserial within Nixpkgs already includes the necessary fix, remove the
|
||||
# pyserial version check from labgrid.
|
||||
./0001-serialdriver-remove-pyserial-version-check.patch
|
||||
nativeBuildInputs = [
|
||||
setuptools
|
||||
setuptools-scm
|
||||
wheel
|
||||
];
|
||||
|
||||
nativeBuildInputs = [ setuptools-scm ];
|
||||
pyproject = true;
|
||||
|
||||
propagatedBuildInputs = [
|
||||
ansicolors
|
||||
|
@ -11,16 +11,16 @@
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "maison";
|
||||
version = "1.4.0";
|
||||
format = "pyproject";
|
||||
version = "1.4.1";
|
||||
pyproject = true;
|
||||
|
||||
disabled = pythonOlder "3.7";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "dbatten5";
|
||||
repo = pname;
|
||||
repo = "maison";
|
||||
rev = "refs/tags/v${version}";
|
||||
hash = "sha256-Ny/n1vDWS6eA9zLIB0os5zrbwvutb+7sQ6iPXeid1M0=";
|
||||
hash = "sha256-uJW+7+cIt+jnbiC+HvT7KzyNk1enEtELTxtfc4eXAPU=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
|
@ -3,6 +3,7 @@
|
||||
, buildPythonPackage
|
||||
, envisage
|
||||
, fetchPypi
|
||||
, fetchpatch
|
||||
, numpy
|
||||
, packaging
|
||||
, pyface
|
||||
@ -26,6 +27,24 @@ buildPythonPackage rec {
|
||||
hash = "sha256-n0J+8spska542S02ibpr7KJMhGDicG2KHJuEKJrT/Z4=";
|
||||
};
|
||||
|
||||
patches = [
|
||||
# Adds compatibility with Python 3.11.
|
||||
# https://github.com/enthought/mayavi/pull/1199
|
||||
(fetchpatch {
|
||||
name = "python311-compat.patch";
|
||||
url = "https://github.com/enthought/mayavi/commit/50c0cbfcf97560be69c84b7c924635a558ebf92f.patch";
|
||||
hash = "sha256-zZOT6on/f5cEjnDBrNGog/wPQh7rBkaFqrxkBYDUQu0=";
|
||||
includes = [ "tvtk/src/*" ];
|
||||
})
|
||||
# Fixes an incompatible function pointer conversion error
|
||||
# https://github.com/enthought/mayavi/pull/1266
|
||||
(fetchpatch {
|
||||
name = "incompatible-pointer-conversion.patch";
|
||||
url = "https://github.com/enthought/mayavi/commit/887adc8fe2b076a368070f5b1d564745b03b1964.patch";
|
||||
hash = "sha256-88H1NNotd4pO0Zw1oLrYk5WNuuVrmTU01HJgsTRfKlo=";
|
||||
})
|
||||
];
|
||||
|
||||
postPatch = ''
|
||||
# building the docs fails with the usual Qt xcb error, so skip:
|
||||
substituteInPlace setup.py \
|
||||
|
@ -2,25 +2,56 @@
|
||||
, buildPythonPackage
|
||||
, fetchPypi
|
||||
, html5lib
|
||||
, pytestCheckHook
|
||||
, pythonOlder
|
||||
, setuptools
|
||||
}:
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "mechanize";
|
||||
version = "0.4.8";
|
||||
version = "0.4.9";
|
||||
pyproject = true;
|
||||
|
||||
disabled = pythonOlder "3.7";
|
||||
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
hash = "sha256-XoasB3c1fgBusEzSj37Z+BHUjf+mA9OJGsbSuSKA3JE=";
|
||||
hash = "sha256-aaXtsJYvkh6LEINzaMIkLYrQSfC5H/aZzn9gG/xDFSE=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [ html5lib ];
|
||||
nativeBuildInputs = [
|
||||
setuptools
|
||||
];
|
||||
|
||||
doCheck = false;
|
||||
propagatedBuildInputs = [
|
||||
html5lib
|
||||
];
|
||||
|
||||
nativeCheckInputs = [
|
||||
pytestCheckHook
|
||||
];
|
||||
|
||||
pythonImportsCheck = [
|
||||
"mechanize"
|
||||
];
|
||||
|
||||
disabledTestPaths = [
|
||||
# Tests require network access
|
||||
"test/test_urllib2_localnet.py"
|
||||
"test/test_functional.py"
|
||||
];
|
||||
|
||||
disabledTests = [
|
||||
# Tests require network access
|
||||
"test_pickling"
|
||||
"test_password_manager"
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
description = "Stateful programmatic web browsing in Python";
|
||||
homepage = "https://github.com/python-mechanize/mechanize";
|
||||
license = "BSD-style";
|
||||
changelog = "https://github.com/python-mechanize/mechanize/blob/v${version}/ChangeLog";
|
||||
license = licenses.bsd3;
|
||||
maintainers = with maintainers; [ ];
|
||||
};
|
||||
|
||||
}
|
||||
|
@ -1,22 +1,47 @@
|
||||
{ lib, buildPythonPackage, fetchPypi, cryptography, protobuf }:
|
||||
{ lib
|
||||
, buildPythonPackage
|
||||
, cryptography
|
||||
, fetchPypi
|
||||
, protobuf
|
||||
, pytestCheckHook
|
||||
, pythonOlder
|
||||
, setuptools
|
||||
}:
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "omemo-dr";
|
||||
version = "1.0.0";
|
||||
version = "1.0.1";
|
||||
pyproject = true;
|
||||
|
||||
disabled = pythonOlder "3.10";
|
||||
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
hash = "sha256-sP5QI+lHoXt0D7ftSqJGEg1vIdgZtYEulN/JVwUgvmE=";
|
||||
hash = "sha256-KoqMdyMdc5Sb3TdSeNTVomElK9ruUstiQayyUcIC02E=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
setuptools
|
||||
];
|
||||
|
||||
propagatedBuildInputs = [
|
||||
cryptography
|
||||
protobuf
|
||||
];
|
||||
|
||||
meta = {
|
||||
nativeCheckInputs = [
|
||||
pytestCheckHook
|
||||
];
|
||||
|
||||
pythonImportsCheck = [
|
||||
"omemo_dr"
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
description = "OMEMO Double Ratchet";
|
||||
license = lib.licenses.lgpl3;
|
||||
homepage = "https://dev.gajim.org/gajim/omemo-dr/";
|
||||
changelog = "https://dev.gajim.org/gajim/omemo-dr/-/blob/v${version}/CHANGELOG.md";
|
||||
license = licenses.gpl3Only;
|
||||
maintainers = with maintainers; [ ];
|
||||
};
|
||||
}
|
||||
|
@ -20,11 +20,11 @@
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "omrdatasettools";
|
||||
version = "1.3.1";
|
||||
version = "1.4.0";
|
||||
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
sha256 = "0cdq02jp8vh78yjq9bncjjl0pb554idrcxkd62rzwk4l6ss2fkw5";
|
||||
sha256 = "sha256-kUUcbti29uDnSEvCubMAUnptlaZGpEsW2IBGSAGnGyQ=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
|
@ -16,7 +16,7 @@
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "pyatmo";
|
||||
version = "7.5.0";
|
||||
version = "7.6.0";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.8";
|
||||
@ -25,7 +25,7 @@ buildPythonPackage rec {
|
||||
owner = "jabesq";
|
||||
repo = "pyatmo";
|
||||
rev = "refs/tags/v${version}";
|
||||
hash = "sha256-GucatimZTg0Fggrz4bG1x6YSa3wE/uLGB4ufil/km3w=";
|
||||
hash = "sha256-rAmSxayXljOJchiMtSOgnotzQmapK2n86HwNi9HJX68=";
|
||||
};
|
||||
|
||||
SETUPTOOLS_SCM_PRETEND_VERSION = version;
|
||||
|
@ -60,6 +60,10 @@ buildPythonPackage rec {
|
||||
${python.pythonOnBuildForHost.interpreter} buildconfig/config.py
|
||||
'';
|
||||
|
||||
env = lib.optionalAttrs stdenv.cc.isClang {
|
||||
NIX_CFLAGS_COMPILE = "-Wno-error=incompatible-function-pointer-types";
|
||||
};
|
||||
|
||||
checkPhase = ''
|
||||
runHook preCheck
|
||||
|
||||
|
@ -18,11 +18,11 @@
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "python-jenkins";
|
||||
version = "1.8.1";
|
||||
version = "1.8.2";
|
||||
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
hash = "sha256-/18dklOdkD+GmwLq8rExREfm1tePdn7c/dkpZ9UyucY=";
|
||||
hash = "sha256-VufauwYHvbjh1vxtLUMBq+2+2RZdorIG+svTBxy27ss=";
|
||||
};
|
||||
|
||||
# test uses timeout mechanism unsafe for use with the "spawn"
|
||||
|
@ -1,11 +1,12 @@
|
||||
{ lib
|
||||
, stdenv
|
||||
, fetchpatch
|
||||
, buildPythonPackage
|
||||
, fetchPypi
|
||||
, fetchFromGitHub
|
||||
, pythonOlder
|
||||
, setuptools
|
||||
, tdlib
|
||||
, telegram-text
|
||||
, pytestCheckHook
|
||||
}:
|
||||
|
||||
buildPythonPackage rec {
|
||||
@ -13,30 +14,33 @@ buildPythonPackage rec {
|
||||
version = "0.18.0";
|
||||
disabled = pythonOlder "3.6";
|
||||
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
hash = "sha256-UbJW/op01qe/HchfJUlBPBY9/W8NbZkEmFM8gZ5+EmI=";
|
||||
src = fetchFromGitHub {
|
||||
owner = "alexander-akhmetov";
|
||||
repo = "python-telegram";
|
||||
rev = version;
|
||||
hash = "sha256-2Q0nUZ2TMVWznd05+fqYojkRn4xfFZJrlqb1PMuBsAY=";
|
||||
};
|
||||
|
||||
patches = [
|
||||
# Search for the system library first, and fallback to the embedded one if the system was not found
|
||||
(fetchpatch {
|
||||
url = "https://github.com/alexander-akhmetov/python-telegram/commit/b0af0985910ebb8940cff1b92961387aad683287.patch";
|
||||
hash = "sha256-ZqsntaiC2y9l034gXDMeD2BLO/RcsbBII8FomZ65/24=";
|
||||
})
|
||||
];
|
||||
|
||||
postPatch = ''
|
||||
# Remove bundled libtdjson
|
||||
rm -fr telegram/lib
|
||||
|
||||
substituteInPlace telegram/tdjson.py \
|
||||
--replace "ctypes.util.find_library(\"libtdjson\")" \
|
||||
--replace "ctypes.util.find_library(\"tdjson\")" \
|
||||
"\"${tdlib}/lib/libtdjson${stdenv.hostPlatform.extensions.sharedLibrary}\""
|
||||
'';
|
||||
|
||||
propagatedBuildInputs = [
|
||||
setuptools
|
||||
telegram-text
|
||||
];
|
||||
|
||||
nativeCheckInputs = [
|
||||
pytestCheckHook
|
||||
];
|
||||
|
||||
disabledTests = [
|
||||
"TestGetTdjsonTdlibPath"
|
||||
];
|
||||
|
||||
pythonImportsCheck = [
|
||||
|
@ -27,12 +27,13 @@
|
||||
, setuptools
|
||||
, pytestCheckHook
|
||||
, pytest-cov
|
||||
, pytest-mock
|
||||
}:
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "scikit-rf";
|
||||
version = "0.29.0";
|
||||
format = "pyproject";
|
||||
version = "0.29.1";
|
||||
pyproject = true;
|
||||
|
||||
disabled = pythonOlder "3.7";
|
||||
|
||||
@ -40,7 +41,7 @@ buildPythonPackage rec {
|
||||
owner = "scikit-rf";
|
||||
repo = pname;
|
||||
rev = "refs/tags/v${version}";
|
||||
hash = "sha256-rBOw1rIEF8Ia6xXlXxVzRRiUxrOjOAlipFuKiL+gRl0=";
|
||||
hash = "sha256-sLE6rcBGUKmk5y7oO06rHON3GVIjcvnKlr6Tgddj64Y=";
|
||||
};
|
||||
|
||||
buildInputs = [
|
||||
@ -88,6 +89,7 @@ buildPythonPackage rec {
|
||||
coverage
|
||||
flake8
|
||||
pytest-cov
|
||||
pytest-mock
|
||||
nbval
|
||||
matplotlib
|
||||
pyvisa
|
||||
@ -99,6 +101,12 @@ buildPythonPackage rec {
|
||||
pytestCheckHook
|
||||
];
|
||||
|
||||
# test_calibration.py generates a divide by zero error on darwin
|
||||
# https://github.com/scikit-rf/scikit-rf/issues/972
|
||||
disabledTestPaths =
|
||||
lib.optional (stdenv.isAarch64 && stdenv.isDarwin)
|
||||
"skrf/calibration/tests/test_calibration.py";
|
||||
|
||||
pythonImportsCheck = [
|
||||
"skrf"
|
||||
];
|
||||
|
@ -5,7 +5,7 @@
|
||||
, cmake
|
||||
, qt5
|
||||
, libxcrypt
|
||||
, llvmPackages
|
||||
, llvmPackages_15
|
||||
}:
|
||||
|
||||
stdenv.mkDerivation {
|
||||
@ -21,12 +21,12 @@ stdenv.mkDerivation {
|
||||
cd sources/shiboken2
|
||||
'';
|
||||
|
||||
CLANG_INSTALL_DIR = llvmPackages.libclang.out;
|
||||
CLANG_INSTALL_DIR = llvmPackages_15.libclang.out;
|
||||
|
||||
nativeBuildInputs = [ cmake ];
|
||||
|
||||
buildInputs = [
|
||||
llvmPackages.libclang
|
||||
llvmPackages_15.libclang
|
||||
python
|
||||
python.pkgs.setuptools
|
||||
qt5.qtbase
|
||||
|
@ -7,13 +7,14 @@
|
||||
, pandas
|
||||
, pytestCheckHook
|
||||
, pythonOlder
|
||||
, setuptools-scm
|
||||
, setuptools
|
||||
, setuptools-scm
|
||||
, jpype1
|
||||
}:
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "tabula-py";
|
||||
version = "2.8.1";
|
||||
version = "2.8.2";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.8";
|
||||
@ -22,28 +23,30 @@ buildPythonPackage rec {
|
||||
owner = "chezou";
|
||||
repo = pname;
|
||||
rev = "refs/tags/v${version}";
|
||||
hash = "sha256-QqTfbSwGaNRXBiAzB1fsEawxCvlIunB1j2jSFD9imPI=";
|
||||
hash = "sha256-Zrq1i+HYXXNulyZ/fv00AgVd7ODj3rP9orLq5rT3ERU=";
|
||||
};
|
||||
|
||||
patches = [
|
||||
./java-interpreter-path.patch
|
||||
];
|
||||
|
||||
postPatch = ''
|
||||
sed -i 's|@JAVA@|${jre}/bin/java|g' $(find -name '*.py')
|
||||
substituteInPlace tabula/backend.py \
|
||||
--replace '"java"' '"${lib.getExe jre}"'
|
||||
'';
|
||||
|
||||
SETUPTOOLS_SCM_PRETEND_VERSION = version;
|
||||
|
||||
nativeBuildInputs = [
|
||||
setuptools
|
||||
setuptools-scm
|
||||
];
|
||||
|
||||
buildInputs = [
|
||||
jre
|
||||
];
|
||||
|
||||
propagatedBuildInputs = [
|
||||
distro
|
||||
numpy
|
||||
pandas
|
||||
setuptools
|
||||
jpype1
|
||||
];
|
||||
|
||||
nativeCheckInputs = [
|
||||
@ -60,6 +63,11 @@ buildPythonPackage rec {
|
||||
"test_read_pdf_with_remote_template"
|
||||
"test_read_remote_pdf"
|
||||
"test_read_remote_pdf_with_custom_user_agent"
|
||||
# not sure what it checks
|
||||
# probably related to jpype, but we use subprocess instead
|
||||
# https://github.com/chezou/tabula-py/issues/352#issuecomment-1730791540
|
||||
# Failed: DID NOT RAISE <class 'RuntimeError'>
|
||||
"test_read_pdf_with_silent_true"
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
|
@ -1,54 +0,0 @@
|
||||
diff -ru origsource/tabula/io.py source/tabula/io.py
|
||||
--- origsource/tabula/io.py 2022-11-23 17:19:35.419837514 +0100
|
||||
+++ source/tabula/io.py 2022-11-23 17:22:08.204194807 +0100
|
||||
@@ -79,7 +79,7 @@
|
||||
)
|
||||
)
|
||||
|
||||
- args = ["java"] + java_options + ["-jar", _jar_path()] + options.build_option_list()
|
||||
+ args = ["@JAVA@"] + java_options + ["-jar", _jar_path()] + options.build_option_list()
|
||||
if path:
|
||||
args.append(path)
|
||||
|
||||
diff -ru origsource/tabula/util.py source/tabula/util.py
|
||||
--- origsource/tabula/util.py 2022-11-23 17:19:35.422837521 +0100
|
||||
+++ source/tabula/util.py 2022-11-23 17:21:41.514132392 +0100
|
||||
@@ -26,7 +26,7 @@
|
||||
|
||||
try:
|
||||
res = subprocess.check_output(
|
||||
- ["java", "-version"], stderr=subprocess.STDOUT
|
||||
+ ["@JAVA@", "-version"], stderr=subprocess.STDOUT
|
||||
).decode()
|
||||
|
||||
except FileNotFoundError:
|
||||
diff -ru origsource/tests/test_read_pdf_table.py source/tests/test_read_pdf_table.py
|
||||
--- origsource/tests/test_read_pdf_table.py 2022-11-23 17:19:35.422837521 +0100
|
||||
+++ source/tests/test_read_pdf_table.py 2022-11-23 17:21:22.008086776 +0100
|
||||
@@ -281,7 +281,7 @@
|
||||
|
||||
tabula.read_pdf(self.pdf_path, encoding="utf-8")
|
||||
|
||||
- target_args = ["java"]
|
||||
+ target_args = ["@JAVA@"]
|
||||
if platform.system() == "Darwin":
|
||||
target_args += ["-Djava.awt.headless=true"]
|
||||
target_args += [
|
||||
@@ -355,7 +355,7 @@
|
||||
|
||||
tabula.read_pdf(self.pdf_path, encoding="utf-8", silent=False)
|
||||
|
||||
- target_args = ["java"]
|
||||
+ target_args = ["@JAVA@"]
|
||||
if platform.system() == "Darwin":
|
||||
target_args += ["-Djava.awt.headless=true"]
|
||||
target_args += [
|
||||
@@ -382,7 +382,7 @@
|
||||
|
||||
tabula.read_pdf(self.pdf_path, encoding="utf-8", silent=True)
|
||||
|
||||
- target_args = ["java"]
|
||||
+ target_args = ["@JAVA@"]
|
||||
if platform.system() == "Darwin":
|
||||
target_args += ["-Djava.awt.headless=true"]
|
||||
target_args += [
|
@ -3,8 +3,9 @@
|
||||
, aresponses
|
||||
, buildPythonPackage
|
||||
, fetchFromGitHub
|
||||
, mashumaro
|
||||
, orjson
|
||||
, poetry-core
|
||||
, pydantic
|
||||
, pytest-asyncio
|
||||
, pytestCheckHook
|
||||
, pythonOlder
|
||||
@ -13,22 +14,22 @@
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "tailscale";
|
||||
version = "0.3.0";
|
||||
version = "0.6.0";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.8";
|
||||
disabled = pythonOlder "3.11";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "frenck";
|
||||
repo = "python-tailscale";
|
||||
rev = "refs/tags/v${version}";
|
||||
hash = "sha256-gGDsVGsCBZi/pxD0cyH3+xrvHVBC+wJCcl/NGqsTqiE=";
|
||||
hash = "sha256-wO6yMMU5fxk8GQ0e4ZCse2atlR4wrzulZOFXkVKAsmU=";
|
||||
};
|
||||
|
||||
postPatch = ''
|
||||
# Upstream doesn't set a version for the pyproject.toml
|
||||
substituteInPlace pyproject.toml \
|
||||
--replace "0.0.0" "${version}" \
|
||||
--replace 'version = "0.0.0"' 'version = "${version}"' \
|
||||
--replace "--cov" ""
|
||||
'';
|
||||
|
||||
@ -38,7 +39,8 @@ buildPythonPackage rec {
|
||||
|
||||
propagatedBuildInputs = [
|
||||
aiohttp
|
||||
pydantic
|
||||
mashumaro
|
||||
orjson
|
||||
yarl
|
||||
];
|
||||
|
||||
|
39
pkgs/development/python-modules/telegram-text/default.nix
Normal file
39
pkgs/development/python-modules/telegram-text/default.nix
Normal file
@ -0,0 +1,39 @@
|
||||
{ lib
|
||||
, stdenv
|
||||
, buildPythonPackage
|
||||
, fetchFromGitHub
|
||||
, pythonOlder
|
||||
, poetry-core
|
||||
, pytestCheckHook
|
||||
}:
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "telegram-text";
|
||||
version = "0.1.2";
|
||||
pyproject = true;
|
||||
disabled = pythonOlder "3.7";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "SKY-ALIN";
|
||||
repo = "telegram-text";
|
||||
rev = "v${version}";
|
||||
hash = "sha256-p8SVQq7IvkVuOFE8VDugROLY5Wk0L2HmXyacTzFFSP4=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
poetry-core
|
||||
];
|
||||
|
||||
nativeCheckInputs = [
|
||||
pytestCheckHook
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
description = "Python markup module for Telegram messenger";
|
||||
downloadPage = "https://github.com/SKY-ALIN/telegram-text";
|
||||
homepage = "https://telegram-text.alinsky.tech/";
|
||||
changelog = "https://github.com/SKY-ALIN/telegram-text/blob/v${version}/CHANGELOG.md";
|
||||
license = licenses.mit;
|
||||
maintainers = with maintainers; [ sikmir ];
|
||||
};
|
||||
}
|
@ -10,7 +10,7 @@
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "ulid-transform";
|
||||
version = "0.8.1";
|
||||
version = "0.9.0";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.9";
|
||||
@ -19,7 +19,7 @@ buildPythonPackage rec {
|
||||
owner = "bdraco";
|
||||
repo = pname;
|
||||
rev = "refs/tags/v${version}";
|
||||
hash = "sha256-isngr9CZ2YYuq+5s3p4HXrTU20vPqZGZ1r8mBoVkxiI=";
|
||||
hash = "sha256-r9uxPXpmQSsL1rX4d9TH87olFbZugdGdNG++Ygjie1I=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
|
@ -63,6 +63,11 @@ buildPythonPackage rec {
|
||||
"wfuzz"
|
||||
];
|
||||
|
||||
postInstall = ''
|
||||
mkdir -p $out/share/wordlists/wfuzz
|
||||
cp -R -T "wordlist" "$out/share/wordlists/wfuzz"
|
||||
'';
|
||||
|
||||
meta = with lib; {
|
||||
description = "Web content fuzzer to facilitate web applications assessments";
|
||||
longDescription = ''
|
||||
|
@ -1,4 +1,8 @@
|
||||
{ skawarePackages, pkgs }:
|
||||
{ lib
|
||||
, stdenv
|
||||
, skawarePackages
|
||||
, pkgs
|
||||
}:
|
||||
|
||||
with skawarePackages;
|
||||
|
||||
@ -21,6 +25,17 @@ buildPackage {
|
||||
# Empty the default path, which would be "/usr/bin:bin".
|
||||
# It would be set when PATH is empty. This hurts hermeticity.
|
||||
"--with-default-path="
|
||||
|
||||
] ++ lib.optionals (stdenv.buildPlatform.config != stdenv.hostPlatform.config) [
|
||||
# ./configure: sysdep posixspawnearlyreturn cannot be autodetected
|
||||
# when cross-compiling. Please manually provide a value with the
|
||||
# --with-sysdep-posixspawnearlyreturn=yes|no|... option.
|
||||
#
|
||||
# posixspawnearlyreturn: `yes` if the target has a broken
|
||||
# `posix_spawn()` implementation that can return before the
|
||||
# child has successfully exec'ed. That happens with old glibcs
|
||||
# and some virtual platforms.
|
||||
"--with-sysdep-posixspawnearlyreturn=no"
|
||||
];
|
||||
|
||||
postInstall = ''
|
||||
|
@ -22,6 +22,7 @@
|
||||
, file
|
||||
, substituteAll
|
||||
, writeTextFile
|
||||
, writeShellApplication
|
||||
}:
|
||||
|
||||
let
|
||||
@ -128,6 +129,16 @@ let
|
||||
|
||||
defaultShellPath = lib.makeBinPath defaultShellUtils;
|
||||
|
||||
bashWithDefaultShellUtils = writeShellApplication {
|
||||
name = "bash";
|
||||
text = ''
|
||||
if [[ "$PATH" == "/no-such-path" ]]; then
|
||||
export PATH=${defaultShellPath}
|
||||
fi
|
||||
exec ${bash}/bin/bash "$@"
|
||||
'';
|
||||
};
|
||||
|
||||
platforms = lib.platforms.linux ++ lib.platforms.darwin;
|
||||
|
||||
system = if stdenv.hostPlatform.isDarwin then "darwin" else "linux";
|
||||
@ -420,8 +431,8 @@ stdenv.mkDerivation rec {
|
||||
# If you add more replacements here, you must change the grep above!
|
||||
# Only files containing /bin are taken into account.
|
||||
substituteInPlace "$path" \
|
||||
--replace /bin/bash ${bash}/bin/bash \
|
||||
--replace "/usr/bin/env bash" ${bash}/bin/bash \
|
||||
--replace /bin/bash ${bashWithDefaultShellUtils}/bin/bash \
|
||||
--replace "/usr/bin/env bash" ${bashWithDefaultShellUtils}/bin/bash \
|
||||
--replace "/usr/bin/env python" ${python3}/bin/python \
|
||||
--replace /usr/bin/env ${coreutils}/bin/env \
|
||||
--replace /bin/true ${coreutils}/bin/true
|
||||
@ -436,17 +447,17 @@ stdenv.mkDerivation rec {
|
||||
|
||||
# bazel test runner include references to /bin/bash
|
||||
substituteInPlace tools/build_rules/test_rules.bzl \
|
||||
--replace /bin/bash ${bash}/bin/bash
|
||||
--replace /bin/bash ${bashWithDefaultShellUtils}/bin/bash
|
||||
|
||||
for i in $(find tools/cpp/ -type f)
|
||||
do
|
||||
substituteInPlace $i \
|
||||
--replace /bin/bash ${bash}/bin/bash
|
||||
--replace /bin/bash ${bashWithDefaultShellUtils}/bin/bash
|
||||
done
|
||||
|
||||
# Fixup scripts that generate scripts. Not fixed up by patchShebangs below.
|
||||
substituteInPlace scripts/bootstrap/compile.sh \
|
||||
--replace /bin/bash ${bash}/bin/bash
|
||||
--replace /bin/bash ${bashWithDefaultShellUtils}/bin/bash
|
||||
|
||||
# add nix environment vars to .bazelrc
|
||||
cat >> .bazelrc <<EOF
|
||||
|
@ -16,14 +16,14 @@
|
||||
|
||||
rustPlatform.buildRustPackage rec {
|
||||
pname = "cargo-update";
|
||||
version = "13.1.0";
|
||||
version = "13.2.0";
|
||||
|
||||
src = fetchCrate {
|
||||
inherit pname version;
|
||||
sha256 = "sha256-2j35R7QTn7Z3yqzOU+VWAoZfYodecDt45Plx/D7+GyU=";
|
||||
sha256 = "sha256-yMHGn/RPtYuxS3rHzm87mW7nBUEaSOGsCT7Ckxvhabk=";
|
||||
};
|
||||
|
||||
cargoHash = "sha256-OEv9LOep4YNWY7oixY5zD9QgxqSYTrcf5oSXpxvnKIs=";
|
||||
cargoHash = "sha256-hO2W0NRV9fGHnnS1kOkQ+e0sFzVSBQk3MOm8qDYbA00=";
|
||||
|
||||
nativeBuildInputs = [
|
||||
cmake
|
||||
|
@ -2,18 +2,18 @@
|
||||
|
||||
rustPlatform.buildRustPackage rec {
|
||||
pname = "viceroy";
|
||||
version = "0.9.2";
|
||||
version = "0.9.3";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "fastly";
|
||||
repo = pname;
|
||||
rev = "v${version}";
|
||||
hash = "sha256-vMyNsLXMJk8MTiZYRiGQpOLZfeJbKlYcG1U8xTQIty0=";
|
||||
hash = "sha256-LOm4d6SV5rlb7NovhSp7V0JIaOfHIZOqeIcpIvTsZsA=";
|
||||
};
|
||||
|
||||
buildInputs = lib.optional stdenv.isDarwin Security;
|
||||
|
||||
cargoHash = "sha256-+v2P9ISSA7Xy5fTjfVNETAStPo19dLxv5K57MC/GU4E=";
|
||||
cargoHash = "sha256-Pz+jA4uC/40mj5Jn/lB+XcoN/QSD23iLwsEowTUI0pg=";
|
||||
|
||||
cargoTestFlags = [
|
||||
"--package viceroy-lib"
|
||||
|
@ -2,13 +2,13 @@
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
name = "yarn-berry";
|
||||
version = "3.4.1";
|
||||
version = "4.0.1";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "yarnpkg";
|
||||
repo = "berry";
|
||||
rev = "@yarnpkg/cli/${version}";
|
||||
hash = "sha256-eBBB/F+mnGi93Qf23xgt306/ogoV76RXOM90O14u5Tw=";
|
||||
hash = "sha256-9QNeXamNqRx+Bfg8nAhnImPuNFyqrHIs1eF9prSwIR4=";
|
||||
};
|
||||
|
||||
buildInputs = [
|
||||
@ -33,11 +33,11 @@ stdenv.mkDerivation rec {
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
meta = with lib; {
|
||||
meta = with lib; {
|
||||
homepage = "https://yarnpkg.com/";
|
||||
description = "Fast, reliable, and secure dependency management.";
|
||||
license = licenses.bsd2;
|
||||
maintainers = with maintainers; [ ryota-ka ];
|
||||
maintainers = with maintainers; [ ryota-ka thehedgeh0g ];
|
||||
platforms = platforms.unix;
|
||||
};
|
||||
}
|
||||
|
@ -3,22 +3,22 @@
|
||||
let
|
||||
pname = "anki-bin";
|
||||
# Update hashes for both Linux and Darwin!
|
||||
version = "23.10";
|
||||
version = "23.10.1";
|
||||
|
||||
sources = {
|
||||
linux = fetchurl {
|
||||
url = "https://github.com/ankitects/anki/releases/download/${version}/anki-${version}-linux-qt6.tar.zst";
|
||||
sha256 = "sha256-dfL95UKu6kwD4WHLtXlIdkf5UItEtW2WCAKP7YGlCtc=";
|
||||
sha256 = "sha256-Kv0SH+bLnBSM/tYHe2kEJc4n7izZTBNWQs2nm/teLEU=";
|
||||
};
|
||||
|
||||
# For some reason anki distributes completely separate dmg-files for the aarch64 version and the x86_64 version
|
||||
darwin-x86_64 = fetchurl {
|
||||
url = "https://github.com/ankitects/anki/releases/download/${version}/anki-${version}-mac-intel-qt6.dmg";
|
||||
sha256 = "sha256-Y8BZ7EA6Dn4+5kMCFyuXi17XDLn9YRxqVGautt9WUOo=";
|
||||
sha256 = "sha256-MSlKsEv4N/H7G1bUOBlPBXerpHIW32P6Va02aRq1+54=";
|
||||
};
|
||||
darwin-aarch64 = fetchurl {
|
||||
url = "https://github.com/ankitects/anki/releases/download/${version}/anki-${version}-mac-apple-qt6.dmg";
|
||||
sha256 = "sha256-IrKWJ16gMCR2MH8dgYUCtMj6mDQP18+HQr17hfekPIs=";
|
||||
sha256 = "sha256-jEm9WJBXx77KpldzBuxK1Pu6VGiARZPnRmMhEjZdm1I=";
|
||||
};
|
||||
};
|
||||
|
||||
@ -45,7 +45,7 @@ let
|
||||
meta = with lib; {
|
||||
inherit (anki.meta) license homepage description longDescription;
|
||||
platforms = [ "x86_64-linux" "x86_64-darwin" "aarch64-darwin" ];
|
||||
maintainers = with maintainers; [ mahmoudk1000 atemu ];
|
||||
maintainers = with maintainers; [ mahmoudk1000 ];
|
||||
};
|
||||
|
||||
passthru = { inherit sources; };
|
||||
|
@ -5,11 +5,11 @@

stdenv.mkDerivation rec {
  pname = "pdns-recursor";
  version = "4.9.1";
  version = "4.9.2";

  src = fetchurl {
    url = "https://downloads.powerdns.com/releases/pdns-recursor-${version}.tar.bz2";
    sha256 = "sha256-Ch7cE+jyvWYfOeMWOH2UHiLeagO4p6L8Zi/fi5Quor4=";
    sha256 = "sha256-TLgYBFjs+1KKPZo0uihEts0u1pyhxGHd4koOvWaCkUQ=";
  };

  nativeBuildInputs = [ pkg-config ];
@ -0,0 +1,46 @@
#!/usr/bin/env python3

import json
import importlib_metadata
import sys

from packaging.requirements import Requirement


def check_requirement(req: str):
    # https://packaging.pypa.io/en/stable/requirements.html
    requirement = Requirement(req)
    try:
        version = importlib_metadata.distribution(requirement.name).version
    except importlib_metadata.PackageNotFoundError:
        print(f" - Dependency {requirement.name} is missing", file=sys.stderr)
        return False

    # https://packaging.pypa.io/en/stable/specifiers.html
    if not version in requirement.specifier:
        print(
            f" - {requirement.name}{requirement.specifier} expected, but got {version}",
            file=sys.stderr,
        )
        return False

    return True


def check_manifest(manifest_file: str):
    with open(manifest_file) as fd:
        manifest = json.load(fd)
    if "requirements" in manifest:
        ok = True
        for requirement in manifest["requirements"]:
            ok &= check_requirement(requirement)
        if not ok:
            print("Manifest requirements are not met", file=sys.stderr)
            sys.exit(1)


if __name__ == "__main__":
    if len(sys.argv) < 2:
        raise RuntimeError(f"Usage {sys.argv[0]} <manifest>")
    manifest_file = sys.argv[1]
    check_manifest(manifest_file)
@ -0,0 +1,38 @@
{ lib
, home-assistant
, makeSetupHook
}:

{ pname
, version
, format ? "other"
, ...
}@args:

let
  manifestRequirementsCheckHook = import ./manifest-requirements-check-hook.nix {
    inherit makeSetupHook;
    inherit (home-assistant) python;
  };
in
home-assistant.python.pkgs.buildPythonPackage (
  {
    inherit format;

    installPhase = ''
      runHook preInstall

      mkdir $out
      cp -r $src/custom_components/ $out/

      runHook postInstall
    '';

    nativeCheckInputs = with home-assistant.python.pkgs; [
      importlib-metadata
      manifestRequirementsCheckHook
      packaging
    ] ++ (args.nativeCheckInputs or []);

  } // builtins.removeAttrs args [ "nativeCheckInputs" ]
)
@ -0,0 +1,11 @@
{ python
, makeSetupHook
}:

makeSetupHook {
  name = "manifest-requirements-check-hook";
  substitutions = {
    pythonCheckInterpreter = python.interpreter;
    checkManifest = ./check_manifest.py;
  };
} ./manifest-requirements-check-hook.sh
@ -0,0 +1,25 @@
# Setup hook to check HA manifest requirements
echo "Sourcing manifest-requirements-check-hook"

function manifestCheckPhase() {
    echo "Executing manifestCheckPhase"
    runHook preCheck

    manifests=$(shopt -s nullglob; echo $out/custom_components/*/manifest.json)

    if [ ! -z "$manifests" ]; then
        echo Checking manifests $manifests
        @pythonCheckInterpreter@ @checkManifest@ $manifests
    else
        echo "No custom component manifests found in $out" >&2
        exit 1
    fi

    runHook postCheck
    echo "Finished executing manifestCheckPhase"
}

if [ -z "${dontCheckManifest-}" ] && [ -z "${installCheckPhase-}" ]; then
    echo "Using manifestCheckPhase"
    preDistPhases+=" manifestCheckPhase"
fi
@ -2,12 +2,13 @@
# Do not edit!

{
  version = "2023.11.1";
  version = "2023.11.2";
  components = {
    "3_day_blinds" = ps: with ps; [
    ];
    "abode" = ps: with ps; [
      jaraco-abode
      jaraco-functools
    ];
    "accuweather" = ps: with ps; [
      accuweather
57
pkgs/servers/home-assistant/custom-components/README.md
Normal file
@ -0,0 +1,57 @@
# Packaging guidelines

## buildHomeAssistantComponent

Custom components should be packaged using the
`buildHomeAssistantComponent` function, which is provided at the top level.
It builds upon `buildPythonPackage` but uses a custom install and check
phase.

Python runtime dependencies can be directly consumed as unqualified
function arguments. Pass them into `propagatedBuildInputs` so they are
available to Home Assistant at runtime.

Out-of-tree components need to use python packages from
`home-assistant.python.pkgs` so as not to introduce conflicting package
versions into the Python environment.

**Example Boilerplate:**

```nix
{ lib
, buildHomeAssistantComponent
, fetchFromGitHub
}:

buildHomeAssistantComponent {
  # pname, version

  src = fetchFromGitHub {
    # owner, repo, rev, hash
  };

  propagatedBuildInputs = [
    # python requirements, as specified in manifest.json
  ];

  meta = with lib; {
    # changelog, description, homepage, license, maintainers
  };
}
```

## Package name normalization

Apply the same normalization rules as defined for python packages in
[PEP503](https://peps.python.org/pep-0503/#normalized-names).
The name should be lowercased, and dots, underscores or runs of
dashes should all be replaced by a single dash.
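
For reference, the rule can be sketched in Nix. This is only an illustrative
sketch; the helper and its reliance on `lib`/builtins are assumptions, not part
of the builder:

```nix
# Hypothetical helper: lowercase the name and collapse runs of ".", "_"
# and "-" into a single dash, per PEP 503.
{ lib }:
name:
lib.toLower (lib.concatStringsSep "-"
  (builtins.filter (part: builtins.isString part && part != "")
    (builtins.split "[-_.]+" name)))
# e.g. "My_Custom..Component" becomes "my-custom-component"
```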
## Manifest check

The `buildHomeAssistantComponent` builder uses a hook to check whether
the dependencies specified in the `manifest.json` are present and
inside the specified version range.

There shouldn't be a need to disable this hook, but you can set
`dontCheckManifest` to `true` in the derivation to achieve that.
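
If it does become necessary, for example when a component's `manifest.json`
pins a requirement that nixpkgs cannot satisfy, an override along these lines
should work (a sketch only; `my-component` is a placeholder, not a real
attribute):

```nix
# Hypothetical override of a package built with buildHomeAssistantComponent.
my-component.overrideAttrs (oldAttrs: {
  # Skip the check performed by manifest-requirements-check-hook.
  dontCheckManifest = true;
})
```
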
@ -0,0 +1,6 @@
{ callPackage
}:

{
  prometheus-sensor = callPackage ./prometheus-sensor {};
}
@ -0,0 +1,26 @@
{ lib
, fetchFromGitHub
, buildHomeAssistantComponent
}:

buildHomeAssistantComponent rec {
  pname = "prometheus-sensor";
  version = "1.0.0";

  src = fetchFromGitHub {
    owner = "mweinelt";
    repo = "ha-prometheus-sensor";
    rev = "refs/tags/${version}";
    hash = "sha256-10COLFXvmpm8ONLyx5c0yiQdtuP0SC2NKq/ZYHro9II=";
  };

  dontBuild = true;

  meta = with lib; {
    changelog = "https://github.com/mweinelt/ha-prometheus-sensor/blob/${version}/CHANGELOG.md";
    description = "Import prometheus query results into Home Assistant";
    homepage = "https://github.com/mweinelt/ha-prometheus-sensor";
    maintainers = with maintainers; [ hexa ];
    license = licenses.mit;
  };
}
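
A packaged component like the one above can then be pulled into a system
configuration. The sketch below assumes the NixOS module exposes a
`customComponents` option and that the set defined in `default.nix` above is
reachable as `pkgs.home-assistant-custom-components`; both names are
assumptions, not taken from this change:

```nix
# Hypothetical NixOS configuration fragment wiring in a custom component.
{ pkgs, ... }:
{
  services.home-assistant = {
    enable = true;
    # Option and attribute names are assumed for illustration.
    customComponents = with pkgs.home-assistant-custom-components; [
      prometheus-sensor
    ];
  };
}
```
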
@ -0,0 +1,13 @@
# Packaging guidelines

## Entrypoint

Every lovelace module has an entrypoint in the form of a `.js` file. By
default the nixos module will try to load `${pname}.js` when a module is
configured.

The entrypoint used can be overridden in `passthru` like this:

```nix
passthru.entrypoint = "demo-card-bundle.js";
```
@ -0,0 +1,8 @@
{ callPackage
}:

{
  mini-graph-card = callPackage ./mini-graph-card {};

  mini-media-player = callPackage ./mini-media-player {};
}
@ -0,0 +1,38 @@
{ lib
, buildNpmPackage
, fetchFromGitHub
}:

buildNpmPackage rec {
  pname = "mini-graph-card";
  version = "0.11.0";

  src = fetchFromGitHub {
    owner = "kalkih";
    repo = "mini-graph-card";
    rev = "refs/tags/v${version}";
    hash = "sha256-AC4VawRtWTeHbFqDJ6oQchvUu08b4F3ManiPPXpyGPc=";
  };

  npmDepsHash = "sha256-0ErOTkcCnMqMTsTkVL320SxZaET/izFj9GiNWC2tQtQ=";

  installPhase = ''
    runHook preInstall

    mkdir $out
    cp -v dist/mini-graph-card-bundle.js $out/

    runHook postInstall
  '';

  passthru.entrypoint = "mini-graph-card-bundle.js";

  meta = with lib; {
    changelog = "https://github.com/kalkih/mini-graph-card/releases/tag/v${version}";
    description = "Minimalistic graph card for Home Assistant Lovelace UI";
    homepage = "https://github.com/kalkih/mini-graph-card";
    maintainers = with maintainers; [ hexa ];
    license = licenses.mit;
  };
}
@ -0,0 +1,37 @@
{ lib
, buildNpmPackage
, fetchFromGitHub
}:

buildNpmPackage rec {
  pname = "mini-media-player";
  version = "1.16.5";

  src = fetchFromGitHub {
    owner = "kalkih";
    repo = "mini-media-player";
    rev = "v${version}";
    hash = "sha256-ydkY7Qx2GMh4CpvvBAQubJ7PlxSscDZRJayn82bOczM=";
  };

  npmDepsHash = "sha256-v9NvZOrQPMOoG3LKACnu79jKgZtcnGiopWad+dFbplw=";

  installPhase = ''
    runHook preInstall

    mkdir $out
    cp -v ./dist/mini-media-player-bundle.js $out/

    runHook postInstall
  '';

  passthru.entrypoint = "mini-media-player-bundle.js";

  meta = with lib; {
    changelog = "https://github.com/kalkih/mini-media-player/releases/tag/v${version}";
    description = "Minimalistic media card for Home Assistant Lovelace UI";
    homepage = "https://github.com/kalkih/mini-media-player";
    license = licenses.mit;
    maintainers = with maintainers; [ hexa ];
  };
}
@ -3,7 +3,6 @@
, callPackage
, fetchFromGitHub
, fetchPypi
, fetchpatch
, python311
, substituteAll
, ffmpeg-headless
@ -193,6 +192,15 @@ let
};
});

psutil = super.psutil.overridePythonAttrs (oldAttrs: rec {
version = "5.9.6";
src = fetchPypi {
pname = "psutil";
inherit version;
hash = "sha256-5Lkt3NfdTN0/kAGA6h4QSTLHvOI0+4iXbio7KWRBIlo=";
};
});

py-synologydsm-api = super.py-synologydsm-api.overridePythonAttrs (oldAttrs: rec {
version = "2.1.4";
src = fetchFromGitHub {
@ -310,17 +318,6 @@ let
doCheck = false;
});

# Pinned due to API changes in 0.3.0
tailscale = super.tailscale.overridePythonAttrs (oldAttrs: rec {
version = "0.2.0";
src = fetchFromGitHub {
owner = "frenck";
repo = "python-tailscale";
rev = "refs/tags/v${version}";
hash = "sha256-/tS9ZMUWsj42n3MYPZJYJELzX3h02AIHeRZmD2SuwWE=";
};
});

# Pinned due to API changes ~1.0
vultr = super.vultr.overridePythonAttrs (oldAttrs: rec {
version = "0.1.2";
@ -356,7 +353,7 @@ let
extraBuildInputs = extraPackages python.pkgs;

# Don't forget to run parse-requirements.py after updating
hassVersion = "2023.11.1";
hassVersion = "2023.11.2";

in python.pkgs.buildPythonApplication rec {
pname = "homeassistant";
@ -372,7 +369,7 @@ in python.pkgs.buildPythonApplication rec {
# Primary source is the pypi sdist, because it contains translations
src = fetchPypi {
inherit pname version;
hash = "sha256-4OIvY6blun++7JDY+B0Cjrr4yNgnjTd8G55SWkhS3Cs=";
hash = "sha256-cnneRq0hIyvgKo0du/52ze0IVs8TgTPNQM3T1kyy03s=";
};

# Secondary source is git for tests
@ -380,7 +377,7 @@ in python.pkgs.buildPythonApplication rec {
owner = "home-assistant";
repo = "core";
rev = "refs/tags/${version}";
hash = "sha256-Z/CV1sGdJsdc4OxUZulC0boHaMP7WpajbY8Y6R9Q//I=";
hash = "sha256-OljfYmlXSJVoWWsd4jcSF4nI/FXHqRA8e4LN5AaPVv8=";
};

nativeBuildInputs = with python.pkgs; [
@ -396,17 +393,14 @@ in python.pkgs.buildPythonApplication rec {

# leave this in, so users don't have to constantly update their downstream patch handling
patches = [
# Follow symlinks in /var/lib/hass/www
./patches/static-symlinks.patch

# Patch path to ffmpeg binary
(substituteAll {
src = ./patches/ffmpeg-path.patch;
ffmpeg = "${lib.getBin ffmpeg-headless}/bin/ffmpeg";
})
(fetchpatch {
# freeze time in litterrobot tests
# https://github.com/home-assistant/core/pull/103444
name = "home-assistant-litterrobot-freeze-test-time.patch";
url = "https://github.com/home-assistant/core/commit/806205952ff863e2cf1875be406ea0254be5f13a.patch";
hash = "sha256-OVbmJWy275nYWrif9awAGIYlgZqrRPcYBhB0Vil8rmk=";
})
];

postPatch = let
@ -526,6 +520,8 @@ in python.pkgs.buildPythonApplication rec {
"--deselect=tests/helpers/test_entity_registry.py::test_get_or_create_updates_data"
# AssertionError: assert 2 == 1
"--deselect=tests/helpers/test_entity_values.py::test_override_single_value"
# AssertionError: assert 'WARNING' not in '2023-11-10 ...nt abc[L]>\n'"
"--deselect=tests/helpers/test_script.py::test_multiple_runs_repeat_choose"
# tests are located in tests/
"tests"
];
@ -4,7 +4,7 @@ buildPythonPackage rec {
# the frontend version corresponding to a specific home-assistant version can be found here
# https://github.com/home-assistant/home-assistant/blob/master/homeassistant/components/frontend/manifest.json
pname = "home-assistant-frontend";
version = "20231030.1";
version = "20231030.2";
format = "wheel";

src = fetchPypi {
@ -12,7 +12,7 @@ buildPythonPackage rec {
pname = "home_assistant_frontend";
dist = "py3";
python = "py3";
hash = "sha256-S363j7HnOxLqCBaml1Kb9xfY0AaqBIgj09NutByn6Xo=";
hash = "sha256-qzodzqWpAXZjwBJkiCyBi5zzfpEqqtauJn2PKZ5UtJ0=";
};

# there is nothing to strip in this package
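Editor's aside, not part of the commit: the comment above points at the frontend component's manifest.json as the place to find the frontend version matching a Home Assistant release. A minimal Python sketch of that lookup follows; the raw.githubusercontent.com URL layout and the "home-assistant-frontend==&lt;version&gt;" requirement format are assumptions, not something this diff defines.

```python
#!/usr/bin/env python3
"""Hypothetical helper: print the frontend version that a given
Home Assistant release pins in its frontend manifest.json."""
import json
import re
import urllib.request


def frontend_version(hass_version: str) -> str:
    # URL layout is an assumption: the tag name is used directly as the git ref.
    url = (
        "https://raw.githubusercontent.com/home-assistant/core/"
        f"{hass_version}/homeassistant/components/frontend/manifest.json"
    )
    # manifest.json is expected to list something like
    # "home-assistant-frontend==20231030.2" under "requirements".
    with urllib.request.urlopen(url) as response:
        manifest = json.load(response)
    for requirement in manifest.get("requirements", []):
        match = re.fullmatch(r"home-assistant-frontend==(.+)", requirement)
        if match:
            return match.group(1)
    raise LookupError("no home-assistant-frontend requirement found")


if __name__ == "__main__":
    print(frontend_version("2023.11.2"))
```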
@ -56,6 +56,15 @@ EXTRA_COMPONENT_DEPS = {
],
}

# Sometimes we have unstable versions for libraries that are not
# well-maintained. This allows us to mark our weird version as newer
# than a certain wanted version
OUR_VERSION_IS_NEWER_THAN = {
"blinkstick": "1.2.0",
"gps3": "0.33.3",
"pybluez": "0.22",
}


def run_sync(cmd: List[str]) -> None:
@ -226,7 +235,12 @@ def main() -> None:
Version.parse(our_version)
except InvalidVersion:
print(f"Attribute {attr_name} has invalid version specifier {our_version}", file=sys.stderr)
attr_outdated = True

# allow specifying that our unstable version is newer than some version
if newer_than_version := OUR_VERSION_IS_NEWER_THAN.get(attr_name):
attr_outdated = Version.parse(newer_than_version) < Version.parse(required_version)
else:
attr_outdated = True
else:
attr_outdated = Version.parse(our_version) < Version.parse(required_version)
finally:
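Editor's aside, not part of the commit: the OUR_VERSION_IS_NEWER_THAN table added above only matters when the packaged version string cannot be parsed (for example an unstable date pin); in that case the wanted version is compared against the table entry instead of the unparseable string. A rough standalone equivalent, written against packaging.version rather than the script's own imports (names and imports below are assumptions):

```python
"""Sketch of the fallback version check, using packaging.version."""
from packaging.version import InvalidVersion, Version

OUR_VERSION_IS_NEWER_THAN = {
    "blinkstick": "1.2.0",
    "gps3": "0.33.3",
    "pybluez": "0.22",
}


def is_outdated(attr_name: str, our_version: str, required_version: str) -> bool:
    try:
        ours = Version(our_version)
    except InvalidVersion:
        # Unparseable version (e.g. "unstable-2023-02-26"): fall back to the
        # manual table, otherwise conservatively report the attribute as outdated.
        if newer_than := OUR_VERSION_IS_NEWER_THAN.get(attr_name):
            return Version(newer_than) < Version(required_version)
        return True
    return ours < Version(required_version)


# e.g. is_outdated("blinkstick", "unstable-2023-02-26", "1.2.0") -> False
```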
pkgs/servers/home-assistant/patches/static-symlinks.patch (new file, 37 lines)
@ -0,0 +1,37 @@
diff --git a/homeassistant/components/frontend/__init__.py b/homeassistant/components/frontend/__init__.py
index 2ec991750f..9a937006ce 100644
--- a/homeassistant/components/frontend/__init__.py
+++ b/homeassistant/components/frontend/__init__.py
@@ -383,7 +383,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

local = hass.config.path("www")
if os.path.isdir(local):
- hass.http.register_static_path("/local", local, not is_dev)
+ hass.http.register_static_path("/local", local, not is_dev, follow_symlinks=True)

# Can be removed in 2023
hass.http.register_redirect("/config/server_control", "/developer-tools/yaml")
diff --git a/homeassistant/components/http/__init__.py b/homeassistant/components/http/__init__.py
index 122b7b79ce..3cf2b7e0db 100644
--- a/homeassistant/components/http/__init__.py
+++ b/homeassistant/components/http/__init__.py
@@ -411,16 +411,16 @@ class HomeAssistantHTTP:
)

def register_static_path(
- self, url_path: str, path: str, cache_headers: bool = True
+ self, url_path: str, path: str, cache_headers: bool = True, follow_symlinks: bool = False
) -> None:
"""Register a folder or file to serve as a static path."""
if os.path.isdir(path):
if cache_headers:
resource: CachingStaticResource | web.StaticResource = (
- CachingStaticResource(url_path, path)
+ CachingStaticResource(url_path, path, follow_symlinks=follow_symlinks)
)
else:
- resource = web.StaticResource(url_path, path)
+ resource = web.StaticResource(url_path, path, follow_symlinks=follow_symlinks)
self.app.router.register_resource(resource)
self.app["allow_configured_cors"](resource)
return
@ -8,7 +8,7 @@

buildPythonPackage rec {
pname = "homeassistant-stubs";
version = "2023.11.1";
version = "2023.11.2";
format = "pyproject";

disabled = python.version != home-assistant.python.version;
@ -17,7 +17,7 @@ buildPythonPackage rec {
owner = "KapJI";
repo = "homeassistant-stubs";
rev = "refs/tags/${version}";
hash = "sha256-eLmWOMKLzhZ7M/gdUHhlDZ3T+N4h5aHxMwOI8ZUepps=";
hash = "sha256-stVfFXb5QfC+wZUSk53+jt/hb8kO1gCcgeOnHHpNlWE=";
};

nativeBuildInputs = [
@ -1,8 +1,10 @@
{ lib, stdenv
{ lib
, stdenv
, bzip2
, cmake
, doxygen
, fetchurl
, fetchpatch
, fuse
, libevent
, xz
@ -23,6 +25,15 @@ stdenv.mkDerivation rec {
sha256 = "0pwsj9rf6a6q7cnfbpcrfq2gjcy7sylqzqqr49g2zi39lrrh8533";
};

patches = [
# this patch fixes the build for glibc >= 2.38
(fetchpatch {
name = "strlcpy-glibc238.patch";
url = "https://bugs.debian.org/cgi-bin/bugreport.cgi?att=0;bug=1052360;msg=10";
hash = "sha256-uhQj+ZcHCV36Tm0pF/+JG59bSaRdTZCrMcKL3YhZTk8=";
})
];

nativeBuildInputs = [ cmake doxygen pkg-config ];
buildInputs = [ bzip2 fuse libevent xz openssl systemd tcp_wrappers zlib c-ares ];
@ -1,13 +1,12 @@
{ stdenv
, lib
, fetchFromGitHub
, fetchpatch
, gitUpdater
, testers
, cmake
, pkg-config
, python3
, doxygen
, libxslt
, boost
, egl-wayland
, freetype
@ -40,15 +39,25 @@

stdenv.mkDerivation (finalAttrs: {
pname = "mir";
version = "2.14.1";
version = "2.15.0";

src = fetchFromGitHub {
owner = "MirServer";
repo = "mir";
rev = "v${finalAttrs.version}";
hash = "sha256-IEGeZVNxwzHn5GASCyjNuQsnCzzfQBHdC33MWVMeZws=";
hash = "sha256-c1+gxzLEtNCjR/mx76O5QElQ8+AO4WsfcG7Wy1+nC6E=";
};

patches = [
# Fix gbm-kms tests
# Remove when version > 2.15.0
(fetchpatch {
name = "0001-mir-Fix-the-signature-of-drmModeCrtcSetGamma.patch";
url = "https://github.com/MirServer/mir/commit/98250e9c32c5b9b940da2fb0a32d8139bbc68157.patch";
hash = "sha256-tTtOHGNue5rsppOIQSfkOH5sVfFSn/KPGHmubNlRtLI=";
})
];

postPatch = ''
# Fix scripts that get run in tests
patchShebangs tools/detect_fd_leaks.bash tests/acceptance-tests/wayland-generator/test_wayland_generator.sh.in
@ -73,21 +82,13 @@ stdenv.mkDerivation (finalAttrs: {
substituteInPlace src/platform/graphics/CMakeLists.txt \
--replace "/usr/include/drm/drm_fourcc.h" "${lib.getDev libdrm}/include/libdrm/drm_fourcc.h" \
--replace "/usr/include/libdrm/drm_fourcc.h" "${lib.getDev libdrm}/include/libdrm/drm_fourcc.h"

# Fix date in generated docs not honouring SOURCE_DATE_EPOCH
# Install docs to correct dir
substituteInPlace cmake/Doxygen.cmake \
--replace '"date"' '"date" "--date=@'"$SOURCE_DATE_EPOCH"'"' \
--replace "\''${CMAKE_INSTALL_PREFIX}/share/doc/mir-doc" "\''${CMAKE_INSTALL_DOCDIR}"
'';

strictDeps = true;

nativeBuildInputs = [
cmake
doxygen
glib # gdbus-codegen
libxslt
lttng-ust # lttng-gen-tp
pkg-config
(python3.withPackages (ps: with ps; [
@ -137,9 +138,8 @@ stdenv.mkDerivation (finalAttrs: {
wlcs
];

buildFlags = [ "all" "doc" ];

cmakeFlags = [
"-DBUILD_DOXYGEN=OFF"
"-DMIR_PLATFORM='gbm-kms;x11;eglstream-kms;wayland'"
"-DMIR_ENABLE_TESTS=${if finalAttrs.doCheck then "ON" else "OFF"}"
# BadBufferTest.test_truncated_shm_file *doesn't* throw an error as the test expected, mark as such
@ -160,7 +160,7 @@ stdenv.mkDerivation (finalAttrs: {
export XDG_RUNTIME_DIR=/tmp
'';

outputs = [ "out" "dev" "doc" ];
outputs = [ "out" "dev" ];

passthru = {
tests.pkg-config = testers.testMetaPkgConfig finalAttrs.finalPackage;
@ -1,19 +1,47 @@
{ lib, stdenv, fetchurl, perl, php, gd, libpng, zlib, unzip, nixosTests }:
{ lib
, stdenv
, fetchFromGitHub
, perl
, php
, gd
, libpng
, openssl
, zlib
, unzip
, nixosTests
, nix-update-script
}:

stdenv.mkDerivation rec {
pname = "nagios";
version = "4.4.6";
version = "4.4.14";

src = fetchurl {
url = "mirror://sourceforge/nagios/nagios-4.x/${pname}-${version}/${pname}-${version}.tar.gz";
sha256 = "1x5hb97zbvkm73q53ydp1gwj8nnznm72q9c4rm6ny7phr995l3db";
src = fetchFromGitHub {
owner = "NagiosEnterprises";
repo = "nagioscore";
rev = "refs/tags/nagios-${version}";
hash = "sha256-EJKMgU3Nzfefq2VXxBrfDDrQZWZvj7HqKnWR9j75fGI=";
};

patches = [ ./nagios.patch ];
nativeBuildInputs = [ unzip ];
buildInputs = [ php perl gd libpng zlib ];

configureFlags = [ "--localstatedir=/var/lib/nagios" ];
buildInputs = [
php
perl
gd
libpng
openssl
zlib
];

configureFlags = [
"--localstatedir=/var/lib/nagios"
"--with-ssl=${openssl.dev}"
"--with-ssl-inc=${openssl.dev}/include"
"--with-ssl-lib=${lib.getLib openssl}/lib"
];

buildFlags = [ "all" ];

# Do not create /var directories
@ -28,15 +56,22 @@ stdenv.mkDerivation rec {
sed -i 's@/bin/@@g' $out/etc/objects/commands.cfg
'';

passthru.tests = {
inherit (nixosTests) nagios;
passthru = {
tests = {
inherit (nixosTests) nagios;
};
updateScript = nix-update-script {
extraArgs = [ "--version-regex" "nagios-(.*)" ];
};
};

meta = {
description = "A host, service and network monitoring program";
homepage = "https://www.nagios.org/";
license = lib.licenses.gpl2;
platforms = lib.platforms.linux;
maintainers = with lib.maintainers; [ immae thoughtpolice relrod ];
homepage = "https://www.nagios.org/";
changelog = "https://github.com/NagiosEnterprises/nagioscore/blob/nagios-${version}/Changelog";
license = lib.licenses.gpl2;
platforms = lib.platforms.linux;
mainProgram = "nagios";
maintainers = with lib.maintainers; [ immae thoughtpolice relrod anthonyroussel ];
};
}
@ -1,56 +0,0 @@
{ lib, stdenv, fetchurl, fetchpatch, perl, openssl, db, zlib, uwimap, html-tidy, pam}:

let
ssl = lib.optionals uwimap.withSSL
"-e 's/CCLIENT_SSL_ENABLE.*= false/CCLIENT_SSL_ENABLE=true/'";
in
stdenv.mkDerivation rec {
pname = "prayer";
version = "1.3.5";

src = fetchurl {
url = "ftp://ftp.csx.cam.ac.uk/pub/software/email/prayer/${pname}-${version}.tar.gz";
sha256 = "135fjbxjn385b6cjys6qhbwfw61mdcl2akkll4jfpdzfvhbxlyda";
};

patches = [
./install.patch

# fix build errors which result from openssl changes
(fetchpatch {
url = "https://sources.debian.org/data/main/p/prayer/1.3.5-dfsg1-6/debian/patches/disable_ssl3.patch";
sha256 = "1rx4bidc9prh4gffipykp144cyi3zd6qzd990s2aad3knzv5bkdd";
})
(fetchpatch {
url = "https://sources.debian.org/data/main/p/prayer/1.3.5-dfsg1-6/debian/patches/openssl1.1.patch";
sha256 = "0zinylvq3bcifdmki867gir49pbjx6qb5h019hawwif2l4jmlxw1";
})
];

postPatch = ''
sed -i -e s/gmake/make/ -e 's/LDAP_ENABLE.*= true/LDAP_ENABLE=false/' \
${ssl} \
-e 's/CCLIENT_LIBS=.*/CCLIENT_LIBS=-lc-client/' \
-e 's,^PREFIX .*,PREFIX='$out, \
-e 's,^CCLIENT_DIR=.*,CCLIENT_DIR=${uwimap}/include/c-client,' \
Config
sed -i -e s,/usr/bin/perl,${perl}/bin/perl, \
templates/src/*.pl
sed -i -e '/<stropts.h>/d' lib/os_linux.h
'' + /* html-tidy updates */ ''
substituteInPlace ./session/html_secure_tidy.c \
--replace buffio.h tidybuffio.h
'';

buildInputs = [ openssl db zlib uwimap html-tidy pam ];
nativeBuildInputs = [ perl ];

NIX_LDFLAGS = "-lpam";

meta = {
homepage = "http://www-uxsup.csx.cam.ac.uk/~dpc22/prayer/";
description = "Yet another Webmail interface for IMAP servers on Unix systems written in C";
license = lib.licenses.gpl2Plus;
platforms = lib.platforms.linux;
};
}
@ -1,170 +0,0 @@
diff --git a/accountd/Makefile b/accountd/Makefile
index c3e8107..7946776 100644
--- a/accountd/Makefile
+++ b/accountd/Makefile
@@ -75,6 +75,6 @@ clean:
-rm -f prayer-accountd test core *.o *~ \#*\#

install:
- $(INSTALL) -m 755 -o ${RO_USER} -g ${RW_GROUP} \
+ $(INSTALL) -m 755 \
prayer-accountd ${BROOT}${BIN_DIR}

diff --git a/files/Makefile b/files/Makefile
index 743d0ed..7eff064 100644
--- a/files/Makefile
+++ b/files/Makefile
@@ -52,20 +52,20 @@ distclean:

install-cert:
if [ -f certs/prayer.pem ]; then \
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) \
+ $(INSTALL) \
-m $(PRIVATE_FILE) certs/prayer.pem ${BROOT}${PREFIX}/certs; \
fi

install-config: etc/prayer.cf
- $(INSTALL) -D -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_FILE) \
+ $(INSTALL) -D -m $(PUBLIC_FILE) \
etc/prayer.cf ${BROOT}${PRAYER_CONFIG_FILE}

install-aconfig:
- $(INSTALL) -D -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_FILE) \
+ $(INSTALL) -D -m $(PUBLIC_FILE) \
etc/prayer-accountd.cf ${BROOT}${ACCOUNTD_CONFIG_FILE}

install-motd:
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_FILE) \
+ $(INSTALL) -m $(PUBLIC_FILE) \
etc/motd.html ${BROOT}${PREFIX}/etc

install:
@@ -83,6 +83,6 @@ install:
if [ ! -f $(BROOT)$(PREFIX)/etc/motd.html ]; then $(MAKE) install-motd; fi

redhat-install-init.d:
- install -D -o root -g root -m 755 \
+ install -D -m 755 \
./init.d/prayer $(BROOT)/etc/rc.d/init.d/prayer
#chkconfig prayer --level 2345 on
diff --git a/files/install.sh b/files/install.sh
index 8d1d1f4..0804a08 100755
--- a/files/install.sh
+++ b/files/install.sh
@@ -2,8 +2,6 @@
#
# $Cambridge: hermes/src/prayer/files/install.sh,v 1.7 2008/09/16 09:59:56 dpc22 Exp $

-PATH=/bin:/sbin/:/usr/bin:/usr/sbin
-
error=0

if [ "x$PREFIX" = "x" ]; then
@@ -55,24 +53,20 @@ if [ $error != 0 ]; then
exit 1
fi

-if [ ! -d ${VAR_PREFIX} -a `whoami` = "root" ]; then
- ${INSTALL} -d -o ${RW_USER} -g ${RW_GROUP} -m ${PRIVATE_DIR} ${VAR_PREFIX}
-fi
-
if [ ! -d ${PREFIX} ]; then
- ${INSTALL} -d -o ${RO_USER} -g ${RO_GROUP} -m ${PUBLIC_DIR} ${PREFIX}
+ ${INSTALL} -d -m ${PUBLIC_DIR} ${PREFIX}
fi

if [ ! -d ${PREFIX}/etc ]; then
- ${INSTALL} -d -o ${RO_USER} -g ${RO_GROUP} -m ${PUBLIC_DIR} ${PREFIX}/etc
+ ${INSTALL} -d -m ${PUBLIC_DIR} ${PREFIX}/etc
fi

if [ ! -d ${PREFIX}/certs ]; then
- ${INSTALL} -d -o ${RO_USER} -g ${RO_GROUP} -m ${PRIVATE_DIR} ${PREFIX}/certs
+ ${INSTALL} -d -m ${PRIVATE_DIR} ${PREFIX}/certs
fi

if [ ! -d ${BIN_DIR} ]; then
- ${INSTALL} -d -o ${RO_USER} -g ${RO_GROUP} -m ${PUBLIC_DIR} ${BIN_DIR}
+ ${INSTALL} -d -m ${PUBLIC_DIR} ${BIN_DIR}
fi

for i in icons static
@@ -83,5 +77,4 @@ do
fi
echo Copying ${i}
(tar cf - ${i}) | (cd ${PREFIX} ; tar xf -)
- (cd ${PREFIX}; chown -R ${RO_USER}:${RO_GROUP} ${i})
done
diff --git a/servers/Makefile b/servers/Makefile
index 021aed5..5ccbd08 100644
--- a/servers/Makefile
+++ b/servers/Makefile
@@ -107,13 +107,13 @@ clean:
-rm -f $(BIN) core *.o *.flc *~ \#*\#

install: all
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_DIR) -d \
+ $(INSTALL) -m $(PUBLIC_DIR) -d \
$(BROOT)$(BIN_DIR)
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_EXEC) \
+ $(INSTALL) -m $(PUBLIC_EXEC) \
prayer $(BROOT)$(BIN_DIR)
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_EXEC) \
+ $(INSTALL) -m $(PUBLIC_EXEC) \
prayer-chroot $(BROOT)$(BIN_DIR)
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_EXEC) \
+ $(INSTALL) -m $(PUBLIC_EXEC) \
prayer-session $(BROOT)$(BIN_DIR)

prayer: $(PRAYER_OBJS) prayer_main.o
diff --git a/templates/cam/Makefile b/templates/cam/Makefile
index 9f4122a..396b628 100644
--- a/templates/cam/Makefile
+++ b/templates/cam/Makefile
@@ -124,7 +124,7 @@ _template_index.c:
$(COMPILE) $(TYPE) $@ $*

install:
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_DIR) -d \
+ $(INSTALL) -m $(PUBLIC_DIR) -d \
$(BROOT)$(PREFIX)/templates/$(TYPE)
cp *.t $(BROOT)$(PREFIX)/templates/$(TYPE)
cp *.vars $(BROOT)$(PREFIX)/templates/$(TYPE)
diff --git a/templates/old/Makefile b/templates/old/Makefile
index 31016cf..288a64c 100644
--- a/templates/old/Makefile
+++ b/templates/old/Makefile
@@ -123,7 +123,7 @@ _template_index.c:
$(COMPILE) $(TYPE) $@ $*

install:
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_DIR) -d \
+ $(INSTALL) -m $(PUBLIC_DIR) -d \
$(BROOT)$(PREFIX)/templates/$(TYPE)
cp *.t $(BROOT)$(PREFIX)/templates/$(TYPE)
cp *.vars $(BROOT)$(PREFIX)/templates/$(TYPE)
diff --git a/utils/Makefile b/utils/Makefile
index 9c79916..ef82481 100644
--- a/utils/Makefile
+++ b/utils/Makefile
@@ -72,15 +72,15 @@ clean:
-rm -f $(BIN) core *.o *.flc *~ \#*\#

install: all
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_DIR) -d \
+ $(INSTALL) -m $(PUBLIC_DIR) -d \
$(BROOT)$(BIN_DIR)
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_EXEC) \
+ $(INSTALL) -m $(PUBLIC_EXEC) \
prayer-ssl-prune $(BROOT)$(BIN_DIR)
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_EXEC) \
+ $(INSTALL) -m $(PUBLIC_EXEC) \
prayer-sem-prune $(BROOT)$(BIN_DIR)
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_EXEC) \
+ $(INSTALL) -m $(PUBLIC_EXEC) \
prayer-db-prune $(BROOT)$(BIN_DIR)
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_EXEC) \
+ $(INSTALL) -m $(PUBLIC_EXEC) \
prayer-cyclog $(BROOT)$(BIN_DIR)

prayer-ssl-prune: $(PRUNE_OBJS)
@ -1,6 +1,7 @@
{ stdenv, writeScriptBin, makeWrapper, lib, fetchurl, git, cacert, libpng, libjpeg, libwebp
, erlang, openssl, expat, libyaml, bash, gnused, gnugrep, coreutils, util-linux, procps, gd
, flock, autoreconfHook
, gawk
, nixosTests
, withMysql ? false
, withPgsql ? false
@ -12,7 +13,7 @@
}:

let
ctlpath = lib.makeBinPath [ bash gnused gnugrep coreutils util-linux procps ];
ctlpath = lib.makeBinPath [ bash gnused gnugrep gawk coreutils util-linux procps ];
in stdenv.mkDerivation rec {
pname = "ejabberd";
version = "23.01";
Some files were not shown because too many files have changed in this diff.