Merge staging-next into staging
This commit is contained in:
commit 9031f2db8b
.github/workflows/eval.yml (new file, 139 lines, vendored)
@@ -0,0 +1,139 @@
name: Eval

on: pull_request_target

permissions:
  contents: read

jobs:
  attrs:
    name: Attributes
    runs-on: ubuntu-latest
    outputs:
      mergedSha: ${{ steps.merged.outputs.mergedSha }}
      systems: ${{ steps.systems.outputs.systems }}
    steps:
      # Important: Because of `pull_request_target`, this doesn't check out the PR,
      # but rather the base branch of the PR, which is needed so we don't run untrusted code
      - name: Check out the ci directory of the base branch
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          path: base
          sparse-checkout: ci
      - name: Check if the PR can be merged and get the test merge commit
        id: merged
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          if mergedSha=$(base/ci/get-merge-commit.sh ${{ github.repository }} ${{ github.event.number }}); then
            echo "Checking the merge commit $mergedSha"
            echo "mergedSha=$mergedSha" >> "$GITHUB_OUTPUT"
          else
            # Skipping so that no notifications are sent
            echo "Skipping the rest..."
          fi
          rm -rf base
      - name: Check out the PR at the test merge commit
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        # Add this to _all_ subsequent steps to skip them
        if: steps.merged.outputs.mergedSha
        with:
          ref: ${{ env.mergedSha }}
          path: nixpkgs

      - name: Install Nix
        uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30
        if: steps.merged.outputs.mergedSha

      - name: Evaluate the list of all attributes and get the systems matrix
        id: systems
        if: steps.merged.outputs.mergedSha
        run: |
          nix-build nixpkgs/ci -A eval.attrpathsSuperset
          echo "systems=$(<result/systems.json)" >> "$GITHUB_OUTPUT"

      - name: Upload the list of all attributes
        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
        if: steps.merged.outputs.mergedSha
        with:
          name: paths
          path: result/*

  outpaths:
    name: Outpaths
    runs-on: ubuntu-latest
    needs: attrs
    # Skip this and future steps if the PR can't be merged
    if: needs.attrs.outputs.mergedSha
    strategy:
      matrix:
        system: ${{ fromJSON(needs.attrs.outputs.systems) }}
    steps:
      - name: Download the list of all attributes
        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
        with:
          name: paths
          path: paths

      - name: Check out the PR at the test merge commit
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          ref: ${{ needs.attrs.outputs.mergedSha }}
          path: nixpkgs

      - name: Install Nix
        uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30

      - name: Evaluate the ${{ matrix.system }} output paths for all derivation attributes
        run: |
          nix-build nixpkgs/ci -A eval.singleSystem \
            --argstr evalSystem ${{ matrix.system }} \
            --arg attrpathFile ./paths/paths.json \
            --arg chunkSize 10000
          # If it uses too much memory, slightly decrease chunkSize

      - name: Upload the output paths and eval stats
        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
        if: needs.attrs.outputs.mergedSha
        with:
          name: intermediate-${{ matrix.system }}
          path: result/*

  process:
    name: Process
    runs-on: ubuntu-latest
    needs: outpaths
    steps:
      - name: Download output paths and eval stats for all systems
        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
        with:
          pattern: intermediate-*
          path: intermediate

      - name: Check out the PR at the test merge commit
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          ref: ${{ needs.attrs.outputs.mergedSha }}
          path: nixpkgs

      - name: Install Nix
        uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30

      - name: Combine all output paths and eval stats
        run: |
          nix-build nixpkgs/ci -A eval.combine \
            --arg resultsDir ./intermediate

      - name: Upload the combined results
        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
        with:
          name: result
          path: result/*

  # TODO: Run this workflow also on `push` (on at least the main development branches)
  # Then add an extra step here that waits for the base branch (not the merge base, because that could be very different)
  # to have completed the eval, then use
  # gh api --method GET /repos/NixOS/nixpkgs/actions/workflows/eval.yml/runs -f head_sha=<BASE>
  # and follow it to the artifact results, where you can then download the outpaths.json from the base branch
  # That can then be used to compare the number of changed paths, get evaluation stats and ping appropriate reviewers
@@ -26,4 +26,5 @@ in
   inherit pkgs;
   requestReviews = pkgs.callPackage ./request-reviews { };
   codeownersValidator = pkgs.callPackage ./codeowners-validator { };
+  eval = pkgs.callPackage ./eval { };
 }
ci/eval/README.md (new file, 19 lines)
@@ -0,0 +1,19 @@
# Nixpkgs CI evaluation

The code in this directory is used by the [eval.yml](../../.github/workflows/eval.yml) GitHub Actions workflow to evaluate the majority of Nixpkgs for all PRs, effectively making sure that when the development branches are processed by Hydra, no evaluation failures are encountered.

Furthermore, it also allows local evaluation using

```
nix-build ci -A eval.full \
  --max-jobs 4 \
  --cores 2 \
  --arg chunkSize 10000
```

- `--max-jobs`: The maximum number of derivations to run at the same time. Only each [supported system](../supportedSystems.nix) gets a separate derivation, so it doesn't make sense to set this higher than that number.
- `--cores`: The number of cores to use for each job. Recommended to set this to the amount of cores on your system divided by `--max-jobs`.
- `chunkSize`: The number of attributes that are evaluated simultaneously on a single core. Lowering this decreases memory usage at the cost of increased evaluation time. If this is too high, there won't be enough chunks to process in parallel, which also increases evaluation time.

A good default is to set `chunkSize` to 10000, which leads to about 3.6GB max memory usage per core, so suitable for fully utilising machines with 4 cores and 16GB memory, 8 cores and 32GB memory or 16 cores and 64GB memory.

Note that 16GB memory is the recommended minimum, while with less than 8GB memory evaluation time suffers greatly.
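
A quicker smoke test of the same pipeline might look like the sketch below; it relies on the `quickTest` argument defined in [default.nix](./default.nix), which evaluates only a single chunk on a single system:

```
nix-build ci -A eval.full \
  --arg quickTest true \
  --arg chunkSize 10000
```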
ci/eval/default.nix (new file, 273 lines)
@@ -0,0 +1,273 @@
{
  lib,
  runCommand,
  writeShellScript,
  linkFarm,
  time,
  procps,
  nix,
  jq,
  sta,
}:

let
  nixpkgs =
    with lib.fileset;
    toSource {
      root = ../..;
      fileset = unions (
        map (lib.path.append ../..) [
          "default.nix"
          "doc"
          "lib"
          "maintainers"
          "nixos"
          "pkgs"
          ".version"
          "ci/supportedSystems.nix"
        ]
      );
    };

  supportedSystems = import ../supportedSystems.nix;

  attrpathsSuperset =
    runCommand "attrpaths-superset.json"
      {
        src = nixpkgs;
        nativeBuildInputs = [
          nix
          time
        ];
        env.supportedSystems = builtins.toJSON supportedSystems;
        passAsFile = [ "supportedSystems" ];
      }
      ''
        export NIX_STATE_DIR=$(mktemp -d)
        mkdir $out
        export GC_INITIAL_HEAP_SIZE=4g
        command time -v \
          nix-instantiate --eval --strict --json --show-trace \
            $src/pkgs/top-level/release-attrpaths-superset.nix -A paths \
            --arg enableWarnings false > $out/paths.json
        mv "$supportedSystemsPath" $out/systems.json
      '';

  singleSystem =
    {
      # The system to evaluate.
      # Note that this is intentionally not called `system`,
      # because `--argstr system` would only be passed to the ci/default.nix file!
      evalSystem,
      # The path to the `paths.json` file from `attrpathsSuperset`
      attrpathFile,
      # The number of attributes per chunk, see ./README.md for more info.
      chunkSize,
      checkMeta ? true,
      includeBroken ? true,
      # Whether to just evaluate a single chunk for quick testing
      quickTest ? false,
    }:
    let
      singleChunk = writeShellScript "single-chunk" ''
        set -euo pipefail
        chunkSize=$1
        myChunk=$2
        system=$3
        outputDir=$4

        export NIX_SHOW_STATS=1
        export NIX_SHOW_STATS_PATH="$outputDir/stats/$myChunk"
        echo "Chunk $myChunk on $system start"
        set +e
        command time -f "Chunk $myChunk on $system done [%MKB max resident, %Es elapsed] %C" \
          nix-env -f "${nixpkgs}/pkgs/top-level/release-attrpaths-parallel.nix" \
            --query --available \
            --no-name --attr-path --out-path \
            --show-trace \
            --arg chunkSize "$chunkSize" \
            --arg myChunk "$myChunk" \
            --arg attrpathFile "${attrpathFile}" \
            --arg systems "[ \"$system\" ]" \
            --arg checkMeta ${lib.boolToString checkMeta} \
            --arg includeBroken ${lib.boolToString includeBroken} \
            > "$outputDir/result/$myChunk"
        exitCode=$?
        set -e
        if (( exitCode != 0 )); then
          echo "Evaluation failed with exit code $exitCode"
          # This immediately halts all xargs processes
          kill $PPID
        fi
      '';
    in
    runCommand "nixpkgs-eval-${evalSystem}"
      {
        nativeBuildInputs = [
          nix
          time
          procps
          jq
        ];
        env = {
          inherit evalSystem chunkSize;
        };
      }
      ''
        export NIX_STATE_DIR=$(mktemp -d)
        nix-store --init

        echo "System: $evalSystem"
        cores=$NIX_BUILD_CORES
        echo "Cores: $cores"
        attrCount=$(jq length "${attrpathFile}")
        echo "Attribute count: $attrCount"
        echo "Chunk size: $chunkSize"
        # Same as `attrCount / chunkSize` but rounded up
        chunkCount=$(( (attrCount - 1) / chunkSize + 1 ))
        echo "Chunk count: $chunkCount"

        mkdir $out

        # Record and print stats on free memory and swap in the background
        (
          while true; do
            availMemory=$(free -b | grep Mem | awk '{print $7}')
            freeSwap=$(free -b | grep Swap | awk '{print $4}')
            echo "Available memory: $(( availMemory / 1024 / 1024 )) MiB, free swap: $(( freeSwap / 1024 / 1024 )) MiB"

            if [[ ! -f "$out/min-avail-memory" ]] || (( availMemory < $(<$out/min-avail-memory) )); then
              echo "$availMemory" > $out/min-avail-memory
            fi
            if [[ ! -f $out/min-free-swap ]] || (( availMemory < $(<$out/min-free-swap) )); then
              echo "$freeSwap" > $out/min-free-swap
            fi
            sleep 4
          done
        ) &

        seq_end=$(( chunkCount - 1 ))

        ${lib.optionalString quickTest ''
          seq_end=0
        ''}

        chunkOutputDir=$(mktemp -d)
        mkdir "$chunkOutputDir"/{result,stats}

        seq -w 0 "$seq_end" |
          command time -f "%e" -o "$out/total-time" \
            xargs -I{} -P"$cores" \
            ${singleChunk} "$chunkSize" {} "$evalSystem" "$chunkOutputDir"

        if (( chunkSize * chunkCount != attrCount )); then
          # A final incomplete chunk would mess up the stats, don't include it
          rm "$chunkOutputDir"/stats/"$seq_end"
        fi

        # Make sure the glob doesn't break when there's no files
        shopt -s nullglob
        cat "$chunkOutputDir"/result/* > $out/paths
        cat "$chunkOutputDir"/stats/* > $out/stats.jsonstream
      '';

  combine =
    {
      resultsDir,
    }:
    runCommand "combined-result"
      {
        nativeBuildInputs = [
          jq
          sta
        ];
      }
      ''
        mkdir -p $out

        # Transform output paths to JSON
        cat ${resultsDir}/*/paths |
          jq --sort-keys --raw-input --slurp '
            split("\n") |
            map(select(. != "") | split(" ") | map(select(. != ""))) |
            map(
              {
                key: .[0],
                value: .[1] | split(";") | map(split("=") |
                  if length == 1 then
                    { key: "out", value: .[0] }
                  else
                    { key: .[0], value: .[1] }
                  end) | from_entries}
            ) | from_entries
          ' > $out/outpaths.json

        # Computes min, mean, error, etc. for a list of values and outputs a JSON from that
        statistics() {
          local stat=$1
          sta --transpose |
            jq --raw-input --argjson stat "$stat" -n '
              [
                inputs |
                split("\t") |
                { key: .[0], value: (.[1] | fromjson) }
              ] |
              from_entries |
              {
                key: ($stat | join(".")),
                value: .
              }'
        }

        # Gets all available number stats (without .sizes because those are constant and not interesting)
        readarray -t stats < <(jq -cs '.[0] | del(.sizes) | paths(type == "number")' ${resultsDir}/*/stats.jsonstream)

        # Combines the statistics from all evaluations
        {
          echo "{ \"key\": \"minAvailMemory\", \"value\": $(cat ${resultsDir}/*/min-avail-memory | sta --brief --min) }"
          echo "{ \"key\": \"minFreeSwap\", \"value\": $(cat ${resultsDir}/*/min-free-swap | sta --brief --min) }"
          cat ${resultsDir}/*/total-time | statistics '["totalTime"]'
          for stat in "''${stats[@]}"; do
            cat ${resultsDir}/*/stats.jsonstream |
              jq --argjson stat "$stat" 'getpath($stat)' |
              statistics "$stat"
          done
        } |
          jq -s from_entries > $out/stats.json
      '';

  full =
    {
      # Whether to evaluate just a single system, by default all are evaluated
      evalSystem ? if quickTest then "x86_64-linux" else null,
      # The number of attributes per chunk, see ./README.md for more info.
      chunkSize,
      quickTest ? false,
    }:
    let
      systems = if evalSystem == null then supportedSystems else [ evalSystem ];
      results = linkFarm "results" (
        map (evalSystem: {
          name = evalSystem;
          path = singleSystem {
            inherit quickTest evalSystem chunkSize;
            attrpathFile = attrpathsSuperset + "/paths.json";
          };
        }) systems
      );
    in
    combine {
      resultsDir = results;
    };

in
{
  inherit
    attrpathsSuperset
    singleSystem
    combine
    # The above three are used by separate VMs in a GitHub workflow,
    # while the below is intended for testing on a single local machine
    full
    ;
}
ci/supportedSystems.nix (new file, 6 lines)
@@ -0,0 +1,6 @@
[
  "aarch64-linux"
  "aarch64-darwin"
  "x86_64-linux"
  "x86_64-darwin"
]
@@ -14,19 +14,5 @@ let
 in
 pkgs.symlinkJoin {
   name = "nixpkgs-lib-tests";
-  paths = map testWithNix nixVersions ++
-    #
-    # TEMPORARY MIGRATION MECHANISM
-    #
-    # This comment and the expression which follows it should be
-    # removed as part of resolving this issue:
-    #
-    # https://github.com/NixOS/nixpkgs/issues/272591
-    #
-    [(import ../../pkgs/test/release {
-      inherit pkgs lib nix;
-    })]
-  ;
-
+  paths = map testWithNix nixVersions;
 }
@@ -13078,6 +13078,13 @@
     githubId = 30698906;
     name = "Luna D Dragon";
   };
+  luNeder = {
+    email = "luana@luana.dev.br";
+    matrix = "@luana:catgirl.cloud";
+    github = "LuNeder";
+    githubId = 19750714;
+    name = "Luana Neder";
+  };
   lunik1 = {
     email = "ch.nixpkgs@themaw.xyz";
     matrix = "@lunik1:lunik.one";
@@ -34,6 +34,9 @@

 - The `moonlight-qt` package (for [Moonlight game streaming](https://moonlight-stream.org/)) now has HDR support on Linux systems.

+- [Sched-ext](https://github.com/sched-ext/scx), a Linux kernel feature to run schedulers in userspace, is now available via [`services.scx`](options.html#opt-services.scx.enable).
+  Requires Linux kernel version 6.12 or later.
+
 - PostgreSQL now defaults to major version 16.

 - GNOME has been updated to version 47. Refer to the [release notes](https://release.gnome.org/47/) for more details.
@@ -29,6 +29,10 @@

 - `buildGoPackage` has been removed. Use `buildGoModule` instead. See the [Go section in the nixpkgs manual](https://nixos.org/manual/nixpkgs/unstable/#sec-language-go) for details.

+- `timescaledb` requires manual upgrade steps.
+  After you run ALTER EXTENSION, you must run [this SQL script](https://github.com/timescale/timescaledb-extras/blob/master/utils/2.15.X-fix_hypertable_foreign_keys.sql). For more details, see pull request [#6797](https://github.com/timescale/timescaledb/pull/6797).
+  PostgreSQL 13 is no longer supported in TimescaleDB v2.16.
+
 - `kanata` was updated to v1.7.0, which introduces several breaking changes.
   See the release notes of
   [v1.7.0](https://github.com/jtroo/kanata/releases/tag/v1.7.0)
@@ -1300,6 +1300,7 @@
   ./services/scheduling/atd.nix
   ./services/scheduling/cron.nix
   ./services/scheduling/fcron.nix
+  ./services/scheduling/scx.nix
   ./services/search/elasticsearch-curator.nix
   ./services/search/elasticsearch.nix
   ./services/search/hound.nix
@@ -1504,6 +1505,7 @@
   ./services/web-apps/pingvin-share.nix
   ./services/web-apps/plantuml-server.nix
   ./services/web-apps/plausible.nix
+  ./services/web-apps/porn-vault/default.nix
   ./services/web-apps/powerdns-admin.nix
   ./services/web-apps/pretalx.nix
   ./services/web-apps/pretix.nix
nixos/modules/services/scheduling/scx.nix (new file, 110 lines)
@@ -0,0 +1,110 @@
{
  lib,
  pkgs,
  config,
  ...
}:
let
  cfg = config.services.scx;
in
{
  options.services.scx = {
    enable = lib.mkEnableOption null // {
      description = ''
        Whether to enable SCX service, a daemon to run schedulers from userspace.

        ::: {.note}
        This service requires a kernel with the Sched-ext feature.
        Generally, kernel version 6.12 and later are supported.
        :::
      '';
    };

    package = lib.mkOption {
      type = lib.types.package;
      default = pkgs.scx.full;
      defaultText = lib.literalExpression "pkgs.scx.full";
      example = lib.literalExpression "pkgs.scx.rustland";
      description = ''
        `scx` package to use. `scx.full`, which includes all schedulers, is the default.
        You may choose a minimal package, such as `pkgs.scx.rustland`, if only one specific scheduler is needed.

        ::: {.note}
        Overriding this does not change the default scheduler; you should set `services.scx.scheduler` for it.
        :::
      '';
    };

    scheduler = lib.mkOption {
      type = lib.types.enum [
        "scx_bpfland"
        "scx_central"
        "scx_flatcg"
        "scx_lavd"
        "scx_layered"
        "scx_nest"
        "scx_pair"
        "scx_qmap"
        "scx_rlfifo"
        "scx_rustland"
        "scx_rusty"
        "scx_simple"
        "scx_userland"
      ];
      default = "scx_rustland";
      example = "scx_bpfland";
      description = ''
        Which scheduler to use. See [SCX documentation](https://github.com/sched-ext/scx/tree/main/scheds)
        for details on each scheduler and guidance on selecting the most suitable one.
      '';
    };

    extraArgs = lib.mkOption {
      type = lib.types.listOf lib.types.singleLineStr;
      example = [
        "--slice-us 5000"
        "--verbose"
      ];
      description = ''
        Parameters passed to the chosen scheduler at runtime.

        ::: {.note}
        Run `chosen-scx-scheduler --help` to see the available options. Generally,
        each scheduler has its own set of options, and they are incompatible with each other.
        :::
      '';
    };
  };

  config = lib.mkIf cfg.enable {
    environment.systemPackages = [ cfg.package ];

    systemd.services.scx = {
      description = "SCX scheduler daemon";

      # SCX service should be started only if the kernel supports sched-ext
      unitConfig.ConditionPathIsDirectory = "/sys/kernel/sched_ext";

      startLimitIntervalSec = 30;
      startLimitBurst = 2;

      serviceConfig = {
        Type = "simple";
        ExecStart = "${lib.getExe' cfg.package cfg.scheduler} ${lib.concatStringsSep " " cfg.extraArgs}";
        Restart = "on-failure";
        StandardError = "journal";
      };

      wantedBy = [ "multi-user.target" ];
    };

    assertions = [
      {
        assertion = lib.versionAtLeast config.boot.kernelPackages.kernel.version "6.12";
        message = "SCX is only supported on kernel version >= 6.12.";
      }
    ];
  };

  meta.maintainers = with lib.maintainers; [ johnrtitor ];
}
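
For reference, enabling this module from a NixOS configuration might look like the following sketch; the option names come from the module above, while the chosen scheduler and flags are purely illustrative:

```
{
  services.scx = {
    enable = true;
    scheduler = "scx_bpfland";   # any value from the enum above; defaults to "scx_rustland"
    extraArgs = [ "--verbose" ]; # forwarded verbatim to the scheduler binary
  };
}
```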
nixos/modules/services/web-apps/porn-vault/default-config.nix (new file, 158 lines)
@@ -0,0 +1,158 @@
# See https://gitlab.com/porn-vault/porn-vault/-/blob/dev/config.example.json
{
  auth = {
    password = null;
  };
  binaries = {
    ffmpeg = "ffmpeg";
    ffprobe = "ffprobe";
    izzyPort = 8000;
    imagemagick = {
      convertPath = "convert";
      montagePath = "montage";
      identifyPath = "identify";
    };
  };
  import = {
    images = [
      {
        path = "/media/porn-vault/images";
        include = [ ];
        exclude = [ ];
        extensions = [
          ".jpg"
          ".jpeg"
          ".png"
          ".gif"
        ];
        enable = true;
      }
    ];
    videos = [
      {
        path = "/media/porn-vault/videos";
        include = [ ];
        exclude = [ ];
        extensions = [
          ".mp4"
          ".mov"
          ".webm"
        ];
        enable = true;
      }
    ];
    scanInterval = 10800000;
  };
  log = {
    level = "debug";
    maxSize = "20m";
    maxFiles = "5";
    writeFile = [
      {
        level = "debug";
        prefix = "errors-";
        silent = false;
      }
    ];
  };
  matching = {
    applyActorLabels = [
      "event:actor:create"
      "event:actor:find-unmatched-scenes"
      "plugin:actor:create"
      "event:scene:create"
      "plugin:scene:create"
      "event:image:create"
      "plugin:marker:create"
      "event:marker:create"
    ];
    applySceneLabels = true;
    applyStudioLabels = [
      "event:studio:create"
      "event:studio:find-unmatched-scenes"
      "plugin:studio:create"
      "event:scene:create"
      "plugin:scene:create"
    ];
    extractSceneActorsFromFilepath = true;
    extractSceneLabelsFromFilepath = true;
    extractSceneMoviesFromFilepath = true;
    extractSceneStudiosFromFilepath = true;
    matcher = {
      type = "word";
      options = {
        ignoreSingleNames = false;
        ignoreDiacritics = true;
        enableWordGroups = true;
        wordSeparatorFallback = true;
        camelCaseWordGroups = true;
        overlappingMatchPreference = "longest";
        groupSeparators = [
          "[\\s',()[\\]{}*\\.]"
        ];
        wordSeparators = [
          "[-_]"
        ];
        filepathSeparators = [
          "[/\\\\&]"
        ];
      };
    };
    matchCreatedActors = true;
    matchCreatedStudios = true;
    matchCreatedLabels = true;
  };
  persistence = {
    backup = {
      enable = true;
      maxAmount = 10;
    };
    libraryPath = "/media/porn-vault/lib";
  };
  plugins = {
    allowActorThumbnailOverwrite = false;
    allowMovieThumbnailOverwrite = false;
    allowSceneThumbnailOverwrite = false;
    allowStudioThumbnailOverwrite = false;
    createMissingActors = false;
    createMissingLabels = false;
    createMissingMovies = false;
    createMissingStudios = false;
    events = {
      actorCreated = [ ];
      actorCustom = [ ];
      sceneCreated = [ ];
      sceneCustom = [ ];
      movieCustom = [ ];
      studioCreated = [ ];
      studioCustom = [ ];
    };
    register = { };
    markerDeduplicationThreshold = 5;
  };
  processing = {
    generatePreviews = true;
    readImagesOnImport = false;
    generateImageThumbnails = true;
  };
  server = {
    https = {
      certificate = "";
      enable = false;
      key = "";
    };
  };
  transcode = {
    hwaDriver = null;
    vaapiDevice = "/dev/dri/renderD128";
    h264 = {
      preset = "veryfast";
      crf = 23;
    };
    webm = {
      deadline = "realtime";
      cpuUsed = 3;
      crf = 31;
    };
  };
}
nixos/modules/services/web-apps/porn-vault/default.nix (new file, 110 lines)
@@ -0,0 +1,110 @@
{
  config,
  pkgs,
  lib,
  ...
}:

let
  cfg = config.services.porn-vault;
  configFormat = pkgs.formats.json { };
  defaultConfig = import ./default-config.nix;
  inherit (lib)
    mkIf
    mkEnableOption
    mkPackageOption
    mkOption
    getExe
    literalExpression
    types
    ;
in
{
  options = {
    services.porn-vault = {
      enable = lib.mkEnableOption "Porn-Vault";

      package = lib.mkPackageOption pkgs "porn-vault" { };

      autoStart = lib.mkOption {
        type = lib.types.bool;
        default = true;
        description = ''
          Whether to start porn-vault automatically.
        '';
      };

      port = lib.mkOption {
        type = lib.types.port;
        default = 3000;
        description = ''
          Which port Porn-Vault will use.
        '';
      };

      openFirewall = lib.mkOption {
        type = lib.types.bool;
        default = false;
        description = ''
          Whether to open the Porn-Vault port in the firewall.
        '';
      };

      settings = mkOption {
        type = configFormat.type;
        description = ''
          Configuration for Porn-Vault. The attributes are serialized to JSON in config.json.

          See https://gitlab.com/porn-vault/porn-vault/-/blob/dev/config.example.json
        '';
        default = defaultConfig;
        apply = lib.recursiveUpdate defaultConfig;
      };
    };
  };

  config = lib.mkIf cfg.enable {
    environment.systemPackages = [ cfg.package ];

    systemd.services.porn-vault = {
      description = "Porn-Vault server";
      environment = {
        PV_CONFIG_FOLDER = "/etc/porn-vault";
        NODE_ENV = "production";
        DATABASE_NAME = "production";
        PORT = toString cfg.port;
      };
      serviceConfig = {
        ExecStart = getExe cfg.package;
        CacheDirectory = "porn-vault";
        # Hardening options
        CapabilityBoundingSet = [ "CAP_SYS_NICE" ];
        AmbientCapabilities = [ "CAP_SYS_NICE" ];
        LockPersonality = true;
        NoNewPrivileges = true;
        PrivateTmp = true;
        ProtectControlGroups = true;
        ProtectKernelLogs = true;
        ProtectKernelModules = true;
        ProtectKernelTunables = true;
        ProtectSystem = true;
        RestrictNamespaces = true;
        RestrictSUIDSGID = true;
        Restart = "on-failure";
        RestartSec = 5;
      };
      wantedBy = mkIf cfg.autoStart [ "multi-user.target" ];
      wants = [ "network.target" ];
    };

    environment.etc = {
      "porn-vault/config.json".source = configFormat.generate "config.json" cfg.settings;
    };

    networking.firewall = lib.mkIf cfg.openFirewall {
      allowedTCPPorts = [ cfg.port ];
    };
  };

  meta.maintainers = [ lib.maintainers.luNeder ];
}
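
For reference, a minimal NixOS configuration using this module might look like the sketch below; the option names come from the module above, the overridden library path is illustrative only, and any `settings` provided are merged into the defaults from default-config.nix via `lib.recursiveUpdate`:

```
{
  services.porn-vault = {
    enable = true;
    port = 3000;
    openFirewall = false;
    # Merged into the default settings shown in default-config.nix
    settings.persistence.libraryPath = "/var/lib/porn-vault";
  };
}
```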
@@ -10,16 +10,16 @@ let
 in
 rustPlatform.buildRustPackage {
   pname = "lspce-module";
-  version = "1.1.0-unstable-2024-09-07";
+  version = "1.1.0-unstable-2024-10-07";

   src = fetchFromGitHub {
     owner = "zbelial";
     repo = "lspce";
-    rev = "4bf1fa9d3d8b17eb6ae628e93018ee8f020565ba";
-    hash = "sha256-OeDUQXqVBUfKjYt5oSmfl2N/19PFYIbPXfFqloai0LQ=";
+    rev = "2a06232033478757dc5770dc7ba658848073de42";
+    hash = "sha256-iCge/m1z4Tl3dDvbN4FGsINWE5GEtLxTlvBBu8Zxhzs=";
   };

-  cargoHash = "sha256-VMGdB4dF3Ccxl6DifdXFH4+XVT7RoeqI/l/AR/epg4o=";
+  cargoHash = "sha256-I3NxV0uIwQ/Vg9Txfx+ouA6FXOYyLQ2kKdhnAdkNfdE=";

   checkFlags = [
     # flaky test
@@ -6,13 +6,13 @@

 mkDerivation rec {
   pname = "pure-maps";
-  version = "3.3.0";
+  version = "3.4.0";

   src = fetchFromGitHub {
     owner = "rinigus";
     repo = "pure-maps";
     rev = version;
-    hash = "sha256-TeFolD3jXRdLGfXdy+QcwtOcQQVUB5fn8PwoYfRLaPQ=";
+    hash = "sha256-3XghdDwzt0r8Qi8W3ZMwar2aaqTNGiGsM27BHVr5C2E=";
     fetchSubmodules = true;
   };

@@ -1,5 +1,5 @@
 { lib, mkChromiumDerivation
-, channel, chromiumVersionAtLeast
+, chromiumVersionAtLeast
 , enableWideVine, ungoogled
 }:

@@ -90,7 +90,7 @@ mkChromiumDerivation (base: rec {
     license = if enableWideVine then lib.licenses.unfree else lib.licenses.bsd3;
     platforms = lib.platforms.linux;
     mainProgram = "chromium";
-    hydraPlatforms = lib.optionals (channel == "stable" || channel == "ungoogled-chromium") ["aarch64-linux" "x86_64-linux"];
+    hydraPlatforms = ["aarch64-linux" "x86_64-linux"];
     timeout = 172800; # 48 hours (increased from the Hydra default of 10h)
   };
 })
@@ -1,15 +1,19 @@
 { stdenv, lib, fetchpatch
-, recompressTarball
+, zstd
+, fetchFromGitiles
+, fetchNpmDeps
 , buildPackages
 , pkgsBuildBuild
 # Channel data:
-, channel, upstream-info
+, upstream-info
 # Helper functions:
 , chromiumVersionAtLeast, versionRange

 # Native build inputs:
 , ninja, pkg-config
 , python3, perl
+, nodejs
+, npmHooks
 , which
 , libuuid
 , overrideCC
@@ -145,12 +149,64 @@ let
     else throw "no chromium Rosetta Stone entry for os: ${platform.config}";
   };

+  isElectron = packageName == "electron";
+
+  chromiumDeps = lib.mapAttrs (path: args: fetchFromGitiles (removeAttrs args [ "recompress" ] // lib.optionalAttrs args.recompress or false {
+    name = "source.tar.zstd";
+    downloadToTemp = false;
+    passthru.unpack = true;
+    postFetch = ''
+      tar \
+        --use-compress-program="${lib.getExe zstd} -T$NIX_BUILD_CORES" \
+        --sort=name \
+        --mtime="1970-01-01" \
+        --owner=root --group=root \
+        --numeric-owner --mode=go=rX,u+rw,a-s \
+        --remove-files \
+        --directory="$out" \
+        -cf "$TMPDIR/source.zstd" .
+      mv "$TMPDIR/source.zstd" "$out"
+    '';
+  })) upstream-info.DEPS;
+
+  unpackPhaseSnippet = lib.concatStrings (lib.mapAttrsToList (path: dep:
+    (if dep.unpack or false
+      then ''
+        mkdir -p ${path}
+        pushd ${path}
+        unpackFile ${dep}
+        popd
+      ''
+      else ''
+        mkdir -p ${builtins.dirOf path}
+        cp -r ${dep}/. ${path}
+      ''
+    ) + ''
+      chmod u+w -R ${path}
+    '') chromiumDeps);
+
 base = rec {
   pname = "${lib.optionalString ungoogled "ungoogled-"}${packageName}-unwrapped";
   inherit (upstream-info) version;
   inherit packageName buildType buildPath;

-  src = recompressTarball { inherit version; inherit (upstream-info) hash; };
+  unpackPhase = ''
+    runHook preUnpack
+
+    ${unpackPhaseSnippet}
+    sourceRoot=src
+
+    runHook postUnpack
+  '';
+
+  npmRoot = "third_party/node";
+  npmDeps = (fetchNpmDeps {
+    src = chromiumDeps."src";
+    sourceRoot = npmRoot;
+    hash = upstream-info.deps.npmHash;
+  }).overrideAttrs (p: {
+    nativeBuildInputs = p.nativeBuildInputs or [ ] ++ [ zstd ];
+  });
+
   nativeBuildInputs = [
     ninja pkg-config
@@ -158,6 +214,9 @@ let
     which
     buildPackages.rustc.llvmPackages.bintools
     bison gperf
+  ] ++ lib.optionals (!isElectron) [
+    nodejs
+    npmHooks.npmConfigHook
   ];

   depsBuildBuild = [
@@ -317,7 +376,32 @@ let
     })
   ];

-  postPatch = ''
+  postPatch = lib.optionalString (!isElectron) ''
+    ln -s ${./files/gclient_args.gni} build/config/gclient_args.gni
+
+    echo 'LASTCHANGE=${upstream-info.DEPS."src".rev}-refs/heads/master@{#0}' > build/util/LASTCHANGE
+    echo "$SOURCE_DATE_EPOCH" > build/util/LASTCHANGE.committime
+
+    cat << EOF > gpu/config/gpu_lists_version.h
+    /* Generated by lastchange.py, do not edit.*/
+    #ifndef GPU_CONFIG_GPU_LISTS_VERSION_H_
+    #define GPU_CONFIG_GPU_LISTS_VERSION_H_
+    #define GPU_LISTS_VERSION "${upstream-info.DEPS."src".rev}"
+    #endif // GPU_CONFIG_GPU_LISTS_VERSION_H_
+    EOF
+
+    cat << EOF > skia/ext/skia_commit_hash.h
+    /* Generated by lastchange.py, do not edit.*/
+    #ifndef SKIA_EXT_SKIA_COMMIT_HASH_H_
+    #define SKIA_EXT_SKIA_COMMIT_HASH_H_
+    #define SKIA_COMMIT_HASH "${upstream-info.DEPS."src/third_party/skia".rev}-"
+    #endif // SKIA_EXT_SKIA_COMMIT_HASH_H_
+    EOF
+
+    echo -n '${upstream-info.DEPS."src/third_party/dawn".rev}' > gpu/webgpu/DAWN_VERSION
+
+    mkdir -p third_party/jdk/current/bin
+  '' + ''
     # Workaround/fix for https://bugs.chromium.org/p/chromium/issues/detail?id=1313361:
     substituteInPlace BUILD.gn \
       --replace '"//infra/orchestrator:orchestrator_all",' ""
@@ -513,6 +597,11 @@ let
   # enable those features in our stable builds.
   preConfigure = ''
     export RUSTC_BOOTSTRAP=1
+  '' + lib.optionalString (!isElectron) ''
+    (
+      cd third_party/node
+      grep patch update_npm_deps | sh
+    )
   '';

   configurePhase = ''
@@ -570,11 +659,9 @@ let
   '';

   passthru = {
-    updateScript = ./update.py;
-    chromiumDeps = {
-      gn = gnChromium;
-    };
-    inherit recompressTarball;
+    updateScript = ./update.mjs;
+  } // lib.optionalAttrs (!isElectron) {
+    inherit chromiumDeps npmDeps;
   };
 }
 # overwrite `version` with the exact same `version` from the same source,
@@ -10,8 +10,7 @@

 # package customization
 # Note: enable* flags should not require full rebuilds (i.e. only affect the wrapper)
-, channel ? "stable"
-, upstream-info ? (import ./upstream-info.nix).${channel}
+, upstream-info ? (lib.importJSON ./info.json).${if !ungoogled then "chromium" else "ungoogled-chromium"}
 , proprietaryCodecs ? true
 , enableWideVine ? false
 , ungoogled ? false # Whether to build chromium or ungoogled-chromium
@@ -46,13 +45,14 @@ let
   inherit stdenv upstream-info;

   mkChromiumDerivation = callPackage ./common.nix ({
-    inherit channel chromiumVersionAtLeast versionRange;
+    inherit chromiumVersionAtLeast versionRange;
     inherit proprietaryCodecs
       cupsSupport pulseSupport ungoogled;
     gnChromium = buildPackages.gn.overrideAttrs (oldAttrs: {
-      inherit (upstream-info.deps.gn) version;
+      version = if (upstream-info.deps.gn ? "version") then upstream-info.deps.gn.version else "0";
       src = fetchgit {
-        inherit (upstream-info.deps.gn) url rev hash;
+        url = "https://gn.googlesource.com/gn";
+        inherit (upstream-info.deps.gn) rev hash;
       };
   } // lib.optionalAttrs (chromiumVersionAtLeast "127") {
     # Relax hardening as otherwise gn unstable 2024-06-06 and later fail with:
@@ -65,11 +65,10 @@ let
     # As a work around until gn is updated again, we filter specifically that patch out.
     patches = lib.filter (e: lib.getName e != "LFS64.patch") oldAttrs.patches;
   });
-  recompressTarball = callPackage ./recompress-tarball.nix { inherit chromiumVersionAtLeast; };
 });

 browser = callPackage ./browser.nix {
-  inherit channel chromiumVersionAtLeast enableWideVine ungoogled;
+  inherit chromiumVersionAtLeast enableWideVine ungoogled;
 };

 # ungoogled-chromium is, contrary to its name, not a build of
@@ -80,8 +79,6 @@ let
   ungoogled-chromium = pkgsBuildBuild.callPackage ./ungoogled.nix {};
 };

-suffix = lib.optionalString (channel != "stable" && channel != "ungoogled-chromium") ("-" + channel);
-
 sandboxExecutableName = chromium.browser.passthru.sandboxExecutableName;

 # We want users to be able to enableWideVine without rebuilding all of
@@ -99,7 +96,7 @@ let

 in stdenv.mkDerivation {
   pname = lib.optionalString ungoogled "ungoogled-"
-    + "chromium${suffix}";
+    + "chromium";
   inherit (chromium.browser) version;

   nativeBuildInputs = [
pkgs/applications/networking/browsers/chromium/depot_tools.py (new executable file, 122 lines)
@@ -0,0 +1,122 @@
#! /usr/bin/env nix-shell
#! nix-shell -i python -p python3
"""
This is a heavily simplified variant of electron's update.py
for use in ./update.mjs and should not be called manually.

It resolves chromium's DEPS file recursively when called with
a working depot_tools checkout and a ref to fetch and prints
the result as JSON to stdout.
"""
import base64
import json
from typing import Optional
from urllib.request import urlopen

import sys

if len(sys.argv) != 3:
    print("""This internal script has been called with the wrong amount of parameters.
This script is not supposed to be called manually.
Refer to ./update.mjs instead.""")
    exit(1)

_, depot_tools_checkout, chromium_version = sys.argv

sys.path.append(depot_tools_checkout)
import gclient_eval
import gclient_utils


class Repo:
    fetcher: str
    args: dict

    def __init__(self) -> None:
        self.deps: dict = {}
        self.hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="

    def get_deps(self, repo_vars: dict, path: str) -> None:
        print(
            "evaluating " + json.dumps(self, default=vars, sort_keys=True),
            file=sys.stderr,
        )

        deps_file = self.get_file("DEPS")
        evaluated = gclient_eval.Parse(deps_file, vars_override=repo_vars, filename="DEPS")

        repo_vars = dict(evaluated.get("vars", {})) | repo_vars

        prefix = f"{path}/" if evaluated.get("use_relative_paths", False) else ""

        self.deps = {
            prefix + dep_name: repo_from_dep(dep)
            for dep_name, dep in evaluated.get("deps", {}).items()
            if (
                gclient_eval.EvaluateCondition(dep["condition"], repo_vars)
                if "condition" in dep
                else True
            )
            and repo_from_dep(dep) != None
        }

        for key in evaluated.get("recursedeps", []):
            dep_path = prefix + key
            if dep_path in self.deps and dep_path != "src/third_party/squirrel.mac":
                self.deps[dep_path].get_deps(repo_vars, dep_path)

    def flatten_repr(self) -> dict:
        return {"fetcher": self.fetcher, "hash": self.hash, **self.args}

    def flatten(self, path: str) -> dict:
        out = {path: self.flatten_repr()}
        for dep_path, dep in self.deps.items():
            out |= dep.flatten(dep_path)
        return out

    def get_file(self, filepath: str) -> str:
        raise NotImplementedError


class GitilesRepo(Repo):
    def __init__(self, url: str, rev: str) -> None:
        super().__init__()
        self.fetcher = "fetchFromGitiles"
        self.args = {
            "url": url,
            "rev": rev,
        }

    def get_file(self, filepath: str) -> str:
        return base64.b64decode(
            urlopen(
                f"{self.args['url']}/+/{self.args['rev']}/{filepath}?format=TEXT"
            ).read()
        ).decode("utf-8")


def repo_from_dep(dep: dict) -> Optional[Repo]:
    if "url" in dep:
        url, rev = gclient_utils.SplitUrlRevision(dep["url"])
        return GitilesRepo(url, rev)
    else:
        # Not a git dependency; skip
        return None


chromium = GitilesRepo("https://chromium.googlesource.com/chromium/src.git", chromium_version)
chromium.get_deps(
    {
        **{
            f"checkout_{platform}": platform == "linux" or platform == "x64" or platform == "arm64" or platform == "arm"
            for platform in ["ios", "chromeos", "android", "mac", "win", "linux"]
        },
        **{
            f"checkout_{arch}": True
            for arch in ["x64", "arm64", "arm", "x86", "mips", "mips64"]
        },
    },
    "",
)
print(json.dumps(chromium.flatten("src")))
@@ -0,0 +1,12 @@
build_with_chromium = true
checkout_android = false
checkout_android_prebuilts_build_tools = false
checkout_clang_coverage_tools = false
checkout_copybara = false
checkout_ios_webkit = false
checkout_nacl = false
checkout_openxr = false
checkout_src_internal = false
cros_boards = ""
cros_boards_with_qemu_images = ""
generate_location_tags = true
pkgs/applications/networking/browsers/chromium/info.json (new file, 1534 lines)
File diff suppressed because it is too large.
@@ -1,56 +0,0 @@
{ zstd
, fetchurl
, lib
, chromiumVersionAtLeast
}:

{ version
, hash ? ""
} @ args:

fetchurl ({
  name = "chromium-${version}.tar.zstd";
  url = "https://commondatastorage.googleapis.com/chromium-browser-official/chromium-${version}.tar.xz";
  inherit hash;

  # chromium xz tarballs are multiple gigabytes big and are sometimes downloaded multiples
  # times for different versions as part of our update script.
  # We originally inherited fetchzip's default for downloadToTemp (true).
  # Given the size of the /run/user tmpfs used defaults to logind's RuntimeDirectorySize=,
  # which in turn defaults to 10% of the total amount of physical RAM, this often lead to
  # "no space left" errors, eventually resulting in its own section in our chromium
  # README.md (for users wanting to run the update script).
  # Nowadays, we use fetchurl instead of fetchzip, which defaults to false instead of true.
  # We just want to be explicit and provide a place to document the history and reasoning
  # behind this.
  downloadToTemp = false;

  nativeBuildInputs = [ zstd ];

  postFetch = ''
    cat "$downloadedFile" \
      | xz -d --threads=$NIX_BUILD_CORES \
      | tar xf - \
        --warning=no-timestamp \
        --one-top-level=source \
        --exclude=third_party/llvm \
        --exclude=third_party/rust-src \
        --exclude='build/linux/debian_*-sysroot' \
  '' + lib.optionalString (chromiumVersionAtLeast "127") ''
        --exclude='*.tar.[a-zA-Z0-9][a-zA-Z0-9]' \
        --exclude='*.tar.[a-zA-Z0-9][a-zA-Z0-9][a-zA-Z0-9]' \
        --exclude=third_party/llvm-build \
        --exclude=third_party/rust-toolchain \
        --exclude=third_party/instrumented_libs \
  '' + ''
        --strip-components=1

    tar \
      --use-compress-program "zstd -T$NIX_BUILD_CORES" \
      --sort name \
      --mtime "1970-01-01" \
      --owner=root --group=root \
      --numeric-owner --mode=go=rX,u+rw,a-s \
      -cf $out source
  '';
} // removeAttrs args [ "version" ])
227
pkgs/applications/networking/browsers/chromium/update.mjs
Executable file
227
pkgs/applications/networking/browsers/chromium/update.mjs
Executable file
@ -0,0 +1,227 @@
#! /usr/bin/env nix-shell
/*
#! nix-shell -i zx -p zx
*/

cd(__dirname)
const nixpkgs = (await $`git rev-parse --show-toplevel`).stdout.trim()
const $nixpkgs = $({
  cwd: nixpkgs
})

const dummy_hash = 'sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA='

const lockfile_file = './info.json'
const lockfile_initial = fs.readJsonSync(lockfile_file)
function flush_to_file() {
  fs.writeJsonSync(lockfile_file, lockfile, { spaces: 2 })
}
const flush_to_file_proxy = {
  get(obj, prop) {
    const value = obj[prop]
    return typeof value == 'object' ? new Proxy(value, flush_to_file_proxy) : value
  },

  set(obj, prop, value) {
    obj[prop] = value
    flush_to_file()
    return true
  },
}
const lockfile = new Proxy(structuredClone(lockfile_initial), flush_to_file_proxy)


for (const attr_path of Object.keys(lockfile)) {
  if (!argv[attr_path]) {
    console.log(`[${attr_path}] Skipping ${attr_path}. Pass --${attr_path} as argument to update.`)
    continue
  }

  const ungoogled = attr_path === 'ungoogled-chromium'
  const version_nixpkgs = !ungoogled ? lockfile[attr_path].version : lockfile[attr_path].deps["ungoogled-patches"].rev
  const version_upstream = !ungoogled ? await get_latest_chromium_release() : await get_latest_ungoogled_release()

  console.log(`[${attr_path}] ${chalk.red(version_nixpkgs)} (nixpkgs)`)
  console.log(`[${attr_path}] ${chalk.green(version_upstream)} (upstream)`)

  if (version_greater_than(version_upstream, version_nixpkgs)) {
    console.log(`[${attr_path}] ${chalk.green(version_upstream)} from upstream is newer than our ${chalk.red(version_nixpkgs)}...`)

    // unconditionally remove ungoogled-chromium's epoch/sub-version (e.g. 130.0.6723.116-1 -> 130.0.6723.116)
    const version_chromium = version_upstream.split('-')[0]

    lockfile[attr_path] = {
      version: version_chromium,
      chromedriver: !ungoogled ? await fetch_chromedriver_binaries(version_chromium) : undefined,
      deps: {
        depot_tools: {},
        gn: {},
        "ungoogled-patches": ungoogled ? await fetch_ungoogled(version_upstream) : undefined,
        npmHash: dummy_hash,
      },
      DEPS: {},
    }

    const depot_tools = await fetch_depot_tools(version_chromium, lockfile_initial[attr_path].deps.depot_tools)
    lockfile[attr_path].deps.depot_tools = {
      rev: depot_tools.rev,
      hash: depot_tools.hash,
    }

    const gn = await fetch_gn(version_chromium, lockfile_initial[attr_path].deps.gn)
    lockfile[attr_path].deps.gn = {
      rev: gn.rev,
      hash: gn.hash,
    }

    // DEPS update loop
    lockfile[attr_path].DEPS = await resolve_DEPS(depot_tools.out, version_chromium)
    for (const [path, value] of Object.entries(lockfile[attr_path].DEPS)) {
      delete value.fetcher
      delete value.postFetch

      if (value.url === 'https://chromium.googlesource.com/chromium/src.git') {
        value.recompress = true
      }

      const cache = lockfile_initial[attr_path].DEPS[path]
      const cache_hit =
        cache !== undefined &&
        value.url === cache.url &&
        value.rev === cache.rev &&
        value.recompress === cache.recompress &&
        cache.hash !== undefined &&
        cache.hash !== '' &&
        cache.hash !== dummy_hash
      if (cache_hit) {
        console.log(`[${chalk.green(path)}] Reusing hash from previous info.json for ${cache.url}@${cache.rev}`)
        value.hash = cache.hash
        continue
      }

      console.log(`[${chalk.red(path)}] FOD prefetching ${value.url}@${value.rev}...`)
      value.hash = await prefetch_FOD('-A', `${attr_path}.browser.passthru.chromiumDeps."${path}"`)
      console.log(`[${chalk.green(path)}] FOD prefetching successful`)
    }

    lockfile[attr_path].deps.npmHash = await prefetch_FOD('-A', `${attr_path}.browser.passthru.npmDeps`)

    console.log(chalk.green(`[${attr_path}] Done updating ${attr_path} from ${version_nixpkgs} to ${version_upstream}!`))
  }
}


async function fetch_gn(chromium_rev, gn_previous) {
  const DEPS_file = await get_gitiles_file('https://chromium.googlesource.com/chromium/src', chromium_rev, 'DEPS')
  const gn_rev = /^\s+'gn_version': 'git_revision:(?<rev>.+)',$/m.exec(DEPS_file).groups.rev
  const hash = gn_rev === gn_previous.rev ? gn_previous.hash : ''

  return await prefetch_gitiles('https://gn.googlesource.com/gn', gn_rev, hash)
}


async function fetch_chromedriver_binaries(chromium_version) {
  // https://developer.chrome.com/docs/chromedriver/downloads/version-selection
  const prefetch = async (url) => {
    const expr = [`(import ./. {}).fetchzip { url = "${url}"; hash = ""; }`]
    const derivation = await $nixpkgs`nix-instantiate --expr ${expr}`
    return await prefetch_FOD(derivation)
  }

  // if the URL ever changes, the URLs in the chromedriver derivations need updating as well!
  const url = (platform) => `https://storage.googleapis.com/chrome-for-testing-public/${chromium_version}/${platform}/chromedriver-${platform}.zip`
  return {
    hash_darwin: await prefetch(url('mac-x64')),
    hash_darwin_aarch64: await prefetch(url('mac-arm64')),
  }
}


async function resolve_DEPS(depot_tools_checkout, chromium_rev) {
  const { stdout } = await $`./depot_tools.py ${depot_tools_checkout} ${chromium_rev}`
  const deps = JSON.parse(stdout)
  return Object.fromEntries(Object.entries(deps).map(([k, { url, rev, hash }]) => [k, { url, rev, hash }]))
}


async function get_latest_chromium_release() {
  const url = `https://versionhistory.googleapis.com/v1/chrome/platforms/linux/channels/stable/versions/all/releases?` + new URLSearchParams({
    order_by: 'version desc',
    filter: 'endtime=none,fraction>=0.5'
  })

  const response = await (await fetch(url)).json()
  return response.releases[0].version
}


async function get_latest_ungoogled_release() {
  const ungoogled_tags = await (await fetch('https://api.github.com/repos/ungoogled-software/ungoogled-chromium/tags')).json()
  const chromium_releases = await (await fetch('https://versionhistory.googleapis.com/v1/chrome/platforms/linux/channels/stable/versions/all/releases')).json()
  const chromium_release_map = chromium_releases.releases.map((x) => x.version)
  return ungoogled_tags.find((x) => chromium_release_map.includes(x.name.split('-')[0])).name
}


async function fetch_ungoogled(rev) {
  const expr = (hash) => [`(import ./. {}).fetchFromGitHub { owner = "ungoogled-software"; repo = "ungoogled-chromium"; rev = "${rev}"; hash = "${hash}"; }`]
  const hash = await prefetch_FOD('--expr', expr(''))

  const checkout = await $nixpkgs`nix-build --expr ${expr(hash)}`

  await fs.copy(`${checkout.stdout.trim()}/flags.gn`, './ungoogled-flags.toml')

  return {
    rev,
    hash,
  }
}


function version_greater_than(greater, than) {
  return greater.localeCompare(than, undefined, { numeric: true, sensitivity: 'base' }) === 1
}


async function get_gitiles_file(repo, rev, path) {
  const base64 = await (await fetch(`${repo}/+/${rev}/${path}?format=TEXT`)).text()
  return Buffer.from(base64, 'base64').toString('utf-8')
}


async function fetch_depot_tools(chromium_rev, depot_tools_previous) {
  const depot_tools_rev = await get_gitiles_file('https://chromium.googlesource.com/chromium/src', chromium_rev, 'third_party/depot_tools')
  const hash = depot_tools_rev === depot_tools_previous.rev ? depot_tools_previous.hash : ''
  return await prefetch_gitiles('https://chromium.googlesource.com/chromium/tools/depot_tools', depot_tools_rev, hash)
}


async function prefetch_gitiles(url, rev, hash = '') {
  const expr = () => [`(import ./. {}).fetchFromGitiles { url = "${url}"; rev = "${rev}"; hash = "${hash}"; }`]

  if (hash === '') {
    hash = await prefetch_FOD('--expr', expr())
  }

  const { stdout } = await $nixpkgs`nix-build --expr ${expr()}`

  return {
    url,
    rev,
    hash,
    out: stdout.trim(),
  }
}


async function prefetch_FOD(...args) {
  const { stderr } = await $nixpkgs`nix-build ${args}`.nothrow()
  const hash = /\s+got:\s+(?<hash>.+)$/m.exec(stderr)?.groups?.hash

  if (hash == undefined) {
    throw new Error(chalk.red('Expected to find hash in nix-build stderr output:') + stderr)
  }

  return hash
}
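The lockfile handling above is the one non-obvious part of the new script: info.json is rewritten on every assignment, so an interrupted update loses nothing. A minimal standalone sketch of that auto-flush Proxy pattern, assuming plain Node.js and an illustrative ./state.json (not part of the commit):

// Sketch only: persist every (possibly nested) mutation of `state` straight to disk.
// Assumes a readable ./state.json next to the script; names are illustrative.
const fs = require('fs')

const file = './state.json'
const handler = {
  get(obj, prop) {
    const value = obj[prop]
    // hand nested objects back wrapped in the same handler, so deep writes are trapped too
    return typeof value === 'object' && value !== null ? new Proxy(value, handler) : value
  },
  set(obj, prop, value) {
    obj[prop] = value
    // write the whole tree after each assignment, mirroring flush_to_file() above
    fs.writeFileSync(file, JSON.stringify(state, null, 2))
    return true
  },
}
const state = new Proxy(JSON.parse(fs.readFileSync(file, 'utf8')), handler)

// state.foo = { bar: 1 } and state.foo.bar = 2 both land in state.json immediately.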
@ -1,300 +0,0 @@
|
|||||||
#! /usr/bin/env nix-shell
|
|
||||||
#! nix-shell -i python -p python3Packages.looseversion nix nixfmt-classic nix-prefetch-git
|
|
||||||
|
|
||||||
"""This script automatically updates chromium, google-chrome, chromedriver, and ungoogled-chromium
|
|
||||||
via upstream-info.nix."""
|
|
||||||
# Usage: ./update.py [--commit]
|
|
||||||
|
|
||||||
import base64
|
|
||||||
import csv
|
|
||||||
import json
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from codecs import iterdecode
|
|
||||||
from collections import OrderedDict
|
|
||||||
from datetime import datetime
|
|
||||||
from looseversion import LooseVersion
|
|
||||||
from os.path import abspath, dirname
|
|
||||||
from urllib.request import urlopen
|
|
||||||
|
|
||||||
RELEASES_URL = 'https://versionhistory.googleapis.com/v1/chrome/platforms/linux/channels/all/versions/all/releases'
|
|
||||||
|
|
||||||
PIN_PATH = dirname(abspath(__file__)) + '/upstream-info.nix'
|
|
||||||
UNGOOGLED_FLAGS_PATH = dirname(abspath(__file__)) + '/ungoogled-flags.toml'
|
|
||||||
COMMIT_MESSAGE_SCRIPT = dirname(abspath(__file__)) + '/get-commit-message.py'
|
|
||||||
NIXPKGS_PATH = subprocess.check_output(["git", "rev-parse", "--show-toplevel"], cwd=dirname(PIN_PATH)).strip()
|
|
||||||
|
|
||||||
def load_as_json(path):
|
|
||||||
"""Loads the given nix file as JSON."""
|
|
||||||
out = subprocess.check_output(['nix-instantiate', '--eval', '--strict', '--json', path])
|
|
||||||
return json.loads(out)
|
|
||||||
|
|
||||||
def save_dict_as_nix(path, input):
|
|
||||||
"""Saves the given dict/JSON as nix file."""
|
|
||||||
json_string = json.dumps(input)
|
|
||||||
nix = subprocess.check_output(['nix-instantiate', '--eval', '--expr', '{ json }: builtins.fromJSON json', '--argstr', 'json', json_string])
|
|
||||||
formatted = subprocess.check_output(['nixfmt'], input=nix)
|
|
||||||
with open(path, 'w') as out:
|
|
||||||
out.write(formatted.decode())
|
|
||||||
|
|
||||||
def prefetch_src_sri_hash(attr_path, version):
|
|
||||||
"""Prefetches the fixed-output-derivation source tarball and returns its SRI-Hash."""
|
|
||||||
print(f'nix-build (FOD prefetch) {attr_path} {version}')
|
|
||||||
out = subprocess.run(
|
|
||||||
["nix-build", "--expr", f'(import ./. {{}}).{attr_path}.browser.passthru.recompressTarball {{ version = "{version}"; }}'],
|
|
||||||
cwd=NIXPKGS_PATH,
|
|
||||||
stderr=subprocess.PIPE
|
|
||||||
).stderr.decode()
|
|
||||||
|
|
||||||
for line in iter(out.split("\n")):
|
|
||||||
match = re.match(r"\s+got:\s+(.+)$", line)
|
|
||||||
if match:
|
|
||||||
print(f'Hash: {match.group(1)}')
|
|
||||||
return match.group(1)
|
|
||||||
print(f'{out}\n\nError: Expected hash in nix-build stderr output.', file=sys.stderr)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
def nix_prefetch_url(url, algo='sha256'):
|
|
||||||
"""Prefetches the content of the given URL."""
|
|
||||||
print(f'nix store prefetch-file {url}')
|
|
||||||
out = subprocess.check_output(['nix', 'store', 'prefetch-file', '--json', '--hash-type', algo, url])
|
|
||||||
return json.loads(out)['hash']
|
|
||||||
|
|
||||||
|
|
||||||
def nix_prefetch_git(url, rev):
|
|
||||||
"""Prefetches the requested Git revision of the given repository URL."""
|
|
||||||
print(f'nix-prefetch-git {url} {rev}')
|
|
||||||
out = subprocess.check_output(['nix-prefetch-git', '--quiet', '--url', url, '--rev', rev])
|
|
||||||
return json.loads(out)
|
|
||||||
|
|
||||||
|
|
||||||
def get_file_revision(revision, file_path):
|
|
||||||
"""Fetches the requested Git revision of the given Chromium file."""
|
|
||||||
url = f'https://chromium.googlesource.com/chromium/src/+/refs/tags/{revision}/{file_path}?format=TEXT'
|
|
||||||
with urlopen(url) as http_response:
|
|
||||||
resp = http_response.read()
|
|
||||||
return base64.b64decode(resp)
|
|
||||||
|
|
||||||
def get_ungoogled_file_revision(revision, file_path):
|
|
||||||
"""Fetches the requested Git revision of the given Chromium file."""
|
|
||||||
url = f'https://raw.githubusercontent.com/ungoogled-software/ungoogled-chromium/{revision}/{file_path}'
|
|
||||||
with urlopen(url) as http_response:
|
|
||||||
resp = http_response.read()
|
|
||||||
return resp.decode("utf-8")
|
|
||||||
|
|
||||||
def get_chromedriver(channel):
|
|
||||||
"""Get the latest chromedriver builds given a channel"""
|
|
||||||
# See https://chromedriver.chromium.org/downloads/version-selection#h.4wiyvw42q63v
|
|
||||||
chromedriver_versions_url = f'https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions-with-downloads.json'
|
|
||||||
print(f'GET {chromedriver_versions_url}')
|
|
||||||
with urlopen(chromedriver_versions_url) as http_response:
|
|
||||||
chromedrivers = json.load(http_response)
|
|
||||||
channel = chromedrivers['channels'][channel]
|
|
||||||
downloads = channel['downloads']['chromedriver']
|
|
||||||
|
|
||||||
def get_chromedriver_url(platform):
|
|
||||||
for download in downloads:
|
|
||||||
if download['platform'] == platform:
|
|
||||||
return download['url']
|
|
||||||
|
|
||||||
return {
|
|
||||||
'version': channel['version'],
|
|
||||||
'hash_linux': nix_prefetch_url(get_chromedriver_url('linux64')),
|
|
||||||
'hash_darwin': nix_prefetch_url(get_chromedriver_url('mac-x64')),
|
|
||||||
'hash_darwin_aarch64': nix_prefetch_url(get_chromedriver_url('mac-arm64'))
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def get_channel_dependencies(version):
|
|
||||||
"""Gets all dependencies for the given Chromium version."""
|
|
||||||
deps = get_file_revision(version, 'DEPS')
|
|
||||||
gn_pattern = b"'gn_version': 'git_revision:([0-9a-f]{40})'"
|
|
||||||
gn_commit = re.search(gn_pattern, deps).group(1).decode()
|
|
||||||
gn = nix_prefetch_git('https://gn.googlesource.com/gn', gn_commit)
|
|
||||||
return {
|
|
||||||
'gn': {
|
|
||||||
'version': datetime.fromisoformat(gn['date']).date().isoformat(),
|
|
||||||
'url': gn['url'],
|
|
||||||
'rev': gn['rev'],
|
|
||||||
'hash': gn['hash']
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def get_latest_ungoogled_chromium_tag(linux_stable_versions):
|
|
||||||
"""Returns the latest ungoogled-chromium tag for linux using the GitHub API."""
|
|
||||||
api_tag_url = 'https://api.github.com/repos/ungoogled-software/ungoogled-chromium/tags'
|
|
||||||
with urlopen(api_tag_url) as http_response:
|
|
||||||
tags = json.load(http_response)
|
|
||||||
for tag in tags:
|
|
||||||
if not tag['name'].split('-')[0] in linux_stable_versions:
|
|
||||||
continue
|
|
||||||
|
|
||||||
return tag['name']
|
|
||||||
|
|
||||||
|
|
||||||
def get_latest_ungoogled_chromium_build(linux_stable_versions):
|
|
||||||
"""Returns a dictionary for the latest ungoogled-chromium build."""
|
|
||||||
tag = get_latest_ungoogled_chromium_tag(linux_stable_versions)
|
|
||||||
version = tag.split('-')[0]
|
|
||||||
return {
|
|
||||||
'name': 'chrome/platforms/linux/channels/ungoogled-chromium/versions/',
|
|
||||||
'version': version,
|
|
||||||
'ungoogled_rev': tag
|
|
||||||
}
|
|
||||||
|
|
||||||
def get_ungoogled_chromium_build_by_ref(ungoogled_chromium_ref):
|
|
||||||
"""Returns a dictionary for an ungoogled-chromium build referenced by a ref in the ungoogled-chromium repository."""
|
|
||||||
version = get_ungoogled_file_revision(ungoogled_chromium_ref, "chromium_version.txt").strip("\n ")
|
|
||||||
return {
|
|
||||||
'name': 'chrome/platforms/linux/channels/ungoogled-chromium/versions/',
|
|
||||||
'version': version,
|
|
||||||
'ungoogled_rev': ungoogled_chromium_ref
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def get_ungoogled_chromium_gn_flags(revision):
|
|
||||||
"""Returns ungoogled-chromium's GN build flags for the given revision."""
|
|
||||||
gn_flags_url = f'https://raw.githubusercontent.com/ungoogled-software/ungoogled-chromium/{revision}/flags.gn'
|
|
||||||
return urlopen(gn_flags_url).read().decode()
|
|
||||||
|
|
||||||
|
|
||||||
def channel_name_to_attr_name(channel_name):
|
|
||||||
"""Maps a channel name to the corresponding main Nixpkgs attribute name."""
|
|
||||||
if channel_name == 'stable':
|
|
||||||
return 'chromium'
|
|
||||||
if channel_name == 'ungoogled-chromium':
|
|
||||||
return 'ungoogled-chromium'
|
|
||||||
print(f'Error: Unexpected channel: {channel_name}', file=sys.stderr)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
def get_channel_key(item):
|
|
||||||
"""Orders Chromium channels by their name."""
|
|
||||||
channel_name = item[0]
|
|
||||||
if channel_name == 'stable':
|
|
||||||
return 0
|
|
||||||
if channel_name == 'beta':
|
|
||||||
return 1
|
|
||||||
if channel_name == 'dev':
|
|
||||||
return 2
|
|
||||||
if channel_name == 'ungoogled-chromium':
|
|
||||||
return 3
|
|
||||||
print(f'Error: Unexpected channel: {channel_name}', file=sys.stderr)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
def print_updates(channels_old, channels_new):
|
|
||||||
"""Print a summary of the updates."""
|
|
||||||
print('Updates:')
|
|
||||||
for channel_name in channels_old:
|
|
||||||
version_old = channels_old[channel_name]["version"]
|
|
||||||
version_new = channels_new[channel_name]["version"]
|
|
||||||
if LooseVersion(version_old) < LooseVersion(version_new):
|
|
||||||
attr_name = channel_name_to_attr_name(channel_name)
|
|
||||||
print(f'- {attr_name}: {version_old} -> {version_new}')
|
|
||||||
|
|
||||||
|
|
||||||
channels = {}
|
|
||||||
last_channels = load_as_json(PIN_PATH)
|
|
||||||
|
|
||||||
src_hash_cache = {}
|
|
||||||
|
|
||||||
|
|
||||||
print(f'GET {RELEASES_URL}', file=sys.stderr)
|
|
||||||
with urlopen(RELEASES_URL) as resp:
|
|
||||||
releases = json.load(resp)['releases']
|
|
||||||
|
|
||||||
if len(sys.argv) == 3 and sys.argv[1] == 'ungoogled-rev':
|
|
||||||
releases.append(get_ungoogled_chromium_build_by_ref(sys.argv[2]))
|
|
||||||
else:
|
|
||||||
linux_stable_versions = [release['version'] for release in releases if release['name'].startswith('chrome/platforms/linux/channels/stable/versions/')]
|
|
||||||
releases.append(get_latest_ungoogled_chromium_build(linux_stable_versions))
|
|
||||||
|
|
||||||
for release in releases:
|
|
||||||
channel_name = re.findall("chrome/platforms/linux/channels/(.*)/versions/", release['name'])[0]
|
|
||||||
|
|
||||||
# If we've already found a newer release for this channel, we're
|
|
||||||
# no longer interested in it.
|
|
||||||
if channel_name in channels:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# We only look for channels that are listed in our version pin file.
|
|
||||||
if channel_name not in last_channels:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# If we're back at the last release we used, we don't need to
|
|
||||||
# keep going -- there's no new version available, and we can
|
|
||||||
# just reuse the info from last time.
|
|
||||||
if release['version'] == last_channels[channel_name]['version']:
|
|
||||||
channels[channel_name] = last_channels[channel_name]
|
|
||||||
continue
|
|
||||||
|
|
||||||
channel = {'version': release['version']}
|
|
||||||
if channel_name == 'dev':
|
|
||||||
google_chrome_suffix = 'unstable'
|
|
||||||
elif channel_name == 'ungoogled-chromium':
|
|
||||||
google_chrome_suffix = 'stable'
|
|
||||||
else:
|
|
||||||
google_chrome_suffix = channel_name
|
|
||||||
|
|
||||||
try:
|
|
||||||
version = release["version"]
|
|
||||||
existing_releases = dict(map(lambda channel: (channel[1]['version'], channel[1]['hash']), last_channels.items()))
|
|
||||||
|
|
||||||
if version in src_hash_cache:
|
|
||||||
print(f'Already got hash {src_hash_cache[version]} for {version}, skipping FOD prefetch for {channel_name_to_attr_name(channel_name)}')
|
|
||||||
|
|
||||||
channel["hash"] = src_hash_cache[version]
|
|
||||||
elif version in existing_releases:
|
|
||||||
print(f'Already got hash {existing_releases[version]} for {version} (from upstream-info.nix), skipping FOD prefetch for {channel_name_to_attr_name(channel_name)}')
|
|
||||||
|
|
||||||
channel["hash"] = existing_releases[version]
|
|
||||||
else:
|
|
||||||
channel["hash"] = prefetch_src_sri_hash(
|
|
||||||
channel_name_to_attr_name(channel_name),
|
|
||||||
version
|
|
||||||
)
|
|
||||||
src_hash_cache[version] = channel["hash"]
|
|
||||||
except subprocess.CalledProcessError:
|
|
||||||
# This release isn't actually available yet. Continue to
|
|
||||||
# the next one.
|
|
||||||
continue
|
|
||||||
|
|
||||||
channel['deps'] = get_channel_dependencies(channel['version'])
|
|
||||||
if channel_name == 'stable':
|
|
||||||
channel['chromedriver'] = get_chromedriver('Stable')
|
|
||||||
elif channel_name == 'ungoogled-chromium':
|
|
||||||
ungoogled_repo_url = 'https://github.com/ungoogled-software/ungoogled-chromium.git'
|
|
||||||
channel['deps']['ungoogled-patches'] = {
|
|
||||||
'rev': release['ungoogled_rev'],
|
|
||||||
'hash': nix_prefetch_git(ungoogled_repo_url, release['ungoogled_rev'])['hash']
|
|
||||||
}
|
|
||||||
with open(UNGOOGLED_FLAGS_PATH, 'w') as out:
|
|
||||||
out.write(get_ungoogled_chromium_gn_flags(release['ungoogled_rev']))
|
|
||||||
|
|
||||||
channels[channel_name] = channel
|
|
||||||
|
|
||||||
|
|
||||||
sorted_channels = OrderedDict(sorted(channels.items(), key=get_channel_key))
|
|
||||||
if len(sys.argv) == 2 and sys.argv[1] == '--commit':
|
|
||||||
for channel_name in sorted_channels.keys():
|
|
||||||
version_old = last_channels[channel_name]['version']
|
|
||||||
version_new = sorted_channels[channel_name]['version']
|
|
||||||
if LooseVersion(version_old) < LooseVersion(version_new):
|
|
||||||
last_channels[channel_name] = sorted_channels[channel_name]
|
|
||||||
save_dict_as_nix(PIN_PATH, last_channels)
|
|
||||||
attr_name = channel_name_to_attr_name(channel_name)
|
|
||||||
commit_message = f'{attr_name}: {version_old} -> {version_new}'
|
|
||||||
if channel_name == 'stable':
|
|
||||||
body = subprocess.check_output([COMMIT_MESSAGE_SCRIPT, version_new]).decode('utf-8')
|
|
||||||
commit_message += '\n\n' + body
|
|
||||||
elif channel_name == 'ungoogled-chromium':
|
|
||||||
subprocess.run(['git', 'add', UNGOOGLED_FLAGS_PATH], check=True)
|
|
||||||
subprocess.run(['git', 'add', JSON_PATH], check=True)
|
|
||||||
subprocess.run(['git', 'commit', '--file=-'], input=commit_message.encode(), check=True)
|
|
||||||
else:
|
|
||||||
save_dict_as_nix(PIN_PATH, sorted_channels)
|
|
||||||
print_updates(last_channels, sorted_channels)
|
|
@ -1,37 +0,0 @@
{
  stable = {
    chromedriver = {
      hash_darwin = "sha256-+Pcd++19/nJVsqGr2jzyjMTWYfb2U9wSgnNccDyGuGU=";
      hash_darwin_aarch64 =
        "sha256-vrbIpHrBwbzqars7D546eJ7zhEhAB0abq7MXiqlU4ts=";
      hash_linux = "sha256-NbZ1GULLWJ6L3kczz23HoUhGk6VgBOXcjZlL7t4Z6Ec=";
      version = "130.0.6723.116";
    };
    deps = {
      gn = {
        hash = "sha256-iNXRq3Mr8+wmY1SR4sV7yd2fDiIZ94eReelwFI0UhGU=";
        rev = "20806f79c6b4ba295274e3a589d85db41a02fdaa";
        url = "https://gn.googlesource.com/gn";
        version = "2024-09-09";
      };
    };
    hash = "sha256-eOCUKhFv205MD1gEY1FQQNCwxyELNjIAxUlPcRn74Lk=";
    version = "130.0.6723.116";
  };
  ungoogled-chromium = {
    deps = {
      gn = {
        hash = "sha256-iNXRq3Mr8+wmY1SR4sV7yd2fDiIZ94eReelwFI0UhGU=";
        rev = "20806f79c6b4ba295274e3a589d85db41a02fdaa";
        url = "https://gn.googlesource.com/gn";
        version = "2024-09-09";
      };
      ungoogled-patches = {
        hash = "sha256-+94tSSaOp6vzWkXN1qF3UXMm/Rs3pKmjf+U4x+af818=";
        rev = "130.0.6723.116-1";
      };
    };
    hash = "sha256-eOCUKhFv205MD1gEY1FQQNCwxyELNjIAxUlPcRn74Lk=";
    version = "130.0.6723.116";
  };
}
@ -28,13 +28,13 @@

stdenv.mkDerivation rec {
  pname = "jwm";
-  version = "2.4.5";
+  version = "2.4.6";

  src = fetchFromGitHub {
    owner = "joewing";
    repo = "jwm";
    rev = "v${version}";
-    sha256 = "sha256-T0N9UMu+BLRzVWshUB4apiq8H2t1y09n4P1cLT5K/N8=";
+    hash = "sha256-odGqHdm8xnjEcXmpKMy51HEhbjcROLL3hRSdlbmTr2g=";
  };

  nativeBuildInputs = [
@ -8,13 +8,13 @@

stdenv.mkDerivation (finalAttrs: {
  pname = "blasfeo";
-  version = "0.1.3";
+  version = "0.1.4";

  src = fetchFromGitHub {
    owner = "giaf";
    repo = "blasfeo";
    rev = finalAttrs.version;
-    hash = "sha256-e8InqyUMWRdL4CBHUOtrZkuabaTLiNPMNPRCnWzWkQ4=";
+    hash = "sha256-Qm6N1PeWZtS9H5ZuL31NbsctpZiJaGI7bfSPMUmI2BQ=";
  };

  nativeBuildInputs = [ cmake ];
@ -12,13 +12,13 @@

buildNpmPackage rec {
  pname = "blockbench";
-  version = "4.11.1";
+  version = "4.11.2";

  src = fetchFromGitHub {
    owner = "JannisX11";
    repo = "blockbench";
-    rev = "v${version}";
-    hash = "sha256-a+55seE5tFxTmdTn4qDFWWW6C6FzO8Vgjvfow/tBqf0=";
+    rev = "refs/tags/v${version}";
+    hash = "sha256-rUMzn+3j+RL8DY8euS6a4MmdoIAVLXxXu9wvKNmK/TU=";
  };

  nativeBuildInputs =
@ -93,7 +93,7 @@ buildNpmPackage rec {
  ];

  meta = {
-    changelog = "https://github.com/JannisX11/blockbench/releases/tag/${src.rev}";
+    changelog = "https://github.com/JannisX11/blockbench/releases/tag/v${version}";
    description = "Low-poly 3D modeling and animation software";
    homepage = "https://blockbench.net/";
    license = lib.licenses.gpl3Only;
7343
pkgs/by-name/cl/clash-rs/Cargo.lock
generated
File diff suppressed because it is too large
@ -7,28 +7,18 @@
}:
rustPlatform.buildRustPackage rec {
  pname = "clash-rs";
-  version = "0.7.0";
+  version = "0.7.1";

  src = fetchFromGitHub {
    owner = "Watfaq";
    repo = "clash-rs";
-    rev = "v${version}";
-    hash = "sha256-0deMVI51XHTCrnLTycqDsaY5Lq+wx14uMUlkG5OViNA=";
+    rev = "refs/tags/v${version}";
+    hash = "sha256-H76ErJQ+qKC3mt3IzNCPldAwlj7NnYUcLzUuOYykxnE=";
  };

-  cargoLock = {
-    lockFile = ./Cargo.lock;
-    outputHashes = {
-      "boringtun-0.6.0" = "sha256-HBNo53b+CpCGmTXZYH4NBBvNmekyaBKAk1pSRzZdavg=";
-      "netstack-lwip-0.3.4" = "sha256-lcauDyaw5gAaECRcGNXQDHbWmnyxil18qWFkZ/p/C50=";
-      "rustls-0.23.12" = "sha256-grt94JG44MljRQRooVZbXL4h4XLI1/KoIdwGv03MoIU=";
-      "tokio-rustls-0.26.0" = "sha256-Bmi36j8hbR4kkY/xnHbluaInk+YH5/eTln0VYfHulGA=";
-      "tracing-oslog-0.2.0" = "sha256-JYaCslbVOgsyBhjeBkplPWcjSgFccjr4s6OAGIUu5kg=";
-      "tuic-1.3.1" = "sha256-WMd+O2UEu0AEI+gNeQtdBhEgIB8LPanoIpMcDAUUWrM=";
-      "tun-0.6.1" = "sha256-j4yQSu4Mw7DBFak8vJGQomYq81+pfaeEDdN4NNBve+E=";
-      "unix-udp-sock-0.7.0" = "sha256-TekBfaxecFPpOfq7PVjLHwc0uIp3yJGV/Cgav5VfKaA=";
-    };
-  };
+  useFetchCargoVendor = true;
+  cargoHash = "sha256-yU5ioAuCJRuYKNOdd381W07Ua+c2me+wHFOMukTVVqM=";

  env = {
    PROTOC = "${protobuf}/bin/protoc";
@ -1,24 +1,38 @@
|
|||||||
{ lib, fetchFromGitHub
|
{
|
||||||
, autoPatchelfHook
|
autoPatchelfHook,
|
||||||
, fuse3
|
fetchFromGitHub,
|
||||||
, maven, jdk, makeShellWrapper, glib, wrapGAppsHook3
|
fuse3,
|
||||||
, libayatana-appindicator
|
glib,
|
||||||
|
jdk23,
|
||||||
|
lib,
|
||||||
|
libayatana-appindicator,
|
||||||
|
makeShellWrapper,
|
||||||
|
maven,
|
||||||
|
wrapGAppsHook3,
|
||||||
}:
|
}:
|
||||||
|
|
||||||
|
let
|
||||||
|
jdk = jdk23.override { enableJavaFX = true; };
|
||||||
|
in
|
||||||
maven.buildMavenPackage rec {
|
maven.buildMavenPackage rec {
|
||||||
pname = "cryptomator";
|
pname = "cryptomator";
|
||||||
version = "1.14.1";
|
version = "1.14.2";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = fetchFromGitHub {
|
||||||
owner = "cryptomator";
|
owner = "cryptomator";
|
||||||
repo = "cryptomator";
|
repo = "cryptomator";
|
||||||
rev = version;
|
rev = version;
|
||||||
hash = "sha256-so8RINjFLF9H4K9f/60Ym/v/VpcVfxJ/c+JDOAPFgZU=";
|
hash = "sha256-TSE83QYFry8O6MKAoggJBjqonYiGax5GG/a7sm7aHf8=";
|
||||||
};
|
};
|
||||||
|
|
||||||
|
patches = [
|
||||||
|
# https://github.com/cryptomator/cryptomator/pull/3621
|
||||||
|
./string-template-removal-and-jdk23.patch
|
||||||
|
];
|
||||||
|
|
||||||
mvnJdk = jdk;
|
mvnJdk = jdk;
|
||||||
mvnParameters = "-Dmaven.test.skip=true -Plinux";
|
mvnParameters = "-Dmaven.test.skip=true -Plinux";
|
||||||
mvnHash = "sha256-aB7wgnJAYvCizC0/gG/amcId/WVVWmZndItm398nDfQ=";
|
mvnHash = "sha256-LFD150cGW6OdwkK28GYI9j44GtVE0pwFMaQ8dQqArLo=";
|
||||||
|
|
||||||
preBuild = ''
|
preBuild = ''
|
||||||
VERSION=${version}
|
VERSION=${version}
|
||||||
@ -55,8 +69,18 @@ maven.buildMavenPackage rec {
|
|||||||
--add-flags "-Dcryptomator.disableUpdateCheck=true" \
|
--add-flags "-Dcryptomator.disableUpdateCheck=true" \
|
||||||
--add-flags "-Dcryptomator.integrationsLinux.trayIconsDir='$out/share/icons/hicolor/symbolic/apps'" \
|
--add-flags "-Dcryptomator.integrationsLinux.trayIconsDir='$out/share/icons/hicolor/symbolic/apps'" \
|
||||||
--add-flags "--module org.cryptomator.desktop/org.cryptomator.launcher.Cryptomator" \
|
--add-flags "--module org.cryptomator.desktop/org.cryptomator.launcher.Cryptomator" \
|
||||||
--prefix PATH : "$out/share/cryptomator/libs/:${lib.makeBinPath [ jdk glib ]}" \
|
--prefix PATH : "$out/share/cryptomator/libs/:${
|
||||||
--prefix LD_LIBRARY_PATH : "${lib.makeLibraryPath [ fuse3 libayatana-appindicator ]}" \
|
lib.makeBinPath [
|
||||||
|
jdk
|
||||||
|
glib
|
||||||
|
]
|
||||||
|
}" \
|
||||||
|
--prefix LD_LIBRARY_PATH : "${
|
||||||
|
lib.makeLibraryPath [
|
||||||
|
fuse3
|
||||||
|
libayatana-appindicator
|
||||||
|
]
|
||||||
|
}" \
|
||||||
--set JAVA_HOME "${jdk.home}"
|
--set JAVA_HOME "${jdk.home}"
|
||||||
|
|
||||||
# install desktop entry and icons
|
# install desktop entry and icons
|
||||||
@ -79,24 +103,30 @@ maven.buildMavenPackage rec {
|
|||||||
|
|
||||||
nativeBuildInputs = [
|
nativeBuildInputs = [
|
||||||
autoPatchelfHook
|
autoPatchelfHook
|
||||||
|
jdk
|
||||||
makeShellWrapper
|
makeShellWrapper
|
||||||
wrapGAppsHook3
|
wrapGAppsHook3
|
||||||
jdk
|
|
||||||
];
|
];
|
||||||
buildInputs = [ fuse3 jdk glib libayatana-appindicator ];
|
buildInputs = [
|
||||||
|
fuse3
|
||||||
|
glib
|
||||||
|
jdk
|
||||||
|
libayatana-appindicator
|
||||||
|
];
|
||||||
|
|
||||||
meta = with lib; {
|
meta = {
|
||||||
description = "Free client-side encryption for your cloud files";
|
description = "Free client-side encryption for your cloud files";
|
||||||
mainProgram = "cryptomator";
|
|
||||||
homepage = "https://cryptomator.org";
|
homepage = "https://cryptomator.org";
|
||||||
sourceProvenance = with sourceTypes; [
|
license = lib.licenses.gpl3Plus;
|
||||||
|
mainProgram = "cryptomator";
|
||||||
|
maintainers = with lib.maintainers; [
|
||||||
|
bachp
|
||||||
|
gepbird
|
||||||
|
];
|
||||||
|
platforms = [ "x86_64-linux" ];
|
||||||
|
sourceProvenance = with lib.sourceTypes; [
|
||||||
fromSource
|
fromSource
|
||||||
binaryBytecode # deps
|
binaryBytecode # deps
|
||||||
];
|
];
|
||||||
license = licenses.gpl3Plus;
|
|
||||||
maintainers = with maintainers; [ bachp ];
|
|
||||||
platforms = [ "x86_64-linux" ];
|
|
||||||
# Uses abandoned JEP 430 string template preview, removed in JDK 23
|
|
||||||
broken = true;
|
|
||||||
};
|
};
|
||||||
}
|
}
|
@ -0,0 +1,135 @@
|
|||||||
|
diff --git a/src/main/java/org/cryptomator/common/mount/Mounter.java b/src/main/java/org/cryptomator/common/mount/Mounter.java
|
||||||
|
index 6ca067305b..89f8fb7822 100644
|
||||||
|
--- a/src/main/java/org/cryptomator/common/mount/Mounter.java
|
||||||
|
+++ b/src/main/java/org/cryptomator/common/mount/Mounter.java
|
||||||
|
@@ -160,7 +160,7 @@ public MountHandle mount(VaultSettings vaultSettings, Path cryptoFsRoot) throws
|
||||||
|
var mountService = mountProviders.stream().filter(s -> s.getClass().getName().equals(vaultSettings.mountService.getValue())).findFirst().orElse(defaultMountService.getValue());
|
||||||
|
|
||||||
|
if (isConflictingMountService(mountService)) {
|
||||||
|
- var msg = STR."\{mountService.getClass()} unavailable due to conflict with either of \{CONFLICTING_MOUNT_SERVICES.get(mountService.getClass().getName())}";
|
||||||
|
+ var msg = mountService.getClass() + " unavailable due to conflict with either of " + CONFLICTING_MOUNT_SERVICES.get(mountService.getClass().getName());
|
||||||
|
throw new ConflictingMountServiceException(msg);
|
||||||
|
}
|
||||||
|
|
||||||
|
diff --git a/src/main/java/org/cryptomator/ui/keyloading/hub/HubConfig.java b/src/main/java/org/cryptomator/ui/keyloading/hub/HubConfig.java
|
||||||
|
index eefad55a2f..0e7a6cc3ab 100644
|
||||||
|
--- a/src/main/java/org/cryptomator/ui/keyloading/hub/HubConfig.java
|
||||||
|
+++ b/src/main/java/org/cryptomator/ui/keyloading/hub/HubConfig.java
|
||||||
|
@@ -20,7 +20,7 @@ public class HubConfig {
|
||||||
|
public String devicesResourceUrl;
|
||||||
|
|
||||||
|
/**
|
||||||
|
- * A collection of String template processors to construct URIs related to this Hub instance.
|
||||||
|
+ * A collection of functions to construct URIs related to this Hub instance.
|
||||||
|
*/
|
||||||
|
@JsonIgnore
|
||||||
|
public final URIProcessors URIs = new URIProcessors();
|
||||||
|
@@ -52,8 +52,7 @@ public class URIProcessors {
|
||||||
|
/**
|
||||||
|
* Resolves paths relative to the <code>/api/</code> endpoint of this Hub instance.
|
||||||
|
*/
|
||||||
|
- public final StringTemplate.Processor<URI, RuntimeException> API = template -> {
|
||||||
|
- var path = template.interpolate();
|
||||||
|
+ public URI getApi(String path) {
|
||||||
|
var relPath = path.startsWith("/") ? path.substring(1) : path;
|
||||||
|
return getApiBaseUrl().resolve(relPath);
|
||||||
|
};
|
||||||
|
diff --git a/src/main/java/org/cryptomator/ui/keyloading/hub/ReceiveKeyController.java b/src/main/java/org/cryptomator/ui/keyloading/hub/ReceiveKeyController.java
|
||||||
|
index 3bfb4ec8ea..3353d78dd6 100644
|
||||||
|
--- a/src/main/java/org/cryptomator/ui/keyloading/hub/ReceiveKeyController.java
|
||||||
|
+++ b/src/main/java/org/cryptomator/ui/keyloading/hub/ReceiveKeyController.java
|
||||||
|
@@ -88,7 +88,7 @@ public void receiveKey() {
|
||||||
|
* STEP 0 (Request): GET /api/config
|
||||||
|
*/
|
||||||
|
private void requestApiConfig() {
|
||||||
|
- var configUri = hubConfig.URIs.API."config";
|
||||||
|
+ var configUri = hubConfig.URIs.getApi("config");
|
||||||
|
var request = HttpRequest.newBuilder(configUri) //
|
||||||
|
.GET() //
|
||||||
|
.timeout(REQ_TIMEOUT) //
|
||||||
|
@@ -122,7 +122,7 @@ private void receivedApiConfig(HttpResponse<String> response) {
|
||||||
|
* STEP 1 (Request): GET user key for this device
|
||||||
|
*/
|
||||||
|
private void requestDeviceData() {
|
||||||
|
- var deviceUri = hubConfig.URIs.API."devices/\{deviceId}";
|
||||||
|
+ var deviceUri = hubConfig.URIs.getApi("devices/" + deviceId);
|
||||||
|
var request = HttpRequest.newBuilder(deviceUri) //
|
||||||
|
.header("Authorization", "Bearer " + bearerToken) //
|
||||||
|
.GET() //
|
||||||
|
@@ -162,7 +162,7 @@ private void needsDeviceRegistration() {
|
||||||
|
* STEP 2 (Request): GET vault key for this user
|
||||||
|
*/
|
||||||
|
private void requestVaultMasterkey(String encryptedUserKey) {
|
||||||
|
- var vaultKeyUri = hubConfig.URIs.API."vaults/\{vaultId}/access-token";
|
||||||
|
+ var vaultKeyUri = hubConfig.URIs.getApi("vaults/" + vaultId + "/access-token");
|
||||||
|
var request = HttpRequest.newBuilder(vaultKeyUri) //
|
||||||
|
.header("Authorization", "Bearer " + bearerToken) //
|
||||||
|
.GET() //
|
||||||
|
@@ -205,7 +205,7 @@ private void receivedBothEncryptedKeys(String encryptedVaultKey, String encrypte
|
||||||
|
*/
|
||||||
|
@Deprecated
|
||||||
|
private void requestLegacyAccessToken() {
|
||||||
|
- var legacyAccessTokenUri = hubConfig.URIs.API."vaults/\{vaultId}/keys/\{deviceId}";
|
||||||
|
+ var legacyAccessTokenUri = hubConfig.URIs.getApi("vaults/" + vaultId + "/keys/" + deviceId);
|
||||||
|
var request = HttpRequest.newBuilder(legacyAccessTokenUri) //
|
||||||
|
.header("Authorization", "Bearer " + bearerToken) //
|
||||||
|
.GET() //
|
||||||
|
diff --git a/src/main/java/org/cryptomator/ui/keyloading/hub/RegisterDeviceController.java b/src/main/java/org/cryptomator/ui/keyloading/hub/RegisterDeviceController.java
|
||||||
|
index b00d49874e..d711ff86ef 100644
|
||||||
|
--- a/src/main/java/org/cryptomator/ui/keyloading/hub/RegisterDeviceController.java
|
||||||
|
+++ b/src/main/java/org/cryptomator/ui/keyloading/hub/RegisterDeviceController.java
|
||||||
|
@@ -115,7 +115,7 @@ public void register() {
|
||||||
|
workInProgress.set(true);
|
||||||
|
|
||||||
|
|
||||||
|
- var userReq = HttpRequest.newBuilder(hubConfig.URIs.API."users/me") //
|
||||||
|
+ var userReq = HttpRequest.newBuilder(hubConfig.URIs.getApi("users/me")) //
|
||||||
|
.GET() //
|
||||||
|
.timeout(REQ_TIMEOUT) //
|
||||||
|
.header("Authorization", "Bearer " + bearerToken) //
|
||||||
|
@@ -143,7 +143,7 @@ public void register() {
|
||||||
|
var now = Instant.now().toString();
|
||||||
|
var dto = new CreateDeviceDto(deviceId, deviceNameField.getText(), BaseEncoding.base64().encode(deviceKeyPair.getPublic().getEncoded()), "DESKTOP", jwe.serialize(), now);
|
||||||
|
var json = toJson(dto);
|
||||||
|
- var deviceUri = hubConfig.URIs.API."devices/\{deviceId}";
|
||||||
|
+ var deviceUri = hubConfig.URIs.getApi("devices/" + deviceId);
|
||||||
|
var putDeviceReq = HttpRequest.newBuilder(deviceUri) //
|
||||||
|
.PUT(HttpRequest.BodyPublishers.ofString(json, StandardCharsets.UTF_8)) //
|
||||||
|
.timeout(REQ_TIMEOUT) //
|
||||||
|
@@ -164,7 +164,7 @@ public void register() {
|
||||||
|
private void migrateLegacyDevices(ECPublicKey userPublicKey) {
|
||||||
|
try {
|
||||||
|
// GET legacy access tokens
|
||||||
|
- var getUri = hubConfig.URIs.API."devices/\{deviceId}/legacy-access-tokens";
|
||||||
|
+ var getUri = hubConfig.URIs.getApi("devices/" + deviceId + "/legacy-access-tokens");
|
||||||
|
var getReq = HttpRequest.newBuilder(getUri).GET().timeout(REQ_TIMEOUT).header("Authorization", "Bearer " + bearerToken).build();
|
||||||
|
var getRes = httpClient.send(getReq, HttpResponse.BodyHandlers.ofString(StandardCharsets.UTF_8));
|
||||||
|
if (getRes.statusCode() != 200) {
|
||||||
|
@@ -185,12 +185,12 @@ private void migrateLegacyDevices(ECPublicKey userPublicKey) {
|
||||||
|
LOG.warn("Failed to decrypt legacy access token for vault {}. Skipping migration.", entry.getKey());
|
||||||
|
}
|
||||||
|
}).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
|
||||||
|
- var postUri = hubConfig.URIs.API."users/me/access-tokens";
|
||||||
|
+ var postUri = hubConfig.URIs.getApi("users/me/access-tokens");
|
||||||
|
var postBody = JSON.writer().writeValueAsString(newAccessTokens);
|
||||||
|
var postReq = HttpRequest.newBuilder(postUri).POST(HttpRequest.BodyPublishers.ofString(postBody)).timeout(REQ_TIMEOUT).header("Authorization", "Bearer " + bearerToken).build();
|
||||||
|
var postRes = httpClient.send(postReq, HttpResponse.BodyHandlers.ofString(StandardCharsets.UTF_8));
|
||||||
|
if (postRes.statusCode() != 200) {
|
||||||
|
- throw new IOException(STR."Unexpected response from POST \{postUri}: \{postRes.statusCode()}");
|
||||||
|
+ throw new IOException("Unexpected response from POST " + postUri + ": " + postRes.statusCode());
|
||||||
|
}
|
||||||
|
} catch (IOException e) {
|
||||||
|
// log and ignore: this is merely a best-effort attempt of migrating legacy devices. Failure is uncritical as this is merely a convenience feature.
|
||||||
|
diff --git a/pom.xml b/pom.xml
|
||||||
|
index 3290b3121d..0812419af1 100644
|
||||||
|
--- a/pom.xml
|
||||||
|
+++ b/pom.xml
|
||||||
|
@@ -26,7 +26,7 @@
|
||||||
|
|
||||||
|
<properties>
|
||||||
|
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
||||||
|
- <project.jdk.version>22</project.jdk.version>
|
||||||
|
+ <project.jdk.version>23</project.jdk.version>
|
||||||
|
|
||||||
|
<!-- Group IDs of jars that need to stay on the class path for now -->
|
||||||
|
<!-- remove them, as soon they got modularized or support is dropped (i.e., WebDAV) -->
|
@ -11,14 +11,14 @@

stdenv.mkDerivation (finalAttrs: {
  pname = "deltatouch";
-  version = "1.6.0";
+  version = "1.8.0";

  src = fetchFromGitea {
    domain = "codeberg.org";
    owner = "lk108";
    repo = "deltatouch";
    rev = "v${finalAttrs.version}";
-    hash = "sha256-mOs5WlWOkH9A+BZK6hvKq/JKS4k8tzvvov4CYFHyMfA=";
+    hash = "sha256-HVq6eiy2ufFg96oXwnSpv7fMDL1haWyIelBzFd7pfk0=";
    fetchSubmodules = true;
  };

@ -3,25 +3,25 @@
|
|||||||
"alpha": {
|
"alpha": {
|
||||||
"experimental": {
|
"experimental": {
|
||||||
"candidateHashFilenames": [
|
"candidateHashFilenames": [
|
||||||
"factorio_linux_2.0.16.tar.xz"
|
"factorio_linux_2.0.20.tar.xz"
|
||||||
],
|
],
|
||||||
"name": "factorio_alpha_x64-2.0.16.tar.xz",
|
"name": "factorio_alpha_x64-2.0.20.tar.xz",
|
||||||
"needsAuth": true,
|
"needsAuth": true,
|
||||||
"sha256": "9828ae257a3b2f95de2dae2f262e9d8d6b85f356911449166ceef1472d231e6d",
|
"sha256": "999247294680f67b29ea4758014e8337069dccc19f8f3808a99f45d8213972b0",
|
||||||
"tarDirectory": "x64",
|
"tarDirectory": "x64",
|
||||||
"url": "https://factorio.com/get-download/2.0.16/alpha/linux64",
|
"url": "https://factorio.com/get-download/2.0.20/alpha/linux64",
|
||||||
"version": "2.0.16"
|
"version": "2.0.20"
|
||||||
},
|
},
|
||||||
"stable": {
|
"stable": {
|
||||||
"candidateHashFilenames": [
|
"candidateHashFilenames": [
|
||||||
"factorio_linux_2.0.15.tar.xz"
|
"factorio_linux_2.0.20.tar.xz"
|
||||||
],
|
],
|
||||||
"name": "factorio_alpha_x64-2.0.15.tar.xz",
|
"name": "factorio_alpha_x64-2.0.20.tar.xz",
|
||||||
"needsAuth": true,
|
"needsAuth": true,
|
||||||
"sha256": "39df353fe8f14394d2618b9627659f3c0c16922362708e681f006083a8f5163e",
|
"sha256": "999247294680f67b29ea4758014e8337069dccc19f8f3808a99f45d8213972b0",
|
||||||
"tarDirectory": "x64",
|
"tarDirectory": "x64",
|
||||||
"url": "https://factorio.com/get-download/2.0.15/alpha/linux64",
|
"url": "https://factorio.com/get-download/2.0.20/alpha/linux64",
|
||||||
"version": "2.0.15"
|
"version": "2.0.20"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"demo": {
|
"demo": {
|
||||||
@ -51,51 +51,51 @@
|
|||||||
"expansion": {
|
"expansion": {
|
||||||
"experimental": {
|
"experimental": {
|
||||||
"candidateHashFilenames": [
|
"candidateHashFilenames": [
|
||||||
"factorio-space-age_linux_2.0.16.tar.xz"
|
"factorio-space-age_linux_2.0.20.tar.xz"
|
||||||
],
|
],
|
||||||
"name": "factorio_expansion_x64-2.0.16.tar.xz",
|
"name": "factorio_expansion_x64-2.0.20.tar.xz",
|
||||||
"needsAuth": true,
|
"needsAuth": true,
|
||||||
"sha256": "32ae1b8f525148b3bb1f68e41b398543c2b0da29734f9f3b4f9509a86c64ecf4",
|
"sha256": "cbc6e70985295b078fec8b9ce759fbf8a68ac157fcc7bbead934a9c3108d997f",
|
||||||
"tarDirectory": "x64",
|
"tarDirectory": "x64",
|
||||||
"url": "https://factorio.com/get-download/2.0.16/expansion/linux64",
|
"url": "https://factorio.com/get-download/2.0.20/expansion/linux64",
|
||||||
"version": "2.0.16"
|
"version": "2.0.20"
|
||||||
},
|
},
|
||||||
"stable": {
|
"stable": {
|
||||||
"candidateHashFilenames": [
|
"candidateHashFilenames": [
|
||||||
"factorio-space-age_linux_2.0.15.tar.xz"
|
"factorio-space-age_linux_2.0.20.tar.xz"
|
||||||
],
|
],
|
||||||
"name": "factorio_expansion_x64-2.0.15.tar.xz",
|
"name": "factorio_expansion_x64-2.0.20.tar.xz",
|
||||||
"needsAuth": true,
|
"needsAuth": true,
|
||||||
"sha256": "09de33402fee3dbae9d0207409f05a19e2fa8019b53d3de96557d2ec904e10f3",
|
"sha256": "cbc6e70985295b078fec8b9ce759fbf8a68ac157fcc7bbead934a9c3108d997f",
|
||||||
"tarDirectory": "x64",
|
"tarDirectory": "x64",
|
||||||
"url": "https://factorio.com/get-download/2.0.15/expansion/linux64",
|
"url": "https://factorio.com/get-download/2.0.20/expansion/linux64",
|
||||||
"version": "2.0.15"
|
"version": "2.0.20"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"headless": {
|
"headless": {
|
||||||
"experimental": {
|
"experimental": {
|
||||||
"candidateHashFilenames": [
|
"candidateHashFilenames": [
|
||||||
"factorio-headless_linux_2.0.16.tar.xz",
|
"factorio-headless_linux_2.0.20.tar.xz",
|
||||||
"factorio_headless_x64_2.0.16.tar.xz"
|
"factorio_headless_x64_2.0.20.tar.xz"
|
||||||
],
|
],
|
||||||
"name": "factorio_headless_x64-2.0.16.tar.xz",
|
"name": "factorio_headless_x64-2.0.20.tar.xz",
|
||||||
"needsAuth": false,
|
"needsAuth": false,
|
||||||
"sha256": "f2069b4b746500d945eeb67ef7eda5e7aebe7fd0294c2af4e117af22a3bbaea3",
|
"sha256": "c4a901f2f1dbedbb41654560db4c6fab683a30c20334e805d4ef740c0416515a",
|
||||||
"tarDirectory": "x64",
|
"tarDirectory": "x64",
|
||||||
"url": "https://factorio.com/get-download/2.0.16/headless/linux64",
|
"url": "https://factorio.com/get-download/2.0.20/headless/linux64",
|
||||||
"version": "2.0.16"
|
"version": "2.0.20"
|
||||||
},
|
},
|
||||||
"stable": {
|
"stable": {
|
||||||
"candidateHashFilenames": [
|
"candidateHashFilenames": [
|
||||||
"factorio-headless_linux_2.0.15.tar.xz",
|
"factorio-headless_linux_2.0.20.tar.xz",
|
||||||
"factorio_headless_x64_2.0.15.tar.xz"
|
"factorio_headless_x64_2.0.20.tar.xz"
|
||||||
],
|
],
|
||||||
"name": "factorio_headless_x64-2.0.15.tar.xz",
|
"name": "factorio_headless_x64-2.0.20.tar.xz",
|
||||||
"needsAuth": false,
|
"needsAuth": false,
|
||||||
"sha256": "70b441cb807811a60586c01107248c1d8d7ae043bd1f23675fc924fbaaa538d8",
|
"sha256": "c4a901f2f1dbedbb41654560db4c6fab683a30c20334e805d4ef740c0416515a",
|
||||||
"tarDirectory": "x64",
|
"tarDirectory": "x64",
|
||||||
"url": "https://factorio.com/get-download/2.0.15/headless/linux64",
|
"url": "https://factorio.com/get-download/2.0.20/headless/linux64",
|
||||||
"version": "2.0.15"
|
"version": "2.0.20"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -63,13 +63,13 @@ let
in
freecad-utils.makeCustomizable (stdenv.mkDerivation (finalAttrs: {
  pname = "freecad";
-  version = "1.0rc4";
+  version = "1.0.0";

  src = fetchFromGitHub {
    owner = "FreeCAD";
    repo = "FreeCAD";
    rev = finalAttrs.version;
-    hash = "sha256-b7aeVQkgdsDRdnVIr+5ZNuWAm6GLH7sepa8kFp2Zm2U=";
+    hash = "sha256-u7RYSImUMAgKaAQSAGCFha++RufpZ/QuHAirbSFOUCI=";
    fetchSubmodules = true;
  };

@ -11,13 +11,13 @@

buildGoModule rec {
  pname = "fzf";
-  version = "0.56.2";
+  version = "0.56.3";

  src = fetchFromGitHub {
    owner = "junegunn";
    repo = "fzf";
    rev = "v${version}";
-    hash = "sha256-POSploJmwMA3ZTYtQh3k45gaumG1dsuyyGn4CRO4qfY=";
+    hash = "sha256-m/RtAjqB6YTwmzCUdKQx/e7vxqJOu1Y1N0u28i8gwEs=";
  };

  vendorHash = "sha256-4VAAka9FvKuoDZ1E1v9Es3r00GZeG8Jp4pJONYpB/t8=";
@ -18,25 +18,25 @@

stdenv.mkDerivation (finalAttrs: {
  pname = "gale";
-  version = "0.8.11";
+  version = "1.1.4";

  src = fetchFromGitHub {
    owner = "Kesomannen";
    repo = "gale";
    rev = "refs/tags/${finalAttrs.version}";
-    hash = "sha256-PXK64WD3vb3uVxBFNU+LiGOipUjIAKW9RLWr1o4RigU=";
+    hash = "sha256-yAfQuLfucz522ln0YNMy8nppp2jk6tGJnP/WhK7JdhI=";
  };

  npmDeps = fetchNpmDeps {
    name = "${finalAttrs.pname}-${finalAttrs.version}-npm-deps";
    inherit (finalAttrs) src;
-    hash = "sha256-W0ryt3WH/3SireaOHa9i1vKpuokzIsDlD8R9Fnd0s4k=";
+    hash = "sha256-xKg/ABUdtylFpT3EisXVvyv38++KjucrZ+s3/fFjzmM=";
  };

  cargoDeps = rustPlatform.fetchCargoTarball {
    inherit (finalAttrs) pname version src;
    sourceRoot = "${finalAttrs.src.name}/${finalAttrs.cargoRoot}";
-    hash = "sha256-zXZkjSYN6/qNwBh+xUgJPWQvduIUSMVSt/XGbocKTwg=";
+    hash = "sha256-u7UbC9TyEQwYpcVWt8/NsweDNWbQi6NuD9ay9gmMDjg=";
  };

  cargoRoot = "src-tauri";
@ -46,6 +46,8 @@ stdenv.mkDerivation (finalAttrs: {
    runHook postInstall
  '';

+  passthru.updateScript = ./update.sh;

  meta = {
    changelog = "https://github.com/github/gh-copilot/releases/tag/v${finalAttrs.version}";
    description = "Ask for assistance right in your terminal";
43
pkgs/by-name/gh/gh-copilot/update.sh
Executable file
@ -0,0 +1,43 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p curl jq common-updater-scripts nix-prefetch

set -euo pipefail
set -x

ROOT="$(dirname "$(readlink -f "$0")")"
NIX_DRV="$ROOT/package.nix"
if [ ! -f "$NIX_DRV" ]; then
  echo "ERROR: cannot find gh-copilot in $ROOT"
  exit 1
fi

fetch_arch() {
  VER="$1"; ARCH="$2"
  URL="https://github.com/github/gh-copilot/releases/download/v${VER}/${ARCH}";
  nix-prefetch "{ stdenv, fetchzip }:
  stdenv.mkDerivation rec {
    pname = \"vere\"; version = \"${VER}\";
    src = fetchurl { url = \"$URL\"; };
  }
  "
}

replace_sha() {
  # https://stackoverflow.com/a/38470458/22235705
  sed -rziE "s@($1[^\n]*\n[^\n]*hash = )\"sha256-.{44}\";@\1\"$2\";@" "$NIX_DRV"
}

VERE_VER=$(curl https://api.github.com/repos/github/gh-copilot/releases/latest | jq .tag_name)
VERE_VER=$(echo $VERE_VER | sed -e 's/^"v//' -e 's/"$//') # transform "v1.0.2" into 1.0.2

VERE_LINUX_X64_SHA256=$(fetch_arch "$VERE_VER" "linux-amd64")
VERE_LINUX_AARCH64_SHA256=$(fetch_arch "$VERE_VER" "linux-arm64")
VERE_DARWIN_X64_SHA256=$(fetch_arch "$VERE_VER" "darwin-amd64")
VERE_DARWIN_AARCH64_SHA256=$(fetch_arch "$VERE_VER" "darwin-arm64")

sed -i "s/version = \".*\"/version = \"$VERE_VER\"/" "$NIX_DRV"

replace_sha "linux-amd64" "$VERE_LINUX_X64_SHA256"
replace_sha "linux-arm64" "$VERE_LINUX_AARCH64_SHA256"
replace_sha "darwin-amd64" "$VERE_DARWIN_X64_SHA256"
replace_sha "darwin-arm64" "$VERE_DARWIN_AARCH64_SHA256"
@ -173,5 +173,6 @@ rustPlatform.buildRustPackage rec {
      techknowlogick
    ];
    platforms = lib.platforms.linux ++ lib.platforms.darwin;
+    broken = true; # build fails on darwin and linux
  };
}
@ -45,12 +45,12 @@ stdenv.mkDerivation (finalAttrs: {
    docbook_xml_dtd_43
    vala
    wayland-scanner
-    wayland-protocols
  ];

  buildInputs = [
-    wayland
    gtk4
+    wayland
+    wayland-protocols
  ];

  mesonFlags = [
@ -6,49 +6,48 @@
|
|||||||
testers,
|
testers,
|
||||||
nix-update-script,
|
nix-update-script,
|
||||||
versionCheckHook,
|
versionCheckHook,
|
||||||
|
glibcLocales,
|
||||||
withPostgresAdapter ? true,
|
withPostgresAdapter ? true,
|
||||||
withBigQueryAdapter ? true,
|
withBigQueryAdapter ? true,
|
||||||
}:
|
}:
|
||||||
python3Packages.buildPythonApplication rec {
|
python3Packages.buildPythonApplication rec {
|
||||||
pname = "harlequin";
|
pname = "harlequin";
|
||||||
version = "1.25.0";
|
version = "1.25.2";
|
||||||
pyproject = true;
|
pyproject = true;
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = fetchFromGitHub {
|
||||||
owner = "tconbeer";
|
owner = "tconbeer";
|
||||||
repo = "harlequin";
|
repo = "harlequin";
|
||||||
rev = "refs/tags/v${version}";
|
rev = "refs/tags/v${version}";
|
||||||
hash = "sha256-iRl91GqYigD6t0aVVShBg835yhlPxgfZcQCdAGUoc1k=";
|
hash = "sha256-ov9pMvFzJAMfOM7JeSgnp6dZ424GiRaH7W5OCKin9Jk=";
|
||||||
};
|
};
|
||||||
|
|
||||||
build-system = with python3Packages; [
|
pythonRelaxDeps = [ "textual" ];
|
||||||
poetry-core
|
|
||||||
];
|
build-system = with python3Packages; [ poetry-core ];
|
||||||
|
|
||||||
|
nativeBuildInputs = [ glibcLocales ];
|
||||||
|
|
||||||
dependencies =
|
dependencies =
|
||||||
with python3Packages;
|
with python3Packages;
|
||||||
[
|
[
|
||||||
|
click
|
||||||
|
duckdb
|
||||||
|
importlib-metadata
|
||||||
|
numpy
|
||||||
|
packaging
|
||||||
|
platformdirs
|
||||||
|
questionary
|
||||||
|
rich-click
|
||||||
|
sqlfmt
|
||||||
textual
|
textual
|
||||||
textual-fastdatatable
|
textual-fastdatatable
|
||||||
textual-textarea
|
textual-textarea
|
||||||
click
|
|
||||||
rich-click
|
|
||||||
duckdb
|
|
||||||
sqlfmt
|
|
||||||
platformdirs
|
|
||||||
importlib-metadata
|
|
||||||
tomlkit
|
tomlkit
|
||||||
questionary
|
|
||||||
numpy
|
|
||||||
packaging
|
|
||||||
]
|
]
|
||||||
++ lib.optionals withPostgresAdapter [ harlequin-postgres ]
|
++ lib.optionals withPostgresAdapter [ harlequin-postgres ]
|
||||||
++ lib.optionals withBigQueryAdapter [ harlequin-bigquery ];
|
++ lib.optionals withBigQueryAdapter [ harlequin-bigquery ];
|
||||||
|
|
||||||
pythonRelaxDeps = [
|
|
||||||
"textual"
|
|
||||||
];
|
|
||||||
|
|
||||||
pythonImportsCheck = [
|
pythonImportsCheck = [
|
||||||
"harlequin"
|
"harlequin"
|
||||||
"harlequin_duckdb"
|
"harlequin_duckdb"
|
||||||
@ -60,17 +59,37 @@ python3Packages.buildPythonApplication rec {
|
|||||||
updateScript = nix-update-script { };
|
updateScript = nix-update-script { };
|
||||||
};
|
};
|
||||||
|
|
||||||
nativeCheckInputs = [
|
preCheck = ''
|
||||||
|
export HOME=$(mktemp -d)
|
||||||
|
'';
|
||||||
|
|
||||||
|
nativeCheckInputs =
|
||||||
|
[
|
||||||
versionCheckHook
|
versionCheckHook
|
||||||
|
]
|
||||||
|
++ (with python3Packages; [
|
||||||
|
pytest-asyncio
|
||||||
|
pytestCheckHook
|
||||||
|
]);
|
||||||
|
|
||||||
|
disabledTests = [
|
||||||
|
# Tests require network access
|
||||||
|
"test_connect_extensions"
|
||||||
|
"test_connect_prql"
|
||||||
|
];
|
||||||
|
|
||||||
|
disabledTestPaths = [
|
||||||
|
# Tests requires more setup
|
||||||
|
"tests/functional_tests/"
|
||||||
];
|
];
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
description = "The SQL IDE for Your Terminal";
|
description = "The SQL IDE for Your Terminal";
|
||||||
homepage = "https://harlequin.sh";
|
homepage = "https://harlequin.sh";
|
||||||
mainProgram = "harlequin";
|
changelog = "https://github.com/tconbeer/harlequin/releases/tag/v${version}";
|
||||||
license = lib.licenses.mit;
|
license = lib.licenses.mit;
|
||||||
|
mainProgram = "harlequin";
|
||||||
maintainers = with lib.maintainers; [ pcboy ];
|
maintainers = with lib.maintainers; [ pcboy ];
|
||||||
platforms = lib.platforms.unix;
|
platforms = lib.platforms.unix;
|
||||||
changelog = "https://github.com/tconbeer/harlequin/releases/tag/v${version}";
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
15
pkgs/by-name/ib/ibm-plex/hashes.nix
Normal file
@@ -0,0 +1,15 @@
+{
+"serif" = "sha256-8ygaAeMKygYS4GCub4YUQmkh87pYHfi3s0PQ6AbaeGw=";
+"sans" = "sha256-mK+8GGl2ugF2+fS6yd3p5NWPHHcKEJWiShDS3lihOlI=";
+"sans-condensed" = "sha256-/aJTXmrHuoPSrtCKNodBY7I0CriayNTx5LCXw+/MFvE=";
+"sans-arabic" = "sha256-qi4k7kMLftIuq87idyxq46FOD6QYycdG6j3zJmu8mhI=";
+"sans-devanagari" = "sha256-K57OVqcH5r9tZx8NFEoz1P3xpUooqpF7xxJzNmnObwE=";
+"sans-thai" = "sha256-JZVbvZdj+IfBthiqivOXHrvAUe392M9ECGsiJkm0saE=";
+"sans-thai-looped" = "sha256-cry/Ctwt1bsrdbvWkJIYWLjsvV6a1lLFsT85znqERnw=";
+"sans-tc" = "sha256-kZvzSK6fEjfVMR4kxC4lxtD7GskqvJZx8BBJVT4T9MU=";
+"sans-kr" = "sha256-FsHxMvLlI4yylgG96DOZIdW2DYpk7I+c5QgkVIkNZIE=";
+"sans-jp" = "sha256-hUl/SSkN6q3pDTtrY2mJepw3ljhhLJskGbxfsTl9TuI=";
+"sans-hebrew" = "sha256-rTuBQYLI+6gEFTumCdaWpeoLzRoyFSmqWbXziq+UG6I=";
+"mono" = "sha256-OwUmrPfEehLDz0fl2ChYLK8FQM2p0G1+EMrGsYEq+6g=";
+"math" = "sha256-dJA6uqxa/yb3eLY4l39NeP0yIl2NfrbaRpf6h0/F7Xc=";
+}
@@ -1,28 +1,51 @@
-{ lib, stdenvNoCC, fetchzip }:
+{
+lib,
+stdenvNoCC,
+fetchzip,
+families ? [ ],
+}:
+let
+version = import ./version.nix;
+availableFamilies = import ./hashes.nix;
 
-stdenvNoCC.mkDerivation rec {
+availableFamilyNames = builtins.attrNames availableFamilies;
+selectedFamilies = if (families == [ ]) then availableFamilyNames else families;
 
+unknownFamilies = lib.subtractLists availableFamilyNames families;
+in
+assert lib.assertMsg (unknownFamilies == [ ]) "Unknown font(s): ${toString unknownFamilies}";
+stdenvNoCC.mkDerivation {
 pname = "ibm-plex";
-version = "6.4.0";
+inherit version;
 
-src = fetchzip {
-url = "https://github.com/IBM/plex/releases/download/v${version}/OpenType.zip";
-hash = "sha256-/aR3bu03VxenSPed6EqrGoPjWCcKT//MVtb9OC8tSRs=";
-};
+srcs = builtins.map (
+family:
+fetchzip {
+url = "https://github.com/IBM/plex/releases/download/%40ibm%2Fplex-${family}%40${version}/ibm-plex-${family}.zip";
+hash = availableFamilies.${family};
+}
+) selectedFamilies;
 
+dontUnpack = true;
+sourceRoot = ".";
 
 installPhase = ''
 runHook preInstall
+find $srcs -type f -name '*.otf' -exec install -Dm644 {} -t $out/share/fonts/opentype \;
-install -Dm644 */*.otf IBM-Plex-Sans-JP/unhinted/* -t $out/share/fonts/opentype
 
 runHook postInstall
 '';
 
+passthru.updateScript = ./update.sh;
 
 meta = with lib; {
 description = "IBM Plex Typeface";
 homepage = "https://www.ibm.com/plex/";
 changelog = "https://github.com/IBM/plex/raw/v${version}/CHANGELOG.md";
 license = licenses.ofl;
 platforms = platforms.all;
-maintainers = [ maintainers.romildo ];
+maintainers = with maintainers; [
+romildo
+ryanccn
+];
 };
 }
42
pkgs/by-name/ib/ibm-plex/update.sh
Executable file
@@ -0,0 +1,42 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p nix nix-prefetch jq
+# shellcheck shell=bash
+set -eo pipefail
+
+families=(
+"serif"
+"sans"
+"sans-condensed"
+"sans-arabic"
+"sans-devanagari"
+"sans-thai"
+"sans-thai-looped"
+"sans-tc"
+"sans-kr"
+"sans-jp"
+"sans-hebrew"
+"mono"
+"math"
+)
+
+version=$(curl --silent 'https://api.github.com/repos/IBM/plex/releases/latest' | jq -r '.tag_name | sub("^@ibm/[\\w-]+@"; "")')
+
+dirname="$(dirname "$0")"
+echo \""${version}"\" >"$dirname/version-new.nix"
+if diff -q "$dirname/version-new.nix" "$dirname/version.nix"; then
+echo No new version available, current: "$version"
+rm "$dirname/version-new.nix"
+exit 0
+else
+echo Updated to version "$version"
+mv "$dirname/version-new.nix" "$dirname/version.nix"
+fi
+
+printf '{\n' > "$dirname/hashes.nix"
+
+for family in "${families[@]}"; do
+url="https://github.com/IBM/plex/releases/download/%40ibm%2Fplex-${family}%40${version}/ibm-plex-${family}.zip"
+printf ' "%s" = "%s";\n' "$family" "$(nix-prefetch-url --unpack "$url" | xargs nix hash convert --hash-algo sha256)" >>"$dirname/hashes.nix"
+done
+
+printf '}\n' >> "$dirname/hashes.nix"
1
pkgs/by-name/ib/ibm-plex/version.nix
Normal file
@@ -0,0 +1 @@
+"1.1.0"
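The `families ? [ ]` argument introduced above selects which IBM Plex families to fetch, validated against hashes.nix. A minimal usage sketch, assuming the package is instantiated through callPackage so `.override` is available; the family names come from hashes.nix:

# Build only the mono and sans families. An empty list keeps every family,
# and an unknown name would trip the lib.assertMsg check added above.
let
  pkgs = import <nixpkgs> { };
in
pkgs.ibm-plex.override {
  families = [ "mono" "sans" ];
}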
@ -3,6 +3,7 @@
|
|||||||
stdenv,
|
stdenv,
|
||||||
fetchFromGitHub,
|
fetchFromGitHub,
|
||||||
rustPlatform,
|
rustPlatform,
|
||||||
|
nix-update-script,
|
||||||
cargo,
|
cargo,
|
||||||
rustc,
|
rustc,
|
||||||
meson,
|
meson,
|
||||||
@ -18,18 +19,18 @@
|
|||||||
|
|
||||||
stdenv.mkDerivation (finalAttrs: {
|
stdenv.mkDerivation (finalAttrs: {
|
||||||
pname = "keypunch";
|
pname = "keypunch";
|
||||||
version = "3.1";
|
version = "4.0";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = fetchFromGitHub {
|
||||||
owner = "bragefuglseth";
|
owner = "bragefuglseth";
|
||||||
repo = "keypunch";
|
repo = "keypunch";
|
||||||
rev = "refs/tags/v${finalAttrs.version}";
|
rev = "refs/tags/v${finalAttrs.version}";
|
||||||
hash = "sha256-2S5S7SvMYdEOOrF3SiwpbijsgHcSIyWEVJB41jbrn1A=";
|
hash = "sha256-Xd4fzreComOUnoJ6l2ncMWn6DlUeRCM+YwApilhFd/8=";
|
||||||
};
|
};
|
||||||
|
|
||||||
cargoDeps = rustPlatform.fetchCargoTarball {
|
cargoDeps = rustPlatform.fetchCargoTarball {
|
||||||
inherit (finalAttrs) pname version src;
|
inherit (finalAttrs) pname version src;
|
||||||
hash = "sha256-sD+wy1D6nl333PxlDz73YtnfBEmDzb+kNZkZI8JbfSg=";
|
hash = "sha256-agFOxSZBi8f0zEPd+ha5c3IAbSH2jHfUx2iNeHFs9jI=";
|
||||||
};
|
};
|
||||||
|
|
||||||
strictDeps = true;
|
strictDeps = true;
|
||||||
@ -53,12 +54,19 @@ stdenv.mkDerivation (finalAttrs: {
|
|||||||
|
|
||||||
buildInputs = [ libadwaita ];
|
buildInputs = [ libadwaita ];
|
||||||
|
|
||||||
|
passthru = {
|
||||||
|
updateScript = nix-update-script { };
|
||||||
|
};
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
description = "Practice your typing skills";
|
description = "Practice your typing skills";
|
||||||
homepage = "https://github.com/bragefuglseth/keypunch";
|
homepage = "https://github.com/bragefuglseth/keypunch";
|
||||||
license = lib.licenses.gpl3Plus;
|
license = lib.licenses.gpl3Plus;
|
||||||
mainProgram = "keypunch";
|
mainProgram = "keypunch";
|
||||||
maintainers = with lib.maintainers; [ tomasajt ];
|
maintainers = with lib.maintainers; [
|
||||||
|
tomasajt
|
||||||
|
getchoo
|
||||||
|
];
|
||||||
platforms = lib.platforms.linux;
|
platforms = lib.platforms.linux;
|
||||||
};
|
};
|
||||||
})
|
})
|
||||||
@@ -24,7 +24,7 @@
 
 rustPlatform.buildRustPackage rec {
 pname = "meli";
-version = "0.8.7";
+version = "0.8.8";
 
 src = fetchzip {
 urls = [
@@ -32,20 +32,21 @@ rustPlatform.buildRustPackage rec {
 "https://codeberg.org/meli/meli/archive/v${version}.tar.gz"
 "https://github.com/meli/meli/archive/refs/tags/v${version}.tar.gz"
 ];
-hash = "sha256-2+JIehi2wuWdARbhFPvNPIJ9ucZKWjNSORszEG9lyjw=";
+hash = "sha256-XOUOIlFKxI7eL7KEEfLyYTsNqc2lc9sJNt9RqPavuW8=";
 };
 
-cargoHash = "sha256-ZVhUkpiiPKbWcf56cXFgn3Nyr63STHLlD7mpYEetNIY=";
 
 cargoPatches = [
 (fetchpatch {
-# https://github.com/NixOS/nixpkgs/issues/332957#issuecomment-2278578811
-name = "fix-rust-1.80-compat.patch";
-url = "https://git.meli-email.org/meli/meli/commit/6b05279a0987315c401516cac8ff0b016a8e02a8.patch";
-hash = "sha256-mh8H7wmHMXAe01UTvdY8vJeeLyH6ZFwylNLFFL+4LO0=";
+# https://git.meli-email.org/meli/meli/issues/522
+# https://git.meli-email.org/meli/meli/issues/524
+name = "fix test_fd_locks() on platforms without OFD support";
+url = "https://git.meli-email.org/meli/meli/commit/b7e215f9c238f8364e2a1f0d10ac668d0cfe91ad.patch";
+hash = "sha256-227vnFuxhQ0Hh5A/J8y7Ei89AxbNXReMn3c3EVRN4Tc=";
 })
 ];
 
+cargoHash = "sha256-SMvpmWEHUWo0snR/DiUmfZJnXy1QtVOowO8CErM9Xjg=";
+
 # Needed to get openssl-sys to use pkg-config
 OPENSSL_NO_VENDOR=1;
 
@@ -81,9 +82,7 @@ rustPlatform.buildRustPackage rec {
 '';
 
 checkFlags = [
-"--skip=conf::tests::test_config_parse" # panicking due to sandbox
-"--skip=utils::tests::test_shellexpandtrait_impls" # panicking due to sandbox
-"--skip=utils::tests::test_shellexpandtrait" # panicking due to sandbox
+"--skip=test_cli_subcommands" # panicking due to sandbox
 ];
 
 meta = with lib; {
@@ -93,6 +92,6 @@ rustPlatform.buildRustPackage rec {
 homepage = "https://meli.delivery";
 license = licenses.gpl3;
 maintainers = with maintainers; [ _0x4A6F matthiasbeyer ];
-platforms = platforms.linux;
+platforms = platforms.linux ++ platforms.darwin;
 };
 }
@@ -8,13 +8,13 @@
 
 stdenv.mkDerivation (finalAttrs: {
 pname = "melodeon";
-version = "0.4.2";
+version = "0.4.3";
 
 src = fetchFromGitHub {
 owner = "CDrummond";
 repo = "melodeon";
 rev = "refs/tags/${finalAttrs.version}";
-hash = "sha256-CwJd77FAEcfCvxHeh3V1SIsgSam3S5WtpSLj9WrOOyI=";
+hash = "sha256-Og0o4Iy0mvGE7H5IY9h7uo7w64jZjXtdsGd4ApYO8oU=";
 fetchSubmodules = true;
 };
 
@ -1,37 +1,45 @@
|
|||||||
{
|
{
|
||||||
lib,
|
lib,
|
||||||
python3Packages,
|
|
||||||
fetchFromGitLab,
|
|
||||||
appstream,
|
appstream,
|
||||||
blueprint-compiler,
|
blueprint-compiler,
|
||||||
desktop-file-utils,
|
desktop-file-utils,
|
||||||
|
fetchFromGitLab,
|
||||||
|
glib,
|
||||||
gobject-introspection,
|
gobject-introspection,
|
||||||
|
gtk4,
|
||||||
libadwaita,
|
libadwaita,
|
||||||
meson,
|
meson,
|
||||||
ninja,
|
ninja,
|
||||||
|
nix-update-script,
|
||||||
pandoc,
|
pandoc,
|
||||||
pkg-config,
|
pkg-config,
|
||||||
|
python3Packages,
|
||||||
webkitgtk_6_0,
|
webkitgtk_6_0,
|
||||||
wrapGAppsHook4,
|
wrapGAppsHook4,
|
||||||
}:
|
}:
|
||||||
|
|
||||||
python3Packages.buildPythonApplication rec {
|
python3Packages.buildPythonApplication rec {
|
||||||
pname = "morphosis";
|
pname = "morphosis";
|
||||||
version = "1.3";
|
version = "1.4.1";
|
||||||
pyproject = false;
|
pyproject = false;
|
||||||
|
|
||||||
src = fetchFromGitLab {
|
src = fetchFromGitLab {
|
||||||
domain = "gitlab.gnome.org";
|
domain = "gitlab.gnome.org";
|
||||||
owner = "World";
|
owner = "World";
|
||||||
repo = "morphosis";
|
repo = "morphosis";
|
||||||
rev = "v${version}";
|
rev = "refs/tags/v${version}";
|
||||||
hash = "sha256-JEZFgON4QkxHDbWSZbDNLpIFctt8mDHdGVVu3Q+WH4U=";
|
hash = "sha256-ZpxenBqC5qr7yNwjld0u7gSBQfL7Kpa4FWE9gkzG0hg=";
|
||||||
};
|
};
|
||||||
|
|
||||||
|
strictDeps = true;
|
||||||
|
|
||||||
nativeBuildInputs = [
|
nativeBuildInputs = [
|
||||||
appstream
|
appstream
|
||||||
blueprint-compiler
|
blueprint-compiler
|
||||||
desktop-file-utils
|
desktop-file-utils
|
||||||
|
glib # For `glib-compile-schemas`
|
||||||
gobject-introspection
|
gobject-introspection
|
||||||
|
gtk4 # For `gtk-update-icon-cache`
|
||||||
meson
|
meson
|
||||||
ninja
|
ninja
|
||||||
pkg-config
|
pkg-config
|
||||||
@ -47,13 +55,17 @@ python3Packages.buildPythonApplication rec {
|
|||||||
|
|
||||||
dontWrapGApps = true;
|
dontWrapGApps = true;
|
||||||
makeWrapperArgs = [
|
makeWrapperArgs = [
|
||||||
''''${gappsWrapperArgs[@]}''
|
"\${gappsWrapperArgs[@]}"
|
||||||
"--prefix PATH : ${lib.makeBinPath [ pandoc ]}"
|
"--prefix PATH : ${lib.makeBinPath [ pandoc ]}"
|
||||||
];
|
];
|
||||||
|
|
||||||
|
passthru = {
|
||||||
|
updateScript = nix-update-script { };
|
||||||
|
};
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
description = "Convert your documents";
|
description = "Convert your documents";
|
||||||
homepage = "https://gitlab.gnome.org/Monster/morphosis";
|
homepage = "https://gitlab.gnome.org/World/morphosis";
|
||||||
license = lib.licenses.gpl3Only;
|
license = lib.licenses.gpl3Only;
|
||||||
maintainers = with lib.maintainers; [ getchoo ];
|
maintainers = with lib.maintainers; [ getchoo ];
|
||||||
mainProgram = "morphosis";
|
mainProgram = "morphosis";
|
||||||
@@ -2,13 +2,13 @@
 
 buildGoModule rec {
 pname = "plumber";
-version = "2.7.1";
+version = "2.8.0";
 
 src = fetchFromGitHub {
 owner = "streamdal";
 repo = pname;
 rev = "v${version}";
-hash = "sha256-L8vpaqt9yCIP3TLPSNUrOC6hXc71mzl4lqiaoNS6zls=";
+hash = "sha256-38tLlFeQtXIiHuQa9c/IfIYbyf+GrOsERAdWQnHSeck=";
 };
 
 vendorHash = null;
2291
pkgs/by-name/po/porn-vault/Cargo.lock
generated
Normal file
File diff suppressed because it is too large
109
pkgs/by-name/po/porn-vault/allow-use-of-systemd-temp-path.patch
Normal file
109
pkgs/by-name/po/porn-vault/allow-use-of-systemd-temp-path.patch
Normal file
@ -0,0 +1,109 @@
|
|||||||
|
diff --git a/server/binaries/ffmpeg.ts b/server/binaries/ffmpeg.ts
|
||||||
|
index abb4de4f..cdcc0a02 100644
|
||||||
|
--- a/server/binaries/ffmpeg.ts
|
||||||
|
+++ b/server/binaries/ffmpeg.ts
|
||||||
|
@@ -8,6 +8,7 @@ import { getConfig } from "../config";
|
||||||
|
import { mkdirpAsync, rimrafAsync, statAsync } from "../utils/fs/async";
|
||||||
|
import { formatMessage, handleError, logger } from "../utils/logger";
|
||||||
|
import { generateTimestampsAtIntervals } from "../utils/misc";
|
||||||
|
+import { tempPath } from "server/utils/path";
|
||||||
|
|
||||||
|
export async function takeScreenshot(
|
||||||
|
inPath: string,
|
||||||
|
@@ -112,7 +113,7 @@ export async function generatePreview(
|
||||||
|
): Promise<void> {
|
||||||
|
logger.debug(`Creating 100 small previews for ${sceneId}.`);
|
||||||
|
|
||||||
|
- const tmpFolder = resolve("tmp", "preview", sceneId);
|
||||||
|
+ const tmpFolder = resolve(tempPath, "preview", sceneId);
|
||||||
|
|
||||||
|
const timestamps = generateTimestampsAtIntervals(PREVIEW_COUNT, durationSecs * 1000, {
|
||||||
|
startPercentage: 2,
|
||||||
|
diff --git a/server/database/index.ts b/server/database/index.ts
|
||||||
|
index 80ff6432..c6feb11d 100755
|
||||||
|
--- a/server/database/index.ts
|
||||||
|
+++ b/server/database/index.ts
|
||||||
|
@@ -15,7 +15,7 @@ import Studio from "../types/studio";
|
||||||
|
import SceneView from "../types/watch";
|
||||||
|
import { mkdirpSync } from "../utils/fs/async";
|
||||||
|
import { logger } from "../utils/logger";
|
||||||
|
-import { libraryPath } from "../utils/path";
|
||||||
|
+import { libraryPath, tempPath } from "../utils/path";
|
||||||
|
import { Izzy } from "./internal";
|
||||||
|
|
||||||
|
export function formatCollectionName(name: string) {
|
||||||
|
@@ -261,11 +261,11 @@ export async function loadStore<T extends { _id: string }>(
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function loadStores(): Promise<void> {
|
||||||
|
- if (!existsSync("tmp")) {
|
||||||
|
- logger.info("Creating temporary directory 'tmp'");
|
||||||
|
- mkdirpSync("tmp/");
|
||||||
|
+ if (!existsSync(tempPath)) {
|
||||||
|
+ logger.info(`Creating temporary directory '${tempPath}'`);
|
||||||
|
+ mkdirpSync(tempPath);
|
||||||
|
} else {
|
||||||
|
- logger.debug("Temporary directory 'tmp' already exists");
|
||||||
|
+ logger.debug(`Temporary directory '${tempPath}' already exists`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const crossReferencePath = libraryPath("cross_references.db");
|
||||||
|
diff --git a/server/graphql/mutations/image.ts b/server/graphql/mutations/image.ts
|
||||||
|
index 6554f145..a7853eea 100644
|
||||||
|
--- a/server/graphql/mutations/image.ts
|
||||||
|
+++ b/server/graphql/mutations/image.ts
|
||||||
|
@@ -20,7 +20,7 @@ import Studio from "../../types/studio";
|
||||||
|
import { mapAsync } from "../../utils/async";
|
||||||
|
import { copyFileAsync, statAsync, unlinkAsync } from "../../utils/fs/async";
|
||||||
|
import { logger } from "../../utils/logger";
|
||||||
|
-import { getFolderPartition, libraryPath } from "../../utils/path";
|
||||||
|
+import { getFolderPartition, libraryPath, tempPath } from "../../utils/path";
|
||||||
|
import { getExtension, normalizeName } from "../../utils/string";
|
||||||
|
import { Dictionary, isBoolean, isNumber, isString } from "../../utils/types";
|
||||||
|
import { clearCaches } from "../datasources";
|
||||||
|
@@ -110,7 +110,7 @@ export default {
|
||||||
|
|
||||||
|
const image = new Image(imageName);
|
||||||
|
|
||||||
|
- const outPath = `tmp/${image._id}${ext}`;
|
||||||
|
+ const outPath = resolve(tempPath, `${image._id}${ext}`);
|
||||||
|
|
||||||
|
logger.debug(`Getting file...`);
|
||||||
|
|
||||||
|
diff --git a/server/routes/scene.ts b/server/routes/scene.ts
|
||||||
|
index 601de160..fe8b8de5 100644
|
||||||
|
--- a/server/routes/scene.ts
|
||||||
|
+++ b/server/routes/scene.ts
|
||||||
|
@@ -16,7 +16,7 @@ import Scene from "../types/scene";
|
||||||
|
import { mkdirpAsync, readFileAsync, rimrafAsync } from "../utils/fs/async";
|
||||||
|
import { handleError, logger } from "../utils/logger";
|
||||||
|
import { generateTimestampsAtIntervals } from "../utils/misc";
|
||||||
|
-import { getFolderPartition, libraryPath } from "../utils/path";
|
||||||
|
+import { getFolderPartition, libraryPath, tempPath } from "../utils/path";
|
||||||
|
import { IMAGE_CACHE_CONTROL } from "./media";
|
||||||
|
|
||||||
|
/* function streamTranscode(
|
||||||
|
@@ -94,7 +94,7 @@ export async function attachScenePreviewGrid(scene: Scene): Promise<string | nul
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
- const gridFolder = path.resolve("tmp", "grid");
|
||||||
|
+ const gridFolder = path.resolve(tempPath, "grid");
|
||||||
|
const tmpFolder = path.resolve(gridFolder, "thumbs", randomUUID());
|
||||||
|
await mkdirpAsync(tmpFolder);
|
||||||
|
|
||||||
|
diff --git a/server/utils/path.ts b/server/utils/path.ts
|
||||||
|
index 05619e93..64964de8 100644
|
||||||
|
--- a/server/utils/path.ts
|
||||||
|
+++ b/server/utils/path.ts
|
||||||
|
@@ -5,6 +5,7 @@ import { getConfig } from "../config";
|
||||||
|
import { mkdirpSync } from "./fs/async";
|
||||||
|
|
||||||
|
const configFolder = process.env.PV_CONFIG_FOLDER || process.cwd();
|
||||||
|
+export const tempPath = process.env.CACHE_DIRECTORY ?? "tmp";
|
||||||
|
|
||||||
|
export function libraryPath(str: string): string {
|
||||||
|
return resolve(getConfig().persistence.libraryPath, "library", str);
|
||||||
|
--
|
||||||
|
2.47.0
|
||||||
|
|
114
pkgs/by-name/po/porn-vault/package.nix
Normal file
114
pkgs/by-name/po/porn-vault/package.nix
Normal file
@ -0,0 +1,114 @@
|
|||||||
|
{
|
||||||
|
fetchFromGitLab,
|
||||||
|
fetchurl,
|
||||||
|
rustPlatform,
|
||||||
|
lib,
|
||||||
|
pnpm_9,
|
||||||
|
stdenvNoCC,
|
||||||
|
nodejs_22,
|
||||||
|
ffmpeg,
|
||||||
|
imagemagick,
|
||||||
|
makeWrapper,
|
||||||
|
autoPatchelfHook,
|
||||||
|
writeShellApplication,
|
||||||
|
}:
|
||||||
|
let
|
||||||
|
izzy = rustPlatform.buildRustPackage rec {
|
||||||
|
pname = "izzy";
|
||||||
|
version = "2.0.1";
|
||||||
|
|
||||||
|
src = fetchFromGitLab {
|
||||||
|
owner = "porn-vault";
|
||||||
|
repo = "izzy";
|
||||||
|
rev = version;
|
||||||
|
hash = "sha256-UauA5mZi5a5QF7d17pKSzvyaWbeSuFjBrXEAxR3wNkk=";
|
||||||
|
};
|
||||||
|
|
||||||
|
postPatch = ''
|
||||||
|
ln -s ${./Cargo.lock} Cargo.lock
|
||||||
|
'';
|
||||||
|
|
||||||
|
cargoLock.lockFile = ./Cargo.lock;
|
||||||
|
|
||||||
|
meta = {
|
||||||
|
description = "Rust In-Memory K-V Store with Redis-Style File Persistence and Secondary Indices";
|
||||||
|
homepage = "https://gitlab.com/porn-vault/izzy";
|
||||||
|
license = lib.licenses.gpl3Plus;
|
||||||
|
maintainers = [ lib.maintainers.luNeder ];
|
||||||
|
mainProgram = "izzy";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
pnpm = pnpm_9;
|
||||||
|
nodejs = nodejs_22;
|
||||||
|
in
|
||||||
|
stdenvNoCC.mkDerivation (finalAttrs: {
|
||||||
|
pname = "porn-vault";
|
||||||
|
version = "0.30.0-rc.11";
|
||||||
|
|
||||||
|
src = fetchFromGitLab {
|
||||||
|
owner = "porn-vault";
|
||||||
|
repo = "porn-vault";
|
||||||
|
rev = "4c6182c5825d85193cf67cb7cd927da2feaaecdb";
|
||||||
|
hash = "sha256-wQ3dqLc0l2BmLGDYrbWxX2mPwO/Tqz0fY/fOQTEUv24=";
|
||||||
|
};
|
||||||
|
|
||||||
|
pnpmDeps = pnpm.fetchDeps {
|
||||||
|
inherit (finalAttrs) pname version src;
|
||||||
|
hash = "sha256-Xr9tRiP1hW+aFs9FnPvPkeJ0/LtJI57cjWY5bZQaRTQ=";
|
||||||
|
};
|
||||||
|
|
||||||
|
nativeBuildInputs = [
|
||||||
|
nodejs
|
||||||
|
pnpm.configHook
|
||||||
|
makeWrapper
|
||||||
|
];
|
||||||
|
|
||||||
|
patches = [
|
||||||
|
./allow-use-of-systemd-temp-path.patch
|
||||||
|
];
|
||||||
|
|
||||||
|
postPatch = ''
|
||||||
|
substituteInPlace server/binaries/izzy.ts \
|
||||||
|
--replace-fail 'chmodSync(izzyPath, "111");' ""
|
||||||
|
'';
|
||||||
|
|
||||||
|
buildPhase = ''
|
||||||
|
runHook preBuild
|
||||||
|
|
||||||
|
pnpm build
|
||||||
|
|
||||||
|
runHook postBuild
|
||||||
|
'';
|
||||||
|
|
||||||
|
installPhase = ''
|
||||||
|
runHook preInstall
|
||||||
|
|
||||||
|
install -Dm644 package.json config.example.json remix.config.js -t $out/share/porn-vault
|
||||||
|
cp -R public dist build node_modules graphql locale -t $out/share/porn-vault
|
||||||
|
|
||||||
|
runHook postInstall
|
||||||
|
'';
|
||||||
|
|
||||||
|
preFixup = ''
|
||||||
|
makeWrapper "${lib.getExe nodejs}" "$out/bin/porn-vault" \
|
||||||
|
--chdir "$out/share/porn-vault" \
|
||||||
|
--add-flags "dist/index.js" \
|
||||||
|
--set-default IZZY_PATH "${lib.getExe izzy}" \
|
||||||
|
--prefix PATH : "${
|
||||||
|
lib.makeBinPath [
|
||||||
|
ffmpeg
|
||||||
|
imagemagick
|
||||||
|
izzy
|
||||||
|
]
|
||||||
|
}"
|
||||||
|
'';
|
||||||
|
|
||||||
|
meta = {
|
||||||
|
description = "Porn-Vault is a self hosted organizer for adult videos and imagery.";
|
||||||
|
homepage = "https://gitlab.com/porn-vault/porn-vault";
|
||||||
|
license = lib.licenses.gpl3Plus;
|
||||||
|
maintainers = [ lib.maintainers.luNeder ];
|
||||||
|
inherit (nodejs.meta) platforms;
|
||||||
|
mainProgram = "porn-vault";
|
||||||
|
};
|
||||||
|
})
|
@@ -1,38 +1,51 @@
-{ stdenv, lib, fetchFromGitHub, fetchpatch, cmake, pkg-config, openssl, gtest }:
+{
+abseil-cpp,
+cmake,
+fetchFromGitHub,
+stdenv,
+lib,
+pkg-config,
+openssl,
+}:
 
-stdenv.mkDerivation rec {
+let
+cxxStandard = "17";
+in
+stdenv.mkDerivation (finalAttrs: {
 pname = "s2geometry";
-version = "0.9.0";
+version = "0.11.1";
 
 src = fetchFromGitHub {
 owner = "google";
 repo = "s2geometry";
-rev = "v${version}";
-sha256 = "1mx61bnn2f6bd281qlhn667q6yfg1pxzd2js88l5wpkqlfzzhfaz";
+rev = "refs/tags/v${finalAttrs.version}";
+sha256 = "sha256-VjgGcGgQlKmjUq+JU0JpyhOZ9pqwPcBUFEPGV9XoHc0=";
 };
 
-patches = [
-# Fix build https://github.com/google/s2geometry/issues/165
-(fetchpatch {
-url = "https://github.com/google/s2geometry/commit/a4dddf40647c68cd0104eafc31e9c8fb247a6308.patch";
-sha256 = "0fp3w4bg7pgf5vv4vacp9g06rbqzhxc2fg6i5appp93q6phiinvi";
-})
+nativeBuildInputs = [
+cmake
+pkg-config
 ];
 
-nativeBuildInputs = [ cmake pkg-config ];
-buildInputs = [ openssl gtest ];
+cmakeFlags = [
+(lib.cmakeFeature "CMAKE_CXX_STANDARD" cxxStandard)
+# incompatible with our version of gtest
+(lib.cmakeBool "BUILD_TESTS" false)
+];
 
-# Default of C++11 is too low for gtest.
-# In newer versions of s2geometry this can be done with cmakeFlags.
-postPatch = ''
-substituteInPlace CMakeLists.txt --replace "CMAKE_CXX_STANDARD 11" "CMAKE_CXX_STANDARD 14"
-'';
+buildInputs = [
+openssl
+];
+propagatedBuildInputs = [
+(abseil-cpp.override { inherit cxxStandard; })
+];
 
 meta = with lib; {
+changelog = "https://github.com/google/s2geometry/releases/tag/${lib.removePrefix "refs/tags/" finalAttrs.src.rev}";
 description = "Computational geometry and spatial indexing on the sphere";
 homepage = "http://s2geometry.io/";
 license = licenses.asl20;
 maintainers = [ maintainers.Thra11 ];
 platforms = platforms.linux;
 };
-}
+})
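For reference, the lib.cmakeFeature and lib.cmakeBool helpers used in the new cmakeFlags are thin wrappers that render typed -D defines; a minimal sketch of what they should evaluate to, with the names taken from the hunk above:

# Each helper returns the literal flag string passed on the CMake configure line.
let
  lib = (import <nixpkgs> { }).lib;
in
{
  std = lib.cmakeFeature "CMAKE_CXX_STANDARD" "17"; # "-DCMAKE_CXX_STANDARD:STRING=17"
  tests = lib.cmakeBool "BUILD_TESTS" false;        # "-DBUILD_TESTS:BOOL=OFF"
}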
@@ -15,13 +15,13 @@
 
 stdenv.mkDerivation rec {
 pname = "showmethekey";
-version = "1.15.1";
+version = "1.16.0";
 
 src = fetchFromGitHub {
 owner = "AlynxZhou";
 repo = "showmethekey";
 rev = "refs/tags/v${version}";
-hash = "sha256-odlIgWFmhDqju7U5Y9q6apUEAqZUvMUA7/eU7LMltQs=";
+hash = "sha256-eEbpQVfp1Q40+O7uZazKz8aHSPwfLBwEBemyd6jJAgs=";
 };
 
 nativeBuildInputs = [
@ -35,20 +35,20 @@ let
|
|||||||
in
|
in
|
||||||
stdenv.mkDerivation (finalAttrs: {
|
stdenv.mkDerivation (finalAttrs: {
|
||||||
pname = "siyuan";
|
pname = "siyuan";
|
||||||
version = "3.1.8";
|
version = "3.1.13";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = fetchFromGitHub {
|
||||||
owner = "siyuan-note";
|
owner = "siyuan-note";
|
||||||
repo = "siyuan";
|
repo = "siyuan";
|
||||||
rev = "v${finalAttrs.version}";
|
rev = "v${finalAttrs.version}";
|
||||||
hash = "sha256-0sV3r3ETW/FeLJZQrkE95oqKeUKKiNA3vpOBPtHzeE8=";
|
hash = "sha256-+jlJTsGvElumUV1NdYed0XthmY1MFNqWMgDmTQObIA4=";
|
||||||
};
|
};
|
||||||
|
|
||||||
kernel = buildGo123Module {
|
kernel = buildGo123Module {
|
||||||
name = "${finalAttrs.pname}-${finalAttrs.version}-kernel";
|
name = "${finalAttrs.pname}-${finalAttrs.version}-kernel";
|
||||||
inherit (finalAttrs) src;
|
inherit (finalAttrs) src;
|
||||||
sourceRoot = "${finalAttrs.src.name}/kernel";
|
sourceRoot = "${finalAttrs.src.name}/kernel";
|
||||||
vendorHash = "sha256-hxXCq03wxVLONaztZVqLjlqQ/fZNlV2iDF5JIayb5YY=";
|
vendorHash = "sha256-uK++FoWCoeb05TyUhh0PK+wkTmzTko0K7oLodoGAWt8=";
|
||||||
|
|
||||||
patches = [
|
patches = [
|
||||||
(substituteAll {
|
(substituteAll {
|
||||||
@ -90,7 +90,7 @@ stdenv.mkDerivation (finalAttrs: {
|
|||||||
src
|
src
|
||||||
sourceRoot
|
sourceRoot
|
||||||
;
|
;
|
||||||
hash = "sha256-ZaurLQlM81lCGdMwvl/1YDzpC/mU08Wlgx4/MAm6un4=";
|
hash = "sha256-uv3gahbSW81gHMx0sQoUbW4Oyzvo6iD5u1izX8vXkwA=";
|
||||||
};
|
};
|
||||||
|
|
||||||
sourceRoot = "${finalAttrs.src.name}/app";
|
sourceRoot = "${finalAttrs.src.name}/app";
|
||||||
|
@@ -9,7 +9,7 @@
 
 buildGo123Module rec {
 pname = "telegraf";
-version = "1.32.2";
+version = "1.32.3";
 
 subPackages = [ "cmd/telegraf" ];
 
@@ -17,10 +17,10 @@ buildGo123Module rec {
 owner = "influxdata";
 repo = "telegraf";
 rev = "v${version}";
-hash = "sha256-hqYBoaklrdCkTwnkwqkZAvGLyWkUB3CVu4cg3Ujbyto=";
+hash = "sha256-H/thJ88cfl75rRByLYIjpPx6lfBSSryhYii8jBl/PBA=";
 };
 
-vendorHash = "sha256-KRZ5Rrdp69DTHtNu8vXuD7QKy2n6Sx7myNpB5TSZpRk=";
+vendorHash = "sha256-3Wcbl4DM4SHvctVvQTsqQNRkB3z+273kvM/KwypmB70=";
 proxyVendor = true;
 
 ldflags = [
@@ -4,7 +4,7 @@
 fetchFromGitHub,
 }:
 let
-version = "0.23.1";
+version = "0.24.2";
 in
 python3Packages.buildPythonApplication {
 pname = "toml-sort";
@@ -15,7 +15,7 @@ python3Packages.buildPythonApplication {
 owner = "pappasam";
 repo = "toml-sort";
 rev = "refs/tags/v${version}";
-hash = "sha256-7V2WBZYAdsA4Tiy9/2UPOcThSNE3ZXM713j57KDCegk=";
+hash = "sha256-PuTXG8RIN8Mui5J8DV0yxe94y6FNs4TgPyHjEhpcKqM=";
 };
 
 build-system = [ python3Packages.poetry-core ];
@@ -118,5 +118,6 @@ in buildRubyGem rec {
 license = licenses.bsl11;
 maintainers = with maintainers; [ tylerjl ];
 platforms = with platforms; linux ++ darwin;
+broken = true; # build fails on darwin and linux
 };
 }
@@ -34,14 +34,14 @@ rustPlatform.buildRustPackage rec {
 };
 };
 
-meta = with lib; {
+meta = {
 description = "A command-line implementation of WCHISPTool, for flashing ch32 MCUs";
 homepage = "https://ch32-rs.github.io/wchisp/";
 changelog = "https://github.com/ch32-rs/wchisp/releases/tag/v${version}";
-license = with licenses; [ gpl2Only ];
-platforms = with platforms; linux ++ darwin ++ windows;
+license = with lib.licenses; [ gpl2Only ];
+platforms = with lib.platforms; linux ++ darwin ++ windows;
 broken = !stdenv.hostPlatform.isLinux;
-maintainers = with maintainers; [ jwillikers ];
+maintainers = with lib.maintainers; [ jwillikers ];
 mainProgram = "wchisp";
 };
 }
@ -1,11 +1,12 @@
|
|||||||
{ stdenv
|
{
|
||||||
, lib
|
stdenv,
|
||||||
, fetchFromGitHub
|
lib,
|
||||||
, makeWrapper
|
fetchFromGitHub,
|
||||||
, Foundation
|
unstableGitUpdater,
|
||||||
, glew
|
makeWrapper,
|
||||||
, SDL2
|
glew,
|
||||||
, writeShellScript
|
SDL2,
|
||||||
|
writeShellScript,
|
||||||
}:
|
}:
|
||||||
|
|
||||||
let
|
let
|
||||||
@ -24,13 +25,13 @@ let
|
|||||||
in
|
in
|
||||||
stdenv.mkDerivation (finalAttrs: {
|
stdenv.mkDerivation (finalAttrs: {
|
||||||
pname = "wipeout-rewrite";
|
pname = "wipeout-rewrite";
|
||||||
version = "unstable-2023-08-13";
|
version = "0-unstable-2024-07-07";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = fetchFromGitHub {
|
||||||
owner = "phoboslab";
|
owner = "phoboslab";
|
||||||
repo = "wipeout-rewrite";
|
repo = "wipeout-rewrite";
|
||||||
rev = "7a9f757a79d5c6806252cc1268bda5cdef463e23";
|
rev = "a372b51f59217da4a5208352123a4acca800783c";
|
||||||
hash = "sha256-21IG9mZPGgRhVkT087G+Bz/zLkknkHKGmWjSpcLw8vE=";
|
hash = "sha256-RJrWOTb5cZ2rSgO/J8qW5ifMJryBaK6MDtYwQZfghS0=";
|
||||||
};
|
};
|
||||||
|
|
||||||
enableParallelBuilding = true;
|
enableParallelBuilding = true;
|
||||||
@ -42,10 +43,11 @@ stdenv.mkDerivation (finalAttrs: {
|
|||||||
buildInputs = [
|
buildInputs = [
|
||||||
glew
|
glew
|
||||||
SDL2
|
SDL2
|
||||||
] ++ lib.optionals stdenv.hostPlatform.isDarwin [
|
|
||||||
Foundation
|
|
||||||
];
|
];
|
||||||
|
|
||||||
|
# Force this to empty, so assets are looked up in CWD instead of $out/bin
|
||||||
|
env.NIX_CFLAGS_COMPILE = "-DPATH_ASSETS=";
|
||||||
|
|
||||||
installPhase = ''
|
installPhase = ''
|
||||||
runHook preInstall
|
runHook preInstall
|
||||||
|
|
||||||
@ -60,12 +62,14 @@ stdenv.mkDerivation (finalAttrs: {
|
|||||||
runHook postInstall
|
runHook postInstall
|
||||||
'';
|
'';
|
||||||
|
|
||||||
meta = with lib; {
|
passthru.updateScript = unstableGitUpdater { };
|
||||||
|
|
||||||
|
meta = {
|
||||||
mainProgram = "wipegame";
|
mainProgram = "wipegame";
|
||||||
description = "Re-implementation of the 1995 PSX game wipEout";
|
description = "Re-implementation of the 1995 PSX game wipEout";
|
||||||
homepage = "https://github.com/phoboslab/wipeout-rewrite";
|
homepage = "https://github.com/phoboslab/wipeout-rewrite";
|
||||||
license = licenses.unfree;
|
license = lib.licenses.unfree;
|
||||||
maintainers = with maintainers; [ OPNA2608 ];
|
maintainers = with lib.maintainers; [ OPNA2608 ];
|
||||||
platforms = platforms.all;
|
platforms = lib.platforms.all;
|
||||||
};
|
};
|
||||||
})
|
})
|
@ -16,13 +16,13 @@
|
|||||||
|
|
||||||
rustPlatform.buildRustPackage rec {
|
rustPlatform.buildRustPackage rec {
|
||||||
pname = "wluma";
|
pname = "wluma";
|
||||||
version = "4.4.0";
|
version = "4.5.1";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = fetchFromGitHub {
|
||||||
owner = "maximbaz";
|
owner = "maximbaz";
|
||||||
repo = "wluma";
|
repo = "wluma";
|
||||||
rev = version;
|
rev = version;
|
||||||
sha256 = "sha256-Ow3SjeulYiHY9foXrmTtLK3F+B3+DrtDjBUke3bJeDw=";
|
sha256 = "sha256-5uSExmh1a88kZDly4VrHzI8YwfTDB8wm2mMGZyvKsk4=";
|
||||||
};
|
};
|
||||||
|
|
||||||
postPatch = ''
|
postPatch = ''
|
||||||
@ -38,7 +38,7 @@ rustPlatform.buildRustPackage rec {
|
|||||||
'ExecStart=/usr/bin/wluma' 'ExecStart=${placeholder "out"}/bin/wluma'
|
'ExecStart=/usr/bin/wluma' 'ExecStart=${placeholder "out"}/bin/wluma'
|
||||||
'';
|
'';
|
||||||
|
|
||||||
cargoHash = "sha256-BwduYAYIRxc40nn9kloHv+Dt8jLSZViweSYGL5e45YM=";
|
cargoHash = "sha256-hKxKEs88tB05AiWC/LuC/0jJ1RxeUnpp35A6UTQK4xw=";
|
||||||
|
|
||||||
nativeBuildInputs = [
|
nativeBuildInputs = [
|
||||||
makeWrapper
|
makeWrapper
|
||||||
|
853
pkgs/by-name/ze/zed-editor/Cargo.lock
generated
853
pkgs/by-name/ze/zed-editor/Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@ -90,13 +90,13 @@ let
|
|||||||
in
|
in
|
||||||
rustPlatform.buildRustPackage rec {
|
rustPlatform.buildRustPackage rec {
|
||||||
pname = "zed-editor";
|
pname = "zed-editor";
|
||||||
version = "0.161.2";
|
version = "0.162.3";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = fetchFromGitHub {
|
||||||
owner = "zed-industries";
|
owner = "zed-industries";
|
||||||
repo = "zed";
|
repo = "zed";
|
||||||
rev = "refs/tags/v${version}";
|
rev = "refs/tags/v${version}";
|
||||||
hash = "sha256-UEqlOiB7oNQcrLViPrk9ZCg4uUDYhRXjq0cHp/wclYk=";
|
hash = "sha256-B0iTJMVUpsSVZ0l2bdPnWc7YaZErKnxqiuhgYopmJ/4=";
|
||||||
};
|
};
|
||||||
|
|
||||||
patches =
|
patches =
|
||||||
|
@@ -137,5 +137,6 @@ stdenv.mkDerivation {
 maintainers = [ ];
 license = lib.licenses.bsl11;
 platforms = lib.platforms.all;
+broken = true; # `codon-llvm` build fails on darwin and linux
 };
 }
@@ -158,5 +158,6 @@ in stdenv.mkDerivation rec {
 ];
 maintainers = with maintainers; [ kini raskin ];
 platforms = platforms.all;
+broken = stdenv.hostPlatform.isDarwin;
 };
 }
@@ -9,7 +9,7 @@ stdenv.mkDerivation rec
 owner = "alembic";
 repo = "alembic";
 rev = version;
-sha256 = "sha256-PuVN5Ytls58G2BmwCHUHiMQ0rolH98Hlw/pp7cvpiAg=";
+hash = "sha256-EJZvbaGP9aea/UvcXmFbI3Y2/xTkdqORdzyz5ch931A=";
 };
 
 # note: out is unused (but required for outputDoc anyway)
@ -1,38 +1,48 @@
|
|||||||
{ lib
|
{
|
||||||
, mkDerivation
|
cmake,
|
||||||
, fetchFromGitHub
|
fetchFromGitHub,
|
||||||
, cmake
|
lib,
|
||||||
, pkg-config
|
maplibre-native-qt,
|
||||||
, curl
|
qtbase,
|
||||||
, qtbase
|
qtpositioning,
|
||||||
, qtlocation
|
stdenv,
|
||||||
, maplibre-gl-native
|
|
||||||
}:
|
}:
|
||||||
|
|
||||||
mkDerivation rec {
|
stdenv.mkDerivation (finalAttrs: {
|
||||||
pname = "mapbox-gl-qml";
|
pname = "mapbox-gl-qml";
|
||||||
version = "2.1.1";
|
version = "3.0.0";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = fetchFromGitHub {
|
||||||
owner = "rinigus";
|
owner = "rinigus";
|
||||||
repo = "mapbox-gl-qml";
|
repo = "mapbox-gl-qml";
|
||||||
rev = version;
|
rev = "refs/tags/${finalAttrs.version}";
|
||||||
hash = "sha256-zZcD85nOZZ067FRvSuzE8lr2gyuVxpcZGp44D4ayc3Q=";
|
hash = "sha256-csk3Uo+AdP1R/T/9gWyWmYFIKuen2jy8wYN3GJznyRE=";
|
||||||
};
|
};
|
||||||
|
|
||||||
nativeBuildInputs = [ cmake pkg-config ];
|
nativeBuildInputs = [
|
||||||
buildInputs = [ curl qtlocation maplibre-gl-native ];
|
cmake
|
||||||
|
];
|
||||||
|
|
||||||
postPatch = ''
|
cmakeFlags = [
|
||||||
substituteInPlace src/CMakeLists.txt \
|
(lib.cmakeFeature "QT_INSTALL_QML" "${placeholder "out"}/${qtbase.qtQmlPrefix}")
|
||||||
--replace ' ''${QT_INSTALL_QML}' " $out/${qtbase.qtQmlPrefix}"
|
];
|
||||||
'';
|
|
||||||
|
|
||||||
meta = with lib; {
|
buildInputs = [
|
||||||
|
maplibre-native-qt
|
||||||
|
qtpositioning
|
||||||
|
];
|
||||||
|
|
||||||
|
dontWrapQtApps = true; # library only
|
||||||
|
|
||||||
|
meta = {
|
||||||
|
changelog = "https://github.com/rinigus/mapbox-gl-qml/releases/tag/${lib.removePrefix "refs/tags/" finalAttrs.src.rev}";
|
||||||
description = "Unofficial Mapbox GL Native bindings for Qt QML";
|
description = "Unofficial Mapbox GL Native bindings for Qt QML";
|
||||||
homepage = "https://github.com/rinigus/mapbox-gl-qml";
|
homepage = "https://github.com/rinigus/mapbox-gl-qml";
|
||||||
license = licenses.lgpl3Only;
|
license = lib.licenses.lgpl3Only;
|
||||||
maintainers = with maintainers; [ Thra11 dotlambda ];
|
maintainers = with lib.maintainers; [
|
||||||
platforms = platforms.linux;
|
Thra11
|
||||||
|
dotlambda
|
||||||
|
];
|
||||||
|
platforms = lib.platforms.linux;
|
||||||
};
|
};
|
||||||
}
|
})
|
||||||
|
43
pkgs/development/libraries/maplibre-native-qt/default.nix
Normal file
43
pkgs/development/libraries/maplibre-native-qt/default.nix
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
{
|
||||||
|
cmake,
|
||||||
|
fetchFromGitHub,
|
||||||
|
lib,
|
||||||
|
qtlocation,
|
||||||
|
stdenv,
|
||||||
|
}:
|
||||||
|
|
||||||
|
stdenv.mkDerivation (finalAttrs: {
|
||||||
|
pname = "maplibre-native-qt";
|
||||||
|
version = "3.0.0";
|
||||||
|
|
||||||
|
src = fetchFromGitHub {
|
||||||
|
owner = "maplibre";
|
||||||
|
repo = "maplibre-native-qt";
|
||||||
|
rev = "refs/tags/v${finalAttrs.version}";
|
||||||
|
hash = "sha256-h7PFoGJ5P+k5AEv+y0XReYnPdP/bD4nr/uW9jZ5DCy4=";
|
||||||
|
fetchSubmodules = true;
|
||||||
|
};
|
||||||
|
|
||||||
|
nativeBuildInputs = [
|
||||||
|
cmake
|
||||||
|
];
|
||||||
|
|
||||||
|
buildInputs = [
|
||||||
|
qtlocation
|
||||||
|
];
|
||||||
|
|
||||||
|
dontWrapQtApps = true; # library only
|
||||||
|
|
||||||
|
meta = {
|
||||||
|
changelog = "https://github.com/maplibre/maplibre-native-qt/blob/${finalAttrs.src.rev}/CHANGELOG.md";
|
||||||
|
description = "MapLibre Native Qt Bindings and Qt Location Plugin";
|
||||||
|
homepage = "https://github.com/maplibre/maplibre-native-qt";
|
||||||
|
license = with lib.licenses; [
|
||||||
|
bsd2
|
||||||
|
gpl3
|
||||||
|
lgpl3
|
||||||
|
];
|
||||||
|
maintainers = with lib.maintainers; [ dotlambda ];
|
||||||
|
platforms = lib.platforms.all;
|
||||||
|
};
|
||||||
|
})
|
@ -15,7 +15,7 @@
|
|||||||
|
|
||||||
buildPythonPackage rec {
|
buildPythonPackage rec {
|
||||||
pname = "arcam-fmj";
|
pname = "arcam-fmj";
|
||||||
version = "1.5.2";
|
version = "1.6.0";
|
||||||
pyproject = true;
|
pyproject = true;
|
||||||
|
|
||||||
disabled = pythonOlder "3.8";
|
disabled = pythonOlder "3.8";
|
||||||
@ -24,7 +24,7 @@ buildPythonPackage rec {
|
|||||||
owner = "elupus";
|
owner = "elupus";
|
||||||
repo = "arcam_fmj";
|
repo = "arcam_fmj";
|
||||||
rev = "refs/tags/${version}";
|
rev = "refs/tags/${version}";
|
||||||
hash = "sha256-hBoUxY+xtW04UPUG2P8A8QQbHxGk0bjcrtXis6nlaGg=";
|
hash = "sha256-nit+UjUxhkpaK758WLsNc9tcw1s1wdxq3x3etyVVgPk=";
|
||||||
};
|
};
|
||||||
|
|
||||||
build-system = [ setuptools ];
|
build-system = [ setuptools ];
|
||||||
|
@ -5,15 +5,18 @@
|
|||||||
setuptools-scm,
|
setuptools-scm,
|
||||||
setuptools,
|
setuptools,
|
||||||
python,
|
python,
|
||||||
|
docutils,
|
||||||
jaraco-collections,
|
jaraco-collections,
|
||||||
jaraco-functools,
|
jaraco-functools,
|
||||||
jaraco-envs,
|
jaraco-envs,
|
||||||
jaraco-path,
|
jaraco-path,
|
||||||
jaraco-text,
|
jaraco-text,
|
||||||
more-itertools,
|
more-itertools,
|
||||||
|
packaging,
|
||||||
path,
|
path,
|
||||||
pyfakefs,
|
pyfakefs,
|
||||||
pytestCheckHook,
|
pytestCheckHook,
|
||||||
|
stdenv,
|
||||||
}:
|
}:
|
||||||
|
|
||||||
buildPythonPackage rec {
|
buildPythonPackage rec {
|
||||||
@ -24,13 +27,18 @@ buildPythonPackage rec {
|
|||||||
src = fetchFromGitHub {
|
src = fetchFromGitHub {
|
||||||
owner = "pypa";
|
owner = "pypa";
|
||||||
repo = "distutils";
|
repo = "distutils";
|
||||||
rev = "378984e02edae91d5f49425da8436f8dd9152b8a"; # correlate commit from setuptools version
|
rev = "72837514c2b67081401db556be9aaaa43debe44f"; # correlate commit from setuptools version
|
||||||
hash = "sha256-31sPPVY6tr+OwpiFiaKw82KyhDNBVW3Foea49dCa6pA=";
|
hash = "sha256-Kx4Iudy9oZ0oQT96Meyq/m0k0BuexPLVxwvpNJehCW0=";
|
||||||
};
|
};
|
||||||
|
|
||||||
build-system = [ setuptools-scm ];
|
build-system = [ setuptools-scm ];
|
||||||
|
|
||||||
dependencies = [ jaraco-functools ];
|
dependencies = [
|
||||||
|
jaraco-collections
|
||||||
|
jaraco-functools
|
||||||
|
more-itertools
|
||||||
|
packaging
|
||||||
|
];
|
||||||
|
|
||||||
postInstall = ''
|
postInstall = ''
|
||||||
rm -r $out/${python.sitePackages}/distutils
|
rm -r $out/${python.sitePackages}/distutils
|
||||||
@ -40,7 +48,7 @@ buildPythonPackage rec {
|
|||||||
pythonImportsCheck = [ "distutils" ];
|
pythonImportsCheck = [ "distutils" ];
|
||||||
|
|
||||||
nativeCheckInputs = [
|
nativeCheckInputs = [
|
||||||
jaraco-collections
|
docutils
|
||||||
jaraco-envs
|
jaraco-envs
|
||||||
jaraco-path
|
jaraco-path
|
||||||
jaraco-text
|
jaraco-text
|
||||||
@ -50,6 +58,9 @@ buildPythonPackage rec {
|
|||||||
pytestCheckHook
|
pytestCheckHook
|
||||||
];
|
];
|
||||||
|
|
||||||
|
# jaraco-path depends ob pyobjc
|
||||||
|
doCheck = !stdenv.isDarwin;
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
description = "Distutils as found in cpython";
|
description = "Distutils as found in cpython";
|
||||||
homepage = "https://github.com/pypa/distutils";
|
homepage = "https://github.com/pypa/distutils";
|
||||||
|
@ -1,17 +1,20 @@
|
|||||||
{
|
{
|
||||||
lib,
|
lib,
|
||||||
buildPythonPackage,
|
|
||||||
fetchPypi,
|
|
||||||
fetchFromGitHub,
|
|
||||||
importlib-metadata,
|
|
||||||
black,
|
black,
|
||||||
poetry-core,
|
buildPythonPackage,
|
||||||
click,
|
click,
|
||||||
|
fetchFromGitHub,
|
||||||
|
fetchPypi,
|
||||||
|
gitpython,
|
||||||
|
importlib-metadata,
|
||||||
jinja2,
|
jinja2,
|
||||||
platformdirs,
|
platformdirs,
|
||||||
|
poetry-core,
|
||||||
|
pytest-asyncio,
|
||||||
|
pytestCheckHook,
|
||||||
|
pythonOlder,
|
||||||
tomli,
|
tomli,
|
||||||
tqdm,
|
tqdm,
|
||||||
gitpython,
|
|
||||||
}:
|
}:
|
||||||
|
|
||||||
buildPythonPackage rec {
|
buildPythonPackage rec {
|
||||||
@ -19,6 +22,8 @@ buildPythonPackage rec {
|
|||||||
version = "0.23.3";
|
version = "0.23.3";
|
||||||
pyproject = true;
|
pyproject = true;
|
||||||
|
|
||||||
|
disabled = pythonOlder "3.9";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = fetchFromGitHub {
|
||||||
owner = "tconbeer";
|
owner = "tconbeer";
|
||||||
repo = "sqlfmt";
|
repo = "sqlfmt";
|
||||||
@ -26,9 +31,9 @@ buildPythonPackage rec {
|
|||||||
hash = "sha256-kbluj29P1HwTaCYv1Myslak9s8FFm2e/eHdGgi3H4i0=";
|
hash = "sha256-kbluj29P1HwTaCYv1Myslak9s8FFm2e/eHdGgi3H4i0=";
|
||||||
};
|
};
|
||||||
|
|
||||||
build-system = [
|
pythonRelaxDeps = [ "platformdirs" ];
|
||||||
poetry-core
|
|
||||||
];
|
build-system = [ poetry-core ];
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
click
|
click
|
||||||
@ -40,25 +45,26 @@ buildPythonPackage rec {
|
|||||||
];
|
];
|
||||||
|
|
||||||
optional-dependencies = {
|
optional-dependencies = {
|
||||||
jinjafmt = [
|
jinjafmt = [ black ];
|
||||||
black
|
sqlfmt_primer = [ gitpython ];
|
||||||
];
|
|
||||||
sqlfmt_primer = [
|
|
||||||
gitpython
|
|
||||||
];
|
|
||||||
};
|
};
|
||||||
|
|
||||||
pythonRelaxDeps = [
|
nativeCheckInputs = [
|
||||||
"platformdirs"
|
pytest-asyncio
|
||||||
];
|
pytestCheckHook
|
||||||
|
] ++ lib.flatten (builtins.attrValues optional-dependencies);
|
||||||
|
|
||||||
pythonImportsCheck = [
|
preCheck = ''
|
||||||
"sqlfmt"
|
export HOME=$(mktemp -d)
|
||||||
];
|
export PATH="$PATH:$out/bin";
|
||||||
|
'';
|
||||||
|
|
||||||
|
pythonImportsCheck = [ "sqlfmt" ];
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
description = "Sqlfmt formats your dbt SQL files so you don't have to";
|
description = "Sqlfmt formats your dbt SQL files so you don't have to";
|
||||||
homepage = "https://github.com/tconbeer/sqlfmt";
|
homepage = "https://github.com/tconbeer/sqlfmt";
|
||||||
|
changelog = "https://github.com/tconbeer/sqlfmt/blob/${src.rev}/CHANGELOG.md";
|
||||||
license = lib.licenses.asl20;
|
license = lib.licenses.asl20;
|
||||||
maintainers = with lib.maintainers; [ pcboy ];
|
maintainers = with lib.maintainers; [ pcboy ];
|
||||||
mainProgram = "sqlfmt";
|
mainProgram = "sqlfmt";
|
||||||
|
@ -1,50 +1,63 @@
|
|||||||
{
|
{
|
||||||
lib,
|
lib,
|
||||||
buildPythonPackage,
|
buildPythonPackage,
|
||||||
fetchPypi,
|
fetchFromGitHub,
|
||||||
poetry-core,
|
poetry-core,
|
||||||
pyarrow,
|
pyarrow,
|
||||||
pytz,
|
pytz,
|
||||||
textual,
|
textual,
|
||||||
tzdata,
|
tzdata,
|
||||||
|
pythonOlder,
|
||||||
polars,
|
polars,
|
||||||
|
pytest-asyncio,
|
||||||
|
pytest-textual-snapshot,
|
||||||
|
pytestCheckHook,
|
||||||
}:
|
}:
|
||||||
|
|
||||||
buildPythonPackage rec {
|
buildPythonPackage rec {
|
||||||
pname = "textual-fastdatatable";
|
pname = "textual-fastdatatable";
|
||||||
version = "0.9.0";
|
version = "0.10.0";
|
||||||
pyproject = true;
|
pyproject = true;
|
||||||
|
|
||||||
src = fetchPypi {
|
disabled = pythonOlder "3.8";
|
||||||
pname = "textual_fastdatatable";
|
|
||||||
inherit version;
|
src = fetchFromGitHub {
|
||||||
hash = "sha256-AS3SiwetCHkCMu8H81xbp5QvN/2GCvMlWgU4qZKvBRU=";
|
owner = "tconbeer";
|
||||||
|
repo = "textual-fastdatatable";
|
||||||
|
rev = "refs/tags/v${version}";
|
||||||
|
hash = "sha256-r1evN69etFn21TkXPLuAh1OxIsurDDyPyYOKQR5uUos=";
|
||||||
};
|
};
|
||||||
|
|
||||||
build-system = [
|
build-system = [ poetry-core ];
|
||||||
poetry-core
|
|
||||||
];
|
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
pyarrow
|
pyarrow
|
||||||
pytz
|
pytz
|
||||||
textual
|
textual
|
||||||
tzdata
|
tzdata
|
||||||
];
|
] ++ textual.optional-dependencies.syntax;
|
||||||
|
|
||||||
optional-dependencies = {
|
optional-dependencies = {
|
||||||
polars = [
|
polars = [ polars ];
|
||||||
polars
|
|
||||||
];
|
|
||||||
};
|
};
|
||||||
|
|
||||||
pythonImportsCheck = [
|
nativeCheckInputs = [
|
||||||
"textual_fastdatatable"
|
pytest-asyncio
|
||||||
|
pytest-textual-snapshot
|
||||||
|
pytestCheckHook
|
||||||
|
] ++ lib.flatten (builtins.attrValues optional-dependencies);
|
||||||
|
|
||||||
|
pythonImportsCheck = [ "textual_fastdatatable" ];
|
||||||
|
|
||||||
|
disabledTestPaths = [
|
||||||
|
# Tests are comparing CLI output
|
||||||
|
"tests/snapshot_tests/test_snapshots.py"
|
||||||
];
|
];
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
description = "A performance-focused reimplementation of Textual's DataTable widget, with a pluggable data storage backend";
|
description = "A performance-focused reimplementation of Textual's DataTable widget, with a pluggable data storage backend";
|
||||||
homepage = "https://pypi.org/project/textual-fastdatatable/";
|
homepage = "https://github.com/tconbeer/textual-fastdatatable";
|
||||||
|
changelog = "https://github.com/tconbeer/textual-fastdatatable/releases/tag/v${version}";
|
||||||
license = lib.licenses.mit;
|
license = lib.licenses.mit;
|
||||||
maintainers = with lib.maintainers; [ pcboy ];
|
maintainers = with lib.maintainers; [ pcboy ];
|
||||||
};
|
};
|
||||||
|
@ -1,39 +1,47 @@
|
|||||||
{
|
{
|
||||||
lib,
|
lib,
|
||||||
buildPythonPackage,
|
buildPythonPackage,
|
||||||
fetchPypi,
|
fetchFromGitHub,
|
||||||
poetry-core,
|
poetry-core,
|
||||||
pyperclip,
|
pyperclip,
|
||||||
|
pytest-asyncio,
|
||||||
|
pytestCheckHook,
|
||||||
|
pythonOlder,
|
||||||
textual,
|
textual,
|
||||||
}:
|
}:
|
||||||
|
|
||||||
buildPythonPackage rec {
|
buildPythonPackage rec {
|
||||||
pname = "textual-textarea";
|
pname = "textual-textarea";
|
||||||
version = "0.14.2";
|
version = "0.14.4";
|
||||||
pyproject = true;
|
pyproject = true;
|
||||||
|
|
||||||
src = fetchPypi {
|
disabled = pythonOlder "3.8";
|
||||||
pname = "textual_textarea";
|
|
||||||
inherit version;
|
src = fetchFromGitHub {
|
||||||
hash = "sha256-AJU7BBoev6pBrLhvbfF4I7l+E8YnO5jCD5OIsNf6NW0=";
|
owner = "tconbeer";
|
||||||
|
repo = "textual-textarea";
|
||||||
|
rev = "refs/tags/v${version}";
|
||||||
|
hash = "sha256-tmbSCU1VgxR9aXG22UVpweD71dVmhKSRBTDm1Gf33jM=";
|
||||||
};
|
};
|
||||||
|
|
||||||
build-system = [
|
build-system = [ poetry-core ];
|
||||||
poetry-core
|
|
||||||
];
|
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
pyperclip
|
pyperclip
|
||||||
textual
|
textual
|
||||||
|
] ++ textual.optional-dependencies.syntax;
|
||||||
|
|
||||||
|
nativeCheckInputs = [
|
||||||
|
pytest-asyncio
|
||||||
|
pytestCheckHook
|
||||||
];
|
];
|
||||||
|
|
||||||
pythonImportsCheck = [
|
pythonImportsCheck = [ "textual_textarea" ];
|
||||||
"textual_textarea"
|
|
||||||
];
|
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
description = "A text area (multi-line input) with syntax highlighting for Textual";
|
description = "A text area (multi-line input) with syntax highlighting for Textual";
|
||||||
homepage = "https://pypi.org/project/textual-textarea/";
|
homepage = "https://github.com/tconbeer/textual-textarea";
|
||||||
|
changelog = "https://github.com/tconbeer/textual-textarea/releases/tag/v${version}";
|
||||||
license = lib.licenses.mit;
|
license = lib.licenses.mit;
|
||||||
maintainers = with lib.maintainers; [ pcboy ];
|
maintainers = with lib.maintainers; [ pcboy ];
|
||||||
};
|
};
|
||||||
|
@ -20,7 +20,7 @@
|
|||||||
|
|
||||||
buildPythonPackage rec {
|
buildPythonPackage rec {
|
||||||
pname = "textual";
|
pname = "textual";
|
||||||
version = "0.82.0";
|
version = "0.86.1";
|
||||||
pyproject = true;
|
pyproject = true;
|
||||||
|
|
||||||
disabled = pythonOlder "3.8";
|
disabled = pythonOlder "3.8";
|
||||||
@ -29,7 +29,7 @@ buildPythonPackage rec {
|
|||||||
owner = "Textualize";
|
owner = "Textualize";
|
||||||
repo = "textual";
|
repo = "textual";
|
||||||
rev = "refs/tags/v${version}";
|
rev = "refs/tags/v${version}";
|
||||||
hash = "sha256-belpoXQ+CkTchK+FjI/Ur8v4cNgzX39xLdNfPCwaU6E=";
|
hash = "sha256-5msCFv79nAmoaP9gZxV3DXMLTyVlSFb+qyA5jHWwc50=";
|
||||||
};
|
};
|
||||||
|
|
||||||
build-system = [ poetry-core ];
|
build-system = [ poetry-core ];
|
||||||
|
@@ -77,4 +77,11 @@ in
 hash = "sha256-0m9oaqjU42RYyttkTihADDrRMjr2WoK/8sInZALeHws=";
 cargoHash = "sha256-9XTIcpoCnROP63ZTDgMMMmj0kPggiTazKlKQfCgXKzk=";
 };
+
+cargo-pgrx_0_12_6 = generic {
+version = "0.12.6";
+hash = "sha256-7aQkrApALZe6EoQGVShGBj0UIATnfOy2DytFj9IWdEA=";
+cargoHash = "sha256-Di4UldQwAt3xVyvgQT1gUhdvYUVp7n/a72pnX45kP0w=";
+};
+
 }
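The new cargo-pgrx_0_12_6 attribute follows the file's versioned-variant pattern, where a generic function is instantiated once per pinned release. A hedged sketch of what such a helper typically looks like; the real `generic` in this file may differ, and the use of fetchCrate here is an assumption:

# Hypothetical shape of the `generic` helper; argument names mirror the call site above.
{ rustPlatform, fetchCrate }:
let
  generic =
    { version, hash, cargoHash }:
    rustPlatform.buildRustPackage {
      pname = "cargo-pgrx";
      inherit version cargoHash;
      src = fetchCrate {
        pname = "cargo-pgrx";
        inherit version hash;
      };
    };
in
{
  cargo-pgrx_0_12_6 = generic {
    version = "0.12.6";
    hash = "sha256-7aQkrApALZe6EoQGVShGBj0UIATnfOy2DytFj9IWdEA=";
    cargoHash = "sha256-Di4UldQwAt3xVyvgQT1gUhdvYUVp7n/a72pnX45kP0w=";
  };
}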
@ -1,7 +1,7 @@
|
|||||||
{
|
{
|
||||||
lib,
|
lib,
|
||||||
stdenv,
|
stdenv,
|
||||||
fetchurl,
|
fetchzip,
|
||||||
unzip,
|
unzip,
|
||||||
testers,
|
testers,
|
||||||
chromedriver,
|
chromedriver,
|
||||||
@@ -9,19 +9,18 @@
 
 let
   upstream-info =
-    (import ../../../../applications/networking/browsers/chromium/upstream-info.nix)
-    .stable.chromedriver;
+    (lib.importJSON ../../../../applications/networking/browsers/chromium/info.json).chromium;
 
   # See ./source.nix for Linux
   allSpecs = {
     x86_64-darwin = {
       system = "mac-x64";
-      hash = upstream-info.hash_darwin;
+      hash = upstream-info.chromedriver.hash_darwin;
     };
 
     aarch64-darwin = {
       system = "mac-arm64";
-      hash = upstream-info.hash_darwin_aarch64;
+      hash = upstream-info.chromedriver.hash_darwin_aarch64;
     };
   };
 
@@ -35,7 +34,7 @@ stdenv.mkDerivation {
   pname = "chromedriver";
   inherit version;
 
-  src = fetchurl {
+  src = fetchzip {
     url = "https://storage.googleapis.com/chrome-for-testing-public/${version}/${spec.system}/chromedriver-${spec.system}.zip";
     inherit (spec) hash;
   };
@@ -24,5 +24,16 @@ buildNodejs {
     ./node-npm-build-npm-package-logic.patch
     ./use-correct-env-in-tests.patch
     ./bin-sh-node-run-v22.patch
+
+    # Fix for https://github.com/NixOS/nixpkgs/issues/355919
+    # FIXME: remove after a minor point release
+    (fetchpatch2 {
+      url = "https://github.com/nodejs/node/commit/a094a8166cd772f89e92b5deef168e5e599fa815.patch?full_index=1";
+      hash = "sha256-5FZfozYWRa1ZI/f+e+xpdn974Jg2DbiHbua13XUQP5E=";
+    })
+    (fetchpatch2 {
+      url = "https://github.com/nodejs/node/commit/f270462c09ddfd770291a7c8a2cd204b2c63d730.patch?full_index=1";
+      hash = "sha256-Err0i5g7WtXcnhykKgrS3ocX7/3oV9UrT0SNeRtMZNU=";
+    })
   ];
 }
@@ -4,13 +4,13 @@
 
 callPackage ../generic.nix rec {
   pname = "rat-king-adventure";
-  version = "2.0.1";
+  version = "2.0.2";
 
   src = fetchFromGitHub {
     owner = "TrashboxBobylev";
     repo = "Rat-King-Adventure";
     rev = version;
-    hash = "sha256-FAIFrlVyNYTiS+UBLZFOhuMzj8C6qNGAffYrTxcNeDM=";
+    hash = "sha256-mh54m2YwGOmE03fxndk3wNX/xi6UyIdXWEguiC3mDeA=";
   };
 
   desktopName = "Rat King Adventure";
@@ -13,13 +13,13 @@ let
 in
 stdenv.mkDerivation (finalAttrs: {
   pname = "evdi";
-  version = "1.14.6";
+  version = "1.14.7";
 
   src = fetchFromGitHub {
     owner = "DisplayLink";
     repo = "evdi";
     rev = "refs/tags/v${finalAttrs.version}";
-    hash = "sha256-/XIWacrsB7qBqlLUwIGuDdahvt2dAwiK7dauFaYh7lU=";
+    hash = "sha256-z3GawjaokbmmUC1LihwGSnF3tUp9n/FO+kDiWvBq+mY=";
   };
 
   env.NIX_CFLAGS_COMPILE = toString [
@@ -1,20 +1,39 @@
-{ lib, stdenv, fetchFromGitHub, cmake, postgresql, openssl, libkrb5, nixosTests, enableUnfree ? true, buildPostgresqlExtension }:
+{
+  buildPostgresqlExtension,
+  cmake,
+  enableUnfree ? true,
+  fetchFromGitHub,
+  lib,
+  libkrb5,
+  nixosTests,
+  openssl,
+  postgresql,
+  stdenv,
+}:
 
 buildPostgresqlExtension rec {
   pname = "timescaledb${lib.optionalString (!enableUnfree) "-apache"}";
-  version = "2.14.2";
+  version = "2.17.2";
 
   nativeBuildInputs = [ cmake ];
-  buildInputs = [ openssl libkrb5 ];
+  buildInputs = [
+    openssl
+    libkrb5
+  ];
 
   src = fetchFromGitHub {
     owner = "timescale";
     repo = "timescaledb";
     rev = version;
-    hash = "sha256-gJViEWHtIczvIiQKuvvuwCfWJMxAYoBhCHhD75no6r0=";
+    hash = "sha256-gPsAebMUBuAwP6Hoi9/vrc2IFsmTbL0wQH1g6/2k2d4=";
   };
 
-  cmakeFlags = [ "-DSEND_TELEMETRY_DEFAULT=OFF" "-DREGRESS_CHECKS=OFF" "-DTAP_CHECKS=OFF" ]
+  cmakeFlags =
+    [
+      "-DSEND_TELEMETRY_DEFAULT=OFF"
+      "-DREGRESS_CHECKS=OFF"
+      "-DTAP_CHECKS=OFF"
+    ]
     ++ lib.optionals (!enableUnfree) [ "-DAPACHE_ONLY=ON" ]
     ++ lib.optionals stdenv.hostPlatform.isDarwin [ "-DLINTER=OFF" ];
 
@@ -38,14 +57,9 @@ buildPostgresqlExtension rec {
     description = "Scales PostgreSQL for time-series data via automatic partitioning across time and space";
     homepage = "https://www.timescale.com/";
     changelog = "https://github.com/timescale/timescaledb/blob/${version}/CHANGELOG.md";
-    maintainers = [ ];
+    maintainers = [ maintainers.kirillrdy ];
     platforms = postgresql.meta.platforms;
     license = with licenses; if enableUnfree then tsl else asl20;
-    broken = versionOlder postgresql.version "13" ||
-      # timescaledb supports PostgreSQL 17 from 2.17.0 on:
-      # https://github.com/timescale/timescaledb/releases/tag/2.17.0
-      # We can't upgrade to it, yet, because this would imply dropping support for
-      # PostgreSQL 13, which is a breaking change.
-      (versionAtLeast postgresql.version "17" && version == "2.14.2");
+    broken = versionOlder postgresql.version "14";
   };
 }
@@ -3,24 +3,24 @@
 , buildPgrxExtension
 , postgresql
 , nixosTests
-, cargo-pgrx_0_10_2
+, cargo-pgrx_0_12_6
 , nix-update-script
 }:
 
-(buildPgrxExtension.override { cargo-pgrx = cargo-pgrx_0_10_2; }) rec {
+(buildPgrxExtension.override { cargo-pgrx = cargo-pgrx_0_12_6; }) rec {
   inherit postgresql;
 
   pname = "timescaledb_toolkit";
-  version = "1.18.0";
+  version = "1.19.0";
 
   src = fetchFromGitHub {
     owner = "timescale";
     repo = "timescaledb-toolkit";
     rev = version;
-    hash = "sha256-Lm/LFBkG91GeWlJL9RBqP8W0tlhBEeGQ6kXUzzv4xRE=";
+    hash = "sha256-7yUbtWbYL4AnuUX8OXG4OVqYCY2Lf0pISSTlcFdPqog=";
   };
 
-  cargoHash = "sha256-LME8oftHmmiN8GU3eTBTSB6m0CE+KtDFRssL1g2Cjm8=";
+  cargoHash = "sha256-+uD4UU7QwNISQZ7a2kDkY/y3fQWk/K0fFcrFq4yq6RU=";
   buildAndTestSubdir = "extension";
 
   passthru = {
@@ -37,8 +37,5 @@
     maintainers = with maintainers; [ typetetris ];
     platforms = postgresql.meta.platforms;
     license = licenses.tsl;
-    # PostgreSQL 17 support issue upstream: https://github.com/timescale/timescaledb-toolkit/issues/813
-    # Check after next package update.
-    broken = versionAtLeast postgresql.version "17" && version == "1.18.0";
   };
 }
@@ -1,52 +0,0 @@
-# Adapted from lib/tests/release.nix
-{ pkgs-path ? ../../..
-, pkgs ? import pkgs-path {}
-, lib ? pkgs.lib
-, nix ? pkgs.nix
-}:
-
-#
-# This verifies that release-attrpaths-superset.nix does not encounter
-# infinite recursion or non-tryEval-able failures.
-#
-pkgs.runCommand "all-attrs-eval-under-tryEval" {
-  nativeBuildInputs = [
-    nix
-    pkgs.gitMinimal
-  ] ++ lib.optional pkgs.stdenv.hostPlatform.isLinux pkgs.inotify-tools;
-  strictDeps = true;
-
-  src = with lib.fileset; toSource {
-    root = pkgs-path;
-    fileset = unions [
-      ../../../default.nix
-      ../../../doc
-      ../../../lib
-      ../../../maintainers
-      ../../../nixos
-      ../../../pkgs
-      ../../../.version
-    ];
-  };
-}
-''
-  datadir="${nix}/share"
-  export TEST_ROOT=$(pwd)/test-tmp
-  export HOME=$(mktemp -d)
-  export NIX_BUILD_HOOK=
-  export NIX_CONF_DIR=$TEST_ROOT/etc
-  export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
-  export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
-  export NIX_STATE_DIR=$TEST_ROOT/var/nix
-  export NIX_STORE_DIR=$TEST_ROOT/store
-  export PAGER=cat
-  cacheDir=$TEST_ROOT/binary-cache
-
-  nix-store --init
-
-  echo "Running pkgs/top-level/release-attrpaths-superset.nix"
-  nix-instantiate --eval --strict --json $src/pkgs/top-level/release-attrpaths-superset.nix -A names > /dev/null
-
-  mkdir $out
-  echo success > $out/${nix.version}
-''
@@ -12,19 +12,19 @@
 
 buildPythonPackage rec {
   pname = "esphome-dashboard";
-  version = "20240620.0";
+  version = "20241120.0";
   pyproject = true;
 
   src = fetchFromGitHub {
     owner = "esphome";
     repo = "dashboard";
     rev = "refs/tags/${version}";
-    hash = "sha256-LmIxfX3rcRK90h31J0B5T02f48MCctFERgXxf0zkDm0=";
+    hash = "sha256-insoDWHqMFAGgmsY2ZgNuo1cl0WGJXRy398bt3ADORs=";
   };
 
   npmDeps = fetchNpmDeps {
     inherit src;
-    hash = "sha256-xMVESS1bPNJF07joUgY8ku+GWtflWhM8mYAv0emggc8=";
+    hash = "sha256-UKrF7yzyj09WBrmrJ6uzcRjIYrKwCqLQ5paiqnt/Xuc=";
   };
 
   build-system = [ setuptools ];
@@ -21,14 +21,14 @@ let
 in
 python.pkgs.buildPythonApplication rec {
   pname = "esphome";
-  version = "2024.10.3";
+  version = "2024.11.0";
   pyproject = true;
 
   src = fetchFromGitHub {
     owner = pname;
     repo = pname;
     rev = "refs/tags/${version}";
-    hash = "sha256-13hNX9uaQbO/IKUkGaOITKh+REqUCHirbTPRgomzHBU=";
+    hash = "sha256-YH/i1W9d79ZnQCiLiAMHn6goa0l/kqL4MddKYcgLOjg=";
   };
 
   build-systems = with python.pkgs; [
@@ -56,7 +56,7 @@ python.pkgs.buildPythonApplication rec {
     cat requirements_optional.txt >> requirements.txt
     # relax strict runtime version check
     substituteInPlace esphome/components/font/__init__.py \
-      --replace-fail "10.2.0" "${python.pkgs.pillow.version}"
+      --replace-fail "10.4.0" "${python.pkgs.pillow.version}"
   '';
 
   # Remove esptool and platformio from requirements
@@ -76,7 +76,9 @@ python.pkgs.buildPythonApplication rec {
     colorama
     cryptography
     esphome-dashboard
+    freetype-py
     icmplib
+    glyphsets
     kconfiglib
     packaging
     paho-mqtt
@@ -6986,6 +6986,7 @@ with pkgs;
     cargo-pgrx_0_11_2
     cargo-pgrx_0_11_3
     cargo-pgrx_0_12_0_alpha_1
+    cargo-pgrx_0_12_6
     ;
   cargo-pgrx = cargo-pgrx_0_11_2;
 
@@ -12408,10 +12409,6 @@ with pkgs;
     inherit (pkgs) meson;
   };
 
-  cryptomator = callPackage ../tools/security/cryptomator {
-    jdk = jdk23.override { enableJavaFX = true; };
-  };
-
   # Darwin package set
   #
   # Even though this is a set of packages not single package, use `callPackage`
@@ -16088,7 +16085,6 @@ with pkgs;
 
   ungoogled-chromium = callPackage ../applications/networking/browsers/chromium ((config.chromium or {}) // {
     ungoogled = true;
-    channel = "ungoogled-chromium";
   });
 
   unigine-tropics = pkgsi686Linux.callPackage ../applications/graphics/unigine-tropics { };
@@ -16774,10 +16770,6 @@ with pkgs;
 
   pmars-x11 = pmars.override { enableXwinGraphics = true; };
 
-  wipeout-rewrite = callPackage ../games/wipeout-rewrite {
-    inherit (darwin.apple_sdk.frameworks) Foundation;
-  };
-
   ### GAMES/DOOM-PORTS
 
   doomseeker = qt5.callPackage ../games/doom-ports/doomseeker { };
@@ -180,6 +180,8 @@ in (noExtraAttrs (kdeFrameworks // plasmaMobileGear // plasma5 // plasma5.thirdP
 
   maplibre-gl-native = callPackage ../development/libraries/maplibre-gl-native { };
 
+  maplibre-native-qt = callPackage ../development/libraries/maplibre-native-qt { };
+
   maui-core = libsForQt5.callPackage ../development/libraries/maui-core { };
 
   mlt = pkgs.mlt.override {
@@ -60,6 +60,8 @@ makeScopeWithSplicing' {
     qt = qt6;
   };
 
+  maplibre-native-qt = callPackage ../development/libraries/maplibre-native-qt { };
+
   qca = pkgs.darwin.apple_sdk_11_0.callPackage ../development/libraries/qca {
     inherit (qt6) qtbase qt5compat;
   };
47 pkgs/top-level/release-attrpaths-parallel.nix Normal file
@@ -0,0 +1,47 @@
+# This file works in tandem with ../../ci/eval/default.nix
+# It turns ./release-outpaths.nix into chunks of a fixed size
+{
+  lib ? import ../../lib,
+  path ? ../..,
+  # The file containing all available attribute paths, which are split into chunks here
+  attrpathFile,
+  chunkSize,
+  myChunk,
+  checkMeta,
+  includeBroken,
+  systems,
+}:
+
+let
+  attrpaths = lib.importJSON attrpathFile;
+  myAttrpaths = lib.sublist (chunkSize * myChunk) chunkSize attrpaths;
+
+  unfiltered = import ./release-outpaths.nix {
+    inherit path;
+    inherit checkMeta includeBroken systems;
+  };
+
+  # Turns the unfiltered recursive attribute set into one that is limited to myAttrpaths
+  filtered =
+    let
+      recurse =
+        index: paths: attrs:
+        lib.mapAttrs (
+          name: values:
+          if attrs ? ${name} then
+            if lib.any (value: lib.length value <= index + 1) values then
+              attrs.${name}
+            else
+              recurse (index + 1) values attrs.${name}
+              # Make sure nix-env recurses as well
+              // {
+                recurseForDerivations = true;
+              }
+          else
+            null
+        ) (lib.groupBy (a: lib.elemAt a index) paths);
+    in
+    recurse 0 myAttrpaths unfiltered;
+
+in
+filtered
@@ -9,7 +9,7 @@
 
   $ hydra-eval-jobs -I . pkgs/top-level/release-haskell.nix
 */
-{ supportedSystems ? [ "x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin" ] }:
+{ supportedSystems ? import ../../ci/supportedSystems.nix }:
 
 let
 
@@ -163,19 +163,26 @@ let
     (addMetaAttrs { maintainers = crossMaintainers; });
 
 
-  /* Recursively map a (nested) set of derivations to an isomorphic
-    set of meta.platforms values. */
-  packagePlatforms = mapAttrs (name: value:
+  /* Recursive for packages and apply a function to them */
+  recursiveMapPackages = f: mapAttrs (name: value:
     if isDerivation value then
-      value.meta.hydraPlatforms
-        or (subtractLists (value.meta.badPlatforms or [])
-          (value.meta.platforms or supportedSystems))
+      f value
     else if value.recurseForDerivations or false || value.recurseForRelease or false then
-      packagePlatforms value
+      recursiveMapPackages f value
     else
       []
   );
 
+  /* Gets the list of Hydra platforms for a derivation */
+  getPlatforms = drv:
+    drv.meta.hydraPlatforms
+      or (subtractLists (drv.meta.badPlatforms or [])
+        (drv.meta.platforms or supportedSystems));
+
+  /* Recursively map a (nested) set of derivations to an isomorphic
+    set of meta.platforms values. */
+  packagePlatforms = recursiveMapPackages getPlatforms;
+
 in {
   /* Common platform groups on which to test packages. */
   inherit (platforms) unix linux darwin cygwin all;
@@ -188,6 +195,8 @@ in {
     lib
     mapTestOn
     mapTestOnCross
+    recursiveMapPackages
+    getPlatforms
     packagePlatforms
     pkgs
     pkgsFor
@@ -12,13 +12,7 @@
 , attrNamesOnly ? false
 
 # Set this to `null` to build for builtins.currentSystem only
-, systems ? [
-    "aarch64-linux"
-    "aarch64-darwin"
-    #"i686-linux" # !!!
-    "x86_64-linux"
-    "x86_64-darwin"
-  ]
+, systems ? import ../../ci/supportedSystems.nix
 }:
 let
   lib = import (path + "/lib");
@@ -12,7 +12,7 @@
 , system ? builtins.currentSystem
 , officialRelease ? false
   # The platform doubles for which we build Nixpkgs.
-, supportedSystems ? [ "x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin" ]
+, supportedSystems ? import ../../ci/supportedSystems.nix
   # The platform triples for which we build bootstrap tools.
 , bootstrapConfigs ? [
     "aarch64-apple-darwin"
@@ -321,8 +321,9 @@ let
   # Conflicts usually cause silent job drops like in
   # https://github.com/NixOS/nixpkgs/pull/182058
   jobs = let
-    packagePlatforms = if attrNamesOnly then id else release-lib.packagePlatforms;
-    packageJobs = {
+    packagePlatforms = release-lib.recursiveMapPackages
+      (if attrNamesOnly then id else release-lib.getPlatforms);
+    packageJobs = packagePlatforms pkgs // {
       haskell.compiler = packagePlatforms pkgs.haskell.compiler;
       haskellPackages = packagePlatforms pkgs.haskellPackages;
       # Build selected packages (HLS) for multiple Haskell compilers to rebuild
@@ -363,8 +364,8 @@ let
     };
     mapTestOn-packages =
       if attrNamesOnly
-      then pkgs // packageJobs
-      else mapTestOn ((packagePlatforms pkgs) // packageJobs);
+      then packageJobs
+      else mapTestOn packageJobs;
   in
   unionOfDisjoint nonPackageJobs mapTestOn-packages;
 