Merge master into staging-next

Commit acce078383 by github-actions[bot], 2024-11-21 00:15:13 +00:00, committed by GitHub (GPG Key ID: B5690EEEBB952194)
83 changed files with 6684 additions and 8443 deletions

.github/workflows/eval.yml (new file, 139 lines)

@ -0,0 +1,139 @@
name: Eval
on: pull_request_target
permissions:
contents: read
jobs:
attrs:
name: Attributes
runs-on: ubuntu-latest
outputs:
mergedSha: ${{ steps.merged.outputs.mergedSha }}
systems: ${{ steps.systems.outputs.systems }}
steps:
# Important: Because of `pull_request_target`, this doesn't check out the PR,
# but rather the base branch of the PR, which is needed so we don't run untrusted code
- name: Check out the ci directory of the base branch
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: base
sparse-checkout: ci
- name: Check if the PR can be merged and get the test merge commit
id: merged
env:
GH_TOKEN: ${{ github.token }}
run: |
if mergedSha=$(base/ci/get-merge-commit.sh ${{ github.repository }} ${{ github.event.number }}); then
echo "Checking the merge commit $mergedSha"
echo "mergedSha=$mergedSha" >> "$GITHUB_OUTPUT"
else
# Skipping so that no notifications are sent
echo "Skipping the rest..."
fi
rm -rf base
- name: Check out the PR at the test merge commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
# Add this to _all_ subsequent steps to skip them
if: steps.merged.outputs.mergedSha
with:
ref: ${{ steps.merged.outputs.mergedSha }}
path: nixpkgs
- name: Install Nix
uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30
if: steps.merged.outputs.mergedSha
- name: Evaluate the list of all attributes and get the systems matrix
id: systems
if: steps.merged.outputs.mergedSha
run: |
nix-build nixpkgs/ci -A eval.attrpathsSuperset
echo "systems=$(<result/systems.json)" >> "$GITHUB_OUTPUT"
- name: Upload the list of all attributes
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
if: steps.merged.outputs.mergedSha
with:
name: paths
path: result/*
outpaths:
name: Outpaths
runs-on: ubuntu-latest
needs: attrs
# Skip this and future steps if the PR can't be merged
if: needs.attrs.outputs.mergedSha
strategy:
matrix:
system: ${{ fromJSON(needs.attrs.outputs.systems) }}
steps:
- name: Download the list of all attributes
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: paths
path: paths
- name: Check out the PR at the test merge commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ needs.attrs.outputs.mergedSha }}
path: nixpkgs
- name: Install Nix
uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30
- name: Evaluate the ${{ matrix.system }} output paths for all derivation attributes
run: |
nix-build nixpkgs/ci -A eval.singleSystem \
--argstr evalSystem ${{ matrix.system }} \
--arg attrpathFile ./paths/paths.json \
--arg chunkSize 10000
# If it uses too much memory, slightly decrease chunkSize
- name: Upload the output paths and eval stats
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
if: needs.attrs.outputs.mergedSha
with:
name: intermediate-${{ matrix.system }}
path: result/*
process:
name: Process
runs-on: ubuntu-latest
needs: outpaths
steps:
- name: Download output paths and eval stats for all systems
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
pattern: intermediate-*
path: intermediate
- name: Check out the PR at the test merge commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ needs.attrs.outputs.mergedSha }}
path: nixpkgs
- name: Install Nix
uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30
- name: Combine all output paths and eval stats
run: |
nix-build nixpkgs/ci -A eval.combine \
--arg resultsDir ./intermediate
- name: Upload the combined results
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
with:
name: result
path: result/*
# TODO: Run this workflow also on `push` (on at least the main development branches)
# Then add an extra step here that waits for the base branch (not the merge base, because that could be very different)
# to have completed the eval, then use
# gh api --method GET /repos/NixOS/nixpkgs/actions/workflows/eval.yml/runs -f head_sha=<BASE>
# and follow it to the artifact results, where you can then download the outpaths.json from the base branch
# That can then be used to compare the number of changed paths, get evaluation stats and ping appropriate reviewers


@ -26,4 +26,5 @@ in
inherit pkgs;
requestReviews = pkgs.callPackage ./request-reviews { };
codeownersValidator = pkgs.callPackage ./codeowners-validator { };
eval = pkgs.callPackage ./eval { };
}

ci/eval/README.md (new file, 19 lines)

@ -0,0 +1,19 @@
# Nixpkgs CI evaluation

The code in this directory is used by the [eval.yml](../../.github/workflows/eval.yml) GitHub Actions workflow to evaluate the majority of Nixpkgs for all PRs, effectively making sure that no evaluation failures are encountered when the development branches are processed by Hydra.

Furthermore, it also allows local evaluation using

```
nix-build ci -A eval.full \
  --max-jobs 4 \
  --cores 2 \
  --arg chunkSize 10000
```

- `--max-jobs`: The maximum number of derivations to run at the same time. Each [supported system](../supportedSystems.nix) gets a separate derivation, so it doesn't make sense to set this higher than the number of supported systems.
- `--cores`: The number of cores to use for each job. It is recommended to set this to the number of cores on your system divided by `--max-jobs`.
- `chunkSize`: The number of attributes that are evaluated simultaneously on a single core. Lowering this decreases memory usage at the cost of increased evaluation time. If this is set too high, there won't be enough chunks to process in parallel, which also increases evaluation time.

A good default is to set `chunkSize` to 10000, which leads to a maximum of about 3.6 GB of memory usage per core (for example, 4 cores × 3.6 GB ≈ 14.4 GB), making it suitable for fully utilising machines with 4 cores and 16 GB of memory, 8 cores and 32 GB of memory, or 16 cores and 64 GB of memory.

Note that 16 GB of memory is the recommended minimum; with less than 8 GB of memory, evaluation time suffers greatly.
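For a quick local smoke test, the `eval.full` function in `ci/eval/default.nix` also accepts a `quickTest` argument, which evaluates only a single chunk and defaults the system to `x86_64-linux`. A minimal sketch of such an invocation:

```
nix-build ci -A eval.full \
  --arg chunkSize 10000 \
  --arg quickTest true
```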

ci/eval/default.nix (new file, 273 lines)

@ -0,0 +1,273 @@
{
lib,
runCommand,
writeShellScript,
linkFarm,
time,
procps,
nix,
jq,
sta,
}:
let
nixpkgs =
with lib.fileset;
toSource {
root = ../..;
fileset = unions (
map (lib.path.append ../..) [
"default.nix"
"doc"
"lib"
"maintainers"
"nixos"
"pkgs"
".version"
"ci/supportedSystems.nix"
]
);
};
supportedSystems = import ../supportedSystems.nix;
attrpathsSuperset =
runCommand "attrpaths-superset.json"
{
src = nixpkgs;
nativeBuildInputs = [
nix
time
];
env.supportedSystems = builtins.toJSON supportedSystems;
passAsFile = [ "supportedSystems" ];
}
''
export NIX_STATE_DIR=$(mktemp -d)
mkdir $out
export GC_INITIAL_HEAP_SIZE=4g
command time -v \
nix-instantiate --eval --strict --json --show-trace \
$src/pkgs/top-level/release-attrpaths-superset.nix -A paths \
--arg enableWarnings false > $out/paths.json
mv "$supportedSystemsPath" $out/systems.json
'';
singleSystem =
{
# The system to evaluate.
# Note that this is intentionally not called `system`,
# because `--argstr system` would only be passed to the ci/default.nix file!
evalSystem,
# The path to the `paths.json` file from `attrpathsSuperset`
attrpathFile,
# The number of attributes per chunk, see ./README.md for more info.
chunkSize,
checkMeta ? true,
includeBroken ? true,
# Whether to just evaluate a single chunk for quick testing
quickTest ? false,
}:
let
singleChunk = writeShellScript "single-chunk" ''
set -euo pipefail
chunkSize=$1
myChunk=$2
system=$3
outputDir=$4
export NIX_SHOW_STATS=1
export NIX_SHOW_STATS_PATH="$outputDir/stats/$myChunk"
echo "Chunk $myChunk on $system start"
set +e
command time -f "Chunk $myChunk on $system done [%MKB max resident, %Es elapsed] %C" \
nix-env -f "${nixpkgs}/pkgs/top-level/release-attrpaths-parallel.nix" \
--query --available \
--no-name --attr-path --out-path \
--show-trace \
--arg chunkSize "$chunkSize" \
--arg myChunk "$myChunk" \
--arg attrpathFile "${attrpathFile}" \
--arg systems "[ \"$system\" ]" \
--arg checkMeta ${lib.boolToString checkMeta} \
--arg includeBroken ${lib.boolToString includeBroken} \
> "$outputDir/result/$myChunk"
exitCode=$?
set -e
if (( exitCode != 0 )); then
echo "Evaluation failed with exit code $exitCode"
# This immediately halts all xargs processes
kill $PPID
fi
'';
in
runCommand "nixpkgs-eval-${evalSystem}"
{
nativeBuildInputs = [
nix
time
procps
jq
];
env = {
inherit evalSystem chunkSize;
};
}
''
export NIX_STATE_DIR=$(mktemp -d)
nix-store --init
echo "System: $evalSystem"
cores=$NIX_BUILD_CORES
echo "Cores: $cores"
attrCount=$(jq length "${attrpathFile}")
echo "Attribute count: $attrCount"
echo "Chunk size: $chunkSize"
# Same as `attrCount / chunkSize` but rounded up
chunkCount=$(( (attrCount - 1) / chunkSize + 1 ))
echo "Chunk count: $chunkCount"
mkdir $out
# Record and print stats on free memory and swap in the background
(
while true; do
availMemory=$(free -b | grep Mem | awk '{print $7}')
freeSwap=$(free -b | grep Swap | awk '{print $4}')
echo "Available memory: $(( availMemory / 1024 / 1024 )) MiB, free swap: $(( freeSwap / 1024 / 1024 )) MiB"
if [[ ! -f "$out/min-avail-memory" ]] || (( availMemory < $(<$out/min-avail-memory) )); then
echo "$availMemory" > $out/min-avail-memory
fi
if [[ ! -f $out/min-free-swap ]] || (( freeSwap < $(<$out/min-free-swap) )); then
echo "$freeSwap" > $out/min-free-swap
fi
sleep 4
done
) &
seq_end=$(( chunkCount - 1 ))
${lib.optionalString quickTest ''
seq_end=0
''}
chunkOutputDir=$(mktemp -d)
mkdir "$chunkOutputDir"/{result,stats}
seq -w 0 "$seq_end" |
command time -f "%e" -o "$out/total-time" \
xargs -I{} -P"$cores" \
${singleChunk} "$chunkSize" {} "$evalSystem" "$chunkOutputDir"
if (( chunkSize * chunkCount != attrCount )); then
# A final incomplete chunk would mess up the stats, don't include it
rm "$chunkOutputDir"/stats/"$seq_end"
fi
# Make sure the glob doesn't break when there are no files
shopt -s nullglob
cat "$chunkOutputDir"/result/* > $out/paths
cat "$chunkOutputDir"/stats/* > $out/stats.jsonstream
'';
combine =
{
resultsDir,
}:
runCommand "combined-result"
{
nativeBuildInputs = [
jq
sta
];
}
''
mkdir -p $out
# Transform output paths to JSON
cat ${resultsDir}/*/paths |
jq --sort-keys --raw-input --slurp '
split("\n") |
map(select(. != "") | split(" ") | map(select(. != ""))) |
map(
{
key: .[0],
value: .[1] | split(";") | map(split("=") |
if length == 1 then
{ key: "out", value: .[0] }
else
{ key: .[0], value: .[1] }
end) | from_entries}
) | from_entries
' > $out/outpaths.json
# Computes min, mean, error, etc. for a list of values and outputs a JSON from that
statistics() {
local stat=$1
sta --transpose |
jq --raw-input --argjson stat "$stat" -n '
[
inputs |
split("\t") |
{ key: .[0], value: (.[1] | fromjson) }
] |
from_entries |
{
key: ($stat | join(".")),
value: .
}'
}
# Gets all available number stats (without .sizes because those are constant and not interesting)
readarray -t stats < <(jq -cs '.[0] | del(.sizes) | paths(type == "number")' ${resultsDir}/*/stats.jsonstream)
# Combines the statistics from all evaluations
{
echo "{ \"key\": \"minAvailMemory\", \"value\": $(cat ${resultsDir}/*/min-avail-memory | sta --brief --min) }"
echo "{ \"key\": \"minFreeSwap\", \"value\": $(cat ${resultsDir}/*/min-free-swap | sta --brief --min) }"
cat ${resultsDir}/*/total-time | statistics '["totalTime"]'
for stat in "''${stats[@]}"; do
cat ${resultsDir}/*/stats.jsonstream |
jq --argjson stat "$stat" 'getpath($stat)' |
statistics "$stat"
done
} |
jq -s from_entries > $out/stats.json
'';
full =
{
# Whether to evaluate just a single system, by default all are evaluated
evalSystem ? if quickTest then "x86_64-linux" else null,
# The number of attributes per chunk, see ./README.md for more info.
chunkSize,
quickTest ? false,
}:
let
systems = if evalSystem == null then supportedSystems else [ evalSystem ];
results = linkFarm "results" (
map (evalSystem: {
name = evalSystem;
path = singleSystem {
inherit quickTest evalSystem chunkSize;
attrpathFile = attrpathsSuperset + "/paths.json";
};
}) systems
);
in
combine {
resultsDir = results;
};
in
{
inherit
attrpathsSuperset
singleSystem
combine
# The above three are used by separate VMs in a GitHub workflow,
# while the below is intended for testing on a single local machine
full
;
}

ci/supportedSystems.nix (new file, 6 lines)

@ -0,0 +1,6 @@
[
"aarch64-linux"
"aarch64-darwin"
"x86_64-linux"
"x86_64-darwin"
]


@ -14,19 +14,5 @@ let
in
pkgs.symlinkJoin {
name = "nixpkgs-lib-tests";
paths = map testWithNix nixVersions ++
#
# TEMPORARY MIGRATION MECHANISM
#
# This comment and the expression which follows it should be
# removed as part of resolving this issue:
#
# https://github.com/NixOS/nixpkgs/issues/272591
#
[(import ../../pkgs/test/release {
inherit pkgs lib nix;
})]
;
paths = map testWithNix nixVersions;
}


@ -13078,6 +13078,13 @@
githubId = 30698906;
name = "Luna D Dragon";
};
luNeder = {
email = "luana@luana.dev.br";
matrix = "@luana:catgirl.cloud";
github = "LuNeder";
githubId = 19750714;
name = "Luana Neder";
};
lunik1 = {
email = "ch.nixpkgs@themaw.xyz";
matrix = "@lunik1:lunik.one";


@ -34,6 +34,9 @@
- The `moonlight-qt` package (for [Moonlight game streaming](https://moonlight-stream.org/)) now has HDR support on Linux systems.
- [Sched-ext](https://github.com/sched-ext/scx), a Linux kernel feature to run schedulers in userspace, is now available via [`services.scx`](options.html#opt-services.scx.enable).
Requires Linux kernel version 6.12 or later.
- PostgreSQL now defaults to major version 16.
- GNOME has been updated to version 47. Refer to the [release notes](https://release.gnome.org/47/) for more details.


@ -22,6 +22,10 @@
- `buildGoPackage` has been removed. Use `buildGoModule` instead. See the [Go section in the nixpkgs manual](https://nixos.org/manual/nixpkgs/unstable/#sec-language-go) for details.
- `timescaledb` requires manual upgrade steps.
After you run `ALTER EXTENSION`, you must run [this SQL script](https://github.com/timescale/timescaledb-extras/blob/master/utils/2.15.X-fix_hypertable_foreign_keys.sql). For more details, see pull request [#6797](https://github.com/timescale/timescaledb/pull/6797).
PostgreSQL 13 is no longer supported in TimescaleDB v2.16.
- `kanata` was updated to v1.7.0, which introduces several breaking changes.
See the release notes of
[v1.7.0](https://github.com/jtroo/kanata/releases/tag/v1.7.0)


@ -1300,6 +1300,7 @@
./services/scheduling/atd.nix
./services/scheduling/cron.nix
./services/scheduling/fcron.nix
./services/scheduling/scx.nix
./services/search/elasticsearch-curator.nix
./services/search/elasticsearch.nix
./services/search/hound.nix
@ -1504,6 +1505,7 @@
./services/web-apps/pingvin-share.nix
./services/web-apps/plantuml-server.nix
./services/web-apps/plausible.nix
./services/web-apps/porn-vault/default.nix
./services/web-apps/powerdns-admin.nix
./services/web-apps/pretalx.nix
./services/web-apps/pretix.nix


@ -0,0 +1,110 @@
{
lib,
pkgs,
config,
...
}:
let
cfg = config.services.scx;
in
{
options.services.scx = {
enable = lib.mkEnableOption null // {
description = ''
Whether to enable the SCX service, a daemon that runs schedulers from userspace.
::: {.note}
This service requires a kernel with the Sched-ext feature.
Generally, kernel versions 6.12 and later are supported.
:::
'';
};
package = lib.mkOption {
type = lib.types.package;
default = pkgs.scx.full;
defaultText = lib.literalExpression "pkgs.scx.full";
example = lib.literalExpression "pkgs.scx.rustland";
description = ''
`scx` package to use. `scx.full`, which includes all schedulers, is the default.
You may choose a minimal package, such as `pkgs.scx.rustland`, if only one specific scheduler is needed.
::: {.note}
Overriding this does not change the default scheduler; set `services.scx.scheduler` to change it.
:::
'';
};
scheduler = lib.mkOption {
type = lib.types.enum [
"scx_bpfland"
"scx_central"
"scx_flatcg"
"scx_lavd"
"scx_layered"
"scx_nest"
"scx_pair"
"scx_qmap"
"scx_rlfifo"
"scx_rustland"
"scx_rusty"
"scx_simple"
"scx_userland"
];
default = "scx_rustland";
example = "scx_bpfland";
description = ''
Which scheduler to use. See [SCX documentation](https://github.com/sched-ext/scx/tree/main/scheds)
for details on each scheduler and guidance on selecting the most suitable one.
'';
};
extraArgs = lib.mkOption {
type = lib.types.listOf lib.types.singleLineStr;
example = [
"--slice-us 5000"
"--verbose"
];
description = ''
Parameters passed to the chosen scheduler at runtime.
::: {.note}
Run `chosen-scx-scheduler --help` to see the available options. Generally,
each scheduler has its own set of options, and they are incompatible with each other.
:::
'';
};
};
config = lib.mkIf cfg.enable {
environment.systemPackages = [ cfg.package ];
systemd.services.scx = {
description = "SCX scheduler daemon";
# SCX service should be started only if the kernel supports sched-ext
unitConfig.ConditionPathIsDirectory = "/sys/kernel/sched_ext";
startLimitIntervalSec = 30;
startLimitBurst = 2;
serviceConfig = {
Type = "simple";
ExecStart = "${lib.getExe' cfg.package cfg.scheduler} ${lib.concatStringsSep " " cfg.extraArgs}";
Restart = "on-failure";
StandardError = "journal";
};
wantedBy = [ "multi-user.target" ];
};
assertions = [
{
assertion = lib.versionAtLeast config.boot.kernelPackages.kernel.version "6.12";
message = "SCX is only supported on kernel version >= 6.12.";
}
];
};
meta.maintainers = with lib.maintainers; [ johnrtitor ];
}
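For reference, a minimal sketch of how the new `services.scx` module could be used in a NixOS configuration (the scheduler and arguments below are just the examples from the option declarations, not recommendations):

```
{
  services.scx = {
    enable = true;
    scheduler = "scx_bpfland";           # any value from the enum above
    extraArgs = [ "--slice-us 5000" ];   # passed verbatim to the scheduler binary
  };
}
```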


@ -0,0 +1,158 @@
# See https://gitlab.com/porn-vault/porn-vault/-/blob/dev/config.example.json
{
auth = {
password = null;
};
binaries = {
ffmpeg = "ffmpeg";
ffprobe = "ffprobe";
izzyPort = 8000;
imagemagick = {
convertPath = "convert";
montagePath = "montage";
identifyPath = "identify";
};
};
import = {
images = [
{
path = "/media/porn-vault/images";
include = [ ];
exclude = [ ];
extensions = [
".jpg"
".jpeg"
".png"
".gif"
];
enable = true;
}
];
videos = [
{
path = "/media/porn-vault/videos";
include = [ ];
exclude = [ ];
extensions = [
".mp4"
".mov"
".webm"
];
enable = true;
}
];
scanInterval = 10800000;
};
log = {
level = "debug";
maxSize = "20m";
maxFiles = "5";
writeFile = [
{
level = "debug";
prefix = "errors-";
silent = false;
}
];
};
matching = {
applyActorLabels = [
"event:actor:create"
"event:actor:find-unmatched-scenes"
"plugin:actor:create"
"event:scene:create"
"plugin:scene:create"
"event:image:create"
"plugin:marker:create"
"event:marker:create"
];
applySceneLabels = true;
applyStudioLabels = [
"event:studio:create"
"event:studio:find-unmatched-scenes"
"plugin:studio:create"
"event:scene:create"
"plugin:scene:create"
];
extractSceneActorsFromFilepath = true;
extractSceneLabelsFromFilepath = true;
extractSceneMoviesFromFilepath = true;
extractSceneStudiosFromFilepath = true;
matcher = {
type = "word";
options = {
ignoreSingleNames = false;
ignoreDiacritics = true;
enableWordGroups = true;
wordSeparatorFallback = true;
camelCaseWordGroups = true;
overlappingMatchPreference = "longest";
groupSeparators = [
"[\\s',()[\\]{}*\\.]"
];
wordSeparators = [
"[-_]"
];
filepathSeparators = [
"[/\\\\&]"
];
};
};
matchCreatedActors = true;
matchCreatedStudios = true;
matchCreatedLabels = true;
};
persistence = {
backup = {
enable = true;
maxAmount = 10;
};
libraryPath = "/media/porn-vault/lib";
};
plugins = {
allowActorThumbnailOverwrite = false;
allowMovieThumbnailOverwrite = false;
allowSceneThumbnailOverwrite = false;
allowStudioThumbnailOverwrite = false;
createMissingActors = false;
createMissingLabels = false;
createMissingMovies = false;
createMissingStudios = false;
events = {
actorCreated = [ ];
actorCustom = [ ];
sceneCreated = [ ];
sceneCustom = [ ];
movieCustom = [ ];
studioCreated = [ ];
studioCustom = [ ];
};
register = { };
markerDeduplicationThreshold = 5;
};
processing = {
generatePreviews = true;
readImagesOnImport = false;
generateImageThumbnails = true;
};
server = {
https = {
certificate = "";
enable = false;
key = "";
};
};
transcode = {
hwaDriver = null;
vaapiDevice = "/dev/dri/renderD128";
h264 = {
preset = "veryfast";
crf = 23;
};
webm = {
deadline = "realtime";
cpuUsed = 3;
crf = 31;
};
};
}


@ -0,0 +1,110 @@
{
config,
pkgs,
lib,
...
}:
let
cfg = config.services.porn-vault;
configFormat = pkgs.formats.json { };
defaultConfig = import ./default-config.nix;
inherit (lib)
mkIf
mkEnableOption
mkPackageOption
mkOption
getExe
literalExpression
types
;
in
{
options = {
services.porn-vault = {
enable = lib.mkEnableOption "Porn-Vault";
package = lib.mkPackageOption pkgs "porn-vault" { };
autoStart = lib.mkOption {
type = lib.types.bool;
default = true;
description = ''
Whether to start porn-vault automatically.
'';
};
port = lib.mkOption {
type = lib.types.port;
default = 3000;
description = ''
Which port Porn-Vault will use.
'';
};
openFirewall = lib.mkOption {
type = lib.types.bool;
default = false;
description = ''
Whether to open the Porn-Vault port in the firewall.
'';
};
settings = mkOption {
type = configFormat.type;
description = ''
Configuration for Porn-Vault. The attributes are serialized to JSON in config.json.
See https://gitlab.com/porn-vault/porn-vault/-/blob/dev/config.example.json
'';
default = defaultConfig;
apply = lib.recursiveUpdate defaultConfig;
};
};
};
config = lib.mkIf cfg.enable {
environment.systemPackages = [ cfg.package ];
systemd.services.porn-vault = {
description = "Porn-Vault server";
environment = {
PV_CONFIG_FOLDER = "/etc/porn-vault";
NODE_ENV = "production";
DATABASE_NAME = "production";
PORT = toString cfg.port;
};
serviceConfig = {
ExecStart = getExe cfg.package;
CacheDirectory = "porn-vault";
# Hardening options
CapabilityBoundingSet = [ "CAP_SYS_NICE" ];
AmbientCapabilities = [ "CAP_SYS_NICE" ];
LockPersonality = true;
NoNewPrivileges = true;
PrivateTmp = true;
ProtectControlGroups = true;
ProtectKernelLogs = true;
ProtectKernelModules = true;
ProtectKernelTunables = true;
ProtectSystem = true;
RestrictNamespaces = true;
RestrictSUIDSGID = true;
Restart = "on-failure";
RestartSec = 5;
};
wantedBy = mkIf cfg.autoStart [ "multi-user.target" ];
wants = [ "network.target" ];
};
environment.etc = {
"porn-vault/config.json".source = configFormat.generate "config.json" cfg.settings;
};
networking.firewall = lib.mkIf cfg.openFirewall {
allowedTCPPorts = [ cfg.port ];
};
};
meta.maintainers = [ lib.maintainers.luNeder ];
}
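Similarly, a minimal sketch of enabling the new `services.porn-vault` module (the library path is illustrative; attributes set under `settings` are merged into the default configuration via `lib.recursiveUpdate`):

```
{
  services.porn-vault = {
    enable = true;
    openFirewall = true;  # exposes the configured port (default 3000)
    settings.persistence.libraryPath = "/var/lib/porn-vault";
  };
}
```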


@ -6,13 +6,13 @@
mkDerivation rec {
pname = "pure-maps";
version = "3.3.0";
version = "3.4.0";
src = fetchFromGitHub {
owner = "rinigus";
repo = "pure-maps";
rev = version;
hash = "sha256-TeFolD3jXRdLGfXdy+QcwtOcQQVUB5fn8PwoYfRLaPQ=";
hash = "sha256-3XghdDwzt0r8Qi8W3ZMwar2aaqTNGiGsM27BHVr5C2E=";
fetchSubmodules = true;
};


@ -1,5 +1,5 @@
{ lib, mkChromiumDerivation
, channel, chromiumVersionAtLeast
, chromiumVersionAtLeast
, enableWideVine, ungoogled
}:
@ -90,7 +90,7 @@ mkChromiumDerivation (base: rec {
license = if enableWideVine then lib.licenses.unfree else lib.licenses.bsd3;
platforms = lib.platforms.linux;
mainProgram = "chromium";
hydraPlatforms = lib.optionals (channel == "stable" || channel == "ungoogled-chromium") ["aarch64-linux" "x86_64-linux"];
hydraPlatforms = ["aarch64-linux" "x86_64-linux"];
timeout = 172800; # 48 hours (increased from the Hydra default of 10h)
};
})


@ -1,15 +1,19 @@
{ stdenv, lib, fetchpatch
, recompressTarball
, zstd
, fetchFromGitiles
, fetchNpmDeps
, buildPackages
, pkgsBuildBuild
# Channel data:
, channel, upstream-info
, upstream-info
# Helper functions:
, chromiumVersionAtLeast, versionRange
# Native build inputs:
, ninja, pkg-config
, python3, perl
, nodejs
, npmHooks
, which
, libuuid
, overrideCC
@ -145,12 +149,64 @@ let
else throw "no chromium Rosetta Stone entry for os: ${platform.config}";
};
isElectron = packageName == "electron";
chromiumDeps = lib.mapAttrs (path: args: fetchFromGitiles (removeAttrs args [ "recompress" ] // lib.optionalAttrs args.recompress or false {
name = "source.tar.zstd";
downloadToTemp = false;
passthru.unpack = true;
postFetch = ''
tar \
--use-compress-program="${lib.getExe zstd} -T$NIX_BUILD_CORES" \
--sort=name \
--mtime="1970-01-01" \
--owner=root --group=root \
--numeric-owner --mode=go=rX,u+rw,a-s \
--remove-files \
--directory="$out" \
-cf "$TMPDIR/source.zstd" .
mv "$TMPDIR/source.zstd" "$out"
'';
})) upstream-info.DEPS;
unpackPhaseSnippet = lib.concatStrings (lib.mapAttrsToList (path: dep:
(if dep.unpack or false
then ''
mkdir -p ${path}
pushd ${path}
unpackFile ${dep}
popd
''
else ''
mkdir -p ${builtins.dirOf path}
cp -r ${dep}/. ${path}
''
) + ''
chmod u+w -R ${path}
'') chromiumDeps);
base = rec {
pname = "${lib.optionalString ungoogled "ungoogled-"}${packageName}-unwrapped";
inherit (upstream-info) version;
inherit packageName buildType buildPath;
src = recompressTarball { inherit version; inherit (upstream-info) hash; };
unpackPhase = ''
runHook preUnpack
${unpackPhaseSnippet}
sourceRoot=src
runHook postUnpack
'';
npmRoot = "third_party/node";
npmDeps = (fetchNpmDeps {
src = chromiumDeps."src";
sourceRoot = npmRoot;
hash = upstream-info.deps.npmHash;
}).overrideAttrs (p: {
nativeBuildInputs = p.nativeBuildInputs or [ ] ++ [ zstd ];
});
nativeBuildInputs = [
ninja pkg-config
@ -158,6 +214,9 @@ let
which
buildPackages.rustc.llvmPackages.bintools
bison gperf
] ++ lib.optionals (!isElectron) [
nodejs
npmHooks.npmConfigHook
];
depsBuildBuild = [
@ -317,7 +376,32 @@ let
})
];
postPatch = ''
postPatch = lib.optionalString (!isElectron) ''
ln -s ${./files/gclient_args.gni} build/config/gclient_args.gni
echo 'LASTCHANGE=${upstream-info.DEPS."src".rev}-refs/heads/master@{#0}' > build/util/LASTCHANGE
echo "$SOURCE_DATE_EPOCH" > build/util/LASTCHANGE.committime
cat << EOF > gpu/config/gpu_lists_version.h
/* Generated by lastchange.py, do not edit.*/
#ifndef GPU_CONFIG_GPU_LISTS_VERSION_H_
#define GPU_CONFIG_GPU_LISTS_VERSION_H_
#define GPU_LISTS_VERSION "${upstream-info.DEPS."src".rev}"
#endif // GPU_CONFIG_GPU_LISTS_VERSION_H_
EOF
cat << EOF > skia/ext/skia_commit_hash.h
/* Generated by lastchange.py, do not edit.*/
#ifndef SKIA_EXT_SKIA_COMMIT_HASH_H_
#define SKIA_EXT_SKIA_COMMIT_HASH_H_
#define SKIA_COMMIT_HASH "${upstream-info.DEPS."src/third_party/skia".rev}-"
#endif // SKIA_EXT_SKIA_COMMIT_HASH_H_
EOF
echo -n '${upstream-info.DEPS."src/third_party/dawn".rev}' > gpu/webgpu/DAWN_VERSION
mkdir -p third_party/jdk/current/bin
'' + ''
# Workaround/fix for https://bugs.chromium.org/p/chromium/issues/detail?id=1313361:
substituteInPlace BUILD.gn \
--replace '"//infra/orchestrator:orchestrator_all",' ""
@ -513,6 +597,11 @@ let
# enable those features in our stable builds.
preConfigure = ''
export RUSTC_BOOTSTRAP=1
'' + lib.optionalString (!isElectron) ''
(
cd third_party/node
grep patch update_npm_deps | sh
)
'';
configurePhase = ''
@ -570,11 +659,9 @@ let
'';
passthru = {
updateScript = ./update.py;
chromiumDeps = {
gn = gnChromium;
};
inherit recompressTarball;
updateScript = ./update.mjs;
} // lib.optionalAttrs (!isElectron) {
inherit chromiumDeps npmDeps;
};
}
# overwrite `version` with the exact same `version` from the same source,


@ -10,8 +10,7 @@
# package customization
# Note: enable* flags should not require full rebuilds (i.e. only affect the wrapper)
, channel ? "stable"
, upstream-info ? (import ./upstream-info.nix).${channel}
, upstream-info ? (lib.importJSON ./info.json).${if !ungoogled then "chromium" else "ungoogled-chromium"}
, proprietaryCodecs ? true
, enableWideVine ? false
, ungoogled ? false # Whether to build chromium or ungoogled-chromium
@ -46,13 +45,14 @@ let
inherit stdenv upstream-info;
mkChromiumDerivation = callPackage ./common.nix ({
inherit channel chromiumVersionAtLeast versionRange;
inherit chromiumVersionAtLeast versionRange;
inherit proprietaryCodecs
cupsSupport pulseSupport ungoogled;
gnChromium = buildPackages.gn.overrideAttrs (oldAttrs: {
inherit (upstream-info.deps.gn) version;
version = if (upstream-info.deps.gn ? "version") then upstream-info.deps.gn.version else "0";
src = fetchgit {
inherit (upstream-info.deps.gn) url rev hash;
url = "https://gn.googlesource.com/gn";
inherit (upstream-info.deps.gn) rev hash;
};
} // lib.optionalAttrs (chromiumVersionAtLeast "127") {
# Relax hardening as otherwise gn unstable 2024-06-06 and later fail with:
@ -65,11 +65,10 @@ let
# As a work around until gn is updated again, we filter specifically that patch out.
patches = lib.filter (e: lib.getName e != "LFS64.patch") oldAttrs.patches;
});
recompressTarball = callPackage ./recompress-tarball.nix { inherit chromiumVersionAtLeast; };
});
browser = callPackage ./browser.nix {
inherit channel chromiumVersionAtLeast enableWideVine ungoogled;
inherit chromiumVersionAtLeast enableWideVine ungoogled;
};
# ungoogled-chromium is, contrary to its name, not a build of
@ -80,8 +79,6 @@ let
ungoogled-chromium = pkgsBuildBuild.callPackage ./ungoogled.nix {};
};
suffix = lib.optionalString (channel != "stable" && channel != "ungoogled-chromium") ("-" + channel);
sandboxExecutableName = chromium.browser.passthru.sandboxExecutableName;
# We want users to be able to enableWideVine without rebuilding all of
@ -99,7 +96,7 @@ let
in stdenv.mkDerivation {
pname = lib.optionalString ungoogled "ungoogled-"
+ "chromium${suffix}";
+ "chromium";
inherit (chromium.browser) version;
nativeBuildInputs = [


@ -0,0 +1,122 @@
#! /usr/bin/env nix-shell
#! nix-shell -i python -p python3
"""
This is a heavily simplified variant of electron's update.py
for use in ./update.mjs and should not be called manually.
It resolves chromium's DEPS file recursively when called with
a working depot_tools checkout and a ref to fetch and prints
the result as JSON to stdout.
"""
import base64
import json
from typing import Optional
from urllib.request import urlopen
import sys
if len(sys.argv) != 3:
print("""This internal script has been called with the wrong amount of parameters.
This script is not supposed to be called manually.
Refer to ./update.mjs instead.""")
exit(1)
_, depot_tools_checkout, chromium_version = sys.argv
sys.path.append(depot_tools_checkout)
import gclient_eval
import gclient_utils
class Repo:
fetcher: str
args: dict
def __init__(self) -> None:
self.deps: dict = {}
self.hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
def get_deps(self, repo_vars: dict, path: str) -> None:
print(
"evaluating " + json.dumps(self, default=vars, sort_keys=True),
file=sys.stderr,
)
deps_file = self.get_file("DEPS")
evaluated = gclient_eval.Parse(deps_file, vars_override=repo_vars, filename="DEPS")
repo_vars = dict(evaluated.get("vars", {})) | repo_vars
prefix = f"{path}/" if evaluated.get("use_relative_paths", False) else ""
self.deps = {
prefix + dep_name: repo_from_dep(dep)
for dep_name, dep in evaluated.get("deps", {}).items()
if (
gclient_eval.EvaluateCondition(dep["condition"], repo_vars)
if "condition" in dep
else True
)
and repo_from_dep(dep) != None
}
for key in evaluated.get("recursedeps", []):
dep_path = prefix + key
if dep_path in self.deps and dep_path != "src/third_party/squirrel.mac":
self.deps[dep_path].get_deps(repo_vars, dep_path)
def flatten_repr(self) -> dict:
return {"fetcher": self.fetcher, "hash": self.hash, **self.args}
def flatten(self, path: str) -> dict:
out = {path: self.flatten_repr()}
for dep_path, dep in self.deps.items():
out |= dep.flatten(dep_path)
return out
def get_file(self, filepath: str) -> str:
raise NotImplementedError
class GitilesRepo(Repo):
def __init__(self, url: str, rev: str) -> None:
super().__init__()
self.fetcher = "fetchFromGitiles"
self.args = {
"url": url,
"rev": rev,
}
def get_file(self, filepath: str) -> str:
return base64.b64decode(
urlopen(
f"{self.args['url']}/+/{self.args['rev']}/{filepath}?format=TEXT"
).read()
).decode("utf-8")
def repo_from_dep(dep: dict) -> Optional[Repo]:
if "url" in dep:
url, rev = gclient_utils.SplitUrlRevision(dep["url"])
return GitilesRepo(url, rev)
else:
# Not a git dependency; skip
return None
chromium = GitilesRepo("https://chromium.googlesource.com/chromium/src.git", chromium_version)
chromium.get_deps(
{
**{
f"checkout_{platform}": platform == "linux" or platform == "x64" or platform == "arm64" or platform == "arm"
for platform in ["ios", "chromeos", "android", "mac", "win", "linux"]
},
**{
f"checkout_{arch}": True
for arch in ["x64", "arm64", "arm", "x86", "mips", "mips64"]
},
},
"",
)
print(json.dumps(chromium.flatten("src")))


@ -0,0 +1,12 @@
build_with_chromium = true
checkout_android = false
checkout_android_prebuilts_build_tools = false
checkout_clang_coverage_tools = false
checkout_copybara = false
checkout_ios_webkit = false
checkout_nacl = false
checkout_openxr = false
checkout_src_internal = false
cros_boards = ""
cros_boards_with_qemu_images = ""
generate_location_tags = true

File diff suppressed because it is too large


@ -1,56 +0,0 @@
{ zstd
, fetchurl
, lib
, chromiumVersionAtLeast
}:
{ version
, hash ? ""
} @ args:
fetchurl ({
name = "chromium-${version}.tar.zstd";
url = "https://commondatastorage.googleapis.com/chromium-browser-official/chromium-${version}.tar.xz";
inherit hash;
# chromium xz tarballs are multiple gigabytes big and are sometimes downloaded multiples
# times for different versions as part of our update script.
# We originally inherited fetchzip's default for downloadToTemp (true).
# Given the size of the /run/user tmpfs used defaults to logind's RuntimeDirectorySize=,
# which in turn defaults to 10% of the total amount of physical RAM, this often lead to
# "no space left" errors, eventually resulting in its own section in our chromium
# README.md (for users wanting to run the update script).
# Nowadays, we use fetchurl instead of fetchzip, which defaults to false instead of true.
# We just want to be explicit and provide a place to document the history and reasoning
# behind this.
downloadToTemp = false;
nativeBuildInputs = [ zstd ];
postFetch = ''
cat "$downloadedFile" \
| xz -d --threads=$NIX_BUILD_CORES \
| tar xf - \
--warning=no-timestamp \
--one-top-level=source \
--exclude=third_party/llvm \
--exclude=third_party/rust-src \
--exclude='build/linux/debian_*-sysroot' \
'' + lib.optionalString (chromiumVersionAtLeast "127") ''
--exclude='*.tar.[a-zA-Z0-9][a-zA-Z0-9]' \
--exclude='*.tar.[a-zA-Z0-9][a-zA-Z0-9][a-zA-Z0-9]' \
--exclude=third_party/llvm-build \
--exclude=third_party/rust-toolchain \
--exclude=third_party/instrumented_libs \
'' + ''
--strip-components=1
tar \
--use-compress-program "zstd -T$NIX_BUILD_CORES" \
--sort name \
--mtime "1970-01-01" \
--owner=root --group=root \
--numeric-owner --mode=go=rX,u+rw,a-s \
-cf $out source
'';
} // removeAttrs args [ "version" ])


@ -0,0 +1,227 @@
#! /usr/bin/env nix-shell
/*
#! nix-shell -i zx -p zx
*/
cd(__dirname)
const nixpkgs = (await $`git rev-parse --show-toplevel`).stdout.trim()
const $nixpkgs = $({
cwd: nixpkgs
})
const dummy_hash = 'sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA='
const lockfile_file = './info.json'
const lockfile_initial = fs.readJsonSync(lockfile_file)
function flush_to_file() {
fs.writeJsonSync(lockfile_file, lockfile, { spaces: 2 })
}
const flush_to_file_proxy = {
get(obj, prop) {
const value = obj[prop]
return typeof value == 'object' ? new Proxy(value, flush_to_file_proxy) : value
},
set(obj, prop, value) {
obj[prop] = value
flush_to_file()
return true
},
}
const lockfile = new Proxy(structuredClone(lockfile_initial), flush_to_file_proxy)
for (const attr_path of Object.keys(lockfile)) {
if (!argv[attr_path]) {
console.log(`[${attr_path}] Skipping ${attr_path}. Pass --${attr_path} as argument to update.`)
continue
}
const ungoogled = attr_path === 'ungoogled-chromium'
const version_nixpkgs = !ungoogled ? lockfile[attr_path].version : lockfile[attr_path].deps["ungoogled-patches"].rev
const version_upstream = !ungoogled ? await get_latest_chromium_release() : await get_latest_ungoogled_release()
console.log(`[${attr_path}] ${chalk.red(version_nixpkgs)} (nixpkgs)`)
console.log(`[${attr_path}] ${chalk.green(version_upstream)} (upstream)`)
if (version_greater_than(version_upstream, version_nixpkgs)) {
console.log(`[${attr_path}] ${chalk.green(version_upstream)} from upstream is newer than our ${chalk.red(version_nixpkgs)}...`)
// unconditionally remove ungoogled-chromium's epoch/sub-version (e.g. 130.0.6723.116-1 -> 130.0.6723.116)
const version_chromium = version_upstream.split('-')[0]
lockfile[attr_path] = {
version: version_chromium,
chromedriver: !ungoogled ? await fetch_chromedriver_binaries(version_chromium) : undefined,
deps: {
depot_tools: {},
gn: {},
"ungoogled-patches": ungoogled ? await fetch_ungoogled(version_upstream) : undefined,
npmHash: dummy_hash,
},
DEPS: {},
}
const depot_tools = await fetch_depot_tools(version_chromium, lockfile_initial[attr_path].deps.depot_tools)
lockfile[attr_path].deps.depot_tools = {
rev: depot_tools.rev,
hash: depot_tools.hash,
}
const gn = await fetch_gn(version_chromium, lockfile_initial[attr_path].deps.gn)
lockfile[attr_path].deps.gn = {
rev: gn.rev,
hash: gn.hash,
}
// DEPS update loop
lockfile[attr_path].DEPS = await resolve_DEPS(depot_tools.out, version_chromium)
for (const [path, value] of Object.entries(lockfile[attr_path].DEPS)) {
delete value.fetcher
delete value.postFetch
if (value.url === 'https://chromium.googlesource.com/chromium/src.git') {
value.recompress = true
}
const cache = lockfile_initial[attr_path].DEPS[path]
const cache_hit =
cache !== undefined &&
value.url === cache.url &&
value.rev === cache.rev &&
value.recompress === cache.recompress &&
cache.hash !== undefined &&
cache.hash !== '' &&
cache.hash !== dummy_hash
if (cache_hit) {
console.log(`[${chalk.green(path)}] Reusing hash from previous info.json for ${cache.url}@${cache.rev}`)
value.hash = cache.hash
continue
}
console.log(`[${chalk.red(path)}] FOD prefetching ${value.url}@${value.rev}...`)
value.hash = await prefetch_FOD('-A', `${attr_path}.browser.passthru.chromiumDeps."${path}"`)
console.log(`[${chalk.green(path)}] FOD prefetching successful`)
}
lockfile[attr_path].deps.npmHash = await prefetch_FOD('-A', `${attr_path}.browser.passthru.npmDeps`)
console.log(chalk.green(`[${attr_path}] Done updating ${attr_path} from ${version_nixpkgs} to ${version_upstream}!`))
}
}
async function fetch_gn(chromium_rev, gn_previous) {
const DEPS_file = await get_gitiles_file('https://chromium.googlesource.com/chromium/src', chromium_rev, 'DEPS')
const gn_rev = /^\s+'gn_version': 'git_revision:(?<rev>.+)',$/m.exec(DEPS_file).groups.rev
const hash = gn_rev === gn_previous.rev ? gn_previous.hash : ''
return await prefetch_gitiles('https://gn.googlesource.com/gn', gn_rev, hash)
}
async function fetch_chromedriver_binaries(chromium_version) {
// https://developer.chrome.com/docs/chromedriver/downloads/version-selection
const prefetch = async (url) => {
const expr = [`(import ./. {}).fetchzip { url = "${url}"; hash = ""; }`]
const derivation = await $nixpkgs`nix-instantiate --expr ${expr}`
return await prefetch_FOD(derivation)
}
// if the URL ever changes, the URLs in the chromedriver derivations need updating as well!
const url = (platform) => `https://storage.googleapis.com/chrome-for-testing-public/${chromium_version}/${platform}/chromedriver-${platform}.zip`
return {
hash_darwin: await prefetch(url('mac-x64')),
hash_darwin_aarch64: await prefetch(url('mac-arm64')),
}
}
async function resolve_DEPS(depot_tools_checkout, chromium_rev) {
const { stdout } = await $`./depot_tools.py ${depot_tools_checkout} ${chromium_rev}`
const deps = JSON.parse(stdout)
return Object.fromEntries(Object.entries(deps).map(([k, { url, rev, hash }]) => [k, { url, rev, hash }]))
}
async function get_latest_chromium_release() {
const url = `https://versionhistory.googleapis.com/v1/chrome/platforms/linux/channels/stable/versions/all/releases?` + new URLSearchParams({
order_by: 'version desc',
filter: 'endtime=none,fraction>=0.5'
})
const response = await (await fetch(url)).json()
return response.releases[0].version
}
async function get_latest_ungoogled_release() {
const ungoogled_tags = await (await fetch('https://api.github.com/repos/ungoogled-software/ungoogled-chromium/tags')).json()
const chromium_releases = await (await fetch('https://versionhistory.googleapis.com/v1/chrome/platforms/linux/channels/stable/versions/all/releases')).json()
const chromium_release_map = chromium_releases.releases.map((x) => x.version)
return ungoogled_tags.find((x) => chromium_release_map.includes(x.name.split('-')[0])).name
}
async function fetch_ungoogled(rev) {
const expr = (hash) => [`(import ./. {}).fetchFromGitHub { owner = "ungoogled-software"; repo = "ungoogled-chromium"; rev = "${rev}"; hash = "${hash}"; }`]
const hash = await prefetch_FOD('--expr', expr(''))
const checkout = await $nixpkgs`nix-build --expr ${expr(hash)}`
await fs.copy(`${checkout.stdout.trim()}/flags.gn`, './ungoogled-flags.toml')
return {
rev,
hash,
}
}
function version_greater_than(greater, than) {
return greater.localeCompare(than, undefined, { numeric: true, sensitivity: 'base' }) === 1
}
async function get_gitiles_file(repo, rev, path) {
const base64 = await (await fetch(`${repo}/+/${rev}/${path}?format=TEXT`)).text()
return Buffer.from(base64, 'base64').toString('utf-8')
}
async function fetch_depot_tools(chromium_rev, depot_tools_previous) {
const depot_tools_rev = await get_gitiles_file('https://chromium.googlesource.com/chromium/src', chromium_rev, 'third_party/depot_tools')
const hash = depot_tools_rev === depot_tools_previous.rev ? depot_tools_previous.hash : ''
return await prefetch_gitiles('https://chromium.googlesource.com/chromium/tools/depot_tools', depot_tools_rev, hash)
}
async function prefetch_gitiles(url, rev, hash = '') {
const expr = () => [`(import ./. {}).fetchFromGitiles { url = "${url}"; rev = "${rev}"; hash = "${hash}"; }`]
if (hash === '') {
hash = await prefetch_FOD('--expr', expr())
}
const { stdout } = await $nixpkgs`nix-build --expr ${expr()}`
return {
url,
rev,
hash,
out: stdout.trim(),
}
}
async function prefetch_FOD(...args) {
const { stderr } = await $nixpkgs`nix-build ${args}`.nothrow()
const hash = /\s+got:\s+(?<hash>.+)$/m.exec(stderr)?.groups?.hash
if (hash == undefined) {
throw new Error(chalk.red('Expected to find hash in nix-build stderr output:') + stderr)
}
return hash
}


@ -1,300 +0,0 @@
#! /usr/bin/env nix-shell
#! nix-shell -i python -p python3Packages.looseversion nix nixfmt-classic nix-prefetch-git
"""This script automatically updates chromium, google-chrome, chromedriver, and ungoogled-chromium
via upstream-info.nix."""
# Usage: ./update.py [--commit]
import base64
import csv
import json
import re
import subprocess
import sys
from codecs import iterdecode
from collections import OrderedDict
from datetime import datetime
from looseversion import LooseVersion
from os.path import abspath, dirname
from urllib.request import urlopen
RELEASES_URL = 'https://versionhistory.googleapis.com/v1/chrome/platforms/linux/channels/all/versions/all/releases'
PIN_PATH = dirname(abspath(__file__)) + '/upstream-info.nix'
UNGOOGLED_FLAGS_PATH = dirname(abspath(__file__)) + '/ungoogled-flags.toml'
COMMIT_MESSAGE_SCRIPT = dirname(abspath(__file__)) + '/get-commit-message.py'
NIXPKGS_PATH = subprocess.check_output(["git", "rev-parse", "--show-toplevel"], cwd=dirname(PIN_PATH)).strip()
def load_as_json(path):
"""Loads the given nix file as JSON."""
out = subprocess.check_output(['nix-instantiate', '--eval', '--strict', '--json', path])
return json.loads(out)
def save_dict_as_nix(path, input):
"""Saves the given dict/JSON as nix file."""
json_string = json.dumps(input)
nix = subprocess.check_output(['nix-instantiate', '--eval', '--expr', '{ json }: builtins.fromJSON json', '--argstr', 'json', json_string])
formatted = subprocess.check_output(['nixfmt'], input=nix)
with open(path, 'w') as out:
out.write(formatted.decode())
def prefetch_src_sri_hash(attr_path, version):
"""Prefetches the fixed-output-derivation source tarball and returns its SRI-Hash."""
print(f'nix-build (FOD prefetch) {attr_path} {version}')
out = subprocess.run(
["nix-build", "--expr", f'(import ./. {{}}).{attr_path}.browser.passthru.recompressTarball {{ version = "{version}"; }}'],
cwd=NIXPKGS_PATH,
stderr=subprocess.PIPE
).stderr.decode()
for line in iter(out.split("\n")):
match = re.match(r"\s+got:\s+(.+)$", line)
if match:
print(f'Hash: {match.group(1)}')
return match.group(1)
print(f'{out}\n\nError: Expected hash in nix-build stderr output.', file=sys.stderr)
sys.exit(1)
def nix_prefetch_url(url, algo='sha256'):
"""Prefetches the content of the given URL."""
print(f'nix store prefetch-file {url}')
out = subprocess.check_output(['nix', 'store', 'prefetch-file', '--json', '--hash-type', algo, url])
return json.loads(out)['hash']
def nix_prefetch_git(url, rev):
"""Prefetches the requested Git revision of the given repository URL."""
print(f'nix-prefetch-git {url} {rev}')
out = subprocess.check_output(['nix-prefetch-git', '--quiet', '--url', url, '--rev', rev])
return json.loads(out)
def get_file_revision(revision, file_path):
"""Fetches the requested Git revision of the given Chromium file."""
url = f'https://chromium.googlesource.com/chromium/src/+/refs/tags/{revision}/{file_path}?format=TEXT'
with urlopen(url) as http_response:
resp = http_response.read()
return base64.b64decode(resp)
def get_ungoogled_file_revision(revision, file_path):
"""Fetches the requested Git revision of the given Chromium file."""
url = f'https://raw.githubusercontent.com/ungoogled-software/ungoogled-chromium/{revision}/{file_path}'
with urlopen(url) as http_response:
resp = http_response.read()
return resp.decode("utf-8")
def get_chromedriver(channel):
"""Get the latest chromedriver builds given a channel"""
# See https://chromedriver.chromium.org/downloads/version-selection#h.4wiyvw42q63v
chromedriver_versions_url = f'https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions-with-downloads.json'
print(f'GET {chromedriver_versions_url}')
with urlopen(chromedriver_versions_url) as http_response:
chromedrivers = json.load(http_response)
channel = chromedrivers['channels'][channel]
downloads = channel['downloads']['chromedriver']
def get_chromedriver_url(platform):
for download in downloads:
if download['platform'] == platform:
return download['url']
return {
'version': channel['version'],
'hash_linux': nix_prefetch_url(get_chromedriver_url('linux64')),
'hash_darwin': nix_prefetch_url(get_chromedriver_url('mac-x64')),
'hash_darwin_aarch64': nix_prefetch_url(get_chromedriver_url('mac-arm64'))
}
def get_channel_dependencies(version):
"""Gets all dependencies for the given Chromium version."""
deps = get_file_revision(version, 'DEPS')
gn_pattern = b"'gn_version': 'git_revision:([0-9a-f]{40})'"
gn_commit = re.search(gn_pattern, deps).group(1).decode()
gn = nix_prefetch_git('https://gn.googlesource.com/gn', gn_commit)
return {
'gn': {
'version': datetime.fromisoformat(gn['date']).date().isoformat(),
'url': gn['url'],
'rev': gn['rev'],
'hash': gn['hash']
}
}
def get_latest_ungoogled_chromium_tag(linux_stable_versions):
"""Returns the latest ungoogled-chromium tag for linux using the GitHub API."""
api_tag_url = 'https://api.github.com/repos/ungoogled-software/ungoogled-chromium/tags'
with urlopen(api_tag_url) as http_response:
tags = json.load(http_response)
for tag in tags:
if not tag['name'].split('-')[0] in linux_stable_versions:
continue
return tag['name']
def get_latest_ungoogled_chromium_build(linux_stable_versions):
"""Returns a dictionary for the latest ungoogled-chromium build."""
tag = get_latest_ungoogled_chromium_tag(linux_stable_versions)
version = tag.split('-')[0]
return {
'name': 'chrome/platforms/linux/channels/ungoogled-chromium/versions/',
'version': version,
'ungoogled_rev': tag
}
def get_ungoogled_chromium_build_by_ref(ungoogled_chromium_ref):
"""Returns a dictionary for an ungoogled-chromium build referenced by a ref in the ungoogled-chromium repository."""
version = get_ungoogled_file_revision(ungoogled_chromium_ref, "chromium_version.txt").strip("\n ")
return {
'name': 'chrome/platforms/linux/channels/ungoogled-chromium/versions/',
'version': version,
'ungoogled_rev': ungoogled_chromium_ref
}
def get_ungoogled_chromium_gn_flags(revision):
"""Returns ungoogled-chromium's GN build flags for the given revision."""
gn_flags_url = f'https://raw.githubusercontent.com/ungoogled-software/ungoogled-chromium/{revision}/flags.gn'
return urlopen(gn_flags_url).read().decode()
def channel_name_to_attr_name(channel_name):
"""Maps a channel name to the corresponding main Nixpkgs attribute name."""
if channel_name == 'stable':
return 'chromium'
if channel_name == 'ungoogled-chromium':
return 'ungoogled-chromium'
print(f'Error: Unexpected channel: {channel_name}', file=sys.stderr)
sys.exit(1)
def get_channel_key(item):
"""Orders Chromium channels by their name."""
channel_name = item[0]
if channel_name == 'stable':
return 0
if channel_name == 'beta':
return 1
if channel_name == 'dev':
return 2
if channel_name == 'ungoogled-chromium':
return 3
print(f'Error: Unexpected channel: {channel_name}', file=sys.stderr)
sys.exit(1)
def print_updates(channels_old, channels_new):
"""Print a summary of the updates."""
print('Updates:')
for channel_name in channels_old:
version_old = channels_old[channel_name]["version"]
version_new = channels_new[channel_name]["version"]
if LooseVersion(version_old) < LooseVersion(version_new):
attr_name = channel_name_to_attr_name(channel_name)
print(f'- {attr_name}: {version_old} -> {version_new}')
channels = {}
last_channels = load_as_json(PIN_PATH)
src_hash_cache = {}
print(f'GET {RELEASES_URL}', file=sys.stderr)
with urlopen(RELEASES_URL) as resp:
releases = json.load(resp)['releases']
if len(sys.argv) == 3 and sys.argv[1] == 'ungoogled-rev':
releases.append(get_ungoogled_chromium_build_by_ref(sys.argv[2]))
else:
linux_stable_versions = [release['version'] for release in releases if release['name'].startswith('chrome/platforms/linux/channels/stable/versions/')]
releases.append(get_latest_ungoogled_chromium_build(linux_stable_versions))
for release in releases:
channel_name = re.findall("chrome/platforms/linux/channels/(.*)/versions/", release['name'])[0]
# If we've already found a newer release for this channel, we're
# no longer interested in it.
if channel_name in channels:
continue
# We only look for channels that are listed in our version pin file.
if channel_name not in last_channels:
continue
# If we're back at the last release we used, we don't need to
# keep going -- there's no new version available, and we can
# just reuse the info from last time.
if release['version'] == last_channels[channel_name]['version']:
channels[channel_name] = last_channels[channel_name]
continue
channel = {'version': release['version']}
if channel_name == 'dev':
google_chrome_suffix = 'unstable'
elif channel_name == 'ungoogled-chromium':
google_chrome_suffix = 'stable'
else:
google_chrome_suffix = channel_name
try:
version = release["version"]
existing_releases = dict(map(lambda channel: (channel[1]['version'], channel[1]['hash']), last_channels.items()))
if version in src_hash_cache:
print(f'Already got hash {src_hash_cache[version]} for {version}, skipping FOD prefetch for {channel_name_to_attr_name(channel_name)}')
channel["hash"] = src_hash_cache[version]
elif version in existing_releases:
print(f'Already got hash {existing_releases[version]} for {version} (from upstream-info.nix), skipping FOD prefetch for {channel_name_to_attr_name(channel_name)}')
channel["hash"] = existing_releases[version]
else:
channel["hash"] = prefetch_src_sri_hash(
channel_name_to_attr_name(channel_name),
version
)
src_hash_cache[version] = channel["hash"]
except subprocess.CalledProcessError:
# This release isn't actually available yet. Continue to
# the next one.
continue
channel['deps'] = get_channel_dependencies(channel['version'])
if channel_name == 'stable':
channel['chromedriver'] = get_chromedriver('Stable')
elif channel_name == 'ungoogled-chromium':
ungoogled_repo_url = 'https://github.com/ungoogled-software/ungoogled-chromium.git'
channel['deps']['ungoogled-patches'] = {
'rev': release['ungoogled_rev'],
'hash': nix_prefetch_git(ungoogled_repo_url, release['ungoogled_rev'])['hash']
}
with open(UNGOOGLED_FLAGS_PATH, 'w') as out:
out.write(get_ungoogled_chromium_gn_flags(release['ungoogled_rev']))
channels[channel_name] = channel
sorted_channels = OrderedDict(sorted(channels.items(), key=get_channel_key))
if len(sys.argv) == 2 and sys.argv[1] == '--commit':
for channel_name in sorted_channels.keys():
version_old = last_channels[channel_name]['version']
version_new = sorted_channels[channel_name]['version']
if LooseVersion(version_old) < LooseVersion(version_new):
last_channels[channel_name] = sorted_channels[channel_name]
save_dict_as_nix(PIN_PATH, last_channels)
attr_name = channel_name_to_attr_name(channel_name)
commit_message = f'{attr_name}: {version_old} -> {version_new}'
if channel_name == 'stable':
body = subprocess.check_output([COMMIT_MESSAGE_SCRIPT, version_new]).decode('utf-8')
commit_message += '\n\n' + body
elif channel_name == 'ungoogled-chromium':
subprocess.run(['git', 'add', UNGOOGLED_FLAGS_PATH], check=True)
subprocess.run(['git', 'add', JSON_PATH], check=True)
subprocess.run(['git', 'commit', '--file=-'], input=commit_message.encode(), check=True)
else:
save_dict_as_nix(PIN_PATH, sorted_channels)
print_updates(last_channels, sorted_channels)


@ -1,37 +0,0 @@
{
stable = {
chromedriver = {
hash_darwin = "sha256-+Pcd++19/nJVsqGr2jzyjMTWYfb2U9wSgnNccDyGuGU=";
hash_darwin_aarch64 =
"sha256-vrbIpHrBwbzqars7D546eJ7zhEhAB0abq7MXiqlU4ts=";
hash_linux = "sha256-NbZ1GULLWJ6L3kczz23HoUhGk6VgBOXcjZlL7t4Z6Ec=";
version = "130.0.6723.116";
};
deps = {
gn = {
hash = "sha256-iNXRq3Mr8+wmY1SR4sV7yd2fDiIZ94eReelwFI0UhGU=";
rev = "20806f79c6b4ba295274e3a589d85db41a02fdaa";
url = "https://gn.googlesource.com/gn";
version = "2024-09-09";
};
};
hash = "sha256-eOCUKhFv205MD1gEY1FQQNCwxyELNjIAxUlPcRn74Lk=";
version = "130.0.6723.116";
};
ungoogled-chromium = {
deps = {
gn = {
hash = "sha256-iNXRq3Mr8+wmY1SR4sV7yd2fDiIZ94eReelwFI0UhGU=";
rev = "20806f79c6b4ba295274e3a589d85db41a02fdaa";
url = "https://gn.googlesource.com/gn";
version = "2024-09-09";
};
ungoogled-patches = {
hash = "sha256-+94tSSaOp6vzWkXN1qF3UXMm/Rs3pKmjf+U4x+af818=";
rev = "130.0.6723.116-1";
};
};
hash = "sha256-eOCUKhFv205MD1gEY1FQQNCwxyELNjIAxUlPcRn74Lk=";
version = "130.0.6723.116";
};
}


@ -28,13 +28,13 @@
stdenv.mkDerivation rec {
pname = "jwm";
version = "2.4.5";
version = "2.4.6";
src = fetchFromGitHub {
owner = "joewing";
repo = "jwm";
rev = "v${version}";
sha256 = "sha256-T0N9UMu+BLRzVWshUB4apiq8H2t1y09n4P1cLT5K/N8=";
hash = "sha256-odGqHdm8xnjEcXmpKMy51HEhbjcROLL3hRSdlbmTr2g=";
};
nativeBuildInputs = [


@ -8,13 +8,13 @@
stdenv.mkDerivation (finalAttrs: {
pname = "blasfeo";
version = "0.1.3";
version = "0.1.4";
src = fetchFromGitHub {
owner = "giaf";
repo = "blasfeo";
rev = finalAttrs.version;
hash = "sha256-e8InqyUMWRdL4CBHUOtrZkuabaTLiNPMNPRCnWzWkQ4=";
hash = "sha256-Qm6N1PeWZtS9H5ZuL31NbsctpZiJaGI7bfSPMUmI2BQ=";
};
nativeBuildInputs = [ cmake ];


@ -12,13 +12,13 @@
buildNpmPackage rec {
pname = "blockbench";
version = "4.11.1";
version = "4.11.2";
src = fetchFromGitHub {
owner = "JannisX11";
repo = "blockbench";
rev = "v${version}";
hash = "sha256-a+55seE5tFxTmdTn4qDFWWW6C6FzO8Vgjvfow/tBqf0=";
rev = "refs/tags/v${version}";
hash = "sha256-rUMzn+3j+RL8DY8euS6a4MmdoIAVLXxXu9wvKNmK/TU=";
};
nativeBuildInputs =
@ -93,7 +93,7 @@ buildNpmPackage rec {
];
meta = {
changelog = "https://github.com/JannisX11/blockbench/releases/tag/${src.rev}";
changelog = "https://github.com/JannisX11/blockbench/releases/tag/v${version}";
description = "Low-poly 3D modeling and animation software";
homepage = "https://blockbench.net/";
license = lib.licenses.gpl3Only;

File diff suppressed because it is too large


@ -7,28 +7,18 @@
}:
rustPlatform.buildRustPackage rec {
pname = "clash-rs";
version = "0.7.0";
version = "0.7.1";
src = fetchFromGitHub {
owner = "Watfaq";
repo = "clash-rs";
rev = "v${version}";
hash = "sha256-0deMVI51XHTCrnLTycqDsaY5Lq+wx14uMUlkG5OViNA=";
rev = "refs/tags/v${version}";
hash = "sha256-H76ErJQ+qKC3mt3IzNCPldAwlj7NnYUcLzUuOYykxnE=";
};
cargoLock = {
lockFile = ./Cargo.lock;
outputHashes = {
"boringtun-0.6.0" = "sha256-HBNo53b+CpCGmTXZYH4NBBvNmekyaBKAk1pSRzZdavg=";
"netstack-lwip-0.3.4" = "sha256-lcauDyaw5gAaECRcGNXQDHbWmnyxil18qWFkZ/p/C50=";
"rustls-0.23.12" = "sha256-grt94JG44MljRQRooVZbXL4h4XLI1/KoIdwGv03MoIU=";
"tokio-rustls-0.26.0" = "sha256-Bmi36j8hbR4kkY/xnHbluaInk+YH5/eTln0VYfHulGA=";
"tracing-oslog-0.2.0" = "sha256-JYaCslbVOgsyBhjeBkplPWcjSgFccjr4s6OAGIUu5kg=";
"tuic-1.3.1" = "sha256-WMd+O2UEu0AEI+gNeQtdBhEgIB8LPanoIpMcDAUUWrM=";
"tun-0.6.1" = "sha256-j4yQSu4Mw7DBFak8vJGQomYq81+pfaeEDdN4NNBve+E=";
"unix-udp-sock-0.7.0" = "sha256-TekBfaxecFPpOfq7PVjLHwc0uIp3yJGV/Cgav5VfKaA=";
};
};
useFetchCargoVendor = true;
cargoHash = "sha256-yU5ioAuCJRuYKNOdd381W07Ua+c2me+wHFOMukTVVqM=";
env = {
PROTOC = "${protobuf}/bin/protoc";

View File

@ -1,24 +1,38 @@
{ lib, fetchFromGitHub
, autoPatchelfHook
, fuse3
, maven, jdk, makeShellWrapper, glib, wrapGAppsHook3
, libayatana-appindicator
{
autoPatchelfHook,
fetchFromGitHub,
fuse3,
glib,
jdk23,
lib,
libayatana-appindicator,
makeShellWrapper,
maven,
wrapGAppsHook3,
}:
let
jdk = jdk23.override { enableJavaFX = true; };
in
maven.buildMavenPackage rec {
pname = "cryptomator";
version = "1.14.1";
version = "1.14.2";
src = fetchFromGitHub {
owner = "cryptomator";
repo = "cryptomator";
rev = version;
hash = "sha256-so8RINjFLF9H4K9f/60Ym/v/VpcVfxJ/c+JDOAPFgZU=";
hash = "sha256-TSE83QYFry8O6MKAoggJBjqonYiGax5GG/a7sm7aHf8=";
};
patches = [
# https://github.com/cryptomator/cryptomator/pull/3621
./string-template-removal-and-jdk23.patch
];
mvnJdk = jdk;
mvnParameters = "-Dmaven.test.skip=true -Plinux";
mvnHash = "sha256-aB7wgnJAYvCizC0/gG/amcId/WVVWmZndItm398nDfQ=";
mvnHash = "sha256-LFD150cGW6OdwkK28GYI9j44GtVE0pwFMaQ8dQqArLo=";
preBuild = ''
VERSION=${version}
@ -55,8 +69,18 @@ maven.buildMavenPackage rec {
--add-flags "-Dcryptomator.disableUpdateCheck=true" \
--add-flags "-Dcryptomator.integrationsLinux.trayIconsDir='$out/share/icons/hicolor/symbolic/apps'" \
--add-flags "--module org.cryptomator.desktop/org.cryptomator.launcher.Cryptomator" \
--prefix PATH : "$out/share/cryptomator/libs/:${lib.makeBinPath [ jdk glib ]}" \
--prefix LD_LIBRARY_PATH : "${lib.makeLibraryPath [ fuse3 libayatana-appindicator ]}" \
--prefix PATH : "$out/share/cryptomator/libs/:${
lib.makeBinPath [
jdk
glib
]
}" \
--prefix LD_LIBRARY_PATH : "${
lib.makeLibraryPath [
fuse3
libayatana-appindicator
]
}" \
--set JAVA_HOME "${jdk.home}"
# install desktop entry and icons
@ -79,24 +103,30 @@ maven.buildMavenPackage rec {
nativeBuildInputs = [
autoPatchelfHook
jdk
makeShellWrapper
wrapGAppsHook3
jdk
];
buildInputs = [ fuse3 jdk glib libayatana-appindicator ];
buildInputs = [
fuse3
glib
jdk
libayatana-appindicator
];
meta = with lib; {
meta = {
description = "Free client-side encryption for your cloud files";
mainProgram = "cryptomator";
homepage = "https://cryptomator.org";
sourceProvenance = with sourceTypes; [
fromSource
binaryBytecode # deps
license = lib.licenses.gpl3Plus;
mainProgram = "cryptomator";
maintainers = with lib.maintainers; [
bachp
gepbird
];
license = licenses.gpl3Plus;
maintainers = with maintainers; [ bachp ];
platforms = [ "x86_64-linux" ];
# Uses abandoned JEP 430 string template preview, removed in JDK 23
broken = true;
sourceProvenance = with lib.sourceTypes; [
fromSource
binaryBytecode # deps
];
};
}

View File

@ -0,0 +1,135 @@
diff --git a/src/main/java/org/cryptomator/common/mount/Mounter.java b/src/main/java/org/cryptomator/common/mount/Mounter.java
index 6ca067305b..89f8fb7822 100644
--- a/src/main/java/org/cryptomator/common/mount/Mounter.java
+++ b/src/main/java/org/cryptomator/common/mount/Mounter.java
@@ -160,7 +160,7 @@ public MountHandle mount(VaultSettings vaultSettings, Path cryptoFsRoot) throws
var mountService = mountProviders.stream().filter(s -> s.getClass().getName().equals(vaultSettings.mountService.getValue())).findFirst().orElse(defaultMountService.getValue());
if (isConflictingMountService(mountService)) {
- var msg = STR."\{mountService.getClass()} unavailable due to conflict with either of \{CONFLICTING_MOUNT_SERVICES.get(mountService.getClass().getName())}";
+ var msg = mountService.getClass() + " unavailable due to conflict with either of " + CONFLICTING_MOUNT_SERVICES.get(mountService.getClass().getName());
throw new ConflictingMountServiceException(msg);
}
diff --git a/src/main/java/org/cryptomator/ui/keyloading/hub/HubConfig.java b/src/main/java/org/cryptomator/ui/keyloading/hub/HubConfig.java
index eefad55a2f..0e7a6cc3ab 100644
--- a/src/main/java/org/cryptomator/ui/keyloading/hub/HubConfig.java
+++ b/src/main/java/org/cryptomator/ui/keyloading/hub/HubConfig.java
@@ -20,7 +20,7 @@ public class HubConfig {
public String devicesResourceUrl;
/**
- * A collection of String template processors to construct URIs related to this Hub instance.
+ * A collection of functions to construct URIs related to this Hub instance.
*/
@JsonIgnore
public final URIProcessors URIs = new URIProcessors();
@@ -52,8 +52,7 @@ public class URIProcessors {
/**
* Resolves paths relative to the <code>/api/</code> endpoint of this Hub instance.
*/
- public final StringTemplate.Processor<URI, RuntimeException> API = template -> {
- var path = template.interpolate();
+ public URI getApi(String path) {
var relPath = path.startsWith("/") ? path.substring(1) : path;
return getApiBaseUrl().resolve(relPath);
};
diff --git a/src/main/java/org/cryptomator/ui/keyloading/hub/ReceiveKeyController.java b/src/main/java/org/cryptomator/ui/keyloading/hub/ReceiveKeyController.java
index 3bfb4ec8ea..3353d78dd6 100644
--- a/src/main/java/org/cryptomator/ui/keyloading/hub/ReceiveKeyController.java
+++ b/src/main/java/org/cryptomator/ui/keyloading/hub/ReceiveKeyController.java
@@ -88,7 +88,7 @@ public void receiveKey() {
* STEP 0 (Request): GET /api/config
*/
private void requestApiConfig() {
- var configUri = hubConfig.URIs.API."config";
+ var configUri = hubConfig.URIs.getApi("config");
var request = HttpRequest.newBuilder(configUri) //
.GET() //
.timeout(REQ_TIMEOUT) //
@@ -122,7 +122,7 @@ private void receivedApiConfig(HttpResponse<String> response) {
* STEP 1 (Request): GET user key for this device
*/
private void requestDeviceData() {
- var deviceUri = hubConfig.URIs.API."devices/\{deviceId}";
+ var deviceUri = hubConfig.URIs.getApi("devices/" + deviceId);
var request = HttpRequest.newBuilder(deviceUri) //
.header("Authorization", "Bearer " + bearerToken) //
.GET() //
@@ -162,7 +162,7 @@ private void needsDeviceRegistration() {
* STEP 2 (Request): GET vault key for this user
*/
private void requestVaultMasterkey(String encryptedUserKey) {
- var vaultKeyUri = hubConfig.URIs.API."vaults/\{vaultId}/access-token";
+ var vaultKeyUri = hubConfig.URIs.getApi("vaults/" + vaultId + "/access-token");
var request = HttpRequest.newBuilder(vaultKeyUri) //
.header("Authorization", "Bearer " + bearerToken) //
.GET() //
@@ -205,7 +205,7 @@ private void receivedBothEncryptedKeys(String encryptedVaultKey, String encrypte
*/
@Deprecated
private void requestLegacyAccessToken() {
- var legacyAccessTokenUri = hubConfig.URIs.API."vaults/\{vaultId}/keys/\{deviceId}";
+ var legacyAccessTokenUri = hubConfig.URIs.getApi("vaults/" + vaultId + "/keys/" + deviceId);
var request = HttpRequest.newBuilder(legacyAccessTokenUri) //
.header("Authorization", "Bearer " + bearerToken) //
.GET() //
diff --git a/src/main/java/org/cryptomator/ui/keyloading/hub/RegisterDeviceController.java b/src/main/java/org/cryptomator/ui/keyloading/hub/RegisterDeviceController.java
index b00d49874e..d711ff86ef 100644
--- a/src/main/java/org/cryptomator/ui/keyloading/hub/RegisterDeviceController.java
+++ b/src/main/java/org/cryptomator/ui/keyloading/hub/RegisterDeviceController.java
@@ -115,7 +115,7 @@ public void register() {
workInProgress.set(true);
- var userReq = HttpRequest.newBuilder(hubConfig.URIs.API."users/me") //
+ var userReq = HttpRequest.newBuilder(hubConfig.URIs.getApi("users/me")) //
.GET() //
.timeout(REQ_TIMEOUT) //
.header("Authorization", "Bearer " + bearerToken) //
@@ -143,7 +143,7 @@ public void register() {
var now = Instant.now().toString();
var dto = new CreateDeviceDto(deviceId, deviceNameField.getText(), BaseEncoding.base64().encode(deviceKeyPair.getPublic().getEncoded()), "DESKTOP", jwe.serialize(), now);
var json = toJson(dto);
- var deviceUri = hubConfig.URIs.API."devices/\{deviceId}";
+ var deviceUri = hubConfig.URIs.getApi("devices/" + deviceId);
var putDeviceReq = HttpRequest.newBuilder(deviceUri) //
.PUT(HttpRequest.BodyPublishers.ofString(json, StandardCharsets.UTF_8)) //
.timeout(REQ_TIMEOUT) //
@@ -164,7 +164,7 @@ public void register() {
private void migrateLegacyDevices(ECPublicKey userPublicKey) {
try {
// GET legacy access tokens
- var getUri = hubConfig.URIs.API."devices/\{deviceId}/legacy-access-tokens";
+ var getUri = hubConfig.URIs.getApi("devices/" + deviceId + "/legacy-access-tokens");
var getReq = HttpRequest.newBuilder(getUri).GET().timeout(REQ_TIMEOUT).header("Authorization", "Bearer " + bearerToken).build();
var getRes = httpClient.send(getReq, HttpResponse.BodyHandlers.ofString(StandardCharsets.UTF_8));
if (getRes.statusCode() != 200) {
@@ -185,12 +185,12 @@ private void migrateLegacyDevices(ECPublicKey userPublicKey) {
LOG.warn("Failed to decrypt legacy access token for vault {}. Skipping migration.", entry.getKey());
}
}).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
- var postUri = hubConfig.URIs.API."users/me/access-tokens";
+ var postUri = hubConfig.URIs.getApi("users/me/access-tokens");
var postBody = JSON.writer().writeValueAsString(newAccessTokens);
var postReq = HttpRequest.newBuilder(postUri).POST(HttpRequest.BodyPublishers.ofString(postBody)).timeout(REQ_TIMEOUT).header("Authorization", "Bearer " + bearerToken).build();
var postRes = httpClient.send(postReq, HttpResponse.BodyHandlers.ofString(StandardCharsets.UTF_8));
if (postRes.statusCode() != 200) {
- throw new IOException(STR."Unexpected response from POST \{postUri}: \{postRes.statusCode()}");
+ throw new IOException("Unexpected response from POST " + postUri + ": " + postRes.statusCode());
}
} catch (IOException e) {
// log and ignore: this is merely a best-effort attempt of migrating legacy devices. Failure is uncritical as this is merely a convenience feature.
diff --git a/pom.xml b/pom.xml
index 3290b3121d..0812419af1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -26,7 +26,7 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <project.jdk.version>22</project.jdk.version>
+ <project.jdk.version>23</project.jdk.version>
<!-- Group IDs of jars that need to stay on the class path for now -->
<!-- remove them, as soon they got modularized or support is dropped (i.e., WebDAV) -->

View File

@ -11,14 +11,14 @@
stdenv.mkDerivation (finalAttrs: {
pname = "deltatouch";
version = "1.6.0";
version = "1.8.0";
src = fetchFromGitea {
domain = "codeberg.org";
owner = "lk108";
repo = "deltatouch";
rev = "v${finalAttrs.version}";
hash = "sha256-mOs5WlWOkH9A+BZK6hvKq/JKS4k8tzvvov4CYFHyMfA=";
hash = "sha256-HVq6eiy2ufFg96oXwnSpv7fMDL1haWyIelBzFd7pfk0=";
fetchSubmodules = true;
};

View File

@ -3,25 +3,25 @@
"alpha": {
"experimental": {
"candidateHashFilenames": [
"factorio_linux_2.0.16.tar.xz"
"factorio_linux_2.0.20.tar.xz"
],
"name": "factorio_alpha_x64-2.0.16.tar.xz",
"name": "factorio_alpha_x64-2.0.20.tar.xz",
"needsAuth": true,
"sha256": "9828ae257a3b2f95de2dae2f262e9d8d6b85f356911449166ceef1472d231e6d",
"sha256": "999247294680f67b29ea4758014e8337069dccc19f8f3808a99f45d8213972b0",
"tarDirectory": "x64",
"url": "https://factorio.com/get-download/2.0.16/alpha/linux64",
"version": "2.0.16"
"url": "https://factorio.com/get-download/2.0.20/alpha/linux64",
"version": "2.0.20"
},
"stable": {
"candidateHashFilenames": [
"factorio_linux_2.0.15.tar.xz"
"factorio_linux_2.0.20.tar.xz"
],
"name": "factorio_alpha_x64-2.0.15.tar.xz",
"name": "factorio_alpha_x64-2.0.20.tar.xz",
"needsAuth": true,
"sha256": "39df353fe8f14394d2618b9627659f3c0c16922362708e681f006083a8f5163e",
"sha256": "999247294680f67b29ea4758014e8337069dccc19f8f3808a99f45d8213972b0",
"tarDirectory": "x64",
"url": "https://factorio.com/get-download/2.0.15/alpha/linux64",
"version": "2.0.15"
"url": "https://factorio.com/get-download/2.0.20/alpha/linux64",
"version": "2.0.20"
}
},
"demo": {
@ -51,51 +51,51 @@
"expansion": {
"experimental": {
"candidateHashFilenames": [
"factorio-space-age_linux_2.0.16.tar.xz"
"factorio-space-age_linux_2.0.20.tar.xz"
],
"name": "factorio_expansion_x64-2.0.16.tar.xz",
"name": "factorio_expansion_x64-2.0.20.tar.xz",
"needsAuth": true,
"sha256": "32ae1b8f525148b3bb1f68e41b398543c2b0da29734f9f3b4f9509a86c64ecf4",
"sha256": "cbc6e70985295b078fec8b9ce759fbf8a68ac157fcc7bbead934a9c3108d997f",
"tarDirectory": "x64",
"url": "https://factorio.com/get-download/2.0.16/expansion/linux64",
"version": "2.0.16"
"url": "https://factorio.com/get-download/2.0.20/expansion/linux64",
"version": "2.0.20"
},
"stable": {
"candidateHashFilenames": [
"factorio-space-age_linux_2.0.15.tar.xz"
"factorio-space-age_linux_2.0.20.tar.xz"
],
"name": "factorio_expansion_x64-2.0.15.tar.xz",
"name": "factorio_expansion_x64-2.0.20.tar.xz",
"needsAuth": true,
"sha256": "09de33402fee3dbae9d0207409f05a19e2fa8019b53d3de96557d2ec904e10f3",
"sha256": "cbc6e70985295b078fec8b9ce759fbf8a68ac157fcc7bbead934a9c3108d997f",
"tarDirectory": "x64",
"url": "https://factorio.com/get-download/2.0.15/expansion/linux64",
"version": "2.0.15"
"url": "https://factorio.com/get-download/2.0.20/expansion/linux64",
"version": "2.0.20"
}
},
"headless": {
"experimental": {
"candidateHashFilenames": [
"factorio-headless_linux_2.0.16.tar.xz",
"factorio_headless_x64_2.0.16.tar.xz"
"factorio-headless_linux_2.0.20.tar.xz",
"factorio_headless_x64_2.0.20.tar.xz"
],
"name": "factorio_headless_x64-2.0.16.tar.xz",
"name": "factorio_headless_x64-2.0.20.tar.xz",
"needsAuth": false,
"sha256": "f2069b4b746500d945eeb67ef7eda5e7aebe7fd0294c2af4e117af22a3bbaea3",
"sha256": "c4a901f2f1dbedbb41654560db4c6fab683a30c20334e805d4ef740c0416515a",
"tarDirectory": "x64",
"url": "https://factorio.com/get-download/2.0.16/headless/linux64",
"version": "2.0.16"
"url": "https://factorio.com/get-download/2.0.20/headless/linux64",
"version": "2.0.20"
},
"stable": {
"candidateHashFilenames": [
"factorio-headless_linux_2.0.15.tar.xz",
"factorio_headless_x64_2.0.15.tar.xz"
"factorio-headless_linux_2.0.20.tar.xz",
"factorio_headless_x64_2.0.20.tar.xz"
],
"name": "factorio_headless_x64-2.0.15.tar.xz",
"name": "factorio_headless_x64-2.0.20.tar.xz",
"needsAuth": false,
"sha256": "70b441cb807811a60586c01107248c1d8d7ae043bd1f23675fc924fbaaa538d8",
"sha256": "c4a901f2f1dbedbb41654560db4c6fab683a30c20334e805d4ef740c0416515a",
"tarDirectory": "x64",
"url": "https://factorio.com/get-download/2.0.15/headless/linux64",
"version": "2.0.15"
"url": "https://factorio.com/get-download/2.0.20/headless/linux64",
"version": "2.0.20"
}
}
}

View File

@ -63,13 +63,13 @@ let
in
freecad-utils.makeCustomizable (stdenv.mkDerivation (finalAttrs: {
pname = "freecad";
version = "1.0rc4";
version = "1.0.0";
src = fetchFromGitHub {
owner = "FreeCAD";
repo = "FreeCAD";
rev = finalAttrs.version;
hash = "sha256-b7aeVQkgdsDRdnVIr+5ZNuWAm6GLH7sepa8kFp2Zm2U=";
hash = "sha256-u7RYSImUMAgKaAQSAGCFha++RufpZ/QuHAirbSFOUCI=";
fetchSubmodules = true;
};

View File

@ -18,25 +18,25 @@
stdenv.mkDerivation (finalAttrs: {
pname = "gale";
version = "0.8.11";
version = "1.1.4";
src = fetchFromGitHub {
owner = "Kesomannen";
repo = "gale";
rev = "refs/tags/${finalAttrs.version}";
hash = "sha256-PXK64WD3vb3uVxBFNU+LiGOipUjIAKW9RLWr1o4RigU=";
hash = "sha256-yAfQuLfucz522ln0YNMy8nppp2jk6tGJnP/WhK7JdhI=";
};
npmDeps = fetchNpmDeps {
name = "${finalAttrs.pname}-${finalAttrs.version}-npm-deps";
inherit (finalAttrs) src;
hash = "sha256-W0ryt3WH/3SireaOHa9i1vKpuokzIsDlD8R9Fnd0s4k=";
hash = "sha256-xKg/ABUdtylFpT3EisXVvyv38++KjucrZ+s3/fFjzmM=";
};
cargoDeps = rustPlatform.fetchCargoTarball {
inherit (finalAttrs) pname version src;
sourceRoot = "${finalAttrs.src.name}/${finalAttrs.cargoRoot}";
hash = "sha256-zXZkjSYN6/qNwBh+xUgJPWQvduIUSMVSt/XGbocKTwg=";
hash = "sha256-u7UbC9TyEQwYpcVWt8/NsweDNWbQi6NuD9ay9gmMDjg=";
};
cargoRoot = "src-tauri";

View File

@ -46,6 +46,8 @@ stdenv.mkDerivation (finalAttrs: {
runHook postInstall
'';
passthru.updateScript = ./update.sh;
meta = {
changelog = "https://github.com/github/gh-copilot/releases/tag/v${finalAttrs.version}";
description = "Ask for assistance right in your terminal";

View File

@ -0,0 +1,43 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p curl jq common-updater-scripts nix-prefetch
set -euo pipefail
set -x
ROOT="$(dirname "$(readlink -f "$0")")"
NIX_DRV="$ROOT/package.nix"
if [ ! -f "$NIX_DRV" ]; then
echo "ERROR: cannot find gh-copilot in $ROOT"
exit 1
fi
fetch_arch() {
VER="$1"; ARCH="$2"
URL="https://github.com/github/gh-copilot/releases/download/v${VER}/${ARCH}";
nix-prefetch "{ stdenv, fetchurl }:
stdenv.mkDerivation rec {
pname = \"gh-copilot\"; version = \"${VER}\";
src = fetchurl { url = \"$URL\"; };
}
"
}
replace_sha() {
# https://stackoverflow.com/a/38470458/22235705
sed -rziE "s@($1[^\n]*\n[^\n]*hash = )\"sha256-.{44}\";@\1\"$2\";@" "$NIX_DRV"
}
VERE_VER=$(curl https://api.github.com/repos/github/gh-copilot/releases/latest | jq .tag_name)
VERE_VER=$(echo $VERE_VER | sed -e 's/^"v//' -e 's/"$//') # transform "v1.0.2" into 1.0.2
VERE_LINUX_X64_SHA256=$(fetch_arch "$VERE_VER" "linux-amd64")
VERE_LINUX_AARCH64_SHA256=$(fetch_arch "$VERE_VER" "linux-arm64")
VERE_DARWIN_X64_SHA256=$(fetch_arch "$VERE_VER" "darwin-amd64")
VERE_DARWIN_AARCH64_SHA256=$(fetch_arch "$VERE_VER" "darwin-arm64")
sed -i "s/version = \".*\"/version = \"$VERE_VER\"/" "$NIX_DRV"
replace_sha "linux-amd64" "$VERE_LINUX_X64_SHA256"
replace_sha "linux-arm64" "$VERE_LINUX_AARCH64_SHA256"
replace_sha "darwin-amd64" "$VERE_DARWIN_X64_SHA256"
replace_sha "darwin-arm64" "$VERE_DARWIN_AARCH64_SHA256"

View File

@ -45,12 +45,12 @@ stdenv.mkDerivation (finalAttrs: {
docbook_xml_dtd_43
vala
wayland-scanner
wayland-protocols
];
buildInputs = [
wayland
gtk4
wayland
wayland-protocols
];
mesonFlags = [

View File

@ -6,49 +6,48 @@
testers,
nix-update-script,
versionCheckHook,
glibcLocales,
withPostgresAdapter ? true,
withBigQueryAdapter ? true,
}:
python3Packages.buildPythonApplication rec {
pname = "harlequin";
version = "1.25.0";
version = "1.25.2";
pyproject = true;
src = fetchFromGitHub {
owner = "tconbeer";
repo = "harlequin";
rev = "refs/tags/v${version}";
hash = "sha256-iRl91GqYigD6t0aVVShBg835yhlPxgfZcQCdAGUoc1k=";
hash = "sha256-ov9pMvFzJAMfOM7JeSgnp6dZ424GiRaH7W5OCKin9Jk=";
};
build-system = with python3Packages; [
poetry-core
];
pythonRelaxDeps = [ "textual" ];
build-system = with python3Packages; [ poetry-core ];
nativeBuildInputs = [ glibcLocales ];
dependencies =
with python3Packages;
[
click
duckdb
importlib-metadata
numpy
packaging
platformdirs
questionary
rich-click
sqlfmt
textual
textual-fastdatatable
textual-textarea
click
rich-click
duckdb
sqlfmt
platformdirs
importlib-metadata
tomlkit
questionary
numpy
packaging
]
++ lib.optionals withPostgresAdapter [ harlequin-postgres ]
++ lib.optionals withBigQueryAdapter [ harlequin-bigquery ];
pythonRelaxDeps = [
"textual"
];
pythonImportsCheck = [
"harlequin"
"harlequin_duckdb"
@ -60,17 +59,37 @@ python3Packages.buildPythonApplication rec {
updateScript = nix-update-script { };
};
nativeCheckInputs = [
versionCheckHook
preCheck = ''
export HOME=$(mktemp -d)
'';
nativeCheckInputs =
[
versionCheckHook
]
++ (with python3Packages; [
pytest-asyncio
pytestCheckHook
]);
disabledTests = [
# Tests require network access
"test_connect_extensions"
"test_connect_prql"
];
disabledTestPaths = [
# Tests require more setup
"tests/functional_tests/"
];
meta = {
description = "The SQL IDE for Your Terminal";
homepage = "https://harlequin.sh";
mainProgram = "harlequin";
changelog = "https://github.com/tconbeer/harlequin/releases/tag/v${version}";
license = lib.licenses.mit;
mainProgram = "harlequin";
maintainers = with lib.maintainers; [ pcboy ];
platforms = lib.platforms.unix;
changelog = "https://github.com/tconbeer/harlequin/releases/tag/v${version}";
};
}

View File

@ -3,6 +3,7 @@
stdenv,
fetchFromGitHub,
rustPlatform,
nix-update-script,
cargo,
rustc,
meson,
@ -18,18 +19,18 @@
stdenv.mkDerivation (finalAttrs: {
pname = "keypunch";
version = "3.1";
version = "4.0";
src = fetchFromGitHub {
owner = "bragefuglseth";
repo = "keypunch";
rev = "refs/tags/v${finalAttrs.version}";
hash = "sha256-2S5S7SvMYdEOOrF3SiwpbijsgHcSIyWEVJB41jbrn1A=";
hash = "sha256-Xd4fzreComOUnoJ6l2ncMWn6DlUeRCM+YwApilhFd/8=";
};
cargoDeps = rustPlatform.fetchCargoTarball {
inherit (finalAttrs) pname version src;
hash = "sha256-sD+wy1D6nl333PxlDz73YtnfBEmDzb+kNZkZI8JbfSg=";
hash = "sha256-agFOxSZBi8f0zEPd+ha5c3IAbSH2jHfUx2iNeHFs9jI=";
};
strictDeps = true;
@ -53,12 +54,19 @@ stdenv.mkDerivation (finalAttrs: {
buildInputs = [ libadwaita ];
passthru = {
updateScript = nix-update-script { };
};
meta = {
description = "Practice your typing skills";
homepage = "https://github.com/bragefuglseth/keypunch";
license = lib.licenses.gpl3Plus;
mainProgram = "keypunch";
maintainers = with lib.maintainers; [ tomasajt ];
maintainers = with lib.maintainers; [
tomasajt
getchoo
];
platforms = lib.platforms.linux;
};
})

View File

@ -24,7 +24,7 @@
rustPlatform.buildRustPackage rec {
pname = "meli";
version = "0.8.7";
version = "0.8.8";
src = fetchzip {
urls = [
@ -32,20 +32,21 @@ rustPlatform.buildRustPackage rec {
"https://codeberg.org/meli/meli/archive/v${version}.tar.gz"
"https://github.com/meli/meli/archive/refs/tags/v${version}.tar.gz"
];
hash = "sha256-2+JIehi2wuWdARbhFPvNPIJ9ucZKWjNSORszEG9lyjw=";
hash = "sha256-XOUOIlFKxI7eL7KEEfLyYTsNqc2lc9sJNt9RqPavuW8=";
};
cargoHash = "sha256-ZVhUkpiiPKbWcf56cXFgn3Nyr63STHLlD7mpYEetNIY=";
cargoPatches = [
cargoPatches = [
(fetchpatch {
# https://github.com/NixOS/nixpkgs/issues/332957#issuecomment-2278578811
name = "fix-rust-1.80-compat.patch";
url = "https://git.meli-email.org/meli/meli/commit/6b05279a0987315c401516cac8ff0b016a8e02a8.patch";
hash = "sha256-mh8H7wmHMXAe01UTvdY8vJeeLyH6ZFwylNLFFL+4LO0=";
# https://git.meli-email.org/meli/meli/issues/522
# https://git.meli-email.org/meli/meli/issues/524
name = "fix test_fd_locks() on platforms without OFD support";
url = "https://git.meli-email.org/meli/meli/commit/b7e215f9c238f8364e2a1f0d10ac668d0cfe91ad.patch";
hash = "sha256-227vnFuxhQ0Hh5A/J8y7Ei89AxbNXReMn3c3EVRN4Tc=";
})
];
cargoHash = "sha256-SMvpmWEHUWo0snR/DiUmfZJnXy1QtVOowO8CErM9Xjg=";
# Needed to get openssl-sys to use pkg-config
OPENSSL_NO_VENDOR=1;
@ -81,9 +82,7 @@ rustPlatform.buildRustPackage rec {
'';
checkFlags = [
"--skip=conf::tests::test_config_parse" # panicking due to sandbox
"--skip=utils::tests::test_shellexpandtrait_impls" # panicking due to sandbox
"--skip=utils::tests::test_shellexpandtrait" # panicking due to sandbox
"--skip=test_cli_subcommands" # panicking due to sandbox
];
meta = with lib; {
@ -93,6 +92,6 @@ rustPlatform.buildRustPackage rec {
homepage = "https://meli.delivery";
license = licenses.gpl3;
maintainers = with maintainers; [ _0x4A6F matthiasbeyer ];
platforms = platforms.linux;
platforms = platforms.linux ++ platforms.darwin;
};
}

View File

@ -8,13 +8,13 @@
stdenv.mkDerivation (finalAttrs: {
pname = "melodeon";
version = "0.4.2";
version = "0.4.3";
src = fetchFromGitHub {
owner = "CDrummond";
repo = "melodeon";
rev = "refs/tags/${finalAttrs.version}";
hash = "sha256-CwJd77FAEcfCvxHeh3V1SIsgSam3S5WtpSLj9WrOOyI=";
hash = "sha256-Og0o4Iy0mvGE7H5IY9h7uo7w64jZjXtdsGd4ApYO8oU=";
fetchSubmodules = true;
};

View File

@ -1,37 +1,45 @@
{
lib,
python3Packages,
fetchFromGitLab,
appstream,
blueprint-compiler,
desktop-file-utils,
fetchFromGitLab,
glib,
gobject-introspection,
gtk4,
libadwaita,
meson,
ninja,
nix-update-script,
pandoc,
pkg-config,
python3Packages,
webkitgtk_6_0,
wrapGAppsHook4,
}:
python3Packages.buildPythonApplication rec {
pname = "morphosis";
version = "1.3";
version = "1.4.1";
pyproject = false;
src = fetchFromGitLab {
domain = "gitlab.gnome.org";
owner = "World";
repo = "morphosis";
rev = "v${version}";
hash = "sha256-JEZFgON4QkxHDbWSZbDNLpIFctt8mDHdGVVu3Q+WH4U=";
rev = "refs/tags/v${version}";
hash = "sha256-ZpxenBqC5qr7yNwjld0u7gSBQfL7Kpa4FWE9gkzG0hg=";
};
strictDeps = true;
nativeBuildInputs = [
appstream
blueprint-compiler
desktop-file-utils
glib # For `glib-compile-schemas`
gobject-introspection
gtk4 # For `gtk-update-icon-cache`
meson
ninja
pkg-config
@ -47,13 +55,17 @@ python3Packages.buildPythonApplication rec {
dontWrapGApps = true;
makeWrapperArgs = [
''''${gappsWrapperArgs[@]}''
"\${gappsWrapperArgs[@]}"
"--prefix PATH : ${lib.makeBinPath [ pandoc ]}"
];
passthru = {
updateScript = nix-update-script { };
};
meta = {
description = "Convert your documents";
homepage = "https://gitlab.gnome.org/Monster/morphosis";
homepage = "https://gitlab.gnome.org/World/morphosis";
license = lib.licenses.gpl3Only;
maintainers = with lib.maintainers; [ getchoo ];
mainProgram = "morphosis";

View File

@ -2,13 +2,13 @@
buildGoModule rec {
pname = "plumber";
version = "2.7.1";
version = "2.8.0";
src = fetchFromGitHub {
owner = "streamdal";
repo = pname;
rev = "v${version}";
hash = "sha256-L8vpaqt9yCIP3TLPSNUrOC6hXc71mzl4lqiaoNS6zls=";
hash = "sha256-38tLlFeQtXIiHuQa9c/IfIYbyf+GrOsERAdWQnHSeck=";
};
vendorHash = null;

2291
pkgs/by-name/po/porn-vault/Cargo.lock generated Normal file

File diff suppressed because it is too large

View File

@ -0,0 +1,109 @@
diff --git a/server/binaries/ffmpeg.ts b/server/binaries/ffmpeg.ts
index abb4de4f..cdcc0a02 100644
--- a/server/binaries/ffmpeg.ts
+++ b/server/binaries/ffmpeg.ts
@@ -8,6 +8,7 @@ import { getConfig } from "../config";
import { mkdirpAsync, rimrafAsync, statAsync } from "../utils/fs/async";
import { formatMessage, handleError, logger } from "../utils/logger";
import { generateTimestampsAtIntervals } from "../utils/misc";
+import { tempPath } from "server/utils/path";
export async function takeScreenshot(
inPath: string,
@@ -112,7 +113,7 @@ export async function generatePreview(
): Promise<void> {
logger.debug(`Creating 100 small previews for ${sceneId}.`);
- const tmpFolder = resolve("tmp", "preview", sceneId);
+ const tmpFolder = resolve(tempPath, "preview", sceneId);
const timestamps = generateTimestampsAtIntervals(PREVIEW_COUNT, durationSecs * 1000, {
startPercentage: 2,
diff --git a/server/database/index.ts b/server/database/index.ts
index 80ff6432..c6feb11d 100755
--- a/server/database/index.ts
+++ b/server/database/index.ts
@@ -15,7 +15,7 @@ import Studio from "../types/studio";
import SceneView from "../types/watch";
import { mkdirpSync } from "../utils/fs/async";
import { logger } from "../utils/logger";
-import { libraryPath } from "../utils/path";
+import { libraryPath, tempPath } from "../utils/path";
import { Izzy } from "./internal";
export function formatCollectionName(name: string) {
@@ -261,11 +261,11 @@ export async function loadStore<T extends { _id: string }>(
}
export async function loadStores(): Promise<void> {
- if (!existsSync("tmp")) {
- logger.info("Creating temporary directory 'tmp'");
- mkdirpSync("tmp/");
+ if (!existsSync(tempPath)) {
+ logger.info(`Creating temporary directory '${tempPath}'`);
+ mkdirpSync(tempPath);
} else {
- logger.debug("Temporary directory 'tmp' already exists");
+ logger.debug(`Temporary directory '${tempPath}' already exists`);
}
const crossReferencePath = libraryPath("cross_references.db");
diff --git a/server/graphql/mutations/image.ts b/server/graphql/mutations/image.ts
index 6554f145..a7853eea 100644
--- a/server/graphql/mutations/image.ts
+++ b/server/graphql/mutations/image.ts
@@ -20,7 +20,7 @@ import Studio from "../../types/studio";
import { mapAsync } from "../../utils/async";
import { copyFileAsync, statAsync, unlinkAsync } from "../../utils/fs/async";
import { logger } from "../../utils/logger";
-import { getFolderPartition, libraryPath } from "../../utils/path";
+import { getFolderPartition, libraryPath, tempPath } from "../../utils/path";
import { getExtension, normalizeName } from "../../utils/string";
import { Dictionary, isBoolean, isNumber, isString } from "../../utils/types";
import { clearCaches } from "../datasources";
@@ -110,7 +110,7 @@ export default {
const image = new Image(imageName);
- const outPath = `tmp/${image._id}${ext}`;
+ const outPath = resolve(tempPath, `${image._id}${ext}`);
logger.debug(`Getting file...`);
diff --git a/server/routes/scene.ts b/server/routes/scene.ts
index 601de160..fe8b8de5 100644
--- a/server/routes/scene.ts
+++ b/server/routes/scene.ts
@@ -16,7 +16,7 @@ import Scene from "../types/scene";
import { mkdirpAsync, readFileAsync, rimrafAsync } from "../utils/fs/async";
import { handleError, logger } from "../utils/logger";
import { generateTimestampsAtIntervals } from "../utils/misc";
-import { getFolderPartition, libraryPath } from "../utils/path";
+import { getFolderPartition, libraryPath, tempPath } from "../utils/path";
import { IMAGE_CACHE_CONTROL } from "./media";
/* function streamTranscode(
@@ -94,7 +94,7 @@ export async function attachScenePreviewGrid(scene: Scene): Promise<string | nul
return null;
}
- const gridFolder = path.resolve("tmp", "grid");
+ const gridFolder = path.resolve(tempPath, "grid");
const tmpFolder = path.resolve(gridFolder, "thumbs", randomUUID());
await mkdirpAsync(tmpFolder);
diff --git a/server/utils/path.ts b/server/utils/path.ts
index 05619e93..64964de8 100644
--- a/server/utils/path.ts
+++ b/server/utils/path.ts
@@ -5,6 +5,7 @@ import { getConfig } from "../config";
import { mkdirpSync } from "./fs/async";
const configFolder = process.env.PV_CONFIG_FOLDER || process.cwd();
+export const tempPath = process.env.CACHE_DIRECTORY ?? "tmp";
export function libraryPath(str: string): string {
return resolve(getConfig().persistence.libraryPath, "library", str);
--
2.47.0

View File

@ -0,0 +1,114 @@
{
fetchFromGitLab,
fetchurl,
rustPlatform,
lib,
pnpm_9,
stdenvNoCC,
nodejs_22,
ffmpeg,
imagemagick,
makeWrapper,
autoPatchelfHook,
writeShellApplication,
}:
let
izzy = rustPlatform.buildRustPackage rec {
pname = "izzy";
version = "2.0.1";
src = fetchFromGitLab {
owner = "porn-vault";
repo = "izzy";
rev = version;
hash = "sha256-UauA5mZi5a5QF7d17pKSzvyaWbeSuFjBrXEAxR3wNkk=";
};
postPatch = ''
ln -s ${./Cargo.lock} Cargo.lock
'';
cargoLock.lockFile = ./Cargo.lock;
meta = {
description = "Rust In-Memory K-V Store with Redis-Style File Persistence and Secondary Indices";
homepage = "https://gitlab.com/porn-vault/izzy";
license = lib.licenses.gpl3Plus;
maintainers = [ lib.maintainers.luNeder ];
mainProgram = "izzy";
};
};
pnpm = pnpm_9;
nodejs = nodejs_22;
in
stdenvNoCC.mkDerivation (finalAttrs: {
pname = "porn-vault";
version = "0.30.0-rc.11";
src = fetchFromGitLab {
owner = "porn-vault";
repo = "porn-vault";
rev = "4c6182c5825d85193cf67cb7cd927da2feaaecdb";
hash = "sha256-wQ3dqLc0l2BmLGDYrbWxX2mPwO/Tqz0fY/fOQTEUv24=";
};
pnpmDeps = pnpm.fetchDeps {
inherit (finalAttrs) pname version src;
hash = "sha256-Xr9tRiP1hW+aFs9FnPvPkeJ0/LtJI57cjWY5bZQaRTQ=";
};
nativeBuildInputs = [
nodejs
pnpm.configHook
makeWrapper
];
patches = [
./allow-use-of-systemd-temp-path.patch
];
postPatch = ''
substituteInPlace server/binaries/izzy.ts \
--replace-fail 'chmodSync(izzyPath, "111");' ""
'';
buildPhase = ''
runHook preBuild
pnpm build
runHook postBuild
'';
installPhase = ''
runHook preInstall
install -Dm644 package.json config.example.json remix.config.js -t $out/share/porn-vault
cp -R public dist build node_modules graphql locale -t $out/share/porn-vault
runHook postInstall
'';
preFixup = ''
makeWrapper "${lib.getExe nodejs}" "$out/bin/porn-vault" \
--chdir "$out/share/porn-vault" \
--add-flags "dist/index.js" \
--set-default IZZY_PATH "${lib.getExe izzy}" \
--prefix PATH : "${
lib.makeBinPath [
ffmpeg
imagemagick
izzy
]
}"
'';
meta = {
description = "Porn-Vault is a self hosted organizer for adult videos and imagery.";
homepage = "https://gitlab.com/porn-vault/porn-vault";
license = lib.licenses.gpl3Plus;
maintainers = [ lib.maintainers.luNeder ];
inherit (nodejs.meta) platforms;
mainProgram = "porn-vault";
};
})

View File

@ -1,38 +1,52 @@
{ stdenv, lib, fetchFromGitHub, fetchpatch, cmake, pkg-config, openssl, gtest }:
{
abseil-cpp,
cmake,
fetchFromGitHub,
stdenv,
lib,
pkg-config,
openssl,
}:
stdenv.mkDerivation rec {
let
cxxStandard = "17";
in
stdenv.mkDerivation (finalAttrs: {
pname = "s2geometry";
version = "0.9.0";
version = "0.11.1";
src = fetchFromGitHub {
owner = "google";
repo = "s2geometry";
rev = "v${version}";
sha256 = "1mx61bnn2f6bd281qlhn667q6yfg1pxzd2js88l5wpkqlfzzhfaz";
rev = "refs/tags/v${finalAttrs.version}";
sha256 = "sha256-VjgGcGgQlKmjUq+JU0JpyhOZ9pqwPcBUFEPGV9XoHc0=";
};
patches = [
# Fix build https://github.com/google/s2geometry/issues/165
(fetchpatch {
url = "https://github.com/google/s2geometry/commit/a4dddf40647c68cd0104eafc31e9c8fb247a6308.patch";
sha256 = "0fp3w4bg7pgf5vv4vacp9g06rbqzhxc2fg6i5appp93q6phiinvi";
})
nativeBuildInputs = [
cmake
pkg-config
];
nativeBuildInputs = [ cmake pkg-config ];
buildInputs = [ openssl gtest ];
cmakeFlags = [
(lib.cmakeFeature "CMAKE_CXX_STANDARD" cxxStandard)
# incompatible with our version of gtest
(lib.cmakeBool "BUILD_TESTS" false)
];
# Default of C++11 is too low for gtest.
# In newer versions of s2geometry this can be done with cmakeFlags.
postPatch = ''
substituteInPlace CMakeLists.txt --replace "CMAKE_CXX_STANDARD 11" "CMAKE_CXX_STANDARD 14"
'';
buildInputs = [
openssl
];
propagatedBuildInputs = [
(abseil-cpp.override { inherit cxxStandard; })
];
meta = with lib; {
changelog = "https://github.com/google/s2geometry/releases/tag/${lib.removePrefix "refs/tags/" finalAttrs.src.rev}";
description = "Computational geometry and spatial indexing on the sphere";
homepage = "http://s2geometry.io/";
license = licenses.asl20;
maintainers = [ maintainers.Thra11 ];
platforms = platforms.linux;
};
}
})

View File

@ -15,13 +15,13 @@
stdenv.mkDerivation rec {
pname = "showmethekey";
version = "1.15.1";
version = "1.16.0";
src = fetchFromGitHub {
owner = "AlynxZhou";
repo = "showmethekey";
rev = "refs/tags/v${version}";
hash = "sha256-odlIgWFmhDqju7U5Y9q6apUEAqZUvMUA7/eU7LMltQs=";
hash = "sha256-eEbpQVfp1Q40+O7uZazKz8aHSPwfLBwEBemyd6jJAgs=";
};
nativeBuildInputs = [

View File

@ -35,20 +35,20 @@ let
in
stdenv.mkDerivation (finalAttrs: {
pname = "siyuan";
version = "3.1.8";
version = "3.1.13";
src = fetchFromGitHub {
owner = "siyuan-note";
repo = "siyuan";
rev = "v${finalAttrs.version}";
hash = "sha256-0sV3r3ETW/FeLJZQrkE95oqKeUKKiNA3vpOBPtHzeE8=";
hash = "sha256-+jlJTsGvElumUV1NdYed0XthmY1MFNqWMgDmTQObIA4=";
};
kernel = buildGo123Module {
name = "${finalAttrs.pname}-${finalAttrs.version}-kernel";
inherit (finalAttrs) src;
sourceRoot = "${finalAttrs.src.name}/kernel";
vendorHash = "sha256-hxXCq03wxVLONaztZVqLjlqQ/fZNlV2iDF5JIayb5YY=";
vendorHash = "sha256-uK++FoWCoeb05TyUhh0PK+wkTmzTko0K7oLodoGAWt8=";
patches = [
(substituteAll {
@ -90,7 +90,7 @@ stdenv.mkDerivation (finalAttrs: {
src
sourceRoot
;
hash = "sha256-ZaurLQlM81lCGdMwvl/1YDzpC/mU08Wlgx4/MAm6un4=";
hash = "sha256-uv3gahbSW81gHMx0sQoUbW4Oyzvo6iD5u1izX8vXkwA=";
};
sourceRoot = "${finalAttrs.src.name}/app";

View File

@ -4,7 +4,7 @@
fetchFromGitHub,
}:
let
version = "0.23.1";
version = "0.24.2";
in
python3Packages.buildPythonApplication {
pname = "toml-sort";
@ -15,7 +15,7 @@ python3Packages.buildPythonApplication {
owner = "pappasam";
repo = "toml-sort";
rev = "refs/tags/v${version}";
hash = "sha256-7V2WBZYAdsA4Tiy9/2UPOcThSNE3ZXM713j57KDCegk=";
hash = "sha256-PuTXG8RIN8Mui5J8DV0yxe94y6FNs4TgPyHjEhpcKqM=";
};
build-system = [ python3Packages.poetry-core ];

View File

@ -34,14 +34,14 @@ rustPlatform.buildRustPackage rec {
};
};
meta = with lib; {
meta = {
description = "A command-line implementation of WCHISPTool, for flashing ch32 MCUs";
homepage = "https://ch32-rs.github.io/wchisp/";
changelog = "https://github.com/ch32-rs/wchisp/releases/tag/v${version}";
license = with licenses; [ gpl2Only ];
platforms = with platforms; linux ++ darwin ++ windows;
license = with lib.licenses; [ gpl2Only ];
platforms = with lib.platforms; linux ++ darwin ++ windows;
broken = !stdenv.hostPlatform.isLinux;
maintainers = with maintainers; [ jwillikers ];
maintainers = with lib.maintainers; [ jwillikers ];
mainProgram = "wchisp";
};
}

View File

@ -1,11 +1,12 @@
{ stdenv
, lib
, fetchFromGitHub
, makeWrapper
, Foundation
, glew
, SDL2
, writeShellScript
{
stdenv,
lib,
fetchFromGitHub,
unstableGitUpdater,
makeWrapper,
glew,
SDL2,
writeShellScript,
}:
let
@ -24,13 +25,13 @@ let
in
stdenv.mkDerivation (finalAttrs: {
pname = "wipeout-rewrite";
version = "unstable-2023-08-13";
version = "0-unstable-2024-07-07";
src = fetchFromGitHub {
owner = "phoboslab";
repo = "wipeout-rewrite";
rev = "7a9f757a79d5c6806252cc1268bda5cdef463e23";
hash = "sha256-21IG9mZPGgRhVkT087G+Bz/zLkknkHKGmWjSpcLw8vE=";
rev = "a372b51f59217da4a5208352123a4acca800783c";
hash = "sha256-RJrWOTb5cZ2rSgO/J8qW5ifMJryBaK6MDtYwQZfghS0=";
};
enableParallelBuilding = true;
@ -42,10 +43,11 @@ stdenv.mkDerivation (finalAttrs: {
buildInputs = [
glew
SDL2
] ++ lib.optionals stdenv.hostPlatform.isDarwin [
Foundation
];
# Force this to empty, so assets are looked up in CWD instead of $out/bin
env.NIX_CFLAGS_COMPILE = "-DPATH_ASSETS=";
installPhase = ''
runHook preInstall
@ -60,12 +62,14 @@ stdenv.mkDerivation (finalAttrs: {
runHook postInstall
'';
meta = with lib; {
passthru.updateScript = unstableGitUpdater { };
meta = {
mainProgram = "wipegame";
description = "Re-implementation of the 1995 PSX game wipEout";
homepage = "https://github.com/phoboslab/wipeout-rewrite";
license = licenses.unfree;
maintainers = with maintainers; [ OPNA2608 ];
platforms = platforms.all;
license = lib.licenses.unfree;
maintainers = with lib.maintainers; [ OPNA2608 ];
platforms = lib.platforms.all;
};
})

View File

@ -16,13 +16,13 @@
rustPlatform.buildRustPackage rec {
pname = "wluma";
version = "4.4.0";
version = "4.5.1";
src = fetchFromGitHub {
owner = "maximbaz";
repo = "wluma";
rev = version;
sha256 = "sha256-Ow3SjeulYiHY9foXrmTtLK3F+B3+DrtDjBUke3bJeDw=";
sha256 = "sha256-5uSExmh1a88kZDly4VrHzI8YwfTDB8wm2mMGZyvKsk4=";
};
postPatch = ''
@ -38,7 +38,7 @@ rustPlatform.buildRustPackage rec {
'ExecStart=/usr/bin/wluma' 'ExecStart=${placeholder "out"}/bin/wluma'
'';
cargoHash = "sha256-BwduYAYIRxc40nn9kloHv+Dt8jLSZViweSYGL5e45YM=";
cargoHash = "sha256-hKxKEs88tB05AiWC/LuC/0jJ1RxeUnpp35A6UTQK4xw=";
nativeBuildInputs = [
makeWrapper

File diff suppressed because it is too large

View File

@ -90,13 +90,13 @@ let
in
rustPlatform.buildRustPackage rec {
pname = "zed-editor";
version = "0.161.2";
version = "0.162.3";
src = fetchFromGitHub {
owner = "zed-industries";
repo = "zed";
rev = "refs/tags/v${version}";
hash = "sha256-UEqlOiB7oNQcrLViPrk9ZCg4uUDYhRXjq0cHp/wclYk=";
hash = "sha256-B0iTJMVUpsSVZ0l2bdPnWc7YaZErKnxqiuhgYopmJ/4=";
};
patches =

View File

@ -9,7 +9,7 @@ stdenv.mkDerivation rec
owner = "alembic";
repo = "alembic";
rev = version;
sha256 = "sha256-PuVN5Ytls58G2BmwCHUHiMQ0rolH98Hlw/pp7cvpiAg=";
hash = "sha256-EJZvbaGP9aea/UvcXmFbI3Y2/xTkdqORdzyz5ch931A=";
};
# note: out is unused (but required for outputDoc anyway)

View File

@ -1,38 +1,48 @@
{ lib
, mkDerivation
, fetchFromGitHub
, cmake
, pkg-config
, curl
, qtbase
, qtlocation
, maplibre-gl-native
{
cmake,
fetchFromGitHub,
lib,
maplibre-native-qt,
qtbase,
qtpositioning,
stdenv,
}:
mkDerivation rec {
stdenv.mkDerivation (finalAttrs: {
pname = "mapbox-gl-qml";
version = "2.1.1";
version = "3.0.0";
src = fetchFromGitHub {
owner = "rinigus";
repo = "mapbox-gl-qml";
rev = version;
hash = "sha256-zZcD85nOZZ067FRvSuzE8lr2gyuVxpcZGp44D4ayc3Q=";
rev = "refs/tags/${finalAttrs.version}";
hash = "sha256-csk3Uo+AdP1R/T/9gWyWmYFIKuen2jy8wYN3GJznyRE=";
};
nativeBuildInputs = [ cmake pkg-config ];
buildInputs = [ curl qtlocation maplibre-gl-native ];
nativeBuildInputs = [
cmake
];
postPatch = ''
substituteInPlace src/CMakeLists.txt \
--replace ' ''${QT_INSTALL_QML}' " $out/${qtbase.qtQmlPrefix}"
'';
cmakeFlags = [
(lib.cmakeFeature "QT_INSTALL_QML" "${placeholder "out"}/${qtbase.qtQmlPrefix}")
];
meta = with lib; {
buildInputs = [
maplibre-native-qt
qtpositioning
];
dontWrapQtApps = true; # library only
meta = {
changelog = "https://github.com/rinigus/mapbox-gl-qml/releases/tag/${lib.removePrefix "refs/tags/" finalAttrs.src.rev}";
description = "Unofficial Mapbox GL Native bindings for Qt QML";
homepage = "https://github.com/rinigus/mapbox-gl-qml";
license = licenses.lgpl3Only;
maintainers = with maintainers; [ Thra11 dotlambda ];
platforms = platforms.linux;
license = lib.licenses.lgpl3Only;
maintainers = with lib.maintainers; [
Thra11
dotlambda
];
platforms = lib.platforms.linux;
};
}
})

View File

@ -0,0 +1,43 @@
{
cmake,
fetchFromGitHub,
lib,
qtlocation,
stdenv,
}:
stdenv.mkDerivation (finalAttrs: {
pname = "maplibre-native-qt";
version = "3.0.0";
src = fetchFromGitHub {
owner = "maplibre";
repo = "maplibre-native-qt";
rev = "refs/tags/v${finalAttrs.version}";
hash = "sha256-h7PFoGJ5P+k5AEv+y0XReYnPdP/bD4nr/uW9jZ5DCy4=";
fetchSubmodules = true;
};
nativeBuildInputs = [
cmake
];
buildInputs = [
qtlocation
];
dontWrapQtApps = true; # library only
meta = {
changelog = "https://github.com/maplibre/maplibre-native-qt/blob/${finalAttrs.src.rev}/CHANGELOG.md";
description = "MapLibre Native Qt Bindings and Qt Location Plugin";
homepage = "https://github.com/maplibre/maplibre-native-qt";
license = with lib.licenses; [
bsd2
gpl3
lgpl3
];
maintainers = with lib.maintainers; [ dotlambda ];
platforms = lib.platforms.all;
};
})

View File

@ -15,7 +15,7 @@
buildPythonPackage rec {
pname = "arcam-fmj";
version = "1.5.2";
version = "1.6.0";
pyproject = true;
disabled = pythonOlder "3.8";
@ -24,7 +24,7 @@ buildPythonPackage rec {
owner = "elupus";
repo = "arcam_fmj";
rev = "refs/tags/${version}";
hash = "sha256-hBoUxY+xtW04UPUG2P8A8QQbHxGk0bjcrtXis6nlaGg=";
hash = "sha256-nit+UjUxhkpaK758WLsNc9tcw1s1wdxq3x3etyVVgPk=";
};
build-system = [ setuptools ];

View File

@ -5,15 +5,18 @@
setuptools-scm,
setuptools,
python,
docutils,
jaraco-collections,
jaraco-functools,
jaraco-envs,
jaraco-path,
jaraco-text,
more-itertools,
packaging,
path,
pyfakefs,
pytestCheckHook,
stdenv,
}:
buildPythonPackage rec {
@ -24,13 +27,18 @@ buildPythonPackage rec {
src = fetchFromGitHub {
owner = "pypa";
repo = "distutils";
rev = "378984e02edae91d5f49425da8436f8dd9152b8a"; # correlate commit from setuptools version
hash = "sha256-31sPPVY6tr+OwpiFiaKw82KyhDNBVW3Foea49dCa6pA=";
rev = "72837514c2b67081401db556be9aaaa43debe44f"; # correlate commit from setuptools version
hash = "sha256-Kx4Iudy9oZ0oQT96Meyq/m0k0BuexPLVxwvpNJehCW0=";
};
build-system = [ setuptools-scm ];
dependencies = [ jaraco-functools ];
dependencies = [
jaraco-collections
jaraco-functools
more-itertools
packaging
];
postInstall = ''
rm -r $out/${python.sitePackages}/distutils
@ -40,7 +48,7 @@ buildPythonPackage rec {
pythonImportsCheck = [ "distutils" ];
nativeCheckInputs = [
jaraco-collections
docutils
jaraco-envs
jaraco-path
jaraco-text
@ -50,6 +58,9 @@ buildPythonPackage rec {
pytestCheckHook
];
# jaraco-path depends on pyobjc
doCheck = !stdenv.isDarwin;
meta = {
description = "Distutils as found in cpython";
homepage = "https://github.com/pypa/distutils";

View File

@ -1,17 +1,20 @@
{
lib,
buildPythonPackage,
fetchPypi,
fetchFromGitHub,
importlib-metadata,
black,
poetry-core,
buildPythonPackage,
click,
fetchFromGitHub,
fetchPypi,
gitpython,
importlib-metadata,
jinja2,
platformdirs,
poetry-core,
pytest-asyncio,
pytestCheckHook,
pythonOlder,
tomli,
tqdm,
gitpython,
}:
buildPythonPackage rec {
@ -19,6 +22,8 @@ buildPythonPackage rec {
version = "0.23.3";
pyproject = true;
disabled = pythonOlder "3.9";
src = fetchFromGitHub {
owner = "tconbeer";
repo = "sqlfmt";
@ -26,9 +31,9 @@ buildPythonPackage rec {
hash = "sha256-kbluj29P1HwTaCYv1Myslak9s8FFm2e/eHdGgi3H4i0=";
};
build-system = [
poetry-core
];
pythonRelaxDeps = [ "platformdirs" ];
build-system = [ poetry-core ];
dependencies = [
click
@ -40,25 +45,26 @@ buildPythonPackage rec {
];
optional-dependencies = {
jinjafmt = [
black
];
sqlfmt_primer = [
gitpython
];
jinjafmt = [ black ];
sqlfmt_primer = [ gitpython ];
};
pythonRelaxDeps = [
"platformdirs"
];
nativeCheckInputs = [
pytest-asyncio
pytestCheckHook
] ++ lib.flatten (builtins.attrValues optional-dependencies);
pythonImportsCheck = [
"sqlfmt"
];
preCheck = ''
export HOME=$(mktemp -d)
export PATH="$PATH:$out/bin";
'';
pythonImportsCheck = [ "sqlfmt" ];
meta = {
description = "Sqlfmt formats your dbt SQL files so you don't have to";
homepage = "https://github.com/tconbeer/sqlfmt";
changelog = "https://github.com/tconbeer/sqlfmt/blob/${src.rev}/CHANGELOG.md";
license = lib.licenses.asl20;
maintainers = with lib.maintainers; [ pcboy ];
mainProgram = "sqlfmt";

View File

@ -1,50 +1,63 @@
{
lib,
buildPythonPackage,
fetchPypi,
fetchFromGitHub,
poetry-core,
pyarrow,
pytz,
textual,
tzdata,
pythonOlder,
polars,
pytest-asyncio,
pytest-textual-snapshot,
pytestCheckHook,
}:
buildPythonPackage rec {
pname = "textual-fastdatatable";
version = "0.9.0";
version = "0.10.0";
pyproject = true;
src = fetchPypi {
pname = "textual_fastdatatable";
inherit version;
hash = "sha256-AS3SiwetCHkCMu8H81xbp5QvN/2GCvMlWgU4qZKvBRU=";
disabled = pythonOlder "3.8";
src = fetchFromGitHub {
owner = "tconbeer";
repo = "textual-fastdatatable";
rev = "refs/tags/v${version}";
hash = "sha256-r1evN69etFn21TkXPLuAh1OxIsurDDyPyYOKQR5uUos=";
};
build-system = [
poetry-core
];
build-system = [ poetry-core ];
dependencies = [
pyarrow
pytz
textual
tzdata
];
] ++ textual.optional-dependencies.syntax;
optional-dependencies = {
polars = [
polars
];
polars = [ polars ];
};
pythonImportsCheck = [
"textual_fastdatatable"
nativeCheckInputs = [
pytest-asyncio
pytest-textual-snapshot
pytestCheckHook
] ++ lib.flatten (builtins.attrValues optional-dependencies);
pythonImportsCheck = [ "textual_fastdatatable" ];
disabledTestPaths = [
# Tests are comparing CLI output
"tests/snapshot_tests/test_snapshots.py"
];
meta = {
description = "A performance-focused reimplementation of Textual's DataTable widget, with a pluggable data storage backend";
homepage = "https://pypi.org/project/textual-fastdatatable/";
homepage = "https://github.com/tconbeer/textual-fastdatatable";
changelog = "https://github.com/tconbeer/textual-fastdatatable/releases/tag/v${version}";
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ pcboy ];
};

View File

@ -1,39 +1,47 @@
{
lib,
buildPythonPackage,
fetchPypi,
fetchFromGitHub,
poetry-core,
pyperclip,
pytest-asyncio,
pytestCheckHook,
pythonOlder,
textual,
}:
buildPythonPackage rec {
pname = "textual-textarea";
version = "0.14.2";
version = "0.14.4";
pyproject = true;
src = fetchPypi {
pname = "textual_textarea";
inherit version;
hash = "sha256-AJU7BBoev6pBrLhvbfF4I7l+E8YnO5jCD5OIsNf6NW0=";
disabled = pythonOlder "3.8";
src = fetchFromGitHub {
owner = "tconbeer";
repo = "textual-textarea";
rev = "refs/tags/v${version}";
hash = "sha256-tmbSCU1VgxR9aXG22UVpweD71dVmhKSRBTDm1Gf33jM=";
};
build-system = [
poetry-core
];
build-system = [ poetry-core ];
dependencies = [
pyperclip
textual
] ++ textual.optional-dependencies.syntax;
nativeCheckInputs = [
pytest-asyncio
pytestCheckHook
];
pythonImportsCheck = [
"textual_textarea"
];
pythonImportsCheck = [ "textual_textarea" ];
meta = {
description = "A text area (multi-line input) with syntax highlighting for Textual";
homepage = "https://pypi.org/project/textual-textarea/";
homepage = "https://github.com/tconbeer/textual-textarea";
changelog = "https://github.com/tconbeer/textual-textarea/releases/tag/v${version}";
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ pcboy ];
};

View File

@ -20,7 +20,7 @@
buildPythonPackage rec {
pname = "textual";
version = "0.82.0";
version = "0.86.1";
pyproject = true;
disabled = pythonOlder "3.8";
@ -29,7 +29,7 @@ buildPythonPackage rec {
owner = "Textualize";
repo = "textual";
rev = "refs/tags/v${version}";
hash = "sha256-belpoXQ+CkTchK+FjI/Ur8v4cNgzX39xLdNfPCwaU6E=";
hash = "sha256-5msCFv79nAmoaP9gZxV3DXMLTyVlSFb+qyA5jHWwc50=";
};
build-system = [ poetry-core ];

View File

@ -77,4 +77,11 @@ in
hash = "sha256-0m9oaqjU42RYyttkTihADDrRMjr2WoK/8sInZALeHws=";
cargoHash = "sha256-9XTIcpoCnROP63ZTDgMMMmj0kPggiTazKlKQfCgXKzk=";
};
cargo-pgrx_0_12_6 = generic {
version = "0.12.6";
hash = "sha256-7aQkrApALZe6EoQGVShGBj0UIATnfOy2DytFj9IWdEA=";
cargoHash = "sha256-Di4UldQwAt3xVyvgQT1gUhdvYUVp7n/a72pnX45kP0w=";
};
}

View File

@ -1,7 +1,7 @@
{
lib,
stdenv,
fetchurl,
fetchzip,
unzip,
testers,
chromedriver,
@ -9,19 +9,18 @@
let
upstream-info =
(import ../../../../applications/networking/browsers/chromium/upstream-info.nix)
.stable.chromedriver;
(lib.importJSON ../../../../applications/networking/browsers/chromium/info.json).chromium;
# See ./source.nix for Linux
allSpecs = {
x86_64-darwin = {
system = "mac-x64";
hash = upstream-info.hash_darwin;
hash = upstream-info.chromedriver.hash_darwin;
};
aarch64-darwin = {
system = "mac-arm64";
hash = upstream-info.hash_darwin_aarch64;
hash = upstream-info.chromedriver.hash_darwin_aarch64;
};
};
@ -35,7 +34,7 @@ stdenv.mkDerivation {
pname = "chromedriver";
inherit version;
src = fetchurl {
src = fetchzip {
url = "https://storage.googleapis.com/chrome-for-testing-public/${version}/${spec.system}/chromedriver-${spec.system}.zip";
inherit (spec) hash;
};

View File

@ -53,5 +53,15 @@ buildNodejs {
hash = "sha256-gmIyiSyNzC3pClL1SM2YicckWM+/2tsbV1xv2S3d5G0=";
revert = true;
})
# Fix for https://github.com/NixOS/nixpkgs/issues/355919
# FIXME: remove after a minor point release
(fetchpatch2 {
url = "https://github.com/nodejs/node/commit/a094a8166cd772f89e92b5deef168e5e599fa815.patch?full_index=1";
hash = "sha256-5FZfozYWRa1ZI/f+e+xpdn974Jg2DbiHbua13XUQP5E=";
})
(fetchpatch2 {
url = "https://github.com/nodejs/node/commit/f270462c09ddfd770291a7c8a2cd204b2c63d730.patch?full_index=1";
hash = "sha256-Err0i5g7WtXcnhykKgrS3ocX7/3oV9UrT0SNeRtMZNU=";
})
];
}

View File

@ -4,13 +4,13 @@
callPackage ../generic.nix rec {
pname = "rat-king-adventure";
version = "2.0.1";
version = "2.0.2";
src = fetchFromGitHub {
owner = "TrashboxBobylev";
repo = "Rat-King-Adventure";
rev = version;
hash = "sha256-FAIFrlVyNYTiS+UBLZFOhuMzj8C6qNGAffYrTxcNeDM=";
hash = "sha256-mh54m2YwGOmE03fxndk3wNX/xi6UyIdXWEguiC3mDeA=";
};
desktopName = "Rat King Adventure";

View File

@ -13,13 +13,13 @@ let
in
stdenv.mkDerivation (finalAttrs: {
pname = "evdi";
version = "1.14.6";
version = "1.14.7";
src = fetchFromGitHub {
owner = "DisplayLink";
repo = "evdi";
rev = "refs/tags/v${finalAttrs.version}";
hash = "sha256-/XIWacrsB7qBqlLUwIGuDdahvt2dAwiK7dauFaYh7lU=";
hash = "sha256-z3GawjaokbmmUC1LihwGSnF3tUp9n/FO+kDiWvBq+mY=";
};
env.NIX_CFLAGS_COMPILE = toString [

View File

@ -1,20 +1,39 @@
{ lib, stdenv, fetchFromGitHub, cmake, postgresql, openssl, libkrb5, nixosTests, enableUnfree ? true, buildPostgresqlExtension }:
{
buildPostgresqlExtension,
cmake,
enableUnfree ? true,
fetchFromGitHub,
lib,
libkrb5,
nixosTests,
openssl,
postgresql,
stdenv,
}:
buildPostgresqlExtension rec {
pname = "timescaledb${lib.optionalString (!enableUnfree) "-apache"}";
version = "2.14.2";
version = "2.17.2";
nativeBuildInputs = [ cmake ];
buildInputs = [ openssl libkrb5 ];
buildInputs = [
openssl
libkrb5
];
src = fetchFromGitHub {
owner = "timescale";
repo = "timescaledb";
rev = version;
hash = "sha256-gJViEWHtIczvIiQKuvvuwCfWJMxAYoBhCHhD75no6r0=";
hash = "sha256-gPsAebMUBuAwP6Hoi9/vrc2IFsmTbL0wQH1g6/2k2d4=";
};
cmakeFlags = [ "-DSEND_TELEMETRY_DEFAULT=OFF" "-DREGRESS_CHECKS=OFF" "-DTAP_CHECKS=OFF" ]
cmakeFlags =
[
"-DSEND_TELEMETRY_DEFAULT=OFF"
"-DREGRESS_CHECKS=OFF"
"-DTAP_CHECKS=OFF"
]
++ lib.optionals (!enableUnfree) [ "-DAPACHE_ONLY=ON" ]
++ lib.optionals stdenv.hostPlatform.isDarwin [ "-DLINTER=OFF" ];
@ -38,14 +57,9 @@ buildPostgresqlExtension rec {
description = "Scales PostgreSQL for time-series data via automatic partitioning across time and space";
homepage = "https://www.timescale.com/";
changelog = "https://github.com/timescale/timescaledb/blob/${version}/CHANGELOG.md";
maintainers = [ ];
maintainers = [ maintainers.kirillrdy ];
platforms = postgresql.meta.platforms;
license = with licenses; if enableUnfree then tsl else asl20;
broken = versionOlder postgresql.version "13" ||
# timescaledb supports PostgreSQL 17 from 2.17.0 on:
# https://github.com/timescale/timescaledb/releases/tag/2.17.0
# We can't upgrade to it, yet, because this would imply dropping support for
# PostgreSQL 13, which is a breaking change.
(versionAtLeast postgresql.version "17" && version == "2.14.2");
broken = versionOlder postgresql.version "14";
};
}

View File

@ -3,24 +3,24 @@
, buildPgrxExtension
, postgresql
, nixosTests
, cargo-pgrx_0_10_2
, cargo-pgrx_0_12_6
, nix-update-script
}:
(buildPgrxExtension.override { cargo-pgrx = cargo-pgrx_0_10_2; }) rec {
(buildPgrxExtension.override { cargo-pgrx = cargo-pgrx_0_12_6; }) rec {
inherit postgresql;
pname = "timescaledb_toolkit";
version = "1.18.0";
version = "1.19.0";
src = fetchFromGitHub {
owner = "timescale";
repo = "timescaledb-toolkit";
rev = version;
hash = "sha256-Lm/LFBkG91GeWlJL9RBqP8W0tlhBEeGQ6kXUzzv4xRE=";
hash = "sha256-7yUbtWbYL4AnuUX8OXG4OVqYCY2Lf0pISSTlcFdPqog=";
};
cargoHash = "sha256-LME8oftHmmiN8GU3eTBTSB6m0CE+KtDFRssL1g2Cjm8=";
cargoHash = "sha256-+uD4UU7QwNISQZ7a2kDkY/y3fQWk/K0fFcrFq4yq6RU=";
buildAndTestSubdir = "extension";
passthru = {
@ -37,8 +37,5 @@
maintainers = with maintainers; [ typetetris ];
platforms = postgresql.meta.platforms;
license = licenses.tsl;
# PostgreSQL 17 support issue upstream: https://github.com/timescale/timescaledb-toolkit/issues/813
# Check after next package update.
broken = versionAtLeast postgresql.version "17" && version == "1.18.0";
};
}

View File

@ -1,52 +0,0 @@
# Adapted from lib/tests/release.nix
{ pkgs-path ? ../../..
, pkgs ? import pkgs-path {}
, lib ? pkgs.lib
, nix ? pkgs.nix
}:
#
# This verifies that release-attrpaths-superset.nix does not encounter
# infinite recursion or non-tryEval-able failures.
#
pkgs.runCommand "all-attrs-eval-under-tryEval" {
nativeBuildInputs = [
nix
pkgs.gitMinimal
] ++ lib.optional pkgs.stdenv.hostPlatform.isLinux pkgs.inotify-tools;
strictDeps = true;
src = with lib.fileset; toSource {
root = pkgs-path;
fileset = unions [
../../../default.nix
../../../doc
../../../lib
../../../maintainers
../../../nixos
../../../pkgs
../../../.version
];
};
}
''
datadir="${nix}/share"
export TEST_ROOT=$(pwd)/test-tmp
export HOME=$(mktemp -d)
export NIX_BUILD_HOOK=
export NIX_CONF_DIR=$TEST_ROOT/etc
export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
export NIX_STATE_DIR=$TEST_ROOT/var/nix
export NIX_STORE_DIR=$TEST_ROOT/store
export PAGER=cat
cacheDir=$TEST_ROOT/binary-cache
nix-store --init
echo "Running pkgs/top-level/release-attrpaths-superset.nix"
nix-instantiate --eval --strict --json $src/pkgs/top-level/release-attrpaths-superset.nix -A names > /dev/null
mkdir $out
echo success > $out/${nix.version}
''

View File

@ -12,19 +12,19 @@
buildPythonPackage rec {
pname = "esphome-dashboard";
version = "20240620.0";
version = "20241120.0";
pyproject = true;
src = fetchFromGitHub {
owner = "esphome";
repo = "dashboard";
rev = "refs/tags/${version}";
hash = "sha256-LmIxfX3rcRK90h31J0B5T02f48MCctFERgXxf0zkDm0=";
hash = "sha256-insoDWHqMFAGgmsY2ZgNuo1cl0WGJXRy398bt3ADORs=";
};
npmDeps = fetchNpmDeps {
inherit src;
hash = "sha256-xMVESS1bPNJF07joUgY8ku+GWtflWhM8mYAv0emggc8=";
hash = "sha256-UKrF7yzyj09WBrmrJ6uzcRjIYrKwCqLQ5paiqnt/Xuc=";
};
build-system = [ setuptools ];

View File

@ -21,14 +21,14 @@ let
in
python.pkgs.buildPythonApplication rec {
pname = "esphome";
version = "2024.10.3";
version = "2024.11.0";
pyproject = true;
src = fetchFromGitHub {
owner = pname;
repo = pname;
rev = "refs/tags/${version}";
hash = "sha256-13hNX9uaQbO/IKUkGaOITKh+REqUCHirbTPRgomzHBU=";
hash = "sha256-YH/i1W9d79ZnQCiLiAMHn6goa0l/kqL4MddKYcgLOjg=";
};
build-systems = with python.pkgs; [
@ -56,7 +56,7 @@ python.pkgs.buildPythonApplication rec {
cat requirements_optional.txt >> requirements.txt
# relax strict runtime version check
substituteInPlace esphome/components/font/__init__.py \
--replace-fail "10.2.0" "${python.pkgs.pillow.version}"
--replace-fail "10.4.0" "${python.pkgs.pillow.version}"
'';
# Remove esptool and platformio from requirements
@ -76,7 +76,9 @@ python.pkgs.buildPythonApplication rec {
colorama
cryptography
esphome-dashboard
freetype-py
icmplib
glyphsets
kconfiglib
packaging
paho-mqtt

View File

@ -7007,6 +7007,7 @@ with pkgs;
cargo-pgrx_0_11_2
cargo-pgrx_0_11_3
cargo-pgrx_0_12_0_alpha_1
cargo-pgrx_0_12_6
;
cargo-pgrx = cargo-pgrx_0_11_2;
@ -12458,10 +12459,6 @@ with pkgs;
inherit (pkgs) meson;
};
cryptomator = callPackage ../tools/security/cryptomator {
jdk = jdk23.override { enableJavaFX = true; };
};
# Darwin package set
#
# Even though this is a set of packages not single package, use `callPackage`
@ -16138,7 +16135,6 @@ with pkgs;
ungoogled-chromium = callPackage ../applications/networking/browsers/chromium ((config.chromium or {}) // {
ungoogled = true;
channel = "ungoogled-chromium";
});
unigine-tropics = pkgsi686Linux.callPackage ../applications/graphics/unigine-tropics { };
@ -16824,10 +16820,6 @@ with pkgs;
pmars-x11 = pmars.override { enableXwinGraphics = true; };
wipeout-rewrite = callPackage ../games/wipeout-rewrite {
inherit (darwin.apple_sdk.frameworks) Foundation;
};
### GAMES/DOOM-PORTS
doomseeker = qt5.callPackage ../games/doom-ports/doomseeker { };

View File

@ -180,6 +180,8 @@ in (noExtraAttrs (kdeFrameworks // plasmaMobileGear // plasma5 // plasma5.thirdP
maplibre-gl-native = callPackage ../development/libraries/maplibre-gl-native { };
maplibre-native-qt = callPackage ../development/libraries/maplibre-native-qt { };
maui-core = libsForQt5.callPackage ../development/libraries/maui-core { };
mlt = pkgs.mlt.override {

View File

@ -60,6 +60,8 @@ makeScopeWithSplicing' {
qt = qt6;
};
maplibre-native-qt = callPackage ../development/libraries/maplibre-native-qt { };
qca = pkgs.darwin.apple_sdk_11_0.callPackage ../development/libraries/qca {
inherit (qt6) qtbase qt5compat;
};

View File

@ -0,0 +1,47 @@
# This file works in tandem with ../../ci/eval/default.nix
# It turns ./release-outpaths.nix into chunks of a fixed size
{
lib ? import ../../lib,
path ? ../..,
# The file containing all available attribute paths, which are split into chunks here
attrpathFile,
chunkSize,
myChunk,
checkMeta,
includeBroken,
systems,
}:
let
attrpaths = lib.importJSON attrpathFile;
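# Illustrative example: with chunkSize = 3 and myChunk = 1, an attrpaths list
# [ p0 p1 p2 p3 p4 p5 p6 ] yields the sublist [ p3 p4 p5 ]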
myAttrpaths = lib.sublist (chunkSize * myChunk) chunkSize attrpaths;
unfiltered = import ./release-outpaths.nix {
inherit path;
inherit checkMeta includeBroken systems;
};
# Turns the unfiltered recursive attribute set into one that is limited to myAttrpaths
filtered =
let
recurse =
index: paths: attrs:
lib.mapAttrs (
name: values:
if attrs ? ${name} then
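# If any of the grouped attrpaths terminates at this depth, keep the whole
# value as-is; otherwise descend one level further along those paths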
if lib.any (value: lib.length value <= index + 1) values then
attrs.${name}
else
recurse (index + 1) values attrs.${name}
# Make sure nix-env recurses as well
// {
recurseForDerivations = true;
}
else
null
) (lib.groupBy (a: lib.elemAt a index) paths);
in
recurse 0 myAttrpaths unfiltered;
in
filtered
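The diff viewer has dropped the file name here; going by the header comment, this is the chunked companion of release-outpaths.nix used by ci/eval/default.nix. A minimal sketch of calling it by hand, where the file name, the paths file and all argument values are hypothetical:
builtins.attrNames (import ./release-outpaths-chunked.nix {  # hypothetical file name
  attrpathFile = ./paths.json;  # JSON list of attribute paths, e.g. [ [ "hello" ] ]
  chunkSize = 1000;             # illustrative size
  myChunk = 0;                  # evaluate only the first chunk
  checkMeta = true;
  includeBroken = false;
  systems = [ "x86_64-linux" ];
})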

View File

@ -9,7 +9,7 @@
$ hydra-eval-jobs -I . pkgs/top-level/release-haskell.nix
*/
{ supportedSystems ? [ "x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin" ] }:
{ supportedSystems ? import ../../ci/supportedSystems.nix }:
let
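The shared ci/supportedSystems.nix introduced here is not part of this diff; presumably it carries the same platform doubles as the inline defaults it replaces (see the removed list above and the one in the release-outpaths.nix hunk below), i.e. roughly:
# ci/supportedSystems.nix (assumed contents, inferred from the defaults it replaces)
[
  "x86_64-linux"
  "x86_64-darwin"
  "aarch64-linux"
  "aarch64-darwin"
]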

View File

@ -163,19 +163,26 @@ let
(addMetaAttrs { maintainers = crossMaintainers; });
/* Recursively map a (nested) set of derivations to an isomorphic
set of meta.platforms values. */
packagePlatforms = mapAttrs (name: value:
/* Recurse into packages and apply a function to each of them */
recursiveMapPackages = f: mapAttrs (name: value:
if isDerivation value then
value.meta.hydraPlatforms
or (subtractLists (value.meta.badPlatforms or [])
(value.meta.platforms or supportedSystems))
f value
else if value.recurseForDerivations or false || value.recurseForRelease or false then
packagePlatforms value
recursiveMapPackages f value
else
[]
);
/* Gets the list of Hydra platforms for a derivation */
getPlatforms = drv:
drv.meta.hydraPlatforms
or (subtractLists (drv.meta.badPlatforms or [])
(drv.meta.platforms or supportedSystems));
/* Recursively map a (nested) set of derivations to an isomorphic
set of meta.platforms values. */
packagePlatforms = recursiveMapPackages getPlatforms;
in {
/* Common platform groups on which to test packages. */
inherit (platforms) unix linux darwin cygwin all;
@ -188,6 +195,8 @@ in {
lib
mapTestOn
mapTestOnCross
recursiveMapPackages
getPlatforms
packagePlatforms
pkgs
pkgsFor
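A hedged, self-contained sketch of how the two new helpers fit together; the bodies are re-stated locally from the hunk above so the snippet evaluates on its own with nix-instantiate --eval --strict, and the example package set is made up:
let
  pkgs = import <nixpkgs> { };
  inherit (pkgs) lib;
  supportedSystems = [ "x86_64-linux" "aarch64-linux" ];
  # Per-derivation platform selection, as in getPlatforms above
  getPlatforms = drv:
    drv.meta.hydraPlatforms
      or (lib.subtractLists (drv.meta.badPlatforms or [ ])
        (drv.meta.platforms or supportedSystems));
  # Recursive walk over a package set, as in recursiveMapPackages above
  recursiveMapPackages = f: lib.mapAttrs (_name: value:
    if lib.isDerivation value then
      f value
    else if value.recurseForDerivations or false || value.recurseForRelease or false then
      recursiveMapPackages f value
    else
      [ ]);
in {
  # a single derivation -> its Hydra platforms
  hello = getPlatforms pkgs.hello;
  # a nested set -> an isomorphic set of platform lists
  tools = recursiveMapPackages getPlatforms {
    inherit (pkgs) hello;
    nested = { recurseForDerivations = true; inherit (pkgs) jq; };
  };
}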

View File

@ -12,13 +12,7 @@
, attrNamesOnly ? false
# Set this to `null` to build for builtins.currentSystem only
, systems ? [
"aarch64-linux"
"aarch64-darwin"
#"i686-linux" # !!!
"x86_64-linux"
"x86_64-darwin"
]
, systems ? import ../../ci/supportedSystems.nix
}:
let
lib = import (path + "/lib");

View File

@ -12,7 +12,7 @@
, system ? builtins.currentSystem
, officialRelease ? false
# The platform doubles for which we build Nixpkgs.
, supportedSystems ? [ "x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin" ]
, supportedSystems ? import ../../ci/supportedSystems.nix
# The platform triples for which we build bootstrap tools.
, bootstrapConfigs ? [
"aarch64-apple-darwin"
@ -321,8 +321,9 @@ let
# Conflicts usually cause silent job drops like in
# https://github.com/NixOS/nixpkgs/pull/182058
jobs = let
packagePlatforms = if attrNamesOnly then id else release-lib.packagePlatforms;
packageJobs = {
packagePlatforms = release-lib.recursiveMapPackages
(if attrNamesOnly then id else release-lib.getPlatforms);
packageJobs = packagePlatforms pkgs // {
haskell.compiler = packagePlatforms pkgs.haskell.compiler;
haskellPackages = packagePlatforms pkgs.haskellPackages;
# Build selected packages (HLS) for multiple Haskell compilers to rebuild
@ -363,8 +364,8 @@ let
};
mapTestOn-packages =
if attrNamesOnly
then pkgs // packageJobs
else mapTestOn ((packagePlatforms pkgs) // packageJobs);
then packageJobs
else mapTestOn packageJobs;
in
unionOfDisjoint nonPackageJobs mapTestOn-packages;