Merge staging-next into staging

github-actions[bot] 2021-06-06 00:15:23 +00:00 committed by GitHub
commit 500db2661d
92 changed files with 4064 additions and 477 deletions

@@ -734,6 +734,12 @@ lib.mapAttrs (n: v: v // { shortName = n; }) ({
free = false;
};
stk = {
shortName = "stk";
fullName = "Synthesis Tool Kit 4.3";
url = "https://github.com/thestk/stk/blob/master/LICENSE";
};
tcltk = spdx {
spdxId = "TCL";
fullName = "TCL/TK License";

@@ -139,6 +139,7 @@
./programs/flexoptix-app.nix
./programs/freetds.nix
./programs/fuse.nix
./programs/gamemode.nix
./programs/geary.nix
./programs/gnome-disks.nix
./programs/gnome-documents.nix
@@ -553,6 +554,7 @@
./services/misc/siproxd.nix
./services/misc/snapper.nix
./services/misc/sonarr.nix
./services/misc/sourcehut
./services/misc/spice-vdagentd.nix
./services/misc/ssm-agent.nix
./services/misc/sssd.nix

@@ -0,0 +1,96 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.programs.gamemode;
settingsFormat = pkgs.formats.ini { };
configFile = settingsFormat.generate "gamemode.ini" cfg.settings;
in
{
options = {
programs.gamemode = {
enable = mkEnableOption "GameMode to optimise system performance on demand";
enableRenice = mkEnableOption "CAP_SYS_NICE on gamemoded to support lowering process niceness" // {
default = true;
};
settings = mkOption {
type = settingsFormat.type;
default = {};
description = ''
System-wide configuration for GameMode (/etc/gamemode.ini).
See gamemoded(8) man page for available settings.
'';
example = literalExample ''
{
general = {
renice = 10;
};
# Warning: GPU optimisations have the potential to damage hardware
gpu = {
apply_gpu_optimisations = "accept-responsibility";
gpu_device = 0;
amd_performance_level = "high";
};
custom = {
start = "''${pkgs.libnotify}/bin/notify-send 'GameMode started'";
end = "''${pkgs.libnotify}/bin/notify-send 'GameMode ended'";
};
}
'';
};
};
};
config = mkIf cfg.enable {
environment = {
systemPackages = [ pkgs.gamemode ];
etc."gamemode.ini".source = configFile;
};
security = {
polkit.enable = true;
wrappers = mkIf cfg.enableRenice {
gamemoded = {
source = "${pkgs.gamemode}/bin/gamemoded";
capabilities = "cap_sys_nice+ep";
};
};
};
systemd = {
packages = [ pkgs.gamemode ];
user.services.gamemoded = {
# The upstream service already defines this, but it doesn't get applied.
# See https://github.com/NixOS/nixpkgs/issues/81138
wantedBy = [ "default.target" ];
# Use pkexec from the security wrappers to allow users to
# run libexec/cpugovctl & libexec/gpuclockctl as root with
# the actions defined in share/polkit-1/actions.
#
# This uses a link farm to make sure other wrapped executables
# aren't included in PATH.
environment.PATH = mkForce (pkgs.linkFarm "pkexec" [
{
name = "pkexec";
path = "${config.security.wrapperDir}/pkexec";
}
]);
serviceConfig.ExecStart = mkIf cfg.enableRenice [
"" # Tell systemd to clear the existing ExecStart list, to prevent appending to it.
"${config.security.wrapperDir}/gamemoded"
];
};
};
};
meta = {
maintainers = with maintainers; [ kira-bruneau ];
};
}

@@ -0,0 +1,220 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.sourcehut;
scfg = cfg.builds;
rcfg = config.services.redis;
iniKey = "builds.sr.ht";
drv = pkgs.sourcehut.buildsrht;
in
{
options.services.sourcehut.builds = {
user = mkOption {
type = types.str;
default = "buildsrht";
description = ''
User for builds.sr.ht.
'';
};
port = mkOption {
type = types.port;
default = 5002;
description = ''
Port on which the "builds" module should listen.
'';
};
database = mkOption {
type = types.str;
default = "builds.sr.ht";
description = ''
PostgreSQL database name for builds.sr.ht.
'';
};
statePath = mkOption {
type = types.path;
default = "${cfg.statePath}/buildsrht";
description = ''
State path for builds.sr.ht.
'';
};
enableWorker = mkOption {
type = types.bool;
default = false;
description = ''
Run workers for builds.sr.ht.
The qemu docker image must be built manually on the machine: `cd ${scfg.statePath}/images; docker build -t qemu -f qemu/Dockerfile .`
'';
};
images = mkOption {
type = types.attrsOf (types.attrsOf (types.attrsOf types.package));
default = { };
example = lib.literalExample ''(let
# Pinning unstable to allow usage with flakes and limit rebuilds.
pkgs_unstable = builtins.fetchGit {
url = "https://github.com/NixOS/nixpkgs";
rev = "ff96a0fa5635770390b184ae74debea75c3fd534";
ref = "nixos-unstable";
};
image_from_nixpkgs = pkgs_unstable: (import ("${pkgs.sourcehut.buildsrht}/lib/images/nixos/image.nix") {
pkgs = (import pkgs_unstable {});
});
in
{
nixos.unstable.x86_64 = image_from_nixpkgs pkgs_unstable;
}
)'';
description = ''
Images for builds.sr.ht. Attributes must be nested as distro.release.arch, each pointing to a package that provides the root.img.qcow2 disk image.
'';
};
};
config = with scfg; let
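# Flatten the nested distro.release.arch image set into directories shaped
# like <distro>/<rev>/<arch>/root.img.qcow2, then merge them with the images
# shipped in the buildsrht package to form the directory handed to the worker.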
image_dirs = lib.lists.flatten (
lib.attrsets.mapAttrsToList
(distro: revs:
lib.attrsets.mapAttrsToList
(rev: archs:
lib.attrsets.mapAttrsToList
(arch: image:
pkgs.runCommandNoCC "buildsrht-images" { } ''
mkdir -p $out/${distro}/${rev}/${arch}
ln -s ${image}/*.qcow2 $out/${distro}/${rev}/${arch}/root.img.qcow2
'')
archs)
revs)
scfg.images);
image_dir_pre = pkgs.symlinkJoin {
name = "builds.sr.ht-worker-images-pre";
paths = image_dirs ++ [
"${pkgs.sourcehut.buildsrht}/lib/images"
];
};
image_dir = pkgs.runCommandNoCC "builds.sr.ht-worker-images" { } ''
mkdir -p $out/images
cp -Lr ${image_dir_pre}/* $out/images
'';
in
lib.mkIf (cfg.enable && elem "builds" cfg.services) {
users = {
users = {
"${user}" = {
isSystemUser = true;
group = user;
extraGroups = lib.optionals cfg.builds.enableWorker [ "docker" ];
description = "builds.sr.ht user";
};
};
groups = {
"${user}" = { };
};
};
services.postgresql = {
authentication = ''
local ${database} ${user} trust
'';
ensureDatabases = [ database ];
ensureUsers = [
{
name = user;
ensurePermissions = { "DATABASE \"${database}\"" = "ALL PRIVILEGES"; };
}
];
};
systemd = {
tmpfiles.rules = [
"d ${statePath} 0755 ${user} ${user} -"
] ++ (lib.optionals cfg.builds.enableWorker
[ "d ${statePath}/logs 0775 ${user} ${user} - -" ]
);
services = {
buildsrht = import ./service.nix { inherit config pkgs lib; } scfg drv iniKey
{
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "builds.sr.ht website service";
serviceConfig.ExecStart = "${cfg.python}/bin/gunicorn ${drv.pname}.app:app -b ${cfg.address}:${toString port}";
# Hack to bypass this hack: https://git.sr.ht/~sircmpwn/core.sr.ht/tree/master/item/srht-update-profiles#L6
} // { preStart = " "; };
buildsrht-worker = {
enable = scfg.enableWorker;
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
partOf = [ "buildsrht.service" ];
description = "builds.sr.ht worker service";
path = [ pkgs.openssh pkgs.docker ];
serviceConfig = {
Type = "simple";
User = user;
Group = "nginx";
Restart = "always";
};
serviceConfig.ExecStart = "${pkgs.sourcehut.buildsrht}/bin/builds.sr.ht-worker";
};
};
};
services.sourcehut.settings = {
# URL builds.sr.ht is being served at (protocol://domain)
"builds.sr.ht".origin = mkDefault "http://builds.${cfg.originBase}";
# Address and port to bind the debug server to
"builds.sr.ht".debug-host = mkDefault "0.0.0.0";
"builds.sr.ht".debug-port = mkDefault port;
# Configures the SQLAlchemy connection string for the database.
"builds.sr.ht".connection-string = mkDefault "postgresql:///${database}?user=${user}&host=/var/run/postgresql";
# Set to "yes" to automatically run migrations on package upgrade.
"builds.sr.ht".migrate-on-upgrade = mkDefault "yes";
# builds.sr.ht's OAuth client ID and secret for meta.sr.ht
# Register your client at meta.example.org/oauth
"builds.sr.ht".oauth-client-id = mkDefault null;
"builds.sr.ht".oauth-client-secret = mkDefault null;
# The redis connection used for the celery worker
"builds.sr.ht".redis = mkDefault "redis://${rcfg.bind}:${toString rcfg.port}/3";
# The shell used for ssh
"builds.sr.ht".shell = mkDefault "runner-shell";
# Register the builds.sr.ht dispatcher
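# The attribute name below is a store path, so its string context has to be
# discarded; this is safe because the package is installed whenever the
# setting is used (same pattern as in git.nix).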
"git.sr.ht::dispatch".${builtins.unsafeDiscardStringContext "${pkgs.sourcehut.buildsrht}/bin/buildsrht-keys"} = mkDefault "${user}:${user}";
# Location for build logs, images, and control command
} // lib.attrsets.optionalAttrs scfg.enableWorker {
# Default worker stores logs that are accessible via this address:port
"builds.sr.ht::worker".name = mkDefault "127.0.0.1:5020";
"builds.sr.ht::worker".buildlogs = mkDefault "${scfg.statePath}/logs";
"builds.sr.ht::worker".images = mkDefault "${image_dir}/images";
"builds.sr.ht::worker".controlcmd = mkDefault "${image_dir}/images/control";
"builds.sr.ht::worker".timeout = mkDefault "3m";
};
services.nginx.virtualHosts."logs.${cfg.originBase}" =
if scfg.enableWorker then {
listen = with builtins; let address = split ":" cfg.settings."builds.sr.ht::worker".name;
in [{ addr = elemAt address 0; port = lib.toInt (elemAt address 2); }];
locations."/logs".root = "${scfg.statePath}";
} else { };
services.nginx.virtualHosts."builds.${cfg.originBase}" = {
forceSSL = true;
locations."/".proxyPass = "http://${cfg.address}:${toString port}";
locations."/query".proxyPass = "http://${cfg.address}:${toString (port + 100)}";
locations."/static".root = "${pkgs.sourcehut.buildsrht}/${pkgs.sourcehut.python.sitePackages}/buildsrht";
};
};
}

@@ -0,0 +1,198 @@
{ config, pkgs, lib, ... }:
with lib;
let
cfg = config.services.sourcehut;
cfgIni = cfg.settings;
settingsFormat = pkgs.formats.ini { };
# Specialized python containing all the modules
python = pkgs.sourcehut.python.withPackages (ps: with ps; [
gunicorn
# Sourcehut services
srht
buildsrht
dispatchsrht
gitsrht
hgsrht
hubsrht
listssrht
mansrht
metasrht
pastesrht
todosrht
]);
in
{
imports =
[
./git.nix
./hg.nix
./hub.nix
./todo.nix
./man.nix
./meta.nix
./paste.nix
./builds.nix
./lists.nix
./dispatch.nix
(mkRemovedOptionModule [ "services" "sourcehut" "nginx" "enable" ] ''
The sourcehut module supports `nginx` as a local reverse-proxy by default and doesn't
support other reverse-proxies officially.
However, it is possible to use an alternative reverse-proxy by
* disabling nginx
* adjusting the relevant settings for server addresses and ports directly
Further details about this can be found in the `Sourcehut` section of the NixOS manual.
'')
];
options.services.sourcehut = {
enable = mkOption {
type = types.bool;
default = false;
description = ''
Enable sourcehut - git hosting, continuous integration, mailing list, ticket tracking,
task dispatching, wiki and account management services
'';
};
services = mkOption {
type = types.nonEmptyListOf (types.enum [ "builds" "dispatch" "git" "hub" "hg" "lists" "man" "meta" "paste" "todo" ]);
default = [ "man" "meta" "paste" ];
example = [ "builds" "dispatch" "git" "hub" "hg" "lists" "man" "meta" "paste" "todo" ];
description = ''
Services to enable on the sourcehut network.
'';
};
originBase = mkOption {
type = types.str;
default = with config.networking; hostName + lib.optionalString (domain != null) ".${domain}";
description = ''
Host name used by the reverse-proxy and for default settings. Each service is hosted under its own subdomain, e.g. git.''${originBase} (for example git.sr.ht).
'';
};
address = mkOption {
type = types.str;
default = "127.0.0.1";
description = ''
Address to bind to.
'';
};
python = mkOption {
internal = true;
type = types.package;
default = python;
description = ''
The python package to use. It should contain references to the *srht modules and also
gunicorn.
'';
};
statePath = mkOption {
type = types.path;
default = "/var/lib/sourcehut";
description = ''
Root state path for the sourcehut network. If left as the default value
this directory will automatically be created before the sourcehut server
starts, otherwise the sysadmin is responsible for ensuring the
directory exists with appropriate ownership and permissions.
'';
};
settings = mkOption {
type = lib.types.submodule {
freeformType = settingsFormat.type;
};
default = { };
description = ''
The configuration for the sourcehut network.
'';
};
};
config = mkIf cfg.enable {
assertions =
[
{
assertion = with cfgIni.webhooks; private-key != null && stringLength private-key == 44;
message = "The webhook's private key must be defined and of a 44 byte length.";
}
{
assertion = hasAttrByPath [ "meta.sr.ht" "origin" ] cfgIni && cfgIni."meta.sr.ht".origin != null;
message = "meta.sr.ht's origin must be defined.";
}
];
virtualisation.docker.enable = true;
environment.etc."sr.ht/config.ini".source =
settingsFormat.generate "sourcehut-config.ini" (mapAttrsRecursive
(
path: v: if v == null then "" else v
)
cfg.settings);
environment.systemPackages = [ pkgs.sourcehut.coresrht ];
# PostgreSQL server
services.postgresql.enable = mkOverride 999 true;
# Mail server
services.postfix.enable = mkOverride 999 true;
# Cron daemon
services.cron.enable = mkOverride 999 true;
# Redis server
services.redis.enable = mkOverride 999 true;
services.redis.bind = mkOverride 999 "127.0.0.1";
services.sourcehut.settings = {
# The name of your network of sr.ht-based sites
"sr.ht".site-name = mkDefault "sourcehut";
# The top-level info page for your site
"sr.ht".site-info = mkDefault "https://sourcehut.org";
# {{ site-name }}, {{ site-blurb }}
"sr.ht".site-blurb = mkDefault "the hacker's forge";
# If this != production, we add a banner to each page
"sr.ht".environment = mkDefault "development";
# Contact information for the site owners
"sr.ht".owner-name = mkDefault "Drew DeVault";
"sr.ht".owner-email = mkDefault "sir@cmpwn.com";
# The source code for your fork of sr.ht
"sr.ht".source-url = mkDefault "https://git.sr.ht/~sircmpwn/srht";
# A secret key to encrypt session cookies with
"sr.ht".secret-key = mkDefault null;
"sr.ht".global-domain = mkDefault null;
# Outgoing SMTP settings
mail.smtp-host = mkDefault null;
mail.smtp-port = mkDefault null;
mail.smtp-user = mkDefault null;
mail.smtp-password = mkDefault null;
mail.smtp-from = mkDefault null;
# Application exceptions are emailed to this address
mail.error-to = mkDefault null;
mail.error-from = mkDefault null;
# Your PGP key information (DO NOT mix up pub and priv here)
# You must remove the password from your secret key, if present.
# You can do this with gpg --edit-key [key-id], then use the passwd
# command and do not enter a new password.
mail.pgp-privkey = mkDefault null;
mail.pgp-pubkey = mkDefault null;
mail.pgp-key-id = mkDefault null;
# base64-encoded Ed25519 key for signing webhook payloads. This should be
# consistent for all *.sr.ht sites, as we'll use this key to verify signatures
# from other sites in your network.
#
# Use the srht-webhook-keygen command to generate a key.
webhooks.private-key = mkDefault null;
};
};
meta.doc = ./sourcehut.xml;
meta.maintainers = with maintainers; [ tomberek ];
}

@@ -0,0 +1,125 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.sourcehut;
cfgIni = cfg.settings;
scfg = cfg.dispatch;
iniKey = "dispatch.sr.ht";
drv = pkgs.sourcehut.dispatchsrht;
in
{
options.services.sourcehut.dispatch = {
user = mkOption {
type = types.str;
default = "dispatchsrht";
description = ''
User for dispatch.sr.ht.
'';
};
port = mkOption {
type = types.port;
default = 5005;
description = ''
Port on which the "dispatch" module should listen.
'';
};
database = mkOption {
type = types.str;
default = "dispatch.sr.ht";
description = ''
PostgreSQL database name for dispatch.sr.ht.
'';
};
statePath = mkOption {
type = types.path;
default = "${cfg.statePath}/dispatchsrht";
description = ''
State path for dispatch.sr.ht.
'';
};
};
config = with scfg; lib.mkIf (cfg.enable && elem "dispatch" cfg.services) {
users = {
users = {
"${user}" = {
isSystemUser = true;
group = user;
description = "dispatch.sr.ht user";
};
};
groups = {
"${user}" = { };
};
};
services.postgresql = {
authentication = ''
local ${database} ${user} trust
'';
ensureDatabases = [ database ];
ensureUsers = [
{
name = user;
ensurePermissions = { "DATABASE \"${database}\"" = "ALL PRIVILEGES"; };
}
];
};
systemd = {
tmpfiles.rules = [
"d ${statePath} 0750 ${user} ${user} -"
];
services.dispatchsrht = import ./service.nix { inherit config pkgs lib; } scfg drv iniKey {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "dispatch.sr.ht website service";
serviceConfig.ExecStart = "${cfg.python}/bin/gunicorn ${drv.pname}.app:app -b ${cfg.address}:${toString port}";
};
};
services.sourcehut.settings = {
# URL dispatch.sr.ht is being served at (protocol://domain)
"dispatch.sr.ht".origin = mkDefault "http://dispatch.${cfg.originBase}";
# Address and port to bind the debug server to
"dispatch.sr.ht".debug-host = mkDefault "0.0.0.0";
"dispatch.sr.ht".debug-port = mkDefault port;
# Configures the SQLAlchemy connection string for the database.
"dispatch.sr.ht".connection-string = mkDefault "postgresql:///${database}?user=${user}&host=/var/run/postgresql";
# Set to "yes" to automatically run migrations on package upgrade.
"dispatch.sr.ht".migrate-on-upgrade = mkDefault "yes";
# dispatch.sr.ht's OAuth client ID and secret for meta.sr.ht
# Register your client at meta.example.org/oauth
"dispatch.sr.ht".oauth-client-id = mkDefault null;
"dispatch.sr.ht".oauth-client-secret = mkDefault null;
# Github Integration
"dispatch.sr.ht::github".oauth-client-id = mkDefault null;
"dispatch.sr.ht::github".oauth-client-secret = mkDefault null;
# Gitlab Integration
"dispatch.sr.ht::gitlab".enabled = mkDefault null;
"dispatch.sr.ht::gitlab".canonical-upstream = mkDefault "gitlab.com";
"dispatch.sr.ht::gitlab".repo-cache = mkDefault "./repo-cache";
# "dispatch.sr.ht::gitlab"."gitlab.com" = mkDefault "GitLab:application id:secret";
};
services.nginx.virtualHosts."dispatch.${cfg.originBase}" = {
forceSSL = true;
locations."/".proxyPass = "http://${cfg.address}:${toString port}";
locations."/query".proxyPass = "http://${cfg.address}:${toString (port + 100)}";
locations."/static".root = "${pkgs.sourcehut.dispatchsrht}/${pkgs.sourcehut.python.sitePackages}/dispatchsrht";
};
};
}

@@ -0,0 +1,214 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.sourcehut;
scfg = cfg.git;
iniKey = "git.sr.ht";
rcfg = config.services.redis;
drv = pkgs.sourcehut.gitsrht;
in
{
options.services.sourcehut.git = {
user = mkOption {
type = types.str;
visible = false;
internal = true;
readOnly = true;
default = "git";
description = ''
User for git.sr.ht.
'';
};
port = mkOption {
type = types.port;
default = 5001;
description = ''
Port on which the "git" module should listen.
'';
};
database = mkOption {
type = types.str;
default = "git.sr.ht";
description = ''
PostgreSQL database name for git.sr.ht.
'';
};
statePath = mkOption {
type = types.path;
default = "${cfg.statePath}/gitsrht";
description = ''
State path for git.sr.ht.
'';
};
package = mkOption {
type = types.package;
default = pkgs.git;
example = literalExample "pkgs.gitFull";
description = ''
Git package for git.sr.ht. Setting this to the git variant used elsewhere on the system (e.g. pkgs.gitFull) helps avoid package collisions.
'';
};
};
config = with scfg; lib.mkIf (cfg.enable && elem "git" cfg.services) {
# sshd refuses to run with `Unsafe AuthorizedKeysCommand ... bad ownership or modes for directory /nix/store`
environment.etc."ssh/gitsrht-dispatch" = {
mode = "0755";
text = ''
#! ${pkgs.stdenv.shell}
${cfg.python}/bin/gitsrht-dispatch "$@"
'';
};
# Needed in $PATH when SSHing into the server
environment.systemPackages = [ cfg.git.package ];
users = {
users = {
"${user}" = {
isSystemUser = true;
group = user;
# https://stackoverflow.com/questions/22314298/git-push-results-in-fatal-protocol-error-bad-line-length-character-this
# Probably could use gitsrht-shell if output is restricted to just parameters...
shell = pkgs.bash;
description = "git.sr.ht user";
};
};
groups = {
"${user}" = { };
};
};
services = {
cron.systemCronJobs = [ "*/20 * * * * ${cfg.python}/bin/gitsrht-periodic" ];
fcgiwrap.enable = true;
openssh.authorizedKeysCommand = ''/etc/ssh/gitsrht-dispatch "%u" "%h" "%t" "%k"'';
openssh.authorizedKeysCommandUser = "root";
openssh.extraConfig = ''
PermitUserEnvironment SRHT_*
'';
postgresql = {
authentication = ''
local ${database} ${user} trust
'';
ensureDatabases = [ database ];
ensureUsers = [
{
name = user;
ensurePermissions = { "DATABASE \"${database}\"" = "ALL PRIVILEGES"; };
}
];
};
};
systemd = {
tmpfiles.rules = [
# /var/log is owned by root
"f /var/log/git-srht-shell 0644 ${user} ${user} -"
"d ${statePath} 0750 ${user} ${user} -"
"d ${cfg.settings."${iniKey}".repos} 2755 ${user} ${user} -"
];
services = {
gitsrht = import ./service.nix { inherit config pkgs lib; } scfg drv iniKey {
after = [ "redis.service" "postgresql.service" "network.target" ];
requires = [ "redis.service" "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
# git is needed internally, at the very least to create repositories
path = [ pkgs.git ];
description = "git.sr.ht website service";
serviceConfig.ExecStart = "${cfg.python}/bin/gunicorn ${drv.pname}.app:app -b ${cfg.address}:${toString port}";
};
gitsrht-webhooks = {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "git.sr.ht webhooks service";
serviceConfig = {
Type = "simple";
User = user;
Restart = "always";
};
serviceConfig.ExecStart = "${cfg.python}/bin/celery -A ${drv.pname}.webhooks worker --loglevel=info";
};
};
};
services.sourcehut.settings = {
# URL git.sr.ht is being served at (protocol://domain)
"git.sr.ht".origin = mkDefault "http://git.${cfg.originBase}";
# Address and port to bind the debug server to
"git.sr.ht".debug-host = mkDefault "0.0.0.0";
"git.sr.ht".debug-port = mkDefault port;
# Configures the SQLAlchemy connection string for the database.
"git.sr.ht".connection-string = mkDefault "postgresql:///${database}?user=${user}&host=/var/run/postgresql";
# Set to "yes" to automatically run migrations on package upgrade.
"git.sr.ht".migrate-on-upgrade = mkDefault "yes";
# The redis connection used for the webhooks worker
"git.sr.ht".webhooks = mkDefault "redis://${rcfg.bind}:${toString rcfg.port}/1";
# A post-update script which is installed in every git repo.
"git.sr.ht".post-update-script = mkDefault "${pkgs.sourcehut.gitsrht}/bin/gitsrht-update-hook";
# git.sr.ht's OAuth client ID and secret for meta.sr.ht
# Register your client at meta.example.org/oauth
"git.sr.ht".oauth-client-id = mkDefault null;
"git.sr.ht".oauth-client-secret = mkDefault null;
# Path to git repositories on disk
"git.sr.ht".repos = mkDefault "/var/lib/git";
"git.sr.ht".outgoing-domain = mkDefault "http://git.${cfg.originBase}";
# The authorized keys hook uses this to dispatch to various handlers
# The format is a program to exec into as the key, and the user to match as the
# value. When someone tries to log in as this user, this program is executed
# and is expected to emit an AuthorizedKeys file.
#
# The string context is discarded so that a store path can be used as an
# attribute name; this is safe because the referenced package is installed
# whenever this setting is used.
"git.sr.ht::dispatch".${builtins.unsafeDiscardStringContext "${pkgs.sourcehut.gitsrht}/bin/gitsrht-keys"} = mkDefault "${user}:${user}";
};
services.nginx.virtualHosts."git.${cfg.originBase}" = {
forceSSL = true;
locations."/".proxyPass = "http://${cfg.address}:${toString port}";
locations."/query".proxyPass = "http://${cfg.address}:${toString (port + 100)}";
locations."/static".root = "${pkgs.sourcehut.gitsrht}/${pkgs.sourcehut.python.sitePackages}/gitsrht";
extraConfig = ''
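# Smart HTTP: first ask the git.sr.ht web service to authorize the request,
# then serve the repository through git-http-backend via fcgiwrap.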
location = /authorize {
proxy_pass http://${cfg.address}:${toString port};
proxy_pass_request_body off;
proxy_set_header Content-Length "";
proxy_set_header X-Original-URI $request_uri;
}
location ~ ^/([^/]+)/([^/]+)/(HEAD|info/refs|objects/info/.*|git-upload-pack).*$ {
auth_request /authorize;
root /var/lib/git;
fastcgi_pass unix:/run/fcgiwrap.sock;
fastcgi_param SCRIPT_FILENAME ${pkgs.git}/bin/git-http-backend;
fastcgi_param PATH_INFO $uri;
fastcgi_param GIT_PROJECT_ROOT $document_root;
fastcgi_read_timeout 500s;
include ${pkgs.nginx}/conf/fastcgi_params;
gzip off;
}
'';
};
};
}

@@ -0,0 +1,173 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.sourcehut;
scfg = cfg.hg;
iniKey = "hg.sr.ht";
rcfg = config.services.redis;
drv = pkgs.sourcehut.hgsrht;
in
{
options.services.sourcehut.hg = {
user = mkOption {
type = types.str;
internal = true;
readOnly = true;
default = "hg";
description = ''
User for hg.sr.ht.
'';
};
port = mkOption {
type = types.port;
default = 5010;
description = ''
Port on which the "hg" module should listen.
'';
};
database = mkOption {
type = types.str;
default = "hg.sr.ht";
description = ''
PostgreSQL database name for hg.sr.ht.
'';
};
statePath = mkOption {
type = types.path;
default = "${cfg.statePath}/hgsrht";
description = ''
State path for hg.sr.ht.
'';
};
cloneBundles = mkOption {
type = types.bool;
default = false;
description = ''
Generate clonebundles (which require more disk space but dramatically speed up cloning large repositories).
'';
};
};
config = with scfg; lib.mkIf (cfg.enable && elem "hg" cfg.services) {
# In case a dedicated hg dispatcher ever comes into being; gitsrht-dispatch is used for now
environment.etc."ssh/hgsrht-dispatch" = {
mode = "0755";
text = ''
#! ${pkgs.stdenv.shell}
${cfg.python}/bin/gitsrht-dispatch "$@"
'';
};
environment.systemPackages = [ pkgs.mercurial ];
users = {
users = {
"${user}" = {
isSystemUser = true;
group = user;
# Assuming hg.sr.ht needs this too
shell = pkgs.bash;
description = "hg.sr.ht user";
};
};
groups = {
"${user}" = { };
};
};
services = {
cron.systemCronJobs = [ "*/20 * * * * ${cfg.python}/bin/hgsrht-periodic" ]
++ optional cloneBundles "0 * * * * ${cfg.python}/bin/hgsrht-clonebundles";
openssh.authorizedKeysCommand = ''/etc/ssh/hgsrht-dispatch "%u" "%h" "%t" "%k"'';
openssh.authorizedKeysCommandUser = "root";
openssh.extraConfig = ''
PermitUserEnvironment SRHT_*
'';
postgresql = {
authentication = ''
local ${database} ${user} trust
'';
ensureDatabases = [ database ];
ensureUsers = [
{
name = user;
ensurePermissions = { "DATABASE \"${database}\"" = "ALL PRIVILEGES"; };
}
];
};
};
systemd = {
tmpfiles.rules = [
# /var/log is owned by root
"f /var/log/hg-srht-shell 0644 ${user} ${user} -"
"d ${statePath} 0750 ${user} ${user} -"
"d ${cfg.settings."${iniKey}".repos} 2755 ${user} ${user} -"
];
services.hgsrht = import ./service.nix { inherit config pkgs lib; } scfg drv iniKey {
after = [ "redis.service" "postgresql.service" "network.target" ];
requires = [ "redis.service" "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
path = [ pkgs.mercurial ];
description = "hg.sr.ht website service";
serviceConfig.ExecStart = "${cfg.python}/bin/gunicorn ${drv.pname}.app:app -b ${cfg.address}:${toString port}";
};
};
services.sourcehut.settings = {
# URL hg.sr.ht is being served at (protocol://domain)
"hg.sr.ht".origin = mkDefault "http://hg.${cfg.originBase}";
# Address and port to bind the debug server to
"hg.sr.ht".debug-host = mkDefault "0.0.0.0";
"hg.sr.ht".debug-port = mkDefault port;
# Configures the SQLAlchemy connection string for the database.
"hg.sr.ht".connection-string = mkDefault "postgresql:///${database}?user=${user}&host=/var/run/postgresql";
# The redis connection used for the webhooks worker
"hg.sr.ht".webhooks = mkDefault "redis://${rcfg.bind}:${toString rcfg.port}/1";
# A post-update script which is installed in every mercurial repo.
"hg.sr.ht".changegroup-script = mkDefault "${cfg.python}/bin/hgsrht-hook-changegroup";
# hg.sr.ht's OAuth client ID and secret for meta.sr.ht
# Register your client at meta.example.org/oauth
"hg.sr.ht".oauth-client-id = mkDefault null;
"hg.sr.ht".oauth-client-secret = mkDefault null;
# Path to mercurial repositories on disk
"hg.sr.ht".repos = mkDefault "/var/lib/hg";
# Path to the srht mercurial extension
# (defaults to where the hgsrht code is)
# "hg.sr.ht".srhtext = mkDefault null;
# .hg/store size (in MB) past which the nightly job generates clone bundles.
# "hg.sr.ht".clone_bundle_threshold = mkDefault 50;
# Path to hg-ssh (if not in $PATH)
# "hg.sr.ht".hg_ssh = mkDefault /path/to/hg-ssh;
# The authorized keys hook uses this to dispatch to various handlers
# The format is a program to exec into as the key, and the user to match as the
# value. When someone tries to log in as this user, this program is executed
# and is expected to emit an AuthorizedKeys file.
#
# Uncomment the relevant lines to enable the various sr.ht dispatchers.
"hg.sr.ht::dispatch"."/run/current-system/sw/bin/hgsrht-keys" = mkDefault "${user}:${user}";
};
# TODO: requires testing and addition of hg-specific requirements
services.nginx.virtualHosts."hg.${cfg.originBase}" = {
forceSSL = true;
locations."/".proxyPass = "http://${cfg.address}:${toString port}";
locations."/query".proxyPass = "http://${cfg.address}:${toString (port + 100)}";
locations."/static".root = "${pkgs.sourcehut.hgsrht}/${pkgs.sourcehut.python.sitePackages}/hgsrht";
};
};
}

@@ -0,0 +1,118 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.sourcehut;
cfgIni = cfg.settings;
scfg = cfg.hub;
iniKey = "hub.sr.ht";
drv = pkgs.sourcehut.hubsrht;
in
{
options.services.sourcehut.hub = {
user = mkOption {
type = types.str;
default = "hubsrht";
description = ''
User for hub.sr.ht.
'';
};
port = mkOption {
type = types.port;
default = 5014;
description = ''
Port on which the "hub" module should listen.
'';
};
database = mkOption {
type = types.str;
default = "hub.sr.ht";
description = ''
PostgreSQL database name for hub.sr.ht.
'';
};
statePath = mkOption {
type = types.path;
default = "${cfg.statePath}/hubsrht";
description = ''
State path for hub.sr.ht.
'';
};
};
config = with scfg; lib.mkIf (cfg.enable && elem "hub" cfg.services) {
users = {
users = {
"${user}" = {
isSystemUser = true;
group = user;
description = "hub.sr.ht user";
};
};
groups = {
"${user}" = { };
};
};
services.postgresql = {
authentication = ''
local ${database} ${user} trust
'';
ensureDatabases = [ database ];
ensureUsers = [
{
name = user;
ensurePermissions = { "DATABASE \"${database}\"" = "ALL PRIVILEGES"; };
}
];
};
systemd = {
tmpfiles.rules = [
"d ${statePath} 0750 ${user} ${user} -"
];
services.hubsrht = import ./service.nix { inherit config pkgs lib; } scfg drv iniKey {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "hub.sr.ht website service";
serviceConfig.ExecStart = "${cfg.python}/bin/gunicorn ${drv.pname}.app:app -b ${cfg.address}:${toString port}";
};
};
services.sourcehut.settings = {
# URL hub.sr.ht is being served at (protocol://domain)
"hub.sr.ht".origin = mkDefault "http://hub.${cfg.originBase}";
# Address and port to bind the debug server to
"hub.sr.ht".debug-host = mkDefault "0.0.0.0";
"hub.sr.ht".debug-port = mkDefault port;
# Configures the SQLAlchemy connection string for the database.
"hub.sr.ht".connection-string = mkDefault "postgresql:///${database}?user=${user}&host=/var/run/postgresql";
# Set to "yes" to automatically run migrations on package upgrade.
"hub.sr.ht".migrate-on-upgrade = mkDefault "yes";
# hub.sr.ht's OAuth client ID and secret for meta.sr.ht
# Register your client at meta.example.org/oauth
"hub.sr.ht".oauth-client-id = mkDefault null;
"hub.sr.ht".oauth-client-secret = mkDefault null;
};
services.nginx.virtualHosts."${cfg.originBase}" = {
forceSSL = true;
locations."/".proxyPass = "http://${cfg.address}:${toString port}";
locations."/query".proxyPass = "http://${cfg.address}:${toString (port + 100)}";
locations."/static".root = "${pkgs.sourcehut.hubsrht}/${pkgs.sourcehut.python.sitePackages}/hubsrht";
};
services.nginx.virtualHosts."hub.${cfg.originBase}" = {
globalRedirect = "${cfg.originBase}";
forceSSL = true;
};
};
}

@@ -0,0 +1,185 @@
# Email setup is fairly involved, useful references:
# https://drewdevault.com/2018/08/05/Local-mail-server.html
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.sourcehut;
cfgIni = cfg.settings;
scfg = cfg.lists;
iniKey = "lists.sr.ht";
rcfg = config.services.redis;
drv = pkgs.sourcehut.listssrht;
in
{
options.services.sourcehut.lists = {
user = mkOption {
type = types.str;
default = "listssrht";
description = ''
User for lists.sr.ht.
'';
};
port = mkOption {
type = types.port;
default = 5006;
description = ''
Port on which the "lists" module should listen.
'';
};
database = mkOption {
type = types.str;
default = "lists.sr.ht";
description = ''
PostgreSQL database name for lists.sr.ht.
'';
};
statePath = mkOption {
type = types.path;
default = "${cfg.statePath}/listssrht";
description = ''
State path for lists.sr.ht.
'';
};
};
config = with scfg; lib.mkIf (cfg.enable && elem "lists" cfg.services) {
users = {
users = {
"${user}" = {
isSystemUser = true;
group = user;
extraGroups = [ "postfix" ];
description = "lists.sr.ht user";
};
};
groups = {
"${user}" = { };
};
};
services.postgresql = {
authentication = ''
local ${database} ${user} trust
'';
ensureDatabases = [ database ];
ensureUsers = [
{
name = user;
ensurePermissions = { "DATABASE \"${database}\"" = "ALL PRIVILEGES"; };
}
];
};
systemd = {
tmpfiles.rules = [
"d ${statePath} 0750 ${user} ${user} -"
];
services = {
listssrht = import ./service.nix { inherit config pkgs lib; } scfg drv iniKey {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "lists.sr.ht website service";
serviceConfig.ExecStart = "${cfg.python}/bin/gunicorn ${drv.pname}.app:app -b ${cfg.address}:${toString port}";
};
listssrht-process = {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "lists.sr.ht process service";
serviceConfig = {
Type = "simple";
User = user;
Restart = "always";
ExecStart = "${cfg.python}/bin/celery -A ${drv.pname}.process worker --loglevel=info";
};
};
listssrht-lmtp = {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "lists.sr.ht process service";
serviceConfig = {
Type = "simple";
User = user;
Restart = "always";
ExecStart = "${cfg.python}/bin/listssrht-lmtp";
};
};
listssrht-webhooks = {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "lists.sr.ht webhooks service";
serviceConfig = {
Type = "simple";
User = user;
Restart = "always";
ExecStart = "${cfg.python}/bin/celery -A ${drv.pname}.webhooks worker --loglevel=info";
};
};
};
};
services.sourcehut.settings = {
# URL lists.sr.ht is being served at (protocol://domain)
"lists.sr.ht".origin = mkDefault "http://lists.${cfg.originBase}";
# Address and port to bind the debug server to
"lists.sr.ht".debug-host = mkDefault "0.0.0.0";
"lists.sr.ht".debug-port = mkDefault port;
# Configures the SQLAlchemy connection string for the database.
"lists.sr.ht".connection-string = mkDefault "postgresql:///${database}?user=${user}&host=/var/run/postgresql";
# Set to "yes" to automatically run migrations on package upgrade.
"lists.sr.ht".migrate-on-upgrade = mkDefault "yes";
# lists.sr.ht's OAuth client ID and secret for meta.sr.ht
# Register your client at meta.example.org/oauth
"lists.sr.ht".oauth-client-id = mkDefault null;
"lists.sr.ht".oauth-client-secret = mkDefault null;
# Outgoing email for notifications generated by users
"lists.sr.ht".notify-from = mkDefault "CHANGEME@example.org";
# The redis connection used for the webhooks worker
"lists.sr.ht".webhooks = mkDefault "redis://${rcfg.bind}:${toString rcfg.port}/2";
# The redis connection used for the celery worker
"lists.sr.ht".redis = mkDefault "redis://${rcfg.bind}:${toString rcfg.port}/4";
# Network-key
"lists.sr.ht".network-key = mkDefault null;
# Allow creation
"lists.sr.ht".allow-new-lists = mkDefault "no";
# Posting Domain
"lists.sr.ht".posting-domain = mkDefault "lists.${cfg.originBase}";
# Path for the lmtp daemon's unix socket. Direct incoming mail to this socket.
# Alternatively, specify IP:PORT and an SMTP server will be run instead.
"lists.sr.ht::worker".sock = mkDefault "/tmp/lists.sr.ht-lmtp.sock";
# The lmtp daemon will make the unix socket group-read/write for users in this
# group.
"lists.sr.ht::worker".sock-group = mkDefault "postfix";
"lists.sr.ht::worker".reject-url = mkDefault "https://man.sr.ht/lists.sr.ht/etiquette.md";
"lists.sr.ht::worker".reject-mimetypes = mkDefault "text/html";
};
services.nginx.virtualHosts."lists.${cfg.originBase}" = {
forceSSL = true;
locations."/".proxyPass = "http://${cfg.address}:${toString port}";
locations."/query".proxyPass = "http://${cfg.address}:${toString (port + 100)}";
locations."/static".root = "${pkgs.sourcehut.listssrht}/${pkgs.sourcehut.python.sitePackages}/listssrht";
};
};
}

@@ -0,0 +1,122 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.sourcehut;
cfgIni = cfg.settings;
scfg = cfg.man;
iniKey = "man.sr.ht";
drv = pkgs.sourcehut.mansrht;
in
{
options.services.sourcehut.man = {
user = mkOption {
type = types.str;
default = "mansrht";
description = ''
User for man.sr.ht.
'';
};
port = mkOption {
type = types.port;
default = 5004;
description = ''
Port on which the "man" module should listen.
'';
};
database = mkOption {
type = types.str;
default = "man.sr.ht";
description = ''
PostgreSQL database name for man.sr.ht.
'';
};
statePath = mkOption {
type = types.path;
default = "${cfg.statePath}/mansrht";
description = ''
State path for man.sr.ht.
'';
};
};
config = with scfg; lib.mkIf (cfg.enable && elem "man" cfg.services) {
assertions =
[
{
assertion = hasAttrByPath [ "git.sr.ht" "oauth-client-id" ] cfgIni;
message = "man.sr.ht needs access to git.sr.ht.";
}
];
users = {
users = {
"${user}" = {
isSystemUser = true;
group = user;
description = "man.sr.ht user";
};
};
groups = {
"${user}" = { };
};
};
services.postgresql = {
authentication = ''
local ${database} ${user} trust
'';
ensureDatabases = [ database ];
ensureUsers = [
{
name = user;
ensurePermissions = { "DATABASE \"${database}\"" = "ALL PRIVILEGES"; };
}
];
};
systemd = {
tmpfiles.rules = [
"d ${statePath} 0750 ${user} ${user} -"
];
services.mansrht = import ./service.nix { inherit config pkgs lib; } scfg drv iniKey {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "man.sr.ht website service";
serviceConfig.ExecStart = "${cfg.python}/bin/gunicorn ${drv.pname}.app:app -b ${cfg.address}:${toString port}";
};
};
services.sourcehut.settings = {
# URL man.sr.ht is being served at (protocol://domain)
"man.sr.ht".origin = mkDefault "http://man.${cfg.originBase}";
# Address and port to bind the debug server to
"man.sr.ht".debug-host = mkDefault "0.0.0.0";
"man.sr.ht".debug-port = mkDefault port;
# Configures the SQLAlchemy connection string for the database.
"man.sr.ht".connection-string = mkDefault "postgresql:///${database}?user=${user}&host=/var/run/postgresql";
# Set to "yes" to automatically run migrations on package upgrade.
"man.sr.ht".migrate-on-upgrade = mkDefault "yes";
# man.sr.ht's OAuth client ID and secret for meta.sr.ht
# Register your client at meta.example.org/oauth
"man.sr.ht".oauth-client-id = mkDefault null;
"man.sr.ht".oauth-client-secret = mkDefault null;
};
services.nginx.virtualHosts."man.${cfg.originBase}" = {
forceSSL = true;
locations."/".proxyPass = "http://${cfg.address}:${toString port}";
locations."/query".proxyPass = "http://${cfg.address}:${toString (port + 100)}";
locations."/static".root = "${pkgs.sourcehut.mansrht}/${pkgs.sourcehut.python.sitePackages}/mansrht";
};
};
}

@@ -0,0 +1,211 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.sourcehut;
cfgIni = cfg.settings;
scfg = cfg.meta;
iniKey = "meta.sr.ht";
rcfg = config.services.redis;
drv = pkgs.sourcehut.metasrht;
in
{
options.services.sourcehut.meta = {
user = mkOption {
type = types.str;
default = "metasrht";
description = ''
User for meta.sr.ht.
'';
};
port = mkOption {
type = types.port;
default = 5000;
description = ''
Port on which the "meta" module should listen.
'';
};
database = mkOption {
type = types.str;
default = "meta.sr.ht";
description = ''
PostgreSQL database name for meta.sr.ht.
'';
};
statePath = mkOption {
type = types.path;
default = "${cfg.statePath}/metasrht";
description = ''
State path for meta.sr.ht.
'';
};
};
config = with scfg; lib.mkIf (cfg.enable && elem "meta" cfg.services) {
assertions =
[
{
assertion = with cfgIni."meta.sr.ht::billing"; enabled == "yes" -> (stripe-public-key != null && stripe-secret-key != null);
message = "If meta.sr.ht::billing is enabled, the keys should be defined.";
}
];
users = {
users = {
${user} = {
isSystemUser = true;
group = user;
description = "meta.sr.ht user";
};
};
groups = {
"${user}" = { };
};
};
services.cron.systemCronJobs = [ "0 0 * * * ${cfg.python}/bin/metasrht-daily" ];
services.postgresql = {
authentication = ''
local ${database} ${user} trust
'';
ensureDatabases = [ database ];
ensureUsers = [
{
name = user;
ensurePermissions = { "DATABASE \"${database}\"" = "ALL PRIVILEGES"; };
}
];
};
systemd = {
tmpfiles.rules = [
"d ${statePath} 0750 ${user} ${user} -"
];
services = {
metasrht = import ./service.nix { inherit config pkgs lib; } scfg drv iniKey {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "meta.sr.ht website service";
preStart = ''
# Configure client(s) as "preauthorized"
${concatMapStringsSep "\n\n"
(attr: ''
if ! test -e "${statePath}/${attr}.oauth" || [ "$(cat ${statePath}/${attr}.oauth)" != "${cfgIni."${attr}".oauth-client-id}" ]; then
# Configure ${attr}'s OAuth client as "preauthorized"
psql ${database} \
-c "UPDATE oauthclient SET preauthorized = true WHERE client_id = '${cfgIni."${attr}".oauth-client-id}'"
printf "%s" "${cfgIni."${attr}".oauth-client-id}" > "${statePath}/${attr}.oauth"
fi
'')
(builtins.attrNames (filterAttrs
(k: v: !(hasInfix "::" k) && builtins.hasAttr "oauth-client-id" v && v.oauth-client-id != null)
cfg.settings))}
'';
serviceConfig.ExecStart = "${cfg.python}/bin/gunicorn ${drv.pname}.app:app -b ${cfg.address}:${toString port}";
};
metasrht-api = import ./service.nix { inherit config pkgs lib; } scfg drv iniKey {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "meta.sr.ht api service";
preStart = ''
# Configure client(s) as "preauthorized"
${concatMapStringsSep "\n\n"
(attr: ''
if ! test -e "${statePath}/${attr}.oauth" || [ "$(cat ${statePath}/${attr}.oauth)" != "${cfgIni."${attr}".oauth-client-id}" ]; then
# Configure ${attr}'s OAuth client as "preauthorized"
psql ${database} \
-c "UPDATE oauthclient SET preauthorized = true WHERE client_id = '${cfgIni."${attr}".oauth-client-id}'"
printf "%s" "${cfgIni."${attr}".oauth-client-id}" > "${statePath}/${attr}.oauth"
fi
'')
(builtins.attrNames (filterAttrs
(k: v: !(hasInfix "::" k) && builtins.hasAttr "oauth-client-id" v && v.oauth-client-id != null)
cfg.settings))}
'';
serviceConfig.ExecStart = "${pkgs.sourcehut.metasrht}/bin/metasrht-api -b :${toString (port + 100)}";
};
metasrht-webhooks = {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "meta.sr.ht webhooks service";
serviceConfig = {
Type = "simple";
User = user;
Restart = "always";
ExecStart = "${cfg.python}/bin/celery -A ${drv.pname}.webhooks worker --loglevel=info";
};
};
};
};
services.sourcehut.settings = {
# URL meta.sr.ht is being served at (protocol://domain)
"meta.sr.ht".origin = mkDefault "https://meta.${cfg.originBase}";
# Address and port to bind the debug server to
"meta.sr.ht".debug-host = mkDefault "0.0.0.0";
"meta.sr.ht".debug-port = mkDefault port;
# Configures the SQLAlchemy connection string for the database.
"meta.sr.ht".connection-string = mkDefault "postgresql:///${database}?user=${user}&host=/var/run/postgresql";
# Set to "yes" to automatically run migrations on package upgrade.
"meta.sr.ht".migrate-on-upgrade = mkDefault "yes";
# If "yes", the user will be sent the stock sourcehut welcome emails after
# signup (requires cron to be configured properly). These are specific to the
# sr.ht instance so you probably want to patch these before enabling this.
"meta.sr.ht".welcome-emails = mkDefault "no";
# The redis connection used for the webhooks worker
"meta.sr.ht".webhooks = mkDefault "redis://${rcfg.bind}:${toString rcfg.port}/6";
# If "no", public registration will not be permitted.
"meta.sr.ht::settings".registration = mkDefault "no";
# Where to redirect new users upon registration
"meta.sr.ht::settings".onboarding-redirect = mkDefault "https://meta.${cfg.originBase}";
# How many invites each user is issued upon registration (only applicable if
# open registration is disabled)
"meta.sr.ht::settings".user-invites = mkDefault 5;
# Origin URL for the API (the port is 100 above the web port)
"meta.sr.ht".api-origin = mkDefault "http://localhost:5100";
# You can add aliases for the client IDs of commonly used OAuth clients here.
#
# Example:
"meta.sr.ht::aliases" = mkDefault { };
# "meta.sr.ht::aliases"."git.sr.ht" = 12345;
# "yes" to enable the billing system
"meta.sr.ht::billing".enabled = mkDefault "no";
# Get your keys at https://dashboard.stripe.com/account/apikeys
"meta.sr.ht::billing".stripe-public-key = mkDefault null;
"meta.sr.ht::billing".stripe-secret-key = mkDefault null;
};
services.nginx.virtualHosts."meta.${cfg.originBase}" = {
forceSSL = true;
locations."/".proxyPass = "http://${cfg.address}:${toString port}";
locations."/query".proxyPass = "http://${cfg.address}:${toString (port + 100)}";
locations."/static".root = "${pkgs.sourcehut.metasrht}/${pkgs.sourcehut.python.sitePackages}/metasrht";
};
};
}

@@ -0,0 +1,133 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.sourcehut;
cfgIni = cfg.settings;
scfg = cfg.paste;
iniKey = "paste.sr.ht";
rcfg = config.services.redis;
drv = pkgs.sourcehut.pastesrht;
in
{
options.services.sourcehut.paste = {
user = mkOption {
type = types.str;
default = "pastesrht";
description = ''
User for paste.sr.ht.
'';
};
port = mkOption {
type = types.port;
default = 5011;
description = ''
Port on which the "paste" module should listen.
'';
};
database = mkOption {
type = types.str;
default = "paste.sr.ht";
description = ''
PostgreSQL database name for paste.sr.ht.
'';
};
statePath = mkOption {
type = types.path;
default = "${cfg.statePath}/pastesrht";
description = ''
State path for paste.sr.ht.
'';
};
};
config = with scfg; lib.mkIf (cfg.enable && elem "paste" cfg.services) {
users = {
users = {
"${user}" = {
isSystemUser = true;
group = user;
description = "paste.sr.ht user";
};
};
groups = {
"${user}" = { };
};
};
services.postgresql = {
authentication = ''
local ${database} ${user} trust
'';
ensureDatabases = [ database ];
ensureUsers = [
{
name = user;
ensurePermissions = { "DATABASE \"${database}\"" = "ALL PRIVILEGES"; };
}
];
};
systemd = {
tmpfiles.rules = [
"d ${statePath} 0750 ${user} ${user} -"
];
services = {
pastesrht = import ./service.nix { inherit config pkgs lib; } scfg drv iniKey {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "paste.sr.ht website service";
serviceConfig.ExecStart = "${cfg.python}/bin/gunicorn ${drv.pname}.app:app -b ${cfg.address}:${toString port}";
};
pastesrht-webhooks = {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "paste.sr.ht webhooks service";
serviceConfig = {
Type = "simple";
User = user;
Restart = "always";
ExecStart = "${cfg.python}/bin/celery -A ${drv.pname}.webhooks worker --loglevel=info";
};
};
};
};
services.sourcehut.settings = {
# URL paste.sr.ht is being served at (protocol://domain)
"paste.sr.ht".origin = mkDefault "http://paste.${cfg.originBase}";
# Address and port to bind the debug server to
"paste.sr.ht".debug-host = mkDefault "0.0.0.0";
"paste.sr.ht".debug-port = mkDefault port;
# Configures the SQLAlchemy connection string for the database.
"paste.sr.ht".connection-string = mkDefault "postgresql:///${database}?user=${user}&host=/var/run/postgresql";
# Set to "yes" to automatically run migrations on package upgrade.
"paste.sr.ht".migrate-on-upgrade = mkDefault "yes";
# paste.sr.ht's OAuth client ID and secret for meta.sr.ht
# Register your client at meta.example.org/oauth
"paste.sr.ht".oauth-client-id = mkDefault null;
"paste.sr.ht".oauth-client-secret = mkDefault null;
"paste.sr.ht".webhooks = mkDefault "redis://${rcfg.bind}:${toString rcfg.port}/5";
};
services.nginx.virtualHosts."paste.${cfg.originBase}" = {
forceSSL = true;
locations."/".proxyPass = "http://${cfg.address}:${toString port}";
locations."/query".proxyPass = "http://${cfg.address}:${toString (port + 100)}";
locations."/static".root = "${pkgs.sourcehut.pastesrht}/${pkgs.sourcehut.python.sitePackages}/pastesrht";
};
};
}

@@ -0,0 +1,66 @@
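# Shared helper for the sourcehut per-service systemd units. It takes the
# module arguments, then a service config (scfg), the service derivation, the
# config.ini section key and a set of unit overrides, and returns a systemd
# service definition whose preStart sets up the database, records the schema
# version, refreshes user profile copies and runs migrations when enabled.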
{ config, pkgs, lib }:
serviceCfg: serviceDrv: iniKey: attrs:
let
cfg = config.services.sourcehut;
cfgIni = cfg.settings."${iniKey}";
pgSuperUser = config.services.postgresql.superUser;
setupDB = pkgs.writeScript "${serviceDrv.pname}-gen-db" ''
#! ${cfg.python}/bin/python
from ${serviceDrv.pname}.app import db
db.create()
'';
in
with serviceCfg; with lib; recursiveUpdate
{
environment.HOME = statePath;
path = [ config.services.postgresql.package ] ++ (attrs.path or [ ]);
restartTriggers = [ config.environment.etc."sr.ht/config.ini".source ];
serviceConfig = {
Type = "simple";
User = user;
Group = user;
Restart = "always";
WorkingDirectory = statePath;
} // (if (cfg.statePath == "/var/lib/sourcehut/${serviceDrv.pname}") then {
StateDirectory = [ "sourcehut/${serviceDrv.pname}" ];
} else {})
;
preStart = ''
if ! test -e ${statePath}/db; then
# Setup the initial database
${setupDB}
# Set the initial state of the database for future database upgrades
if test -e ${cfg.python}/bin/${serviceDrv.pname}-migrate; then
# Run alembic stamp head once to tell alembic the schema is up-to-date
${cfg.python}/bin/${serviceDrv.pname}-migrate stamp head
fi
printf "%s" "${serviceDrv.version}" > ${statePath}/db
fi
# Update the copy of each user's profile to the latest
# See https://lists.sr.ht/~sircmpwn/sr.ht-admins/<20190302181207.GA13778%40cirno.my.domain>
if ! test -e ${statePath}/webhook; then
# Update ${iniKey}'s users' profile copy to the latest
${cfg.python}/bin/srht-update-profiles ${iniKey}
touch ${statePath}/webhook
fi
${optionalString (builtins.hasAttr "migrate-on-upgrade" cfgIni && cfgIni.migrate-on-upgrade == "yes") ''
if [ "$(cat ${statePath}/db)" != "${serviceDrv.version}" ]; then
# Manage schema migrations using alembic
${cfg.python}/bin/${serviceDrv.pname}-migrate -a upgrade head
# Mark down current package version
printf "%s" "${serviceDrv.version}" > ${statePath}/db
fi
''}
${attrs.preStart or ""}
'';
}
(builtins.removeAttrs attrs [ "path" "preStart" ])

@@ -0,0 +1,115 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
version="5.0"
xml:id="module-services-sourcehut">
<title>Sourcehut</title>
<para>
<link xlink:href="https://sr.ht.com/">Sourcehut</link> is an open-source,
self-hostable software development platform. The server setup can be automated using
<link linkend="opt-services.sourcehut.enable">services.sourcehut</link>.
</para>
<section xml:id="module-services-sourcehut-basic-usage">
<title>Basic usage</title>
<para>
Sourcehut is a Python- and Go-based set of applications.
<literal><link linkend="opt-services.sourcehut.enable">services.sourcehut</link></literal>
by default will use
<literal><link linkend="opt-services.nginx.enable">services.nginx</link></literal>,
<literal><link linkend="opt-services.redis.enable">services.redis</link></literal>,
<literal><link linkend="opt-services.cron.enable">services.cron</link></literal>,
and
<literal><link linkend="opt-services.postgresql.enable">services.postgresql</link></literal>.
</para>
<para>
A very basic configuration may look like this:
<programlisting>
{ config, pkgs, lib, ... }:
let
fqdn =
let
join = hostName: domain: hostName + lib.optionalString (domain != null) ".${domain}";
in join config.networking.hostName config.networking.domain;
in {
networking = {
<link linkend="opt-networking.hostName">hostName</link> = "srht";
<link linkend="opt-networking.domain">domain</link> = "tld";
<link linkend="opt-networking.firewall.allowedTCPPorts">firewall.allowedTCPPorts</link> = [ 22 80 443 ];
};
services.sourcehut = {
<link linkend="opt-services.sourcehut.enable">enable</link> = true;
<link linkend="opt-services.sourcehut.originBase">originBase</link> = fqdn;
<link linkend="opt-services.sourcehut.services">services</link> = [ "meta" "man" "git" ];
<link linkend="opt-services.sourcehut.settings">settings</link> = {
"sr.ht" = {
environment = "production";
global-domain = fqdn;
origin = "https://${fqdn}";
# Produce keys with srht-keygen from <package>sourcehut.coresrht</package>.
network-key = "SECRET";
service-key = "SECRET";
};
webhooks.private-key = "SECRET";
};
};
<link linkend="opt-security.acme.certs._name_.extraDomainNames">security.acme.certs."${fqdn}".extraDomainNames</link> = [
"meta.${fqdn}"
"man.${fqdn}"
"git.${fqdn}"
];
services.nginx = {
<link linkend="opt-services.nginx.enable">enable</link> = true;
# only recommendedProxySettings are strictly required, but the rest make sense as well.
<link linkend="opt-services.nginx.recommendedTlsSettings">recommendedTlsSettings</link> = true;
<link linkend="opt-services.nginx.recommendedOptimisation">recommendedOptimisation</link> = true;
<link linkend="opt-services.nginx.recommendedGzipSettings">recommendedGzipSettings</link> = true;
<link linkend="opt-services.nginx.recommendedProxySettings">recommendedProxySettings</link> = true;
# Settings to setup what certificates are used for which endpoint.
<link linkend="opt-services.nginx.virtualHosts">virtualHosts</link> = {
<link linkend="opt-services.nginx.virtualHosts._name_.enableACME">"${fqdn}".enableACME</link> = true;
<link linkend="opt-services.nginx.virtualHosts._name_.useACMEHost">"meta.${fqdn}".useACMEHost</link> = fqdn:
<link linkend="opt-services.nginx.virtualHosts._name_.useACMEHost">"man.${fqdn}".useACMEHost</link> = fqdn:
<link linkend="opt-services.nginx.virtualHosts._name_.useACMEHost">"git.${fqdn}".useACMEHost</link> = fqdn:
};
};
}
</programlisting>
</para>
<para>
The <literal>hostName</literal> option is used (via <literal>originBase</literal>) to configure
the nginx reverse-proxy. The <literal>settings</literal> attribute set is used by the
configuration generator and the result is placed in <literal>/etc/sr.ht/config.ini</literal>.
</para>
</section>
<section xml:id="module-services-sourcehut-configuration">
<title>Configuration</title>
<para>
All configuration parameters are stored in
<literal>/etc/sr.ht/config.ini</literal>, which is generated by
the module and linked from the Nix store, so every value in <literal>config.ini</literal>
is managed through the module.
</para>
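<para>
Settings that the module does not manage explicitly can still be set through the
free-form <literal>services.sourcehut.settings</literal> option. For example, to open
up public registration on <literal>meta.sr.ht</literal>:
<programlisting>
services.sourcehut.settings."meta.sr.ht::settings".registration = "yes";
</programlisting>
</para>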
</section>
<section xml:id="module-services-sourcehut-httpd">
<title>Using an alternative webserver as reverse-proxy (e.g. <literal>httpd</literal>)</title>
<para>
By default, <package>nginx</package> is used as reverse-proxy for <package>sourcehut</package>.
However, it is possible to use e.g. <package>httpd</package> by explicitly disabling
<package>nginx</package> using <xref linkend="opt-services.nginx.enable" /> and adjusting
the relevant <literal>settings</literal>, as sketched below.
</para>
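<para>
A minimal sketch of such a setup for <literal>meta.sr.ht</literal> (assuming the default
port 5000 from this module and the <literal>proxyPass</literal> support of the NixOS
<package>httpd</package> module; <literal>meta.example.org</literal> is a placeholder for
your own <literal>originBase</literal>) might look like:
<programlisting>
services.nginx.enable = false;
services.httpd = {
  enable = true;
  adminAddr = "webmaster@example.org";
  virtualHosts."meta.example.org" = {
    forceSSL = true;
    enableACME = true;
    # Proxy to the meta.sr.ht web service bound on the default address/port.
    locations."/".proxyPass = "http://127.0.0.1:5000/";
  };
};
</programlisting>
</para>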
</section>
</chapter>

@@ -0,0 +1,161 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.sourcehut;
cfgIni = cfg.settings;
scfg = cfg.todo;
iniKey = "todo.sr.ht";
rcfg = config.services.redis;
drv = pkgs.sourcehut.todosrht;
in
{
options.services.sourcehut.todo = {
user = mkOption {
type = types.str;
default = "todosrht";
description = ''
User for todo.sr.ht.
'';
};
port = mkOption {
type = types.port;
default = 5003;
description = ''
Port on which the "todo" module should listen.
'';
};
database = mkOption {
type = types.str;
default = "todo.sr.ht";
description = ''
PostgreSQL database name for todo.sr.ht.
'';
};
statePath = mkOption {
type = types.path;
default = "${cfg.statePath}/todosrht";
description = ''
State path for todo.sr.ht.
'';
};
};
config = with scfg; lib.mkIf (cfg.enable && elem "todo" cfg.services) {
users = {
users = {
"${user}" = {
isSystemUser = true;
group = user;
extraGroups = [ "postfix" ];
description = "todo.sr.ht user";
};
};
groups = {
"${user}" = { };
};
};
services.postgresql = {
authentication = ''
local ${database} ${user} trust
'';
ensureDatabases = [ database ];
ensureUsers = [
{
name = user;
ensurePermissions = { "DATABASE \"${database}\"" = "ALL PRIVILEGES"; };
}
];
};
systemd = {
tmpfiles.rules = [
"d ${statePath} 0750 ${user} ${user} -"
];
services = {
todosrht = import ./service.nix { inherit config pkgs lib; } scfg drv iniKey {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "todo.sr.ht website service";
serviceConfig.ExecStart = "${cfg.python}/bin/gunicorn ${drv.pname}.app:app -b ${cfg.address}:${toString port}";
};
todosrht-lmtp = {
after = [ "postgresql.service" "network.target" ];
bindsTo = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "todo.sr.ht process service";
serviceConfig = {
Type = "simple";
User = user;
Restart = "always";
ExecStart = "${cfg.python}/bin/todosrht-lmtp";
};
};
todosrht-webhooks = {
after = [ "postgresql.service" "network.target" ];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
description = "todo.sr.ht webhooks service";
serviceConfig = {
Type = "simple";
User = user;
Restart = "always";
ExecStart = "${cfg.python}/bin/celery -A ${drv.pname}.webhooks worker --loglevel=info";
};
};
};
};
services.sourcehut.settings = {
# URL todo.sr.ht is being served at (protocol://domain)
"todo.sr.ht".origin = mkDefault "http://todo.${cfg.originBase}";
# Address and port to bind the debug server to
"todo.sr.ht".debug-host = mkDefault "0.0.0.0";
"todo.sr.ht".debug-port = mkDefault port;
# Configures the SQLAlchemy connection string for the database.
"todo.sr.ht".connection-string = mkDefault "postgresql:///${database}?user=${user}&host=/var/run/postgresql";
# Set to "yes" to automatically run migrations on package upgrade.
"todo.sr.ht".migrate-on-upgrade = mkDefault "yes";
# todo.sr.ht's OAuth client ID and secret for meta.sr.ht
# Register your client at meta.example.org/oauth
"todo.sr.ht".oauth-client-id = mkDefault null;
"todo.sr.ht".oauth-client-secret = mkDefault null;
# Outgoing email for notifications generated by users
"todo.sr.ht".notify-from = mkDefault "CHANGEME@example.org";
# The redis connection used for the webhooks worker
"todo.sr.ht".webhooks = mkDefault "redis://${rcfg.bind}:${toString rcfg.port}/1";
# Network-key
"todo.sr.ht".network-key = mkDefault null;
# Path for the lmtp daemon's unix socket. Direct incoming mail to this socket.
# Alternatively, specify IP:PORT and an SMTP server will be run instead.
"todo.sr.ht::mail".sock = mkDefault "/tmp/todo.sr.ht-lmtp.sock";
# The lmtp daemon will make the unix socket group-read/write for users in this
# group.
"todo.sr.ht::mail".sock-group = mkDefault "postfix";
"todo.sr.ht::mail".posting-domain = mkDefault "todo.${cfg.originBase}";
};
services.nginx.virtualHosts."todo.${cfg.originBase}" = {
forceSSL = true;
locations."/".proxyPass = "http://${cfg.address}:${toString port}";
locations."/query".proxyPass = "http://${cfg.address}:${toString (port + 100)}";
locations."/static".root = "${pkgs.sourcehut.todosrht}/${pkgs.sourcehut.python.sitePackages}/todosrht";
};
};
}

View File

@ -51,6 +51,7 @@ let
"pihole"
"postfix"
"postgres"
"process"
"py-air-control"
"redis"
"rspamd"

View File

@ -0,0 +1,48 @@
{ config, lib, pkgs, options }:
with lib;
let
cfg = config.services.prometheus.exporters.process;
configFile = pkgs.writeText "process-exporter.yaml" (builtins.toJSON cfg.settings);
in
{
port = 9256;
extraOpts = {
settings.process_names = mkOption {
type = types.listOf types.anything;
default = [];
example = literalExample ''
{
process_names = [
# Remove nix store path from process name
{ name = "{{.Matches.Wrapped}} {{ .Matches.Args }}"; cmdline = [ "^/nix/store[^ ]*/(?P<Wrapped>[^ /]*) (?P<Args>.*)" ]; }
];
}
'';
description = ''
All settings expressed as a Nix attribute set.
Check the official documentation for the corresponding YAML
settings that can all be used here: <link xlink:href="https://github.com/ncabatoff/process-exporter" />
'';
};
};
serviceOpts = {
serviceConfig = {
DynamicUser = false;
ExecStart = ''
${pkgs.prometheus-process-exporter}/bin/process-exporter \
--web.listen-address ${cfg.listenAddress}:${toString cfg.port} \
--config.path ${configFile} \
${concatStringsSep " \\\n " cfg.extraFlags}
'';
NoNewPrivileges = true;
ProtectHome = true;
ProtectSystem = true;
ProtectKernelTunables = true;
ProtectKernelModules = true;
ProtectControlGroups = true;
};
};
}

View File

@ -66,9 +66,7 @@ in {
};
in (mkMerge [{
environment.systemPackages = [ cfg.package pkgs.ipsecTools ];
environment.systemPackages = [ cfg.package ];
boot.kernelModules = [ "tun" "openvswitch" ];
boot.extraModulePackages = [ cfg.package ];
@ -146,6 +144,8 @@ in {
}
(mkIf (cfg.ipsec && (versionOlder cfg.package.version "2.6.0")) {
environment.systemPackages = [ pkgs.ipsecTools ];
services.racoon.enable = true;
services.racoon.configPath = "${runDir}/ipsec/etc/racoon/racoon.conf";

View File

@ -864,6 +864,25 @@ let
'';
};
process = {
exporterConfig = {
enable = true;
settings.process_names = [
# Remove nix store path from process name
{ name = "{{.Matches.Wrapped}} {{ .Matches.Args }}"; cmdline = [ "^/nix/store[^ ]*/(?P<Wrapped>[^ /]*) (?P<Args>.*)" ]; }
];
};
exporterTest = ''
wait_for_unit("prometheus-process-exporter.service")
wait_for_open_port(9256)
wait_until_succeeds(
"curl -sSf localhost:9256/metrics | grep -q '{}'".format(
'namedprocess_namegroup_cpu_seconds_total{groupname="process-exporter '
)
)
'';
};
py-air-control = {
nodeName = "py_air_control";
exporterConfig = {

View File

@ -1,5 +1,5 @@
import ./make-test-python.nix {
name = "opensmtpd";
name = "rss2email";
nodes = {
server = { pkgs, ... }: {

nixos/tests/sourcehut.nix Normal file
View File

@ -0,0 +1,29 @@
import ./make-test-python.nix ({ pkgs, ... }:
{
name = "sourcehut";
meta.maintainers = [ pkgs.lib.maintainers.tomberek ];
machine = { config, pkgs, ... }: {
virtualisation.memorySize = 2048;
networking.firewall.allowedTCPPorts = [ 80 ];
services.sourcehut = {
enable = true;
services = [ "meta" ];
originBase = "sourcehut";
settings."sr.ht".service-key = "8888888888888888888888888888888888888888888888888888888888888888";
settings."sr.ht".network-key = "0000000000000000000000000000000000000000000=";
settings.webhooks.private-key = "0000000000000000000000000000000000000000000=";
};
};
testScript = ''
start_all()
machine.wait_for_unit("multi-user.target")
machine.wait_for_unit("metasrht.service")
machine.wait_for_open_port(5000)
machine.succeed("curl -sL http://localhost:5000 | grep meta.sourcehut")
'';
})

View File

@ -2,13 +2,13 @@
stdenv.mkDerivation rec {
pname = "boops";
version = "1.4.0";
version = "1.6.0";
src = fetchFromGitHub {
owner = "sjaehn";
repo = "BOops";
rev = version;
sha256 = "1kkp6s431pjb1qrg1dq8ak3lj0ksqnxsij9jg6biscpfgbmaqdcq";
sha256 = "sha256-7eNvt8PxIZCp83Y5XX5fBolBon4j+HPtu8wrgG8Miok=";
};
nativeBuildInputs = [ pkg-config ];

View File

@ -0,0 +1,39 @@
{ stdenv, lib, fetchFromGitHub, faust2jaqt, faust2lv2 }:
stdenv.mkDerivation rec {
pname = "faustPhysicalModeling";
version = "2.20.2";
src = fetchFromGitHub {
owner = "grame-cncm";
repo = "faust";
rev = version;
sha256 = "1mm93ba26b7q69hvabzalg30dh8pl858nj4m2bb57pznnp09lq9a";
};
buildInputs = [ faust2jaqt faust2lv2 ];
buildPhase = ''
cd examples/physicalModeling
for f in *MIDI.dsp; do
faust2jaqt -time -vec -double -midi -nvoices 16 -t 99999 $f
faust2lv2 -time -vec -double -gui -nvoices 16 -t 99999 $f
done
'';
installPhase = ''
mkdir -p $out/lib/lv2 $out/bin
mv *.lv2/ $out/lib/lv2
for f in $(find . -executable -type f); do
cp $f $out/bin/
done
'';
meta = with lib; {
description = "The physical models included with faust compiled as jack standalone and lv2 instruments";
homepage = "https://github.com/grame-cncm/faust/tree/master-dev/examples/physicalModeling";
license = licenses.mit;
platforms = platforms.linux;
maintainers = with maintainers; [ magnetophon ];
};
}

View File

@ -0,0 +1,39 @@
{ stdenv, lib, fetchFromGitHub, faust2jaqt, faust2lv2 }:
stdenv.mkDerivation rec {
pname = "faustPhhysicalModeling";
version = "2.20.2";
src = fetchFromGitHub {
owner = "grame-cncm";
repo = "faust";
rev = version;
sha256 = "1mm93ba26b7q69hvabzalg30dh8pl858nj4m2bb57pznnp09lq9a";
};
buildInputs = [ faust2jaqt faust2lv2 ];
buildPhase = ''
cd examples/physicalModeling/faust-stk
for f in *.dsp; do
faust2jaqt -time -vec -midi -nvoices 8 -t 99999 $f
faust2lv2 -time -vec -double -gui -nvoices 32 -t 99999 $f
done
'';
installPhase = ''
mkdir -p $out/lib/lv2 $out/bin
mv *.lv2/ $out/lib/lv2
for f in $(find . -executable -type f); do
cp $f $out/bin/
done
'';
meta = with lib; {
description = "The physical modeling instruments included with faust, compiled as jack standalone and lv2 instruments";
homepage = "https://ccrma.stanford.edu/~rmichon/faustSTK/";
license = licenses.stk;
platforms = platforms.linux;
maintainers = with maintainers; [ magnetophon ];
};
}

View File

@ -1,5 +1,7 @@
{ lib
, fetchFromGitLab
, makeDesktopItem
, copyDesktopItems
, rustPlatform
, pkg-config
, clang
@ -23,11 +25,19 @@ rustPlatform.buildRustPackage rec {
cargoSha256 = "sha256-uNTSU06Fz/ud04K40e98rb7o/uAht0DsiJOXeHX72vw=";
nativeBuildInputs = [ clang pkg-config ];
nativeBuildInputs = [ clang copyDesktopItems pkg-config ];
buildInputs = [ glib gtk4 pipewire ];
LIBCLANG_PATH = "${libclang.lib}/lib";
desktopItems = makeDesktopItem {
name = "Helvum";
exec = pname;
desktopName = "Helvum";
genericName = "Helvum";
categories = "AudioVideo;";
};
meta = with lib; {
description = "A GTK patchbay for pipewire";
homepage = "https://gitlab.freedesktop.org/ryuukyu/helvum";

View File

@ -2,13 +2,13 @@
stdenv.mkDerivation rec {
pname = "stochas";
version = "1.3.4";
version = "1.3.5";
src = fetchFromGitHub {
owner = "surge-synthesizer";
repo = pname;
rev = "v${version}";
sha256 = "0b26mbj727dnygavz4kihnhmnnvwsr9l145w6kydq7bd7nwiw7lq";
sha256 = "1z8q53qfigw6wwbvpca92b9pf9d0mv3nyb0fmszz5ikj3pcybi7m";
fetchSubmodules = true;
};

View File

@ -2,11 +2,11 @@
stdenv.mkDerivation rec {
pname = "logseq";
version = "0.0.16";
version = "0.1.3";
src = fetchurl {
url = "https://github.com/logseq/logseq/releases/download/${version}/logseq-linux-x64-${version}.AppImage";
sha256 = "dmgwFHJRy5qE71naRJKX0HCrVG0qQBOIM9TvCh4j/lY=";
sha256 = "1akg3xjbh01nb7l06qpvz3xsjj64kf042xjnapn60jlgg5y34vbm";
name = "${pname}-${version}.AppImage";
};

View File

@ -15,6 +15,7 @@
, openssl
, libopus
, ffmpeg
, wayland
}:
stdenv.mkDerivation rec {
@ -47,6 +48,7 @@ stdenv.mkDerivation rec {
openssl
libopus
ffmpeg
wayland
];
meta = with lib; {

View File

@ -1,5 +1,6 @@
{ lib, setuptools, boto3, requests, click, pyyaml, pydantic, buildPythonApplication
, pythonOlder, fetchFromGitHub, awscli }:
{ lib, setuptools, boto3, requests, click, pyyaml, pydantic
, buildPythonApplication, pythonOlder, installShellFiles, fetchFromGitHub
, awscli }:
buildPythonApplication rec {
pname = "nimbo";
@ -12,13 +13,20 @@ buildPythonApplication rec {
rev = "v${version}";
sha256 = "1fs28s9ynfxrb4rzba6cmik0kl0q0vkpb4zdappsq62jqf960k24";
};
nativeBuildInputs = [ installShellFiles ];
propagatedBuildInputs = [ setuptools boto3 awscli requests click pyyaml pydantic ];
# nimbo tests require an AWS instance
doCheck = false;
pythonImportsCheck = [ "nimbo" ];
postInstall = ''
installShellCompletion --cmd nimbo \
--zsh <(_NIMBO_COMPLETE=source_zsh $out/bin/nimbo) \
--bash <(_NIMBO_COMPLETE=source_bash $out/bin/nimbo) \
--fish <(_NIMBO_COMPLETE=source_fish $out/bin/nimbo)
'';
meta = with lib; {
description = "Run machine learning jobs on AWS with a single command";
homepage = "https://github.com/nimbo-sh/nimbo";

View File

@ -24,7 +24,8 @@ in python.pkgs.buildPythonPackage {
postPatch = ''
substituteInPlace requirements.txt \
--replace "aiohttp==3.6.2" "aiohttp>=3.6.2"
--replace "aiohttp==3.6.2" "aiohttp>=3.6.2" \
--replace "py-cpuinfo==7.0.0" "py-cpuinfo>=8.0.0"
'';
propagatedBuildInputs = with python.pkgs; [

View File

@ -9,7 +9,8 @@ stdenv.mkDerivation {
version = "13.3.1.22";
src = fetchurl {
url = "https://download.cdn.viber.com/cdn/desktop/Linux/viber.deb";
# Official link: https://download.cdn.viber.com/cdn/desktop/Linux/viber.deb
url = "http://web.archive.org/web/20210602004133/https://download.cdn.viber.com/cdn/desktop/Linux/viber.deb";
sha256 = "0rs26x0lycavybn6k1hbb5kzms0zzcmxlrmi4g8k7vyafj6s8dqh";
};

View File

@ -0,0 +1,53 @@
{ lib, mkDerivation, fetchFromGitHub, qmake, cmake, pkg-config, miniupnpc, bzip2
, speex, libmicrohttpd, libxml2, libxslt, sqlcipher, rapidjson, libXScrnSaver
, qtbase, qtx11extras, qtmultimedia, libgnome-keyring3
}:
mkDerivation rec {
pname = "retroshare";
version = "0.6.6";
src = fetchFromGitHub {
owner = "RetroShare";
repo = "RetroShare";
rev = "v${version}";
sha256 = "1hsymbhsfgycj39mdkrdp2hgq8irmvxa4a6jx2gg339m1fgf2xmh";
fetchSubmodules = true;
};
patches = [
# The build normally tries to get git sub-modules during build
# but we already have them checked out
./no-submodules.patch
];
nativeBuildInputs = [ pkg-config qmake cmake ];
buildInputs = [
speex miniupnpc qtmultimedia qtx11extras qtbase libgnome-keyring3
bzip2 libXScrnSaver libxml2 libxslt sqlcipher libmicrohttpd rapidjson
];
qmakeFlags = [
# Upnp library autodetection doesn't work
"RS_UPNP_LIB=miniupnpc"
# These values are normally found from the .git folder
"RS_MAJOR_VERSION=${lib.versions.major version}"
"RS_MINOR_VERSION=${lib.versions.minor version}"
"RS_MINI_VERSION=${lib.versions.patch version}"
"RS_EXTRA_VERSION="
];
postInstall = ''
# BT DHT bootstrap
cp libbitdht/src/bitdht/bdboot.txt $out/share/retroshare
'';
meta = with lib; {
description = "Decentralized peer to peer chat application.";
homepage = "http://retroshare.sourceforge.net/";
license = licenses.gpl2Plus;
platforms = platforms.linux;
maintainers = with maintainers; [ StijnDW ];
};
}

View File

@ -0,0 +1,62 @@
diff --git a/libretroshare/src/libretroshare.pro b/libretroshare/src/libretroshare.pro
index 84d18944e..71aeb67d2 100644
--- a/libretroshare/src/libretroshare.pro
+++ b/libretroshare/src/libretroshare.pro
@@ -870,20 +870,14 @@ rs_jsonapi {
genrestbedlib.variable_out = PRE_TARGETDEPS
win32-g++:isEmpty(QMAKE_SH) {
genrestbedlib.commands = \
- cd /D $$shell_path($${RS_SRC_PATH}) && git submodule update --init supportlibs/restbed || cd . $$escape_expand(\\n\\t) \
- cd /D $$shell_path($${RESTBED_SRC_PATH}) && git submodule update --init dependency/asio || cd . $$escape_expand(\\n\\t) \
- cd /D $$shell_path($${RESTBED_SRC_PATH}) && git submodule update --init dependency/catch || cd . $$escape_expand(\\n\\t )\
- cd /D $$shell_path($${RESTBED_SRC_PATH}) && git submodule update --init dependency/kashmir || cd . $$escape_expand(\\n\\t) \
+ cd /D $$shell_path($${RS_SRC_PATH}) && cd . $$escape_expand(\\n\\t) \
+ cd /D $$shell_path($${RESTBED_SRC_PATH}) && cd . $$escape_expand(\\n\\t) \
+ cd /D $$shell_path($${RESTBED_SRC_PATH}) && cd . $$escape_expand(\\n\\t )\
+ cd /D $$shell_path($${RESTBED_SRC_PATH}) && cd . $$escape_expand(\\n\\t) \
$(CHK_DIR_EXISTS) $$shell_path($$UDP_DISCOVERY_BUILD_PATH) $(MKDIR) $$shell_path($${UDP_DISCOVERY_BUILD_PATH}) $$escape_expand(\\n\\t)
} else {
genrestbedlib.commands = \
- cd $${RS_SRC_PATH} && ( \
- git submodule update --init supportlibs/restbed ; \
- cd $${RESTBED_SRC_PATH} ; \
- git submodule update --init dependency/asio ; \
- git submodule update --init dependency/catch ; \
- git submodule update --init dependency/kashmir ; \
- true ) && \
+ cd $${RS_SRC_PATH} && \
mkdir -p $${RESTBED_BUILD_PATH} &&
}
genrestbedlib.commands += \
@@ -991,14 +985,9 @@ rs_broadcast_discovery {
udpdiscoverycpplib.variable_out = PRE_TARGETDEPS
win32-g++:isEmpty(QMAKE_SH) {
udpdiscoverycpplib.commands = \
- cd /D $$shell_path($${RS_SRC_PATH}) && git submodule update --init supportlibs/udp-discovery-cpp || cd . $$escape_expand(\\n\\t) \
$(CHK_DIR_EXISTS) $$shell_path($$UDP_DISCOVERY_BUILD_PATH) $(MKDIR) $$shell_path($${UDP_DISCOVERY_BUILD_PATH}) $$escape_expand(\\n\\t)
} else {
- udpdiscoverycpplib.commands = \
- cd $${RS_SRC_PATH} && ( \
- git submodule update --init supportlibs/udp-discovery-cpp || \
- true ) && \
- mkdir -p $${UDP_DISCOVERY_BUILD_PATH} &&
+ udpdiscoverycpplib.commands = mkdir -p $${UDP_DISCOVERY_BUILD_PATH} &&
}
udpdiscoverycpplib.commands += \
cd $$shell_path($${UDP_DISCOVERY_BUILD_PATH}) && \
diff --git a/retroshare-gui/src/retroshare-gui.pro b/retroshare-gui/src/retroshare-gui.pro
index 654efd170..06cba9ba3 100644
--- a/retroshare-gui/src/retroshare-gui.pro
+++ b/retroshare-gui/src/retroshare-gui.pro
@@ -66,10 +66,7 @@ rs_gui_cmark {
gencmarklib.CONFIG += target_predeps combine
gencmarklib.variable_out = PRE_TARGETDEPS
gencmarklib.commands = \
- cd $${RS_SRC_PATH} && ( \
- git submodule update --init supportlibs/cmark ; \
- cd $${CMARK_SRC_PATH} ; \
- true ) && \
+ cd $${RS_SRC_PATH} && \
mkdir -p $${CMARK_BUILD_PATH} && cd $${CMARK_BUILD_PATH} && \
cmake \
-DCMAKE_CXX_COMPILER=$$QMAKE_CXX \

View File

@ -25,6 +25,17 @@ mkDerivation rec {
# Using a local file instead of the content of commit #33e3d896a47 because
# sourceRoot makes it inapplicable
./qt515.patch
# Change from upstream master that removes extern-C scopes which
# cause failures with modern glib. This can likely be removed if
# there is an upstream release >1.12
(fetchpatch {
name = "fix-extern-c.patch";
url = "https://github.com/DreamSourceLab/DSView/commit/33cc733abe19872bf5ed08540a94b798d0d4ecf4.patch";
sha256 = "sha256-TLfLQa3sdyNHTpMMvId/V6uUuOFihOZMFJOj9frnDoY=";
stripLen = 2;
extraPrefix = "";
})
];
nativeBuildInputs = [ cmake pkg-config ];

View File

@ -4,13 +4,13 @@
stdenv.mkDerivation rec {
pname = "tig";
version = "2.5.3";
version = "2.5.4";
src = fetchFromGitHub {
owner = "jonas";
repo = pname;
rev = "${pname}-${version}";
sha256 = "sha256-BXs7aKUYiU5L2OjhhmJ+dkHvNcrnw5qREwOTB6npLnw=";
sha256 = "sha256-dZqqUydZ4q/mDEjtojpMGfzAmW3yCNDvT9oCEmhq1hg=";
};
nativeBuildInputs = [ makeWrapper autoreconfHook asciidoc xmlto docbook_xsl docbook_xml_dtd_45 findXMLCatalogs pkg-config ];

View File

@ -31,6 +31,7 @@ let
in
with python.pkgs; recurseIntoAttrs {
inherit python;
coresrht = toPythonApplication srht;
buildsrht = toPythonApplication buildsrht;
dispatchsrht = toPythonApplication dispatchsrht;
gitsrht = toPythonApplication gitsrht;

View File

@ -0,0 +1,29 @@
{ lib, fetchurl }:
let
version = "0.52";
in fetchurl {
name = "edwin-${version}";
url = "https://github.com/MuseScoreFonts/Edwin/archive/refs/tags/v${version}.tar.gz";
downloadToTemp = true;
recursiveHash = true;
sha256 = "sha256-e0ADK72ECl+QMvLWtFJfeHBmuEwzr9M+Kqvkd5Z2mmo=";
postFetch = ''
tar xzf $downloadedFile
mkdir -p $out/share/fonts/opentype
install Edwin-${version}/*.otf $out/share/fonts/opentype
'';
meta = with lib; {
description = "A text font for musical scores";
homepage = "https://github.com/MuseScoreFonts/Edwin";
license = licenses.ofl;
platforms = platforms.all;
maintainers = with maintainers; [ fortuneteller2k ];
};
}

View File

@ -80,11 +80,22 @@ let
dir=${if releaseType == "escript"
then "bin"
else "rel"}
mkdir -p "$out/$dir"
mkdir -p "$out/$dir" "$out/bin"
cp -R --preserve=mode "_build/${profile}/$dir" "$out"
${lib.optionalString (releaseType == "release")
"find $out/rel/*/bin -type f -executable -exec ln -s -t $out/bin {} \\;"}
runHook postInstall
'';
postInstall = ''
for dir in $out/rel/*/erts-*; do
echo "ERTS found in $dir - removing references to erlang to reduce closure size"
for f in $dir/bin/{erl,start}; do
substituteInPlace "$f" --replace "${erlang}/lib/erlang" "''${dir/\/erts-*/}"
done
done
'';
meta = {
inherit (erlang.meta) platforms;
} // meta;

View File

@ -3,13 +3,13 @@
stdenv.mkDerivation (rec {
pname = "ponyc";
version = "0.38.3";
version = "0.41.1";
src = fetchFromGitHub {
owner = "ponylang";
repo = pname;
rev = version;
sha256 = "14kivmyphi7gbd7mgd4cnsiwl4cl7wih8kwzh7n79s2s4c5hj4ak";
sha256 = "02wx070cy1193xzv58vh79yzwgpqiayqlwd3i285698fppbcg69a";
# Due to a bug in LLVM 9.x, ponyc has to include its own vendored patched
# LLVM. (The submodule is a specific tag in the LLVM source tree).
@ -23,34 +23,33 @@ stdenv.mkDerivation (rec {
fetchSubmodules = true;
};
ponygbenchmark = fetchurl {
url = "https://github.com/google/benchmark/archive/v1.5.0.tar.gz";
sha256 = "06i2cr4rj126m1zfz0x1rbxv1mw1l7a11mzal5kqk56cdrdicsiw";
name = "v1.5.0.tar.gz";
ponygbenchmark = fetchFromGitHub {
owner = "google";
repo = "benchmark";
rev = "v1.5.2";
sha256 = "13rxagpzw6bal6ajlmrxlh9kgfvcixn6j734b2bvfqz7lch8n0pa";
};
nativeBuildInputs = [ cmake makeWrapper which ];
buildInputs = [ libxml2 z3 ];
propagatedBuildInputs = [ cc ];
# Sandbox disallows network access, so disabling problematic networking tests
patches = [
./disable-tests.patch
./fix-libstdcpp-path.patch
(substituteAll {
src = ./make-safe-for-sandbox.patch;
googletest = fetchurl {
url = "https://github.com/google/googletest/archive/release-1.8.1.tar.gz";
sha256 = "17147961i01fl099ygxjx4asvjanwdd446nwbq9v8156h98zxwcv";
name = "release-1.8.1.tar.gz";
};
})
];
postUnpack = ''
mkdir -p source/build/build_libs/gbenchmark-prefix/src
tar -C source/build/build_libs/gbenchmark-prefix/src -zxvf "$ponygbenchmark"
mv source/build/build_libs/gbenchmark-prefix/src/benchmark-1.5.0 \
source/build/build_libs/gbenchmark-prefix/src/benchmark
cp -r "$ponygbenchmark"/ source/build/build_libs/gbenchmark-prefix/src/benchmark
chmod -R u+w source/build/build_libs/gbenchmark-prefix/src/benchmark
'';
dontConfigure = true;
@ -61,7 +60,6 @@ stdenv.mkDerivation (rec {
patch -d lib/llvm/src/ -p1 < lib/llvm/patches/2020-09-01-is-trivially-copyable.diff
patch -d lib/llvm/src/ -p1 < lib/llvm/patches/2020-01-07-01-c-exports.diff
patch -d lib/llvm/src/ -p1 < lib/llvm/patches/2019-12-23-01-jit-eh-frames.diff
substituteInPlace packages/process/_test.pony \
--replace '"/bin/' '"${coreutils}/bin/' \
--replace '=/bin' "${coreutils}/bin"
@ -91,7 +89,6 @@ stdenv.mkDerivation (rec {
+ lib.optionalString stdenv.isDarwin "bits=64 "
+ lib.optionalString (stdenv.isDarwin && (!lto)) "lto=no "
+ '' install
wrapProgram $out/bin/ponyc \
--prefix PATH ":" "${stdenv.cc}/bin" \
--set-default CC "$CC" \

View File

@ -0,0 +1,14 @@
diff --git a/src/libponyc/CMakeLists.txt b/src/libponyc/CMakeLists.txt
index bf2c385e..11d0d619 100644
--- a/src/libponyc/CMakeLists.txt
+++ b/src/libponyc/CMakeLists.txt
@@ -136,7 +136,7 @@ elseif(${CMAKE_HOST_SYSTEM_NAME} MATCHES "DragonFly")
else()
# add a rule to generate the standalone library if needed
add_custom_command(OUTPUT libponyc-standalone.a
- COMMAND cp `find /usr/lib/ -name 'libstdc++.a' -print -quit` libstdcpp.a
+ COMMAND cp `${CMAKE_CXX_COMPILER} --print-file-name='libstdc++.a'` libstdcpp.a
COMMAND echo "create libponyc-standalone.a" > standalone.mri
COMMAND echo "addlib ${PROJECT_SOURCE_DIR}/../../build/libs/lib/libblake2.a" >> standalone.mri
COMMAND echo "addlib libstdcpp.a" >> standalone.mri

View File

@ -1,10 +1,10 @@
--- a/lib/CMakeLists.txt 2020-09-27 02:39:12.862940179 +0000
+++ b/lib/CMakeLists.txt 2020-09-27 02:39:16.451957865 +0000
--- a/lib/CMakeLists.txt 2021-05-27 15:58:36.819331229 -0400
+++ b/lib/CMakeLists.txt 2021-05-27 16:00:19.768268649 -0400
@@ -10,12 +10,12 @@
endif()
ExternalProject_Add(gbenchmark
- URL https://github.com/google/benchmark/archive/v1.5.0.tar.gz
- URL https://github.com/google/benchmark/archive/v1.5.2.tar.gz
+ SOURCE_DIR gbenchmark-prefix/src/benchmark
CMAKE_ARGS -DCMAKE_BUILD_TYPE=${PONYC_LIBS_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX} -DBENCHMARK_ENABLE_GTEST_TESTS=OFF -DCMAKE_CXX_FLAGS=-fpic --no-warn-unused-cli
)
@ -30,12 +30,12 @@
- option(GIT_SUBMODULE "Check submodules during build" ON)
- if(GIT_SUBMODULE)
- message(STATUS "Updating submodules...")
- execute_process(COMMAND ${GIT_EXECUTABLE} submodule update --init --recursive
- execute_process(COMMAND ${GIT_EXECUTABLE} submodule update --init --recursive --depth 1
- WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
- RESULT_VARIABLE git_submod_result)
- #message("git_submod_result ${git_submod_result}")
- if(NOT git_submod_result EQUAL "0")
- message(FATAL_ERROR "git submodule update --init --recursive failed with ${git_submod_result}, please checkout submodules")
- message(FATAL_ERROR "git submodule update --init --recursive --depth 1 failed with ${git_submod_result}, please checkout submodules")
- endif()
-
- # we check to make sure the submodule hash matches

View File

@ -3,16 +3,16 @@
rustPlatform.buildRustPackage rec {
pname = "evcxr";
version = "0.9.0";
version = "0.10.0";
src = fetchFromGitHub {
owner = "google";
repo = "evcxr";
rev = "v${version}";
sha256 = "sha256-89+RZrG/QUo3JY9N5eTiMigUnlUP+wZWRW8PSnCcsrY=";
sha256 = "sha256-EPxWLPw+V5eIm+eL8m8Xw14adgshthJSDRyWohsJH88=";
};
cargoSha256 = "sha256-gZLSTWS5cLfJvk4/tv8FG2I2vH3PKljWbJDOflNDmTQ=";
cargoSha256 = "sha256-5jGrv0YRVMo2X9p/WPgjYV3z193hl2+NiFTZr3v0Iik=";
RUST_SRC_PATH = "${rustPlatform.rustLibSrc}";

View File

@ -6,13 +6,13 @@
stdenv.mkDerivation rec {
pname = "aws-c-common";
version = "0.5.5";
version = "0.5.11";
src = fetchFromGitHub {
owner = "awslabs";
repo = pname;
rev = "v${version}";
sha256 = "sha256-rGv+fa+UF/f6mY8CmZpkjP98CAcAQCTjL3OI7HsUHcU=";
sha256 = "sha256-4CYbL+ICabKvpfjlALJ0wRbuwgy+JKJnKqYbQFsHQsI=";
};
nativeBuildInputs = [ cmake ];
@ -22,9 +22,6 @@ stdenv.mkDerivation rec {
"-DCMAKE_SKIP_BUILD_RPATH=OFF" # for tests
];
NIX_CFLAGS_COMPILE = lib.optionalString stdenv.isDarwin
"-Wno-nullability-extension -Wno-typedef-redefinition";
doCheck = true;
meta = with lib; {

View File

@ -6,11 +6,11 @@ stdenv.mkDerivation rec {
version = "2.3.1";
src = fetchurl {
url = "https://github.com/erincatto/Box2D/archive/v${version}.tar.gz";
sha256 = "0llpcifl8zbjbpxdwz87drd01m3lwnv82xb4av6kca1xn4w2gmkm";
url = "https://github.com/erincatto/box2d/archive/v${version}.tar.gz";
sha256 = "0p03ngsmyz0r5kbpiaq10ns4fxwkjvvawi8k6pfall46b93wizsq";
};
sourceRoot = "Box2D-${version}/Box2D";
sourceRoot = "box2d-${version}/Box2D";
nativeBuildInputs = [ cmake unzip pkg-config ];
buildInputs = [ libGLU libGL freeglut libX11 xorgproto libXi ];

View File

@ -1,5 +1,6 @@
{ lib, stdenv
, fetchurl
, fetchpatch
, meson
, ninja
, pkg-config
@ -28,6 +29,14 @@ stdenv.mkDerivation rec {
sha256 = "sha256-96AwfqUfXkTRuDL0k92QRURKOk4hHvhd/Zql3W6up9E=";
};
patches = [
(fetchpatch {
name = "CVE-2021-33516.patch";
url = "https://gitlab.gnome.org/GNOME/gupnp/-/commit/ca6ec9dcb26fd7a2a630eb6a68118659b589afac.patch";
sha256 = "sha256-G7e/xNQB7Kp2fPzqVeD/cH3h1co9hZXh55QOUBnAnvU=";
})
];
nativeBuildInputs = [
meson
ninja

View File

@ -0,0 +1,73 @@
{ lib, stdenv, fetchurl, pkg-config, bison, numactl, libxml2
, perl, gfortran, slurm, openssh, hwloc, zlib, makeWrapper
# InfiniBand dependencies
, opensm, rdma-core
# OmniPath dependencies
, libpsm2, libfabric
# Compile with slurm as a process manager
, useSlurm ? false
# Network type for MVAPICH2
, network ? "ethernet"
} :
assert builtins.elem network [ "ethernet" "infiniband" "omnipath" ];
stdenv.mkDerivation rec {
pname = "mvapich";
version = "2.3.6";
src = fetchurl {
url = "http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-${version}.tar.gz";
sha256 = "0jd28vy9ivl3rcpkxmhw73b6krzm0pd9jps8asw92wa00lm2z9mk";
};
nativeBuildInputs = [ pkg-config bison makeWrapper ];
propagatedBuildInputs = [ numactl rdma-core zlib opensm ];
buildInputs = with lib; [
numactl
libxml2
perl
gfortran
openssh
hwloc
] ++ optionals (network == "infiniband") [ rdma-core opensm ]
++ optionals (network == "omnipath") [ libpsm2 libfabric ]
++ optional useSlurm slurm;
configureFlags = with lib; [
"--with-pm=hydra"
"--enable-fortran=all"
"--enable-cxx"
"--enable-threads=multiple"
"--enable-hybrid"
"--enable-shared"
] ++ optional useSlurm "--with-pm=slurm"
++ optional (network == "ethernet") "--with-device=ch3:sock"
++ optionals (network == "infiniband") [ "--with-device=ch3:mrail" "--with-rdma=gen2" ]
++ optionals (network == "omnipath") ["--with-device=ch3:psm" "--with-psm2=${libpsm2}"];
doCheck = true;
preFixup = ''
# /tmp/nix-build... ends up in the RPATH, fix it manually
for entry in $out/bin/mpichversion $out/bin/mpivars; do
echo "fix rpath: $entry"
patchelf --set-rpath "$out/lib" $entry
done
# Ensure the default compilers are the ones mvapich was built with
substituteInPlace $out/bin/mpicc --replace 'CC="gcc"' 'CC=${stdenv.cc}/bin/gcc'
substituteInPlace $out/bin/mpicxx --replace 'CXX="g++"' 'CXX=${stdenv.cc}/bin/g++'
substituteInPlace $out/bin/mpifort --replace 'FC="gfortran"' 'FC=${gfortran}/bin/gfortran'
'';
enableParallelBuilding = true;
meta = with lib; {
description = "MPI-3.1 implementation optimized for Infiband transport";
homepage = "https://mvapich.cse.ohio-state.edu";
license = licenses.bsd3;
maintainers = [ maintainers.markuskowa ];
platforms = platforms.linux;
};
}

View File

@ -17,8 +17,8 @@ stdenv.mkDerivation rec {
outputs = [ "out" "examples" ];
src = fetchurl {
url = "https://computation.llnl.gov/projects/${pname}/download/${pname}-${version}.tar.gz";
sha256 = "jW3QlP7Mu41uzEE0DsFqZfq6yC7UQVAj9tfBwjkOovM=";
url = "https://github.com/LLNL/sundials/releases/download/v${version}/sundials-${version}.tar.gz";
hash = "sha256-SNp7qoFS3bIq7RsC2C0du0+/6iKs9nY0ARqgMDoQCkM=";
};
nativeBuildInputs = [ cmake ];

View File

@ -0,0 +1,36 @@
{ lib
, stdenv
, fetchFromGitHub
, cmake
, gtest
, xsimd
, xtl
}:
stdenv.mkDerivation rec {
pname = "xtensor";
version = "0.23.10";
src = fetchFromGitHub {
owner = "xtensor-stack";
repo = "xtensor";
rev = version;
sha256 = "1ayrhyh9x33b87ic01b4jzxc8x27yxpxzya5x54ikazvz8p71n14";
};
nativeBuildInputs = [ cmake ];
propagatedBuildInputs = [ xtl xsimd ];
cmakeFlags = [ "-DBUILD_TESTS=ON" ];
doCheck = true;
checkInputs = [ gtest ];
checkTarget = "xtest";
meta = with lib; {
description = "Multi-dimensional arrays with broadcasting and lazy computing.";
homepage = "https://github.com/xtensor-stack/xtensor";
license = licenses.bsd3;
maintainers = with maintainers; [ cpcloud ];
platforms = platforms.all;
};
}

View File

@ -1,4 +1,4 @@
{ mkDerivation, fetchurl, makeWrapper, lib, php }:
{ mkDerivation, fetchurl, makeWrapper, installShellFiles, lib, php }:
mkDerivation rec {
pname = "deployer";
@ -11,12 +11,17 @@ mkDerivation rec {
dontUnpack = true;
nativeBuildInputs = [ makeWrapper ];
nativeBuildInputs = [ makeWrapper installShellFiles ];
installPhase = ''
mkdir -p $out/bin
install -D $src $out/libexec/deployer/deployer.phar
makeWrapper ${php}/bin/php $out/bin/dep --add-flags "$out/libexec/deployer/deployer.phar"
# fish support currently broken: https://github.com/deployphp/deployer/issues/2527
installShellCompletion --cmd dep \
--bash <($out/bin/dep autocomplete --install) \
--zsh <($out/bin/dep autocomplete --install)
'';
meta = with lib; {

View File

@ -0,0 +1,51 @@
{ lib
, aiohttp
, aresponses
, asynctest
, buildPythonPackage
, dateparser
, fetchFromGitHub
, haversine
, pytest-asyncio
, pytestCheckHook
, pythonOlder
, requests
, xmltodict
}:
buildPythonPackage rec {
pname = "aio-georss-client";
version = "0.7";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "exxamalte";
repo = "python-aio-georss-client";
rev = "v${version}";
sha256 = "1nhw2sf92dbizxdcil1wdmbaa3hbmsiriy8jfzpqxsliw5dc0kmh";
};
propagatedBuildInputs = [
aiohttp
haversine
xmltodict
requests
dateparser
];
checkInputs = [
aresponses
asynctest
pytest-asyncio
pytestCheckHook
];
pythonImportsCheck = [ "aio_georss_client" ];
meta = with lib; {
description = "Python library for accessing GeoRSS feeds";
homepage = "https://github.com/exxamalte/python-aio-georss-client";
license = with licenses; [ asl20 ];
maintainers = with maintainers; [ fab ];
};
}

View File

@ -0,0 +1,43 @@
{ lib
, aio-georss-client
, aresponses
, buildPythonPackage
, dateparser
, fetchFromGitHub
, pytest-asyncio
, pytestCheckHook
, pythonOlder
}:
buildPythonPackage rec {
pname = "aio-georss-gdacs";
version = "0.4";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "exxamalte";
repo = "python-aio-georss-gdacs";
rev = "v${version}";
sha256 = "0rcrhdpgj84hfifx9rzxz15ajzsk069iknb28gicw1cm1qv4vfxm";
};
propagatedBuildInputs = [
aio-georss-client
dateparser
];
checkInputs = [
aresponses
pytest-asyncio
pytestCheckHook
];
pythonImportsCheck = [ "aio_georss_gdacs" ];
meta = with lib; {
description = "Python library for accessing GeoRSS feeds";
homepage = "https://github.com/exxamalte/python-aio-georss-gdacs";
license = with licenses; [ asl20 ];
maintainers = with maintainers; [ fab ];
};
}

View File

@ -1,7 +1,8 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, isPy27
, pythonOlder
, dataclasses
, kafka-python
, cython
, zlib
@ -9,15 +10,14 @@
buildPythonPackage rec {
pname = "aiokafka";
version = "0.7.0";
disabled = isPy27;
version = "0.7.1";
disabled = pythonOlder "3.6";
src = fetchFromGitHub {
owner = "aio-libs";
repo = "aiokafka";
repo = pname;
rev = "v${version}";
sha256 = "16pcgv38syqy6sj3w7zx95zgynpd642n3i95dpiw0ivhpqrxxhrf";
sha256 = "sha256-D89ppIUliJJMDuCySrZUyN6Rlm01gFskz6ayHmqploc=";
};
nativeBuildInputs = [
@ -30,16 +30,15 @@ buildPythonPackage rec {
propagatedBuildInputs = [
kafka-python
] ++ lib.optionals (pythonOlder "3.7") [
dataclasses
];
postPatch = ''
substituteInPlace setup.py \
--replace "kafka-python==1.4.6" "kafka-python"
'';
# checks require running kafka server
doCheck = false;
pythonImportsCheck = [ "aiokafka" ];
meta = with lib; {
description = "Kafka integration with asyncio";
homepage = "https://aiokafka.readthedocs.org";

View File

@ -12,14 +12,14 @@
buildPythonPackage rec {
pname = "aioswitcher";
version = "1.2.3";
version = "1.2.5";
format = "pyproject";
src = fetchFromGitHub {
owner = "TomerFi";
repo = pname;
rev = version;
sha256 = "sha256-Qp5iVk71JxhPVrytWuXkzpqPNPmMQubO0t9sgeQfO8c=";
sha256 = "sha256-eiWmB2DVNAYHPHfnVwv0+4A/wYLgtAa1ReGsmwiIvAk=";
};
nativeBuildInputs = [

View File

@ -15,14 +15,14 @@
buildPythonPackage rec {
pname = "clldutils";
version = "3.8.0";
version = "3.9.0";
disabled = isPy27;
src = fetchFromGitHub {
owner = "clld";
repo = pname;
rev = "v${version}";
sha256 = "18sjcqzprf96s7bkn5zm3lh83hxfxj56nycxyldrwz7ndgkgxxx2";
sha256 = "07ljq7v1zvaxyl6xn4a2p4097lgd5j9bz71lf05y5bz8k024mxbr";
};
patchPhase = ''
@ -48,6 +48,6 @@ buildPythonPackage rec {
description = "CSV on the Web";
homepage = "https://github.com/cldf/csvw";
license = licenses.asl20;
maintainers = with maintainers; [ hexa ];
maintainers = with maintainers; [ ];
};
}

View File

@ -0,0 +1,43 @@
{ lib
, buildPythonPackage
, dateparser
, fetchFromGitHub
, haversine
, pytestCheckHook
, pythonOlder
, requests
, xmltodict
}:
buildPythonPackage rec {
pname = "georss-client";
version = "0.13";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "exxamalte";
repo = "python-georss-client";
rev = "v${version}";
sha256 = "1pvx2qb8gs2f7bb8xxq689ydxirsl3bcgsbi5qv5klc4c051dj8i";
};
propagatedBuildInputs = [
haversine
xmltodict
requests
dateparser
];
checkInputs = [
pytestCheckHook
];
pythonImportsCheck = [ "georss_client" ];
meta = with lib; {
description = "Python library for accessing GeoRSS feeds";
homepage = "https://github.com/exxamalte/python-georss-client";
license = with licenses; [ asl20 ];
maintainers = with maintainers; [ fab ];
};
}

View File

@ -0,0 +1,37 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, georss-client
, pytestCheckHook
, pythonOlder
}:
buildPythonPackage rec {
pname = "georss-generic-client";
version = "0.4";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "exxamalte";
repo = "python-georss-generic-client";
rev = "v${version}";
sha256 = "0i4shx6fvwibx0hlfmd0dyq2n5lkrqwmlm0l476fdb9bw5lkaiy0";
};
propagatedBuildInputs = [
georss-client
];
checkInputs = [
pytestCheckHook
];
pythonImportsCheck = [ "georss_generic_client" ];
meta = with lib; {
description = "Python library for accessing generic GeoRSS feeds";
homepage = "https://github.com/exxamalte/python-georss-generic-client";
license = with licenses; [ asl20 ];
maintainers = with maintainers; [ fab ];
};
}

View File

@ -0,0 +1,37 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, georss-client
, pytestCheckHook
, pythonOlder
}:
buildPythonPackage rec {
pname = "georss-ign-sismologia-client";
version = "0.2";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "exxamalte";
repo = "python-georss-ign-sismologia-client";
rev = "v${version}";
sha256 = "1xylgvbdrpl3wxa6qqc8jma4c9520rld0pv28y3b6b0m07ab6ijl";
};
propagatedBuildInputs = [
georss-client
];
checkInputs = [
pytestCheckHook
];
pythonImportsCheck = [ "georss_ign_sismologia_client" ];
meta = with lib; {
description = "Python library for accessing the IGN Sismologia GeoRSS feed";
homepage = "https://github.com/exxamalte/python-georss-ign-sismologia-client";
license = with licenses; [ asl20 ];
maintainers = with maintainers; [ fab ];
};
}

View File

@ -0,0 +1,37 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, georss-client
, pytestCheckHook
, pythonOlder
}:
buildPythonPackage rec {
pname = "georss-ingv-centro-nazionale-terremoti-client";
version = "0.4";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "exxamalte";
repo = "python-georss-ingv-centro-nazionale-terremoti-client";
rev = "v${version}";
sha256 = "06qhxczznckb208bnfly0q5099scq1yj5rk67a6fqczpsmzcln6x";
};
propagatedBuildInputs = [
georss-client
];
checkInputs = [
pytestCheckHook
];
pythonImportsCheck = [ "georss_ingv_centro_nazionale_terremoti_client" ];
meta = with lib; {
description = "Python library for accessing the INGV Centro Nazionale Terremoti GeoRSS feed";
homepage = "https://github.com/exxamalte/python-georss-ingv-centro-nazionale-terremoti-client";
license = with licenses; [ asl20 ];
maintainers = with maintainers; [ fab ];
};
}

View File

@ -0,0 +1,37 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, georss-client
, pytestCheckHook
, pythonOlder
}:
buildPythonPackage rec {
pname = "georss-nrcan-earthquakes-client";
version = "0.2";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "exxamalte";
repo = "python-georss-nrcan-earthquakes-client";
rev = "v${version}";
sha256 = "0d5cdvi35wj30yvql1sr5n4vz0g4ydrslhql3bya1b7pndfs0h3y";
};
propagatedBuildInputs = [
georss-client
];
checkInputs = [
pytestCheckHook
];
pythonImportsCheck = [ "georss_nrcan_earthquakes_client" ];
meta = with lib; {
description = "Python library for accessing Natural Resources Canada Earthquakes feed";
homepage = "https://github.com/exxamalte/python-georss-nrcan-earthquakes-client";
license = with licenses; [ asl20 ];
maintainers = with maintainers; [ fab ];
};
}

View File

@ -0,0 +1,37 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, georss-client
, pytestCheckHook
, pythonOlder
}:
buildPythonPackage rec {
pname = "georss-qld-bushfire-alert-client";
version = "0.4";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "exxamalte";
repo = "python-georss-qld-bushfire-alert-client";
rev = "v${version}";
sha256 = "14k7q0ynray1fj0lhxvgxpbdh4pmsqqk9gzmv38p9i7ijx8h1sc8";
};
propagatedBuildInputs = [
georss-client
];
checkInputs = [
pytestCheckHook
];
pythonImportsCheck = [ "georss_qld_bushfire_alert_client" ];
meta = with lib; {
description = "Python library for accessing Queensland Bushfire Alert feed";
homepage = "https://github.com/exxamalte/python-georss-qld-bushfire-alert-client";
license = with licenses; [ asl20 ];
maintainers = with maintainers; [ fab ];
};
}

View File

@ -0,0 +1,37 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, georss-client
, pytestCheckHook
, pythonOlder
}:
buildPythonPackage rec {
pname = "georss-tfs-incidents-client";
version = "0.2";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "exxamalte";
repo = "python-georss-tfs-incidents-client";
rev = "v${version}";
sha256 = "10qscn7kncb7h0b8mjykkf5kmm3ga9l8gss4acb888iaigcjgavf";
};
propagatedBuildInputs = [
georss-client
];
checkInputs = [
pytestCheckHook
];
pythonImportsCheck = [ "georss_tfs_incidents_client" ];
meta = with lib; {
description = "Python library for accessing Tasmania Fire Service Incidents feed";
homepage = "https://github.com/exxamalte/python-georss-tfs-incidents-client";
license = with licenses; [ asl20 ];
maintainers = with maintainers; [ fab ];
};
}

View File

@ -0,0 +1,37 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, georss-client
, pytestCheckHook
, pythonOlder
}:
buildPythonPackage rec {
pname = "georss-wa-dfes-client";
version = "0.2";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "exxamalte";
repo = "python-georss-wa-dfes-client";
rev = "v${version}";
sha256 = "0zfjq6yyrss61vwgdrykwkikb009q63kg9ab6ryb2509wiwwfwvk";
};
propagatedBuildInputs = [
georss-client
];
checkInputs = [
pytestCheckHook
];
pythonImportsCheck = [ "georss_wa_dfes_client" ];
meta = with lib; {
description = "Python library for accessing WA Department of Fire and Emergency Services (DFES) feed";
homepage = "https://github.com/exxamalte/python-georss-wa-dfes-client";
license = with licenses; [ asl20 ];
maintainers = with maintainers; [ fab ];
};
}

View File

@ -4,24 +4,22 @@
, pbr
, pythonOlder
, requests
, six
}:
buildPythonPackage rec {
pname = "icmplib";
version = "2.1.1";
disabled = pythonOlder "3.6";
version = "3.0.0";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "ValentinBELYN";
repo = pname;
rev = "v${version}";
sha256 = "06xx9854yzxa7x1mjfzbhhw5rfzgjnw269j5k0rshyqh3qvw1nwv";
sha256 = "sha256-i5cmL8kOrehldOwX2RfVAfL4HdzJ+9S3BojJI2raUSA=";
};
propagatedBuildInputs = [
pbr
six
requests
];

View File

@ -11,7 +11,7 @@
buildPythonPackage rec {
pname = "motioneye-client";
version = "0.3.8";
version = "0.3.9";
format = "pyproject";
disabled = pythonOlder "3.8";
@ -19,7 +19,7 @@ buildPythonPackage rec {
owner = "dermotduffy";
repo = pname;
rev = "v${version}";
sha256 = "sha256-vTTjH4LhUcbh+/838wR0vnvml2y78Ro8SGwSZ6aApdQ=";
sha256 = "sha256-pLdAxBipmr+HUr9NSupm7h/68PK95r3zY/qZTBs1m54=";
};
nativeBuildInputs = [

View File

@ -42,6 +42,8 @@ buildPythonPackage rec {
"test_zero_downtime"
# flaky
"test_keep_alive_client_timeout"
"test_check_timeouts_request_timeout"
"test_check_timeouts_response_timeout"
"test_reloader_live"
];

View File

@ -0,0 +1,30 @@
{ lib
, buildPythonPackage
, fetchPypi
, sphinx
, openpyxl
}:
buildPythonPackage rec {
pname = "sphinxcontrib-excel-table";
version = "1.0.8";
src = fetchPypi {
inherit pname version;
hash = "sha256:1q79byn3k3ribvwqafbpixwabjhymk46ns8ym0hxcn8vhf5nljzd";
};
propagatedBuildInputs = [ sphinx openpyxl ];
pythonImportsCheck = [ "sphinxcontrib.excel_table" ];
# No tests present upstream
doCheck = false;
meta = with lib; {
description = "Sphinx excel-table extension";
homepage = "https://github.com/hackerain/sphinxcontrib-excel-table";
maintainers = with maintainers; [ raboof ];
license = licenses.asl20;
};
}

View File

@ -1,7 +1,6 @@
{ lib
, fetchPypi
, buildPythonPackage
, isPy3k
, guessit
, babelfish
, enzyme
@ -16,7 +15,6 @@
, appdirs
, rarfile
, pytz
, futures
, sympy
, vcrpy
, pytest
@ -38,7 +36,7 @@ buildPythonPackage rec {
guessit babelfish enzyme beautifulsoup4 requests
click dogpile_cache stevedore chardet pysrt six
appdirs rarfile pytz
] ++ lib.optional (!isPy3k) futures;
];
checkInputs = [
sympy vcrpy pytest pytest-flakes
@ -47,6 +45,7 @@ buildPythonPackage rec {
# https://github.com/Diaoul/subliminal/pull/963
doCheck = false;
pythonImportsCheck = [ "subliminal" ];
meta = with lib; {
homepage = "https://github.com/Diaoul/subliminal";

View File

@ -375,6 +375,7 @@ let
affyio = [ pkgs.zlib.dev ];
VariantAnnotation = [ pkgs.zlib.dev pkgs.curl.dev ];
snpStats = [ pkgs.zlib.dev ];
hdf5r = [ pkgs.hdf5.dev ];
};
packagesWithBuildInputs = {

View File

@ -0,0 +1,32 @@
{ lib, stdenv, fetchurl }:
stdenv.mkDerivation {
pname = "f2c";
version = "20200916";
src = fetchurl {
url = "https://www.netlib.org/f2c/src.tgz";
sha256 = "0d8xfbv6dk4dz95qds7sd44b5hvara07f2g2c5g4xiwim9b7916l";
};
makeFlags = [ "-f" "makefile.u" ];
installPhase = ''
runHook preInstall
mkdir -p $out/bin $out/share/man/man1
install -m755 f2c $out/bin
install -m755 xsum $out/bin
install f2c.1t $out/share/man/man1
runHook postInstall
'';
meta = with lib; {
description = "Convert Fortran 77 source code to C";
homepage = "https://www.netlib.org/f2c/";
license = licenses.mit;
maintainers = [ maintainers.markuskowa ];
platforms = platforms.unix;
};
}

View File

@ -0,0 +1,45 @@
{ stdenv, lib, fetchFromGitHub
, gettext, libpng, SDL2, SDL2_image, SDL2_mixer, SDL2_ttf, zlib
}:
stdenv.mkDerivation rec {
pname = "fheroes2";
version = "0.9.4";
src = fetchFromGitHub {
owner = "ihhub";
repo = "fheroes2";
rev = version;
sha256 = "sha256-z+88tVsf4uyMFzNfZDKXo0cYqBCYn1ehX+A+e+aIfSg=";
};
buildInputs = [ gettext libpng SDL2 SDL2_image SDL2_mixer SDL2_ttf zlib ];
makeFlags = [
"FHEROES2_STRICT_COMPILATION=1"
"RELEASE=1"
];
enableParallelBuilding = true;
installPhase = ''
runHook preInstall
install -Dm755 $PWD/src/dist/fheroes2 $out/bin/fheroes2
runHook postInstall
'';
meta = with lib; {
homepage = "https://github.com/ihhub/fheroes2";
description = "Free implementation of Heroes of Might and Magic II game engine";
longDescription = ''
In order to play this game, the original game data is required.
Please refer to the project's README for instructions.
On Linux, the data can be placed in the ~/.local/share/fheroes2 folder.
'';
license = licenses.gpl2Plus;
maintainers = [ maintainers.karolchmist ];
platforms = platforms.linux;
};
}

View File

@ -2,7 +2,7 @@
let
name = "lunar-client";
version = "2.6.0";
version = "2.7.3";
desktopItem = makeDesktopItem {
name = "Lunar Client";
@ -21,7 +21,7 @@ let
src = fetchurl {
url = "https://launcherupdates.lunarclientcdn.com/Lunar%20Client-${version}.AppImage";
name = "lunar-client.AppImage";
sha256 = "1pmblnnvs5jv5v7y5nnxr3liw9xfp5h6l44x7pln8kr9zg85dzma";
sha256 = "0ihi937rrj677y9b377b4hhp9wsarbqwrdrd6k3lhzx3jyh2fynf";
};
in appimageTools.wrapType1 rec {
inherit name src;

View File

@ -0,0 +1,104 @@
{ lib
, stdenv
, fetchpatch
, fetchurl
, unzip
, gdc
, SDL
, SDL_mixer
, bulletml
}:
let
debianPatch = patchname: hash: fetchpatch {
name = "${patchname}.patch";
url = "https://sources.debian.org/data/main/t/torus-trooper/0.22.dfsg1-12/debian/patches/${patchname}.patch";
sha256 = hash;
};
in stdenv.mkDerivation {
pname = "torus-trooper";
version = "0.22";
src = fetchurl {
url = "http://abagames.sakura.ne.jp/windows/tt0_22.zip";
sha256 = "1yhki1fdp3fi4y2iq12vca69f6k38dqjaw9z4lwcxky5kbgb7jvg";
};
patches = [
(debianPatch
"imports"
"0mifw0mj66zljpq6iqnh0rhkgs2sky8rz0p32k98vxfnsb39ibsf")
(debianPatch
"fixes"
"05f93zq2v14lymq748c9g646ckbh9mqpr5rrahb63s90x8hlcqil")
(debianPatch
"directories"
"0y5xvf26v9fk0rx6ncrxx4czckhjbi891hp3pixlmv568pg9cihd")
(debianPatch
"windowed"
"1d8ghj4shvpb0s8l16kscz4l7rz1fxmfdpddy1ikz3678pw1sc8p")
(debianPatch
"dotfile"
"17yirmnjhbd1clzhmdd2mfdhbxkyinaahd6v3yz5kzbcylvjz2r2")
(debianPatch
"window-resizing"
"1n64gbhabl6vis7s294wxlj2k8s3ypxljpdg71icwz1m9jjx59df")
(debianPatch
"save-score-444372"
"1skny6s3hjxkh8w4fq86vp51j7z40fvn80b8myl4i1zzlwag3x17")
(debianPatch
"level-select-444948"
"008248s55188plggg2kg01nimjgc7w0sqd3c22sl6lzd1fjsflv8")
(debianPatch
"avoid-segfault-when-sdl-fails"
"1yp758gi4i15gqk6wiqp815rqcmlyqx62ir1sw20hn6zb3j97bmc")
(debianPatch
"dlang_v2"
"1lxsbckhvl8a8j43pw2dyl5nlavvdbgxb5zlb2450a0vml55nswd")
(debianPatch
"lowest-level-position-602808"
"19r48wirc9zssjmv57drn2fd0f56dcgyqqaz3j49cvv6yd74qf20")
(debianPatch
"libbulletml0v5-segfault"
"0pad2daz60hswkhkdpssxaqc9p9ca0sw1nraqzr453x0zdwwq0hn")
(debianPatch
"std.math.fabs"
"18xnnqlj20bxv2h9fa8dn4rmxwi3k6y3g50kwvh8i8p3b4hgag3r")
(debianPatch
"gdc-8"
"10z702y75c48hjcnvv8m7f3ka52cj3r3jqafdbby85nb0p2lbssx")
];
postPatch = ''
for f in src/abagames/tt/barrage.d src/abagames/util/sdl/sound.d src/abagames/util/sdl/texture.d; do
substituteInPlace $f \
--replace "/usr/" "$out/"
done
'';
nativeBuildInputs = [
unzip
gdc
];
buildInputs = [
SDL
SDL_mixer
bulletml
];
installPhase = ''
install -Dm755 torus-trooper $out/bin/torus-trooper
mkdir -p $out/share/games/torus-trooper
cp -r barrage sounds images $out/share/games/torus-trooper/
'';
meta = with lib; {
homepage = "http://www.asahi-net.or.jp/~cs8k-cyu/windows/tt_e.html";
description = "Fast-paced abstract scrolling shooter game";
license = licenses.bsd2;
maintainers = with maintainers; [ fgaz ];
platforms = platforms.all;
};
}

View File

@ -0,0 +1,97 @@
{ lib
, stdenv
, fetchpatch
, fetchurl
, unzip
, gdc
, SDL
, SDL_mixer
, bulletml
}:
let
debianPatch = patchname: hash: fetchpatch {
name = "${patchname}.patch";
url = "https://sources.debian.org/data/main/t/tumiki-fighters/0.2.dfsg1-9/debian/patches/${patchname}.patch";
sha256 = hash;
};
in stdenv.mkDerivation {
pname = "tumiki-fighters";
version = "0.21";
src = fetchurl {
url = "http://abagames.sakura.ne.jp/windows/tf0_21.zip";
sha256 = "0djykfc1r8ysapklm621h89ana1c4qzc1m5nr9bqw4iccnmvwk3p";
};
patches = [
(debianPatch
"imports"
"1l3kc67b43gdi139cpz5cka1nkn0pjp9mrgrrxlmr0liwx2aryhn")
(debianPatch
"fixes"
"1iy1a5vii6yz9zdlk2bcj6gkj4y25hn9y2fczz15jpqd9r2zm603")
(debianPatch
"directories"
"0kmv0s7jgr693fzrkjsmz4dnicc4w7njanxm2la3cf4vmgdyipmm")
(debianPatch
"windowed"
"1wp74l0bi8wq85pcxnmkwrlfmlf09im95n27pxgz082lhwf2ksy1")
(debianPatch
"dotfile"
"0d8x519bclh41j992qn6ijzfcrgacb79px6zjd1awypkwyc0j2p6")
(debianPatch
"makefile"
"11xf2b31kjyps53jfryv82dv0g6q0smc9xgp8imrbr93mzi51vf0")
(debianPatch
"window-resizing"
"1dm79d0yisa8zs5fr89y3wq2kzd3khcaxs0la8lhncvkqbd4smx8")
(debianPatch
"dlang_v2"
"1isnvbl3bjnpyphji8k3fl0yd1z4869h0lai143vpwgj6518lpg4")
(debianPatch
"gdc-8"
"1md0zwmv50jnak5g9d93bglv9v4z41blinjii6kv3vmgjnajapzj")
];
postPatch = ''
for f in \
src/abagames/tf/barragemanager.d \
src/abagames/util/sdl/sound.d \
src/abagames/util/sdl/texture.d \
src/abagames/tf/enemyspec.d \
src/abagames/tf/field.d \
src/abagames/tf/stagemanager.d \
src/abagames/tf/tumikiset.d
do
substituteInPlace $f \
--replace "/usr/" "$out/"
done
'';
nativeBuildInputs = [
unzip
gdc
];
buildInputs = [
SDL
SDL_mixer
bulletml
];
installPhase = ''
install -Dm755 tumiki-fighters $out/bin/tumiki-fighters
mkdir -p $out/share/games/tumiki-fighters
cp -r barrage sounds enemy field stage tumiki $out/share/games/tumiki-fighters/
'';
meta = with lib; {
homepage = "http://www.asahi-net.or.jp/~cs8k-cyu/windows/tf_e.html";
description = "Sticky 2D shooter";
license = licenses.bsd2;
maintainers = with maintainers; [ fgaz ];
platforms = platforms.all;
};
}

File diff suppressed because it is too large

View File

@ -13,6 +13,7 @@ alvan/vim-closetag
alx741/vim-hindent
alx741/vim-stylishask
amiorin/ctrlp-z
andersevenrud/compe-tmux@main
andrep/vimacs
andreshazard/vim-logreview
AndrewRadev/sideways.vim@main
@ -164,6 +165,7 @@ glepnir/zephyr-nvim@main
glts/vim-textobj-comment
godlygeek/csapprox
godlygeek/tabular
GoldsteinE/compe-latex-symbols
google/vim-codefmt
google/vim-jsonnet
google/vim-maktaba
@ -298,6 +300,7 @@ kristijanhusak/defx-git
kristijanhusak/defx-icons
kristijanhusak/deoplete-phpactor
kristijanhusak/vim-carbon-now-sh
kristijanhusak/vim-dadbod-completion
kristijanhusak/vim-dirvish-git
kristijanhusak/vim-hybrid-material
kshenoy/vim-signature
@ -619,6 +622,7 @@ t9md/vim-choosewin
t9md/vim-smalls
TaDaa/vimade
takac/vim-hardtime
tamago324/compe-zsh
tami5/compe-conjure
tami5/lispdocs.nvim
tami5/sql.nvim

View File

@ -2,9 +2,6 @@
with lib;
# The Magewell Pro Capture drivers are not supported for kernels older than 3.2
assert versionAtLeast kernel.version "3.2.0";
let
bits =
if stdenv.is64bit then "64"
@ -14,15 +11,15 @@ let
in
stdenv.mkDerivation rec {
name = "mwprocapture-1.2.${version}-${kernel.version}";
version = "4177";
name = "mwprocapture-1.3.0.${version}-${kernel.version}";
version = "4236";
src = fetchurl {
url = "http://www.magewell.com/files/drivers/ProCaptureForLinux_${version}.tar.gz";
sha256 = "1nf51w9yixpvr767k49sfdb9n9rv5qc72f5yki1mkghbmabw7vys";
url = "https://www.magewell.com/files/drivers/ProCaptureForLinux_${version}.tar.gz";
sha256 = "1mfgj84km276sq5i8dny1vqp2ycqpvgplrmpbqwnk230d0w3qs74";
};
nativeBuildInputs = [ kernel.moduleBuildDependencies ];
nativeBuildInputs = kernel.moduleBuildDependencies;
preConfigure =
''
@ -63,5 +60,6 @@ stdenv.mkDerivation rec {
license = licenses.unfreeRedistributable;
maintainers = with maintainers; [ MP2E ];
platforms = platforms.linux;
broken = kernel.kernelOlder "3.2.0";
};
}

View File

@ -2,7 +2,7 @@
buildGoModule rec {
pname = "consul";
version = "1.9.5";
version = "1.9.6";
rev = "v${version}";
# Note: Currently only release tags are supported, because they have the Consul UI
@ -17,7 +17,7 @@ buildGoModule rec {
owner = "hashicorp";
repo = pname;
inherit rev;
sha256 = "sha256-CKezHuCbL1I79gDz7ZQiSgPbSXo0NtssQro2MqqmeXw=";
sha256 = "sha256-SuG/Q5Tjet4etd4Qy5NBQLYEe2QO0K8QHKmgxYMl09U=";
};
passthru.tests.consul = nixosTests.consul;
@ -26,7 +26,7 @@ buildGoModule rec {
# has a split module structure in one repo
subPackages = ["." "connect/certgen"];
vendorSha256 = "sha256-YqrW3PeFv1Y6lmjVmMMP0SZao57iPqfut3a1afIWkI0=";
vendorSha256 = "sha256-jVhj7pzJ8kxZk3ViA9zhVqD314biih/sP0Ql1GXcoRY=";
doCheck = false;

View File

@ -1,4 +1,4 @@
{ lib, buildGoModule, fetchFromGitHub }:
{ lib, buildGoModule, fetchFromGitHub, nixosTests }:
buildGoModule rec {
pname = "process-exporter";
@ -19,6 +19,8 @@ buildGoModule rec {
doCheck = true;
passthru.tests = { inherit (nixosTests.prometheus-exporters) process; };
meta = with lib; {
description = "Prometheus exporter that mines /proc to report on selected processes";
homepage = "https://github.com/ncabatoff/process-exporter";

View File

@ -49,6 +49,16 @@ in stdenv.mkDerivation {
url = "https://github.com/openafs/openafs/commit/ee53dd3bc087a05e22fc4111297a51ddb30013f0.patch";
sha256 = "0dfab3zk0dmf6iksna5n09lf5dn4f8w43q4irl2yf5dgqm35shkr";
})
# Linux: Create wrapper for setattr_prepare
(fetchpatch {
url = "https://github.com/openafs/openafs/commit/5a5d358b02b88d6d2c7a27a75149e35b1de7db38.patch";
sha256 = "07gywsg41cz5h6iafr4pb0gb9jnsb58xkwn479lw46b3y5jgz7ki";
})
# Linux 5.12: Add user_namespace param to inode ops
(fetchpatch {
url = "https://github.com/openafs/openafs/commit/c747b15dd2877e6d17e3e6b940ae78c1e1ccd3ea.patch";
sha256 = "0bbqmx4nkmfkapk25zrv9ivhhs91rn9dizb1lkfs7a6937q1kaqh";
})
];
hardeningDisable = [ "pic" ];

View File

@ -38,6 +38,6 @@ python3.pkgs.buildPythonApplication rec {
homepage = "https://github.com/yandex/gixy";
license = licenses.mpl20;
maintainers = [ maintainers.willibutz ];
platforms = platforms.linux;
platforms = platforms.unix;
};
}

View File

@ -0,0 +1,104 @@
{ lib
, stdenv
, fetchFromGitHub
, fetchpatch
, libgamemode32
, meson
, ninja
, pkg-config
, dbus
, inih
, systemd
, appstream
}:
stdenv.mkDerivation rec {
pname = "gamemode";
version = "1.6.1";
src = fetchFromGitHub {
owner = "FeralInteractive";
repo = pname;
rev = version;
sha256 = "sha256-P00OnZiPZyxBu9zuG+3JNorXHBhJZy+cKPjX+duZrJ0=";
};
outputs = [ "out" "dev" "lib" "man" "static" ];
patches = [
# Run executables from PATH instead of /usr/bin
# See https://github.com/FeralInteractive/gamemode/pull/323
(fetchpatch {
url = "https://github.com/FeralInteractive/gamemode/commit/be44b7091baa33be6dda60392e4c06c2f398ee72.patch";
sha256 = "TlDUETs4+N3pvrVd0FQGlGmC+6ByhJ2E7gKXa7suBtE=";
})
# Fix loading shipped config when using a prefix other than /usr
# See https://github.com/FeralInteractive/gamemode/pull/324
(fetchpatch {
url = "https://github.com/FeralInteractive/gamemode/commit/b29aa903ce5acc9141cfd3960c98ccb047eca872.patch";
sha256 = "LwBzBJQ7dfm2mFVSOSPjJP+skgV5N6h77i66L1Sq+ZM=";
})
# Add @libraryPath@ template variable to fix loading the PRELOAD library
./preload-nix-workaround.patch
];
postPatch = ''
substituteInPlace data/gamemoderun \
--subst-var-by libraryPath ${lib.makeLibraryPath ([
(placeholder "lib")
] ++ lib.optionals (stdenv.hostPlatform.system == "x86_64-linux") [
# Support wrapping 32bit applications on a 64bit linux system
libgamemode32
])}
'';
nativeBuildInputs = [
meson
ninja
pkg-config
];
buildInputs = [
dbus
inih
systemd
];
mesonFlags = [
# libexec is just a way to package binaries without including them
# in PATH. It doesn't make sense to install them to $lib
# (the default behaviour in the meson hook).
"--libexecdir=${placeholder "out"}/libexec"
"-Dwith-systemd-user-unit-dir=lib/systemd/user"
];
doCheck = true;
checkInputs = [
appstream
];
# Move static libraries to $static so $lib only contains dynamic libraries.
postInstall = ''
moveToOutput lib/*.a "$static"
'';
# Add $lib/lib to gamemoded & gamemode-simulate-game's rpath since
# they use dlopen to load libgamemode. Can't use makeWrapper since
# it would break the security wrapper in the NixOS module.
postFixup = ''
for bin in "$out/bin/gamemoded" "$out/bin/gamemode-simulate-game"; do
patchelf --set-rpath "$lib/lib:$(patchelf --print-rpath "$bin")" "$bin"
done
'';
meta = with lib; {
description = "Optimise Linux system performance on demand";
homepage = "https://github.com/FeralInteractive/GameMode";
license = licenses.bsd3;
maintainers = with maintainers; [ kira-bruneau ];
platforms = platforms.linux;
};
}

View File

@ -0,0 +1,12 @@
diff --git a/data/gamemoderun b/data/gamemoderun
index 573b3e4..6f2799e 100755
--- a/data/gamemoderun
+++ b/data/gamemoderun
@@ -5,5 +5,6 @@ GAMEMODEAUTO_NAME="libgamemodeauto.so.0"
# ld will find the right path to load the library, including for 32-bit apps.
LD_PRELOAD="${GAMEMODEAUTO_NAME}${LD_PRELOAD:+:$LD_PRELOAD}"
+LD_LIBRARY_PATH="@libraryPath@${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"
-exec env LD_PRELOAD="${LD_PRELOAD}" $GAMEMODERUNEXEC "$@"
+exec env LD_PRELOAD="${LD_PRELOAD}" LD_LIBRARY_PATH="${LD_LIBRARY_PATH}" $GAMEMODERUNEXEC "$@"
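
For context on the @libraryPath@ placeholder patched in above: the derivation's postPatch fills it with lib.makeLibraryPath, which appends /lib to each entry and joins the results with colons; on x86_64-linux the list also includes libgamemode32 so 32-bit games can locate their preload library. A standalone illustration of the helper, assuming <nixpkgs> is on the evaluator's search path (the store paths are made-up placeholders, not real hashes):

let
  lib = (import <nixpkgs> { }).lib;
in
  # makeLibraryPath turns a list of packages or plain paths into a
  # colon-separated, LD_LIBRARY_PATH-style search path ending in /lib.
  lib.makeLibraryPath [
    "/nix/store/aaaa-gamemode-1.6.1-lib"
    "/nix/store/bbbb-gamemode-1.6.1-lib32"
  ]
  # => "/nix/store/aaaa-gamemode-1.6.1-lib/lib:/nix/store/bbbb-gamemode-1.6.1-lib32/lib"

In the real build the first entry is placeholder "lib" (this derivation's own lib output), which is why the patched gamemoderun ends up exporting an LD_LIBRARY_PATH that points straight at the installed libgamemodeauto.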

View File

@ -1,4 +1,4 @@
{ lib, stdenv, rustPlatform, fetchFromGitHub, shared-mime-info, libiconv }:
{ lib, stdenv, rustPlatform, fetchFromGitHub, shared-mime-info, libiconv, installShellFiles }:
rustPlatform.buildRustPackage rec {
pname = "handlr";
@ -13,13 +13,19 @@ rustPlatform.buildRustPackage rec {
cargoSha256 = "sha256-xDQV8wVlzItz0lzR1nVRPVsg7nSf/khUhevDlGgSO3g=";
nativeBuildInputs = [ shared-mime-info ];
nativeBuildInputs = [ installShellFiles shared-mime-info ];
buildInputs = lib.optional stdenv.isDarwin libiconv;
preCheck = ''
export HOME=$TEMPDIR
'';
postInstall = ''
installShellCompletion \
--zsh completions/_handlr \
--fish completions/handlr.fish
'';
meta = with lib; {
description = "Alternative to xdg-open to manage default applications with ease";
homepage = "https://github.com/chmln/handlr";

View File

@ -18,11 +18,11 @@ buildPythonPackage rec {
# The websites youtube-dl deals with are a fast-moving target, which means that
# downloads break constantly. Because of that, updates should always be backported
# to the latest stable release.
version = "2021.05.16";
version = "2021.06.06";
src = fetchurl {
url = "https://yt-dl.org/downloads/${version}/${pname}-${version}.tar.gz";
sha256 = "1z8sdzvkxhscnzy7cnjag308glif0k8jylr11biqwzypm1f2l0fl";
sha256 = "1hqan9h55x9gfdakw554vic68w9gpvhblchwxlw265zxp56hxjrw";
};
nativeBuildInputs = [ installShellFiles makeWrapper ];

View File

@ -9,16 +9,16 @@
rustPlatform.buildRustPackage rec {
pname = "gpg-tui";
version = "0.2.0";
version = "0.3.0";
src = fetchFromGitHub {
owner = "orhun";
repo = "gpg-tui";
rev = "v${version}";
sha256 = "sha256-PwKfsIwGw4aUu8DF9VeuFzafp116E3jetsN4bS5YtRY=";
sha256 = "sha256-5vhFgJZY1yaYFPS2qvrYGX3xyT0PbRKW2jmR4gz12Co=";
};
cargoSha256 = "sha256-6IRjfYntKQXrrl7ix+e6PEQX1bmiAW8Kd79mczCpaUY=";
cargoSha256 = "sha256-g38L/FgqAsFh/ECZnNkJVCC/44z5VW3WK8mgIEEy7BQ=";
nativeBuildInputs = [
gpgme # for gpgme-config

View File

@ -1,11 +1,12 @@
{ lib, stdenv, fetchurl }:
stdenv.mkDerivation {
name = "replace-2.24";
stdenv.mkDerivation rec {
pname = "replace";
version = "2.24";
src = fetchurl {
url = "ftp://hpux.connect.org.uk/hpux/Users/replace-2.24/replace-2.24-src-11.11.tar.gz";
sha256 = "1c2nkxx83vmlh1v3ib6r2xqh121gdb1rharwsimcb2h0xwc558dm";
url = "http://hpux.connect.org.uk/ftp/hpux/Users/replace-${version}/replace-${version}-src-11.31.tar.gz";
sha256 = "18hkwhaz25s6209n5mpx9hmkyznlzygqj488p2l7nvp9zrlxb9sf";
};
outputs = [ "out" "man" ];

View File

@ -0,0 +1,26 @@
{ lib, stdenv, fetchFromSourcehut, meson, pkg-config, scdoc, ninja, libxkbcommon, wayland }:
stdenv.mkDerivation rec {
pname = "wlrctl";
version = "0.2.1";
src = fetchFromSourcehut {
owner = "~brocellous";
repo = "wlrctl";
rev = "v${version}";
sha256 = "039cxc82k7x473n6d65jray90rj35qmfdmr390zy0c7ic7vn4b78";
};
nativeBuildInputs = [ meson pkg-config scdoc ninja ];
buildInputs = [ libxkbcommon wayland ];
NIX_CFLAGS_COMPILE = "-Wno-error=type-limits";
meta = with lib; {
description = "Command line utility for miscellaneous wlroots Wayland extensions";
homepage = "https://git.sr.ht/~brocellous/wlrctl";
license = licenses.mit;
maintainers = with maintainers; [ puffnfresh artturin ];
platforms = platforms.unix;
};
}

View File

@ -654,7 +654,6 @@ mapAliases ({
rdiff_backup = rdiff-backup; # added 2014-11-23
rdmd = dtools; # added 2017-08-19
readline80 = throw "readline-8.0 is no longer supported in nixpkgs, please use 'readline' for main supported version or 'readline81' for most recent version"; # added 2021-04-22
retroshare = throw "retroshare was removed because it was broken"; # added 2021-05-17
rhc = throw "rhc was deprecated on 2019-04-09: abandoned by upstream.";
rng_tools = rng-tools; # added 2018-10-24
robomongo = robo3t; #added 2017-09-28

View File

@ -411,6 +411,8 @@ in
ebook2cw = callPackage ../applications/radio/ebook2cw { };
edwin = callPackage ../data/fonts/edwin { };
etBook = callPackage ../data/fonts/et-book { };
fetchutils = callPackage ../tools/misc/fetchutils { };
@ -856,6 +858,10 @@ in
amidst = callPackage ../tools/games/amidst { };
gamemode = callPackage ../tools/games/gamemode {
libgamemode32 = pkgsi686Linux.gamemode.lib;
};
gfshare = callPackage ../tools/security/gfshare { };
gobgp = callPackage ../tools/networking/gobgp { };
@ -935,7 +941,7 @@ in
};
logseq = callPackage ../applications/misc/logseq {
electron = electron_11;
electron = electron_12;
};
lxterminal = callPackage ../applications/terminal-emulators/lxterminal { };
@ -2274,6 +2280,8 @@ in
wlr-randr = callPackage ../tools/wayland/wlr-randr { };
wlrctl = callPackage ../tools/wayland/wlrctl { };
wlsunset = callPackage ../tools/wayland/wlsunset { };
wob = callPackage ../tools/wayland/wob { };
@ -11672,7 +11680,7 @@ in
ponyc = callPackage ../development/compilers/ponyc {
# Upstream pony has dropped support for versions compiled with gcc.
stdenv = clangStdenv;
stdenv = llvmPackages_9.stdenv;
};
pony-corral = callPackage ../development/compilers/ponyc/pony-corral.nix { };
@ -14881,6 +14889,8 @@ in
inherit fontconfig fontDirectories;
};
f2c = callPackage ../development/tools/f2c { };
freealut = callPackage ../development/libraries/freealut { };
freeglut = callPackage ../development/libraries/freeglut { };
@ -17187,6 +17197,8 @@ in
mutest = callPackage ../development/libraries/mutest { };
mvapich = callPackage ../development/libraries/mvapich { };
mygpoclient = pythonPackages.mygpoclient;
mygui = callPackage ../development/libraries/mygui {
@ -18619,6 +18631,8 @@ in
xsimd = callPackage ../development/libraries/xsimd { };
xtensor = callPackage ../development/libraries/xtensor { };
xtl = callPackage ../development/libraries/xtl { };
xvidcore = callPackage ../development/libraries/xvidcore { };
@ -26324,6 +26338,8 @@ in
remotebox = callPackage ../applications/virtualization/remotebox { };
retroshare = libsForQt5.callPackage ../applications/networking/p2p/retroshare { };
rgp = libsForQt5.callPackage ../development/tools/rgp { };
ricochet = libsForQt5.callPackage ../applications/networking/instant-messengers/ricochet { };
@ -28352,6 +28368,8 @@ in
fava = callPackage ../applications/office/fava {};
fheroes2 = callPackage ../games/fheroes2 {};
fish-fillets-ng = callPackage ../games/fish-fillets-ng {};
flightgear = libsForQt5.callPackage ../games/flightgear { };
@ -28941,12 +28959,16 @@ in
toppler = callPackage ../games/toppler { };
torus-trooper = callPackage ../games/torus-trooper { };
trackballs = callPackage ../games/trackballs { };
tremulous = callPackage ../games/tremulous { };
tts = callPackage ../tools/audio/tts { };
tumiki-fighters = callPackage ../games/tumiki-fighters { };
tuxpaint = callPackage ../games/tuxpaint { };
tuxtype = callPackage ../games/tuxtype { };
@ -30401,6 +30423,10 @@ in
faustlive = callPackage ../applications/audio/faust/faustlive.nix { };
faustPhysicalModeling = callPackage ../applications/audio/faustPhysicalModeling { };
faustStk = callPackage ../applications/audio/faustStk { };
fceux = callPackage ../misc/emulators/fceux { };
flockit = callPackage ../tools/backup/flockit { };

View File

@ -81,6 +81,20 @@ let self = dotnetPackages // overrides; dotnetPackages = with self; {
outputFiles = [ "*" ];
};
FSharpData = fetchNuGet {
baseName = "FSharp.Data";
version = "4.1.1";
sha256 = "0ytjiQi8vQQU51JYexnC13Bi7NqVmLRzM75SOZ+hhQU=";
outputFiles = [ "lib/*" ];
meta = with lib; {
description = "F# Data: Library for Data Access";
homepage = "https://fsprojects.github.io/FSharp.Data/";
license = licenses.asl20;
maintainers = [ maintainers.ratsclub ];
};
};
FSharpData225 = fetchNuGet {
baseName = "FSharp.Data";
version = "2.2.5";
@ -616,50 +630,6 @@ let self = dotnetPackages // overrides; dotnetPackages = with self; {
};
};
FSharpData = buildDotnetPackage rec {
baseName = "FSharp.Data";
version = "2.2.3";
src = fetchFromGitHub {
owner = "fsharp";
repo = baseName;
rev = version;
sha256 = "1h3v9rc8k0khp61cv5n01larqbxd3xcx3q52sw5zf9l0661vw7qr";
};
buildInputs = [ fsharp ];
fileProvidedTypes = fetchurl {
name = "ProvidedTypes.fs";
url = "https://raw.githubusercontent.com/fsprojects/FSharp.TypeProviders.StarterPack/877014bfa6244ac382642e113d7cd6c9bc27bc6d/src/ProvidedTypes.fs";
sha256 = "1lb056v1xld1rfx6a8p8i2jz8i6qa2r2823n5izsf1qg1qgf2980";
};
fileDebugProvidedTypes = fetchurl {
name = "DebugProvidedTypes.fs";
url = "https://raw.githubusercontent.com/fsprojects/FSharp.TypeProviders.StarterPack/877014bfa6244ac382642e113d7cd6c9bc27bc6d/src/DebugProvidedTypes.fs";
sha256 = "1whyrf2jv6fs7kgysn2086v15ggjsd54g1xfs398mp46m0nxp91f";
};
preConfigure = ''
# Copy single-files-in-git-repos
mkdir -p "paket-files/fsprojects/FSharp.TypeProviders.StarterPack/src"
cp -v "${fileProvidedTypes}" "paket-files/fsprojects/FSharp.TypeProviders.StarterPack/src/ProvidedTypes.fs"
cp -v "${fileDebugProvidedTypes}" "paket-files/fsprojects/FSharp.TypeProviders.StarterPack/src/DebugProvidedTypes.fs"
'';
xBuildFiles = [ "src/FSharp.Data.fsproj" "src/FSharp.Data.DesignTime.fsproj" ];
outputFiles = [ "bin/*.dll" "bin/*.xml" ];
meta = {
description = "F# Data: Library for Data Access";
homepage = "https://fsharp.github.io/FSharp.Data/";
license = lib.licenses.asl20;
maintainers = with lib.maintainers; [ obadz ];
platforms = with lib.platforms; linux;
};
};
# FSharpxExtras = buildDotnetPackage rec {
# baseName = "FSharpx.Extras";
# version = "1.8.41";
@ -977,4 +947,18 @@ let self = dotnetPackages // overrides; dotnetPackages = with self; {
};
};
YamlDotNet = fetchNuGet {
baseName = "YamlDotNet";
version = "11.1.1";
sha256 = "rwZ/QyDVrN3wGrEYKY3QY5Xqo2Tp3FkR6dh4QrC+QS0=";
outputFiles = [ "lib/*" ];
meta = with lib; {
description = "YamlDotNet is a .NET library for YAML";
homepage = "https://github.com/aaubry/YamlDotNet";
license = licenses.mit;
maintainers = [ maintainers.ratsclub ];
};
};
}; in self

View File

@ -223,6 +223,10 @@ in {
agent-py = callPackage ../development/python-modules/agent-py { };
aio-georss-client = callPackage ../development/python-modules/aio-georss-client { };
aio-georss-gdacs = callPackage ../development/python-modules/aio-georss-gdacs { };
aioambient = callPackage ../development/python-modules/aioambient { };
ailment = callPackage ../development/python-modules/ailment { };
@ -2742,6 +2746,22 @@ in {
geopy = callPackage ../development/python-modules/geopy { };
georss-client = callPackage ../development/python-modules/georss-client { };
georss-generic-client = callPackage ../development/python-modules/georss-generic-client { };
georss-ign-sismologia-client = callPackage ../development/python-modules/georss-ign-sismologia-client { };
georss-ingv-centro-nazionale-terremoti-client = callPackage ../development/python-modules/georss-ingv-centro-nazionale-terremoti-client { };
georss-nrcan-earthquakes-client = callPackage ../development/python-modules/georss-nrcan-earthquakes-client { };
georss-qld-bushfire-alert-client = callPackage ../development/python-modules/georss-qld-bushfire-alert-client { };
georss-tfs-incidents-client = callPackage ../development/python-modules/georss-tfs-incidents-client { };
georss-wa-dfes-client = callPackage ../development/python-modules/georss-wa-dfes-client { };
getmac = callPackage ../development/python-modules/getmac { };
getkey = callPackage ../development/python-modules/getkey { };
@ -7894,6 +7914,8 @@ in {
sphinxcontrib-devhelp = callPackage ../development/python-modules/sphinxcontrib-devhelp { };
sphinxcontrib-excel-table = callPackage ../development/python-modules/sphinxcontrib-excel-table { };
sphinxcontrib-fulltoc = callPackage ../development/python-modules/sphinxcontrib-fulltoc { };
sphinxcontrib-htmlhelp = callPackage ../development/python-modules/sphinxcontrib-htmlhelp { };
@ -8076,6 +8098,8 @@ in {
subdownloader = callPackage ../development/python-modules/subdownloader { };
subliminal = callPackage ../development/python-modules/subliminal { };
subunit = callPackage ../development/python-modules/subunit {
inherit (pkgs) subunit cppunit check;
};