Mirror of https://github.com/NixOS/nixpkgs.git (synced 2024-11-02 07:31:26 +00:00)

Commit 4a57b07653: Merge remote-tracking branch 'origin/master' into staging
@@ -16,6 +16,7 @@
  amiddelk = "Arie Middelkoop <amiddelk@gmail.com>";
  amorsillo = "Andrew Morsillo <andrew.morsillo@gmail.com>";
  AndersonTorres = "Anderson Torres <torres.anderson.85@gmail.com>";
  anderspapitto = "Anders Papitto <anderspapitto@gmail.com>";
  andres = "Andres Loeh <ksnixos@andres-loeh.de>";
  antono = "Antono Vasiljev <self@antono.info>";
  ardumont = "Antoine R. Dumont <eniotna.t@gmail.com>";
@@ -79,6 +80,7 @@
  fluffynukeit = "Daniel Austin <dan@fluffynukeit.com>";
  forkk = "Andrew Okin <forkk@forkk.net>";
  fpletz = "Franz Pletz <fpletz@fnordicwalking.de>";
  fro_ozen = "fro_ozen <fro_ozen@gmx.de>";
  ftrvxmtrx = "Siarhei Zirukin <ftrvxmtrx@gmail.com>";
  funfunctor = "Edward O'Callaghan <eocallaghan@alterapraxis.com>";
  fuuzetsu = "Mateusz Kowalczyk <fuuzetsu@fuuzetsu.co.uk>";
@@ -186,6 +188,7 @@
  rickynils = "Rickard Nilsson <rickynils@gmail.com>";
  rob = "Rob Vermaas <rob.vermaas@gmail.com>";
  robberer = "Longrin Wischnewski <robberer@freakmail.de>";
  robbinch = "Robbin C. <robbinch33@gmail.com>";
  roconnor = "Russell O'Connor <roconnor@theorem.ca>";
  roelof = "Roelof Wobben <rwobben@hotmail.com>";
  romildo = "José Romildo Malaquias <malaquias@gmail.com>";
@@ -17,6 +17,10 @@ rec {
     evalModules) and the less declarative the module set is. */
  evalModules = { modules
                , prefix ? []
+               , # This should only be used for special arguments that need to be evaluated
+                 # when resolving module structure (like in imports). For everything else,
+                 # there's _module.args.
+                 specialArgs ? {}
                , # This will be removed in the future; prefer the _module.args option instead.
                  args ? {}
                , # This will be removed in the future; prefer the _module.check option instead.
@@ -51,7 +55,7 @@ rec {
        };
      };

-     closed = closeModules (modules ++ [ internalModule ]) { inherit config options; lib = import ./.; };
+     closed = closeModules (modules ++ [ internalModule ]) (specialArgs // { inherit config options; lib = import ./.; });

      # Note: the list of modules is reversed to maintain backward
      # compatibility with the old module system. Not sure if this is
@@ -118,7 +122,7 @@ rec {
      config = removeAttrs m ["key" "_file" "require" "imports"];
    };

-  applyIfFunction = f: arg@{ config, options, lib }: if isFunction f then
+  applyIfFunction = f: arg@{ config, options, lib, ... }: if isFunction f then
    let
      # Module arguments are resolved in a strict manner when attribute set
      # deconstruction is used. As the arguments are now defined with the
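For illustration, a minimal sketch of how a caller might pass the new specialArgs parameter; the module file name and the modulesPath value below are hypothetical, not part of this commit:

  let
    lib = import ./lib;                 # nixpkgs' lib, which exports evalModules
    eval = lib.evalModules {
      modules = [ ./my-module.nix ];    # hypothetical module
      # Unlike _module.args, specialArgs is available while the module
      # structure itself is still being resolved (e.g. inside `imports`).
      specialArgs = { modulesPath = ./modules; };
    };
  in
    eval.config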
@@ -41,10 +41,6 @@ changes:
    <option>boot.loader.efi</option> and <option>boot.loader.gummiboot</option>
    as well.</para>
  </listitem>
- <listitem>
-   <para>To see console messages during early boot, add <literal>"fbcon"</literal>
-   to your <option>boot.initrd.kernelModules</option>.</para>
- </listitem>
  </itemizedlist>
  </para>
@@ -47,6 +47,7 @@ in rec {
      inherit prefix check;
      modules = modules ++ extraModules ++ baseModules ++ [ pkgsModule ];
      args = extraArgs;
+     specialArgs = { modulesPath = ../modules; };
    }) config options;

  # These are the extra arguments passed to every module. In
@ -21,7 +21,7 @@ sub new {
|
||||
my ($class, $args) = @_;
|
||||
|
||||
my $startCommand = $args->{startCommand};
|
||||
|
||||
|
||||
my $name = $args->{name};
|
||||
if (!$name) {
|
||||
$startCommand =~ /run-(.*)-vm$/ if defined $startCommand;
|
||||
@ -34,7 +34,7 @@ sub new {
|
||||
"qemu-kvm -m 384 " .
|
||||
"-net nic,model=virtio \$QEMU_OPTS ";
|
||||
my $iface = $args->{hdaInterface} || "virtio";
|
||||
$startCommand .= "-drive file=" . Cwd::abs_path($args->{hda}) . ",if=$iface,boot=on,werror=report "
|
||||
$startCommand .= "-drive file=" . Cwd::abs_path($args->{hda}) . ",if=$iface,werror=report "
|
||||
if defined $args->{hda};
|
||||
$startCommand .= "-cdrom $args->{cdrom} "
|
||||
if defined $args->{cdrom};
|
||||
@ -171,7 +171,7 @@ sub start {
|
||||
|
||||
eval {
|
||||
local $SIG{CHLD} = sub { die "QEMU died prematurely\n"; };
|
||||
|
||||
|
||||
# Wait until QEMU connects to the monitor.
|
||||
accept($self->{monitor}, $monitorS) or die;
|
||||
|
||||
@ -182,11 +182,11 @@ sub start {
|
||||
$self->{socket}->autoflush(1);
|
||||
};
|
||||
die "$@" if $@;
|
||||
|
||||
|
||||
$self->waitForMonitorPrompt;
|
||||
|
||||
$self->log("QEMU running (pid $pid)");
|
||||
|
||||
|
||||
$self->{pid} = $pid;
|
||||
$self->{booted} = 1;
|
||||
}
|
||||
@ -241,7 +241,7 @@ sub connect {
|
||||
alarm 300;
|
||||
readline $self->{socket} or die "the VM quit before connecting\n";
|
||||
alarm 0;
|
||||
|
||||
|
||||
$self->log("connected to guest root shell");
|
||||
$self->{connected} = 1;
|
||||
|
||||
@ -270,7 +270,7 @@ sub isUp {
|
||||
|
||||
sub execute_ {
|
||||
my ($self, $command) = @_;
|
||||
|
||||
|
||||
$self->connect;
|
||||
|
||||
print { $self->{socket} } ("( $command ); echo '|!=EOF' \$?\n");
|
||||
@ -453,7 +453,7 @@ sub shutdown {
|
||||
sub crash {
|
||||
my ($self) = @_;
|
||||
return unless $self->{booted};
|
||||
|
||||
|
||||
$self->log("forced crash");
|
||||
|
||||
$self->sendMonitorCommand("quit");
|
||||
|
@@ -43,7 +43,7 @@ in

    consoleFont = mkOption {
      type = types.str;
-     default = "lat9w-16";
+     default = "Lat2-Terminus16";
      example = "LatArCyrHeb-16";
      description = ''
        The font used for the virtual consoles. Leave empty to use
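As a usage sketch, the new default can also be set or overridden explicitly in configuration.nix; the i18n option path matches the commented template updated later in this commit:

  # configuration.nix (illustrative)
  i18n = {
    consoleFont = "Lat2-Terminus16";   # the new default
    consoleKeyMap = "us";
    defaultLocale = "en_US.UTF-8";
  };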
@@ -12,7 +12,7 @@ let

  # Forces 32bit pulseaudio and alsaPlugins to be built/supported for apps
  # using 32bit alsa on 64bit linux.
-  enable32BitAlsaPlugins = stdenv.isx86_64 && (pkgs_i686.alsaLib != null && pkgs_i686.libpulseaudio != null);
+  enable32BitAlsaPlugins = cfg.support32Bit && stdenv.isx86_64 && (pkgs_i686.alsaLib != null && pkgs_i686.libpulseaudio != null);

  ids = config.ids;

@@ -78,6 +78,12 @@ in {
      '';
    };

+   support32Bit = mkOption {
+     type = types.bool;
+     default = false;
+     description = "Whether to include the 32-bit ALSA/PulseAudio plugins (for 32-bit applications on x86_64).";
+   };
+
    configFile = mkOption {
      type = types.path;
      description = ''
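A sketch of how the new option could be enabled, assuming the module is rooted at hardware.pulseaudio (the full option path is not visible in this hunk):

  # configuration.nix (illustrative)
  hardware.pulseaudio = {
    enable = true;
    support32Bit = true;   # build the 32-bit ALSA/Pulse plugins on x86_64
  };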
@ -7,8 +7,7 @@ with lib;
|
||||
|
||||
{
|
||||
imports =
|
||||
[ ./channel.nix
|
||||
./iso-image.nix
|
||||
[ ./iso-image.nix
|
||||
|
||||
# Profiles of this basic installation CD.
|
||||
../../profiles/all-hardware.nix
|
||||
@ -21,18 +20,6 @@ with lib;
|
||||
|
||||
isoImage.volumeID = substring 0 11 "NIXOS_ISO";
|
||||
|
||||
# Make the installer more likely to succeed in low memory
|
||||
# environments. The kernel's overcommit heuristics bite us
|
||||
# fairly often, preventing processes such as nix-worker or
|
||||
# download-using-manifests.pl from forking even if there is
|
||||
# plenty of free memory.
|
||||
boot.kernel.sysctl."vm.overcommit_memory" = "1";
|
||||
|
||||
# To speed up installation a little bit, include the complete stdenv
|
||||
# in the Nix store on the CD. Archive::Cpio is needed for the
|
||||
# initrd builder.
|
||||
isoImage.storeContents = [ pkgs.stdenv pkgs.busybox pkgs.perlPackages.ArchiveCpio ];
|
||||
|
||||
# EFI booting
|
||||
isoImage.makeEfiBootable = true;
|
||||
|
||||
@ -42,9 +29,6 @@ with lib;
|
||||
# Add Memtest86+ to the CD.
|
||||
boot.loader.grub.memtest86.enable = true;
|
||||
|
||||
# Get a console as soon as the initrd loads fbcon on EFI boot.
|
||||
boot.initrd.kernelModules = [ "fbcon" ];
|
||||
|
||||
# Allow the user to log in as root without a password.
|
||||
users.extraUsers.root.initialHashedPassword = "";
|
||||
}
|
||||
|
@ -1,14 +1,11 @@
|
||||
# This module defines a small NixOS installation CD. It does not
|
||||
# contain any graphical stuff.
|
||||
|
||||
{ config, pkgs, lib, ... }:
|
||||
{ config, lib, ... }:
|
||||
|
||||
{
|
||||
imports =
|
||||
[ ./installation-cd-base.nix
|
||||
../../profiles/minimal.nix
|
||||
];
|
||||
|
||||
# Enable in installer, even if minimal profile disables it
|
||||
services.nixosManual.enable = lib.mkOverride 999 true;
|
||||
}
|
||||
|
@@ -495,7 +495,7 @@ $bootLoaderConfig

  # Select internationalisation properties.
  # i18n = {
-  #   consoleFont = "lat9w-16";
+  #   consoleFont = "Lat2-Terminus16";
  #   consoleKeyMap = "us";
  #   defaultLocale = "en_US.UTF-8";
  # };
@ -2,8 +2,6 @@
|
||||
|
||||
{
|
||||
_module.args = {
|
||||
modulesPath = ../.;
|
||||
|
||||
pkgs_i686 = import ../../lib/nixpkgs.nix {
|
||||
system = "i686-linux";
|
||||
config.allowUnfree = true;
|
||||
|
@@ -217,6 +217,9 @@
      asterisk = 192;
      plex = 193;
      bird = 195;
+     grafana = 196;
+     skydns = 197;
+     ripple-rest = 198;

      # When adding a uid, make sure it doesn't match an existing gid. And don't use uids above 399!

@@ -412,6 +415,9 @@
      plex = 193;
      sabnzbd = 194;
      bird = 195;
+     #grafana = 196; #unused
+     #skydns = 197; #unused
+     #ripple-rest = 198; #unused

      # When adding a gid, make sure it doesn't match an existing
      # uid. Users and groups with the same name should have equal
@@ -212,6 +212,7 @@
  ./services/misc/plex.nix
  ./services/misc/redmine.nix
  ./services/misc/rippled.nix
+ ./services/misc/ripple-rest.nix
  ./services/misc/ripple-data-api.nix
  ./services/misc/rogue.nix
  ./services/misc/siproxd.nix
@@ -225,6 +226,7 @@
  ./services/monitoring/collectd.nix
  ./services/monitoring/das_watchdog.nix
  ./services/monitoring/dd-agent.nix
+ ./services/monitoring/grafana.nix
  ./services/monitoring/graphite.nix
  ./services/monitoring/monit.nix
  ./services/monitoring/munin.nix
@@ -317,6 +319,7 @@
  ./services/networking/sabnzbd.nix
  ./services/networking/searx.nix
  ./services/networking/seeks.nix
+ ./services/networking/skydns.nix
  ./services/networking/spiped.nix
  ./services/networking/sslh.nix
  ./services/networking/ssh/lshd.nix
@ -1,5 +1,5 @@
|
||||
# Provide a basic configuration for installation devices like CDs.
|
||||
{ config, lib, ... }:
|
||||
{ config, pkgs, lib, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
@ -13,10 +13,17 @@ with lib;
|
||||
# Allow "nixos-rebuild" to work properly by providing
|
||||
# /etc/nixos/configuration.nix.
|
||||
./clone-config.nix
|
||||
|
||||
# Include a copy of Nixpkgs so that nixos-install works out of
|
||||
# the box.
|
||||
../installer/cd-dvd/channel.nix
|
||||
];
|
||||
|
||||
config = {
|
||||
|
||||
# Enable in installer, even if the minimal profile disables it.
|
||||
services.nixosManual.enable = mkForce true;
|
||||
|
||||
# Show the manual.
|
||||
services.nixosManual.showManual = true;
|
||||
|
||||
@ -43,7 +50,7 @@ with lib;
|
||||
systemd.services.sshd.wantedBy = mkOverride 50 [];
|
||||
|
||||
# Enable wpa_supplicant, but don't start it by default.
|
||||
networking.wireless.enable = true;
|
||||
networking.wireless.enable = mkDefault true;
|
||||
jobs.wpa_supplicant.startOn = mkOverride 50 "";
|
||||
|
||||
# Tell the Nix evaluator to garbage collect more aggressively.
|
||||
@ -51,5 +58,17 @@ with lib;
|
||||
# (yet) have swap set up.
|
||||
environment.variables.GC_INITIAL_HEAP_SIZE = "100000";
|
||||
|
||||
# Make the installer more likely to succeed in low memory
|
||||
# environments. The kernel's overcommit heuristics bite us
|
||||
# fairly often, preventing processes such as nix-worker or
|
||||
# download-using-manifests.pl from forking even if there is
|
||||
# plenty of free memory.
|
||||
boot.kernel.sysctl."vm.overcommit_memory" = "1";
|
||||
|
||||
# To speed up installation a little bit, include the complete
|
||||
# stdenv in the Nix store on the CD. Archive::Cpio is needed for
|
||||
# the initrd builder.
|
||||
system.extraDependencies = [ pkgs.stdenv pkgs.busybox pkgs.perlPackages.ArchiveCpio ];
|
||||
|
||||
};
|
||||
}
|
||||
|
@ -286,7 +286,7 @@ in {
|
||||
|
||||
clusterDomain = mkOption {
|
||||
description = "Use alternative domain.";
|
||||
default = "";
|
||||
default = "kubernetes.io";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
@ -322,13 +322,35 @@ in {
|
||||
type = types.str;
|
||||
};
|
||||
};
|
||||
|
||||
kube2sky = {
|
||||
enable = mkEnableOption "Whether to enable kube2sky dns service.";
|
||||
|
||||
domain = mkOption {
|
||||
description = "Kuberntes kube2sky domain under which all DNS names will be hosted.";
|
||||
default = cfg.kubelet.clusterDomain;
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
master = mkOption {
|
||||
description = "Kubernetes apiserver address";
|
||||
default = "${cfg.apiserver.address}:${toString cfg.apiserver.port}";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
extraOpts = mkOption {
|
||||
description = "Kubernetes kube2sky extra command line options.";
|
||||
default = "";
|
||||
type = types.str;
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
###### implementation
|
||||
|
||||
config = mkMerge [
|
||||
(mkIf cfg.apiserver.enable {
|
||||
systemd.services.kubernetes-apiserver = {
|
||||
systemd.services.kube-apiserver = {
|
||||
description = "Kubernetes Api Server";
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
requires = ["kubernetes-setup.service"];
|
||||
@ -343,26 +365,25 @@ in {
|
||||
(concatImapStringsSep "\n" (i: v: v + "," + (toString i))
|
||||
(mapAttrsToList (name: token: token + "," + name) cfg.apiserver.tokenAuth));
|
||||
in ''${cfg.package}/bin/kube-apiserver \
|
||||
--etcd_servers=${concatMapStringsSep "," (f: "http://${f}") cfg.etcdServers} \
|
||||
--address=${cfg.apiserver.address} \
|
||||
--port=${toString cfg.apiserver.port} \
|
||||
--read_only_port=${toString cfg.apiserver.readOnlyPort} \
|
||||
--public_address_override=${cfg.apiserver.publicAddress} \
|
||||
--allow_privileged=${if cfg.apiserver.allowPrivileged then "true" else "false"} \
|
||||
--etcd-servers=${concatMapStringsSep "," (f: "http://${f}") cfg.etcdServers} \
|
||||
--insecure-bind-address=${cfg.apiserver.address} \
|
||||
--insecure-port=${toString cfg.apiserver.port} \
|
||||
--read-only-port=${toString cfg.apiserver.readOnlyPort} \
|
||||
--bind-address=${cfg.apiserver.publicAddress} \
|
||||
--allow-privileged=${if cfg.apiserver.allowPrivileged then "true" else "false"} \
|
||||
${optionalString (cfg.apiserver.tlsCertFile!="")
|
||||
"--tls_cert_file=${cfg.apiserver.tlsCertFile}"} \
|
||||
"--tls-cert-file=${cfg.apiserver.tlsCertFile}"} \
|
||||
${optionalString (cfg.apiserver.tlsPrivateKeyFile!="")
|
||||
"--tls_private_key_file=${cfg.apiserver.tlsPrivateKeyFile}"} \
|
||||
"--tls-private-key-file=${cfg.apiserver.tlsPrivateKeyFile}"} \
|
||||
${optionalString (cfg.apiserver.tokenAuth!=[])
|
||||
"--token_auth_file=${tokenAuthFile}"} \
|
||||
--authorization_mode=${cfg.apiserver.authorizationMode} \
|
||||
"--token-auth-file=${tokenAuthFile}"} \
|
||||
--authorization-mode=${cfg.apiserver.authorizationMode} \
|
||||
${optionalString (cfg.apiserver.authorizationMode == "ABAC")
|
||||
"--authorization_policy_file=${authorizationPolicyFile}"} \
|
||||
--secure_port=${toString cfg.apiserver.securePort} \
|
||||
--portal_net=${cfg.apiserver.portalNet} \
|
||||
"--authorization-policy-file=${authorizationPolicyFile}"} \
|
||||
--secure-port=${toString cfg.apiserver.securePort} \
|
||||
--service-cluster-ip-range=${cfg.apiserver.portalNet} \
|
||||
--logtostderr=true \
|
||||
--runtime_config=api/v1beta3 \
|
||||
${optionalString cfg.verbose "--v=6 --log_flush_frequency=1s"} \
|
||||
${optionalString cfg.verbose "--v=6 --log-flush-frequency=1s"} \
|
||||
${cfg.apiserver.extraOpts}
|
||||
'';
|
||||
User = "kubernetes";
|
||||
@ -376,7 +397,7 @@ in {
|
||||
})
|
||||
|
||||
(mkIf cfg.scheduler.enable {
|
||||
systemd.services.kubernetes-scheduler = {
|
||||
systemd.services.kube-scheduler = {
|
||||
description = "Kubernetes Scheduler Service";
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
after = [ "network-interfaces.target" "kubernetes-apiserver.service" ];
|
||||
@ -386,7 +407,7 @@ in {
|
||||
--port=${toString cfg.scheduler.port} \
|
||||
--master=${cfg.scheduler.master} \
|
||||
--logtostderr=true \
|
||||
${optionalString cfg.verbose "--v=6 --log_flush_frequency=1s"} \
|
||||
${optionalString cfg.verbose "--v=6 --log-flush-frequency=1s"} \
|
||||
${cfg.scheduler.extraOpts}
|
||||
'';
|
||||
User = "kubernetes";
|
||||
@ -395,7 +416,7 @@ in {
|
||||
})
|
||||
|
||||
(mkIf cfg.controllerManager.enable {
|
||||
systemd.services.kubernetes-controller-manager = {
|
||||
systemd.services.kube-controller-manager = {
|
||||
description = "Kubernetes Controller Manager Service";
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
after = [ "network-interfaces.target" "kubernetes-apiserver.service" ];
|
||||
@ -406,7 +427,7 @@ in {
|
||||
--master=${cfg.controllerManager.master} \
|
||||
--machines=${concatStringsSep "," cfg.controllerManager.machines} \
|
||||
--logtostderr=true \
|
||||
${optionalString cfg.verbose "--v=6 --log_flush_frequency=1s"} \
|
||||
${optionalString cfg.verbose "--v=6 --log-flush-frequency=1s"} \
|
||||
${cfg.controllerManager.extraOpts}
|
||||
'';
|
||||
User = "kubernetes";
|
||||
@ -415,7 +436,7 @@ in {
|
||||
})
|
||||
|
||||
(mkIf cfg.kubelet.enable {
|
||||
systemd.services.kubernetes-kubelet = {
|
||||
systemd.services.kubelet = {
|
||||
description = "Kubernetes Kubelet Service";
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
requires = ["kubernetes-setup.service"];
|
||||
@ -423,17 +444,17 @@ in {
|
||||
script = ''
|
||||
export PATH="/bin:/sbin:/usr/bin:/usr/sbin:$PATH"
|
||||
exec ${cfg.package}/bin/kubelet \
|
||||
--api_servers=${concatMapStringsSep "," (f: "http://${f}") cfg.kubelet.apiServers} \
|
||||
--api-servers=${concatMapStringsSep "," (f: "http://${f}") cfg.kubelet.apiServers} \
|
||||
--address=${cfg.kubelet.address} \
|
||||
--port=${toString cfg.kubelet.port} \
|
||||
--hostname_override=${cfg.kubelet.hostname} \
|
||||
--allow_privileged=${if cfg.kubelet.allowPrivileged then "true" else "false"} \
|
||||
--root_dir=${cfg.dataDir} \
|
||||
--hostname-override=${cfg.kubelet.hostname} \
|
||||
--allow-privileged=${if cfg.kubelet.allowPrivileged then "true" else "false"} \
|
||||
--root-dir=${cfg.dataDir} \
|
||||
--cadvisor_port=${toString cfg.kubelet.cadvisorPort} \
|
||||
${optionalString (cfg.kubelet.clusterDns != "")
|
||||
''--cluster_dns=${cfg.kubelet.clusterDns}''} \
|
||||
''--cluster-dns=${cfg.kubelet.clusterDns}''} \
|
||||
${optionalString (cfg.kubelet.clusterDomain != "")
|
||||
''--cluster_domain=${cfg.kubelet.clusterDomain}''} \
|
||||
''--cluster-domain=${cfg.kubelet.clusterDomain}''} \
|
||||
--logtostderr=true \
|
||||
${optionalString cfg.verbose "--v=6 --log_flush_frequency=1s"} \
|
||||
${cfg.kubelet.extraOpts}
|
||||
@ -443,26 +464,49 @@ in {
|
||||
})
|
||||
|
||||
(mkIf cfg.proxy.enable {
|
||||
systemd.services.kubernetes-proxy = {
|
||||
systemd.services.kube-proxy = {
|
||||
description = "Kubernetes Proxy Service";
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
after = [ "network-interfaces.target" "etcd.service" ];
|
||||
serviceConfig = {
|
||||
ExecStart = ''${cfg.package}/bin/kube-proxy \
|
||||
--master=${cfg.proxy.master} \
|
||||
--bind_address=${cfg.proxy.address} \
|
||||
--bind-address=${cfg.proxy.address} \
|
||||
--logtostderr=true \
|
||||
${optionalString cfg.verbose "--v=6 --log_flush_frequency=1s"} \
|
||||
${optionalString cfg.verbose "--v=6 --log-flush-frequency=1s"} \
|
||||
${cfg.proxy.extraOpts}
|
||||
'';
|
||||
};
|
||||
};
|
||||
})
|
||||
|
||||
(mkIf cfg.kube2sky.enable {
|
||||
systemd.services.kube2sky = {
|
||||
description = "Kubernetes Dns Bridge Service";
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
after = [ "network.target" "skydns.service" "etcd.service" "kubernetes-apiserver.service" ];
|
||||
serviceConfig = {
|
||||
ExecStart = ''${cfg.package}/bin/kube2sky \
|
||||
-etcd-server=http://${head cfg.etcdServers} \
|
||||
-domain=${cfg.kube2sky.domain} \
|
||||
-kube_master_url=http://${cfg.kube2sky.master} \
|
||||
-logtostderr=true \
|
||||
${optionalString cfg.verbose "--v=6 --log-flush-frequency=1s"} \
|
||||
${cfg.kube2sky.extraOpts}
|
||||
'';
|
||||
User = "kubernetes";
|
||||
};
|
||||
};
|
||||
|
||||
services.skydns.enable = mkDefault true;
|
||||
services.skydns.domain = mkDefault cfg.kubelet.clusterDomain;
|
||||
})
|
||||
|
||||
(mkIf (any (el: el == "master") cfg.roles) {
|
||||
services.kubernetes.apiserver.enable = mkDefault true;
|
||||
services.kubernetes.scheduler.enable = mkDefault true;
|
||||
services.kubernetes.controllerManager.enable = mkDefault true;
|
||||
services.kubernetes.kube2sky.enable = mkDefault true;
|
||||
})
|
||||
|
||||
(mkIf (any (el: el == "node") cfg.roles) {
|
||||
|
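To tie the renamed units and the new kube2sky service together, a hedged example of a master-node configuration built only from options visible above (assuming the option tree lives under services.kubernetes, as the module suggests):

  # configuration.nix (illustrative)
  services.kubernetes = {
    roles = [ "master" ];                 # enables apiserver, scheduler,
                                          # controllerManager and kube2sky by default
    kubelet.clusterDomain = "kubernetes.io";
    kube2sky.domain = "kubernetes.io";    # defaults to kubelet.clusterDomain
  };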
@@ -180,7 +180,8 @@ in
        chown -R ${cfg.user} ${cfg.pidDir}

        # Make the socket directory
-       mkdir -m 0755 -p /run/mysqld
+       mkdir -p /run/mysqld
+       chmod 0755 /run/mysqld
        chown -R ${cfg.user} /run/mysqld
      '';
@ -380,6 +380,7 @@ in
|
||||
${pkgs.coreutils}/bin/chmod -R ug+rwX /var/postfix/queue
|
||||
${pkgs.coreutils}/bin/chown root:root /var/spool/mail
|
||||
${pkgs.coreutils}/bin/chmod a+rwxt /var/spool/mail
|
||||
${pkgs.coreutils}/bin/ln -sf /var/spool/mail /var/mail
|
||||
|
||||
ln -sf "${pkgs.postfix}/share/postfix/conf/"* /var/postfix/conf
|
||||
|
||||
|
@ -29,7 +29,7 @@ in {
|
||||
|
||||
storagePath = mkOption {
|
||||
type = types.path;
|
||||
default = "/var/lib/docker/registry";
|
||||
default = "/var/lib/docker-registry";
|
||||
description = "Docker registry storage path.";
|
||||
};
|
||||
|
||||
@ -61,14 +61,9 @@ in {
|
||||
User = "docker-registry";
|
||||
Group = "docker";
|
||||
PermissionsStartOnly = true;
|
||||
WorkingDirectory = cfg.storagePath;
|
||||
};
|
||||
|
||||
preStart = ''
|
||||
mkdir -p ${cfg.storagePath}
|
||||
if [ "$(id -u)" = 0 ]; then
|
||||
chown -R docker-registry:docker ${cfg.storagePath}
|
||||
fi
|
||||
'';
|
||||
postStart = ''
|
||||
until ${pkgs.curl}/bin/curl -s -o /dev/null 'http://${cfg.host}:${toString cfg.port}/'; do
|
||||
sleep 1;
|
||||
@ -77,6 +72,10 @@ in {
|
||||
};
|
||||
|
||||
users.extraGroups.docker.gid = mkDefault config.ids.gids.docker;
|
||||
users.extraUsers.docker-registry.uid = config.ids.uids.docker-registry;
|
||||
users.extraUsers.docker-registry = {
|
||||
createHome = true;
|
||||
home = cfg.storagePath;
|
||||
uid = config.ids.uids.docker-registry;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
@@ -49,10 +49,10 @@ let
        </server>
        <import hidden-files="no">
          <scripting script-charset="UTF-8">
-           <common-script>/nix/store/cngbzn39vidd6jm4wgzxfafqll74ybfa-mediatomb-0.12.1/share/mediatomb/js/common.js</common-script>
-           <playlist-script>/nix/store/cngbzn39vidd6jm4wgzxfafqll74ybfa-mediatomb-0.12.1/share/mediatomb/js/playlists.js</playlist-script>
+           <common-script>${pkgs.mediatomb}/share/mediatomb/js/common.js</common-script>
+           <playlist-script>${pkgs.mediatomb}/share/mediatomb/js/playlists.js</playlist-script>
            <virtual-layout type="builtin">
-             <import-script>/nix/store/cngbzn39vidd6jm4wgzxfafqll74ybfa-mediatomb-0.12.1/share/mediatomb/js/import.js</import-script>
+             <import-script>${pkgs.mediatomb}/share/mediatomb/js/import.js</import-script>
            </virtual-layout>
          </scripting>
        <mappings>
@ -226,7 +226,7 @@ in
|
||||
chmod -Rc u=rwX,go= '${cfg.nslave.cachedir}'
|
||||
'';
|
||||
|
||||
path = with pkgs; [ imagemagick ];
|
||||
path = with pkgs; [ imagemagick pdftk ];
|
||||
environment = {
|
||||
PYTHONPATH = concatMapStringsSep ":"
|
||||
(m: "${pypkgs.${m}}/lib/${python.libPrefix}/site-packages")
|
||||
|
nixos/modules/services/misc/ripple-rest.nix (new file, 110 lines)
@ -0,0 +1,110 @@
|
||||
{ config, pkgs, lib, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
cfg = config.services.rippleRest;
|
||||
|
||||
configFile = pkgs.writeText "ripple-rest-config.json" (builtins.toJSON {
|
||||
config_version = "2.0.3";
|
||||
debug = cfg.debug;
|
||||
port = cfg.port;
|
||||
host = cfg.host;
|
||||
ssl_enabled = cfg.ssl.enable;
|
||||
ssl = {
|
||||
key_path = cfg.ssl.keyPath;
|
||||
cert_path = cfg.ssl.certPath;
|
||||
reject_unathorized = cfg.ssl.rejectUnathorized;
|
||||
};
|
||||
db_path = cfg.dbPath;
|
||||
max_transaction_fee = cfg.maxTransactionFee;
|
||||
rippled_servers = cfg.rippleds;
|
||||
});
|
||||
|
||||
in {
|
||||
options.services.rippleRest = {
|
||||
enable = mkEnableOption "Whether to enable ripple rest.";
|
||||
|
||||
debug = mkEnableOption "Whether to enable debug for ripple-rest.";
|
||||
|
||||
host = mkOption {
|
||||
description = "Ripple rest host.";
|
||||
default = "localhost";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
port = mkOption {
|
||||
description = "Ripple rest port.";
|
||||
default = 5990;
|
||||
type = types.int;
|
||||
};
|
||||
|
||||
ssl = {
|
||||
enable = mkEnableOption "Whether to enable ssl.";
|
||||
|
||||
keyPath = mkOption {
|
||||
description = "Path to the ripple rest key file.";
|
||||
default = null;
|
||||
type = types.nullOr types.path;
|
||||
};
|
||||
|
||||
|
||||
certPath = mkOption {
|
||||
description = "Path to the ripple rest cert file.";
|
||||
default = null;
|
||||
type = types.nullOr types.path;
|
||||
};
|
||||
|
||||
rejectUnathorized = mkOption {
|
||||
description = "Whether to reject unatohroized.";
|
||||
default = true;
|
||||
type = types.bool;
|
||||
};
|
||||
};
|
||||
|
||||
dbPath = mkOption {
|
||||
description = "Ripple rest database path.";
|
||||
default = "${cfg.dataDir}/ripple-rest.db";
|
||||
type = types.path;
|
||||
};
|
||||
|
||||
maxTransactionFee = mkOption {
|
||||
description = "Ripple rest max transaction fee.";
|
||||
default = 1000000;
|
||||
type = types.int;
|
||||
};
|
||||
|
||||
rippleds = mkOption {
|
||||
description = "List of rippled servers.";
|
||||
default = [
|
||||
"wss://s1.ripple.com:443"
|
||||
];
|
||||
type = types.listOf types.str;
|
||||
};
|
||||
|
||||
dataDir = mkOption {
|
||||
description = "Ripple rest data directory.";
|
||||
default = "/var/lib/ripple-rest";
|
||||
type = types.path;
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf (cfg.enable) {
|
||||
systemd.services.ripple-rest = {
|
||||
wantedBy = [ "multi-user.target"];
|
||||
after = ["network.target" ];
|
||||
environment.NODE_PATH="${pkgs.ripple-rest}/lib/node_modules/ripple-rest/node_modules";
|
||||
serviceConfig = {
|
||||
ExecStart = "${pkgs.nodejs}/bin/node ${pkgs.ripple-rest}/lib/node_modules/ripple-rest/server/server.js --config ${configFile}";
|
||||
User = "ripple-rest";
|
||||
};
|
||||
};
|
||||
|
||||
users.extraUsers.postgres = {
|
||||
name = "ripple-rest";
|
||||
uid = config.ids.uids.ripple-rest;
|
||||
createHome = true;
|
||||
home = cfg.dataDir;
|
||||
};
|
||||
};
|
||||
}
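A minimal sketch of enabling the new module with its defaults, plus one overridden option; all option names are taken from the file above:

  # configuration.nix (illustrative)
  services.rippleRest = {
    enable = true;
    port = 5990;                               # default shown above
    rippleds = [ "wss://s1.ripple.com:443" ];  # default rippled server
  };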
|
@ -27,7 +27,7 @@ let
|
||||
protocol=${concatStringsSep "," p.protocol}
|
||||
${optionalString (p.user != "") "user=${p.user}"}
|
||||
${optionalString (p.password != "") "password=${p.password}"}
|
||||
admin=${if p.admin then "allow" else "no"}
|
||||
admin=${concatStringsSep "," p.admin}
|
||||
${optionalString (p.ssl.key != null) "ssl_key=${p.ssl.key}"}
|
||||
${optionalString (p.ssl.cert != null) "ssl_cert=${p.ssl.cert}"}
|
||||
${optionalString (p.ssl.chain != null) "ssl_chain=${p.ssl.chain}"}
|
||||
@ -118,9 +118,9 @@ let
|
||||
};
|
||||
|
||||
admin = mkOption {
|
||||
description = "Controls whether or not administrative commands are allowed.";
|
||||
type = types.bool;
|
||||
default = false;
|
||||
description = "A comma-separated list of admin IP addresses.";
|
||||
type = types.listOf types.str;
|
||||
default = ["127.0.0.1"];
|
||||
};
|
||||
|
||||
ssl = {
|
||||
@ -156,7 +156,7 @@ let
|
||||
dbOptions = {
|
||||
type = mkOption {
|
||||
description = "Rippled database type.";
|
||||
type = types.enum ["rocksdb" "nudb" "sqlite" "hyperleveldb"];
|
||||
type = types.enum ["rocksdb" "nudb"];
|
||||
default = "rocksdb";
|
||||
};
|
||||
|
||||
@ -217,7 +217,7 @@ in
|
||||
default = {
|
||||
rpc = {
|
||||
port = 5005;
|
||||
admin = true;
|
||||
admin = ["127.0.0.1"];
|
||||
protocol = ["http"];
|
||||
};
|
||||
|
||||
|
nixos/modules/services/monitoring/grafana.nix (new file, 335 lines)
@ -0,0 +1,335 @@
|
||||
{ config, lib, pkgs, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
cfg = config.services.grafana;
|
||||
|
||||
b2s = val: if val then "true" else "false";
|
||||
|
||||
cfgFile = pkgs.writeText "grafana.ini" ''
|
||||
app_name = grafana
|
||||
app_mode = production
|
||||
|
||||
[server]
|
||||
; protocol (http or https)
|
||||
protocol = ${cfg.protocol}
|
||||
; the ip address to bind to, empty will bind to all interfaces
|
||||
http_addr = ${cfg.addr}
|
||||
; the http port to use
|
||||
http_port = ${toString cfg.port}
|
||||
; The public facing domain name used to access grafana from a browser
|
||||
domain = ${cfg.domain}
|
||||
; the full public facing url
|
||||
root_url = ${cfg.rootUrl}
|
||||
router_logging = false
|
||||
; the path relative to the binary where the static (html/js/css) files are placed
|
||||
static_root_path = ${cfg.staticRootPath}
|
||||
; enable gzip
|
||||
enable_gzip = false
|
||||
; https certs & key file
|
||||
cert_file = ${cfg.certFile}
|
||||
cert_key = ${cfg.certKey}
|
||||
|
||||
[analytics]
|
||||
# Server reporting, sends usage counters to stats.grafana.org every 24 hours.
|
||||
# No ip addresses are being tracked, only simple counters to track
|
||||
# running instances, dashboard and error counts. It is very helpful to us.
|
||||
# Change this option to false to disable reporting.
|
||||
reporting_enabled = true
|
||||
; Google Analytics universal tracking code, only enabled if you specify an id here
|
||||
google_analytics_ua_id =
|
||||
|
||||
[database]
|
||||
; Either "mysql", "postgres" or "sqlite3", it's your choice
|
||||
type = ${cfg.database.type}
|
||||
host = ${cfg.database.host}
|
||||
name = ${cfg.database.name}
|
||||
user = ${cfg.database.user}
|
||||
password = ${cfg.database.password}
|
||||
; For "postgres" only, either "disable", "require" or "verify-full"
|
||||
ssl_mode = disable
|
||||
; For "sqlite3" only
|
||||
path = ${cfg.database.path}
|
||||
|
||||
[session]
|
||||
; Either "memory", "file", "redis", "mysql", default is "memory"
|
||||
provider = file
|
||||
; Provider config options
|
||||
; memory: not have any config yet
|
||||
; file: session file path, e.g. `data/sessions`
|
||||
; redis: config like redis server addr, poolSize, password, e.g. `127.0.0.1:6379,100,grafana`
|
||||
; mysql: go-sql-driver/mysql dsn config string, e.g. `user:password@tcp(127.0.0.1)/database_name`
|
||||
provider_config = data/sessions
|
||||
; Session cookie name
|
||||
cookie_name = grafana_sess
|
||||
; If you use session in https only, default is false
|
||||
cookie_secure = false
|
||||
; Session life time, default is 86400
|
||||
session_life_time = 86400
|
||||
; session id hash func, Either "sha1", "sha256" or "md5" default is sha1
|
||||
session_id_hashfunc = sha1
|
||||
; Session hash key, default is use random string
|
||||
session_id_hashkey =
|
||||
|
||||
[security]
|
||||
; default admin user, created on startup
|
||||
admin_user = ${cfg.security.adminUser}
|
||||
; default admin password, can be changed before first start of grafana, or in profile settings
|
||||
admin_password = ${cfg.security.adminPassword}
|
||||
; used for signing
|
||||
secret_key = ${cfg.security.secretKey}
|
||||
; Auto-login remember days
|
||||
login_remember_days = 7
|
||||
cookie_username = grafana_user
|
||||
cookie_remember_name = grafana_remember
|
||||
|
||||
[users]
|
||||
; disable user signup / registration
|
||||
allow_sign_up = ${b2s cfg.users.allowSignUp}
|
||||
; Allow non admin users to create organizations
|
||||
allow_org_create = ${b2s cfg.users.allowOrgCreate}
|
||||
# Set to true to automatically assign new users to the default organization (id 1)
|
||||
auto_assign_org = ${b2s cfg.users.autoAssignOrg}
|
||||
; Default role new users will be automatically assigned (if disabled above is set to true)
|
||||
auto_assign_org_role = ${cfg.users.autoAssignOrgRole}
|
||||
|
||||
[auth.anonymous]
|
||||
; enable anonymous access
|
||||
enabled = ${b2s cfg.auth.anonymous.enable}
|
||||
; specify organization name that should be used for unauthenticated users
|
||||
org_name = Main Org.
|
||||
; specify role for unauthenticated users
|
||||
org_role = Viewer
|
||||
|
||||
[auth.github]
|
||||
enabled = false
|
||||
client_id = some_id
|
||||
client_secret = some_secret
|
||||
scopes = user:email
|
||||
auth_url = https://github.com/login/oauth/authorize
|
||||
token_url = https://github.com/login/oauth/access_token
|
||||
|
||||
[auth.google]
|
||||
enabled = false
|
||||
client_id = some_client_id
|
||||
client_secret = some_client_secret
|
||||
scopes = https://www.googleapis.com/auth/userinfo.profile https://www.googleapis.com/auth/userinfo.email
|
||||
auth_url = https://accounts.google.com/o/oauth2/auth
|
||||
token_url = https://accounts.google.com/o/oauth2/token
|
||||
|
||||
[log]
|
||||
root_path = data/log
|
||||
; Either "console", "file", default is "console"
|
||||
; Use comma to separate multiple modes, e.g. "console, file"
|
||||
mode = console
|
||||
; Buffer length of channel, keep it as it is if you don't know what it is.
|
||||
buffer_len = 10000
|
||||
; Either "Trace", "Debug", "Info", "Warn", "Error", "Critical", default is "Trace"
|
||||
level = Info
|
||||
|
||||
; For "console" mode only
|
||||
[log.console]
|
||||
level =
|
||||
|
||||
; For "file" mode only
|
||||
[log.file]
|
||||
level =
|
||||
; This enables automated log rotate(switch of following options), default is true
|
||||
log_rotate = true
|
||||
; Max line number of single file, default is 1000000
|
||||
max_lines = 1000000
|
||||
; Max size shift of single file, default is 28 means 1 << 28, 256MB
|
||||
max_lines_shift = 28
|
||||
; Segment log daily, default is true
|
||||
daily_rotate = true
|
||||
; Expired days of log file(delete after max days), default is 7
|
||||
max_days = 7
|
||||
|
||||
[event_publisher]
|
||||
enabled = false
|
||||
rabbitmq_url = amqp://localhost/
|
||||
exchange = grafana_events
|
||||
'';
|
||||
|
||||
in {
|
||||
options.services.grafana = {
|
||||
enable = mkEnableOption "Whether to enable grafana.";
|
||||
|
||||
protocol = mkOption {
|
||||
description = "Which protocol to listen.";
|
||||
default = "http";
|
||||
type = types.enum ["http" "https"];
|
||||
};
|
||||
|
||||
addr = mkOption {
|
||||
description = "Listening address.";
|
||||
default = "127.0.0.1";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
port = mkOption {
|
||||
description = "Listening port.";
|
||||
default = 3000;
|
||||
type = types.int;
|
||||
};
|
||||
|
||||
domain = mkOption {
|
||||
description = "The public facing domain name used to access grafana from a browser.";
|
||||
default = "localhost";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
rootUrl = mkOption {
|
||||
description = "Full public facing url.";
|
||||
default = "%(protocol)s://%(domain)s:%(http_port)s/";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
certFile = mkOption {
|
||||
description = "Cert file for ssl.";
|
||||
default = "";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
certKey = mkOption {
|
||||
description = "Cert key for ssl.";
|
||||
default = "";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
staticRootPath = mkOption {
|
||||
description = "Root path for static assets.";
|
||||
default = "${cfg.package}/share/go/src/github.com/grafana/grafana/public";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
package = mkOption {
|
||||
description = "Package to use.";
|
||||
default = pkgs.goPackages.grafana;
|
||||
type = types.package;
|
||||
};
|
||||
|
||||
dataDir = mkOption {
|
||||
description = "Data directory.";
|
||||
default = "/var/lib/grafana";
|
||||
type = types.path;
|
||||
};
|
||||
|
||||
database = {
|
||||
type = mkOption {
|
||||
description = "Database type.";
|
||||
default = "sqlite3";
|
||||
type = types.enum ["mysql" "sqlite3" "postgresql"];
|
||||
};
|
||||
|
||||
host = mkOption {
|
||||
description = "Database host.";
|
||||
default = "127.0.0.1:3306";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
name = mkOption {
|
||||
description = "Database name.";
|
||||
default = "grafana";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
user = mkOption {
|
||||
description = "Database user.";
|
||||
default = "root";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
password = mkOption {
|
||||
description = "Database password.";
|
||||
default = "";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
path = mkOption {
|
||||
description = "Database path.";
|
||||
default = "${cfg.dataDir}/data/grafana.db";
|
||||
type = types.path;
|
||||
};
|
||||
};
|
||||
|
||||
security = {
|
||||
adminUser = mkOption {
|
||||
description = "Default admin username.";
|
||||
default = "admin";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
adminPassword = mkOption {
|
||||
description = "Default admin password.";
|
||||
default = "admin";
|
||||
type = types.str;
|
||||
};
|
||||
|
||||
secretKey = mkOption {
|
||||
description = "Secret key used for signing.";
|
||||
default = "SW2YcwTIb9zpOOhoPsMm";
|
||||
type = types.str;
|
||||
};
|
||||
};
|
||||
|
||||
users = {
|
||||
allowSignUp = mkOption {
|
||||
description = "Disable user signup / registration";
|
||||
default = false;
|
||||
type = types.bool;
|
||||
};
|
||||
|
||||
allowOrgCreate = mkOption {
|
||||
description = "Whether user is allowed to create organizations.";
|
||||
default = false;
|
||||
type = types.bool;
|
||||
};
|
||||
|
||||
autoAssignOrg = mkOption {
|
||||
description = "Whether to automatically assign new users to default org.";
|
||||
default = true;
|
||||
type = types.bool;
|
||||
};
|
||||
|
||||
autoAssignOrgRole = mkOption {
|
||||
description = "Default role new users will be auto assigned.";
|
||||
default = "Viewer";
|
||||
type = types.enum ["Viewer" "Editor"];
|
||||
};
|
||||
};
|
||||
|
||||
auth.anonymous = {
|
||||
enable = mkOption {
|
||||
description = "Whether to allow anonymous access";
|
||||
default = false;
|
||||
type = types.bool;
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
warnings = [
|
||||
"Grafana passwords will be stored as plaintext in nix store!"
|
||||
];
|
||||
|
||||
systemd.services.grafana = {
|
||||
description = "Grafana Service Daemon";
|
||||
wantedBy = ["multi-user.target"];
|
||||
after = ["networking.target"];
|
||||
serviceConfig = {
|
||||
ExecStart = "${cfg.package}/bin/grafana --config ${cfgFile} web";
|
||||
WorkingDirectory = cfg.dataDir;
|
||||
User = "grafana";
|
||||
};
|
||||
};
|
||||
|
||||
users.extraUsers.grafana = {
|
||||
uid = config.ids.uids.grafana;
|
||||
description = "Grafana user";
|
||||
home = cfg.dataDir;
|
||||
createHome = true;
|
||||
};
|
||||
};
|
||||
}
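A usage sketch for the new module; with the sqlite3 default nothing else is strictly required, but the admin credentials should be overridden (note the warning above that they end up in the Nix store as plain text):

  # configuration.nix (illustrative)
  services.grafana = {
    enable = true;
    port = 3000;                            # default
    domain = "grafana.example.org";
    security.adminUser = "admin";
    security.adminPassword = "changeme";    # stored world-readable in the store!
  };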
|
@ -354,6 +354,16 @@ in {
|
||||
type = types.lines;
|
||||
};
|
||||
};
|
||||
|
||||
beacon = {
|
||||
enable = mkEnableOption "Whether to enable graphite beacon.";
|
||||
|
||||
config = mkOption {
|
||||
description = "Graphite beacon configuration.";
|
||||
default = {};
|
||||
type = types.attrs;
|
||||
};
|
||||
};
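Since beacon.config is an arbitrary attribute set serialized to JSON, a hedged example might look like the following; the alert fields are illustrative guesses at graphite-beacon's own configuration keys and are not taken from this diff (the option path services.graphite.beacon follows the surrounding module):

  # configuration.nix (illustrative)
  services.graphite.beacon = {
    enable = true;
    config = {
      graphite_url = "http://localhost:8080";
      alerts = [
        { name = "load"; query = "aliasByNode(system.load.*,1)"; rules = [ "warning: > 3" ]; }
      ];
    };
  };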
|
||||
};
|
||||
|
||||
###### implementation
|
||||
@ -535,10 +545,25 @@ in {
|
||||
environment.systemPackages = [ pkgs.pythonPackages.graphite_pager ];
|
||||
})
|
||||
|
||||
(mkIf cfg.beacon.enable {
|
||||
systemd.services.graphite-beacon = {
|
||||
description = "Grpahite Beacon Alerting Daemon";
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
serviceConfig = {
|
||||
ExecStart = ''
|
||||
${pkgs.pythonPackages.graphite_beacon}/bin/graphite-beacon \
|
||||
--config ${pkgs.writeText "graphite-beacon.json" (builtins.toJSON cfg.beacon.config)}
|
||||
'';
|
||||
User = "graphite";
|
||||
Group = "graphite";
|
||||
};
|
||||
};
|
||||
})
|
||||
|
||||
(mkIf (
|
||||
cfg.carbon.enableCache || cfg.carbon.enableAggregator || cfg.carbon.enableRelay ||
|
||||
cfg.web.enable || cfg.api.enable ||
|
||||
cfg.seyren.enable || cfg.pager.enable
|
||||
cfg.seyren.enable || cfg.pager.enable || cfg.beacon.enable
|
||||
) {
|
||||
users.extraUsers = singleton {
|
||||
name = "graphite";
|
||||
|
@@ -208,8 +208,8 @@ in

    storagePath = mkOption {
      type = types.path;
-     default = "/var/lib/btsync";
-     example = "/var/lib/btsync";
+     default = "/var/lib/btsync/";
+     example = "/var/lib/btsync/";
      description = ''
        Where to store the bittorrent sync files.
      '';
@ -106,6 +106,12 @@ in
|
||||
alerts = {
|
||||
enable = mkEnableOption "Whether to enable consul-alerts";
|
||||
|
||||
package = mkOption {
|
||||
description = "Package to use for consul-alerts.";
|
||||
default = pkgs.consul-alerts;
|
||||
type = types.package;
|
||||
};
|
||||
|
||||
listenAddr = mkOption {
|
||||
description = "Api listening address.";
|
||||
default = "localhost:9000";
|
||||
@ -135,96 +141,101 @@ in
|
||||
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
config = mkIf cfg.enable (
|
||||
mkMerge [{
|
||||
|
||||
users.extraUsers."consul" = {
|
||||
description = "Consul agent daemon user";
|
||||
uid = config.ids.uids.consul;
|
||||
# The shell is needed for health checks
|
||||
shell = "/run/current-system/sw/bin/bash";
|
||||
};
|
||||
|
||||
environment = {
|
||||
etc."consul.json".text = builtins.toJSON configOptions;
|
||||
# We need consul.d to exist for consul to start
|
||||
etc."consul.d/dummy.json".text = "{ }";
|
||||
systemPackages = with pkgs; [ consul ];
|
||||
};
|
||||
|
||||
systemd.services.consul = {
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
after = [ "network.target" ] ++ systemdDevices;
|
||||
bindsTo = systemdDevices;
|
||||
restartTriggers = [ config.environment.etc."consul.json".source ]
|
||||
++ mapAttrsToList (_: d: d.source)
|
||||
(filterAttrs (n: _: hasPrefix "consul.d/" n) config.environment.etc);
|
||||
|
||||
serviceConfig = {
|
||||
ExecStart = "@${pkgs.consul}/bin/consul consul agent -config-dir /etc/consul.d"
|
||||
+ concatMapStrings (n: " -config-file ${n}") configFiles;
|
||||
ExecReload = "${pkgs.consul}/bin/consul reload";
|
||||
PermissionsStartOnly = true;
|
||||
User = if cfg.dropPrivileges then "consul" else null;
|
||||
TimeoutStartSec = "0";
|
||||
} // (optionalAttrs (cfg.leaveOnStop) {
|
||||
ExecStop = "${pkgs.consul}/bin/consul leave";
|
||||
});
|
||||
|
||||
path = with pkgs; [ iproute gnugrep gawk consul ];
|
||||
preStart = ''
|
||||
mkdir -m 0700 -p ${dataDir}
|
||||
chown -R consul ${dataDir}
|
||||
|
||||
# Determine interface addresses
|
||||
getAddrOnce () {
|
||||
ip addr show dev "$1" \
|
||||
| grep 'inet${optionalString (cfg.forceIpv4) " "}.*scope global' \
|
||||
| awk -F '[ /\t]*' '{print $3}' | head -n 1
|
||||
}
|
||||
getAddr () {
|
||||
ADDR="$(getAddrOnce $1)"
|
||||
LEFT=60 # Die after 1 minute
|
||||
while [ -z "$ADDR" ]; do
|
||||
sleep 1
|
||||
LEFT=$(expr $LEFT - 1)
|
||||
if [ "$LEFT" -eq "0" ]; then
|
||||
echo "Address lookup timed out"
|
||||
exit 1
|
||||
fi
|
||||
ADDR="$(getAddrOnce $1)"
|
||||
done
|
||||
echo "$ADDR"
|
||||
}
|
||||
echo "{" > /etc/consul-addrs.json
|
||||
delim=" "
|
||||
''
|
||||
+ concatStrings (flip mapAttrsToList cfg.interface (name: i:
|
||||
optionalString (i != null) ''
|
||||
echo "$delim \"${name}_addr\": \"$(getAddr "${i}")\"" >> /etc/consul-addrs.json
|
||||
delim=","
|
||||
''))
|
||||
+ ''
|
||||
echo "}" >> /etc/consul-addrs.json
|
||||
'';
|
||||
};
|
||||
|
||||
systemd.services.consul-alerts = mkIf (cfg.alerts.enable) {
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
after = [ "consul.service" ];
|
||||
|
||||
path = [ pkgs.consul ];
|
||||
|
||||
serviceConfig = {
|
||||
ExecStart = ''
|
||||
${pkgs.consul-alerts}/bin/consul-alerts start \
|
||||
--alert-addr=${cfg.alerts.listenAddr} \
|
||||
--consul-addr=${cfg.alerts.consulAddr} \
|
||||
${optionalString cfg.alerts.watchChecks "--watch-checks"} \
|
||||
${optionalString cfg.alerts.watchEvents "--watch-events"}
|
||||
'';
|
||||
User = if cfg.dropPrivileges then "consul" else null;
|
||||
users.extraUsers."consul" = {
|
||||
description = "Consul agent daemon user";
|
||||
uid = config.ids.uids.consul;
|
||||
# The shell is needed for health checks
|
||||
shell = "/run/current-system/sw/bin/bash";
|
||||
};
|
||||
};
|
||||
|
||||
};
|
||||
environment = {
|
||||
etc."consul.json".text = builtins.toJSON configOptions;
|
||||
# We need consul.d to exist for consul to start
|
||||
etc."consul.d/dummy.json".text = "{ }";
|
||||
systemPackages = with pkgs; [ consul ];
|
||||
};
|
||||
|
||||
systemd.services.consul = {
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
after = [ "network.target" ] ++ systemdDevices;
|
||||
bindsTo = systemdDevices;
|
||||
restartTriggers = [ config.environment.etc."consul.json".source ]
|
||||
++ mapAttrsToList (_: d: d.source)
|
||||
(filterAttrs (n: _: hasPrefix "consul.d/" n) config.environment.etc);
|
||||
|
||||
serviceConfig = {
|
||||
ExecStart = "@${pkgs.consul}/bin/consul consul agent -config-dir /etc/consul.d"
|
||||
+ concatMapStrings (n: " -config-file ${n}") configFiles;
|
||||
ExecReload = "${pkgs.consul}/bin/consul reload";
|
||||
PermissionsStartOnly = true;
|
||||
User = if cfg.dropPrivileges then "consul" else null;
|
||||
TimeoutStartSec = "0";
|
||||
} // (optionalAttrs (cfg.leaveOnStop) {
|
||||
ExecStop = "${pkgs.consul}/bin/consul leave";
|
||||
});
|
||||
|
||||
path = with pkgs; [ iproute gnugrep gawk consul ];
|
||||
preStart = ''
|
||||
mkdir -m 0700 -p ${dataDir}
|
||||
chown -R consul ${dataDir}
|
||||
|
||||
# Determine interface addresses
|
||||
getAddrOnce () {
|
||||
ip addr show dev "$1" \
|
||||
| grep 'inet${optionalString (cfg.forceIpv4) " "}.*scope global' \
|
||||
| awk -F '[ /\t]*' '{print $3}' | head -n 1
|
||||
}
|
||||
getAddr () {
|
||||
ADDR="$(getAddrOnce $1)"
|
||||
LEFT=60 # Die after 1 minute
|
||||
while [ -z "$ADDR" ]; do
|
||||
sleep 1
|
||||
LEFT=$(expr $LEFT - 1)
|
||||
if [ "$LEFT" -eq "0" ]; then
|
||||
echo "Address lookup timed out"
|
||||
exit 1
|
||||
fi
|
||||
ADDR="$(getAddrOnce $1)"
|
||||
done
|
||||
echo "$ADDR"
|
||||
}
|
||||
echo "{" > /etc/consul-addrs.json
|
||||
delim=" "
|
||||
''
|
||||
+ concatStrings (flip mapAttrsToList cfg.interface (name: i:
|
||||
optionalString (i != null) ''
|
||||
echo "$delim \"${name}_addr\": \"$(getAddr "${i}")\"" >> /etc/consul-addrs.json
|
||||
delim=","
|
||||
''))
|
||||
+ ''
|
||||
echo "}" >> /etc/consul-addrs.json
|
||||
'';
|
||||
};
|
||||
}
|
||||
|
||||
(mkIf (cfg.alerts.enable) {
|
||||
systemd.services.consul-alerts = {
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
after = [ "consul.service" ];
|
||||
|
||||
path = [ pkgs.consul ];
|
||||
|
||||
serviceConfig = {
|
||||
ExecStart = ''
|
||||
${cfg.alerts.package}/bin/consul-alerts start \
|
||||
--alert-addr=${cfg.alerts.listenAddr} \
|
||||
--consul-addr=${cfg.alerts.consulAddr} \
|
||||
${optionalString cfg.alerts.watchChecks "--watch-checks"} \
|
||||
${optionalString cfg.alerts.watchEvents "--watch-events"}
|
||||
'';
|
||||
User = if cfg.dropPrivileges then "consul" else null;
|
||||
Restart = "on-failure";
|
||||
};
|
||||
};
|
||||
})
|
||||
|
||||
]);
|
||||
}
|
||||
|
@@ -3,7 +3,7 @@
with lib;

let
-  quassel = pkgs.kde4.quasselDaemon;
+  quassel = pkgs.quasselDaemon_qt5;
  cfg = config.services.quassel;
  user = if cfg.user != null then cfg.user else "quassel";
in
nixos/modules/services/networking/skydns.nix (new file, 91 lines)
@ -0,0 +1,91 @@
|
||||
{ config, pkgs, lib, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
cfg = config.services.skydns;
|
||||
|
||||
in {
|
||||
options.services.skydns = {
|
||||
enable = mkEnableOption "Whether to enable skydns service.";
|
||||
|
||||
etcd = {
|
||||
machines = mkOption {
|
||||
default = [ "http://localhost:4001" ];
|
||||
type = types.listOf types.str;
|
||||
description = "Skydns list of etcd endpoints to connect to.";
|
||||
};
|
||||
|
||||
tlsKey = mkOption {
|
||||
default = null;
|
||||
type = types.nullOr types.path;
|
||||
description = "Skydns path of TLS client certificate - private key.";
|
||||
};
|
||||
|
||||
tlsPem = mkOption {
|
||||
default = null;
|
||||
type = types.nullOr types.path;
|
||||
description = "Skydns path of TLS client certificate - public key.";
|
||||
};
|
||||
|
||||
caCert = mkOption {
|
||||
default = null;
|
||||
type = types.nullOr types.path;
|
||||
description = "Skydns path of TLS certificate authority public key.";
|
||||
};
|
||||
};
|
||||
|
||||
address = mkOption {
|
||||
default = "0.0.0.0:53";
|
||||
type = types.str;
|
||||
description = "Skydns address to bind to.";
|
||||
};
|
||||
|
||||
domain = mkOption {
|
||||
default = "skydns.local.";
|
||||
type = types.str;
|
||||
description = "Skydns default domain if not specified by etcd config.";
|
||||
};
|
||||
|
||||
nameservers = mkOption {
|
||||
default = map (n: n + ":53") config.networking.nameservers;
|
||||
type = types.listOf types.str;
|
||||
description = "Skydns list of nameservers to forward DNS requests to when not authoritative for a domain.";
|
||||
example = ["8.8.8.8:53" "8.8.4.4:53"];
|
||||
};
|
||||
|
||||
package = mkOption {
|
||||
default = pkgs.goPackages.skydns;
|
||||
type = types.package;
|
||||
description = "Skydns package to use.";
|
||||
};
|
||||
|
||||
extraConfig = mkOption {
|
||||
default = {};
|
||||
type = types.attrsOf types.str;
|
||||
description = "Skydns attribute set of extra config options passed as environemnt variables.";
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf (cfg.enable) {
|
||||
systemd.services.skydns = {
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
after = [ "network.target" "etcd.service" ];
|
||||
description = "Skydns Service";
|
||||
environment = {
|
||||
ETCD_MACHINES = concatStringsSep "," cfg.etcd.machines;
|
||||
ETCD_TLSKEY = cfg.etcd.tlsKey;
|
||||
ETCD_TLSPEM = cfg.etcd.tlsPem;
|
||||
ETCD_CACERT = cfg.etcd.caCert;
|
||||
SKYDNS_ADDR = cfg.address;
|
||||
SKYDNS_DOMAIN = cfg.domain;
|
||||
SKYDNS_NAMESERVER = concatStringsSep "," cfg.nameservers;
|
||||
};
|
||||
serviceConfig = {
|
||||
ExecStart = "${cfg.package}/bin/skydns";
|
||||
};
|
||||
};
|
||||
|
||||
environment.systemPackages = [ cfg.package ];
|
||||
};
|
||||
}
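A minimal sketch of the new service, pointing it at a local etcd and forwarding everything else to public resolvers; all option names are taken from the file above:

  # configuration.nix (illustrative)
  services.skydns = {
    enable = true;
    etcd.machines = [ "http://localhost:4001" ];   # default
    domain = "skydns.local.";
    nameservers = [ "8.8.8.8:53" "8.8.4.4:53" ];
  };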
|
@@ -104,7 +104,7 @@ in
    };

    background = mkOption {
-     default = "${pkgs.nixos-artwork}/gnome/Gnome_Dark.png";
+     default = "${pkgs.nixos-artwork}/share/artwork/gnome/Gnome_Dark.png";
      description = ''
        The background image or color to use.
      '';
@@ -159,7 +159,7 @@ in

  boot.kernel.sysctl."kernel.printk" = config.boot.consoleLogLevel;

-  boot.kernelModules = [ "loop" "configs" ];
+  boot.kernelModules = [ "loop" "configs" "atkbd" ];

  boot.initrd.availableKernelModules =
    [ # Note: most of these (especially the SATA/PATA modules)
@ -28,7 +28,7 @@ let
|
||||
f = x: if x == null then "" else "" + x;
|
||||
|
||||
grubConfig = args: pkgs.writeText "grub-config.xml" (builtins.toXML
|
||||
{ splashImage = f config.boot.loader.grub.splashImage;
|
||||
{ splashImage = f cfg.splashImage;
|
||||
grub = f grub;
|
||||
grubTarget = f (grub.grubTarget or "");
|
||||
shell = "${pkgs.stdenv.shell}";
|
||||
@ -42,7 +42,7 @@ let
|
||||
inherit (cfg)
|
||||
version extraConfig extraPerEntryConfig extraEntries
|
||||
extraEntriesBeforeNixOS extraPrepareConfig configurationLimit copyKernels timeout
|
||||
default fsIdentifier efiSupport;
|
||||
default fsIdentifier efiSupport gfxmodeEfi gfxmodeBios;
|
||||
path = (makeSearchPath "bin" ([
|
||||
pkgs.coreutils pkgs.gnused pkgs.gnugrep pkgs.findutils pkgs.diffutils pkgs.btrfsProgs
|
||||
pkgs.utillinux ] ++ (if cfg.efiSupport && (cfg.version == 2) then [pkgs.efibootmgr ] else [])
|
||||
@ -242,6 +242,24 @@ in
|
||||
'';
|
||||
};
|
||||
|
||||
gfxmodeEfi = mkOption {
|
||||
default = "auto";
|
||||
example = "1024x768";
|
||||
type = types.str;
|
||||
description = ''
|
||||
The gfxmode to pass to grub when loading a graphical boot interface under efi.
|
||||
'';
|
||||
};
|
||||
|
||||
gfxmodeBios = mkOption {
|
||||
default = "1024x768";
|
||||
example = "auto";
|
||||
type = types.str;
|
||||
description = ''
|
||||
The gfxmode to pass to grub when loading a graphical boot interface under bios.
|
||||
'';
|
||||
};
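A hedged example of overriding the two new gfxmode options in configuration.nix; the device value is illustrative:

  # configuration.nix (illustrative)
  boot.loader.grub = {
    enable = true;
    device = "/dev/sda";          # illustrative
    gfxmodeEfi = "1024x768";      # used when booted via EFI
    gfxmodeBios = "auto";         # used on legacy BIOS boots
  };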
|
||||
|
||||
configurationLimit = mkOption {
|
||||
default = 100;
|
||||
example = 120;
|
||||
@ -337,7 +355,7 @@ in
|
||||
sha256 = "14kqdx2lfqvh40h6fjjzqgff1mwk74dmbjvmqphi6azzra7z8d59";
|
||||
}
|
||||
# GRUB 1.97 doesn't support gzipped XPMs.
|
||||
else "${pkgs.nixos-artwork}/gnome/Gnome_Dark.png");
|
||||
else "${pkgs.nixos-artwork}/share/artwork/gnome/Gnome_Dark.png");
|
||||
}
|
||||
|
||||
(mkIf cfg.enable {
|
||||
|
@ -57,6 +57,8 @@ my $grubTargetEfi = get("grubTargetEfi");
|
||||
my $bootPath = get("bootPath");
|
||||
my $canTouchEfiVariables = get("canTouchEfiVariables");
|
||||
my $efiSysMountPoint = get("efiSysMountPoint");
|
||||
my $gfxmodeEfi = get("gfxmodeEfi");
|
||||
my $gfxmodeBios = get("gfxmodeBios");
|
||||
$ENV{'PATH'} = get("path");
|
||||
|
||||
die "unsupported GRUB version\n" if $grubVersion != 1 && $grubVersion != 2;
|
||||
@ -255,14 +257,22 @@ else {
|
||||
fi
|
||||
|
||||
# Setup the graphics stack for bios and efi systems
|
||||
insmod vbe
|
||||
insmod efi_gop
|
||||
insmod efi_uga
|
||||
if [ \"\${grub_platform}\" = \"efi\" ]; then
|
||||
insmod efi_gop
|
||||
insmod efi_uga
|
||||
else
|
||||
insmod vbe
|
||||
fi
|
||||
insmod font
|
||||
if loadfont " . $grubBoot->path . "/grub/fonts/unicode.pf2; then
|
||||
insmod gfxterm
|
||||
set gfxmode=auto
|
||||
set gfxpayload=keep
|
||||
if [ \"\${grub_platform}\" = \"efi\" ]; then
|
||||
set gfxmode=$gfxmodeEfi
|
||||
set gfxpayload=keep
|
||||
else
|
||||
set gfxmode=$gfxmodeBios
|
||||
set gfxpayload=text
|
||||
fi
|
||||
terminal_output gfxterm
|
||||
fi
|
||||
";
|
||||
|
@ -61,8 +61,8 @@ let
|
||||
idx=2
|
||||
extraDisks=""
|
||||
${flip concatMapStrings cfg.emptyDiskImages (size: ''
|
||||
${pkgs.qemu_kvm}/bin/qemu-img create -f raw "empty$idx" "${toString size}M"
|
||||
extraDisks="$extraDisks -drive index=$idx,file=$(pwd)/empty$idx,if=virtio,werror=report"
|
||||
${pkgs.qemu_kvm}/bin/qemu-img create -f qcow2 "empty$idx.qcow2" "${toString size}M"
|
||||
extraDisks="$extraDisks -drive index=$idx,file=$(pwd)/empty$idx.qcow2,if=virtio,werror=report"
|
||||
idx=$((idx + 1))
|
||||
'')}
|
||||
|
||||
@ -83,7 +83,7 @@ let
|
||||
'' else ''
|
||||
''}
|
||||
'' else ''
|
||||
-drive file=$NIX_DISK_IMAGE,if=virtio,cache=writeback,werror=report \
|
||||
-drive index=0,id=drive1,file=$NIX_DISK_IMAGE,if=virtio,cache=writeback,werror=report \
|
||||
-kernel ${config.system.build.toplevel}/kernel \
|
||||
-initrd ${config.system.build.toplevel}/initrd \
|
||||
-append "$(cat ${config.system.build.toplevel}/kernel-params) init=${config.system.build.toplevel}/init regInfo=${regInfo} ${kernelConsole} $QEMU_KERNEL_PARAMS" \
|
||||
@ -165,7 +165,7 @@ let
|
||||
${config.system.build.toplevel}/bin/switch-to-configuration boot
|
||||
|
||||
umount /boot
|
||||
''
|
||||
'' # */
|
||||
);
|
||||
|
||||
in
|
||||
@ -204,17 +204,25 @@ in
|
||||
'';
|
||||
};
|
||||
|
||||
virtualisation.bootDevice =
|
||||
mkOption {
|
||||
type = types.str;
|
||||
default = "/dev/vda";
|
||||
description =
|
||||
''
|
||||
The disk to be used for the root filesystem.
|
||||
'';
|
||||
};
|
||||
|
||||
virtualisation.emptyDiskImages =
|
||||
mkOption {
|
||||
default = [];
|
||||
type = types.listOf types.int;
|
||||
description =
|
||||
''
|
||||
Additional disk images to provide to the VM, the value is a list of
|
||||
sizes in megabytes the empty disk should be.
|
||||
|
||||
These disks are writeable by the VM and will be thrown away
|
||||
afterwards.
|
||||
Additional disk images to provide to the VM. The value is
|
||||
a list of size in megabytes of each disk. These disks are
|
||||
writeable by the VM.
|
||||
'';
|
||||
};
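A sketch combining the two options shown above inside a VM configuration (e.g. for nixos-rebuild build-vm or a NixOS test):

  # configuration.nix (illustrative)
  virtualisation = {
    bootDevice = "/dev/vda";         # default root disk
    emptyDiskImages = [ 512 2048 ];  # two scratch qcow2 disks, sizes in MB
  };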
|
||||
|
||||
@ -341,7 +349,7 @@ in

config = {

boot.loader.grub.device = mkVMOverride "/dev/vda";
boot.loader.grub.device = mkVMOverride cfg.bootDevice;

boot.initrd.extraUtilsCommands =
''
@ -353,9 +361,9 @@ in
''
# If the disk image appears to be empty, run mke2fs to
# initialise.
FSTYPE=$(blkid -o value -s TYPE /dev/vda || true)
FSTYPE=$(blkid -o value -s TYPE ${cfg.bootDevice} || true)
if test -z "$FSTYPE"; then
mke2fs -t ext4 /dev/vda
mke2fs -t ext4 ${cfg.bootDevice}
fi
'';

@ -396,7 +404,7 @@ in
# attribute should be disregarded for the purpose of building a VM
# test image (since those filesystems don't exist in the VM).
fileSystems = mkVMOverride (
{ "/".device = "/dev/vda";
{ "/".device = cfg.bootDevice;
${if cfg.writableStore then "/nix/.ro-store" else "/nix/store"} =
{ device = "store";
fsType = "9p";
@ -62,6 +62,7 @@ in rec {
(all nixos.tests.installer.btrfsSimple)
(all nixos.tests.installer.btrfsSubvols)
(all nixos.tests.installer.btrfsSubvolDefault)
(all nixos.tests.bootBiosCdrom)
(all nixos.tests.ipv6)
(all nixos.tests.kde4)
(all nixos.tests.lightdm)
@ -256,7 +256,6 @@ in rec {
tests.installer.grub1 = forAllSystems (system: hydraJob (import tests/installer.nix { inherit system; }).grub1.test);
tests.installer.lvm = forAllSystems (system: hydraJob (import tests/installer.nix { inherit system; }).lvm.test);
tests.installer.luksroot = forAllSystems (system: hydraJob (import tests/installer.nix { inherit system; }).luksroot.test);
tests.installer.rebuildCD = forAllSystems (system: hydraJob (import tests/installer.nix { inherit system; }).rebuildCD.test);
tests.installer.separateBoot = forAllSystems (system: hydraJob (import tests/installer.nix { inherit system; }).separateBoot.test);
tests.installer.simple = forAllSystems (system: hydraJob (import tests/installer.nix { inherit system; }).simple.test);
tests.installer.simpleLabels = forAllSystems (system: hydraJob (import tests/installer.nix { inherit system; }).simpleLabels.test);
@ -6,46 +6,9 @@ with pkgs.lib;
|
||||
|
||||
let
|
||||
|
||||
# Build the ISO. This is the regular minimal installation CD but
|
||||
# with test instrumentation.
|
||||
iso =
|
||||
(import ../lib/eval-config.nix {
|
||||
inherit system;
|
||||
modules =
|
||||
[ ../modules/installer/cd-dvd/installation-cd-minimal.nix
|
||||
../modules/testing/test-instrumentation.nix
|
||||
{ key = "serial";
|
||||
boot.loader.grub.timeout = mkOverride 0 0;
|
||||
|
||||
# The test cannot access the network, so any sources we
|
||||
# need must be included in the ISO.
|
||||
isoImage.storeContents =
|
||||
[ pkgs.glibcLocales
|
||||
pkgs.sudo
|
||||
pkgs.docbook5
|
||||
pkgs.docbook5_xsl
|
||||
pkgs.unionfs-fuse
|
||||
|
||||
# Bootloader support
|
||||
pkgs.grub
|
||||
pkgs.grub2
|
||||
pkgs.grub2_efi
|
||||
pkgs.gummiboot
|
||||
pkgs.perlPackages.XMLLibXML
|
||||
pkgs.perlPackages.ListCompare
|
||||
];
|
||||
|
||||
# Don't use https://cache.nixos.org since the fake
|
||||
# cache.nixos.org doesn't do https.
|
||||
nix.binaryCaches = [ http://cache.nixos.org/ ];
|
||||
}
|
||||
];
|
||||
}).config.system.build.isoImage;
|
||||
|
||||
|
||||
# The configuration to install.
|
||||
makeConfig = { testChannel, grubVersion, grubDevice, grubIdentifier
|
||||
, extraConfig, readOnly ? true, forceGrubReinstallCount ? 0
|
||||
makeConfig = { grubVersion, grubDevice, grubIdentifier
|
||||
, extraConfig, forceGrubReinstallCount ? 0
|
||||
}:
|
||||
pkgs.writeText "configuration.nix" ''
|
||||
{ config, lib, pkgs, modulesPath, ... }:
|
||||
@ -53,7 +16,6 @@ let
|
||||
{ imports =
|
||||
[ ./hardware-configuration.nix
|
||||
<nixpkgs/nixos/modules/testing/test-instrumentation.nix>
|
||||
<nixpkgs/nixos/modules/profiles/minimal.nix>
|
||||
];
|
||||
|
||||
boot.loader.grub.version = ${toString grubVersion};
|
||||
@ -66,96 +28,39 @@ let
|
||||
|
||||
boot.loader.grub.configurationLimit = 100 + ${toString forceGrubReinstallCount};
|
||||
|
||||
${optionalString (!readOnly) "nix.readOnlyStore = false;"}
|
||||
hardware.enableAllFirmware = lib.mkForce false;
|
||||
|
||||
environment.systemPackages = [ ${optionalString testChannel "pkgs.rlwrap"} ];
|
||||
|
||||
nix.binaryCaches = [ http://cache.nixos.org/ ];
|
||||
${replaceChars ["\n"] ["\n "] extraConfig}
|
||||
}
|
||||
'';
|
||||
|
||||
|
||||
# Configuration of a web server that simulates the Nixpkgs channel
|
||||
# distribution server.
|
||||
webserver =
|
||||
{ config, lib, pkgs, ... }:
|
||||
|
||||
{ services.httpd.enable = true;
|
||||
services.httpd.adminAddr = "foo@example.org";
|
||||
services.httpd.servedDirs = singleton
|
||||
{ urlPath = "/";
|
||||
dir = "/tmp/channel";
|
||||
};
|
||||
|
||||
virtualisation.writableStore = true;
|
||||
virtualisation.pathsInNixDB = channelContents ++ [ pkgs.hello.src ];
|
||||
virtualisation.memorySize = 768;
|
||||
|
||||
networking.firewall.allowedTCPPorts = [ 80 ];
|
||||
};
|
||||
|
||||
channelContents = [ pkgs.rlwrap ];
|
||||
|
||||
|
||||
# The test script boots the CD, installs NixOS on an empty hard
|
||||
# The test script boots a NixOS VM, installs NixOS on an empty hard
|
||||
# disk, and then reboot from the hard disk. It's parameterized with
|
||||
# a test script fragment `createPartitions', which must create
|
||||
# partitions and filesystems.
|
||||
testScriptFun = { createPartitions, testChannel, grubVersion, grubDevice
|
||||
testScriptFun = { createPartitions, grubVersion, grubDevice
|
||||
, grubIdentifier, preBootCommands, extraConfig
|
||||
}:
|
||||
let
|
||||
# FIXME: OVMF doesn't boot from virtio http://www.mail-archive.com/edk2-devel@lists.sourceforge.net/msg01501.html
|
||||
iface = if grubVersion == 1 then "scsi" else "virtio";
|
||||
qemuFlags =
|
||||
(if iso.system == "x86_64-linux" then "-m 768 " else "-m 512 ") +
|
||||
(optionalString (iso.system == "x86_64-linux") "-cpu kvm64 ");
|
||||
hdFlags =''hda => "harddisk", hdaInterface => "${iface}", '';
|
||||
(if system == "x86_64-linux" then "-m 768 " else "-m 512 ") +
|
||||
(optionalString (system == "x86_64-linux") "-cpu kvm64 ");
|
||||
hdFlags = ''hda => "vm-state-machine/machine.qcow2", hdaInterface => "${iface}", '';
|
||||
in
|
||||
''
|
||||
createDisk("harddisk", 8 * 1024);
|
||||
|
||||
my $machine = createMachine({ ${hdFlags}
|
||||
cdrom => glob("${iso}/iso/*.iso"),
|
||||
qemuFlags => "${qemuFlags} " . '${optionalString testChannel (toString (qemuNICFlags 1 1 2))}' });
|
||||
$machine->start;
|
||||
|
||||
${optionalString testChannel ''
|
||||
# Create a channel on the web server containing a few packages
|
||||
# to simulate the Nixpkgs channel.
|
||||
$webserver->start;
|
||||
$webserver->waitForUnit("httpd");
|
||||
$webserver->succeed(
|
||||
"nix-push --bzip2 --dest /tmp/channel --manifest --url-prefix http://nixos.org/channels/nixos-unstable " .
|
||||
"${toString channelContents} >&2");
|
||||
$webserver->succeed("mkdir /tmp/channel/sha256");
|
||||
$webserver->succeed("cp ${pkgs.hello.src} /tmp/channel/sha256/${pkgs.hello.src.outputHash}");
|
||||
''}
|
||||
|
||||
# Make sure that we get a login prompt etc.
|
||||
$machine->succeed("echo hello");
|
||||
#$machine->waitForUnit('getty@tty2');
|
||||
$machine->waitForUnit("rogue");
|
||||
$machine->waitForUnit("nixos-manual");
|
||||
|
||||
${optionalString testChannel ''
|
||||
$machine->waitForUnit("dhcpcd");
|
||||
|
||||
# Allow the machine to talk to the fake nixos.org.
|
||||
$machine->succeed(
|
||||
"rm /etc/hosts",
|
||||
"echo 192.168.1.1 nixos.org cache.nixos.org tarballs.nixos.org > /etc/hosts",
|
||||
"ifconfig eth1 up 192.168.1.2",
|
||||
);
|
||||
|
||||
# Test nix-env.
|
||||
$machine->fail("hello");
|
||||
$machine->succeed("nix-env -i hello");
|
||||
$machine->succeed("hello") =~ /Hello, world/
|
||||
or die "bad `hello' output";
|
||||
''}
|
||||
|
||||
# Wait for hard disks to appear in /dev
|
||||
$machine->succeed("udevadm settle");
|
||||
|
||||
@ -163,14 +68,12 @@ let
|
||||
${createPartitions}
|
||||
|
||||
# Create the NixOS configuration.
|
||||
$machine->succeed(
|
||||
"nixos-generate-config --root /mnt",
|
||||
);
|
||||
$machine->succeed("nixos-generate-config --root /mnt");
|
||||
|
||||
$machine->succeed("cat /mnt/etc/nixos/hardware-configuration.nix >&2");
|
||||
|
||||
$machine->copyFileFromHost(
|
||||
"${ makeConfig { inherit testChannel grubVersion grubDevice grubIdentifier extraConfig; } }",
|
||||
"${ makeConfig { inherit grubVersion grubDevice grubIdentifier extraConfig; } }",
|
||||
"/mnt/etc/nixos/configuration.nix");
|
||||
|
||||
# Perform the installation.
|
||||
@ -188,7 +91,7 @@ let
|
||||
# Now see if we can boot the installation.
|
||||
$machine = createMachine({ ${hdFlags} qemuFlags => "${qemuFlags}" });
|
||||
|
||||
# For example to enter LUKS passphrase
|
||||
# For example to enter LUKS passphrase.
|
||||
${preBootCommands}
|
||||
|
||||
# Did /boot get mounted?
|
||||
@ -209,9 +112,9 @@ let
|
||||
$machine->succeed("type -tP ls | tee /dev/stderr") =~ /.nix-profile/
|
||||
or die "nix-env failed";
|
||||
|
||||
# We need to a writable nix-store on next boot
|
||||
# We need to a writable nix-store on next boot.
|
||||
$machine->copyFileFromHost(
|
||||
"${ makeConfig { inherit testChannel grubVersion grubDevice grubIdentifier extraConfig; readOnly = false; forceGrubReinstallCount = 1; } }",
|
||||
"${ makeConfig { inherit grubVersion grubDevice grubIdentifier extraConfig; forceGrubReinstallCount = 1; } }",
|
||||
"/etc/nixos/configuration.nix");
|
||||
|
||||
# Check whether nixos-rebuild works.
|
||||
@ -220,7 +123,7 @@ let
|
||||
# Test nixos-option.
|
||||
$machine->succeed("nixos-option boot.initrd.kernelModules | grep virtio_console");
|
||||
$machine->succeed("nixos-option boot.initrd.kernelModules | grep 'List of modules'");
|
||||
$machine->succeed("nixos-option boot.initrd.kernelModules | grep qemu-guest.nix");
|
||||
$machine->succeed("nixos-option boot.initrd.kernelModules | grep qemu-guest.nix");
|
||||
|
||||
$machine->shutdown;
|
||||
|
||||
@ -229,7 +132,7 @@ let
|
||||
${preBootCommands}
|
||||
$machine->waitForUnit("multi-user.target");
|
||||
$machine->copyFileFromHost(
|
||||
"${ makeConfig { inherit testChannel grubVersion grubDevice grubIdentifier extraConfig; readOnly = false; forceGrubReinstallCount = 2; } }",
|
||||
"${ makeConfig { inherit grubVersion grubDevice grubIdentifier extraConfig; forceGrubReinstallCount = 2; } }",
|
||||
"/etc/nixos/configuration.nix");
|
||||
$machine->succeed("nixos-rebuild boot >&2");
|
||||
$machine->shutdown;
|
||||
@ -245,16 +148,60 @@ let
|
||||
|
||||
makeInstallerTest = name:
|
||||
{ createPartitions, preBootCommands ? "", extraConfig ? ""
|
||||
, testChannel ? false, grubVersion ? 2, grubDevice ? "/dev/vda"
|
||||
, grubVersion ? 2, grubDevice ? "/dev/vda"
|
||||
, grubIdentifier ? "uuid", enableOCR ? false
|
||||
}:
|
||||
makeTest {
|
||||
inherit iso;
|
||||
name = "installer-" + name;
|
||||
nodes = if testChannel then { inherit webserver; } else { };
|
||||
inherit enableOCR;
|
||||
name = "installer-" + name;
|
||||
|
||||
nodes = {
|
||||
|
||||
# The configuration of the machine used to run "nixos-install". It
|
||||
# also has a web server that simulates cache.nixos.org.
|
||||
machine =
|
||||
{ config, lib, pkgs, ... }:
|
||||
|
||||
{ imports =
|
||||
[ ../modules/profiles/installation-device.nix
|
||||
../modules/profiles/base.nix
|
||||
];
|
||||
|
||||
virtualisation.diskSize = 8 * 1024;
|
||||
virtualisation.memorySize = 768;
|
||||
virtualisation.writableStore = true;
|
||||
|
||||
# Use a small /dev/vdb as the root disk for the
|
||||
# installer. This ensures the target disk (/dev/vda) is
|
||||
# the same during and after installation.
|
||||
virtualisation.emptyDiskImages = [ 512 ];
|
||||
virtualisation.bootDevice = "/dev/vdb";
|
||||
|
||||
hardware.enableAllFirmware = mkForce false;
|
||||
|
||||
# The test cannot access the network, so any packages we
|
||||
# need must be included in the VM.
|
||||
system.extraDependencies =
|
||||
[ pkgs.sudo
|
||||
pkgs.docbook5
|
||||
pkgs.docbook5_xsl
|
||||
pkgs.unionfs-fuse
|
||||
pkgs.ntp
|
||||
pkgs.nixos-artwork
|
||||
pkgs.gummiboot
|
||||
pkgs.perlPackages.XMLLibXML
|
||||
pkgs.perlPackages.ListCompare
|
||||
]
|
||||
++ optional (grubVersion == 1) pkgs.grub
|
||||
++ optionals (grubVersion == 2) [ pkgs.grub2 pkgs.grub2_efi ];
|
||||
|
||||
nix.binaryCaches = mkForce [ ];
|
||||
};
|
||||
|
||||
};
|
||||
|
||||
testScript = testScriptFun {
|
||||
inherit createPartitions preBootCommands testChannel grubVersion
|
||||
inherit createPartitions preBootCommands grubVersion
|
||||
grubDevice grubIdentifier extraConfig;
|
||||
};
|
||||
};
|
||||
@ -281,7 +228,6 @@ in {
|
||||
"mount LABEL=nixos /mnt",
|
||||
);
|
||||
'';
|
||||
testChannel = true;
|
||||
};
|
||||
|
||||
# Same as the previous, but now with a separate /boot partition.
|
||||
@ -413,40 +359,11 @@ in {
|
||||
"mkfs.ext3 -L nixos /dev/sda2",
|
||||
"mount LABEL=nixos /mnt",
|
||||
);
|
||||
|
||||
'';
|
||||
grubVersion = 1;
|
||||
grubDevice = "/dev/sda";
|
||||
};
|
||||
|
||||
# Rebuild the CD configuration with a little modification.
|
||||
rebuildCD = makeTest
|
||||
{ inherit iso;
|
||||
name = "rebuild-cd";
|
||||
nodes = { };
|
||||
testScript =
|
||||
''
|
||||
my $machine = createMachine({ cdrom => glob("${iso}/iso/*.iso"), qemuFlags => '-m 768' });
|
||||
$machine->start;
|
||||
|
||||
# Enable sshd service.
|
||||
$machine->succeed(
|
||||
"sed -i 's,^}\$,systemd.services.sshd.wantedBy = pkgs.lib.mkOverride 0 [\"multi-user.target\"]; },' /etc/nixos/configuration.nix"
|
||||
);
|
||||
|
||||
$machine->succeed("cat /etc/nixos/configuration.nix >&2");
|
||||
|
||||
# Apply the new CD configuration.
|
||||
$machine->succeed("nixos-rebuild test");
|
||||
|
||||
# Connect to it-self.
|
||||
$machine->waitForUnit("sshd");
|
||||
$machine->waitForOpenPort(22);
|
||||
|
||||
$machine->shutdown;
|
||||
'';
|
||||
};
|
||||
|
||||
# Test using labels to identify volumes in grub
|
||||
simpleLabels = makeInstallerTest "simpleLabels" {
|
||||
createPartitions = ''
|
||||
@ -545,4 +462,5 @@ in {
|
||||
);
|
||||
'';
|
||||
};
|
||||
|
||||
}
|
||||
|
33  pkgs/applications/audio/AMB-plugins/default.nix  Normal file
@ -0,0 +1,33 @@
|
||||
{ stdenv, fetchurl, ladspaH
|
||||
}:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
name = "AMB-plugins-${version}";
|
||||
version = "0.8.1";
|
||||
src = fetchurl {
|
||||
url = "http://kokkinizita.linuxaudio.org/linuxaudio/downloads/${name}.tar.bz2";
|
||||
sha256 = "0x4blm4visjqj0ndqr0cg776v3b7lvplpc8cgi9n51llhavn0jpl";
|
||||
};
|
||||
|
||||
buildInputs = [ ladspaH ];
|
||||
|
||||
patchPhase = ''
|
||||
sed -i 's@/usr/bin/install@install@g' Makefile
|
||||
sed -i 's@/bin/rm@rm@g' Makefile
|
||||
sed -i 's@/usr/lib/ladspa@$(out)/lib/ladspa@g' Makefile
|
||||
'';
|
||||
|
||||
preInstall="mkdir -p $out/lib/ladspa";
|
||||
|
||||
meta = {
|
||||
description = ''A set of ambisonics ladspa plugins'';
|
||||
longDescription = ''
|
||||
Mono and stereo to B-format panning, horizontal rotator, square, hexagon and cube decoders.
|
||||
'';
|
||||
version = "${version}";
|
||||
homepage = http://kokkinizita.linuxaudio.org/linuxaudio/ladspa/index.html;
|
||||
license = stdenv.lib.licenses.gpl2Plus;
|
||||
maintainers = [ stdenv.lib.maintainers.magnetophon ];
|
||||
platforms = stdenv.lib.platforms.linux;
|
||||
};
|
||||
}
|
33  pkgs/applications/audio/CharacterCompressor/default.nix  Normal file
@ -0,0 +1,33 @@
|
||||
{ stdenv, fetchFromGitHub, faust2jack, faust2lv2 }:
|
||||
stdenv.mkDerivation rec {
|
||||
name = "CharacterCompressor-${version}";
|
||||
version = "0.2";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "magnetophon";
|
||||
repo = "CharacterCompressor";
|
||||
rev = "v${version}";
|
||||
sha256 = "0fvi8m4nshcxypn4jgxhnh7pxp68wshhav3k8wn3il7qpw71pdxi";
|
||||
};
|
||||
|
||||
buildInputs = [ faust2jack faust2lv2 ];
|
||||
|
||||
buildPhase = ''
|
||||
faust2jack -t 99999 CharacterCompressor.dsp
|
||||
faust2lv2 -t 99999 CharacterCompressor.dsp
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out/bin
|
||||
cp CharacterCompressor $out/bin/
|
||||
mkdir -p $out/lib/lv2
|
||||
cp -r CharacterCompressor.lv2/ $out/lib/lv2
|
||||
'';
|
||||
|
||||
meta = {
|
||||
description = "A compressor with character. For jack and lv2";
|
||||
homepage = https://github.com/magnetophon/CharacterCompressor;
|
||||
license = stdenv.lib.licenses.gpl3;
|
||||
maintainers = [ stdenv.lib.maintainers.magnetophon ];
|
||||
};
|
||||
}
|
39  pkgs/applications/audio/CompBus/default.nix  Normal file
@ -0,0 +1,39 @@
|
||||
{ stdenv, fetchFromGitHub, faust2jack, faust2lv2 }:
|
||||
stdenv.mkDerivation rec {
|
||||
name = "CompBus-${version}";
|
||||
version = "1.1.02";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "magnetophon";
|
||||
repo = "CompBus";
|
||||
rev = "v${version}";
|
||||
sha256 = "025vi60caxk3j2vxxrgbc59xlyr88vgn7k3127s271zvpyy7apwh";
|
||||
};
|
||||
|
||||
buildInputs = [ faust2jack faust2lv2 ];
|
||||
|
||||
buildPhase = ''
|
||||
for f in *.dsp;
|
||||
do
|
||||
faust2jack -t 99999 $f
|
||||
faust2lv2 -t 99999 $f
|
||||
done
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out/lib/lv2
|
||||
mv *.lv2/ $out/lib/lv2
|
||||
mkdir -p $out/bin
|
||||
for f in $(find . -executable -type f);
|
||||
do
|
||||
cp $f $out/bin/
|
||||
done
|
||||
'';
|
||||
|
||||
meta = {
|
||||
description = "A group of compressors mixed into a bus, sidechained from that mix bus. For jack and lv2";
|
||||
homepage = https://github.com/magnetophon/CompBus;
|
||||
license = stdenv.lib.licenses.gpl3;
|
||||
maintainers = [ stdenv.lib.maintainers.magnetophon ];
|
||||
};
|
||||
}
|
33  pkgs/applications/audio/RhythmDelay/default.nix  Normal file
@ -0,0 +1,33 @@
|
||||
{ stdenv, fetchFromGitHub, faust2jack, faust2lv2 }:
|
||||
stdenv.mkDerivation rec {
|
||||
name = "RhythmDelay-${version}";
|
||||
version = "2.0";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "magnetophon";
|
||||
repo = "RhythmDelay";
|
||||
rev = "v${version}";
|
||||
sha256 = "0n938nm08mf3lz92k6v07k1469xxzmfkgclw40jgdssfcfa16bn7";
|
||||
};
|
||||
|
||||
buildInputs = [ faust2jack faust2lv2 ];
|
||||
|
||||
buildPhase = ''
|
||||
faust2jack -t 99999 RhythmDelay.dsp
|
||||
faust2lv2 -t 99999 RhythmDelay.dsp
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out/bin
|
||||
cp RhythmDelay $out/bin/
|
||||
mkdir -p $out/lib/lv2
|
||||
cp -r RhythmDelay.lv2/ $out/lib/lv2
|
||||
'';
|
||||
|
||||
meta = {
|
||||
description = "Tap a rhythm into your delay! For jack and lv2";
|
||||
homepage = https://github.com/magnetophon/RhythmDelay;
|
||||
license = stdenv.lib.licenses.gpl3;
|
||||
maintainers = [ stdenv.lib.maintainers.magnetophon ];
|
||||
};
|
||||
}
|
@ -1,10 +1,10 @@
{ stdenv, fetchurl }:
stdenv.mkDerivation rec {
name = "caps-${version}";
version = "0.9.16";
version = "0.9.24";
src = fetchurl {
url = "http://www.quitte.de/dsp/caps_${version}.tar.bz2";
sha256 = "117l04w2zwqak856lihmaxg6f22vlz71knpxy0axiyri0x82lbwv";
sha256 = "081zx0i2ysw5nmy03j60q9j11zdlg1fxws81kwanncdgayxgwipp";
};
configurePhase = ''
echo "PREFIX = $out" > defines.make
33  pkgs/applications/audio/constant-detune-chorus/default.nix  Normal file
@ -0,0 +1,33 @@
|
||||
{ stdenv, fetchFromGitHub, faust2jack, faust2lv2 }:
|
||||
stdenv.mkDerivation rec {
|
||||
name = "constant-detune-chorus-${version}";
|
||||
version = "0.1.01";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "magnetophon";
|
||||
repo = "constant-detune-chorus";
|
||||
rev = "v${version}";
|
||||
sha256 = "1z8aj1a36ix9jizk9wl06b3i98hrkg47qxqp8vx930r624pc5z86";
|
||||
};
|
||||
|
||||
buildInputs = [ faust2jack faust2lv2 ];
|
||||
|
||||
buildPhase = ''
|
||||
faust2jack -t 99999 constant-detune-chorus.dsp
|
||||
faust2lv2 -t 99999 constant-detune-chorus.dsp
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out/bin
|
||||
cp constant-detune-chorus $out/bin/
|
||||
mkdir -p $out/lib/lv2
|
||||
cp -r constant-detune-chorus.lv2/ $out/lib/lv2
|
||||
'';
|
||||
|
||||
meta = {
|
||||
description = "A chorus algorithm that maintains constant and symmetric detuning depth (in cents), regardless of modulation rate. For jack and lv2";
|
||||
homepage = https://github.com/magnetophon/constant-detune-chorus;
|
||||
license = stdenv.lib.licenses.gpl3;
|
||||
maintainers = [ stdenv.lib.maintainers.magnetophon ];
|
||||
};
|
||||
}
|
@ -9,13 +9,21 @@ stdenv.mkDerivation rec {
|
||||
sha256 = "1shbyp54q64g6bsl6hhch58k3z1dyyy9ph6cq2xvdf8syy00sisz";
|
||||
};
|
||||
buildInputs = [ libsndfile libsamplerate liblo jack2 ];
|
||||
postPatch = ''
|
||||
sed -i "s|./samples|$out/share/dirt/samples|" file.h
|
||||
'';
|
||||
configurePhase = ''
|
||||
export DESTDIR=$out
|
||||
'';
|
||||
postInstall = ''
|
||||
mkdir -p $out/share/dirt/
|
||||
cp -r samples $out/share/dirt/
|
||||
'';
|
||||
|
||||
meta = {
|
||||
meta = with stdenv.lib; {
|
||||
description = "An unimpressive thingie for playing bits of samples with some level of accuracy";
|
||||
homepage = "https://github.com/tidalcycles/Dirt";
|
||||
license = stdenv.lib.licenses.gpl3;
|
||||
license = licenses.gpl3;
|
||||
maintainers = with maintainers; [ anderspapitto ];
|
||||
};
|
||||
}
|
||||
|
@ -1,12 +1,12 @@
{ stdenv, fetchurl, mpd_clientlib }:

stdenv.mkDerivation rec {
version = "0.26";
version = "0.27";
name = "mpc-${version}";

src = fetchurl {
url = "http://www.musicpd.org/download/mpc/0/${name}.tar.xz";
sha256 = "0hp2qv6w2v902dhrmck5hg32s1ai6xiv9n61a3n6prfcfdqmywr0";
sha256 = "0r10wsqxsi07gns6mfnicvpci0sbwwj4qa9iyr1ysrgadl5bx8j5";
};

buildInputs = [ mpd_clientlib ];
@ -3,7 +3,7 @@
|
||||
{ stdenv, fetchurl, alsaLib, bzip2, fftw, jack2, libX11, liblo
|
||||
, libmad, libogg, librdf, librdf_raptor, librdf_rasqal, libsamplerate
|
||||
, libsndfile, pkgconfig, libpulseaudio, qt5, redland
|
||||
, rubberband, serd, sord, vampSDK
|
||||
, rubberband, serd, sord, vampSDK, fftwFloat
|
||||
}:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
@ -16,7 +16,7 @@ stdenv.mkDerivation rec {
|
||||
};
|
||||
|
||||
buildInputs =
|
||||
[ libsndfile qt5.base fftw /* should be fftw3f ??*/ bzip2 librdf rubberband
|
||||
[ libsndfile qt5.base fftw fftwFloat bzip2 librdf rubberband
|
||||
libsamplerate vampSDK alsaLib librdf_raptor librdf_rasqal redland
|
||||
serd
|
||||
sord
|
||||
|
@ -1,10 +1,12 @@
{ stdenv, fetchgit }:
{ stdenv, fetchFromGitHub }:

stdenv.mkDerivation {
name = "wavegain-1.3.1";
src = fetchgit {
url = "https://github.com/MestreLion/wavegain.git";
sha256 = "1h886xijc9d7h4p6qx12c6kgwmp6s1bdycnyylkayfncczzlbi24";
src = fetchFromGitHub {
owner = "MestreLion";
repo = "wavegain";
rev = "c928eaf97aeec5732625491b64c882e08e314fee";
sha256 = "0wghqnsbypmr4xcrhb568bfjdnxzzp8qgnws3jslzmzf34dpk5ls";
};

installPhase = ''
@ -17,6 +19,6 @@ stdenv.mkDerivation {
homepage = https://github.com/MestreLion/wavegain;
license = stdenv.lib.licenses.lgpl21;
platforms = stdenv.lib.platforms.linux;
maintainers = [ stdenv.lib.maintainers.devhell ];
maintainers = [ stdenv.lib.maintainers.robbinch ];
};
}
@ -1,7 +1,7 @@
|
||||
{ stdenv, fetchurl, ncurses, x11, libXaw, libXpm, Xaw3d
|
||||
, pkgconfig, gtk, libXft, dbus, libpng, libjpeg, libungif
|
||||
, libtiff, librsvg, texinfo, gconf, libxml2, imagemagick, gnutls
|
||||
, alsaLib, cairo
|
||||
, alsaLib, cairo, acl, gpm
|
||||
, withX ? !stdenv.isDarwin
|
||||
, withGTK3 ? false, gtk3 ? null
|
||||
, withGTK2 ? true, gtk2
|
||||
@ -36,7 +36,7 @@ stdenv.mkDerivation rec {
|
||||
];
|
||||
|
||||
buildInputs =
|
||||
[ ncurses gconf libxml2 gnutls alsaLib pkgconfig texinfo ]
|
||||
[ ncurses gconf libxml2 gnutls alsaLib pkgconfig texinfo acl gpm ]
|
||||
++ stdenv.lib.optional stdenv.isLinux dbus
|
||||
++ stdenv.lib.optionals withX
|
||||
[ x11 libXaw Xaw3d libXpm libpng libjpeg libungif libtiff librsvg libXft
|
||||
|
@ -1,6 +1,7 @@
|
||||
{ stdenv, fetchurl, fetchgit
|
||||
{ stdenv, fetchurl, fetchgit, fetchNuGet
|
||||
, autoconf, automake, pkgconfig, shared_mime_info, intltool
|
||||
, glib, mono, gtk-sharp, gnome, gnome-sharp, unzip
|
||||
, dotnetPackages
|
||||
}:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
@ -13,79 +14,33 @@ stdenv.mkDerivation rec {
|
||||
sha256 = "1bgqvlfi6pilj2zxsviqilh63qq98wsijqdiqwpkqchcw741zlyn";
|
||||
};
|
||||
|
||||
srcNugetBinary = fetchgit {
|
||||
url = "https://github.com/mono/nuget-binary.git";
|
||||
rev = "da1f2102f8172df6f7a1370a4998e3f88b91c047";
|
||||
sha256 = "1hbnckc4gvqkknf8gh1k7iwqb4vdzifdjd19i60fnczly5v8m1c3";
|
||||
};
|
||||
|
||||
srcNUnit = fetchurl {
|
||||
url = "https://www.nuget.org/api/v2/package/NUnit/2.6.3";
|
||||
sha256 = "0bb16i4ggwz32wkxsh485wf014cqqzhbyx0b3wbpmqjw7p4canph";
|
||||
};
|
||||
|
||||
srcNUnitRunners = fetchurl {
|
||||
url = "https://www.nuget.org/api/v2/package/NUnit.Runners/2.6.3";
|
||||
sha256 = "0qwx1i9lxkp9pijj2bsczzgsamz651hngkxraqjap1v4m7d09a3b";
|
||||
};
|
||||
|
||||
srcNUnit2510 = fetchurl {
|
||||
nunit2510 = fetchurl {
|
||||
url = "http://launchpad.net/nunitv2/2.5/2.5.10/+download/NUnit-2.5.10.11092.zip";
|
||||
sha256 = "0k5h5bz1p2v3d0w0hpkpbpvdkcszgp8sr9ik498r1bs72w5qlwnc";
|
||||
};
|
||||
|
||||
srcNugetSystemWebMvcExtensions = fetchurl {
|
||||
url = https://www.nuget.org/api/v2/package/System.Web.Mvc.Extensions.Mvc.4/1.0.9;
|
||||
sha256 = "19wi662m8primpimzifv8k560m6ymm73z0mf1r8ixl0xqag1hx6j";
|
||||
};
|
||||
|
||||
srcNugetMicrosoftAspNetMvc = fetchurl {
|
||||
url = https://www.nuget.org/api/v2/package/Microsoft.AspNet.Mvc/5.2.2;
|
||||
sha256 = "1jwfmz42kw2yb1g2hgp2h34fc4wx6s8z71da3mw5i4ivs25w9n2b";
|
||||
};
|
||||
|
||||
srcNugetMicrosoftAspNetRazor = fetchurl {
|
||||
url = https://www.nuget.org/api/v2/package/Microsoft.AspNet.Razor/3.2.2;
|
||||
sha256 = "1db3apn4vzz1bx6q5fyv6nyx0drz095xgazqbw60qnhfs7z45axd";
|
||||
};
|
||||
|
||||
srcNugetMicrosoftAspNetWebPages = fetchurl {
|
||||
url = https://www.nuget.org/api/v2/package/Microsoft.AspNet.WebPages/3.2.2;
|
||||
sha256 = "17fwb5yj165sql80i47zirjnm0gr4n8ypz408mz7p8a1n40r4i5l";
|
||||
};
|
||||
|
||||
srcNugetMicrosoftWebInfrastructure = fetchurl {
|
||||
url = https://www.nuget.org/api/v2/package/Microsoft.Web.Infrastructure/1.0.0.0;
|
||||
sha256 = "1mxl9dri5729d0jl84gkpqifqf4xzb6aw1rzcfh6l0r24bix9afn";
|
||||
};
|
||||
|
||||
postPatch = ''
|
||||
# From https://bugzilla.xamarin.com/show_bug.cgi?id=23696#c19
|
||||
|
||||
# it seems parts of MonoDevelop 5.2+ need NUnit 2.6.4, which isn't included
|
||||
# (?), so download it and put it in the right place in the tree
|
||||
mkdir packages
|
||||
unzip ${srcNUnit} -d packages/NUnit.2.6.3
|
||||
unzip ${srcNUnitRunners} -d packages/NUnit.Runners.2.6.3
|
||||
|
||||
# cecil needs NUnit 2.5.10 - this is also missing from the tar
|
||||
unzip -j ${srcNUnit2510} -d external/cecil/Test/libs/nunit-2.5.10 NUnit-2.5.10.11092/bin/net-2.0/framework/\*
|
||||
unzip -j ${nunit2510} -d external/cecil/Test/libs/nunit-2.5.10 NUnit-2.5.10.11092/bin/net-2.0/framework/\*
|
||||
|
||||
# the tar doesn't include the nuget binary, so grab it from github and copy it
|
||||
# into the right place
|
||||
cp -vfR ${srcNugetBinary}/* external/nuget-binary/
|
||||
|
||||
# AspNet plugin requires these packages
|
||||
unzip ${srcNugetSystemWebMvcExtensions} -d packages/System.Web.Mvc.Extensions.Mvc.4.1.0.9
|
||||
unzip ${srcNugetMicrosoftAspNetMvc} -d packages/Microsoft.AspNet.Mvc.5.2.2
|
||||
unzip ${srcNugetMicrosoftAspNetRazor} -d packages/Microsoft.AspNet.Razor.3.2.2
|
||||
unzip ${srcNugetMicrosoftAspNetWebPages} -d packages/Microsoft.AspNet.WebPages.3.2.2
|
||||
unzip ${srcNugetMicrosoftWebInfrastructure} -d packages/Microsoft.Web.Infrastructure.1.0.0.0
|
||||
cp -vfR "$(dirname $(pkg-config NuGet.Core --variable=Libraries))"/* external/nuget-binary/
|
||||
'';
|
||||
|
||||
# Revert this commit which broke the ability to use pkg-config to locate dlls
|
||||
patchFlags = [ "-p2" ];
|
||||
patches = [ ./git-revert-12d610fb3f6dce121df538e36f21d8c2eeb0a6e3.patch ];
|
||||
|
||||
buildInputs = [
|
||||
autoconf automake pkgconfig shared_mime_info intltool
|
||||
mono gtk-sharp gnome-sharp unzip
|
||||
pkgconfig
|
||||
dotnetPackages.NUnit
|
||||
dotnetPackages.NUnitRunners
|
||||
dotnetPackages.Nuget
|
||||
];
|
||||
|
||||
preConfigure = "patchShebangs ./configure";
|
||||
@ -108,6 +63,12 @@ stdenv.mkDerivation rec {
|
||||
>
|
||||
EOF
|
||||
done
|
||||
|
||||
# Without this, you get a missing DLL error any time you install an addin..
|
||||
ln -sv `pkg-config nunit.core --variable=Libraries` $out/lib/monodevelop/AddIns/NUnit
|
||||
ln -sv `pkg-config nunit.core.interfaces --variable=Libraries` $out/lib/monodevelop/AddIns/NUnit
|
||||
ln -sv `pkg-config nunit.framework --variable=Libraries` $out/lib/monodevelop/AddIns/NUnit
|
||||
ln -sv `pkg-config nunit.util --variable=Libraries` $out/lib/monodevelop/AddIns/NUnit
|
||||
'';
|
||||
|
||||
dontStrip = true;
|
||||
|
@ -0,0 +1,57 @@
|
||||
diff --git a/main/src/addins/AspNet/MonoDevelop.AspNet.csproj b/main/src/addins/AspNet/MonoDevelop.AspNet.csproj
|
||||
index 02d3a01..c6daaad 100644
|
||||
--- a/main/src/addins/AspNet/MonoDevelop.AspNet.csproj
|
||||
+++ b/main/src/addins/AspNet/MonoDevelop.AspNet.csproj
|
||||
@@ -452,34 +452,6 @@
|
||||
<None Include="Templates\Projects\WebApplication.xpt.xml">
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</None>
|
||||
- <None Include="..\..\..\packages\Microsoft.AspNet.Mvc.5.2.2\lib\net45\System.Web.Mvc.dll">
|
||||
- <Link>System.Web.Mvc.dll</Link>
|
||||
- <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
- </None>
|
||||
- <None Include="..\..\..\packages\Microsoft.AspNet.Razor.3.2.2\lib\net45\System.Web.Razor.dll">
|
||||
- <Link>System.Web.Razor.dll</Link>
|
||||
- <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
- </None>
|
||||
- <None Include="..\..\..\packages\Microsoft.AspNet.WebPages.3.2.2\lib\net45\System.Web.Helpers.dll">
|
||||
- <Link>System.Web.Helpers.dll</Link>
|
||||
- <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
- </None>
|
||||
- <None Include="..\..\..\packages\Microsoft.AspNet.WebPages.3.2.2\lib\net45\System.Web.WebPages.Deployment.dll">
|
||||
- <Link>System.Web.WebPages.Deployment.dll</Link>
|
||||
- <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
- </None>
|
||||
- <None Include="..\..\..\packages\Microsoft.AspNet.WebPages.3.2.2\lib\net45\System.Web.WebPages.dll">
|
||||
- <Link>System.Web.WebPages.dll</Link>
|
||||
- <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
- </None>
|
||||
- <None Include="..\..\..\packages\Microsoft.AspNet.WebPages.3.2.2\lib\net45\System.Web.WebPages.Razor.dll">
|
||||
- <Link>System.Web.WebPages.Razor.dll</Link>
|
||||
- <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
- </None>
|
||||
- <None Include="..\..\..\packages\Microsoft.Web.Infrastructure.1.0.0.0\lib\net40\Microsoft.Web.Infrastructure.dll">
|
||||
- <Link>Microsoft.Web.Infrastructure.dll</Link>
|
||||
- <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
- </None>
|
||||
<None Include="packages.config" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
diff --git a/main/src/addins/AspNet/Properties/MonoDevelop.AspNet.addin.xml b/main/src/addins/AspNet/Properties/MonoDevelop.AspNet.addin.xml
|
||||
index eab7c32..4a75311 100644
|
||||
--- a/main/src/addins/AspNet/Properties/MonoDevelop.AspNet.addin.xml
|
||||
+++ b/main/src/addins/AspNet/Properties/MonoDevelop.AspNet.addin.xml
|
||||
@@ -1,13 +1,6 @@
|
||||
<ExtensionModel>
|
||||
|
||||
<Runtime>
|
||||
- <Import assembly = "System.Web.Helpers.dll" />
|
||||
- <Import assembly = "System.Web.Mvc.dll" />
|
||||
- <Import assembly = "System.Web.Razor.dll" />
|
||||
- <Import assembly = "System.Web.WebPages.Deployment.dll" />
|
||||
- <Import assembly = "System.Web.WebPages.dll" />
|
||||
- <Import assembly = "System.Web.WebPages.Razor.dll" />
|
||||
-
|
||||
<Import file = "Html/Schemas/xhtml1-strict.xsd" />
|
||||
<Import file = "Html/Schemas/xhtml1-transitional.xsd" />
|
||||
<Import file = "Html/Schemas/xhtml1-frameset.xsd" />
|
@ -3,18 +3,18 @@
|
||||
, luabitop, ncurses, perl, pkgconfig, unibilium
|
||||
, withJemalloc ? true, jemalloc }:
|
||||
|
||||
let version = "2015-05-26"; in
|
||||
let version = "2015-06-09"; in
|
||||
stdenv.mkDerivation rec {
|
||||
name = "neovim-${version}";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
sha256 = "0sszpqlq0yp6r62zgcjcmnllc058wzzh9ccvgb2jh9k19ksszyhc";
|
||||
rev = "5a9ad68b258f33ebd7fa0a5da47b308f50f1e5e7";
|
||||
sha256 = "1lycql0lwi7ynrsaln4kxybwvxb9fvganiq3ba4pnpcfgl155k1j";
|
||||
rev = "6270d431aaeed71e7a8782411f36409ab8e0ee35";
|
||||
repo = "neovim";
|
||||
owner = "neovim";
|
||||
};
|
||||
|
||||
# FIXME: this is NOT the libvterm already in nixpkgs, but some NIH silliness:
|
||||
# Note: this is NOT the libvterm already in nixpkgs, but some NIH silliness:
|
||||
neovimLibvterm = let version = "2015-02-23"; in stdenv.mkDerivation rec {
|
||||
name = "neovim-libvterm-${version}";
|
||||
|
||||
|
41  pkgs/applications/graphics/antimony/default.nix  Normal file
@ -0,0 +1,41 @@
|
||||
{ stdenv, fetchgit, libpng, python3, boost, mesa, qt5, ncurses }:
|
||||
|
||||
let
|
||||
gitRev = "745eca3a2d2657c495d5509e9083c884e021d09c";
|
||||
gitBranch = "master";
|
||||
gitTag = "0.8.0b";
|
||||
in
|
||||
stdenv.mkDerivation rec {
|
||||
name = "antimony-${version}";
|
||||
version = gitTag;
|
||||
|
||||
src = fetchgit {
|
||||
url = "git://github.com/mkeeter/antimony.git";
|
||||
rev = gitRev;
|
||||
sha256 = "19ir3y5ipmfyygcn8mbxika4j3af6dfrv54dvhn6maz7dy8h30f4";
|
||||
};
|
||||
|
||||
patches = [ ./paths-fix.patch ];
|
||||
|
||||
buildInputs = [
|
||||
libpng python3 (boost.override { python = python3; })
|
||||
mesa qt5.base ncurses
|
||||
];
|
||||
|
||||
configurePhase = ''
|
||||
export GITREV=${gitRev}
|
||||
export GITBRANCH=${gitBranch}
|
||||
export GITTAG=${gitTag}
|
||||
|
||||
cd qt
|
||||
export sourceRoot=$sourceRoot/qt
|
||||
qmake antimony.pro PREFIX=$out
|
||||
'';
|
||||
|
||||
meta = with stdenv.lib; {
|
||||
description = "A computer-aided design (CAD) tool from a parallel universe";
|
||||
homepage = "https://github.com/mkeeter/antimony";
|
||||
license = licenses.mit;
|
||||
platforms = platforms.linux;
|
||||
};
|
||||
}
|
99  pkgs/applications/graphics/antimony/paths-fix.patch  Normal file
@ -0,0 +1,99 @@
|
||||
diff --git a/qt/antimony.pro b/qt/antimony.pro
|
||||
index 9d586f4..b055a6d 100644
|
||||
--- a/qt/antimony.pro
|
||||
+++ b/qt/antimony.pro
|
||||
@@ -12,14 +12,9 @@ QMAKE_CXXFLAGS_RELEASE += -O3
|
||||
|
||||
QMAKE_CXXFLAGS += -Werror=switch
|
||||
|
||||
-GITREV = $$system(git log --pretty=format:'%h' -n 1)
|
||||
-GITDIFF = $$system(git diff --quiet --exit-code || echo "+")
|
||||
-GITTAG = $$system(git describe --exact-match --tags 2> /dev/null)
|
||||
-GITBRANCH = $$system(git rev-parse --abbrev-ref HEAD)
|
||||
-
|
||||
-QMAKE_CXXFLAGS += "-D'GITREV=\"$${GITREV}$${GITDIFF}\"'"
|
||||
-QMAKE_CXXFLAGS += "-D'GITTAG=\"$${GITTAG}\"'"
|
||||
-QMAKE_CXXFLAGS += "-D'GITBRANCH=\"$${GITBRANCH}\"'"
|
||||
+QMAKE_CXXFLAGS += "-D'GITREV=\"$$(GITREV)\"'"
|
||||
+QMAKE_CXXFLAGS += "-D'GITTAG=\"$$(GITTAG)\"'"
|
||||
+QMAKE_CXXFLAGS += "-D'GITBRANCH=\"$$(GITBRANCH)\"'"
|
||||
|
||||
OLD_GL_SET = $$(OLD_GL)
|
||||
equals(OLD_GL_SET, "true") {
|
||||
@@ -125,11 +120,11 @@ macx {
|
||||
}
|
||||
|
||||
linux {
|
||||
- executable.path = /usr/local/bin
|
||||
+ executable.path = $$(out)/bin
|
||||
executable.files = antimony
|
||||
- nodes_folder.path = /usr/local/bin/sb/nodes
|
||||
+ nodes_folder.path = $$(out)/bin/sb/nodes
|
||||
nodes_folder.files = ../py/nodes/*
|
||||
- fab_folder.path = /usr/local/bin/sb/fab
|
||||
+ fab_folder.path = $$(out)/bin/sb/fab
|
||||
fab_folder.files = ../py/fab/*
|
||||
INSTALLS += executable nodes_folder fab_folder
|
||||
}
|
||||
diff --git a/qt/fab.pri b/qt/fab.pri
|
||||
index a54813b..b500536 100644
|
||||
--- a/qt/fab.pri
|
||||
+++ b/qt/fab.pri
|
||||
@@ -54,7 +54,7 @@ DEFINES += '_STATIC_= '
|
||||
|
||||
linux {
|
||||
QMAKE_CFLAGS += -std=gnu99
|
||||
- QMAKE_CXXFLAGS += $$system(/usr/bin/python3-config --includes)
|
||||
+ QMAKE_CXXFLAGS += $$system(python3-config --includes)
|
||||
LIBS += -lpng
|
||||
}
|
||||
|
||||
diff --git a/qt/shared.pri b/qt/shared.pri
|
||||
index e7d0e3a..026eae3 100644
|
||||
--- a/qt/shared.pri
|
||||
+++ b/qt/shared.pri
|
||||
@@ -39,41 +39,11 @@ macx {
|
||||
}
|
||||
|
||||
linux {
|
||||
- QMAKE_CXXFLAGS += $$system(/usr/bin/python3-config --includes)
|
||||
- QMAKE_LFLAGS += $$system(/usr/bin/python3-config --ldflags)
|
||||
+ QMAKE_CXXFLAGS += $$system(python3-config --includes)
|
||||
+ QMAKE_LFLAGS += $$system(python3-config --ldflags)
|
||||
|
||||
# Even though this is in QMAKE_LFLAGS, the linker is picky about
|
||||
# library ordering (so it needs to be here too).
|
||||
LIBS += -lpython3.4m
|
||||
-
|
||||
- # ldconfig is being used to find libboost_python, but it's in a different
|
||||
- # place in different distros (and is not in the default $PATH on Debian).
|
||||
- # First, check to see if it's on the default $PATH.
|
||||
- system(which ldconfig > /dev/null) {
|
||||
- LDCONFIG_BIN = "ldconfig"
|
||||
- }
|
||||
- # If that failed, then search for it in its usual places.
|
||||
- isEmpty(LDCONFIG_BIN) {
|
||||
- for(p, $$list(/sbin/ldconfig /usr/bin/ldconfig)) {
|
||||
- exists($$p): LDCONFIG_BIN = $$p
|
||||
- }
|
||||
- }
|
||||
- # If that search failed too, then exit with an error.
|
||||
- isEmpty(LDCONFIG_BIN) {
|
||||
- error("Could not find ldconfig!")
|
||||
- }
|
||||
-
|
||||
- # Check for different boost::python naming schemes
|
||||
- LDCONFIG_OUT = $$system($$LDCONFIG_BIN -p|grep python)
|
||||
- for (b, $$list(boost_python-py34 boost_python3)) {
|
||||
- contains(LDCONFIG_OUT, "lib$${b}.so") {
|
||||
- LIBS += "-l$$b"
|
||||
- GOT_BOOST_PYTHON = True
|
||||
- }
|
||||
- }
|
||||
-
|
||||
- # If we couldn't find boost::python, exit with an error.
|
||||
- isEmpty(GOT_BOOST_PYTHON) {
|
||||
- error("Could not find boost::python3")
|
||||
- }
|
||||
+ LIBS += -lboost_python3
|
||||
}
|
48  pkgs/applications/misc/apvlv/default.nix  Normal file
@ -0,0 +1,48 @@
|
||||
{ stdenv, fetchurl, cmake, pkgconfig,
|
||||
gtk2 , poppler, freetype, libpthreadstubs, libXdmcp, libxshmfence
|
||||
}:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
version = "0.1.f7f7b9c";
|
||||
name = "apvlv-${version}";
|
||||
|
||||
src = fetchurl {
|
||||
url = "https://github.com/downloads/naihe2010/apvlv/${name}-Source.tar.gz";
|
||||
sha256 = "125nlcfjdhgzi9jjxh9l2yc9g39l6jahf8qh2555q20xkxf4rl0w";
|
||||
};
|
||||
|
||||
preConfigure = ''
|
||||
export NIX_CFLAGS_COMPILE="$NIX_CFLAGS_COMPILE -I${poppler}/include/poppler"
|
||||
'';
|
||||
|
||||
buildInputs = [
|
||||
pkgconfig cmake
|
||||
poppler
|
||||
freetype gtk2
|
||||
libpthreadstubs libXdmcp libxshmfence # otherwise warnings in compilation
|
||||
];
|
||||
|
||||
installPhase = ''
|
||||
# binary
|
||||
mkdir -p $out/bin
|
||||
cp src/apvlv $out/bin/apvlv
|
||||
|
||||
# displays pdfStartup.pdf as default pdf entry
|
||||
mkdir -p $out/share/doc/apvlv/
|
||||
cp ../Startup.pdf $out/share/doc/apvlv/Startup.pdf
|
||||
'';
|
||||
|
||||
meta = with stdenv.lib; {
|
||||
homepage = "http://naihe2010.github.io/apvlv/";
|
||||
description = "PDF viewer with Vim-like behaviour";
|
||||
longDescription = ''
|
||||
apvlv is a PDF/DJVU/UMD/TXT Viewer Under Linux/WIN32
|
||||
with Vim-like behaviour.
|
||||
'';
|
||||
|
||||
license = licenses.lgpl2;
|
||||
platforms = platforms.unix;
|
||||
maintainers = [ maintainers.ardumont ];
|
||||
};
|
||||
|
||||
}
|
@ -5,11 +5,11 @@
|
||||
}:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
name = "calibre-2.29.0";
|
||||
name = "calibre-2.30.0";
|
||||
|
||||
src = fetchurl {
|
||||
url = "mirror://sourceforge/calibre/${name}.tar.xz";
|
||||
sha256 = "1n3cfnjnghhhsgzcbcvbr0gh191lhl6az09q1s68jhlcc2lski6l";
|
||||
sha256 = "1k2rpn06nfzqjy5k6fh8pwfj8vbhpn7rgkpkkpz5n2fqg3z8ph1j";
|
||||
};
|
||||
|
||||
inherit python;
|
||||
|
@ -1,11 +1,11 @@
|
||||
{ stdenv, fetchurl, python, pythonPackages, gettext, pygtksourceview, sqlite }:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
name = "cherrytree-0.35.7";
|
||||
name = "cherrytree-0.35.8";
|
||||
|
||||
src = fetchurl {
|
||||
url = "http://www.giuspen.com/software/${name}.tar.xz";
|
||||
sha256 = "03p3bx7skc361rmh0axhm0fa0inmxv4bpa9l566wskb3zq4sy4g3";
|
||||
sha256 = "0vqc1idzd73f4q5f4zwwypj4jiivwnb4y0r3041h2pm08y1wgsd8";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [ pythonPackages.sqlite3 ];
|
||||
|
@ -1,17 +1,21 @@
|
||||
{ stdenv, fetchurl, unzip, makeDesktopItem, mono }:
|
||||
{ stdenv, fetchurl, buildDotnetPackage, makeWrapper, unzip, makeDesktopItem }:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
name = "keepass-${version}";
|
||||
buildDotnetPackage rec {
|
||||
baseName = "keepass";
|
||||
version = "2.29";
|
||||
|
||||
src = fetchurl {
|
||||
url = "mirror://sourceforge/keepass/KeePass-${version}.zip";
|
||||
sha256 = "16x7m899akpi036c0wlr41w7fz9q0b69yac9q97rqkixb03l4g9d";
|
||||
url = "mirror://sourceforge/keepass/KeePass-${version}-Source.zip";
|
||||
sha256 = "051s0aznyyhbpdbly6h5rs0ax0zvkp45dh93nmq6lwhicswjwn5m";
|
||||
};
|
||||
|
||||
sourceRoot = ".";
|
||||
|
||||
phases = [ "unpackPhase" "installPhase" ];
|
||||
buildInputs = [ unzip ];
|
||||
|
||||
patches = [ ./keepass.patch ];
|
||||
|
||||
preConfigure = "rm -rvf Build/*";
|
||||
|
||||
desktopItem = makeDesktopItem {
|
||||
name = "keepass";
|
||||
@ -22,23 +26,19 @@ stdenv.mkDerivation rec {
|
||||
categories = "Application;Other;";
|
||||
};
|
||||
|
||||
outputFiles = [ "Build/KeePass/Release/*" "Build/KeePassLib/Release/*" ];
|
||||
dllFiles = [ "KeePassLib.dll" ];
|
||||
exeFiles = [ "KeePass.exe" ];
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p "$out/bin"
|
||||
echo "${mono}/bin/mono $out/KeePass.exe" > $out/bin/keepass
|
||||
chmod +x $out/bin/keepass
|
||||
echo $out
|
||||
cp -r ./* $out/
|
||||
postInstall = ''
|
||||
mkdir -p "$out/share/applications"
|
||||
cp ${desktopItem}/share/applications/* $out/share/applications
|
||||
'';
|
||||
|
||||
buildInputs = [ unzip ];
|
||||
|
||||
meta = {
|
||||
description = "GUI password manager with strong cryptography";
|
||||
homepage = http://www.keepass.info/;
|
||||
maintainers = with stdenv.lib.maintainers; [amorsillo];
|
||||
maintainers = with stdenv.lib.maintainers; [ amorsillo obadz ];
|
||||
platforms = with stdenv.lib.platforms; all;
|
||||
license = stdenv.lib.licenses.gpl2;
|
||||
};
|
||||
|
89  pkgs/applications/misc/keepass/keepass.patch  Normal file
@ -0,0 +1,89 @@
|
||||
diff -Naur old/KeePass/KeePass.csproj new/KeePass/KeePass.csproj
|
||||
--- old/KeePass/KeePass.csproj 2015-04-10 11:00:46.000000000 +0100
|
||||
+++ new/KeePass/KeePass.csproj 2015-05-27 16:35:52.196177593 +0100
|
||||
@@ -1,4 +1,4 @@
|
||||
-<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="3.5">
|
||||
+<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
|
||||
<PropertyGroup>
|
||||
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
|
||||
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
|
||||
@@ -10,7 +10,7 @@
|
||||
<RootNamespace>KeePass</RootNamespace>
|
||||
<AssemblyName>KeePass</AssemblyName>
|
||||
<ApplicationIcon>KeePass.ico</ApplicationIcon>
|
||||
- <SignAssembly>true</SignAssembly>
|
||||
+ <SignAssembly>false</SignAssembly>
|
||||
<AssemblyOriginatorKeyFile>KeePass.pfx</AssemblyOriginatorKeyFile>
|
||||
<FileUpgradeFlags>
|
||||
</FileUpgradeFlags>
|
||||
@@ -1316,6 +1316,5 @@
|
||||
</Target>
|
||||
-->
|
||||
<PropertyGroup>
|
||||
- <PostBuildEvent>"$(FrameworkSDKDir)bin\sgen.exe" /assembly:"$(TargetPath)" /force /nologo /compiler:/keycontainer:VS_KEY_33430356D8D7D1B8 /compiler:/delaysign-</PostBuildEvent>
|
||||
</PropertyGroup>
|
||||
-</Project>
|
||||
\ No newline at end of file
|
||||
+</Project>
|
||||
diff -Naur old/KeePassLib/KeePassLib.csproj new/KeePassLib/KeePassLib.csproj
|
||||
--- old/KeePassLib/KeePassLib.csproj 2014-05-08 15:00:24.000000000 +0100
|
||||
+++ new/KeePassLib/KeePassLib.csproj 2015-05-27 16:35:52.197177562 +0100
|
||||
@@ -1,4 +1,4 @@
|
||||
-<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="3.5">
|
||||
+<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
|
||||
<PropertyGroup>
|
||||
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
|
||||
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
|
||||
@@ -9,7 +9,7 @@
|
||||
<AppDesignerFolder>Properties</AppDesignerFolder>
|
||||
<RootNamespace>KeePassLib</RootNamespace>
|
||||
<AssemblyName>KeePassLib</AssemblyName>
|
||||
- <SignAssembly>true</SignAssembly>
|
||||
+ <SignAssembly>false</SignAssembly>
|
||||
<AssemblyOriginatorKeyFile>KeePassLib.pfx</AssemblyOriginatorKeyFile>
|
||||
<FileUpgradeFlags>
|
||||
</FileUpgradeFlags>
|
||||
diff -Naur old/KeePass.sln new/KeePass.sln
|
||||
--- old/KeePass.sln 2009-08-31 19:47:28.000000000 +0100
|
||||
+++ new/KeePass.sln 2015-05-27 16:35:59.568953518 +0100
|
||||
@@ -1,11 +1,9 @@
|
||||
-Microsoft Visual Studio Solution File, Format Version 10.00
|
||||
+Microsoft Visual Studio Solution File, Format Version 12.00
|
||||
# Visual Studio 2008
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "KeePassLib", "KeePassLib\KeePassLib.csproj", "{53573E4E-33CB-4FDB-8698-C95F5E40E7F3}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "KeePass", "KeePass\KeePass.csproj", "{10938016-DEE2-4A25-9A5A-8FD3444379CA}"
|
||||
EndProject
|
||||
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "KeePassLibSD", "KeePassLibSD\KeePassLibSD.csproj", "{DC15F71A-2117-4DEF-8C10-AA355B5E5979}"
|
||||
-EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "TrlUtil", "Translation\TrlUtil\TrlUtil.csproj", "{B7E890E7-BF50-4450-9A52-C105BD98651C}"
|
||||
EndProject
|
||||
Global
|
||||
@@ -44,18 +42,6 @@
|
||||
{10938016-DEE2-4A25-9A5A-8FD3444379CA}.Release|Mixed Platforms.Build.0 = Release|Any CPU
|
||||
{10938016-DEE2-4A25-9A5A-8FD3444379CA}.Release|Win32.ActiveCfg = Release|Any CPU
|
||||
{10938016-DEE2-4A25-9A5A-8FD3444379CA}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
- {DC15F71A-2117-4DEF-8C10-AA355B5E5979}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
- {DC15F71A-2117-4DEF-8C10-AA355B5E5979}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
- {DC15F71A-2117-4DEF-8C10-AA355B5E5979}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
|
||||
- {DC15F71A-2117-4DEF-8C10-AA355B5E5979}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
|
||||
- {DC15F71A-2117-4DEF-8C10-AA355B5E5979}.Debug|Win32.ActiveCfg = Debug|Any CPU
|
||||
- {DC15F71A-2117-4DEF-8C10-AA355B5E5979}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
- {DC15F71A-2117-4DEF-8C10-AA355B5E5979}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
- {DC15F71A-2117-4DEF-8C10-AA355B5E5979}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
- {DC15F71A-2117-4DEF-8C10-AA355B5E5979}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
|
||||
- {DC15F71A-2117-4DEF-8C10-AA355B5E5979}.Release|Mixed Platforms.Build.0 = Release|Any CPU
|
||||
- {DC15F71A-2117-4DEF-8C10-AA355B5E5979}.Release|Win32.ActiveCfg = Release|Any CPU
|
||||
- {DC15F71A-2117-4DEF-8C10-AA355B5E5979}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{B7E890E7-BF50-4450-9A52-C105BD98651C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{B7E890E7-BF50-4450-9A52-C105BD98651C}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{B7E890E7-BF50-4450-9A52-C105BD98651C}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
|
||||
diff -Naur old/Translation/TrlUtil/TrlUtil.csproj new/Translation/TrlUtil/TrlUtil.csproj
|
||||
--- old/Translation/TrlUtil/TrlUtil.csproj 2013-07-21 10:06:38.000000000 +0100
|
||||
+++ new/Translation/TrlUtil/TrlUtil.csproj 2015-05-27 16:35:52.197177562 +0100
|
||||
@@ -1,4 +1,4 @@
|
||||
-<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="3.5">
|
||||
+<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
|
||||
<PropertyGroup>
|
||||
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
|
||||
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
|
@ -4,12 +4,12 @@
|
||||
let ocamlVersion = (builtins.parseDrvName (ocaml.name)).version;
|
||||
in stdenv.mkDerivation rec {
|
||||
name = "llpp-${version}";
|
||||
version = "21-git-2015-04-27";
|
||||
version = "21-git-2015-06-06";
|
||||
|
||||
src = fetchgit {
|
||||
url = "git://repo.or.cz/llpp.git";
|
||||
rev = "66868744188151eaa433d42c807e1efc5f623aa4";
|
||||
sha256 = "04hjbkzxixw88xmrpbr1smq486wfw3q9hvy7b4bfcb9j32mazk5c";
|
||||
rev = "492d761c0c7c8c4ccdd4f0d3fa7c51434ce8acf2";
|
||||
sha256 = "14dp5sw7cd6bja9d3zpxmswbk0k0b7x2fzb1fflhnnnhjc39irk9";
|
||||
};
|
||||
|
||||
buildInputs = [ pkgconfig ninja makeWrapper ocaml findlib mupdf lablgl
|
||||
|
@ -2,12 +2,12 @@
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
name = "rofi-${version}";
|
||||
version = "2015-05-29";
|
||||
version = "2015-06-08";
|
||||
|
||||
src = fetchgit {
|
||||
url = "https://github.com/carnager/rofi-pass";
|
||||
rev = "92c26557ec4b0508c563d596291571bbef402899";
|
||||
sha256 = "17k9jmmckqaw75i0qsay2gc8mrjrs6jjfwfxaggspj912sflmjng";
|
||||
rev = "7e376b5ec64974c4e8478acf3ada8d111896f391";
|
||||
sha256 = "1ywsxdgy5m63a2f5vd7np2f1qffz7y95z7s1gq5d87s8xd8myadl";
|
||||
};
|
||||
|
||||
buildInputs = [ rofi wmctrl xprop xdotool ];
|
||||
|
@ -3,13 +3,13 @@
|
||||
}:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
version = "1.2.6";
|
||||
version = "1.2.7";
|
||||
name = "slic3r-${version}";
|
||||
|
||||
src = fetchgit {
|
||||
url = "git://github.com/alexrj/Slic3r";
|
||||
rev = "refs/tags/${version}";
|
||||
sha256 = "1ymk2n9dw1mpizwg6bxbzq60mg1cwljxlncaasdyakqrkkr22r8k";
|
||||
sha256 = "1bybbl8b0lfh9wkn1k9cxd11hlc5064wzh0fk6zdmc9vnnay399i";
|
||||
};
|
||||
|
||||
buildInputs = with perlPackages; [ perl makeWrapper which
|
||||
|
@ -131,7 +131,13 @@ stdenv.mkDerivation {
|
||||
|
||||
for executable in \
|
||||
firefox firefox-bin plugin-container \
|
||||
updater crashreporter webapprt-stub libxul.so
|
||||
updater crashreporter webapprt-stub \
|
||||
components/libdbusservice.so components/libmozgnome.so \
|
||||
gmp-clearkey/0.1/libclearkey.so \
|
||||
browser/components/libbrowsercomps.so \
|
||||
libnssdbm3.so libsmime3.so libxul.so libnss3.so libplc4.so \
|
||||
libfreebl3.so libmozsqlite3.so libmozalloc.so libnspr4.so libssl3.so \
|
||||
libsoftokn3.so libnssutil3.so libnssckbi.so libplds4.so
|
||||
do
|
||||
patchelf --set-rpath "$libPath" \
|
||||
"$out/usr/lib/firefox-bin-${version}/$executable"
|
||||
@ -143,7 +149,7 @@ stdenv.mkDerivation {
|
||||
[Desktop Entry]
|
||||
Type=Application
|
||||
Exec=$out/bin/firefox
|
||||
Icon=$out/lib/firefox-bin-${version}/chrome/icons/default/default256.png
|
||||
Icon=$out/usr/lib/firefox-bin-${version}/browser/icons/mozicon128.png
|
||||
Name=Firefox
|
||||
GenericName=Web Browser
|
||||
Categories=Application;Network;
|
||||
|
@ -2,27 +2,33 @@
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
name = "kubernetes-${version}";
|
||||
version = "0.15.0";
|
||||
version = "0.18.0";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "GoogleCloudPlatform";
|
||||
repo = "kubernetes";
|
||||
rev = "v${version}";
|
||||
sha256 = "1jiczhx01i8czm1gzd232z2ds2f1lvs5ifa9zjabhzw5ykfzdjg8";
|
||||
sha256 = "1adbd5n2fs1278f6kz6pd23813w2k4pgcxjl21idflh8jafxsyj7";
|
||||
};
|
||||
|
||||
buildInputs = [ makeWrapper which go iptables rsync ];
|
||||
|
||||
buildPhase = ''
|
||||
GOPATH=$(pwd):$(pwd)/Godeps/_workspace
|
||||
mkdir -p $(pwd)/Godeps/_workspace/src/github.com/GoogleCloudPlatform
|
||||
ln -s $(pwd) $(pwd)/Godeps/_workspace/src/github.com/GoogleCloudPlatform/kubernetes
|
||||
|
||||
substituteInPlace "hack/lib/golang.sh" --replace "_cgo" ""
|
||||
GOPATH=$(pwd)
|
||||
patchShebangs ./hack
|
||||
hack/build-go.sh --use_go_build
|
||||
|
||||
(cd cluster/addons/dns/kube2sky && go build ./kube2sky.go)
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p "$out/bin"
|
||||
cp _output/local/go/bin/* "$out/bin/"
|
||||
cp cluster/addons/dns/kube2sky/kube2sky "$out/bin/"
|
||||
'';
|
||||
|
||||
preFixup = ''
|
||||
|
@ -14,6 +14,8 @@ stdenv.mkDerivation rec {
|
||||
buildInputs = [ openssl ncurses pkgconfig glib loudmouth libotr gpgme ];
|
||||
|
||||
configureFlags = "--with-openssl=${openssl} --enable-modules --enable-otr";
|
||||
|
||||
doCheck = true;
|
||||
|
||||
meta = with stdenv.lib; {
|
||||
homepage = http://mcabber.com/;
|
||||
|
@ -4,12 +4,12 @@
, extraBuildInputs ? [] }:

stdenv.mkDerivation rec {
version = "1.1.1";
version = "1.2";
name = "weechat-${version}";

src = fetchurl {
url = "http://weechat.org/files/src/weechat-${version}.tar.gz";
sha256 = "0j8kc2zsv7ybgq6wi0r8siyd3adl3528gymgmidijd78smbpwbx3";
url = "http://weechat.org/files/src/weechat-${version}.tar.bz2";
sha256 = "0kb8mykhzm7zcxsl6l6cia2n4nc9akiysg0v6d8xb51p3x002ibw";
};

buildInputs =
@ -121,7 +121,12 @@ stdenv.mkDerivation {
|
||||
|
||||
for executable in \
|
||||
thunderbird mozilla-xremote-client thunderbird-bin plugin-container \
|
||||
updater libxul.so
|
||||
updater \
|
||||
components/libdbusservice.so components/libmozgnome.so \
|
||||
libnssdbm3.so libsmime3.so libxul.so libprldap60.so libnss3.so \
|
||||
libplc4.so libfreebl3.so libmozsqlite3.so libmozalloc.so libnspr4.so \
|
||||
libssl3.so libldif60.so libsoftokn3.so libldap60.so libnssutil3.so \
|
||||
libnssckbi.so libplds4.so
|
||||
do
|
||||
patchelf --set-rpath "$libPath" \
|
||||
"$out/usr/lib/thunderbird-bin-${version}/$executable"
|
||||
|
@ -4,12 +4,12 @@ with goPackages;

buildGoPackage rec {
name = "syncthing-${version}";
version = "0.11.7";
version = "0.11.8";
goPackagePath = "github.com/syncthing/syncthing";
src = fetchgit {
url = "git://github.com/syncthing/syncthing.git";
rev = "refs/tags/v${version}";
sha256 = "7d928a255c61c7b89d460cc70c79bd8e85bef3e919c157f59d5709fef4153c8d";
sha256 = "fed98ac47fd84aecee7770dd59e5e68c5bc429d50b361f13b9ea2e28c3be62cf";
};

subPackages = [ "cmd/syncthing" ];
@ -26,7 +26,7 @@ buildGoPackage rec {
homepage = http://syncthing.net/;
description = "Replaces Dropbox and BitTorrent Sync with something open, trustworthy and decentralized";
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ matejc ];
maintainers = with lib.maintainers; [ matejc theuni ];
platforms = with lib.platforms; unix;
};
}
@ -9,7 +9,7 @@
}:

let
version = "2.4.1";
version = "2.4.2";
svn = subversionClient.override { perlBindings = true; };
in

@ -18,7 +18,7 @@ stdenv.mkDerivation {

src = fetchurl {
url = "https://www.kernel.org/pub/software/scm/git/git-${version}.tar.xz";
sha256 = "195d61f98jj53jq0w3kfphpyk51h7fylpahc558id79ccc4ii1bj";
sha256 = "1rf942v2yk49xgy0asgk4vi4mmshpz823iyvrxc5n5y2v0ffq0a8";
};

patches = [
@ -16,6 +16,8 @@ stdenv.mkDerivation rec {
|
||||
./autogen.sh --prefix=$out
|
||||
'';
|
||||
|
||||
NIX_CFLAGS_COMPILE = "-Wno-error=deprecated-declarations";
|
||||
|
||||
buildInputs = [ which gnome3.gnome_common glib intltool pkgconfig libtool cairo gtk3 gst_all_1.gstreamer gst_all_1.gst-plugins-base ];
|
||||
|
||||
meta = with stdenv.lib; {
|
||||
|
@ -7,100 +7,86 @@
|
||||
, extension ? (self: super: {})
|
||||
}:
|
||||
|
||||
with stdenv.lib.strings;
|
||||
|
||||
let
|
||||
optionalString = stdenv.lib.optionalString;
|
||||
filter = stdenv.lib.filter;
|
||||
concatMapStringsSep = stdenv.lib.strings.concatMapStringsSep;
|
||||
concatMapStrings = stdenv.lib.strings.concatMapStrings;
|
||||
unwords = stdenv.lib.strings.concatStringsSep " ";
|
||||
mapInside = xs: unwords (map (x: x + "/*") xs);
|
||||
in
|
||||
{ mkDerivation = args:
|
||||
let
|
||||
postprocess = x: x // {
|
||||
sourceDirectories = filter (y: !(y == null)) x.sourceDirectories;
|
||||
propagatedBuildInputs = filter (y : ! (y == null)) x.propagatedBuildInputs;
|
||||
propagatedUserEnvPkgs = filter (y : ! (y == null)) x.propagatedUserEnvPkgs;
|
||||
everythingFile = if x.everythingFile == "" then "Everything.agda" else x.everythingFile;
|
||||
defaults = self : {
|
||||
# There is no Hackage for Agda so we require src.
|
||||
inherit (self) src name;
|
||||
|
||||
passthru = { inherit (x) extras; };
|
||||
extras = null;
|
||||
};
|
||||
isAgdaPackage = true;
|
||||
|
||||
defaults = self : {
|
||||
# There is no Hackage for Agda so we require src.
|
||||
inherit (self) src name;
|
||||
buildInputs = [ Agda ] ++ self.buildDepends;
|
||||
buildDepends = [];
|
||||
|
||||
isAgdaPackage = true;
|
||||
buildDependsAgda = stdenv.lib.filter
|
||||
(dep: dep ? isAgdaPackage && dep.isAgdaPackage)
|
||||
self.buildDepends;
|
||||
buildDependsAgdaShareAgda = map (x: x + "/share/agda") self.buildDependsAgda;
|
||||
|
||||
buildInputs = [ Agda ] ++ self.buildDepends;
|
||||
buildDepends = [];
|
||||
# Not much choice here ;)
|
||||
LANG = "en_US.UTF-8";
|
||||
LOCALE_ARCHIVE = stdenv.lib.optionalString
|
||||
stdenv.isLinux
|
||||
"${glibcLocales}/lib/locale/locale-archive";
|
||||
|
||||
buildDependsAgda = filter
|
||||
(dep: dep ? isAgdaPackage && dep.isAgdaPackage)
|
||||
self.buildDepends;
|
||||
buildDependsAgdaShareAgda = map (x: x + "/share/agda") self.buildDependsAgda;
|
||||
everythingFile = "Everything.agda";
|
||||
|
||||
# Not much choice here ;)
|
||||
LANG = "en_US.UTF-8";
|
||||
LOCALE_ARCHIVE = optionalString stdenv.isLinux "${glibcLocales}/lib/locale/locale-archive";
|
||||
propagatedBuildInputs = self.buildDependsAgda;
|
||||
propagatedUserEnvPkgs = self.buildDependsAgda;
|
||||
|
||||
everythingFile = "Everything.agda";
|
||||
# Immediate source directories under which modules can be found.
|
||||
sourceDirectories = [ ];
|
||||
|
||||
propagatedBuildInputs = self.buildDependsAgda;
|
||||
propagatedUserEnvPkgs = self.buildDependsAgda;
|
||||
# This is used if we have a top-level element that only serves
|
||||
# as the container for the source and we only care about its
|
||||
# contents. The directories put here will have their
|
||||
# *contents* copied over as opposed to sourceDirectories which
|
||||
# would make a direct copy of the whole thing.
|
||||
topSourceDirectories = [ "src" ];
|
||||
|
||||
# Immediate source directories under which modules can be found.
|
||||
sourceDirectories = [ ];
|
||||
# FIXME: `dirOf self.everythingFile` is what we really want, not hardcoded "./"
|
||||
includeDirs = self.buildDependsAgdaShareAgda
|
||||
++ self.sourceDirectories ++ self.topSourceDirectories
|
||||
++ [ "." ];
|
||||
buildFlags = concatStringsSep " " (map (x: "-i " + x) self.includeDirs);
|
||||
|
||||
# This is used if we have a top-level element that only serves
|
||||
# as the container for the source and we only care about its
|
||||
# contents. The directories put here will have their
|
||||
# *contents* copied over as opposed to sourceDirectories which
|
||||
# would make a direct copy of the whole thing.
|
||||
topSourceDirectories = [ "src" ];
|
||||
agdaWithArgs = "${Agda}/bin/agda ${self.buildFlags}";
|
||||
|
||||
# FIXME: `dirOf self.everythingFile` is what we really want, not hardcoded "./"
|
||||
includeDirs = self.buildDependsAgdaShareAgda
|
||||
++ self.sourceDirectories ++ self.topSourceDirectories
|
||||
++ [ "." ];
|
||||
buildFlags = unwords (map (x: "-i " + x) self.includeDirs);
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
${self.agdaWithArgs} ${self.everythingFile}
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
agdaWithArgs = "${Agda}/bin/agda ${self.buildFlags}";
|
||||
installPhase = let
|
||||
srcFiles = self.sourceDirectories
|
||||
++ map (x: x + "/*") self.topSourceDirectories;
|
||||
in ''
|
||||
runHook preInstall
|
||||
mkdir -p $out/share/agda
|
||||
cp -pR ${concatStringsSep " " srcFiles} $out/share/agda
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
${self.agdaWithArgs} ${self.everythingFile}
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
mkdir -p $out/share/agda
|
||||
cp -pR ${unwords self.sourceDirectories} ${mapInside self.topSourceDirectories} $out/share/agda
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
# Optionally-built conveniences
|
||||
extras = {
|
||||
passthru = {
|
||||
env = stdenv.mkDerivation {
|
||||
name = "interactive-${self.name}";
|
||||
inherit (self) LANG LOCALE_ARCHIVE;
|
||||
AGDA_PACKAGE_PATH = concatMapStrings (x: x + ":") self.buildDependsAgdaShareAgda;
|
||||
buildInputs = let
|
||||
# Makes a wrapper available to the user. Very useful in
|
||||
# nix-shell where all dependencies are -i'd.
|
||||
agdaWrapper = writeScriptBin "agda" ''
|
||||
${self.agdaWithArgs} "$@"
|
||||
'';
|
||||
|
||||
# Use this to stick `agdaWrapper` at the front of the PATH:
|
||||
#
|
||||
# agda.mkDerivation (self: { PATH = self.extras.agdaWrapperPATH; })
|
||||
#
|
||||
# Not sure this is the best way to handle conflicts....
|
||||
agdaWrapperPATH = "${self.extras.agdaWrapper}/bin:$PATH";
|
||||
|
||||
AGDA_PACKAGE_PATH = concatMapStrings (x: x + ":") self.buildDependsAgdaShareAgda;
|
||||
};
|
||||
in [agdaWrapper] ++ self.buildDepends;
|
||||
};
|
||||
in stdenv.mkDerivation
|
||||
(postprocess (let super = defaults self // args self;
|
||||
self = super // extension self super;
|
||||
in self));
|
||||
};
|
||||
};
|
||||
in
|
||||
{ mkDerivation = args: let
|
||||
super = defaults self // args self;
|
||||
self = super // extension self super;
|
||||
in stdenv.mkDerivation self;
|
||||
}
|
||||
|
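The reworked Agda builder above keeps the agda.mkDerivation (self: { ... }) calling convention while collecting the defaults into a single overridable fixed point. A minimal sketch of a consumer follows; the library name, repository details and the agdaStdlib dependency are hypothetical placeholders, not part of this change.

# Hypothetical consumer of the Agda builder above; every concrete value is a placeholder.
{ agda, fetchgit, agdaStdlib }:

agda.mkDerivation (self: {
  name = "my-agda-lib-0.1";            # there is no Hackage for Agda, so name and src are required
  src = fetchgit {
    url = "https://example.org/my-agda-lib.git";
    rev = "0000000000000000000000000000000000000000";
    sha256 = "0000000000000000000000000000000000000000000000000000";
  };

  buildDepends = [ agdaStdlib ];       # only attrs carrying isAgdaPackage end up in buildDependsAgda
  everythingFile = "Everything.agda";  # type-checked by agda during buildPhase
  sourceDirectories = [ "lib" ];       # copied as-is into $out/share/agda by installPhase
})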
109
pkgs/build-support/build-dotnet-package/default.nix
Normal file
@ -0,0 +1,109 @@
|
||||
{ stdenv, lib, makeWrapper, pkgconfig, mono, dotnetbuildhelpers }:
|
||||
|
||||
attrsOrig @
|
||||
{ baseName
|
||||
, version
|
||||
, buildInputs ? []
|
||||
, xBuildFiles ? [ ]
|
||||
, xBuildFlags ? [ "/p:Configuration=Release" ]
|
||||
, outputFiles ? [ "bin/Release/*" ]
|
||||
, dllFiles ? [ "*.dll" ]
|
||||
, exeFiles ? [ "*.exe" ]
|
||||
, ... }:
|
||||
let
|
||||
arrayToShell = (a: toString (map (lib.escape (lib.stringToCharacters "\\ ';$`()|<>\t") ) a));
|
||||
|
||||
attrs = {
|
||||
name = "${baseName}-${version}";
|
||||
|
||||
buildInputs = [
|
||||
pkgconfig
|
||||
mono
|
||||
dotnetbuildhelpers
|
||||
makeWrapper
|
||||
] ++ buildInputs;
|
||||
|
||||
configurePhase = ''
|
||||
runHook preConfigure
|
||||
|
||||
[ -z "$dontPlacateNuget" ] && placate-nuget.sh
|
||||
[ -z "$dontPlacatePaket" ] && placate-paket.sh
|
||||
[ -z "$dontPatchFSharpTargets" ] && patch-fsharp-targets.sh
|
||||
|
||||
runHook postConfigure
|
||||
'';
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
echo Building dotNET packages...
|
||||
|
||||
# Probably needs to be moved to fsharp
|
||||
if pkg-config FSharp.Core
|
||||
then
|
||||
export FSharpTargetsPath="$(dirname $(pkg-config FSharp.Core --variable=Libraries))/Microsoft.FSharp.Targets"
|
||||
fi
|
||||
|
||||
ran=""
|
||||
for xBuildFile in ${arrayToShell xBuildFiles} ''${xBuildFilesExtra}
|
||||
do
|
||||
ran="yes"
|
||||
xbuild ${arrayToShell xBuildFlags} ''${xBuildFlagsArray} $xBuildFile
|
||||
done
|
||||
|
||||
[ -z "$ran" ] && xbuild ${arrayToShell xBuildFlags} ''${xBuildFlagsArray}
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
dontStrip = true;
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
target="$out/lib/dotnet/${baseName}"
|
||||
mkdir -p "$target"
|
||||
|
||||
cp -rv ${arrayToShell outputFiles} "''${outputFilesArray[@]}" "$target"
|
||||
|
||||
if [ -z "$dontRemoveDuplicatedDlls" ]
|
||||
then
|
||||
pushd "$out"
|
||||
remove-duplicated-dlls.sh
|
||||
popd
|
||||
fi
|
||||
|
||||
set -f
|
||||
for dllPattern in ${arrayToShell dllFiles} ''${dllFilesArray[@]}
|
||||
do
|
||||
set +f
|
||||
for dll in "$target"/$dllPattern
|
||||
do
|
||||
[ -f "$dll" ] || continue
|
||||
if pkg-config $(basename -s .dll "$dll")
|
||||
then
|
||||
echo "$dll already exported by a buildInputs, not re-exporting"
|
||||
else
|
||||
${dotnetbuildhelpers}/bin/create-pkg-config-for-dll.sh "$out/lib/pkgconfig" "$dll"
|
||||
fi
|
||||
done
|
||||
done
|
||||
|
||||
set -f
|
||||
for exePattern in ${arrayToShell exeFiles} ''${exeFilesArray[@]}
|
||||
do
|
||||
set +f
|
||||
for exe in "$target"/$exePattern
|
||||
do
|
||||
[ -f "$exe" ] || continue
|
||||
mkdir -p "$out"/bin
|
||||
commandName="$(basename -s .exe "$(echo "$exe" | tr "[A-Z]" "[a-z]")")"
|
||||
makeWrapper "${mono}/bin/mono \"$exe\"" "$out"/bin/"$commandName"
|
||||
done
|
||||
done
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
};
|
||||
in
|
||||
stdenv.mkDerivation (attrs // (builtins.removeAttrs attrsOrig [ "buildInputs" ] ))
|
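buildDotnetPackage above drives xbuild, copies the requested build outputs into $out/lib/dotnet/<baseName>, registers DLLs with pkg-config and wraps EXEs with mono. A minimal sketch of a package built with it follows; the project name, URL and hash are hypothetical placeholders, and it assumes the function is wired up through callPackage.

# Hypothetical package using buildDotnetPackage; all concrete values are placeholders.
{ buildDotnetPackage, fetchurl }:

buildDotnetPackage rec {
  baseName = "MyDotnetTool";
  version = "1.0";

  src = fetchurl {
    url = "https://example.org/MyDotnetTool-${version}.tar.gz";
    sha256 = "0000000000000000000000000000000000000000000000000000";
  };

  xBuildFiles = [ "MyDotnetTool.sln" ];   # when empty, xbuild runs once without an explicit project file
  outputFiles = [ "bin/Release/*" ];      # copied into $out/lib/dotnet/<baseName>
  # dllFiles and exeFiles keep their defaults: *.dll gains a pkg-config entry, *.exe a mono wrapper in $out/bin
}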
@ -0,0 +1,23 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
targetDir="$1"
|
||||
dllFullPath="$2"
|
||||
|
||||
dllVersion="$(monodis --assembly "$dllFullPath" | grep ^Version: | cut -f 2 -d : | xargs)"
|
||||
[ -z "$dllVersion" ] && echo "Defaulting dllVersion to 0.0.0" && dllVersion="0.0.0"
|
||||
dllFileName="$(basename $dllFullPath)"
|
||||
dllRootName="$(basename -s .dll $dllFileName)"
|
||||
targetPcFile="$targetDir"/"$dllRootName".pc
|
||||
|
||||
mkdir -p "$targetDir"
|
||||
|
||||
cat > $targetPcFile << EOF
|
||||
Libraries=$dllFullPath
|
||||
|
||||
Name: $dllRootName
|
||||
Description: $dllRootName
|
||||
Version: $dllVersion
|
||||
Libs: -r:$dllFileName
|
||||
EOF
|
||||
|
||||
echo "Created $targetPcFile"
|
18
pkgs/build-support/dotnetbuildhelpers/default.nix
Normal file
@ -0,0 +1,18 @@
|
||||
{ helperFunctions, mono, pkgconfig }:
|
||||
helperFunctions.runCommand
|
||||
"dotnetbuildhelpers"
|
||||
{ preferLocalBuild = true; }
|
||||
''
|
||||
target="$out/bin"
|
||||
mkdir -p "$target"
|
||||
|
||||
for script in ${./create-pkg-config-for-dll.sh} ${./patch-fsharp-targets.sh} ${./remove-duplicated-dlls.sh} ${./placate-nuget.sh} ${./placate-paket.sh}
|
||||
do
|
||||
scriptName="$(basename "$script" | cut -f 2- -d -)"
|
||||
cp -v "$script" "$target"/"$scriptName"
|
||||
chmod 755 "$target"/"$scriptName"
|
||||
patchShebangs "$target"/"$scriptName"
|
||||
substituteInPlace "$target"/"$scriptName" --replace pkg-config ${pkgconfig}/bin/pkg-config
|
||||
substituteInPlace "$target"/"$scriptName" --replace monodis ${mono}/bin/monodis
|
||||
done
|
||||
''
|
@ -0,0 +1,20 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Some project files look for F# targets in $(FSharpTargetsPath)
|
||||
# so it's a good idea to add something like this to your ~/.bash_profile:
|
||||
|
||||
# export FSharpTargetsPath=$(dirname $(which fsharpc))/../lib/mono/4.0/Microsoft.FSharp.Targets
|
||||
|
||||
# In build scripts, you would add something like this:
|
||||
|
||||
# export FSharpTargetsPath="${fsharp}/lib/mono/4.0/Microsoft.FSharp.Targets"
|
||||
|
||||
# However, some project files look for F# targets in the main Mono directory. When that happens,
|
||||
# patch the project files using this script so they will look in $(FSharpTargetsPath) instead.
|
||||
|
||||
echo "Patching F# targets in fsproj files..."
|
||||
|
||||
find -iname \*.fsproj -print -exec \
|
||||
sed --in-place=.bak \
|
||||
-e 's,<FSharpTargetsPath>\([^<]*\)</FSharpTargetsPath>,<FSharpTargetsPath Condition="Exists('\'\\1\'')">\1</FSharpTargetsPath>,'g \
|
||||
{} \;
|
7
pkgs/build-support/dotnetbuildhelpers/placate-nuget.sh
Normal file
@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
echo Placating Nuget in nuget.targets
|
||||
find -iname nuget.targets -print -exec sed --in-place=bak -e 's,mono --runtime[^<]*,true NUGET PLACATED BY buildDotnetPackage,g' {} \;
|
||||
|
||||
echo Just to be sure, replacing Nuget executables with empty files.
|
||||
find . -iname nuget.exe \! -size 0 -exec mv -v {} {}.bak \; -exec touch {} \;
|
7
pkgs/build-support/dotnetbuildhelpers/placate-paket.sh
Normal file
@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
echo Placating Paket in paket.targets
|
||||
find -iname paket.targets -print -exec sed --in-place=bak -e 's,mono --runtime[^<]*,true PAKET PLACATED BY buildDotnetPackage,g' {} \;
|
||||
|
||||
echo Just to be sure, replacing Paket executables with empty files.
|
||||
find . -iname paket\*.exe \! -size 0 -exec mv -v {} {}.bak \; -exec touch {} \;
|
@ -0,0 +1,22 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
IFS="
|
||||
"
|
||||
|
||||
for dll in $(find -iname \*.dll)
|
||||
do
|
||||
baseName="$(basename "$dll" | sed "s/.dll$//i")"
|
||||
if pkg-config "$baseName"
|
||||
then
|
||||
candidateDll="$(pkg-config "$baseName" --variable=Libraries)"
|
||||
|
||||
if diff "$dll" "$candidateDll" >/dev/null
|
||||
then
|
||||
echo "$dll is identical to $candidateDll. Substituting..."
|
||||
rm -vf "$dll"
|
||||
ln -sv "$candidateDll" "$dll"
|
||||
else
|
||||
echo "$dll and $candidateDll share the same name but have different contents, leaving alone."
|
||||
fi
|
||||
fi
|
||||
done
|
40
pkgs/build-support/fetchnuget/default.nix
Normal file
@ -0,0 +1,40 @@
|
||||
{ stdenv, fetchurl, buildDotnetPackage, unzip }:
|
||||
|
||||
attrs @
|
||||
{ baseName
|
||||
, version
|
||||
, url ? "https://www.nuget.org/api/v2/package/${baseName}/${version}"
|
||||
, sha256 ? ""
|
||||
, md5 ? ""
|
||||
, ...
|
||||
}:
|
||||
buildDotnetPackage ({
|
||||
src = fetchurl {
|
||||
inherit url sha256 md5;
|
||||
name = "${baseName}.${version}.zip";
|
||||
};
|
||||
|
||||
sourceRoot = ".";
|
||||
|
||||
buildInputs = [ unzip ];
|
||||
|
||||
phases = [ "unpackPhase" "installPhase" ];
|
||||
|
||||
preInstall = ''
|
||||
function traverseRename () {
|
||||
for e in *
|
||||
do
|
||||
t="$(echo "$e" | sed -e "s/%20/\ /g" -e "s/%2B/+/g")"
|
||||
[ "$t" != "$e" ] && mv -vn "$e" "$t"
|
||||
if [ -d "$t" ]
|
||||
then
|
||||
cd "$t"
|
||||
traverseRename
|
||||
cd ..
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
traverseRename
|
||||
'';
|
||||
} // attrs)
|
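fetchnuget above layers on buildDotnetPackage: it downloads the .nupkg for the given baseName and version from nuget.org (unless url is overridden), unzips it and renames percent-encoded file names before installing. A minimal sketch of a call site follows; the package id, version and hash are illustrative placeholders.

# Hypothetical fetchnuget call; the package id, version and sha256 are placeholders.
{ fetchnuget }:

fetchnuget {
  baseName = "Some.NuGet.Package";
  version = "1.0.0";
  sha256 = "0000000000000000000000000000000000000000000000000000";
}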
@ -339,7 +339,6 @@ rec {
|
||||
http://cran.hafro.is/
|
||||
http://ftp.iitm.ac.in/cran/
|
||||
http://cran.repo.bppt.go.id/
|
||||
http://cran.unej.ac.id/
|
||||
http://cran.um.ac.ir/
|
||||
http://ftp.heanet.ie/mirrors/cran.r-project.org/
|
||||
http://cran.mirror.garr.it/mirrors/CRAN/
|
||||
|
@ -56,6 +56,6 @@ stdenv.mkDerivation rec {
|
||||
downloadPage = https://www.donationcoder.com/Software/Jibz/Dina/;
|
||||
license = licenses.free;
|
||||
maintainers = [ maintainers.prikhi ];
|
||||
platforms = platforms.linux;
|
||||
platforms = platforms.unix;
|
||||
};
|
||||
}
|
||||
|
47
pkgs/data/fonts/tewi/default.nix
Normal file
@ -0,0 +1,47 @@
|
||||
{stdenv, fetchgit, bdftopcf, mkfontdir, mkfontscale}:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
date = "2015-06-07";
|
||||
name = "tewi-font-${date}";
|
||||
|
||||
src = fetchgit {
|
||||
url = "https://github.com/lucy/tewi-font";
|
||||
rev = "ff930e66ae471da4fdc226ffe65fd1ccd13d4a69";
|
||||
sha256 = "d641b911cc2132a00c311e3d978c1ca454b0fb3bc3ff4b4742b9f765b765a94b";
|
||||
};
|
||||
|
||||
buildInputs = [ bdftopcf mkfontdir mkfontscale ];
|
||||
buildPhase = ''
|
||||
for i in *.bdf; do
|
||||
bdftopcf -o ''${i/bdf/pcf} $i
|
||||
done
|
||||
|
||||
gzip *.pcf
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
fontDir="$out/share/fonts/misc"
|
||||
mkdir -p "$fontDir"
|
||||
mv *.pcf.gz "$fontDir"
|
||||
|
||||
cd "$fontDir"
|
||||
mkfontdir
|
||||
mkfontscale
|
||||
'';
|
||||
|
||||
meta = with stdenv.lib; {
|
||||
description = "A nice bitmap font, readable even at small sizes";
|
||||
longDescription = ''
|
||||
Tewi is a bitmap font, readable even at very small font sizes. This is
|
||||
particularly useful while programming, to fit a lot of code on your
|
||||
screen.
|
||||
'';
|
||||
homepage = "https://github.com/lucy/tewi-font";
|
||||
license = {
|
||||
fullName = "GNU General Public License with a font exception";
|
||||
url = "https://www.gnu.org/licenses/gpl-faq.html#FontException";
|
||||
};
|
||||
maintainers = [ maintainers.fro_ozen ];
|
||||
platforms = platforms.unix;
|
||||
};
|
||||
}
|
24
pkgs/data/misc/nixos-artwork/default.nix
Normal file
@ -0,0 +1,24 @@
|
||||
{ stdenv, fetchFromGitHub }:
|
||||
|
||||
stdenv.mkDerivation {
|
||||
name = "nixos-artwork-2015-02-27";
|
||||
# Remember to check the default lightdm wallpaper when updating
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "NixOS";
|
||||
repo = "nixos-artwork";
|
||||
rev = "e71b6846023919136795ede22b16d73b2cf1693d";
|
||||
sha256 = "167yvhm2qy7qgyrqqs4hv98mmlarhgxpcsyv0r8a9g3vkblfdczb";
|
||||
};
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out/share/artwork
|
||||
cp -r * $out/share/artwork
|
||||
find $out -name \*.xcf -exec rm {} \;
|
||||
'';
|
||||
|
||||
meta = with stdenv.lib; {
|
||||
homepage = "https://github.com/NixOS/nixos-artwork";
|
||||
platforms = platforms.all;
|
||||
};
|
||||
}
|
@ -1,34 +1,33 @@
|
||||
{ stdenv, fetchFromGitHub, which, automake113x, intltool, pkgconfig, libtool, makeWrapper,
|
||||
dbus_glib, libcanberra, gst_all_1, upower, vala, gnome3, gtk3, gst_plugins_base,
|
||||
glib, gobjectIntrospection, hicolor_icon_theme
|
||||
dbus_glib, libcanberra, gst_all_1, vala, gnome3, gtk3, gst_plugins_base,
|
||||
glib, gobjectIntrospection, hicolor_icon_theme, telepathy_glib
|
||||
}:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
rev = "0.10.3";
|
||||
name = "gnome-shell-pomodoro-${rev}-61df3fa";
|
||||
rev = "624945d";
|
||||
name = "gnome-shell-pomodoro-${gnome3.version}-${rev}";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "codito";
|
||||
repo = "gnome-shell-pomodoro";
|
||||
repo = "gnome-pomodoro";
|
||||
rev = "${rev}";
|
||||
sha256 = "0i0glmijalppb5hdb1xd6xnmv824l2w831rpkqmhxi0iqbvaship";
|
||||
sha256 = "0vjy95zvd309n8g13fa80qhqlv7k6wswhrjw7gddxrnmr662xdqq";
|
||||
};
|
||||
|
||||
configureScript = ''./autogen.sh'';
|
||||
|
||||
buildInputs = [
|
||||
which automake113x intltool glib gobjectIntrospection pkgconfig libtool
|
||||
makeWrapper dbus_glib libcanberra upower vala gst_all_1.gstreamer
|
||||
makeWrapper dbus_glib libcanberra vala gst_all_1.gstreamer
|
||||
gst_all_1.gst-plugins-base gst_all_1.gst-plugins-good
|
||||
gnome3.gsettings_desktop_schemas gnome3.gnome_desktop
|
||||
gnome3.gnome_common gnome3.gnome_shell hicolor_icon_theme gtk3
|
||||
telepathy_glib
|
||||
];
|
||||
|
||||
preBuild = ''
|
||||
sed -i \
|
||||
-e 's|/usr\(/share/gir-1.0/UPowerGlib\)|${upower}\1|' \
|
||||
-e 's|/usr\(/share/gir-1.0/GnomeDesktop\)|${gnome3.gnome_desktop}\1|' \
|
||||
vapi/Makefile
|
||||
sed -i 's|\$(INTROSPECTION_GIRDIR)|${gnome3.gnome_desktop}/share/gir-1.0|' \
|
||||
vapi/Makefile
|
||||
'';
|
||||
|
||||
preFixup = ''
|
||||
@ -42,7 +41,7 @@ stdenv.mkDerivation rec {
|
||||
description =
|
||||
"Personal information management application that provides integrated " +
|
||||
"mail, calendaring and address book functionality";
|
||||
maintainers = with maintainers; [ DamienCassou ];
|
||||
maintainers = with maintainers; [ DamienCassou jgeerds ];
|
||||
license = licenses.gpl3;
|
||||
platforms = platforms.linux;
|
||||
};
|
||||
|
@ -11,10 +11,12 @@ stdenv.mkDerivation rec {
|
||||
};
|
||||
|
||||
buildInputs = [
|
||||
pkgconfig intltool gnupg p11_kit glib gobjectIntrospection libxslt
|
||||
pkgconfig intltool gnupg glib gobjectIntrospection libxslt
|
||||
libgcrypt libtasn1 dbus_glib gtk pango gdk_pixbuf atk makeWrapper vala
|
||||
];
|
||||
|
||||
propagatedBuildInputs = [ p11_kit ];
|
||||
|
||||
#doCheck = true;
|
||||
|
||||
preFixup = ''
|
||||
|
@ -1,4 +1,6 @@
|
||||
{ stdenv, fetchurl, mono, pkgconfig, autoconf, automake, which }:
|
||||
# Temporarily avoid dependency on dotnetbuildhelpers to avoid rebuilding many times while working on it
|
||||
|
||||
{ stdenv, fetchurl, mono, pkgconfig, dotnetbuildhelpers, autoconf, automake, which }:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
name = "fsharp-${version}";
|
||||
@ -9,7 +11,7 @@ stdenv.mkDerivation rec {
|
||||
sha256 = "16kqgdx0y0lmxv59mc4g7l5ll60nixg5b8bg07vxfnqrf7i6dffd";
|
||||
};
|
||||
|
||||
buildInputs = [ mono pkgconfig autoconf automake which ];
|
||||
buildInputs = [ mono pkgconfig dotnetbuildhelpers autoconf automake which ];
|
||||
configurePhase = ''
|
||||
substituteInPlace ./autogen.sh --replace "/usr/bin/env sh" "/bin/sh"
|
||||
./autogen.sh --prefix $out
|
||||
@ -23,6 +25,10 @@ stdenv.mkDerivation rec {
|
||||
substituteInPlace $out/bin/fsharpiAnyCpu --replace " mono " " ${mono}/bin/mono "
|
||||
ln -s $out/bin/fsharpc $out/bin/fsc
|
||||
ln -s $out/bin/fsharpi $out/bin/fsi
|
||||
for dll in "$out/lib/mono/4.5"/FSharp*.dll
|
||||
do
|
||||
create-pkg-config-for-dll.sh "$out/lib/pkgconfig" "$dll"
|
||||
done
|
||||
'';
|
||||
|
||||
# To fix this error when running:
|
||||
|
@ -17,14 +17,14 @@ let
|
||||
in
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
version = "7.11.20150402";
|
||||
version = "7.11.20150607";
|
||||
name = "ghc-${version}";
|
||||
rev = "47f821a1a24553dc29b9581b1a259a9b1394c955";
|
||||
rev = "89223ce1340654455a9f3aa9cbf25f30884227fd";
|
||||
|
||||
src = fetchgit {
|
||||
url = "git://git.haskell.org/ghc.git";
|
||||
inherit rev;
|
||||
sha256 = "111a2z6bgn966g04a9n2ns9n2a401rd0zqgndznn2w4fv8a4qzgj";
|
||||
sha256 = "1qsv2n5js21kqphq92xlyc91f11fnr9sh1glqzsirc8xr60dg5cs";
|
||||
};
|
||||
|
||||
postUnpack = ''
|
||||
|
@ -69,11 +69,11 @@ stdenv.mkDerivation {
|
||||
cp ./misc/emacs/* $out/share/emacs/site-lisp/
|
||||
'';
|
||||
|
||||
meta = {
|
||||
meta = with stdenv.lib; {
|
||||
homepage = http://golang.org/;
|
||||
description = "The Go Programming language";
|
||||
license = "BSD";
|
||||
maintainers = with stdenv.lib.maintainers; [ zef ];
|
||||
platforms = stdenv.lib.platforms.darwin;
|
||||
license = licenses.bsd3;
|
||||
maintainers = with maintainers; [ zef ];
|
||||
platforms = platforms.darwin;
|
||||
};
|
||||
}
|
||||
|
@ -90,12 +90,12 @@ stdenv.mkDerivation {
|
||||
cp ./misc/emacs/* $out/share/emacs/site-lisp/
|
||||
'';
|
||||
|
||||
meta = {
|
||||
meta = with stdenv.lib; {
|
||||
branch = "1.1";
|
||||
homepage = http://golang.org/;
|
||||
description = "The Go Programming language";
|
||||
license = "BSD";
|
||||
maintainers = with stdenv.lib.maintainers; [ pierron viric ];
|
||||
platforms = stdenv.lib.platforms.linux;
|
||||
license = licenses.bsd3;
|
||||
maintainers = with maintainers; [ pierron viric ];
|
||||
platforms = platforms.linux;
|
||||
};
|
||||
}
|
||||
|
@ -81,12 +81,12 @@ stdenv.mkDerivation {
|
||||
cp ./misc/emacs/* $out/share/emacs/site-lisp/
|
||||
'';
|
||||
|
||||
meta = {
|
||||
meta = with stdenv.lib; {
|
||||
branch = "1.2";
|
||||
homepage = http://golang.org/;
|
||||
description = "The Go Programming language";
|
||||
license = "BSD";
|
||||
maintainers = with stdenv.lib.maintainers; [ pierron viric ];
|
||||
platforms = stdenv.lib.platforms.linux;
|
||||
license = licenses.bsd3;
|
||||
maintainers = with maintainers; [ pierron viric ];
|
||||
platforms = platforms.linux;
|
||||
};
|
||||
}
|
||||
|
@ -102,12 +102,12 @@ stdenv.mkDerivation {
|
||||
|
||||
setupHook = ./setup-hook.sh;
|
||||
|
||||
meta = {
|
||||
meta = with stdenv.lib; {
|
||||
branch = "1.3";
|
||||
homepage = http://golang.org/;
|
||||
description = "The Go Programming language";
|
||||
license = "BSD";
|
||||
maintainers = with stdenv.lib.maintainers; [ cstrahan ];
|
||||
platforms = stdenv.lib.platforms.linux ++ stdenv.lib.platforms.darwin;
|
||||
license = licenses.bsd3;
|
||||
maintainers = with maintainers; [ cstrahan ];
|
||||
platforms = platforms.linux ++ platforms.darwin;
|
||||
};
|
||||
}
|
||||
|
@ -88,12 +88,12 @@ stdenv.mkDerivation rec {
|
||||
|
||||
setupHook = ./setup-hook.sh;
|
||||
|
||||
meta = {
|
||||
meta = with stdenv.lib; {
|
||||
branch = "1.4";
|
||||
homepage = http://golang.org/;
|
||||
description = "The Go Programming language";
|
||||
license = "BSD";
|
||||
maintainers = with stdenv.lib.maintainers; [ cstrahan wkennington ];
|
||||
platforms = stdenv.lib.platforms.linux ++ stdenv.lib.platforms.darwin;
|
||||
license = licenses.bsd3;
|
||||
maintainers = with maintainers; [ cstrahan wkennington ];
|
||||
platforms = platforms.linux ++ platforms.darwin;
|
||||
};
|
||||
}
|
||||
|
@ -1,17 +0,0 @@
|
||||
diff --git openjdk-orig/jdk/make/sun/awt/mawt.gmk openjdk/jdk/make/sun/awt/mawt.gmk
|
||||
index c6ab06d..23a14da 100644
|
||||
--- openjdk-orig/jdk/make/sun/awt/mawt.gmk
|
||||
+++ openjdk/jdk/make/sun/awt/mawt.gmk
|
||||
@@ -270,12 +270,6 @@ LDFLAGS += -L$(MOTIF_LIB) -L$(OPENWIN_LIB)
|
||||
endif # !HEADLESS
|
||||
endif # PLATFORM
|
||||
|
||||
-ifeq ($(PLATFORM), linux)
|
||||
- # Checking for the X11/extensions headers at the additional location
|
||||
- CPPFLAGS += -I$(firstword $(wildcard $(OPENWIN_HOME)/include/X11/extensions) \
|
||||
- $(wildcard /usr/include/X11/extensions))
|
||||
-endif
|
||||
-
|
||||
ifeq ($(PLATFORM), macosx)
|
||||
CPPFLAGS += -I$(OPENWIN_HOME)/include/X11/extensions \
|
||||
-I$(OPENWIN_HOME)/include
|
@ -1,181 +0,0 @@
|
||||
{ stdenv, fetchurl, jdk, ant, wget, zip, unzip, cpio, file, libxslt
|
||||
, xorg, zlib, pkgconfig, libjpeg, libpng, giflib, lcms2, gtk2, kerberos, attr
|
||||
, alsaLib, procps, automake, autoconf, cups, which, perl, coreutils, binutils
|
||||
, cacert, setJavaClassPath
|
||||
}:
|
||||
|
||||
let
|
||||
|
||||
/**
|
||||
* The JRE libraries are in directories that depend on the CPU.
|
||||
*/
|
||||
architecture =
|
||||
if stdenv.system == "i686-linux" then
|
||||
"i386"
|
||||
else if stdenv.system == "x86_64-linux" then
|
||||
"amd64"
|
||||
else
|
||||
throw "icedtea requires i686-linux or x86_64 linux";
|
||||
|
||||
srcInfo = (import ./sources.nix).icedtea7;
|
||||
|
||||
pkgName = "icedtea7-${srcInfo.version}";
|
||||
|
||||
defSrc = name:
|
||||
with (builtins.getAttr name srcInfo.bundles); fetchurl {
|
||||
inherit url sha256;
|
||||
name = "${pkgName}-${baseNameOf url}";
|
||||
};
|
||||
|
||||
bundleNames = builtins.attrNames srcInfo.bundles;
|
||||
|
||||
sources = stdenv.lib.genAttrs bundleNames (name: defSrc name);
|
||||
|
||||
bundleFun = name: "--with-${name}-src-zip=" + builtins.getAttr name sources;
|
||||
bundleFlags = map bundleFun bundleNames;
|
||||
|
||||
icedtea = stdenv.mkDerivation (with srcInfo; {
|
||||
name = pkgName;
|
||||
|
||||
src = fetchurl {
|
||||
inherit url sha256;
|
||||
};
|
||||
|
||||
outputs = [ "out" "jre" ];
|
||||
|
||||
# TODO: Probably some more dependencies should be on this list but are being
|
||||
# propagated instead
|
||||
buildInputs = [
|
||||
jdk ant wget zip unzip cpio file libxslt pkgconfig procps automake
|
||||
autoconf which perl coreutils xorg.lndir
|
||||
zlib libjpeg libpng giflib lcms2 kerberos attr alsaLib cups
|
||||
xorg.libX11 xorg.libXtst gtk2
|
||||
];
|
||||
|
||||
configureFlags = bundleFlags ++ [
|
||||
"--disable-bootstrap"
|
||||
"--disable-downloading"
|
||||
|
||||
"--without-rhino"
|
||||
"--with-pax=paxctl"
|
||||
"--with-jdk-home=${jdk.home}"
|
||||
];
|
||||
|
||||
preConfigure = ''
|
||||
unset JAVA_HOME JDK_HOME CLASSPATH JAVAC JAVACFLAGS
|
||||
|
||||
substituteInPlace javac.in --replace '#!/usr/bin/perl' '#!${perl}/bin/perl'
|
||||
substituteInPlace javah.in --replace '#!/usr/bin/perl' '#!${perl}/bin/perl'
|
||||
|
||||
./autogen.sh
|
||||
'';
|
||||
|
||||
preBuild = ''
|
||||
make stamps/extract.stamp
|
||||
|
||||
substituteInPlace openjdk/jdk/make/common/shared/Defs-utils.gmk --replace '/bin/echo' '${coreutils}/bin/echo'
|
||||
substituteInPlace openjdk/corba/make/common/shared/Defs-utils.gmk --replace '/bin/echo' '${coreutils}/bin/echo'
|
||||
|
||||
patch -p0 < ${./cppflags-include-fix.patch}
|
||||
patch -p0 < ${./fix-java-home.patch}
|
||||
'';
|
||||
|
||||
NIX_NO_SELF_RPATH = true;
|
||||
|
||||
makeFlags = [
|
||||
"ALSA_INCLUDE=${alsaLib}/include/alsa/version.h"
|
||||
"ALT_UNIXCOMMAND_PATH="
|
||||
"ALT_USRBIN_PATH="
|
||||
"ALT_DEVTOOLS_PATH="
|
||||
"ALT_COMPILER_PATH="
|
||||
"ALT_CUPS_HEADERS_PATH=${cups}/include"
|
||||
"ALT_OBJCOPY=${binutils}/bin/objcopy"
|
||||
"SORT=${coreutils}/bin/sort"
|
||||
"UNLIMITED_CRYPTO=1"
|
||||
];
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out/lib/icedtea $out/share $jre/lib/icedtea
|
||||
|
||||
cp -av openjdk.build/j2sdk-image/* $out/lib/icedtea
|
||||
|
||||
# Move some stuff to top-level.
|
||||
mv $out/lib/icedtea/include $out/include
|
||||
mv $out/lib/icedtea/man $out/share/man
|
||||
|
||||
# jni.h expects jni_md.h to be in the header search path.
|
||||
ln -s $out/include/linux/*_md.h $out/include/
|
||||
|
||||
# Remove some broken manpages.
|
||||
rm -rf $out/share/man/ja*
|
||||
|
||||
# Remove crap from the installation.
|
||||
rm -rf $out/lib/icedtea/demo $out/lib/icedtea/sample
|
||||
|
||||
# Move the JRE to a separate output.
|
||||
mv $out/lib/icedtea/jre $jre/lib/icedtea/
|
||||
mkdir $out/lib/icedtea/jre
|
||||
lndir $jre/lib/icedtea/jre $out/lib/icedtea/jre
|
||||
|
||||
# The following files cannot be symlinked, as it seems to violate Java security policies
|
||||
rm $out/lib/icedtea/jre/lib/ext/*
|
||||
cp $jre/lib/icedtea/jre/lib/ext/* $out/lib/icedtea/jre/lib/ext/
|
||||
|
||||
rm -rf $out/lib/icedtea/jre/bin
|
||||
ln -s $out/lib/icedtea/bin $out/lib/icedtea/jre/bin
|
||||
|
||||
# Remove duplicate binaries.
|
||||
for i in $(cd $out/lib/icedtea/bin && echo *); do
|
||||
if [ "$i" = java ]; then continue; fi
|
||||
if cmp -s $out/lib/icedtea/bin/$i $jre/lib/icedtea/jre/bin/$i; then
|
||||
ln -sfn $jre/lib/icedtea/jre/bin/$i $out/lib/icedtea/bin/$i
|
||||
fi
|
||||
done
|
||||
|
||||
# Generate certificates.
|
||||
pushd $jre/lib/icedtea/jre/lib/security
|
||||
rm cacerts
|
||||
perl ${./generate-cacerts.pl} $jre/lib/icedtea/jre/bin/keytool ${cacert}/etc/ssl/certs/ca-bundle.crt
|
||||
popd
|
||||
|
||||
ln -s $out/lib/icedtea/bin $out/bin
|
||||
ln -s $jre/lib/icedtea/jre/bin $jre/bin
|
||||
'';
|
||||
|
||||
# FIXME: this is unnecessary once the multiple-outputs branch is merged.
|
||||
preFixup = ''
|
||||
prefix=$jre stripDirs "$stripDebugList" "''${stripDebugFlags:--S}"
|
||||
patchELF $jre
|
||||
propagatedNativeBuildInputs+=" $jre"
|
||||
|
||||
# Propagate the setJavaClassPath setup hook from the JRE so that
|
||||
# any package that depends on the JRE has $CLASSPATH set up
|
||||
# properly.
|
||||
mkdir -p $jre/nix-support
|
||||
echo -n "${setJavaClassPath}" > $jre/nix-support/propagated-native-build-inputs
|
||||
|
||||
# Set JAVA_HOME automatically.
|
||||
mkdir -p $out/nix-support
|
||||
cat <<EOF > $out/nix-support/setup-hook
|
||||
if [ -z "\$JAVA_HOME" ]; then export JAVA_HOME=$out/lib/icedtea; fi
|
||||
EOF
|
||||
'';
|
||||
|
||||
meta = {
|
||||
description = "Free Java development kit based on OpenJDK 7.0 and the IcedTea project";
|
||||
longDescription = ''
|
||||
Free Java environment based on OpenJDK 7.0 and the IcedTea project.
|
||||
- Full Java runtime environment
|
||||
- Needed for executing Java Webstart programs and the free Java web browser plugin.
|
||||
'';
|
||||
homepage = http://icedtea.classpath.org;
|
||||
maintainers = with stdenv.lib.maintainers; [ wizeman ];
|
||||
platforms = stdenv.lib.platforms.linux;
|
||||
};
|
||||
|
||||
passthru = {
|
||||
inherit architecture;
|
||||
home = "${icedtea}/lib/icedtea";
|
||||
};
|
||||
});
|
||||
in icedtea
|
@ -1,17 +0,0 @@
|
||||
diff -ru -x '*~' openjdk-orig/hotspot/src/os/linux/vm/os_linux.cpp openjdk/hotspot/src/os/linux/vm/os_linux.cpp
|
||||
--- openjdk-orig/hotspot/src/os/linux/vm/os_linux.cpp 2013-09-06 20:22:03.000000000 +0200
|
||||
+++ openjdk/hotspot/src/os/linux/vm/os_linux.cpp 2014-01-24 22:44:08.223857012 +0100
|
||||
@@ -2358,12 +2358,10 @@
|
||||
CAST_FROM_FN_PTR(address, os::jvm_path),
|
||||
dli_fname, sizeof(dli_fname), NULL);
|
||||
assert(ret, "cannot locate libjvm");
|
||||
char *rp = NULL;
|
||||
if (ret && dli_fname[0] != '\0') {
|
||||
- rp = realpath(dli_fname, buf);
|
||||
+ snprintf(buf, buflen, "%s", dli_fname);
|
||||
}
|
||||
- if (rp == NULL)
|
||||
- return;
|
||||
|
||||
if (Arguments::created_by_gamma_launcher()) {
|
||||
// Support for the gamma launcher. Typical value for buf is
|
@ -1,366 +0,0 @@
|
||||
#!/usr/bin/perl
|
||||
|
||||
# Copyright (C) 2007, 2008 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
|
||||
# generate-cacerts.pl generates a JKS keystore named 'cacerts' from
|
||||
# OpenSSL's certificate bundle using OpenJDK's keytool.
|
||||
|
||||
# First extract each of OpenSSL's bundled certificates into its own
|
||||
# aliased filename.
|
||||
|
||||
# Downloaded from http://cvs.fedoraproject.org/viewvc/rpms/ca-certificates/F-12/generate-cacerts.pl?revision=1.2
|
||||
# Check and prevention of duplicate aliases added by Vlastimil Babka <caster@gentoo.org>
|
||||
|
||||
$file = $ARGV[1];
|
||||
open(CERTS, $file);
|
||||
@certs = <CERTS>;
|
||||
close(CERTS);
|
||||
|
||||
$pem_file_count = 0;
|
||||
$in_cert_block = 0;
|
||||
$write_current_cert = 1;
|
||||
foreach $cert (@certs)
|
||||
{
|
||||
if ($cert =~ /Issuer: /)
|
||||
{
|
||||
$_ = $cert;
|
||||
if ($cert =~ /personal-freemail/)
|
||||
{
|
||||
$cert_alias = "thawtepersonalfreemailca";
|
||||
}
|
||||
elsif ($cert =~ /personal-basic/)
|
||||
{
|
||||
$cert_alias = "thawtepersonalbasicca";
|
||||
}
|
||||
elsif ($cert =~ /personal-premium/)
|
||||
{
|
||||
$cert_alias = "thawtepersonalpremiumca";
|
||||
}
|
||||
elsif ($cert =~ /server-certs/)
|
||||
{
|
||||
$cert_alias = "thawteserverca";
|
||||
}
|
||||
elsif ($cert =~ /premium-server/)
|
||||
{
|
||||
$cert_alias = "thawtepremiumserverca";
|
||||
}
|
||||
elsif ($cert =~ /Class 1 Public Primary Certification Authority$/)
|
||||
{
|
||||
$cert_alias = "verisignclass1ca";
|
||||
}
|
||||
elsif ($cert =~ /Class 1 Public Primary Certification Authority - G2/)
|
||||
{
|
||||
$cert_alias = "verisignclass1g2ca";
|
||||
}
|
||||
elsif ($cert =~
|
||||
/VeriSign Class 1 Public Primary Certification Authority - G3/)
|
||||
{
|
||||
$cert_alias = "verisignclass1g3ca";
|
||||
}
|
||||
elsif ($cert =~ /Class 2 Public Primary Certification Authority$/)
|
||||
{
|
||||
$cert_alias = "verisignclass2ca";
|
||||
}
|
||||
elsif ($cert =~ /Class 2 Public Primary Certification Authority - G2/)
|
||||
{
|
||||
$cert_alias = "verisignclass2g2ca";
|
||||
}
|
||||
elsif ($cert =~
|
||||
/VeriSign Class 2 Public Primary Certification Authority - G3/)
|
||||
{
|
||||
$cert_alias = "verisignclass2g3ca";
|
||||
}
|
||||
elsif ($cert =~ /Class 3 Public Primary Certification Authority$/)
|
||||
{
|
||||
$cert_alias = "verisignclass3ca";
|
||||
}
|
||||
# Version 1 of Class 3 Public Primary Certification Authority
|
||||
# - G2 is added. Version 3 is excluded. See below.
|
||||
elsif ($cert =~
|
||||
/VeriSign Class 3 Public Primary Certification Authority - G3/)
|
||||
{
|
||||
$cert_alias = "verisignclass3g3ca";
|
||||
}
|
||||
elsif ($cert =~
|
||||
/RSA Data Security.*Secure Server Certification Authority/)
|
||||
{
|
||||
$cert_alias = "verisignserverca";
|
||||
}
|
||||
elsif ($cert =~ /GTE CyberTrust Global Root/)
|
||||
{
|
||||
$cert_alias = "gtecybertrustglobalca";
|
||||
}
|
||||
elsif ($cert =~ /Baltimore CyberTrust Root/)
|
||||
{
|
||||
$cert_alias = "baltimorecybertrustca";
|
||||
}
|
||||
elsif ($cert =~ /www.entrust.net\/Client_CA_Info\/CPS/)
|
||||
{
|
||||
$cert_alias = "entrustclientca";
|
||||
}
|
||||
elsif ($cert =~ /www.entrust.net\/GCCA_CPS/)
|
||||
{
|
||||
$cert_alias = "entrustglobalclientca";
|
||||
}
|
||||
elsif ($cert =~ /www.entrust.net\/CPS_2048/)
|
||||
{
|
||||
$cert_alias = "entrust2048ca";
|
||||
}
|
||||
elsif ($cert =~ /www.entrust.net\/CPS /)
|
||||
{
|
||||
$cert_alias = "entrustsslca";
|
||||
}
|
||||
elsif ($cert =~ /www.entrust.net\/SSL_CPS/)
|
||||
{
|
||||
$cert_alias = "entrustgsslca";
|
||||
}
|
||||
elsif ($cert =~ /The Go Daddy Group/)
|
||||
{
|
||||
$cert_alias = "godaddyclass2ca";
|
||||
}
|
||||
elsif ($cert =~ /Starfield Class 2 Certification Authority/)
|
||||
{
|
||||
$cert_alias = "starfieldclass2ca";
|
||||
}
|
||||
elsif ($cert =~ /ValiCert Class 2 Policy Validation Authority/)
|
||||
{
|
||||
$cert_alias = "valicertclass2ca";
|
||||
}
|
||||
elsif ($cert =~ /GeoTrust Global CA$/)
|
||||
{
|
||||
$cert_alias = "geotrustglobalca";
|
||||
}
|
||||
elsif ($cert =~ /Equifax Secure Certificate Authority/)
|
||||
{
|
||||
$cert_alias = "equifaxsecureca";
|
||||
}
|
||||
elsif ($cert =~ /Equifax Secure eBusiness CA-1/)
|
||||
{
|
||||
$cert_alias = "equifaxsecureebusinessca1";
|
||||
}
|
||||
elsif ($cert =~ /Equifax Secure eBusiness CA-2/)
|
||||
{
|
||||
$cert_alias = "equifaxsecureebusinessca2";
|
||||
}
|
||||
elsif ($cert =~ /Equifax Secure Global eBusiness CA-1/)
|
||||
{
|
||||
$cert_alias = "equifaxsecureglobalebusinessca1";
|
||||
}
|
||||
elsif ($cert =~ /Sonera Class1 CA/)
|
||||
{
|
||||
$cert_alias = "soneraclass1ca";
|
||||
}
|
||||
elsif ($cert =~ /Sonera Class2 CA/)
|
||||
{
|
||||
$cert_alias = "soneraclass2ca";
|
||||
}
|
||||
elsif ($cert =~ /AAA Certificate Services/)
|
||||
{
|
||||
$cert_alias = "comodoaaaca";
|
||||
}
|
||||
elsif ($cert =~ /AddTrust Class 1 CA Root/)
|
||||
{
|
||||
$cert_alias = "addtrustclass1ca";
|
||||
}
|
||||
elsif ($cert =~ /AddTrust External CA Root/)
|
||||
{
|
||||
$cert_alias = "addtrustexternalca";
|
||||
}
|
||||
elsif ($cert =~ /AddTrust Qualified CA Root/)
|
||||
{
|
||||
$cert_alias = "addtrustqualifiedca";
|
||||
}
|
||||
elsif ($cert =~ /UTN-USERFirst-Hardware/)
|
||||
{
|
||||
$cert_alias = "utnuserfirsthardwareca";
|
||||
}
|
||||
elsif ($cert =~ /UTN-USERFirst-Client Authentication and Email/)
|
||||
{
|
||||
$cert_alias = "utnuserfirstclientauthemailca";
|
||||
}
|
||||
elsif ($cert =~ /UTN - DATACorp SGC/)
|
||||
{
|
||||
$cert_alias = "utndatacorpsgcca";
|
||||
}
|
||||
elsif ($cert =~ /UTN-USERFirst-Object/)
|
||||
{
|
||||
$cert_alias = "utnuserfirstobjectca";
|
||||
}
|
||||
elsif ($cert =~ /America Online Root Certification Authority 1/)
|
||||
{
|
||||
$cert_alias = "aolrootca1";
|
||||
}
|
||||
elsif ($cert =~ /DigiCert Assured ID Root CA/)
|
||||
{
|
||||
$cert_alias = "digicertassuredidrootca";
|
||||
}
|
||||
elsif ($cert =~ /DigiCert Global Root CA/)
|
||||
{
|
||||
$cert_alias = "digicertglobalrootca";
|
||||
}
|
||||
elsif ($cert =~ /DigiCert High Assurance EV Root CA/)
|
||||
{
|
||||
$cert_alias = "digicerthighassuranceevrootca";
|
||||
}
|
||||
elsif ($cert =~ /GlobalSign Root CA$/)
|
||||
{
|
||||
$cert_alias = "globalsignca";
|
||||
}
|
||||
elsif ($cert =~ /GlobalSign Root CA - R2/)
|
||||
{
|
||||
$cert_alias = "globalsignr2ca";
|
||||
}
|
||||
elsif ($cert =~ /Elektronik.*Kas.*2005/)
|
||||
{
|
||||
$cert_alias = "extra-elektronikkas2005";
|
||||
}
|
||||
elsif ($cert =~ /Elektronik/)
|
||||
{
|
||||
$cert_alias = "extra-elektronik2005";
|
||||
}
|
||||
# Mozilla does not provide these certificates:
|
||||
# baltimorecodesigningca
|
||||
# gtecybertrust5ca
|
||||
# trustcenterclass2caii
|
||||
# trustcenterclass4caii
|
||||
# trustcenteruniversalcai
|
||||
else
|
||||
{
|
||||
# Generate an alias using the OU and CN attributes of the
|
||||
# Issuer field if both are present, otherwise use only the
|
||||
# CN attribute. The Issuer field must have either the OU
|
||||
# or the CN attribute.
|
||||
$_ = $cert;
|
||||
if ($cert =~ /OU=/)
|
||||
{
|
||||
s/Issuer:.*?OU=//;
|
||||
# Remove other occurrences of OU=.
|
||||
s/OU=.*CN=//;
|
||||
# Remove CN= if there were not other occurrences of OU=.
|
||||
s/CN=//;
|
||||
s/\/emailAddress.*//;
|
||||
s/Certificate Authority/ca/g;
|
||||
s/Certification Authority/ca/g;
|
||||
}
|
||||
elsif ($cert =~ /CN=/)
|
||||
{
|
||||
s/Issuer:.*CN=//;
|
||||
s/\/emailAddress.*//;
|
||||
s/Certificate Authority/ca/g;
|
||||
s/Certification Authority/ca/g;
|
||||
}
|
||||
s/\W//g;
|
||||
tr/A-Z/a-z/;
|
||||
$cert_alias = "extra-$_";
|
||||
|
||||
}
|
||||
while (-e "$cert_alias.pem")
|
||||
{
|
||||
$cert_alias = "$cert_alias" . "_";
|
||||
}
|
||||
}
|
||||
# When it attempts to parse:
|
||||
#
|
||||
# Class 3 Public Primary Certification Authority - G2, Version 3
|
||||
#
|
||||
# keytool says:
|
||||
#
|
||||
# #2: ObjectId: 1.3.6.1.5.5.7.1.1 Criticality=false
|
||||
# Unparseable AuthorityInfoAccess extension due to
|
||||
# java.io.IOException: Invalid encoding of URI
|
||||
#
|
||||
# If we do not exclude this file
|
||||
# openjdk/jdk/test/lib/security/cacerts/VerifyCACerts.java fails
|
||||
# on this cert, printing:
|
||||
#
|
||||
# Couldn't verify: java.security.SignatureException: Signature
|
||||
# does not match.
|
||||
#
|
||||
elsif ($cert =~
|
||||
/A6:0F:34:C8:62:6C:81:F6:8B:F7:7D:A9:F6:67:58:8A:90:3F:7D:36/)
|
||||
{
|
||||
$write_current_cert = 0;
|
||||
$pem_file_count--;
|
||||
}
|
||||
elsif ($cert eq "-----BEGIN CERTIFICATE-----\n")
|
||||
{
|
||||
$_ = $cert;
|
||||
s/\W//g;
|
||||
tr/A-Z/a-z/;
|
||||
$cert_alias = "extra-$_";
|
||||
while (-e "$cert_alias.pem")
|
||||
{
|
||||
$cert_alias = "$cert_alias" . "_";
|
||||
}
|
||||
if ($in_cert_block != 0)
|
||||
{
|
||||
die "$file is malformed.";
|
||||
}
|
||||
$in_cert_block = 1;
|
||||
if ($write_current_cert == 1)
|
||||
{
|
||||
$pem_file_count++;
|
||||
if (-e "$cert_alias.pem")
|
||||
{
|
||||
print "$cert_alias";
|
||||
die "already exists"
|
||||
}
|
||||
open(PEM, ">$cert_alias.pem");
|
||||
print PEM $cert;
|
||||
}
|
||||
}
|
||||
elsif ($cert eq "-----END CERTIFICATE-----\n")
|
||||
{
|
||||
$in_cert_block = 0;
|
||||
if ($write_current_cert == 1)
|
||||
{
|
||||
print PEM $cert;
|
||||
close(PEM);
|
||||
}
|
||||
$write_current_cert = 1
|
||||
}
|
||||
else
|
||||
{
|
||||
if ($in_cert_block == 1 && $write_current_cert == 1)
|
||||
{
|
||||
print PEM $cert;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Check that the correct number of .pem files were produced.
|
||||
@pem_files = <*.pem>;
|
||||
if (@pem_files != $pem_file_count)
|
||||
{
|
||||
print "$pem_file_count";
|
||||
die "Number of .pem files produced does not match".
|
||||
" number of certs read from $file.";
|
||||
}
|
||||
|
||||
# Now store each cert in the 'cacerts' file using keytool.
|
||||
$certs_written_count = 0;
|
||||
foreach $pem_file (@pem_files)
|
||||
{
|
||||
system "$ARGV[0] -noprompt -import".
|
||||
" -alias `basename $pem_file .pem`".
|
||||
" -keystore cacerts -storepass 'changeit' -file $pem_file";
|
||||
unlink($pem_file);
|
||||
$certs_written_count++;
|
||||
}
|
||||
|
||||
# Check that the correct number of certs were added to the keystore.
|
||||
if ($certs_written_count != $pem_file_count)
|
||||
{
|
||||
die "Number of certs added to keystore does not match".
|
||||
" number of certs read from $file.";
|
||||
}
|
@ -1,48 +0,0 @@
|
||||
# This file is autogenerated from update.py in the same directory.
|
||||
{
|
||||
icedtea7 = rec {
|
||||
version = "2.5.5";
|
||||
|
||||
url = "http://icedtea.wildebeest.org/download/source/icedtea-${version}.tar.xz";
|
||||
sha256 = "1irxk2ndwsfk4c1zbzb5h3rpwv2bc9bhfjvz6p4dws5476vsxrq9";
|
||||
|
||||
common_url = "http://icedtea.classpath.org/download/drops/icedtea7/${version}";
|
||||
|
||||
bundles = {
|
||||
openjdk = rec {
|
||||
url = "${common_url}/openjdk.tar.bz2";
|
||||
sha256 = "5301baacfb6b4ee28a3469b8429a0017898615532f727bb50d94777682c5fd0d";
|
||||
};
|
||||
|
||||
corba = rec {
|
||||
url = "${common_url}/corba.tar.bz2";
|
||||
sha256 = "f0576599b474f56e58068071242cedbbf2f181b58c9010b614c9096be764ac51";
|
||||
};
|
||||
|
||||
jaxp = rec {
|
||||
url = "${common_url}/jaxp.tar.bz2";
|
||||
sha256 = "293218d595763f7e02a91ea88860e5314e42330cbc21b73dc5de32e7e26fd256";
|
||||
};
|
||||
|
||||
jaxws = rec {
|
||||
url = "${common_url}/jaxws.tar.bz2";
|
||||
sha256 = "76d6d0670ede806b01d39e07c644e423a50984f1cf0ec560afa23f0fedf575be";
|
||||
};
|
||||
|
||||
jdk = rec {
|
||||
url = "${common_url}/jdk.tar.bz2";
|
||||
sha256 = "c1bc0d25457ccf40fcaeb5311052f6d2fbab8ef316b0381995835827711da483";
|
||||
};
|
||||
|
||||
langtools = rec {
|
||||
url = "${common_url}/langtools.tar.bz2";
|
||||
sha256 = "71b269ea930da36d751c6183816ef53a65c0587b7cf0195f87759b4c02c3b660";
|
||||
};
|
||||
|
||||
hotspot = rec {
|
||||
url = "${common_url}/hotspot.tar.bz2";
|
||||
sha256 = "d724a9749f51a3c66351ad8a27bc4570640720eace33cd03f1a52e2e45731dfb";
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
@ -1,261 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import subprocess, urllib.request, re, os, tarfile
|
||||
from html.parser import HTMLParser
|
||||
|
||||
URL = 'http://icedtea.classpath.org/download/drops/icedtea{}/{}'
|
||||
DOWNLOAD_URL = 'http://icedtea.wildebeest.org/download/source/'
|
||||
DOWNLOAD_HTML = DOWNLOAD_URL + '?C=M;O=D'
|
||||
|
||||
ICEDTEA_JDKS = [7]
|
||||
|
||||
BUNDLES = ['openjdk', 'corba', 'jaxp', 'jaxws', 'jdk', 'langtools', 'hotspot']
|
||||
|
||||
SRC_PATH = './sources.nix'
|
||||
|
||||
def get_output(cmd, env = None):
|
||||
try:
|
||||
proc = subprocess.Popen(cmd, env = env, stdout = subprocess.PIPE)
|
||||
out = proc.communicate()[0]
|
||||
except subprocess.CalledProcessError as e:
|
||||
return None
|
||||
|
||||
return out.decode('utf-8').strip()
|
||||
|
||||
def nix_prefetch_url(url):
|
||||
env = os.environ.copy()
|
||||
env['PRINT_PATH'] = '1'
|
||||
out = get_output(['nix-prefetch-url', url], env = env)
|
||||
|
||||
return out.split('\n')
|
||||
|
||||
def get_nix_attr(path, attr):
|
||||
out = get_output(['nix-instantiate', '--eval-only', '-A', attr, path])
|
||||
|
||||
if len(out) < 2 or out[0] != '"' or out[-1] != '"':
|
||||
raise Exception('Cannot find Nix attribute "{}" (parsing failure?)'.format(attr))
|
||||
|
||||
# Strip quotes
|
||||
return out[1:-1]
|
||||
|
||||
def get_jdk_attr(jdk, attr):
|
||||
return get_nix_attr(SRC_PATH, 'icedtea{}.{}'.format(jdk, attr))
|
||||
|
||||
class Parser(HTMLParser):
|
||||
def __init__(self, link_regex):
|
||||
HTMLParser.__init__(self)
|
||||
|
||||
self.regex = link_regex
|
||||
self.href = None
|
||||
self.version = None
|
||||
|
||||
def handle_starttag(self, tag, attrs):
|
||||
if self.href != None or tag != 'a':
|
||||
return
|
||||
|
||||
href = None
|
||||
for attr in attrs:
|
||||
if attr[0] == 'href':
|
||||
href = attr[1]
|
||||
if href == None:
|
||||
return
|
||||
|
||||
m = re.match(self.regex, href)
|
||||
if m != None:
|
||||
self.href = href
|
||||
self.version = m.group(1)
|
||||
|
||||
def get_latest_version_url(major):
|
||||
f = urllib.request.urlopen(DOWNLOAD_HTML)
|
||||
html = f.read().decode('utf-8')
|
||||
f.close()
|
||||
|
||||
parser = Parser(r'^icedtea\d?-({}\.\d[\d.]*)\.tar\.xz$'.format(major))
|
||||
parser.feed(html)
|
||||
parser.close()
|
||||
|
||||
if parser.href == None:
|
||||
raise Exception('Error: could not find download url for major version "{}"'.format(major))
|
||||
|
||||
return parser.version, DOWNLOAD_URL + parser.href
|
||||
|
||||
def get_old_bundle_attrs(jdk, bundle):
|
||||
attrs = {}
|
||||
for attr in ('url', 'sha256'):
|
||||
attrs[attr] = get_jdk_attr(jdk, 'bundles.{}.{}'.format(bundle, attr))
|
||||
|
||||
return attrs
|
||||
|
||||
def get_old_attrs(jdk):
|
||||
attrs = {}
|
||||
|
||||
for attr in ('version', 'url', 'sha256'):
|
||||
attrs[attr] = get_jdk_attr(jdk, attr)
|
||||
|
||||
attrs['bundles'] = {}
|
||||
|
||||
for bundle in BUNDLES:
|
||||
attrs['bundles'][bundle] = get_old_bundle_attrs(jdk, bundle)
|
||||
|
||||
return attrs
|
||||
|
||||
def get_member_filename(tarball, name):
|
||||
for fname in tarball.getnames():
|
||||
m = re.match(r'^icedtea\d?-\d[\d.]*/{}$'.format(name), fname)
|
||||
if m != None:
|
||||
return m.group(0)
|
||||
|
||||
return None
|
||||
|
||||
def get_member_file(tarball, name):
|
||||
path = get_member_filename(tarball, name)
|
||||
if path == None:
|
||||
raise Exception('Could not find "{}" inside tarball'.format(name))
|
||||
|
||||
f = tarball.extractfile(path)
|
||||
data = f.read().decode('utf-8')
|
||||
f.close()
|
||||
|
||||
return data
|
||||
|
||||
def get_new_bundle_attr(makefile, bundle, attr):
|
||||
var = '{}_{}'.format(bundle.upper(), attr.upper())
|
||||
regex = r'^{} = (.*?)$'.format(var)
|
||||
|
||||
m = re.search(regex, makefile, re.MULTILINE)
|
||||
if m == None:
|
||||
raise Exception('Could not find variable "{}" in Makefile.am'.format(var))
|
||||
|
||||
return m.group(1)
|
||||
|
||||
def get_new_bundle_attrs(jdk, version, path):
|
||||
url = URL.format(jdk, version)
|
||||
|
||||
attrs = {}
|
||||
|
||||
print('Opening file: "{}"'.format(path))
|
||||
tar = tarfile.open(name = path, mode = 'r:xz')
|
||||
|
||||
makefile = get_member_file(tar, 'Makefile.am')
|
||||
hotspot_map = get_member_file(tar, 'hotspot.map.in')
|
||||
|
||||
hotspot_map = hotspot_map.replace('@ICEDTEA_RELEASE@', version)
|
||||
|
||||
for bundle in BUNDLES:
|
||||
battrs = {}
|
||||
|
||||
battrs['url'] = '{}/{}.tar.bz2'.format(url, bundle)
|
||||
if bundle == 'hotspot':
|
||||
m = re.search(r'^default (.*?) (.*?) (.*?) (.*?)$', hotspot_map, re.MULTILINE)
|
||||
if m == None:
|
||||
raise Exception('Could not find info for hotspot bundle in hotspot.map.in')
|
||||
|
||||
battrs['sha256'] = m.group(4)
|
||||
else:
|
||||
battrs['sha256'] = get_new_bundle_attr(makefile, bundle, 'sha256sum')
|
||||
|
||||
attrs[bundle] = battrs
|
||||
|
||||
tar.close()
|
||||
|
||||
return attrs
|
||||
|
||||
def get_new_attrs(jdk):
|
||||
print('Getting old attributes for JDK {}...'.format(jdk))
|
||||
old_attrs = get_old_attrs(jdk)
|
||||
attrs = {}
|
||||
|
||||
# The major version corresponds to a specific JDK (1 = OpenJDK6, 2 = OpenJDK7, 3 = OpenJDK8)
|
||||
major = jdk - 5
|
||||
|
||||
print('Getting latest version for JDK {}...'.format(jdk))
|
||||
version, url = get_latest_version_url(major)
|
||||
|
||||
print()
|
||||
print('Old version: {}'.format(old_attrs['version']))
|
||||
print('New version: {}'.format(version))
|
||||
print()
|
||||
|
||||
if version == old_attrs['version']:
|
||||
print('No update available, skipping...')
|
||||
print()
|
||||
return old_attrs
|
||||
|
||||
print('Update available, generating new attributes for JDK {}...'.format(jdk))
|
||||
|
||||
attrs['version'] = version
|
||||
attrs['url'] = url
|
||||
|
||||
print('Downloading tarball from url "{}"...'.format(url))
|
||||
print()
|
||||
attrs['sha256'], path = nix_prefetch_url(url)
|
||||
print()
|
||||
|
||||
print('Inspecting tarball for bundle information...')
|
||||
|
||||
attrs['bundles'] = get_new_bundle_attrs(jdk, attrs['version'], path)
|
||||
|
||||
print('Done!')
|
||||
|
||||
return attrs
|
||||
|
||||
def generate_jdk(jdk):
|
||||
attrs = get_new_attrs(jdk)
|
||||
|
||||
version = attrs['version']
|
||||
src_url = attrs['url'].replace(version, '${version}')
|
||||
|
||||
common_url = URL.format(jdk, version)
|
||||
src_common_url = URL.format(jdk, '${version}')
|
||||
|
||||
src = ' icedtea{} = rec {{\n'.format(jdk)
|
||||
src += ' version = "{}";\n'.format(version)
|
||||
src += '\n'
|
||||
src += ' url = "{}";\n'.format(src_url)
|
||||
src += ' sha256 = "{}";\n'.format(attrs['sha256'])
|
||||
src += '\n'
|
||||
src += ' common_url = "{}";\n'.format(src_common_url)
|
||||
src += '\n'
|
||||
src += ' bundles = {\n'
|
||||
|
||||
for bundle in BUNDLES:
|
||||
battrs = attrs['bundles'][bundle]
|
||||
|
||||
b_url = battrs['url']
|
||||
b_url = b_url.replace(common_url, '${common_url}')
|
||||
|
||||
src += ' {} = rec {{\n'.format(bundle)
|
||||
src += ' url = "{}";\n'.format(b_url)
|
||||
src += ' sha256 = "{}";\n'.format(battrs['sha256'])
|
||||
src += ' };\n'
|
||||
|
||||
if bundle != BUNDLES[-1]:
|
||||
src += '\n'
|
||||
|
||||
src += ' };\n'
|
||||
src += ' };\n'
|
||||
|
||||
return src
|
||||
|
||||
def generate_sources(jdks):
|
||||
src = '# This file is autogenerated from update.py in the same directory.\n'
|
||||
src += '{\n'
|
||||
|
||||
for jdk in jdks:
|
||||
print()
|
||||
print('Generating sources for JDK {}...'.format(jdk))
|
||||
src += generate_jdk(jdk)
|
||||
|
||||
src += '}\n'
|
||||
return src
|
||||
|
||||
if __name__ == '__main__':
|
||||
print('Generating {}...'.format(SRC_PATH))
|
||||
src = generate_sources(ICEDTEA_JDKS)
|
||||
|
||||
f = open(SRC_PATH, 'w', encoding = 'utf-8')
|
||||
f.write(src)
|
||||
f.close()
|
||||
|
||||
print()
|
||||
print('Update complete!')
|
@ -1,4 +1,4 @@
|
||||
{ stdenv, fetchurl, bison, pkgconfig, glib, gettext, perl, libgdiplus, libX11, callPackage, ncurses, zlib, withLLVM ? true }:
|
||||
{ stdenv, fetchurl, bison, pkgconfig, glib, gettext, perl, libgdiplus, libX11, callPackage, ncurses, zlib, withLLVM ? false, cacert }:
|
||||
|
||||
let
|
||||
llvm = callPackage ./llvm.nix { };
|
||||
@ -31,11 +31,16 @@ stdenv.mkDerivation rec {
|
||||
# Parallel building doesn't work, as shows http://hydra.nixos.org/build/2983601
|
||||
enableParallelBuilding = false;
|
||||
|
||||
# We want pkg-config to take priority over the dlls in the Mono framework and the GAC
|
||||
# because we control pkg-config
|
||||
patches = [ ./pkgconfig-before-gac.patch ];
|
||||
|
||||
# Patch all the necessary scripts. Also, if we're using LLVM, we fix the default
|
||||
# LLVM path to point into the Mono LLVM build, since it's private anyway.
|
||||
preBuild = ''
|
||||
makeFlagsArray=(INSTALL=`type -tp install`)
|
||||
patchShebangs ./
|
||||
substituteInPlace mcs/class/corlib/System/Environment.cs --replace /usr/share "$out/share"
|
||||
'' + stdenv.lib.optionalString withLLVM ''
|
||||
substituteInPlace mono/mini/aot-compiler.c --replace "llvm_path = g_strdup (\"\")" "llvm_path = g_strdup (\"${llvm}/bin/\")"
|
||||
'';
|
||||
@ -50,6 +55,14 @@ stdenv.mkDerivation rec {
|
||||
done
|
||||
'';
|
||||
|
||||
# Without this, any Mono application attempting to open an SSL connection will throw with
|
||||
# The authentication or decryption has failed.
|
||||
# ---> Mono.Security.Protocol.Tls.TlsException: Invalid certificate received from server.
|
||||
postInstall = ''
|
||||
echo "Updating Mono key store"
|
||||
$out/bin/cert-sync ${cacert}/etc/ssl/certs/ca-bundle.crt
|
||||
'';
|
||||
|
||||
meta = {
|
||||
homepage = http://mono-project.com/;
|
||||
description = "Cross platform, open source .NET development framework";
|
||||
|
65
pkgs/development/compilers/mono/pkgconfig-before-gac.patch
Normal file
@ -0,0 +1,65 @@
diff -Naur mono-4.0.1.old/mcs/tools/xbuild/data/12.0/Microsoft.Common.targets mono-4.0.1/mcs/tools/xbuild/data/12.0/Microsoft.Common.targets
--- mono-4.0.1.old/mcs/tools/xbuild/data/12.0/Microsoft.Common.targets 2015-04-24 02:26:18.000000000 +0100
+++ mono-4.0.1/mcs/tools/xbuild/data/12.0/Microsoft.Common.targets 2015-05-26 00:52:33.997847464 +0100
@@ -229,8 +229,8 @@
$(ReferencePath);
@(AdditionalReferencePath);
{HintPathFromItem};
- {TargetFrameworkDirectory};
{PkgConfig};
+ {TargetFrameworkDirectory};
{GAC};
{RawFileName};
$(OutDir)
diff -Naur mono-4.0.1.old/mcs/tools/xbuild/data/14.0/Microsoft.Common.targets mono-4.0.1/mcs/tools/xbuild/data/14.0/Microsoft.Common.targets
--- mono-4.0.1.old/mcs/tools/xbuild/data/14.0/Microsoft.Common.targets 2015-04-24 02:26:18.000000000 +0100
+++ mono-4.0.1/mcs/tools/xbuild/data/14.0/Microsoft.Common.targets 2015-05-26 00:52:41.832612748 +0100
@@ -214,8 +214,8 @@
$(ReferencePath);
@(AdditionalReferencePath);
{HintPathFromItem};
- {TargetFrameworkDirectory};
{PkgConfig};
+ {TargetFrameworkDirectory};
{GAC};
{RawFileName};
$(OutDir)
diff -Naur mono-4.0.1.old/mcs/tools/xbuild/data/2.0/Microsoft.Common.targets mono-4.0.1/mcs/tools/xbuild/data/2.0/Microsoft.Common.targets
--- mono-4.0.1.old/mcs/tools/xbuild/data/2.0/Microsoft.Common.targets 2015-04-24 02:26:18.000000000 +0100
+++ mono-4.0.1/mcs/tools/xbuild/data/2.0/Microsoft.Common.targets 2015-05-26 00:52:46.298478961 +0100
@@ -139,8 +139,8 @@
$(ReferencePath);
@(AdditionalReferencePath);
{HintPathFromItem};
- {TargetFrameworkDirectory};
{PkgConfig};
+ {TargetFrameworkDirectory};
{GAC};
{RawFileName};
$(OutDir)
diff -Naur mono-4.0.1.old/mcs/tools/xbuild/data/3.5/Microsoft.Common.targets mono-4.0.1/mcs/tools/xbuild/data/3.5/Microsoft.Common.targets
--- mono-4.0.1.old/mcs/tools/xbuild/data/3.5/Microsoft.Common.targets 2015-04-24 02:26:18.000000000 +0100
+++ mono-4.0.1/mcs/tools/xbuild/data/3.5/Microsoft.Common.targets 2015-05-26 00:52:52.119304583 +0100
@@ -167,8 +167,8 @@
$(ReferencePath);
@(AdditionalReferencePath);
{HintPathFromItem};
- {TargetFrameworkDirectory};
{PkgConfig};
+ {TargetFrameworkDirectory};
{GAC};
{RawFileName};
$(OutDir)
diff -Naur mono-4.0.1.old/mcs/tools/xbuild/data/4.0/Microsoft.Common.targets mono-4.0.1/mcs/tools/xbuild/data/4.0/Microsoft.Common.targets
--- mono-4.0.1.old/mcs/tools/xbuild/data/4.0/Microsoft.Common.targets 2015-04-24 02:26:18.000000000 +0100
+++ mono-4.0.1/mcs/tools/xbuild/data/4.0/Microsoft.Common.targets 2015-05-26 00:52:56.519172776 +0100
@@ -229,8 +229,8 @@
$(ReferencePath);
@(AdditionalReferencePath);
{HintPathFromItem};
- {TargetFrameworkDirectory};
{PkgConfig};
+ {TargetFrameworkDirectory};
{GAC};
{RawFileName};
$(OutDir)
@ -1,33 +1,60 @@
{ stdenv, runCommand, glibc, fetchurl, file }:
{ stdenv, runCommand, glibc, fetchurl, file
, version
}:

let
# !!! These should be on nixos.org
src = if glibc.system == "x86_64-linux" then
fetchurl {
url = http://tarballs.nixos.org/openjdk-bootstrap-x86_64-linux-2012-08-24.tar.xz;
sha256 = "0gla9dxrfq2w1hvgsnn8jg8a60k27im6z43a6iidi0qmwa0wah32";
}
(if version == "8" then
fetchurl {
url = "https://www.dropbox.com/s/a0lsq2ig4uguky5/openjdk8-bootstrap-x86_64-linux.tar.xz?dl=1";
sha256 = "18zqx6jhm3lizn9hh6ryyqc9dz3i96pwaz8f6nxfllk70qi5gvks";
}
else if version == "7" then
fetchurl {
url = "https://www.dropbox.com/s/rssfbeommrfbsjf/openjdk7-bootstrap-x86_64-linux.tar.xz?dl=1";
sha256 = "024gg2sgg4labxbc1nhn8lxls2p7d9h3b82hnsahwaja2pm1hbra";
}
else throw "No bootstrap for version")
else if glibc.system == "i686-linux" then
fetchurl {
url = http://tarballs.nixos.org/openjdk-bootstrap-i686-linux-2012-08-24.tar.xz;
sha256 = "184wq212bycwbbq4ix8cc6jwjxkrqw9b01zb86q95kqpa8zy5206";
}
(if version == "8" then
fetchurl {
url = "https://www.dropbox.com/s/rneqjhlerijsw74/openjdk8-bootstrap-i686-linux.tar.xz?dl=1";
sha256 = "1yx04xh8bqz7amg12d13rw5vwa008rav59mxjw1b9s6ynkvfgqq9";
}
else if version == "7" then
fetchurl {
url = "https://www.dropbox.com/s/6xe64td7eg2wurs/openjdk7-bootstrap-i686-linux.tar.xz?dl=1";
sha256 = "0xwqjk1zx8akziw8q9sbjc1rs8s7c0w6mw67jdmmi26cwwp8ijnx";
}
else throw "No bootstrap for version")
else throw "No bootstrap for system";
in

runCommand "openjdk-bootstrap" {} ''
tar xvf ${src}
mv openjdk-bootstrap $out
bootstrap = runCommand "openjdk-bootstrap" {
passthru.home = "${bootstrap}/lib/openjdk";
} ''
tar xvf ${src}
mv openjdk-bootstrap $out

for i in $out/bin/*; do
patchelf --set-interpreter ${glibc}/lib/ld-linux*.so.2 $i
done
LIBDIRS="$(find $out -name \*.so\* -exec dirname {} \; | sort | uniq | tr '\n' ':')"

# Temporarily, while NixOS's OpenJDK bootstrap tarball doesn't have PaX markings:
exes=$(${file}/bin/file $out/bin/* 2> /dev/null | grep -E 'ELF.*(executable|shared object)' | sed -e 's/: .*$//')
for file in $exes; do
paxmark m "$file"
# On x86 for heap sizes over 700MB disable SEGMEXEC and PAGEEXEC as well.
${stdenv.lib.optionalString stdenv.isi686 ''paxmark msp "$file"''}
done
''
for i in $out/bin/*; do
patchelf --set-interpreter ${glibc}/lib/ld-linux*.so.2 $i || true
patchelf --set-rpath "${glibc}/lib:$LIBDIRS" $i || true
done

find $out -name \*.so\* | while read lib; do
patchelf --set-interpreter ${glibc}/lib/ld-linux*.so.2 $lib || true
patchelf --set-rpath "${glibc}/lib:${stdenv.cc.cc}/lib:$LIBDIRS" $lib || true
done

# Temporarily, while NixOS's OpenJDK bootstrap tarball doesn't have PaX markings:
exes=$(${file}/bin/file $out/bin/* 2> /dev/null | grep -E 'ELF.*(executable|shared object)' | sed -e 's/: .*$//')
for file in $exes; do
paxmark m "$file"
# On x86 for heap sizes over 700MB disable SEGMEXEC and PAGEEXEC as well.
${stdenv.lib.optionalString stdenv.isi686 ''paxmark msp "$file"''}
done
'';
in bootstrap
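A minimal sketch of how a bootstrap built from this expression could be consumed by the JDK build below; the file names and attribute names are assumptions for illustration, but the version argument and the passthru.home attribute are taken from the hunk above, and BOOTDIR=${bootjdk.home} appears in the OpenJDK 7 hunks that follow.

# Hypothetical wiring, e.g. in all-packages.nix:
openjdk7-bootstrap = callPackage ./bootstrap.nix { version = "7"; };
# openjdk7-bootstrap.home then resolves to "<store path>/lib/openjdk",
# which the JDK derivation below passes to make as BOOTDIR.
openjdk7 = callPackage ./default.nix { bootjdk = openjdk7-bootstrap; };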
@ -1,6 +1,8 @@
{ stdenv, fetchurl, unzip, zip, procps, coreutils, alsaLib, ant, freetype
, which, jdk, nettools, xorg, file
, fontconfig, cpio, cacert, perl, setJavaClassPath }:
, which, bootjdk, nettools, xorg, file
, fontconfig, cpio, cacert, perl, setJavaClassPath
, minimal ? false
}:

let
@ -15,7 +17,7 @@ let
else
throw "openjdk requires i686-linux or x86_64 linux";

update = "65";
update = "80";

build = "32";
@ -27,13 +29,41 @@ let
md5 = "de3006e5cf1ee78a9c6145ce62c4e982";
};

baseurl = "http://hg.openjdk.java.net/jdk7u/jdk7u";
repover = "jdk7u${update}-b${build}";
jdk7 = fetchurl {
url = "${baseurl}/archive/${repover}.tar.gz";
sha256 = "1r8xnn87nmqaq2f8i3cp3i9ngq66k0c0wgkdq5cf59lkgs8wkcdi";
};
langtools = fetchurl {
url = "${baseurl}/langtools/archive/${repover}.tar.gz";
sha256 = "01alj6pfrjqyf4irll9wg34h4w9nmb3973lvbacs528qm1nxgh9r";
};
hotspot = fetchurl {
url = "${baseurl}/hotspot/archive/${repover}.tar.gz";
sha256 = "14zla8axmg5344zf45i4cj7yyli0kmdjsh9yalmzqaphpkqjqpf2";
};
corba = fetchurl {
url = "${baseurl}/corba/archive/${repover}.tar.gz";
sha256 = "19z3ay3f2q7r2ra03c6wy8b5rbdbrkq5g2dzhrqcg0n4iydd3c40";
};
jdk = fetchurl {
url = "${baseurl}/jdk/archive/${repover}.tar.gz";
sha256 = "1q0r2l9bz2cyx4fq79x6cb2f5xycw83hl5cn1d1mazgsckp590lb";
};
jaxws = fetchurl {
url = "${baseurl}/jaxws/archive/${repover}.tar.gz";
sha256 = "1lp0mww2x3b6xavb7idrzckh6iw8jd6s1fvqgfvzs853z4ifksqj";
};
jaxp = fetchurl {
url = "${baseurl}/jaxp/archive/${repover}.tar.gz";
sha256 = "0pd874dkgxkb7frxg4n9py61kkhhck4x33dcynynwb3vl6k6iy79";
};
openjdk = stdenv.mkDerivation rec {
name = "openjdk-7u${update}b${build}";

src = fetchurl {
url = "http://tarballs.nixos.org/openjdk-7u${update}-b${build}.tar.xz";
sha256 = "0lyp75sl5w4b9azphb2nq5cwzli85inpksq4943q4j349rkmdprx";
};
srcs = [ jdk7 langtools hotspot corba jdk jaxws jaxp ];
sourceRoot = ".";

outputs = [ "out" "jre" ];
@ -41,18 +71,23 @@ let
[ unzip procps ant which zip cpio nettools alsaLib
xorg.libX11 xorg.libXt xorg.libXext xorg.libXrender xorg.libXtst
xorg.libXi xorg.libXinerama xorg.libXcursor xorg.lndir
fontconfig perl file
fontconfig perl file bootjdk
];

NIX_LDFLAGS = "-lfontconfig -lXcursor -lXinerama";
NIX_LDFLAGS = if minimal then null else "-lfontconfig -lXcursor -lXinerama";

postUnpack = ''
ls | grep jdk | grep -v '^jdk7u' | awk -F- '{print $1}' | while read p; do
mv $p-* $(ls | grep '^jdk7u')/$p
done
cd jdk7u-*

sed -i -e "s@/usr/bin/test@${coreutils}/bin/test@" \
-e "s@/bin/ls@${coreutils}/bin/ls@" \
openjdk*/hotspot/make/linux/makefiles/sa.make
hotspot/make/linux/makefiles/sa.make

sed -i "s@/bin/echo -e@${coreutils}/bin/echo -e@" \
openjdk*/{jdk,corba}/make/common/shared/Defs-utils.gmk
{jdk,corba}/make/common/shared/Defs-utils.gmk

tar xf ${cupsSrc}
cupsDir=$(echo $(pwd)/cups-*)
@ -75,17 +110,17 @@ let
"ALSA_INCLUDE=${alsaLib}/include/alsa/version.h"
"FREETYPE_HEADERS_PATH=${freetype}/include"
"FREETYPE_LIB_PATH=${freetype}/lib"
"MILESTONE=u${update}"
"MILESTONE=${update}"
"BUILD_NUMBER=b${build}"
"USRBIN_PATH="
"COMPILER_PATH="
"DEVTOOLS_PATH="
"UNIXCOMMAND_PATH="
"BOOTDIR=${jdk}"
"BOOTDIR=${bootjdk.home}"
"STATIC_CXX=false"
"UNLIMITED_CRYPTO=1"
"FULL_DEBUG_SYMBOLS=0"
];
] ++ stdenv.lib.optional minimal "BUILD_HEADLESS=1";

configurePhase = "true";
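The new minimal flag threads through NIX_LDFLAGS above and the BUILD_HEADLESS make flag here, so a headless build can be requested without editing this file. A minimal sketch, assuming the package is exposed through callPackage so that .override works (the openjdk attribute name is an assumption, not part of this diff):

# Hypothetical: build a headless (no X11/fontconfig) OpenJDK 7.
openjdk7-headless = pkgs.openjdk.override { minimal = true; };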
@ -168,6 +203,32 @@ let
EOF
'';

postFixup = ''
# Build the set of output library directories to rpath against
LIBDIRS=""
for output in $outputs; do
LIBDIRS="$(find $(eval echo \$$output) -name \*.so\* -exec dirname {} \; | sort | uniq | tr '\n' ':'):$LIBDIRS"
done

# Add the local library paths to remove dependencies on the bootstrap
for output in $outputs; do
OUTPUTDIR="$(eval echo \$$output)"
BINLIBS="$(find $OUTPUTDIR/bin/ -type f; find $OUTPUTDIR -name \*.so\*)"
echo "$BINLIBS" | while read i; do
patchelf --set-rpath "$LIBDIRS:$(patchelf --print-rpath "$i")" "$i" || true
patchelf --shrink-rpath "$i" || true
done
done

# Test to make sure that we don't depend on the bootstrap
for output in $outputs; do
if grep -q -r '${bootjdk}' $(eval echo \$$output); then
echo "Extraneous references to ${bootjdk} detected"
exit 1
fi
done
'';

meta = {
homepage = http://openjdk.java.net/;
license = stdenv.lib.licenses.gpl2;
Some files were not shown because too many files have changed in this diff.