Merge staging-next into staging

Authored by github-actions[bot] on 2021-10-25 12:02:08 +00:00; committed by GitHub.
commit a16b1b54b3
27 changed files with 554 additions and 341 deletions

View file

@@ -1080,6 +1080,12 @@
githubId = 354741;
name = "Austin Butler";
};
autophagy = {
email = "mail@autophagy.io";
github = "autophagy";
githubId = 12958979;
name = "Mika Naylor";
};
avaq = {
email = "nixpkgs@account.avaq.it";
github = "avaq";

View file

@@ -1,4 +1,4 @@
{ hadoop, pkgs }:
{ cfg, pkgs, lib }:
let
propertyXml = name: value: ''
<property>
@@ -13,19 +13,31 @@ let
${builtins.concatStringsSep "\n" (pkgs.lib.mapAttrsToList propertyXml properties)}
</configuration>
'';
cfgLine = name: value: ''
${name}=${builtins.toString value}
'';
cfgFile = fileName: properties: pkgs.writeTextDir fileName ''
# generated by NixOS
${builtins.concatStringsSep "" (pkgs.lib.mapAttrsToList cfgLine properties)}
'';
userFunctions = ''
hadoop_verify_logdir() {
echo Skipping verification of log directory
}
'';
hadoopEnv = ''
export HADOOP_LOG_DIR=/tmp/hadoop/$USER
'';
in
pkgs.buildEnv {
name = "hadoop-conf";
paths = [
(siteXml "core-site.xml" hadoop.coreSite)
(siteXml "hdfs-site.xml" hadoop.hdfsSite)
(siteXml "mapred-site.xml" hadoop.mapredSite)
(siteXml "yarn-site.xml" hadoop.yarnSite)
(pkgs.writeTextDir "hadoop-user-functions.sh" userFunctions)
];
}
pkgs.runCommand "hadoop-conf" {} ''
mkdir -p $out/
cp ${siteXml "core-site.xml" cfg.coreSite}/* $out/
cp ${siteXml "hdfs-site.xml" cfg.hdfsSite}/* $out/
cp ${siteXml "mapred-site.xml" cfg.mapredSite}/* $out/
cp ${siteXml "yarn-site.xml" cfg.yarnSite}/* $out/
cp ${cfgFile "container-executor.cfg" cfg.containerExecutorCfg}/* $out/
cp ${pkgs.writeTextDir "hadoop-user-functions.sh" userFunctions}/* $out/
cp ${pkgs.writeTextDir "hadoop-env.sh" hadoopEnv}/* $out/
cp ${cfg.log4jProperties} $out/log4j.properties
${lib.concatMapStringsSep "\n" (dir: "cp -r ${dir}/* $out/") cfg.extraConfDirs}
''
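A minimal sketch of invoking this config builder standalone, assuming illustrative values for the cfg attributes it reads (the Hadoop module below normally passes config.services.hadoop as cfg; none of these sample values come from this commit):

  let
    pkgs = import <nixpkgs> { };
    confDir = import ./conf.nix {
      inherit pkgs;
      inherit (pkgs) lib;
      cfg = {
        # illustrative settings only
        coreSite = { "fs.defaultFS" = "hdfs://localhost"; };
        hdfsSite = { };
        mapredSite = { };
        yarnSite = { };
        containerExecutorCfg = { "min.user.id" = 1000; };
        log4jProperties = "${pkgs.hadoop}/lib/${pkgs.hadoop.untarDir}/etc/hadoop/log4j.properties";
        extraConfDirs = [ ];
      };
    };
  in
    confDir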

View file

@@ -1,5 +1,7 @@
{ config, lib, pkgs, ...}:
let
cfg = config.services.hadoop;
in
with lib;
{
imports = [ ./yarn.nix ./hdfs.nix ];
@@ -17,7 +19,9 @@ with lib;
};
hdfsSite = mkOption {
default = {};
default = {
"dfs.namenode.rpc-bind-host" = "0.0.0.0";
};
type = types.attrsOf types.anything;
example = literalExpression ''
{
@@ -28,27 +32,81 @@ with lib;
};
mapredSite = mkOption {
default = {};
default = {
"mapreduce.framework.name" = "yarn";
"yarn.app.mapreduce.am.env" = "HADOOP_MAPRED_HOME=${cfg.package}/lib/${cfg.package.untarDir}";
"mapreduce.map.env" = "HADOOP_MAPRED_HOME=${cfg.package}/lib/${cfg.package.untarDir}";
"mapreduce.reduce.env" = "HADOOP_MAPRED_HOME=${cfg.package}/lib/${cfg.package.untarDir}";
};
type = types.attrsOf types.anything;
example = literalExpression ''
{
"mapreduce.map.cpu.vcores" = "1";
options.services.hadoop.mapredSite.default // {
"mapreduce.map.java.opts" = "-Xmx900m -XX:+UseParallelGC";
}
'';
description = "Hadoop mapred-site.xml definition";
};
yarnSite = mkOption {
default = {};
default = {
"yarn.nodemanager.admin-env" = "PATH=$PATH";
"yarn.nodemanager.aux-services" = "mapreduce_shuffle";
"yarn.nodemanager.aux-services.mapreduce_shuffle.class" = "org.apache.hadoop.mapred.ShuffleHandler";
"yarn.nodemanager.bind-host" = "0.0.0.0";
"yarn.nodemanager.container-executor.class" = "org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor";
"yarn.nodemanager.env-whitelist" = "JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_HOME,LANG,TZ";
"yarn.nodemanager.linux-container-executor.group" = "hadoop";
"yarn.nodemanager.linux-container-executor.path" = "/run/wrappers/yarn-nodemanager/bin/container-executor";
"yarn.nodemanager.log-dirs" = "/var/log/hadoop/yarn/nodemanager";
"yarn.resourcemanager.bind-host" = "0.0.0.0";
"yarn.resourcemanager.scheduler.class" = "org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler";
};
type = types.attrsOf types.anything;
example = literalExpression ''
{
"yarn.resourcemanager.ha.id" = "resourcemanager1";
options.services.hadoop.yarnSite.default // {
"yarn.resourcemanager.hostname" = "''${config.networking.hostName}";
}
'';
description = "Hadoop yarn-site.xml definition";
};
log4jProperties = mkOption {
default = "${cfg.package}/lib/${cfg.package.untarDir}/etc/hadoop/log4j.properties";
type = types.path;
example = literalExpression ''
"''${pkgs.hadoop}/lib/''${pkgs.hadoop.untarDir}/etc/hadoop/log4j.properties";
'';
description = "log4j.properties file added to HADOOP_CONF_DIR";
};
containerExecutorCfg = mkOption {
default = {
# must be the same as yarn.nodemanager.linux-container-executor.group in yarnSite
"yarn.nodemanager.linux-container-executor.group"="hadoop";
"min.user.id"=1000;
"feature.terminal.enabled"=1;
};
type = types.attrsOf types.anything;
example = literalExpression ''
options.services.hadoop.containerExecutorCfg.default // {
"feature.terminal.enabled" = 0;
}
'';
description = "Yarn container-executor.cfg definition";
};
extraConfDirs = mkOption {
default = [];
type = types.listOf types.path;
example = literalExpression ''
[
./extraHDFSConfs
./extraYARNConfs
]
'';
description = "Directories containing additional config files to be added to HADOOP_CONF_DIR";
};
package = mkOption {
type = types.package;
default = pkgs.hadoop;
@@ -64,6 +122,12 @@ with lib;
users.groups.hadoop = {
gid = config.ids.gids.hadoop;
};
environment = {
systemPackages = [ cfg.package ];
etc."hadoop-conf".source = let
hadoopConf = "${import ./conf.nix { inherit cfg pkgs lib; }}/";
in "${hadoopConf}";
};
})
];
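A minimal sketch of a NixOS configuration exercising the options above; the hostname, site settings, and extra config directory are illustrative assumptions, not values taken from this change:

  { pkgs, ... }: {
    services.hadoop = {
      package = pkgs.hadoop;
      coreSite = { "fs.defaultFS" = "hdfs://master"; };
      hdfs.namenode.enabled = true;        # option declared in ./hdfs.nix
      yarn.resourcemanager.enabled = true; # option declared in ./yarn.nix
      # hypothetical directory with additional config files for HADOOP_CONF_DIR
      extraConfDirs = [ ./extraHDFSConfs ];
    };
  }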

View file

@@ -1,24 +1,54 @@
{ config, lib, pkgs, ...}:
with lib;
let
cfg = config.services.hadoop;
hadoopConf = import ./conf.nix { hadoop = cfg; pkgs = pkgs; };
hadoopConf = "${import ./conf.nix { inherit cfg pkgs lib; }}/";
restartIfChanged = mkOption {
type = types.bool;
description = ''
Automatically restart the service on config change.
This can be set to false to defer restarts on clusters running critical applications.
Please consider the security implications of inadvertently running an older version,
and the possibility of unexpected behavior caused by inconsistent versions across a cluster when disabling this option.
'';
default = false;
};
in
with lib;
{
options.services.hadoop.hdfs = {
namenode.enabled = mkOption {
type = types.bool;
default = false;
description = ''
Whether to run the Hadoop YARN NameNode
'';
namenode = {
enabled = mkOption {
type = types.bool;
default = false;
description = ''
Whether to run the HDFS NameNode
'';
};
inherit restartIfChanged;
openFirewall = mkOption {
type = types.bool;
default = true;
description = ''
Open firewall ports for namenode
'';
};
};
datanode.enabled = mkOption {
type = types.bool;
default = false;
description = ''
Whether to run the Hadoop YARN DataNode
'';
datanode = {
enabled = mkOption {
type = types.bool;
default = false;
description = ''
Whether to run the HDFS DataNode
'';
};
inherit restartIfChanged;
openFirewall = mkOption {
type = types.bool;
default = true;
description = ''
Open firewall ports for datanode
'';
};
};
};
@@ -27,10 +57,7 @@ with lib;
systemd.services.hdfs-namenode = {
description = "Hadoop HDFS NameNode";
wantedBy = [ "multi-user.target" ];
environment = {
HADOOP_HOME = "${cfg.package}";
};
inherit (cfg.hdfs.namenode) restartIfChanged;
preStart = ''
${cfg.package}/bin/hdfs --config ${hadoopConf} namenode -format -nonInteractive || true
@@ -40,24 +67,34 @@ with lib;
User = "hdfs";
SyslogIdentifier = "hdfs-namenode";
ExecStart = "${cfg.package}/bin/hdfs --config ${hadoopConf} namenode";
Restart = "always";
};
};
networking.firewall.allowedTCPPorts = (mkIf cfg.hdfs.namenode.openFirewall [
9870 # namenode.http-address
8020 # namenode.rpc-address
]);
})
(mkIf cfg.hdfs.datanode.enabled {
systemd.services.hdfs-datanode = {
description = "Hadoop HDFS DataNode";
wantedBy = [ "multi-user.target" ];
environment = {
HADOOP_HOME = "${cfg.package}";
};
inherit (cfg.hdfs.datanode) restartIfChanged;
serviceConfig = {
User = "hdfs";
SyslogIdentifier = "hdfs-datanode";
ExecStart = "${cfg.package}/bin/hdfs --config ${hadoopConf} datanode";
Restart = "always";
};
};
networking.firewall.allowedTCPPorts = (mkIf cfg.hdfs.datanode.openFirewall [
9864 # datanode.http.address
9866 # datanode.address
9867 # datanode.ipc.address
]);
})
(mkIf (
cfg.hdfs.namenode.enabled || cfg.hdfs.datanode.enabled

View file

@@ -1,24 +1,62 @@
{ config, lib, pkgs, ...}:
with lib;
let
cfg = config.services.hadoop;
hadoopConf = import ./conf.nix { hadoop = cfg; pkgs = pkgs; };
hadoopConf = "${import ./conf.nix { inherit cfg pkgs lib; }}/";
restartIfChanged = mkOption {
type = types.bool;
description = ''
Automatically restart the service on config change.
This can be set to false to defer restarts on clusters running critical applications.
Please consider the security implications of inadvertently running an older version,
and the possibility of unexpected behavior caused by inconsistent versions across a cluster when disabling this option.
'';
default = false;
};
in
with lib;
{
options.services.hadoop.yarn = {
resourcemanager.enabled = mkOption {
type = types.bool;
default = false;
description = ''
Whether to run the Hadoop YARN ResourceManager
'';
resourcemanager = {
enabled = mkOption {
type = types.bool;
default = false;
description = ''
Whether to run the Hadoop YARN ResourceManager
'';
};
inherit restartIfChanged;
openFirewall = mkOption {
type = types.bool;
default = true;
description = ''
Open firewall ports for resourcemanager
'';
};
};
nodemanager.enabled = mkOption {
type = types.bool;
default = false;
description = ''
Whether to run the Hadoop YARN NodeManager
'';
nodemanager = {
enabled = mkOption {
type = types.bool;
default = false;
description = ''
Whether to run the Hadoop YARN NodeManager
'';
};
inherit restartIfChanged;
addBinBash = mkOption {
type = types.bool;
default = true;
description = ''
Add /bin/bash. This is needed by the linux container executor's launch script.
'';
};
openFirewall = mkOption {
type = types.bool;
default = true;
description = ''
Open firewall ports for nodemanager.
Because containers can listen on any ephemeral port, TCP ports 1024-65535 will be opened.
'';
};
};
};
@@ -38,36 +76,63 @@ with lib;
systemd.services.yarn-resourcemanager = {
description = "Hadoop YARN ResourceManager";
wantedBy = [ "multi-user.target" ];
environment = {
HADOOP_HOME = "${cfg.package}";
};
inherit (cfg.yarn.resourcemanager) restartIfChanged;
serviceConfig = {
User = "yarn";
SyslogIdentifier = "yarn-resourcemanager";
ExecStart = "${cfg.package}/bin/yarn --config ${hadoopConf} " +
" resourcemanager";
Restart = "always";
};
};
networking.firewall.allowedTCPPorts = (mkIf cfg.yarn.resourcemanager.openFirewall [
8088 # resourcemanager.webapp.address
8030 # resourcemanager.scheduler.address
8031 # resourcemanager.resource-tracker.address
8032 # resourcemanager.address
]);
})
(mkIf cfg.yarn.nodemanager.enabled {
# Needed because yarn hardcodes /bin/bash in container start scripts
# These scripts can't be patched, they are generated at runtime
systemd.tmpfiles.rules = [
(mkIf cfg.yarn.nodemanager.addBinBash "L /bin/bash - - - - /run/current-system/sw/bin/bash")
];
systemd.services.yarn-nodemanager = {
description = "Hadoop YARN NodeManager";
wantedBy = [ "multi-user.target" ];
inherit (cfg.yarn.nodemanager) restartIfChanged;
environment = {
HADOOP_HOME = "${cfg.package}";
};
preStart = ''
# create log dir
mkdir -p /var/log/hadoop/yarn/nodemanager
chown yarn:hadoop /var/log/hadoop/yarn/nodemanager
# set up setuid container executor binary
rm -rf /run/wrappers/yarn-nodemanager/ || true
mkdir -p /run/wrappers/yarn-nodemanager/{bin,etc/hadoop}
cp ${cfg.package}/lib/${cfg.package.untarDir}/bin/container-executor /run/wrappers/yarn-nodemanager/bin/
chgrp hadoop /run/wrappers/yarn-nodemanager/bin/container-executor
chmod 6050 /run/wrappers/yarn-nodemanager/bin/container-executor
cp ${hadoopConf}/container-executor.cfg /run/wrappers/yarn-nodemanager/etc/hadoop/
'';
serviceConfig = {
User = "yarn";
SyslogIdentifier = "yarn-nodemanager";
PermissionsStartOnly = true;
ExecStart = "${cfg.package}/bin/yarn --config ${hadoopConf} " +
" nodemanager";
Restart = "always";
};
};
networking.firewall.allowedTCPPortRanges = [
(mkIf (cfg.yarn.nodemanager.openFirewall) {from = 1024; to = 65535;})
];
})
];

View file

@@ -346,7 +346,7 @@ in
++ lib.optional config.services.pipewire.pulse.enable plasma-pa
++ lib.optional config.powerManagement.enable powerdevil
++ lib.optional config.services.colord.enable pkgs.colord-kde
++ lib.optional config.services.hardware.bolt.enable pkgs.plasma-thunderbolt
++ lib.optional config.services.hardware.bolt.enable pkgs.plasma5Packages.plasma-thunderbolt
++ lib.optionals config.services.samba.enable [ kdenetwork-filesharing pkgs.samba ]
++ lib.optional config.services.xserver.wacom.enable pkgs.wacomtablet;

View file

@@ -26,6 +26,8 @@ let
"nss-user-lookup.target"
"time-sync.target"
"cryptsetup.target"
"cryptsetup-pre.target"
"remote-cryptsetup.target"
"sigpwr.target"
"timers.target"
"paths.target"

View file

@@ -165,6 +165,7 @@ in
grocy = handleTest ./grocy.nix {};
grub = handleTest ./grub.nix {};
gvisor = handleTest ./gvisor.nix {};
hadoop.all = handleTestOn [ "x86_64-linux" ] ./hadoop/hadoop.nix {};
hadoop.hdfs = handleTestOn [ "x86_64-linux" ] ./hadoop/hdfs.nix {};
hadoop.yarn = handleTestOn [ "x86_64-linux" ] ./hadoop/yarn.nix {};
handbrake = handleTestOn ["x86_64-linux"] ./handbrake.nix {};
@@ -416,6 +417,7 @@ in
solr = handleTest ./solr.nix {};
sonarr = handleTest ./sonarr.nix {};
spacecookie = handleTest ./spacecookie.nix {};
spark = handleTestOn ["x86_64-linux"] ./spark {};
spike = handleTest ./spike.nix {};
sslh = handleTest ./sslh.nix {};
sssd = handleTestOn ["x86_64-linux"] ./sssd.nix {};

View file

@@ -0,0 +1,70 @@
import ../make-test-python.nix ({pkgs, ...}: {
nodes = let
package = pkgs.hadoop;
coreSite = {
"fs.defaultFS" = "hdfs://master";
};
in {
master = {pkgs, options, ...}: {
services.hadoop = {
inherit package coreSite;
hdfs.namenode.enabled = true;
yarn.resourcemanager.enabled = true;
};
virtualisation.memorySize = 1024;
};
worker = {pkgs, options, ...}: {
services.hadoop = {
inherit package coreSite;
hdfs.datanode.enabled = true;
yarn.nodemanager.enabled = true;
yarnSite = options.services.hadoop.yarnSite.default // {
"yarn.resourcemanager.hostname" = "master";
};
};
virtualisation.memorySize = 2048;
};
};
testScript = ''
start_all()
master.wait_for_unit("network.target")
master.wait_for_unit("hdfs-namenode")
master.wait_for_open_port(8020)
master.wait_for_open_port(9870)
worker.wait_for_unit("network.target")
worker.wait_for_unit("hdfs-datanode")
worker.wait_for_open_port(9864)
worker.wait_for_open_port(9866)
worker.wait_for_open_port(9867)
master.succeed("curl -f http://worker:9864")
worker.succeed("curl -f http://master:9870")
worker.succeed("sudo -u hdfs hdfs dfsadmin -safemode wait")
master.wait_for_unit("yarn-resourcemanager")
master.wait_for_open_port(8030)
master.wait_for_open_port(8031)
master.wait_for_open_port(8032)
master.wait_for_open_port(8088)
worker.succeed("curl -f http://master:8088")
worker.wait_for_unit("yarn-nodemanager")
worker.wait_for_open_port(8042)
worker.wait_for_open_port(8040)
master.succeed("curl -f http://worker:8042")
assert "Total Nodes:1" in worker.succeed("yarn node -list")
assert "Estimated value of Pi is" in worker.succeed("HADOOP_USER_NAME=hdfs yarn jar $(readlink $(which yarn) | sed -r 's~bin/yarn~lib/hadoop-*/share/hadoop/mapreduce/hadoop-mapreduce-examples-*.jar~g') pi 2 10")
assert "SUCCEEDED" in worker.succeed("yarn application -list -appStates FINISHED")
worker.succeed("sudo -u hdfs hdfs dfs -ls / | systemd-cat")
'';
})

View file

@@ -2,7 +2,7 @@ import ../make-test-python.nix ({...}: {
nodes = {
namenode = {pkgs, ...}: {
services.hadoop = {
package = pkgs.hadoop_3_1;
package = pkgs.hadoop;
hdfs.namenode.enabled = true;
coreSite = {
"fs.defaultFS" = "hdfs://namenode:8020";
@@ -20,7 +20,7 @@ import ../make-test-python.nix ({...}: {
};
datanode = {pkgs, ...}: {
services.hadoop = {
package = pkgs.hadoop_3_1;
package = pkgs.hadoop;
hdfs.datanode.enabled = true;
coreSite = {
"fs.defaultFS" = "hdfs://namenode:8020";

View file

@@ -1,7 +1,7 @@
import ../make-test-python.nix ({...}: {
nodes = {
resourcemanager = {pkgs, ...}: {
services.hadoop.package = pkgs.hadoop_3_1;
services.hadoop.package = pkgs.hadoop;
services.hadoop.yarn.resourcemanager.enabled = true;
services.hadoop.yarnSite = {
"yarn.resourcemanager.scheduler.class" = "org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler";
@@ -12,7 +12,7 @@ import ../make-test-python.nix ({...}: {
];
};
nodemanager = {pkgs, ...}: {
services.hadoop.package = pkgs.hadoop_3_1;
services.hadoop.package = pkgs.hadoop;
services.hadoop.yarn.nodemanager.enabled = true;
services.hadoop.yarnSite = {
"yarn.resourcemanager.hostname" = "resourcemanager";

View file

@@ -1,19 +1,7 @@
{ lib, stdenv, fetchFromGitHub, fetchurl, linkFarmFromDrvs, makeWrapper,
dotnetPackages, dotnetCorePackages, altcoinSupport ? false
}:
{ lib, buildDotnetModule, fetchFromGitHub, dotnetCorePackages
, altcoinSupport ? false }:
let
deps = import ./deps.nix {
fetchNuGet = { name, version, sha256 }: fetchurl {
name = "nuget-${name}-${version}.nupkg";
url = "https://www.nuget.org/api/v2/package/${name}/${version}";
inherit sha256;
};
};
dotnetSdk = dotnetCorePackages.sdk_3_1;
in
stdenv.mkDerivation rec {
buildDotnetModule rec {
pname = "btcpayserver";
version = "1.2.4";
@@ -24,35 +12,29 @@ stdenv.mkDerivation rec {
sha256 = "sha256-vjNJ08twsJ036TTFF6srOGshDpP7ZwWCGN0XjrtFT/g=";
};
nativeBuildInputs = [ dotnetSdk dotnetPackages.Nuget makeWrapper ];
projectFile = "BTCPayServer/BTCPayServer.csproj";
nugetDeps = ./deps.nix;
buildPhase = ''
export HOME=$TMP/home
export DOTNET_CLI_TELEMETRY_OPTOUT=1
export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
dotnet-sdk = dotnetCorePackages.sdk_3_1;
dotnet-runtime = dotnetCorePackages.aspnetcore_3_1;
nuget sources Add -Name tmpsrc -Source $TMP/nuget
nuget init ${linkFarmFromDrvs "deps" deps} $TMP/nuget
dotnet restore --source $TMP/nuget ${lib.optionalString altcoinSupport ''/p:Configuration="Altcoins-Release"''} BTCPayServer/BTCPayServer.csproj
dotnet publish --no-restore --output $out/share/$pname ${lib.optionalString altcoinSupport "-c Altcoins-Release"} BTCPayServer/BTCPayServer.csproj
'';
dotnetFlags = lib.optionals altcoinSupport [ "/p:Configuration=Altcoins-Release" ];
# btcpayserver requires the publish directory as its working dir
# https://github.com/btcpayserver/btcpayserver/issues/1894
installPhase = ''
makeWrapper $out/share/$pname/BTCPayServer $out/bin/$pname \
--set DOTNET_ROOT "${dotnetSdk}" \
--run "cd $out/share/$pname"
preInstall = ''
makeWrapperArgs+=(--run "cd $out/lib/btcpayserver")
'';
dontStrip = true;
postInstall = ''
mv $out/bin/{BTCPayServer,btcpayserver}
'';
meta = with lib; {
description = "Self-hosted, open-source cryptocurrency payment processor";
homepage = "https://btcpayserver.org";
maintainers = with maintainers; [ kcalvinalvin earvstedt ];
license = lib.licenses.mit;
platforms = lib.platforms.linux;
license = licenses.mit;
platforms = platforms.linux;
};
}

View file

@@ -1,19 +1,6 @@
{ lib, stdenv, fetchFromGitHub, fetchurl, linkFarmFromDrvs, makeWrapper,
dotnetPackages, dotnetCorePackages
}:
{ lib, buildDotnetModule, fetchFromGitHub, dotnetCorePackages }:
let
deps = import ./deps.nix {
fetchNuGet = { name, version, sha256 }: fetchurl {
name = "nuget-${name}-${version}.nupkg";
url = "https://www.nuget.org/api/v2/package/${name}/${version}";
inherit sha256;
};
};
dotnetSdk = dotnetCorePackages.sdk_3_1;
in
stdenv.mkDerivation rec {
buildDotnetModule rec {
pname = "nbxplorer";
version = "2.2.11";
@@ -24,31 +11,20 @@ stdenv.mkDerivation rec {
sha256 = "sha256-ZDqzkANGMdvv3e5gWCYcacUYKLJRquXRHLr8RAzT9hY=";
};
nativeBuildInputs = [ dotnetSdk dotnetPackages.Nuget makeWrapper ];
projectFile = "NBXplorer/NBXplorer.csproj";
nugetDeps = ./deps.nix;
buildPhase = ''
export HOME=$TMP/home
export DOTNET_CLI_TELEMETRY_OPTOUT=1
export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
dotnet-sdk = dotnetCorePackages.sdk_3_1;
dotnet-runtime = dotnetCorePackages.aspnetcore_3_1;
nuget sources Add -Name tmpsrc -Source $TMP/nuget
nuget init ${linkFarmFromDrvs "deps" deps} $TMP/nuget
dotnet restore --source $TMP/nuget NBXplorer/NBXplorer.csproj
dotnet publish --no-restore --output $out/share/$pname -c Release NBXplorer/NBXplorer.csproj
postInstall = ''
mv $out/bin/{NBXplorer,nbxplorer}
'';
installPhase = ''
makeWrapper $out/share/$pname/NBXplorer $out/bin/$pname \
--set DOTNET_ROOT "${dotnetSdk}"
'';
dontStrip = true;
meta = with lib; {
description = "Minimalist UTXO tracker for HD Cryptocurrency Wallets";
maintainers = with maintainers; [ kcalvinalvin earvstedt ];
license = lib.licenses.mit;
platforms = lib.platforms.linux;
license = licenses.mit;
platforms = platforms.linux;
};
}

View file

@@ -1,180 +1,96 @@
{ lib, stdenv, fetchurl, makeWrapper, pkg-config, which, maven, cmake, jre, jdk8, bash
, coreutils, glibc, protobuf2_5, fuse, snappy, zlib, bzip2, openssl, openssl_1_0_2, fetchpatch, libtirpc
{ lib, stdenv, fetchurl, makeWrapper, autoPatchelfHook
, jdk8_headless, jdk11_headless
, bash, coreutils, which
, bzip2, cyrus_sasl , protobuf3_7, snappy, zlib, zstd
, openssl
}:
with lib;
let
maven-jdk8 = maven.override {
jdk = jdk8;
};
common = { version, sha256, dependencies-sha256, maven, tomcat, opensslPkg ? openssl }:
let
# compile the hadoop tarball from sources, it requires some patches
binary-distributon = stdenv.mkDerivation rec {
name = "hadoop-${version}-bin";
src = fetchurl {
url = "mirror://apache/hadoop/common/hadoop-${version}/hadoop-${version}-src.tar.gz";
inherit sha256;
};
postUnpack = lib.optionalString (tomcat != null) ''
install -D ${tomcat.src} $sourceRoot/hadoop-hdfs-project/hadoop-hdfs-httpfs/downloads/apache-tomcat-${tomcat.version}.tar.gz
install -D ${tomcat.src} $sourceRoot/hadoop-common-project/hadoop-kms/downloads/apache-tomcat-${tomcat.version}.tar.gz
'';
# perform fake build to make a fixed-output derivation of dependencies downloaded from maven central (~100Mb in ~3000 files)
fetched-maven-deps = stdenv.mkDerivation {
name = "hadoop-${version}-maven-deps";
inherit src postUnpack nativeBuildInputs buildInputs;
buildPhase = ''
while mvn package -Dmaven.repo.local=$out/.m2 ${mavenFlags} -Dmaven.wagon.rto=5000; [ $? = 1 ]; do
echo "timeout, restart maven to continue downloading"
done
'';
# keep only *.{pom,jar,xml,sha1,so,dll,dylib} and delete all ephemeral files with lastModified timestamps inside
installPhase = ''find $out/.m2 -type f -regex '.+\(\.lastUpdated\|resolver-status\.properties\|_remote\.repositories\)' -delete'';
outputHashAlgo = "sha256";
outputHashMode = "recursive";
outputHash = dependencies-sha256;
};
nativeBuildInputs = [ maven cmake pkg-config ];
buildInputs = [ fuse snappy zlib bzip2 opensslPkg protobuf2_5 libtirpc ];
NIX_CFLAGS_COMPILE = [ "-I${libtirpc.dev}/include/tirpc" ];
NIX_LDFLAGS = [ "-ltirpc" ];
# most of the hardcoded paths are fixed in 2.9.x and 3.0.0; this list of patched files might be reduced when 2.7.x and 2.8.x are deprecated
patches = [
(fetchpatch {
url = "https://patch-diff.githubusercontent.com/raw/apache/hadoop/pull/2886.patch";
sha256 = "1fim1d8va050za5i8a6slphmx015fzvhxkc2wi4rwg7kbj31sv0r";
})
];
postPatch = ''
for file in hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java \
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java \
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java \
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java \
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java \
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java; do
if [ -f "$file" ]; then
substituteInPlace "$file" \
--replace '/usr/bin/stat' 'stat' \
--replace '/bin/bash' 'bash' \
--replace '/bin/ls' 'ls' \
--replace '/bin/mv' 'mv'
fi
done
'';
dontConfigure = true; # do not trigger cmake hook
mavenFlags = "-Drequire.snappy -Drequire.bzip2 -DskipTests -Pdist,native -e";
buildPhase = ''
# 'maven.repo.local' must be writable
mvn package --offline -Dmaven.repo.local=$(cp -dpR ${fetched-maven-deps}/.m2 ./ && chmod +w -R .m2 && pwd)/.m2 ${mavenFlags}
# remove runtime dependency on $jdk/jre/lib/amd64/server/libjvm.so
patchelf --set-rpath ${lib.makeLibraryPath [glibc]} hadoop-dist/target/hadoop-${version}/lib/native/libhadoop.so.1.0.0
patchelf --set-rpath ${lib.makeLibraryPath [glibc]} hadoop-dist/target/hadoop-${version}/lib/native/libhdfs.so.0.0.0
'';
installPhase = "mv hadoop-dist/target/hadoop-${version} $out";
};
in
stdenv.mkDerivation {
pname = "hadoop";
inherit version;
src = binary-distributon;
nativeBuildInputs = [ makeWrapper ];
installPhase = ''
mkdir -p $out/share/doc/hadoop
cp -dpR * $out/
mv $out/*.txt $out/share/doc/hadoop/
#
# Do not use `wrapProgram` here, script renaming may result to weird things: http://i.imgur.com/0Xee013.png
#
mkdir -p $out/bin.wrapped
for n in $out/bin/*; do
if [ -f "$n" ]; then # only regular files
mv $n $out/bin.wrapped/
makeWrapper $out/bin.wrapped/$(basename $n) $n \
--prefix PATH : "${lib.makeBinPath [ which jre bash coreutils ]}" \
--prefix JAVA_LIBRARY_PATH : "${lib.makeLibraryPath [ opensslPkg snappy zlib bzip2 ]}" \
--set JAVA_HOME "${jre}" \
--set HADOOP_PREFIX "$out"
fi
done
'';
meta = with lib; {
homepage = "https://hadoop.apache.org/";
description = "Framework for distributed processing of large data sets across clusters of computers";
license = licenses.asl20;
longDescription = ''
The Apache Hadoop software library is a framework that allows for
the distributed processing of large data sets across clusters of
computers using a simple programming model. It is designed to
scale up from single servers to thousands of machines, each
offering local computation and storage. Rather than rely on
hardware to deliver high availability, the library itself is
designed to detect and handle failures at the application layer,
so delivering a highly available service on top of a cluster of
computers, each of which may be prone to failures.
'';
maintainers = with maintainers; [ volth ];
platforms = [ "x86_64-linux" ];
};
common = { pname, version, untarDir ? "${pname}-${version}", sha256, jdk, openssl, nativeLibs ? [ ], libPatches ? "" }:
stdenv.mkDerivation rec {
inherit pname version jdk libPatches untarDir openssl;
src = fetchurl {
url = "mirror://apache/hadoop/common/hadoop-${version}/hadoop-${version}.tar.gz";
inherit sha256;
};
nativeBuildInputs = [ makeWrapper ]
++ optional (nativeLibs != [] || libPatches != "") [ autoPatchelfHook ];
buildInputs = [ openssl ] ++ nativeLibs;
installPhase = ''
mkdir -p $out/{lib/${untarDir}/conf,bin,lib}
mv * $out/lib/${untarDir}
for n in $(find $out/lib/${untarDir}/bin -type f ! -name "*.*"); do
makeWrapper "$n" "$out/bin/$(basename $n)"\
--set-default JAVA_HOME ${jdk.home}\
--set-default HADOOP_HOME $out/lib/${untarDir}\
--set-default HADOOP_CONF_DIR /etc/hadoop-conf/\
--prefix PATH : "${makeBinPath [ bash coreutils which]}"\
--prefix JAVA_LIBRARY_PATH : "${makeLibraryPath buildInputs}"
done
'' + libPatches;
meta = {
homepage = "https://hadoop.apache.org/";
description = "Framework for distributed processing of large data sets across clusters of computers";
license = licenses.asl20;
longDescription = ''
The Apache Hadoop software library is a framework that allows for
the distributed processing of large data sets across clusters of
computers using a simple programming model. It is designed to
scale up from single servers to thousands of machines, each
offering local computation and storage. Rather than rely on
hardware to deliver high availability, the library itself is
designed to detect and handle failures at the application layer,
so delivering a highly available service on top of a cluster of
computers, each of which may be prone to failures.
'';
maintainers = with maintainers; [ volth illustris ];
platforms = [ "x86_64-linux" ];
};
tomcat_6_0_48 = rec {
version = "6.0.48";
src = fetchurl {
# do not use "mirror://apache/" here, tomcat-6 is legacy and has been removed from the mirrors
url = "https://archive.apache.org/dist/tomcat/tomcat-6/v${version}/bin/apache-tomcat-${version}.tar.gz";
sha256 = "1w4jf28g8p25fmijixw6b02iqlagy2rvr57y3n90hvz341kb0bbc";
};
in
{
# Different versions of hadoop support different java runtime versions
# https://cwiki.apache.org/confluence/display/HADOOP/Hadoop+Java+Versions
hadoop_3_3 = common rec {
pname = "hadoop";
version = "3.3.1";
sha256 = "1b3v16ihysqaxw8za1r5jlnphy8dwhivdx2d0z64309w57ihlxxd";
untarDir = "${pname}-${version}";
jdk = jdk11_headless;
inherit openssl;
# TODO: Package and add Intel Storage Acceleration Library
nativeLibs = [ stdenv.cc.cc.lib protobuf3_7 zlib snappy ];
libPatches = ''
ln -s ${getLib cyrus_sasl}/lib/libsasl2.so $out/lib/${untarDir}/lib/native/libsasl2.so.2
ln -s ${getLib openssl}/lib/libcrypto.so $out/lib/${untarDir}/lib/native/
ln -s ${getLib zlib}/lib/libz.so.1 $out/lib/${untarDir}/lib/native/
ln -s ${getLib zstd}/lib/libzstd.so.1 $out/lib/${untarDir}/lib/native/
ln -s ${getLib bzip2}/lib/libbz2.so.1 $out/lib/${untarDir}/lib/native/
patchelf --add-rpath ${jdk.home}/lib/server $out/lib/${untarDir}/lib/native/libnativetask.so.1.0.0
'';
};
in {
hadoop_2_7 = common {
version = "2.7.7";
sha256 = "1ahv67f3lwak3kbjvnk1gncq56z6dksbajj872iqd0awdsj3p5rf";
dependencies-sha256 = "1lsr9nvrynzspxqcamb10d596zlnmnfpxhkd884gdiva0frm0b1r";
tomcat = tomcat_6_0_48;
opensslPkg = openssl_1_0_2;
maven = maven-jdk8;
hadoop_3_2 = common rec {
pname = "hadoop";
version = "3.2.2";
sha256 = "1hxq297cqvkfgz2yfdiwa3l28g44i2abv5921k2d6b4pqd33prwp";
jdk = jdk8_headless;
# not using native libs because of broken openssl_1_0_2 dependency
# can be manually overridden
openssl = null;
};
hadoop_2_8 = common {
version = "2.8.4";
sha256 = "16c3ljhrzibkjn3y1bmjxdgf0kn60l23ay5hqpp7vpbnqx52x68w";
dependencies-sha256 = "1j4f461487fydgr5978nnm245ksv4xbvskfr8pbmfhcyss6b7w03";
tomcat = tomcat_6_0_48;
opensslPkg = openssl_1_0_2;
maven = maven-jdk8;
};
hadoop_2_9 = common {
version = "2.9.1";
sha256 = "0qgmpfbpv7f521fkjy5ldzdb4lwiblhs0hyl8qy041ws17y5x7d7";
dependencies-sha256 = "1d5i8jj5y746rrqb9lscycnd7acmxlkz64ydsiyqsh5cdqgy2x7x";
tomcat = tomcat_6_0_48;
opensslPkg = openssl_1_0_2;
maven = maven-jdk8;
};
hadoop_3_0 = common {
version = "3.0.3";
sha256 = "1vvkci0kx4b48dg0niifn2d3r4wwq8pb3c5z20wy8pqsqrqhlci5";
dependencies-sha256 = "1kzkna9ywacm2m1cirj9cyip66bgqjhid2xf9rrhq6g10lhr8j9m";
tomcat = null;
maven = maven-jdk8;
};
hadoop_3_1 = common {
version = "3.1.1";
sha256 = "04hhdbyd4x1hy0fpy537f8mi0864hww97zap29x7dk1smrffwabd";
dependencies-sha256 = "1q63jsxg3d31x0p8hvhpvbly2b07almyzsbhwphbczl3fhlqgiwn";
tomcat = null;
maven = maven-jdk8;
hadoop2 = common rec {
pname = "hadoop";
version = "2.10.1";
sha256 = "1w31x4bk9f2swnx8qxx0cgwfg8vbpm6cy5lvfnbbpl3rsjhmyg97";
jdk = jdk8_headless;
openssl = null;
};
}

View file

@@ -338,6 +338,8 @@ stdenv.mkDerivation {
disable_test t6300-for-each-ref
#===( 22665;1651 9/? 1/? 0/? 0/? )= =/private/tmp/nix-build-git-2.33.0.drv-2/git-2.33.0/t/../contrib/completion/git-completion.bash: line 405: compgen: command not found
disable_test t9902-completion
# not ok 1 - populate workdir (with 2.33.1 on x86_64-darwin)
disable_test t5003-archive-zip
'' + lib.optionalString stdenv.hostPlatform.isMusl ''
# Test fails (as of 2.17.0, musl 1.1.19)
disable_test t3900-i18n-commit

View file

@@ -51,6 +51,8 @@ let
});
package = stdenv.mkDerivation (args // {
inherit buildType;
nativeBuildInputs = args.nativeBuildInputs or [] ++ [ dotnet-sdk dotnetPackages.Nuget cacert makeWrapper ];
# Stripping breaks the executable
@@ -71,7 +73,7 @@ let
mkdir -p $HOME/.nuget/NuGet
cp $HOME/.config/NuGet/NuGet.Config $HOME/.nuget/NuGet
dotnet restore ${lib.escapeShellArg projectFile} \
dotnet restore "$projectFile" \
${lib.optionalString (!enableParallelBuilding) "--disable-parallel"} \
-p:ContinuousIntegrationBuild=true \
-p:Deterministic=true \
@@ -85,13 +87,13 @@ let
buildPhase = args.buildPhase or ''
runHook preBuild
dotnet build ${lib.escapeShellArg projectFile} \
dotnet build "$projectFile" \
-maxcpucount:${if enableParallelBuilding then "$NIX_BUILD_CORES" else "1"} \
-p:BuildInParallel=${if enableParallelBuilding then "true" else "false"} \
-p:ContinuousIntegrationBuild=true \
-p:Deterministic=true \
-p:Version=${args.version} \
--configuration ${buildType} \
--configuration "$buildType" \
--no-restore \
"''${dotnetBuildFlags[@]}" \
"''${dotnetFlags[@]}"
@@ -102,17 +104,17 @@ let
installPhase = args.installPhase or ''
runHook preInstall
dotnet publish ${lib.escapeShellArg projectFile} \
dotnet publish "$projectFile" \
-p:ContinuousIntegrationBuild=true \
-p:Deterministic=true \
--output $out/lib/${args.pname} \
--configuration ${buildType} \
--configuration "$buildType" \
--no-build \
--no-self-contained \
"''${dotnetInstallFlags[@]}" \
"''${dotnetFlags[@]}"
'' + (if executables != null then ''
for executable in ''${executables}; do
for executable in $executables; do
execPath="$out/lib/${args.pname}/$executable"
if [[ -f "$execPath" && -x "$execPath" ]]; then
@@ -120,7 +122,7 @@ let
--set DOTNET_ROOT "${dotnet-runtime}" \
--suffix LD_LIBRARY_PATH : "${lib.makeLibraryPath runtimeDeps}" \
"''${gappsWrapperArgs[@]}" \
''${makeWrapperArgs}
"''${makeWrapperArgs[@]}"
else
echo "Specified binary \"$executable\" is either not an executable, or does not exist!"
exit 1
@@ -133,7 +135,7 @@ let
--set DOTNET_ROOT "${dotnet-runtime}" \
--suffix LD_LIBRARY_PATH : "${lib.makeLibraryPath runtimeDeps}" \
"''${gappsWrapperArgs[@]}" \
''${makeWrapperArgs}
"''${makeWrapperArgs[@]}"
fi
done
'') + ''

View file

@@ -0,0 +1 @@
Hello there!

View file

@@ -0,0 +1,8 @@
{ invalidateFetcherByDrvHash, fetchpatch, ... }:
{
simple = invalidateFetcherByDrvHash fetchpatch {
url = "https://github.com/facebook/zstd/pull/2724/commits/e1f85dbca3a0ed5ef06c8396912a0914db8dea6a.patch";
sha256 = "sha256-PuYAqnJWAE+L9bsroOnnBGJhERW8LHrGSLtIEkKU9vg=";
};
}

View file

@@ -0,0 +1,40 @@
{ lib, stdenv, fetchurl, perl, linuxPackages_latest }:
stdenv.mkDerivation rec {
pname = "linux-manual";
inherit (linuxPackages_latest.kernel) version src;
nativeBuildInputs = [ perl ];
dontConfigure = true;
dontBuild = true;
postPatch = ''
patchShebangs --build \
scripts/kernel-doc \
scripts/split-man.pl
'';
installPhase = ''
mandir=$out/share/man/man9
mkdir -p $mandir
KBUILD_BUILD_TIMESTAMP=$(stat -c %Y Makefile) \
grep -F -l -Z \
--exclude-dir Documentation \
--exclude-dir tools \
-R '/**' \
| xargs -0 -n 256 -P $NIX_BUILD_CORES \
$SHELL -c '{ scripts/kernel-doc -man "$@" || :; } \
| scripts/split-man.pl '$mandir kernel-doc
test -f $mandir/kmalloc.9
'';
meta = with lib; {
homepage = "https://kernel.org/";
description = "Linux kernel API manual pages";
license = licenses.gpl2Only;
maintainers = with maintainers; [ mvs ];
};
}

View file

@@ -2,7 +2,7 @@
buildDunePackage rec {
pname = "minisat";
version = "0.3";
version = "0.4";
useDune2 = true;
@@ -12,7 +12,7 @@ buildDunePackage rec {
owner = "c-cube";
repo = "ocaml-minisat";
rev = "v${version}";
sha256 = "01wggbziqz5x6d7mwdl40sbf6qal7fd853b224zjf9n0kzzsnczh";
sha256 = "009jncrvnl9synxx6jnm6gp0cs7zlj71z22zz7bs1750b0jrfm2r";
};
meta = {

View file

@@ -8,7 +8,7 @@
buildPythonPackage rec {
pname = "aiomusiccast";
version = "0.11.0";
version = "0.12.0";
format = "pyproject";
@@ -18,7 +18,7 @@ buildPythonPackage rec {
owner = "vigonotion";
repo = "aiomusiccast";
rev = version;
sha256 = "sha256-58l9bmGmSRFHqiKwHtGcAu2H+JnKjsZZg+fxbLW5xBg=";
sha256 = "sha256-LZaRxpjTNkrf8JAWTQLp5LcWOf+epXOnvidJl2tEEE4=";
};
nativeBuildInputs = [
@@ -32,7 +32,9 @@ buildPythonPackage rec {
# upstream has no tests
doCheck = false;
pythonImportsCheck = [ "aiomusiccast" ];
pythonImportsCheck = [
"aiomusiccast"
];
meta = with lib; {
description = "Companion library for musiccast devices intended for the Home Assistant integration";

View file

@@ -80,6 +80,6 @@ buildPythonPackage rec {
homepage = "https://github.com/psf/black";
changelog = "https://github.com/psf/black/blob/${version}/CHANGES.md";
license = licenses.mit;
maintainers = with maintainers; [ sveitser ];
maintainers = with maintainers; [ sveitser autophagy ];
};
}

View file

@@ -4,14 +4,14 @@
, stdenv
, makeWrapper
, fetchurl
, nodejs-14_x
, nodejs-10_x
, coreutils
, which
}:
with lib;
let
nodejs = nodejs-14_x;
nodejs = nodejs-10_x;
inherit (builtins) elemAt;
info = splitString "-" stdenv.hostPlatform.system;
arch = elemAt info 0;

View file

@@ -2,16 +2,16 @@
buildGoModule rec {
pname = "steampipe";
version = "0.8.5";
version = "0.9.0";
src = fetchFromGitHub {
owner = "turbot";
repo = "steampipe";
rev = "v${version}";
sha256 = "sha256-3vetSUJwCeaBzKj+635siskfcDPs/kkgCH954cg/REA=";
sha256 = "sha256-wG5KvyY40CNxIScuQHQdJ4u8fzNU+oV7iNe9VAvTQMg=";
};
vendorSha256 = "sha256-TGDFNHWWbEy1cD7b2yPqAN7rRrLvL0ZX/R3BWGRWjjw=";
vendorSha256 = "sha256-3JBCiF1gxGCVn81s7abGvNIAy+eP7orAnSBOXUNImao=";
# tests are failing for no obvious reasons
doCheck = false;

View file

@@ -0,0 +1,8 @@
{ callPackage, python3, ... } @ args:
callPackage ./generic.nix (args // {
version = "0.4.2";
sha256 = "sha256-iHWwll/jPeYriQ9s15O+f6/kGk5VLtv2QfH+1eu/Re0=";
# for gitdiff
extraBuildInputs = [ python3 ];
})

View file

@@ -1,5 +1,5 @@
{ lib, stdenv, fetchurl, perl
, version, sha256, patches ? []
{ lib, stdenv, fetchurl, perl, makeWrapper
, version, sha256, patches ? [], extraBuildInputs ? []
, ...
}:
stdenv.mkDerivation rec {
@@ -11,18 +11,32 @@ stdenv.mkDerivation rec {
inherit sha256;
};
buildInputs = [ perl ];
nativeBuildInputs = [ makeWrapper ];
buildInputs = [ perl ] ++ extraBuildInputs;
hardeningDisable = [ "format" ];
doCheck = false; # fails
postInstall = ''
for bin in $out/bin/{splitdiff,rediff,editdiff,dehtmldiff}; do
wrapProgram "$bin" \
--prefix PATH : "$out/bin"
done
'';
doCheck = lib.versionAtLeast version "0.3.4";
preCheck = ''
patchShebangs tests
chmod +x scripts/*
'' + lib.optionalString (lib.versionOlder version "0.4.2") ''
find tests -type f -name 'run-test' \
-exec sed -i '{}' -e 's|/bin/echo|echo|g' \;
'';
meta = with lib; {
description = "Tools to manipulate patch files";
homepage = "http://cyberelk.net/tim/software/patchutils";
license = licenses.gpl2Plus;
platforms = platforms.all;
executables = [ "combinediff" "dehtmldiff" "editdiff" "espdiff"
"filterdiff" "fixcvsdiff" "flipdiff" "grepdiff" "interdiff" "lsdiff"
"recountdiff" "rediff" "splitdiff" "unwrapdiff" ];
maintainers = with maintainers; [ artturin ];
};
}

View file

@@ -509,7 +509,10 @@ with pkgs;
mht2htm = callPackage ../tools/misc/mht2htm { };
fetchpatch = callPackage ../build-support/fetchpatch { };
fetchpatch = callPackage ../build-support/fetchpatch { }
// {
tests = callPackages ../build-support/fetchpatch/tests.nix { };
};
fetchs3 = callPackage ../build-support/fetchs3 { };
@@ -8298,6 +8301,8 @@ with pkgs;
patchutils_0_3_3 = callPackage ../tools/text/patchutils/0.3.3.nix { };
patchutils_0_4_2 = callPackage ../tools/text/patchutils/0.4.2.nix { };
parted = callPackage ../tools/misc/parted { };
passh = callPackage ../tools/networking/passh { };
@@ -13178,15 +13183,12 @@ with pkgs;
groovy = callPackage ../development/interpreters/groovy { };
inherit (callPackages ../applications/networking/cluster/hadoop {
jre = jre8; # TODO: remove override https://github.com/NixOS/nixpkgs/pull/89731
})
hadoop_2_7
hadoop_2_8
hadoop_2_9
hadoop_3_0
hadoop_3_1;
hadoop = hadoop_2_7;
inherit (callPackages ../applications/networking/cluster/hadoop { })
hadoop_3_3
hadoop_3_2
hadoop2;
hadoop3 = hadoop_3_3;
hadoop = hadoop3;
io = callPackage ../development/interpreters/io { };
@@ -13540,7 +13542,7 @@ with pkgs;
self = pkgsi686Linux.callPackage ../development/interpreters/self { };
inherit (callPackages ../applications/networking/cluster/spark { hadoop = hadoop_3_1; })
inherit (callPackages ../applications/networking/cluster/spark { })
spark3
spark2;
spark = spark3;
@@ -23067,6 +23069,8 @@ with pkgs;
line-awesome = callPackage ../data/fonts/line-awesome { };
linux-manual = callPackage ../data/documentation/linux-manual { };
lmmath = callPackage ../data/fonts/lmmath {};
lmodern = callPackage ../data/fonts/lmodern { };