Merge pull request #6045 from ragnard/apache-kafka

apache-kafka: New package and service
lethalman 2015-01-30 10:50:14 +01:00
commit 41cca7d2d6
5 changed files with 208 additions and 1 deletion

nixos/modules/misc/ids.nix

@@ -176,7 +176,7 @@
    tox-bootstrapd = 166;
    cadvisor = 167;
    nylon = 168;
    apache-kafka = 169;
    # When adding a uid, make sure it doesn't match an existing gid. And don't use uids above 399!
    nixbld = 30000; # start of range of uids

nixos/modules/module-list.nix

@@ -170,6 +170,7 @@
  ./services/mail/opensmtpd.nix
  ./services/mail/postfix.nix
  ./services/mail/spamassassin.nix
  ./services/misc/apache-kafka.nix
  #./services/misc/autofs.nix
  ./services/misc/cpuminer-cryptonight.nix
  ./services/misc/cgminer.nix

nixos/modules/services/misc/apache-kafka.nix

@@ -0,0 +1,156 @@
{ config, lib, pkgs, ... }:

with lib;

let
  cfg = config.services.apache-kafka;

  serverProperties =
    if cfg.serverProperties != null then
      cfg.serverProperties
    else
      ''
        # Generated by nixos
        broker.id=${toString cfg.brokerId}
        port=${toString cfg.port}
        host.name=${cfg.hostname}
        log.dirs=${concatStringsSep "," cfg.logDirs}
        zookeeper.connect=${cfg.zookeeper}
        ${toString cfg.extraProperties}
      '';

  configDir = pkgs.buildEnv {
    name = "apache-kafka-conf";
    paths = [
      (pkgs.writeTextDir "server.properties" serverProperties)
      (pkgs.writeTextDir "log4j.properties" cfg.log4jProperties)
    ];
  };

in {

  options.services.apache-kafka = {
    enable = mkOption {
      description = "Whether to enable Apache Kafka.";
      default = false;
      type = types.uniq types.bool;
    };

    brokerId = mkOption {
      description = "Broker ID.";
      default = 0;
      type = types.int;
    };

    port = mkOption {
      description = "Port number the broker should listen on.";
      default = 9092;
      type = types.int;
    };

    hostname = mkOption {
      description = "Hostname the broker should bind to.";
      default = "localhost";
      type = types.string;
    };

    logDirs = mkOption {
      description = "Log file directories";
      default = [ "/tmp/kafka-logs" ];
      type = types.listOf types.path;
    };

    zookeeper = mkOption {
      description = "Zookeeper connection string";
      default = "localhost:2181";
      type = types.string;
    };

    extraProperties = mkOption {
      description = "Extra properties for server.properties.";
      type = types.nullOr types.lines;
      default = null;
    };

    serverProperties = mkOption {
      description = ''
        Complete server.properties content. Other server.properties config
        options will be ignored if this option is used.
      '';
      type = types.nullOr types.lines;
      default = null;
    };

    log4jProperties = mkOption {
      description = "Kafka log4j property configuration.";
      default = ''
        log4j.rootLogger=INFO, stdout
        log4j.appender.stdout=org.apache.log4j.ConsoleAppender
        log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
        log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
      '';
      type = types.lines;
    };

    jvmOptions = mkOption {
      description = "Extra command line options for the JVM running Kafka.";
      default = [
        "-server"
        "-Xmx1G"
        "-Xms1G"
        "-XX:+UseCompressedOops"
        "-XX:+UseParNewGC"
        "-XX:+UseConcMarkSweepGC"
        "-XX:+CMSClassUnloadingEnabled"
        "-XX:+CMSScavengeBeforeRemark"
        "-XX:+DisableExplicitGC"
        "-Djava.awt.headless=true"
        "-Djava.net.preferIPv4Stack=true"
      ];
      type = types.listOf types.string;
      example = [
        "-Djava.net.preferIPv4Stack=true"
        "-Dcom.sun.management.jmxremote"
        "-Dcom.sun.management.jmxremote.local.only=true"
      ];
    };

  };

  config = mkIf cfg.enable {
    environment.systemPackages = [pkgs.apacheKafka];

    users.extraUsers = singleton {
      name = "apache-kafka";
      uid = config.ids.uids.apache-kafka;
      description = "Apache Kafka daemon user";
      home = head cfg.logDirs;
    };

    systemd.services.apache-kafka = {
      description = "Apache Kafka Daemon";
      wantedBy = [ "multi-user.target" ];
      after = [ "network-interfaces.target" ];
      serviceConfig = {
        ExecStart = ''
          ${pkgs.jre}/bin/java \
            -cp "${pkgs.apacheKafka}/libs/*:${configDir}" \
            ${toString cfg.jvmOptions} \
            kafka.Kafka \
            ${configDir}/server.properties
        '';
        User = "apache-kafka";
        PermissionsStartOnly = true;
      };
      preStart = ''
        mkdir -m 0700 -p ${concatStringsSep " " cfg.logDirs}
        if [ "$(id -u)" = 0 ]; then
          chown apache-kafka ${concatStringsSep " " cfg.logDirs};
        fi
      '';
    };
  };
}
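
Taken together, the options declared above let a machine run a single broker with only a few lines of NixOS configuration. The snippet below is a minimal sketch, not part of the commit: the hostname, log directory and extra property are illustrative values (the module's defaults are "localhost", [ "/tmp/kafka-logs" ] and no extra properties). Setting serverProperties instead would replace the generated server.properties wholesale, as its option description notes.

  services.apache-kafka = {
    enable = true;
    brokerId = 0;
    hostname = "kafka1.example.org";   # illustrative; module default is "localhost"
    logDirs = [ "/data/kafka-logs" ];  # illustrative; module default is [ "/tmp/kafka-logs" ]
    zookeeper = "localhost:2181";
    extraProperties = ''
      num.partitions=4
    '';
  };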

pkgs/servers/apache-kafka/default.nix

@@ -0,0 +1,48 @@
{ stdenv, fetchurl, jre, makeWrapper, bash }:

let
  kafkaVersion = "0.8.1.1";
  scalaVersion = "2.8.0";
in

stdenv.mkDerivation rec {
  version = "${scalaVersion}-${kafkaVersion}";
  name = "apache-kafka-${version}";

  src = fetchurl {
    url = "mirror://apache/kafka/${kafkaVersion}/kafka_${version}.tgz";
    sha256 = "1bya4qs0ccrqibmdivgdxcsyiay4c3vywddrkci1dz9v3ymrqby9";
  };

  buildInputs = [ jre makeWrapper bash ];

  installPhase = ''
    mkdir -p $out
    cp -R config libs $out

    mkdir -p $out/bin
    cp bin/kafka* $out/bin

    # allow us to specify logging directory using env
    substituteInPlace $out/bin/kafka-run-class.sh \
      --replace 'LOG_DIR=$base_dir/logs' 'LOG_DIR=$KAFKA_LOG_DIR'

    for p in $out/bin\/*.sh; do
      wrapProgram $p \
        --set JAVA_HOME "${jre}" \
        --set KAFKA_LOG_DIR "/tmp/apache-kafka-logs" \
        --prefix PATH : "${bash}/bin"
    done
    chmod +x $out/bin\/*
  '';

  meta = with stdenv.lib; {
    homepage = "http://kafka.apache.org";
    description = "A high-throughput distributed messaging system";
    license = licenses.asl20;
    maintainers = [ maintainers.ragge ];
    platforms = platforms.unix;
  };
}
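
Since the derivation only takes ordinary function arguments, it can also be evaluated on its own from a checkout of this tree, independently of the apacheKafka attribute registered in all-packages.nix below. A minimal sketch, assuming the checkout root as the working directory:

  # Sketch only: build this expression directly with callPackage,
  # e.g. via `nix-build -E '...'` or a scratch default.nix.
  let
    pkgs = import ./. { };
  in
    pkgs.callPackage ./pkgs/servers/apache-kafka { }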

pkgs/top-level/all-packages.nix

@@ -4515,6 +4515,8 @@ let
  apacheAnt = callPackage ../development/tools/build-managers/apache-ant { };

  apacheKafka = callPackage ../servers/apache-kafka { };

  astyle = callPackage ../development/tools/misc/astyle { };

  atom-shell = callPackage ../development/tools/atom-shell {