Merge staging-next into staging

commit 84962daf66 by github-actions[bot], 2022-03-25 18:01:48 +00:00, committed by GitHub
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
75 changed files with 766 additions and 394 deletions

View file

@ -22,7 +22,7 @@ jobs:
if: steps.ismerge.outputs.ismerge != 'true'
- name: Warn if the commit was a direct push
if: steps.ismerge.outputs.ismerge != 'true'
uses: peter-evans/commit-comment@v1
uses: peter-evans/commit-comment@v2
with:
body: |
@${{ github.actor }}, you pushed a commit directly to master/release branch

View file

@ -540,10 +540,11 @@ If you do need to do create this sort of patch file, one way to do so is with gi
If a patch is available online but does not cleanly apply, it can be modified in some fixed ways by using additional optional arguments for `fetchpatch`:
- `relative`: Similar to using `git-diff`'s `--relative` flag, only keep changes inside the specified directory, making paths relative to it.
- `stripLen`: Remove the first `stripLen` components of pathnames in the patch.
- `extraPrefix`: Prefix pathnames by this string.
- `excludes`: Exclude files matching this pattern.
- `includes`: Include only files matching this pattern.
- `excludes`: Exclude files matching these patterns (applies after the above arguments).
- `includes`: Include only files matching these patterns (applies after the above arguments).
- `revert`: Revert the patch.
Note that because the checksum is computed after applying these effects, using or modifying these arguments will have no effect unless the `sha256` argument is changed as well.
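For example, the Boost patch used by the `fetchpatch` tests added later in this commit keeps only the hunks under `include/` and rewrites the remaining paths relative to that directory:

```nix
fetchpatch {
  url = "https://github.com/boostorg/math/commit/7d482f6ebc356e6ec455ccb5f51a23971bf6ce5b.patch";
  # Keep only changes inside include/ and strip that prefix from the paths.
  relative = "include";
  sha256 = "sha256-KlmIbixcds6GyKYt1fx5BxDIrU7msrgDdYo9Va/KJR4=";
}
```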

View file

@ -122,7 +122,7 @@ Reviewing process:
- [CODEOWNERS](https://help.github.com/articles/about-codeowners/) will make GitHub notify users based on the submitted changes, but it can happen that it misses some of the package maintainers.
- Ensure that the module tests, if any, are succeeding.
- Ensure that the introduced options are correct.
- Type should be appropriate (string related types differs in their merging capabilities, `optionSet` and `string` types are deprecated).
- Type should be appropriate (string related types differs in their merging capabilities, `loaOf` and `string` types are deprecated).
- Description, default and example should be provided.
- Ensure that option changes are backward compatible.
- `mkRenamedOptionModuleWith` provides a way to make option changes backward compatible.
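The `mkRenamedOptionModuleWith` helper mentioned above can be used roughly as follows; this is only a sketch, and the module and option names are hypothetical:

```nix
{ lib, ... }:
{
  imports = [
    # Map the old option path onto the new one; sinceRelease records the
    # first release (here 22.05) that shipped the rename.
    (lib.mkRenamedOptionModuleWith {
      sinceRelease = 2205;
      from = [ "services" "foo" "user" ];
      to = [ "services" "foo" "owner" ];
    })
  ];
}
```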
@ -157,7 +157,7 @@ Reviewing process:
- Ensure that the module tests, if any, are succeeding.
- Ensure that the introduced options are correct.
- Type should be appropriate (string related types differs in their merging capabilities, `optionSet` and `string` types are deprecated).
- Type should be appropriate (string related types differs in their merging capabilities, `loaOf` and `string` types are deprecated).
- Description, default and example should be provided.
- Ensure that module `meta` field is present
- Maintainers should be declared in `meta.maintainers`.

View file

@ -609,17 +609,9 @@ rec {
throw "The option `${showOption loc}' in `${opt._file}' is already declared in ${showFiles res.declarations}."
else
let
/* Add the modules of the current option to the list of modules
already collected. The options attribute except either a list of
submodules or a submodule. For each submodule, we add the file of the
current option declaration as the file use for the submodule. If the
submodule defines any filename, then we ignore the enclosing option file. */
options' = toList opt.options.options;
getSubModules = opt.options.type.getSubModules or null;
submodules =
if getSubModules != null then map (setDefaultModuleLocation opt._file) getSubModules ++ res.options
else if opt.options ? options then map (coerceOption opt._file) options' ++ res.options
else res.options;
in opt.options // res //
{ declarations = res.declarations ++ [opt._file];
@ -802,27 +794,13 @@ rec {
compare = a: b: (a.priority or 1000) < (b.priority or 1000);
in sort compare defs';
# This calls substSubModules, whose entire purpose is only to ensure that
# option declarations in submodules have accurate position information.
# TODO: Merge this into mergeOptionDecls
fixupOptionType = loc: opt:
let
options = opt.options or
(throw "Option `${showOption loc}' has type optionSet but has no option attribute, in ${showFiles opt.declarations}.");
# Hack for backward compatibility: convert options of type
# optionSet to options of type submodule. FIXME: remove
# eventually.
f = tp:
if tp.name == "option set" || tp.name == "submodule" then
throw "The option ${showOption loc} uses submodules without a wrapping type, in ${showFiles opt.declarations}."
else if (tp.functor.wrapped.name or null) == "optionSet" then
if tp.name == "attrsOf" then types.attrsOf (types.submodule options)
else if tp.name == "listOf" then types.listOf (types.submodule options)
else if tp.name == "nullOr" then types.nullOr (types.submodule options)
else tp
else tp;
in
if opt.type.getSubModules or null == null
then opt // { type = f (opt.type or types.unspecified); }
else opt // { type = opt.type.substSubModules opt.options; options = []; };
if opt.type.getSubModules or null == null
then opt // { type = opt.type or types.unspecified; }
else opt // { type = opt.type.substSubModules opt.options; options = []; };
/* Properties. */

View file

@ -79,8 +79,6 @@ rec {
visible ? null,
# Whether the option can be set only once
readOnly ? null,
# Deprecated, used by types.optionSet.
options ? null
} @ attrs:
attrs // { _type = "option"; };

View file

@ -749,14 +749,6 @@ rec {
nestedTypes.finalType = finalType;
};
# Obsolete alternative to configOf. It takes its option
# declarations from the options attribute of containing option
# declaration.
optionSet = mkOptionType {
name = "optionSet";
description = "option set";
deprecationMessage = "Use `types.submodule' instead";
};
# Augment the given type with an additional type check function.
addCheck = elemType: check: elemType // { check = x: elemType.check x && check x; };

View file

@ -158,6 +158,12 @@ The following methods are available on machine objects:
e.g., `send_chars("foobar\n")` will type the string `foobar`
followed by the Enter key.
`send_console`
: Send keys to the kernel console. This allows interaction with the systemd
emergency mode, for example. Takes a string that is sent, e.g.,
`send_console("\n\nsystemctl default\n")`.
`execute`
: Execute a shell command, returning a list `(status, stdout)`.
@ -272,6 +278,13 @@ The following methods are available on machine objects:
Killing the interactive session with `Ctrl-d` or `Ctrl-c` also ends
the guest session.
`console_interact`
: Allows you to directly interact with QEMU's stdin. This should
only be used during test development, not in production tests.
Output from QEMU is only read line-wise. `Ctrl-c` kills QEMU and
`Ctrl-d` closes console and returns to the test runner.
To test user units declared by `systemd.user.services` the optional
`user` argument can be used:
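For the two console helpers documented above, a minimal test might look like the following sketch (it assumes the usual `make-test-python.nix` wrapper from `nixos/tests`; the machine configuration is deliberately empty):

```nix
import ./make-test-python.nix ({ ... }: {
  name = "console-helpers";
  nodes.machine = { ... }: { };
  testScript = ''
    machine.start()
    machine.wait_for_unit("multi-user.target")
    # Type directly on the kernel console; no shell prompt is required.
    machine.send_console("\n\nsystemctl default\n")
    # For interactive test development only: attach to QEMU's stdin.
    # machine.console_interact()
  '';
})
```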

View file

@ -261,6 +261,19 @@ start_all()
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>send_console</literal>
</term>
<listitem>
<para>
Send keys to the kernel console. This allows interaction
with the systemd emergency mode, for example. Takes a string
that is sent, e.g.,
<literal>send_console(&quot;\n\nsystemctl default\n&quot;)</literal>.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>execute</literal>
@ -502,6 +515,21 @@ machine.systemctl(&quot;list-jobs --no-pager&quot;, &quot;any-user&quot;) # spaw
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>console_interact</literal>
</term>
<listitem>
<para>
Allows you to directly interact with QEMUs stdin. This
should only be used during test development, not in
production tests. Output from QEMU is only read line-wise.
<literal>Ctrl-c</literal> kills QEMU and
<literal>Ctrl-d</literal> closes console and returns to the
test runner.
</para>
</listitem>
</varlistentry>
</variablelist>
<para>
To test user units declared by

View file

@ -1671,6 +1671,12 @@
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
The <literal>programs.nncp</literal> options were added for
generating host-global NNCP configuration.
</para>
</listitem>
</itemizedlist>
</section>
</section>

View file

@ -591,4 +591,6 @@ In addition to numerous new and upgraded packages, this release has the followin
- Testing has been enabled for `aarch64-linux` in addition to `x86_64-linux`.
- The `spark3` package is now usable on `aarch64-darwin` as a result of [#158613](https://github.com/NixOS/nixpkgs/pull/158613) and [#158992](https://github.com/NixOS/nixpkgs/pull/158992).
- The `programs.nncp` options were added for generating host-global NNCP configuration.
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->

View file

@ -198,7 +198,7 @@ class StartCommand:
) -> subprocess.Popen:
return subprocess.Popen(
self.cmd(monitor_socket_path, shell_socket_path),
stdin=subprocess.DEVNULL,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=True,
@ -558,6 +558,28 @@ class Machine:
pass_fds=[self.shell.fileno()],
)
def console_interact(self) -> None:
"""Allows you to interact with QEMU's stdin
The shell can be exited with Ctrl+D. Note that Ctrl+C is not allowed to be used.
QEMU's stdout is read line-wise.
Should only be used during test development, not in the production test."""
self.log("Terminal is ready (there is no prompt):")
assert self.process
assert self.process.stdin
while True:
try:
char = sys.stdin.buffer.read(1)
except KeyboardInterrupt:
break
if char == b"": # ctrl+d
self.log("Closing connection to the console")
break
self.send_console(char.decode())
def succeed(self, *commands: str, timeout: Optional[int] = None) -> str:
"""Execute each command and check that it succeeds."""
output = ""
@ -834,6 +856,12 @@ class Machine:
self.send_monitor_command("sendkey {}".format(key))
time.sleep(0.01)
def send_console(self, chars: str) -> None:
assert self.process
assert self.process.stdin
self.process.stdin.write(chars.encode())
self.process.stdin.flush()
def start(self) -> None:
if self.booted:
return

View file

@ -184,6 +184,7 @@
./programs/nix-ld.nix
./programs/neovim.nix
./programs/nm-applet.nix
./programs/nncp.nix
./programs/npm.nix
./programs/noisetorch.nix
./programs/oblogout.nix

View file

@ -0,0 +1,101 @@
{ config, lib, pkgs, ... }:
with lib;
let
nncpCfgFile = "/run/nncp.hjson";
programCfg = config.programs.nncp;
settingsFormat = pkgs.formats.json { };
jsonCfgFile = settingsFormat.generate "nncp.json" programCfg.settings;
pkg = programCfg.package;
in {
options.programs.nncp = {
enable =
mkEnableOption "NNCP (Node to Node copy) utilities and configuration";
group = mkOption {
type = types.str;
default = "uucp";
description = ''
The group under which NNCP files shall be owned.
Any member of this group may access the secret keys
of this NNCP node.
'';
};
package = mkOption {
type = types.package;
default = pkgs.nncp;
defaultText = literalExpression "pkgs.nncp";
description = "The NNCP package to use system-wide.";
};
secrets = mkOption {
type = with types; listOf str;
example = [ "/run/keys/nncp.hjson" ];
description = ''
A list of paths to NNCP configuration files that should not be
in the Nix store. These files are layered on top of the values at
<xref linkend="opt-programs.nncp.settings"/>.
'';
};
settings = mkOption {
type = settingsFormat.type;
description = ''
NNCP configuration, see
<link xlink:href="http://www.nncpgo.org/Configuration.html"/>.
At runtime these settings will be overlayed by the contents of
<xref linkend="opt-programs.nncp.secrets"/> into the file
<literal>${nncpCfgFile}</literal>. Node keypairs go in
<literal>secrets</literal>, do not specify them in
<literal>settings</literal> as they will be leaked into
<literal>/nix/store</literal>!
'';
default = { };
};
};
config = mkIf programCfg.enable {
environment = {
systemPackages = [ pkg ];
etc."nncp.hjson".source = nncpCfgFile;
};
programs.nncp.settings = {
spool = mkDefault "/var/spool/nncp";
log = mkDefault "/var/spool/nncp/log";
};
systemd.tmpfiles.rules = [
"d ${programCfg.settings.spool} 0770 root ${programCfg.group}"
"f ${programCfg.settings.log} 0770 root ${programCfg.group}"
];
systemd.services.nncp-config = {
path = [ pkg ];
description = "Generate NNCP configuration";
wantedBy = [ "basic.target" ];
serviceConfig.Type = "oneshot";
script = ''
umask u=rw
nncpCfgDir=$(mktemp --directory nncp.XXX)
for f in ${jsonCfgFile} ${toString config.programs.nncp.secrets}; do
tmpdir=$(mktemp --directory nncp.XXX)
nncp-cfgdir -cfg $f -dump $tmpdir
find $tmpdir -size 1c -delete
cp -a $tmpdir/* $nncpCfgDir/
rm -rf $tmpdir
done
nncp-cfgdir -load $nncpCfgDir > ${nncpCfgFile}
rm -rf $nncpCfgDir
chgrp ${programCfg.group} ${nncpCfgFile}
chmod g+r ${nncpCfgFile}
'';
};
};
meta.maintainers = with lib.maintainers; [ ehmry ];
}
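A host configuration could then enable the module along these lines; the sketch below is illustrative only (the neighbour name, node id, address and key file path are placeholders), and as the option description warns, keypairs belong in `secrets`, not in `settings`:

```nix
{
  programs.nncp = {
    enable = true;
    # Public, non-secret parts of the configuration.
    settings.neigh.remote = {
      id = "REPLACE-WITH-NEIGHBOUR-ID";
      addrs.main = "remote.example.org:5400";
    };
    # This node's keypairs, kept out of the Nix store and merged into
    # /run/nncp.hjson by the nncp-config service.
    secrets = [ "/run/keys/nncp.hjson" ];
  };
}
```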

View file

@ -111,6 +111,13 @@ in
description = "Whether to run squid web proxy.";
};
package = mkOption {
default = pkgs.squid;
defaultText = literalExpression "pkgs.squid";
type = types.package;
description = "Squid package to use.";
};
proxyAddress = mkOption {
type = types.nullOr types.str;
default = null;
@ -157,17 +164,21 @@ in
users.groups.squid = {};
systemd.services.squid = {
description = "Squid caching web proxy";
description = "Squid caching proxy";
documentation = [ "man:squid(8)" ];
after = [ "network.target" "nss-lookup.target" ];
wantedBy = [ "multi-user.target"];
preStart = ''
mkdir -p "/var/log/squid"
chown squid:squid "/var/log/squid"
${cfg.package}/bin/squid --foreground -z -f ${squidConfig}
'';
serviceConfig = {
Type="forking";
PIDFile="/run/squid.pid";
ExecStart = "${pkgs.squid}/bin/squid -YCs -f ${squidConfig}";
ExecStart = "${cfg.package}/bin/squid --foreground -YCs -f ${squidConfig}";
ExecReload="kill -HUP $MAINPID";
KillMode="mixed";
NotifyAccess="all";
};
};

View file

@ -129,6 +129,14 @@ in
'';
};
plugins = lib.mkOption {
type = lib.types.listOf lib.types.path;
default = [];
description = ''
Keycloak plugin jar, ear files or derivations with them
'';
};
database = {
type = mkOption {
type = enum [ "mysql" "postgresql" ];
@ -787,6 +795,14 @@ in
umask u=rwx,g=,o=
install_plugin() {
if [ -d "$1" ]; then
find "$1" -type f \( -iname \*.ear -o -iname \*.jar \) -exec install -m 0500 -o keycloak -g keycloak "{}" "/run/keycloak/deployments/" \;
else
install -m 0500 -o keycloak -g keycloak "$1" "/run/keycloak/deployments/"
fi
}
install -m 0600 ${cfg.package}/standalone/configuration/*.properties /run/keycloak/configuration
install -T -m 0600 ${keycloakConfig} /run/keycloak/configuration/standalone.xml
@ -794,7 +810,9 @@ in
export JAVA_OPTS=-Djboss.server.config.user.dir=/run/keycloak/configuration
add-user-keycloak.sh -u admin -p '${cfg.initialAdminPassword}'
'' + optionalString (cfg.sslCertificate != null && cfg.sslCertificateKey != null) ''
''
+ lib.optionalString (cfg.plugins != []) (lib.concatStringsSep "\n" (map (pl: "install_plugin ${lib.escapeShellArg pl}") cfg.plugins)) + "\n"
+ optionalString (cfg.sslCertificate != null && cfg.sslCertificateKey != null) ''
pushd /run/keycloak/ssl/
cat "$CREDENTIALS_DIRECTORY/ssl_cert" <(echo) \
"$CREDENTIALS_DIRECTORY/ssl_key" <(echo) \

View file

@ -16,8 +16,7 @@ let
};
nodes = {
keycloak = { ... }: {
keycloak = { config, ... }: {
security.pki.certificateFiles = [
certs.ca.cert
];
@ -36,6 +35,10 @@ let
username = "bogus";
passwordFile = pkgs.writeText "dbPassword" "wzf6vOCbPp6cqTH";
};
plugins = with config.services.keycloak.package.plugins; [
keycloak-discord
keycloak-metrics-spi
];
};
environment.systemPackages = with pkgs; [
@ -102,8 +105,21 @@ let
### Realm Setup ###
# Get an admin interface access token
keycloak.succeed("""
curl -sSf -d 'client_id=admin-cli' \
-d 'username=admin' \
-d 'password=${initialAdminPassword}' \
-d 'grant_type=password' \
'${frontendUrl}/realms/master/protocol/openid-connect/token' \
| jq -r '"Authorization: bearer " + .access_token' >admin_auth_header
""")
# Register the metrics SPI
keycloak.succeed(
"curl -sSf -d 'client_id=admin-cli' -d 'username=admin' -d 'password=${initialAdminPassword}' -d 'grant_type=password' '${frontendUrl}/realms/master/protocol/openid-connect/token' | jq -r '\"Authorization: bearer \" + .access_token' >admin_auth_header"
"${pkgs.jre}/bin/keytool -import -alias snakeoil -file ${certs.ca.cert} -storepass aaaaaa -keystore cacert.jks -noprompt",
"KC_OPTS='-Djavax.net.ssl.trustStore=cacert.jks -Djavax.net.ssl.trustStorePassword=aaaaaa' ${pkgs.keycloak}/bin/kcadm.sh config credentials --server '${frontendUrl}' --realm master --user admin --password '${initialAdminPassword}'",
"KC_OPTS='-Djavax.net.ssl.trustStore=cacert.jks -Djavax.net.ssl.trustStorePassword=aaaaaa' ${pkgs.keycloak}/bin/kcadm.sh update events/config -s 'eventsEnabled=true' -s 'adminEventsEnabled=true' -s 'eventsListeners+=metrics-listener'",
"curl -sSf '${frontendUrl}/realms/master/metrics' | grep '^keycloak_admin_event_UPDATE'"
)
# Publish the realm, including a test OIDC client and user

View file

@ -0,0 +1,33 @@
{ stdenv
, lib
, fetchzip
, SDL
, pkg-config
, libSM
}:
stdenv.mkDerivation rec {
pname = "synaesthesia";
version = "2.4";
src = fetchzip {
url = "https://logarithmic.net/pfh-files/synaesthesia/synaesthesia-${version}.tar.gz";
sha256 = "0nzsdxbah0shm2vlziaaw3ilzlizd3d35rridkpg40nfxmq84qnx";
};
nativeBuildInputs = [
pkg-config
];
buildInputs = [
SDL
libSM
];
meta = {
homepage = "https://logarithmic.net/pfh/synaesthesia";
description = "Program for representing sounds visually";
license = lib.licenses.gpl2Only;
platforms = lib.platforms.linux;
maintainers = [ lib.maintainers.infinisil ];
};
}

View file

@ -16,9 +16,10 @@
, maven
, webkitgtk
, glib-networking
, javaPackages
}:
stdenv.mkDerivation rec {
javaPackages.mavenfod rec {
pname = "dbeaver";
version = "22.0.1"; # When updating also update fetchedMavenDeps.sha256
@ -29,6 +30,10 @@ stdenv.mkDerivation rec {
sha256 = "sha256-IG5YWwq3WVzQBvAslQ9Z2Ou6ADzf4n9NkQCtH4Jgkac=";
};
mvnSha256 = "7Sm1hAoi5xc4MLONOD8ySLLkpao0qmlMRRva/8zR210=";
mvnParameters = "-P desktop,all-platforms";
fetchedMavenDeps = stdenv.mkDerivation {
name = "dbeaver-${version}-maven-deps";
inherit src;
@ -37,7 +42,7 @@ stdenv.mkDerivation rec {
maven
];
buildPhase = "mvn package -Dmaven.repo.local=$out/.m2 -P desktop,all-platforms";
buildPhase = "mvn package -Dmaven.repo.local=$out/.m2 ${mvnParameters}";
# keep only *.{pom,jar,sha1,nbm} and delete all ephemeral files with lastModified timestamps inside
installPhase = ''
@ -88,14 +93,6 @@ stdenv.mkDerivation rec {
})
];
buildPhase = ''
runHook preBuild
mvn package --offline -Dmaven.repo.local=$(cp -dpR ${fetchedMavenDeps}/.m2 ./ && chmod +w -R .m2 && pwd)/.m2 -P desktop,all-platforms
runHook postBuild
'';
installPhase =
let
productTargetPath = "product/community/target/products/org.jkiss.dbeaver.core.product";

View file

@ -27,7 +27,7 @@
}:
let
version = "8.0.1";
version = "8.1.1";
ptFiles = stdenv.mkDerivation {
name = "PacketTracer8Drv";
@ -36,7 +36,7 @@ let
dontUnpack = true;
src = requireFile {
name = "CiscoPacketTracer_${builtins.replaceStrings ["."] [""] version}_Ubuntu_64bit.deb";
sha256 = "77a25351b016faed7c78959819c16c7013caa89c6b1872cb888cd96edd259140";
sha256 = "08c53171aa0257a64ae7de1540f242214033cfa4f879fbc9fed5cc0d32232abf";
url = "https://www.netacad.com";
};
@ -72,6 +72,7 @@ let
libXrandr
libXrender
libXScrnSaver
libXtst
xcbutilimage
xcbutilkeysyms
xcbutilrenderutil

View file

@ -5,14 +5,14 @@
python3Packages.buildPythonApplication rec {
pname = "flexget";
version = "3.3.3";
version = "3.3.4";
# Fetch from GitHub in order to use `requirements.in`
src = fetchFromGitHub {
owner = "flexget";
repo = "flexget";
rev = "v${version}";
hash = "sha256-a76x4Klad3lct2M9RxSroUYKmEX7lPqDN+dFvfjavo8=";
hash = "sha256-/nuY8+/RMM7ASke+NXb95yu+FeQHawCdgqVsBrk/KZ8=";
};
postPatch = ''

View file

@ -9,13 +9,7 @@
, withManpage ? false
}:
with python3.pkgs;
let
notmuch2 = callPackage ./notmuch.nix {
inherit notmuch;
};
in
buildPythonApplication rec {
with python3.pkgs; buildPythonApplication rec {
pname = "alot";
version = "0.10";

View file

@ -62,7 +62,12 @@ stdenv.mkDerivation rec {
enableParallelBuilding = true;
makeFlags = [ "V=1" ];
outputs = [ "out" "man" "info" ]
postConfigure = ''
mkdir ${placeholder "bindingconfig"}
cp bindings/python-cffi/_notmuch_config.py ${placeholder "bindingconfig"}/
'';
outputs = [ "out" "man" "info" "bindingconfig" ]
++ lib.optional withEmacs "emacs"
++ lib.optional withRuby "ruby";

View file

@ -2,11 +2,11 @@
stdenv.mkDerivation rec {
pname = "inav-configurator";
version = "3.0.2";
version = "4.1.0";
src = fetchurl {
url = "https://github.com/iNavFlight/inav-configurator/releases/download/${version}/INAV-Configurator_linux64_${version}.tar.gz";
sha256 = "0v6dcg634wpp9q4ya3mj00j3pg25g62aq209iq2dsvj0a03afbp2";
sha256 = "sha256-+rPzytnAQcNGbISFBHb4JS9Nqy0C2i36k+EPBvq/mso=";
};
icon = fetchurl {

View file

@ -9,7 +9,8 @@ let
# 0.3.4 would change hashes: https://github.com/NixOS/nixpkgs/issues/25154
patchutils = buildPackages.patchutils_0_3_3;
in
{ stripLen ? 0
{ relative ? null
, stripLen ? 0
, extraPrefix ? null
, excludes ? []
, includes ? []
@ -17,7 +18,18 @@ in
, postFetch ? ""
, ...
}@args:
let
args' = if relative != null then {
stripLen = 1 + lib.length (lib.splitString "/" relative) + stripLen;
extraPrefix = if extraPrefix != null then extraPrefix else "";
} else {
inherit stripLen extraPrefix;
};
in let
inherit (args') stripLen extraPrefix;
in
lib.throwIfNot (excludes == [] || includes == [])
"fetchpatch: cannot use excludes and includes simultaneously"
fetchurl ({
postFetch = ''
tmpfile="$TMPDIR/patch"
@ -27,17 +39,19 @@ fetchurl ({
exit 1
fi
"${patchutils}/bin/lsdiff" "$out" \
| sort -u | sed -e 's/[*?]/\\&/g' \
| xargs -I{} \
"${patchutils}/bin/filterdiff" \
--include={} \
--strip=${toString stripLen} \
${lib.optionalString (extraPrefix != null) ''
--addoldprefix=a/${extraPrefix} \
--addnewprefix=b/${extraPrefix} \
''} \
--clean "$out" > "$tmpfile"
"${patchutils}/bin/lsdiff" \
${lib.optionalString (relative != null) "-p1 -i ${lib.escapeShellArg relative}/'*'"} \
"$out" \
| sort -u | sed -e 's/[*?]/\\&/g' \
| xargs -I{} \
"${patchutils}/bin/filterdiff" \
--include={} \
--strip=${toString stripLen} \
${lib.optionalString (extraPrefix != null) ''
--addoldprefix=a/${lib.escapeShellArg extraPrefix} \
--addnewprefix=b/${lib.escapeShellArg extraPrefix} \
''} \
--clean "$out" > "$tmpfile"
if [ ! -s "$tmpfile" ]; then
echo "error: Normalized patch '$tmpfile' is empty (while the fetched file was not)!" 1>&2
@ -64,5 +78,6 @@ fetchurl ({
${patchutils}/bin/interdiff "$out" /dev/null > "$tmpfile"
mv "$tmpfile" "$out"
'' + postFetch;
meta.broken = excludes != [] && includes != [];
} // builtins.removeAttrs args ["stripLen" "extraPrefix" "excludes" "includes" "revert" "postFetch"])
} // builtins.removeAttrs args [
"relative" "stripLen" "extraPrefix" "excludes" "includes" "revert" "postFetch"
])

View file

@ -5,4 +5,20 @@
url = "https://github.com/facebook/zstd/pull/2724/commits/e1f85dbca3a0ed5ef06c8396912a0914db8dea6a.patch";
sha256 = "sha256-PuYAqnJWAE+L9bsroOnnBGJhERW8LHrGSLtIEkKU9vg=";
};
relative = invalidateFetcherByDrvHash fetchpatch {
url = "https://github.com/boostorg/math/commit/7d482f6ebc356e6ec455ccb5f51a23971bf6ce5b.patch";
relative = "include";
sha256 = "sha256-KlmIbixcds6GyKYt1fx5BxDIrU7msrgDdYo9Va/KJR4=";
};
full = invalidateFetcherByDrvHash fetchpatch {
url = "https://github.com/boostorg/math/commit/7d482f6ebc356e6ec455ccb5f51a23971bf6ce5b.patch";
relative = "test";
stripLen = 1;
extraPrefix = "foo/bar/";
excludes = [ "foo/bar/bernoulli_no_atomic_mp.cpp" ];
revert = true;
sha256 = "sha256-+UKmEbr2rIAweCav/hR/7d4ZrYV84ht/domTrHtm8sM=";
};
}

View file

@ -859,9 +859,8 @@ self: super: {
(pkgs.fetchpatch {
name = "hledger-properly-escape-quotes-csv.patch";
url = "https://github.com/simonmichael/hledger/commit/c9a72e1615e2ddc2824f2e248456e1042eb31e1d.patch";
sha256 = "10knvrd5bl9nrmi27i0pm82sfr64jy04xgbjp228qywyijpr3pqv";
includes = [ "Hledger/Read/CsvReader.hs" ];
stripLen = 1;
relative = "hledger-lib";
sha256 = "sha256-gjYYo0eq1gWNAAFF3dKt9QDq0VpLnN5/648r/NXEPVE=";
})
super.hledger-lib;
@ -1819,10 +1818,8 @@ self: super: {
# Presumably to be removed at the next release
yi-language = appendPatch (pkgs.fetchpatch {
url = "https://github.com/yi-editor/yi/commit/0d3bcb5ba4c237d57ce33a3dc39b63c56d890765.patch";
sha256 = "0r4mzngs0x1akqpajzx7ssa9rax977fvj5ra8d3grfbpx6z0nm01";
includes = [ "yi-language.cabal" ];
stripLen = 2;
extraPrefix = "";
relative = "yi-language";
sha256 = "sha256-AVQLvul3ufxGQyoXud05qauclNanf6kunip0oJ/9lWQ=";
}) super.yi-language;
# https://github.com/ghcjs/jsaddle/issues/123

View file

@ -0,0 +1,56 @@
{ lib
, stdenv
, maven
}:
{ src
, patches ? []
, pname
, version
, mvnSha256 ? "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
, mvnHash ? "sha256-${mvnSha256}"
, mvnFetchExtraArgs ? {}
, mvnParameters ? ""
, ...
} @args:
# originally extracted from dbeaver
# created to allow using maven packages in the same style as rust
stdenv.mkDerivation (rec {
fetchedMavenDeps = stdenv.mkDerivation ({
name = "${pname}-${version}-maven-deps";
inherit src;
buildInputs = [
maven
];
buildPhase = ''
mvn package -Dmaven.repo.local=$out/.m2 ${mvnParameters}
'';
# keep only *.{pom,jar,sha1,nbm} and delete all ephemeral files with lastModified timestamps inside
installPhase = ''
find $out -type f \
-name \*.lastUpdated -or \
-name resolver-status.properties -or \
-name _remote.repositories \
-delete
'';
# don't do any fixup
dontFixup = true;
outputHashMode = "recursive";
outputHash = mvnHash;
} // mvnFetchExtraArgs);
buildPhase = ''
runHook preBuild
mvnDeps=$(cp -dpR ${fetchedMavenDeps}/.m2 ./ && chmod +w -R .m2 && pwd)
mvn package --offline "-Dmaven.repo.local=$mvnDeps/.m2" -P desktop,all-platforms
runHook postBuild
'';
} // args)
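The real consumers added in this commit are dbeaver and scim-for-keycloak; a stripped-down call looks roughly like this (the package name, coordinates, hashes and jar path are placeholders):

```nix
{ javaPackages, maven, fetchFromGitHub }:

javaPackages.mavenfod rec {
  pname = "example-maven-app";
  version = "1.0";

  src = fetchFromGitHub {
    owner = "example";
    repo = pname;
    rev = "v${version}";
    sha256 = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
  };

  # Hash of the fixed-output derivation holding the fetched Maven repository.
  mvnHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";

  # The builder does not pull in maven or define an installPhase itself,
  # so the caller supplies both.
  nativeBuildInputs = [ maven ];
  installPhase = ''
    install -Dm644 target/${pname}-${version}.jar $out/share/java/${pname}.jar
  '';
}
```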

View file

@ -18,18 +18,14 @@ stdenv.mkDerivation rec {
(fetchpatch {
name = "gcc-12-prereq.patch";
url = "https://github.com/CGAL/cgal/commit/4581f1b7a8e97d1a136830e64b77cdae3546c4bf.patch";
sha256 = "1gzrvbrwxylv80v0m3j2s1znlysmr69lp3ggagnh38lp6423i6pq";
# Upstream slightly reordered directory structure since.
stripLen = 1;
# Fill patch does not apply: touches too many parts of the source.
includes = [ "include/CGAL/CORE/BigFloatRep.h" ];
relative = "CGAL_Core"; # Upstream slightly reordered directory structure since.
sha256 = "sha256-4+7mzGSBwAv5RHBQPAecPPKNN/LQBgvYq5mq+fHAteo=";
})
(fetchpatch {
name = "gcc-12.patch";
url = "https://github.com/CGAL/cgal/commit/6680a6e6f994b2c5b9f068eb3014d12ee1134d53.patch";
sha256 = "1c0h1lh8zng60yx78qc8wx714b517mil8mac87v6xr21q0b11wk7";
# Upstream slightly reordered directory structure since.
stripLen = 1;
relative = "CGAL_Core"; # Upstream slightly reordered directory structure since.
sha256 = "sha256-8kxJDT47jXI9kQNFI/ARWl9JBNS4AfU57/D0tYlgW0M=";
})
];

View file

@ -137,10 +137,8 @@ stdenv.mkDerivation {
++ optional (versionAtLeast version "1.73") ./cmake-paths-173.patch
++ optional (version == "1.77.0") (fetchpatch {
url = "https://github.com/boostorg/math/commit/7d482f6ebc356e6ec455ccb5f51a23971bf6ce5b.patch";
relative = "include";
sha256 = "sha256-KlmIbixcds6GyKYt1fx5BxDIrU7msrgDdYo9Va/KJR4=";
stripLen = 2;
extraPrefix = "";
includes = [ "boost/math/special_functions/detail/bernoulli_details.hpp" ];
});
meta = {

View file

@ -1,36 +1,57 @@
{ stdenv, lib, fetchFromGitHub, autoreconfHook, perl, cracklib, python3, fetchpatch }:
{ stdenv
, lib
, fetchFromGitHub
, autoreconfHook
, perl
, cracklib
, enablePAM ? stdenv.hostPlatform.isLinux
, pam
, enablePython ? false
, python
}:
# python binding generates a shared library which are unavailable with musl build
assert enablePython -> !stdenv.hostPlatform.isStatic;
stdenv.mkDerivation rec {
pname = "libpwquality";
version = "1.4.2";
version = "1.4.4";
outputs = [ "out" "dev" "lib" "man" ] ++ lib.optionals enablePython [ "py" ];
src = fetchFromGitHub {
owner = "libpwquality";
repo = "libpwquality";
rev = "${pname}-${version}";
sha256 = "0n4pjhm7wfivk0wizggaxq4y4mcxic876wcarjabkp5z9k14y36h";
sha256 = "sha256-7gAzrx5VP1fEBwAt6E5zGM8GyuPRR+JxYifYfirY+U8=";
};
nativeBuildInputs = [ autoreconfHook perl python3 ];
buildInputs = [ cracklib ];
patches = lib.optional stdenv.hostPlatform.isStatic [
(fetchpatch {
name = "static-build.patch";
url = "https://github.com/libpwquality/libpwquality/pull/40.patch";
sha256 = "1ypccq437wxwgddd98cvd330jfm7jscdlzlyxgy05g6yzrr68xyk";
})
patches = [
# ensure python site-packages goes in $py output
./python-binding-prefix.patch
];
configureFlags = lib.optional stdenv.hostPlatform.isStatic [
# Python binding generates a shared library which are unavailable with musl build
"--disable-python-bindings"
];
nativeBuildInputs = [ autoreconfHook perl ] ++ lib.optionals enablePython [ python ];
buildInputs = [ cracklib ] ++ lib.optionals enablePAM [ pam ];
configureFlags = lib.optionals (!enablePython) [ "--disable-python-bindings" ];
meta = with lib; {
description = "Password quality checking and random password generation library";
homepage = "https://github.com/libpwquality/libpwquality";
license = licenses.bsd3;
description = "Password quality checking and random password generation library";
longDescription = ''
The libpwquality library purpose is to provide common functions for
password quality checking and also scoring them based on their apparent
randomness. The library also provides a function for generating random
passwords with good pronounceability. The library supports reading and
parsing of a configuration file.
In the package there are also very simple utilities that use the library
function and PAM module that can be used instead of pam_cracklib. The
module supports all the options of pam_cracklib.
'';
license = with licenses; [ bsd3 /* or */ gpl2Plus ];
maintainers = with maintainers; [ jk ];
platforms = platforms.unix;
};
}

View file

@ -0,0 +1,13 @@
diff --git a/python/Makefile.am b/python/Makefile.am
index 1d00c0c..0987690 100644
--- a/python/Makefile.am
+++ b/python/Makefile.am
@@ -14,7 +14,7 @@ all-local:
CFLAGS="${CFLAGS} -fno-strict-aliasing" @PYTHONBINARY@ setup.py build --build-base py$(PYTHONREV)
install-exec-local:
- CFLAGS="${CFLAGS} -fno-strict-aliasing" @PYTHONBINARY@ setup.py build --build-base py$(PYTHONREV) install --prefix=${DESTDIR}${prefix}
+ CFLAGS="${CFLAGS} -fno-strict-aliasing" @PYTHONBINARY@ setup.py build --build-base py$(PYTHONREV) install --prefix=${DESTDIR}${py}
clean-local:
rm -rf py$(PYTHONREV)

View file

@ -1,4 +1,4 @@
{ stdenv, lib, fetchFromGitHub, ocaml, findlib, ocamlbuild, qtest, num, ounit
{ stdenv, lib, fetchFromGitHub, ocaml, findlib, ocamlbuild, qtest, qcheck, num, ounit
, doCheck ? lib.versionAtLeast ocaml.version "4.08" && !stdenv.isAarch64
}:
@ -18,7 +18,7 @@ stdenv.mkDerivation rec {
};
nativeBuildInputs = [ ocaml findlib ocamlbuild ];
checkInputs = [ qtest ounit ];
checkInputs = [ qtest ounit qcheck ];
propagatedBuildInputs = [ num ];
strictDeps = !doCheck;

View file

@ -15,8 +15,7 @@ buildDunePackage rec {
sha256 = "2a37ffaa352a1e145ef3d80ac28661213c69a741b238623e59f29e3d5a12c537";
};
buildInputs = [ ppx_sexp_conv ];
propagatedBuildInputs = [ astring ipaddr ipaddr-sexp sexplib uri logs ];
propagatedBuildInputs = [ astring ipaddr ipaddr-sexp sexplib uri logs ppx_sexp_conv ];
meta = {
description = "A network connection establishment library";

View file

@ -8,9 +8,9 @@ stdenv.mkDerivation rec {
sha256 = "1nymykskq1yx87y4xl6hl9i4q6kv0qaq25rniqgl1bfn883p1ysc";
};
nativeBuildInputs = [ perl ocaml findlib ];
nativeBuildInputs = [ perl ocaml findlib camlidl ];
propagatedBuildInputs = [ apron camlidl gmp mpfr ];
propagatedBuildInputs = [ apron gmp mpfr ];
strictDeps = true;

View file

@ -1,11 +1,9 @@
{ lib, fetchurl, buildDunePackage }:
{ lib, fetchurl, buildDunePackage, ocaml }:
buildDunePackage rec {
pname = "facile";
version = "1.1.4";
useDune2 = false;
src = fetchurl {
url = "https://github.com/Emmanuel-PLF/facile/releases/download/${version}/facile-${version}.tbz";
sha256 = "0jqrwmn6fr2vj2rrbllwxq4cmxykv7zh0y4vnngx29f5084a04jp";
@ -13,6 +11,9 @@ buildDunePackage rec {
doCheck = true;
useDune2 = lib.versionAtLeast ocaml.version "4.12";
postPatch = lib.optionalString useDune2 "dune upgrade";
meta = {
homepage = "http://opti.recherche.enac.fr/facile/";
license = lib.licenses.lgpl21Plus;

View file

@ -22,8 +22,8 @@ stdenv.mkDerivation rec {
'';
nativeBuildInputs = [ pkg-config ocaml findlib ];
buildInputs = [ gdome2 libxslt];
propagatedBuildInputs = [gdome2];
buildInputs = [ libxslt ];
propagatedBuildInputs = [ gdome2 ];
strictDeps = true;

View file

@ -15,7 +15,7 @@ buildDunePackage rec {
nativeBuildInputs = [ pkg-config ];
buildInputs = [ core core_kernel sqlite ];
propagatedBuildInputs = [ core core_kernel sqlite ];
meta = {
description =

View file

@ -13,7 +13,7 @@ buildDunePackage rec {
useDune2 = true;
buildInputs = [
propagatedBuildInputs = [
cudf
];

View file

@ -1,6 +1,6 @@
{ lib, fetchurl, buildDunePackage
, logs, lwt, mirage-clock, mirage-profile, ptime
, alcotest
, alcotest, stdlib-shims
}:
buildDunePackage rec {
@ -14,7 +14,7 @@ buildDunePackage rec {
sha256 = "0h0amzjxy067jljscib7fvw5q8k0adqa8m86affha9hq5jsh07a1";
};
propagatedBuildInputs = [ logs lwt mirage-clock mirage-profile ptime ];
propagatedBuildInputs = [ logs lwt mirage-clock mirage-profile ptime stdlib-shims ];
doCheck = true;
checkInputs = [ alcotest ];

View file

@ -1,5 +1,5 @@
{ lib, fetchurl, buildDunePackage
, ppx_cstruct
, ppx_cstruct, stdlib-shims
, cstruct, lwt
}:
@ -15,7 +15,7 @@ buildDunePackage rec {
};
buildInputs = [ ppx_cstruct ];
propagatedBuildInputs = [ cstruct lwt ];
propagatedBuildInputs = [ cstruct lwt stdlib-shims ];
meta = with lib; {
description = "Collect runtime profiling information in CTF format";

View file

@ -10,7 +10,7 @@ stdenv.mkDerivation rec {
sha256 = "17xqiclaqs4hmnb92p9z6z9a1xfr31vcn8nlnj8ykk57by31vfza";
};
nativeBuildInputs = [ perl ocaml findlib mpfr camlidl ];
nativeBuildInputs = [ perl ocaml findlib camlidl ];
buildInputs = [ gmp mpfr ];
strictDeps = true;
@ -22,8 +22,7 @@ stdenv.mkDerivation rec {
];
postConfigure = ''
sed -i Makefile \
-e 's|/bin/rm|rm|'
substituteInPlace Makefile --replace "/bin/rm" "rm"
mkdir -p $out/lib/ocaml/${ocaml.version}/site-lib/stublibs
'';

View file

@ -1,11 +1,9 @@
{ lib, fetchzip, buildDunePackage }:
{ lib, fetchzip, buildDunePackage, ocaml }:
buildDunePackage rec {
pname = "nonstd";
version = "0.0.3";
useDune2 = false;
minimalOCamlVersion = "4.02";
src = fetchzip {
@ -13,6 +11,8 @@ buildDunePackage rec {
sha256 = "0ccjwcriwm8fv29ij1cnbc9win054kb6pfga3ygzdbjpjb778j46";
};
useDune2 = lib.versionAtLeast ocaml.version "4.12";
postPatch = lib.optionalString useDune2 "dune upgrade";
doCheck = true;
meta = with lib; {

View file

@ -23,7 +23,7 @@ buildDunePackage rec {
useDune2 = true;
buildInputs = [ astring result ];
propagatedBuildInputs = [ astring result ];
meta = {
description = "Parser for Ocaml documentation comments";

View file

@ -9,6 +9,7 @@
, ezjsonm
, hmap
, sexplib
, fieldslib
}:
buildDunePackage rec {
@ -31,7 +32,7 @@ buildDunePackage rec {
];
propagatedBuildInputs = [
hmap cohttp-lwt ezjsonm sexplib
hmap cohttp-lwt ezjsonm sexplib fieldslib
];
meta = {

View file

@ -1,6 +1,6 @@
{ lib, buildDunePackage, fetchurl
, ppx_cstruct, ppx_tools
, cstruct, ounit, mmap
, cstruct, ounit, mmap, stdlib-shims
}:
buildDunePackage rec {
@ -24,6 +24,7 @@ buildDunePackage rec {
propagatedBuildInputs = [
cstruct
stdlib-shims
];
doCheck = true;

View file

@ -15,7 +15,7 @@ buildOcaml rec {
strictDeps = true;
buildInputs = [ camlp4 ];
propagatedBuildInputs = [ camlp4 ];
meta = with lib; {
homepage = "https://github.com/janestreet/pipebang";

View file

@ -1,4 +1,4 @@
{ lib, stdenv, fetchFromGitHub, fetchpatch, ocaml, findlib, piqi, stdlib-shims }:
{ lib, stdenv, fetchFromGitHub, fetchpatch, ocaml, findlib, piqi, stdlib-shims, num }:
stdenv.mkDerivation rec {
version = "0.7.7";
@ -15,6 +15,8 @@ stdenv.mkDerivation rec {
nativeBuildInputs = [ ocaml findlib ];
buildInputs = [ piqi stdlib-shims ];
checkInputs = [ num ];
strictDeps = true;
createFindlibDestdir = true;

View file

@ -27,17 +27,19 @@ buildDunePackage rec {
sha256 = "15cjb9ygnvp2kv85rrb7ncz7yalifyl7wd2hp2cl8r1qrpgi1d0w";
};
nativeBuildInputs = [ cppo ];
nativeBuildInputs = [ cppo findlib ];
buildInputs = [
bigarray-compat
containers
ctypes
integers
num
ppxlib
re
findlib
];
propagatedBuildInputs = [
ctypes
];
strictDeps = true;

View file

@ -1,5 +1,5 @@
{ lib, fetchurl, buildDunePackage, cppo, ppx_deriving
, ppxlib
, ppxlib, dune-configurator
}:
buildDunePackage rec {
@ -13,7 +13,9 @@ buildDunePackage rec {
sha256 = "1dc1vxnkd0cnrgac5v3zbaj2lq1d2w8118mp1cmsdxylp06yz1sj";
};
buildInputs = [ cppo ppxlib ppx_deriving ];
nativeBuildInputs = [ cppo ];
buildInputs = [ ppxlib dune-configurator ];
propagatedBuildInputs = [ ppx_deriving ];
meta = with lib; {
homepage = "https://github.com/ocaml-ppx/ppx_deriving_protobuf";

View file

@ -5,6 +5,7 @@
, mirage-profile
, cstruct
, ounit
, stdlib-shims
}:
buildDunePackage rec {
@ -25,6 +26,7 @@ buildDunePackage rec {
propagatedBuildInputs = [
mirage-profile
cstruct
stdlib-shims
];
doCheck = true;

View file

@ -26,6 +26,7 @@ stdenv.mkDerivation rec {
createFindlibDestdir = true;
nativeBuildInputs = [ ocaml findlib ocamlbuild camlp4 ];
propagatedBuildInputs = [ camlp4 ];
strictDeps = true;

View file

@ -7,14 +7,14 @@
buildPythonPackage rec {
pname = "asyncsleepiq";
version = "1.2.0";
version = "1.2.1";
format = "setuptools";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-bE9eOjOLERnioOunIBN7Hc/Nvs1zDXMSMzqZsVRg6Jo=";
sha256 = "sha256-pIfEdNmtnwA+PE3lXVd7Qd8Igj+/aqZmuDqFs60PxgY=";
};
propagatedBuildInputs = [

View file

@ -14,6 +14,12 @@ buildPythonPackage {
buildInputs = [ python notmuch cffi ];
# since notmuch 0.35, this package expects _notmuch_config.py that is
# generated by notmuch's configure script
postPatch = ''
cp ${notmuch.bindingconfig}/_notmuch_config.py .
'';
# no tests
doCheck = false;
pythonImportsCheck = [ "notmuch2" ];

View file

@ -1,27 +1,52 @@
{ lib, buildPythonPackage, fetchPypi, future, python-dateutil, six, pytest, mock, parameterized }:
{ lib
, buildPythonPackage
, fetchPypi
, future
, mock
, parameterized
, pytestCheckHook
, python-dateutil
, pythonOlder
, six
}:
buildPythonPackage rec {
pname = "vertica-python";
version = "1.0.3";
version = "1.0.4";
format = "setuptools";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
sha256 = "cfe1794c5ba9fdfbd470a55d82f60c2e08e129828367753bf64199a58a539bc2";
hash = "sha256-IpdrR9mDG+8cNnXgSXkmXahSEP4EGnEBJqZk5SNu9pA=";
};
propagatedBuildInputs = [ future python-dateutil six ];
propagatedBuildInputs = [
future
python-dateutil
six
];
checkInputs = [ pytest mock parameterized ];
checkInputs = [
mock
parameterized
pytestCheckHook
];
# Integration tests require an accessible Vertica db
checkPhase = ''
pytest --ignore vertica_python/tests/integration_tests
'';
disabledTestPaths = [
# Integration tests require an accessible Vertica db
"vertica_python/tests/integration_tests"
];
pythonImportsCheck = [
"vertica_python"
];
meta = with lib; {
description = "Native Python client for Vertica database";
homepage = "https://github.com/vertica/vertica-python";
license = licenses.asl20;
maintainers = [ maintainers.arnoldfarkas ];
maintainers = with maintainers; [ arnoldfarkas ];
};
}

View file

@ -7,8 +7,8 @@ let
in
buildNodejs {
inherit enableNpm;
version = "17.7.2";
sha256 = "sha256-OuXnTgsWIoz37faIU1mp0uAZrIQ5BsXgGUjXRvq6Sq8=";
version = "17.8.0";
sha256 = "0jsf6sv42rzpizvil7g1gf9bskh8lx0gcxg0yzpr4hk7mx7i90br";
patches = [
./disable-darwin-v8-system-instrumentation.patch
];

View file

@ -60,16 +60,16 @@ let
};
power-management = {
PM_ADVANCED_DEBUG = yes;
X86_INTEL_LPSS = yes;
X86_INTEL_PSTATE = yes;
INTEL_IDLE = yes;
CPU_FREQ_DEFAULT_GOV_PERFORMANCE = yes;
CPU_FREQ_GOV_SCHEDUTIL = yes;
PM_ADVANCED_DEBUG = yes;
PM_WAKELOCKS = yes;
# Power-capping framework and support for INTEL RAPL
POWERCAP = yes;
} // optionalAttrs (stdenv.hostPlatform.isx86) {
INTEL_IDLE = yes;
INTEL_RAPL = whenAtLeast "5.3" module;
X86_INTEL_LPSS = yes;
X86_INTEL_PSTATE = yes;
};
external-firmware = {
@ -269,8 +269,6 @@ let
DRM_GMA600 = whenOlder "5.13" yes;
DRM_GMA3600 = whenOlder "5.12" yes;
DRM_VMWGFX_FBCON = yes;
# necessary for amdgpu polaris support
DRM_AMD_POWERPLAY = whenBetween "4.5" "4.9" yes;
# (experimental) amdgpu support for verde and newer chipsets
DRM_AMDGPU_SI = yes;
# (stable) amdgpu support for bonaire and newer chipsets
@ -383,7 +381,7 @@ let
EXT4_FS_POSIX_ACL = yes;
EXT4_FS_SECURITY = yes;
EXT4_ENCRYPTION = { optional = true; tristate = if (versionOlder version "4.8") then "m" else "y"; };
EXT4_ENCRYPTION = option yes;
REISERFS_FS_XATTR = option yes;
REISERFS_FS_POSIX_ACL = option yes;
@ -464,7 +462,7 @@ let
# https://googleprojectzero.blogspot.com/2019/11/bad-binder-android-in-wild-exploit.html
DEBUG_LIST = yes;
# Detect writes to read-only module pages
DEBUG_SET_MODULE_RONX = { optional = true; tristate = whenOlder "4.11" "y"; };
DEBUG_SET_MODULE_RONX = whenOlder "4.11" (option yes);
RANDOMIZE_BASE = option yes;
STRICT_DEVMEM = mkDefault yes; # Filter access to /dev/mem
IO_STRICT_DEVMEM = mkDefault yes;
@ -564,8 +562,8 @@ let
PARAVIRT_SPINLOCKS = option yes;
KVM_ASYNC_PF = yes;
KVM_COMPAT = { optional = true; tristate = whenBetween "4.0" "4.12" "y"; };
KVM_DEVICE_ASSIGNMENT = { optional = true; tristate = whenBetween "3.10" "4.12" "y"; };
KVM_COMPAT = whenOlder "4.12" (option yes);
KVM_DEVICE_ASSIGNMENT = whenOlder "4.12" (option yes);
KVM_GENERIC_DIRTYLOG_READ_PROTECT = yes;
KVM_GUEST = yes;
KVM_MMIO = yes;
@ -773,7 +771,6 @@ let
AIC79XX_DEBUG_ENABLE = no;
AIC7XXX_DEBUG_ENABLE = no;
AIC94XX_DEBUG = no;
B43_PCMCIA = { optional=true; tristate = whenOlder "4.4" "y";};
BLK_DEV_INTEGRITY = yes;
@ -794,7 +791,9 @@ let
BT_RFCOMM_TTY = option yes; # RFCOMM TTY support
BT_QCA = module; # enables QCA6390 bluetooth
CLEANCACHE = option yes;
# Removed on 5.17 as it was unused
# upstream: https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/?id=0a4ee518185e902758191d968600399f3bc2be31
CLEANCACHE = whenOlder "5.17" (option yes);
CRASH_DUMP = option no;
DVB_DYNAMIC_MINORS = option yes; # we use udev

View file

@ -352,16 +352,6 @@ if [[ -z $flake && -e /etc/nixos/flake.nix && -z $noFlake ]]; then
flake="$(dirname "$(readlink -f /etc/nixos/flake.nix)")"
fi
# Re-execute nixos-rebuild from the Nixpkgs tree.
# FIXME: get nixos-rebuild from $flake.
if [[ -z $_NIXOS_REBUILD_REEXEC && -n $canRun && -z $fast && -z $flake ]]; then
if p=$(runCmd nix-build --no-out-link --expr 'with import <nixpkgs/nixos> {}; config.system.build.nixos-rebuild' "${extraBuildFlags[@]}"); then
export _NIXOS_REBUILD_REEXEC=1
runCmd exec "$p/bin/nixos-rebuild" "${origArgs[@]}"
exit 1
fi
fi
# For convenience, use the hostname as the default configuration to
# build from the flake.
if [[ -n $flake ]]; then
@ -380,6 +370,40 @@ if [[ -n $flake ]]; then
fi
fi
tmpDir=$(mktemp -t -d nixos-rebuild.XXXXXX)
cleanup() {
for ctrl in "$tmpDir"/ssh-*; do
ssh -o ControlPath="$ctrl" -O exit dummyhost 2>/dev/null || true
done
rm -rf "$tmpDir"
}
trap cleanup EXIT
# Re-execute nixos-rebuild from the Nixpkgs tree.
if [[ -z $_NIXOS_REBUILD_REEXEC && -n $canRun && -z $fast ]]; then
if [[ -z $flake ]]; then
if p=$(runCmd nix-build --no-out-link --expr 'with import <nixpkgs/nixos> {}; config.system.build.nixos-rebuild' "${extraBuildFlags[@]}"); then
SHOULD_REEXEC=1
fi
else
runCmd nix "${flakeFlags[@]}" build --out-link "${tmpDir}/nixos-rebuild" "$flake#$flakeAttr.config.system.build.nixos-rebuild" "${extraBuildFlags[@]}" "${lockFlags[@]}"
if p=$(readlink -e "${tmpDir}/nixos-rebuild"); then
SHOULD_REEXEC=1
fi
fi
if [[ -n $SHOULD_REEXEC ]]; then
export _NIXOS_REBUILD_REEXEC=1
# Manually call cleanup as the EXIT trap is not triggered when using exec
cleanup
runCmd exec "$p/bin/nixos-rebuild" "${origArgs[@]}"
exit 1
fi
fi
# Find configuration.nix and open editor instead of building.
if [ "$action" = edit ]; then
if [[ -z $flake ]]; then
@ -394,19 +418,8 @@ if [ "$action" = edit ]; then
exit 1
fi
tmpDir=$(mktemp -t -d nixos-rebuild.XXXXXX)
SSHOPTS="$NIX_SSHOPTS -o ControlMaster=auto -o ControlPath=$tmpDir/ssh-%n -o ControlPersist=60"
cleanup() {
for ctrl in "$tmpDir"/ssh-*; do
ssh -o ControlPath="$ctrl" -O exit dummyhost 2>/dev/null || true
done
rm -rf "$tmpDir"
}
trap cleanup EXIT
# First build Nix, since NixOS may require a newer version than the
# current one.
if [[ -n "$rollback" || "$action" = dry-build ]]; then

View file

@ -19,10 +19,10 @@ rec {
# Policy: use the highest stable version as the default (on our master).
stable = if stdenv.hostPlatform.system == "x86_64-linux"
then generic {
version = "510.54";
sha256_64bit = "TCDezK4/40et/Q5piaMG+QJP2t+DGtwejmCFVnUzUWE=";
settingsSha256 = "ZWz5UN6Pa69NlmerKu30G+X8WfGlAwnVerDrO7TRO6w=";
persistencedSha256 = "MgWrBjKXJeRqF+ouT72tTiLPtn+lsS/Cp3oS61AWV8Q=";
version = "510.60.02";
sha256_64bit = "sha256-qADfwFSQeP2Mbo5ngO+47uh4cuYFXH9fOGpHaM4H4AM=";
settingsSha256 = "sha256-Voa1JZ2qqJ1t+bfwKh/mssEi/hjzLTPwef2XG/gAC+0=";
persistencedSha256 = "sha256-THgK2GpRcttqSN2WxcuJu5My++Q+Y34jG8hm7daxhAQ=";
}
else legacy_390;

View file

@ -16,7 +16,7 @@
, enablePython ? true
# for determining the latest compatible linuxPackages
, linuxPackages_5_16 ? pkgs.linuxKernel.packages.linux_5_16
, linuxPackages_5_17 ? pkgs.linuxKernel.packages.linux_5_17
}:
let
@ -216,28 +216,28 @@ in {
# to be adapted
zfsStable = common {
# check the release notes for compatible kernels
kernelCompatible = kernel.kernelAtLeast "3.10" && kernel.kernelOlder "5.17";
latestCompatibleLinuxPackages = linuxPackages_5_16;
kernelCompatible = kernel.kernelAtLeast "3.10" && kernel.kernelOlder "5.18";
latestCompatibleLinuxPackages = linuxPackages_5_17;
# this package should point to the latest release.
version = "2.1.3";
version = "2.1.4";
sha256 = "10p9s835wj5msspqwnqbfbnh8jmcazzd2v0gj4hn7vvni4p48gfl";
sha256 = "sha256-pHz1N2j+d9p1xleEBwwrmK9mN5gEyM69Suy0dsrkZT4=";
};
zfsUnstable = common {
# check the release notes for compatible kernels
kernelCompatible = kernel.kernelAtLeast "3.10" && kernel.kernelOlder "5.17";
latestCompatibleLinuxPackages = linuxPackages_5_16;
kernelCompatible = kernel.kernelAtLeast "3.10" && kernel.kernelOlder "5.18";
latestCompatibleLinuxPackages = linuxPackages_5_17;
# this package should point to a version / git revision compatible with the latest kernel release
# IMPORTANT: Always use a tagged release candidate or commits from the
# zfs-<version>-staging branch, because this is tested by the OpenZFS
# maintainers.
version = "2.1.3";
version = "2.1.4";
# rev = "0000000000000000000000000000000000000000";
sha256 = "10p9s835wj5msspqwnqbfbnh8jmcazzd2v0gj4hn7vvni4p48gfl";
sha256 = "sha256-pHz1N2j+d9p1xleEBwwrmK9mN5gEyM69Suy0dsrkZT4=";
isUnstable = true;
};

View file

@ -0,0 +1,7 @@
{ callPackage }:
{
scim-for-keycloak = callPackage ./scim-for-keycloak {};
keycloak-discord = callPackage ./keycloak-discord {};
keycloak-metrics-spi = callPackage ./keycloak-metrics-spi {};
}

View file

@ -1,5 +1,6 @@
{ stdenv, lib, fetchzip, makeWrapper, jre, writeText, nixosTests
, postgresql_jdbc ? null, mysql_jdbc ? null
, callPackage
}:
let
@ -57,7 +58,10 @@ stdenv.mkDerivation rec {
wrapProgram $out/bin/kcreg.sh --prefix PATH : ${jre}/bin
'';
passthru.tests = nixosTests.keycloak;
passthru = {
tests = nixosTests.keycloak;
plugins = callPackage ./all-plugins.nix {};
};
meta = with lib; {
homepage = "https://www.keycloak.org/";

View file

@ -0,0 +1,29 @@
{ stdenv
, lib
, fetchurl
}:
stdenv.mkDerivation rec {
pname = "keycloak-discord";
version = "0.3.1";
src = fetchurl {
url = "https://github.com/wadahiro/keycloak-discord/releases/download/v${version}/keycloak-discord-ear-${version}.ear";
sha256 = "0fswhbnxc80dpfqf5y6j29dxk3vcnm4kki6qdk22qliasvpw5n9c";
};
dontUnpack = true;
dontBuild = true;
installPhase = ''
mkdir -p "$out"
install "$src" "$out/${pname}-ear-${version}.ear"
'';
meta = with lib; {
homepage = "https://github.com/wadahiro/keycloak-discord";
description = "Keycloak Social Login extension for Discord";
license = licenses.apsl20;
maintainers = with maintainers; [ mkg20001 ];
};
}

View file

@ -0,0 +1,26 @@
{ stdenv, lib, fetchurl }:
stdenv.mkDerivation rec {
pname = "keycloak-metrics-spi";
version = "2.5.3";
src = fetchurl {
url = "https://github.com/aerogear/keycloak-metrics-spi/releases/download/${version}/keycloak-metrics-spi-${version}.jar";
sha256 = "15lsy8wjw6nlfdfhllc45z9l5474p0lsghrwzzsssvd68bw54gwv";
};
dontUnpack = true;
dontBuild = true;
installPhase = ''
mkdir -p $out
install "$src" "$out"
'';
meta = with lib; {
homepage = "https://github.com/aerogear/keycloak-metrics-spi";
description = "Keycloak Service Provider that adds a metrics endpoint";
license = licenses.apsl20;
maintainers = with maintainers; [ benley ];
};
}

View file

@ -0,0 +1,36 @@
{ lib
, stdenv
, fetchFromGitHub
, maven
, javaPackages
}:
javaPackages.mavenfod rec {
pname = "scim-for-keycloak";
version = "kc-15-b2"; # When updating also update mvnHash
src = fetchFromGitHub {
owner = "Captain-P-Goldfish";
repo = "scim-for-keycloak";
rev = version;
sha256 = "K34c7xISjEETI3jFkRLdZ0C8pZHTWtPtrrIzwC76Tv0=";
};
mvnHash = "sha256-kDYhXTEOAWH/dcRJalKtbwBpoxcD1aX9eqcRKs6ewbE=";
nativeBuildInputs = [
maven
];
installPhase = ''
EAR=$(find -iname "*.ear")
install -D "$EAR" "$out/$(basename $EAR)"
'';
meta = with lib; {
homepage = "https://github.com/Captain-P-Goldfish/scim-for-keycloak";
description = "A third party module that extends Keycloak with SCIM functionality";
license = licenses.bsd3;
maintainers = with maintainers; [ mkg20001 ];
};
}

View file

@ -1,20 +1,20 @@
{ lib, stdenv, fetchurl, perl, openldap, pam, db, cyrus_sasl, libcap
, expat, libxml2, openssl, pkg-config
, expat, libxml2, openssl, pkg-config, systemd
}:
stdenv.mkDerivation rec {
pname = "squid";
version = "4.17";
version = "5.4.1";
src = fetchurl {
url = "http://www.squid-cache.org/Versions/v4/${pname}-${version}.tar.xz";
sha256 = "sha256-y5KKwIx8hrFRscj4J6vhqE2DGBoqhuDVEihhY+HjFBg=";
url = "http://www.squid-cache.org/Versions/v5/${pname}-${version}.tar.xz";
sha256 = "sha256-300xCpFmOuWcKbD4GD8iYjxeb3MYaa95OAWYerlMpBw=";
};
nativeBuildInputs = [ pkg-config ];
buildInputs = [
perl openldap db cyrus_sasl expat libxml2 openssl
] ++ lib.optionals stdenv.isLinux [ libcap pam ];
] ++ lib.optionals stdenv.isLinux [ libcap pam systemd ];
configureFlags = [
"--enable-ipv6"
@ -26,7 +26,9 @@ stdenv.mkDerivation rec {
"--enable-removal-policies=lru,heap"
"--enable-delay-pools"
"--enable-x-accelerator-vary"
] ++ lib.optional (stdenv.isLinux && !stdenv.hostPlatform.isMusl) "--enable-linux-netfilter";
"--enable-htcp"
] ++ lib.optional (stdenv.isLinux && !stdenv.hostPlatform.isMusl)
"--enable-linux-netfilter";
meta = with lib; {
description = "A caching proxy for the Web supporting HTTP, HTTPS, FTP, and more";

View file

@ -1,53 +0,0 @@
# sets of small configurations:
# Each configuration
rec {
# has 2 arguments pkgs and this.
configA = pkgs: this: {
# Can depends on other configuration
require = configB;
# Defines new options
optionA = pkgs.lib.mkOption {
# With default values
default = false;
# And merging functions.
merge = pkgs.lib.mergeEnableOption;
};
# Add a new definition to other options.
optionB = this.optionA;
};
# Can be used for option header.
configB = pkgs: this: {
# Can depends on more than one configuration.
require = [ configC configD ];
optionB = pkgs.lib.mkOption {
default = false;
};
# Is not obliged to define other options.
};
configC = pkgs: this: {
require = [ configA ];
optionC = pkgs.lib.mkOption {
default = false;
};
# Use the default value if it is not overwritten.
optionA = this.optionC;
};
# Can also be used as option configuration only.
# without any arguments (backward compatibility)
configD = {
# Is not forced to specify the require attribute.
# Is not force to make new options.
optionA = true;
optionD = false;
};
}

View file

@ -1,11 +0,0 @@
let
pkgs = import ../../.. {};
config = import ./declare.nix;
in
with (pkgs.lib);
finalReferenceOptionSets
filterOptionSets
pkgs
# List of main configurations.
[ config.configB config.configC ]

View file

@ -1,57 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<expr>
<attrs>
<attr name="optionA">
<list>
<attrs>
<attr name="_type">
<string value="option" />
</attr>
<attr name="default">
<bool value="false" />
</attr>
<attr name="merge">
<unevaluated />
</attr>
<attr name="name">
<string value="optionA" />
</attr>
</attrs>
</list>
</attr>
<attr name="optionB">
<list>
<attrs>
<attr name="_type">
<string value="option" />
</attr>
<attr name="default">
<bool value="false" />
</attr>
<attr name="name">
<string value="optionB" />
</attr>
</attrs>
</list>
</attr>
<attr name="optionC">
<list>
<attrs>
<attr name="_type">
<string value="option" />
</attr>
<attr name="default">
<bool value="false" />
</attr>
<attr name="name">
<string value="optionC" />
</attr>
</attrs>
</list>
</attr>
<attr name="optionD">
<attrs>
</attrs>
</attr>
</attrs>
</expr>

View file

@ -1,15 +0,0 @@
let
pkgs = import ../../.. {};
config = import ./declare.nix;
# Define the handler of unbound options.
noOption = name: values:
builtins.trace "Attribute named '${name}' does not match any option declaration." values;
in
with (pkgs.lib);
finalReferenceOptionSets
(mergeOptionSets noOption)
pkgs
# List of main configurations.
[ config.configB config.configC ]

View file

@ -1,20 +0,0 @@
trace: Str("Attribute named 'optionD' does not match any option declaration.",[])
<?xml version='1.0' encoding='utf-8'?>
<expr>
<attrs>
<attr name="optionA">
<bool value="true" />
</attr>
<attr name="optionB">
<bool value="true" />
</attr>
<attr name="optionC">
<bool value="false" />
</attr>
<attr name="optionD">
<list>
<bool value="false" />
</list>
</attr>
</attrs>
</expr>

View file

@ -1,9 +0,0 @@
#! /bin/sh -e
echo 1>&2 "Test: Merge of option bindings."
nix-instantiate merge.nix --eval-only --strict --xml >& merge.out
diff merge.ref merge.out
echo 1>&2 "Test: Filter of option declarations."
nix-instantiate keep.nix --eval-only --strict --xml >& keep.out
diff keep.ref keep.out

View file

@ -0,0 +1,26 @@
{ lib, stdenv, fetchFromGitHub, fuse }:
stdenv.mkDerivation rec {
pname = "littlefs-fuse";
version = "2.4.1";
src = fetchFromGitHub {
owner = "littlefs-project";
repo = pname;
rev = "v${version}";
hash = "sha256-+EUZluBtgZiziTEIfXIhkRIBu/Pe78qmL18TQHkfHO4=";
};
buildInputs = [ fuse ];
installPhase = ''
runHook preInstall
install -D lfs $out/bin/${pname}
ln -s $out/bin/${pname} $out/bin/mount.littlefs
ln -s $out/bin $out/sbin
runHook postInstall
'';
meta = src.meta // {
description = "A FUSE wrapper that puts the littlefs in user-space";
license = lib.licenses.bsd3;
maintainers = with lib.maintainers; [ ehmry ];
inherit (fuse.meta) platforms;
};
}

View file

@ -7233,6 +7233,8 @@ with pkgs;
lfs = callPackage ../tools/filesystems/lfs { };
littlefs-fuse = callPackage ../tools/filesystems/littlefs-fuse { };
lksctp-tools = callPackage ../os-specific/linux/lksctp-tools { };
lldpd = callPackage ../tools/networking/lldpd { };
@ -10292,6 +10294,8 @@ with pkgs;
symengine = callPackage ../development/libraries/symengine { };
synaesthesia = callPackage ../applications/audio/synaesthesia { };
sysbench = callPackage ../development/tools/misc/sysbench {};
system-config-printer = callPackage ../tools/misc/system-config-printer {
@ -18731,7 +18735,9 @@ with pkgs;
libpulsar = callPackage ../development/libraries/libpulsar { };
libpwquality = callPackage ../development/libraries/libpwquality { };
libpwquality = callPackage ../development/libraries/libpwquality {
python = python3;
};
libqalculate = callPackage ../development/libraries/libqalculate {
readline = readline81;

View file

@ -10,8 +10,10 @@ let
openjfx15 = callPackage ../development/compilers/openjdk/openjfx/15.nix { };
openjfx17 = callPackage ../development/compilers/openjdk/openjfx/17.nix { };
mavenfod = callPackage ../development/java-modules/maven-fod.nix { };
in {
inherit mavenbuild fetchMaven openjfx11 openjfx15 openjfx17;
inherit mavenbuild mavenfod fetchMaven openjfx11 openjfx15 openjfx17;
compiler = let

View file

@ -4668,6 +4668,13 @@ in {
libpyvivotek = callPackage ../development/python-modules/libpyvivotek { };
libpwquality = pipe pkgs.libpwquality [
toPythonModule
(p: p.overrideAttrs (super: { meta = super.meta // { outputsToInstall = [ "py" ]; }; }))
(p: p.override { enablePython = true; inherit python; })
(p: p.py)
];
libredwg = toPythonModule (pkgs.libredwg.override {
enablePython = true;
inherit (self) python libxml2;
@ -5610,6 +5617,10 @@ in {
inherit (pkgs) notmuch;
};
notmuch2 = callPackage ../development/python-modules/notmuch2 {
inherit (pkgs) notmuch;
};
nototools = callPackage ../data/fonts/noto-fonts/tools.nix { };
notus-scanner = callPackage ../development/python-modules/notus-scanner { };