Merge master (+ 11b095e revert) into staging-next

Conflicts:
	pkgs/development/libraries/libvisual/default.nix
Alyssa Ross 2023-02-18 14:04:32 +00:00
commit 38f0121cce
No known key found for this signature in database
GPG key ID: F9DBED4859B271C0
35 changed files with 477 additions and 84 deletions


@@ -158,7 +158,7 @@ let
 '@NIXOS_TEST_OPTIONS_JSON@' \
 ${testOptionsDoc.optionsJSON}/share/doc/nixos/options.json
-nixos-render-docs manual docbook \
+nixos-render-docs -j $NIX_BUILD_CORES manual docbook \
 --manpage-urls ${manpageUrls} \
 --revision ${lib.escapeShellArg revision} \
 ./manual.md \
@@ -285,7 +285,7 @@ in rec {
 ''
 else ''
 mkdir -p $out/share/man/man5
-nixos-render-docs options manpage \
+nixos-render-docs -j $NIX_BUILD_CORES options manpage \
 --revision ${lib.escapeShellArg revision} \
 ${optionsJSON}/share/doc/nixos/options.json \
 $out/share/man/man5/configuration.nix.5


@@ -152,7 +152,7 @@ in rec {
 pkgs.nixos-render-docs
 ];
 } ''
-nixos-render-docs options docbook \
+nixos-render-docs -j $NIX_BUILD_CORES options docbook \
 --manpage-urls ${pkgs.path + "/doc/manpage-urls.json"} \
 --revision ${lib.escapeShellArg revision} \
 --document-type ${lib.escapeShellArg documentType} \


@@ -95,6 +95,8 @@ stdenv.mkDerivation rec {
 runHook postInstall
 '';
+passthru.updateScript = ./update.sh;
 meta = with lib; {
 homepage = "https://hexler.net/kodelife";
 description = "Real-time GPU shader editor";


@@ -0,0 +1,54 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p nix curl libxml2 jq
set -euo pipefail
nixpkgs="$(git rev-parse --show-toplevel || (printf 'Could not find root of nixpkgs repo\nAre we running from within the nixpkgs git repo?\n' >&2; exit 1))"
attr="${UPDATE_NIX_ATTR_PATH:-kodelife}"
version="$(curl -sSL https://hexler.net/kodelife/appcast/linux | xmllint --xpath '/rss/channel/item/enclosure/@*[local-name()="version"]' - | cut -d= -f2- | tr -d '"' | head -n1)"
narhash() {
nix --extra-experimental-features nix-command store prefetch-file --json "$url" | jq -r .hash
}
nixeval() {
if [ "$#" -ge 2 ]; then
systemargs=(--argstr system "$2")
else
systemargs=()
fi
nix --extra-experimental-features nix-command eval --json --impure "${systemargs[@]}" -f "$nixpkgs" "$1" | jq -r .
}
findpath() {
path="$(nix --extra-experimental-features nix-command eval --json --impure -f "$nixpkgs" "$1.meta.position" | jq -r . | cut -d: -f1)"
outpath="$(nix --extra-experimental-features nix-command eval --json --impure --expr "builtins.fetchGit \"$nixpkgs\"")"
if [ -n "$outpath" ]; then
path="${path/$(echo "$outpath" | jq -r .)/$nixpkgs}"
fi
echo "$path"
}
oldversion="${UPDATE_NIX_OLD_VERSION:-$(nixeval "$attr".version)}"
pkgpath="$(findpath "$attr")"
if [ "$version" = "$oldversion" ]; then
echo 'update.sh: New version same as old version, nothing to do.'
exit 0
fi
sed -i -e "/version\s*=/ s|\"$oldversion\"|\"$version\"|" "$pkgpath"
for system in aarch64-linux armv7l-linux x86_64-linux; do
url="$(nixeval "$attr".src.url "$system")"
curhash="$(nixeval "$attr".src.outputHash "$system")"
newhash="$(narhash "$url")"
sed -i -e "s|\"$curhash\"|\"$newhash\"|" "$pkgpath"
done


@@ -0,0 +1,60 @@
{ stdenv, lib, buildGoModule, fetchFromGitHub, installShellFiles, testers, kubeshark, nix-update-script }:
buildGoModule rec {
pname = "kubeshark";
version = "38.5";
src = fetchFromGitHub {
owner = "kubeshark";
repo = "kubeshark";
rev = version;
sha256 = "sha256-xu+IcmYNsFBYhb0Grnqyi31LCG/3XhSh1LH8XakQ3Yk=";
};
vendorHash = "sha256-o04XIUsHNqOBkvcejASHNz1HDnV6F9t+Q2Hg8eL/Uoc=";
ldflags = let t = "github.com/kubeshark/kubeshark"; in [
"-s" "-w"
"-X ${t}/misc.GitCommitHash=${src.rev}"
"-X ${t}/misc.Branch=master"
"-X ${t}/misc.BuildTimestamp=0"
"-X ${t}/misc.Platform=unknown"
"-X ${t}/misc.Ver=${version}"
];
nativeBuildInputs = [ installShellFiles ];
checkPhase = ''
go test ./...
'';
doCheck = true;
postInstall = lib.optionalString (stdenv.hostPlatform == stdenv.buildPlatform) ''
installShellCompletion --cmd kubeshark \
--bash <($out/bin/kubeshark completion bash) \
--fish <($out/bin/kubeshark completion fish) \
--zsh <($out/bin/kubeshark completion zsh)
'';
passthru = {
tests.version = testers.testVersion {
package = kubeshark;
command = "kubeshark version";
inherit version;
};
updateScript = nix-update-script { };
};
meta = with lib; {
changelog = "https://github.com/kubeshark/kubeshark/releases/tag/${version}";
description = "The API Traffic Viewer for Kubernetes";
homepage = "https://kubeshark.co/";
license = licenses.asl20;
longDescription = ''
The API traffic viewer for Kubernetes providing real-time, protocol-aware visibility into Kubernetes internal network,
capturing, dissecting and monitoring all traffic and payloads going in, out and across containers, pods, nodes and clusters.
Think TCPDump and Wireshark re-invented for Kubernetes.
'';
maintainers = with maintainers; [ bryanasdev000 ];
};
}


@@ -119,20 +119,20 @@
 "vendorHash": "sha256-WndEg+n0hId7R35OeHamP+OxzzRee7f+qXIhWmos8WI="
 },
 "azuread": {
-"hash": "sha256-StoT7ujiJhEMpQ4wMN/qzILFxJ18ZkgLkY56LwxKY+Y=",
+"hash": "sha256-CTiYxmH39Jjh7wGKWmH0EoQa1H1bkV9hBNeHR+WVvF0=",
 "homepage": "https://registry.terraform.io/providers/hashicorp/azuread",
 "owner": "hashicorp",
 "repo": "terraform-provider-azuread",
-"rev": "v2.34.0",
+"rev": "v2.34.1",
 "spdx": "MPL-2.0",
 "vendorHash": null
 },
 "azurerm": {
-"hash": "sha256-40oarnfSdA2Sif40C1gi+SupY4bNsp78TBfgNDCFf7U=",
+"hash": "sha256-FPgq/BsciisMhdSYsYiHnMQJFaTAa/llQ1RVS/sOzhQ=",
 "homepage": "https://registry.terraform.io/providers/hashicorp/azurerm",
 "owner": "hashicorp",
 "repo": "terraform-provider-azurerm",
-"rev": "v3.44.0",
+"rev": "v3.44.1",
 "spdx": "MPL-2.0",
 "vendorHash": null
 },
@@ -420,11 +420,11 @@
 "vendorHash": "sha256-uWTY8cFztXFrQQ7GW6/R+x9M6vHmsb934ldq+oeW5vk="
 },
 "github": {
-"hash": "sha256-QobAIpDDl5SXG9hmpdq8lDm8Sg5w2oK4A+e8WKw52Cc=",
+"hash": "sha256-GieysqBcXSgHuT13FqDtPPklFPRBREwMrTZZ7QH14pY=",
 "homepage": "https://registry.terraform.io/providers/integrations/github",
 "owner": "integrations",
 "repo": "terraform-provider-github",
-"rev": "v5.17.0",
+"rev": "v5.18.0",
 "spdx": "MIT",
 "vendorHash": null
 },
@@ -540,11 +540,11 @@
 "vendorHash": "sha256-rxh8Me+eOKPCbfHFT3tRsbM7JU67dBqv2JOiWArI/2Y="
 },
 "huaweicloud": {
-"hash": "sha256-8H9DgpZukJ6K78H6YITPgWai8lPPb8O1tITRTl/azHw=",
+"hash": "sha256-oZUPfhndpht9EuBiltLknblGaMX2M/dD1iOiwDJKgWY=",
 "homepage": "https://registry.terraform.io/providers/huaweicloud/huaweicloud",
 "owner": "huaweicloud",
 "repo": "terraform-provider-huaweicloud",
-"rev": "v1.44.1",
+"rev": "v1.44.2",
 "spdx": "MPL-2.0",
 "vendorHash": null
 },
@@ -1172,12 +1172,12 @@
 "vendorHash": "sha256-yTcroKTdYv0O8cX80A451I1vjYclVjA8P69fsb0wY/U="
 },
 "vault": {
-"hash": "sha256-aNyCUDV1yjpmbPNYlxuJNaiXtG3fJySxRsDLJx/hZ04=",
+"hash": "sha256-cYSw5aN7TvVMUY+YnyyosB4HjiosXYB7kDiNDQ258Eg=",
 "homepage": "https://registry.terraform.io/providers/hashicorp/vault",
 "owner": "hashicorp",
 "proxyVendor": true,
 "repo": "terraform-provider-vault",
-"rev": "v3.12.0",
+"rev": "v3.13.0",
 "spdx": "MPL-2.0",
 "vendorHash": "sha256-EOBNoEW9GI21IgXSiEN93B3skxfCrBkNwLxGXaso1oE="
 },


@@ -5,7 +5,7 @@
 python3Packages.buildPythonApplication rec {
 pname = "flexget";
-version = "3.5.24";
+version = "3.5.25";
 format = "pyproject";
 # Fetch from GitHub in order to use `requirements.in`
@@ -13,7 +13,7 @@ python3Packages.buildPythonApplication rec {
 owner = "flexget";
 repo = "flexget";
 rev = "refs/tags/v${version}";
-hash = "sha256-4RQBaqC3nmyEq9Kqg0n9KIN9Gw4Z+uCl6hQeqo/NTls=";
+hash = "sha256-Xb33/wz85RjBpRkKD09hfDr6txoB1ksKphbjrVt0QWg=";
 };
 postPatch = ''


@@ -75,7 +75,7 @@ let
 in
 env.mkDerivation rec {
 pname = "telegram-desktop";
-version = "4.6.2";
+version = "4.6.3";
 # Note: Update via pkgs/applications/networking/instant-messengers/telegram/tdesktop/update.py
 # Telegram-Desktop with submodules
@@ -84,7 +84,7 @@ env.mkDerivation rec {
 repo = "tdesktop";
 rev = "v${version}";
 fetchSubmodules = true;
-sha256 = "0lrflhnlsajay8gbz1x91fqk2ckxwpqmm19hyjfqxkvi56nl7a0g";
+sha256 = "1kv7aqj4d85iz6vbgvfplyfr9y3rw31xhdgwiskrdfv8mqb0mr5v";
 };
 postPatch = ''


@@ -1,7 +1,9 @@
-{ lib, stdenv, graalvm-ce, glibcLocales }:
-{ name ? "${args.pname}-${args.version}"
-# Final executable name
+{ lib
+, stdenv
+, glibcLocales
+# The GraalVM derivation to use
+, graalvmDrv
+, name ? "${args.pname}-${args.version}"
 , executable ? args.pname
 # JAR used as input for GraalVM derivation, defaults to src
 , jar ? args.src
@@ -9,7 +11,6 @@
 # Default native-image arguments. You probably don't want to set this,
 # except in special cases. In most cases, use extraNativeBuildArgs instead
 , nativeImageBuildArgs ? [
-"-jar" jar
 (lib.optionalString stdenv.isDarwin "-H:-CheckToolchain")
 "-H:Name=${executable}"
 "--verbose"
@@ -18,16 +19,25 @@
 , extraNativeImageBuildArgs ? [ ]
 # XMX size of GraalVM during build
 , graalvmXmx ? "-J-Xmx6g"
-# The GraalVM derivation to use
-, graalvmDrv ? graalvm-ce
 # Locale to be used by GraalVM compiler
 , LC_ALL ? "en_US.UTF-8"
 , meta ? { }
 , ...
 } @ args:
-stdenv.mkDerivation (args // {
-inherit dontUnpack LC_ALL;
+let
+extraArgs = builtins.removeAttrs args [
+  "lib"
+  "stdenv"
+  "glibcLocales"
+  "jar"
+  "dontUnpack"
+  "LC_ALL"
+  "meta"
+];
+in
+stdenv.mkDerivation ({
+inherit dontUnpack LC_ALL jar;
 nativeBuildInputs = (args.nativeBuildInputs or [ ]) ++ [ graalvmDrv glibcLocales ];
@@ -36,7 +46,7 @@ stdenv.mkDerivation (args // {
 buildPhase = args.buildPhase or ''
 runHook preBuild
-native-image ''${nativeImageBuildArgs[@]}
+native-image -jar "$jar" ''${nativeImageBuildArgs[@]}
 runHook postBuild
 '';
@@ -61,4 +71,4 @@ stdenv.mkDerivation (args // {
 # need to have native-image-installable-svm available
 broken = !(builtins.any (p: (p.product or "") == "native-image-installable-svm") graalvmDrv.products);
 } // meta;
-})
+} // extraArgs)


@@ -1,6 +1,6 @@
 { mkDerivation }:
 mkDerivation {
-version = "25.2.2";
-sha256 = "HfEh2IhifFe/gQ4sK99uBnmUGvNCeJ2mlTQf7IzenEs=";
+version = "25.2.3";
+sha256 = "peTH8hDOEuMq18exbFhtEMrQQEqg2FPkapfNnnEfTYE=";
 }


@@ -43,8 +43,6 @@ stdenv.mkDerivation rec {
 buildInputs = [ SDL glib ];
 configureFlags = lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
-"ac_cv_func_malloc_0_nonnull=yes"
-"ac_cv_func_realloc_0_nonnull=yes"
 # Remove once "sdl-cross-prereq.patch" patch above is removed.
 "--disable-lv-tool"
 ];


@@ -16,13 +16,13 @@
 stdenv.mkDerivation rec {
 pname = "qtstyleplugin-kvantum";
-version = "1.0.7";
+version = "1.0.9";
 src = fetchFromGitHub {
 owner = "tsujan";
 repo = "Kvantum";
 rev = "V${version}";
-sha256 = "Ys77z5BoeQEOYe1h5ITEuVtVn6Uug9zQjrCBxLQOrSs=";
+sha256 = "5/cScJpi5Z5Z/SjizKfMTGytuEo2uUT6QtpMnn7JhKc=";
 };
 nativeBuildInputs = [


@@ -20,6 +20,8 @@ buildDunePackage {
 meta
 ;
+duneVersion = "3";
 nativeBuildInputs = [
 ppx_sexp_conv
 ];


@@ -19,13 +19,14 @@
 buildDunePackage rec {
 pname = "netchannel";
-version = "2.1.1";
+version = "2.1.2";
 minimalOCamlVersion = "4.08";
+duneVersion = "3";
 src = fetchurl {
 url = "https://github.com/mirage/mirage-net-xen/releases/download/v${version}/mirage-net-xen-${version}.tbz";
-sha256 = "sha256-kYsAf6ntwWKUp26dMcp5BScdUOaGpM46050jVZe6gfs=";
+hash = "sha256-lTmwcNKiaq5EdJdM4UaaAVdZ+hTCX5U9MPKY/r3i7fw=";
 };
 buildInputs = [


@@ -1,6 +1,7 @@
 { lib
 , buildPythonPackage
 , dnspython
+, deprecat
 , fetchFromGitHub
 , loguru
 , passlib
@@ -12,7 +13,7 @@
 buildPythonPackage rec {
 pname = "ciscoconfparse";
-version = "1.6.50";
+version = "1.7.15";
 format = "pyproject";
 disabled = pythonOlder "3.7";
@@ -20,8 +21,8 @@ buildPythonPackage rec {
 src = fetchFromGitHub {
 owner = "mpenning";
 repo = pname;
-rev = version;
-hash = "sha256-OKPw7P2hhk8yzqjOcf2NYEueJR1ecC/D93ULfkM88Xg=";
+rev = "refs/tags/${version}";
+hash = "sha256-oGvwtaIgVvvW8Oq/dZN+Zj/PESpqWALFYPia9yeilco=";
 };
 postPatch = ''
@@ -34,6 +35,7 @@ buildPythonPackage rec {
 propagatedBuildInputs = [
 passlib
+deprecat
 dnspython
 loguru
 toml
@@ -51,6 +53,8 @@ buildPythonPackage rec {
 # Tests require network access
 "test_dns_lookup"
 "test_reverse_dns_lookup"
+# Path issues with configuration files
+"testParse_valid_filepath"
 ];
 pythonImportsCheck = [
@@ -58,8 +62,9 @@ buildPythonPackage rec {
 ];
 meta = with lib; {
-description = "Parse, Audit, Query, Build, and Modify Cisco IOS-style configurations";
+description = "Module to parse, audit, query, build, and modify Cisco IOS-style configurations";
 homepage = "https://github.com/mpenning/ciscoconfparse";
+changelog = "https://github.com/mpenning/ciscoconfparse/blob/${version}/CHANGES.md";
 license = licenses.gpl3Only;
 maintainers = with maintainers; [ astro ];
 };


@@ -0,0 +1,47 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, pytestCheckHook
, pythonOlder
, setuptools-scm
, wrapt
}:
buildPythonPackage rec {
pname = "deprecat";
version = "2.1.1";
format = "setuptools";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "mjhajharia";
repo = pname;
rev = "refs/tags/v${version}";
hash = "sha256-3Xl/IC+ImFUxxLry15MIIVRf6aR+gA9K5S2IQomkv+o=";
};
nativeBuildInputs = [
setuptools-scm
];
propagatedBuildInputs = [
wrapt
];
nativeCheckInputs = [
pytestCheckHook
];
pythonImportsCheck = [
"deprecat"
];
meta = with lib; {
description = "Decorator to deprecate old python classes, functions or methods";
homepage = "https://github.com/mjhajharia/deprecat";
changelog = "https://github.com/mjhajharia/deprecat/releases/tag/v${version}";
license = licenses.mit;
maintainers = with maintainers; [ fab ];
};
}


@@ -5,18 +5,23 @@
 , packaging
 , paramiko
 , pexpect
+, pythonOlder
 , requests
+, six
 }:
 buildPythonPackage rec {
 pname = "fortiosapi";
 version = "1.0.5";
+format = "setuptools";
+disabled = pythonOlder "3.7";
 src = fetchFromGitHub {
 owner = "fortinet-solutions-cse";
 repo = pname;
-rev = "v${version}";
-sha256 = "0679dizxcd4sk1b4h6ss8qsbjb3c8qyijlp4gzjqji91w6anzg9k";
+rev = "refs/tags/v${version}";
+hash = "sha256-M71vleEhRYnlf+RSGT1GbCy5NEZaG0hWmJo01n9s6Rg=";
 };
 propagatedBuildInputs = [
@@ -25,11 +30,15 @@ buildPythonPackage rec {
 paramiko
 packaging
 oyaml
+six
 ];
 # Tests require a local VM
 doCheck = false;
-pythonImportsCheck = [ "fortiosapi" ];
+pythonImportsCheck = [
+"fortiosapi"
+];
 meta = with lib; {
 description = "Python module to work with Fortigate/Fortios devices";


@@ -8,7 +8,7 @@
 buildPythonPackage rec {
 pname = "jaconv";
-version = "0.3.1";
+version = "0.3.4";
 format = "setuptools";
 disabled = pythonOlder "3.7";
@@ -16,8 +16,8 @@ buildPythonPackage rec {
 src = fetchFromGitHub {
 owner = "ikegami-yukino";
 repo = pname;
-rev = "v${version}";
-hash = "sha256-uzGHvklFHVoNloZauczgITeHQIgYQAfI9cjLWgG/vyI=";
+rev = "refs/tags/v${version}";
+hash = "sha256-9ruhOLaYNESeKOwJs3IN6ct66fSq7My9DOyA7/cH3d0=";
 };
 nativeCheckInputs = [


@@ -14,7 +14,7 @@
 buildPythonPackage rec {
 pname = "pipdeptree";
-version = "2.3.3";
+version = "2.4.0";
 format = "pyproject";
 disabled = pythonOlder "3.7";
@@ -23,7 +23,7 @@ buildPythonPackage rec {
 owner = "tox-dev";
 repo = "pipdeptree";
 rev = "refs/tags/${version}";
-hash = "sha256-ivqu9b+4FhGa5y+WnKRk4nF6MR4Vj62pSs2d7ycIZMc=";
+hash = "sha256-agjerQTSkrpHCleqNUxg+NFiPnf9u9DQrs3vSR917oE=";
 };
 SETUPTOOLS_SCM_PRETEND_VERSION = version;


@@ -18,7 +18,7 @@
 # it would also make the default tensorflow package unfree. See
 # https://groups.google.com/a/tensorflow.org/forum/#!topic/developers/iRCt5m4qUz0
 , cudaSupport ? false, cudaPackages ? {}
-, mklSupport ? false, mkl ? null
+, mklSupport ? false, mkl
 , tensorboardSupport ? true
 # XLA without CUDA is broken
 , xlaSupport ? cudaSupport
@@ -39,8 +39,6 @@ assert cudaSupport -> cudatoolkit != null
 # unsupported combination
 assert ! (stdenv.isDarwin && cudaSupport);
-assert mklSupport -> mkl != null;
 let
 withTensorboard = (pythonOlder "3.6") || tensorboardSupport;


@@ -1,17 +1,19 @@
 { lib
 , buildPythonPackage
-, fetchPypi
+, fetchFromGitHub
 , requests-cache
 , pytest
 }:
 buildPythonPackage rec {
 pname = "tvdb_api";
-version = "3.1.0";
+version = "3.2.0-beta";
-src = fetchPypi {
-inherit pname version;
-sha256 = "f63f6db99441bb202368d44aaabc956acc4202b18fc343a66bf724383ee1f563";
+src = fetchFromGitHub {
+owner = "dbr";
+repo = "tvdb_api";
+rev = "ce0382181a9e08a5113bfee0fed2c78f8b1e613f";
+sha256 = "sha256-poUuwySr6+8U9PIHhqFaR7nXzh8kSaW7mZkuKTUJKj8=";
 };
 propagatedBuildInputs = [ requests-cache ];
@@ -26,7 +28,5 @@ buildPythonPackage rec {
 homepage = "https://github.com/dbr/tvdb_api";
 license = licenses.unlicense;
 maintainers = with maintainers; [ peterhoeg ];
-# https://github.com/dbr/tvdb_api/issues/94
-broken = true;
 };
 }


@@ -10,16 +10,16 @@
 rustPlatform.buildRustPackage rec {
 pname = "cargo-semver-checks";
-version = "0.18.0";
+version = "0.18.1";
 src = fetchFromGitHub {
 owner = "obi1kenobi";
 repo = pname;
 rev = "v${version}";
-sha256 = "sha256-ugcmsm1j2a1wOnUe9u70yoRXALCmtXSnb80N4B4IUWE=";
+sha256 = "sha256-nxQ060M5TEUMKtJZ1j3A3v3s1cW1mnHy0/2ZXbxoaMc=";
 };
-cargoSha256 = "sha256-PxnPCevjVvmFMlmYv6qwIBZk2MThz65hgUyVhm2tzlc=";
+cargoSha256 = "sha256-D/rY9d50uKkheEeHe6S04TSdmTyUVgrABIYrFOuZudY=";
 nativeBuildInputs = [ pkg-config ];


@@ -2,13 +2,13 @@
 stdenv.mkDerivation rec {
 pname = "kitty-themes";
-version = "unstable-2022-08-11";
+version = "unstable-2023-01-08";
 src = fetchFromGitHub {
 owner = "kovidgoyal";
 repo = pname;
-rev = "72cf0dc4338ab1ad85f5ed93fdb13318916cae14";
-sha256 = "d9mO2YqA7WD2dTPsmNeQg2dUR/iv2T/l7yxrt6WKX60=";
+rev = "e0bb9d751033e82e455bf658744872c83f04b89d";
+sha256 = "sha256-ol/AWScGsskoxOEW32aGkJFgg8V6pIujoYIMQaVskWM=";
 };
 installPhase = ''


@@ -0,0 +1,87 @@
From 1640688018f329559c61352646f283f98938af31 Mon Sep 17 00:00:00 2001
From: Cole Helbling <cole.helbling@determinate.systems>
Date: Thu, 16 Feb 2023 09:30:21 -0800
Subject: [PATCH] Revert "RDMA/irdma: Report the correct link speed"
This reverts commit 425c9bd06b7a70796d880828d15c11321bdfb76d.
Some Equinix Metal instances, such as a3.large.x86, m3.large.x86
(specific hardware revisions), and n3.large.x86, use the `ice` kernel
driver for their network cards, in conjunction with bonded devices.
However, this commit caused a regression where these bonded devices
would deadlock. This was initially reported by Jaroslav Pulchart on
the netdev mailing list[1], and there were follow-up patches from Dave
Ertman[2][3] that attempted to fix this but were not up to snuff for
various reasons[4].
Specifically, v2 of the patch ([3]) appears to fix the issue on some
devices (tested with 8086:159B network cards), while it is still broken
on others (such as an 8086:1593 network card).
We revert the patch exposing the issue until upstream has a working
solution in order to make Equinix Metal instances work reliably again.
[1]: https://lore.kernel.org/netdev/CAK8fFZ6A_Gphw_3-QMGKEFQk=sfCw1Qmq0TVZK3rtAi7vb621A@mail.gmail.com/
[2]: https://patchwork.ozlabs.org/project/intel-wired-lan/patch/20230111183145.1497367-1-david.m.ertman@intel.com/
[3]: https://patchwork.ozlabs.org/project/intel-wired-lan/patch/20230215191757.1826508-1-david.m.ertman@intel.com/
[4]: https://lore.kernel.org/netdev/cb31a911-ba80-e2dc-231f-851757cfd0b8@intel.com/T/#m6e53f8c43093693c10268140126abe99e082dc1c
---
drivers/infiniband/hw/irdma/verbs.c | 35 ++++++++++++++++++++++++++---
1 file changed, 32 insertions(+), 3 deletions(-)
diff --git a/drivers/infiniband/hw/irdma/verbs.c b/drivers/infiniband/hw/irdma/verbs.c
index c5971a840b87..911902d2b93e 100644
--- a/drivers/infiniband/hw/irdma/verbs.c
+++ b/drivers/infiniband/hw/irdma/verbs.c
@@ -60,6 +60,36 @@ static int irdma_query_device(struct ib_device *ibdev,
return 0;
}
+/**
+ * irdma_get_eth_speed_and_width - Get IB port speed and width from netdev speed
+ * @link_speed: netdev phy link speed
+ * @active_speed: IB port speed
+ * @active_width: IB port width
+ */
+static void irdma_get_eth_speed_and_width(u32 link_speed, u16 *active_speed,
+ u8 *active_width)
+{
+ if (link_speed <= SPEED_1000) {
+ *active_width = IB_WIDTH_1X;
+ *active_speed = IB_SPEED_SDR;
+ } else if (link_speed <= SPEED_10000) {
+ *active_width = IB_WIDTH_1X;
+ *active_speed = IB_SPEED_FDR10;
+ } else if (link_speed <= SPEED_20000) {
+ *active_width = IB_WIDTH_4X;
+ *active_speed = IB_SPEED_DDR;
+ } else if (link_speed <= SPEED_25000) {
+ *active_width = IB_WIDTH_1X;
+ *active_speed = IB_SPEED_EDR;
+ } else if (link_speed <= SPEED_40000) {
+ *active_width = IB_WIDTH_4X;
+ *active_speed = IB_SPEED_FDR10;
+ } else {
+ *active_width = IB_WIDTH_4X;
+ *active_speed = IB_SPEED_EDR;
+ }
+}
+
/**
* irdma_query_port - get port attributes
* @ibdev: device pointer from stack
@@ -87,9 +117,8 @@ static int irdma_query_port(struct ib_device *ibdev, u32 port,
props->state = IB_PORT_DOWN;
props->phys_state = IB_PORT_PHYS_STATE_DISABLED;
}
-
- ib_get_eth_speed(ibdev, port, &props->active_speed,
- &props->active_width);
+ irdma_get_eth_speed_and_width(SPEED_100000, &props->active_speed,
+ &props->active_width);
if (rdma_protocol_roce(ibdev, 1)) {
props->gid_tbl_len = 32;
--
2.39.0


@@ -57,4 +57,9 @@
 name = "export-rt-sched-migrate";
 patch = ./export-rt-sched-migrate.patch;
 };
+fix-em-ice-bonding = {
+name = "fix-em-ice-bonding";
+patch = ./fix-em-ice-bonding.patch;
+};
 }


@@ -95,11 +95,12 @@ stdenv.mkDerivation rec {
 zlib
 ];
 mesonFlags = [
-"-Dxwayland_eglstream=true"
-"-Ddefault_font_path=${defaultFontPath}"
-"-Dxkb_bin_dir=${xkbcomp}/bin"
-"-Dxkb_dir=${xkeyboard_config}/etc/X11/xkb"
-"-Dxkb_output_dir=${placeholder "out"}/share/X11/xkb/compiled"
+(lib.mesonBool "xwayland_eglstream" true)
+(lib.mesonOption "default_font_path" defaultFontPath)
+(lib.mesonOption "xkb_bin_dir" "${xkbcomp}/bin")
+(lib.mesonOption "xkb_dir" "${xkeyboard_config}/etc/X11/xkb")
+(lib.mesonOption "xkb_output_dir" "${placeholder "out"}/share/X11/xkb/compiled")
+(lib.mesonBool "libunwind" (libunwind != null))
 ];
 meta = with lib; {


@@ -32,14 +32,14 @@
 buildGoModule rec {
 pname = "lxd";
-version = "5.10";
+version = "5.11";
 src = fetchurl {
 urls = [
 "https://linuxcontainers.org/downloads/lxd/lxd-${version}.tar.gz"
 "https://github.com/lxc/lxd/releases/download/lxd-${version}/lxd-${version}.tar.gz"
 ];
-hash = "sha256-sYJkPr/tE22xJEjKX7fMjOLQ9zBDm52UjqbVLrm39zU=";
+hash = "sha256-6z6C1nWmnHLdLtLf7l1f4riGhuP2J2mt8mVWZIiege0=";
 };
 vendorSha256 = null;


@@ -4,16 +4,20 @@
 let
 python' = python3.override {
-packageOverrides = self: super: rec {
+packageOverrides = final: prev: rec {
 # tvdb_api v3.1.0 has a hard requirement on requests-cache < 0.6
-requests-cache = super.requests-cache.overridePythonAttrs (super: rec {
+requests-cache = prev.requests-cache.overridePythonAttrs (oldAttrs: rec {
 version = "0.5.2";
-src = self.fetchPypi {
-inherit (super) pname;
+src = final.fetchPypi {
+inherit (oldAttrs) pname;
 inherit version;
 sha256 = "sha256-gTAjJpaGBF+OAeIonMHn6a5asi3dHihJqQk6s6tycOs=";
 };
+nativeBuildInputs = with final; [
+setuptools
+];
 # too many changes have been made to requests-cache based on version 0.6 so
 # simply disable tests
 doCheck = false;


@@ -10,6 +10,7 @@ from typing import Any, Dict
 from .md import Converter
 from . import manual
 from . import options
+from . import parallel
 def pretty_print_exc(e: BaseException, *, _desc_text: str = "error") -> None:
     print(f"\x1b[1;31m{_desc_text}:\x1b[0m", file=sys.stderr)
@@ -35,6 +36,7 @@ def pretty_print_exc(e: BaseException, *, _desc_text: str = "error") -> None:
 def main() -> None:
     parser = argparse.ArgumentParser(description='render nixos manual bits')
+    parser.add_argument('-j', '--jobs', type=int, default=None)
     commands = parser.add_subparsers(dest='command', required=True)
@@ -43,6 +45,7 @@ def main() -> None:
     args = parser.parse_args()
     try:
+        parallel.pool_processes = args.jobs
         if args.command == 'options':
             options.run_cli(args)
         elif args.command == 'manual':


@@ -1,3 +1,5 @@
+from __future__ import annotations
 import argparse
 import json
@@ -10,6 +12,7 @@ from xml.sax.saxutils import escape, quoteattr
 import markdown_it
+from . import parallel
 from .docbook import DocBookRenderer, make_xml_id
 from .manpage import ManpageRenderer, man_escape
 from .md import Converter, md_escape
@@ -148,15 +151,33 @@ class BaseConverter(Converter):
         return [ l for part in blocks for l in part ]
+    # this could return a TState parameter, but that does not allow dependent types and
+    # will cause headaches when using BaseConverter as a type bound anywhere. Any is the
+    # next best thing we can use, and since this is internal it will be mostly safe.
+    @abstractmethod
+    def _parallel_render_prepare(self) -> Any: raise NotImplementedError()
+    # this should return python 3.11's Self instead to ensure that a prepare+finish
+    # round-trip ends up with an object of the same type. for now we'll use BaseConverter
+    # since it's good enough so far.
+    @classmethod
+    @abstractmethod
+    def _parallel_render_init_worker(cls, a: Any) -> BaseConverter: raise NotImplementedError()
     def _render_option(self, name: str, option: dict[str, Any]) -> RenderedOption:
         try:
             return RenderedOption(option['loc'], self._convert_one(option))
         except Exception as e:
             raise Exception(f"Failed to render option {name}") from e
+    @classmethod
+    def _parallel_render_step(cls, s: BaseConverter, a: Any) -> RenderedOption:
+        return s._render_option(*a)
     def add_options(self, options: dict[str, Any]) -> None:
-        for (name, option) in options.items():
-            self._options[name] = self._render_option(name, option)
+        mapped = parallel.map(self._parallel_render_step, options.items(), 100,
+                              self._parallel_render_init_worker, self._parallel_render_prepare())
+        for (name, option) in zip(options.keys(), mapped):
+            self._options[name] = option
     @abstractmethod
     def finalize(self) -> str: raise NotImplementedError()
@@ -194,6 +215,13 @@ class DocBookConverter(BaseConverter):
         self._varlist_id = varlist_id
         self._id_prefix = id_prefix
+    def _parallel_render_prepare(self) -> Any:
+        return (self._manpage_urls, self._revision, self._markdown_by_default, self._document_type,
+                self._varlist_id, self._id_prefix)
+    @classmethod
+    def _parallel_render_init_worker(cls, a: Any) -> DocBookConverter:
+        return cls(*a)
     def _render_code(self, option: dict[str, Any], key: str) -> list[str]:
         if lit := option_is(option, key, 'literalDocBook'):
             return [ f"<para><emphasis>{key.capitalize()}:</emphasis> {lit['text']}</para>" ]
@@ -283,10 +311,19 @@ class ManpageConverter(BaseConverter):
     _options_by_id: dict[str, str]
     _links_in_last_description: Optional[list[str]] = None
-    def __init__(self, revision: str, markdown_by_default: bool):
-        self._options_by_id = {}
+    def __init__(self, revision: str, markdown_by_default: bool,
+                 *,
+                 # only for parallel rendering
+                 _options_by_id: Optional[dict[str, str]] = None):
+        self._options_by_id = _options_by_id or {}
         super().__init__({}, revision, markdown_by_default)
+    def _parallel_render_prepare(self) -> Any:
+        return ((self._revision, self._markdown_by_default), { '_options_by_id': self._options_by_id })
+    @classmethod
+    def _parallel_render_init_worker(cls, a: Any) -> ManpageConverter:
+        return cls(*a[0], **a[1])
     def _render_option(self, name: str, option: dict[str, Any]) -> RenderedOption:
         assert isinstance(self._md.renderer, OptionsManpageRenderer)
         links = self._md.renderer.link_footnotes = []
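
The `_parallel_render_prepare` / `_parallel_render_init_worker` pair added above exists so a converter can be rebuilt inside each worker process from plain, picklable data: `prepare` returns constructor arguments, and the classmethod re-instantiates an equivalent object from them. A minimal, hypothetical sketch of that round-trip (a generic `Renderer` class for illustration, not the nixpkgs converters):

from typing import Any

class Renderer:
    # illustrative stand-in for the converter classes above
    def __init__(self, revision: str, markdown_by_default: bool):
        self.revision = revision
        self.markdown_by_default = markdown_by_default

    def _parallel_render_prepare(self) -> Any:
        # only plain, picklable data should cross the process boundary
        return (self.revision, self.markdown_by_default)

    @classmethod
    def _parallel_render_init_worker(cls, a: Any) -> "Renderer":
        # each worker rebuilds an equivalent instance from the prepared args
        return cls(*a)

# the round-trip that parallel.map performs once per worker
clone = Renderer._parallel_render_init_worker(Renderer("23.05", True)._parallel_render_prepare())
assert clone.revision == "23.05"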


@@ -0,0 +1,58 @@
# this module only has to exist because cpython has a global interpreter lock
# and markdown-it is pure python code. ideally we'd just use thread pools, but
# the GIL prohibits this.

import multiprocessing

from typing import Any, Callable, ClassVar, Iterable, Optional, TypeVar

R = TypeVar('R')
S = TypeVar('S')
T = TypeVar('T')
A = TypeVar('A')

pool_processes: Optional[int] = None

# this thing is impossible to type because there's so much global state involved.
# wrapping in a class to get access to Generic[] parameters is not sufficient
# because mypy is too weak, and unnecessarily obscures how much global state is
# needed in each worker to make this whole brouhaha work.
_map_worker_fn: Any = None
_map_worker_state_fn: Any = None
_map_worker_state_arg: Any = None

def _map_worker_init(*args: Any) -> None:
    global _map_worker_fn, _map_worker_state_fn, _map_worker_state_arg
    (_map_worker_fn, _map_worker_state_fn, _map_worker_state_arg) = args

# NOTE: the state argument is never passed by any caller, we only use it as a localized
# cache for the created state in lieu of another global. it is effectively a global though.
def _map_worker_step(arg: Any, state: Any = []) -> Any:
    global _map_worker_fn, _map_worker_state_fn, _map_worker_state_arg
    # if a Pool initializer throws it'll just be retried, leading to endless loops.
    # doing the proper initialization only on first use avoids this.
    if not state:
        state.append(_map_worker_state_fn(_map_worker_state_arg))
    return _map_worker_fn(state[0], arg)

def map(fn: Callable[[S, T], R], d: Iterable[T], chunk_size: int,
        state_fn: Callable[[A], S], state_arg: A) -> list[R]:
    """
    `[ fn(state, i) for i in d ]` where `state = state_fn(state_arg)`, but using multiprocessing
    if `pool_processes` is not `None`. when multiprocessing is used the state function will
    be run once in every worker process and `multiprocessing.Pool.imap` will be used.

    **NOTE:** neither `state_fn` nor `fn` are allowed to mutate global state! doing so will cause
    discrepancies if `pool_processes` is not None, since each worker will have its own copy.

    **NOTE**: all data types that potentially cross a process boundary (so, all of them) must be
    pickle-able. this excludes lambdas, bound functions, local functions, and a number of other
    types depending on their exact internal structure. *theoretically* the pool constructor
    can transfer non-pickleable data to worker processes, but this only works when using the
    `fork` spawn method (and is thus not available on darwin or windows).
    """
    if pool_processes is None:
        state = state_fn(state_arg)
        return [ fn(state, i) for i in d ]
    with multiprocessing.Pool(pool_processes, _map_worker_init, (fn, state_fn, state_arg)) as p:
        return list(p.imap(_map_worker_step, d, chunk_size))
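
The docstring above spells out the contract of `parallel.map`: the state factory runs once per worker (or once in-process when `pool_processes` is `None`), and both callables must be picklable module-level functions. A minimal, hypothetical caller might look like this (`make_state` and `render_one` are illustrative names, not part of this commit; the package name `nixos_render_docs` is assumed):

from nixos_render_docs import parallel

def make_state(revision: str) -> dict:
    # built once per worker process, or once overall when pool_processes is None
    return {"revision": revision}

def render_one(state: dict, item: tuple) -> str:
    name, text = item
    return f"[{state['revision']}] {name}: {text}"

if __name__ == "__main__":
    parallel.pool_processes = 4  # what the new -j/--jobs flag ends up setting
    print(parallel.map(render_one, [("a", "x"), ("b", "y")], 1, make_state, "23.05"))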


@@ -8,13 +8,13 @@
 buildGoModule rec {
 pname = "grype";
-version = "0.56.0";
+version = "0.57.1";
 src = fetchFromGitHub {
 owner = "anchore";
 repo = pname;
 rev = "v${version}";
-hash = "sha256-xNv4pI6iT6lNmjeUIW8ObPFJw9H1SiVTg9fRx6Osiwc=";
+hash = "sha256-NACasOoCABoHmb4U5LvQ8EPO7G10A7uQtX4th/WJqrw=";
 # populate values that require us to use git. By doing this in postFetch we
 # can delete .git afterwards and maintain better reproducibility of the src.
 leaveDotGit = true;
@@ -28,7 +28,7 @@ buildGoModule rec {
 };
 proxyVendor = true;
-vendorHash = "sha256-Sez5jNFdL11cHBBPcY0b8qUiupmjPo9MHwUUi7FaNiA=";
+vendorHash = "sha256-DLY0tcacGFcP17IqUVvpVkUjd2xQMO5JZxltmL4b+Wo=";
 nativeBuildInputs = [
 installShellFiles


@@ -8869,6 +8869,8 @@ with pkgs;
 kubepug = callPackage ../development/tools/kubepug { };
+kubeshark = callPackage ../applications/networking/cluster/kubeshark { };
 kubergrunt = callPackage ../applications/networking/cluster/kubergrunt { };
 kubo = callPackage ../applications/networking/kubo { };
@@ -10363,7 +10365,11 @@ with pkgs;
 openfortivpn = callPackage ../tools/networking/openfortivpn { };
-opensnitch = callPackage ../tools/networking/opensnitch/daemon.nix { };
+opensnitch = callPackage ../tools/networking/opensnitch/daemon.nix {
+# Build currently fails on Go > 1.18
+# See https://github.com/evilsocket/opensnitch/issues/851
+buildGoModule = buildGo118Module;
+};
 opensnitch-ui = libsForQt5.callPackage ../tools/networking/opensnitch/ui.nix { };
@@ -15054,7 +15060,9 @@ with pkgs;
 graalvm-ce = graalvm11-ce;
 graalvm11-ce = graalvmCEPackages.graalvm11-ce;
 graalvm17-ce = graalvmCEPackages.graalvm17-ce;
-buildGraalvmNativeImage = callPackage ../build-support/build-graalvm-native-image { };
+buildGraalvmNativeImage = (callPackage ../build-support/build-graalvm-native-image {
+graalvmDrv = graalvm-ce;
+}).override;
 openshot-qt = libsForQt5.callPackage ../applications/video/openshot-qt { };


@@ -148,6 +148,7 @@ in {
 kernelPatches = [
 kernelPatches.bridge_stp_helper
 kernelPatches.request_key_helper
+kernelPatches.fix-em-ice-bonding
 ];
 };
@@ -169,6 +170,7 @@ in {
 kernelPatches = [
 kernelPatches.bridge_stp_helper
 kernelPatches.request_key_helper
+kernelPatches.fix-em-ice-bonding
 ];
 };


@@ -2375,6 +2375,8 @@ self: super: with self; {
 deploykit = callPackage ../development/python-modules/deploykit { };
+deprecat = callPackage ../development/python-modules/deprecat { };
 deprecated = callPackage ../development/python-modules/deprecated { };
 deprecation = callPackage ../development/python-modules/deprecation { };