Merge staging-next into staging

github-actions[bot] 2023-05-07 00:03:11 +00:00 committed by GitHub
commit a0eb573683
97 changed files with 5411 additions and 1333 deletions


@ -1,7 +1,7 @@
{ pkgs ? import ../../.. {} }:
let
inherit (pkgs) lib;
manpageURLs = builtins.fromJSON (builtins.readFile (pkgs.path + "/doc/manpage-urls.json"));
manpageURLs = lib.importJSON (pkgs.path + "/doc/manpage-urls.json");
in pkgs.writeText "link-manpages.lua" ''
--[[
Adds links to known man pages that aren't already in a link.


@ -45,7 +45,10 @@ let
# NB: This file describes the Nixpkgs manual, which happens to use module
# docs infra originally developed for NixOS.
optionsDoc = pkgs.nixosOptionsDoc {
inherit (pkgs.lib.evalModules { modules = [ ../../pkgs/top-level/config.nix ]; }) options;
inherit (pkgs.lib.evalModules {
modules = [ ../../pkgs/top-level/config.nix ];
class = "nixpkgsConfig";
}) options;
documentType = "none";
transformOptions = opt:
opt // {


@ -12,7 +12,11 @@
<xi:include href="using/configuration.chapter.xml" />
<xi:include href="using/overlays.chapter.xml" />
<xi:include href="using/overrides.chapter.xml" />
</part>
<part>
<title>Nixpkgs <code>lib</code></title>
<xi:include href="functions.xml" />
<xi:include href="module-system/module-system.chapter.xml" />
</part>
<part xml:id="part-stdenv">
<title>Standard environment</title>


@ -0,0 +1,105 @@
# Module System {#module-system}
## Introduction {#module-system-introduction}
The module system is a language for handling configuration, implemented as a Nix library.
Compared to plain Nix, it adds documentation, type checking and composition or extensibility.
::: {.note}
This chapter is new and not complete yet. For a gentle introduction to the module system, in the context of NixOS, see [Writing NixOS Modules](https://nixos.org/manual/nixos/unstable/index.html#sec-writing-modules) in the NixOS manual.
:::
## `lib.evalModules` {#module-system-lib-evalModules}
Evaluate a set of modules. This function is typically only used once per application (e.g. once in NixOS, once in Home Manager, ...).
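For orientation, here is a minimal sketch of a single `evalModules` call. The `greeting` option and its values are hypothetical and only illustrate the shape of the arguments and of the return value.

```nix
let
  lib = import <nixpkgs/lib>;

  eval = lib.evalModules {
    modules = [
      # A module declaring one (hypothetical) option.
      {
        options.greeting = lib.mkOption {
          type = lib.types.str;
          default = "hello";
        };
      }
      # A module defining a value for it (shorthand syntax).
      { greeting = "hello, world"; }
    ];
  };
in
  eval.config.greeting  # evaluates to "hello, world"
```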
### Parameters {#module-system-lib-evalModules-parameters}
#### `modules` {#module-system-lib-evalModules-param-modules}
A list of modules. These are merged together to form the final configuration.
<!-- TODO link to section about merging, TBD -->
#### `specialArgs` {#module-system-lib-evalModules-param-specialArgs}
An attribute set of module arguments that can be used in `imports`.
This is in contrast to `config._module.args`, which is only available after all `imports` have been resolved.
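A sketch of the difference, assuming a hypothetical `myModulesPath` argument and `lib` being the Nixpkgs library:

```nix
lib.evalModules {
  # Available while `imports` are being resolved (and everywhere else).
  specialArgs.myModulesPath = ./modules;  # hypothetical directory

  modules = [
    ({ myModulesPath, ... }: {
      # OK: myModulesPath comes from specialArgs.
      imports = [ (myModulesPath + "/extra.nix") ];
    })
    # A value set via config._module.args could not be used in `imports`,
    # because it only becomes available after all imports are resolved.
  ];
}
```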
#### `class` {#module-system-lib-evalModules-param-class}
If the `class` attribute is set and non-`null`, the module system will reject `imports` with a different `_class` declaration.
The `class` value should be a string in lower [camel case](https://en.wikipedia.org/wiki/Camel_case).
If applicable, the `class` should match the "prefix" of the attributes used in (experimental) [flakes](https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-flake.html#description). Some examples are:
- `nixos` as in `flake.nixosModules`
- `nixosTest`: modules that constitute a [NixOS VM test](https://nixos.org/manual/nixos/stable/index.html#sec-nixos-tests)
<!-- We've only just started with `class`. You're invited to add a few more. -->
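The `class-check.nix` test added in this commit exercises this behaviour; a condensed sketch:

```nix
lib.evalModules {
  class = "nixos";
  modules = [
    # Accepted: the declared class matches.
    { _class = "nixos"; config = { }; }

    # Rejected: uncommenting this module would throw
    # "The module <file> was imported into nixos instead of darwin."
    # { _class = "darwin"; config = { }; }
  ];
}
```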
#### `prefix` {#module-system-lib-evalModules-param-prefix}
A list of strings representing the location at or below which all options are evaluated. This is used by `types.submodule` to improve error reporting and find the implicit `name` module argument.
### Return value {#module-system-lib-evalModules-return-value}
The result is an attribute set with the following attributes:
#### `options` {#module-system-lib-evalModules-return-value-options}
The nested attribute set of all option declarations.
#### `config` {#module-system-lib-evalModules-return-value-config}
The nested attribute set of all option values.
#### `type` {#module-system-lib-evalModules-return-value-type}
A module system type. This type is an instance of `types.submoduleWith` containing the current [`modules`](#module-system-lib-evalModules-param-modules).
The option definitions that are typed with this type will extend the current set of modules, like [`extendModules`](#module-system-lib-evalModules-return-value-extendModules).
However, the value returned from the type is just the [`config`](#module-system-lib-evalModules-return-value-config), like any submodule.
If you're familiar with prototype inheritance, you can think of this `evalModules` invocation as the prototype, and usages of this type as the instances.
This type is also available to the [`modules`](#module-system-lib-evalModules-param-modules) as the module argument `moduleType`.
<!-- TODO: document the module arguments. Using moduleType is like saying: suppose this configuration was extended. -->
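As a sketch (with hypothetical option names), the returned `type` can be used wherever a module system type is expected, for instance to host several extensions of the same module set:

```nix
let
  base = lib.evalModules {
    modules = [
      { options.greeting = lib.mkOption { type = lib.types.str; default = "hello"; }; }
    ];
  };
in
lib.evalModules {
  modules = [
    {
      # Each attribute of `instances` extends base's modules;
      # its value is the resulting `config`.
      options.instances = lib.mkOption {
        type = lib.types.attrsOf base.type;
        default = { };
      };
      config.instances.a = { greeting = "hi"; };
    }
  ];
}
```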
#### `extendModules` {#module-system-lib-evalModules-return-value-extendModules}
A function similar to `evalModules`, but building on top of the already passed [`modules`](#module-system-lib-evalModules-param-modules). Its arguments, `modules` and `specialArgs`, are added to the existing values.
If you're familiar with prototype inheritance, you can think of the current, actual `evalModules` invocation as the prototype, and the return value of `extendModules` as the instance.
This functionality is also available to modules as the `extendModules` module argument.
::: {.note}
**Evaluation Performance**
`extendModules` returns a configuration that shares very little with the original `evalModules` invocation, because the module arguments may be different.
So if you have a configuration that has been (or will be) largely evaluated, almost none of the computation is shared with the configuration returned by `extendModules`.
The real work of module evaluation happens while computing the values in `config` and `options`, so multiple invocations of `extendModules` have a particularly small cost, as long as only the final `config` and `options` are evaluated.
If you do reference multiple `config` (or `options`) from before and after `extendModules`, evaluation performance is the same as with multiple `evalModules` invocations, because the new modules' ability to override existing configuration fundamentally requires constructing a new `config` and `options` fixpoint.
:::
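A sketch of `extendModules` in use, reusing the hypothetical `greeting` option from the examples above:

```nix
let
  base = lib.evalModules {
    modules = [
      { options.greeting = lib.mkOption { type = lib.types.str; default = "hello"; }; }
    ];
  };

  # Build on top of the already-passed modules.
  extended = base.extendModules {
    modules = [ { greeting = "howdy"; } ];
  };
in
  [ base.config.greeting extended.config.greeting ]  # [ "hello" "howdy" ]
```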
#### `_module` {#module-system-lib-evalModules-return-value-_module}
A portion of the configuration tree which is elided from `config`.
<!-- TODO: when markdown migration is complete, make _module docs visible again and reference _module docs. Maybe move those docs into this chapter? -->
#### `_type` {#module-system-lib-evalModules-return-value-_type}
A nominal type marker, always `"configuration"`.
#### `class` {#module-system-lib-evalModules-return-value-_configurationClass}
The [`class` argument](#module-system-lib-evalModules-param-class).


@ -63,39 +63,8 @@ let
decls
));
in
rec {
/*
Evaluate a set of modules. The result is a set with the attributes:
options: The nested set of all option declarations,
config: The nested set of all option values.
type: A module system type representing the module set as a submodule,
to be extended by configuration from the containing module set.
This is also available as the module argument moduleType.
extendModules: A function similar to evalModules but building on top
of the module set. Its arguments, modules and specialArgs are
added to the existing values.
Using extendModules a few times has no performance impact as long
as you only reference the final options and config.
If you do reference multiple config (or options) from before and
after extendModules, performance is the same as with multiple
evalModules invocations, because the new modules' ability to
override existing configuration fundamentally requires a new
fixpoint to be constructed.
This is also available as a module argument.
_module: A portion of the configuration tree which is elided from
config. It contains some values that are mostly internal to the
module system implementation.
/* See https://nixos.org/manual/nixpkgs/unstable/#module-system-lib-evalModules
or file://./../doc/module-system/module-system.chapter.md
!!! Please think twice before adding to this argument list! The more
that is specified here instead of in the modules themselves the harder
@ -110,8 +79,12 @@ rec {
# there's _module.args. If specialArgs.modulesPath is defined it will be
# used as the base path for disabledModules.
specialArgs ? {}
, # This will be removed in the future; prefer the _module.args option instead.
args ? {}
, # `class`:
# A nominal type for modules. When set and non-null, this adds a check to
# make sure that only compatible modules are imported.
class ? null
, # This will be removed in the future; prefer the _module.args option instead.
args ? {}
, # This will be removed in the future; prefer the _module.check option instead.
check ? true
}:
@ -260,6 +233,7 @@ rec {
merged =
let collected = collectModules
class
(specialArgs.modulesPath or "")
(regularModules ++ [ internalModule ])
({ inherit lib options config specialArgs; } // specialArgs);
@ -336,38 +310,64 @@ rec {
prefix ? [],
}:
evalModules (evalModulesArgs // {
inherit class;
modules = regularModules ++ modules;
specialArgs = evalModulesArgs.specialArgs or {} // specialArgs;
prefix = extendArgs.prefix or evalModulesArgs.prefix or [];
});
type = lib.types.submoduleWith {
inherit modules specialArgs;
inherit modules specialArgs class;
};
result = withWarnings {
_type = "configuration";
options = checked options;
config = checked (removeAttrs config [ "_module" ]);
_module = checked (config._module);
inherit extendModules type;
class = class;
};
in result;
# collectModules :: (modulesPath: String) -> (modules: [ Module ]) -> (args: Attrs) -> [ Module ]
# collectModules :: (class: String) -> (modulesPath: String) -> (modules: [ Module ]) -> (args: Attrs) -> [ Module ]
#
# Collects all modules recursively through `import` statements, filtering out
# all modules in disabledModules.
collectModules = let
collectModules = class: let
# Like unifyModuleSyntax, but also imports paths and calls functions if necessary
loadModule = args: fallbackFile: fallbackKey: m:
if isFunction m || isAttrs m then
unifyModuleSyntax fallbackFile fallbackKey (applyModuleArgsIfFunction fallbackKey m args)
if isFunction m then
unifyModuleSyntax fallbackFile fallbackKey (applyModuleArgs fallbackKey m args)
else if isAttrs m then
if m._type or "module" == "module" then
unifyModuleSyntax fallbackFile fallbackKey m
else if m._type == "if" || m._type == "override" then
loadModule args fallbackFile fallbackKey { config = m; }
else
throw (
"Could not load a value as a module, because it is of type ${lib.strings.escapeNixString m._type}"
+ lib.optionalString (fallbackFile != unknownModule) ", in file ${toString fallbackFile}."
+ lib.optionalString (m._type == "configuration") " If you do intend to import this configuration, please only import the modules that make up the configuration. You may have to create a `let` binding, file or attribute to give yourself access to the relevant modules.\nWhile loading a configuration into the module system is a very sensible idea, it can not be done cleanly in practice."
# Extended explanation: That's because a finalized configuration is more than just a set of modules. For instance, it has its own `specialArgs` that, by the nature of `specialArgs`, can't be loaded through `imports` or the `modules` argument. So instead, we have to ask you to extract the relevant modules and use those instead. This way, we keep the module system comparatively simple, and hopefully avoid a bad surprise down the line.
)
else if isList m then
let defs = [{ file = fallbackFile; value = m; }]; in
throw "Module imports can't be nested lists. Perhaps you meant to remove one level of lists? Definitions: ${showDefs defs}"
else unifyModuleSyntax (toString m) (toString m) (applyModuleArgsIfFunction (toString m) (import m) args);
checkModule =
if class != null
then
m:
if m._class != null -> m._class == class
then m
else
throw "The module ${m._file or m.key} was imported into ${class} instead of ${m._class}."
else
m: m;
/*
Collects all modules recursively into the form
@ -401,7 +401,7 @@ rec {
};
in parentFile: parentKey: initialModules: args: collectResults (imap1 (n: x:
let
module = loadModule args parentFile "${parentKey}:anon-${toString n}" x;
module = checkModule (loadModule args parentFile "${parentKey}:anon-${toString n}" x);
collectedImports = collectStructuredModules module._file module.key module.imports args;
in {
key = module.key;
@ -465,11 +465,12 @@ rec {
else config;
in
if m ? config || m ? options then
let badAttrs = removeAttrs m ["_file" "key" "disabledModules" "imports" "options" "config" "meta" "freeformType"]; in
let badAttrs = removeAttrs m ["_class" "_file" "key" "disabledModules" "imports" "options" "config" "meta" "freeformType"]; in
if badAttrs != {} then
throw "Module `${key}' has an unsupported attribute `${head (attrNames badAttrs)}'. This is caused by introducing a top-level `config' or `options' attribute. Add configuration attributes immediately on the top level instead, or move all of them (namely: ${toString (attrNames badAttrs)}) into the explicit `config' attribute."
else
{ _file = toString m._file or file;
_class = m._class or null;
key = toString m.key or key;
disabledModules = m.disabledModules or [];
imports = m.imports or [];
@ -480,14 +481,18 @@ rec {
# shorthand syntax
lib.throwIfNot (isAttrs m) "module ${file} (${key}) does not look like a module."
{ _file = toString m._file or file;
_class = m._class or null;
key = toString m.key or key;
disabledModules = m.disabledModules or [];
imports = m.require or [] ++ m.imports or [];
options = {};
config = addFreeformType (removeAttrs m ["_file" "key" "disabledModules" "require" "imports" "freeformType"]);
config = addFreeformType (removeAttrs m ["_class" "_file" "key" "disabledModules" "require" "imports" "freeformType"]);
};
applyModuleArgsIfFunction = key: f: args@{ config, options, lib, ... }: if isFunction f then
applyModuleArgsIfFunction = key: f: args@{ config, options, lib, ... }:
if isFunction f then applyModuleArgs key f args else f;
applyModuleArgs = key: f: args@{ config, options, lib, ... }:
let
# Module arguments are resolved in a strict manner when attribute set
# deconstruction is used. As the arguments are now defined with the
@ -511,9 +516,7 @@ rec {
# context on the explicit arguments of "args" too. This update
# operator is used to make the "args@{ ... }: with args.lib;" notation
# works.
in f (args // extraArgs)
else
f;
in f (args // extraArgs);
/* Merge a list of modules. This will recurse over the option
declarations in all modules, combining them into a single set.
@ -1218,4 +1221,67 @@ rec {
_file = file;
config = lib.importTOML file;
};
private = lib.mapAttrs
(k: lib.warn "External use of `lib.modules.${k}` is deprecated. If your use case isn't covered by non-deprecated functions, we'd like to know more and perhaps support your use case well, instead of providing access to these low level functions. In this case please open an issue in https://github.com/nixos/nixpkgs/issues/.")
{
inherit
applyModuleArgsIfFunction
dischargeProperties
evalOptionValue
mergeModules
mergeModules'
pushDownProperties
unifyModuleSyntax
;
collectModules = collectModules null;
};
in
private //
{
# NOTE: not all of these functions are necessarily public interfaces; some
# are just needed by types.nix, but are not meant to be consumed
# externally.
inherit
defaultOrderPriority
defaultOverridePriority
defaultPriority
doRename
evalModules
filterOverrides
filterOverrides'
fixMergeModules
fixupOptionType # should be private?
importJSON
importTOML
mergeDefinitions
mergeOptionDecls # should be private?
mkAfter
mkAliasAndWrapDefinitions
mkAliasAndWrapDefsWithPriority
mkAliasDefinitions
mkAliasIfDef
mkAliasOptionModule
mkAliasOptionModuleMD
mkAssert
mkBefore
mkChangedOptionModule
mkDefault
mkDerivedConfig
mkFixStrictness
mkForce
mkIf
mkImageMediaOverride
mkMerge
mkMergedOptionModule
mkOptionDefault
mkOrder
mkOverride
mkRemovedOptionModule
mkRenamedOptionModule
mkRenamedOptionModuleWith
mkVMOverride
setDefaultModuleLocation
sortProperties;
}


@ -166,6 +166,7 @@ checkConfigError 'The option .* does not exist. Definition values:\n\s*- In .*'
checkConfigOutput '^true$' "$@" ./define-module-check.nix
# Check coerced value.
set --
checkConfigOutput '^"42"$' config.value ./declare-coerced-value.nix
checkConfigOutput '^"24"$' config.value ./declare-coerced-value.nix ./define-value-string.nix
checkConfigError 'A definition for option .* is not.*string or signed integer convertible to it.*. Definition values:\n\s*- In .*: \[ \]' config.value ./declare-coerced-value.nix ./define-value-list.nix
@ -254,6 +255,8 @@ checkConfigError 'A definition for option .* is not of type .*' \
## Freeform modules
# Assigning without a declared option should work
checkConfigOutput '^"24"$' config.value ./freeform-attrsOf.nix ./define-value-string.nix
# Shorthand modules interpret `meta` and `class` as config items
checkConfigOutput '^true$' options._module.args.value.result ./freeform-attrsOf.nix ./define-freeform-keywords-shorthand.nix
# Having no freeform assignments shouldn't make it error
checkConfigOutput '^{ }$' config ./freeform-attrsOf.nix
# but only if the type matches
@ -359,6 +362,24 @@ checkConfigOutput 'ok' config.freeformItems.foo.bar ./adhoc-freeformType-survive
# because of an `extendModules` bug, issue 168767.
checkConfigOutput '^1$' config.sub.specialisation.value ./extendModules-168767-imports.nix
# Class checks, evalModules
checkConfigOutput '^{ }$' config.ok.config ./class-check.nix
checkConfigOutput '"nixos"' config.ok.class ./class-check.nix
checkConfigError 'The module .*/module-class-is-darwin.nix was imported into nixos instead of darwin.' config.fail.config ./class-check.nix
checkConfigError 'The module foo.nix#darwinModules.default was imported into nixos instead of darwin.' config.fail-anon.config ./class-check.nix
# Class checks, submoduleWith
checkConfigOutput '^{ }$' config.sub.nixosOk ./class-check.nix
checkConfigError 'The module .*/module-class-is-darwin.nix was imported into nixos instead of darwin.' config.sub.nixosFail.config ./class-check.nix
# submoduleWith type merge with different class
checkConfigError 'error: A submoduleWith option is declared multiple times with conflicting class values "darwin" and "nixos".' config.sub.mergeFail.config ./class-check.nix
# _type check
checkConfigError 'Could not load a value as a module, because it is of type "flake", in file .*/module-imports-_type-check.nix' config.ok.config ./module-imports-_type-check.nix
checkConfigOutput '^true$' "$@" config.enable ./declare-enable.nix ./define-enable-with-top-level-mkIf.nix
checkConfigError 'Could not load a value as a module, because it is of type "configuration", in file .*/import-configuration.nix.*please only import the modules that make up the configuration.*' config ./import-configuration.nix
# doRename works when `warnings` does not exist.
checkConfigOutput '^1234$' config.c.d.e ./doRename-basic.nix
# doRename adds a warning.


@ -0,0 +1,76 @@
{ lib, ... }: {
options = {
sub = {
nixosOk = lib.mkOption {
type = lib.types.submoduleWith {
class = "nixos";
modules = [ ];
};
};
# Same but will have bad definition
nixosFail = lib.mkOption {
type = lib.types.submoduleWith {
class = "nixos";
modules = [ ];
};
};
mergeFail = lib.mkOption {
type = lib.types.submoduleWith {
class = "nixos";
modules = [ ];
};
default = { };
};
};
};
imports = [
{
options = {
sub = {
mergeFail = lib.mkOption {
type = lib.types.submoduleWith {
class = "darwin";
modules = [ ];
};
};
};
};
}
];
config = {
_module.freeformType = lib.types.anything;
ok =
lib.evalModules {
class = "nixos";
modules = [
./module-class-is-nixos.nix
];
};
fail =
lib.evalModules {
class = "nixos";
modules = [
./module-class-is-nixos.nix
./module-class-is-darwin.nix
];
};
fail-anon =
lib.evalModules {
class = "nixos";
modules = [
./module-class-is-nixos.nix
{ _file = "foo.nix#darwinModules.default";
_class = "darwin";
config = {};
imports = [];
}
];
};
sub.nixosOk = { _class = "nixos"; };
sub.nixosFail = { imports = [ ./module-class-is-darwin.nix ]; };
};
}


@ -0,0 +1,5 @@
{ lib, ... }:
# I think this might occur more realistically in a submodule
{
imports = [ (lib.mkIf true { enable = true; }) ];
}


@ -0,0 +1,15 @@
{ config, ... }: {
class = { "just" = "data"; };
a = "one";
b = "two";
meta = "meta";
_module.args.result =
let r = builtins.removeAttrs config [ "_module" ];
in builtins.trace (builtins.deepSeq r r) (r == {
a = "one";
b = "two";
class = { "just" = "data"; };
meta = "meta";
});
}


@ -0,0 +1,12 @@
{ lib, ... }:
let
myconf = lib.evalModules { modules = [ { } ]; };
in
{
imports = [
# We can't do this. A configuration is not equal to its set of modules.
# Equating those would lead to a mess, as specialArgs, anonymous modules
# that can't be deduplicated, and possibly more come into play.
myconf
];
}


@ -0,0 +1,4 @@
{
_class = "darwin";
config = {};
}


@ -0,0 +1,4 @@
{
_class = "nixos";
config = {};
}


@ -0,0 +1,3 @@
{
imports = [ { _type = "flake"; } ];
}


@ -696,6 +696,7 @@ rec {
, specialArgs ? {}
, shorthandOnlyDefinesConfig ? false
, description ? null
, class ? null
}@attrs:
let
inherit (lib.modules) evalModules;
@ -707,7 +708,7 @@ rec {
) defs;
base = evalModules {
inherit specialArgs;
inherit class specialArgs;
modules = [{
# This is a work-around for the fact that some sub-modules,
# such as the one included in an attribute set, expects an "args"
@ -762,9 +763,14 @@ rec {
functor = defaultFunctor name // {
type = types.submoduleWith;
payload = {
inherit modules specialArgs shorthandOnlyDefinesConfig description;
inherit modules class specialArgs shorthandOnlyDefinesConfig description;
};
binOp = lhs: rhs: {
class =
if lhs.class == null then rhs.class
else if rhs.class == null then lhs.class
else if lhs.class == rhs.class then lhs.class
else throw "A submoduleWith option is declared multiple times with conflicting class values \"${toString lhs.class}\" and \"${toString rhs.class}\".";
modules = lhs.modules ++ rhs.modules;
specialArgs =
let intersecting = builtins.intersectAttrs lhs.specialArgs rhs.specialArgs;


@ -1654,6 +1654,16 @@
githubId = 1017537;
name = "Bruno Bieth";
};
badele = {
name = "Bruno Adelé";
email = "brunoadele@gmail.com";
matrix = "@badele:matrix.org";
github = "badele";
githubId = 2806307;
keys = [{
fingerprint = "00F4 21C4 C537 7BA3 9820 E13F 6B95 E13D E469 CC5D";
}];
};
badmutex = {
email = "github@badi.sh";
github = "badmutex";


@ -33,6 +33,7 @@ let
];
specialArgs = {
inherit config pkgs utils;
class = "nixos";
};
};
docs = import "${nixosPath}/doc/manual" {


@ -38,6 +38,7 @@ let
# is experimental.
lib.evalModules {
inherit prefix modules;
class = "nixos";
specialArgs = {
modulesPath = builtins.toString ../modules;
} // specialArgs;


@ -1,7 +1,10 @@
{ lib }:
let
evalTest = module: lib.evalModules { modules = testModules ++ [ module ]; };
evalTest = module: lib.evalModules {
modules = testModules ++ [ module ];
class = "nixosTest";
};
runTest = module: (evalTest ({ config, ... }: { imports = [ module ]; result = config.test; })).config.result;
testModules = [


@ -38,6 +38,7 @@ let
modules = [ {
_module.check = false;
} ] ++ docModules.eager;
class = "nixos";
specialArgs = specialArgs // {
pkgs = scrubDerivations "pkgs" pkgs;
# allow access to arbitrary options for eager modules, eg for getting


@ -1,8 +1,9 @@
{pkgs, config, lib, ...}:
{ pkgs, config, lib, ... }:
with lib;
let
cfg = config.programs.fzf;
in {
in
{
options = {
programs.fzf = {
fuzzyCompletion = mkEnableOption (mdDoc "fuzzy completion with fzf");
@ -11,17 +12,21 @@ in {
};
config = {
environment.systemPackages = optional (cfg.keybindings || cfg.fuzzyCompletion) pkgs.fzf;
programs.bash.interactiveShellInit = optionalString cfg.fuzzyCompletion ''
source ${pkgs.fzf}/share/fzf/completion.bash
'' + optionalString cfg.keybindings ''
source ${pkgs.fzf}/share/fzf/key-bindings.bash
'';
programs.zsh.interactiveShellInit = optionalString cfg.fuzzyCompletion ''
source ${pkgs.fzf}/share/fzf/completion.zsh
'' + optionalString cfg.keybindings ''
source ${pkgs.fzf}/share/fzf/key-bindings.zsh
'';
programs.zsh.interactiveShellInit = optionalString (!config.programs.zsh.ohMyZsh.enable)
(optionalString cfg.fuzzyCompletion ''
source ${pkgs.fzf}/share/fzf/completion.zsh
'' + optionalString cfg.keybindings ''
source ${pkgs.fzf}/share/fzf/key-bindings.zsh
'');
programs.zsh.ohMyZsh.plugins = optional (cfg.keybindings || cfg.fuzzyCompletion) [ "fzf" ];
};
meta.maintainers = with maintainers; [ laalsaas ];
}


@ -9,6 +9,7 @@ let
env = {
GUNICORN_CMD_ARGS = "--bind=${cfg.address}:${toString cfg.port}";
DEBUG = "0";
DEBUG_TOOLBAR = "0";
MEDIA_ROOT = "/var/lib/tandoor-recipes";
} // optionalAttrs (config.time.timeZone != null) {
TIMEZONE = config.time.timeZone;


@ -2,18 +2,22 @@
with lib;
let cfg = config.services.cloud-init;
path = with pkgs; [
cloud-init
iproute2
nettools
openssh
shadow
util-linux
busybox
] ++ optional cfg.btrfs.enable btrfs-progs
++ optional cfg.ext4.enable e2fsprogs
;
let
cfg = config.services.cloud-init;
path = with pkgs; [
cloud-init
iproute2
nettools
openssh
shadow
util-linux
busybox
]
++ optional cfg.btrfs.enable btrfs-progs
++ optional cfg.ext4.enable e2fsprogs
;
settingsFormat = pkgs.formats.yaml { };
cfgfile = settingsFormat.generate "cloud.cfg" cfg.settings;
in
{
options = {
@ -21,7 +25,7 @@ in
enable = mkOption {
type = types.bool;
default = false;
description = lib.mdDoc ''
description = mdDoc ''
Enable the cloud-init service. This services reads
configuration metadata in a cloud environment and configures
the machine according to this metadata.
@ -40,7 +44,7 @@ in
btrfs.enable = mkOption {
type = types.bool;
default = false;
description = lib.mdDoc ''
description = mdDoc ''
Allow the cloud-init service to operate `btrfs` filesystem.
'';
};
@ -48,7 +52,7 @@ in
ext4.enable = mkOption {
type = types.bool;
default = true;
description = lib.mdDoc ''
description = mdDoc ''
Allow the cloud-init service to operate `ext4` filesystem.
'';
};
@ -56,141 +60,170 @@ in
network.enable = mkOption {
type = types.bool;
default = false;
description = lib.mdDoc ''
description = mdDoc ''
Allow the cloud-init service to configure network interfaces
through systemd-networkd.
'';
};
settings = mkOption {
description = mdDoc ''
Structured cloud-init configuration.
'';
type = types.submodule {
freeformType = settingsFormat.type;
};
default = { };
};
config = mkOption {
type = types.str;
default = ''
system_info:
distro: nixos
network:
renderers: [ 'networkd' ]
users:
- root
disable_root: false
preserve_hostname: false
cloud_init_modules:
- migrator
- seed_random
- bootcmd
- write-files
- growpart
- resizefs
- update_hostname
- resolv_conf
- ca-certs
- rsyslog
- users-groups
cloud_config_modules:
- disk_setup
- mounts
- ssh-import-id
- set-passwords
- timezone
- disable-ec2-metadata
- runcmd
- ssh
cloud_final_modules:
- rightscale_userdata
- scripts-vendor
- scripts-per-once
- scripts-per-boot
- scripts-per-instance
- scripts-user
- ssh-authkey-fingerprints
- keys-to-console
- phone-home
- final-message
- power-state-change
'';
description = lib.mdDoc "cloud-init configuration.";
default = "";
description = mdDoc ''
raw cloud-init configuration.
Takes precedence over the `settings` option if set.
'';
};
};
};
config = mkIf cfg.enable {
environment.etc."cloud/cloud.cfg".text = cfg.config;
config = {
services.cloud-init.settings = {
system_info = mkDefault {
distro = "nixos";
network = {
renderers = [ "networkd" ];
};
};
users = mkDefault [ "root" ];
disable_root = mkDefault false;
preserve_hostname = mkDefault false;
cloud_init_modules = mkDefault [
"migrator"
"seed_random"
"bootcmd"
"write-files"
"growpart"
"resizefs"
"update_hostname"
"resolv_conf"
"ca-certs"
"rsyslog"
"users-groups"
];
cloud_config_modules = mkDefault [
"disk_setup"
"mounts"
"ssh-import-id"
"set-passwords"
"timezone"
"disable-ec2-metadata"
"runcmd"
"ssh"
];
cloud_final_modules = mkDefault [
"rightscale_userdata"
"scripts-vendor"
"scripts-per-once"
"scripts-per-boot"
"scripts-per-instance"
"scripts-user"
"ssh-authkey-fingerprints"
"keys-to-console"
"phone-home"
"final-message"
"power-state-change"
];
};
} // (mkIf cfg.enable {
environment.etc."cloud/cloud.cfg" =
if cfg.config == "" then
{ source = cfgfile; }
else
{ text = cfg.config; }
;
systemd.network.enable = cfg.network.enable;
systemd.services.cloud-init-local =
{ description = "Initial cloud-init job (pre-networking)";
wantedBy = [ "multi-user.target" ];
before = ["systemd-networkd.service"];
path = path;
serviceConfig =
{ Type = "oneshot";
ExecStart = "${pkgs.cloud-init}/bin/cloud-init init --local";
RemainAfterExit = "yes";
TimeoutSec = "infinity";
StandardOutput = "journal+console";
};
systemd.services.cloud-init-local = {
description = "Initial cloud-init job (pre-networking)";
wantedBy = [ "multi-user.target" ];
before = [ "systemd-networkd.service" ];
path = path;
serviceConfig = {
Type = "oneshot";
ExecStart = "${pkgs.cloud-init}/bin/cloud-init init --local";
RemainAfterExit = "yes";
TimeoutSec = "infinity";
StandardOutput = "journal+console";
};
};
systemd.services.cloud-init =
{ description = "Initial cloud-init job (metadata service crawler)";
wantedBy = [ "multi-user.target" ];
wants = [ "network-online.target" "cloud-init-local.service"
"sshd.service" "sshd-keygen.service" ];
after = [ "network-online.target" "cloud-init-local.service" ];
before = [ "sshd.service" "sshd-keygen.service" ];
requires = [ "network.target"];
path = path;
serviceConfig =
{ Type = "oneshot";
ExecStart = "${pkgs.cloud-init}/bin/cloud-init init";
RemainAfterExit = "yes";
TimeoutSec = "infinity";
StandardOutput = "journal+console";
};
systemd.services.cloud-init = {
description = "Initial cloud-init job (metadata service crawler)";
wantedBy = [ "multi-user.target" ];
wants = [
"network-online.target"
"cloud-init-local.service"
"sshd.service"
"sshd-keygen.service"
];
after = [ "network-online.target" "cloud-init-local.service" ];
before = [ "sshd.service" "sshd-keygen.service" ];
requires = [ "network.target" ];
path = path;
serviceConfig = {
Type = "oneshot";
ExecStart = "${pkgs.cloud-init}/bin/cloud-init init";
RemainAfterExit = "yes";
TimeoutSec = "infinity";
StandardOutput = "journal+console";
};
};
systemd.services.cloud-config =
{ description = "Apply the settings specified in cloud-config";
wantedBy = [ "multi-user.target" ];
wants = [ "network-online.target" ];
after = [ "network-online.target" "syslog.target" "cloud-config.target" ];
systemd.services.cloud-config = {
description = "Apply the settings specified in cloud-config";
wantedBy = [ "multi-user.target" ];
wants = [ "network-online.target" ];
after = [ "network-online.target" "syslog.target" "cloud-config.target" ];
path = path;
serviceConfig =
{ Type = "oneshot";
ExecStart = "${pkgs.cloud-init}/bin/cloud-init modules --mode=config";
RemainAfterExit = "yes";
TimeoutSec = "infinity";
StandardOutput = "journal+console";
};
path = path;
serviceConfig = {
Type = "oneshot";
ExecStart = "${pkgs.cloud-init}/bin/cloud-init modules --mode=config";
RemainAfterExit = "yes";
TimeoutSec = "infinity";
StandardOutput = "journal+console";
};
};
systemd.services.cloud-final =
{ description = "Execute cloud user/final scripts";
wantedBy = [ "multi-user.target" ];
wants = [ "network-online.target" ];
after = [ "network-online.target" "syslog.target" "cloud-config.service" "rc-local.service" ];
requires = [ "cloud-config.target" ];
path = path;
serviceConfig =
{ Type = "oneshot";
ExecStart = "${pkgs.cloud-init}/bin/cloud-init modules --mode=final";
RemainAfterExit = "yes";
TimeoutSec = "infinity";
StandardOutput = "journal+console";
};
systemd.services.cloud-final = {
description = "Execute cloud user/final scripts";
wantedBy = [ "multi-user.target" ];
wants = [ "network-online.target" ];
after = [ "network-online.target" "syslog.target" "cloud-config.service" "rc-local.service" ];
requires = [ "cloud-config.target" ];
path = path;
serviceConfig = {
Type = "oneshot";
ExecStart = "${pkgs.cloud-init}/bin/cloud-init modules --mode=final";
RemainAfterExit = "yes";
TimeoutSec = "infinity";
StandardOutput = "journal+console";
};
};
systemd.targets.cloud-config =
{ description = "Cloud-config availability";
requires = [ "cloud-init-local.service" "cloud-init.service" ];
};
};
systemd.targets.cloud-config = {
description = "Cloud-config availability";
requires = [ "cloud-init-local.service" "cloud-init.service" ];
};
});
}


@ -272,7 +272,7 @@ let
suggestedRootDevice = {
"efi_bootloading_with_default_fs" = "${cfg.bootLoaderDevice}2";
"legacy_bootloading_with_default_fs" = "${cfg.bootLoaderDevice}1";
"direct_boot_with_default_fs" = lookupDriveDeviceName "root" cfg.qemu.drives;
"direct_boot_with_default_fs" = cfg.bootLoaderDevice;
# This will enforce a NixOS module type checking error
# to ask explicitly the user to set a rootDevice.
# As it will look like `rootDevice = lib.mkDefault null;` after
@ -344,14 +344,14 @@ in
virtualisation.bootLoaderDevice =
mkOption {
type = types.nullOr types.path;
default = if cfg.useBootLoader then lookupDriveDeviceName "root" cfg.qemu.drives else null;
defaultText = literalExpression ''if cfg.useBootLoader then lookupDriveDeviceName "root" cfg.qemu.drives else null;'';
type = types.path;
default = lookupDriveDeviceName "root" cfg.qemu.drives;
defaultText = literalExpression ''lookupDriveDeviceName "root" cfg.qemu.drives'';
example = "/dev/vda";
description =
lib.mdDoc ''
The disk to be used for the boot filesystem.
By default, it is the same disk as the root filesystem if you use a bootloader, otherwise it's null.
By default, it is the same disk as the root filesystem.
'';
};
@ -862,16 +862,6 @@ in
Invalid virtualisation.forwardPorts.<entry ${toString i}>.guest.address:
The address must be in the default VLAN (10.0.2.0/24).
'';
}
{ assertion = cfg.useBootLoader -> cfg.diskImage != null;
message =
''
Currently, bootloaders cannot be used with a tmpfs disk image.
It would require some rework in the boot configuration mechanism
to detect the proper boot partition in UEFI scenarios for example.
If you are interested into this feature, please open an issue or open a pull request.
'';
}
]));
@ -899,8 +889,7 @@ in
# legacy and UEFI. In order to avoid this, we have to put "nodev" to force UEFI-only installs.
# Otherwise, we set the proper bootloader device for this.
# FIXME: make a sense of this mess wrt to multiple ESP present in the system, probably use boot.efiSysMountpoint?
boot.loader.grub.enable = cfg.useBootLoader;
boot.loader.grub.device = mkIf cfg.useBootLoader (mkVMOverride (if cfg.useEFIBoot then "nodev" else cfg.bootLoaderDevice));
boot.loader.grub.device = mkVMOverride (if cfg.useEFIBoot then "nodev" else cfg.bootLoaderDevice);
boot.loader.grub.gfxmodeBios = with cfg.resolution; "${toString x}x${toString y}";
virtualisation.rootDevice = mkDefault suggestedRootDevice;
@ -908,13 +897,13 @@ in
boot.loader.supportsInitrdSecrets = mkIf (!cfg.useBootLoader) (mkVMOverride false);
boot.initrd.extraUtilsCommands = lib.mkIf (cfg.useDefaultFilesystems && !config.boot.initrd.systemd.enable && cfg.diskImage != null)
boot.initrd.extraUtilsCommands = lib.mkIf (cfg.useDefaultFilesystems && !config.boot.initrd.systemd.enable)
''
# We need mke2fs in the initrd.
copy_bin_and_libs ${pkgs.e2fsprogs}/bin/mke2fs
'';
boot.initrd.postDeviceCommands = lib.mkIf (cfg.useDefaultFilesystems && !config.boot.initrd.systemd.enable && cfg.diskImage != null)
boot.initrd.postDeviceCommands = lib.mkIf (cfg.useDefaultFilesystems && !config.boot.initrd.systemd.enable)
''
# If the disk image appears to be empty, run mke2fs to
# initialise.


@ -42,7 +42,7 @@
}:
let
hashesFile = builtins.fromJSON (builtins.readFile ./hashes.json);
hashesFile = lib.importJSON ./hashes.json;
getCoreSrc = core:
fetchFromGitHub (builtins.getAttr core hashesFile);


@ -1,15 +1,15 @@
{ lib, fetchFromGitHub }:
rec {
version = "1.4.4";
version = "1.4.9";
src = fetchFromGitHub {
owner = "TandoorRecipes";
repo = "recipes";
rev = version;
sha256 = "sha256-1wqZoOT2Aafbs2P0mL33jw5HkrLIitUcRt6bQQcHx40=";
sha256 = "sha256-h424lUm/wmCHXkMW2XejogvH3wL/+J67cG4m8rIWM1U=";
};
yarnSha256 = "sha256-gH0q3pJ2BC5pAU9KSo3C9DDRUnpypoyLOEqKSrkxYrk=";
yarnSha256 = "sha256-LJ0uL66tcK6zL8Mkd2UB8dHsslMTtf8wQmgbZdvOT6s=";
meta = with lib; {
homepage = "https://tandoor.dev/";


@ -2,6 +2,7 @@
, nixosTests
, python3
, fetchFromGitHub
, fetchpatch
}:
let
python = python3.override {
@ -41,6 +42,12 @@ python.pkgs.pythonPackages.buildPythonPackage rec {
patches = [
# Allow setting MEDIA_ROOT through environment variable
./media-root.patch
# Address CVE-2023-31047 on Django 4.2.1+
(fetchpatch {
name = "fix-multiple-file-field";
url = "https://github.com/TandoorRecipes/recipes/pull/2458/commits/6b04c922977317354a367487427b15a8ed619be9.patch";
hash = "sha256-KmfjJSrB/4tOWtU7zrDJ/AOG4XlmWy/halw8IEEXdZ0=";
})
];
propagatedBuildInputs = with python.pkgs; [
@ -101,8 +108,10 @@ python.pkgs.pythonPackages.buildPythonPackage rec {
buildPhase = ''
runHook preBuild
# Avoid dependency on django debug toolbar
# Disable debug logging
export DEBUG=0
# Avoid dependency on django debug toolbar
export DEBUG_TOOLBAR=0
# See https://github.com/TandoorRecipes/recipes/issues/2043
mkdir cookbook/static/themes/maps/


@ -1,4 +1,4 @@
{ stdenv, fetchYarnDeps, fixup_yarn_lock, callPackage, nodejs_16 }:
{ stdenv, fetchYarnDeps, fixup_yarn_lock, callPackage, nodejs }:
let
common = callPackage ./common.nix { };
in
@ -15,9 +15,8 @@ stdenv.mkDerivation {
nativeBuildInputs = [
fixup_yarn_lock
# Use Node JS 16 because of @achrinza/node-ipc@9.2.2
nodejs_16
nodejs_16.pkgs.yarn
nodejs
nodejs.pkgs.yarn
];
configurePhase = ''


@ -1,5 +1,5 @@
{ fetchurl, fetchFromGitLab }:
let src = builtins.fromJSON (builtins.readFile ./src.json);
{ lib, fetchurl, fetchFromGitLab }:
let src = lib.importJSON ./src.json;
in
{
inherit (src) packageVersion;


@ -39,14 +39,14 @@
, makeWrapper
, wrapGAppsHook
, withQt ? true
, qt5 ? null
, qt6 ? null
, ApplicationServices
, SystemConfiguration
, gmp
, asciidoctor
}:
assert withQt -> qt5 != null;
assert withQt -> qt6 != null;
let
version = "4.0.5";
@ -70,6 +70,7 @@ stdenv.mkDerivation {
# Fix `extcap` and `plugins` paths. See https://bugs.wireshark.org/bugzilla/show_bug.cgi?id=16444
"-DCMAKE_INSTALL_LIBDIR=lib"
"-DLEMON_C_COMPILER=cc"
"-DUSE_qt6=ON"
] ++ lib.optionals (stdenv.buildPlatform != stdenv.hostPlatform) [
"-DHAVE_C99_VSNPRINTF_EXITCODE=0"
"-DHAVE_C99_VSNPRINTF_EXITCODE__TRYRUN_OUTPUT="
@ -79,7 +80,7 @@ stdenv.mkDerivation {
env.NIX_CFLAGS_COMPILE = toString [ "-DQT_NO_DEBUG" ];
nativeBuildInputs = [ asciidoctor bison cmake ninja flex makeWrapper pkg-config python3 perl ]
++ lib.optionals withQt [ qt5.wrapQtAppsHook wrapGAppsHook ];
++ lib.optionals withQt [ qt6.wrapQtAppsHook wrapGAppsHook ];
depsBuildBuild = [ buildPackages.stdenv.cc ];
@ -108,11 +109,10 @@ stdenv.mkDerivation {
c-ares
glib
zlib
] ++ lib.optionals withQt (with qt5; [ qtbase qtmultimedia qtsvg qttools ])
++ lib.optionals (withQt && stdenv.isLinux) [ qt5.qtwayland ]
] ++ lib.optionals withQt (with qt6; [ qtbase qtmultimedia qtsvg qttools qt5compat ])
++ lib.optionals (withQt && stdenv.isLinux) [ qt6.qtwayland ]
++ lib.optionals stdenv.isLinux [ libcap libnl sbc ]
++ lib.optionals stdenv.isDarwin [ SystemConfiguration ApplicationServices gmp ]
++ lib.optionals (withQt && stdenv.isDarwin) (with qt5; [ qtmacextras ]);
++ lib.optionals stdenv.isDarwin [ SystemConfiguration ApplicationServices gmp ];
strictDeps = true;

File diff suppressed because it is too large


@ -13,13 +13,13 @@
rustPlatform.buildRustPackage rec {
pname = "noaa-apt";
version = "1.3.1";
version = "1.4.0";
src = fetchFromGitHub {
owner = "martinber";
repo = "noaa-apt";
rev = "v${version}";
sha256 = "sha256-A78O5HkD/LyfvjLJjf7PpJDuftkNbaxq7Zs5kNUaULk=";
sha256 = "sha256-wmjglF2+BFmlTfvqt90nbCxuldN8AEFXj7y9tgTvA2Y=";
};
nativeBuildInputs = [
@ -55,15 +55,15 @@ rustPlatform.buildRustPackage rec {
# Desktop icon.
install -Dm644 -t $out/share/applications $src/debian/ar.com.mbernardi.noaa-apt.desktop
install -Dm644 -t $out/share/icons/hicolor/48x48/apps $src/debian/noaa-apt.png
install -Dm644 -t $out/share/icons/hicolor/scalable/apps $src/debian/noaa-apt.svg
install -Dm644 -t $out/share/icons/hicolor/48x48/apps $src/debian/ar.com.mbernardi.noaa-apt.png
install -Dm644 -t $out/share/icons/hicolor/scalable/apps $src/debian/ar.com.mbernardi.noaa-apt.svg
'';
meta = with lib; {
description = "NOAA APT image decoder";
homepage = "https://noaa-apt.mbernardi.com.ar/";
license = licenses.gpl3Only;
maintainers = with maintainers; [ trepetti ];
maintainers = with maintainers; [ trepetti tmarkus ];
platforms = platforms.all;
changelog = "https://github.com/martinber/noaa-apt/releases/tag/v${version}";
};


@ -3,11 +3,11 @@
stdenv.mkDerivation rec {
pname = "gnuastro";
version = "0.19";
version = "0.20";
src = fetchurl {
url = "mirror://gnu/gnuastro/gnuastro-${version}.tar.gz";
sha256 = "sha256-4bPNW0sSb/J34vSOit8BA9Z/wK0Hz5o9OqfgVSlDDjU=";
sha256 = "sha256-kkuLtqwc0VFj3a3Dqb/bi4jKx7UJnV+CHs7bw/Cwac0=";
};
nativeBuildInputs = [ libtool ];


@ -1,5 +1,6 @@
{ lib, stdenv
, fetchFromGitHub
, fetchpatch
, autoconf
, bison
, bzip2
@ -7,56 +8,62 @@
, gperf
, ncurses
, perl
, python3
, readline
, zlib
}:
let
# iverilog-test has been merged to the main iverilog main source tree
# in January 2022, so it won't be longer necessary.
# For now let's fetch it from the separate repo, since 11.0 was released in 2020.
iverilog-test = fetchFromGitHub {
owner = "steveicarus";
repo = "ivtest";
rev = "a19e629a1879801ffcc6f2e6256ca435c20570f3";
sha256 = "sha256-3EkmrAXU0/mRxrxp5Hy7C3yWTVK16L+tPqqeEryY/r8=";
};
in
stdenv.mkDerivation rec {
pname = "iverilog";
version = "11.0";
version = "12.0";
src = fetchFromGitHub {
owner = "steveicarus";
repo = pname;
rev = "v${lib.replaceStrings ["."] ["_"] version}";
sha256 = "0nzcyi6l2zv9wxzsv9i963p3igyjds0n55x0ph561mc3pfbc7aqp";
hash = "sha256-J9hedSmC6mFVcoDnXBtaTXigxrSCFa2AhhFd77ueo7I=";
};
nativeBuildInputs = [ autoconf bison flex gperf ];
CC_FOR_BUILD="${stdenv.cc}/bin/cc";
CXX_FOR_BUILD="${stdenv.cc}/bin/c++";
patches = [
# NOTE(jleightcap): `-Werror=format-security` warning patched shortly after release, backport the upstream fix
(fetchpatch {
name = "format-security";
url = "https://github.com/steveicarus/iverilog/commit/23e51ef7a8e8e4ba42208936e0a6a25901f58c65.patch";
hash = "sha256-fMWfBsCl2fuXe+6AR10ytb8QpC84bXlP5RSdrqsWzEk=";
})
];
buildInputs = [ bzip2 ncurses readline zlib ];
preConfigure = "sh autoconf.sh";
enableParallelBuilding = true;
nativeInstallCheckInputs = [ perl ];
# NOTE(jleightcap): the `make check` target only runs a "Hello, World"-esque sanity check.
# the tests in the doInstallCheck phase run a full regression test suite.
# however, these tests currently fail upstream on aarch64
# (see https://github.com/steveicarus/iverilog/issues/917)
# so disable the full suite for now.
doCheck = true;
doInstallCheck = !stdenv.isAarch64;
nativeInstallCheckInputs = [
perl
(python3.withPackages (pp: with pp; [
docopt
]))
];
installCheckPhase = ''
# copy tests to allow writing results
export TESTDIR=$(mktemp -d)
cp -r ${iverilog-test}/* $TESTDIR
pushd $TESTDIR
# Run & check tests
PATH=$out/bin:$PATH perl vvp_reg.pl
# Check the tests, will error if unexpected tests fail. Some failures MIGHT be normal.
diff regression_report-devel.txt regression_report.txt
PATH=$out/bin:$PATH perl vpi_reg.pl
popd
runHook preInstallCheck
export PATH="$PATH:$out/bin"
sh .github/test.sh
runHook postInstallCheck
'';
meta = with lib; {


@ -15,16 +15,16 @@
rustPlatform.buildRustPackage rec {
pname = "i3status-rust";
version = "0.31.1";
version = "0.31.2";
src = fetchFromGitHub {
owner = "greshake";
repo = pname;
rev = "refs/tags/v${version}";
hash = "sha256-nAwAQUjoKeGaTixTdk9yIgdy4+j6t6cbvH4NpBdSyns=";
hash = "sha256-4lr2ibtBtJYXeeArBK4M35L4CUNqZcUDB+3Nm1kqp4w=";
};
cargoHash = "sha256-/Z6HKOMIhQm52MlPty8ED9QPPJcM7juDpQQKgJVozyU=";
cargoHash = "sha256-5LIXzfYSuHOdxYxfp1eMdxsqyP+3sldBCV0mgv7SRRI=";
nativeBuildInputs = [ pkg-config makeWrapper ];


@ -135,11 +135,11 @@ let
packageOverrideRepository = (callPackage ../../development/compilers/flutter/package-overrides { }) // customPackageOverrides;
productPackages = builtins.filter (package: package.kind != "dev")
(if autoDepsList
then builtins.fromJSON (builtins.readFile deps.depsListFile)
then lib.importJSON deps.depsListFile
else
if depsListFile == null
then [ ]
else builtins.fromJSON (builtins.readFile depsListFile));
else lib.importJSON depsListFile);
in
builtins.foldl'
(prev: package:


@ -89,7 +89,7 @@ rec {
};
};
mkNotoCJK = { typeface, version, rev, sha256 }:
mkNotoCJK = { typeface, version, sha256 }:
stdenvNoCC.mkDerivation {
pname = "noto-fonts-cjk-${lib.toLower typeface}";
inherit version;
@ -97,7 +97,8 @@ rec {
src = fetchFromGitHub {
owner = "googlefonts";
repo = "noto-cjk";
inherit rev sha256;
rev = "${typeface}${version}";
inherit sha256;
sparseCheckout = [ "${typeface}/Variable/OTC" ];
};
@ -154,15 +155,13 @@ rec {
noto-fonts-cjk-sans = mkNotoCJK {
typeface = "Sans";
version = "2.004";
rev = "9f7f3c38eab63e1d1fddd8d50937fe4f1eacdb1d";
sha256 = "sha256-PWpcTBnBRK87ZuRI/PsGp2UMQgCCyfkLHwvB1mOl5K0=";
sha256 = "sha256-IgalJkiOAVjNxKaPAQWfb5hKeqclliR4qVXCq63FGWY=";
};
noto-fonts-cjk-serif = mkNotoCJK {
typeface = "Serif";
version = "2.000";
rev = "9f7f3c38eab63e1d1fddd8d50937fe4f1eacdb1d";
sha256 = "sha256-1w66Ge7DZjbONGhxSz69uFhfsjMsDiDkrGl6NsoB7dY=";
version = "2.001";
sha256 = "sha256-y1103SS0qkZMhEL5+7kQZ+OBs5tRaqkqOcs4796Fzhg=";
};
noto-fonts-emoji =


@ -2,7 +2,7 @@
# and callHackage
{ lib, fetchurl }:
let
pin = builtins.fromJSON (builtins.readFile ./pin.json);
pin = lib.importJSON ./pin.json;
in
fetchurl {
inherit (pin) url sha256;


@ -75,9 +75,12 @@ let rpath = lib.makeLibraryPath [
buildType = if debugBuild then "Debug" else "Release";
in stdenv.mkDerivation rec {
name = "jcef-jetbrains";
rev = "153d40c761a25a745d7ebf0ee3a024bbc2c840b5";
commit-num = "611"; # Run `git rev-list --count HEAD`
pname = "jcef-jetbrains";
rev = "3dfde2a70f1f914c6a84ba967123a0e38f51053f";
# This is the commit number
# Currently from the 231 branch: https://github.com/JetBrains/jcef/tree/231
# Run `git rev-list --count HEAD`
version = "654";
nativeBuildInputs = [ cmake python3 jdk17 git rsync ant ninja ];
buildInputs = [ libX11 libXdamage nss nspr ];
@ -86,7 +89,7 @@ in stdenv.mkDerivation rec {
owner = "jetbrains";
repo = "jcef";
inherit rev;
hash = "sha256-Vud4nIT2c7uOK7GKKw3plf41WzKqhg+2xpIwB/LyqnE=";
hash = "sha256-g8jWzRI2uYzu8O7JHENn0u9yY08fvY6g0Uym02oYUMI=";
};
cef-bin = let
fileName = "cef_binary_104.4.26+g4180781+chromium-104.0.5112.102_linux64_minimal";
@ -116,7 +119,7 @@ in stdenv.mkDerivation rec {
-e 's|os.path.isdir(os.path.join(path, \x27.git\x27))|True|' \
-e 's|"%s rev-parse %s" % (git_exe, branch)|"echo '${rev}'"|' \
-e 's|"%s config --get remote.origin.url" % git_exe|"echo 'https://github.com/jetbrains/jcef'"|' \
-e 's|"%s rev-list --count %s" % (git_exe, branch)|"echo '${commit-num}'"|' \
-e 's|"%s rev-list --count %s" % (git_exe, branch)|"echo '${version}'"|' \
-i tools/git_util.py
cp ${clang-fmt} tools/buildtools/linux64/clang-format


@ -1,12 +1,15 @@
{ stdenv, fetchurl, perl, icu, zlib, gmp, lib, nqp, removeReferencesTo }:
{ stdenv, fetchFromGitHub, perl, icu, zlib, gmp, lib, nqp, removeReferencesTo }:
stdenv.mkDerivation rec {
pname = "rakudo";
version = "2023.02";
version = "2023.04";
src = fetchurl {
url = "https://rakudo.org/dl/rakudo/rakudo-${version}.tar.gz";
hash = "sha256-/RaGqizzLrnw630Nb5bfyJfPU8z4ntp9Iltoc4CTqhE=";
src = fetchFromGitHub {
owner = "rakudo";
repo = "rakudo";
rev = version;
hash = "sha256-m5rXriBKfp/i9AIcBGCYGfXIGBRsxgVmBbLJPXXc5AY=";
fetchSubmodules = true;
};
nativeBuildInputs = [ removeReferencesTo ];


@ -1,6 +1,6 @@
{ lib
, stdenv
, fetchurl
, fetchFromGitHub
, perl
, CoreServices
, ApplicationServices
@ -8,11 +8,14 @@
stdenv.mkDerivation rec {
pname = "moarvm";
version = "2023.02";
version = "2023.04";
src = fetchurl {
url = "https://moarvm.org/releases/MoarVM-${version}.tar.gz";
hash = "sha256-Z+IU1E1fYmeHyn8EQkBDpjkwikOnd3tvpBkmtyQODcU=";
src = fetchFromGitHub {
owner = "moarvm";
repo = "moarvm";
rev = version;
hash = "sha256-QYA4nSsrouYFaw1eju/6gNWwMcE/VeL0sNJmsTvtU3I=";
fetchSubmodules = true;
};
postPatch = ''


@ -1,12 +1,15 @@
{ stdenv, fetchurl, perl, lib, moarvm }:
{ stdenv, fetchFromGitHub, perl, lib, moarvm }:
stdenv.mkDerivation rec {
pname = "nqp";
version = "2023.02";
version = "2023.04";
src = fetchurl {
url = "https://github.com/raku/nqp/releases/download/${version}/nqp-${version}.tar.gz";
hash = "sha256-417V7ZTsMqbXMO6BW/hcX8+IqGf6xlZjaMGtSf5jtT8=";
src = fetchFromGitHub {
owner = "raku";
repo = "nqp";
rev = version;
hash = "sha256-6V9d01aacDc+770XPSbQd4m1bg7Bbe47TTNOUxc2Fpw=";
fetchSubmodules = true;
};
buildInputs = [ perl ];


@ -8,6 +8,7 @@
, zlib
, libgcrypt
, libpng
, withSystemd ? lib.meta.availableOn stdenv.hostPlatform systemd
, systemd
, Carbon
}:
@ -29,12 +30,16 @@ stdenv.mkDerivation rec {
cmake
];
cmakeFlags = [
"-DWITH_SYSTEMD=${if withSystemd then "ON" else "OFF"}"
];
buildInputs = [
libjpeg
openssl
libgcrypt
libpng
] ++ lib.optionals stdenv.isLinux [
] ++ lib.optionals withSystemd [
systemd
] ++ lib.optionals stdenv.isDarwin [
Carbon


@ -1,7 +1,7 @@
{ lib, stdenv, requireFile, avahi, obs-studio-plugins }:
let
versionJSON = builtins.fromJSON (builtins.readFile ./version.json);
versionJSON = lib.importJSON ./version.json;
in
stdenv.mkDerivation rec {
pname = "ndi";


@ -3,12 +3,12 @@
}:
stdenv.mkDerivation rec {
version = "7.0.0";
version = "7.0.1";
pname = "papi";
src = fetchurl {
url = "https://bitbucket.org/icl/papi/get/papi-${lib.replaceStrings ["."] ["-"] version}-t.tar.gz";
sha256 = "sha256-MxiOzfBxLmzsUg4jo2VHThyGE0/WYD3ZEBrq3WRjXGU=";
sha256 = "sha256-VajhmPW8sEJksfhLjBVlpBH7+AZr4fwKZPAtZxRF1Bk=";
};
setSourceRoot = ''


@ -34,7 +34,9 @@ let
in
mapAliases {
"@antora/cli" = pkgs.antora;
"@antora/cli" = pkgs.antora; # Added 2023-05-06
"@githubnext/github-copilot-cli" = pkgs.github-copilot-cli; # Added 2023-05-02
"@nestjs/cli" = pkgs.nest-cli;
"@nestjs/cli" = pkgs.nest-cli; # Added 2023-05-06
manta = pkgs.node-manta; # Added 2023-05-06
trito = pkgs.triton; # Added 2023-05-06
}


@ -214,7 +214,6 @@
, "lua-fmt"
, "lv_font_conv"
, "madoko"
, "manta"
, "markdownlint-cli"
, "markdownlint-cli2"
, "markdown-link-check"
@ -365,7 +364,6 @@
, "three"
, "tiddlywiki"
, "titanium"
, "triton"
, "tsun"
, "ts-node"
, "ttf2eot"


@ -126368,198 +126368,6 @@ in
bypassCache = true;
reconstructLock = true;
};
manta = nodeEnv.buildNodePackage {
name = "manta";
packageName = "manta";
version = "5.3.2";
src = fetchurl {
url = "https://registry.npmjs.org/manta/-/manta-5.3.2.tgz";
sha512 = "Vsgmc7hZbra1oicuHH9e5UNkcVyRJiH+Y4uvpTW3OQ60NhUAbv3V+re3ZtyN51MH3QJ9WNgkMAfR8dZ3Sv5gCw==";
};
dependencies = [
sources."ansi-regex-4.1.1"
sources."ansi-styles-3.2.1"
sources."asn1-0.2.6"
sources."assert-plus-1.0.0"
sources."backoff-2.3.0"
sources."balanced-match-1.0.2"
sources."bcrypt-pbkdf-1.0.2"
sources."block-stream-0.0.9"
sources."brace-expansion-1.1.11"
sources."bunyan-1.8.15"
sources."camelcase-5.3.1"
sources."cliui-5.0.0"
sources."clone-0.1.19"
sources."cmdln-4.1.2"
sources."color-convert-1.9.3"
sources."color-name-1.1.3"
sources."concat-map-0.0.1"
sources."core-util-is-1.0.2"
sources."dashdash-1.14.1"
sources."decamelize-1.2.0"
sources."dtrace-provider-0.8.8"
sources."ecc-jsbn-0.1.2"
sources."emoji-regex-7.0.3"
sources."extsprintf-1.4.1"
sources."fast-safe-stringify-1.2.3"
sources."find-up-3.0.0"
sources."fstream-1.0.12"
sources."fuzzyset.js-0.0.1"
sources."get-caller-file-2.0.5"
sources."getpass-0.1.7"
sources."glob-6.0.4"
sources."graceful-fs-4.2.11"
sources."hogan.js-2.0.0"
(sources."http-signature-1.3.6" // {
dependencies = [
sources."extsprintf-1.3.0"
sources."jsprim-2.0.2"
sources."verror-1.10.0"
];
})
sources."inflight-1.0.6"
sources."inherits-2.0.4"
sources."is-fullwidth-code-point-2.0.0"
sources."isarray-0.0.1"
sources."jsbn-0.1.1"
sources."json-schema-0.4.0"
(sources."jsprim-1.4.2" // {
dependencies = [
sources."extsprintf-1.3.0"
sources."verror-1.10.0"
];
})
sources."keep-alive-agent-0.0.1"
sources."locate-path-3.0.0"
sources."lodash-4.17.21"
(sources."lomstream-1.1.1" // {
dependencies = [
sources."assert-plus-0.1.5"
sources."extsprintf-1.3.0"
];
})
sources."lru-cache-4.1.5"
sources."lstream-0.0.4"
sources."mime-2.4.7"
sources."minimatch-3.1.2"
sources."minimist-1.2.8"
sources."mkdirp-0.5.6"
sources."moment-2.29.4"
(sources."mooremachine-2.3.0" // {
dependencies = [
sources."assert-plus-0.2.0"
];
})
sources."mv-2.1.1"
sources."nan-2.17.0"
sources."ncp-2.0.0"
sources."once-1.4.0"
sources."p-limit-2.3.0"
sources."p-locate-3.0.0"
sources."p-try-2.2.0"
sources."path-exists-3.0.0"
sources."path-is-absolute-1.0.1"
sources."path-platform-0.0.1"
sources."precond-0.2.3"
sources."process-nextick-args-2.0.1"
(sources."progbar-1.2.1" // {
dependencies = [
sources."readable-stream-1.0.34"
];
})
sources."pseudomap-1.0.2"
sources."readable-stream-1.1.14"
sources."require-directory-2.1.1"
sources."require-main-filename-2.0.0"
(sources."restify-clients-1.6.0" // {
dependencies = [
sources."backoff-2.5.0"
sources."mime-1.6.0"
sources."uuid-3.4.0"
];
})
(sources."restify-errors-3.1.0" // {
dependencies = [
sources."assert-plus-0.2.0"
sources."lodash-3.10.1"
];
})
sources."rimraf-2.4.5"
sources."safe-buffer-5.2.1"
sources."safe-json-stringify-1.2.0"
sources."safer-buffer-2.1.2"
sources."semver-5.7.1"
sources."set-blocking-2.0.0"
sources."showdown-1.9.1"
(sources."smartdc-auth-2.5.9" // {
dependencies = [
sources."bunyan-1.8.12"
sources."clone-0.1.5"
(sources."dashdash-1.10.1" // {
dependencies = [
sources."assert-plus-0.1.5"
];
})
sources."once-1.3.0"
sources."vasync-2.2.1"
sources."verror-1.10.0"
];
})
sources."sshpk-1.17.0"
(sources."sshpk-agent-1.8.1" // {
dependencies = [
sources."isarray-1.0.0"
sources."readable-stream-2.3.8"
sources."safe-buffer-5.1.2"
sources."sshpk-1.16.1"
sources."string_decoder-1.1.1"
];
})
sources."string-width-3.1.0"
sources."string_decoder-0.10.31"
sources."strip-ansi-5.2.0"
sources."strsplit-1.0.0"
sources."tar-2.2.2"
sources."tunnel-agent-0.6.0"
sources."tweetnacl-0.14.5"
sources."util-deprecate-1.0.2"
sources."uuid-2.0.3"
(sources."vasync-1.6.4" // {
dependencies = [
sources."extsprintf-1.2.0"
sources."verror-1.6.0"
];
})
sources."verror-1.10.1"
(sources."vstream-0.1.0" // {
dependencies = [
sources."assert-plus-0.1.5"
sources."extsprintf-1.2.0"
];
})
(sources."watershed-0.3.4" // {
dependencies = [
sources."readable-stream-1.0.2"
];
})
sources."which-module-2.0.1"
sources."wrap-ansi-5.1.0"
sources."wrappy-1.0.2"
sources."y18n-4.0.3"
sources."yallist-2.1.2"
sources."yargs-14.2.3"
sources."yargs-parser-15.0.3"
];
buildInputs = globalBuildInputs;
meta = {
description = "Manta Client API";
homepage = "http://apidocs.tritondatacenter.com/manta";
license = "MIT";
};
production = true;
bypassCache = true;
reconstructLock = true;
};
markdownlint-cli = nodeEnv.buildNodePackage {
name = "markdownlint-cli";
packageName = "markdownlint-cli";
@ -149660,208 +149468,6 @@ in
bypassCache = true;
reconstructLock = true;
};
triton = nodeEnv.buildNodePackage {
name = "triton";
packageName = "triton";
version = "7.15.4";
src = fetchurl {
url = "https://registry.npmjs.org/triton/-/triton-7.15.4.tgz";
sha512 = "xGR0oMmwiP4eiCGn4kLN5TWi8Dh+hMrLQ30KJQy7gRf9uhcBX3bQXTeuWVC9Yh8WUuHKJ2Wdgii88JZ4hIIUHw==";
};
dependencies = [
sources."asn1-0.2.6"
sources."assert-plus-0.2.0"
sources."backoff-2.4.1"
sources."balanced-match-1.0.2"
sources."bcrypt-pbkdf-1.0.2"
sources."bigspinner-3.1.0"
sources."brace-expansion-1.1.11"
sources."bunyan-1.8.12"
sources."clone-0.1.5"
(sources."cmdln-4.1.2" // {
dependencies = [
sources."assert-plus-1.0.0"
sources."extsprintf-1.4.1"
];
})
sources."concat-map-0.0.1"
sources."core-util-is-1.0.3"
(sources."dashdash-1.14.1" // {
dependencies = [
sources."assert-plus-1.0.0"
];
})
sources."dtrace-provider-0.8.8"
sources."ecc-jsbn-0.1.2"
sources."extsprintf-1.0.2"
sources."fast-safe-stringify-1.2.3"
sources."fuzzyset.js-0.0.1"
(sources."getpass-0.1.6" // {
dependencies = [
sources."assert-plus-1.0.0"
];
})
sources."glob-5.0.15"
(sources."http-signature-1.3.6" // {
dependencies = [
sources."assert-plus-1.0.0"
sources."extsprintf-1.3.0"
sources."json-schema-0.4.0"
sources."jsprim-2.0.2"
];
})
sources."inflight-1.0.6"
sources."inherits-2.0.4"
sources."is-absolute-0.1.7"
sources."is-relative-0.1.3"
sources."isarray-1.0.0"
sources."isexe-1.1.2"
sources."jsbn-0.1.1"
sources."json-schema-0.2.3"
(sources."jsprim-1.4.0" // {
dependencies = [
sources."assert-plus-1.0.0"
sources."verror-1.3.6"
];
})
sources."keep-alive-agent-0.0.1"
sources."lodash-4.17.21"
(sources."lomstream-1.1.0" // {
dependencies = [
sources."assert-plus-0.1.5"
sources."extsprintf-1.3.0"
];
})
sources."lru-cache-4.1.5"
sources."lstream-0.0.4"
sources."mime-1.6.0"
sources."minimatch-3.1.2"
sources."minimist-0.0.8"
sources."mkdirp-0.5.1"
sources."moment-2.29.4"
sources."mooremachine-2.3.0"
sources."mute-stream-0.0.8"
sources."mv-2.1.1"
sources."nan-2.17.0"
sources."ncp-2.0.0"
sources."once-1.3.2"
sources."path-is-absolute-1.0.1"
sources."precond-0.2.3"
sources."process-nextick-args-2.0.1"
sources."pseudomap-1.0.2"
sources."read-1.0.7"
(sources."readable-stream-2.3.8" // {
dependencies = [
sources."safe-buffer-5.1.2"
];
})
(sources."restify-clients-1.5.2" // {
dependencies = [
sources."assert-plus-1.0.0"
(sources."restify-errors-3.1.0" // {
dependencies = [
sources."assert-plus-0.2.0"
sources."lodash-3.10.1"
];
})
];
})
(sources."restify-errors-3.0.0" // {
dependencies = [
sources."assert-plus-0.1.5"
sources."lodash-3.10.1"
];
})
sources."rimraf-2.4.4"
sources."safe-buffer-5.2.1"
sources."safe-json-stringify-1.2.0"
sources."safer-buffer-2.1.2"
sources."semver-5.1.0"
(sources."smartdc-auth-2.5.7" // {
dependencies = [
sources."assert-plus-1.0.0"
(sources."dashdash-1.10.1" // {
dependencies = [
sources."assert-plus-0.1.5"
];
})
sources."extsprintf-1.0.0"
sources."json-schema-0.2.2"
(sources."jsprim-0.3.0" // {
dependencies = [
sources."verror-1.3.3"
];
})
sources."once-1.3.0"
sources."vasync-1.4.3"
sources."verror-1.1.0"
];
})
(sources."sshpk-1.17.0" // {
dependencies = [
sources."assert-plus-1.0.0"
];
})
(sources."sshpk-agent-1.7.0" // {
dependencies = [
sources."assert-plus-1.0.0"
sources."sshpk-1.14.2"
];
})
(sources."string_decoder-1.1.1" // {
dependencies = [
sources."safe-buffer-5.1.2"
];
})
sources."strsplit-1.0.0"
(sources."tabula-1.10.0" // {
dependencies = [
sources."assert-plus-1.0.0"
];
})
sources."tunnel-agent-0.6.0"
sources."tweetnacl-0.14.5"
sources."util-deprecate-1.0.2"
sources."uuid-3.4.0"
(sources."vasync-1.6.3" // {
dependencies = [
sources."extsprintf-1.2.0"
sources."verror-1.6.0"
];
})
(sources."verror-1.10.0" // {
dependencies = [
sources."assert-plus-1.0.0"
sources."core-util-is-1.0.2"
sources."extsprintf-1.4.1"
];
})
(sources."vstream-0.1.0" // {
dependencies = [
sources."assert-plus-0.1.5"
sources."extsprintf-1.2.0"
];
})
(sources."watershed-0.3.4" // {
dependencies = [
sources."readable-stream-1.0.2"
];
})
sources."which-1.2.4"
sources."wordwrap-1.0.0"
sources."wrappy-1.0.2"
sources."yallist-2.1.2"
];
buildInputs = globalBuildInputs;
meta = {
description = "Triton CLI and client (https://www.tritondatacenter.com/)";
homepage = "https://github.com/TritonDataCenter/node-triton";
license = "MPL-2.0";
};
production = true;
bypassCache = true;
reconstructLock = true;
};
tsun = nodeEnv.buildNodePackage {
name = "tsun";
packageName = "tsun";

View file

@ -280,20 +280,6 @@ final: prev: {
'';
};
manta = prev.manta.override ( oldAttrs: {
nativeBuildInputs = with pkgs; [ nodejs_14 installShellFiles ];
postInstall = ''
# create completions, following upstream procedure https://github.com/joyent/node-manta/blob/v5.2.3/Makefile#L85-L91
completion_cmds=$(find ./bin -type f -printf "%f\n")
node ./lib/create_client.js
for cmd in $completion_cmds; do
installShellCompletion --cmd $cmd --bash <(./bin/$cmd --completion)
done
'';
meta = oldAttrs.meta // { maintainers = with lib.maintainers; [ teutat3s ]; };
});
mermaid-cli = prev."@mermaid-js/mermaid-cli".override (
if stdenv.isDarwin
then {}
@ -560,14 +546,6 @@ final: prev: {
'';
};
triton = prev.triton.override (oldAttrs: {
nativeBuildInputs = [ pkgs.installShellFiles ];
postInstall = ''
installShellCompletion --cmd triton --bash <($out/bin/triton completion)
'';
meta = oldAttrs.meta // { maintainers = with lib.maintainers; [ teutat3s ]; };
});
ts-node = prev.ts-node.override {
nativeBuildInputs = [ pkgs.buildPackages.makeWrapper ];
postInstall = ''

View file

@ -0,0 +1,45 @@
{ lib
, babel
, buildPythonPackage
, fetchFromGitHub
, pygments
, python3Packages
, setuptools-scm
}:
buildPythonPackage rec {
pname = "colout";
version = "0.12.0";
src = fetchFromGitHub {
owner = "nojhan";
repo = pname;
rev = "v${version}";
sha256 = "sha256-5ETKNo3KfncnnLTClA6BnQA7SN5KwwsLdQoozI9li7I=";
};
nativeBuildInputs = [
babel
pygments
setuptools-scm
];
SETUPTOOLS_SCM_PRETEND_VERSION = version;
propagatedBuildInputs = [
babel
pygments
];
pythonImportsCheck = [ "colout" ];
  # This project does not have unit tests
doCheck = false;
meta = with lib; {
description = "Color Up Arbitrary Command Output";
homepage = "https://github.com/nojhan/colout";
license = licenses.gpl3;
maintainers = with maintainers; [ badele ];
};
}

View file

@ -1,9 +1,11 @@
{ lib
, buildPythonPackage
, pythonAtLeast
, fetchpatch
, fetchFromGitHub
, python
, django
, packaging
, nodejs
, js2py
, six
@ -11,26 +13,19 @@
buildPythonPackage rec {
pname = "django-js-reverse";
# Support for Django 4.0 not yet released
version = "unstable-2022-09-16";
version = "0.10.1-b1";
src = fetchFromGitHub {
owner = "ierror";
owner = "BITSOLVER";
repo = "django-js-reverse";
rev = "7cab78c4531780ab4b32033d5104ccd5be1a246a";
hash = "sha256-oA4R5MciDMcSsb+GAgWB5jhj+nl7E8t69u0qlx2G93E=";
rev = version;
hash = "sha256-i78UsxVwxyDAc8LrOVEXLG0tdidoQhvUx7GvPDaH0KY=";
};
patches = [
(fetchpatch {
name = "fix-requires_system_checks-list-or-tuple";
url = "https://github.com/ierror/django-js-reverse/commit/1477ba44b62c419d12ebec86e56973f1ae56f712.patch";
hash = "sha256-xUtCziewVhnCOaNWddJBH4/Vvhwjjq/wcQDvh2YzWMQ=";
})
];
propagatedBuildInputs = [
django
] ++ lib.optionals (pythonAtLeast "3.7") [
packaging
];
nativeCheckInputs = [

View file

@ -8,14 +8,14 @@
buildPythonPackage rec {
pname = "onvif-zeep-async";
version = "2.1.1";
version = "2.1.4";
format = "setuptools";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
hash = "sha256-y4o3zsLacbOVLZpa3mljdXuzVEGRzkc+Be6pt+UMLrA=";
hash = "sha256-F8NqdEYz38mWSfOQ9oIjQccaGkON8skqm+ItQD71CPo=";
};
propagatedBuildInputs = [

View file

@ -18,13 +18,13 @@
}:
buildPythonPackage rec {
version = "2.0.4";
version = "2.0.6";
pname = "pyglet";
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
hash = "sha256-+JGAjBv2XHzFExsLJrBH6uXPN8fiUycJZKxLLwFHdPI=";
hash = "sha256-b5PyvebfgYCH4bXZEDMIbLL7aJwSILYyxG0fxKZoWgA=";
extension = "zip";
};

View file

@ -8,14 +8,14 @@
buildPythonPackage rec {
pname = "pyphen";
version = "0.13.2";
version = "0.14.0";
format = "pyproject";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
hash = "sha256-hH9XoEOlhAjyRnCuAYT/bt+1/VcxdDIIIowCjdxRRDg=";
hash = "sha256-WWyLO+HBpwQRul9lF9nM/jCDx1iuK5SkXycHNG2OZvo=";
};
nativeBuildInputs = [

View file

@ -5,18 +5,24 @@
, pytest
, packaging
, pytestCheckHook
, pythonOlder
}:
buildPythonPackage rec {
pname = "pytest-sugar";
version = "0.9.6";
version = "0.9.7";
format = "setuptools";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
hash = "sha256-xHk0lfPDLhFPD1QWKQlGwxbrlq1aNoTc2t2pJn5Zsrg=";
hash = "sha256-8edMGr+lX3JBz3CIAytuN4Vm8WuTjz8IkF4s9ElO3UY=";
};
buildInputs = [ pytest ];
buildInputs = [
pytest
];
propagatedBuildInputs = [
termcolor
@ -28,9 +34,10 @@ buildPythonPackage rec {
];
meta = with lib; {
description = "A plugin that changes the default look and feel of py.test";
description = "A plugin that changes the default look and feel of pytest";
homepage = "https://github.com/Frozenball/pytest-sugar";
changelog = "https://github.com/Teemu/pytest-sugar/releases/tag/v${version}";
license = licenses.bsd3;
maintainers = [ maintainers.costrouc ];
maintainers = with maintainers; [ costrouc ];
};
}

View file

@ -13,14 +13,14 @@
buildPythonPackage rec {
pname = "recipe-scrapers";
version = "14.32.1";
version = "14.36.1";
format = "pyproject";
src = fetchFromGitHub {
owner = "hhursev";
repo = "recipe-scrapers";
rev = "refs/tags/${version}";
hash = "sha256-6iUagD1PTTAraBHOWLjHiLFFsImO30w84p+6IcIv52c=";
hash = "sha256-JadtlJMxRib8FpNC4QGYXfUEJGyB1aniDbsbsBYU3no=";
};
nativeBuildInputs = [

View file

@ -14,7 +14,7 @@
buildPythonPackage rec {
pname = "tweepy";
version = "4.13.0";
version = "4.14.0";
format = "setuptools";
disabled = pythonOlder "3.7";
@ -23,7 +23,7 @@ buildPythonPackage rec {
owner = pname;
repo = pname;
rev = "refs/tags/v${version}";
hash = "sha256-47TXKZLS2j+YdCYt2cJbdzsVOHeRzGYqUNyOOKIgXkc=";
hash = "sha256-ugqa85l0eWVtMUl5d+BjEWvTyH8c5NVtsnPflkHTWh8=";
};
propagatedBuildInputs = [

View file

@ -6,12 +6,13 @@
, pytestCheckHook
, pythonOlder
, requests
, setuptools
}:
buildPythonPackage rec {
pname = "whitenoise";
version = "6.2.0";
format = "setuptools";
version = "6.4.0";
format = "pyproject";
disabled = pythonOlder "3.7";
@ -20,10 +21,14 @@ buildPythonPackage rec {
src = fetchFromGitHub {
owner = "evansd";
repo = pname;
rev = version;
hash = "sha256-HcWWWMIuU8kfcOnntgXUnHD3pFogq8OEAd3wRtCnXjQ=";
rev = "refs/tags/${version}";
hash = "sha256-ouEoqMcNh3Vwahwaq6bGQuVUFViVN14CDJosDXC5ozI=";
};
nativeBuildInputs = [
setuptools
];
propagatedBuildInputs = [
brotli
];
@ -51,8 +56,9 @@ buildPythonPackage rec {
];
meta = with lib; {
description = "Radically simplified static file serving for WSGI applications";
description = "Library to serve static file for WSGI applications";
homepage = "https://whitenoise.evans.io/";
changelog = "https://github.com/evansd/whitenoise/blob/${version}/docs/changelog.rst";
license = licenses.mit;
maintainers = with maintainers; [ ];
};

View file

@ -12,7 +12,7 @@
let
pname = "scala-cli";
sources = builtins.fromJSON (builtins.readFile ./sources.json);
sources = lib.importJSON ./sources.json;
inherit (sources) version assets;
platforms = builtins.attrNames assets;

View file

@ -2,12 +2,12 @@
stdenv.mkDerivation rec {
pname = "opengrok";
version = "1.12.3";
version = "1.12.4";
# binary distribution
src = fetchurl {
url = "https://github.com/oracle/opengrok/releases/download/${version}/${pname}-${version}.tar.gz";
hash = "sha256-GHSsfsEhBYeUbSKZfve3O2Z+bL3e7dqpl4sQKrQgWDE=";
hash = "sha256-pUHNLiZng8lIO+UFP6r6dfwPI6m8RRuuW2wS1pJXZmQ=";
};
nativeBuildInputs = [ makeWrapper ];

View file

@ -15,16 +15,16 @@ let
in
rustPlatform.buildRustPackage rec {
pname = "texlab";
version = "5.5.0";
version = "5.5.1";
src = fetchFromGitHub {
owner = "latex-lsp";
repo = "texlab";
rev = "refs/tags/v${version}";
hash = "sha256-xff6Wj1ZYn3jGrj/snr4ATabLUmL1Jw2LjsjpoG3ZjI=";
hash = "sha256-8m7GTD4EX7mWe1bYPuz+4g7FaPuW8++Y/fpIRsdxo6g=";
};
cargoHash = "sha256-gEwsnVXY84mTO+JZvcI7EEYCOnVFM07m4VvcWI6zFT0=";
cargoHash = "sha256-dcKVhHYODTFw46o3wM8EH0IpT6DkUfOHvdDmbMQmsX0=";
outputs = [ "out" ] ++ lib.optional (!isCross) "man";
@ -41,7 +41,7 @@ rustPlatform.buildRustPackage rec {
# generate the man page
postInstall = lib.optionalString (!isCross) ''
# TexLab builds man page separately in CI:
# https://github.com/latex-lsp/texlab/blob/v5.5.0/.github/workflows/publish.yml#L127-L131
# https://github.com/latex-lsp/texlab/blob/v5.5.1/.github/workflows/publish.yml#L127-L131
help2man --no-info "$out/bin/texlab" > texlab.1
installManPage texlab.1
'';

View file

@ -6,7 +6,7 @@
}:
let
source = builtins.fromJSON (builtins.readFile ./source.json);
source = lib.importJSON ./source.json;
in
buildNpmPackage {
pname = "mongosh";

View file

@ -17,7 +17,7 @@ let
toPluginAble = (import ./plugins.nix { inherit pkgs lib; }).toPluginAble;
# List of known build systems that are passed through from nixpkgs unmodified
knownBuildSystems = builtins.fromJSON (builtins.readFile ./known-build-systems.json);
knownBuildSystems = lib.importJSON ./known-build-systems.json;
nixpkgsBuildSystems = lib.subtractLists [ "poetry" "poetry-core" ] knownBuildSystems;
mkInputAttrs =

View file

@ -19,7 +19,7 @@ in rec {
# Make packaging helpers from swiftpm2nix generated output.
helpers = generated: let
inherit (import generated) workspaceStateFile hashes;
workspaceState = builtins.fromJSON (builtins.readFile workspaceStateFile);
workspaceState = lib.importJSON workspaceStateFile;
pinFile = mkPinFile workspaceState;
in rec {

View file

@ -1,6 +1,6 @@
{ stdenv, lib, fetchurl, unzip }:
let
sources = builtins.fromJSON (builtins.readFile ./sources.json);
sources = lib.importJSON ./sources.json;
platform =
if (builtins.hasAttr stdenv.hostPlatform.system sources.platforms) then
builtins.getAttr (stdenv.hostPlatform.system) sources.platforms

View file

@ -2,21 +2,28 @@
buildGoModule rec {
pname = "xc";
version = "0.4.0";
version = "0.4.1";
src = fetchFromGitHub {
owner = "joerdav";
repo = pname;
rev = "v${version}";
sha256 = "sha256-pKsttrdXZQnWgJocGtyk7+qze1dpmZTclsUhwun6n8E=";
sha256 = "sha256-Dc7MVn9hF2HtXqMvWQ5UsLQW5ZKcFKt7AHcXdiWDs1I=";
};
vendorHash = "sha256-hCdIO377LiXFKz0GfCmAADTPfoatk8YWzki7lVP3yLw=";
ldflags = [
"-s"
"-w"
"-X=main.version=${version}"
];
meta = with lib; {
homepage = "https://xcfile.dev/";
description = "Markdown defined task runner";
homepage = "https://xcfile.dev/";
changelog = "https://github.com/joerdav/xc/releases/tag/${src.rev}";
license = licenses.mit;
maintainers = with maintainers; [ joerdav ];
maintainers = with maintainers; [ figsoda joerdav ];
};
}

View file

@ -16,12 +16,12 @@ let
];
in stdenv.mkDerivation rec {
pname = "insomnia";
version = "2022.7.5";
version = "2023.2.0";
src = fetchurl {
url =
"https://github.com/Kong/insomnia/releases/download/core%40${version}/Insomnia.Core-${version}.deb";
sha256 = "sha256-BJAiDv+Zg+wU6ovAkuMVTGN9WElOlC96m/GEYrg6exE=";
sha256 = "sha256-RI7i/yfGfwmube3Utuidw9Y3OqC+5htsyx1Vi1730WQ=";
};
nativeBuildInputs = [

View file

@ -55,7 +55,7 @@ let
name = null; # Appimage sets it to "appimage-env"
# Dependencies of anki
targetPkgs = pkgs: (with pkgs; [ xorg.libxkbfile krb5 ]);
targetPkgs = pkgs: (with pkgs; [ xorg.libxkbfile xcb-util-cursor-HEAD krb5 ]);
runScript = writeShellScript "anki-wrapper.sh" ''
exec ${unpacked}/bin/anki ${ lib.strings.escapeShellArgs commandLineArgs }

View file

@ -12,7 +12,7 @@ let
  # * update lsquic and boringssl if necessary, lsquic.cr depends on
# the same version of lsquic and lsquic requires the boringssl
# commit mentioned in its README
versions = builtins.fromJSON (builtins.readFile ./versions.json);
versions = lib.importJSON ./versions.json;
in
crystal.buildCrystalPackage rec {
pname = "invidious";

View file

@ -1,6 +1,6 @@
{ lib, boringssl, stdenv, fetchgit, fetchFromGitHub, fetchurl, cmake, zlib, perl, libevent }:
let
versions = builtins.fromJSON (builtins.readFile ./versions.json);
versions = lib.importJSON ./versions.json;
fetchGitilesPatch = { name, url, sha256 }:
fetchurl {

View file

@ -1,7 +1,7 @@
{ stdenvNoCC, cacert, crystal, openssl, pkg-config, invidious }:
{ lib, stdenvNoCC, cacert, crystal, openssl, pkg-config, invidious }:
let
versions = builtins.fromJSON (builtins.readFile ./versions.json);
versions = lib.importJSON ./versions.json;
in
stdenvNoCC.mkDerivation {
name = "videojs";

View file

@ -1,7 +1,7 @@
{ callPackage, ... } @ args:
{ callPackage, lib, ... } @ args:
callPackage ./generic.nix (
args
// builtins.fromJSON (builtins.readFile ./3.0.json)
// lib.importJSON ./3.0.json
// {
generation = "3_0";
})

View file

@ -1,7 +1,7 @@
{ callPackage, ... } @ args:
{ callPackage, lib, ... } @ args:
callPackage ./generic.nix (
args
// builtins.fromJSON (builtins.readFile ./3.11.json)
// lib.importJSON ./3.11.json
// {
generation = "3_11";
})

View file

@ -1,8 +1,8 @@
# GENERATED BY update.sh
{ callPackage, ... } @ args:
{ callPackage, lib, ... } @ args:
callPackage ./generic.nix (
args
// builtins.fromJSON (builtins.readFile ./4.json)
// lib.importJSON ./4.json
// {
generation = "4";
})

View file

@ -8,17 +8,17 @@
rustPlatform.buildRustPackage rec {
pname = "oxigraph";
version = "0.3.14";
version = "0.3.16";
src = fetchFromGitHub {
owner = pname;
repo = pname;
rev = "v${version}";
sha256 = "sha256-BiPK0eFlKtGQ7m0LaVeeXp5gz+C1UW0JK7L8Nc21rKU=";
sha256 = "sha256-sE+HeXg6JovLXhFFJhgsASfObvzgeYX+MjwI5b7G0gI=";
fetchSubmodules = true;
};
cargoHash = "sha256-VGXnUdJDbD8bGdXRbIkC4wgAmSUK2gh/XEYLaPfaaLg=";
cargoHash = "sha256-7ykVKPjCFwpLqdPiWlxO/rBofgbfv+U3aM50RhzjGVY=";
nativeBuildInputs = [
rustPlatform.bindgenHook

View file

@ -10,23 +10,23 @@ stdenv.mkDerivation {
NIX_DEBUG=1 $CC -v
NIX_DEBUG=1 $CXX -v
printf "checking whether compiler builds valid C binaries... " >&2
printf "checking whether compiler builds valid C binaries...\n " >&2
$CC -o cc-check ${./cc-main.c}
./cc-check
printf "checking whether compiler builds valid 32bit C binaries... " >&2
printf "checking whether compiler builds valid 32bit C binaries...\n " >&2
$CC -m32 -o c32-check ${./cc-main.c}
./c32-check
printf "checking whether compiler builds valid 64bit C binaries... " >&2
printf "checking whether compiler builds valid 64bit C binaries...\n " >&2
$CC -m64 -o c64-check ${./cc-main.c}
./c64-check
printf "checking whether compiler builds valid 32bit C++ binaries... " >&2
printf "checking whether compiler builds valid 32bit C++ binaries...\n " >&2
$CXX -m32 -o cxx32-check ${./cxx-main.cc}
./cxx32-check
printf "checking whether compiler builds valid 64bit C++ binaries... " >&2
printf "checking whether compiler builds valid 64bit C++ binaries...\n " >&2
$CXX -m64 -o cxx64-check ${./cxx-main.cc}
./cxx64-check

View file

@ -53,7 +53,6 @@ with pkgs;
pkg-config = recurseIntoAttrs (callPackage ../top-level/pkg-config/tests.nix { });
rustCustomSysroot = callPackage ./rust-sysroot {};
buildRustCrate = callPackage ../build-support/rust/build-rust-crate/test { };
importCargoLock = callPackage ../build-support/rust/test/import-cargo-lock { };

View file

@ -1,60 +0,0 @@
{ lib, rust, rustPlatform, fetchFromGitHub }:
let
mkBlogOsTest = target: rustPlatform.buildRustPackage rec {
name = "blog_os-sysroot-test";
src = fetchFromGitHub {
owner = "phil-opp";
repo = "blog_os";
rev = "4e38e7ddf8dd021c3cd7e4609dfa01afb827797b";
sha256 = "0k9ipm9ddm1bad7bs7368wzzp6xwrhyfzfpckdax54l4ffqwljcg";
};
cargoSha256 = "1x8iwgy1irgfkv2yjkxm6479nwbrk82b0c80jm7y4kw0s32r01lg";
inherit target;
RUSTFLAGS = "-C link-arg=-nostartfiles";
# Tests don't work for `no_std`. See https://os.phil-opp.com/testing/
doCheck = false;
meta = with lib; {
description = "Test for using custom sysroots with buildRustPackage";
maintainers = with maintainers; [ aaronjanse ];
platforms = lib.platforms.x86_64;
};
};
# The book uses rust-lld for linking, but rust-lld is not currently packaged for NixOS.
# The justification in the book for using rust-lld suggests that gcc can still be used for testing:
# > Instead of using the platform's default linker (which might not support Linux targets),
# > we use the cross platform LLD linker that is shipped with Rust for linking our kernel.
# https://github.com/phil-opp/blog_os/blame/7212ffaa8383122b1eb07fe1854814f99d2e1af4/blog/content/second-edition/posts/02-minimal-rust-kernel/index.md#L157
targetContents = {
"llvm-target" = "x86_64-unknown-none";
"data-layout" = "e-m:e-i64:64-f80:128-n8:16:32:64-S128";
"arch" = "x86_64";
"target-endian" = "little";
"target-pointer-width" = "64";
"target-c-int-width" = "32";
"os" = "none";
"executables" = true;
"linker-flavor" = "gcc";
"panic-strategy" = "abort";
"disable-redzone" = true;
"features" = "-mmx,-sse,+soft-float";
};
in {
blogOS-targetByFile = mkBlogOsTest (builtins.toFile "x86_64-blog_os.json" (builtins.toJSON targetContents));
blogOS-targetByNix = let
plat = lib.systems.elaborate { config = "x86_64-none"; } // {
rustc = {
config = "x86_64-blog_os";
platform = targetContents;
};
};
in mkBlogOsTest (rust.toRustTargetSpec plat);
}

View file

@ -38,7 +38,7 @@ let
# A description of all available google-cloud-sdk components.
# It's a JSON file with a list of components, along with some metadata
snapshot = builtins.fromJSON (builtins.readFile snapshotPath);
snapshot = lib.importJSON snapshotPath;
# Generate a snapshot file for a single component. It has the same format as
# `snapshot`, but only contains a single component. These files are

View file

@ -0,0 +1,65 @@
{ lib
, buildNpmPackage
, fetchurl
, nodejs
, installShellFiles
, testers
, node-manta
}:
let
source = lib.importJSON ./source.json;
in
buildNpmPackage rec {
pname = "manta";
inherit (source) version;
src = fetchurl {
url = "https://registry.npmjs.org/${pname}/-/${source.filename}";
hash = source.integrity;
};
npmDepsHash = source.deps;
dontBuild = true;
nativeBuildInputs = [ nodejs installShellFiles ];
postPatch = ''
# Use generated package-lock.json as upstream does not provide one
ln -s ${./package-lock.json} package-lock.json
'';
postInstall = ''
ln -s ./lib/node_modules/manta/bin $out/bin
'';
postFixup = ''
# create completions, following upstream procedure https://github.com/joyent/node-manta/blob/v5.3.2/Makefile#L85-L91
cmds=$(find ./bin/ -type f -printf "%f\n")
node $out/lib/node_modules/manta/lib/create_client.js
for cmd in $cmds; do
installShellCompletion --cmd $cmd --bash <($out/bin/$cmd --completion)
# Strip timestamp from generated bash completion
sed -i '/Bash completion generated.*/d' $out/share/bash-completion/completions/$cmd.bash
done
'';
passthru = {
tests.version = testers.testVersion {
package = node-manta;
};
updateScript = ./update.sh;
};
meta = with lib; {
description = "Manta Object-Storage Client CLIs and Node.js SDK";
homepage = "https://github.com/TritonDataCenter/node-manta";
license = licenses.mit;
maintainers = with maintainers; [ teutat3s ];
mainProgram = "mls";
};
}

pkgs/tools/admin/manta/package-lock.json generated Normal file

File diff suppressed because it is too large

View file

@ -0,0 +1,6 @@
{
"version": "5.3.2",
"integrity": "sha512-Vsgmc7hZbra1oicuHH9e5UNkcVyRJiH+Y4uvpTW3OQ60NhUAbv3V+re3ZtyN51MH3QJ9WNgkMAfR8dZ3Sv5gCw==",
"filename": "manta-5.3.2.tgz",
"deps": "sha256-npoCp4PSgv1gK6PziQZINkHUfqxTu8sBbYR/HRu98KA="
}

View file

@ -0,0 +1,25 @@
#!/usr/bin/env nix-shell
#! nix-shell -i bash -p nodejs libarchive prefetch-npm-deps moreutils
# shellcheck shell=bash
set -exuo pipefail
cd -- "$(dirname -- "${BASH_SOURCE[0]}")"
TMPDIR="$(mktemp -d)"
trap 'rm -r -- "$TMPDIR"' EXIT
pushd -- "$TMPDIR"
# Fetch metadata for the latest manta release from the npm registry
npm pack manta --json | jq '.[0] | { version, integrity, filename }' > source.json
# Unpack the tarball and generate a package-lock.json (upstream does not ship one)
bsdtar -x -f "$(jq -r .filename source.json)"
pushd package
npm install --package-lock-only
popd
# Compute the fixed-output hash of the npm dependencies (consumed as npmDepsHash via source.deps)
DEPS="$(prefetch-npm-deps package/package-lock.json)"
jq ".deps = \"$DEPS\"" source.json | sponge source.json
popd
cp -t . -- "$TMPDIR/source.json" "$TMPDIR/package/package-lock.json"

View file

@ -0,0 +1,44 @@
{ lib
, buildNpmPackage
, fetchFromGitHub
, installShellFiles
, testers
, triton
}:
buildNpmPackage rec {
pname = "triton";
version = "7.15.4";
src = fetchFromGitHub {
owner = "TritonDataCenter";
repo = "node-triton";
rev = version;
hash = "sha256-RjYJT8Iw9JZzvd2d9zh2CS27qUx12nDi12k+YuTh7tk=";
};
npmDepsHash = "sha256-2ZTTgJ4LzmlfFoNNNPrrmna5pbREshdw5x9w5N7nasc=";
dontBuild = true;
nativeBuildInputs = [ installShellFiles ];
postInstall = ''
installShellCompletion --cmd triton --bash <($out/bin/triton completion)
# Strip timestamp from generated bash completion
sed -i '/Bash completion generated.*/d' $out/share/bash-completion/completions/triton.bash
'';
passthru = {
tests.version = testers.testVersion {
package = triton;
};
};
meta = with lib; {
description = "TritonDataCenter Client CLI and Node.js SDK";
homepage = "https://github.com/TritonDataCenter/node-triton";
license = licenses.mpl20;
maintainers = with maintainers; [ teutat3s ];
};
}

View file

@ -7,7 +7,6 @@
, vulkan-headers
, vulkan-loader
, glslang
, libgcc
, libwebp
, ncnn
}:
@ -44,8 +43,7 @@ stdenv.mkDerivation rec {
];
nativeBuildInputs = [ cmake ];
buildInputs = [ vulkan-headers vulkan-loader glslang libwebp ncnn ]
++ lib.optional (!stdenv.isDarwin) libgcc;
buildInputs = [ vulkan-headers vulkan-loader glslang libwebp ncnn ];
postPatch = ''
substituteInPlace main.cpp --replace REPLACE_MODELS $out/share/models
@ -62,5 +60,6 @@ stdenv.mkDerivation rec {
homepage = "https://github.com/xinntao/Real-ESRGAN-ncnn-vulkan";
license = licenses.mit;
maintainers = with maintainers; [ tilcreator ];
platforms = platforms.all;
};
}

View file

@ -0,0 +1,42 @@
{ lib
, stdenv
, fetchFromGitHub
, libX11
, libXi
, libXrandr
, txt2man
}:
stdenv.mkDerivation rec {
pname = "xlibinput-calibrator";
version = "0.11";
src = fetchFromGitHub {
owner = "kreijack";
repo = "xlibinput_calibrator";
rev = "v${version}";
hash = "sha256-MvlamN8WSER0zN9Ru3Kr2MFARD9s7PYKkRtyD8s6ZPI=";
};
nativeBuildInputs = [
txt2man
];
buildInputs = [
libX11
libXi
libXrandr
];
installFlags = [ "prefix=$(out)" ];
enableParallelBuilding = true;
meta = with lib; {
description = "Touch calibrator for libinput";
homepage = "https://github.com/kreijack/xlibinput_calibrator";
changelog = "https://github.com/kreijack/xlibinput_calibrator/blob/${src.rev}/Changelog";
license = with licenses; [ mit ];
maintainers = with maintainers; [ atemu ];
};
}

View file

@ -15,7 +15,7 @@
let
runtimePath = lib.makeBinPath [ nix nix-prefetch-git git ];
sources = (builtins.fromJSON (builtins.readFile ./sources.json)).pins;
sources = (lib.importJSON ./sources.json).pins;
in rustPlatform.buildRustPackage rec {
pname = "npins";
version = src.version;

View file

@ -3,7 +3,7 @@
# Usage:
# ```nix
# let
# sources = builtins.fromJSON (builtins.readFile ./sources.json);
# sources = lib.importJSON ./sources.json;
# in mkMyDerivation rec {
# version = src.version; # This obviously only works for releases
# src = pkgs.npins.mkSource sources.mySource;

View file

@ -5,14 +5,14 @@
python3.pkgs.buildPythonApplication rec {
pname = "dnsrecon";
version = "1.1.3";
version = "1.1.4";
format = "setuptools";
src = fetchFromGitHub {
owner = "darkoperator";
repo = pname;
rev = version;
hash = "sha256-V4/6VUlMizy8EN8ajN56YF+COn3/dfmD0997R+iR86g=";
hash = "sha256-DtyYYNtv0Zk8103NN+vlnr3Etv0bAZ6+A2CXeZZgiUg=";
};
propagatedBuildInputs = with python3.pkgs; [

View file

@ -9,13 +9,13 @@
rustPlatform.buildRustPackage rec {
pname = "feroxbuster";
version = "2.9.5";
version = "2.10.0";
src = fetchFromGitHub {
owner = "epi052";
repo = pname;
rev = "refs/tags/v${version}";
hash = "sha256-+cjRfuUspq9eE5PsYgha0Vj1ELHjTUxxdM7yR3L9T2k=";
hash = "sha256-u2c+s5kCAYOKwl5eb1zY7xdl4pD6eAjiyRj6JFkA07M=";
};
# disable linker overrides on aarch64-linux
@ -23,7 +23,7 @@ rustPlatform.buildRustPackage rec {
rm .cargo/config
'';
cargoHash = "sha256-yd97iiKjMIlMhilU0L1yngNIKptv4I0nEIKWRfhx/40=";
cargoHash = "sha256-rPFj53KQkucz1/yAr6U2nk6gTdxcBxyRHVqGeawBYZU=";
OPENSSL_NO_VENDOR = true;

View file

@ -6,13 +6,13 @@
buildGoModule rec {
pname = "kubescape";
version = "2.3.0";
version = "2.3.1";
src = fetchFromGitHub {
owner = "kubescape";
repo = pname;
rev = "refs/tags/v${version}";
hash = "sha256-xYkNwANAGWlYxGLIhIkOLKmOW/SM3Duqus4WJ6MKGZE=";
hash = "sha256-TMK+9C1L+pNIjWg/lahVQk1G4CdfgRLH68XKAfszTys=";
fetchSubmodules = true;
};

View file

@ -6,14 +6,14 @@
python3.pkgs.buildPythonApplication rec {
pname = "quark-engine";
version = "23.2.1";
version = "23.4.1";
format = "setuptools";
src = fetchFromGitHub {
owner = pname;
repo = pname;
rev = "refs/tags/v${version}";
sha256 = "sha256-9WrOyBOoSif1P67Z19HW56RvsojoubeT58P0rM18XSk=";
sha256 = "sha256-YOI768QNAgqUy3Vc2kyJCUeJE7j0PyP5BOUelhvyHgU=";
};
propagatedBuildInputs = with python3.pkgs; [

View file

@ -5,16 +5,16 @@
buildGoModule rec {
pname = "yatas";
version = "1.3.3";
version = "1.5.1";
src = fetchFromGitHub {
owner = "padok-team";
repo = "YATAS";
rev = "refs/tags/v${version}";
hash = "sha256-BjcqEO+rDEjPttGgTH07XyQKLcs/O+FarKTWjqXWQOo=";
hash = "sha256-gw4aZ7SLUz5WLUb1z4zDtI6Ca0tEWhE5wobp5NRvjkg=";
};
vendorHash = "sha256-QOFt9h4Hdt+Mx82yw4mjAoyUXHeprvjRoLYLBnihwJo=";
vendorHash = "sha256-zp5EVJe5Q6o6C0CZ8u+oEFEOy0NU5SgVN+cSc6A/jZ4=";
meta = with lib; {
description = "Tool to audit AWS infrastructure for misconfiguration or potential security issues";

View file

@ -0,0 +1,110 @@
From 6df2a198013ebed9aeff119ee0d15cb2d616474c Mon Sep 17 00:00:00 2001
From: zimbatm <zimbatm@zimbatm.com>
Date: Sun, 30 Apr 2023 12:13:54 +0200
Subject: [PATCH] vultr: remove check_route check
The heuristic is assuming that the URL will contain an IP, and that the
route explicitly lists that IP (eg: 0.0.0.0/0 should match but doesn't).
In order for the heuristic to be 100% reliable, it would have to
replicate exactly what the system is doing both in terms of DNS and
route resolution.
Because the HTTP request below is already exercising the python and
system resolution, it is simpler to just remove this check and lean on
the HTTP request to provide the answer if the network is up or not.
---
cloudinit/sources/helpers/vultr.py | 22 ----------------------
tests/unittests/sources/test_vultr.py | 12 ------------
2 files changed, 34 deletions(-)
diff --git a/cloudinit/sources/helpers/vultr.py b/cloudinit/sources/helpers/vultr.py
index 71676bb1..aac2a610 100644
--- a/cloudinit/sources/helpers/vultr.py
+++ b/cloudinit/sources/helpers/vultr.py
@@ -32,10 +32,6 @@ def get_metadata(
iface=iface,
connectivity_url_data={"url": url},
):
- # Check for the metadata route, skip if not there
- if not check_route(url):
- continue
-
# Fetch the metadata
v1 = read_metadata(url, timeout, retries, sec_between, agent)
@@ -75,24 +71,6 @@ def get_interface_list():
return ifaces
-# Check for /32 route that our dhcp servers inject
-# in order to determine if this a customer-run dhcp server
-def check_route(url):
- # Get routes, confirm entry exists
- routes = netinfo.route_info()
-
- # If no tools exist and empty dict is returned
- if "ipv4" not in routes:
- return False
-
- # Parse each route into a more searchable format
- for route in routes["ipv4"]:
- if route.get("destination", None) in url:
- return True
-
- return False
-
-
# Read the system information from SMBIOS
def get_sysinfo():
return {
diff --git a/tests/unittests/sources/test_vultr.py b/tests/unittests/sources/test_vultr.py
index ba21ae24..7fa02b1c 100644
--- a/tests/unittests/sources/test_vultr.py
+++ b/tests/unittests/sources/test_vultr.py
@@ -274,14 +274,6 @@ INTERFACE_MAP = {
FINAL_INTERFACE_USED = ""
-# Static override, pylint doesnt like this in
-# classes without self
-def check_route(url):
- if FINAL_INTERFACE_USED == "eth0":
- return True
- return False
-
-
class TestDataSourceVultr(CiTestCase):
def setUp(self):
global VULTR_V1_3
@@ -431,7 +423,6 @@ class TestDataSourceVultr(CiTestCase):
@mock.patch(
"cloudinit.net.ephemeral.EphemeralDHCPv4.__exit__", override_exit
)
- @mock.patch("cloudinit.sources.helpers.vultr.check_route")
@mock.patch("cloudinit.sources.helpers.vultr.is_vultr")
@mock.patch("cloudinit.sources.helpers.vultr.read_metadata")
@mock.patch("cloudinit.sources.helpers.vultr.get_interface_list")
@@ -440,12 +431,10 @@ class TestDataSourceVultr(CiTestCase):
mock_interface_list,
mock_read_metadata,
mock_isvultr,
- mock_check_route,
):
mock_read_metadata.return_value = {}
mock_isvultr.return_value = True
mock_interface_list.return_value = FILTERED_INTERFACES
- mock_check_route.return_value = True
distro = mock.MagicMock()
distro.get_tmp_exec_path = self.tmp_dir
@@ -461,7 +450,6 @@ class TestDataSourceVultr(CiTestCase):
self.assertEqual(FINAL_INTERFACE_USED, INTERFACES[3])
    # Test route checking successful DHCPs
- @mock.patch("cloudinit.sources.helpers.vultr.check_route", check_route)
@mock.patch(
"cloudinit.net.ephemeral.EphemeralDHCPv4.__init__",
ephemeral_init_always,
--
2.40.0
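
A minimal, hypothetical Python sketch (not part of the patch; the URL and route values are assumed for illustration) of why the substring comparison in the removed check_route heuristic fails for a default route:

  url = "http://169.254.169.254/v1.json"                        # assumed metadata URL
  routes = [{"destination": "0.0.0.0", "gateway": "10.0.0.1"}]  # assumed default route entry
  # check_route() tested `route["destination"] in url`, i.e. plain substring matching
  reachable = any(r.get("destination", "") in url for r in routes)
  print(reachable)  # False: "0.0.0.0" is not a substring of the URL, although the default route covers it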

View file

@ -26,7 +26,13 @@ python3.pkgs.buildPythonApplication rec {
hash = "sha256-tn4flcrf04hVWhqkmK4qDenXcnV93pP+C+8J63b6FXQ=";
};
patches = [ ./0001-add-nixos-support.patch ./0002-Add-Udhcpc-support.patch ];
patches = [
./0001-add-nixos-support.patch
# upstream: https://github.com/canonical/cloud-init/pull/2125
./0002-Add-Udhcpc-support.patch
# upstream: https://github.com/canonical/cloud-init/pull/2151
./0003-vultr-remove-check_route-check.patch
];
prePatch = ''
substituteInPlace setup.py \

View file

@ -13082,7 +13082,9 @@ with pkgs;
trickle = callPackage ../tools/networking/trickle { };
inherit (nodePackages) triton;
node-manta = callPackage ../tools/admin/manta { };
triton = callPackage ../tools/admin/triton { };
triggerhappy = callPackage ../tools/inputmethods/triggerhappy { };
@ -13356,9 +13358,7 @@ with pkgs;
verilator = callPackage ../applications/science/electronics/verilator { };
verilog = callPackage ../applications/science/electronics/verilog {
autoconf = buildPackages.autoconf269;
};
verilog = callPackage ../applications/science/electronics/verilog { };
versus = callPackage ../applications/networking/versus { };
@ -39721,6 +39721,8 @@ with pkgs;
xlayoutdisplay = callPackage ../tools/X11/xlayoutdisplay { };
xlibinput-calibrator = callPackage ../tools/inputmethods/xlibinput_calibrator { };
xlog = callPackage ../applications/radio/xlog { };
xmagnify = callPackage ../tools/X11/xmagnify { };

View file

@ -82,6 +82,7 @@ in let
config = config1;
})
];
class = "nixpkgsConfig";
};
# take all the rest as-is

View file

@ -1985,6 +1985,8 @@ self: super: with self; {
colour = callPackage ../development/python-modules/colour { };
colout = callPackage ../development/python-modules/colout { };
cometblue-lite = callPackage ../development/python-modules/cometblue-lite { };
comm = callPackage ../development/python-modules/comm { };