Merge remote-tracking branch 'origin/master' into mvn

Benjamin Staffin 2022-03-21 18:37:45 -04:00 committed by Benjamin Staffin
commit 34006ebc9d
10752 changed files with 292573 additions and 199420 deletions


@@ -61,19 +61,12 @@ trim_trailing_whitespace = unset
 [nixos/modules/services/networking/ircd-hybrid/*.{conf,in}]
 trim_trailing_whitespace = unset
 
-[nixos/tests/systemd-networkd-vrf.nix]
-trim_trailing_whitespace = unset
-
 [pkgs/build-support/dotnetenv/Wrapper/**]
 end_of_line = unset
 indent_style = unset
 insert_final_newline = unset
 trim_trailing_whitespace = unset
 
-[pkgs/build-support/upstream-updater/**]
-indent_style = unset
-trim_trailing_whitespace = unset
-
 [pkgs/development/compilers/elm/registry.dat]
 end_of_line = unset
 insert_final_newline = unset
@@ -87,10 +80,3 @@ trim_trailing_whitespace = unset
 [pkgs/tools/misc/timidity/timidity.cfg]
 trim_trailing_whitespace = unset
-
-[pkgs/tools/security/enpass/data.json]
-insert_final_newline = unset
-trim_trailing_whitespace = unset
-
-[pkgs/top-level/emscripten-packages.nix]
-trim_trailing_whitespace = unset

.git-blame-ignore-revs (new file)

@@ -0,0 +1,32 @@
+# This file contains a list of commits that are not likely what you
+# are looking for in a blame, such as mass reformatting or renaming.
+# You can set this file as a default ignore file for blame by running
+# the following command.
+#
+# $ git config blame.ignoreRevsFile .git-blame-ignore-revs
+#
+# To temporarily not use this file add
+# --ignore-revs-file=""
+# to your blame command.
+#
+# The ignoreRevsFile can't be set globally due to blame failing if the file isn't present.
+# To not have to set the option in every repository it is needed in,
+# save the following script in your path with the name "git-bblame"
+# now you can run
+# $ git bblame $FILE
+# to use the .git-blame-ignore-revs file if it is present.
+#
+# #!/usr/bin/env bash
+# repo_root=$(git rev-parse --show-toplevel)
+# if [[ -e $repo_root/.git-blame-ignore-revs ]]; then
+#     git blame --ignore-revs-file="$repo_root/.git-blame-ignore-revs" "$@"
+# else
+#     git blame "$@"
+# fi
+
+# nixos/modules/rename: Sort alphabetically
+1f71224fe86605ef4cd23ed327b3da7882dad382
+
+# nixos: fix module paths in rename.nix
+d08ede042b74b8199dc748323768227b88efcf7c

.github/CODEOWNERS

@@ -104,9 +104,9 @@
 /pkgs/top-level/haskell-packages.nix @cdepillabout @sternenseemann @maralorn @expipiplus1
 
 # Perl
-/pkgs/development/interpreters/perl @volth @stigtsp @zakame
-/pkgs/top-level/perl-packages.nix @volth @stigtsp @zakame
-/pkgs/development/perl-modules @volth @stigtsp @zakame
+/pkgs/development/interpreters/perl @stigtsp @zakame
+/pkgs/top-level/perl-packages.nix @stigtsp @zakame
+/pkgs/development/perl-modules @stigtsp @zakame
 
 # R
 /pkgs/applications/science/math/R @jbedo @bcdarwin
@@ -118,7 +118,8 @@
 # Rust
 /pkgs/development/compilers/rust @Mic92 @LnL7 @zowoq
-/pkgs/build-support/rust @andir @zowoq
+/pkgs/build-support/rust @zowoq
+/doc/languages-frameworks/rust.section.md @zowoq
 
 # Darwin-related
 /pkgs/stdenv/darwin @NixOS/darwin-maintainers
@@ -141,6 +142,15 @@
 /pkgs/development/tools/build-managers/rebar3 @gleber
 /pkgs/development/tools/erlang @gleber
 
+# Audio
+/nixos/modules/services/audio/botamusique.nix @mweinelt
+/nixos/modules/services/audio/snapserver.nix @mweinelt
+/nixos/tests/modules/services/audio/botamusique.nix @mweinelt
+/nixos/tests/snapcast.nix @mweinelt
+
+# Browsers
+/pkgs/applications/networking/browsers/firefox @mweinelt
+
 # Jetbrains
 /pkgs/applications/editors/jetbrains @edwtjo
@@ -167,12 +177,30 @@
 /nixos/tests/hardened.nix @joachifm
 /pkgs/os-specific/linux/kernel/hardened-config.nix @joachifm
 
+# Home Automation
+/nixos/modules/services/misc/home-assistant.nix @mweinelt
+/nixos/modules/services/misc/zigbee2mqtt.nix @mweinelt
+/nixos/tests/home-assistant.nix @mweinelt
+/nixos/tests/zigbee2mqtt.nix @mweinelt
+/pkgs/servers/home-assistant @mweinelt
+/pkgs/tools/misc/esphome @mweinelt
+
 # Network Time Daemons
 /pkgs/tools/networking/chrony @thoughtpolice
 /pkgs/tools/networking/ntp @thoughtpolice
 /pkgs/tools/networking/openntpd @thoughtpolice
 /nixos/modules/services/networking/ntp @thoughtpolice
+
+# Network
+/pkgs/tools/networking/kea/default.nix @mweinelt
+/pkgs/tools/networking/babeld/default.nix @mweinelt
+/nixos/modules/services/networking/babeld.nix @mweinelt
+/nixos/modules/services/networking/kea.nix @mweinelt
+/nixos/modules/services/networking/knot.nix @mweinelt
+/nixos/tests/babeld.nix @mweinelt
+/nixos/tests/kea.nix @mweinelt
+/nixos/tests/knot.nix @mweinelt
 
 # Dhall
 /pkgs/development/dhall-modules @Gabriel439 @Profpatsch @ehmry
 /pkgs/development/interpreters/dhall @Gabriel439 @Profpatsch @ehmry
@@ -190,18 +218,18 @@
 /nixos/modules/services/mail/rspamd.nix @peti
 
 # Emacs
-/pkgs/applications/editors/emacs-modes @adisbladis
+/pkgs/applications/editors/emacs/elisp-packages @adisbladis
 /pkgs/applications/editors/emacs @adisbladis
 /pkgs/top-level/emacs-packages.nix @adisbladis
 
 # Neovim
 /pkgs/applications/editors/neovim @jonringer @teto
 
 # VimPlugins
-/pkgs/misc/vim-plugins @jonringer @softinio
+/pkgs/applications/editors/vim/plugins @jonringer
 
 # VsCode Extensions
-/pkgs/misc/vscode-extensions @jonringer
+/pkgs/applications/editors/vscode/extensions @jonringer
 
 # Prometheus exporter modules and tests
 /nixos/modules/services/monitoring/prometheus/exporters.nix @WilliButz
@@ -233,6 +261,7 @@
 /pkgs/applications/blockchains @mmahut @RaghavSood
 
 # Go
+/doc/languages-frameworks/go.section.md @kalbasit @Mic92 @zowoq
 /pkgs/development/compilers/go @kalbasit @Mic92 @zowoq
 /pkgs/development/go-modules @kalbasit @Mic92 @zowoq
 /pkgs/development/go-packages @kalbasit @Mic92 @zowoq
@@ -244,7 +273,26 @@
 # Cinnamon
 /pkgs/desktops/cinnamon @mkg20001
 
-#nim
+# nim
 /pkgs/development/compilers/nim @ehmry
 /pkgs/development/nim-packages @ehmry
 /pkgs/top-level/nim-packages.nix @ehmry
+
+# terraform providers
+/pkgs/applications/networking/cluster/terraform-providers @zowoq
+
+# kubernetes
+/nixos/doc/manual/configuration/kubernetes.chapter.md @zowoq
+/nixos/modules/services/cluster/kubernetes @zowoq
+/nixos/tests/kubernetes @zowoq
+/pkgs/applications/networking/cluster/kubernetes @zowoq
+
+# Matrix
+/pkgs/servers/heisenbridge @piegamesde
+/pkgs/servers/matrix-conduit @piegamesde @pstn
+/pkgs/servers/matrix-synapse/matrix-appservice-irc @piegamesde
+/nixos/modules/services/misc/heisenbridge.nix @piegamesde
+/nixos/modules/services/misc/matrix-appservice-irc.nix @piegamesde
+/nixos/modules/services/misc/matrix-conduit.nix @piegamesde @pstn
+/nixos/tests/matrix-appservice-irc.nix @piegamesde
+/nixos/tests/matrix-conduit.nix @piegamesde @pstn


@@ -38,11 +38,3 @@ Please run `nix-shell -p nix-info --run "nix-info -m"` and paste the result.
 [user@system:~]$ nix-shell -p nix-info --run "nix-info -m"
 output here
 ```
-
-Maintainer information:
-```yaml
-# a list of nixpkgs attributes affected by the problem
-attribute:
-# a list of nixos modules affected by the problem
-module:
-```


@@ -13,10 +13,10 @@ assignees: ''
 <!-- Note that these are hard requirements -->
 
 <!--
-You can use the "Go to file" functionality on github to find the package
+You can use the "Go to file" functionality on GitHub to find the package
 Then you can go to the history for this package
 Find the latest "package_name: old_version -> new_version" commit
-The "new_version" is the the current version of the package
+The "new_version" is the current version of the package
 -->
 - [ ] Checked the [nixpkgs master branch](https://github.com/NixOS/nixpkgs)
 <!--
@@ -29,7 +29,7 @@ There's a high chance that you'll have the new version right away while helping
 ###### Project name
 `nix search` name:
 <!--
-The current version can be found easily with the same process than above for checking the master branch
+The current version can be found easily with the same process as above for checking the master branch
 If an open PR is present for the package, take this version as the current one and link to the PR
 -->
 current version:


@@ -1,16 +1,10 @@
+###### Description of changes
+
 <!--
-To help with the large amounts of pull requests, we would appreciate your
-reviews of other pull requests, especially simple package updates. Just leave a
-comment describing what you have tested in the relevant package/service.
-Reviewing helps to reduce the average time-to-merge for everyone.
-Thanks a lot if you do!
-List of open PRs: https://github.com/NixOS/nixpkgs/pulls
-Reviewing guidelines: https://nixos.org/manual/nixpkgs/unstable/#chap-reviewing-contributions
+For package updates please link to a changelog or describe changes, this helps your fellow maintainers discover breaking updates.
+For new packages please briefly describe the package or provide a link to its homepage.
 -->
-###### Motivation for this change
 
 ###### Things done
 <!-- Please check what applies. Note that these are not hard requirements but merely serve as information for reviewers. -->
@@ -34,3 +28,14 @@ Reviewing guidelines: https://nixos.org/manual/nixpkgs/unstable/#chap-reviewing-
 - [ ] (Module addition) Added a release notes entry if adding a new NixOS module
 - [ ] (Release notes changes) Ran `nixos/doc/manual/md-to-db.sh` to update generated release notes
 - [ ] Fits [CONTRIBUTING.md](https://github.com/NixOS/nixpkgs/blob/master/CONTRIBUTING.md).
+
+<!--
+To help with the large amounts of pull requests, we would appreciate your
+reviews of other pull requests, especially simple package updates. Just leave a
+comment describing what you have tested in the relevant package/service.
+Reviewing helps to reduce the average time-to-merge for everyone.
+Thanks a lot if you do!
+List of open PRs: https://github.com/NixOS/nixpkgs/pulls
+Reviewing guidelines: https://nixos.org/manual/nixpkgs/unstable/#chap-reviewing-contributions
+-->

.github/labeler.yml

@@ -5,10 +5,6 @@
   - pkgs/development/libraries/agda/**/*
   - pkgs/top-level/agda-packages.nix
 
-"6.topic: bsd":
-  - pkgs/os-specific/bsd/**/*
-  - pkgs/stdenv/freebsd/**/*
-
 "6.topic: cinnamon":
   - pkgs/desktops/cinnamon/**/*
@@ -16,7 +12,7 @@
   - nixos/modules/services/editors/emacs.nix
   - nixos/modules/services/editors/emacs.xml
   - nixos/tests/emacs-daemon.nix
-  - pkgs/applications/editors/emacs-modes/**/*
+  - pkgs/applications/editors/emacs/elisp-packages/**/*
   - pkgs/applications/editors/emacs/**/*
   - pkgs/build-support/emacs/**/*
   - pkgs/top-level/emacs-packages.nix
@@ -142,7 +138,7 @@
 "6.topic: vim":
   - doc/languages-frameworks/vim.section.md
   - pkgs/applications/editors/vim/**/*
-  - pkgs/misc/vim-plugins/**/*
+  - pkgs/applications/editors/vim/plugins/**/*
   - nixos/modules/programs/neovim.nix
   - pkgs/applications/editors/neovim/**/*


@@ -2,13 +2,19 @@ name: Backport
 on:
   pull_request_target:
     types: [closed, labeled]
 
+# WARNING:
+# When extending this action, be aware that $GITHUB_TOKEN allows write access to
+# the GitHub repository. This means that it should not evaluate user input in a
+# way that allows code injection.
+
 jobs:
   backport:
     name: Backport Pull Request
     if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name))
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           # required to find all branches
           fetch-depth: 0


@@ -14,7 +14,12 @@ jobs:
     runs-on: ubuntu-latest
     # we don't limit this action to only NixOS repo since the checks are cheap and useful developer feedback
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - uses: cachix/install-nix-action@v16
+      - uses: cachix/cachix-action@v10
+        with:
+          # This cache is for the nixpkgs repo checks and should not be trusted or used elsewhere.
+          name: nixpkgs-ci
+          signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
       # explicit list of supportedSystems is needed until aarch64-darwin becomes part of the trunk jobset
       - run: nix-build pkgs/top-level/release.nix -A tarball.nixpkgs-basic-release-checks --arg supportedSystems '[ "aarch64-darwin" "aarch64-linux" "x86_64-linux" "x86_64-darwin" ]'


@@ -24,14 +24,15 @@ jobs:
       - name: print list of changed files
         run: |
           cat "$HOME/changed_files"
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           # pull_request_target checks out the base branch by default
           ref: refs/pull/${{ github.event.pull_request.number }}/merge
       - uses: cachix/install-nix-action@v16
         with:
           # nixpkgs commit is pinned so that it doesn't break
-          nix_path: nixpkgs=https://github.com/NixOS/nixpkgs/archive/f93ecc4f6bc60414d8b73dbdf615ceb6a2c604df.tar.gz
+          # editorconfig-checker 2.4.0
+          nix_path: nixpkgs=https://github.com/NixOS/nixpkgs/archive/c473cc8714710179df205b153f4e9fa007107ff9.tar.gz
       - name: install editorconfig-checker
         run: nix-env -iA editorconfig-checker -f '<nixpkgs>'
       - name: Checking EditorConfig


@@ -4,6 +4,11 @@ on:
   pull_request_target:
     types: [edited, opened, synchronize, reopened]
 
+# WARNING:
+# When extending this action, be aware that $GITHUB_TOKEN allows some write
+# access to the GitHub API. This means that it should not evaluate user input in
+# a way that allows code injection.
+
 permissions:
   contents: read
   pull-requests: write
@@ -13,7 +18,7 @@ jobs:
     runs-on: ubuntu-latest
     if: github.repository_owner == 'NixOS'
     steps:
-      - uses: actions/labeler@v3
+      - uses: actions/labeler@v4
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           sync-labels: true


@@ -14,7 +14,7 @@ jobs:
     runs-on: ubuntu-latest
     if: github.repository_owner == 'NixOS'
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           # pull_request_target checks out the base branch by default
           ref: refs/pull/${{ github.event.pull_request.number }}/merge
@@ -24,7 +24,7 @@ jobs:
           extra_nix_config: sandbox = true
       - uses: cachix/cachix-action@v10
         with:
-          # This cache is for the nixos/nixpkgs manual builds and should not be trusted or used elsewhere.
+          # This cache is for the nixpkgs repo checks and should not be trusted or used elsewhere.
           name: nixpkgs-ci
           signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
       - name: Building NixOS manual
- name: Building NixOS manual - name: Building NixOS manual


@@ -14,7 +14,7 @@ jobs:
     runs-on: ubuntu-latest
     if: github.repository_owner == 'NixOS'
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           # pull_request_target checks out the base branch by default
           ref: refs/pull/${{ github.event.pull_request.number }}/merge
@@ -24,7 +24,7 @@ jobs:
           extra_nix_config: sandbox = true
       - uses: cachix/cachix-action@v10
         with:
-          # This cache is for the nixos/nixpkgs manual builds and should not be trusted or used elsewhere.
+          # This cache is for the nixpkgs repo checks and should not be trusted or used elsewhere.
           name: nixpkgs-ci
           signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
       - name: Building Nixpkgs manual


@@ -15,7 +15,7 @@ jobs:
     runs-on: ubuntu-latest
     if: github.repository_owner == 'NixOS'
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           # pull_request_target checks out the base branch by default
           ref: refs/pull/${{ github.event.pull_request.number }}/merge


@@ -3,6 +3,11 @@ name: "set pending status"
 on:
   pull_request_target:
 
+# WARNING:
+# When extending this action, be aware that $GITHUB_TOKEN allows write access to
+# the GitHub repository. This means that it should not evaluate user input in a
+# way that allows code injection.
+
 jobs:
   action:
     runs-on: ubuntu-latest


@@ -38,7 +38,7 @@ jobs:
             into: staging-21.11
     name: ${{ matrix.pairs.from }} → ${{ matrix.pairs.into }}
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - name: ${{ matrix.pairs.from }} → ${{ matrix.pairs.into }}
         uses: devmasx/merge-branch@1.4.0


@@ -32,7 +32,7 @@ jobs:
             into: staging
     name: ${{ matrix.pairs.from }} → ${{ matrix.pairs.into }}
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - name: ${{ matrix.pairs.from }} → ${{ matrix.pairs.into }}
         uses: devmasx/merge-branch@1.4.0


@@ -0,0 +1,47 @@
+name: "Update terraform-providers"
+
+on:
+  schedule:
+    - cron: "14 3 * * 1"
+  workflow_dispatch:
+
+jobs:
+  tf-providers:
+    if: github.repository_owner == 'NixOS' && github.ref == 'refs/heads/master' # ensure workflow_dispatch only runs on master
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: cachix/install-nix-action@v16
+      - name: setup
+        id: setup
+        run: |
+          echo ::set-output name=title::"terraform-providers: update $(date -u +"%Y-%m-%d")"
+      - name: update terraform-providers
+        run: |
+          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
+          git config user.name "github-actions[bot]"
+          pushd pkgs/applications/networking/cluster/terraform-providers
+          ./update-all-providers --no-build
+          git commit -m "${{ steps.setup.outputs.title }}" providers.json
+          popd
+      - name: create PR
+        uses: peter-evans/create-pull-request@v3
+        with:
+          body: |
+            Automatic update of terraform providers.
+
+            Created by [update-terraform-providers](https://github.com/NixOS/nixpkgs/blob/master/.github/workflows/update-terraform-providers.yml) action.
+
+            Check that all providers build with `@ofborg build terraform-full`
+          branch: terraform-providers-update
+          delete-branch: false
+          labels: "2.status: work-in-progress"
+          title: ${{ steps.setup.outputs.title }}
+          token: ${{ secrets.GITHUB_TOKEN }}
+      - name: comment on failure
+        uses: peter-evans/create-or-update-comment@v1
+        if: ${{ failure() }}
+        with:
+          issue-number: 153416
+          body: |
+            Automatic update of terraform providers [failed](https://github.com/NixOS/nixpkgs/actions/runs/${{ github.run_id }}).

.gitignore

@@ -3,6 +3,7 @@
 .*.swp
 .*.swo
 .idea/
+.vscode/
 outputs/
 result
 result-*


@@ -11,6 +11,10 @@ under the terms of [COPYING](COPYING), which is an MIT-like license.
 
 ## Submitting changes
 
+Read the ["Submitting changes"](https://nixos.org/nixpkgs/manual/#chap-submitting-changes) section of the nixpkgs manual. It explains how to write, test, and iterate on your change, and which branch to base your pull request against.
+
+Below is a short excerpt of some points in there:
+
 * Format the commit messages in the following way:
 
   ```
@@ -40,7 +44,7 @@ under the terms of [COPYING](COPYING), which is an MIT-like license.
 * If there is no upstream license, `meta.license` should default to `lib.licenses.unfree`.
 * `meta.maintainers` must be set.
 
-See the nixpkgs manual for more details on [standard meta-attributes](https://nixos.org/nixpkgs/manual/#sec-standard-meta-attributes) and on how to [submit changes to nixpkgs](https://nixos.org/nixpkgs/manual/#chap-submitting-changes).
+See the nixpkgs manual for more details on [standard meta-attributes](https://nixos.org/nixpkgs/manual/#sec-standard-meta-attributes).
 
 ## Writing good commit messages


@@ -1,4 +1,4 @@
-Copyright (c) 2003-2021 Eelco Dolstra and the Nixpkgs/NixOS contributors
+Copyright (c) 2003-2022 Eelco Dolstra and the Nixpkgs/NixOS contributors
 
 Permission is hereby granted, free of charge, to any person obtaining
 a copy of this software and associated documentation files (the


@@ -1,10 +1,15 @@
 <p align="center">
-  <a href="https://nixos.org/nixos"><img src="https://nixos.org/logo/nixos-hires.png" width="500px" alt="NixOS logo" /></a>
+  <a href="https://nixos.org#gh-light-mode-only">
+    <img src="https://raw.githubusercontent.com/NixOS/nixos-homepage/master/logo/nixos-hires.png" width="500px" alt="NixOS logo"/>
+  </a>
+  <a href="https://nixos.org#gh-dark-mode-only">
+    <img src="https://raw.githubusercontent.com/NixOS/nixos-artwork/master/logo/nixos-white.png" width="500px" alt="NixOS logo"/>
+  </a>
 </p>
 
 <p align="center">
-  <a href="https://www.codetriage.com/nixos/nixpkgs"><img src="https://www.codetriage.com/nixos/nixpkgs/badges/users.svg" alt="Code Triagers badge" /></a>
-  <a href="https://opencollective.com/nixos"><img src="https://opencollective.com/nixos/tiers/supporter/badge.svg?label=Supporter&color=brightgreen" alt="Open Collective supporters" /></a>
+  <a href="https://github.com/NixOS/nixpkgs/blob/master/CONTRIBUTING.md"><img src="https://img.shields.io/github/contributors-anon/NixOS/nixpkgs" alt="Contributors badge" /></a>
+  <a href="https://opencollective.com/nixos"><img src="https://opencollective.com/nixos/tiers/supporter/badge.svg?label=supporters&color=brightgreen" alt="Open Collective supporters" /></a>
 </p>
 
 [Nixpkgs](https://github.com/nixos/nixpkgs) is a collection of over


@@ -1,6 +1,5 @@
 --[[
-Turns a manpage reference into a link, when a mapping is defined
-in the unix-man-urls.lua file.
+Turns a manpage reference into a link, when a mapping is defined below.
 ]]
 
 local man_urls = {


@@ -40,6 +40,24 @@ Used with Git. Expects `url` to a Git repo, `rev`, and `sha256`. `rev` in this c
 
 Additionally the following optional arguments can be given: `fetchSubmodules = true` makes `fetchgit` also fetch the submodules of a repository. If `deepClone` is set to true, the entire repository is cloned as opposing to just creating a shallow clone. `deepClone = true` also implies `leaveDotGit = true` which means that the `.git` directory of the clone won't be removed after checkout.
 
+If only parts of the repository are needed, `sparseCheckout` can be used. This will prevent git from fetching unnecessary blobs from the server, see [git sparse-checkout](https://git-scm.com/docs/git-sparse-checkout) and [git clone --filter](https://git-scm.com/docs/git-clone#Documentation/git-clone.txt---filterltfilter-specgt) for more information:
+
+```nix
+{ stdenv, fetchgit }:
+
+stdenv.mkDerivation {
+  name = "hello";
+  src = fetchgit {
+    url = "https://...";
+    sparseCheckout = ''
+      path/to/be/included
+      another/path
+    '';
+    sha256 = "0000000000000000000000000000000000000000000000000000";
+  };
+}
+```
+
 ## `fetchfossil` {#fetchfossil}
 
 Used with Fossil. Expects `url` to a Fossil archive, `rev`, and `sha256`.
@@ -82,4 +100,11 @@ This is used with repo.or.cz repositories. The arguments expected are very simil
 
 ## `fetchFromSourcehut` {#fetchfromsourcehut}
 
-This is used with sourcehut repositories. The arguments expected are very similar to fetchFromGitHub above. Don't forget the tilde (~) in front of the user name!
+This is used with sourcehut repositories. Similar to `fetchFromGitHub` above,
+it expects `owner`, `repo`, `rev` and `sha256`, but don't forget the tilde (~)
+in front of the username! Expected arguments also include `vc` ("git" (default)
+or "hg"), `domain` and `fetchSubmodules`.
+
+If `fetchSubmodules` is `true`, `fetchFromSourcehut` uses `fetchgit`
+or `fetchhg` with `fetchSubmodules` or `fetchSubrepos` set to `true`,
+respectively. Otherwise the fetcher uses `fetchzip`.
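For orientation, a call following the argument list in the added text might look like the sketch below (owner, repo, rev, and hash are placeholder values, not a real package):

```nix
src = fetchFromSourcehut {
  owner = "~someuser";   # note the tilde in front of the username
  repo = "somerepo";
  rev = "v1.0.0";
  sha256 = "0000000000000000000000000000000000000000000000000000";
  vc = "git";            # optional; "git" is the default, "hg" also supported
};
```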


@@ -29,7 +29,7 @@ How to add a new (major) version of the Linux kernel to Nixpkgs:
 4. If needed you can also run `make menuconfig`:
 
    ```ShellSession
-   $ nix-env -i ncurses
+   $ nix-env -f "<nixpkgs>" -iA ncurses
    $ export NIX_CFLAGS_LINK=-lncurses
    $ make menuconfig ARCH=arch
    ```


@@ -56,7 +56,7 @@ Use `programs.steam.enable = true;` if you want to add steam to systemPackages a
 
 ## steam-run {#sec-steam-run}
 
-The FHS-compatible chroot used for Steam can also be used to run other Linux games that expect a FHS environment. To use it, install the `steam-run-native` package and run the game with
+The FHS-compatible chroot used for Steam can also be used to run other Linux games that expect a FHS environment. To use it, install the `steam-run` package and run the game with
 
 ```
 steam-run ./foo


@@ -45,3 +45,5 @@ One can create a simple environment using a `shell.nix` like that:
 ```
 
 Running `nix-shell` would then drop you into a shell with these libraries and binaries available. You can use this to run closed-source applications which expect FHS structure without hassles: simply change `runScript` to the application path, e.g. `./bin/start.sh` -- relative paths are supported.
+
+Additionally, the FHS builder links all relocated gsettings-schemas (the glib setup-hook moves them to `share/gsettings-schemas/${name}/glib-2.0/schemas`) to their standard FHS location. This means you don't need to wrap binaries with `wrapGAppsHook`.
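For context, the `shell.nix` referenced in the hunk above follows this general shape (a minimal sketch only — the package list and `runScript` path are placeholders):

```nix
{ pkgs ? import <nixpkgs> {} }:

(pkgs.buildFHSUserEnv {
  name = "fhs-env";
  # packages made available inside the FHS environment
  targetPkgs = pkgs: [ pkgs.coreutils ];
  # executed on entry; relative paths are supported
  runScript = "./bin/start.sh";
}).env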


@@ -1,17 +1,37 @@
 # pkgs.mkShell {#sec-pkgs-mkShell}
 
-`pkgs.mkShell` is a special kind of derivation that is only useful when using
-it combined with `nix-shell`. It will in fact fail to instantiate when invoked
-with `nix-build`.
+`pkgs.mkShell` is a specialized `stdenv.mkDerivation` that removes some
+repetition when using it with `nix-shell` (or `nix develop`).
 
 ## Usage {#sec-pkgs-mkShell-usage}
 
+Here is a common usage example:
+
 ```nix
 { pkgs ? import <nixpkgs> {} }:
 pkgs.mkShell {
-  # specify which packages to add to the shell environment
   packages = [ pkgs.gnumake ];
-  # add all the dependencies, of the given packages, to the shell environment
-  inputsFrom = with pkgs; [ hello gnutar ];
+  inputsFrom = [ pkgs.hello pkgs.gnutar ];
+  shellHook = ''
+    export DEBUG=1
+  '';
 }
 ```
+
+## Attributes
+
+* `name` (default: `nix-shell`). Set the name of the derivation.
+* `packages` (default: `[]`). Add executable packages to the `nix-shell` environment.
+* `inputsFrom` (default: `[]`). Add build dependencies of the listed derivations to the `nix-shell` environment.
+* `shellHook` (default: `""`). Bash statements that are executed by `nix-shell`.
+
+... all the attributes of `stdenv.mkDerivation`.
+
+## Building the shell
+
+This derivation output will contain a text file that contains a reference to
+all the build inputs. This is useful in CI where we want to make sure that
+every derivation, and its dependencies, build properly. Or when creating a GC
+root so that the build dependencies don't get garbage-collected.


@@ -47,6 +47,88 @@ These functions write `text` to the Nix store. This is useful for creating scrip
 
 Many more commands wrap `writeTextFile` including `writeText`, `writeTextDir`, `writeScript`, and `writeScriptBin`. These are convenience functions over `writeTextFile`.
 
+Here are a few examples:
+```nix
+# Writes my-file to /nix/store/<store path>
+writeTextFile {
+  name = "my-file";
+  text = ''
+    Contents of File
+  '';
+}
+# See also the `writeText` helper function below.
+
+# Writes executable my-file to /nix/store/<store path>/bin/my-file
+writeTextFile {
+  name = "my-file";
+  text = ''
+    Contents of File
+  '';
+  executable = true;
+  destination = "/bin/my-file";
+}
+
+# Writes contents of file to /nix/store/<store path>
+writeText "my-file"
+  ''
+  Contents of File
+  '';
+
+# Writes contents of file to /nix/store/<store path>/share/my-file
+writeTextDir "share/my-file"
+  ''
+  Contents of File
+  '';
+
+# Writes my-file to /nix/store/<store path> and makes executable
+writeScript "my-file"
+  ''
+  Contents of File
+  '';
+
+# Writes my-file to /nix/store/<store path>/bin/my-file and makes executable.
+writeScriptBin "my-file"
+  ''
+  Contents of File
+  '';
+
+# Writes my-file to /nix/store/<store path> and makes executable.
+writeShellScript "my-file"
+  ''
+  Contents of File
+  '';
+
+# Writes my-file to /nix/store/<store path>/bin/my-file and makes executable.
+writeShellScriptBin "my-file"
+  ''
+  Contents of File
+  '';
+```
+
+## `concatTextFile`, `concatText`, `concatScript` {#trivial-builder-concatText}
+
+These functions concatenate `files` to the Nix store in a single file. This is useful for configuration files structured in lines of text. `concatTextFile` takes an attribute set and expects two arguments, `name` and `files`. `name` corresponds to the name used in the Nix store path. `files` will be the files to be concatenated. You can also set `executable` to true to make this file have the executable bit set.
+
+`concatText` and `concatScript` are simple wrappers over `concatTextFile`.
+
+Here are a few examples:
+```nix
+# Writes my-file to /nix/store/<store path>
+concatTextFile {
+  name = "my-file";
+  files = [ drv1 "${drv2}/path/to/file" ];
+}
+# See also the `concatText` helper function below.
+
+# Writes executable my-file to /nix/store/<store path>/bin/my-file
+concatTextFile {
+  name = "my-file";
+  files = [ drv1 "${drv2}/path/to/file" ];
+  executable = true;
+  destination = "/bin/my-file";
+}
+
+# Writes contents of files to /nix/store/<store path>
+concatText "my-file" [ file1 file2 ]
+
+# Writes contents of files to /nix/store/<store path>
+concatScript "my-file" [ file1 file2 ]
+```
+
 ## `writeShellApplication` {#trivial-builder-writeShellApplication}
 
 This can be used to easily produce a shell script that has some dependencies (`runtimeInputs`). It automatically sets the `PATH` of the script to contain all of the listed inputs, sets some sanity shellopts (`errexit`, `nounset`, `pipefail`), and checks the resulting script with [`shellcheck`](https://github.com/koalaman/shellcheck).
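A rough sketch of what that looks like in practice (assuming `pkgs` is in scope; the name and script body are illustrative only):

```nix
writeShellApplication {
  name = "show-nixos-org";
  # curl and w3m are put on the script's PATH
  runtimeInputs = [ pkgs.curl pkgs.w3m ];
  text = ''
    curl -s 'https://nixos.org' | w3m -dump -T text/html
  '';
}
```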
@@ -72,6 +154,26 @@ validation.
 
 ## `symlinkJoin` {#trivial-builder-symlinkJoin}
 
 This can be used to put many derivations into the same directory structure. It works by creating a new derivation and adding symlinks to each of the paths listed. It expects two arguments, `name`, and `paths`. `name` is the name used in the Nix store path for the created derivation. `paths` is a list of paths that will be symlinked. These paths can be to Nix store derivations or any other subdirectory contained within.
+
+Here is an example:
+```nix
+# adds symlinks of hello and stack to current build and prints "links added"
+symlinkJoin { name = "myexample"; paths = [ pkgs.hello pkgs.stack ]; postBuild = "echo links added"; }
+```
+
+This creates a derivation with a directory structure like the following:
+
+```
+/nix/store/sglsr5g079a5235hy29da3mq3hv8sjmm-myexample
+|-- bin
+|   |-- hello -> /nix/store/qy93dp4a3rqyn2mz63fbxjg228hffwyw-hello-2.10/bin/hello
+|   `-- stack -> /nix/store/6lzdpxshx78281vy056lbk553ijsdr44-stack-2.1.3.1/bin/stack
+`-- share
+    |-- bash-completion
+    |   `-- completions
+    |       `-- stack -> /nix/store/6lzdpxshx78281vy056lbk553ijsdr44-stack-2.1.3.1/share/bash-completion/completions/stack
+    |-- fish
+    |   `-- vendor_completions.d
+    |       `-- stack.fish -> /nix/store/6lzdpxshx78281vy056lbk553ijsdr44-stack-2.1.3.1/share/fish/vendor_completions.d/stack.fish
+...
+```
+
 ## `writeReferencesToFile` {#trivial-builder-writeReferencesToFile}


@@ -224,7 +224,7 @@ There are a few naming guidelines:
 
 - Dashes in the package name _should_ be preserved in new variable names, rather than converted to underscores or camel cased — e.g., `http-parser` instead of `http_parser` or `httpParser`. The hyphenated style is preferred in all three package names.
 
-- If there are multiple versions of a package, this _should_ be reflected in the variable names in `all-packages.nix`, e.g. `json-c-0-9` and `json-c-0-11`. If there is an obvious “default” version, make an attribute like `json-c = json-c-0-9;`. See also [](#sec-versioning)
+- If there are multiple versions of a package, this _should_ be reflected in the variable names in `all-packages.nix`, e.g. `json-c_0_9` and `json-c_0_11`. If there is an obvious “default” version, make an attribute like `json-c = json-c_0_9;`. See also [](#sec-versioning)
 
 ## File naming and organisation {#sec-organisation}


@@ -55,7 +55,7 @@ Additionally, the following syntax extensions are currently used:
 - []{#ssec-contributing-markup-inline-roles}
 
   If you want to link to a man page, you can use `` {manpage}`nix.conf(5)` ``, which will turn into {manpage}`nix.conf(5)`.
-  The references will turn into links when a mapping exists in {file}`doc/build-aux/pandoc-filters/unix-man-urls.lua`.
+  The references will turn into links when a mapping exists in {file}`doc/build-aux/pandoc-filters/link-unix-man-references.lua`.
 
   This syntax is taken from [MyST](https://myst-parser.readthedocs.io/en/latest/syntax/syntax.html#roles-an-in-line-extension-point). Though, the feature originates from [reStructuredText](https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html#role-manpage) with slightly different syntax.


@@ -103,7 +103,8 @@ Sample template for a new package review is provided below.
 - [ ] `meta.maintainers` is set
 - [ ] build time only dependencies are declared in `nativeBuildInputs`
 - [ ] source is fetched using the appropriate function
-- [ ] phases are respected
+- [ ] the list of `phases` is not overridden
+- [ ] when a phase (like `installPhase`) is overridden it starts with `runHook preInstall` and ends with `runHook postInstall`.
 - [ ] patches that are remotely available are fetched with `fetchpatch`
 
 ##### Possible improvements
@@ -124,7 +125,7 @@ Reviewing process:
 - Type should be appropriate (string related types differs in their merging capabilities, `optionSet` and `string` types are deprecated).
 - Description, default and example should be provided.
 - Ensure that option changes are backward compatible.
-  - `mkRenamedOptionModule` and `mkAliasOptionModule` functions provide way to make option changes backward compatible.
+  - `mkRenamedOptionModuleWith` provides a way to make option changes backward compatible.
 - Ensure that removed options are declared with `mkRemovedOptionModule`
 - Ensure that changes that are not backward compatible are mentioned in release notes.
 - Ensure that documentations affected by the change is updated.

View file

@@ -43,13 +43,13 @@
 
 - nixpkgs:
   - update pkg
-    - `nix-env -i pkg-name -f <path to your local nixpkgs folder>`
+    - `nix-env -iA pkg-attribute-name -f <path to your local nixpkgs folder>`
   - add pkg
     - Make sure it's in `pkgs/top-level/all-packages.nix`
-    - `nix-env -i pkg-name -f <path to your local nixpkgs folder>`
+    - `nix-env -iA pkg-attribute-name -f <path to your local nixpkgs folder>`
   - _If you don't want to install pkg in your profile_.
-    - `nix-build -A pkg-attribute-name <path to your local nixpkgs folder>/default.nix` and check results in the folder `result`. It will appear in the same directory where you did `nix-build`.
-  - If you did `nix-env -i pkg-name` you can do `nix-env -e pkg-name` to uninstall it from your system.
+    - `nix-build -A pkg-attribute-name <path to your local nixpkgs folder>` and check results in the folder `result`. It will appear in the same directory where you did `nix-build`.
+  - If you installed your package with `nix-env`, you can run `nix-env -e pkg-name` where `pkg-name` is as reported by `nix-env -q` to uninstall it from your system.
 - NixOS and its modules:
   - You can add a new module to your NixOS configuration file (usually it's `/etc/nixos/configuration.nix`). And do `sudo nixos-rebuild test -I nixpkgs=<path to your local nixpkgs folder> --fast`.
@@ -98,7 +98,7 @@ We use jbidwatcher as an example for a discontinued project here.
 
 1. Create a new branch for your change, e.g. `git checkout -b jbidwatcher`
 1. Remove the actual package including its directory, e.g. `rm -rf pkgs/applications/misc/jbidwatcher`
 1. Remove the package from the list of all packages (`pkgs/top-level/all-packages.nix`).
-1. Add an alias for the package name in `pkgs/top-level/aliases.nix` (There is also `pkgs/misc/vim-plugins/aliases.nix`. Package sets typically do not have aliases, so we can't add them there.)
+1. Add an alias for the package name in `pkgs/top-level/aliases.nix` (There is also `pkgs/applications/editors/vim/plugins/aliases.nix`. Package sets typically do not have aliases, so we can't add them there.)
 
    For example in this case:
@@ -227,7 +227,7 @@ digraph {
 }
 ```
 
-[This GitHub Action](https://github.com/NixOS/nixpkgs/blob/master/.github/workflows/merge-staging.yml) brings changes from `master` to `staging-next` and from `staging-next` to `staging` every 6 hours.
+[This GitHub Action](https://github.com/NixOS/nixpkgs/blob/master/.github/workflows/periodic-merge-6h.yml) brings changes from `master` to `staging-next` and from `staging-next` to `staging` every 6 hours.
 
 ### Master branch {#submitting-changes-master-branch}
@@ -246,11 +246,21 @@ If the branch is already in a broken state, please refrain from adding extra new
 
 ### Stable release branches {#submitting-changes-stable-release-branches}
 
-For cherry-picking a commit to a stable release branch (“backporting”), use `git cherry-pick -x <original commit>` so that the original commit id is included in the commit.
-
-Add a reason for the backport by using `git cherry-pick -xe <original commit>` instead when it is not obvious from the original commit message. It is not needed when it's a minor version update that includes security and bug fixes but don't add new features or when the commit fixes an otherwise broken package.
-
-For backporting Pull Requests to stable branches, assign label `backport <branch>` to the original Pull Requests and automation should take care of the rest once the Pull Requests is merged.
+The same staging workflow applies to stable release branches, but the main branch is called `release-*` instead of `master`.
+
+Example branch names: `release-21.11`, `staging-21.11`, `staging-next-21.11`.
+
+Most changes added to the stable release branches are cherry-picked (“backported”) from the `master` and staging branches.
+
+#### Automatically backporting a Pull Request {#submitting-changes-stable-release-branches-automatic-backports}
+
+Assign label `backport <branch>` (e.g. `backport release-21.11`) to the PR and a backport PR is automatically created after the PR is merged.
+
+#### Manually backporting changes {#submitting-changes-stable-release-branches-manual-backports}
+
+Cherry-pick changes via `git cherry-pick -x <original commit>` so that the original commit id is included in the commit message.
+
+Add a reason for the backport when it is not obvious from the original commit message. You can do this by cherry picking with `git cherry-pick -xe <original commit>`, which allows editing the commit message. This is not needed for minor version updates that include security and bug fixes but don't add new features or when the commit fixes an otherwise broken package.
 
 Here is an example of a cherry-picked commit message with good reason description:


@@ -1474,7 +1474,7 @@ lib.attrsets.zipAttrsWith
 
  <section xml:id="function-library-lib.attrsets.zipAttrs">
   <title><function>lib.attrsets.zipAttrs</function></title>
-  <subtitle><literal>zipAttrsWith :: [ AttrSet ] -> AttrSet</literal>
+  <subtitle><literal>zipAttrs :: [ AttrSet ] -> AttrSet</literal>
   </subtitle>
   <xi:include href="./locations.xml" xpointer="lib.attrsets.zipAttrs" />


@@ -74,7 +74,7 @@ there are 3 steps, frontend dependencies (javascript), backend dependencies (eli
 
 ##### mixRelease - Frontend dependencies (javascript) {#mix-release-javascript-deps}
 
-for phoenix projects, inside of nixpkgs you can either use yarn2nix (mkYarnModule) or node2nix. An example with yarn2nix can be found [here](https://github.com/NixOS/nixpkgs/blob/master/pkgs/servers/web-apps/plausible/default.nix#L39). An example with node2nix will follow. To package something outside of nixpkgs, you have alternatives like [npmlock2nix](https://github.com/nix-community/npmlock2nix) or [nix-npm-buildpackage](https://github.com/serokell/nix-npm-buildpackage)
+For phoenix projects, inside of nixpkgs you can either use yarn2nix (mkYarnModule) or node2nix. An example with yarn2nix can be found [here](https://github.com/NixOS/nixpkgs/blob/master/pkgs/servers/web-apps/plausible/default.nix#L39). An example with node2nix will follow. To package something outside of nixpkgs, you have alternatives like [npmlock2nix](https://github.com/nix-community/npmlock2nix) or [nix-npm-buildpackage](https://github.com/serokell/nix-npm-buildpackage)
 
 ##### mixRelease - backend dependencies (mix) {#mix-release-mix-deps}
@@ -82,13 +82,13 @@ There are 2 ways to package backend dependencies. With mix2nix and with a fixed-
 
 ###### mix2nix {#mix2nix}
 
-mix2nix is a cli tool available in nixpkgs. it will generate a nix expression from a mix.lock file. It is quite standard in the 2nix tool series.
+`mix2nix` is a cli tool available in nixpkgs. It will generate a nix expression from a mix.lock file. It is quite standard in the 2nix tool series.
 
 Note that currently mix2nix can't handle git dependencies inside the mix.lock file. If you have git dependencies, you can either add them manually (see [example](https://github.com/NixOS/nixpkgs/blob/master/pkgs/servers/pleroma/default.nix#L20)) or use the FOD method.
 
 The advantage of using mix2nix is that nix will know your whole dependency graph. On a dependency update, this won't trigger a full rebuild and download of all the dependencies, where FOD will do so.
 
-practical steps:
+Practical steps:
 
 - run `mix2nix > mix_deps.nix` in the upstream repo.
 - pass `mixNixDeps = with pkgs; import ./mix_deps.nix { inherit lib beamPackages; };` as an argument to mixRelease.
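Putting those two steps together, a `mixRelease` call using the generated file might look roughly like this (a sketch only — `pname`, `version`, and `src` are placeholders, and `lib`/`beamPackages` are assumed to be in scope):

```nix
beamPackages.mixRelease {
  pname = "my-app";    # placeholder
  version = "0.1.0";   # placeholder
  src = ./.;           # placeholder source
  # dependency expression generated by `mix2nix > mix_deps.nix`
  mixNixDeps = import ./mix_deps.nix { inherit lib beamPackages; };
}
```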
@@ -280,6 +280,30 @@ mkShell {
 }
 ```
 
+### Using an overlay
+
+If you need to use an overlay to change some attributes of a derivation, e.g. if you need a bugfix from a version that is not yet available in nixpkgs, you can override attributes such as `version` (and the corresponding `sha256`) and then use this overlay in your development environment:
+
+#### `shell.nix`
+
+```nix
+let
+  elixir_1_13_1_overlay = (self: super: {
+      elixir_1_13 = super.elixir_1_13.override {
+        version = "1.13.1";
+        sha256 = "0z0b1w2vvw4vsnb99779c2jgn9bgslg7b1pmd9vlbv02nza9qj5p";
+      };
+    });
+  pkgs = import <nixpkgs> { overlays = [ elixir_1_13_1_overlay ]; };
+in
+with pkgs;
+mkShell {
+  buildInputs = [
+    elixir_1_13
+  ];
+}
+```
+
 #### Elixir - Phoenix project {#elixir---phoenix-project}
 
 Here is an example `shell.nix`.


@@ -29,7 +29,8 @@ The recommended way of defining a derivation for a Coq library, is to use the `c
 * `releaseRev` (optional, defaults to `(v: v)`), provides a default mapping from release names to revision hashes/branch names/tags,
 * `displayVersion` (optional), provides a way to alter the computation of `name` from `pname`, by explaining how to display version numbers,
 * `namePrefix` (optional, defaults to `[ "coq" ]`), provides a way to alter the computation of `name` from `pname`, by explaining which dependencies must occur in `name`,
-* `extraBuildInputs` (optional), by default `buildInputs` just contains `coq`, this allows to add more build inputs,
+* `extraNativeBuildInputs` (optional), by default `nativeBuildInputs` just contains `coq`, this allows to add more native build inputs, `nativeBuildInputs` are executables and `buildInputs` are libraries and dependencies,
+* `extraBuildInputs` (optional), this allows to add more build inputs,
 * `mlPlugin` (optional, defaults to `false`). Some extensions (plugins) might require OCaml and sometimes other OCaml packages. Standard dependencies can be added by setting the current option to `true`. For a finer grain control, the `coq.ocamlPackages` attribute can be used in `extraBuildInputs` to depend on the same package set Coq was built against.
 * `useDune2ifVersion` (optional, default to `(x: false)` uses Dune2 to build the package if the provided predicate evaluates to true on the version, e.g. `useDune2if = versions.isGe "1.1"` will use dune if the version of the package is greater or equal to `"1.1"`,
 * `useDune2` (optional, defaults to `false`) uses Dune2 to build the package if set to true, the presence of this attribute overrides the behavior of the previous one.
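To give a feel for how some of these arguments combine, a `mkCoqDerivation` call could be sketched as follows (hedged illustration only — the package name, owner, and extra dependency are hypothetical, not taken from the diff):

```nix
{ lib, mkCoqDerivation, coq }:

mkCoqDerivation {
  pname = "example-lib";   # hypothetical Coq library
  owner = "example";       # hypothetical repository owner
  version = "1.0.0";
  mlPlugin = true;                                   # pull in standard OCaml plugin dependencies
  extraBuildInputs = [ coq.ocamlPackages.zarith ];   # extra (library) build inputs
}
```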


@ -72,33 +72,51 @@ The `dotnetCorePackages.sdk` contains both a runtime and the full sdk of a given
To package Dotnet applications, you can use `buildDotnetModule`. This has similar arguments to `stdenv.mkDerivation`, with the following additions:

* `projectFile` has to be used for specifying the dotnet project file relative to the source root. These usually have `.sln` or `.csproj` file extensions. This can be an array of multiple projects as well.
* `nugetDeps` has to be used to specify the NuGet dependency file. Unfortunately, these cannot be deterministically fetched without a lockfile. A script to fetch these is available as `passthru.fetch-deps`. This file can also be generated manually using the `nuget-to-nix` tool, which is available in nixpkgs.
* `packNupkg` is used to pack the project as a `nupkg`, and installs it to `$out/share`. If set to `true`, the derivation can be used as a dependency for another dotnet project by adding it to `projectReferences`.
* `projectReferences` can be used to resolve `ProjectReference` project items. Referenced projects can be packed with `buildDotnetModule` by setting the `packNupkg = true` attribute and passing a list of derivations to `projectReferences`. Since we are sharing referenced projects as NuGet packages, they must be added to csproj/fsproj files as `PackageReference` as well.
For example, if your project has a local dependency:
```xml
<ProjectReference Include="../foo/bar.fsproj" />
```
To enable discovery through `projectReferences` you would need to add:
```xml
<ProjectReference Include="../foo/bar.fsproj" />
<PackageReference Include="bar" Version="*" Condition=" '$(ContinuousIntegrationBuild)'=='true' "/>
```
* `executables` is used to specify which executables get wrapped to `$out/bin`, relative to `$out/lib/$pname`. If this is unset, all executables generated will get installed. If you do not want to install any, set this to `[]`. This gets done in the `preFixup` phase.
* `runtimeDeps` is used to wrap libraries into `LD_LIBRARY_PATH`. This is how dotnet usually handles runtime dependencies.
* `buildType` is used to change the type of build. Possible values are `Release`, `Debug`, etc. By default, this is set to `Release`.
* `dotnet-sdk` is useful in cases where you need to change what dotnet SDK is being used.
* `dotnet-runtime` is useful in cases where you need to change what dotnet runtime is being used. This can be either a regular dotnet runtime, or an aspnetcore.
* `dotnet-test-sdk` is useful in cases where unit tests expect a different dotnet SDK. By default, this is set to the `dotnet-sdk` attribute.
* `testProjectFile` is useful in cases where the regular project file does not contain the unit tests. It gets restored and built, but not installed. You may need to regenerate your nuget lockfile after setting this.
* `disabledTests` is used to disable running specific unit tests. This gets passed as: `dotnet test --filter "FullyQualifiedName!={}"`, to ensure compatibility with all unit test frameworks.
* `dotnetRestoreFlags` can be used to pass flags to `dotnet restore`.
* `dotnetBuildFlags` can be used to pass flags to `dotnet build`.
* `dotnetTestFlags` can be used to pass flags to `dotnet test`. Used only if `doCheck` is set to `true`.
* `dotnetInstallFlags` can be used to pass flags to `dotnet install`.
* `dotnetPackFlags` can be used to pass flags to `dotnet pack`. Used only if `packNupkg` is set to `true`.
* `dotnetFlags` can be used to pass flags to all of the above phases.

When packaging a new application, you need to fetch its dependencies. You can set `nugetDeps` to an empty string to make the derivation temporarily evaluate, and then run `nix-build -A package.passthru.fetch-deps` to generate its dependency fetching script. After running the script, you should have the location of the generated lockfile printed to the console. This can be copied to a stable directory. Note that if either `projectFile` or `nugetDeps` are unset, this script cannot be generated!

Here is an example `default.nix`, using some of the previously discussed arguments:
```nix
{ lib, buildDotnetModule, dotnetCorePackages, ffmpeg }:

let
  referencedProject = import ../../bar { ... };
in buildDotnetModule rec {
  pname = "someDotnetApplication";
  version = "0.1";

  src = ./.;
  projectFile = "src/project.sln";
  nugetDeps = ./deps.nix; # File generated with `nix-build -A package.passthru.fetch-deps`.

  projectReferences = [ referencedProject ]; # `referencedProject` must contain `nupkg` in the folder structure.

  dotnet-sdk = dotnetCorePackages.sdk_3_1;
  dotnet-runtime = dotnetCorePackages.net_5_0;
@ -107,6 +125,8 @@ buildDotnetModule rec {
  executables = [ "foo" ]; # This wraps "$out/lib/$pname/foo" to `$out/bin/foo`.
  executables = []; # Don't install any executables.

  packNupkg = true; # This packs the project as "foo-0.1.nupkg" at `$out/share`.

  runtimeDeps = [ ffmpeg ]; # This will wrap ffmpeg's library path into `LD_LIBRARY_PATH`.
}
```
@ -15,12 +15,12 @@ Modes of use of `emscripten`:
  If you want to work with `emcc`, `emconfigure` and `emmake` as you are used to from Ubuntu and similar distributions you can use these commands:

    * `nix-env -f "<nixpkgs>" -iA emscripten`
    * `nix-shell -p emscripten`

* **Declarative usage**:

  This mode is far more powerful, since it makes use of `nix` for dependency management of emscripten libraries and targets by using the `mkDerivation` which is implemented by `pkgs.emscriptenStdenv` and `pkgs.buildEmscriptenPackage`. The source for the packages is in `pkgs/top-level/emscripten-packages.nix` and the abstraction behind it in `pkgs/development/em-modules/generic/default.nix`. A sketch of such a package follows after this list. From the root of the nixpkgs repository:

  * build and install all packages:
    * `nix-env -iA emscriptenPackages`
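As a rough illustration of the declarative mode, a package built with `pkgs.buildEmscriptenPackage` might look like the sketch below. The package name, URL and phase contents are hypothetical, and the hash is a placeholder:

```nix
{ lib, buildEmscriptenPackage, fetchurl }:

buildEmscriptenPackage rec {
  pname = "example-lib";  # hypothetical package
  version = "1.0";

  src = fetchurl {
    url = "https://example.org/${pname}-${version}.tar.gz";
    sha256 = lib.fakeSha256;  # placeholder; replace with the real hash
  };

  # Phases use the emscripten wrappers instead of the usual toolchain.
  configurePhase = ''
    emconfigure ./configure --prefix=$out
  '';

  buildPhase = ''
    emmake make
  '';

  installPhase = ''
    emmake make install
  '';
}
```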
@ -92,7 +92,7 @@ For convenience, it also adds `dconf.lib` for a GIO module implementing a GSetti
- []{#ssec-gnome-hooks-glib} `glib` setup hook will populate `GSETTINGS_SCHEMAS_PATH` and then `wrapGAppsHook` will prepend it to `XDG_DATA_DIRS`.
- []{#ssec-gnome-hooks-gdk-pixbuf} `gdk-pixbuf` setup hook will populate `GDK_PIXBUF_MODULE_FILE` with the path to the biggest `loaders.cache` file from the dependencies containing [GdkPixbuf loaders](#ssec-gnome-gdk-pixbuf-loaders). This works fine when there are only two packages containing loaders (`gdk-pixbuf` and e.g. `librsvg`): it will choose the second one, reasonably expecting that it will be bigger since it describes an extra loader in addition to the default ones. But when there are more than two loader packages, this logic will break. One possible solution would be constructing a custom cache file for each package containing a program, like the `services/x11/gdk-pixbuf.nix` NixOS module does. `wrapGAppsHook` copies the `GDK_PIXBUF_MODULE_FILE` environment variable into the produced wrapper.
- []{#ssec-gnome-hooks-gtk-drop-icon-theme-cache} One of `gtk3`'s setup hooks will remove `icon-theme.cache` files from the package's icon theme directories to avoid conflicts. Icon theme packages should prevent this with `dontDropIconThemeCache = true;`, as in the sketch after this list.
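For instance, an icon theme derivation might opt out of the cache removal like this (a minimal sketch; the package name is hypothetical):

```nix
stdenv.mkDerivation {
  pname = "my-icon-theme";  # hypothetical package
  version = "1.0";

  # Keep the icon-theme.cache files shipped by this theme.
  dontDropIconThemeCache = true;

  # ...
}
```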
@ -12,8 +12,7 @@ The function `buildGoModule` builds Go programs managed with Go modules. It buil
The following is an example expression using `buildGoModule`; these arguments are of special significance to the function:

- `vendorSha256`: is the hash of the output of the intermediate fetcher derivation. `vendorSha256` can also take `null` as an input. When `null` is used as a value, rather than fetching the dependencies and vendoring them, we use the vendoring included within the source repo. If you'd like to not have to update this field on dependency changes, run `go mod vendor` in your source repo and set `vendorSha256 = null;`
- `proxyVendor`: Fetches (`go mod download`) and proxies the vendor directory. This is useful if your code depends on C code and `go mod tidy` does not include the needed sources to build, or if any dependency has case-insensitive conflicts which will produce platform-dependent `vendorSha256` checksums.
```nix
pet = buildGoModule rec {
@ -29,14 +28,11 @@ pet = buildGoModule rec {
  vendorSha256 = "1879j77k96684wi554rkjxydrj8g3hpp0kvxz03sd8dmwr3lh83j";

  meta = with lib; {
    description = "Simple command-line snippet manager, written in Go";
    homepage = "https://github.com/knqyf263/pet";
    license = licenses.mit;
    maintainers = with maintainers; [ kalbasit ];
  };
}
```
@ -5,10 +5,7 @@
The easiest way to get a working idris version is to install the `idris` attribute:

```ShellSession
$ nix-env -f "<nixpkgs>" -iA idris
```

This however only provides the `prelude` and `base` libraries. To install idris with additional libraries, you can use the `idrisPackages.with-packages` function, e.g. in an overlay in `~/.config/nixpkgs/overlays/my-idris.nix`:
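Such an overlay might look like the following sketch; the chosen libraries `contrib` and `pruviloj` are just examples:

```nix
self: super: {
  # An Idris interpreter bundled with extra libraries.
  myIdris = with self.idrisPackages; with-packages [ contrib pruviloj ];
}
```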
@ -85,7 +85,7 @@ you will still need to commit the modified version of the lock files, but at lea
Each tool has an abstraction to build just the node_modules (dependencies) directory. You can always use stdenv.mkDerivation with the node_modules to build the package: symlink the node_modules directory and then use the package build command. The node_modules abstraction can also be used to build some web framework frontends. For an example of this, see how [plausible](https://github.com/NixOS/nixpkgs/blob/master/pkgs/servers/web-apps/plausible/default.nix) is built: it uses mkYarnModules to make the derivation containing node_modules, and when building the frontend it simply symlinks that node_modules directory. A rough sketch of this pattern follows.
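The sketch below assumes a Yarn-based project and the `mkYarnModules` helper; the package names, build command and output directory are hypothetical:

```nix
{ stdenv, mkYarnModules, nodejs }:

let
  # Build only the dependencies, driven by package.json and yarn.lock.
  modules = mkYarnModules {
    pname = "my-frontend-modules";  # hypothetical
    version = "1.0.0";
    packageJSON = ./package.json;
    yarnLock = ./yarn.lock;
  };
in stdenv.mkDerivation {
  pname = "my-frontend";  # hypothetical
  version = "1.0.0";
  src = ./.;

  nativeBuildInputs = [ nodejs ];

  buildPhase = ''
    # Symlink the pre-built node_modules, then run the package build command.
    ln -s ${modules}/node_modules node_modules
    npm run build
  '';

  installPhase = ''
    cp -r dist $out  # assumes the build emits a dist/ directory
  '';
}
```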
## Javascript packages inside nixpkgs {#javascript-packages-nixpkgs}

The `pkgs/development/node-packages` folder contains a generated collection of
[NPM packages](https://npmjs.com/) that can be installed with the Nix package
@ -121,12 +121,14 @@ requires `node-gyp-build`, so [we override](https://github.com/NixOS/nixpkgs/blo
};
```

### Adding and Updating Javascript packages in nixpkgs

To add a package from NPM to nixpkgs:

1. Modify `pkgs/development/node-packages/node-packages.json` to add, update
   or remove package entries to have it included in `nodePackages` and
   `nodePackages_latest`.
2. Run the script: `./pkgs/development/node-packages/generate.sh`.
3. Build your new package to test your changes:
   `cd /path/to/nixpkgs && nix-build -A nodePackages.<new-or-updated-package>`.
   To build against the latest stable Current Node.js version (e.g. 14.x):
@ -137,6 +139,26 @@ For more information about the generation process, consult the
[README.md](https://github.com/svanderburg/node2nix) file of the `node2nix`
tool.
To update NPM packages in nixpkgs, run the same `generate.sh` script:
```sh
./pkgs/development/node-packages/generate.sh
```
#### Git protocol error
Some packages may have Git dependencies from GitHub specified with `git://`.
GitHub has
[disabled unencrypted Git connections](https://github.blog/2021-09-01-improving-git-protocol-security-github/#no-more-unauthenticated-git),
so you may see the following error when running the generate script:
`The unauthenticated git protocol on port 9418 is no longer supported`.
Use the following Git configuration to resolve the issue:
```sh
git config --global url."https://github.com/".insteadOf git://github.com/
```
## Tool specific instructions {#javascript-tool-specific}

### node2nix {#javascript-node2nix}
@ -128,7 +128,7 @@ Let's present the luarocks way first and the manual one in a second time.
### Packaging a library on luarocks {#packaging-a-library-on-luarocks}

[Luarocks.org](https://luarocks.org/) is the main repository of lua packages.
The site proposes two types of packages, the rockspec and the src.rock
(equivalent of a [rockspec](https://github.com/luarocks/luarocks/wiki/Rockspec-format) but with the source).
These packages can have different build types such as `cmake`, `builtin`, etc.
@ -32,14 +32,14 @@ Given that most of the OCaml ecosystem is now built with dune, nixpkgs includes
Here is a simple package example.

- It defines an (optional) attribute `minimalOCamlVersion` (see note below)
  that will be used to throw a descriptive evaluation error if building with
  an older OCaml is attempted.
- It uses the `fetchFromGitHub` fetcher to get its source.
- `useDune2 = true` ensures that Dune version 2 is used for the
  build (this is the default; set to `false` to use Dune version 1).
- It sets the optional `doCheck` attribute such that tests will be run with
  `dune runtest -p angstrom` after the build (`dune build -p angstrom`) is
  complete.
@ -117,3 +117,11 @@ buildDunePackage rec {
  };
}
```
Note about `minimalOCamlVersion`: a deprecated version of this argument was
spelled `minimumOCamlVersion`; setting the old attribute wrongly modifies the
derivation hash and is therefore inappropriate. As a piece of technical debt,
currently packaged libraries may still use the old spelling: maintainers are
invited to fix this when updating packages. Massive renaming is strongly
discouraged, as it would be challenging to review, difficult to test, and will
cause unnecessary rebuilds.
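For reference, the example described by the list above looks roughly like the following sketch; the hash is a placeholder to be replaced with the real one:

```nix
{ lib, fetchFromGitHub, buildDunePackage }:

buildDunePackage rec {
  pname = "angstrom";
  version = "0.15.0";

  minimalOCamlVersion = "4.04";
  useDune2 = true;

  src = fetchFromGitHub {
    owner = "inhabitedtype";
    repo = pname;
    rev = version;
    sha256 = lib.fakeSha256;  # placeholder
  };

  # Run `dune runtest -p angstrom` after the build.
  doCheck = true;
}
```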
@ -24,18 +24,10 @@ You can test building an Octave package as follows:
$ nix-build -A octavePackages.symbolic
```

To install it into your user profile, run this command from the root of the repository:

```ShellSession
$ nix-env -f. -iA octavePackages.symbolic
```

You can build Octave with packages by using the `withPackages` passed-through function.
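For example, an Octave interpreter bundling the `symbolic` package can be expressed as (illustrative):

```nix
octave.withPackages (ps: with ps; [ symbolic ])
```

This expression can be used anywhere a package is expected, e.g. in a `nix-shell -p` invocation.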
@ -58,13 +58,7 @@ in `all-packages.nix`. You can test building a Perl package as follows:
$ nix-build -A perlPackages.ClassC3
```

To install it with `nix-env` instead: `nix-env -f. -iA perlPackages.ClassC3`.

So what does `buildPerlPackage` do? It does the following:
@ -135,9 +129,11 @@ This will remove the `-I` flags from the shebang line, rewrite them in the `use
Nix expressions for Perl packages can be generated (almost) automatically from CPAN. This is done by the program `nix-generate-from-cpan`, which can be installed as follows:

```ShellSession
$ nix-env -f "<nixpkgs>" -iA nix-generate-from-cpan
```

Substitute `<nixpkgs>` with the path of a nixpkgs clone to use the latest version.

This program takes a Perl module name, looks it up on CPAN, fetches and unpacks the corresponding package, and prints a Nix expression on standard output. For example:

```ShellSession
@ -764,7 +764,7 @@ and in this case the `python38` interpreter is automatically used.
### Interpreters {#interpreters}

Versions 2.7, 3.7, 3.8 and 3.9 of the CPython interpreter are available as
respectively `python27`, `python37`, `python38` and `python39`. The
aliases `python2` and `python3` correspond to respectively `python27` and
`python39`. The attribute `python` maps to `python2`. The PyPy interpreters
@ -834,6 +834,7 @@ sets are
* `pkgs.python38Packages`
* `pkgs.python39Packages`
* `pkgs.python310Packages`
* `pkgs.python311Packages`
* `pkgs.pypyPackages`

and the aliases
@ -978,6 +979,31 @@ with import <nixpkgs> {};
in python.withPackages(ps: [ps.blaze])).env
```
#### Optional extra dependencies
Some packages define optional dependencies for additional features. With
`setuptools` this is called `extras_require` and `flit` calls it `extras-require`. A
method for supporting this is by declaring the extras of a package in its
`passthru`, e.g. in case of the package `dask`
```nix
passthru.extras-require = {
  complete = [ distributed ];
};
```
and letting the package requiring the extra add the list to its dependencies
```nix
propagatedBuildInputs = [
  ...
] ++ dask.extras-require.complete;
```
Note this method is preferred over adding parameters to builders, as that can
result in packages depending on different variants and thereby causing
collisions.
#### `buildPythonApplication` function {#buildpythonapplication-function}

The `buildPythonApplication` function is practically the same as
@ -995,18 +1021,18 @@ called with `callPackage` and passed `python` or `pythonPackages` (possibly
specifying an interpreter version), like this:
```nix
{ lib, python3 }:

python3.pkgs.buildPythonApplication rec {
  pname = "luigi";
  version = "2.7.9";

  src = python3.pkgs.fetchPypi {
    inherit pname version;
    sha256 = "035w8gqql36zlan0xjrzz9j4lh9hs0qrsgnbyw07qs7lnkvbdv9x";
  };

  propagatedBuildInputs = with python3.pkgs; [ tornado python-daemon ];

  meta = with lib; {
    ...
@ -293,7 +293,7 @@ Test flags, e.g., `--package foo`, can be passed to `cargo test` via the
Another attribute, called `checkFlags`, is used to pass arguments to the test
binary itself, as stated
[here](https://doc.rust-lang.org/cargo/commands/cargo-test.html).
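For example, both kinds of flags might be set like this (a sketch; the package selector and the skipped test name are hypothetical):

```nix
rustPlatform.buildRustPackage {
  # ...
  cargoTestFlags = [ "--package" "foo" ];          # forwarded to `cargo test`
  checkFlags = [ "--skip=network::flaky_test" ];   # forwarded to the test binary
}
```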
#### Tests relying on the structure of the `target/` directory {#tests-relying-on-the-structure-of-the-target-directory}
@ -464,6 +464,8 @@ you of the correct hash.
  be disabled by setting `dontUseCargoParallelTests`.
* `cargoInstallHook`: install binaries and static/shared libraries
  that were built using `cargoBuildHook`.
* `bindgenHook`: for crates which use `bindgen` as a build dependency, lets
  `bindgen` find `libclang` and `libclang` find the libraries in `buildInputs`.
### Examples {#examples}
@ -309,9 +309,9 @@ Sample output2:
## Adding new plugins to nixpkgs {#adding-new-plugins-to-nixpkgs}

Nix expressions for Vim plugins are stored in [pkgs/applications/editors/vim/plugins](https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/editors/vim/plugins). For the vast majority of plugins, Nix expressions are automatically generated by running [`./update.py`](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/update.py). This creates a [generated.nix](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/generated.nix) file based on the plugins listed in [vim-plugin-names](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/vim-plugin-names). Plugins are listed in alphabetical order in `vim-plugin-names` using the format `[github username]/[repository]@[gitref]`. For example https://github.com/scrooloose/nerdtree becomes `scrooloose/nerdtree`.

Some plugins require overrides in order to function properly. Overrides are placed in [overrides.nix](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/overrides.nix). Overrides are most often required when a plugin requires some dependencies, or extra steps are required during the build process. For example `deoplete-fish` requires both `deoplete-nvim` and `vim-fish`, and so the following override was added:
```nix
deoplete-fish = super.deoplete-fish.overrideAttrs(old: {
@ -330,13 +330,13 @@ Finally, there are some plugins that are also packaged in nodePackages because t
Run the update script with a GitHub API token that has at least `public_repo` access. Running the script without the token is likely to result in rate-limiting (429 errors). For steps on creating an API token, please refer to [GitHub's token documentation](https://docs.github.com/en/free-pro-team@latest/github/authenticating-to-github/creating-a-personal-access-token).

```sh
GITHUB_API_TOKEN=my_token ./pkgs/applications/editors/vim/plugins/update.py
```

Alternatively, set the number of processes to a lower count to avoid rate-limiting.

```sh
./pkgs/applications/editors/vim/plugins/update.py --proc 1
```

## Important repositories {#important-repositories}
@ -192,10 +192,6 @@ meta.hydraPlatforms = [];
If set to `true`, the package is marked as "broken", meaning that it won't show up in `nix-env -qa`, and cannot be built or installed. Such packages should be removed from Nixpkgs eventually unless they are fixed.

## Licenses {#sec-meta-license}

The `meta.license` attribute should preferably contain a value from `lib.licenses` defined in [`nixpkgs/lib/licenses.nix`](https://github.com/NixOS/nixpkgs/blob/master/lib/licenses.nix), or an in-place license description of the same format if the license is unlikely to be useful in another expression.
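For example, a package distributed under the MIT license would set (illustrative):

```nix
meta.license = lib.licenses.mit;
```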
@ -319,10 +319,16 @@ For information about how to run the updates, execute `nix-shell maintainers/scr
## Phases {#sec-stdenv-phases}

`stdenv.mkDerivation` sets the Nix [derivation](https://nixos.org/manual/nix/stable/expressions/derivations.html#derivations)'s builder to a script that loads the stdenv `setup.sh` bash library and calls `genericBuild`. Most packaging functions rely on this default builder.

This generic command invokes a number of *phases*. Package builds are split into phases to make it easier to override specific parts of the build (e.g., unpacking the sources or installing the binaries).

Each phase can be overridden in its entirety either by setting the environment variable `namePhase` to a string containing some shell commands to be executed, or by redefining the shell function `namePhase`. The former is convenient to override a phase from the derivation, while the latter is convenient from a build script. However, typically one only wants to *add* some commands to a phase, e.g. by defining `postInstall` or `preFixup`, as skipping some of the default actions may have unexpected consequences. The default script for each phase is defined in the file `pkgs/stdenv/generic/setup.sh`.

When overriding a phase, for example `installPhase`, it is important to start with `runHook preInstall` and end it with `runHook postInstall`, otherwise `preInstall` and `postInstall` will not be run. Even if you don't use them directly, it is good practice to do so anyway for downstream users who would want to add a `postInstall` by overriding your derivation.
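For instance, an overridden `installPhase` that keeps both hooks might look like this (the binary name `foo` is hypothetical):

```nix
installPhase = ''
  runHook preInstall

  # Install the hypothetical `foo` binary produced by buildPhase.
  mkdir -p $out/bin
  cp foo $out/bin/

  runHook postInstall
'';
```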
While inside an interactive `nix-shell`, if you wanted to run all phases in the order they would be run in an actual build, you can invoke `genericBuild` yourself.
### Controlling phases {#ssec-controlling-phases} ### Controlling phases {#ssec-controlling-phases}
There are a number of variables that control what phases are executed and in what order: There are a number of variables that control what phases are executed and in what order:
@ -333,7 +339,8 @@ There are a number of variables that control what phases are executed and in wha
Specifies the phases. You can change the order in which phases are executed, or add new phases, by setting this variable. If it's not set, the default value is used, which is `$prePhases unpackPhase patchPhase $preConfigurePhases configurePhase $preBuildPhases buildPhase checkPhase $preInstallPhases installPhase fixupPhase installCheckPhase $preDistPhases distPhase $postPhases`.

It is discouraged to set this variable, as it is easy to miss some important functionality hidden in some of the less obviously needed phases (like `fixupPhase`, which patches the shebang of scripts).

Usually, if you just want to add a few phases, it's more convenient to set one of the variables below (such as `preInstallPhases`).
##### `prePhases` {#var-stdenv-prePhases}
@ -790,7 +797,7 @@ Hook executed at the start of the distribution phase.
Hook executed at the end of the distribution phase.

## Shell functions and utilities {#ssec-stdenv-functions}

The standard environment provides a number of useful functions.
@ -814,6 +821,19 @@ Theres many more kinds of arguments, they are documented in `nixpkgs/pkgs/bui
Using the `makeBinaryWrapper` implementation is usually preferred, as it creates a tiny _compiled_ wrapper executable, that can be used as a shebang interpreter. This is needed mostly on Darwin, where shebangs cannot point to scripts, [due to a limitation with the `execve`-syscall](https://stackoverflow.com/questions/67100831/macos-shebang-with-absolute-path-not-working). Compiled wrappers generated by `makeBinaryWrapper` can be inspected with `less <path-to-wrapper>` - by scrolling past the binary data you should be able to see the shell command that generated the executable and there see the environment variables that were injected into the wrapper.
### `remove-references-to -t` \<storepath\> [ `-t` \<storepath\> ... ] \<file\> ... {#fun-remove-references-to}
Removes the references of the specified files to the specified store files. This is done without changing the size of the file by replacing the hash by `eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee`, and should work on compiled executables. This is meant to be used to remove the dependency of the output on inputs that are known to be unnecessary at runtime. Of course, reckless usage will break the patched programs.
To use this, add `removeReferencesTo` to `nativeBuildInputs`.
As `remove-references-to` is an actual executable and not a shell function, it can be used with `find`.
Example removing all references to the compiler in the output:
```nix
postInstall = ''
  find "$out" -type f -exec remove-references-to -t ${stdenv.cc} '{}' +
'';
```
### `substitute` \<infile\> \<outfile\> \<subs\> {#fun-substitute}

Performs string substitution on the contents of \<infile\>, writing the result to \<outfile\>. The substitutions in \<subs\> are of the following form:
@ -77,7 +77,7 @@ In Nixpkgs, we have multiple implementations of the BLAS/LAPACK numerical linear
The Nixpkgs attribute is `openblas` for ILP64 (integer width = 64 bits) and `openblasCompat` for LP64 (integer width = 32 bits). `openblasCompat` is the default.

- [LAPACK reference](http://www.netlib.org/lapack/) (also provides BLAS and CBLAS)

  The Nixpkgs attribute is `lapack-reference`.
@ -117,7 +117,23 @@ $ LD_LIBRARY_PATH=$(nix-build -A mkl)/lib${LD_LIBRARY_PATH:+:}$LD_LIBRARY_PATH n
Intel MKL requires an `openmp` implementation when running with multiple processors. By default, `mkl` will use Intel's `iomp` implementation if no other is specified, but this is a runtime-only dependency and binary compatible with the LLVM implementation. To use that one instead, Intel recommends users set it with `LD_PRELOAD`. Note that `mkl` is only available on `x86_64-linux` and `x86_64-darwin`. Moreover, Hydra is not building and distributing pre-compiled binaries using it.

To override `blas` and `lapack` with their reference implementations (i.e. for development purposes), one can use the following overlay:
```nix
self: super:

{
  blas = super.blas.override {
    blasProvider = self.lapack-reference;
  };

  lapack = super.lapack.override {
    lapackProvider = self.lapack-reference;
  };
}
```
For BLAS/LAPACK switching to work correctly, all packages must depend on `blas` or `lapack`. This ensures that only one BLAS/LAPACK library is used at one time. There are two versions of BLAS/LAPACK currently in the wild, `LP64` (integer size = 32 bits) and `ILP64` (integer size = 64 bits). The attributes `blas` and `lapack` are `LP64` by default. Their `ILP64` versions are provided through the attributes `blas-ilp64` and `lapack-ilp64`. Some software needs special flags or patches to work with `ILP64`. You can check if `ILP64` is used in Nixpkgs with `blas.isILP64` and `lapack.isILP64`. Some software does NOT work with `ILP64`, and derivations need to specify an assertion to prevent this. You can prevent `ILP64` from being used with the following:
```nix
{ stdenv, blas, lapack, ... }:
@ -18,58 +18,16 @@
in
{
  lib = lib.extend (final: prev: {

    nixos = import ./nixos/lib { lib = final; };

    nixosSystem = args:
      import ./nixos/lib/eval-config.nix (args // {
        modules = args.modules ++ [ {
          system.nixos.versionSuffix =
            ".${final.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}.${self.shortRev or "dirty"}";
          system.nixos.revision = final.mkIf (self ? rev) self.rev;
        } ];
      });
  });
@ -2,35 +2,33 @@
rec {

  /* Throw if pred is false, else return pred.
     Intended to be used to augment asserts with helpful error messages.

     Example:
       assertMsg false "nope"
       stderr> error: nope

       assert assertMsg ("foo" == "bar") "foo is not bar, silly"; ""
       stderr> error: foo is not bar, silly

     Type:
       assertMsg :: Bool -> String -> Bool
  */
  # TODO(Profpatsch): add tests that check stderr
  assertMsg = pred: msg:
    pred || builtins.throw msg;

  /* Specialized `assertMsg` for checking if val is one of the elements
     of a list. Useful for checking enums.

     Example:
       let sslLibrary = "libressl";
       in assertOneOf "sslLibrary" sslLibrary [ "openssl" "bearssl" ]
       stderr> error: sslLibrary must be one of [
       stderr>   "openssl"
       stderr>   "bearssl"
       stderr> ], but is: "libressl"

     Type:
       assertOneOf :: String -> ComparableVal -> List ComparableVal -> Bool
@ -3,9 +3,9 @@
let
  inherit (builtins) head tail length;
  inherit (lib.trivial) id;
  inherit (lib.strings) concatStringsSep concatMapStringsSep escapeNixIdentifier sanitizeDerivationName;
  inherit (lib.lists) foldr foldl' concatMap concatLists elemAt all partition groupBy take foldl;
in

rec {
@ -73,11 +73,108 @@ rec {
       getAttrFromPath ["z" "z"] x
       => error: cannot find attribute `z.z'
  */
  getAttrFromPath = attrPath:
    let errorMsg = "cannot find attribute `" + concatStringsSep "." attrPath + "'";
    in attrByPath attrPath (abort errorMsg);
  /* Update or set specific paths of an attribute set.

     Takes a list of updates to apply and an attribute set to apply them to,
     and returns the attribute set with the updates applied. Updates are
     represented as { path = ...; update = ...; } values, where `path` is a
     list of strings representing the attribute path that should be updated,
     and `update` is a function that takes the old value at that attribute path
     as an argument and returns the new
     value it should be.

     Properties:

     - Updates to deeper attribute paths are applied before updates to more
       shallow attribute paths

     - Multiple updates to the same attribute path are applied in the order
       they appear in the update list

     - If any but the last `path` element leads into a value that is not an
       attribute set, an error is thrown

     - If there is an update for an attribute path that doesn't exist,
       accessing the argument in the update function causes an error, but
       intermediate attribute sets are implicitly created as needed

     Example:
       updateManyAttrsByPath [
         {
           path = [ "a" "b" ];
           update = old: { d = old.c; };
         }
         {
           path = [ "a" "b" "c" ];
           update = old: old + 1;
         }
         {
           path = [ "x" "y" ];
           update = old: "xy";
         }
       ] { a.b.c = 0; }
       => { a = { b = { d = 1; }; }; x = { y = "xy"; }; }
  */
  updateManyAttrsByPath = let
    # When recursing into attributes, instead of updating the `path` of each
    # update using `tail`, which needs to allocate an entirely new list,
    # we just pass a prefix length to use and make sure to only look at the
    # path without the prefix length, so that we can reuse the original list
    # entries.
    go = prefixLength: hasValue: value: updates:
      let
        # Splits updates into ones on this level (split.right)
        # And ones on levels further down (split.wrong)
        split = partition (el: length el.path == prefixLength) updates;

        # Groups updates on further down levels into the attributes they modify
        nested = groupBy (el: elemAt el.path prefixLength) split.wrong;

        # Applies only nested modification to the input value
        withNestedMods =
          # Return the value directly if we don't have any nested modifications
          if split.wrong == [] then
            if hasValue then value
            else
              # Throw an error if there is no value. This `head` call here is
              # safe, but only in this branch since `go` could only be called
              # with `hasValue == false` for nested updates, in which case
              # it's also always called with at least one update
              let updatePath = (head split.right).path; in
              throw
              ( "updateManyAttrsByPath: Path '${showAttrPath updatePath}' does "
              + "not exist in the given value, but the first update to this "
              + "path tries to access the existing value.")
          else
            # If there are nested modifications, try to apply them to the value
            if ! hasValue then
              # But if we don't have a value, just use an empty attribute set
              # as the value, but simplify the code a bit
              mapAttrs (name: go (prefixLength + 1) false null) nested
            else if isAttrs value then
              # If we do have a value and it's an attribute set, override it
              # with the nested modifications
              value //
              mapAttrs (name: go (prefixLength + 1) (value ? ${name}) value.${name}) nested
            else
              # However if it's not an attribute set, we can't apply the nested
              # modifications, throw an error
              let updatePath = (head split.wrong).path; in
              throw
              ( "updateManyAttrsByPath: Path '${showAttrPath updatePath}' needs to "
              + "be updated, but path '${showAttrPath (take prefixLength updatePath)}' "
              + "of the given value is not an attribute set, so we can't "
              + "update an attribute inside of it.");

        # We get the final result by applying all the updates on this level
        # after having applied all the nested updates
        # We use foldl instead of foldl' so that in case of multiple updates,
        # intermediate values aren't evaluated if not needed
      in foldl (acc: el: el.update acc) withNestedMods split.right;
  in updates: value: go 0 true value updates;
  /* Return the specified attributes from a set.

     Example:
@ -154,12 +251,12 @@ rec {
       foldAttrs (n: a: [n] ++ a) [] [{ a = 2; } { a = 3; }]
       => { a = [ 2 3 ]; }
  */
  foldAttrs = op: nul:
    foldr (n: a:
      foldr (name: o:
        o // { ${name} = op n.${name} (a.${name} or nul); }
      ) a (attrNames n)
    ) {};
  /* Recursively collect sets that verify a given predicate named `pred'
@ -276,7 +373,7 @@ rec {
  /* Like `mapAttrsRecursive', but it takes an additional predicate
     function that tells it whether to recurse into an attribute
     set. If it returns false, `mapAttrsRecursiveCond' does not
     recurse, but does apply the map function. If it returns true, it
     does recurse, and does not apply the map function.
@ -295,14 +392,14 @@ rec {
  */
  mapAttrsRecursiveCond = cond: f: set:
    let
      recurse = path:
        let
          g =
            name: value:
            if isAttrs value && cond value
              then recurse (path ++ [name]) value
              else f (path ++ [name]) value;
        in mapAttrs g;
    in recurse [] set;
@ -327,7 +424,7 @@ rec {
       isDerivation "foobar"
       => false
  */
  isDerivation = x: x.type or null == "derivation";

  /* Converts a store path to a fake derivation. */
  toDerivation = path:
@ -369,7 +466,7 @@ rec {
      value = f name (catAttrs name sets);
    }) names);

  /* Implementation note: Common names appear multiple times in the list of
     names, hopefully this does not affect the system because the maximal
     laziness avoid computing twice the same expression and listToAttrs does
     not care about duplicated attribute names.
@ -378,7 +475,8 @@ rec {
       zipAttrsWith (name: values: values) [{a = "x";} {a = "y"; b = "z";}]
       => { a = ["x" "y"]; b = ["z"] }
  */
  zipAttrsWith =
    builtins.zipAttrsWith or (f: sets: zipAttrsWithNames (concatMap attrNames sets) f sets);

  /* Like `zipAttrsWith' with `(name: values: values)' as the function.

     Example:
@ -419,8 +517,8 @@ rec {
    let f = attrPath:
      zipAttrsWith (n: values:
        let here = attrPath ++ [n]; in
        if length values == 1
        || pred here (elemAt values 1) (head values) then
          head values
        else
          f here values
@ -446,10 +544,7 @@ rec {
       }
  */
  recursiveUpdate = recursiveUpdateUntil (path: lhs: rhs: !(isAttrs lhs && isAttrs rhs));
  /* Returns true if the pattern is contained in the set. False otherwise.
@ -458,8 +553,8 @@ rec {
=> true => true
*/ */
matchAttrs = pattern: attrs: assert isAttrs pattern; matchAttrs = pattern: attrs: assert isAttrs pattern;
foldr and true (attrValues (zipAttrsWithNames (attrNames pattern) (n: values: all id (attrValues (zipAttrsWithNames (attrNames pattern) (n: values:
let pat = head values; val = head (tail values); in let pat = head values; val = elemAt values 1; in
if length values == 1 then false if length values == 1 then false
else if isAttrs pat then isAttrs val && matchAttrs pat val else if isAttrs pat then isAttrs val && matchAttrs pat val
else pat == val else pat == val
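A usage sketch of `matchAttrs`, whose behavior the rewrite above keeps intact:

lib.matchAttrs
  { kernel = "linux"; cpu = { bits = 64; }; }
  { kernel = "linux"; cpu = { bits = 64; vendor = "unknown"; }; arch = "x86_64"; }
# => true  (nested attrsets are matched recursively; extra attributes are ignored)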
@ -479,6 +574,20 @@ rec {
overrideExisting = old: new: overrideExisting = old: new:
mapAttrs (name: value: new.${name} or value) old; mapAttrs (name: value: new.${name} or value) old;
/* Turns a list of strings into a human-readable description of those
strings represented as an attribute path. The result of this function is
not intended to be machine-readable.
Example:
showAttrPath [ "foo" "10" "bar" ]
=> "foo.\"10\".bar"
showAttrPath []
=> "<root attribute path>"
*/
showAttrPath = path:
if path == [] then "<root attribute path>"
else concatMapStringsSep "." escapeNixIdentifier path;
/* Get a package output. /* Get a package output.
If no output is found, fallback to `.out` and then to the default. If no output is found, fallback to `.out` and then to the default.
@ -66,7 +66,8 @@ let
stringLength sub substring tail trace; stringLength sub substring tail trace;
inherit (self.trivial) id const pipe concat or and bitAnd bitOr bitXor inherit (self.trivial) id const pipe concat or and bitAnd bitOr bitXor
bitNot boolToString mergeAttrs flip mapNullable inNixShell isFloat min max bitNot boolToString mergeAttrs flip mapNullable inNixShell isFloat min max
importJSON importTOML warn warnIf info showWarnings nixpkgsVersion version importJSON importTOML warn warnIf throwIfNot checkListOfEnum
info showWarnings nixpkgsVersion version isInOldestRelease
mod compare splitByAndCompare functionArgs setFunctionArgs isFunction mod compare splitByAndCompare functionArgs setFunctionArgs isFunction
toHexString toBaseDigits; toHexString toBaseDigits;
inherit (self.fixedPoints) fix fix' converge extends composeExtensions inherit (self.fixedPoints) fix fix' converge extends composeExtensions
@ -77,9 +78,10 @@ let
mapAttrs' mapAttrsToList mapAttrsRecursive mapAttrsRecursiveCond mapAttrs' mapAttrsToList mapAttrsRecursive mapAttrsRecursiveCond
genAttrs isDerivation toDerivation optionalAttrs genAttrs isDerivation toDerivation optionalAttrs
zipAttrsWithNames zipAttrsWith zipAttrs recursiveUpdateUntil zipAttrsWithNames zipAttrsWith zipAttrs recursiveUpdateUntil
recursiveUpdate matchAttrs overrideExisting getOutput getBin recursiveUpdate matchAttrs overrideExisting showAttrPath getOutput getBin
getLib getDev getMan chooseDevOutputs zipWithNames zip getLib getDev getMan chooseDevOutputs zipWithNames zip
recurseIntoAttrs dontRecurseIntoAttrs cartesianProductOfSets; recurseIntoAttrs dontRecurseIntoAttrs cartesianProductOfSets
updateManyAttrsByPath;
inherit (self.lists) singleton forEach foldr fold foldl foldl' imap0 imap1 inherit (self.lists) singleton forEach foldr fold foldl foldl' imap0 imap1
concatMap flatten remove findSingle findFirst any all count concatMap flatten remove findSingle findFirst any all count
optional optionals toList range partition zipListsWith zipLists optional optionals toList range partition zipListsWith zipLists
@ -110,21 +112,23 @@ let
cleanSource sourceByRegex sourceFilesBySuffices cleanSource sourceByRegex sourceFilesBySuffices
commitIdFromGitRepo cleanSourceWith pathHasContext commitIdFromGitRepo cleanSourceWith pathHasContext
canCleanSource pathIsRegularFile pathIsGitRepo; canCleanSource pathIsRegularFile pathIsGitRepo;
inherit (self.modules) evalModules unifyModuleSyntax inherit (self.modules) evalModules setDefaultModuleLocation
applyIfFunction mergeModules unifyModuleSyntax applyIfFunction mergeModules
mergeModules' mergeOptionDecls evalOptionValue mergeDefinitions mergeModules' mergeOptionDecls evalOptionValue mergeDefinitions
pushDownProperties dischargeProperties filterOverrides pushDownProperties dischargeProperties filterOverrides
sortProperties fixupOptionType mkIf mkAssert mkMerge mkOverride sortProperties fixupOptionType mkIf mkAssert mkMerge mkOverride
mkOptionDefault mkDefault mkImageMediaOverride mkForce mkVMOverride mkOptionDefault mkDefault mkImageMediaOverride mkForce mkVMOverride
mkFixStrictness mkOrder mkBefore mkAfter mkAliasDefinitions mkFixStrictness mkOrder mkBefore mkAfter mkAliasDefinitions
mkAliasAndWrapDefinitions fixMergeModules mkRemovedOptionModule mkAliasAndWrapDefinitions fixMergeModules mkRemovedOptionModule
mkRenamedOptionModule mkMergedOptionModule mkChangedOptionModule mkRenamedOptionModule mkRenamedOptionModuleWith
mkMergedOptionModule mkChangedOptionModule
mkAliasOptionModule mkDerivedConfig doRename; mkAliasOptionModule mkDerivedConfig doRename;
inherit (self.options) isOption mkEnableOption mkSinkUndeclaredOptions inherit (self.options) isOption mkEnableOption mkSinkUndeclaredOptions
mergeDefaultOption mergeOneOption mergeEqualOption getValues mergeDefaultOption mergeOneOption mergeEqualOption mergeUniqueOption
getFiles optionAttrSetToDocList optionAttrSetToDocList' getValues getFiles
optionAttrSetToDocList optionAttrSetToDocList'
scrubOptionValue literalExpression literalExample literalDocBook scrubOptionValue literalExpression literalExample literalDocBook
showOption showFiles unknownModule mkOption; showOption showFiles unknownModule mkOption mkPackageOption;
inherit (self.types) isType setType defaultTypeMerge defaultFunctor inherit (self.types) isType setType defaultTypeMerge defaultFunctor
isOptionType mkOptionType; isOptionType mkOptionType;
inherit (self.asserts) inherit (self.asserts)
@ -67,6 +67,11 @@ in mkLicense lset) ({
free = false; free = false;
}; };
aom = {
fullName = "Alliance for Open Media Patent License 1.0";
url = "https://aomedia.org/license/patent-license/";
};
apsl20 = { apsl20 = {
spdxId = "APSL-2.0"; spdxId = "APSL-2.0";
fullName = "Apple Public Source License 2.0"; fullName = "Apple Public Source License 2.0";
@ -460,6 +465,11 @@ in mkLicense lset) ({
spdxId = "imagemagick"; spdxId = "imagemagick";
}; };
imlib2 = {
spdxId = "Imlib2";
fullName = "Imlib2 License";
};
inria-compcert = { inria-compcert = {
fullName = "INRIA Non-Commercial License Agreement for the CompCert verified compiler"; fullName = "INRIA Non-Commercial License Agreement for the CompCert verified compiler";
url = "https://compcert.org/doc/LICENSE.txt"; url = "https://compcert.org/doc/LICENSE.txt";
@ -586,6 +596,16 @@ in mkLicense lset) ({
spdxId = "MIT"; spdxId = "MIT";
fullName = "MIT License"; fullName = "MIT License";
}; };
# https://spdx.org/licenses/MIT-feh.html
mit-feh = {
spdxId = "MIT-feh";
fullName = "feh License";
};
mitAdvertising = {
spdxId = "MIT-advertising";
fullName = "Enlightenment License (e16)";
};
mpl10 = { mpl10 = {
spdxId = "MPL-1.0"; spdxId = "MPL-1.0";
@ -4,6 +4,7 @@
let let
inherit (lib.strings) toInt; inherit (lib.strings) toInt;
inherit (lib.trivial) compare min; inherit (lib.trivial) compare min;
inherit (lib.attrsets) mapAttrs;
in in
rec { rec {
@ -340,15 +341,15 @@ rec {
groupBy' builtins.add 0 (x: boolToString (x > 2)) [ 5 1 2 3 4 ] groupBy' builtins.add 0 (x: boolToString (x > 2)) [ 5 1 2 3 4 ]
=> { true = 12; false = 3; } => { true = 12; false = 3; }
*/ */
groupBy' = op: nul: pred: lst: groupBy' = op: nul: pred: lst: mapAttrs (name: foldl op nul) (groupBy pred lst);
foldl' (r: e:
let
key = pred e;
in
r // { ${key} = op (r.${key} or nul) e; }
) {} lst;
groupBy = groupBy' (sum: e: sum ++ [e]) []; groupBy = builtins.groupBy or (
pred: foldl' (r: e:
let
key = pred e;
in
r // { ${key} = (r.${key} or []) ++ [e]; }
) {});
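`groupBy` now delegates to the `builtins.groupBy` primop when available, with the Nix-level fold as fallback; both produce the same result, e.g.:

lib.groupBy (x: if x > 2 then "big" else "small") [ 1 2 3 4 ]
# => { small = [ 1 2 ]; big = [ 3 4 ]; }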
/* Merges two lists of the same size together. If the sizes aren't the same /* Merges two lists of the same size together. If the sizes aren't the same
the merging stops at the shortest. How both lists are merged is defined the merging stops at the shortest. How both lists are merged is defined
@ -78,7 +78,7 @@ rec {
2. (modern) a pattern for the platform `parsed` field. 2. (modern) a pattern for the platform `parsed` field.
We can inject these into a patten for the whole of a structured platform, We can inject these into a pattern for the whole of a structured platform,
and then match that. and then match that.
*/ */
platformMatch = platform: elem: let platformMatch = platform: elem: let
@ -9,7 +9,7 @@ let
catAttrs catAttrs
concatLists concatLists
concatMap concatMap
count concatStringsSep
elem elem
filter filter
findFirst findFirst
@ -37,6 +37,7 @@ let
toList toList
types types
warnIf warnIf
zipAttrsWith
; ;
inherit (lib.options) inherit (lib.options)
isOption isOption
@ -46,6 +47,20 @@ let
showOption showOption
unknownModule unknownModule
; ;
showDeclPrefix = loc: decl: prefix:
" - option(s) with prefix `${showOption (loc ++ [prefix])}' in module `${decl._file}'";
showRawDecls = loc: decls:
concatStringsSep "\n"
(sort (a: b: a < b)
(concatMap
(decl: map
(showDeclPrefix loc decl)
(attrNames decl.options)
)
decls
));
in in
rec { rec {
@ -137,7 +152,7 @@ rec {
# support for that, in turn it's lazy in its values. This means e.g. # support for that, in turn it's lazy in its values. This means e.g.
# a `_module.args.pkgs = import (fetchTarball { ... }) {}` won't # a `_module.args.pkgs = import (fetchTarball { ... }) {}` won't
# start a download when `pkgs` wasn't evaluated. # start a download when `pkgs` wasn't evaluated.
type = types.lazyAttrsOf types.unspecified; type = types.lazyAttrsOf types.raw;
internal = true; internal = true;
description = "Arguments passed to each module."; description = "Arguments passed to each module.";
}; };
@ -150,8 +165,7 @@ rec {
}; };
_module.freeformType = mkOption { _module.freeformType = mkOption {
# Disallow merging for now, but could be implemented nicely with a `types.optionType` type = types.nullOr types.optionType;
type = types.nullOr (types.uniq types.attrs);
internal = true; internal = true;
default = null; default = null;
description = '' description = ''
@ -333,6 +347,10 @@ rec {
in modulesPath: initialModules: args: in modulesPath: initialModules: args:
filterModules modulesPath (collectStructuredModules unknownModule "" initialModules args); filterModules modulesPath (collectStructuredModules unknownModule "" initialModules args);
/* Wrap a module with a default location for reporting errors. */
setDefaultModuleLocation = file: m:
{ _file = file; imports = [ m ]; };
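A minimal sketch of the newly exported helper (the file name is illustrative):

lib.setDefaultModuleLocation "<generated>/my-module.nix" { config = { }; }
# => { _file = "<generated>/my-module.nix"; imports = [ { config = { }; } ]; }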
/* Massage a module into canonical form, that is, a set consisting /* Massage a module into canonical form, that is, a set consisting
of options, config and imports attributes. */ of options, config and imports attributes. */
unifyModuleSyntax = file: key: m: unifyModuleSyntax = file: key: m:
@ -442,10 +460,11 @@ rec {
} }
*/ */
byName = attr: f: modules: byName = attr: f: modules:
foldl' (acc: module: zipAttrsWith (n: concatLists)
if !(builtins.isAttrs module.${attr}) then (map (module: let subtree = module.${attr}; in
if !(builtins.isAttrs subtree) then
throw '' throw ''
You're trying to declare a value of type `${builtins.typeOf module.${attr}}' You're trying to declare a value of type `${builtins.typeOf subtree}'
rather than an attribute-set for the option rather than an attribute-set for the option
`${builtins.concatStringsSep "." prefix}'! `${builtins.concatStringsSep "." prefix}'!
@ -454,11 +473,8 @@ rec {
this option by e.g. referring to `man 5 configuration.nix'! this option by e.g. referring to `man 5 configuration.nix'!
'' ''
else else
acc // (mapAttrs (n: v: mapAttrs (n: f module) subtree
(acc.${n} or []) ++ f module v ) modules);
) module.${attr}
)
) {} modules;
# an attrset 'name' => list of submodules that declare name. # an attrset 'name' => list of submodules that declare name.
declsByName = byName "options" (module: option: declsByName = byName "options" (module: option:
[{ inherit (module) _file; options = option; }] [{ inherit (module) _file; options = option; }]
@ -472,26 +488,61 @@ rec {
[{ inherit (module) file; inherit value; }] [{ inherit (module) file; inherit value; }]
) configs; ) configs;
# Convert an option tree decl to a submodule option decl
optionTreeToOption = decl:
if isOption decl.options
then decl
else decl // {
options = mkOption {
type = types.submoduleWith {
modules = [ { options = decl.options; } ];
# `null` is not intended for use by modules. It is an internal
# value that means "whatever the user has declared elsewhere".
# This might become obsolete with https://github.com/NixOS/nixpkgs/issues/162398
shorthandOnlyDefinesConfig = null;
};
};
};
resultsByName = mapAttrs (name: decls: resultsByName = mapAttrs (name: decls:
# We're descending into attribute `name`. # We're descending into attribute `name`.
let let
loc = prefix ++ [name]; loc = prefix ++ [name];
defns = defnsByName.${name} or []; defns = defnsByName.${name} or [];
defns' = defnsByName'.${name} or []; defns' = defnsByName'.${name} or [];
nrOptions = count (m: isOption m.options) decls; optionDecls = filter (m: isOption m.options) decls;
in in
if nrOptions == length decls then if length optionDecls == length decls then
let opt = fixupOptionType loc (mergeOptionDecls loc decls); let opt = fixupOptionType loc (mergeOptionDecls loc decls);
in { in {
matchedOptions = evalOptionValue loc opt defns'; matchedOptions = evalOptionValue loc opt defns';
unmatchedDefns = []; unmatchedDefns = [];
} }
else if nrOptions != 0 then else if optionDecls != [] then
let if all (x: x.options.type.name == "submodule") optionDecls
firstOption = findFirst (m: isOption m.options) "" decls; # Raw options can only be merged into submodules. Merging into
firstNonOption = findFirst (m: !isOption m.options) "" decls; # attrsets might be nice, but ambiguous. Suppose we have
in # attrset as a `attrsOf submodule`. User declares option
throw "The option `${showOption loc}' in `${firstOption._file}' is a prefix of options in `${firstNonOption._file}'." # attrset.foo.bar, this could mean:
# a. option `bar` is only available in `attrset.foo`
# b. option `foo.bar` is available in all `attrset.*`
# c. reject and require "<name>" as a reminder that it behaves like (b).
# d. magically combine (a) and (c).
# All of the above are merely syntax sugar though.
then
let opt = fixupOptionType loc (mergeOptionDecls loc (map optionTreeToOption decls));
in {
matchedOptions = evalOptionValue loc opt defns';
unmatchedDefns = [];
}
else
let
firstNonOption = findFirst (m: !isOption m.options) "" decls;
nonOptions = filter (m: !isOption m.options) decls;
in
throw "The option `${showOption loc}' in module `${(lib.head optionDecls)._file}' would be a parent of the following options, but its type `${(lib.head optionDecls).options.type.description or "<no description>"}' does not support nested options.\n${
showRawDecls loc nonOptions
}"
else else
mergeModules' loc decls defns) declsByName; mergeModules' loc decls defns) declsByName;
@ -535,11 +586,9 @@ rec {
correspond to the definition of 'loc' in 'opt.file'. */ correspond to the definition of 'loc' in 'opt.file'. */
mergeOptionDecls = mergeOptionDecls =
let let
packSubmodule = file: m:
{ _file = file; imports = [ m ]; };
coerceOption = file: opt: coerceOption = file: opt:
if isFunction opt then packSubmodule file opt if isFunction opt then setDefaultModuleLocation file opt
else packSubmodule file { options = opt; }; else setDefaultModuleLocation file { options = opt; };
in loc: opts: in loc: opts:
foldl' (res: opt: foldl' (res: opt:
let t = res.type; let t = res.type;
@ -569,7 +618,7 @@ rec {
getSubModules = opt.options.type.getSubModules or null; getSubModules = opt.options.type.getSubModules or null;
submodules = submodules =
if getSubModules != null then map (packSubmodule opt._file) getSubModules ++ res.options if getSubModules != null then map (setDefaultModuleLocation opt._file) getSubModules ++ res.options
else if opt.options ? options then map (coerceOption opt._file) options' ++ res.options else if opt.options ? options then map (coerceOption opt._file) options' ++ res.options
else res.options; else res.options;
in opt.options // res // in opt.options // res //
@ -614,6 +663,8 @@ rec {
definitions = map (def: def.value) res.defsFinal; definitions = map (def: def.value) res.defsFinal;
files = map (def: def.file) res.defsFinal; files = map (def: def.file) res.defsFinal;
inherit (res) isDefined; inherit (res) isDefined;
# This allows options to be correctly displayed using `${options.path.to.it}`
__toString = _: showOption loc;
}; };
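With the added `__toString`, interpolating an evaluated option renders its path via `showOption`; a sketch with a hypothetical option:

"${options.services.foo.enable}"
# => "services.foo.enable"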
# Merge definitions of a value of a given type. # Merge definitions of a value of a given type.
@ -751,21 +802,22 @@ rec {
compare = a: b: (a.priority or 1000) < (b.priority or 1000); compare = a: b: (a.priority or 1000) < (b.priority or 1000);
in sort compare defs'; in sort compare defs';
/* Hack for backward compatibility: convert options of type
optionSet to options of type submodule. FIXME: remove
eventually. */
fixupOptionType = loc: opt: fixupOptionType = loc: opt:
let let
options = opt.options or options = opt.options or
(throw "Option `${showOption loc}' has type optionSet but has no option attribute, in ${showFiles opt.declarations}."); (throw "Option `${showOption loc}' has type optionSet but has no option attribute, in ${showFiles opt.declarations}.");
# Hack for backward compatibility: convert options of type
# optionSet to options of type submodule. FIXME: remove
# eventually.
f = tp: f = tp:
let optionSetIn = type: (tp.name == type) && (tp.functor.wrapped.name == "optionSet");
in
if tp.name == "option set" || tp.name == "submodule" then if tp.name == "option set" || tp.name == "submodule" then
throw "The option ${showOption loc} uses submodules without a wrapping type, in ${showFiles opt.declarations}." throw "The option ${showOption loc} uses submodules without a wrapping type, in ${showFiles opt.declarations}."
else if optionSetIn "attrsOf" then types.attrsOf (types.submodule options) else if (tp.functor.wrapped.name or null) == "optionSet" then
else if optionSetIn "listOf" then types.listOf (types.submodule options) if tp.name == "attrsOf" then types.attrsOf (types.submodule options)
else if optionSetIn "nullOr" then types.nullOr (types.submodule options) else if tp.name == "listOf" then types.listOf (types.submodule options)
else if tp.name == "nullOr" then types.nullOr (types.submodule options)
else tp
else tp; else tp;
in in
if opt.type.getSubModules or null == null if opt.type.getSubModules or null == null
@ -902,6 +954,26 @@ rec {
use = builtins.trace "Obsolete option `${showOption from}' is used. It was renamed to `${showOption to}'."; use = builtins.trace "Obsolete option `${showOption from}' is used. It was renamed to `${showOption to}'.";
}; };
mkRenamedOptionModuleWith = {
/* Old option path as list of strings. */
from,
/* New option path as list of strings. */
to,
/*
Release number of the first release that contains the rename, ignoring backports.
Set it to the upcoming release, matching the nixpkgs/.version file.
*/
sinceRelease,
}: doRename {
inherit from to;
visible = false;
warn = lib.isInOldestRelease sinceRelease;
use = lib.warnIf (lib.isInOldestRelease sinceRelease)
"Obsolete option `${showOption from}' is used. It was renamed to `${showOption to}'.";
};
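A usage sketch under assumed option paths (both paths are hypothetical):

mkRenamedOptionModuleWith {
  from = [ "services" "fooOld" "enable" ];
  to = [ "services" "foo" "enable" ];
  sinceRelease = 2205; # start warning once 22.05 is the oldest supported release
}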
/* Return a module that causes a warning to be shown if any of the "from" /* Return a module that causes a warning to be shown if any of the "from"
options are defined; the defined values can be used in the "mergeFn" to set options are defined; the defined values can be used in the "mergeFn" to set
the "to" value. the "to" value.
@ -26,6 +26,7 @@ let
take take
; ;
inherit (lib.attrsets) inherit (lib.attrsets)
attrByPath
optionalAttrs optionalAttrs
; ;
inherit (lib.strings) inherit (lib.strings)
@ -99,6 +100,49 @@ rec {
type = lib.types.bool; type = lib.types.bool;
}; };
/* Creates an Option attribute set for an option that specifies the
package a module should use for some purpose.
Type: mkPackageOption :: pkgs -> string -> { default :: [string], example :: null | string | [string] } -> option
The package is specified as a list of strings representing its attribute path in nixpkgs.
Because of this, you need to pass nixpkgs itself as the first argument.
The second argument is the name of the option, used in the description "The <name> package to use.".
You can also pass an example value, either a literal string or a package's attribute path.
You can omit the default path if the name of the option is also an attribute path in nixpkgs.
Example:
mkPackageOption pkgs "hello" { }
=> { _type = "option"; default = «derivation /nix/store/3r2vg51hlxj3cx5vscp0vkv60bqxkaq0-hello-2.10.drv»; defaultText = { ... }; description = "The hello package to use."; type = { ... }; }
Example:
mkPackageOption pkgs "GHC" {
default = [ "ghc" ];
example = "pkgs.haskell.package.ghc921.ghc.withPackages (hkgs: [ hkgs.primes ])";
}
=> { _type = "option"; default = «derivation /nix/store/jxx55cxsjrf8kyh3fp2ya17q99w7541r-ghc-8.10.7.drv»; defaultText = { ... }; description = "The GHC package to use."; example = { ... }; type = { ... }; }
*/
mkPackageOption =
# Package set (a specific version of nixpkgs)
pkgs:
# Name for the package, shown in option description
name:
{ default ? [ name ], example ? null }:
let default' = if !isList default then [ default ] else default;
in mkOption {
type = lib.types.package;
description = "The ${name} package to use.";
default = attrByPath default'
(throw "${concatStringsSep "." default'} cannot be found in pkgs") pkgs;
defaultText = literalExpression ("pkgs." + concatStringsSep "." default');
${if example != null then "example" else null} = literalExpression
(if isList example then "pkgs." + concatStringsSep "." example else example);
};
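For completeness, a sketch of how a module might consume this helper (service and option names are hypothetical):

{ lib, pkgs, ... }: {
  options.services.myservice.package = lib.mkPackageOption pkgs "hello" { };
}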
/* This option accepts anything, but it does not produce any result. /* This option accepts anything, but it does not produce any result.
This is useful for sharing a module across different module sets This is useful for sharing a module across different module sets
@ -128,11 +172,13 @@ rec {
else if all isInt list && all (x: x == head list) list then head list else if all isInt list && all (x: x == head list) list then head list
else throw "Cannot merge definitions of `${showOption loc}'. Definition values:${showDefs defs}"; else throw "Cannot merge definitions of `${showOption loc}'. Definition values:${showDefs defs}";
mergeOneOption = loc: defs: mergeOneOption = mergeUniqueOption { message = ""; };
if defs == [] then abort "This case should never happen."
else if length defs != 1 then mergeUniqueOption = { message }: loc: defs:
throw "The unique option `${showOption loc}' is defined multiple times. Definition values:${showDefs defs}" if length defs == 1
else (head defs).value; then (head defs).value
else assert length defs > 1;
throw "The option `${showOption loc}' is defined multiple times.\n${message}\nDefinition values:${showDefs defs}";
/* "Merge" option definitions by checking that they all have the same value. */ /* "Merge" option definitions by checking that they all have the same value. */
mergeEqualOption = loc: defs: mergeEqualOption = loc: defs:
@ -177,7 +223,7 @@ rec {
docOption = rec { docOption = rec {
loc = opt.loc; loc = opt.loc;
name = showOption opt.loc; name = showOption opt.loc;
description = opt.description or (lib.warn "Option `${name}' has no description." "This option has no description."); description = opt.description or null;
declarations = filter (x: x != unknownModule) opt.declarations; declarations = filter (x: x != unknownModule) opt.declarations;
internal = opt.internal or false; internal = opt.internal or false;
visible = visible =
@ -185,7 +231,7 @@ rec {
then true then true
else opt.visible or true; else opt.visible or true;
readOnly = opt.readOnly or false; readOnly = opt.readOnly or false;
type = opt.type.description or null; type = opt.type.description or "unspecified";
} }
// optionalAttrs (opt ? example) { example = scrubOptionValue opt.example; } // optionalAttrs (opt ? example) { example = scrubOptionValue opt.example; }
// optionalAttrs (opt ? default) { default = scrubOptionValue opt.default; } // optionalAttrs (opt ? default) { default = scrubOptionValue opt.default; }
@ -20,17 +20,26 @@ let
readFile readFile
; ;
# Returns the type of a path: regular (for file), symlink, or directory
pathType = p: getAttr (baseNameOf p) (readDir (dirOf p));
/*
  Returns the type of a path: regular (for file), symlink, or directory.
*/
pathType = path: getAttr (baseNameOf path) (readDir (dirOf path));
# Returns true if the path exists and is a directory, false otherwise
pathIsDirectory = p: if pathExists p then (pathType p) == "directory" else false;
/*
  Returns true if the path exists and is a directory, false otherwise.
*/
pathIsDirectory = path: if pathExists path then (pathType path) == "directory" else false;
# Returns true if the path exists and is a regular file, false otherwise
pathIsRegularFile = p: if pathExists p then (pathType p) == "regular" else false;
/*
  Returns true if the path exists and is a regular file, false otherwise.
*/
pathIsRegularFile = path: if pathExists path then (pathType path) == "regular" else false;
# Bring in a path as a source, filtering out all Subversion and CVS
# directories, as well as backup files (*~).
/*
  A basic filter for `cleanSourceWith` that removes
  directories of version control systems, backup files (*~)
  and some generated files.
*/
cleanSourceFilter = name: type: let baseName = baseNameOf (toString name); in ! ( cleanSourceFilter = name: type: let baseName = baseNameOf (toString name); in ! (
# Filter out version control software files/directories # Filter out version control software files/directories
(baseName == ".git" || type == "directory" && (baseName == ".svn" || baseName == "CVS" || baseName == ".hg")) || (baseName == ".git" || type == "directory" && (baseName == ".svn" || baseName == "CVS" || baseName == ".hg")) ||
@ -48,43 +57,48 @@ let
(type == "unknown") (type == "unknown")
); );
# Filters a source tree removing version control files and directories using cleanSourceWith
# Example:
# cleanSource ./.
/*
  Filters a source tree removing version control files and directories using cleanSourceFilter.

  Example:
  cleanSource ./.
*/
cleanSource = src: cleanSourceWith { filter = cleanSourceFilter; inherit src; }; cleanSource = src: cleanSourceWith { filter = cleanSourceFilter; inherit src; };
# Like `builtins.filterSource`, except it will compose with itself,
# allowing you to chain multiple calls together without any
# intermediate copies being put in the nix store.
#
#     lib.cleanSourceWith {
#       filter = f;
#       src = lib.cleanSourceWith {
#         filter = g;
#         src = ./.;
#       };
#     }
#     # Succeeds!
#
#     builtins.filterSource f (builtins.filterSource g ./.)
#     # Fails!
#
# Parameters:
#
#   src: A path or cleanSourceWith result to filter and/or rename.
#
#   filter: A function (path -> type -> bool)
#     Optional with default value: constant true (include everything)
#     The function will be combined with the && operator such
#     that src.filter is called lazily.
#     For implementing a filter, see
#     https://nixos.org/nix/manual/#builtin-filterSource
#
#   name: Optional name to use as part of the store path.
#     This defaults to `src.name` or otherwise `"source"`.
cleanSourceWith = { filter ? _path: _type: true, src, name ? null }:
/*
  Like `builtins.filterSource`, except it will compose with itself,
  allowing you to chain multiple calls together without any
  intermediate copies being put in the nix store.

  Example:
    lib.cleanSourceWith {
      filter = f;
      src = lib.cleanSourceWith {
        filter = g;
        src = ./.;
      };
    }
    # Succeeds!

    builtins.filterSource f (builtins.filterSource g ./.)
    # Fails!
*/
cleanSourceWith =
  {
    # A path or cleanSourceWith result to filter and/or rename.
    src,
    # Optional with default value: constant true (include everything)
    # The function will be combined with the && operator such
    # that src.filter is called lazily.
    # For implementing a filter, see
    # https://nixos.org/nix/manual/#builtin-filterSource
    # Type: A function (path -> type -> bool)
    filter ? _path: _type: true,
    # Optional name to use as part of the store path.
    # This defaults to `src.name` or otherwise `"source"`.
    name ? null
  }:
let let
orig = toSourceAttributes src; orig = toSourceAttributes src;
in fromSourceAttributes { in fromSourceAttributes {
@ -116,9 +130,11 @@ let
satisfiesSubpathInvariant = src ? satisfiesSubpathInvariant && src.satisfiesSubpathInvariant; satisfiesSubpathInvariant = src ? satisfiesSubpathInvariant && src.satisfiesSubpathInvariant;
}; };
# Filter sources by a list of regular expressions.
# E.g. `src = sourceByRegex ./my-subproject [".*\.py$" "^database.sql$"]`
/*
  Filter sources by a list of regular expressions.

  Example: src = sourceByRegex ./my-subproject [".*\.py$" "^database.sql$"]
*/
sourceByRegex = src: regexes: sourceByRegex = src: regexes:
let let
isFiltered = src ? _isLibCleanSourceWith; isFiltered = src ? _isLibCleanSourceWith;
@ -153,8 +169,11 @@ let
pathIsGitRepo = path: (tryEval (commitIdFromGitRepo path)).success; pathIsGitRepo = path: (tryEval (commitIdFromGitRepo path)).success;
# Get the commit id of a git repo
# Example: commitIdFromGitRepo <nixpkgs/.git>
/*
  Get the commit id of a git repo.

  Example: commitIdFromGitRepo <nixpkgs/.git>
*/
commitIdFromGitRepo = commitIdFromGitRepo =
let readCommitFromFile = file: path: let readCommitFromFile = file: path:
let fileName = toString path + "/" + file; let fileName = toString path + "/" + file;
@ -105,7 +105,8 @@ rec {
else if final.isAarch64 then "arm64" else if final.isAarch64 then "arm64"
else if final.isx86_32 then "i386" else if final.isx86_32 then "i386"
else if final.isx86_64 then "x86_64" else if final.isx86_64 then "x86_64"
else if final.isMips then "mips" else if final.isMips32 then "mips"
else if final.isMips64 then "mips" # linux kernel does not distinguish mips32/mips64
else if final.isPower then "powerpc" else if final.isPower then "powerpc"
else if final.isRiscV then "riscv" else if final.isRiscV then "riscv"
else if final.isS390 then "s390" else if final.isS390 then "s390"
@ -26,7 +26,7 @@ let
# Linux # Linux
"aarch64-linux" "armv5tel-linux" "armv6l-linux" "armv7a-linux" "aarch64-linux" "armv5tel-linux" "armv6l-linux" "armv7a-linux"
"armv7l-linux" "i686-linux" "m68k-linux" "mipsel-linux" "armv7l-linux" "i686-linux" "m68k-linux" "mipsel-linux" "mips64el-linux"
"powerpc64-linux" "powerpc64le-linux" "riscv32-linux" "powerpc64-linux" "powerpc64le-linux" "riscv32-linux"
"riscv64-linux" "s390-linux" "s390x-linux" "x86_64-linux" "riscv64-linux" "s390-linux" "s390x-linux" "x86_64-linux"
@ -87,7 +87,11 @@ in {
darwin = filterDoubles predicates.isDarwin; darwin = filterDoubles predicates.isDarwin;
freebsd = filterDoubles predicates.isFreeBSD; freebsd = filterDoubles predicates.isFreeBSD;
# Should be better, but MinGW is unclear. # Should be better, but MinGW is unclear.
gnu = filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnu; }) ++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnueabi; }) ++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnueabihf; }); gnu = filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnu; })
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnueabi; })
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnueabihf; })
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnuabin32; })
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnuabi64; });
illumos = filterDoubles predicates.isSunOS; illumos = filterDoubles predicates.isSunOS;
linux = filterDoubles predicates.isLinux; linux = filterDoubles predicates.isLinux;
netbsd = filterDoubles predicates.isNetBSD; netbsd = filterDoubles predicates.isNetBSD;
@ -93,6 +93,26 @@ rec {
config = "mipsel-unknown-linux-gnu"; config = "mipsel-unknown-linux-gnu";
} // platforms.fuloong2f_n32; } // platforms.fuloong2f_n32;
# MIPS ABI table transcribed from here: https://wiki.debian.org/Multiarch/Tuples
# can execute on 32bit chip
mips-linux-gnu = { config = "mips-linux-gnu"; } // platforms.gcc_mips32r2_o32;
mipsel-linux-gnu = { config = "mipsel-linux-gnu"; } // platforms.gcc_mips32r2_o32;
mipsisa32r6-linux-gnu = { config = "mipsisa32r6-linux-gnu"; } // platforms.gcc_mips32r6_o32;
mipsisa32r6el-linux-gnu = { config = "mipsisa32r6el-linux-gnu"; } // platforms.gcc_mips32r6_o32;
# require 64bit chip (for more registers, 64-bit floating point, 64-bit "long long") but use 32bit pointers
mips64-linux-gnuabin32 = { config = "mips64-linux-gnuabin32"; } // platforms.gcc_mips64r2_n32;
mips64el-linux-gnuabin32 = { config = "mips64el-linux-gnuabin32"; } // platforms.gcc_mips64r2_n32;
mipsisa64r6-linux-gnuabin32 = { config = "mipsisa64r6-linux-gnuabin32"; } // platforms.gcc_mips64r6_n32;
mipsisa64r6el-linux-gnuabin32 = { config = "mipsisa64r6el-linux-gnuabin32"; } // platforms.gcc_mips64r6_n32;
# 64bit pointers
mips64-linux-gnuabi64 = { config = "mips64-linux-gnuabi64"; } // platforms.gcc_mips64r2_64;
mips64el-linux-gnuabi64 = { config = "mips64el-linux-gnuabi64"; } // platforms.gcc_mips64r2_64;
mipsisa64r6-linux-gnuabi64 = { config = "mipsisa64r6-linux-gnuabi64"; } // platforms.gcc_mips64r6_64;
mipsisa64r6el-linux-gnuabi64 = { config = "mipsisa64r6el-linux-gnuabi64"; } // platforms.gcc_mips64r6_64;
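These example platforms slot directly into `crossSystem`; a sketch assuming a nixpkgs checkout on NIX_PATH:

import <nixpkgs> {
  crossSystem = (import <nixpkgs/lib>).systems.examples.mips64el-linux-gnuabi64;
}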
muslpi = raspberryPi // { muslpi = raspberryPi // {
config = "armv6l-unknown-linux-musleabihf"; config = "armv6l-unknown-linux-musleabihf";
}; };
@ -17,6 +17,10 @@ rec {
isAarch32 = { cpu = { family = "arm"; bits = 32; }; }; isAarch32 = { cpu = { family = "arm"; bits = 32; }; };
isAarch64 = { cpu = { family = "arm"; bits = 64; }; }; isAarch64 = { cpu = { family = "arm"; bits = 64; }; };
isMips = { cpu = { family = "mips"; }; }; isMips = { cpu = { family = "mips"; }; };
isMips32 = { cpu = { family = "mips"; bits = 32; }; };
isMips64 = { cpu = { family = "mips"; bits = 64; }; };
isMips64n32 = { cpu = { family = "mips"; bits = 64; }; abi = { abi = "n32"; }; };
isMips64n64 = { cpu = { family = "mips"; bits = 64; }; abi = { abi = "64"; }; };
isMmix = { cpu = { family = "mmix"; }; }; isMmix = { cpu = { family = "mmix"; }; };
isRiscV = { cpu = { family = "riscv"; }; }; isRiscV = { cpu = { family = "riscv"; }; };
isSparc = { cpu = { family = "sparc"; }; }; isSparc = { cpu = { family = "sparc"; }; };
@ -57,7 +61,7 @@ rec {
isAndroid = [ { abi = abis.android; } { abi = abis.androideabi; } ]; isAndroid = [ { abi = abis.android; } { abi = abis.androideabi; } ];
isGnu = with abis; map (a: { abi = a; }) [ gnuabi64 gnu gnueabi gnueabihf ]; isGnu = with abis; map (a: { abi = a; }) [ gnuabi64 gnu gnueabi gnueabihf ];
isMusl = with abis; map (a: { abi = a; }) [ musl musleabi musleabihf ]; isMusl = with abis; map (a: { abi = a; }) [ musl musleabi musleabihf muslabin32 muslabi64 ];
isUClibc = with abis; map (a: { abi = a; }) [ uclibc uclibceabi uclibceabihf ]; isUClibc = with abis; map (a: { abi = a; }) [ uclibc uclibceabi uclibceabihf ];
isEfi = map (family: { cpu.family = family; }) isEfi = map (family: { cpu.family = family; })

]; ];
}; };
gnuabi64 = { abi = "64"; }; gnuabi64 = { abi = "64"; };
muslabi64 = { abi = "64"; };
# NOTE: abi=n32 requires a 64-bit MIPS chip! That is not a typo.
# It is basically the 64-bit abi with 32-bit pointers. Details:
# https://www.linux-mips.org/pub/linux/mips/doc/ABI/MIPS-N32-ABI-Handbook.pdf
gnuabin32 = { abi = "n32"; };
muslabin32 = { abi = "n32"; };
musleabi = { float = "soft"; }; musleabi = { float = "soft"; };
musleabihf = { float = "hard"; }; musleabihf = { float = "hard"; };
musl = {}; musl = {};
uclibceabihf = { float = "soft"; }; uclibceabi = { float = "soft"; };
uclibceabi = { float = "hard"; }; uclibceabihf = { float = "hard"; };
uclibc = {}; uclibc = {};
unknown = {}; unknown = {};
@ -1,3 +1,10 @@
# Note: lib/systems/default.nix takes care of producing valid,
# fully-formed "platform" values (e.g. hostPlatform, buildPlatform,
# targetPlatform, etc) containing at least the minimal set of attrs
# required (see types.parsedPlatform in lib/systems/parse.nix). This
# file takes an already-valid platform and further elaborates it with
# optional fields such as linux-kernel, gcc, etc.
{ lib }: { lib }:
rec { rec {
pc = { pc = {
@ -482,6 +489,43 @@ rec {
}; };
}; };
# can execute on 32bit chip
gcc_mips32r2_o32 = { gcc = { arch = "mips32r2"; abi = "o32"; }; };
gcc_mips32r6_o32 = { gcc = { arch = "mips32r6"; abi = "o32"; }; };
gcc_mips64r2_n32 = { gcc = { arch = "mips64r2"; abi = "n32"; }; };
gcc_mips64r6_n32 = { gcc = { arch = "mips64r6"; abi = "n32"; }; };
gcc_mips64r2_64 = { gcc = { arch = "mips64r2"; abi = "64"; }; };
gcc_mips64r6_64 = { gcc = { arch = "mips64r6"; abi = "64"; }; };
# based on:
# https://www.mail-archive.com/qemu-discuss@nongnu.org/msg05179.html
# https://gmplib.org/~tege/qemu.html#mips64-debian
mips64el-qemu-linux-gnuabi64 = (import ./examples).mips64el-linux-gnuabi64 // {
linux-kernel = {
name = "mips64el";
baseConfig = "64r2el_defconfig";
target = "vmlinuz";
autoModules = false;
DTB = true;
# for qemu 9p passthrough filesystem
extraConfig = ''
MIPS_MALTA y
PAGE_SIZE_4KB y
CPU_LITTLE_ENDIAN y
CPU_MIPS64_R2 y
64BIT y
CPU_MIPS64_R2 y
NET_9P y
NET_9P_VIRTIO y
9P_FS y
9P_FS_POSIX_ACL y
PCI y
VIRTIO_PCI y
'';
};
};
## ##
## Other ## Other
## ##
@ -499,6 +543,9 @@ rec {
}; };
}; };
# This function takes a minimally-valid "platform" and returns an
# attrset containing zero or more additional attrs which should be
# included in the platform in order to further elaborate it.
select = platform: select = platform:
# x86 # x86
/**/ if platform.isx86 then pc /**/ if platform.isx86 then pc
@ -4,7 +4,9 @@
{ lib }: { lib }:
rec { rec {
# List of systems that are built by Hydra. # List of systems that are built by Hydra.
hydra = tier1 ++ tier2 ++ tier3; hydra = tier1 ++ tier2 ++ tier3 ++ [
"aarch64-darwin"
];
tier1 = [ tier1 = [
"x86_64-linux" "x86_64-linux"
@ -16,7 +18,6 @@ rec {
]; ];
tier3 = [ tier3 = [
"aarch64-darwin"
"armv6l-linux" "armv6l-linux"
"armv7l-linux" "armv7l-linux"
"i686-linux" "i686-linux"
@ -496,7 +496,7 @@ runTests {
testToPretty = testToPretty =
let let
deriv = derivation { name = "test"; builder = "/bin/sh"; system = builtins.currentSystem; }; deriv = derivation { name = "test"; builder = "/bin/sh"; system = "aarch64-linux"; };
in { in {
expr = mapAttrs (const (generators.toPretty { multiline = false; })) rec { expr = mapAttrs (const (generators.toPretty { multiline = false; })) rec {
int = 42; int = 42;
@ -761,4 +761,156 @@ runTests {
{ a = 3; b = 30; c = 300; } { a = 3; b = 30; c = 300; }
]; ];
}; };
# The example from the showAttrPath documentation
testShowAttrPathExample = {
expr = showAttrPath [ "foo" "10" "bar" ];
expected = "foo.\"10\".bar";
};
testShowAttrPathEmpty = {
expr = showAttrPath [];
expected = "<root attribute path>";
};
testShowAttrPathVarious = {
expr = showAttrPath [
"."
"foo"
"2"
"a2-b"
"_bc'de"
];
expected = ''".".foo."2".a2-b._bc'de'';
};
testGroupBy = {
expr = groupBy (n: toString (mod n 5)) (range 0 16);
expected = {
"0" = [ 0 5 10 15 ];
"1" = [ 1 6 11 16 ];
"2" = [ 2 7 12 ];
"3" = [ 3 8 13 ];
"4" = [ 4 9 14 ];
};
};
testGroupBy' = {
expr = groupBy' builtins.add 0 (x: boolToString (x > 2)) [ 5 1 2 3 4 ];
expected = { false = 3; true = 12; };
};
# The example from the updateManyAttrsByPath documentation
testUpdateManyAttrsByPathExample = {
expr = updateManyAttrsByPath [
{
path = [ "a" "b" ];
update = old: { d = old.c; };
}
{
path = [ "a" "b" "c" ];
update = old: old + 1;
}
{
path = [ "x" "y" ];
update = old: "xy";
}
] { a.b.c = 0; };
expected = { a = { b = { d = 1; }; }; x = { y = "xy"; }; };
};
# If there are no updates, the value is passed through
testUpdateManyAttrsByPathNone = {
expr = updateManyAttrsByPath [] "something";
expected = "something";
};
# A single update to the root path is just like applying the function directly
testUpdateManyAttrsByPathSingleIncrement = {
expr = updateManyAttrsByPath [
{
path = [ ];
update = old: old + 1;
}
] 0;
expected = 1;
};
# Multiple updates are applied in order
testUpdateManyAttrsByPathMultipleIncrements = {
expr = updateManyAttrsByPath [
{
path = [ ];
update = old: old + "a";
}
{
path = [ ];
update = old: old + "b";
}
{
path = [ ];
update = old: old + "c";
}
] "";
expected = "abc";
};
# If an update doesn't use the value, all previous updates are not evaluated
testUpdateManyAttrsByPathLazy = {
expr = updateManyAttrsByPath [
{
path = [ ];
update = old: old + throw "nope";
}
{
path = [ ];
update = old: "untainted";
}
] (throw "start");
expected = "untainted";
};
# Deeply nested attributes can be updated without affecting others
testUpdateManyAttrsByPathDeep = {
expr = updateManyAttrsByPath [
{
path = [ "a" "b" "c" ];
update = old: old + 1;
}
] {
a.b.c = 0;
a.b.z = 0;
a.y.z = 0;
x.y.z = 0;
};
expected = {
a.b.c = 1;
a.b.z = 0;
a.y.z = 0;
x.y.z = 0;
};
};
# Nested attributes are updated first
testUpdateManyAttrsByPathNestedBeforehand = {
expr = updateManyAttrsByPath [
{
path = [ "a" ];
update = old: old // { x = old.b; };
}
{
path = [ "a" "b" ];
update = old: old + 1;
}
] {
a.b = 0;
};
expected = {
a.b = 1;
a.x = 1;
};
};
} }
@ -62,6 +62,13 @@ checkConfigError() {
checkConfigOutput '^false$' config.enable ./declare-enable.nix checkConfigOutput '^false$' config.enable ./declare-enable.nix
checkConfigError 'The option .* does not exist. Definition values:\n\s*- In .*: true' config.enable ./define-enable.nix checkConfigError 'The option .* does not exist. Definition values:\n\s*- In .*: true' config.enable ./define-enable.nix
checkConfigOutput '^1$' config.bare-submodule.nested ./declare-bare-submodule.nix ./declare-bare-submodule-nested-option.nix
checkConfigOutput '^2$' config.bare-submodule.deep ./declare-bare-submodule.nix ./declare-bare-submodule-deep-option.nix
checkConfigOutput '^42$' config.bare-submodule.nested ./declare-bare-submodule.nix ./declare-bare-submodule-nested-option.nix ./declare-bare-submodule-deep-option.nix ./define-bare-submodule-values.nix
checkConfigOutput '^420$' config.bare-submodule.deep ./declare-bare-submodule.nix ./declare-bare-submodule-nested-option.nix ./declare-bare-submodule-deep-option.nix ./define-bare-submodule-values.nix
checkConfigOutput '^2$' config.bare-submodule.deep ./declare-bare-submodule.nix ./declare-bare-submodule-deep-option.nix ./define-shorthandOnlyDefinesConfig-true.nix
checkConfigError 'The option .bare-submodule.deep. in .*/declare-bare-submodule-deep-option.nix. is already declared in .*/declare-bare-submodule-deep-option-duplicate.nix' config.bare-submodule.deep ./declare-bare-submodule.nix ./declare-bare-submodule-deep-option.nix ./declare-bare-submodule-deep-option-duplicate.nix
# Check integer types. # Check integer types.
# unsigned # unsigned
checkConfigOutput '^42$' config.value ./declare-int-unsigned-value.nix ./define-value-int-positive.nix checkConfigOutput '^42$' config.value ./declare-int-unsigned-value.nix ./define-value-int-positive.nix
@ -240,6 +247,11 @@ checkConfigOutput '^"24"$' config.foo ./freeform-attrsOf.nix ./freeform-str-dep-
checkConfigError 'infinite recursion encountered' config.foo ./freeform-attrsOf.nix ./freeform-unstr-dep-str.nix checkConfigError 'infinite recursion encountered' config.foo ./freeform-attrsOf.nix ./freeform-unstr-dep-str.nix
checkConfigError 'The option .* is used but not defined' config.foo ./freeform-lazyAttrsOf.nix ./freeform-unstr-dep-str.nix checkConfigError 'The option .* is used but not defined' config.foo ./freeform-lazyAttrsOf.nix ./freeform-unstr-dep-str.nix
checkConfigOutput '^"24"$' config.foo ./freeform-lazyAttrsOf.nix ./freeform-unstr-dep-str.nix ./define-value-string.nix checkConfigOutput '^"24"$' config.foo ./freeform-lazyAttrsOf.nix ./freeform-unstr-dep-str.nix ./define-value-string.nix
# submodules in freeformTypes should have their locations annotated
checkConfigOutput '/freeform-submodules.nix"$' config.fooDeclarations.0 ./freeform-submodules.nix
# freeformTypes can get merged using `types.type`, including submodules
checkConfigOutput '^10$' config.free.xxx.foo ./freeform-submodules.nix
checkConfigOutput '^10$' config.free.yyy.bar ./freeform-submodules.nix
## types.anything ## types.anything
# Check that attribute sets are merged recursively # Check that attribute sets are merged recursively
@ -284,6 +296,37 @@ checkConfigOutput '^"a b"$' config.resultFoo ./declare-variants.nix ./define-var
checkConfigOutput '^"a y z"$' config.resultFooBar ./declare-variants.nix ./define-variant.nix checkConfigOutput '^"a y z"$' config.resultFooBar ./declare-variants.nix ./define-variant.nix
checkConfigOutput '^"a b c"$' config.resultFooFoo ./declare-variants.nix ./define-variant.nix checkConfigOutput '^"a b c"$' config.resultFooFoo ./declare-variants.nix ./define-variant.nix
## emptyValue's
checkConfigOutput "[ ]" config.list.a ./emptyValues.nix
checkConfigOutput "{ }" config.attrs.a ./emptyValues.nix
checkConfigOutput "null" config.null.a ./emptyValues.nix
checkConfigOutput "{ }" config.submodule.a ./emptyValues.nix
# These types don't have empty values
checkConfigError 'The option .int.a. is used but not defined' config.int.a ./emptyValues.nix
checkConfigError 'The option .nonEmptyList.a. is used but not defined' config.nonEmptyList.a ./emptyValues.nix
## types.raw
checkConfigOutput "{ foo = <CODE>; }" config.unprocessedNesting ./raw.nix
checkConfigOutput "10" config.processedToplevel ./raw.nix
checkConfigError "The option .multiple. is defined multiple times" config.multiple ./raw.nix
checkConfigOutput "bar" config.priorities ./raw.nix
## Option collision
checkConfigError \
'The option .set. in module .*/declare-set.nix. would be a parent of the following options, but its type .attribute set of signed integers. does not support nested options.\n\s*- option[(]s[)] with prefix .set.enable. in module .*/declare-enable-nested.nix.' \
config.set \
./declare-set.nix ./declare-enable-nested.nix
# Test that types.optionType merges types correctly
checkConfigOutput '^10$' config.theOption.int ./optionTypeMerging.nix
checkConfigOutput '^"hello"$' config.theOption.str ./optionTypeMerging.nix
# Test that types.optionType correctly annotates option locations
checkConfigError 'The option .theOption.nested. in .other.nix. is already declared in .optionTypeFile.nix.' config.theOption.nested ./optionTypeFile.nix
# Test that types.optionType leaves types untouched as long as they don't need to be merged
checkConfigOutput 'ok' config.freeformItems.foo.bar ./adhoc-freeformType-survives-type-merge.nix
cat <<EOF cat <<EOF
====== module tests ====== ====== module tests ======
$pass Pass $pass Pass
@ -0,0 +1,14 @@
{ lib, ... }: {
options.dummy = lib.mkOption { type = lib.types.anything; default = {}; };
freeformType =
let
a = lib.types.attrsOf (lib.types.submodule { options.bar = lib.mkOption { }; });
in
# modifying types like this breaks type merging.
# This test makes sure that type merging is not performed when only a single declaration exists.
# Don't modify types in practice!
a // {
merge = loc: defs: { freeformItems = a.merge loc defs; };
};
config.foo.bar = "ok";
}
@ -0,0 +1,10 @@
{ lib, ... }:
let
inherit (lib) mkOption types;
in
{
options.bare-submodule.deep = mkOption {
type = types.int;
default = 2;
};
}
@ -0,0 +1,10 @@
{ lib, ... }:
let
inherit (lib) mkOption types;
in
{
options.bare-submodule.deep = mkOption {
type = types.int;
default = 2;
};
}
@ -0,0 +1,19 @@
{ config, lib, ... }:
let
inherit (lib) mkOption types;
in
{
options.bare-submodule = mkOption {
type = types.submoduleWith {
shorthandOnlyDefinesConfig = config.shorthandOnlyDefinesConfig;
modules = [
{
options.nested = mkOption {
type = types.int;
default = 1;
};
}
];
};
};
}
@ -0,0 +1,18 @@
{ config, lib, ... }:
let
inherit (lib) mkOption types;
in
{
options.bare-submodule = mkOption {
type = types.submoduleWith {
modules = [ ];
shorthandOnlyDefinesConfig = config.shorthandOnlyDefinesConfig;
};
default = {};
};
# config-dependent options: not recommended, but useful for making this test parameterized
options.shorthandOnlyDefinesConfig = mkOption {
default = false;
};
}
@ -0,0 +1,12 @@
{ lib, ... }:
{
options.set = lib.mkOption {
default = { };
example = { a = 1; };
type = lib.types.attrsOf lib.types.int;
description = ''
Some descriptive text
'';
};
}
@ -0,0 +1,4 @@
{
bare-submodule.nested = 42;
bare-submodule.deep = 420;
}
@ -0,0 +1 @@
{ shorthandOnlyDefinesConfig = true; }
@ -0,0 +1,36 @@
{ lib, ... }:
let
inherit (lib) types;
in {
options = {
int = lib.mkOption {
type = types.lazyAttrsOf types.int;
};
list = lib.mkOption {
type = types.lazyAttrsOf (types.listOf types.int);
};
nonEmptyList = lib.mkOption {
type = types.lazyAttrsOf (types.nonEmptyListOf types.int);
};
attrs = lib.mkOption {
type = types.lazyAttrsOf (types.attrsOf types.int);
};
null = lib.mkOption {
type = types.lazyAttrsOf (types.nullOr types.int);
};
submodule = lib.mkOption {
type = types.lazyAttrsOf (types.submodule {});
};
};
config = {
int.a = lib.mkIf false null;
list.a = lib.mkIf false null;
nonEmptyList.a = lib.mkIf false null;
attrs.a = lib.mkIf false null;
null.a = lib.mkIf false null;
submodule.a = lib.mkIf false null;
};
}
@ -0,0 +1,22 @@
{ lib, options, ... }: with lib.types; {
options.fooDeclarations = lib.mkOption {
default = (options.free.type.getSubOptions [])._freeformOptions.foo.declarations;
};
options.free = lib.mkOption {
type = submodule {
config._module.freeformType = lib.mkMerge [
(attrsOf (submodule {
options.foo = lib.mkOption {};
}))
(attrsOf (submodule {
options.bar = lib.mkOption {};
}))
];
};
};
config.free.xxx.foo = 10;
config.free.yyy.bar = 10;
}
@ -0,0 +1,28 @@
{ config, lib, ... }: {
_file = "optionTypeFile.nix";
options.theType = lib.mkOption {
type = lib.types.optionType;
};
options.theOption = lib.mkOption {
type = config.theType;
default = {};
};
config.theType = lib.mkMerge [
(lib.types.submodule {
options.nested = lib.mkOption {
type = lib.types.int;
};
})
(lib.types.submodule {
_file = "other.nix";
options.nested = lib.mkOption {
type = lib.types.str;
};
})
];
}
@ -0,0 +1,27 @@
{ config, lib, ... }: {
options.theType = lib.mkOption {
type = lib.types.optionType;
};
options.theOption = lib.mkOption {
type = config.theType;
};
config.theType = lib.mkMerge [
(lib.types.submodule {
options.int = lib.mkOption {
type = lib.types.int;
default = 10;
};
})
(lib.types.submodule {
options.str = lib.mkOption {
type = lib.types.str;
};
})
];
config.theOption.str = "hello";
}
lib/tests/modules/raw.nix Normal file
@ -0,0 +1,30 @@
{ lib, ... }: {
options = {
processedToplevel = lib.mkOption {
type = lib.types.raw;
};
unprocessedNesting = lib.mkOption {
type = lib.types.raw;
};
multiple = lib.mkOption {
type = lib.types.raw;
};
priorities = lib.mkOption {
type = lib.types.raw;
};
};
config = {
processedToplevel = lib.mkIf true 10;
unprocessedNesting.foo = throw "foo";
multiple = lib.mkMerge [
"foo"
"foo"
];
priorities = lib.mkMerge [
"foo"
(lib.mkForce "bar")
];
};
}
@ -17,7 +17,7 @@ with lib.systems.doubles; lib.runTests {
testarm = mseteq arm [ "armv5tel-linux" "armv6l-linux" "armv6l-netbsd" "armv6l-none" "armv7a-linux" "armv7a-netbsd" "armv7l-linux" "armv7l-netbsd" "arm-none" "armv7a-darwin" ]; testarm = mseteq arm [ "armv5tel-linux" "armv6l-linux" "armv6l-netbsd" "armv6l-none" "armv7a-linux" "armv7a-netbsd" "armv7l-linux" "armv7l-netbsd" "arm-none" "armv7a-darwin" ];
testi686 = mseteq i686 [ "i686-linux" "i686-freebsd" "i686-genode" "i686-netbsd" "i686-openbsd" "i686-cygwin" "i686-windows" "i686-none" "i686-darwin" ]; testi686 = mseteq i686 [ "i686-linux" "i686-freebsd" "i686-genode" "i686-netbsd" "i686-openbsd" "i686-cygwin" "i686-windows" "i686-none" "i686-darwin" ];
testmips = mseteq mips [ "mipsel-linux" "mipsel-netbsd" ]; testmips = mseteq mips [ "mips64el-linux" "mipsel-linux" "mipsel-netbsd" ];
testmmix = mseteq mmix [ "mmix-mmixware" ]; testmmix = mseteq mmix [ "mmix-mmixware" ];
testx86_64 = mseteq x86_64 [ "x86_64-linux" "x86_64-darwin" "x86_64-freebsd" "x86_64-genode" "x86_64-redox" "x86_64-openbsd" "x86_64-netbsd" "x86_64-cygwin" "x86_64-solaris" "x86_64-windows" "x86_64-none" ]; testx86_64 = mseteq x86_64 [ "x86_64-linux" "x86_64-darwin" "x86_64-freebsd" "x86_64-genode" "x86_64-redox" "x86_64-openbsd" "x86_64-netbsd" "x86_64-cygwin" "x86_64-solaris" "x86_64-windows" "x86_64-none" ];
@ -28,7 +28,7 @@ with lib.systems.doubles; lib.runTests {
testredox = mseteq redox [ "x86_64-redox" ]; testredox = mseteq redox [ "x86_64-redox" ];
testgnu = mseteq gnu (linux /* ++ kfreebsd ++ ... */); testgnu = mseteq gnu (linux /* ++ kfreebsd ++ ... */);
testillumos = mseteq illumos [ "x86_64-solaris" ]; testillumos = mseteq illumos [ "x86_64-solaris" ];
testlinux = mseteq linux [ "aarch64-linux" "armv5tel-linux" "armv6l-linux" "armv7a-linux" "armv7l-linux" "i686-linux" "mipsel-linux" "riscv32-linux" "riscv64-linux" "x86_64-linux" "powerpc64-linux" "powerpc64le-linux" "m68k-linux" "s390-linux" "s390x-linux" ]; testlinux = mseteq linux [ "aarch64-linux" "armv5tel-linux" "armv6l-linux" "armv7a-linux" "armv7l-linux" "i686-linux" "mips64el-linux" "mipsel-linux" "riscv32-linux" "riscv64-linux" "x86_64-linux" "powerpc64-linux" "powerpc64le-linux" "m68k-linux" "s390-linux" "s390x-linux" ];
testnetbsd = mseteq netbsd [ "aarch64-netbsd" "armv6l-netbsd" "armv7a-netbsd" "armv7l-netbsd" "i686-netbsd" "m68k-netbsd" "mipsel-netbsd" "powerpc-netbsd" "riscv32-netbsd" "riscv64-netbsd" "x86_64-netbsd" ]; testnetbsd = mseteq netbsd [ "aarch64-netbsd" "armv6l-netbsd" "armv7a-netbsd" "armv7l-netbsd" "i686-netbsd" "m68k-netbsd" "mipsel-netbsd" "powerpc-netbsd" "riscv32-netbsd" "riscv64-netbsd" "x86_64-netbsd" ];
testopenbsd = mseteq openbsd [ "i686-openbsd" "x86_64-openbsd" ]; testopenbsd = mseteq openbsd [ "i686-openbsd" "x86_64-openbsd" ];
testwindows = mseteq windows [ "i686-cygwin" "x86_64-cygwin" "i686-windows" "x86_64-windows" ]; testwindows = mseteq windows [ "i686-cygwin" "x86_64-cygwin" "i686-windows" "x86_64-windows" ];

pipe = val: functions: pipe = val: functions:
let reverseApply = x: f: f x; let reverseApply = x: f: f x;
in builtins.foldl' reverseApply val functions; in builtins.foldl' reverseApply val functions;
/* note please don't add a function like `compose = flip pipe`.
   This would confuse users, because the order of the functions
   in the list is not clear. With pipe, it's obvious that it
   goes first-to-last. With `compose`, not so much.
*/
# note please don't add a function like `compose = flip pipe`.
# This would confuse users, because the order of the functions
# in the list is not clear. With pipe, it's obvious that it
# goes first-to-last. With `compose`, not so much.
## Named versions corresponding to some builtin operators. ## Named versions corresponding to some builtin operators.
@ -166,6 +166,30 @@ rec {
/* Returns the current nixpkgs release number as string. */ /* Returns the current nixpkgs release number as string. */
release = lib.strings.fileContents ../.version; release = lib.strings.fileContents ../.version;
/* The latest release that is supported, at the time of release branch-off,
if applicable.
Ideally, out-of-tree modules should be able to evaluate cleanly with all
supported Nixpkgs versions (master, release and old release until EOL).
So if possible, deprecation warnings should take effect only when all
out-of-tree expressions/libs/modules can upgrade to the new way without
losing support for supported Nixpkgs versions.
This release number allows deprecation warnings to be implemented such that
they take effect as soon as the oldest release reaches end of life. */
oldestSupportedRelease =
# Update on master only. Do not backport.
2111;
/* Whether a feature is supported in all supported releases (at the time of
release branch-off, if applicable). See `oldestSupportedRelease`. */
isInOldestRelease =
/* Release number of feature introduction as an integer, e.g. 2111 for 21.11.
Set it to the upcoming release, matching the nixpkgs/.version file.
*/
release:
release <= lib.trivial.oldestSupportedRelease;
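A sketch of the intended deprecation pattern (function names are placeholders):

let lib = import <nixpkgs/lib>; in
lib.warnIf (lib.isInOldestRelease 2111)
  "myOldFunction was renamed to myNewFunction" 42
# evaluates to 42; the warning fires once the rename, made in 21.11, is
# present in the oldest supported release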
/* Returns the current nixpkgs release code name. /* Returns the current nixpkgs release code name.
On each release the first letter is bumped and a new animal is chosen On each release the first letter is bumped and a new animal is chosen
@ -325,6 +349,45 @@ rec {
*/ */
warnIf = cond: msg: if cond then warn msg else id; warnIf = cond: msg: if cond then warn msg else id;
/*
Like the `assert b; e` expression, but with a custom error message and
without the semicolon.
If true, return the identity function, `r: r`.
If false, throw the error message.
Calls can be juxtaposed using function application, as `(r: r) a = a`, so
`(r: r) (r: r) a = a`, and so forth.
Type: bool -> string -> a -> a
Example:
throwIfNot (lib.isList overlays) "The overlays argument to nixpkgs must be a list."
lib.foldr (x: throwIfNot (lib.isFunction x) "All overlays passed to nixpkgs must be functions.") (r: r) overlays
pkgs
*/
throwIfNot = cond: msg: if cond then x: x else throw msg;
/* Check if the elements in a list are valid values from an enum, returning the identity function, or throwing an error message otherwise.
Example:
let colorVariants = ["bright" "dark" "black"];
in checkListOfEnum "color variants" [ "standard" "light" "dark" ] colorVariants;
=>
error: color variants: bright, black unexpected; valid ones: standard, light, dark
Type: String -> List ComparableVal -> List ComparableVal -> a -> a
*/
checkListOfEnum = msg: valid: given:
let
unexpected = lib.subtractLists valid given;
in
lib.throwIfNot (unexpected == [])
"${msg}: ${builtins.concatStringsSep ", " (builtins.map builtins.toString unexpected)} unexpected; valid ones: ${builtins.concatStringsSep ", " (builtins.map builtins.toString valid)}";
info = msg: builtins.trace "INFO: ${msg}"; info = msg: builtins.trace "INFO: ${msg}";
showWarnings = warnings: res: lib.foldr (w: x: warn w x) res warnings; showWarnings = warnings: res: lib.foldr (w: x: warn w x) res warnings;

View file

@ -32,7 +32,6 @@ let
last last
length length
tail tail
unique
; ;
inherit (lib.attrsets) inherit (lib.attrsets)
attrNames attrNames
@ -48,6 +47,7 @@ let
mergeDefaultOption mergeDefaultOption
mergeEqualOption mergeEqualOption
mergeOneOption mergeOneOption
mergeUniqueOption
showFiles showFiles
showOption showOption
; ;
@ -61,7 +61,11 @@ let
boolToString boolToString
; ;
inherit (lib.modules) mergeDefinitions; inherit (lib.modules)
mergeDefinitions
fixupOptionType
mergeOptionDecls
;
outer_types = outer_types =
rec { rec {
isType = type: x: (x._type or "") == type; isType = type: x: (x._type or "") == type;
@ -162,6 +166,13 @@ rec {
# nixos/doc/manual/development/option-types.xml! # nixos/doc/manual/development/option-types.xml!
types = rec { types = rec {
raw = mkOptionType rec {
name = "raw";
description = "raw value";
check = value: true;
merge = mergeOneOption;
};
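A minimal sketch of using the new raw type (the option name is hypothetical):

  options.extraSettings = lib.mkOption {
    type = lib.types.raw;
    # No checking or merging: at most one definition, accepted as-is.
  };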
anything = mkOptionType { anything = mkOptionType {
name = "anything"; name = "anything";
description = "anything"; description = "anything";
@ -300,6 +311,19 @@ rec {
inherit (str) merge; inherit (str) merge;
}; };
# Allow a newline character at the end and trim it in the merge function.
singleLineStr =
let
inherit (strMatching "[^\n\r]*\n?") check merge;
in
mkOptionType {
name = "singleLineStr";
description = "(optionally newline-terminated) single-line string";
inherit check;
merge = loc: defs:
lib.removeSuffix "\n" (merge loc defs);
};
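A sketch of the intended use (option name and file are hypothetical):

  options.machineId = lib.mkOption {
    type = lib.types.singleLineStr;
  };
  # A definition such as `machineId = builtins.readFile ./machine-id;`
  # (file content "abc123\n") now merges to the value "abc123".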
strMatching = pattern: mkOptionType { strMatching = pattern: mkOptionType {
name = "strMatching ${escapeNixString pattern}"; name = "strMatching ${escapeNixString pattern}";
description = "string matching the pattern ${pattern}"; description = "string matching the pattern ${pattern}";
@ -344,13 +368,21 @@ rec {
emptyValue = { value = {}; }; emptyValue = { value = {}; };
}; };
# derivation is a reserved keyword. # A package is a top-level store path (/nix/store/hash-name). This includes:
# - derivations
# - more generally, attribute sets with an `outPath` or `__toString` attribute
# pointing to a store path, e.g. flake inputs
# - strings with context, e.g. "${pkgs.foo}" or (toString pkgs.foo)
# - hardcoded store path literals (/nix/store/hash-foo) or strings without context
# ("/nix/store/hash-foo"). These get a context added to them using builtins.storePath.
package = mkOptionType { package = mkOptionType {
name = "package"; name = "package";
check = x: isDerivation x || isStorePath x; check = x: isDerivation x || isStorePath x;
merge = loc: defs: merge = loc: defs:
let res = mergeOneOption loc defs; let res = mergeOneOption loc defs;
in if isDerivation res then res else toDerivation res; in if builtins.isPath res || (builtins.isString res && ! builtins.hasContext res)
then toDerivation res
else res;
}; };
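To illustrate the widened acceptance, a sketch (option and package names hypothetical):

  options.editorPackage = lib.mkOption { type = lib.types.package; };
  # All of the following definitions are handled:
  #   editorPackage = pkgs.vim;              # a derivation
  #   editorPackage = "${pkgs.vim}";         # a string with context
  #   editorPackage = /nix/store/<hash>-vim; # a store path literal, coerced
  #                                          # via toDerivation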
shellPackage = package // { shellPackage = package // {
@ -377,7 +409,7 @@ rec {
).optionalValue ).optionalValue
) def.value ) def.value
) defs))); ) defs)));
emptyValue = { value = {}; }; emptyValue = { value = []; };
getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["*"]); getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["*"]);
getSubModules = elemType.getSubModules; getSubModules = elemType.getSubModules;
substSubModules = m: listOf (elemType.substSubModules m); substSubModules = m: listOf (elemType.substSubModules m);
@ -389,7 +421,7 @@ rec {
let list = addCheck (types.listOf elemType) (l: l != []); let list = addCheck (types.listOf elemType) (l: l != []);
in list // { in list // {
description = "non-empty " + list.description; description = "non-empty " + list.description;
# Note: emptyValue is left as is, because another module may define an element. emptyValue = { }; # no .value attr, meaning unset
}; };
attrsOf = elemType: mkOptionType rec { attrsOf = elemType: mkOptionType rec {
@ -457,6 +489,18 @@ rec {
nestedTypes.elemType = elemType; nestedTypes.elemType = elemType;
}; };
unique = { message }: type: mkOptionType rec {
name = "unique";
inherit (type) description check;
merge = mergeUniqueOption { inherit message; };
emptyValue = type.emptyValue;
getSubOptions = type.getSubOptions;
getSubModules = type.getSubModules;
substSubModules = m: uniq (type.substSubModules m);
functor = (defaultFunctor name) // { wrapped = type; };
nestedTypes.elemType = type;
};
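A sketch of the new combinator (option name and message are hypothetical):

  options.defaultShell = lib.mkOption {
    type = lib.types.unique {
      message = "the default shell must be configured in exactly one place";
    } lib.types.str;
  };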
# Null or value of ... # Null or value of ...
nullOr = elemType: mkOptionType rec { nullOr = elemType: mkOptionType rec {
name = "nullOr"; name = "nullOr";
@ -478,7 +522,7 @@ rec {
functionTo = elemType: mkOptionType { functionTo = elemType: mkOptionType {
name = "functionTo"; name = "functionTo";
description = "function that evaluates to a(n) ${elemType.name}"; description = "function that evaluates to a(n) ${elemType.description}";
check = isFunction; check = isFunction;
merge = loc: defs: merge = loc: defs:
fnArgs: (mergeDefinitions (loc ++ [ "[function body]" ]) elemType (map (fn: { inherit (fn) file; value = fn.value fnArgs; }) defs)).mergedValue; fnArgs: (mergeDefinitions (loc ++ [ "[function body]" ]) elemType (map (fn: { inherit (fn) file; value = fn.value fnArgs; }) defs)).mergedValue;
@ -493,6 +537,33 @@ rec {
modules = toList modules; modules = toList modules;
}; };
# The type of a type!
optionType = mkOptionType {
name = "optionType";
description = "optionType";
check = value: value._type or null == "option-type";
merge = loc: defs:
if length defs == 1
then (head defs).value
else let
# Prepares the type definitions for mergeOptionDecls, which
# annotates submodules types with file locations
optionModules = map ({ value, file }:
{
_file = file;
# There's no way to merge types directly from the module system,
# but we can cheat a bit by just declaring an option with the type
options = lib.mkOption {
type = value;
};
}
) defs;
# Merges all the types into a single one, including submodule merging.
# This also propagates file information to all submodules
mergedOption = fixupOptionType loc (mergeOptionDecls loc optionModules);
in mergedOption.type;
};
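A sketch of what this enables: an option whose value is itself a type (names hypothetical), e.g. to let users override the type used for a freeform settings block:

  options.settingsType = lib.mkOption {
    type = lib.types.optionType;
    default = lib.types.attrsOf lib.types.str;
  };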
submoduleWith = submoduleWith =
{ modules { modules
, specialArgs ? {} , specialArgs ? {}
@ -501,14 +572,18 @@ rec {
let let
inherit (lib.modules) evalModules; inherit (lib.modules) evalModules;
coerce = unify: value: if isFunction value shorthandToModule = if shorthandOnlyDefinesConfig == false
then setFunctionArgs (args: unify (value args)) (functionArgs value) then value: value
else unify (if shorthandOnlyDefinesConfig then { config = value; } else value); else value: { config = value; };
allModules = defs: imap1 (n: { value, file }: allModules = defs: imap1 (n: { value, file }:
if isAttrs value || isFunction value then if isFunction value
# Annotate the value with the location of its definition for better error messages then setFunctionArgs
coerce (lib.modules.unifyModuleSyntax file "${toString file}-${toString n}") value (args: lib.modules.unifyModuleSyntax file "${toString file}-${toString n}" (value args))
(functionArgs value)
else if isAttrs value
then
lib.modules.unifyModuleSyntax file "${toString file}-${toString n}" (shorthandToModule value)
else value else value
) defs; ) defs;
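The distinction matters for shorthand definitions; a sketch (module and option names hypothetical):

  users = lib.mkOption {
    type = lib.types.attrsOf (lib.types.submoduleWith {
      modules = [ ./user.nix ];
      shorthandOnlyDefinesConfig = true;
    });
  };
  # A bare definition like `users.alice = { shell = "zsh"; };` is then
  # wrapped as { config = { shell = "zsh"; }; } before module evaluation.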
@ -576,7 +651,11 @@ rec {
then lhs.specialArgs // rhs.specialArgs then lhs.specialArgs // rhs.specialArgs
else throw "A submoduleWith option is declared multiple times with the same specialArgs \"${toString (attrNames intersecting)}\""; else throw "A submoduleWith option is declared multiple times with the same specialArgs \"${toString (attrNames intersecting)}\"";
shorthandOnlyDefinesConfig = shorthandOnlyDefinesConfig =
if lhs.shorthandOnlyDefinesConfig == rhs.shorthandOnlyDefinesConfig if lhs.shorthandOnlyDefinesConfig == null
then rhs.shorthandOnlyDefinesConfig
else if rhs.shorthandOnlyDefinesConfig == null
then lhs.shorthandOnlyDefinesConfig
else if lhs.shorthandOnlyDefinesConfig == rhs.shorthandOnlyDefinesConfig
then lhs.shorthandOnlyDefinesConfig then lhs.shorthandOnlyDefinesConfig
else throw "A submoduleWith option is declared multiple times with conflicting shorthandOnlyDefinesConfig values"; else throw "A submoduleWith option is declared multiple times with conflicting shorthandOnlyDefinesConfig values";
}; };
@ -586,6 +665,7 @@ rec {
# A value from a set of allowed ones. # A value from a set of allowed ones.
enum = values: enum = values:
let let
inherit (lib.lists) unique;
show = v: show = v:
if builtins.isString v then ''"${v}"'' if builtins.isString v then ''"${v}"''
else if builtins.isInt v then builtins.toString v else if builtins.isInt v then builtins.toString v

File diff suppressed because it is too large

View file

@ -4,20 +4,19 @@
# Example how to work with the `lib.maintainers` attrset. # Example how to work with the `lib.maintainers` attrset.
# Can be used to check whether all user handles are still valid. # Can be used to check whether all user handles are still valid.
set -e set -o errexit -o noclobber -o nounset -o pipefail
shopt -s failglob inherit_errexit
# nixpkgs='<nixpkgs>'
# if [ -n "$1" ]; then
function checkCommits { function checkCommits {
local user="$1" local ret status tmp user
local tmp=$(mktemp) user="$1"
tmp=$(mktemp)
curl --silent -w "%{http_code}" \ curl --silent -w "%{http_code}" \
"https://github.com/NixOS/nixpkgs/commits?author=$user" \ "https://github.com/NixOS/nixpkgs/commits?author=$user" \
> "$tmp" > "$tmp"
# the last line of tmp contains the http status # the last line of tmp contains the http status
local status=$(tail -n1 "$tmp") status=$(tail -n1 "$tmp")
local ret= ret=
case $status in case $status in
200) if <"$tmp" grep -i "no commits found" > /dev/null; then 200) if <"$tmp" grep -i "no commits found" > /dev/null; then
ret=1 ret=1
@ -31,7 +30,7 @@ function checkCommits {
checkCommits "$user" checkCommits "$user"
ret=$? ret=$?
;; ;;
*) printf "BAD STATUS: $(tail -n1 $tmp) for %s\n" "$user"; ret=1 *) printf "BAD STATUS: $(tail -n1 "$tmp") for %s\n" "$user"; ret=1
ret=1 ret=1
;; ;;
esac esac
@ -63,4 +62,5 @@ nix-instantiate -A lib.maintainers --eval --strict --json \
| jq -r '.[]|.github|select(.)' \ | jq -r '.[]|.github|select(.)' \
| parallel -j5 checkUser | parallel -j5 checkUser
# To check some arbitrary users:
# parallel -j100 checkUser ::: "eelco" "profpatsch" "Profpatsch" "a" # parallel -j100 checkUser ::: "eelco" "profpatsch" "Profpatsch" "a"

View file

@ -20,7 +20,7 @@ HACKAGE2NIX="${HACKAGE2NIX:-hackage2nix}"
# See: https://github.com/NixOS/nixpkgs/pull/122023 # See: https://github.com/NixOS/nixpkgs/pull/122023
export LC_ALL=C.UTF-8 export LC_ALL=C.UTF-8
extraction_derivation='with import ./. {}; runCommand "unpacked-cabal-hashes" { } "tar xf ${all-cabal-hashes} --strip-components=1 --one-top-level=$out"' extraction_derivation='with import ./. {}; runCommandLocal "unpacked-cabal-hashes" { } "tar xf ${all-cabal-hashes} --strip-components=1 --one-top-level=$out"'
unpacked_hackage="$(nix-build -E "$extraction_derivation" --no-out-link)" unpacked_hackage="$(nix-build -E "$extraction_derivation" --no-out-link)"
config_dir=pkgs/development/haskell-modules/configuration-hackage2nix config_dir=pkgs/development/haskell-modules/configuration-hackage2nix

View file

@ -0,0 +1,136 @@
/* Nix expression to test for regressions in the Haskell configuration overlays.
test-configurations.nix determines all attributes touched by given Haskell
configuration overlays (i.e. pkgs/development/haskell-modules/configuration-*.nix)
and builds all derivations (or at least a reasonable subset) affected by
these overrides.
By default, it checks `configuration-{common,nix,ghc-8.10.x}.nix`. You can
invoke it like this:
nix-build maintainers/scripts/haskell/test-configurations.nix --keep-going
It is possible to specify other configurations:
nix-build maintainers/scripts/haskell/test-configurations.nix \
--arg files '[ "configuration-ghc-9.0.x.nix" "configuration-ghc-9.2.x.nix" ]' \
--keep-going
You can also just supply a single string:
nix-build maintainers/scripts/haskell/test-configurations.nix \
--argstr files "configuration-arm.nix" --keep-going
You can even supply full paths which is handy, as it allows for tab-completing
the configurations:
nix-build maintainers/scripts/haskell/test-configurations.nix \
--argstr files pkgs/development/haskell-modules/configuration-arm.nix \
--keep-going
By default, derivations that fail to evaluate are skipped, unless they are
just marked as broken. You can check for other eval errors like this:
nix-build maintainers/scripts/haskell/test-configurations.nix \
--arg skipEvalErrors false --keep-going
You can also disable checking broken packages by passing a nixpkgs config:
nix-build maintainers/scripts/haskell/test-configurations.nix \
--arg config '{ allowBroken = false; }' --keep-going
By default the haskell.packages.ghc*Binary sets used for bootstrapping GHC
are _not_ tested. You can change this using:
nix-build maintainers/scripts/haskell/test-configurations.nix \
--arg skipBinaryGHCs false --keep-going
*/
{ files ? [
"configuration-common.nix"
"configuration-nix.nix"
"configuration-ghc-8.10.x.nix"
]
, nixpkgsPath ? ../../..
, config ? { allowBroken = true; }
, skipEvalErrors ? true
, skipBinaryGHCs ? true
}:
let
pkgs = import nixpkgsPath { inherit config; };
inherit (pkgs) lib;
# see usage explanation for the input format `files` allows
files' = builtins.map builtins.baseNameOf (
if !builtins.isList files then [ files ] else files
);
setsForFile = fileName:
let
# extract the unique part of the config's file name
configName = builtins.head (
builtins.match "configuration-(.+).nix" fileName
);
# match the major and minor version of the GHC the config is intended for, if any
configVersion = lib.concatStrings (
builtins.match "ghc-([0-9]+).([0-9]+).x" configName
);
# return all package sets under haskell.packages matching the version components
setsForVersion = builtins.map (name: pkgs.haskell.packages.${name}) (
builtins.filter (setName:
lib.hasPrefix "ghc${configVersion}" setName
&& (skipBinaryGHCs -> !(lib.hasInfix "Binary" setName))
) (
builtins.attrNames pkgs.haskell.packages
)
);
defaultSets = [ pkgs.haskellPackages ];
in {
# use plain haskellPackages for the version-agnostic files
# TODO(@sternenseemann): also consider currently selected versioned sets
"common" = defaultSets;
"nix" = defaultSets;
"arm" = defaultSets;
"darwin" = defaultSets;
}.${configName} or setsForVersion;
# attribute set that has all the attributes of haskellPackages set to null
availableHaskellPackages = builtins.listToAttrs (
builtins.map (attr: lib.nameValuePair attr null) (
builtins.attrNames pkgs.haskellPackages
)
);
# evaluate a configuration and only return the attributes changed by it,
# pass availableHaskellPackages as super in case intersectAttrs is used
overriddenAttrs = fileName: builtins.attrNames (
lib.fix (self:
import (nixpkgsPath + "/pkgs/development/haskell-modules/${fileName}") {
haskellLib = pkgs.haskell.lib.compose;
inherit pkgs;
} self availableHaskellPackages
)
);
# list of derivations that are affected by overrides in the given configuration
# overlays. For common, nix, darwin etc. only the derivation from the default
# package set will be emitted.
packages = builtins.filter (v:
lib.warnIf (v.meta.broken or false) "${v.pname} is marked as broken" (
v != null
&& (skipEvalErrors -> (builtins.tryEval (v.outPath or v)).success)
)
) (
lib.concatMap (fileName:
let
sets = setsForFile fileName;
attrs = overriddenAttrs fileName;
in
lib.concatMap (set: builtins.map (attr: set.${attr}) attrs) sets
) files'
);
in
packages

View file

@ -68,7 +68,7 @@ luautf8,,,,,,pstn
luazip,,,,,, luazip,,,,,,
lua-yajl,,,,,,pstn lua-yajl,,,,,,pstn
luuid,,,,,, luuid,,,,,,
luv,,,,1.42.0-0,, luv,,,,1.43.0-0,,
lyaml,,,,,,lblasc lyaml,,,,,,lblasc
markdown,,,,,, markdown,,,,,,
mediator_lua,,,,,, mediator_lua,,,,,,


View file

@ -1,4 +1,4 @@
# Used by pkgs/misc/vim-plugins/update.py and pkgs/applications/editors/kakoune/plugins/update.py # Used by pkgs/applications/editors/vim/plugins/update.py and pkgs/applications/editors/kakoune/plugins/update.py
# format: # format:
# $ nix run nixpkgs.python3Packages.black -c black update.py # $ nix run nixpkgs.python3Packages.black -c black update.py
@ -73,42 +73,90 @@ def retry(ExceptionToCheck: Any, tries: int = 4, delay: float = 3, backoff: floa
return deco_retry return deco_retry
@dataclass
class FetchConfig:
proc: int
github_token: str
def make_request(url: str) -> urllib.request.Request:
token = os.getenv("GITHUB_API_TOKEN") def make_request(url: str, token=None) -> urllib.request.Request:
headers = {} headers = {}
if token is not None: if token is not None:
headers["Authorization"] = f"token {token}" headers["Authorization"] = f"token {token}"
return urllib.request.Request(url, headers=headers) return urllib.request.Request(url, headers=headers)
@dataclass
class PluginDesc:
owner: str
repo: str
branch: str
alias: Optional[str]
class Repo: class Repo:
def __init__( def __init__(
self, owner: str, name: str, branch: str, alias: Optional[str] self, uri: str, branch: str, alias: Optional[str]
) -> None: ) -> None:
self.owner = owner self.uri = uri
self.name = name '''Url to the repo'''
self.branch = branch self.branch = branch
self.alias = alias self.alias = alias
self.redirect: Dict[str, str] = {} self.redirect: Dict[str, str] = {}
self.token = "dummy_token"
@property
def name(self):
return self.uri.split('/')[-1]
def __repr__(self) -> str:
return f"Repo({self.name}, {self.uri})"
@retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
def has_submodules(self) -> bool:
return True
@retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
def latest_commit(self) -> Tuple[str, datetime]:
loaded = self._prefetch(None)
updated = datetime.strptime(loaded['date'], "%Y-%m-%dT%H:%M:%S%z")
return loaded['rev'], updated
def _prefetch(self, ref: Optional[str]):
cmd = ["nix-prefetch-git", "--quiet", "--fetch-submodules", self.uri]
if ref is not None:
cmd.append(ref)
log.debug(cmd)
data = subprocess.check_output(cmd)
loaded = json.loads(data)
return loaded
def prefetch(self, ref: Optional[str]) -> str:
loaded = self._prefetch(ref)
return loaded["sha256"]
def as_nix(self, plugin: "Plugin") -> str:
return f'''fetchgit {{
url = "{self.uri}";
rev = "{plugin.commit}";
sha256 = "{plugin.sha256}";
}}'''
class RepoGitHub(Repo):
def __init__(
self, owner: str, repo: str, branch: str, alias: Optional[str]
) -> None:
self.owner = owner
self.repo = repo
self.token = None
'''Url to the repo'''
super().__init__(self.url(""), branch, alias)
log.debug("Instantiating github repo %s/%s", self.owner, self.repo)
@property
def name(self):
return self.repo
def url(self, path: str) -> str: def url(self, path: str) -> str:
return urljoin(f"https://github.com/{self.owner}/{self.name}/", path) return urljoin(f"https://github.com/{self.owner}/{self.name}/", path)
def __repr__(self) -> str:
return f"Repo({self.owner}, {self.name})"
@retry(urllib.error.URLError, tries=4, delay=3, backoff=2) @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
def has_submodules(self) -> bool: def has_submodules(self) -> bool:
try: try:
req = make_request(self.url(f"blob/{self.branch}/.gitmodules")) req = make_request(self.url(f"blob/{self.branch}/.gitmodules"), self.token)
urllib.request.urlopen(req, timeout=10).close() urllib.request.urlopen(req, timeout=10).close()
except urllib.error.HTTPError as e: except urllib.error.HTTPError as e:
if e.code == 404: if e.code == 404:
@ -120,9 +168,9 @@ class Repo:
@retry(urllib.error.URLError, tries=4, delay=3, backoff=2) @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
def latest_commit(self) -> Tuple[str, datetime]: def latest_commit(self) -> Tuple[str, datetime]:
commit_url = self.url(f"commits/{self.branch}.atom") commit_url = self.url(f"commits/{self.branch}.atom")
commit_req = make_request(commit_url) commit_req = make_request(commit_url, self.token)
with urllib.request.urlopen(commit_req, timeout=10) as req: with urllib.request.urlopen(commit_req, timeout=10) as req:
self.check_for_redirect(commit_url, req) self._check_for_redirect(commit_url, req)
xml = req.read() xml = req.read()
root = ET.fromstring(xml) root = ET.fromstring(xml)
latest_entry = root.find(ATOM_ENTRY) latest_entry = root.find(ATOM_ENTRY)
@ -137,7 +185,7 @@ class Repo:
updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ") updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ")
return Path(str(url.path)).name, updated return Path(str(url.path)).name, updated
def check_for_redirect(self, url: str, req: http.client.HTTPResponse): def _check_for_redirect(self, url: str, req: http.client.HTTPResponse):
response_url = req.geturl() response_url = req.geturl()
if url != response_url: if url != response_url:
new_owner, new_name = ( new_owner, new_name = (
@ -150,11 +198,13 @@ class Repo:
new_plugin = plugin_line.format(owner=new_owner, name=new_name) new_plugin = plugin_line.format(owner=new_owner, name=new_name)
self.redirect[old_plugin] = new_plugin self.redirect[old_plugin] = new_plugin
def prefetch_git(self, ref: str) -> str:
data = subprocess.check_output( def prefetch(self, commit: str) -> str:
["nix-prefetch-git", "--fetch-submodules", self.url(""), ref] if self.has_submodules():
) sha256 = super().prefetch(commit)
return json.loads(data)["sha256"] else:
sha256 = self.prefetch_github(commit)
return sha256
def prefetch_github(self, ref: str) -> str: def prefetch_github(self, ref: str) -> str:
data = subprocess.check_output( data = subprocess.check_output(
@ -162,6 +212,33 @@ class Repo:
) )
return data.strip().decode("utf-8") return data.strip().decode("utf-8")
def as_nix(self, plugin: "Plugin") -> str:
if plugin.has_submodules:
submodule_attr = "\n fetchSubmodules = true;"
else:
submodule_attr = ""
return f'''fetchFromGitHub {{
owner = "{self.owner}";
repo = "{self.repo}";
rev = "{plugin.commit}";
sha256 = "{plugin.sha256}";{submodule_attr}
}}'''
@dataclass
class PluginDesc:
repo: Repo
branch: str
alias: Optional[str]
@property
def name(self):
if self.alias is None:
return self.repo.name
else:
return self.alias
class Plugin: class Plugin:
def __init__( def __init__(
@ -193,6 +270,7 @@ class Plugin:
return copy return copy
class Editor: class Editor:
"""The configuration of the update script.""" """The configuration of the update script."""
@ -219,15 +297,41 @@ class Editor:
"""To fill the cache""" """To fill the cache"""
return get_current_plugins(self) return get_current_plugins(self)
def load_plugin_spec(self, plugin_file) -> List[PluginDesc]: def load_plugin_spec(self, config: FetchConfig, plugin_file) -> List[PluginDesc]:
return load_plugin_spec(plugin_file) plugins = []
with open(plugin_file) as f:
for line in f:
if line.startswith("#"):
continue
plugin = parse_plugin_line(config, line)
plugins.append(plugin)
return plugins
def generate_nix(self, plugins, outfile: str): def generate_nix(self, plugins, outfile: str):
'''Returns nothing for now, writes directly to outfile''' '''Returns nothing for now, writes directly to outfile'''
raise NotImplementedError() raise NotImplementedError()
def get_update(self, input_file: str, outfile: str, proc: int): def get_update(self, input_file: str, outfile: str, config: FetchConfig):
return get_update(input_file, outfile, proc, editor=self) cache: Cache = Cache(self.get_current_plugins(), self.cache_file)
_prefetch = functools.partial(prefetch, cache=cache)
def update() -> dict:
plugin_names = self.load_plugin_spec(config, input_file)
try:
pool = Pool(processes=config.proc)
results = pool.map(_prefetch, plugin_names)
finally:
cache.store()
plugins, redirects = check_results(results)
self.generate_nix(plugins, outfile)
return redirects
return update
@property @property
def attr_path(self): def attr_path(self):
@ -241,9 +345,9 @@ class Editor:
def create_parser(self): def create_parser(self):
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
description=( description=(f"""
f"Updates nix derivations for {self.name} plugins" Updates nix derivations for {self.name} plugins.\n
f"By default from {self.default_in} to {self.default_out}" By default from {self.default_in} to {self.default_out}"""
) )
) )
parser.add_argument( parser.add_argument(
@ -273,7 +377,15 @@ class Editor:
dest="proc", dest="proc",
type=int, type=int,
default=30, default=30,
help="Number of concurrent processes to spawn.", help="Number of concurrent processes to spawn. Setting --github-token allows higher values.",
)
parser.add_argument(
"--github-token",
"-t",
type=str,
default=os.getenv("GITHUB_API_TOKEN"),
help="""Allows to set --proc to higher values.
Uses GITHUB_API_TOKEN environment variables as the default value.""",
) )
parser.add_argument( parser.add_argument(
"--no-commit", "-n", action="store_true", default=False, "--no-commit", "-n", action="store_true", default=False,
@ -305,7 +417,7 @@ class CleanEnvironment(object):
def get_current_plugins(editor: Editor) -> List[Plugin]: def get_current_plugins(editor: Editor) -> List[Plugin]:
with CleanEnvironment(): with CleanEnvironment():
cmd = ["nix", "eval", "--impure", "--json", "--expr", editor.get_plugins] cmd = ["nix", "eval", "--extra-experimental-features", "nix-command", "--impure", "--json", "--expr", editor.get_plugins]
log.debug("Running command %s", cmd) log.debug("Running command %s", cmd)
out = subprocess.check_output(cmd) out = subprocess.check_output(cmd)
data = json.loads(out) data = json.loads(out)
@ -320,32 +432,30 @@ def prefetch_plugin(
p: PluginDesc, p: PluginDesc,
cache: "Optional[Cache]" = None, cache: "Optional[Cache]" = None,
) -> Tuple[Plugin, Dict[str, str]]: ) -> Tuple[Plugin, Dict[str, str]]:
user, repo_name, branch, alias = p.owner, p.repo, p.branch, p.alias repo, branch, alias = p.repo, p.branch, p.alias
log.info(f"Fetching last commit for plugin {user}/{repo_name}@{branch}") name = alias or p.repo.name
repo = Repo(user, repo_name, branch, alias) commit = None
log.info(f"Fetching last commit for plugin {name} from {repo.uri}@{branch}")
commit, date = repo.latest_commit() commit, date = repo.latest_commit()
has_submodules = repo.has_submodules()
cached_plugin = cache[commit] if cache else None cached_plugin = cache[commit] if cache else None
if cached_plugin is not None: if cached_plugin is not None:
log.debug("Cache hit !") log.debug("Cache hit !")
cached_plugin.name = alias or repo_name cached_plugin.name = name
cached_plugin.date = date cached_plugin.date = date
return cached_plugin, repo.redirect return cached_plugin, repo.redirect
print(f"prefetch {user}/{repo_name}") has_submodules = repo.has_submodules()
if has_submodules: print(f"prefetch {name}")
sha256 = repo.prefetch_git(commit) sha256 = repo.prefetch(commit)
else:
sha256 = repo.prefetch_github(commit)
return ( return (
Plugin(alias or repo_name, commit, has_submodules, sha256, date=date), Plugin(name, commit, has_submodules, sha256, date=date),
repo.redirect, repo.redirect,
) )
def fetch_plugin_from_pluginline(plugin_line: str) -> Plugin: def fetch_plugin_from_pluginline(config: FetchConfig, plugin_line: str) -> Plugin:
plugin, _ = prefetch_plugin(parse_plugin_line(plugin_line)) plugin, _ = prefetch_plugin(parse_plugin_line(config, plugin_line))
return plugin return plugin
@ -360,16 +470,17 @@ def print_download_error(plugin: str, ex: Exception):
def check_results( def check_results(
results: List[Tuple[str, str, Union[Exception, Plugin], Dict[str, str]]] results: List[Tuple[PluginDesc, Union[Exception, Plugin], Dict[str, str]]]
) -> Tuple[List[Tuple[str, str, Plugin]], Dict[str, str]]: ) -> Tuple[List[Tuple[PluginDesc, Plugin]], Dict[str, str]]:
''' '''
failures: List[Tuple[str, Exception]] = [] failures: List[Tuple[str, Exception]] = []
plugins = [] plugins = []
redirects: Dict[str, str] = {} redirects: Dict[str, str] = {}
for (owner, name, result, redirect) in results: for (pdesc, result, redirect) in results:
if isinstance(result, Exception): if isinstance(result, Exception):
failures.append((name, result)) failures.append((pdesc.name, result))
else: else:
plugins.append((owner, name, result)) plugins.append((pdesc, result))
redirects.update(redirect) redirects.update(redirect)
print(f"{len(results) - len(failures)} plugins were checked", end="") print(f"{len(results) - len(failures)} plugins were checked", end="")
@ -384,30 +495,30 @@ def check_results(
sys.exit(1) sys.exit(1)
def parse_plugin_line(line: str) -> PluginDesc: def make_repo(uri, branch, alias) -> Repo:
'''Instantiate a Repo with the correct specialization depending on server (github spec)'''
# dumb check to see if it's of the form owner/repo (=> github) or https://...
res = uri.split('/')
if len(res) <= 2:
repo = RepoGitHub(res[0], res[1], branch, alias)
else:
repo = Repo(uri.strip(), branch, alias)
return repo
def parse_plugin_line(config: FetchConfig, line: str) -> PluginDesc:
branch = "HEAD" branch = "HEAD"
alias = None alias = None
name, repo = line.split("/") uri = line
if " as " in repo: if " as " in uri:
repo, alias = repo.split(" as ") uri, alias = uri.split(" as ")
alias = alias.strip() alias = alias.strip()
if "@" in repo: if "@" in uri:
repo, branch = repo.split("@") uri, branch = uri.split("@")
return PluginDesc(name.strip(), repo.strip(), branch.strip(), alias) repo = make_repo(uri.strip(), branch.strip(), alias)
repo.token = config.github_token
return PluginDesc(repo, branch.strip(), alias)
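For orientation, the spec lines accepted by this parser look like the following (a non-exhaustive sketch derived from the branches above):

  owner/repo
  owner/repo@branch
  owner/repo as alias
  https://git.example.com/owner/repo@branch as alias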
def load_plugin_spec(plugin_file: str) -> List[PluginDesc]:
plugins = []
with open(plugin_file) as f:
for line in f:
plugin = parse_plugin_line(line)
if not plugin.owner:
msg = f"Invalid repository {line}, must be in the format owner/repo[ as alias]"
print(msg, file=sys.stderr)
sys.exit(1)
plugins.append(plugin)
return plugins
def get_cache_path(cache_file_name: str) -> Optional[Path]: def get_cache_path(cache_file_name: str) -> Optional[Path]:
@ -465,17 +576,17 @@ class Cache:
def prefetch( def prefetch(
pluginDesc: PluginDesc, cache: Cache pluginDesc: PluginDesc, cache: Cache
) -> Tuple[str, str, Union[Exception, Plugin], dict]: ) -> Tuple[PluginDesc, Union[Exception, Plugin], dict]:
owner, repo = pluginDesc.owner, pluginDesc.repo
try: try:
plugin, redirect = prefetch_plugin(pluginDesc, cache) plugin, redirect = prefetch_plugin(pluginDesc, cache)
cache[plugin.commit] = plugin cache[plugin.commit] = plugin
return (owner, repo, plugin, redirect) return (pluginDesc, plugin, redirect)
except Exception as e: except Exception as e:
return (owner, repo, e, {}) return (pluginDesc, e, {})
def rewrite_input( def rewrite_input(
config: FetchConfig,
input_file: Path, input_file: Path,
deprecated: Path, deprecated: Path,
redirects: Dict[str, str] = None, redirects: Dict[str, str] = None,
@ -493,8 +604,8 @@ def rewrite_input(
with open(deprecated, "r") as f: with open(deprecated, "r") as f:
deprecations = json.load(f) deprecations = json.load(f)
for old, new in redirects.items(): for old, new in redirects.items():
old_plugin = fetch_plugin_from_pluginline(old) old_plugin = fetch_plugin_from_pluginline(config, old)
new_plugin = fetch_plugin_from_pluginline(new) new_plugin = fetch_plugin_from_pluginline(config, new)
if old_plugin.normalized_name != new_plugin.normalized_name: if old_plugin.normalized_name != new_plugin.normalized_name:
deprecations[old_plugin.normalized_name] = { deprecations[old_plugin.normalized_name] = {
"new": new_plugin.normalized_name, "new": new_plugin.normalized_name,
@ -520,40 +631,21 @@ def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
print("no changes in working tree to commit") print("no changes in working tree to commit")
def get_update(input_file: str, outfile: str, proc: int, editor: Editor):
cache: Cache = Cache(editor.get_current_plugins(), editor.cache_file)
_prefetch = functools.partial(prefetch, cache=cache)
def update() -> dict:
plugin_names = editor.load_plugin_spec(input_file)
try:
pool = Pool(processes=proc)
results = pool.map(_prefetch, plugin_names)
finally:
cache.store()
plugins, redirects = check_results(results)
editor.generate_nix(plugins, outfile)
return redirects
return update
def update_plugins(editor: Editor, args): def update_plugins(editor: Editor, args):
"""The main entry function of this module. All input arguments are grouped in the `Editor`.""" """The main entry function of this module. All input arguments are grouped in the `Editor`."""
log.setLevel(LOG_LEVELS[args.debug]) log.setLevel(LOG_LEVELS[args.debug])
log.info("Start updating plugins") log.info("Start updating plugins")
update = editor.get_update(args.input_file, args.outfile, args.proc) fetch_config = FetchConfig(args.proc, args.github_token)
update = editor.get_update(args.input_file, args.outfile, fetch_config)
redirects = update() redirects = update()
editor.rewrite_input(args.input_file, editor.deprecated, redirects) editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, redirects)
autocommit = not args.no_commit autocommit = not args.no_commit
nixpkgs_repo = None
if autocommit: if autocommit:
nixpkgs_repo = git.Repo(editor.root, search_parent_directories=True) nixpkgs_repo = git.Repo(editor.root, search_parent_directories=True)
commit(nixpkgs_repo, f"{editor.attr_path}: update", [args.outfile]) commit(nixpkgs_repo, f"{editor.attr_path}: update", [args.outfile])
@ -568,9 +660,9 @@ def update_plugins(editor: Editor, args):
) )
for plugin_line in args.add_plugins: for plugin_line in args.add_plugins:
editor.rewrite_input(args.input_file, editor.deprecated, append=(plugin_line + "\n",)) editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, append=(plugin_line + "\n",))
update() update()
plugin = fetch_plugin_from_pluginline(plugin_line) plugin = fetch_plugin_from_pluginline(fetch_config, plugin_line)
if autocommit: if autocommit:
commit( commit(
nixpkgs_repo, nixpkgs_repo,

View file

@ -0,0 +1,202 @@
#!/usr/bin/env nix-shell
#!nix-shell -i python3 -p "python3.withPackages(ps: with ps; [ ])" nix
"""
A program to remove old aliases or convert old aliases to throws
Example usage:
./maintainers/scripts/remove-old-aliases.py --year 2018 --file ./pkgs/top-level/aliases.nix
Check this file with mypy after every change!
$ mypy --strict maintainers/scripts/remove-old-aliases.py
"""
import argparse
import shutil
import subprocess
from datetime import date as datetimedate
from datetime import datetime
from pathlib import Path
def process_args() -> argparse.Namespace:
"""process args"""
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument(
"--year", required=True, type=int, help="operate on aliases older than $year"
)
arg_parser.add_argument(
"--month",
type=int,
default=1,
help="operate on aliases older than $year-$month",
)
arg_parser.add_argument("--file", required=True, type=Path, help="alias file")
arg_parser.add_argument(
"--dry-run", action="store_true", help="don't modify files, only print results"
)
return arg_parser.parse_args()
def get_date_lists(
txt: list[str], cutoffdate: datetimedate
) -> tuple[list[str], list[str], list[str]]:
"""get a list of lines in which the date is older than $cutoffdate"""
date_older_list: list[str] = []
date_older_throw_list: list[str] = []
date_sep_line_list: list[str] = []
for lineno, line in enumerate(txt, start=1):
line = line.rstrip()
my_date = None
for string in line.split():
string = string.strip(":")
try:
# strip ':' in case there is a string like 2019-11-01:
my_date = datetime.strptime(string, "%Y-%m-%d").date()
except ValueError:
try:
my_date = datetime.strptime(string, "%Y-%m").date()
except ValueError:
continue
if my_date is None or my_date > cutoffdate:
continue
if "=" not in line:
date_sep_line_list.append(f"{lineno} {line}")
# 'if' lines could be complicated
elif "if " in line and "if =" not in line:
print(f"RESOLVE MANUALLY {line}")
elif "throw" in line:
date_older_throw_list.append(line)
else:
date_older_list.append(line)
return (
date_older_list,
date_sep_line_list,
date_older_throw_list,
)
def convert_to_throw(date_older_list: list[str]) -> list[tuple[str, str]]:
"""convert a list of lines to throws"""
converted_list = []
for line in date_older_list.copy():
indent: str = " " * (len(line) - len(line.lstrip()))
before_equal = ""
after_equal = ""
try:
before_equal, after_equal = (x.strip() for x in line.split("=", maxsplit=2))
except ValueError as err:
print(err, line, "\n")
date_older_list.remove(line)
continue
alias = before_equal.strip()
after_equal_list = [x.strip(";:") for x in after_equal.split()]
converted = (
f"{indent}{alias} = throw \"'{alias}' has been renamed to/replaced by"
f" '{after_equal_list.pop(0)}'\";"
f' # Converted to throw {datetime.today().strftime("%Y-%m-%d")}'
)
converted_list.append((line, converted))
return converted_list
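The rewrite performed here turns an alias binding into a throw, roughly (dates illustrative):

  # before: foo = bar; # added 2019-11-01
  # after:  foo = throw "'foo' has been renamed to/replaced by 'bar'"; # Converted to throw <today>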
def generate_text_to_write(
txt: list[str],
date_older_list: list[str],
converted_to_throw: list[tuple[str, str]],
date_older_throw_list: list[str],
) -> list[str]:
"""generate a list of text to be written to the aliasfile"""
text_to_write: list[str] = []
for line in txt:
text_to_append: str = ""
if converted_to_throw:
for tupl in converted_to_throw:
if line == tupl[0]:
text_to_append = f"{tupl[1]}\n"
if line not in date_older_list and line not in date_older_throw_list:
text_to_append = f"{line}\n"
if text_to_append:
text_to_write.append(text_to_append)
return text_to_write
def write_file(
aliasfile: Path,
text_to_write: list[str],
) -> None:
"""write file"""
temp_aliasfile = Path(f"{aliasfile}.raliases")
with open(temp_aliasfile, "w", encoding="utf-8") as far:
for line in text_to_write:
far.write(line)
print("\nChecking the syntax of the new aliasfile")
try:
subprocess.run(
["nix-instantiate", "--eval", temp_aliasfile],
check=True,
stdout=subprocess.DEVNULL,
)
except subprocess.CalledProcessError:
print(
"\nSyntax check failed,",
"there may have been a line which only has\n"
'aliasname = "reason why";\n'
"when it should have been\n"
'aliasname = throw "reason why";',
)
temp_aliasfile.unlink()
return
shutil.move(f"{aliasfile}.raliases", aliasfile)
print(f"{aliasfile} modified! please verify with 'git diff'.")
def main() -> None:
"""main"""
args = process_args()
aliasfile = Path(args.file).absolute()
cutoffdate = (datetime.strptime(f"{args.year}-{args.month}-01", "%Y-%m-%d")).date()
txt: list[str] = (aliasfile.read_text(encoding="utf-8")).splitlines()
date_older_list: list[str] = []
date_sep_line_list: list[str] = []
date_older_throw_list: list[str] = []
date_older_list, date_sep_line_list, date_older_throw_list = get_date_lists(
txt, cutoffdate
)
converted_to_throw: list[tuple[str, str]] = []
converted_to_throw = convert_to_throw(date_older_list)
if date_older_list:
print(" Will be converted to throws. ".center(100, "-"))
for l_n in date_older_list:
print(l_n)
if date_older_throw_list:
print(" Will be removed. ".center(100, "-"))
for l_n in date_older_throw_list:
print(l_n)
if date_sep_line_list:
print(" On separate line, resolve manually. ".center(100, "-"))
for l_n in date_sep_line_list:
print(l_n)
if not args.dry_run:
text_to_write = generate_text_to_write(
txt, date_older_list, converted_to_throw, date_older_throw_list
)
write_file(aliasfile, text_to_write)
if __name__ == "__main__":
main()

View file

@ -25,8 +25,8 @@ from pathlib import Path
log = logging.getLogger() log = logging.getLogger()
log.addHandler(logging.StreamHandler()) log.addHandler(logging.StreamHandler())
ROOT = Path(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))).parent.parent ROOT = Path(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))).parent.parent # type: ignore
from pluginupdate import Editor, update_plugins, PluginDesc, CleanEnvironment, LOG_LEVELS, Cache from pluginupdate import Editor, update_plugins, FetchConfig, CleanEnvironment
PKG_LIST="maintainers/scripts/luarocks-packages.csv" PKG_LIST="maintainers/scripts/luarocks-packages.csv"
TMP_FILE="$(mktemp)" TMP_FILE="$(mktemp)"
@ -118,7 +118,7 @@ class LuaEditor(Editor):
def attr_path(self): def attr_path(self):
return "luaPackages" return "luaPackages"
def get_update(self, input_file: str, outfile: str, proc: int): def get_update(self, input_file: str, outfile: str, config: FetchConfig):
_prefetch = generate_pkg_nix _prefetch = generate_pkg_nix
def update() -> dict: def update() -> dict:
@ -126,14 +126,14 @@ class LuaEditor(Editor):
sorted_plugin_specs = sorted(plugin_specs, key=lambda v: v.name.lower()) sorted_plugin_specs = sorted(plugin_specs, key=lambda v: v.name.lower())
try: try:
pool = Pool(processes=proc) pool = Pool(processes=config.proc)
results = pool.map(_prefetch, sorted_plugin_specs) results = pool.map(_prefetch, sorted_plugin_specs)
finally: finally:
pass pass
self.generate_nix(results, outfile) self.generate_nix(results, outfile)
redirects = [] redirects = {}
return redirects return redirects
return update return update
@ -181,11 +181,9 @@ def generate_pkg_nix(plug: LuaPlugin):
cmd.append(plug.version) cmd.append(plug.version)
#
if plug.server != "src" and plug.server: if plug.server != "src" and plug.server:
cmd.append(f"--only-server={plug.server}") cmd.append(f"--only-server={plug.server}")
if plug.luaversion: if plug.luaversion:
with CleanEnvironment(): with CleanEnvironment():
local_pkgs = str(ROOT.resolve()) local_pkgs = str(ROOT.resolve())
@ -209,7 +207,6 @@ def main():
parser = editor.create_parser() parser = editor.create_parser()
args = parser.parse_args() args = parser.parse_args()
log.setLevel(LOG_LEVELS[args.debug])
update_plugins(editor, args) update_plugins(editor, args)

View file

@ -147,7 +147,7 @@ let
to run update script for specific package, or to run update script for specific package, or
% nix-shell maintainers/scripts/update.nix --arg predicate '(path: pkg: builtins.isList pkg.updateScript && builtins.length pkg.updateScript >= 1 && (let script = builtins.head pkg.updateScript; in builtins.isAttrs script && script.name == "gnome-update-script"))' % nix-shell maintainers/scripts/update.nix --arg predicate '(path: pkg: pkg.updateScript.name or null == "gnome-update-script")'
to run update script for all packages matching given predicate, or to run update script for all packages matching given predicate, or

View file

@ -88,6 +88,10 @@ async def commit_changes(name: str, merge_lock: asyncio.Lock, worktree: str, bra
async with merge_lock: async with merge_lock:
await check_subprocess('git', 'add', *change['files'], cwd=worktree) await check_subprocess('git', 'add', *change['files'], cwd=worktree)
commit_message = '{attrPath}: {oldVersion}{newVersion}'.format(**change) commit_message = '{attrPath}: {oldVersion}{newVersion}'.format(**change)
if 'commitMessage' in change:
commit_message = change['commitMessage']
elif 'commitBody' in change:
commit_message = commit_message + '\n\n' + change['commitBody']
await check_subprocess('git', 'commit', '--quiet', '-m', commit_message, cwd=worktree) await check_subprocess('git', 'commit', '--quiet', '-m', commit_message, cwd=worktree)
await check_subprocess('git', 'cherry-pick', branch) await check_subprocess('git', 'cherry-pick', branch)
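The change entries consumed above are JSON objects of roughly this shape (a sketch; values illustrative, with commitMessage/commitBody being the new optional fields):

  {
    "attrPath": "hello",
    "oldVersion": "1.0",
    "newVersion": "1.1",
    "files": [ "pkgs/applications/misc/hello/default.nix" ],
    "commitBody": "Release notes: ..."
  }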

View file

@ -29,6 +29,20 @@ with lib.maintainers; {
scope = "Maintain ACME-related packages and modules."; scope = "Maintain ACME-related packages and modules.";
}; };
bazel = {
members = [
mboes
marsam
uri-canva
cbley
olebedev
groodt
aherrmann
ylecornec
];
scope = "Bazel build tool & related tools https://bazel.build/";
};
beam = { beam = {
members = [ members = [
ankhers ankhers
@ -36,6 +50,7 @@ with lib.maintainers; {
DianaOlympos DianaOlympos
gleber gleber
happysalada happysalada
minijackson
yurrriq yurrriq
]; ];
scope = "Maintain BEAM-related packages and modules."; scope = "Maintain BEAM-related packages and modules.";
@ -50,7 +65,6 @@ with lib.maintainers; {
chia = { chia = {
members = [ members = [
atemu
lourkeur lourkeur
]; ];
scope = "Maintain the Chia blockchain and its dependencies"; scope = "Maintain the Chia blockchain and its dependencies";
@ -152,6 +166,17 @@ with lib.maintainers; {
scope = "Maintain Jitsi."; scope = "Maintain Jitsi.";
}; };
kubernetes = {
members = [
johanot
offline
saschagrunert
srhb
zowoq
];
scope = "Maintain the Kubernetes package and module";
};
kodi = { kodi = {
members = [ members = [
aanderse aanderse
@ -189,7 +214,6 @@ with lib.maintainers; {
mguentner mguentner
ekleog ekleog
ralith ralith
mjlbach
dandellion dandellion
sumnerevans sumnerevans
]; ];
@ -198,7 +222,7 @@ with lib.maintainers; {
openstack = { openstack = {
members = [ members = [
angustrau emilytrau
SuperSandro2000 SuperSandro2000
]; ];
scope = "Maintain the ecosystem around OpenStack"; scope = "Maintain the ecosystem around OpenStack";
@ -215,6 +239,7 @@ with lib.maintainers; {
php = { php = {
members = [ members = [
aanderse aanderse
drupol
etu etu
globin globin
ma27 ma27
@ -233,6 +258,15 @@ with lib.maintainers; {
scope = "Maintain Podman and CRI-O related packages and modules."; scope = "Maintain Podman and CRI-O related packages and modules.";
}; };
redcodelabs = {
members = [
unrooted
wr0belj
wintrmvte
];
scope = "Maintain Red Code Labs related packages and modules.";
};
sage = { sage = {
members = [ members = [
timokau timokau
@ -243,6 +277,13 @@ with lib.maintainers; {
scope = "Maintain SageMath and the dependencies that are likely to break it."; scope = "Maintain SageMath and the dependencies that are likely to break it.";
}; };
sphinx = {
members = [
SuperSandro2000
];
scope = "Maintain Sphinx related packages.";
};
serokell = { serokell = {
# Verify additions by approval of an already existing member of the team. # Verify additions by approval of an already existing member of the team.
members = [ members = [

View file

@ -9,27 +9,6 @@ let
modules = [ configuration ]; modules = [ configuration ];
}; };
# This is for `nixos-rebuild build-vm'.
vmConfig = (import ./lib/eval-config.nix {
inherit system;
modules = [ configuration ./modules/virtualisation/qemu-vm.nix ];
}).config;
# This is for `nixos-rebuild build-vm-with-bootloader'.
vmWithBootLoaderConfig = (import ./lib/eval-config.nix {
inherit system;
modules =
[ configuration
./modules/virtualisation/qemu-vm.nix
{ virtualisation.useBootLoader = true; }
({ config, ... }: {
virtualisation.useEFIBoot =
config.boot.loader.systemd-boot.enable ||
config.boot.loader.efi.canTouchEfiVariables;
})
];
}).config;
in in
{ {
@ -37,7 +16,5 @@ in
system = eval.config.system.build.toplevel; system = eval.config.system.build.toplevel;
vm = vmConfig.system.build.vm; inherit (eval.config.system.build) vm vmWithBootLoader;
vmWithBootLoader = vmWithBootLoaderConfig.system.build.vm;
} }

View file

@ -1,4 +1,13 @@
{ pkgs, options, config, version, revision, extraSources ? [] }: { pkgs
, options
, config
, version
, revision
, extraSources ? []
, baseOptionsJSON ? null
, warningsAreErrors ? true
, prefix ? ../../..
}:
with pkgs; with pkgs;
@ -11,11 +20,11 @@ let
# #
# E.g. if some `options` came from modules in ${pkgs.customModules}/nix, # E.g. if some `options` came from modules in ${pkgs.customModules}/nix,
# you'd need to include `extraSources = [ pkgs.customModules ]` # you'd need to include `extraSources = [ pkgs.customModules ]`
prefixesToStrip = map (p: "${toString p}/") ([ ../../.. ] ++ extraSources); prefixesToStrip = map (p: "${toString p}/") ([ prefix ] ++ extraSources);
stripAnyPrefixes = lib.flip (lib.foldr lib.removePrefix) prefixesToStrip; stripAnyPrefixes = lib.flip (lib.foldr lib.removePrefix) prefixesToStrip;
optionsDoc = buildPackages.nixosOptionsDoc { optionsDoc = buildPackages.nixosOptionsDoc {
inherit options revision; inherit options revision baseOptionsJSON warningsAreErrors;
transformOptions = opt: opt // { transformOptions = opt: opt // {
# Clean up declaration sites to not refer to the NixOS source tree. # Clean up declaration sites to not refer to the NixOS source tree.
declarations = map stripAnyPrefixes opt.declarations; declarations = map stripAnyPrefixes opt.declarations;
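A sketch of a caller making use of the new arguments (paths illustrative):

  import ./default.nix {
    inherit pkgs options config version revision;
    baseOptionsJSON = ./prebuilt-options.json;
    warningsAreErrors = false;
  }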
@ -161,7 +170,7 @@ let
in rec { in rec {
inherit generatedSources; inherit generatedSources;
inherit (optionsDoc) optionsJSON optionsDocBook; inherit (optionsDoc) optionsJSON optionsNix optionsDocBook;
# Generate the NixOS manual. # Generate the NixOS manual.
manualHTML = runCommand "nixos-manual-html" manualHTML = runCommand "nixos-manual-html"
@ -205,7 +214,7 @@ in rec {
manualEpub = runCommand "nixos-manual-epub" manualEpub = runCommand "nixos-manual-epub"
{ inherit sources; { inherit sources;
buildInputs = [ libxml2.bin libxslt.bin zip ]; nativeBuildInputs = [ buildPackages.libxml2.bin buildPackages.libxslt.bin buildPackages.zip ];
} }
'' ''
# Generate the epub manual. # Generate the epub manual.

Some files were not shown because too many files have changed in this diff