Script 'mail_helper' called by obssrc
Hello community,

Here is the log from the commit of package nix for openSUSE:Factory checked in 
at 2026-01-20 21:02:45
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/nix (Old)
 and      /work/SRC/openSUSE:Factory/.nix.new.1928 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "nix"

Tue Jan 20 21:02:45 2026 rev:11 rq:1328148 version:2.33.1

Changes:
--------
--- /work/SRC/openSUSE:Factory/nix/nix.changes  2026-01-12 10:32:04.724066053 
+0100
+++ /work/SRC/openSUSE:Factory/.nix.new.1928/nix.changes        2026-01-20 
21:02:55.700042215 +0100
@@ -1,0 +2,11 @@
+Mon Jan 19 20:35:00 UTC 2026 - Eyad Issa <[email protected]>
+
+- Update to version 2.33.1:
+  * Fix a heap-use-after-free bug in libstore
+  * libfetchers: Bump tarball-cache version to v2
+  * libutil: Gracefully fall back from unsupported O_TMPFILE
+  * libstore: include path in the world-writable error
+  * libutil/union-source-accessor: Barf on non-existent
+    directories
+
+-------------------------------------------------------------------

Old:
----
  nix-2.33.0.tar.gz

New:
----
  nix-2.33.1.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ nix.spec ++++++
--- /var/tmp/diff_new_pack.zq1CAI/_old  2026-01-20 21:02:56.796087575 +0100
+++ /var/tmp/diff_new_pack.zq1CAI/_new  2026-01-20 21:02:56.800087740 +0100
@@ -2,7 +2,7 @@
 # spec file for package nix
 #
 # Copyright (c) 2025 SUSE LLC
-# Copyright (c) 2025 Eyad Issa
+# Copyright (c) 2026 Eyad Issa
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -26,7 +26,7 @@
 %endif
 
 Name:           nix
-Version:        2.33.0
+Version:        2.33.1
 Release:        0
 Summary:        The purely functional package manager
 License:        LGPL-2.1-only

++++++ nix-2.33.0.tar.gz -> nix-2.33.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/nix-2.33.0/.github/workflows/backport.yml 
new/nix-2.33.1/.github/workflows/backport.yml
--- old/nix-2.33.0/.github/workflows/backport.yml       2025-12-10 
21:38:26.000000000 +0100
+++ new/nix-2.33.1/.github/workflows/backport.yml       2026-01-11 
20:46:37.000000000 +0100
@@ -26,7 +26,7 @@
           # required to find all branches
           fetch-depth: 0
       - name: Create backport PRs
-        uses: 
korthout/backport-action@d07416681cab29bf2661702f925f020aaa962997 # v3.4.1
+        uses: 
korthout/backport-action@c656f5d5851037b2b38fb5db2691a03fa229e3b2 # v4.0.1
         id: backport
         with:
           # Config README: 
https://github.com/korthout/backport-action#backport-action
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/nix-2.33.0/.github/workflows/ci.yml 
new/nix-2.33.1/.github/workflows/ci.yml
--- old/nix-2.33.0/.github/workflows/ci.yml     2025-12-10 21:38:26.000000000 
+0100
+++ new/nix-2.33.1/.github/workflows/ci.yml     2026-01-11 20:46:37.000000000 
+0100
@@ -164,7 +164,7 @@
     steps:
     - uses: actions/checkout@v6
     - name: Download installer tarball
-      uses: actions/download-artifact@v6
+      uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 
# v7.0.0
       with:
         name: installer-${{matrix.os}}
         path: out
@@ -197,79 +197,20 @@
     - run: exec bash -c "nix-channel --add 
https://releases.nixos.org/nixos/unstable/nixos-23.05pre466020.60c1d71f2ba 
nixpkgs"
     - run: exec bash -c "nix-channel --update && nix-env -iA nixpkgs.hello && 
hello"
 
-  # Steps to test CI automation in your own fork.
-  # 1. Sign-up for https://hub.docker.com/
-  # 2. Store your dockerhub username as DOCKERHUB_USERNAME in "Repository 
secrets" of your fork repository settings 
(https://github.com/$githubuser/nix/settings/secrets/actions)
-  # 3. Create an access token in https://hub.docker.com/settings/security and 
store it as DOCKERHUB_TOKEN in "Repository secrets" of your fork
-  check_secrets:
-    permissions:
-      contents: none
-    name: Check presence of secrets
-    runs-on: ubuntu-24.04
-    outputs:
-      docker: ${{ steps.secret.outputs.docker }}
-    steps:
-      - name: Check for DockerHub secrets
-        id: secret
-        env:
-          _DOCKER_SECRETS: ${{ secrets.DOCKERHUB_USERNAME }}${{ 
secrets.DOCKERHUB_TOKEN }}
-        run: |
-          echo "docker=${{ env._DOCKER_SECRETS != '' }}" >> $GITHUB_OUTPUT
-
   docker_push_image:
-    needs: [tests, check_secrets]
+    name: Push docker image to DockerHub and GHCR
+    needs: [flake_regressions, installer_test]
+    if: github.event_name == 'push' && github.ref_name == 'master'
+    uses: ./.github/workflows/docker-push.yml
+    with:
+      ref: ${{ github.sha }}
+      is_master: true
     permissions:
       contents: read
       packages: write
-    if: >-
-      needs.check_secrets.outputs.docker == 'true' &&
-      github.event_name == 'push' &&
-      github.ref_name == 'master'
-    runs-on: ubuntu-24.04
-    steps:
-    - uses: actions/checkout@v6
-      with:
-        fetch-depth: 0
-    - uses: ./.github/actions/install-nix-action
-      with:
-        dogfood: false
-        extra_nix_config: |
-          experimental-features = flakes nix-command
-    - run: echo NIX_VERSION="$(nix eval .\#nix.version | tr -d \")" >> 
$GITHUB_ENV
-    - run: nix build .#dockerImage -L
-    - run: docker load -i ./result/image.tar.gz
-    - run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME 
}}/nix:$NIX_VERSION
-    - run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME 
}}/nix:master
-    # We'll deploy the newly built image to both Docker Hub and Github 
Container Registry.
-    #
-    # Push to Docker Hub first
-    - name: Login to Docker Hub
-      uses: docker/login-action@v3
-      with:
-        username: ${{ secrets.DOCKERHUB_USERNAME }}
-        password: ${{ secrets.DOCKERHUB_TOKEN }}
-    - run: docker push ${{ secrets.DOCKERHUB_USERNAME }}/nix:$NIX_VERSION
-    - run: docker push ${{ secrets.DOCKERHUB_USERNAME }}/nix:master
-    # Push to GitHub Container Registry as well
-    - name: Login to GitHub Container Registry
-      uses: docker/login-action@v3
-      with:
-        registry: ghcr.io
-        username: ${{ github.actor }}
-        password: ${{ secrets.GITHUB_TOKEN }}
-    - name: Push image
-      run: |
-        IMAGE_ID=ghcr.io/${{ github.repository_owner }}/nix
-        # Change all uppercase to lowercase
-        IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
-
-        docker tag nix:$NIX_VERSION $IMAGE_ID:$NIX_VERSION
-        docker tag nix:$NIX_VERSION $IMAGE_ID:latest
-        docker push $IMAGE_ID:$NIX_VERSION
-        docker push $IMAGE_ID:latest
-        # deprecated 2024-02-24
-        docker tag nix:$NIX_VERSION $IMAGE_ID:master
-        docker push $IMAGE_ID:master
+    secrets:
+      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
 
   flake_regressions:
     needs: tests
@@ -287,13 +228,21 @@
         with:
           repository: NixOS/flake-regressions-data
           path: flake-regressions/tests
-      - uses: ./.github/actions/install-nix-action
+      - name: Download installer tarball
+        uses: 
actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
+        with:
+          name: installer-linux
+          path: out
+      - name: Looking up the installer tarball URL
+        id: installer-tarball-url
+        run: |
+          echo "installer-url=file://$GITHUB_WORKSPACE/out" >> "$GITHUB_OUTPUT"
+      - uses: 
cachix/install-nix-action@4e002c8ec80594ecd40e759629461e26c8abed15 # v31.9.0
         with:
-          dogfood: ${{ github.event_name == 'workflow_dispatch' && 
inputs.dogfood || github.event_name != 'workflow_dispatch' }}
-          extra_nix_config:
-            experimental-features = nix-command flakes
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-      - run: nix build -L --out-link ./new-nix && 
PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh
+          install_url: ${{ format('{0}/install', 
steps.installer-tarball-url.outputs.installer-url) }}
+          install_options: ${{ format('--tarball-url-prefix {0}', 
steps.installer-tarball-url.outputs.installer-url) }}
+      - name: Run flake regressions tests
+        run: MAX_FLAKES=25 flake-regressions/eval-all.sh
 
   profile_build:
     needs: tests
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/nix-2.33.0/.github/workflows/docker-push.yml 
new/nix-2.33.1/.github/workflows/docker-push.yml
--- old/nix-2.33.0/.github/workflows/docker-push.yml    1970-01-01 
01:00:00.000000000 +0100
+++ new/nix-2.33.1/.github/workflows/docker-push.yml    2026-01-11 
20:46:37.000000000 +0100
@@ -0,0 +1,101 @@
+name: "Push Docker Image"
+
+on:
+  workflow_call:
+    inputs:
+      ref:
+        description: "Git ref to build the docker image from"
+        required: true
+        type: string
+      is_master:
+        description: "Whether run from master branch"
+        required: true
+        type: boolean
+    secrets:
+      DOCKERHUB_USERNAME:
+        required: true
+      DOCKERHUB_TOKEN:
+        required: true
+
+permissions: {}
+
+jobs:
+  # Steps to test CI automation in your own fork.
+  # 1. Sign-up for https://hub.docker.com/
+  # 2. Store your dockerhub username as DOCKERHUB_USERNAME in "Repository 
secrets" of your fork repository settings 
(https://github.com/$githubuser/nix/settings/secrets/actions)
+  # 3. Create an access token in https://hub.docker.com/settings/security and 
store it as DOCKERHUB_TOKEN in "Repository secrets" of your fork
+  check_secrets:
+    permissions:
+      contents: none
+    name: Check presence of secrets
+    runs-on: ubuntu-24.04
+    outputs:
+      docker: ${{ steps.secret.outputs.docker }}
+    steps:
+      - name: Check for DockerHub secrets
+        id: secret
+        env:
+          _DOCKER_SECRETS: ${{ secrets.DOCKERHUB_USERNAME }}${{ 
secrets.DOCKERHUB_TOKEN }}
+        run: |
+          echo "docker=${{ env._DOCKER_SECRETS != '' }}" >> $GITHUB_OUTPUT
+
+  push:
+    name: Push docker image to DockerHub and GHCR
+    needs: [check_secrets]
+    permissions:
+      contents: read
+      packages: write
+    if: needs.check_secrets.outputs.docker == 'true'
+    runs-on: ubuntu-24.04
+    steps:
+    - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+      with:
+        fetch-depth: 0
+        ref: ${{ inputs.ref }}
+    - uses: ./.github/actions/install-nix-action
+      with:
+        dogfood: false
+        extra_nix_config: |
+          experimental-features = flakes nix-command
+    - run: echo NIX_VERSION="$(nix eval .\#nix.version | tr -d \")" >> 
$GITHUB_ENV
+    - run: nix build .#dockerImage -L
+    - run: docker load -i ./result/image.tar.gz
+    # We'll deploy the newly built image to both Docker Hub and Github 
Container Registry.
+    #
+    # Push to Docker Hub first
+    - name: Login to Docker Hub
+      uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # 
v3.6.0
+      with:
+        username: ${{ secrets.DOCKERHUB_USERNAME }}
+        password: ${{ secrets.DOCKERHUB_TOKEN }}
+    - name: Push to Docker Hub
+      env:
+        IS_MASTER: ${{ inputs.is_master }}
+        DOCKERHUB_REPO: ${{ secrets.DOCKERHUB_USERNAME }}/nix
+      run: |
+        docker tag nix:$NIX_VERSION $DOCKERHUB_REPO:$NIX_VERSION
+        docker push $DOCKERHUB_REPO:$NIX_VERSION
+        if [ "$IS_MASTER" = "true" ]; then
+          docker tag nix:$NIX_VERSION $DOCKERHUB_REPO:master
+          docker push $DOCKERHUB_REPO:master
+        fi
+    # Push to GitHub Container Registry as well
+    - name: Login to GitHub Container Registry
+      uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # 
v3.6.0
+      with:
+        registry: ghcr.io
+        username: ${{ github.actor }}
+        password: ${{ secrets.GITHUB_TOKEN }}
+    - name: Push to GHCR
+      env:
+        IS_MASTER: ${{ inputs.is_master }}
+      run: |
+        IMAGE_ID=ghcr.io/${{ github.repository_owner }}/nix
+        IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
+
+        docker tag nix:$NIX_VERSION $IMAGE_ID:$NIX_VERSION
+        docker push $IMAGE_ID:$NIX_VERSION
+        if [ "$IS_MASTER" = "true" ]; then
+          docker tag nix:$NIX_VERSION $IMAGE_ID:master
+          docker push $IMAGE_ID:master
+        fi
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/nix-2.33.0/.github/workflows/upload-release.yml 
new/nix-2.33.1/.github/workflows/upload-release.yml
--- old/nix-2.33.0/.github/workflows/upload-release.yml 1970-01-01 
01:00:00.000000000 +0100
+++ new/nix-2.33.1/.github/workflows/upload-release.yml 2026-01-11 
20:46:37.000000000 +0100
@@ -0,0 +1,69 @@
+name: Upload Release
+on:
+  workflow_dispatch:
+    inputs:
+      eval_id:
+        description: "Hydra evaluation ID"
+        required: true
+        type: number
+      is_latest:
+        description: "Mark as latest release"
+        required: false
+        type: boolean
+        default: false
+permissions:
+  contents: read
+  id-token: write
+  packages: write
+jobs:
+  release:
+    runs-on: ubuntu-24.04
+    environment: releases
+    steps:
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # 
v6.0.1
+      - uses: ./.github/actions/install-nix-action
+        with:
+          dogfood: false # Use stable version
+          use_cache: false # Don't want any cache injection shenanigans
+          extra_nix_config: |
+            experimental-features = nix-command flakes
+      - name: Set NIX_PATH from flake input
+        run: |
+          NIXPKGS_PATH=$(nix build --inputs-from .# nixpkgs#path 
--print-out-paths --no-link)
+          # Shebangs with perl have issues. Pin nixpkgs this way. nix shell 
should maybe
+          # get the same uberhack that nix-shell has to support it.
+          echo "NIX_PATH=nixpkgs=$NIXPKGS_PATH" >> "$GITHUB_ENV"
+      - name: Configure AWS credentials
+        uses: 
aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 
# v5.1.1
+        with:
+          role-to-assume: "arn:aws:iam::080433136561:role/nix-release"
+          role-session-name: nix-release-oidc-${{ github.run_id }}
+          aws-region: eu-west-1
+      - name: Login to Docker Hub
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # 
v3.6.0
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # 
v3.6.0
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Upload release
+        run: |
+          ./maintainers/upload-release.pl \
+            ${{ inputs.eval_id }} \
+            --skip-git
+        env:
+          IS_LATEST: ${{ inputs.is_latest && '1' || '' }}
+      - name: Push to GHCR
+        run: |
+          DOCKER_OWNER="ghcr.io/$(echo '${{ github.repository_owner }}' | tr 
'[A-Z]' '[a-z]')/nix"
+          ./maintainers/upload-release.pl \
+            ${{ inputs.eval_id }} \
+            --skip-git \
+            --skip-s3 \
+            --docker-owner "$DOCKER_OWNER"
+        env:
+          IS_LATEST: ${{ inputs.is_latest && '1' || '' }}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/nix-2.33.0/.version new/nix-2.33.1/.version
--- old/nix-2.33.0/.version     2025-12-10 21:38:26.000000000 +0100
+++ new/nix-2.33.1/.version     2026-01-11 20:46:37.000000000 +0100
@@ -1 +1 @@
-2.33.0
+2.33.1
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/nix-2.33.0/maintainers/keys/158A6F530EA202E5F651611314FAEA63448E1DF9.asc 
new/nix-2.33.1/maintainers/keys/158A6F530EA202E5F651611314FAEA63448E1DF9.asc
--- 
old/nix-2.33.0/maintainers/keys/158A6F530EA202E5F651611314FAEA63448E1DF9.asc    
    1970-01-01 01:00:00.000000000 +0100
+++ 
new/nix-2.33.1/maintainers/keys/158A6F530EA202E5F651611314FAEA63448E1DF9.asc    
    2026-01-11 20:46:37.000000000 +0100
@@ -0,0 +1,110 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQINBGPtMiwBEAC0sFZW2QW/OaDjKm5zGRpDvHXDsMIUtlHfoi5ce8pocC63W05o
+FSXbUZjZ1VfYO8lT8DFANCzTkiXYaZx0cPRG2pVY4AOQZDNFt5XrAyvw496XCAIM
+DTYGFLjCqgjPt9RUFEy4MyHPJTEpB0x3rXgT4ILNu9vsj9Q0vttps7SpbZ3Ldq5H
+o/BBbLW77q/vNjpYzCbBIXF7ycUGpnNv9Go/WuiDnrBMcyxh+8kjjIHB5cxZSnjJ
+DUv681+m83v+gLZQGX/jexQrrf5JpS0X9qEnhGLrNUDhtyv5ud3Je4EfamkjLVVC
+RlNLofgflOCsl/tP80i+K7S1QdKhUALxuJ6H0prYUflGBDxDyC8XYuJ62TT0OUpa
+vJvgwVlCq8/jq+ykYQXlbuBVOzi5wAuI4l3+HqreSQYPSiwe+6N590Zbafdv1fvN
+WFtZKCTGMqfyaaAnppioH9/+NWkI2AQxaYVasYM/JEYvY9pJgA7alh51jHW4JglP
+ErypKfBKPKJID0QENqYoa3bDDCihuNWhgQf9dxzPlj2ckd35Zb6w4DfuSmtjaa9D
+o0jZVY1JbFuxBqP09+saVPrxLHgmPxjcdzPGQQtAqdO2vyJXNEGLFMoVEZPNaLo3
+QmcIJnT7oSck+4vGfOYtWUHXQynu/Tnwsv2XkA/uyw8HNe+RRMqv/apnzQARAQAB
+tCdTZXJnZWkgWmltbWVybWFuIDxzZXJnZWlAemltbWVybWFuLmZvbz6JAlEEEwEK
+ADsCGwEFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AWIQQVim9TDqIC5fZRYRMU+upj
+RI4d+QUCaUbsaAIZAQAKCRAU+upjRI4d+VdDD/492HRaJ/8V7R7VUzkafmb2Hb28
+SLf7oiB8Uq9I7SukiDEaIT1fUhquYWQ9KWpPRNR1TX6ApXnIeuJRMGFoDVIRnmnr
+cKnYYXfqqc81VxIyKvaumB7KWbS7G4Nbor8AH1ouOOOMMS50OTJOWQA4A26inIuG
+n+7L8MeS5aT+3uNKDoTKsidC47vnaxNMcke1taPfbfo7vn69PsRCM/g9/7TQYU8b
+6xp+pM9Ao9nJneRk2YCpsGYRrWTpaik0DFKnfpPKJM/yunhtGLF2IYAp3l1mvHPK
+nnzo92zjpQuZwazEIK+23V1vRT4IjM2BewbJPAzf2/UuxEjjgNQm0tOtH2JhFNeB
+VM0BVrGxWrrwrsmv6lWghTtBc6zRWyHrj/rpjtVQNmeKYrHWJeXwVz1rqgGPmB2N
+k0MZD1UjHHhEs1Cntn7yLmxTPztRJCtR+euRu81Uo2NAvrMJ4xsDjaM0LeLTnzjV
+9AsPjD188dOFyz7VExZum4+XaaEJ41FIPLEqU3U0GAa6stEy0ylSlIN4x9aiXXVW
+xfzHHchS5jK6QAjuZxN8t01GRactNylINRf7uoTECFZTtXNqfeuk0HQxBH0LuKVE
+0PxJbcNI4mVWw1KgTJ8PUVC1IXP3sEpqPdJOYiRXgnpcS26fWOBu0aQ4mhxiaJhr
+/zBfrkLEqp20TdNDZLkCDQRj7TM1ARAAt73xO24curnHTTgXkkVMMRzcMLx3Mb1a
+2FuddxC5hzTpEpw01L91UBrXVJEg9K2KAwP5CtCLgPCqXr47Tm7krvHxWwBksgY/
+6aHRsoPQfCFUZHc0aiO+C4NCzR+aEeGKn66Oc1Hq9oUTpDgiBWhsuEPiyA1OSGF0
+4L0jeTCqfm68kWp4PIK9yuugkdDsoyj6TonuMsb3V5ctHLqop9KH+eHSkUTPo+Lk
++bxaeAOJ1UfbohgbRbrYKAfsaghhOMDH3R1w2pvtUJz+sDbuQsiPFTqbxsXDTFws
+H4N/AQCYnnvOhqEek2sOEZ19bJXt5UrAr10mX4PGmAkWqE1JWBxpOKG3BXSGOTu1
+3dFhQfPMK+PmvUrs0kcWQr53K/aRUdKKhIfTcMfkqYTGPK5HclHph24WjXj3QFFA
+SjksQTdm6486ZmLZK4CTbAFOPfTF/aWg8gu9v4ihdq6lqHNNXxv2xBAChcd59H7p
+D5zy9z8SpwWR9V5JDmlF6HWIIau1c6lSsQq1xHvYM8EuPe03vJvor+2u/cn5zYF1
+5ZxAuPI2i5vtavg1s8ZGAAogJ9dVcP36LdJfL9quXWvmovkd//qHIepBB+l/zQio
+ZRDZlIcfV3Xycaqsb5OqHGARHE0097koipMt5y/iXlqG4Ruue6Idb8bW96EKpaWj
+kKy/iNfQfQMAEQEAAYkEcgQYAQoAJgIbAhYhBBWKb1MOogLl9lFhExT66mNEjh35
+BQJpRuz3BQkJHCDCAkDBdCAEGQEKAB0WIQRK3hK0WyJ4BicGpcmpsLVXymMjJQUC
+Y+0zNQAKCRCpsLVXymMjJbdSD/9+f1FOOeGDAJI6Duo5fsWnf4xJJdtQtDbz6d2A
+SeDapxeJ3zWfKBD0wu5sISEa0uiWsYSmLtsa2SqVAKHlEaMGRR+tkBMPQ+rvgI4c
+62YjGTgm+IPd+NFIn+ixFU1hpinTh+KhUEoeOwWCvKs9nZfSG9vkienfiG0bBxo2
+zrvBzXA50x5hbUL+ghKu/AVfN9qZDwh30O4KZTwk4g4cM9SeaQa4YvHYIS3IEhDZ
+hGybwrrqV9cs92ln4IJw9WCy9QReBNrdeFgC4+3ziUp1QsG3RvqrtuMttwBVC1Z3
+bj5QjLLOREhhodfvk98t9yVkragObb4rGrLo1mWuF0c4mJGvXwnrqhCMvzv4M+0T
+Zdrmw6YpGkGOaOPghVuwoTtqSAkl+zFWIJS89jidvkYG3EqKAkgLKog/TQReCq13
+HWrF8cMck+Rf2K8k26q/RNZaA9ZUKjLExzz8lsWmd2C7rvkGLrlxnzxz0gGyNR3Q
+KK74vcPhqeABt2GSkHtEXZFFA9IVVzwlRWK3e0S+mVQnZVjNL+cBPn3/hZHMLesB
+CucyYZv+DxvT+JkYXBkGSw4s3hpABqGym7gdPUIa0q4rbBFG6xP5sLLBG4yru8vV
+2dyCMmFqRuxpT49uNfyQ6Vj+dobN6qHnP/9NwfzOixXYBHXR6LBqb/M+iCiJaaIn
+uiRLHwkQFPrqY0SOHfkQpA//W51vj8meuz7snRO+vZFcjLneFFzqfh1Jdz8IqDpO
+CkI5pBJmi8e0oSe6r68MkahiQLlYPwm7d+sjHvJhPWipNKWq/uwCgBs+Ac1lpPXR
+MwLbrZukcLMYlLmb2MrCKmjcMt0BZsZKBNYL3a3X9nHgwXdeqFYS4WQDMCCc09lz
+9YqfdoEsqRO4qN7D0hFqnwjOzb34ixZ6UO8a8ekY9QKxAgWc9fJWGMg6Pjdg4qsK
+nqymOIAdGVOJdoRM46wKGVBvbsF2gNfQU4XyzgJo5vHGFwJm6EoSnODlL5e2wsQh
+uN1oqBt/8ef/plloMEqVBweUBATqSqjRF6IhhYJvWVuQHQL1p1vnV9FebiVj34ir
+Z8ID+o0AnTJcclbUcDwannGJ0cuDcPhk/v/ahVuoMERCi12qnMBo5B/e6Omyh1yB
+4pbf4GATGGQipDQG75eC/kP2GQEqJP5WYN0Ar8Le/AA/2xyL7upW0yIByyXCwGEb
+JRwEgU3+bPyu58bFt8Pftit6J7rA3oBVVMOPrYH5eZwRaj5m2RptwKGL6BfHnhNv
+ZqmCq9EBGX6L1NI0xHMjEFfXJ8jU01XdfG8nCqkwqsHwslXLhqjJphfHcx89YwbV
+/15GCuURAv1cKe/7277sOhcvP/QpQqSWgvYExHw8PeFJcTYtF2NrRgNwcQsWS1Rj
+gXa5Ag0EY+0zcAEQANC5N6kSfezuucAgi+X3BD+MT37mxQyvICSggEJf1LDSmy0+
+bnvD7setL8CP9etTA2fcVNYKI1oboMyhoCnsRP2jDdv1iXOI/hZg4wSb/D1yUkae
+fUpxv3Wuci2QKavH2MfraDD7BFMbsQeMcHtn4Rk216T6jndZHnzT1Ih7iX0XeQPb
+li5fojOiZssgWAVT4HPXFCJB6lI35Hjp35oRYwrtMmu5INinZ79n9h1igGtt1ItZ
+b7rQKNd772Jxcn4UU71ovORSL/xT5i5sxZ+evQOxkpqUAokMOFaoHcOXLmA1NsFv
+yryXHK4Ioq9ap2jKlLTWkJWjua9JZ4AmKhbvT8X4ELxIKSCAdJKAWP8ZHbXNu5MD
+aznyzZQLxSO7uFvu356De75mI5iohZNj5wB5Wju71pBiorTKVj4+iJ4e+xVIzFdG
+hFC0DehNcl2t9w/y8qHwIQ1yUAjXHLXq0/2jsVeH6bU5q/MsgvUP1jcFe0eyOpxy
+CDvyFdzZFbI57TnB/fvcZTRZ5ewXMFpH8gzuoFzAjUAP95UjYKgaGdrNPNIy28Ii
+4zhvdghei2+n9jgiMfcGQg8lyfH5yF0vWWWynX0KcJsRwEZoL2EauVdwq4PcYOoU
+pQFhpcreCjD4LdZ4yRU4InbhcUogXjrQ9Dz01TbPmQD5b5iso21bCEFBXrhzABEB
+AAGJAjwEGAEKACYCGwwWIQQVim9TDqIC5fZRYRMU+upjRI4d+QUCaUbs9wUJCRwg
+hwAKCRAU+upjRI4d+X/XD/sH5xvHPfTJq52v8weFmB52up+DzqG2lyhGdoUQ1Muw
+dRDLTLXLJrFdfpoOo7/j4Scr0rdc7/dpCn0DLcPuCoPxu+SkjEnVehFmZrGSv7Ga
+x9dHr3DBh42fdlX/U/EnDuyosY0JU1gNF2/6FIA+bTTOFE3RxfN906RjslYQDjMZ
+UAlSeLYHOZofdltI0YIr32vrxgdWQGZXPxU4XusDUc0z163OO+TGg7iUNWFZP5Qj
+ubM7e0YbDX0NPIshk8us99YJmrWnhaix1/W5ryO3DXiGaQ7XFi9u7QofRqvRIctg
+QXavdepkzJow9V9qpMECAJePIuICq7rm+xy+njjbuF436W7390bfVBwRr+FPADsl
+jgQP4KvY5rykss30kheom8wNEbveWkhH5oTfH9b7O4KXJfpfJzrlgOWp2BD9JL8t
+/M4HvFXTr2a75H/QbHK5OFrZeGATuv9OTxv7EZvnrPXU+DYTFldpu7TrNNqKCoj3
+ZyXmc3Hhg5kskDhfHJppaeOayuhMOpT3ud1MFzROY5SLVIH8rBR12KUgsCUYQcGs
+Iy0+0QvEGkjb4cAH1NK3VlbqVNsy1RmqRt2B28R2ueewDfTOoqkzt4MmzLqTdnAx
+mTqmHmkEKhEf3K4MRNUPO2yieUg2COk5l6x9HhAnoxxeOZrTmcMsPY/UViG2HEPm
+ybkCDQRj7TRDARAA9DZuKdfKq4Bs2+NwxC0aplljWOl8VIsEVg+Q8agD7/HU6/b6
+Dry0njtWybn2x6Axf/nUdeOC01Fi1lmht/fpj6mRkgAvd/V6P10xnsUoykPSDSTh
+P25MFFGW3JAA82bwdJ4AJpEQvTZG2nTb3237vlBiI1qHQrac8GYkju2O4UfySRN6
+7cyi7bMf2pjWBBOEhaNy4b6CMDsb32P/N5J7sTE/TXgrS+u4ITIgjzSrkUkh5Z+B
+8QVRa7xPIDZJdvZWTEXWu5fgRPZvxbr154GIkWJkFzlDoB1UcO56/uzRUuKhEV6o
+HW3LMUuWdPMjpHpq8hrL0G2rDniJFUtbDFzHdZK1LUU3T2BJM8rjI3D/euph+IDT
+27vl5qo72zCYE/iKzx4FMLZcQvx1kUAxkPX8l+dzZEwKeRIIpFDxQvatRtl+z0bM
+jbkpDb+Yjv66sC4dYRpgTTGX6rok0PWHR3IxDNzyf2j8zQ4LFJ+rVBM1GjGSt6mG
+j9TeL8CVeiSp4SuJ7I/FJVPHsKb50m+BDzeB31qTydNqh2kKr0DVAUa+TUsCr7e0
+OYr8WE2adJcRXIW0qw50xXF+W7/05GqSCVD0dpeOUdBTQTsSkQmM3/0hcj9aVo9e
+UDCM9RF0WRqiDAoHzJFfg+ztamkQI5HO6CklC4Ok22qrHRf6HDNYSuT6QFkAEQEA
+AYkCPQQYAQoAJwMbIAQWIQQVim9TDqIC5fZRYRMU+upjRI4d+QUCaUbs9wUJCRwf
+tAAKCRAU+upjRI4d+Y+cD/9yllG6uo934pcHNsVppZBfREFwSc8ywlbosCuSVpay
+PjSqgrWwDrnqrsk0F2kUdC6rR3BIcXbn+lA9KqylH+cCXAJCkh8EDq6TlQ7Lt5EV
+w1U0MAMXOyxPwDymQ/BO+iDyjXWkRRYgbF5XiFhCfGeuKyhkhACisAgNZ1uA1P5k
+0SJYc14YfEhQkB46Y20SpfVHRsQ46FyNB6GHbmTmfoO8La8VTh++7GBdh85HfvkG
+VNQ3wpi5oXsOLN9+MJOezc0XsW2LQsKQj1/J7QKzGh+lxN5cemsA5aqPzh8dyxeT
+0lYRFp4AHkimqGUomVpRkbegMIPxXqOE+ZAmsddErw0UtmrKxcmMptOJwNgYzEgu
+++2vtqerL/NYp+wsdcWaBjCz2F3NiwHgNli7NSB/FPwucZZ5gN5C4SnmeFzrGdHg
+Oy+tQUN6ayQKljHeBO7CjMlsFNo/dcVrEMa1ShxBMqlj/6ivoEhktLz0Nru4FwNU
+xE5SJYDYfpjD7Ws8y4LoXgWXjFHrMO6N9GzqLN/e8LT7I+w4ps2MrgJ8QSrelmQ3
+rjkxp3uWp5v2lqy4rLfpi9iB6zIAeoN2eU1yOM9joxOYMxKYaYeYyP1Mm90wFol8
+LcTSaN+tVniPddBiL6zvsGBEMbCR9XN3EQ+mErbuw5ovWBOCrr+dvN3FxvD11y4J
+7w==
+=mXYP
+-----END PGP PUBLIC KEY BLOCK-----
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/nix-2.33.0/maintainers/keys/B541D55301270E0BCF15CA5D8170B4726D7198DE.asc 
new/nix-2.33.1/maintainers/keys/B541D55301270E0BCF15CA5D8170B4726D7198DE.asc
--- 
old/nix-2.33.0/maintainers/keys/B541D55301270E0BCF15CA5D8170B4726D7198DE.asc    
    1970-01-01 01:00:00.000000000 +0100
+++ 
new/nix-2.33.1/maintainers/keys/B541D55301270E0BCF15CA5D8170B4726D7198DE.asc    
    2026-01-11 20:46:37.000000000 +0100
@@ -0,0 +1,51 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQENBFZu2zwBCADfatenjH3cvhlU6AeInvp4R0JmPBG942aghFj1Qh57smRcO5Bv
+y9mqrX3UDdmVvu58V3k1k9/GzPnAG1t+c7ohdymv/AMuNY4pE2sfxx7bX+mncTHX
+5wthipn8kTNm4WjREjCJM1Bm5sozzEZetED3+0/dWlnHl8b38evnLsD+WbSrDPVp
+o6M6Eg9IfMwTfcXzdmLmSnGolBWDQ9i1a0x0r3o+sDW5UTnr7jVP+zILcnOZ1Ewl
+Rn9OJ4Qg3ULM7WTMDYpKH4BO7RLR3aJgmsFAHp17vgUnzzFBZ10MCS3UOyUNoyph
+xo3belf7Q9nrHcSNbqSeQuBnW/vafAZUreAlABEBAAG0IkVlbGNvIERvbHN0cmEg
+PGVkb2xzdHJhQGdtYWlsLmNvbT6JATwEEwEIACYCGyMHCwkIBwMCAQYVCAIJCgsE
+FgIDAQIeAQIXgAUCVm7etAIZAQAKCRCBcLRybXGY3q51B/96qt41tmcDSzrj/UTl
+O6rErfW5zFvVsJTZ95Duwu87t/DVhw5lKBQcjALqVddufw1nMzyN/tSOMVDW8xe4
+wMEdcU4+QAMzNX80enuyinsw1glxfLcK0+VbTvqNIfw0sG3MjPqNs6cK2VRfMHK4
+paJjytBVICszNX9TfjLyIpKKoSSo1vqnT47LDZ5GIMy7l9Cs2sO/rqQHSPcR79yz
+8m8tbHpDDEMZmJeklckKP2QoiqnHiIvlisDxLclYnUmNaPdaN/f++qZz5Yqvu1n+
+sNUBA5eLaZH64Uy2SwtABxO3JPJ8nQ2+SFZ7ocFm4Gcdv4aM+Ura9S6fvM91tEJp
+yAQOiQE5BBMBCAAjBQJWbts8AhsjBwsJCAcDAgEGFQgCCQoLBBYCAwECHgECF4AA
+CgkQgXC0cm1xmN6sIAgAielxO8zJREqEkA2xudg/o4e9ZlNZ3X1NvY8OzJH/qlB2
+SmwKqwifhtbC1K0uavXA7eaxdtd2zrI+Yq7IooUyv7juMjHTZhLcFbR5iVkQ4Mfp
+JmeHXJ/ChYKxD5mMj/C3WbCZ91oCSNZ6Iyi5fvQj/691OC4q+y/2NEUcOI8D8cw8
+XKHbKtceFYc+nZmdOv3ZZrNTSN/kszGViNNLKgnpPdDVPtLp+vjXtbmitiFG2HL/
+WfbJ+3Gh2Yr1Vy3O9dWKH++e1AmIv7WWqmUjRFVpqC/wr7/BLaScWT8WKF5vkshU
+gq8Ez1/cuizsgs3wQIZWgXKQK5njvwnbKg+Zmh/uGbQmRWVsY28gRG9sc3RyYSA8
+ZWVsY28uZG9sc3RyYUB0d2VhZy5pbz6JAU4EEwEIADgWIQS1QdVTAScOC88Vyl2B
+cLRybXGY3gUCXELt4gIbIwULCQgHAgYVCgkICwIEFgIDAQIeAQIXgAAKCRCBcLRy
+bXGY3ujFCADfS5D1xHU8KH6TpqgssSggYVq62Wwn/Ga+4XPPetM+ajcXagyH6SwB
+mxlHICcnv9xC93ryiTI10P1ADJl+aBsI66wEdHBU+ty4RTDy4JZNUPtmRCk9LhSc
+mtUO3ry/wtWkRLdJxP49hg7BbQvWoU0M6WODp7SJjPKPWNX64mzHBeOuy+DqGCbM
+lpGNCvW8ahU/ewbm7+xwWmzqLDoWzXjHsdF4QdzMVM/vkAgWEP4y0wEqFASzIYaR
+GNEkBWU4OQVq5Bdm9+wWWAgsbM0FJAQl0GDqnz4QxWzxxCAAXdbh9F5ffafWYsA9
+bise4ZQLkvYo6iUnrcFm4dtZbT8iL3gptCtFZWxjbyBEb2xzdHJhIDxlZWxjby5k
+b2xzdHJhQGxvZ2ljYmxveC5jb20+iQE5BBMBCAAjBQJWbt6nAhsjBwsJCAcDAgEG
+FQgCCQoLBBYCAwECHgECF4AACgkQgXC0cm1xmN4b/wf8DApMV/jSPEpibekrUPQu
+Ye3Z8cxBQuRm/nOPowtPEH/ShAevrCdRiob2nuEZWNoqZ2e5/+6ud07Hs9bslvco
+cDv1jeY1dof1idxfKhH3kfSpuD2XJhuzQBxBqOrIlCS/rdnW+Y9wOGD7+bs9QpcA
+IyAeQGLLkfggAxaGYQ2Aev8pS7i3a/+lOWbFhcTe02I49KemCOJqBorG5FfILLNr
+DjO3EoutNGpuz6rZvc/BlymphWBoAdUmxgoObr7NYWgw9pI8WeE6C7bbSOO7p5aQ
+spWXU7Hm17DkzsVDpaJlyClllqK+DdKza5oWlBMe/P02jD3Y+0P/2rCCyQQwmH3D
+RbkBDQRWbts8AQgA0g556xc08dH5YNEjbCwEt1j+XoRnV4+GfbSJIXOl9joIgzRC
+4IaijvL8+4biWvX7HiybfvBKto0XB1AWLZRC3jWKX5p74I77UAcrD+VQ/roWQqlJ
+BKbiQMlRYEsj/5Xnf72G90IP4DAFKvNl+rLChe+jUySA91BCtrYoP75Sw1BE9Cyz
+xEtm4WUzKAJdXI+ZTBttA2Nbqy+GSuzBs7fSKDwREJaZmVrosvmns+pQVG4WPWf4
+0l4mPguDQmZ9wSWZvBDkpG7AgHYDRYRGkMbAGsVfc6cScN2VsSTa6cbeeAEowKxM
+qx9RbY3WOq6aKAm0qDvow1nl7WwXwe8K0wQxfQARAQABiQEfBBgBCAAJBQJWbts8
+AhsMAAoJEIFwtHJtcZjeuAAH/0YNz2Qe1IAEO5oqEZNFOccL4KxVPrBhWUen83/b
+C6PjOnOqv6q5ztAcms88WIKxBlfzIfq+dzJcbKVS/H7TEXgcaC+7EYW8sJVEsipN
+BtEZ3LQNJ5coDjm7WZygniah1lfXNuiritAXduK5FWNNndqGArEaeZ8Shzdo/Uyi
+b9lOsBIL6xc2ZcnX5f+rTu02LCEtEb0FwCycZLEWYf8hG4k8uttIOZOC+CLk/k8d
+kBmPikMwUVTTV0CdT1cemQKdTaoAaK+kurF6FYXwcnjhRlHrisSt/tVMEwTw4LUM
+3MYf6qfjjvE4HlDwZal8th7ccoQp/flfJIuRv85xCcKK+PI=
+=u5cX
+-----END PGP PUBLIC KEY BLOCK-----
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/nix-2.33.0/maintainers/keys/README.md 
new/nix-2.33.1/maintainers/keys/README.md
--- old/nix-2.33.0/maintainers/keys/README.md   1970-01-01 01:00:00.000000000 
+0100
+++ new/nix-2.33.1/maintainers/keys/README.md   2026-01-11 20:46:37.000000000 
+0100
@@ -0,0 +1,13 @@
+# Maintainer GPG Keys
+
+Release tags are signed by members of the [Nix maintainer 
team](https://nixos.org/community/teams/nix/) as part of the [release 
process](../release-process.md). This directory contains the public GPG keys 
used for signing.
+
+## Keys
+
+- **Eelco Dolstra**
+  GPG Fingerprint: `B541 D553 0127 0E0B CF15 CA5D 8170 B472 6D71 98DE`
+
+- **Sergei Zimmerman**
+  GPG Fingerprint: [`158A 6F53 0EA2 02E5 F651 6113 14FA EA63 448E 
1DF9`](https://keys.openpgp.org/vks/v1/by-fingerprint/158A6F530EA202E5F651611314FAEA63448E1DF9)
+
+<!-- TODO: Add keys for other Nix team members -->
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/nix-2.33.0/maintainers/release-process.md 
new/nix-2.33.1/maintainers/release-process.md
--- old/nix-2.33.0/maintainers/release-process.md       2025-12-10 
21:38:26.000000000 +0100
+++ new/nix-2.33.1/maintainers/release-process.md       2026-01-11 
20:46:37.000000000 +0100
@@ -5,11 +5,11 @@
 The release process is intended to create the following for each
 release:
 
-* A Git tag
+* A signed Git tag (public keys in `maintainers/keys/`)
 
 * Binary tarballs in https://releases.nixos.org/?prefix=nix/
 
-* Docker images
+* Docker images (arm64 and amd64 variants, uploaded to DockerHub and GHCR)
 
 * Closures in https://cache.nixos.org
 
@@ -104,21 +104,17 @@
   evaluation ID (e.g. `1780832` in
   `https://hydra.nixos.org/eval/1780832`).
 
-* Tag the release and upload the release artifacts to
-  [`releases.nixos.org`](https://releases.nixos.org/) and [Docker 
Hub](https://hub.docker.com/):
+* Tag the release:
 
   ```console
-  $ IS_LATEST=1 ./maintainers/upload-release.pl <EVAL-ID>
+  $ IS_LATEST=1 ./maintainers/upload-release.pl --skip-docker --skip-s3 
--project-root $PWD <EVAL-ID>
   ```
 
   Note: `IS_LATEST=1` causes the `latest-release` branch to be
   force-updated. This is used by the `nixos.org` website to get the
   [latest Nix manual](https://nixos.org/manual/nixpkgs/unstable/).
 
-  TODO: This script requires the right AWS credentials. Document.
-
-  TODO: This script currently requires a
-  `/home/eelco/Dev/nix-pristine`.
+* Trigger the [`upload-release.yml` 
workflow](https://github.com/NixOS/nix/actions/workflows/upload-release.yml) 
via `workflow_dispatch` trigger. At the top click `Run workflow` -> select the 
current release branch from `Use workflow from` -> fill in `Hydra evaluation 
ID` with `<EVAL-ID>` value from previous steps -> click `Run workflow`. Wait 
for the run to be approved by `NixOS/nix-team` (or bypass checks if warranted). 
Wait for the workflow to succeed.
 
   TODO: trigger nixos.org netlify: 
https://docs.netlify.com/configure-builds/build-hooks/
 
@@ -181,16 +177,18 @@
 * Wait for the desired evaluation of the maintenance jobset to finish
   building.
 
-* Run
+* Tag the release
 
   ```console
-  $ IS_LATEST=1 ./maintainers/upload-release.pl <EVAL-ID>
+  $ IS_LATEST=1 ./maintainers/upload-release.pl --skip-docker --skip-s3 
--project-root $PWD <EVAL-ID>
   ```
 
   Omit `IS_LATEST=1` when creating a point release that is not on the
  most recent stable branch. This prevents `nixos.org` from going back
   to an older release.
 
+* Trigger the [`upload-release.yml` 
workflow](https://github.com/NixOS/nix/actions/workflows/upload-release.yml) 
via `workflow_dispatch` trigger. At the top click `Run workflow` -> select the 
current release branch from `Use workflow from` -> fill in `Hydra evaluation 
ID` with `<EVAL-ID>` value from previous steps -> click `Run workflow`. Wait 
for the run to be approved by `NixOS/nix-team` (or bypass checks if warranted). 
Wait for the workflow to succeed.
+
 * Bump the version number of the release branch as above (e.g. to
   `2.12.2`).
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/nix-2.33.0/maintainers/upload-release.pl 
new/nix-2.33.1/maintainers/upload-release.pl
--- old/nix-2.33.0/maintainers/upload-release.pl        2025-12-10 
21:38:26.000000000 +0100
+++ new/nix-2.33.1/maintainers/upload-release.pl        2026-01-11 
20:46:37.000000000 +0100
@@ -1,7 +1,8 @@
 #! /usr/bin/env nix-shell
-#! nix-shell -i perl -p perl perlPackages.LWPUserAgent 
perlPackages.LWPProtocolHttps perlPackages.FileSlurp perlPackages.NetAmazonS3 
gnupg1
+#! nix-shell -i perl -p awscli2 perl perlPackages.LWPUserAgent 
perlPackages.LWPProtocolHttps perlPackages.FileSlurp perlPackages.NetAmazonS3 
perlPackages.GetoptLongDescriptive gnupg1
 
 use strict;
+use Getopt::Long::Descriptive;
 use Data::Dumper;
 use File::Basename;
 use File::Path;
@@ -13,7 +14,30 @@
 
 delete $ENV{'shell'}; # shut up a LWP::UserAgent.pm warning
 
-my $evalId = $ARGV[0] or die "Usage: $0 EVAL-ID\n";
+my ($opt, $usage) = describe_options(
+    '%c %o <eval-id>',
+    [ 'skip-docker',      'Skip Docker image upload' ],
+    [ 'skip-git',         'Skip Git tagging' ],
+    [ 'skip-s3',          'Skip S3 upload' ],
+    [ 'docker-owner=s',   'Docker image owner', { default => 'nixos/nix' } ],
+    [ 'project-root=s',   'Pristine git repository path' ],
+    [ 's3-endpoint=s',    'Custom S3 endpoint' ],
+    [ 's3-host=s',        'S3 host', { default => 's3-eu-west-1.amazonaws.com' 
} ],
+    [],
+    [ 'help|h',           'Show this help message', { shortcircuit => 1 } ],
+    [],
+    [ 'Environment variables:' ],
+    [ 'AWS_ACCESS_KEY_ID' ],
+    [ 'AWS_SECRET_ACCESS_KEY' ],
+    [ 'AWS_SESSION_TOKEN   For OIDC' ],
+    [ 'IS_LATEST           Set to "1" to mark as latest release' ],
+);
+
+print($usage->text), exit if $opt->help;
+
+my $evalId = $ARGV[0] or do { print STDERR $usage->text; exit 1 };
+
+die "--project-root is required unless --skip-git is specified\n" unless 
$opt->skip_git || $opt->project_root;
 
 my $releasesBucketName = "nix-releases";
 my $channelsBucketName = "nix-channels";
@@ -62,25 +86,38 @@
 my $binaryCache = "https://cache.nixos.org/?local-nar-cache=$narCache";;
 
 # S3 setup.
-my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "No AWS_ACCESS_KEY_ID 
given.";
-my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "No 
AWS_SECRET_ACCESS_KEY given.";
+my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'};
+my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'};
+my $aws_session_token = $ENV{'AWS_SESSION_TOKEN'};
+
+my ($s3, $releasesBucket, $s3_channels, $channelsBucket);
+
+unless ($opt->skip_s3) {
+    $aws_access_key_id or die "No AWS_ACCESS_KEY_ID given.";
+    $aws_secret_access_key or die "No AWS_SECRET_ACCESS_KEY given.";
+
+    $s3 = Net::Amazon::S3->new(
+        { aws_access_key_id     => $aws_access_key_id,
+          aws_secret_access_key => $aws_secret_access_key,
+          $aws_session_token ? (aws_session_token => $aws_session_token) : (),
+          retry                 => 1,
+          host                  => $opt->s3_host,
+          secure                => ($opt->s3_endpoint && $opt->s3_endpoint =~ 
/^http:/) ? 0 : 1,
+        });
+
+    $releasesBucket = $s3->bucket($releasesBucketName) or die;
+
+    $s3_channels = Net::Amazon::S3->new(
+        { aws_access_key_id     => $aws_access_key_id,
+          aws_secret_access_key => $aws_secret_access_key,
+          $aws_session_token ? (aws_session_token => $aws_session_token) : (),
+          retry                 => 1,
+          $opt->s3_endpoint ? (host => $opt->s3_host) : (),
+          $opt->s3_endpoint ? (secure => ($opt->s3_endpoint =~ /^http:/) ? 0 : 
1) : (),
+        });
 
-my $s3 = Net::Amazon::S3->new(
-    { aws_access_key_id     => $aws_access_key_id,
-      aws_secret_access_key => $aws_secret_access_key,
-      retry                 => 1,
-      host                  => "s3-eu-west-1.amazonaws.com",
-    });
-
-my $releasesBucket = $s3->bucket($releasesBucketName) or die;
-
-my $s3_us = Net::Amazon::S3->new(
-    { aws_access_key_id     => $aws_access_key_id,
-      aws_secret_access_key => $aws_secret_access_key,
-      retry                 => 1,
-    });
-
-my $channelsBucket = $s3_us->bucket($channelsBucketName) or die;
+    $channelsBucket = $s3_channels->bucket($channelsBucketName) or die;
+}
 
 sub getStorePath {
     my ($jobName, $output) = @_;
@@ -115,11 +152,12 @@
         File::Path::remove_tree("$tmpDir/manual.tmp", {safe => 1});
     }
 
-    system("aws s3 sync '$tmpDir/manual' 
s3://$releasesBucketName/$releaseDir/manual") == 0
+    my $awsEndpoint = $opt->s3_endpoint ? "--endpoint-url " . 
$opt->s3_endpoint : "";
+    system("aws $awsEndpoint s3 sync '$tmpDir/manual' 
s3://$releasesBucketName/$releaseDir/manual") == 0
         or die "syncing manual to S3\n";
 }
 
-copyManual;
+copyManual unless $opt->skip_s3;
 
 sub downloadFile {
     my ($jobName, $productNr, $dstName) = @_;
@@ -158,30 +196,12 @@
     return $sha256_expected;
 }
 
-downloadFile("binaryTarball.i686-linux", "1");
-downloadFile("binaryTarball.x86_64-linux", "1");
-downloadFile("binaryTarball.aarch64-linux", "1");
-downloadFile("binaryTarball.x86_64-darwin", "1");
-downloadFile("binaryTarball.aarch64-darwin", "1");
-eval {
-    
downloadFile("binaryTarballCross.x86_64-linux.armv6l-unknown-linux-gnueabihf", 
"1");
-};
-warn "$@" if $@;
-eval {
-    
downloadFile("binaryTarballCross.x86_64-linux.armv7l-unknown-linux-gnueabihf", 
"1");
-};
-warn "$@" if $@;
-eval {
-    downloadFile("binaryTarballCross.x86_64-linux.riscv64-unknown-linux-gnu", 
"1");
-};
-warn "$@" if $@;
-downloadFile("installerScript", "1");
-
-# Upload docker images to dockerhub.
+# Upload docker images.
 my $dockerManifest = "";
 my $dockerManifestLatest = "";
 my $haveDocker = 0;
 
+unless ($opt->skip_docker) {
 for my $platforms (["x86_64-linux", "amd64"], ["aarch64-linux", "arm64"]) {
     my $system = $platforms->[0];
     my $dockerPlatform = $platforms->[1];
@@ -195,8 +215,8 @@
     print STDERR "loading docker image for $dockerPlatform...\n";
     system("docker load -i $tmpDir/$fn") == 0 or die;
 
-    my $tag = "nixos/nix:$version-$dockerPlatform";
-    my $latestTag = "nixos/nix:latest-$dockerPlatform";
+    my $tag = $opt->docker_owner . ":$version-$dockerPlatform";
+    my $latestTag = $opt->docker_owner . ":latest-$dockerPlatform";
 
     print STDERR "tagging $version docker image for $dockerPlatform...\n";
     system("docker tag nix:$version $tag") == 0 or die;
@@ -219,68 +239,94 @@
 }
 
 if ($haveDocker) {
+    my $dockerOwner = $opt->docker_owner;
     print STDERR "creating multi-platform docker manifest...\n";
-    system("docker manifest rm nixos/nix:$version");
-    system("docker manifest create nixos/nix:$version $dockerManifest") == 0 
or die;
+    system("docker manifest rm $dockerOwner:$version");
+    system("docker manifest create $dockerOwner:$version $dockerManifest") == 
0 or die;
     if ($isLatest) {
         print STDERR "creating latest multi-platform docker manifest...\n";
-        system("docker manifest rm nixos/nix:latest");
-        system("docker manifest create nixos/nix:latest 
$dockerManifestLatest") == 0 or die;
+        system("docker manifest rm $dockerOwner:latest");
+        system("docker manifest create $dockerOwner:latest 
$dockerManifestLatest") == 0 or die;
     }
 
     print STDERR "pushing multi-platform docker manifest...\n";
-    system("docker manifest push nixos/nix:$version") == 0 or die;
+    system("docker manifest push $dockerOwner:$version") == 0 or die;
 
     if ($isLatest) {
         print STDERR "pushing latest multi-platform docker manifest...\n";
-        system("docker manifest push nixos/nix:latest") == 0 or die;
+        system("docker manifest push $dockerOwner:latest") == 0 or die;
     }
 }
+}
 
-# Upload nix-fallback-paths.nix.
-write_file("$tmpDir/fallback-paths.nix",
-    "{\n" .
-    "  x86_64-linux = \"" . getStorePath("build.nix-everything.x86_64-linux") 
. "\";\n" .
-    "  i686-linux = \"" . getStorePath("build.nix-everything.i686-linux") . 
"\";\n" .
-    "  aarch64-linux = \"" . 
getStorePath("build.nix-everything.aarch64-linux") . "\";\n" .
-    "  riscv64-linux = \"" . 
getStorePath("buildCross.nix-everything.riscv64-unknown-linux-gnu.x86_64-linux")
 . "\";\n" .
-    "  x86_64-darwin = \"" . 
getStorePath("build.nix-everything.x86_64-darwin") . "\";\n" .
-    "  aarch64-darwin = \"" . 
getStorePath("build.nix-everything.aarch64-darwin") . "\";\n" .
-    "}\n");
 
 # Upload release files to S3.
-for my $fn (glob "$tmpDir/*") {
-    my $name = basename($fn);
-    next if $name eq "manual";
-    my $dstKey = "$releaseDir/" . $name;
-    unless (defined $releasesBucket->head_key($dstKey)) {
-        print STDERR "uploading $fn to s3://$releasesBucketName/$dstKey...\n";
+unless ($opt->skip_s3) {
+    downloadFile("binaryTarball.i686-linux", "1");
+    downloadFile("binaryTarball.x86_64-linux", "1");
+    downloadFile("binaryTarball.aarch64-linux", "1");
+    downloadFile("binaryTarball.x86_64-darwin", "1");
+    downloadFile("binaryTarball.aarch64-darwin", "1");
+    eval {
+        
downloadFile("binaryTarballCross.x86_64-linux.armv6l-unknown-linux-gnueabihf", 
"1");
+    };
+    warn "$@" if $@;
+    eval {
+        
downloadFile("binaryTarballCross.x86_64-linux.armv7l-unknown-linux-gnueabihf", 
"1");
+    };
+    warn "$@" if $@;
+    eval {
+        
downloadFile("binaryTarballCross.x86_64-linux.riscv64-unknown-linux-gnu", "1");
+    };
+    warn "$@" if $@;
+    downloadFile("installerScript", "1");
 
-        my $configuration = ();
-        $configuration->{content_type} = "application/octet-stream";
+    # Upload nix-fallback-paths.nix.
+    write_file("$tmpDir/fallback-paths.nix",
+        "{\n" .
+        "  x86_64-linux = \"" . 
getStorePath("build.nix-everything.x86_64-linux") . "\";\n" .
+        "  i686-linux = \"" . getStorePath("build.nix-everything.i686-linux") 
. "\";\n" .
+        "  aarch64-linux = \"" . 
getStorePath("build.nix-everything.aarch64-linux") . "\";\n" .
+        "  riscv64-linux = \"" . 
getStorePath("buildCross.nix-everything.riscv64-unknown-linux-gnu.x86_64-linux")
 . "\";\n" .
+        "  x86_64-darwin = \"" . 
getStorePath("build.nix-everything.x86_64-darwin") . "\";\n" .
+        "  aarch64-darwin = \"" . 
getStorePath("build.nix-everything.aarch64-darwin") . "\";\n" .
+        "}\n");
+
+    for my $fn (glob "$tmpDir/*") {
+        my $name = basename($fn);
+        next if $name eq "manual";
+        my $dstKey = "$releaseDir/" . $name;
+        unless (defined $releasesBucket->head_key($dstKey)) {
+            print STDERR "uploading $fn to 
s3://$releasesBucketName/$dstKey...\n";
+
+            my $configuration = ();
+            $configuration->{content_type} = "application/octet-stream";
+
+            if ($fn =~ /.sha256|install|\.nix$/) {
+                $configuration->{content_type} = "text/plain";
+            }
 
-        if ($fn =~ /.sha256|install|\.nix$/) {
-            $configuration->{content_type} = "text/plain";
+            $releasesBucket->add_key_filename($dstKey, $fn, $configuration)
+                or die $releasesBucket->err . ": " . $releasesBucket->errstr;
         }
-
-        $releasesBucket->add_key_filename($dstKey, $fn, $configuration)
-            or die $releasesBucket->err . ": " . $releasesBucket->errstr;
     }
-}
 
-# Update the "latest" symlink.
-$channelsBucket->add_key(
-    "nix-latest/install", "",
-    { "x-amz-website-redirect-location" => 
"https://releases.nixos.org/$releaseDir/install"; })
-    or die $channelsBucket->err . ": " . $channelsBucket->errstr
-    if $isLatest;
+    # Update the "latest" symlink.
+    $channelsBucket->add_key(
+        "nix-latest/install", "",
+        { "x-amz-website-redirect-location" => 
"https://releases.nixos.org/$releaseDir/install"; })
+        or die $channelsBucket->err . ": " . $channelsBucket->errstr
+        if $isLatest;
+}
 
 # Tag the release in Git.
-chdir("/home/eelco/Dev/nix-pristine") or die;
-system("git remote update origin") == 0 or die;
-system("git tag --force --sign $version $nixRev -m 'Tagging release 
$version'") == 0 or die;
-system("git push --tags") == 0 or die;
-system("git push --force-with-lease origin $nixRev:refs/heads/latest-release") 
== 0 or die if $isLatest;
+unless ($opt->skip_git) {
+    chdir($opt->project_root) or die "Cannot chdir to " . $opt->project_root . 
": $!";
+    system("git remote update origin") == 0 or die;
+    system("git tag --force --sign $version $nixRev -m 'Tagging release 
$version'") == 0 or die;
+    system("git push origin refs/tags/$version") == 0 or die;
+    system("git push --force-with-lease origin 
$nixRev:refs/heads/latest-release") == 0 or die if $isLatest;
+}
 
 File::Path::remove_tree($narCache, {safe => 1});
 File::Path::remove_tree($tmpDir, {safe => 1});
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/nix-2.33.0/packaging/dev-shell.nix 
new/nix-2.33.1/packaging/dev-shell.nix
--- old/nix-2.33.0/packaging/dev-shell.nix      2025-12-10 21:38:26.000000000 
+0100
+++ new/nix-2.33.1/packaging/dev-shell.nix      2026-01-11 20:46:37.000000000 
+0100
@@ -148,6 +148,15 @@
     isInternal =
       dep: internalDrvs ? ${builtins.unsafeDiscardStringContext dep.drvPath or 
"_non-existent_"};
 
+    activeComponentNames = lib.listToAttrs (
+      map (c: {
+        name = c.pname or c.name;
+        value = null;
+      }) activeComponents
+    );
+
+    isActiveComponent = name: activeComponentNames ? ${name};
+
   in
   {
     pname = "shell-for-nix";
@@ -190,27 +199,19 @@
           }
         );
 
-      small =
-        (finalAttrs.finalPackage.withActiveComponents (
-          c:
-          lib.intersectAttrs (lib.genAttrs [
-            "nix-cli"
-            "nix-util-tests"
-            "nix-store-tests"
-            "nix-expr-tests"
-            "nix-fetchers-tests"
-            "nix-flake-tests"
-            "nix-functional-tests"
-            "nix-perl-bindings"
-          ] (_: null)) c
-        )).overrideAttrs
-          (o: {
-            mesonFlags = o.mesonFlags ++ [
-              # TODO: infer from activeComponents or vice versa
-              "-Dkaitai-struct-checks=false"
-              "-Djson-schema-checks=false"
-            ];
-          });
+      small = finalAttrs.finalPackage.withActiveComponents (
+        c:
+        lib.intersectAttrs (lib.genAttrs [
+          "nix-cli"
+          "nix-util-tests"
+          "nix-store-tests"
+          "nix-expr-tests"
+          "nix-fetchers-tests"
+          "nix-flake-tests"
+          "nix-functional-tests"
+          "nix-perl-bindings"
+        ] (_: null)) c
+      );
     };
 
     # Remove the version suffix to avoid unnecessary attempts to substitute in 
nix develop
@@ -275,21 +276,33 @@
 
     dontUseCmakeConfigure = true;
 
-    mesonFlags =
-      map (transformFlag "libutil") (ignoreCrossFile 
pkgs.nixComponents2.nix-util.mesonFlags)
-      ++ map (transformFlag "libstore") (ignoreCrossFile 
pkgs.nixComponents2.nix-store.mesonFlags)
-      ++ map (transformFlag "libfetchers") (ignoreCrossFile 
pkgs.nixComponents2.nix-fetchers.mesonFlags)
-      ++ lib.optionals havePerl (
-        map (transformFlag "perl") (ignoreCrossFile 
pkgs.nixComponents2.nix-perl-bindings.mesonFlags)
-      )
-      ++ map (transformFlag "libexpr") (ignoreCrossFile 
pkgs.nixComponents2.nix-expr.mesonFlags)
-      ++ map (transformFlag "libcmd") (ignoreCrossFile 
pkgs.nixComponents2.nix-cmd.mesonFlags);
+    mesonFlags = [
+      (lib.mesonBool "kaitai-struct-checks" (isActiveComponent 
"nix-kaitai-struct-checks"))
+      (lib.mesonBool "json-schema-checks" (isActiveComponent 
"nix-json-schema-checks"))
+    ]
+    ++ map (transformFlag "libutil") (ignoreCrossFile 
pkgs.nixComponents2.nix-util.mesonFlags)
+    ++ map (transformFlag "libstore") (ignoreCrossFile 
pkgs.nixComponents2.nix-store.mesonFlags)
+    ++ map (transformFlag "libfetchers") (ignoreCrossFile 
pkgs.nixComponents2.nix-fetchers.mesonFlags)
+    ++ lib.optionals havePerl (
+      map (transformFlag "perl") (ignoreCrossFile 
pkgs.nixComponents2.nix-perl-bindings.mesonFlags)
+    )
+    ++ map (transformFlag "libexpr") (ignoreCrossFile 
pkgs.nixComponents2.nix-expr.mesonFlags)
+    ++ map (transformFlag "libcmd") (ignoreCrossFile 
pkgs.nixComponents2.nix-cmd.mesonFlags);
 
     nativeBuildInputs =
       let
         inputs =
           dedupByString (v: "${v}") (
-            lib.filter (x: !isInternal x) (lib.lists.concatMap (c: 
c.nativeBuildInputs) activeComponents)
+            lib.filter (x: !isInternal x) (
+              lib.lists.concatMap (
+                # Nix manual has a build-time dependency on nix, but we
+                # don't want to do a native build just to enter the cross
+                # dev shell.
+                #
+                # TODO: think of a more principled fix for this.
+                c: lib.filter (f: f.pname or null != "nix") c.nativeBuildInputs
+              ) activeComponents
+            )
           )
           ++ lib.optional (
             !buildCanExecuteHost
@@ -305,8 +318,8 @@
             pkgs.buildPackages.nixfmt-rfc-style
             pkgs.buildPackages.shellcheck
             pkgs.buildPackages.include-what-you-use
-            pkgs.buildPackages.gdb
           ]
+          ++ lib.optional pkgs.hostPlatform.isUnix pkgs.buildPackages.gdb
           ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == 
stdenv.buildPlatform) (
             lib.hiPrio pkgs.buildPackages.clang-tools
           )
@@ -322,13 +335,13 @@
       )
     );
 
-    buildInputs = [
-      pkgs.gbenchmark
-    ]
-    ++ dedupByString (v: "${v}") (
-      lib.filter (x: !isInternal x) (lib.lists.concatMap (c: c.buildInputs) 
activeComponents)
-    )
-    ++ lib.optional havePerl pkgs.perl;
+    buildInputs =
+      # TODO change Nixpkgs to mark gbenchmark as building on Windows
+      lib.optional pkgs.hostPlatform.isUnix pkgs.gbenchmark
+      ++ dedupByString (v: "${v}") (
+        lib.filter (x: !isInternal x) (lib.lists.concatMap (c: c.buildInputs) 
activeComponents)
+      )
+      ++ lib.optional havePerl pkgs.perl;
 
     propagatedBuildInputs = dedupByString (v: "${v}") (
       lib.filter (x: !isInternal x) (lib.lists.concatMap (c: 
c.propagatedBuildInputs) activeComponents)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/nix-2.33.0/src/libfetchers/git-utils.cc 
new/nix-2.33.1/src/libfetchers/git-utils.cc
--- old/nix-2.33.0/src/libfetchers/git-utils.cc 2025-12-10 21:38:26.000000000 
+0100
+++ new/nix-2.33.1/src/libfetchers/git-utils.cc 2026-01-11 20:46:37.000000000 
+0100
@@ -1427,7 +1427,11 @@
 
 ref<GitRepo> Settings::getTarballCache() const
 {
-    static auto repoDir = std::filesystem::path(getCacheDir()) / 
"tarball-cache";
+    /* v1: Had either only loose objects or thin packfiles referring to loose 
objects
+     * v2: Must have only packfiles with no loose objects. Should get repacked 
periodically
+     * for optimal packfiles.
+     */
+    static auto repoDir = std::filesystem::path(getCacheDir()) / 
"tarball-cache-v2";
     return GitRepo::openRepo(repoDir, /*create=*/true, /*bare=*/true, 
/*packfilesOnly=*/true);
 }
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/nix-2.33.0/src/libstore/include/nix/store/build/derivation-builder.hh 
new/nix-2.33.1/src/libstore/include/nix/store/build/derivation-builder.hh
--- old/nix-2.33.0/src/libstore/include/nix/store/build/derivation-builder.hh   
2025-12-10 21:38:26.000000000 +0100
+++ new/nix-2.33.1/src/libstore/include/nix/store/build/derivation-builder.hh   
2026-01-11 20:46:37.000000000 +0100
@@ -79,7 +79,7 @@
      */
     const StorePathSet & inputPaths;
 
-    const std::map<std::string, InitialOutput> & initialOutputs;
+    const std::map<std::string, InitialOutput> initialOutputs;
 
     const BuildMode & buildMode;
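
This header tweak appears to be the libstore heap-use-after-free fix called out in the changelog: `initialOutputs` is now stored by value instead of by reference, so the builder no longer depends on the lifetime of a map owned elsewhere. A minimal sketch of the hazard and the fix, with `InitialOutput` reduced to a stub (illustrative only, not code from the Nix tree):

```c++
// Illustrative sketch: why a const-reference member can dangle, and why an
// owned copy cannot. Types are reduced to stubs.
#include <map>
#include <string>

struct InitialOutput { bool wanted = true; }; // stub for the real type

struct BuilderParams {
    // Before: const std::map<std::string, InitialOutput> & initialOutputs;
    // If the object that owned the map was torn down while the builder was
    // still running, every read through that reference was a use-after-free.
    const std::map<std::string, InitialOutput> initialOutputs; // owned copy
};

BuilderParams makeParams()
{
    std::map<std::string, InitialOutput> outputs{{"out", {}}};
    return BuilderParams{outputs}; // the map is copied; `outputs` may now go away
}

int main()
{
    auto params = makeParams();
    return params.initialOutputs.count("out") == 1 ? 0 : 1;
}
```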
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/nix-2.33.0/src/libstore/store-api.cc 
new/nix-2.33.1/src/libstore/store-api.cc
--- old/nix-2.33.0/src/libstore/store-api.cc    2025-12-10 21:38:26.000000000 
+0100
+++ new/nix-2.33.1/src/libstore/store-api.cc    2026-01-11 20:46:37.000000000 
+0100
@@ -454,6 +454,8 @@
                         .downloadSize = narInfo ? narInfo->fileSize : 0,
                         .narSize = info->narSize,
                     });
+
+                break; /* We are done. */
             } catch (InvalidPath &) {
             } catch (SubstituterDisabled &) {
             } catch (Error & e) {
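
For context, the added `break` sits inside a loop over the configured substituters; once one of them has answered, there is no point in querying the rest. A simplified sketch of that control flow (the loop structure is assumed for illustration; the real code in `src/libstore/store-api.cc` is more involved, and `queryOne` is a hypothetical stand-in for the actual substituter query):

```c++
// Simplified sketch of a "first substituter wins" loop, mirroring the effect
// of the added `break`: stop after the first successful answer, but fall
// through to the next substituter on failure.
#include <cstdint>
#include <exception>
#include <iostream>
#include <optional>
#include <string>
#include <vector>

struct SubstitutablePathInfo { std::uint64_t narSize = 0; };

// Hypothetical stand-in for querying one binary cache; may throw or find nothing.
std::optional<SubstitutablePathInfo> queryOne(const std::string & substituter)
{
    if (substituter == "https://cache.nixos.org")
        return SubstitutablePathInfo{12345};
    return std::nullopt;
}

int main()
{
    std::vector<std::string> substituters{"https://example.invalid", "https://cache.nixos.org"};
    for (auto & sub : substituters) {
        try {
            auto info = queryOne(sub);
            if (!info)
                continue;
            std::cout << "found in " << sub << ", narSize = " << info->narSize << "\n";
            break; /* We are done: don't query the remaining substituters. */
        } catch (std::exception &) {
            /* Ignore this substituter and try the next one, as the real code
               does for InvalidPath, SubstituterDisabled, etc. */
        }
    }
}
```
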
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/nix-2.33.0/src/libstore/unix/build/derivation-builder.cc 
new/nix-2.33.1/src/libstore/unix/build/derivation-builder.cc
--- old/nix-2.33.0/src/libstore/unix/build/derivation-builder.cc        
2025-12-10 21:38:26.000000000 +0100
+++ new/nix-2.33.1/src/libstore/unix/build/derivation-builder.cc        
2026-01-11 20:46:37.000000000 +0100
@@ -678,17 +678,17 @@
     }
 }
 
-static bool checkNotWorldWritable(std::filesystem::path path)
+static void checkNotWorldWritable(std::filesystem::path path)
 {
     while (true) {
         auto st = lstat(path);
         if (st.st_mode & S_IWOTH)
-            return false;
+            throw Error("Path %s is world-writable or a symlink. That's not 
allowed for security.", path);
         if (path == path.parent_path())
             break;
         path = path.parent_path();
     }
-    return true;
+    return;
 }
 
 std::optional<Descriptor> DerivationBuilderImpl::startBuild()
@@ -710,9 +710,8 @@
 
     createDirs(buildDir);
 
-    if (buildUser && !checkNotWorldWritable(buildDir))
-        throw Error(
-            "Path %s or a parent directory is world-writable or a symlink. 
That's not allowed for security.", buildDir);
+    if (buildUser)
+        checkNotWorldWritable(buildDir);
 
     /* Create a temporary directory where the build will take
        place. */
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/nix-2.33.0/src/libstore/unix/build/sandbox-network.sb 
new/nix-2.33.1/src/libstore/unix/build/sandbox-network.sb
--- old/nix-2.33.0/src/libstore/unix/build/sandbox-network.sb   2025-12-10 
21:38:26.000000000 +0100
+++ new/nix-2.33.1/src/libstore/unix/build/sandbox-network.sb   2026-01-11 
20:46:37.000000000 +0100
@@ -16,6 +16,7 @@
 
 ; Allow DNS lookups.
 (allow network-outbound (remote unix-socket (path-literal 
"/private/var/run/mDNSResponder")))
+(allow mach-lookup (global-name 
"com.apple.SystemConfiguration.DNSConfiguration"))
 
 ; Allow access to trustd.
 (allow mach-lookup (global-name "com.apple.trustd"))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/nix-2.33.0/src/libutil/file-system.cc 
new/nix-2.33.1/src/libutil/file-system.cc
--- old/nix-2.33.0/src/libutil/file-system.cc   2025-12-10 21:38:26.000000000 
+0100
+++ new/nix-2.33.1/src/libutil/file-system.cc   2026-01-11 20:46:37.000000000 
+0100
@@ -713,17 +713,27 @@
 {
     AutoCloseFD fd;
 #ifdef O_TMPFILE
-    fd = ::open(defaultTempDir().c_str(), O_TMPFILE | O_CLOEXEC | O_RDWR, 
S_IWUSR | S_IRUSR);
-    if (!fd)
-        throw SysError("creating anonymous temporary file");
-#else
+    static std::atomic_flag tmpfileUnsupported{};
+    if (!tmpfileUnsupported.test()) /* Try with O_TMPFILE first. */ {
+        /* Use O_EXCL, because the file is never supposed to be linked into 
filesystem. */
+        fd = ::open(defaultTempDir().c_str(), O_TMPFILE | O_CLOEXEC | O_RDWR | 
O_EXCL, S_IWUSR | S_IRUSR);
+        if (!fd) {
+            /* Not supported by the filesystem or the kernel. */
+            if (errno == EOPNOTSUPP || errno == EISDIR)
+                tmpfileUnsupported.test_and_set(); /* Set flag and fall 
through to createTempFile. */
+            else
+                throw SysError("creating anonymous temporary file");
+        } else {
+            return fd; /* Successfully created. */
+        }
+    }
+#endif
     auto [fd2, path] = createTempFile("nix-anonymous");
     if (!fd2)
         throw SysError("creating temporary file '%s'", path);
     fd = std::move(fd2);
-#  ifndef _WIN32
+#ifndef _WIN32
     unlink(requireCString(path)); /* We only care about the file descriptor. */
-#  endif
 #endif
     return fd;
 }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/nix-2.33.0/src/libutil/include/nix/util/meson.build 
new/nix-2.33.1/src/libutil/include/nix/util/meson.build
--- old/nix-2.33.0/src/libutil/include/nix/util/meson.build     2025-12-10 
21:38:26.000000000 +0100
+++ new/nix-2.33.1/src/libutil/include/nix/util/meson.build     2026-01-11 
20:46:37.000000000 +0100
@@ -65,6 +65,7 @@
   'signals.hh',
   'signature/local-keys.hh',
   'signature/signer.hh',
+  'socket.hh',
   'sort.hh',
   'source-accessor.hh',
   'source-path.hh',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/src/libutil/include/nix/util/socket.hh new/nix-2.33.1/src/libutil/include/nix/util/socket.hh
--- old/nix-2.33.0/src/libutil/include/nix/util/socket.hh       1970-01-01 01:00:00.000000000 +0100
+++ new/nix-2.33.1/src/libutil/include/nix/util/socket.hh       2026-01-11 20:46:37.000000000 +0100
@@ -0,0 +1,61 @@
+#pragma once
+///@file
+
+#include "nix/util/file-descriptor.hh"
+
+#ifdef _WIN32
+#  include <winsock2.h>
+#endif
+
+namespace nix {
+
+/**
+ * Often we want to use `Descriptor`, but Windows makes a slightly
+ * stronger file descriptor vs socket distinction, at least at the level
+ * of C types.
+ */
+using Socket =
+#ifdef _WIN32
+    SOCKET
+#else
+    int
+#endif
+    ;
+
+#ifdef _WIN32
+/**
+ * Windows gives this a different name
+ */
+#  define SHUT_WR SD_SEND
+#  define SHUT_RDWR SD_BOTH
+#endif
+
+/**
+ * Convert a `Descriptor` to a `Socket`
+ *
+ * This is a no-op except on Windows.
+ */
+static inline Socket toSocket(Descriptor fd)
+{
+#ifdef _WIN32
+    return reinterpret_cast<Socket>(fd);
+#else
+    return fd;
+#endif
+}
+
+/**
+ * Convert a `Socket` to a `Descriptor`
+ *
+ * This is a no-op except on Windows.
+ */
+static inline Descriptor fromSocket(Socket fd)
+{
+#ifdef _WIN32
+    return reinterpret_cast<Descriptor>(fd);
+#else
+    return fd;
+#endif
+}
+
+} // namespace nix
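
A small usage sketch of the new header: callers keep a generic descriptor and convert only at the call sites that need a socket type. POSIX build assumed, so Socket is int and the conversion is the identity; in-tree code would include "nix/util/socket.hh" rather than redefining these two lines:

    #include <cstdio>
    #include <sys/socket.h>

    using Socket = int;                             // what nix/util/socket.hh provides on POSIX
    static inline Socket toSocket(int fd) { return fd; }

    // Half-close the write side of a connection held as a plain descriptor.
    void closeWriteSide(int fd)
    {
        // On Windows, toSocket() would reinterpret the handle as a SOCKET and
        // SHUT_WR would expand to SD_SEND; on POSIX this is shutdown(fd, SHUT_WR).
        if (shutdown(toSocket(fd), SHUT_WR) == -1)
            std::perror("shutdown");
    }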
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/src/libutil/include/nix/util/unix-domain-socket.hh new/nix-2.33.1/src/libutil/include/nix/util/unix-domain-socket.hh
--- old/nix-2.33.0/src/libutil/include/nix/util/unix-domain-socket.hh   2025-12-10 21:38:26.000000000 +0100
+++ new/nix-2.33.1/src/libutil/include/nix/util/unix-domain-socket.hh   2026-01-11 20:46:37.000000000 +0100
@@ -3,10 +3,8 @@
 
 #include "nix/util/types.hh"
 #include "nix/util/file-descriptor.hh"
+#include "nix/util/socket.hh"
 
-#ifdef _WIN32
-#  include <winsock2.h>
-#endif
 #include <unistd.h>
 
 #include <filesystem>
@@ -24,55 +22,6 @@
 AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode);
 
 /**
- * Often we want to use `Descriptor`, but Windows makes a slightly
- * stronger file descriptor vs socket distinction, at least at the level
- * of C types.
- */
-using Socket =
-#ifdef _WIN32
-    SOCKET
-#else
-    int
-#endif
-    ;
-
-#ifdef _WIN32
-/**
- * Windows gives this a different name
- */
-#  define SHUT_WR SD_SEND
-#  define SHUT_RDWR SD_BOTH
-#endif
-
-/**
- * Convert a `Socket` to a `Descriptor`
- *
- * This is a no-op except on Windows.
- */
-static inline Socket toSocket(Descriptor fd)
-{
-#ifdef _WIN32
-    return reinterpret_cast<Socket>(fd);
-#else
-    return fd;
-#endif
-}
-
-/**
- * Convert a `Socket` to a `Descriptor`
- *
- * This is a no-op except on Windows.
- */
-static inline Descriptor fromSocket(Socket fd)
-{
-#ifdef _WIN32
-    return reinterpret_cast<Descriptor>(fd);
-#else
-    return fd;
-#endif
-}
-
-/**
  * Bind a Unix domain socket to a path.
  */
 void bind(Socket fd, const std::string & path);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/src/libutil/serialise.cc new/nix-2.33.1/src/libutil/serialise.cc
--- old/nix-2.33.0/src/libutil/serialise.cc     2025-12-10 21:38:26.000000000 +0100
+++ new/nix-2.33.1/src/libutil/serialise.cc     2026-01-11 20:46:37.000000000 +0100
@@ -1,6 +1,7 @@
 #include "nix/util/serialise.hh"
 #include "nix/util/compression.hh"
 #include "nix/util/signals.hh"
+#include "nix/util/socket.hh"
 #include "nix/util/util.hh"
 
 #include <cstring>
@@ -11,7 +12,6 @@
 
 #ifdef _WIN32
 #  include <fileapi.h>
-#  include <winsock2.h>
 #  include "nix/util/windows-error.hh"
 #else
 #  include <poll.h>
@@ -184,20 +184,20 @@
     while (true) {
         fd_set fds;
         FD_ZERO(&fds);
-        int fd_ = fromDescriptorReadOnly(fd);
-        FD_SET(fd_, &fds);
+        Socket sock = toSocket(fd);
+        FD_SET(sock, &fds);
 
         struct timeval timeout;
         timeout.tv_sec = 0;
         timeout.tv_usec = 0;
 
-        auto n = select(fd_ + 1, &fds, nullptr, nullptr, &timeout);
+        auto n = select(sock + 1, &fds, nullptr, nullptr, &timeout);
         if (n < 0) {
             if (errno == EINTR)
                 continue;
             throw SysError("polling file descriptor");
         }
-        return FD_ISSET(fd, &fds);
+        return FD_ISSET(sock, &fds);
     }
 }
 
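Without the Windows plumbing, the readiness check above reduces to this POSIX-only sketch: a select() call with a zero timeout, retried on EINTR, so it reports pending data without ever blocking:

    #include <cerrno>
    #include <stdexcept>
    #include <sys/select.h>

    bool hasDataToRead(int fd)
    {
        while (true) {
            fd_set fds;
            FD_ZERO(&fds);
            FD_SET(fd, &fds);

            struct timeval timeout = {0, 0};        // poll, don't wait
            int n = select(fd + 1, &fds, nullptr, nullptr, &timeout);
            if (n < 0) {
                if (errno == EINTR)
                    continue;                       // interrupted by a signal, retry
                throw std::runtime_error("polling file descriptor");
            }
            return FD_ISSET(fd, &fds) != 0;
        }
    }
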
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/src/libutil/union-source-accessor.cc new/nix-2.33.1/src/libutil/union-source-accessor.cc
--- old/nix-2.33.0/src/libutil/union-source-accessor.cc 2025-12-10 21:38:26.000000000 +0100
+++ new/nix-2.33.1/src/libutil/union-source-accessor.cc 2026-01-11 20:46:37.000000000 +0100
@@ -35,14 +35,18 @@
     DirEntries readDirectory(const CanonPath & path) override
     {
         DirEntries result;
+        bool exists = false;
         for (auto & accessor : accessors) {
             auto st = accessor->maybeLstat(path);
             if (!st)
                 continue;
+            exists = true;
             for (auto & entry : accessor->readDirectory(path))
                 // Don't override entries from previous accessors.
                 result.insert(entry);
         }
+        if (!exists)
+            throw FileNotFound("path '%s' does not exist", showPath(path));
         return result;
     }
 
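The fix follows the usual overlay-merge rule: collect entries from every layer, let earlier layers win on name clashes, and treat the directory as existing only if at least one layer has it. A standalone sketch of the same idea over std::filesystem (not the nix SourceAccessor API):

    #include <filesystem>
    #include <map>
    #include <stdexcept>
    #include <string>
    #include <vector>

    namespace fs = std::filesystem;

    // Merge directory listings from several layers; earlier layers win on
    // name clashes, and a directory present in no layer is an error.
    std::map<std::string, fs::file_type>
    readUnionDirectory(const std::vector<fs::path> & layers, const fs::path & rel)
    {
        std::map<std::string, fs::file_type> result;
        bool exists = false;
        for (auto & layer : layers) {
            auto dir = layer / rel;
            if (!fs::exists(fs::symlink_status(dir)))
                continue;                           // this layer doesn't have it
            exists = true;
            for (auto & entry : fs::directory_iterator(dir))
                // insert() keeps the entry from an earlier layer on conflict
                result.insert({entry.path().filename().string(),
                               entry.symlink_status().type()});
        }
        if (!exists)
            throw std::runtime_error("path '" + rel.string() + "' does not exist");
        return result;
    }
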
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/src/nix/unix/daemon.cc new/nix-2.33.1/src/nix/unix/daemon.cc
--- old/nix-2.33.0/src/nix/unix/daemon.cc       2025-12-10 21:38:26.000000000 +0100
+++ new/nix-2.33.1/src/nix/unix/daemon.cc       2026-01-11 20:46:37.000000000 +0100
@@ -437,22 +437,23 @@
     int from = conn->from.fd;
     int to = conn->to.fd;
 
-    auto nfds = std::max(from, STDIN_FILENO) + 1;
+    Socket fromSock = toSocket(from), stdinSock = toSocket(getStandardInput());
+    auto nfds = std::max(fromSock, stdinSock) + 1;
     while (true) {
         fd_set fds;
         FD_ZERO(&fds);
-        FD_SET(from, &fds);
-        FD_SET(STDIN_FILENO, &fds);
+        FD_SET(fromSock, &fds);
+        FD_SET(stdinSock, &fds);
         if (select(nfds, &fds, nullptr, nullptr, nullptr) == -1)
             throw SysError("waiting for data from client or server");
-        if (FD_ISSET(from, &fds)) {
+        if (FD_ISSET(fromSock, &fds)) {
             auto res = splice(from, nullptr, STDOUT_FILENO, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
             if (res == -1)
                 throw SysError("splicing data from daemon socket to stdout");
             else if (res == 0)
                 throw EndOfFile("unexpected EOF from daemon socket");
         }
-        if (FD_ISSET(STDIN_FILENO, &fds)) {
+        if (FD_ISSET(stdinSock, &fds)) {
             auto res = splice(STDIN_FILENO, nullptr, to, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
             if (res == -1)
                 throw SysError("splicing data from stdin to daemon socket");
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-nonexistent-1.err.exp new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-nonexistent-1.err.exp
--- old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-nonexistent-1.err.exp        1970-01-01 01:00:00.000000000 +0100
+++ new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-nonexistent-1.err.exp        2026-01-11 20:46:37.000000000 +0100
@@ -0,0 +1,16 @@
+error:
+       … while evaluating the attribute 'absolutePath'
+         at /pwd/lang/eval-fail-readDir-nonexistent-1.nix:2:3:
+            1| {
+            2|   absolutePath = builtins.readDir /this/path/really/should/not/exist;
+             |   ^
+            3| }
+
+       … while calling the 'readDir' builtin
+         at /pwd/lang/eval-fail-readDir-nonexistent-1.nix:2:18:
+            1| {
+            2|   absolutePath = builtins.readDir /this/path/really/should/not/exist;
+             |                  ^
+            3| }
+
+       error: path '/this/path/really/should/not/exist' does not exist
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-nonexistent-1.nix new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-nonexistent-1.nix
--- old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-nonexistent-1.nix    1970-01-01 01:00:00.000000000 +0100
+++ new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-nonexistent-1.nix    2026-01-11 20:46:37.000000000 +0100
@@ -0,0 +1,3 @@
+{
+  absolutePath = builtins.readDir /this/path/really/should/not/exist;
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-nonexistent-2.err.exp new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-nonexistent-2.err.exp
--- old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-nonexistent-2.err.exp        1970-01-01 01:00:00.000000000 +0100
+++ new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-nonexistent-2.err.exp        2026-01-11 20:46:37.000000000 +0100
@@ -0,0 +1,16 @@
+error:
+       … while evaluating the attribute 'relativePath'
+         at /pwd/lang/eval-fail-readDir-nonexistent-2.nix:2:3:
+            1| {
+            2|   relativePath = builtins.readDir ./this/path/really/should/not/exist;
+             |   ^
+            3| }
+
+       … while calling the 'readDir' builtin
+         at /pwd/lang/eval-fail-readDir-nonexistent-2.nix:2:18:
+            1| {
+            2|   relativePath = builtins.readDir ./this/path/really/should/not/exist;
+             |                  ^
+            3| }
+
+       error: path '/pwd/lang/this/path/really/should/not/exist' does not exist
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-nonexistent-2.nix new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-nonexistent-2.nix
--- old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-nonexistent-2.nix    1970-01-01 01:00:00.000000000 +0100
+++ new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-nonexistent-2.nix    2026-01-11 20:46:37.000000000 +0100
@@ -0,0 +1,3 @@
+{
+  relativePath = builtins.readDir ./this/path/really/should/not/exist;
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-not-a-directory-1.err.exp new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-not-a-directory-1.err.exp
--- old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-not-a-directory-1.err.exp        1970-01-01 01:00:00.000000000 +0100
+++ new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-not-a-directory-1.err.exp        2026-01-11 20:46:37.000000000 +0100
@@ -0,0 +1,16 @@
+error:
+       … while evaluating the attribute 'regularFile'
+         at /pwd/lang/eval-fail-readDir-not-a-directory-1.nix:2:3:
+            1| {
+            2|   regularFile = builtins.readDir ./readDir/bar;
+             |   ^
+            3| }
+
+       … while calling the 'readDir' builtin
+         at /pwd/lang/eval-fail-readDir-not-a-directory-1.nix:2:17:
+            1| {
+            2|   regularFile = builtins.readDir ./readDir/bar;
+             |                 ^
+            3| }
+
+       error: cannot read directory "/pwd/lang/readDir/bar": Not a directory
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-not-a-directory-1.nix new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-not-a-directory-1.nix
--- old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-not-a-directory-1.nix        1970-01-01 01:00:00.000000000 +0100
+++ new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-not-a-directory-1.nix        2026-01-11 20:46:37.000000000 +0100
@@ -0,0 +1,3 @@
+{
+  regularFile = builtins.readDir ./readDir/bar;
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-not-a-directory-2.err.exp new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-not-a-directory-2.err.exp
--- old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-not-a-directory-2.err.exp        1970-01-01 01:00:00.000000000 +0100
+++ new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-not-a-directory-2.err.exp        2026-01-11 20:46:37.000000000 +0100
@@ -0,0 +1,16 @@
+error:
+       … while evaluating the attribute 'symlinkedRegularFile'
+         at /pwd/lang/eval-fail-readDir-not-a-directory-2.nix:2:3:
+            1| {
+            2|   symlinkedRegularFile = builtins.readDir ./readDir/linked;
+             |   ^
+            3| }
+
+       … while calling the 'readDir' builtin
+         at /pwd/lang/eval-fail-readDir-not-a-directory-2.nix:2:26:
+            1| {
+            2|   symlinkedRegularFile = builtins.readDir ./readDir/linked;
+             |                          ^
+            3| }
+
+       error: cannot read directory "/pwd/lang/readDir/foo/git-hates-directories": Not a directory
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-not-a-directory-2.nix new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-not-a-directory-2.nix
--- old/nix-2.33.0/tests/functional/lang/eval-fail-readDir-not-a-directory-2.nix        1970-01-01 01:00:00.000000000 +0100
+++ new/nix-2.33.1/tests/functional/lang/eval-fail-readDir-not-a-directory-2.nix        2026-01-11 20:46:37.000000000 +0100
@@ -0,0 +1,3 @@
+{
+  symlinkedRegularFile = builtins.readDir ./readDir/linked;
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/tests/functional/lang/eval-okay-readDir-symlinked-directory.exp new/nix-2.33.1/tests/functional/lang/eval-okay-readDir-symlinked-directory.exp
--- old/nix-2.33.0/tests/functional/lang/eval-okay-readDir-symlinked-directory.exp      1970-01-01 01:00:00.000000000 +0100
+++ new/nix-2.33.1/tests/functional/lang/eval-okay-readDir-symlinked-directory.exp      2026-01-11 20:46:37.000000000 +0100
@@ -0,0 +1 @@
+{ git-hates-directories = "regular"; }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/nix-2.33.0/tests/functional/lang/eval-okay-readDir-symlinked-directory.nix new/nix-2.33.1/tests/functional/lang/eval-okay-readDir-symlinked-directory.nix
--- old/nix-2.33.0/tests/functional/lang/eval-okay-readDir-symlinked-directory.nix      1970-01-01 01:00:00.000000000 +0100
+++ new/nix-2.33.1/tests/functional/lang/eval-okay-readDir-symlinked-directory.nix      2026-01-11 20:46:37.000000000 +0100
@@ -0,0 +1 @@
+builtins.readDir ./readDir/ldir
