Sven-Hendrik Haase pushed to branch main at Arch Linux / Packaging / Packages / python-pytorch


Commits:
f2eac825 by Sven-Hendrik Haase at 2024-04-08T17:17:09+02:00
upgpkg: 2.2.2-3: Use system libs for eigen and pybind11

See #12.

- - - - -
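
The rebuild swaps the bundled third_party copies of eigen and pybind11 for the
repo packages, so eigen also becomes a runtime dependency of every split
package. As an illustrative check (not part of the commit), both packages are
available from the official repositories:

    pacman -Si eigen pybind11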


2 changed files:

- .SRCINFO
- PKGBUILD


Changes:

=====================================
.SRCINFO
=====================================
@@ -1,7 +1,7 @@
 pkgbase = python-pytorch
        pkgdesc = Tensors and Dynamic neural networks in Python with strong GPU acceleration
        pkgver = 2.2.2
-       pkgrel = 2
+       pkgrel = 3
        url = https://pytorch.org
        arch = x86_64
        license = BSD
@@ -40,6 +40,7 @@ pkgbase = python-pytorch
        depends = ffmpeg
        depends = python-future
        depends = qt6-base
+       depends = eigen
        depends = intel-oneapi-mkl
        depends = python-typing_extensions
        depends = numactl
@@ -192,6 +193,7 @@ pkgname = python-pytorch-cuda
        depends = ffmpeg
        depends = python-future
        depends = qt6-base
+       depends = eigen
        depends = intel-oneapi-mkl
        depends = python-typing_extensions
        depends = numactl
@@ -223,6 +225,7 @@ pkgname = python-pytorch-opt-cuda
        depends = ffmpeg
        depends = python-future
        depends = qt6-base
+       depends = eigen
        depends = intel-oneapi-mkl
        depends = python-typing_extensions
        depends = numactl
@@ -255,6 +258,7 @@ pkgname = python-pytorch-rocm
        depends = ffmpeg
        depends = python-future
        depends = qt6-base
+       depends = eigen
        depends = intel-oneapi-mkl
        depends = python-typing_extensions
        depends = numactl
@@ -286,6 +290,7 @@ pkgname = python-pytorch-opt-rocm
        depends = ffmpeg
        depends = python-future
        depends = qt6-base
+       depends = eigen
        depends = intel-oneapi-mkl
        depends = python-typing_extensions
        depends = numactl
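
.SRCINFO mirrors the metadata from the PKGBUILD below and is not edited by
hand; after a PKGBUILD change it is typically regenerated with:

    makepkg --printsrcinfo > .SRCINFO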


=====================================
PKGBUILD
=====================================
@@ -7,14 +7,14 @@ pkgbase="python-${_pkgname}"
 pkgname=("${pkgbase}" "${pkgbase}-opt" "${pkgbase}-cuda" "${pkgbase}-opt-cuda" 
"${pkgbase}-rocm" "${pkgbase}-opt-rocm")
 pkgver=2.2.2
 _pkgver=2.2.2
-pkgrel=2
+pkgrel=3
 _pkgdesc='Tensors and Dynamic neural networks in Python with strong GPU acceleration'
 pkgdesc="${_pkgdesc}"
 arch=('x86_64')
 url="https://pytorch.org";
 license=('BSD')
 depends=('google-glog' 'gflags' 'opencv' 'openmp' 'openmpi' 'pybind11' 'python' 'python-yaml' 'libuv'
-         'python-numpy' 'python-sympy' 'protobuf' 'ffmpeg' 'python-future' 'qt6-base'
+         'python-numpy' 'python-sympy' 'protobuf' 'ffmpeg' 'python-future' 'qt6-base' 'eigen'
          'intel-oneapi-mkl' 'python-typing_extensions' 'numactl' 'python-jinja'
          'python-networkx' 'python-filelock')
 makedepends=('python' 'python-setuptools' 'python-yaml' 'python-numpy' 'cmake' 'cuda'
@@ -273,6 +273,8 @@ _prepare() {
   export USE_OPENCV=ON
   # export USE_SYSTEM_LIBS=ON  # experimental, not all libs present in repos
   export USE_SYSTEM_NCCL=ON
+  export USE_SYSTEM_PYBIND11=ON
+  export USE_SYSTEM_EIGEN_INSTALL=ON
   export NCCL_VERSION=$(pkg-config nccl --modversion)
   export NCCL_VER_CODE=$(sed -n 's/^#define NCCL_VERSION_CODE\s*\(.*\).*/\1/p' /usr/include/nccl.h)
   # export BUILD_SPLIT_CUDA=ON  # modern preferred build, but splits libs and symbols, ABI break
@@ -394,8 +396,7 @@ _package() {
     mv "${_lib}" "${pkgdir}"/usr/lib/
   done
 
-  # clean up duplicates
-  rm -r "${pkgdir}/usr/include/pybind11"
+  # Clean up duplicates with Arch packages
   rm "${pkgdir}"/usr/include/*.h
 
   # Python module is hardcoded so look there at runtime
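
The two new USE_SYSTEM_* exports rely on PyTorch's setup.py forwarding USE_*
environment variables to CMake; under that assumption the configure step
behaves roughly as if it were invoked with (illustrative, <existing options>
stands for the rest of the PKGBUILD's configuration):

    cmake -DUSE_SYSTEM_PYBIND11=ON -DUSE_SYSTEM_EIGEN_INSTALL=ON <existing options>

so the build picks up the headers shipped by the eigen and pybind11 packages
instead of the copies vendored under third_party/.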



View it on GitLab: 
https://gitlab.archlinux.org/archlinux/packaging/packages/python-pytorch/-/commit/f2eac825b55837ec9ef02a0fb05713bdda3c9c53
