Sven-Hendrik Haase pushed to branch main at Arch Linux / Packaging / Packages / 
ollama


Commits:
870e8dd0 by Sven-Hendrik Haase at 2026-02-27T01:11:56+01:00
upgpkg: 0.17.1-1

- - - - -


2 changed files:

- .SRCINFO
- PKGBUILD


Changes:

=====================================
.SRCINFO
=====================================
@@ -1,6 +1,6 @@
 pkgbase = ollama
        pkgdesc = Create, run and share large language models (LLMs)
-       pkgver = 0.17.0
+       pkgver = 0.17.1
        pkgrel = 1
        url = https://github.com/ollama/ollama
        arch = x86_64
@@ -16,15 +16,16 @@ pkgbase = ollama
        makedepends = vulkan-headers
        makedepends = vulkan-icd-loader
        makedepends = shaderc
-       depends = gcc-libs
+       depends = libgcc
+       depends = libstdc++
        depends = glibc
        options = !lto
-       source = git+https://github.com/ollama/ollama#tag=v0.17.0
+       source = git+https://github.com/ollama/ollama#tag=v0.17.1
        source = ollama-ld.conf
        source = ollama.service
        source = sysusers.conf
        source = tmpfiles.d
-       b2sums = 14826d8f1e1a61cf7f09f860ebeff04bb466f6da94e0034eeb253dd9ed957a81bc758a94879e7fabc51510a046ee78c628d5fd07ad065c0c285cd9bcaf5fff93
+       b2sums = c342c6b1e8bbcd76725d114631aef63f374fd0ff54d0f26ae30a55d9ce578163cbe5ef68b056d40286d64ad499cf037e4d043a391326bc2085fa8d0eff976b4f
        b2sums = 121a7854b5a7ffb60226aaf22eed1f56311ab7d0a5630579525211d5c096040edbcfd2608169a4b6d83e8b4e4855dbb22f8ebf3d52de78a34ea3d4631b7eff36
        b2sums = 031e0809a7f564de87017401c83956d43ac29bd0e988b250585af728b952a27d139b3cad0ab1e43750e2cd3b617287d3b81efc4a70ddd61709127f68bd15eabd
        b2sums = 68622ac2e20c1d4f9741c57d2567695ec7b5204ab43356d164483cd3bc9da79fad72489bb33c8a17c2e5cb3b142353ed5f466ce857b0f46965426d16fb388632
@@ -34,14 +35,16 @@ pkgname = ollama
 
 pkgname = ollama-rocm
        pkgdesc = Create, run and share large language models (LLMs) with ROCm
-       depends = gcc-libs
+       depends = libgcc
+       depends = libstdc++
        depends = glibc
        depends = ollama
        depends = hipblas
 
 pkgname = ollama-cuda
        pkgdesc = Create, run and share large language models (LLMs) with CUDA
-       depends = gcc-libs
+       depends = libgcc
+       depends = libstdc++
        depends = glibc
        depends = ollama
        depends = cuda


=====================================
PKGBUILD
=====================================
@@ -6,14 +6,14 @@
 
 pkgbase=ollama
 pkgname=(ollama ollama-rocm ollama-cuda ollama-vulkan ollama-docs)
-pkgver=0.17.0
+pkgver=0.17.1
 pkgrel=1
 pkgdesc='Create, run and share large language models (LLMs)'
 arch=(x86_64)
 url='https://github.com/ollama/ollama'
 license=(MIT)
 options=('!lto')
-depends=(gcc-libs glibc)
+depends=(libgcc libstdc++ glibc)
 makedepends=(cmake
              ninja
              git
@@ -30,7 +30,7 @@ source=(git+https://github.com/ollama/ollama#tag=v$pkgver
         ollama.service
         sysusers.conf
         tmpfiles.d)
-b2sums=('14826d8f1e1a61cf7f09f860ebeff04bb466f6da94e0034eeb253dd9ed957a81bc758a94879e7fabc51510a046ee78c628d5fd07ad065c0c285cd9bcaf5fff93'
+b2sums=('c342c6b1e8bbcd76725d114631aef63f374fd0ff54d0f26ae30a55d9ce578163cbe5ef68b056d40286d64ad499cf037e4d043a391326bc2085fa8d0eff976b4f'
         
'121a7854b5a7ffb60226aaf22eed1f56311ab7d0a5630579525211d5c096040edbcfd2608169a4b6d83e8b4e4855dbb22f8ebf3d52de78a34ea3d4631b7eff36'
         
'031e0809a7f564de87017401c83956d43ac29bd0e988b250585af728b952a27d139b3cad0ab1e43750e2cd3b617287d3b81efc4a70ddd61709127f68bd15eabd'
         
'68622ac2e20c1d4f9741c57d2567695ec7b5204ab43356d164483cd3bc9da79fad72489bb33c8a17c2e5cb3b142353ed5f466ce857b0f46965426d16fb388632'



View it on GitLab: 
https://gitlab.archlinux.org/archlinux/packaging/packages/ollama/-/commit/870e8dd0113bd3ec2a0736b898b7bac4a90db753

-- 
View it on GitLab: 
https://gitlab.archlinux.org/archlinux/packaging/packages/ollama/-/commit/870e8dd0113bd3ec2a0736b898b7bac4a90db753
You're receiving this email because of your account on gitlab.archlinux.org.


Reply via email to