Alexander Rødseth pushed to branch main at Arch Linux / Packaging / Packages / 
ollama


Commits:
659c0326 by Alexander F. Rødseth at 2024-06-09T00:47:03+02:00
Fix issue #5 about the GIN_MODE env var

- - - - -
3251d06e by Alexander F. Rødseth at 2024-06-09T01:54:08+02:00
upgpkg: 0.1.42-1

- - - - -


3 changed files:

- .SRCINFO
- PKGBUILD
- ollama.service


Changes:

=====================================
.SRCINFO
=====================================
@@ -1,6 +1,6 @@
 pkgbase = ollama
        pkgdesc = Create, run and share large language models (LLMs)
-       pkgver = 0.1.41
+       pkgver = 0.1.42
        pkgrel = 1
        url = https://github.com/ollama/ollama
        arch = x86_64
@@ -12,14 +12,14 @@ pkgbase = ollama
        makedepends = go
        makedepends = rocm-hip-sdk
        makedepends = rocm-opencl-sdk
-       source = git+https://github.com/ollama/ollama#commit=476fb8e89242720a7cdd57400ba928de4dde9cc1
+       source = git+https://github.com/ollama/ollama#commit=385a32ecb5b2987f9cd7decaf0052f0a316ac6f6
        source = llama.cpp::git+https://github.com/ggerganov/llama.cpp#commit=5921b8f089d3b7bda86aac5a66825df6a6c10603
        source = ollama.service
        source = sysusers.conf
        source = tmpfiles.d
-       b2sums = b05df8c2ea49d332a394c2688b04bda67718f2f28d251adfaebf7cc04f7e57482275b8e5e8373d6be26b44613f16fe1c50954b55b2378309872af7935db055bb
+       b2sums = f0bfb4bb1c1133b722bb4b185c46ee1a9b4407e7f61292cdf2f2d69b6a24078c35bd1ad4069d8602943e7e1caa85dfae0233a912408d5b6337afea720cf9f684
        b2sums = 21643fc46052e673f747606a774bb7b161e41e3c0166700281d995018003d0af573db6d7c2ddf68765449545b72b41713f9335aa3485df90871431bc66097b27
-       b2sums = 2bf4c2076b7841de266ec40da2e2cbb675dcbfebfa8aed8d4ede65435854cb43d39ea32bc9210cfc28a042382dd0094a153e351edfa5586eb7c6a0783f3bc517
+       b2sums = 18a1468f5614f9737f6ff2e6c7dfb3dfc0ba82836a98e3f14f8e544e3aba8f74ef0a03c5376a0d0aa2e59e948701d7c639dda69477b051b732896021e753e32e
        b2sums = 3aabf135c4f18e1ad745ae8800db782b25b15305dfeaaa031b4501408ab7e7d01f66e8ebb5be59fc813cfbff6788d08d2e48dcf24ecc480a40ec9db8dbce9fec
        b2sums = e8f2b19e2474f30a4f984b45787950012668bf0acb5ad1ebb25cd9776925ab4a6aa927f8131ed53e35b1c71b32c504c700fe5b5145ecd25c7a8284373bb951ed
 
 


=====================================
PKGBUILD
=====================================
@@ -1,13 +1,14 @@
 # Maintainer: Alexander F. Rødseth <xypr...@archlinux.org>
+# Contributor: Steven Allen <ste...@stebalien.com>
 # Contributor: Matt Harrison <m...@harrison.us.com>
 # Contributor: Kainoa Kanter <kai...@t1c.dev>
 
 pkgbase=ollama
 pkgname=(ollama ollama-cuda ollama-rocm)
-pkgver=0.1.41
-_ollamacommit=476fb8e89242720a7cdd57400ba928de4dde9cc1 # tag: v0.1.41
+pkgver=0.1.42
+_ollamacommit=385a32ecb5b2987f9cd7decaf0052f0a316ac6f6 # tag: v0.1.42
 # The llama.cpp git submodule commit hash can be found here:
-# https://github.com/ollama/ollama/tree/v0.1.41/llm
+# https://github.com/ollama/ollama/tree/v0.1.42/llm
 _llama_cpp_commit=5921b8f089d3b7bda86aac5a66825df6a6c10603
 pkgrel=1
 pkgdesc='Create, run and share large language models (LLMs)'
@@ -20,9 +21,9 @@ source=(git+$url#commit=$_ollamacommit
         ollama.service
         sysusers.conf
         tmpfiles.d)
-b2sums=('b05df8c2ea49d332a394c2688b04bda67718f2f28d251adfaebf7cc04f7e57482275b8e5e8373d6be26b44613f16fe1c50954b55b2378309872af7935db055bb'
+b2sums=('f0bfb4bb1c1133b722bb4b185c46ee1a9b4407e7f61292cdf2f2d69b6a24078c35bd1ad4069d8602943e7e1caa85dfae0233a912408d5b6337afea720cf9f684'
         '21643fc46052e673f747606a774bb7b161e41e3c0166700281d995018003d0af573db6d7c2ddf68765449545b72b41713f9335aa3485df90871431bc66097b27'
-        '2bf4c2076b7841de266ec40da2e2cbb675dcbfebfa8aed8d4ede65435854cb43d39ea32bc9210cfc28a042382dd0094a153e351edfa5586eb7c6a0783f3bc517'
+        '18a1468f5614f9737f6ff2e6c7dfb3dfc0ba82836a98e3f14f8e544e3aba8f74ef0a03c5376a0d0aa2e59e948701d7c639dda69477b051b732896021e753e32e'
         '3aabf135c4f18e1ad745ae8800db782b25b15305dfeaaa031b4501408ab7e7d01f66e8ebb5be59fc813cfbff6788d08d2e48dcf24ecc480a40ec9db8dbce9fec'
         'e8f2b19e2474f30a4f984b45787950012668bf0acb5ad1ebb25cd9776925ab4a6aa927f8131ed53e35b1c71b32c504c700fe5b5145ecd25c7a8284373bb951ed')
 
@@ -45,16 +46,16 @@ prepare() {
 }
 
 build() {
-  export ROCM_PATH=/disabled
-  export CUDA_LIB_DIR=/disabled
-  export CGO_CFLAGS="$CFLAGS" CGO_CPPFLAGS="$CPPFLAGS" CGO_CXXFLAGS="$CXXFLAGS" CGO_LDFLAGS="$LDFLAGS"
   export CFLAGS+=' -w'
   export CXXFLAGS+=' -w'
+  export CGO_CFLAGS="$CFLAGS" CGO_CPPFLAGS="$CPPFLAGS" CGO_CXXFLAGS="$CXXFLAGS" CGO_LDFLAGS="$LDFLAGS"
 
   local goflags="-buildmode=pie -trimpath -mod=readonly -modcacherw"
-  local ldflags="-linkmode=external -buildid= -X github.com/ollama/ollama/version.Version=${pkgver}"
+  local ldflags="-linkmode=external -buildid= -X github.com/ollama/ollama/version.Version=${pkgver} -X github.com/ollama/ollama/server.mode=release"
 
   # Ollama with CPU only support
+  export ROCM_PATH=/disabled
+  export CUDA_LIB_DIR=/disabled
   cd $pkgbase
   go generate ./...
   go build $goflags -ldflags="$ldflags"
@@ -67,8 +68,8 @@ build() {
 
   # Ollama with ROCm support
   cd "$srcdir/$pkgbase-rocm"
-  export ROCM_PATH=/opt/rocm
   export CUDA_LIB_DIR=/disabled
+  export ROCM_PATH=/opt/rocm
   export CC=/opt/rocm/llvm/bin/clang
   export CFLAGS+=' -fcf-protection=none'
   export CXX=/opt/rocm/llvm/bin/clang++


=====================================
ollama.service
=====================================
@@ -6,7 +6,7 @@ After=network.target network-online.target
 [Service]
 ExecStart=/usr/bin/ollama serve
 WorkingDirectory=/var/lib/ollama
-Environment="GIN_MODE=release" "HOME=/var/lib/ollama"
+Environment="HOME=/var/lib/ollama"
 User=ollama
 Group=ollama
 Restart=on-failure



View it on GitLab: https://gitlab.archlinux.org/archlinux/packaging/packages/ollama/-/compare/b52417c777f599933fded93baacfc41481e61704...3251d06e82d4265d8d52d73bf0b202eb9982d0d5

-- 
View it on GitLab: https://gitlab.archlinux.org/archlinux/packaging/packages/ollama/-/compare/b52417c777f599933fded93baacfc41481e61704...3251d06e82d4265d8d52d73bf0b202eb9982d0d5
You're receiving this email because of your account on gitlab.archlinux.org.


Reply via email to