Alexander Rødseth pushed to branch main at Arch Linux / Packaging / Packages / ollama


Commits:
164a4be1 by Alexander F. Rødseth at 2024-04-18T00:53:19+02:00
upgpkg: 0.1.32-1

- - - - -


3 changed files:

- .SRCINFO
- PKGBUILD
- ollama.service


Changes:

=====================================
.SRCINFO
=====================================
@@ -1,6 +1,6 @@
 pkgbase = ollama
        pkgdesc = Create, run and share large language models (LLMs)
-       pkgver = 0.1.31
+       pkgver = 0.1.32
        pkgrel = 1
        url = https://github.com/ollama/ollama
        arch = x86_64
@@ -8,14 +8,14 @@ pkgbase = ollama
        makedepends = cmake
        makedepends = git
        makedepends = go
-       source = git+https://github.com/ollama/ollama#commit=dc011d16b9ff160c0be3829fc39a43054f0315d0
-       source = llama.cpp::git+https://github.com/ggerganov/llama.cpp#commit=ad3a0505e3b6cd777259ee35e61d428357ffc565
+       source = git+https://github.com/ollama/ollama#commit=fb9580df85c562295d919b6c2632117d3d8cea89
+       source = llama.cpp::git+https://github.com/ggerganov/llama.cpp#commit=7593639ce335e8d7f89aa9a54d616951f273af60
        source = ollama.service
        source = sysusers.conf
        source = tmpfiles.d
-       b2sums = f3659e606be342822b13021b2468c77c9de47bb54a97bd16640196d8d49590fbabe100e45964c59564dca8e0dda71f117f9c4ebd5a16d07fdcd066ce296b25dc
-       b2sums = eaa62efe3549124d055cad5761343c06f8059db93445e639c4e072d234a998691999145116f33d86213a380a946bdef8a05167f168be3a47427ca1556c8e7634
-       b2sums = a773bbf16cf5ccc2ee505ad77c3f9275346ddf412be283cfeaee7c2e4c41b8637a31aaff8766ed769524ebddc0c03cf924724452639b62208e578d98b9176124
+       b2sums = 12b952df20a194e7b41040574b449aa9781dc40e9316d6d459caf0228c51c588d93a79223fe4009ca6532dffb9846853a6e74769db15c25135032bf5ccfde65e
+       b2sums = de8f50fb5a99a0251f6a0f3d596975cf43ad76770ac2edeaac6497b81214a7f31574b499f10b77f0542f9d46c5d33e0f5ac1e49fa72343d860c70fd3ca0ca113
+       b2sums = 2bf4c2076b7841de266ec40da2e2cbb675dcbfebfa8aed8d4ede65435854cb43d39ea32bc9210cfc28a042382dd0094a153e351edfa5586eb7c6a0783f3bc517
        b2sums = 3aabf135c4f18e1ad745ae8800db782b25b15305dfeaaa031b4501408ab7e7d01f66e8ebb5be59fc813cfbff6788d08d2e48dcf24ecc480a40ec9db8dbce9fec
        b2sums = e8f2b19e2474f30a4f984b45787950012668bf0acb5ad1ebb25cd9776925ab4a6aa927f8131ed53e35b1c71b32c504c700fe5b5145ecd25c7a8284373bb951ed
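
Note: the pkgver bump, the new source pins, and the regenerated b2sums above are the mechanical part of this update. A minimal sketch of how such a refresh is commonly produced in a packaging checkout (standard pacman / pacman-contrib tooling, not necessarily the exact commands used for this commit):

  updpkgsums                           # from pacman-contrib: rewrites the b2sums=() array in the PKGBUILD
  makepkg --printsrcinfo > .SRCINFO    # regenerates .SRCINFO from the updated PKGBUILD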
 


=====================================
PKGBUILD
=====================================
@@ -4,24 +4,24 @@
 
 pkgname=ollama
 pkgdesc='Create, run and share large language models (LLMs)'
-pkgver=0.1.31
+pkgver=0.1.32
 pkgrel=1
 arch=(x86_64)
 url='https://github.com/ollama/ollama'
 license=(MIT)
-_ollamacommit=dc011d16b9ff160c0be3829fc39a43054f0315d0 # tag: v0.1.31
+_ollamacommit=fb9580df85c562295d919b6c2632117d3d8cea89 # tag: v0.1.32
 # The llama.cpp git submodule commit hash can be found here:
-# https://github.com/ollama/ollama/tree/v0.1.31/llm
-_llama_cpp_commit=ad3a0505e3b6cd777259ee35e61d428357ffc565
+# https://github.com/ollama/ollama/tree/v0.1.32/llm
+_llama_cpp_commit=7593639ce335e8d7f89aa9a54d616951f273af60
 makedepends=(cmake git go)
 source=(git+$url#commit=$_ollamacommit
         llama.cpp::git+https://github.com/ggerganov/llama.cpp#commit=$_llama_cpp_commit
         ollama.service
         sysusers.conf
         tmpfiles.d)
-b2sums=('f3659e606be342822b13021b2468c77c9de47bb54a97bd16640196d8d49590fbabe100e45964c59564dca8e0dda71f117f9c4ebd5a16d07fdcd066ce296b25dc'
-        'eaa62efe3549124d055cad5761343c06f8059db93445e639c4e072d234a998691999145116f33d86213a380a946bdef8a05167f168be3a47427ca1556c8e7634'
-        'a773bbf16cf5ccc2ee505ad77c3f9275346ddf412be283cfeaee7c2e4c41b8637a31aaff8766ed769524ebddc0c03cf924724452639b62208e578d98b9176124'
+b2sums=('12b952df20a194e7b41040574b449aa9781dc40e9316d6d459caf0228c51c588d93a79223fe4009ca6532dffb9846853a6e74769db15c25135032bf5ccfde65e'
+        'de8f50fb5a99a0251f6a0f3d596975cf43ad76770ac2edeaac6497b81214a7f31574b499f10b77f0542f9d46c5d33e0f5ac1e49fa72343d860c70fd3ca0ca113'
+        '2bf4c2076b7841de266ec40da2e2cbb675dcbfebfa8aed8d4ede65435854cb43d39ea32bc9210cfc28a042382dd0094a153e351edfa5586eb7c6a0783f3bc517'
         '3aabf135c4f18e1ad745ae8800db782b25b15305dfeaaa031b4501408ab7e7d01f66e8ebb5be59fc813cfbff6788d08d2e48dcf24ecc480a40ec9db8dbce9fec'
         'e8f2b19e2474f30a4f984b45787950012668bf0acb5ad1ebb25cd9776925ab4a6aa927f8131ed53e35b1c71b32c504c700fe5b5145ecd25c7a8284373bb951ed')
 
@@ -35,9 +35,6 @@ prepare() {
 
   # Turn LTO on and set the build type to Release
   sed -i 's,T_CODE=on,T_CODE=on -D LLAMA_LTO=on -D CMAKE_BUILD_TYPE=Release,g' llm/generate/gen_linux.sh
-
-  # Display a more helpful error message
-  sed -i "s|could not connect to ollama server, run 'ollama serve' to start it|ollama is not running, try 'systemctl start ollama'|g" cmd/cmd.go
 }
 
 build() {
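
Note: the PKGBUILD comment above points at the GitHub tree view to look up the pinned llama.cpp submodule commit. A small sketch of reading the same pin from a local checkout instead, assuming the submodule path is llm/llama.cpp as that comment suggests:

  git clone --depth 1 --branch v0.1.32 https://github.com/ollama/ollama
  cd ollama
  git ls-tree HEAD llm/llama.cpp   # the gitlink entry shows the pinned commit, e.g. "160000 commit 7593639c... llm/llama.cpp"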


=====================================
ollama.service
=====================================
@@ -6,7 +6,7 @@ After=network.target network-online.target
 [Service]
 ExecStart=/usr/bin/ollama serve
 WorkingDirectory=/var/lib/ollama
-Environment="HOME=/var/lib/ollama" "GIN_MODE=release"
+Environment="GIN_MODE=release" "HOME=/var/lib/ollama"
 User=ollama
 Group=ollama
 Restart=on-failure
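
Note: the two Environment= orderings are equivalent to systemd, so this reorder is purely cosmetic. If you want to confirm what the running unit exports after the upgrade, one possible check (illustrative output):

  systemctl show ollama.service -p Environment
  # Environment=GIN_MODE=release HOME=/var/lib/ollama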



View it on GitLab: https://gitlab.archlinux.org/archlinux/packaging/packages/ollama/-/commit/164a4be1eed4f8637313d7a43482403fac0b24c4
