From 46b646b48d7a9f5a23ccc6e9ea323a8b0ac5290f Mon Sep 17 00:00:00 2001
From: HUMORCE
Date: Sat, 28 Sep 2024 07:02:14 +0000
Subject: [PATCH] ollama: Switch to cli build

---
 bucket/ollama.json | 36 ++++++++++++++++++++++--------------
 1 file changed, 22 insertions(+), 14 deletions(-)

diff --git a/bucket/ollama.json b/bucket/ollama.json
index ecc690fe5a..661c208d9b 100644
--- a/bucket/ollama.json
+++ b/bucket/ollama.json
@@ -3,26 +3,34 @@
     "description": "Get up and running with large language models locally.",
     "homepage": "https://ollama.com/",
     "license": "MIT",
-    "url": "https://github.com/ollama/ollama/releases/download/v0.3.12/OllamaSetup.exe",
-    "hash": "b502efeb282f7121d4e372a4208645feeb464aed8ee01f2c677986b1f1954e85",
-    "innosetup": true,
+    "suggest": {
+        "Ollama Full": "extras/ollama-full"
+    },
+    "architecture": {
+        "64bit": {
+            "url": "https://github.com/ollama/ollama/releases/download/v0.3.12/ollama-windows-amd64.zip",
+            "hash": "0798a0630edaa8cb2b667a54f5566bdbd49be47820f75a9209ad0145f8f6a74e"
+        },
+        "arm64": {
+            "url": "https://github.com/ollama/ollama/releases/download/v0.3.12/ollama-windows-amd64.zip",
+            "hash": "0798a0630edaa8cb2b667a54f5566bdbd49be47820f75a9209ad0145f8f6a74e"
+        }
+    },
     "bin": "ollama.exe",
-    "shortcuts": [
-        [
-            "ollama app.exe",
-            "Ollama",
-            "",
-            "app.ico"
-        ]
-    ],
     "checkver": {
         "github": "https://github.com/ollama/ollama"
     },
     "autoupdate": {
-        "url": "https://github.com/ollama/ollama/releases/download/v$version/OllamaSetup.exe",
+        "architecture": {
+            "64bit": {
+                "url": "https://github.com/ollama/ollama/releases/download/v$version/ollama-windows-amd64.zip"
+            },
+            "arm64": {
+                "url": "https://github.com/ollama/ollama/releases/download/v$version/ollama-windows-amd64.zip"
+            }
+        },
         "hash": {
             "url": "$baseurl/sha256sum.txt"
         }
-    },
-    "notes": "You should have at least 8 GB of RAM available to run the 7B models, 16 GB to run the 13B models, and 32 GB to run the 33B models."
+    }
 }
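
For context, the manifest's autoupdate block now resolves hashes from the release's sha256sum.txt. A minimal sketch, not part of the patch, of how the zip hash pinned above can be cross-checked against that file; the version, local file name, and the usual "<hash>  <filename>" sha256sum layout are assumptions:

    import hashlib
    import urllib.request

    # Assumed pinned version and local archive name, mirroring the manifest above.
    version = "0.3.12"
    zip_name = "ollama-windows-amd64.zip"
    base_url = f"https://github.com/ollama/ollama/releases/download/v{version}"

    # Fetch the checksum list that the manifest's autoupdate "hash" URL points to.
    with urllib.request.urlopen(f"{base_url}/sha256sum.txt") as resp:
        sums = resp.read().decode()

    # Each line is assumed to follow the sha256sum layout: "<hash>  <filename>".
    expected = next(
        line.split()[0]
        for line in sums.splitlines()
        if line.strip().endswith(zip_name)
    )

    # Hash the locally downloaded archive and compare with the published value.
    with open(zip_name, "rb") as f:
        actual = hashlib.sha256(f.read()).hexdigest()

    print("OK" if actual == expected else f"hash mismatch: {actual} != {expected}")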