From fb19059b2995a2bbdc8f173176a531e4bd643a7c Mon Sep 17 00:00:00 2001
From: Michael Yang
Date: Fri, 30 Jun 2023 17:02:07 -0700
Subject: [PATCH] just in time install llama-cpp-python

---
 ollama/engine.py   |  3 +--
 ollama/platform.py | 24 ++++++++++++++++++++++++
 poetry.lock        |  4 ++--
 pyproject.toml     |  7 ++++++-
 requirements.txt   |  2 --
 5 files changed, 33 insertions(+), 7 deletions(-)
 create mode 100644 ollama/platform.py

diff --git a/ollama/engine.py b/ollama/engine.py
index db1d51c72..2ef8c905c 100644
--- a/ollama/engine.py
+++ b/ollama/engine.py
@@ -1,14 +1,13 @@
 import os
 import sys
 from os import path
-from pathlib import Path
 from contextlib import contextmanager
 
 from fuzzywuzzy import process
-from llama_cpp import Llama
 from ctransformers import AutoModelForCausalLM
 
 import ollama.prompt
 from ollama.model import MODELS_CACHE_PATH
+from ollama.platform import Llama
 
 
 @contextmanager
diff --git a/ollama/platform.py b/ollama/platform.py
new file mode 100644
index 000000000..93964312b
--- /dev/null
+++ b/ollama/platform.py
@@ -0,0 +1,24 @@
+import os
+import sys
+import importlib
+from subprocess import call
+from contextlib import contextmanager
+
+
+@contextmanager
+def preserve_environ():
+    env = os.environ.copy()
+    yield
+    os.environ = env
+
+
+if not importlib.util.find_spec('llama_cpp'):
+    with preserve_environ():
+        os.environ['FORCE_CMAKE'] = '1'
+        if os.uname().sysname.lower() == 'darwin':
+            os.environ['CMAKE_ARGS'] = '-DLLAMA_METAL=on'
+
+        call([sys.executable, '-m', 'pip', 'install', 'llama-cpp-python==0.1.67'])
+
+
+from llama_cpp import Llama  # noqa: E402
diff --git a/poetry.lock b/poetry.lock
index ccf07a8ad..f47ec01bc 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -637,7 +637,7 @@ rapidfuzz = ">=2.3.0,<4.0.0"
 name = "llama-cpp-python"
 version = "0.1.67"
 description = "A Python wrapper for llama.cpp"
-optional = false
+optional = true
 python-versions = ">=3.7"
 files = [
     {file = "llama_cpp_python-0.1.67.tar.gz", hash = "sha256:33bdcd42b30df3c21d56ce094132e1cdc0da0f8a27109f8eaf698addad02fd20"},
@@ -1211,4 +1211,4 @@ termcolor = ">=2.2,<3.0"
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.8"
-content-hash = "bd4b373e3903bd26b983163f3cc527a6f768f4280201fcbca4d4dc05dea66912"
+content-hash = "59f5dc1f3dbe73040cedac8d9484482f86f9ea8ae3793191de15014c0033debc"
diff --git a/pyproject.toml b/pyproject.toml
index fdfa4e969..ce82ddb1f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,10 +16,15 @@ requests = "^2.31.0"
 tqdm = "^4.65.0"
 validators = "^0.20.0"
 yaspin = "^2.3.0"
-llama-cpp-python = "^0.1.67"
 ctransformers = "^0.2.10"
 fuzzywuzzy = {extras = ["speedup"], version = "^0.18.0"}
 
+# required by llama-cpp-python
+llama-cpp-python = {version = "^0.1.67", optional = true}
+typing-extensions = "^4.6.3"
+numpy = "^1.24.4"
+diskcache = "^5.6.1"
+
 [build-system]
 requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"
diff --git a/requirements.txt b/requirements.txt
index c0f24b3bf..f42710234 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -385,8 +385,6 @@ levenshtein==0.21.1 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:f5f7ce639bea0f5e95a1f71963624b85521a39928a2a1bb0e66f6180facf5969 \
     --hash=sha256:f9e3a5f4386c8f1811153f309a0ba3dc47d17e81a6dd29aa22d3e10212a2fd73 \
     --hash=sha256:ffa6762f8ef1e7dfba101babe43de6edc541cbe64d33d816314ac67cd76c3979
-llama-cpp-python==0.1.67 ; python_version >= "3.8" and python_version < "4.0" \
-    --hash=sha256:33bdcd42b30df3c21d56ce094132e1cdc0da0f8a27109f8eaf698addad02fd20
 markupsafe==2.1.3 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
     --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \