Just-in-time install of llama-cpp-python
This commit is contained in:
parent
23c589d578
commit
fb19059b29
|
|
@ -1,14 +1,13 @@
|
|||
import os
|
||||
import sys
|
||||
from os import path
|
||||
from pathlib import Path
|
||||
from contextlib import contextmanager
|
||||
from fuzzywuzzy import process
|
||||
from llama_cpp import Llama
|
||||
from ctransformers import AutoModelForCausalLM
|
||||
|
||||
import ollama.prompt
|
||||
from ollama.model import MODELS_CACHE_PATH
|
||||
from ollama.platform import Llama
|
||||
|
||||
|
||||
@contextmanager
|
||||
|
|
|
|||
|
|
@ -0,0 +1,24 @@
|
|||
import importlib
import importlib.util
import os
import sys
from contextlib import contextmanager
from subprocess import call, check_call
|
||||
|
||||
|
||||
@contextmanager
def preserve_environ():
    """Context manager that restores os.environ to its prior state on exit.

    Any variables added, changed, or removed inside the ``with`` body are
    rolled back, even if the body raises.
    """
    saved = os.environ.copy()
    try:
        yield
    finally:
        # Mutate the existing mapping in place. Rebinding ``os.environ`` to a
        # plain dict copy would break the putenv()/C-level synchronization and
        # leave every other module holding the stale _Environ object.
        os.environ.clear()
        os.environ.update(saved)
|
||||
|
||||
|
||||
# Install llama-cpp-python on first import so the base package does not have
# to ship a compiled llama.cpp wheel for every platform.
if importlib.util.find_spec('llama_cpp') is None:
    with preserve_environ():
        # Force a from-source CMake build so the wheel matches this machine.
        os.environ['FORCE_CMAKE'] = '1'
        # os.uname() only exists on Unix; sys.platform is the portable check.
        if sys.platform == 'darwin':
            # Enable Metal (GPU) acceleration on macOS builds.
            os.environ['CMAKE_ARGS'] = '-DLLAMA_METAL=on'

        # check_call raises CalledProcessError on a failed install instead of
        # silently deferring the failure to an opaque ImportError below.
        check_call([sys.executable, '-m', 'pip', 'install', 'llama-cpp-python==0.1.67'])


from llama_cpp import Llama  # noqa: E402
|
||||
|
|
@ -637,7 +637,7 @@ rapidfuzz = ">=2.3.0,<4.0.0"
|
|||
name = "llama-cpp-python"
|
||||
version = "0.1.67"
|
||||
description = "A Python wrapper for llama.cpp"
|
||||
optional = false
|
||||
optional = true
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "llama_cpp_python-0.1.67.tar.gz", hash = "sha256:33bdcd42b30df3c21d56ce094132e1cdc0da0f8a27109f8eaf698addad02fd20"},
|
||||
|
|
@ -1211,4 +1211,4 @@ termcolor = ">=2.2,<3.0"
|
|||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.8"
|
||||
content-hash = "bd4b373e3903bd26b983163f3cc527a6f768f4280201fcbca4d4dc05dea66912"
|
||||
content-hash = "59f5dc1f3dbe73040cedac8d9484482f86f9ea8ae3793191de15014c0033debc"
|
||||
|
|
|
|||
|
|
@ -16,10 +16,15 @@ requests = "^2.31.0"
|
|||
tqdm = "^4.65.0"
|
||||
validators = "^0.20.0"
|
||||
yaspin = "^2.3.0"
|
||||
llama-cpp-python = "^0.1.67"
|
||||
ctransformers = "^0.2.10"
|
||||
fuzzywuzzy = {extras = ["speedup"], version = "^0.18.0"}
|
||||
|
||||
# required by llama-cpp-python
|
||||
llama-cpp-python = {version = "^0.1.67", optional = true}
|
||||
typing-extensions = "^4.6.3"
|
||||
numpy = "^1.24.4"
|
||||
diskcache = "^5.6.1"
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
|
|
|
|||
|
|
@ -385,8 +385,6 @@ levenshtein==0.21.1 ; python_version >= "3.8" and python_version < "4.0" \
|
|||
--hash=sha256:f5f7ce639bea0f5e95a1f71963624b85521a39928a2a1bb0e66f6180facf5969 \
|
||||
--hash=sha256:f9e3a5f4386c8f1811153f309a0ba3dc47d17e81a6dd29aa22d3e10212a2fd73 \
|
||||
--hash=sha256:ffa6762f8ef1e7dfba101babe43de6edc541cbe64d33d816314ac67cd76c3979
|
||||
llama-cpp-python==0.1.67 ; python_version >= "3.8" and python_version < "4.0" \
|
||||
--hash=sha256:33bdcd42b30df3c21d56ce094132e1cdc0da0f8a27109f8eaf698addad02fd20
|
||||
markupsafe==2.1.3 ; python_version >= "3.8" and python_version < "4.0" \
|
||||
--hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
|
||||
--hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \
|
||||
|
|
|
|||
Loading…
Reference in New Issue