From c6db7f5d01c97538dcc115e712b2b8ab3e41193b Mon Sep 17 00:00:00 2001
From: ian
Date: Wed, 27 Mar 2024 18:58:19 +0700
Subject: [PATCH] pin llama-cpp-python to 0.2.55 due to
 https://github.com/abetlen/llama-cpp-python/issues/1288

---
 scripts/serve_local.py              | 6 ++----
 scripts/server_llamacpp_linux.sh    | 2 +-
 scripts/server_llamacpp_macos.sh    | 2 +-
 scripts/server_llamacpp_windows.bat | 2 +-
 4 files changed, 5 insertions(+), 7 deletions(-)

diff --git a/scripts/serve_local.py b/scripts/serve_local.py
index 61b8f77..f1f4831 100644
--- a/scripts/serve_local.py
+++ b/scripts/serve_local.py
@@ -3,9 +3,7 @@ import subprocess
 from inspect import currentframe, getframeinfo
 from pathlib import Path
 
-import dotenv
-
-configs = dotenv.dotenv_values(".env")
+from decouple import config
 
 system_name = platform.system()
 
@@ -53,7 +51,7 @@ def serve_llamacpp_python(local_model_file: Path, **kwargs):
 
 
 def main():
-    local_model_file = configs.get("LOCAL_MODEL", "")
+    local_model_file = config("LOCAL_MODEL", default="")
 
     if not local_model_file:
         print("LOCAL_MODEL not set in the `.env` file.")
diff --git a/scripts/server_llamacpp_linux.sh b/scripts/server_llamacpp_linux.sh
index f72ccde..a45e670 100755
--- a/scripts/server_llamacpp_linux.sh
+++ b/scripts/server_llamacpp_linux.sh
@@ -87,7 +87,7 @@ activate_environment
 
 # install dependencies
 # ver 0.2.56 produces segment error for /embeddings on MacOS
-python -m pip install llama-cpp-python[server]!=0.2.56
+python -m pip install llama-cpp-python[server]==0.2.55
 
 # start the server with passed params
 python -m llama_cpp.server $@
diff --git a/scripts/server_llamacpp_macos.sh b/scripts/server_llamacpp_macos.sh
index 4ed9ac2..13d0784 100755
--- a/scripts/server_llamacpp_macos.sh
+++ b/scripts/server_llamacpp_macos.sh
@@ -88,7 +88,7 @@ activate_environment
 
 # install dependencies
 # ver 0.2.56 produces segment error for /embeddings on MacOS
-python -m pip install llama-cpp-python[server]!=0.2.56
+python -m pip install llama-cpp-python[server]==0.2.55
 
 # start the server with passed params
 python -m llama_cpp.server $@
diff --git a/scripts/server_llamacpp_windows.bat b/scripts/server_llamacpp_windows.bat
index 48779db..97c1292 100644
--- a/scripts/server_llamacpp_windows.bat
+++ b/scripts/server_llamacpp_windows.bat
@@ -28,7 +28,7 @@ call :activate_environment
 
 @rem install dependencies
 @rem ver 0.2.56 produces segment error for /embeddings on MacOS
-call python -m pip install llama-cpp-python[server]!=0.2.56
+call python -m pip install llama-cpp-python[server]==0.2.55
 
 @REM @rem start the server with passed params
 call python -m llama_cpp.server %*