Improve LLM selection for simple reasoning pipeline

This commit is contained in:
ian
2024-03-28 16:35:13 +07:00
parent b2089245f2
commit f9cc40ca25
5 changed files with 44 additions and 13 deletions

View File

@@ -17,8 +17,7 @@ if machine == "x86_64":
BINARY_REMOTE_NAME = f"frpc_{platform.system().lower()}_{machine.lower()}"
EXTENSION = ".exe" if os.name == "nt" else ""
BINARY_URL = (
"some-endpoint.com"
f"/kotaemon/tunneling/{VERSION}/{BINARY_REMOTE_NAME}{EXTENSION}"
"some-endpoint.com" f"/kotaemon/tunneling/{VERSION}/{BINARY_REMOTE_NAME}{EXTENSION}"
)
BINARY_FILENAME = f"{BINARY_REMOTE_NAME}_v{VERSION}"

View File

@@ -194,7 +194,6 @@ class ChatOpenAI(LCChatMixin, ChatLLM): # type: ignore
class AzureChatOpenAI(LCChatMixin, ChatLLM): # type: ignore
def __init__(
self,
azure_endpoint: str | None = None,