Migrate the MVP into kotaemon (#108)

- Migrate the MVP into kotaemon.
- Preliminarily include the pipeline within the chatbot interface.
- Organize the MVP as an application.

Todo:

- Add an info panel to view the agents' planning -> fix streaming of the agents' output.

Resolves: #60
Resolves: #61
Resolves: #62
Author: Duc Nguyen (john)
Date: 2024-01-10 15:28:09 +07:00
Committed by: GitHub
Parent: 230328c62f
Commit: 5a9d6f75be
31 changed files with 273 additions and 92 deletions


@@ -15,15 +15,23 @@ class LCChatMixin:
             "Please return the relevant Langchain class in in _get_lc_class"
         )

-    def __init__(self, **params):
+    def __init__(self, stream: bool = False, **params):
         self._lc_class = self._get_lc_class()
         self._obj = self._lc_class(**params)
         self._kwargs: dict = params
+        self._stream = stream

         super().__init__()

     def run(
         self, messages: str | BaseMessage | list[BaseMessage], **kwargs
     ) -> LLMInterface:
+        if self._stream:
+            return self.stream(messages, **kwargs)  # type: ignore
+        return self.invoke(messages, **kwargs)
+
+    def invoke(
+        self, messages: str | BaseMessage | list[BaseMessage], **kwargs
+    ) -> LLMInterface:
         """Generate response from messages
@@ -68,6 +76,10 @@ class LCChatMixin:
             logits=[],
         )

+    def stream(self, messages: str | BaseMessage | list[BaseMessage], **kwargs):
+        for response in self._obj.stream(input=messages, **kwargs):
+            yield LLMInterface(content=response.content)
+
     def to_langchain_format(self):
         return self._obj
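
For orientation, a rough usage sketch of the new flag: imports and constructor kwargs are omitted because they are not shown in this diff (azure_kwargs is a placeholder), and only stream, run(), invoke(), and LLMInterface.content come from the changes above.

    # Sketch only: `AzureChatOpenAI` is the kotaemon wrapper changed below; kwargs are
    # forwarded verbatim to the underlying Langchain class and are placeholders.
    llm = AzureChatOpenAI(stream=True, **azure_kwargs)
    for chunk in llm.run("Hello"):                # run() now delegates to stream()
        print(chunk.content, end="", flush=True)  # each chunk is an LLMInterface

    blocking = AzureChatOpenAI(stream=False, **azure_kwargs)
    result = blocking.run("Hello")                # run() falls back to invoke(): one LLMInterface
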
@@ -150,6 +162,9 @@ class AzureChatOpenAI(LCChatMixin, ChatLLM):
         )

     def _get_lc_class(self):
-        import langchain.chat_models
-
-        return langchain.chat_models.AzureChatOpenAI
+        try:
+            from langchain_community.chat_models import AzureChatOpenAI
+        except ImportError:
+            from langchain.chat_models import AzureChatOpenAI
+
+        return AzureChatOpenAI
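
The lazy try/except in _get_lc_class keeps the wrapper working across the Langchain packaging split; a minimal standalone sketch of the same pattern, assuming some version of Langchain is installed (nothing else here is taken from this diff):

    # Prefer the split-out langchain-community package; fall back to the legacy import.
    try:
        from langchain_community.chat_models import AzureChatOpenAI
    except ImportError:  # older Langchain releases without the community split
        from langchain.chat_models import AzureChatOpenAI

    print(AzureChatOpenAI)  # resolves to whichever distribution is installed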