Migrate the MVP into kotaemon (#108)

- Migrate the MVP into kotaemon.
- Preliminarily include the pipeline within the chatbot interface.
- Organize MVP as an application.

Todo:

- Add an info panel to view the agents' planning -> fix streaming of the agents' output.

Resolve: #60
Resolve: #61 
Resolve: #62
This commit is contained in:
Duc Nguyen (john)
2024-01-10 15:28:09 +07:00
committed by GitHub
parent 230328c62f
commit 5a9d6f75be
31 changed files with 273 additions and 92 deletions

View File

@@ -41,7 +41,7 @@ class BaseTool(BaseComponent):
args_schema = self.args_schema
if isinstance(tool_input, str):
if args_schema is not None:
key_ = next(iter(args_schema.__fields__.keys()))
key_ = next(iter(args_schema.model_fields.keys()))
args_schema.validate({key_: tool_input})
return tool_input
else:
@@ -121,9 +121,11 @@ class BaseTool(BaseComponent):
class ComponentTool(BaseTool):
"""
A Tool based on another pipeline / BaseComponent to be used
as its main entry point
"""Wrapper around other BaseComponent to use it as a tool
Args:
component: BaseComponent-based component to wrap
postprocessor: Optional postprocessor for the component output
"""
component: BaseComponent

View File

@@ -1,13 +1,11 @@
from typing import AnyStr, Optional, Type, Union
from typing import AnyStr, Optional, Type
from pydantic import BaseModel, Field
from kotaemon.llms import LLM, AzureChatOpenAI, ChatLLM
from kotaemon.llms import BaseLLM
from .base import BaseTool, ToolException
BaseLLM = Union[ChatLLM, LLM]
class LLMArgs(BaseModel):
query: str = Field(..., description="a search question or prompt")
@@ -21,7 +19,7 @@ class LLMTool(BaseTool):
"are confident in solving the problem "
"yourself. Input can be any instruction."
)
llm: BaseLLM = AzureChatOpenAI.withx()
llm: BaseLLM
args_schema: Optional[Type[BaseModel]] = LLMArgs
def _run_tool(self, query: AnyStr) -> str: