kotaemon/knowledgehub/llms/chats/base.py
Duc Nguyen (john) 0e30dcbb06 Create Langchain LLM converter to quickly supply it to Langchain's chain (#102)
* Create Langchain LLM converter to quickly supply it to Langchain's chain

* Clean up
2023-12-11 14:55:56 +07:00

23 lines
551 B
Python

from __future__ import annotations

import logging

from kotaemon.base import BaseComponent
from kotaemon.llms.base import BaseLLM

logger = logging.getLogger(__name__)


class ChatLLM(BaseLLM):
    def flow(self):
        if self.inflow is None:
            raise ValueError("No inflow provided.")

        if not isinstance(self.inflow, BaseComponent):
            raise ValueError(
                f"inflow must be a BaseComponent, found {type(self.inflow)}"
            )

        text = self.inflow.flow().text
        return self.__call__(text)
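
For context, ChatLLM.flow() is a small piping helper: it takes whatever component is wired in as inflow, pulls the text out of that component's flow() result, and feeds it to the LLM's own __call__. The sketch below is a minimal, self-contained illustration of that pattern; SimplePrompt, EchoChatLLM, and TextOutput are hypothetical stand-ins (not kotaemon classes) that only mirror the interfaces flow() relies on, so it runs without the library installed.

# Hypothetical sketch of the ChatLLM.flow() piping pattern shown above.
# None of these names come from kotaemon; they mimic an inflow whose
# .flow() returns an object with .text, and an LLM whose __call__ maps
# that text to a reply.
from dataclasses import dataclass


@dataclass
class TextOutput:
    text: str


class SimplePrompt:
    """Plays the role of the inflow component."""

    def flow(self) -> TextOutput:
        return TextOutput(text="What does the Langchain LLM converter do?")


class EchoChatLLM:
    """Plays the role of a concrete ChatLLM: __call__ produces the reply."""

    def __init__(self, inflow=None):
        self.inflow = inflow

    def __call__(self, text: str) -> TextOutput:
        return TextOutput(text=f"echo: {text}")

    def flow(self) -> TextOutput:
        # Same logic as ChatLLM.flow(): take the inflow's text, pass it to __call__.
        if self.inflow is None:
            raise ValueError("No inflow provided.")
        return self(self.inflow.flow().text)


print(EchoChatLLM(inflow=SimplePrompt()).flow().text)
# -> echo: What does the Langchain LLM converter do?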