Provide type hints for pass-through Langchain and Llama-index objects (#95)

commit 0ce3a8832f (parent e34b1e4c6d)
Author: Duc Nguyen (john)
Date: 2023-12-04 10:59:13 +07:00 (committed via GitHub)

34 changed files with 641 additions and 310 deletions
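The diffs below replace deep-module imports (e.g. `kotaemon.llms.chats.openai`) with package-level imports and rename the wrapper attribute `agent` to `_obj`, which now carries a type hint for the wrapped Langchain/Llama-index object. As a rough illustration of that pattern, here is a minimal sketch of a typed pass-through wrapper; the class body, constructor, and caching are assumptions for illustration only, not the actual kotaemon implementation.

# Minimal sketch of a typed pass-through wrapper (assumed structure, not the
# actual kotaemon code): `_obj` exposes the wrapped Langchain object with a
# concrete type hint so type checkers and editors can resolve its attributes.
from typing import Optional

from langchain.chat_models import AzureChatOpenAI as AzureChatOpenAILC


class AzureChatOpenAI:
    """Thin wrapper that defers calls to a Langchain chat model."""

    def __init__(self, **params):
        self._params = params
        self._wrapped: Optional[AzureChatOpenAILC] = None

    @property
    def _obj(self) -> AzureChatOpenAILC:
        # Lazily build and cache the underlying Langchain object; callers can
        # hand it straight to Langchain APIs, e.g. initialize_agent(tools, llm._obj, ...).
        if self._wrapped is None:
            self._wrapped = AzureChatOpenAILC(**self._params)
        return self._wrapped

With a concrete return type on `_obj`, assertions such as `isinstance(model._obj, AzureChatOpenAILC)` in the tests below are straightforward, and the raw Langchain object can be passed to `initialize_agent` without casts.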

View File

@@ -6,7 +6,7 @@ from theflow.utils.modules import ObjectInitDeclaration as _
from kotaemon.base import BaseComponent
from kotaemon.embeddings import AzureOpenAIEmbeddings
from kotaemon.indices import VectorRetrieval
-from kotaemon.llms.completions.openai import AzureOpenAI
+from kotaemon.llms import AzureOpenAI
from kotaemon.storages import ChromaVectorStore

View File

@@ -8,7 +8,7 @@ from kotaemon.agents.langchain import LangchainAgent
from kotaemon.agents.react import ReactAgent
from kotaemon.agents.rewoo import RewooAgent
from kotaemon.agents.tools import BaseTool, GoogleSearchTool, LLMTool, WikipediaTool
-from kotaemon.llms.chats.openai import AzureChatOpenAI
+from kotaemon.llms import AzureChatOpenAI
FINAL_RESPONSE_TEXT = "Final Answer: Hello Cinnamon AI!"
@@ -195,7 +195,7 @@ def test_react_agent_langchain(openai_completion, llm, mock_google_search):
langchain_plugins = [tool.to_langchain_format() for tool in plugins]
agent = initialize_agent(
langchain_plugins,
-llm.agent,
+llm._obj,
agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
verbose=True,
)

View File

@@ -5,7 +5,7 @@ import pytest
from openai.types.chat.chat_completion import ChatCompletion
from kotaemon.indices.qa import CitationPipeline
-from kotaemon.llms.chats.openai import AzureChatOpenAI
+from kotaemon.llms import AzureChatOpenAI
function_output = '{\n "question": "What is the provided _example_ benefits?",\n "answer": [\n {\n "fact": "特約死亡保険金: 被保険者がこの特約の保険期間中に死亡したときに支払います。",\n "substring_quote": ["特約死亡保険金"]\n },\n {\n "fact": "特約特定疾病保険金: 被保険者がこの特約の保険期間中に特定の疾病(悪性新生物(がん)、急性心筋梗塞または脳卒中)により所定の状態に該当したときに支払います。",\n "substring_quote": ["特約特定疾病保険金"]\n },\n {\n "fact": "特約障害保険金: 被保険者がこの特約の保険期間中に傷害もしくは疾病により所定の身体障害の状態に該当したとき、または不慮の事故により所定の身体障害の状態に該当したときに支払います。",\n "substring_quote": ["特約障害保険金"]\n },\n {\n "fact": "特約介護保険金: 被保険者がこの特約の保険期間中に傷害または疾病により所定の要介護状態に該当したときに支払います。",\n "substring_quote": ["特約介護保険金"]\n }\n ]\n}'

View File

@@ -3,9 +3,11 @@ from pathlib import Path
from unittest.mock import patch
from kotaemon.base import Document
-from kotaemon.embeddings.cohere import CohereEmbdeddings
-from kotaemon.embeddings.huggingface import HuggingFaceEmbeddings
-from kotaemon.embeddings.openai import AzureOpenAIEmbeddings
+from kotaemon.embeddings import (
+    AzureOpenAIEmbeddings,
+    CohereEmbdeddings,
+    HuggingFaceEmbeddings,
+)
with open(Path(__file__).parent / "resources" / "embedding_openai_batch.json") as f:
openai_embedding_batch = json.load(f)
@@ -60,7 +62,7 @@ def test_azureopenai_embeddings_batch_raw(openai_embedding_call):
"langchain.embeddings.huggingface.HuggingFaceBgeEmbeddings.embed_documents",
side_effect=lambda *args, **kwargs: [[1.0, 2.1, 3.2]],
)
-def test_huggingface_embddings(
+def test_huggingface_embeddings(
langchain_huggingface_embedding_call, sentence_transformers_init
):
model = HuggingFaceEmbeddings(

View File

@@ -6,7 +6,7 @@ import pytest
from openai.resources.embeddings import Embeddings
from kotaemon.base import Document
-from kotaemon.embeddings.openai import AzureOpenAIEmbeddings
+from kotaemon.embeddings import AzureOpenAIEmbeddings
from kotaemon.indices import VectorIndexing, VectorRetrieval
from kotaemon.storages import ChromaVectorStore, InMemoryDocumentStore

View File

@@ -9,7 +9,7 @@ from kotaemon.base.schema import (
LLMInterface,
SystemMessage,
)
-from kotaemon.llms.chats.openai import AzureChatOpenAI
+from kotaemon.llms import AzureChatOpenAI
_openai_chat_completion_response = ChatCompletion.parse_obj(
{
@@ -48,7 +48,7 @@ def test_azureopenai_model(openai_completion):
temperature=0,
)
assert isinstance(
-model.agent, AzureChatOpenAILC
+model._obj, AzureChatOpenAILC
), "Agent not wrapped in Langchain's AzureChatOpenAI"
# test for str input - stream mode

View File

@@ -5,7 +5,7 @@ from langchain.llms import OpenAI as OpenAILC
from openai.types.completion import Completion
from kotaemon.base.schema import LLMInterface
-from kotaemon.llms.completions.openai import AzureOpenAI, OpenAI
+from kotaemon.llms import AzureOpenAI, OpenAI
_openai_completion_response = Completion.parse_obj(
{
@@ -41,7 +41,7 @@ def test_azureopenai_model(openai_completion):
request_timeout=60,
)
assert isinstance(
-model.agent, AzureOpenAILC
+model._obj, AzureOpenAILC
), "Agent not wrapped in Langchain's AzureOpenAI"
output = model("hello world")
@@ -64,7 +64,7 @@ def test_openai_model(openai_completion):
request_timeout=60,
)
assert isinstance(
-model.agent, OpenAILC
+model._obj, OpenAILC
), "Agent is not wrapped in Langchain's OpenAI"
output = model("hello world")

View File

@@ -5,7 +5,7 @@ from openai.types.chat.chat_completion import ChatCompletion
from kotaemon.base import Document
from kotaemon.indices.rankings import LLMReranking
-from kotaemon.llms.chats.openai import AzureChatOpenAI
+from kotaemon.llms import AzureChatOpenAI
_openai_chat_completion_responses = [
ChatCompletion.parse_obj(

View File

@@ -6,7 +6,7 @@ from openai.resources.embeddings import Embeddings
from kotaemon.agents.tools import ComponentTool, GoogleSearchTool, WikipediaTool
from kotaemon.base import Document
-from kotaemon.embeddings.openai import AzureOpenAIEmbeddings
+from kotaemon.embeddings import AzureOpenAIEmbeddings
from kotaemon.indices.vectorindex import VectorIndexing, VectorRetrieval
from kotaemon.storages import ChromaVectorStore, InMemoryDocumentStore