[AUR-361] Setup pre-commit, pytest, GitHub actions, ssh-secret (#3)

Co-authored-by: trducng <trungduc1992@gmail.com>
Author: ian_Cin
Date: 2023-08-30 07:22:01 +07:00
Committed by: GitHub
Parent: c3c25db48c
Commit: 5241edbc46
19 changed files with 268 additions and 54 deletions

View File

@@ -1,15 +1,10 @@
 from unittest.mock import patch

 from langchain.chat_models import AzureChatOpenAI as AzureChatOpenAILC
-from langchain.schema.messages import (
-    SystemMessage,
-    HumanMessage,
-    AIMessage,
-)
+from langchain.schema.messages import AIMessage, HumanMessage, SystemMessage

-from kotaemon.llms.chats.openai import AzureChatOpenAI
 from kotaemon.llms.base import LLMInterface
+from kotaemon.llms.chats.openai import AzureChatOpenAI

 _openai_chat_completion_response = {
     "id": "chatcmpl-7qyuw6Q1CFCpcKsMdFkmUPUa7JP2x",
@@ -49,7 +44,9 @@ def test_azureopenai_model(openai_completion):
     # test for str input - stream mode
     output = model("hello world")
-    assert isinstance(output, LLMInterface), "Output for single text is not LLMInterface"
+    assert isinstance(
+        output, LLMInterface
+    ), "Output for single text is not LLMInterface"
     openai_completion.assert_called()

     # test for list[str] input - batch mode
@@ -67,7 +64,9 @@ def test_azureopenai_model(openai_completion):
     ]
     output = model(messages)
-    assert isinstance(output, LLMInterface), "Output for single text is not LLMInterface"
+    assert isinstance(
+        output, LLMInterface
+    ), "Output for single text is not LLMInterface"
     openai_completion.assert_called()

     # test for list[list[message]] input - batch mode
@@ -75,4 +74,3 @@ def test_azureopenai_model(openai_completion):
     assert isinstance(output, list), "Output for batch string is not a list"
     assert isinstance(output[0], LLMInterface), "Output for text is not LLMInterface"
     openai_completion.assert_called()
-
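
The wrapped asserts above are Black's doing: the one-line form runs past the default 88-column limit, so the pre-commit hook splits it. For orientation, here is a minimal sketch of how a mocked chat test like this one is typically assembled; the patch target, the constructor arguments, and all fields of the canned payload other than the "id" are assumptions for illustration, not values taken from this commit.

from unittest.mock import patch

from kotaemon.llms.base import LLMInterface
from kotaemon.llms.chats.openai import AzureChatOpenAI

# Canned payload; only the "id" is visible in the diff, the remaining
# fields are illustrative of the OpenAI chat-completion response shape.
_openai_chat_completion_response = {
    "id": "chatcmpl-7qyuw6Q1CFCpcKsMdFkmUPUa7JP2x",
    "object": "chat.completion",
    "created": 1692338378,
    "model": "gpt-35-turbo",
    "choices": [
        {
            "index": 0,
            "finish_reason": "stop",
            "message": {"role": "assistant", "content": "Hello!"},
        }
    ],
    "usage": {"completion_tokens": 2, "prompt_tokens": 2, "total_tokens": 4},
}


# Hypothetical patch target: the diff only shows the `openai_completion`
# mock argument, so the exact dotted path being patched is assumed here
# (this one exists in the pre-1.0 openai SDK).
@patch(
    "openai.api_resources.chat_completion.ChatCompletion.create",
    return_value=_openai_chat_completion_response,
)
def test_azureopenai_model(openai_completion):
    model = AzureChatOpenAI(
        openai_api_base="https://example.openai.azure.com/",  # placeholder
        openai_api_key="dummy",  # placeholder credential
        openai_api_version="2023-03-15-preview",  # placeholder version
        deployment_name="dummy-deployment",  # placeholder deployment
    )

    # str input - the assertion mirrors the one reformatted in this hunk
    output = model("hello world")
    assert isinstance(
        output, LLMInterface
    ), "Output for single text is not LLMInterface"
    openai_completion.assert_called()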

View File

@@ -1,10 +1,10 @@
 from unittest.mock import patch

-from langchain.llms import AzureOpenAI as AzureOpenAILC, OpenAI as OpenAILC
+from langchain.llms import AzureOpenAI as AzureOpenAILC
+from langchain.llms import OpenAI as OpenAILC

-from kotaemon.llms.completions.openai import AzureOpenAI, OpenAI
 from kotaemon.llms.base import LLMInterface
+from kotaemon.llms.completions.openai import AzureOpenAI, OpenAI

 _openai_completion_response = {
     "id": "cmpl-7qyNoIo6gRSCJR0hi8o3ZKBH4RkJ0",
@@ -41,7 +41,9 @@ def test_azureopenai_model(openai_completion):
     openai_completion.assert_called()
     output = model("hello world")
-    assert isinstance(output, LLMInterface), "Output for single text is not LLMInterface"
+    assert isinstance(
+        output, LLMInterface
+    ), "Output for single text is not LLMInterface"


 @patch(
@@ -67,4 +69,6 @@ def test_openai_model(openai_completion):
     openai_completion.assert_called()
     output = model("hello world")
-    assert isinstance(output, LLMInterface), "Output for single text is not LLMInterface"
+    assert isinstance(
+        output, LLMInterface
+    ), "Output for single text is not LLMInterface"

View File

@@ -29,11 +29,14 @@ def clean_artifacts_for_telemetry():
 def test_disable_telemetry_import_haystack_first():
     """Test that telemetry is disabled when kotaemon lib is initiated after"""
     import os
+
     import haystack.telemetry

     assert haystack.telemetry.telemetry is not None
     assert os.environ.get("HAYSTACK_TELEMETRY_ENABLED", "True") != "False"
-    import kotaemon # noqa: F401
+
+    import kotaemon  # noqa: F401
+
     assert haystack.telemetry.telemetry is None
     assert os.environ.get("HAYSTACK_TELEMETRY_ENABLED", "True") == "False"
@@ -43,8 +46,9 @@ def test_disable_telemetry_import_haystack_after_kotaemon():
     """Test that telemetry is disabled when kotaemon lib is initiated before"""
     import os
-    import kotaemon  # noqa: F401
+
     import haystack.telemetry
+    import kotaemon  # noqa: F401

     assert haystack.telemetry.telemetry is None
     assert os.environ.get("HAYSTACK_TELEMETRY_ENABLED", "True") == "False"
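
Both telemetry tests pin down the same import-time side effect: importing kotaemon must switch Haystack telemetry off regardless of which library is imported first. A minimal sketch of how a package `__init__` can satisfy those assertions (an illustration inferred from the tests, not the actual kotaemon source):

# Hypothetical kotaemon/__init__.py fragment: disable Haystack telemetry
# at import time, covering both import orders exercised by the tests.
import os

# Covers "kotaemon imported first": haystack reads this env var when its
# telemetry module loads, which the tests check via os.environ.get(...).
os.environ["HAYSTACK_TELEMETRY_ENABLED"] = "False"

try:
    import haystack.telemetry

    # Covers "haystack imported first": the telemetry object already
    # exists by then, so it is cleared - the tests assert it becomes None.
    haystack.telemetry.telemetry = None
except ImportError:
    # Haystack is optional; nothing to disable when it is absent.
    pass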