Move prompts into LLMs module (#70)

Since the only usage of prompt is within LLMs, it is reasonable to keep it within the LLM module. This way, the module is easier to discover, and the code base is less complicated.

Changes:

* Move prompt components into llms
* Bump version 0.3.1
* Make pip install dependencies in eager mode

---------

Co-authored-by: ian <ian@cinnamon.is>
This commit is contained in:
Nguyen Trung Duc (john) 2023-11-14 16:00:10 +07:00 committed by GitHub
parent 8532138842
commit 693ed39de4
24 changed files with 44 additions and 37 deletions

View File

@ -65,6 +65,7 @@ jobs:
with:
path: ${{ env.pythonLocation }}
key: ${{ steps.get-cache-key.outputs.key }}
# could use the cache of a previous version to reuse unchanged packages
restore-keys: ${{ runner.os }}-py${{ matrix.python-version }}
- name: Check cache hit
@ -81,7 +82,7 @@ jobs:
steps.check-cache-hit.outputs.check != 'true'
run: |
python -m pip install --upgrade pip
pip install --ignore-installed -e .[dev]
pip install -U --upgrade-strategy eager -e .[dev]
- name: New dependencies cache for key ${{ steps.restore-dependencies.outputs.cache-primary-key }}
if: |

View File

@ -22,4 +22,4 @@ try:
except ImportError:
pass
__version__ = "0.3.0"
__version__ = "0.3.1"

View File

@ -17,7 +17,7 @@ class SimpleBranchingPipeline(BaseComponent):
from kotaemon.composite import GatedLinearPipeline
from kotaemon.llms.chats.openai import AzureChatOpenAI
from kotaemon.post_processing.extractor import RegexExtractor
from kotaemon.prompt.base import BasePromptComponent
from kotaemon.llms import BasePromptComponent
def identity(x):
return x
@ -90,7 +90,7 @@ class GatedBranchingPipeline(SimpleBranchingPipeline):
from kotaemon.composite import GatedLinearPipeline
from kotaemon.llms.chats.openai import AzureChatOpenAI
from kotaemon.post_processing.extractor import RegexExtractor
from kotaemon.prompt.base import BasePromptComponent
from kotaemon.llms import BasePromptComponent
def identity(x):
return x
@ -150,9 +150,9 @@ class GatedBranchingPipeline(SimpleBranchingPipeline):
if __name__ == "__main__":
import dotenv
from kotaemon.llms import BasePromptComponent
from kotaemon.llms.chats.openai import AzureChatOpenAI
from kotaemon.post_processing.extractor import RegexExtractor
from kotaemon.prompt.base import BasePromptComponent
def identity(x):
return x

View File

@ -2,9 +2,7 @@ from typing import Any, Callable, Optional, Union
from kotaemon.base import BaseComponent
from kotaemon.base.schema import Document, IO_Type
from kotaemon.llms.chats.base import ChatLLM
from kotaemon.llms.completions.base import LLM
from kotaemon.prompt.base import BasePromptComponent
from kotaemon.llms import LLM, BasePromptComponent, ChatLLM
class SimpleLinearPipeline(BaseComponent):
@ -22,7 +20,7 @@ class SimpleLinearPipeline(BaseComponent):
Example Usage:
from kotaemon.llms.chats.openai import AzureChatOpenAI
from kotaemon.prompt.base import BasePromptComponent
from kotaemon.llms import BasePromptComponent
def identity(x):
return x
@ -89,7 +87,7 @@ class GatedLinearPipeline(SimpleLinearPipeline):
Example Usage:
from kotaemon.llms.chats.openai import AzureChatOpenAI
from kotaemon.post_processing.extractor import RegexExtractor
from kotaemon.prompt.base import BasePromptComponent
from kotaemon.llms import BasePromptComponent
def identity(x):
return x

View File

@ -2,12 +2,22 @@ from langchain.schema.messages import AIMessage, SystemMessage
from .chats import AzureChatOpenAI, ChatLLM
from .chats.base import BaseMessage, HumanMessage
from .completions import LLM, AzureOpenAI, OpenAI
from .prompts import BasePromptComponent, PromptTemplate
__all__ = [
# chat-specific components
"ChatLLM",
"AzureChatOpenAI",
"BaseMessage",
"HumanMessage",
"AIMessage",
"SystemMessage",
"AzureChatOpenAI",
# completion-specific components
"LLM",
"OpenAI",
"AzureOpenAI",
# prompt-specific components
"BasePromptComponent",
"PromptTemplate",
]

View File

@ -0,0 +1,4 @@
from .base import LLM
from .openai import AzureOpenAI, OpenAI
__all__ = ["LLM", "OpenAI", "AzureOpenAI"]

View File

@ -64,7 +64,3 @@ class LangchainLLM(LLM):
setattr(self.agent, name, value)
else:
super().__setattr__(name, value)
class LLMChat(BaseComponent):
pass

View File

@ -0,0 +1,4 @@
from .base import BasePromptComponent
from .template import PromptTemplate
__all__ = ["BasePromptComponent", "PromptTemplate"]

View File

@ -1,7 +1,7 @@
from typing import Callable, Union
from kotaemon.base import BaseComponent, Document
from kotaemon.prompt.template import PromptTemplate
from ...base import BaseComponent, Document
from .template import PromptTemplate
class BasePromptComponent(BaseComponent):

View File

@ -3,10 +3,10 @@ from typing import Dict, List, Optional, Union
from pydantic import BaseModel
from kotaemon.llms import PromptTemplate
from kotaemon.llms.chats.base import ChatLLM
from kotaemon.llms.completions.base import LLM
from kotaemon.pipelines.tools import BaseTool
from kotaemon.prompt.template import PromptTemplate
BaseLLM = Union[ChatLLM, LLM]

View File

@ -4,7 +4,7 @@ from typing import Dict, List, Optional, Tuple, Type, Union
from pydantic import BaseModel, create_model
from kotaemon.prompt.template import PromptTemplate
from kotaemon.llms import PromptTemplate
from ..base import AgentOutput, AgentType, BaseAgent, BaseLLM, BaseTool
from ..output.base import AgentAction, AgentFinish

View File

@ -1,6 +1,6 @@
# flake8: noqa
from kotaemon.prompt.template import PromptTemplate
from kotaemon.llms import PromptTemplate
zero_shot_react_prompt = PromptTemplate(
template="""Answer the following questions as best you can. You have access to the following tools:

View File

@ -5,9 +5,7 @@ from typing import Any, Dict, List, Optional, Tuple, Type, Union
from pydantic import BaseModel, create_model
from kotaemon.llms.chats.base import ChatLLM
from kotaemon.llms.completions.base import LLM
from kotaemon.prompt.template import PromptTemplate
from kotaemon.llms import LLM, ChatLLM, PromptTemplate
from ..base import AgentOutput, AgentType, BaseAgent, BaseLLM, BaseTool
from ..output.base import BaseScratchPad

View File

@ -1,8 +1,7 @@
from typing import Any, List, Optional, Union
from kotaemon.base import BaseComponent
from kotaemon.prompt.template import PromptTemplate
from ....base import BaseComponent
from ....llms import PromptTemplate
from ..base import BaseLLM, BaseTool
from ..output.base import BaseScratchPad
from .prompt import zero_shot_planner_prompt

View File

@ -1,6 +1,6 @@
# flake8: noqa
from kotaemon.prompt.template import PromptTemplate
from kotaemon.llms import PromptTemplate
zero_shot_planner_prompt = PromptTemplate(
template="""You are an AI agent who makes step-by-step plans to solve a problem under the help of external tools.

View File

@ -1,7 +1,7 @@
from typing import Any, List, Optional, Union
from kotaemon.base import BaseComponent
from kotaemon.prompt.template import PromptTemplate
from kotaemon.llms import PromptTemplate
from ..base import BaseLLM
from ..output.base import BaseScratchPad

View File

@ -4,8 +4,8 @@ from typing import List
from theflow import Compose, Node, Param
from kotaemon.base import BaseComponent
from kotaemon.llms import BasePromptComponent
from kotaemon.llms.chats.openai import AzureChatOpenAI
from kotaemon.prompt.base import BasePromptComponent
class Thought(BaseComponent):

View File

@ -9,11 +9,11 @@ from kotaemon.base import BaseComponent
from kotaemon.base.schema import RetrievedDocument
from kotaemon.docstores import InMemoryDocumentStore
from kotaemon.embeddings import AzureOpenAIEmbeddings
from kotaemon.llms import PromptTemplate
from kotaemon.llms.chats.openai import AzureChatOpenAI
from kotaemon.pipelines.agents import BaseAgent
from kotaemon.pipelines.retrieving import RetrieveDocumentFromVectorStorePipeline
from kotaemon.pipelines.tools import ComponentTool
from kotaemon.prompt.template import PromptTemplate
from kotaemon.vectorstores import InMemoryVectorStore
from .utils import file_names_to_collection_name

View File

@ -2,9 +2,7 @@ from typing import AnyStr, Optional, Type, Union
from pydantic import BaseModel, Field
from kotaemon.llms.chats.base import ChatLLM
from kotaemon.llms.chats.openai import AzureChatOpenAI
from kotaemon.llms.completions.base import LLM
from kotaemon.llms import LLM, AzureChatOpenAI, ChatLLM
from .base import BaseTool, ToolException

View File

@ -9,9 +9,9 @@ from kotaemon.composite import (
SimpleBranchingPipeline,
SimpleLinearPipeline,
)
from kotaemon.llms import BasePromptComponent
from kotaemon.llms.chats.openai import AzureChatOpenAI
from kotaemon.post_processing.extractor import RegexExtractor
from kotaemon.prompt.base import BasePromptComponent
_openai_chat_completion_response = ChatCompletion.parse_obj(
{

View File

@ -1,9 +1,8 @@
import pytest
from kotaemon.base import Document
from kotaemon.llms import BasePromptComponent, PromptTemplate
from kotaemon.post_processing.extractor import RegexExtractor
from kotaemon.prompt.base import BasePromptComponent
from kotaemon.prompt.template import PromptTemplate
def test_set_attributes():

View File

@ -1,6 +1,6 @@
import pytest
from kotaemon.prompt.template import PromptTemplate
from kotaemon.llms import PromptTemplate
def test_prompt_template_creation():