1. Introduce the concept of a "collection_name" to the docstore and vector store. Each collection can be viewed much like a table in a SQL database, allowing information within the data source to be organized better. 2. Move the `Index` and `Source` tables from the application scope into the index scope. A new set of these tables is created for each index the user creates, so the tables depend on the index rather than on the app. 3. Make each index responsible for its UI components in the app. 4. Construct the File UI page.
110 lines · 3.0 KiB · Python
from pathlib import Path
|
|
|
|
from decouple import config
|
|
from platformdirs import user_cache_dir
|
|
from theflow.settings.default import * # noqa
|
|
|
|
# Resolve the per-user cache directory for this app via platformdirs and make
# sure it exists, since every storage path below is derived from it.
# The second argument ("Cinnamon") is the platformdirs appauthor.
# NOTE(review): this rebinds the imported ``user_cache_dir`` *function* to a
# ``Path`` instance — the platformdirs helper is no longer callable from this
# module after this point. Consider renaming the variable in a follow-up that
# also updates all downstream uses.
user_cache_dir = Path(
    user_cache_dir(str(config("KH_APP_NAME", default="ktem")), "Cinnamon")
)
user_cache_dir.mkdir(parents=True, exist_ok=True)
|
|
|
|
|
|
# Cohere API key, read from the environment / .env (empty string when unset).
COHERE_API_KEY = config("COHERE_API_KEY", default="")
# Run mode flag; "dev" presumably relaxes behavior elsewhere — TODO confirm
# against the consumers of this setting.
KH_MODE = "dev"
# Whether to run Alembic schema migrations; disabled here.
KH_ENABLE_ALEMBIC = False
# SQLAlchemy-style URL for the app database: a SQLite file inside the user
# cache directory resolved above.
KH_DATABASE = f"sqlite:///{user_cache_dir / 'sql.db'}"
|
|
# Document store backend: a simple file-based store persisted under the cache
# directory. ``__type__`` is presumably a dotted import path resolved to a
# class by the loader — TODO confirm against the instantiation code.
KH_DOCSTORE = {
    "__type__": "kotaemon.storages.SimpleFileDocumentStore",
    "path": str(user_cache_dir / "docstore"),
}
# Vector store backend: Chroma, persisted under the cache directory.
KH_VECTORSTORE = {
    "__type__": "kotaemon.storages.ChromaVectorStore",
    "path": str(user_cache_dir / "vectorstore"),
}
# On-disk location where raw/uploaded files are kept.
KH_FILESTORAGE_PATH = str(user_cache_dir / "files")
|
|
# Registry of LLMs available to the app, keyed by a short alias.
# Each entry follows the schema:
#   "def":      constructor spec (``__type__`` dotted path + kwargs) for the
#               LLM component, with credentials pulled from the environment.
#   "accuracy": relative quality score — presumably used for model
#               selection/ranking elsewhere; TODO confirm.
#   "cost":     relative cost score — same caveat as "accuracy".
#   "default":  whether this model is the app's default choice.
# NOTE(review): both entries use deployment_name "dummy-q2", which looks like
# a placeholder — verify these match real Azure deployments before shipping.
KH_LLMS = {
    "gpt4": {
        "def": {
            "__type__": "kotaemon.llms.AzureChatOpenAI",
            "temperature": 0,
            "azure_endpoint": config("AZURE_OPENAI_ENDPOINT", default=""),
            "openai_api_key": config("AZURE_OPENAI_API_KEY", default=""),
            "openai_api_version": config("OPENAI_API_VERSION", default=""),
            "deployment_name": "dummy-q2",
            # Streaming enabled for the higher-quality model.
            "stream": True,
        },
        "accuracy": 10,
        "cost": 10,
        "default": False,
    },
    "gpt35": {
        "def": {
            "__type__": "kotaemon.llms.AzureChatOpenAI",
            "temperature": 0,
            "azure_endpoint": config("AZURE_OPENAI_ENDPOINT", default=""),
            "openai_api_key": config("AZURE_OPENAI_API_KEY", default=""),
            "openai_api_version": config("OPENAI_API_VERSION", default=""),
            "deployment_name": "dummy-q2",
            # Timeout (seconds, presumably — confirm units) only set here;
            # the gpt4 entry has no request_timeout.
            "request_timeout": 10,
            "stream": False,
        },
        "accuracy": 5,
        "cost": 5,
        "default": True,
    },
}
|
|
# Registry of embedding models, keyed by a short alias. Same entry schema as
# KH_LLMS: "def" is the constructor spec; "accuracy"/"cost" are relative
# scores (semantics presumably mirror KH_LLMS — TODO confirm); "default"
# marks the default embedding.
KH_EMBEDDINGS = {
    "ada": {
        "def": {
            "__type__": "kotaemon.embeddings.AzureOpenAIEmbeddings",
            "model": "text-embedding-ada-002",
            "azure_endpoint": config("AZURE_OPENAI_ENDPOINT", default=""),
            "openai_api_key": config("AZURE_OPENAI_API_KEY", default=""),
            # NOTE(review): "dummy-q2-text-embedding" looks like a placeholder
            # deployment — verify before shipping.
            "deployment": "dummy-q2-text-embedding",
            # Number of texts embedded per request batch, presumably — confirm
            # against the embeddings client.
            "chunk_size": 16,
        },
        "accuracy": 5,
        "cost": 5,
        "default": True,
    },
}
|
|
# Reasoning pipelines available to the app, as dotted import paths.
KH_REASONINGS = ["ktem.reasoning.simple.FullQAPipeline"]
|
|
|
|
|
|
# App-level user settings. Each entry maps a setting key to a UI spec:
#   "name":      label shown to the user
#   "value":     default value
#   "choices":   list of (label, value) pairs
#   "component": widget type used to render the setting
SETTINGS_APP = {
    "lang": {
        "name": "Language",
        "value": "en",
        "choices": [("English", "en"), ("Japanese", "ja")],
        "component": "dropdown",
    }
}
|
|
|
|
|
|
# Reasoning-related user settings; same entry schema as SETTINGS_APP.
SETTINGS_REASONING = {
    "use": {
        "name": "Reasoning options",
        # No default and no choices here — presumably populated at runtime
        # from KH_REASONINGS; TODO confirm against the settings loader.
        "value": None,
        "choices": [],
        "component": "radio",
    },
    "lang": {
        "name": "Language",
        "value": "en",
        "choices": [("English", "en"), ("Japanese", "ja")],
        "component": "dropdown",
    },
}
|
|
|
|
|
|
# Index implementations that can be instantiated, as dotted import paths.
KH_INDEX_TYPES = ["ktem.index.file.FileIndex"]
# Concrete index instances for the app. Each entry declares:
#   "id":         stable numeric identifier for the index
#   "name":       display name
#   "config":     per-index configuration (empty = defaults)
#   "index_type": which entry of KH_INDEX_TYPES backs this index
KH_INDICES = [
    {
        "id": 1,
        "name": "File",
        "config": {},
        "index_type": "ktem.index.file.FileIndex",
    }
]
|