diff --git a/chatbotcore/contextual_chunks.py b/chatbotcore/contextual_chunks.py
index 7632586..9b7a6a0 100644
--- a/chatbotcore/contextual_chunks.py
+++ b/chatbotcore/contextual_chunks.py
@@ -50,7 +50,7 @@ class ContextualChunking:
     """Context retrieval for the chunk documents"""

     model: Any = field(init=False)
-    model_type: Enum = LLMType.OLLAMA
+    model_type: LLMType = LLMType.OLLAMA

     def __post_init__(self):
         if self.model_type == LLMType.OLLAMA:
@@ -59,7 +59,7 @@ def __post_init__(self):
             self.model = OpenAIHandler()
         else:
             logger.error("Wrong LLM Type")
-            raise ValueError("Wront LLM Type")
+            raise ValueError("Wrong LLM Type")

     def get_prompt(self):
         """Creates a prompt"""
diff --git a/chatbotcore/doc_loaders.py b/chatbotcore/doc_loaders.py
index c1c6861..1f00791 100644
--- a/chatbotcore/doc_loaders.py
+++ b/chatbotcore/doc_loaders.py
@@ -9,6 +9,7 @@
 from langchain_community.document_loaders import WebBaseLoader

 from chatbotcore.contextual_chunks import ContextualChunking
+from utils import LLMType


 @dataclass(kw_only=True)
@@ -22,7 +23,7 @@ class DocumentLoader:
     context_retrieval: ContextualChunking = field(init=False)

     def __post_init__(self):
-        self.context_retrieval = ContextualChunking(model_type=settings.LLM_TYPE)
+        self.context_retrieval = ContextualChunking(model_type=LLMType(int(settings.LLM_TYPE)))

     def _get_split_documents_with_recursive_char(self, documents: List[Document], multiplier: int = 3):
         """