Skip to content

Commit

Permalink
Free & local
Browse files Browse the repository at this point in the history
Bugfixes for free and local models
  • Loading branch information
frdel committed Sep 13, 2024
1 parent 62e244f commit b73f29a
Show file tree
Hide file tree
Showing 4 changed files with 47 additions and 33 deletions.
2 changes: 1 addition & 1 deletion example.env
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
API_KEY_OPENAI=sk-hyBlbkFJCJjaYGCbqPTyT3uaYGCbqFBlbkFJCyJCyuPhYGCb
API_KEY_OPENAI=
API_KEY_ANTHROPIC=
API_KEY_GROQ=
API_KEY_PERPLEXITY=
Expand Down
5 changes: 3 additions & 2 deletions initialize.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@ def initialize():
# main chat model used by agents (smarter, more accurate)
chat_llm = models.get_openai_chat(model_name="gpt-4o-mini", temperature=0)
# chat_llm = models.get_ollama_chat(model_name="gemma2:latest", temperature=0)
# chat_llm = models.get_lmstudio_chat(model_name="TheBloke/Mistral-7B-Instruct-v0.2-GGUF", temperature=0)
# chat_llm = models.get_openrouter_chat(model_name="nousresearch/hermes-3-llama-3.1-405b")
# chat_llm = models.get_lmstudio_chat(model_name="lmstudio-community/Meta-Llama-3.1-8B-Instruct-GGUF", temperature=0)
# chat_llm = models.get_openrouter_chat(model_name="mattshumer/reflection-70b:free")
# chat_llm = models.get_azure_openai_chat(deployment_name="gpt-4o-mini", temperature=0)
# chat_llm = models.get_anthropic_chat(model_name="claude-3-5-sonnet-20240620", temperature=0)
# chat_llm = models.get_google_chat(model_name="gemini-1.5-flash", temperature=0)
Expand All @@ -20,6 +20,7 @@ def initialize():
embedding_llm = models.get_openai_embedding(model_name="text-embedding-3-small")
# embedding_llm = models.get_ollama_embedding(model_name="nomic-embed-text")
# embedding_llm = models.get_huggingface_embedding(model_name="sentence-transformers/all-MiniLM-L6-v2")
# embedding_llm = models.get_lmstudio_embedding(model_name="nomic-ai/nomic-embed-text-v1.5-GGUF")

# agent configuration
config = AgentConfig(
Expand Down
2 changes: 1 addition & 1 deletion models.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def get_lmstudio_chat(model_name:str, temperature=DEFAULT_TEMPERATURE, base_url=
return ChatOpenAI(model_name=model_name, base_url=base_url, temperature=temperature, api_key="none") # type: ignore

def get_lmstudio_embedding(model_name: str, base_url=os.getenv("LM_STUDIO_BASE_URL") or "http://127.0.0.1:1234/v1"):
    """Return an embedding client backed by a local LM Studio server.

    LM Studio exposes an OpenAI-compatible endpoint, so OpenAIEmbeddings is
    reused with:
      - model=...: OpenAIEmbeddings takes ``model``, not ``model_name``.
      - api_key="none": LM Studio does not authenticate, but the client
        requires a non-empty key.
      - check_embedding_ctx_length=False: disables OpenAI-specific token
        counting/splitting, which fails against local servers.
    """
    return OpenAIEmbeddings(model=model_name, api_key="none", base_url=base_url, check_embedding_ctx_length=False) # type: ignore

# Anthropic models
def get_anthropic_chat(model_name:str, api_key=get_api_key("anthropic"), temperature=DEFAULT_TEMPERATURE):
Expand Down
71 changes: 42 additions & 29 deletions python/helpers/defer.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,55 +7,68 @@ class EventLoopThread:
class EventLoopThread:
    """Process-wide singleton owning a daemon thread that runs a private
    asyncio event loop forever.

    Coroutines may be submitted from any thread via run_coroutine(); they
    execute on the dedicated loop thread.
    """

    _instance = None                # lazily created singleton
    _lock = threading.Lock()        # guards singleton creation across threads

    def __new__(cls):
        # All set-up happens here rather than in __init__: Python invokes
        # __init__ after every EventLoopThread() call, so an __init__ that
        # created the loop/thread would spawn a fresh loop and thread on
        # each instantiation instead of reusing the singleton's.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super().__new__(cls)
                cls._instance.loop = asyncio.new_event_loop()
                cls._instance.thread = threading.Thread(
                    target=cls._instance._run_event_loop, daemon=True
                )
                cls._instance.thread.start()
            return cls._instance

    def _run_event_loop(self):
        # Runs on the dedicated thread: adopt the loop and spin until stopped.
        asyncio.set_event_loop(self.loop)
        self.loop.run_forever()

    def run_coroutine(self, coro):
        """Schedule *coro* on the background loop.

        Returns a concurrent.futures.Future; safe to call from any thread.
        """
        return asyncio.run_coroutine_threadsafe(coro, self.loop)

class DeferredTask:
    """Runs an async function on the shared EventLoopThread without blocking
    the caller.

    The coroutine is submitted immediately on construction. The result can be
    collected either synchronously (result_sync) or awaited from a different
    running event loop (result).
    """

    def __init__(self, func: Callable[..., Coroutine[Any, Any, Any]], *args: Any, **kwargs: Any):
        # Keep the callable and its arguments so restart() can resubmit.
        self.func = func
        self.args = args
        self.kwargs = kwargs
        self.event_loop_thread = EventLoopThread()
        self._future: Optional[Future] = None
        self._start_task()

    def _start_task(self) -> None:
        # Submit the wrapped coroutine to the background loop.
        self._future = self.event_loop_thread.run_coroutine(self._run())

    async def _run(self) -> Any:
        return await self.func(*self.args, **self.kwargs)

    def is_ready(self) -> bool:
        """True once the task has finished (successfully, with error, or cancelled)."""
        return self._future.done() if self._future else False

    def result_sync(self, timeout: Optional[float] = None) -> Any:
        """Block the calling thread until the task finishes and return its result.

        Raises RuntimeError if the task was never started, and TimeoutError if
        it does not finish within *timeout* seconds.
        """
        if not self._future:
            raise RuntimeError("Task hasn't been started")
        try:
            return self._future.result(timeout)
        except TimeoutError:
            raise TimeoutError("The task did not complete within the specified timeout.")

    async def result(self, timeout: Optional[float] = None) -> Any:
        """Await the task's result from a (different) running event loop.

        The blocking Future.result() call is pushed into the default
        thread-pool executor so the caller's event loop stays responsive.
        """
        if not self._future:
            raise RuntimeError("Task hasn't been started")

        loop = asyncio.get_running_loop()

        def _get_result():
            try:
                return self._future.result(timeout)  # type: ignore
            except TimeoutError:
                raise TimeoutError("The task did not complete within the specified timeout.")

        return await loop.run_in_executor(None, _get_result)

    def kill(self) -> None:
        """Cancel the task if it has not finished yet; no-op otherwise."""
        if self._future and not self._future.done():
            self._future.cancel()

    def is_alive(self) -> bool:
        """True while the task has been started and has not yet finished."""
        # bool(...) so the annotated return type holds: the bare `and`
        # expression would leak None or the Future object to callers.
        return bool(self._future and not self._future.done())

    def restart(self) -> None:
        """Submit the wrapped coroutine again, replacing the previous future."""
        self._start_task()

0 comments on commit b73f29a

Please sign in to comment.