Update to 3.1.0 (#75)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
edamamez and github-actions[bot] authored Sep 24, 2024
1 parent aa2b82e commit c1d5264
Showing 24 changed files with 1,669 additions and 161 deletions.
2 changes: 2 additions & 0 deletions lamini/__init__.py
@@ -29,3 +29,5 @@
batch_size = int(os.environ.get("LAMINI_BATCH_SIZE", 5))
static_batching = bool(os.environ.get("LAMINI_STATIC_BATCHING", False))
bypass_reservation = bool(os.environ.get("LAMINI_BYPASS_RESERVATION", False))

__version__ = "3.1.0"
95 changes: 90 additions & 5 deletions lamini/api/classifier.py
@@ -1,5 +1,5 @@
import time
from typing import List, Union
from typing import List, Union, Optional

import lamini
import requests
@@ -8,7 +8,31 @@


class Classifier:
def __init__(self, model_id: int = None, api_key: str = None, api_url: str = None):
"""Handler for classification functions of an already trained LLM for classification tasks
on the Lamini Platform
Parameters
----------
model_id: int = None
Tuned Model designation on the Lamini platform
api_key: Optional[str]
Lamini platform API key; if not provided, the key stored
within ~/.lamini/configure.yaml will be used. If neither
exists, an error is raised.
api_url: Optional[str]
Lamini platform API URL; only needed if a URL other than the default is required.
default = "https://app.lamini.ai"
"""

def __init__(
self,
model_id: int = None,
api_key: Optional[str] = None,
api_url: Optional[str] = None,
):
self.model_id = model_id
self.config = get_config()
self.api_key = api_key or lamini.api_key or get_configured_key(self.config)
@@ -21,7 +45,36 @@ def classify(
top_n: int = None,
threshold: float = None,
metadata: bool = None,
):
) -> str:
"""Send a classification request for self.model_id with the provided prompt.
Parameters
----------
prompt: Union[str, List[str]]
Text prompt for the LLM classifier
top_n: int = None
Maximum number of top responses to return from the LLM classifier
threshold: float = None
Confidence threshold a prediction must meet to be reported
as a predicted class
metadata: bool = None
Boolean flag to request metadata in the response
Raises
------
Exception
Raised if self.model_id was not set at instantiation; without a
model_id, no model can be queried for a prediction.
Returns
-------
resp["classification"]: str
Returned predicted class as a string
"""

if self.model_id is None:
raise Exception(
"model_id must be set in order to classify. Upload a model or set an existing model_id"
@@ -41,7 +94,26 @@ def classify(
)
return resp["classification"]

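For context, a minimal usage sketch of the classify API documented above; the model_id value and prompt are hypothetical, and the client is assumed to be authenticated via lamini.api_key or ~/.lamini/configure.yaml:

from lamini.api.classifier import Classifier

# Hypothetical tuned classifier ID on the Lamini platform
classifier = Classifier(model_id=1234)

# top_n, threshold, and metadata are optional and default to None
label = classifier.classify(
    "I love this product!",
    top_n=3,         # limit to the top 3 candidate classes
    threshold=0.5,   # only report predictions at or above this confidence
    metadata=True,   # ask for metadata in the response
)
print(label)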
def predict(self, prompt: Union[str, List[str]]):
def predict(self, prompt: Union[str, List[str]]) -> str:
"""Send a prediction request for self.model_id with the provided prompt.
Parameters
----------
prompt: Union[str, List[str]]
Text prompt for the LLM classifier
Raises
------
Exception
Raised if self.model_id was not set at instantiation; without a
model_id, no model can be queried for a prediction.
Returns
-------
resp["prediction"]: str
Returned predicted class as a string
"""

if self.model_id is None:
raise Exception(
"model_id must be set in order to classify. Upload a model or set an existing model_id"
@@ -55,7 +127,20 @@ def predict(self, prompt: Union[str, List[str]]):
)
return resp["prediction"]

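predict mirrors classify but takes only the prompt; a short sketch under the same assumptions (classifier instance from the sketch above, hypothetical prompt):

# Default prediction with no top_n/threshold/metadata options
prediction = classifier.predict("great service, would buy again")
print(prediction)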
def upload(self, file_path: str):
def upload(self, file_path: str) -> None:
"""Upload file to Lamini platform
Parameters
----------
file_path: str
Path to file to upload
Returns
-------
None
"""

files = {"file": open(file_path, "rb")}
headers = {
"Authorization": "Bearer " + self.api_key,
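A sketch of the upload flow described above; the file path is hypothetical, and the file format is whatever the Lamini platform expects for classifier uploads:

# Hypothetical local file registered with the platform for this classifier
classifier.upload("data/classifier_examples.jsonl")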
38 changes: 37 additions & 1 deletion lamini/api/embedding.py
@@ -7,6 +7,27 @@


class Embedding:
"""Handler for embedding requests to the Lamini Platform
Parameters
----------
model_name: str = None
LLM Hugging Face ID, e.g. "meta-llama/Meta-Llama-3.1-8B-Instruct"
api_key: Optional[str]
Lamini platform API key; if not provided, the key stored
within ~/.lamini/configure.yaml will be used. If neither
exists, an error is raised.
api_url: Optional[str]
Lamini platform API URL; only needed if a URL other than those
defined here is required: https://github.com/lamini-ai/lamini-platform/blob/main/sdk/lamini/api/lamini_config.py#L68
i.e. localhost, staging.lamini.ai, or api.lamini.ai
Additionally, the LLAMA_ENVIRONMENT environment variable can be set
and takes precedence over any of the above defaults.
"""

def __init__(
self,
model_name: str = None,
@@ -19,7 +40,22 @@ def __init__(
self.api_prefix = self.api_url + "/v1/"
self.model_name = model_name

def generate(self, prompt: Union[str, List[str]]):
def generate(self, prompt: Union[str, List[str]]) -> List[np.ndarray]:
"""Request to Lamini platform for an embedding encoding of the provided
prompt
Parameters
----------
prompt: Union[str, List[str]]
Prompt to encode into an embedding
Returns
-------
List[np.ndarray]
Formatted embedding returned from the Lamini platform
"""

params = {"prompt": prompt, "model_name": self.model_name}
resp = make_web_request(
self.api_key, self.api_prefix + "embedding", "post", params
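A minimal sketch of the Embedding API shown above; the model name reuses the Hugging Face ID from the docstring example, and the authentication assumptions match those for Classifier:

from lamini.api.embedding import Embedding

embedder = Embedding(model_name="meta-llama/Meta-Llama-3.1-8B-Instruct")

# generate accepts a single string or a list of strings and returns
# a list of numpy arrays, one embedding per prompt
vectors = embedder.generate(["first sentence", "second sentence"])
print(len(vectors), vectors[0].shape)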