Skip to content

Commit

Permalink
Update to 2.5.6a1 (#60)
Browse files Browse the repository at this point in the history
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
  • Loading branch information
edamamez and github-actions[bot] authored Jul 12, 2024
1 parent edd6650 commit 02574e0
Show file tree
Hide file tree
Showing 40 changed files with 2,790 additions and 150 deletions.
25 changes: 25 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
FROM python:3.9 AS base

ARG PACKAGE_NAME="llama-lang"
ARG LLAMA_ENVIRONMENT

# Refresh the apt index; clean the lists in the same layer so the stale
# cache never persists into the image (add any `apt-get install` here too,
# in this same RUN, when OS packages are needed).
RUN apt-get -yq update \
    && rm -rf /var/lib/apt/lists/*

# Install python packages first so this layer stays cached until
# requirements.txt itself changes.
WORKDIR /app/${PACKAGE_NAME}
COPY ./requirements.txt /app/${PACKAGE_NAME}/requirements.txt
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install -r requirements.txt

# Copy application files to the container (after deps, for cache reuse)
COPY ./scripts /app/${PACKAGE_NAME}/scripts
COPY ./test /app/${PACKAGE_NAME}/test

RUN chmod a+x /app/${PACKAGE_NAME}/scripts/start.sh

# Persist the build args into the runtime environment for the entrypoint
ENV PACKAGE_NAME=$PACKAGE_NAME
ENV LLAMA_ENVIRONMENT=$LLAMA_ENVIRONMENT

# Exec form with an explicit shell: ${...} still expands, and `exec` makes
# start.sh PID 1 so it receives SIGTERM from `docker stop` directly.
ENTRYPOINT ["/bin/sh", "-c", "exec /app/${PACKAGE_NAME}/scripts/start.sh -e ${LLAMA_ENVIRONMENT}"]
57 changes: 57 additions & 0 deletions deploy/build.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
#!/bin/bash

# Safely execute this bash script
# e exit on first failure
# x all executed commands are printed to the terminal
# u unset variables are errors
# a export all variables to the environment
# E any trap on ERR is inherited by shell functions
# -o pipefail | produces a failure code if any stage fails
set -Eeuoxa pipefail

# Absolute directory containing this script, so relative paths below
# (e.g. ../pyproject.toml, ../dist) resolve regardless of the caller's cwd
LOCAL_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

# Print the package version parsed out of ../pyproject.toml (relative to the
# caller's cwd; the build flow below runs this from $LOCAL_DIRECTORY).
# Anchor the grep to the start of the line and take only the first match so
# any other line mentioning "version" cannot corrupt the output; the seds
# strip `version = ` and the surrounding quotes, leaving e.g. `2.5.6a1`.
function get_version {
    local version
    version="$(grep '^version' ../pyproject.toml | head -n 1 | sed 's/version[^"]*//g' | sed 's/"//g')"
    echo "$version"
}

# Print the highest build-revision number already published on PyPI for the
# latest "lamini" release (0 if none), so the next build can bump it.
# The inline python: loads the project JSON from stdin, sorts the release
# version strings by their numeric components, picks the newest, extracts the
# optional "-N-" build tag from each wheel filename with a regex, and prints
# the max of those N values (defaulting to 0 when no filename carries one).
# NOTE(review): assumes network access to pypi.org and that the JSON schema
# keeps the `releases` → filename layout — verify before relying on this.
function get_previous_revision {
local revision=$(curl "https://pypi.org/pypi/lamini/json" | python3 -c "import sys, json, re; project = json.load(sys.stdin); versions = list(sorted(project['releases'], key=lambda x : tuple(int(component) for component in x.split('.') if all([x.isnumeric() for x in component]) ))); latest_version = versions[-1]; latest_release = project['releases'][latest_version]; filenames = [re.findall('-\d+-|$', release['filename'])[0].strip('-') for release in latest_release] ; print(max([0] + [int(version) for version in filenames if len(version) > 0]))")
echo $revision
}

# Wheel filename that `python3 -m build` produces for the current version,
# before any build-revision tag is inserted.
function get_old_name {
    local pkg_version
    pkg_version="$(get_version)"
    echo "lamini-${pkg_version}-py3-none-any.whl"
}

# Wheel filename carrying the next PyPI build revision: one more than the
# highest revision already published for this version.
function get_new_name {
    local pkg_version next_revision
    pkg_version="$(get_version)"
    next_revision=$(( $(get_previous_revision) + 1 ))
    echo "lamini-${pkg_version}-${next_revision}-py3-none-any.whl"
}

# Build the wheel from the repo root (one level above this script).
# All path expansions are quoted so a checkout path containing spaces
# cannot word-split the cd/mv arguments.
cd "$LOCAL_DIRECTORY/.."
mkdir -p lamini
touch lamini/__init__.py
pip3 install wheel build
python3 -m build --wheel
cd "$LOCAL_DIRECTORY"

old_name="$(get_old_name)"
new_name="$(get_new_name)"

echo "old version $old_name"
echo "new version $new_name"

# Rename the built wheel to carry the next build revision
mv "$LOCAL_DIRECTORY/../dist/$old_name" "$LOCAL_DIRECTORY/../dist/$new_name"

# upload it
# TODO(review): the publish step is not implemented — the renamed wheel is
# left in ../dist for a separate upload process.

23 changes: 23 additions & 0 deletions deploy/release.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
#!/bin/bash

# Safely execute this bash script
# e exit on first failure
# u unset variables are errors
# a export all variables to the environment
# E any trap on ERR is inherited by shell functions
# -o pipefail | produces a failure code if any stage fails
# (no -x here, unlike build.sh: release output stays quiet on purpose)
set -Eeuoa pipefail

# Get the directory of this script
# NOTE(review): LOCAL_DIRECTORY is currently unused in this script; kept for
# parity with build.sh and future path-relative steps.
LOCAL_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

# Commit and push any pending changes, labelled with the release version
# passed as $1 (set -u aborts with a clear error if $1 is missing).
# A clean tree makes `git commit` fail, so swallow that case to keep the
# script re-runnable.
{
    git add .
    git commit -m "v$1"
    git push
} || {
    echo "No Changes to Push"
}

# Create the GitHub release for the new tag
gh release create "v$1" --title "v$1" --notes "checkpoint"
3 changes: 1 addition & 2 deletions lamini/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from lamini.runners.basic_model_runner import BasicModelRunner
from lamini.runners.mistral_runner import MistralRunner
from lamini.api.lamini import Lamini
from lamini.classify.lamini_classifier import LaminiClassifier, BinaryLaminiClassifier
from lamini.classify.lamini_classifier import LaminiClassifier
from lamini.api.classifier import Classifier
from lamini.api.embedding import Embedding
from lamini.generation.generation_node import GenerationNode
Expand All @@ -29,4 +29,3 @@

max_workers = os.environ.get("LAMINI_MAX_WORKERS", 10)
batch_size = os.environ.get("LAMINI_BATCH_SIZE", 5)
retry = os.environ.get("LAMINI_RETRY", False)
26 changes: 3 additions & 23 deletions lamini/api/lamini.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,13 @@
import jsonlines
import pandas as pd
from lamini.api.lamini_config import get_config
from lamini.api.rest_requests import get_version
from lamini.api.synchronize import sync
from lamini.api.train import Train
from lamini.api.utils.async_inference_queue import AsyncInferenceQueue
from lamini.api.utils.completion import Completion
from lamini.api.utils.upload_client import get_dataset_name, upload_to_blob
from lamini.generation.token_optimizer import TokenOptimizer
from lamini.api.rest_requests import get_version

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -67,14 +67,13 @@ def generate(
metadata: Optional[List] = None,
):
if isinstance(prompt, str) or (isinstance(prompt, list) and len(prompt) == 1):
req_data = self.make_llm_req_map(
result = self.completion.generate(
prompt=prompt,
model_name=model_name or self.model_name,
output_type=output_type,
max_tokens=max_tokens,
max_new_tokens=max_new_tokens,
)
result = self.completion.generate(req_data)
if output_type is None:
if isinstance(prompt, list) and len(prompt) == 1:
result = [single_result["output"] for single_result in result]
Expand Down Expand Up @@ -105,7 +104,7 @@ async def async_generate(
callback: Optional[Callable] = None,
metadata: Optional[List] = None,
):
req_data = self.make_llm_req_map(
req_data = self.completion.make_llm_req_map(
prompt=prompt,
model_name=model_name or self.model_name,
output_type=output_type,
Expand Down Expand Up @@ -361,22 +360,3 @@ def get_jobs(self):

def evaluate(self, job_id=None):
return self.trainer.evaluate(job_id)

def make_llm_req_map(
self,
model_name,
prompt,
output_type,
max_tokens,
max_new_tokens,
):
req_data = {}
req_data["model_name"] = model_name
req_data["prompt"] = prompt
req_data["output_type"] = output_type
req_data["max_tokens"] = max_tokens
if max_new_tokens is not None:
req_data["max_new_tokens"] = max_new_tokens
if self.model_config:
req_data["model_config"] = self.model_config.as_dict()
return req_data
2 changes: 1 addition & 1 deletion lamini/api/lamini_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ def get_configured_url(config):
if environment == "LOCAL":
url = config.get("local.url", "http://localhost:5001")
elif environment == "STAGING":
url = config.get("staging.url", "https://api.staging.powerml.co")
url = config.get("staging.url", "https://staging.lamini.ai")
else:
url = config.get("production.url", "https://api.lamini.ai")
return url
Expand Down
15 changes: 0 additions & 15 deletions lamini/api/rest_requests.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,21 +34,6 @@ def check_version(resp):
print(resp.headers["X-Warning"])


def retry_once(func):
async def wrapped(*args, **kwargs):
try:
result = await func(*args, **kwargs)
except Exception as e:
if lamini.retry:
result = await func(*args, **kwargs)
else:
raise e
return result

return wrapped


@retry_once
async def make_async_web_request(client, key, url, http_method, json=None):
try:
headers = {
Expand Down
Loading

0 comments on commit 02574e0

Please sign in to comment.