Merge pull request #286 from ArnoChenFx/fix-dump
Use ensure_ascii=False in json.dumps to fix dumping of non-ASCII characters
MadcowD authored Oct 5, 2024
2 parents a899193 + 221939f commit 4509ceb
Showing 10 changed files with 15 additions and 13 deletions.
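For context, json.dumps escapes every non-ASCII character to a \uXXXX sequence by default, so Japanese, accented, or emoji text becomes unreadable (and larger) in the serialized output. Passing ensure_ascii=False keeps the characters verbatim while still producing valid JSON. A minimal illustration, independent of the ell codebase:

import json

data = {"greeting": "こんにちは", "name": "José"}

# Default: every non-ASCII character is escaped to a \uXXXX sequence.
print(json.dumps(data))
# {"greeting": "\u3053\u3093\u306b\u3061\u306f", "name": "Jos\u00e9"}

# With ensure_ascii=False the characters are written as-is; the result is still
# valid JSON and json.loads round-trips it to the same dict.
print(json.dumps(data, ensure_ascii=False))
# {"greeting": "こんにちは", "name": "José"}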
2 changes: 1 addition & 1 deletion src/ell/lmp/_track.py
@@ -219,7 +219,7 @@ def _write_invocation(func, invocation_id, latency_ms, prompt_tokens, completion
 # Write to the blob store
 blob_id = config.store.blob_store.store_blob(
     json.dumps(invocation_contents.model_dump(
-    ), default=str).encode('utf-8'),
+    ), default=str, ensure_ascii=False).encode('utf-8'),
     invocation_id
 )
 invocation_contents = InvocationContents(
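Since this call immediately encodes the JSON string to UTF-8 for the blob store, the change also affects what gets persisted: with ensure_ascii=False the stored bytes contain the actual UTF-8 characters instead of six-byte \uXXXX escapes, which is usually more compact for non-Latin text and round-trips identically. A rough sketch of the effect (illustrative, not taken from the commit):

import json

payload = {"prompt": "日本語のプロンプト"}

escaped = json.dumps(payload, default=str).encode('utf-8')
raw = json.dumps(payload, default=str, ensure_ascii=False).encode('utf-8')

# Each Japanese character costs 6 ASCII bytes when escaped but only 3 bytes as raw UTF-8,
# and both forms decode back to the same object.
print(len(escaped), len(raw))            # the escaped blob is noticeably larger
assert json.loads(escaped) == json.loads(raw)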
2 changes: 1 addition & 1 deletion src/ell/lmp/tool.py
@@ -54,7 +54,7 @@ def wrapper(
     elif isinstance(result, list) and all(isinstance(c, ContentBlock) for c in result):
         content_results = result
     else:
-        content_results = [ContentBlock(text=_lstr(json.dumps(result),origin_trace=_invocation_origin))]
+        content_results = [ContentBlock(text=_lstr(json.dumps(result, ensure_ascii=False),origin_trace=_invocation_origin))]
 except TypeError as e:
     raise TypeError(f"Failed to convert tool use result to ContentBlock: {e}. Tools must return json serializable objects. or a list of ContentBlocks.")
 # XXX: Need to support images and other content types somehow. We should look for images inside of the the result and then go from there.
2 changes: 1 addition & 1 deletion src/ell/providers/anthropic.py
@@ -188,7 +188,7 @@ def _content_block_to_anthropic_format(content_block: ContentBlock):
 if (image := content_block.image): return serialize_image_for_anthropic(image)
 elif ((text := content_block.text) is not None): return dict(type="text", text=text)
 elif (parsed := content_block.parsed):
-    return dict(type="text", text=json.dumps(parsed.model_dump()))
+    return dict(type="text", text=json.dumps(parsed.model_dump(), ensure_ascii=False))
 elif (tool_call := content_block.tool_call):
     return dict(
         type="tool_use",
2 changes: 1 addition & 1 deletion src/ell/providers/bedrock.py
@@ -198,7 +198,7 @@ def content_block_to_bedrock_format(content_block: ContentBlock) -> Dict[str, An
 elif content_block.parsed:
     return {
         "type": "text",
-        "text": json.dumps(content_block.parsed.model_dump())
+        "text": json.dumps(content_block.parsed.model_dump(), ensure_ascii=False)
     }
 elif content_block.tool_call:
     return {
2 changes: 1 addition & 1 deletion src/ell/providers/openai.py
@@ -64,7 +64,7 @@ def translate_to_provider(self, ell_call : EllCallParams) -> Dict[str, Any]:
type="function",
function=dict(
name=tool_call.tool.__name__,
arguments=json.dumps(tool_call.params.model_dump())
arguments=json.dumps(tool_call.params.model_dump(), ensure_ascii=False)
)
) for tool_call in tool_calls ],
role="assistant",
2 changes: 1 addition & 1 deletion src/ell/stores/sql.py
@@ -21,7 +21,7 @@ class SQLStore(ell.store.Store):
 def __init__(self, db_uri: str, blob_store: Optional[ell.store.BlobStore] = None):
     self.engine = create_engine(db_uri,
                                 json_serializer=lambda obj: json.dumps(pydantic_ltype_aware_cattr.unstructure(obj),
-                                    sort_keys=True, default=repr))
+                                    sort_keys=True, default=repr, ensure_ascii=False))

     SQLModel.metadata.create_all(self.engine)
     self.open_files: Dict[str, Dict[str, Any]] = {}
4 changes: 3 additions & 1 deletion src/ell/types/studio.py
@@ -139,7 +139,9 @@ def should_externalize(self) -> bool:
 ]

 total_size = sum(
-    len(json.dumps(field, default=(lambda x: json.dumps(x.model_dump(), default=str) if isinstance(x, BaseModel) else str(x))).encode('utf-8')) for field in json_fields if field is not None
+    len(json.dumps(field, default=(lambda x: json.dumps(x.model_dump(), default=str, ensure_ascii=False)
+                                   if isinstance(x, BaseModel) else str(x)), ensure_ascii=False).encode('utf-8'))
+    for field in json_fields if field is not None
 )
 # print("total_size", total_size)

6 changes: 3 additions & 3 deletions src/ell/util/serialization.py
@@ -91,8 +91,8 @@ def handle_complex_types(obj):


 def compute_state_cache_key(ipstr, fn_closure):
-    _global_free_vars_str = f"{json.dumps(get_immutable_vars(fn_closure[2]), sort_keys=True, default=repr)}"
-    _free_vars_str = f"{json.dumps(get_immutable_vars(fn_closure[3]), sort_keys=True, default=repr)}"
+    _global_free_vars_str = f"{json.dumps(get_immutable_vars(fn_closure[2]), sort_keys=True, default=repr, ensure_ascii=False)}"
+    _free_vars_str = f"{json.dumps(get_immutable_vars(fn_closure[3]), sort_keys=True, default=repr, ensure_ascii=False)}"
     state_cache_key = hashlib.sha256(f"{ipstr}{_global_free_vars_str}{_free_vars_str}".encode('utf-8')).hexdigest()
     return state_cache_key

@@ -103,7 +103,7 @@ def prepare_invocation_params(params):
 cleaned_invocation_params = pydantic_ltype_aware_cattr.unstructure(invocation_params)

 # Thisis because we wneed the caching to work on the hash of a cleaned and serialized object.
-jstr = json.dumps(cleaned_invocation_params, sort_keys=True, default=repr)
+jstr = json.dumps(cleaned_invocation_params, sort_keys=True, default=repr, ensure_ascii=False)

 consumes = set()
 import re
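One side effect worth noting (an observation about the diff, not something stated in the commit): compute_state_cache_key and prepare_invocation_params hash the serialized string, so the new serialization yields different digests for any closure or invocation params containing non-ASCII text. A hedged sketch of why:

import hashlib
import json

free_vars = {"system_prompt": "Répondez en français."}

old_key = hashlib.sha256(json.dumps(free_vars, sort_keys=True, default=repr).encode('utf-8')).hexdigest()
new_key = hashlib.sha256(json.dumps(free_vars, sort_keys=True, default=repr, ensure_ascii=False).encode('utf-8')).hexdigest()

# "R\u00e9pondez..." and "Répondez..." serialize to different strings, so the digests differ;
# any cache or version key derived from the string changes for non-ASCII inputs.
print(old_key == new_key)  # False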
2 changes: 1 addition & 1 deletion x/openai_realtime/src/openai_realtime/api.py
@@ -77,5 +77,5 @@ def send(self, event_name, data=None):
 self.dispatch("client.*", event)
 self.log("sent:", event_name, event)

-asyncio.create_task(self.ws.send(json.dumps(event)))
+asyncio.create_task(self.ws.send(json.dumps(event, ensure_ascii=False)))
 return True
4 changes: 2 additions & 2 deletions x/openai_realtime/src/openai_realtime/client.py
@@ -243,15 +243,15 @@ async def _call_tool(self, tool):
     'item': {
         'type': 'function_call_output',
         'call_id': tool['call_id'],
-        'output': json.dumps(result)
+        'output': json.dumps(result, ensure_ascii=False)
     }
 })
 except Exception as e:
     self.realtime.send('conversation.item.create', {
         'item': {
             'type': 'function_call_output',
             'call_id': tool['call_id'],
-            'output': json.dumps({'error': str(e)})
+            'output': json.dumps({'error': str(e)}, ensure_ascii=False)
         }
     })
 self.create_response()
