Commit

first commit

wangyoucao577 committed Nov 15, 2024
0 parents commit 6c57f56
Showing 11 changed files with 415 additions and 0 deletions.
29 changes: 29 additions & 0 deletions README.md
@@ -0,0 +1,29 @@
# default_async_llm_extension_python

<!-- brief introduction for the extension -->

## Features

<!-- main features introduction -->

- xxx feature

## API

Refer to the `api` definition in [manifest.json](manifest.json) and the default property values in [property.json](property.json).

<!-- An additional API.md can be referenced if a more detailed introduction is needed -->

## Development

### Build

<!-- build dependencies and steps -->

### Unit test

<!-- how to run unit tests for the extension -->

## Misc

<!-- others if applicable -->
29 changes: 29 additions & 0 deletions README.md.tent
@@ -0,0 +1,29 @@
# {{package_name}}

<!-- brief introduction for the extension -->

## Features

<!-- main features introduction -->

- xxx feature

## API

Refer to the `api` definition in [manifest.json](manifest.json) and the default property values in [property.json](property.json).

<!-- An additional API.md can be referenced if a more detailed introduction is needed -->

## Development

### Build

<!-- build dependencies and steps -->

### Unit test

<!-- how to run unit tests for the extension -->

## Misc

<!-- others if applicable -->
6 changes: 6 additions & 0 deletions __init__.py
@@ -0,0 +1,6 @@
#
# This file is part of TEN Framework, an open source project.
# Licensed under the Apache License, Version 2.0.
# See the LICENSE file for more information.
#
from . import addon
19 changes: 19 additions & 0 deletions addon.py
@@ -0,0 +1,19 @@
#
# This file is part of TEN Framework, an open source project.
# Licensed under the Apache License, Version 2.0.
# See the LICENSE file for more information.
#
from ten import (
    Addon,
    register_addon_as_extension,
    TenEnv,
)
from .extension import DefaultAsyncLLMExtension


@register_addon_as_extension("default_async_llm_extension_python")
class DefaultAsyncLLMExtensionAddon(Addon):
    def on_create_instance(self, ten_env: TenEnv, name: str, context) -> None:
        ten_env.log_info("on_create_instance")
        ten_env.on_create_instance_done(
            DefaultAsyncLLMExtension(name), context)
19 changes: 19 additions & 0 deletions addon.py.tent
@@ -0,0 +1,19 @@
#
# This file is part of TEN Framework, an open source project.
# Licensed under the Apache License, Version 2.0.
# See the LICENSE file for more information.
#
from ten import (
    Addon,
    register_addon_as_extension,
    TenEnv,
)
from .extension import {{class_name_prefix}}Extension


@register_addon_as_extension("{{package_name}}")
class {{class_name_prefix}}ExtensionAddon(Addon):
    def on_create_instance(self, ten_env: TenEnv, name: str, context) -> None:
        ten_env.log_info("on_create_instance")
        ten_env.on_create_instance_done(
            {{class_name_prefix}}Extension(name), context)
49 changes: 49 additions & 0 deletions extension.py
@@ -0,0 +1,49 @@
#
# This file is part of TEN Framework, an open source project.
# Licensed under the Apache License, Version 2.0.
# See the LICENSE file for more information.
#
from ten import AsyncTenEnv
from ten_ai_base import (
    AsyncLLMBaseExtension, LLMCallCompletionArgs, LLMDataCompletionArgs, LLMToolMetadata, BaseConfig
)
from dataclasses import dataclass


@dataclass
class DefaultAsyncLLMConfig(BaseConfig):
    model: str = ""
    # TODO: add extra config fields here


class DefaultAsyncLLMExtension(AsyncLLMBaseExtension):
    async def on_start(self, ten_env: AsyncTenEnv) -> None:
        await super().on_start(ten_env)

        # initialize configuration
        self.config = DefaultAsyncLLMConfig.create(ten_env=ten_env)
        ten_env.log_info(f"config: {self.config}")

        """Implement this method to construct and start your resources."""
        ten_env.log_debug("TODO: on_start")

    async def on_stop(self, ten_env: AsyncTenEnv) -> None:
        await super().on_stop(ten_env)

        """Implement this method to stop and destruct your resources."""
        ten_env.log_debug("TODO: on_stop")

    async def on_call_chat_completion(self, ten_env: AsyncTenEnv, **kargs: LLMCallCompletionArgs) -> any:
        """Called when a chat completion is requested by cmd call. Implement this method to process the chat completion."""
        ten_env.log_debug("TODO: on_call_chat_completion")

    async def on_data_chat_completion(self, ten_env: AsyncTenEnv, **kargs: LLMDataCompletionArgs) -> None:
        """
        Called when a chat completion is requested by data input. Implement this method to process the chat completion.
        Note that this method is stream-based, and it should consider supporting local context caching.
        """
        ten_env.log_debug("TODO: on_data_chat_completion")

    async def on_tools_update(self, ten_env: AsyncTenEnv, tool: LLMToolMetadata) -> None:
        """Called when a new tool is registered. Implement this method to process the new tool."""
        ten_env.log_debug("TODO: on_tools_update")
49 changes: 49 additions & 0 deletions extension.py.tent
@@ -0,0 +1,49 @@
#
# This file is part of TEN Framework, an open source project.
# Licensed under the Apache License, Version 2.0.
# See the LICENSE file for more information.
#
from ten import AsyncTenEnv
from ten_ai_base import (
    AsyncLLMBaseExtension, LLMCallCompletionArgs, LLMDataCompletionArgs, LLMToolMetadata, BaseConfig
)
from dataclasses import dataclass


@dataclass
class {{class_name_prefix}}Config(BaseConfig):
    model: str = ""
    # TODO: add extra config fields here


class {{class_name_prefix}}Extension(AsyncLLMBaseExtension):
    async def on_start(self, ten_env: AsyncTenEnv) -> None:
        await super().on_start(ten_env)

        # initialize configuration
        self.config = {{class_name_prefix}}Config.create(ten_env=ten_env)
        ten_env.log_info(f"config: {self.config}")

        """Implement this method to construct and start your resources."""
        ten_env.log_debug("TODO: on_start")

    async def on_stop(self, ten_env: AsyncTenEnv) -> None:
        await super().on_stop(ten_env)

        """Implement this method to stop and destruct your resources."""
        ten_env.log_debug("TODO: on_stop")

    async def on_call_chat_completion(self, ten_env: AsyncTenEnv, **kargs: LLMCallCompletionArgs) -> any:
        """Called when a chat completion is requested by cmd call. Implement this method to process the chat completion."""
        ten_env.log_debug("TODO: on_call_chat_completion")

    async def on_data_chat_completion(self, ten_env: AsyncTenEnv, **kargs: LLMDataCompletionArgs) -> None:
        """
        Called when a chat completion is requested by data input. Implement this method to process the chat completion.
        Note that this method is stream-based, and it should consider supporting local context caching.
        """
        ten_env.log_debug("TODO: on_data_chat_completion")

    async def on_tools_update(self, ten_env: AsyncTenEnv, tool: LLMToolMetadata) -> None:
        """Called when a new tool is registered. Implement this method to process the new tool."""
        ten_env.log_debug("TODO: on_tools_update")
107 changes: 107 additions & 0 deletions manifest.json
@@ -0,0 +1,107 @@
{
  "type": "extension",
  "name": "default_async_llm_extension_python",
  "version": "0.1.0",
  "dependencies": [
    {
      "type": "system",
      "name": "ten_ai_base",
      "version": "0.1.0"
    }
  ],
  "package": {
    "include": [
      "manifest.json",
      "property.json",
      "requirements.txt",
      "**.tent",
      "**.py",
      "README.md"
    ]
  },
  "api": {
    "property": {
      "model": {
        "type": "string"
      }
    },
    "cmd_in": [
      {
        "name": "tool_register",
        "property": {
          "tool": {
            "type": "string"
          }
        },
        "required": [
          "tool"
        ]
      },
      {
        "name": "call_chat_completion",
        "property": {
          "messages": {
            "type": "string"
          },
          "stream": {
            "type": "bool"
          }
        },
        "required": [
          "messages"
        ],
        "result": {
          "property": {
            "text": {
              "type": "string"
            }
          },
          "required": [
            "text"
          ]
        }
      },
      {
        "name": "flush"
      }
    ],
    "cmd_out": [
      {
        "name": "flush"
      }
    ],
    "data_in": [
      {
        "name": "text_data",
        "property": {
          "text": {
            "type": "string"
          },
          "is_final": {
            "type": "bool"
          }
        },
        "required": [
          "text"
        ]
      }
    ],
    "data_out": [
      {
        "name": "text_data",
        "property": {
          "text": {
            "type": "string"
          },
          "end_of_segment": {
            "type": "bool"
          }
        },
        "required": [
          "text",
          "end_of_segment"
        ]
      }
    ]
  }
}
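
For orientation, the `call_chat_completion` command above takes a `messages` string (plus an optional `stream` bool), and its result must carry a `text` property. A peer extension could invoke it roughly as sketched below, assuming the common TEN Python cmd API (`Cmd.create`, `set_property_*`, `send_cmd`); the exact return shape of `send_cmd` varies between framework versions, so treat the result handling as an assumption.

```python
# Rough caller-side sketch: invoke the call_chat_completion cmd declared in
# manifest.json from another extension. Names and return handling are
# assumptions, not part of this commit.
import json

from ten import AsyncExtension, AsyncTenEnv, Cmd


class LLMCallerSketch(AsyncExtension):
    async def ask_llm(self, ten_env: AsyncTenEnv, prompt: str) -> str:
        cmd = Cmd.create("call_chat_completion")
        # "messages" is declared as a plain string, so serialize the chat
        # history as JSON text.
        cmd.set_property_string(
            "messages", json.dumps([{"role": "user", "content": prompt}])
        )
        cmd.set_property_bool("stream", False)

        # Assumed here: send_cmd resolves to the CmdResult produced by the
        # LLM extension; adjust if your TEN version returns a tuple instead.
        result = await ten_env.send_cmd(cmd)
        return result.get_property_string("text")
```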