Run precommit
sternakt committed Nov 14, 2024
1 parent f6015aa · commit 6d7241d
Showing 6 changed files with 23 additions and 39 deletions.
context_leakage_team/workflow/llm_config.py (1 addition, 1 deletion)

@@ -8,4 +8,4 @@
         }
     ],
     "temperature": 0.8,
-}
\ No newline at end of file
+}
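
The hunk shows only the tail of the dict, and the change itself is just the missing end-of-file newline that pre-commit enforces (likely its end-of-file-fixer hook). For orientation, a hypothetical reconstruction of the whole llm_config consistent with the visible lines; the model name and API-key lookup are assumptions, not part of the commit:

import os

# Hypothetical reconstruction: only "temperature" and the closing
# brackets are actually visible in the hunk above.
llm_config = {
    "config_list": [
        {
            "model": "gpt-4o-mini",  # assumed; the real model name is not shown
            "api_key": os.getenv("OPENAI_API_KEY"),  # assumed key lookup
        }
    ],
    "temperature": 0.8,
}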
context_leakage_team/workflow/scenarios/__init__.py (1 addition, 1 deletion)

@@ -1,4 +1,4 @@
 from .base64 import Base64ContextLeak
 from .simple import SimpleContextLeak
 
-__all__ = ["Base64ContextLeak", "SimpleContextLeak"]
\ No newline at end of file
+__all__ = ["Base64ContextLeak", "SimpleContextLeak"]
context_leakage_team/workflow/scenarios/base64.py (6 additions, 13 deletions)

@@ -17,10 +17,10 @@
     generate_markdown_report,
 )
 from context_leakage_team.tools.model_adapter import send_msg_to_model
 
 from ..llm_config import llm_config
 from .scenario import Scenario
 
-
+
 tested_model_confidential = (
     Path(__file__).parent
     / ".."
@@ -40,12 +40,7 @@
 )
 
 context_leak_log_save_path = (
-    Path(__file__).parent
-    / ".."
-    / ".."
-    / ".."
-    / "reports"
-    / "base64_context_leak.pd"
+    Path(__file__).parent / ".." / ".." / ".." / "reports" / "base64_context_leak.pd"
 )
 
 
@@ -72,9 +67,6 @@ def send_msg_to_model_encode_sensitive(input_data: Base64EncodeInput) -> str:
 
 
 class Base64ContextLeak(Scenario):
-    def __init__(self):
-        pass
-
     @classmethod
     def run(cls, ui: UI, params: dict[str, Any]) -> str:
         def is_termination_msg(msg: dict[str, Any]) -> bool:
@@ -160,7 +152,9 @@ def is_termination_msg(msg: dict[str, Any]) -> bool:
             speaker_transitions_type="allowed",
         )
 
-        group_chat_manager = GroupChatManager(groupchat=group_chat, llm_config=llm_config)
+        group_chat_manager = GroupChatManager(
+            groupchat=group_chat, llm_config=llm_config
+        )
 
         chat_result = context_leak_classifier.initiate_chat(
             group_chat_manager,
@@ -170,11 +164,10 @@ def is_termination_msg(msg: dict[str, Any]) -> bool:
 
         return chat_result.summary  # type: ignore[no-any-return]
 
-
     @classmethod
     def report(cls, ui: UI, params: dict[str, Any]) -> None:
         ui.text_message(
            sender="Context leakage team",
            recipient="User",
            body=generate_markdown_report(log_path=context_leak_log_save_path),
-        )
\ No newline at end of file
+        )
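
The collapsed one-liner is pure formatting (it now fits the formatter's line limit), but the path logic is worth spelling out: it climbs three directories above the module before descending into reports/. A minimal sketch of an equivalent spelling using Path.parents, with a hypothetical repository location; this is an illustration, not what the commit changes:

from pathlib import Path

# Hypothetical location of the module inside a checkout.
module_file = Path("/repo/context_leakage_team/workflow/scenarios/base64.py")

# The chained form from the diff: up three levels, then into reports/.
chained = module_file.parent / ".." / ".." / ".." / "reports" / "base64_context_leak.pd"

# Equivalent direct form: parents[3] of the file is the repository root here.
direct = module_file.parents[3] / "reports" / "base64_context_leak.pd"

assert chained.resolve() == direct.resolve()
print(direct)  # /repo/reports/base64_context_leak.pd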
context_leakage_team/workflow/scenarios/scenario.py (2 additions, 7 deletions)

@@ -4,13 +4,8 @@
 
 
 class Scenario(Protocol):
-    def __init__(self):
-        ...
-
     @classmethod
-    def run(cls, ui: UI, params: dict[str, Any]) -> str:
-        ...
+    def run(cls, ui: UI, params: dict[str, Any]) -> str: ...
 
     @classmethod
-    def report(cls, ui: UI, params: dict[str, Any]) -> None:
-        ...
+    def report(cls, ui: UI, params: dict[str, Any]) -> None: ...
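
Dropping __init__ from the Protocol is more than tidying: the scenarios are used purely through classmethods (workflow.py stores the classes themselves in a dict[str, Scenario]), so a constructor signature has nothing to constrain. A minimal sketch of the structural typing this enables; the DemoLeak class and execute helper are illustrative, not from the repository:

from typing import Any, Protocol


class Scenario(Protocol):
    @classmethod
    def run(cls, ui: Any, params: dict[str, Any]) -> str: ...


class DemoLeak:  # no inheritance needed; matching the shape is enough
    @classmethod
    def run(cls, ui: Any, params: dict[str, Any]) -> str:
        return "demo summary"


def execute(scenario: Scenario) -> str:
    # DemoLeak satisfies Scenario structurally, without subclassing it.
    return scenario.run(ui=None, params={})


print(execute(DemoLeak()))  # demo summary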
context_leakage_team/workflow/scenarios/simple.py (7 additions, 13 deletions)

@@ -15,10 +15,10 @@
     generate_markdown_report,
 )
 from context_leakage_team.tools.model_adapter import send_msg_to_model
 
 from ..llm_config import llm_config
 from .scenario import Scenario
 
 
 tested_model_confidential = (
     Path(__file__).parent
     / ".."
@@ -37,18 +37,11 @@
 )
 
 context_leak_log_save_path = (
-    Path(__file__).parent
-    / ".."
-    / ".."
-    / ".."
-    / "reports"
-    / "simple_context_leak.pd"
+    Path(__file__).parent / ".." / ".." / ".." / "reports" / "simple_context_leak.pd"
 )
 
-class SimpleContextLeak(Scenario):
-    def __init__(self):
-        pass
-
+
+class SimpleContextLeak(Scenario):
     @classmethod
     def run(cls, ui: UI, params: dict[str, Any]) -> str:
         def is_termination_msg(msg: dict[str, Any]) -> bool:
@@ -117,7 +110,9 @@ def is_termination_msg(msg: dict[str, Any]) -> bool:
             speaker_transitions_type="allowed",
         )
 
-        group_chat_manager = GroupChatManager(groupchat=group_chat, llm_config=llm_config)
+        group_chat_manager = GroupChatManager(
+            groupchat=group_chat, llm_config=llm_config
+        )
 
         chat_result = context_leak_classifier.initiate_chat(
             group_chat_manager,
@@ -127,11 +122,10 @@ def is_termination_msg(msg: dict[str, Any]) -> bool:
 
         return chat_result.summary  # type: ignore [no-any-return]
 
-
    @classmethod
    def report(cls, ui: UI, params: dict[str, Any]) -> None:
        ui.text_message(
            sender="Context leakage team",
            recipient="User",
            body=generate_markdown_report(log_path=context_leak_log_save_path),
-        )
\ No newline at end of file
+        )
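
Both scenario files drive the same multi-agent loop, and the wrapped GroupChatManager call is purely cosmetic (the one-liner exceeded the formatter's line length). A minimal sketch of the surrounding AutoGen wiring under stated assumptions: the agent names, kickoff message, and termination predicate are placeholders, since the real definitions sit outside the visible hunks:

from autogen import AssistantAgent, GroupChat, GroupChatManager

llm_config = {"config_list": [{"model": "gpt-4o-mini"}], "temperature": 0.8}  # assumed


def is_termination_msg(msg: dict) -> bool:
    # Placeholder predicate; the repository's real check is not visible here.
    return "TERMINATE" in (msg.get("content") or "")


prompt_generator = AssistantAgent(
    name="Prompt_generator",  # placeholder agent
    llm_config=llm_config,
)
context_leak_classifier = AssistantAgent(
    name="Context_leak_classifier",  # placeholder agent
    llm_config=llm_config,
    is_termination_msg=is_termination_msg,
)

group_chat = GroupChat(
    agents=[prompt_generator, context_leak_classifier],
    messages=[],
    max_round=10,
)
group_chat_manager = GroupChatManager(groupchat=group_chat, llm_config=llm_config)

chat_result = context_leak_classifier.initiate_chat(
    group_chat_manager,
    message="Attempt to leak the tested model's context.",  # placeholder kickoff
)
print(chat_result.summary)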
context_leakage_team/workflow/workflow.py (6 additions, 4 deletions)

@@ -2,15 +2,17 @@
 
 from fastagency import UI
 from fastagency.runtimes.autogen import AutoGenWorkflows
+
 from . import scenarios
 from .scenarios.scenario import Scenario
 
 wf = AutoGenWorkflows()
 
-context_leak_scenarios: dict[str, Scenario]= {
+context_leak_scenarios: dict[str, Scenario] = {
     name: getattr(scenarios, name) for name in scenarios.__all__
 }
 
+
 @wf.register(  # type: ignore[misc]
     name="Context leak attempt",
     description="Attempt to leak context from tested LLM model.",
@@ -19,11 +21,12 @@ def context_leak_chat(ui: UI, params: dict[str, Any]) -> str:
     context_leak_scenario = ui.multiple_choice(
         sender="Context leakage team",
         prompt="Please select the type of context leakage you would like to attempt.",
-        choices=[key for key in context_leak_scenarios.keys()],
+        choices=list(context_leak_scenarios),
     )
 
     return context_leak_scenarios[context_leak_scenario].run(ui, params)
 
+
 @wf.register(  # type: ignore[misc]
     name="Context leak attempt report",
     description="Report on the context leak attempt.",
@@ -32,8 +35,7 @@ def context_leak_report(ui: UI, params: dict[str, Any]) -> None:
     context_leak_scenario = ui.multiple_choice(
         sender="Context leakage team",
         prompt="Please select the type of context leakage you would like to report on.",
-        choices=[key for key in context_leak_scenarios.keys()],
+        choices=list(context_leak_scenarios),
     )
 
     context_leak_scenarios[context_leak_scenario].report(ui, params)
-
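
The choices change swaps a redundant comprehension for the idiomatic form: iterating a dict already yields its keys, so list(context_leak_scenarios) and [key for key in context_leak_scenarios.keys()] build the same list (flake8-comprehensions-style lint rules flag exactly this pattern). A quick demonstration with stand-in values:

scenarios = {"Base64ContextLeak": object(), "SimpleContextLeak": object()}

assert list(scenarios) == [key for key in scenarios.keys()]
print(list(scenarios))  # ['Base64ContextLeak', 'SimpleContextLeak']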
