Merge pull request #327 from StampyAI/fix-error-log-bugs
Fix bugs from error log
mruwnik authored Nov 28, 2023
2 parents 84cfd1c + 166ce27 commit e0aa487
Showing 2 changed files with 23 additions and 19 deletions.
39 changes: 21 additions & 18 deletions modules/chatgpt.py
@@ -155,24 +155,27 @@ async def chatgpt_chat(self, message: ServiceMessage) -> Response:
            self.class_name,
            msg=f"sending chat prompt to chatgpt, engine {engine} ({engine.description})",
        )
-        chatcompletion = cast(
-            OpenAIObject,
-            openai.ChatCompletion.create(model=str(engine), messages=messages),
-        )
-        if chatcompletion.choices:
-            response = chatcompletion.choices[0].message.content
-
-            # sometimes the response starts with "Stampy says:" or responds or replies etc, which we don't want
-            response = re.sub(r"^[sS]tampy\ ?[a-zA-Z]{,15}:\s?", "", response)
-
-            self.log.info(self.class_name, response=response)
-
-            if response:
-                return Response(
-                    confidence=10,
-                    text=f"{im}{response}{im}",
-                    why="ChatGPT made me say it!",
-                )
+        try:
+            chatcompletion = cast(
+                OpenAIObject,
+                openai.ChatCompletion.create(model=str(engine), messages=messages),
+            )
+            if chatcompletion.choices:
+                response = chatcompletion.choices[0].message.content
+
+                # sometimes the response starts with "Stampy says:" or responds or replies etc, which we don't want
+                response = re.sub(r"^[sS]tampy\ ?[a-zA-Z]{,15}:\s?", "", response)
+
+                self.log.info(self.class_name, response=response)
+
+                if response:
+                    return Response(
+                        confidence=10,
+                        text=f"{im}{response}{im}",
+                        why="ChatGPT made me say it!",
+                    )
+        except openai.error.Timeout:
+            pass
        return Response()

    def __str__(self):
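For context, here is a minimal standalone sketch of the pattern this hunk introduces: the ChatCompletion call is wrapped in try/except so a request timeout degrades to an empty reply instead of surfacing as an unhandled exception in the error log. This assumes the pre-1.0 openai Python package (where openai.ChatCompletion.create and openai.error.Timeout exist); the helper name, signature, and empty-string fallback are illustrative, not code from the module.

from typing import cast

import openai
from openai.openai_object import OpenAIObject


def ask_chatgpt(engine: str, messages: list[dict]) -> str:
    # Hypothetical helper mirroring the diff above: a timed-out request
    # returns an empty string instead of raising into the caller.
    try:
        chatcompletion = cast(
            OpenAIObject,
            openai.ChatCompletion.create(model=engine, messages=messages),
        )
        if chatcompletion.choices:
            return chatcompletion.choices[0].message.content
    except openai.error.Timeout:
        # The bug being fixed: timeouts previously propagated out of chatgpt_chat.
        pass
    return ""

Called as ask_chatgpt("gpt-3.5-turbo", [{"role": "user", "content": "hi"}]), a timeout yields "", which the caller can treat the same way chatgpt_chat treats an empty Response().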
3 changes: 2 additions & 1 deletion servicemodules/discord.py
@@ -291,7 +291,8 @@ async def on_message(
                    sent.append(await message.channel.send(chunk))
                elif isinstance(top_response.text, Iterable):
                    for chunk in top_response.text:
-                        sent.append(await message.channel.send(chunk))
+                        if chunk:
+                            sent.append(await message.channel.send(chunk))
                    why_traceback.append("Responded with that response!")
                for m in sent:
                    self.messages[str(m.id)] = {
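The discord.py change adds a guard for empty chunks: calling channel.send with empty content makes discord.py raise an HTTPException (Discord's "Cannot send an empty message" error), which is presumably one of the entries in the error log this PR addresses. A small sketch of the same guard in isolation, with a made-up helper name and signature:

from typing import Iterable, List

import discord


async def send_chunks(
    channel: discord.abc.Messageable, chunks: Iterable[str]
) -> List[discord.Message]:
    # Hypothetical helper: send each chunk, skipping falsy ones (empty strings,
    # None) so discord.py is never asked to send an empty message body.
    sent = []
    for chunk in chunks:
        if chunk:  # the guard added by this commit
            sent.append(await channel.send(chunk))
    return sent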
