From d480db3a7364d49dc775caea292259a5a8b4af71 Mon Sep 17 00:00:00 2001
From: mostlikely4r
Date: Tue, 19 Nov 2024 20:01:58 +0100
Subject: [PATCH] AI chat: When a response contains the bot's own name it
 properly splits up the responses.

---
 playerbot/PlayerbotLLMInterface.cpp      |  5 ++--
 playerbot/strategy/actions/SayAction.cpp | 32 ++++++++++++++++++++------
 2 files changed, 27 insertions(+), 10 deletions(-)

diff --git a/playerbot/PlayerbotLLMInterface.cpp b/playerbot/PlayerbotLLMInterface.cpp
index 39742f72..c2a16840 100644
--- a/playerbot/PlayerbotLLMInterface.cpp
+++ b/playerbot/PlayerbotLLMInterface.cpp
@@ -161,12 +161,11 @@ std::vector<std::string> PlayerbotLLMInterface::ParseResponse(const std::string&
         subString += c;
 
-        if ((subString.size() > 1 && subString.back() == 'n' && subString[subString.size() - 2] == '\\') || (subString.size() > 100 && c == '.') || (subString.size() > 200 && c == ' ') || subString.size() > 250)
+        if (subString.back() == '|' || (subString.size() > 100 && c == '.') || (subString.size() > 200 && c == ' ') || subString.size() > 250)
         {
-            if (subString.back() == 'n' && subString[subString.size() - 2] == '\\')
+            if (subString.back() == '|')
             {
                 subString.pop_back();
-                subString.pop_back();
             }
 
             if(subString.size())
                 responses.push_back(subString);
diff --git a/playerbot/strategy/actions/SayAction.cpp b/playerbot/strategy/actions/SayAction.cpp
index d311375e..33213a88 100644
--- a/playerbot/strategy/actions/SayAction.cpp
+++ b/playerbot/strategy/actions/SayAction.cpp
@@ -192,15 +192,17 @@ void ChatReplyAction::ChatReplyDo(Player* bot, uint32 type, uint32 guid1, uint32
 {
     PlayerbotAI* ai = bot->GetPlayerbotAI();
     AiObjectContext* context = ai->GetAiObjectContext();
+    std::string botName = bot->GetName();
+    std::string playerName = player->GetName();
 
     std::string llmContext = AI_VALUE(std::string, "manual string::llmcontext");
 
     std::map<std::string, std::string> placeholders;
-    placeholders["<bot name>"] = bot->GetName();
+    placeholders["<bot name>"] = botName;
     placeholders["<bot level>"] = std::to_string(bot->GetLevel());
     placeholders["<bot class>"] = ai->GetChatHelper()->formatClass(bot->getClass());
     placeholders["<bot race>"] = ai->GetChatHelper()->formatRace(bot->getRace());
-    placeholders["<player name>"] = player->GetName();
+    placeholders["<player name>"] = playerName;
     placeholders["<player level>"] = std::to_string(player->GetLevel());
     placeholders["<player class>"] = ai->GetChatHelper()->formatClass(player->getClass());
     placeholders["<player race>"] = ai->GetChatHelper()->formatRace(player->getRace());
@@ -280,8 +282,6 @@ void ChatReplyAction::ChatReplyDo(Player* bot, uint32 type, uint32 guid1, uint32
 
     json = BOT_TEXT2(json, placeholders);
 
-    std::string playerName = player->GetName();
-
     uint32 type = CHAT_MSG_WHISPER;
 
     switch (chatChannelSource)
@@ -312,7 +312,10 @@ void ChatReplyAction::ChatReplyDo(Player* bot, uint32 type, uint32 guid1, uint32
         }
     }
 
-    std::future<std::vector<WorldPacket>> futurePackets = std::async([type, bot, playerName, json] {
+    WorldSession* session = bot->GetSession();
+
+
+    std::future<std::vector<WorldPacket>> futurePackets = std::async([type, botName, playerName, json] {
 
         WorldPacket packet_template(CMSG_MESSAGECHAT, 4096);
 
@@ -326,13 +329,28 @@ void ChatReplyAction::ChatReplyDo(Player* bot, uint32 type, uint32 guid1, uint32
 
         std::string response = PlayerbotLLMInterface::Generate(json);
 
+        size_t pos;
+
         if (sPlayerbotAIConfig.llmPreventTalkingForPlayer)
        {
-            size_t pos = response.find(playerName + ":");
+            pos = response.find(playerName + ":");
             if (pos != std::string::npos)
                 response = response.substr(0, pos) + sPlayerbotAIConfig.llmResponseEndPattern;
         }
 
+        pos = 0;
+
+        while ((pos = response.find(botName + ":", pos)) != std::string::npos) {
+            response.replace(pos, botName.length() + 1, "|");
+            pos += 1;
+        }
+
+        pos = 0;
+        while ((pos = response.find("/n", pos)) != std::string::npos) {
+            response.replace(pos, 2, "|");
+            pos += 1;
+        }
+
         std::vector<std::string> lines = PlayerbotLLMInterface::ParseResponse(response, sPlayerbotAIConfig.llmResponseStartPattern, sPlayerbotAIConfig.llmResponseEndPattern);
 
         std::vector<WorldPacket> packets;
@@ -345,7 +363,7 @@ void ChatReplyAction::ChatReplyDo(Player* bot, uint32 type, uint32 guid1, uint32
         return packets;
         });
 
-    ai->SendDelayedPacket(bot->GetSession(), std::move(futurePackets));
+    ai->SendDelayedPacket(session, std::move(futurePackets));
 
     SET_AI_VALUE(std::string, "manual string::llmcontext", llmContext);
 }
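Below is a minimal standalone sketch, not part of the patch, of the replace-then-split flow it introduces. The bot name "Morgaine", the sample response text, and the splitOnDelimiter helper are invented for illustration; splitOnDelimiter is a simplified stand-in for PlayerbotLLMInterface::ParseResponse, which additionally splits on length and on the configured start/end patterns.

// Standalone illustration only: mimics converting "<bot name>:" prefixes and
// "/n" markers into '|' delimiters, then splitting the result into separate
// chat lines. Names and sample text are hypothetical.
#include <iostream>
#include <string>
#include <vector>

static std::vector<std::string> splitOnDelimiter(const std::string& text, char delim)
{
    std::vector<std::string> parts;
    std::string current;
    for (char c : text)
    {
        if (c == delim)
        {
            if (!current.empty())
                parts.push_back(current);
            current.clear();
        }
        else
            current += c;
    }
    if (!current.empty())
        parts.push_back(current);
    return parts;
}

int main()
{
    std::string botName = "Morgaine";                                        // hypothetical bot name
    std::string response = "Morgaine: Hello!/nMorgaine: Need a group?";      // hypothetical LLM output

    // Replace every "<bot name>:" prefix with the '|' delimiter, as the patch does.
    size_t pos = 0;
    while ((pos = response.find(botName + ":", pos)) != std::string::npos)
    {
        response.replace(pos, botName.length() + 1, "|");
        pos += 1;
    }

    // Reset before the second scan; find() starting at npos never matches.
    pos = 0;
    while ((pos = response.find("/n", pos)) != std::string::npos)
    {
        response.replace(pos, 2, "|");
        pos += 1;
    }

    // Each '|'-separated chunk becomes its own chat message.
    for (const std::string& line : splitOnDelimiter(response, '|'))
        std::cout << line << '\n';   // prints " Hello!" then " Need a group?"
}

With the name prefixes collapsed into the same '|' delimiter that ParseResponse now recognises, a single LLM reply that speaks as the bot several times is sent as several separate chat messages instead of one run-on line.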