yaze 0.3.2
Link to the Past ROM Editor
 
Loading...
Searching...
No Matches
conversational_agent_service.cc
Go to the documentation of this file.
2
3#include <algorithm>
4#include <cctype>
5#include <iostream>
6#include <optional>
7#include <set>
8#include <sstream>
9#include <string>
10#include <vector>
11
12#include "absl/flags/declare.h"
13#include "absl/flags/flag.h"
14#include "absl/status/status.h"
15#include "absl/status/statusor.h"
16#include "absl/strings/ascii.h"
17#include "absl/strings/str_cat.h"
18#include "absl/strings/str_format.h"
19#include "absl/strings/str_join.h"
20#include "absl/strings/str_split.h"
21#include "absl/strings/string_view.h"
22#include "absl/time/clock.h"
23#include "absl/time/time.h"
24#ifdef YAZE_AI_RUNTIME_AVAILABLE
27#endif
31#include "nlohmann/json.hpp"
32#include "rom/rom.h"
35#include "zelda3/dungeon/room.h"
36
37#ifdef SendMessage
38#undef SendMessage
39#endif
40
41ABSL_DECLARE_FLAG(std::string, ai_provider);
42
43namespace yaze {
44namespace cli {
45namespace agent {
46
47namespace {
48
// Returns `input` with all leading and trailing whitespace stripped.
// An all-whitespace (or empty) input yields the empty string.
std::string TrimWhitespace(const std::string& input) {
  const auto is_not_space = [](unsigned char c) { return !std::isspace(c); };
  auto first = std::find_if(input.begin(), input.end(), is_not_space);
  // Scan from the back, then convert the reverse iterator into the
  // one-past-the-last-non-space forward iterator via base().
  auto last = std::find_if(input.rbegin(), input.rend(), is_not_space).base();
  if (first >= last) {
    return "";
  }
  return std::string(first, last);
}
62
63std::string JsonValueToString(const nlohmann::json& value) {
64 if (value.is_string()) {
65 return value.get<std::string>();
66 }
67 if (value.is_boolean()) {
68 return value.get<bool>() ? "true" : "false";
69 }
70 if (value.is_number()) {
71 return value.dump();
72 }
73 if (value.is_null()) {
74 return "null";
75 }
76 return value.dump();
77}
78
79std::set<std::string> CollectObjectKeys(const nlohmann::json& array) {
80 std::set<std::string> keys;
81 for (const auto& item : array) {
82 if (!item.is_object()) {
83 continue;
84 }
85 for (const auto& [key, _] : item.items()) {
86 keys.insert(key);
87 }
88 }
89 return keys;
90}
91
// Converts a parsed JSON payload into tabular data for chat rendering.
//
// Shapes handled:
//   - JSON object          -> two-column "Key"/"Value" table, one row per key.
//   - JSON array of objects-> one column per distinct key (union across
//                             elements, sorted), "-" filling missing cells.
//   - any other JSON array -> single "Value" column, one row per element.
//   - scalars / other      -> std::nullopt (caller falls back to raw text).
std::optional<ChatMessage::TableData> BuildTableData(
    const nlohmann::json& data) {
  using TableData = ChatMessage::TableData;

  if (data.is_object()) {
    TableData table;
    table.headers = {"Key", "Value"};
    table.rows.reserve(data.size());
    for (const auto& [key, value] : data.items()) {
      table.rows.push_back({key, JsonValueToString(value)});
    }
    return table;
  }

  if (data.is_array()) {
    TableData table;
    if (data.empty()) {
      // Empty array: emit a header-only table rather than nullopt so the UI
      // still shows a (vacuous) table for array payloads.
      table.headers = {"Value"};
      return table;
    }

    const bool all_objects = std::all_of(
        data.begin(), data.end(),
        [](const nlohmann::json& item) { return item.is_object(); });

    if (all_objects) {
      auto keys = CollectObjectKeys(data);
      if (keys.empty()) {
        // Array of empty objects: degrade to the single-column layout.
        table.headers = {"Value"};
        for (const auto& item : data) {
          table.rows.push_back({JsonValueToString(item)});
        }
        return table;
      }

      // headers and per-row cells iterate the same sorted key set, which
      // keeps columns aligned across rows.
      table.headers.assign(keys.begin(), keys.end());
      table.rows.reserve(data.size());
      for (const auto& item : data) {
        std::vector<std::string> row;
        row.reserve(table.headers.size());
        for (const auto& key : table.headers) {
          if (item.contains(key)) {
            row.push_back(JsonValueToString(item.at(key)));
          } else {
            // Placeholder for keys absent from this particular element.
            row.emplace_back("-");
          }
        }
        table.rows.push_back(std::move(row));
      }
      return table;
    }

    // Heterogeneous / scalar array: one value per row.
    table.headers = {"Value"};
    table.rows.reserve(data.size());
    for (const auto& item : data) {
      table.rows.push_back({JsonValueToString(item)});
    }
    return table;
  }

  // Scalars and anything else are not tabular.
  return std::nullopt;
}
154
155bool IsExecutableCommand(absl::string_view command) {
156 return !command.empty() && command.front() != '#';
157}
158
159int CountExecutableCommands(const std::vector<std::string>& commands) {
160 int count = 0;
161 for (const auto& command : commands) {
162 if (IsExecutableCommand(command)) {
163 ++count;
164 }
165 }
166 return count;
167}
168
169bool IsLikelyOracleRom(const Rom* rom) {
170 if (rom == nullptr) {
171 return false;
172 }
173 const std::string rom_path = absl::AsciiStrToLower(rom->filename());
174 return absl::StrContains(rom_path, "oracle") ||
175 absl::StrContains(rom_path, "oos");
176}
177
178bool IsOracleDebugIntent(const std::string& user_message) {
179 const std::string lowered = absl::AsciiStrToLower(user_message);
180 static constexpr absl::string_view kOracleDebugKeywords[] = {
181 "oracle", "oos", "mesen", "dungeon", "collision", "minecart",
182 "preflight", "smoke", "room", "hook", "sprite", "water"};
183 for (const auto keyword : kOracleDebugKeywords) {
184 if (absl::StrContains(lowered, keyword)) {
185 return true;
186 }
187 }
188 return false;
189}
190
192 const std::string& user_message) {
193 if (rom == nullptr || !rom->is_loaded() || !IsLikelyOracleRom(rom) ||
194 !IsOracleDebugIntent(user_message)) {
195 return "";
196 }
197
198 const bool expanded =
200
202 structural_opts.require_water_fill_reserved_region = true;
203 structural_opts.require_custom_collision_write_support = false;
204 structural_opts.validate_water_fill_table = true;
205 structural_opts.validate_custom_collision_maps = false;
206 structural_opts.max_collision_errors = 0;
207 const auto structural =
208 zelda3::RunOracleRomSafetyPreflight(rom, structural_opts);
209
210 bool d4_required_rooms_ok = false;
211 bool d3_required_room_ok = false;
212 std::string d4_check_state = "skipped";
213 std::string d3_check_state = "skipped";
214 if (expanded) {
218 d4_opts.validate_water_fill_table = false;
219 d4_opts.validate_custom_collision_maps = false;
220 d4_opts.room_ids_requiring_custom_collision = {0x25, 0x27};
221 const auto d4 = zelda3::RunOracleRomSafetyPreflight(rom, d4_opts);
222 d4_required_rooms_ok = d4.ok();
223 d4_check_state = "ran";
224
228 d3_opts.validate_water_fill_table = false;
229 d3_opts.validate_custom_collision_maps = false;
231 const auto d3 = zelda3::RunOracleRomSafetyPreflight(rom, d3_opts);
232 d3_required_room_ok = d3.ok();
233 d3_check_state = "ran";
234 }
235
236 int d6_track_rooms = 0;
237 for (int room_id : {0xA8, 0xB8, 0xD8, 0xDA}) {
238 zelda3::Room room = zelda3::LoadRoomFromRom(rom, room_id);
239 bool has_track_object = false;
240 for (const auto& object : room.GetTileObjects()) {
241 if (object.id_ == 0x31) {
242 has_track_object = true;
243 break;
244 }
245 }
246 if (has_track_object) {
247 ++d6_track_rooms;
248 }
249 }
250
251 std::ostringstream hook;
252 hook << "[AUTO_ORACLE_STATE_HOOK]\n";
253 hook << "UserRequest: " << user_message << "\n";
254 hook << "OracleRom: " << rom->filename() << "\n";
255 hook << "OracleStructuralOk: " << (structural.ok() ? "true" : "false")
256 << "\n";
257 hook << "CustomCollisionWriteSupport: " << (expanded ? "true" : "false")
258 << "\n";
259 hook << "D4RequiredRoomsCheck: " << d4_check_state << "\n";
260 if (d4_check_state == "ran") {
261 hook << "D4RequiredRoomsOk: " << (d4_required_rooms_ok ? "true" : "false")
262 << "\n";
263 }
264 hook << "D6TrackRoomsFound: " << d6_track_rooms << "/4\n";
265 hook << "D3ReadinessCheck: " << d3_check_state << "\n";
266 if (d3_check_state == "ran") {
267 hook << "D3ReadinessOk: " << (d3_required_room_ok ? "true" : "false")
268 << "\n";
269 }
270 hook << "Guidance: Prefer oracle-smoke-check, dungeon-oracle-preflight, "
271 "dungeon-room-graph, and mesen-* commands when diagnosing Oracle "
272 "runtime state.\n";
273 hook << "[/AUTO_ORACLE_STATE_HOOK]";
274 return hook.str();
275}
276
278 const std::string& content) {
279 ChatMessage message;
280 message.sender = sender;
281 message.message = content;
282 message.timestamp = absl::Now();
283
284 if (sender == ChatMessage::Sender::kAgent) {
285 const std::string trimmed = TrimWhitespace(content);
286 if (!trimmed.empty() &&
287 (trimmed.front() == '{' || trimmed.front() == '[')) {
288 try {
289 nlohmann::json parsed = nlohmann::json::parse(trimmed);
290 message.table_data = BuildTableData(parsed);
291 message.json_pretty = parsed.dump(2);
292 } catch (const nlohmann::json::parse_error&) {
293 // Ignore parse errors, fall back to raw text.
294 }
295 }
296 }
297
298 return message;
299}
300
301} // namespace
302
304 // Default to a lightweight mock provider to avoid slow network checks during
305 // startup (especially on mac-ai builds). The real provider is created when
306 // ConfigureProvider is called from the UI.
307 provider_config_.provider = "mock";
308 ai_service_ = std::make_unique<MockAIService>();
310
311#ifdef Z3ED_AI
312 // Initialize advanced features
313 auto learn_status = learned_knowledge_.Initialize();
314 if (!learn_status.ok() && config_.verbose) {
315 std::cerr << "Warning: Failed to initialize learned knowledge: "
316 << learn_status.message() << std::endl;
317 }
318
319 auto todo_status = todo_manager_.Initialize();
320 if (!todo_status.ok() && config_.verbose) {
321 std::cerr << "Warning: Failed to initialize TODO manager: "
322 << todo_status.message() << std::endl;
323 }
324#endif
325}
326
328 const AgentConfig& config)
329 : config_(config) {
330 // Avoid auto-detecting providers (which can block on network) until the UI
331 // applies an explicit configuration.
332 provider_config_.provider = "mock";
333 ai_service_ = std::make_unique<MockAIService>();
335
336#ifdef Z3ED_AI
337 // Initialize advanced features
338 auto learn_status = learned_knowledge_.Initialize();
339 if (!learn_status.ok() && config_.verbose) {
340 std::cerr << "Warning: Failed to initialize learned knowledge: "
341 << learn_status.message() << std::endl;
342 }
343
344 auto todo_status = todo_manager_.Initialize();
345 if (!todo_status.ok() && config_.verbose) {
346 std::cerr << "Warning: Failed to initialize TODO manager: "
347 << todo_status.message() << std::endl;
348 }
349#endif
350}
351
360
365
368 return;
369 }
370
371 while (history_.size() > config_.max_history_messages) {
372 history_.erase(history_.begin());
373 }
374}
375
394
398
404
406 const AgentResponse& agent_response) {
407 // Process the response similar to the internal loop
408 // 1. Check for tool calls
409 // 2. Execute tools
410 // 3. Create proposal if needed
411 // 4. Append Agent message to history
412 // 5. If tools executed, call external driver again (loop)
413
414 bool executed_tool = false;
415 std::vector<std::string> executed_tools;
416
417 if (!agent_response.tool_calls.empty()) {
418 for (const auto& tool_call : agent_response.tool_calls) {
419 // Format tool arguments for display
420 std::vector<std::string> arg_parts;
421 for (const auto& [key, value] : tool_call.args) {
422 arg_parts.push_back(absl::StrCat(key, "=", value));
423 }
424 std::string args_str = absl::StrJoin(arg_parts, ", ");
425
426 util::PrintToolCall(tool_call.tool_name, args_str);
427
428 auto tool_result_or = tool_dispatcher_.Dispatch(tool_call);
429 std::string tool_output;
430 if (!tool_result_or.ok()) {
431 tool_output =
432 absl::StrCat("Error: ", tool_result_or.status().message());
433 util::PrintError(tool_output);
434 } else {
435 tool_output = tool_result_or.value();
436 util::PrintSuccess("Tool executed successfully");
437 }
438
439 if (!tool_output.empty()) {
441 // Add tool result as internal message
442 std::string marked_output = absl::StrCat(
443 "[TOOL RESULT for ", tool_call.tool_name, "]\n",
444 "The tool returned the following data:\n", tool_output, "\n\n",
445 "Please provide a text_response field in your JSON to summarize "
446 "this information for the user.");
447 auto tool_result_msg =
448 CreateMessage(ChatMessage::Sender::kUser, marked_output);
449 tool_result_msg.is_internal = true;
450 history_.push_back(tool_result_msg);
451 }
452 executed_tool = true;
453 executed_tools.push_back(tool_call.tool_name);
454 }
455 }
456
457 // If tools were executed, we need to loop back to the AI
458 if (executed_tool && has_external_driver_) {
460 return; // Wait for next response
461 }
462
463 // Final text response processing
464 std::optional<ProposalCreationResult> proposal_result;
465 absl::Status proposal_status = absl::OkStatus();
466 bool attempted_proposal = false;
467
468 if (!agent_response.commands.empty()) {
469 attempted_proposal = true;
472 // Use last user message as prompt context if available
473 if (!history_.empty()) {
474 for (auto it = history_.rbegin(); it != history_.rend(); ++it) {
475 if (it->sender == ChatMessage::Sender::kUser && !it->is_internal) {
476 request.prompt = it->message;
477 break;
478 }
479 }
480 }
481 request.response = &agent_response;
482 request.rom = rom_context_;
483 request.sandbox_label = "agent-chat";
484 request.ai_provider = "external";
485
486 auto creation_or = CreateProposalFromAgentResponse(request);
487 if (!creation_or.ok()) {
488 proposal_status = creation_or.status();
489 util::PrintError(absl::StrCat("Failed to create proposal: ",
490 proposal_status.message()));
491 } else {
492 proposal_result = std::move(creation_or.value());
493 }
494 }
495 }
496
497 // Construct text response
498 std::string response_text = agent_response.text_response;
499 if (!agent_response.reasoning.empty()) {
500 if (!response_text.empty())
501 response_text.append("\n\n");
502 response_text.append("Reasoning: ").append(agent_response.reasoning);
503 }
504
505 if (!agent_response.commands.empty()) {
506 if (!response_text.empty())
507 response_text.append("\n\n");
508 response_text.append("Commands:\n")
509 .append(absl::StrJoin(agent_response.commands, "\n"));
510 }
512 CountExecutableCommands(agent_response.commands);
513
514 if (proposal_result.has_value()) {
515 const auto& metadata = proposal_result->metadata;
516 if (!response_text.empty())
517 response_text.append("\n\n");
518 response_text.append(
519 absl::StrFormat("✅ Proposal %s ready with %d change%s (%d command%s).",
520 metadata.id, proposal_result->change_count,
521 proposal_result->change_count == 1 ? "" : "s",
522 proposal_result->executed_commands,
523 proposal_result->executed_commands == 1 ? "" : "s"));
525 } else if (attempted_proposal && !proposal_status.ok()) {
526 if (!response_text.empty())
527 response_text.append("\n\n");
528 response_text.append(absl::StrCat("⚠️ Failed to prepare proposal: ",
529 proposal_status.message()));
530 }
531
532 // Remove the "Thinking..." placeholder if present
533 if (!history_.empty() &&
534 history_.back().sender == ChatMessage::Sender::kAgent &&
535 history_.back().message == "Thinking...") {
536 history_.pop_back();
537 }
538
539 // Add final message
540 ChatMessage chat_response =
541 CreateMessage(ChatMessage::Sender::kAgent, response_text);
542 if (proposal_result.has_value()) {
544 summary.id = proposal_result->metadata.id;
545 summary.change_count = proposal_result->change_count;
546 summary.executed_commands = proposal_result->executed_commands;
547 chat_response.proposal = summary;
548 }
549
550 // Metadata
552 meta.provider = "external";
553 meta.model = "gemini"; // Could get this from JS
554 meta.tool_names = executed_tools;
555 chat_response.model_metadata = meta;
556
557 history_.push_back(chat_response);
559
562}
563
564absl::StatusOr<ChatMessage> ConversationalAgentService::SendMessage(
565 const std::string& message) {
566 if (message.empty() && history_.empty()) {
567 return absl::InvalidArgumentError(
568 "Conversation must start with a non-empty message.");
569 }
570
571 if (!message.empty()) {
572#ifdef Z3ED_AI
573 const std::string auto_hook =
574 BuildAutoOracleStateHook(rom_context_, message);
575 if (!auto_hook.empty()) {
576 auto hook_message = CreateMessage(ChatMessage::Sender::kUser, auto_hook);
577 hook_message.is_internal = true;
578 history_.push_back(std::move(hook_message));
580 }
581#endif
582 history_.push_back(CreateMessage(ChatMessage::Sender::kUser, message));
585 }
586
587 // External Driver Path (WASM/Sidecar)
590 // Return a placeholder that indicates waiting
591 // The UI should handle this update gracefully via callbacks
592 return CreateMessage(ChatMessage::Sender::kAgent, "Thinking...");
593 }
594
595 const int max_iterations = config_.max_tool_iterations;
596 bool waiting_for_text_response = false;
597 absl::Time turn_start = absl::Now();
598 std::vector<std::string> executed_tools;
599
600 if (config_.verbose) {
601 util::PrintInfo(absl::StrCat("Starting agent loop (max ", max_iterations,
602 " iterations)"));
604 absl::StrCat("History size: ", history_.size(), " messages"));
605 }
606
607 for (int iteration = 0; iteration < max_iterations; ++iteration) {
608 if (config_.verbose) {
610 std::cout << util::colors::kCyan << "Iteration " << (iteration + 1) << "/"
611 << max_iterations << util::colors::kReset << std::endl;
612 }
613
614 // Show loading indicator while waiting for AI response
616 waiting_for_text_response ? "Generating final response..."
617 : "Thinking...",
618 !config_.verbose); // Hide spinner in verbose mode
619 loader.Start();
620
621 auto response_or = ai_service_->GenerateResponse(history_);
622 loader.Stop();
623
624 if (!response_or.ok()) {
625 util::PrintError(absl::StrCat("Failed to get AI response: ",
626 response_or.status().message()));
627 return absl::InternalError(absl::StrCat("Failed to get AI response: ",
628 response_or.status().message()));
629 }
630
631 const auto& agent_response = response_or.value();
632
633 if (config_.verbose) {
634 util::PrintInfo("Received agent response:");
635 std::cout << util::colors::kDim
636 << " - Tool calls: " << agent_response.tool_calls.size()
637 << util::colors::kReset << std::endl;
638 std::cout << util::colors::kDim
639 << " - Commands: " << agent_response.commands.size()
640 << util::colors::kReset << std::endl;
641 std::cout << util::colors::kDim << " - Text response: "
642 << (agent_response.text_response.empty() ? "empty" : "present")
643 << util::colors::kReset << std::endl;
644 if (!agent_response.reasoning.empty() && config_.show_reasoning) {
645 std::cout << util::colors::kYellow
646 << " 💭 Reasoning: " << util::colors::kDim
647 << agent_response.reasoning << util::colors::kReset
648 << std::endl;
649 }
650 }
651
652 if (!agent_response.tool_calls.empty()) {
653 // Check if we were waiting for a text response but got more tool calls
654 // instead
655 if (waiting_for_text_response) {
657 absl::StrCat("LLM called tools again instead of providing final "
658 "response (Iteration: ",
659 iteration + 1, "/", max_iterations, ")"));
660 }
661
662 bool executed_tool = false;
663 for (const auto& tool_call : agent_response.tool_calls) {
664 // Format tool arguments for display
665 std::vector<std::string> arg_parts;
666 for (const auto& [key, value] : tool_call.args) {
667 arg_parts.push_back(absl::StrCat(key, "=", value));
668 }
669 std::string args_str = absl::StrJoin(arg_parts, ", ");
670
671 util::PrintToolCall(tool_call.tool_name, args_str);
672
673 auto tool_result_or = tool_dispatcher_.Dispatch(tool_call);
674 if (!tool_result_or.ok()) {
675 util::PrintError(absl::StrCat("Tool execution failed: ",
676 tool_result_or.status().message()));
677 return absl::InternalError(absl::StrCat(
678 "Tool execution failed: ", tool_result_or.status().message()));
679 }
680
681 const std::string& tool_output = tool_result_or.value();
682 if (!tool_output.empty()) {
683 util::PrintSuccess("Tool executed successfully");
685
686 if (config_.verbose) {
687 std::cout << util::colors::kDim
688 << "Tool output (truncated):" << util::colors::kReset
689 << std::endl;
690 std::string preview = tool_output.substr(
691 0, std::min(size_t(200), tool_output.size()));
692 if (tool_output.size() > 200)
693 preview += "...";
694 std::cout << util::colors::kDim << preview << util::colors::kReset
695 << std::endl;
696 }
697
698 // Add tool result with a clear marker for the LLM
699 // Format as plain text to avoid confusing the LLM with nested JSON
700 std::string marked_output = absl::StrCat(
701 "[TOOL RESULT for ", tool_call.tool_name, "]\n",
702 "The tool returned the following data:\n", tool_output, "\n\n",
703 "Please provide a text_response field in your JSON to summarize "
704 "this information for the user.");
705 auto tool_result_msg =
706 CreateMessage(ChatMessage::Sender::kUser, marked_output);
707 tool_result_msg.is_internal =
708 true; // Don't show this to the human user
709 history_.push_back(tool_result_msg);
710 }
711 executed_tool = true;
712 executed_tools.push_back(tool_call.tool_name);
713 }
714
715 if (executed_tool) {
716 // Now we're waiting for the LLM to provide a text response
717 waiting_for_text_response = true;
718 // Re-query the AI with updated context.
719 continue;
720 }
721 }
722
723 // Check if we received a text response after tool execution
724 if (waiting_for_text_response && agent_response.text_response.empty() &&
725 agent_response.commands.empty()) {
727 absl::StrCat("LLM did not provide text_response after receiving tool "
728 "results (Iteration: ",
729 iteration + 1, "/", max_iterations, ")"));
730 // Continue to give it another chance
731 continue;
732 }
733
734 std::optional<ProposalCreationResult> proposal_result;
735 absl::Status proposal_status = absl::OkStatus();
736 bool attempted_proposal = false;
737
738 if (!agent_response.commands.empty()) {
739 attempted_proposal = true;
740
741 if (rom_context_ == nullptr) {
742 proposal_status = absl::FailedPreconditionError(
743 "No ROM context available for proposal creation");
745 "Cannot create proposal because no ROM context is active.");
746 } else if (!rom_context_->is_loaded()) {
747 proposal_status =
748 absl::FailedPreconditionError("ROM context is not loaded");
750 "Cannot create proposal because the ROM context is not loaded.");
751 } else {
753 request.prompt = message;
754 request.response = &agent_response;
755 request.rom = rom_context_;
756 request.sandbox_label = "agent-chat";
757 request.ai_provider = absl::GetFlag(FLAGS_ai_provider);
758
759 auto creation_or = CreateProposalFromAgentResponse(request);
760 if (!creation_or.ok()) {
761 proposal_status = creation_or.status();
762 util::PrintError(absl::StrCat("Failed to create proposal: ",
763 proposal_status.message()));
764 } else {
765 proposal_result = std::move(creation_or.value());
766 if (config_.verbose) {
767 util::PrintSuccess(absl::StrCat(
768 "Created proposal ", proposal_result->metadata.id, " with ",
769 proposal_result->change_count, " change(s)."));
770 }
771 }
772 }
773 }
774
775 std::string response_text = agent_response.text_response;
776 if (!agent_response.reasoning.empty()) {
777 if (!response_text.empty()) {
778 response_text.append("\n\n");
779 }
780 response_text.append("Reasoning: ");
781 response_text.append(agent_response.reasoning);
782 }
783 const int executable_commands =
784 CountExecutableCommands(agent_response.commands);
785 if (!agent_response.commands.empty()) {
786 if (!response_text.empty()) {
787 response_text.append("\n\n");
788 }
789 response_text.append("Commands:\n");
790 response_text.append(absl::StrJoin(agent_response.commands, "\n"));
791 }
792 metrics_.commands_generated += executable_commands;
793
794 if (proposal_result.has_value()) {
795 const auto& metadata = proposal_result->metadata;
796 if (!response_text.empty()) {
797 response_text.append("\n\n");
798 }
799 response_text.append(absl::StrFormat(
800 "✅ Proposal %s ready with %d change%s (%d command%s).\n"
801 "Review it in the Proposal drawer or run `z3ed agent diff "
802 "--proposal-id %s`.\n"
803 "Sandbox ROM: %s\nProposal JSON: %s",
804 metadata.id, proposal_result->change_count,
805 proposal_result->change_count == 1 ? "" : "s",
806 proposal_result->executed_commands,
807 proposal_result->executed_commands == 1 ? "" : "s", metadata.id,
808 metadata.sandbox_rom_path.string(),
809 proposal_result->proposal_json_path.string()));
811 } else if (attempted_proposal && !proposal_status.ok()) {
812 if (!response_text.empty()) {
813 response_text.append("\n\n");
814 }
815 response_text.append(
816 absl::StrCat("⚠️ Failed to prepare a proposal automatically: ",
817 proposal_status.message()));
818 }
819 ChatMessage chat_response =
820 CreateMessage(ChatMessage::Sender::kAgent, response_text);
821 if (proposal_result.has_value()) {
823 summary.id = proposal_result->metadata.id;
824 summary.change_count = proposal_result->change_count;
825 summary.executed_commands = proposal_result->executed_commands;
826 summary.sandbox_rom_path = proposal_result->metadata.sandbox_rom_path;
827 summary.proposal_json_path = proposal_result->proposal_json_path;
828 chat_response.proposal = summary;
829 }
832 metrics_.total_latency += absl::Now() - turn_start;
833 chat_response.metrics = BuildMetricsSnapshot();
834 if (!agent_response.warnings.empty()) {
835 chat_response.warnings = agent_response.warnings;
836 }
838 meta.provider = !agent_response.provider.empty()
839 ? agent_response.provider
841 meta.model = !agent_response.model.empty() ? agent_response.model
843 meta.latency_seconds =
844 agent_response.latency_seconds > 0.0
845 ? agent_response.latency_seconds
846 : absl::ToDoubleSeconds(absl::Now() - turn_start);
847 meta.tool_iterations = metrics_.tool_calls;
848 meta.tool_names = executed_tools;
849 meta.parameters = agent_response.parameters;
850 chat_response.model_metadata = meta;
851 history_.push_back(chat_response);
853 return chat_response;
854 }
855
856 return absl::InternalError(
857 "Agent did not produce a response after executing tools.");
858}
859
861 const AIServiceConfig& config) {
862 auto service_or = CreateAIServiceStrict(config);
863 if (!service_or.ok()) {
864 // Keep the existing service running and fall back to mock so the UI stays
865 // responsive.
866 std::cerr << "Provider configuration failed: " << service_or.status()
867 << " — falling back to mock" << std::endl;
868 ai_service_ = std::make_unique<MockAIService>();
869 provider_config_.provider = "mock";
870 if (rom_context_) {
871 ai_service_->SetRomContext(rom_context_);
872 }
873 return service_or.status();
874 }
875
876 ai_service_ = std::move(service_or.value());
877 provider_config_ = config;
878 if (rom_context_) {
879 ai_service_->SetRomContext(rom_context_);
880 }
881 return absl::OkStatus();
882}
883
889
890const std::vector<ChatMessage>& ConversationalAgentService::GetHistory() const {
891 return history_;
892}
893
895 std::vector<ChatMessage> history) {
896 history_ = std::move(history);
899}
900
903
905 bool has_snapshot = false;
906
907 for (const auto& message : history_) {
908 if (message.sender == ChatMessage::Sender::kUser) {
910 } else if (message.sender == ChatMessage::Sender::kAgent) {
913 }
914
915 if (message.proposal.has_value()) {
917 }
918
919 if (message.metrics.has_value()) {
920 snapshot = *message.metrics;
921 has_snapshot = true;
922 }
923 }
924
925 if (has_snapshot) {
926 metrics_.user_messages = snapshot.total_user_messages;
927 metrics_.agent_messages = snapshot.total_agent_messages;
928 metrics_.tool_calls = snapshot.total_tool_calls;
929 metrics_.commands_generated = snapshot.total_commands;
930 metrics_.proposals_created = snapshot.total_proposals;
931 metrics_.turns_completed = snapshot.turn_index;
932 metrics_.total_latency = absl::Seconds(snapshot.total_elapsed_seconds);
933 }
934}
935
936#ifdef Z3ED_AI
937// === Advanced Feature Integration ===
938
939std::string ConversationalAgentService::BuildEnhancedPrompt(
940 const std::string& user_message) {
941 std::ostringstream enhanced;
942
943 // Inject pretraining on first message
944 if (inject_pretraining_ && !pretraining_injected_ && rom_context_) {
945 enhanced << InjectPretraining() << "\n\n";
946 pretraining_injected_ = true;
947 }
948
949 // Inject learned context
950 if (inject_learned_context_) {
951 enhanced << InjectLearnedContext(user_message) << "\n";
952 }
953
954 enhanced << user_message;
955 return enhanced.str();
956}
957
958std::string ConversationalAgentService::InjectLearnedContext(
959 const std::string& message) {
960 std::ostringstream context;
961
962 // Add relevant preferences
963 auto prefs = learned_knowledge_.GetAllPreferences();
964 if (!prefs.empty() && prefs.size() <= 5) { // Don't overwhelm with too many
965 context << "[User Preferences: ";
966 std::vector<std::string> pref_strings;
967 for (const auto& [key, value] : prefs) {
968 pref_strings.push_back(absl::StrCat(key, "=", value));
969 }
970 context << absl::StrJoin(pref_strings, ", ") << "]\n";
971 }
972
973 // Add ROM-specific patterns
975 // TODO: Get ROM hash
976 // auto patterns = learned_knowledge_.QueryPatterns("", rom_hash);
977 }
978
979 // Add recent relevant memories
980 std::vector<std::string> keywords;
981 // Extract keywords from message (simple word splitting)
982 for (const auto& word : absl::StrSplit(message, ' ')) {
983 if (word.length() > 4) { // Only meaningful words
984 keywords.push_back(std::string(word));
985 }
986 }
987
988 if (!keywords.empty()) {
989 auto memories = learned_knowledge_.SearchMemories(keywords[0]);
990 if (!memories.empty() && memories.size() <= 3) {
991 context << "[Relevant Past Context:\n";
992 for (const auto& mem : memories) {
993 context << "- " << mem.topic << ": " << mem.summary << "\n";
994 }
995 context << "]\n";
996 }
997 }
998
999 return context.str();
1000}
1001
1002std::string ConversationalAgentService::InjectPretraining() {
1003 if (!rom_context_) {
1004 return "";
1005 }
1006
1007 std::ostringstream pretraining;
1008 pretraining << "[SYSTEM KNOWLEDGE INJECTION - Read this first]\n\n";
1009#ifdef YAZE_AI_RUNTIME_AVAILABLE
1011#else
1012 pretraining << "AI Runtime not available - pretraining disabled.\n";
1013#endif
1014 pretraining << "\n[END KNOWLEDGE INJECTION]\n";
1015
1016 return pretraining.str();
1017}
1018
1019ChatMessage ConversationalAgentService::EnhanceResponse(
1020 const ChatMessage& response, const std::string& user_message) {
1021 // Use AdvancedRouter to enhance tool-based responses
1022 // This would synthesize multi-tool results into coherent insights
1023
1024 // For now, return response as-is
1025 // TODO: Integrate AdvancedRouter here
1026 return response;
1027}
1028#endif // Z3ED_AI
1029
1030} // namespace agent
1031} // namespace cli
1032} // namespace yaze
The Rom class is used to load, save, and modify Rom data. This is a generic SNES ROM container and do...
Definition rom.h:28
auto filename() const
Definition rom.h:145
const auto & vector() const
Definition rom.h:143
bool is_loaded() const
Definition rom.h:132
static std::string GeneratePretrainingPrompt(Rom *rom)
Generate pre-training prompt for agent.
absl::StatusOr< ChatMessage > SendMessage(const std::string &message)
absl::Status ConfigureProvider(const AIServiceConfig &config)
void SetToolPreferences(const ToolDispatcher::ToolPreferences &prefs)
const std::vector< ChatMessage > & GetHistory() const
void HandleExternalResponse(const AgentResponse &response)
std::function< void(const std::vector< ChatMessage > &history)> ExternalDriverCallback
void ReplaceHistory(std::vector< ChatMessage > history)
void SetToolPreferences(const ToolPreferences &prefs)
absl::StatusOr< std::string > Dispatch(const ::yaze::cli::ToolCall &tool_call)
const std::vector< RoomObject > & GetTileObjects() const
Definition room.h:314
ABSL_DECLARE_FLAG(std::string, ai_provider)
std::string BuildAutoOracleStateHook(Rom *rom, const std::string &user_message)
ChatMessage CreateMessage(ChatMessage::Sender sender, const std::string &content)
std::optional< ChatMessage::TableData > BuildTableData(const nlohmann::json &data)
std::string TrimWhitespace(absl::string_view value)
absl::StatusOr< ProposalCreationResult > CreateProposalFromAgentResponse(const ProposalCreationRequest &)
constexpr const char * kDim
constexpr const char * kYellow
constexpr const char * kReset
constexpr const char * kCyan
void PrintWarning(const std::string &message)
void PrintToolCall(const std::string &tool_name, const std::string &details="")
void PrintInfo(const std::string &message)
void PrintSuccess(const std::string &message)
void PrintError(const std::string &message)
absl::StatusOr< std::unique_ptr< AIService > > CreateAIServiceStrict(const AIServiceConfig &config)
OracleRomSafetyPreflightResult RunOracleRomSafetyPreflight(Rom *rom, const OracleRomSafetyPreflightOptions &options)
Room LoadRoomFromRom(Rom *rom, int room_id)
Definition room.cc:253
constexpr bool HasCustomCollisionWriteSupport(std::size_t rom_size)
std::vector< std::string > commands
Definition common.h:26
std::string reasoning
Definition common.h:29
std::vector< ToolCall > tool_calls
Definition common.h:23
std::string text_response
Definition common.h:20
std::optional< ModelMetadata > model_metadata
std::optional< std::string > json_pretty
std::optional< ProposalSummary > proposal
std::optional< SessionMetrics > metrics