yaze 0.3.2
Link to the Past ROM Editor
 
Loading...
Searching...
No Matches
conversational_agent_service.cc
Go to the documentation of this file.
2
3#include <algorithm>
4#include <cctype>
5#include <iostream>
6#include <optional>
7#include <set>
8#include <sstream>
9#include <string>
10#include <vector>
11
12#include "absl/flags/declare.h"
13#include "absl/flags/flag.h"
14#include "absl/status/status.h"
15#include "absl/status/statusor.h"
16#include "absl/strings/str_cat.h"
17#include "absl/strings/str_format.h"
18#include "absl/strings/str_join.h"
19#include "absl/strings/str_split.h"
20#include "absl/strings/string_view.h"
21#include "absl/time/clock.h"
22#include "absl/time/time.h"
28#include "nlohmann/json.hpp"
29#include "rom/rom.h"
30
31#ifdef SendMessage
32#undef SendMessage
33#endif
34
35ABSL_DECLARE_FLAG(std::string, ai_provider);
36
37namespace yaze {
38namespace cli {
39namespace agent {
40
41namespace {
42
// Returns `input` with leading and trailing whitespace (per std::isspace in
// the current locale) removed. An all-whitespace or empty input yields "".
std::string TrimWhitespace(const std::string& input) {
  std::size_t first = 0;
  std::size_t last = input.size();
  // Advance past leading whitespace. The cast to unsigned char is required:
  // passing a negative char to std::isspace is undefined behavior.
  while (first < last &&
         std::isspace(static_cast<unsigned char>(input[first]))) {
    ++first;
  }
  // Retreat past trailing whitespace.
  while (last > first &&
         std::isspace(static_cast<unsigned char>(input[last - 1]))) {
    --last;
  }
  return input.substr(first, last - first);
}
56
57std::string JsonValueToString(const nlohmann::json& value) {
58 if (value.is_string()) {
59 return value.get<std::string>();
60 }
61 if (value.is_boolean()) {
62 return value.get<bool>() ? "true" : "false";
63 }
64 if (value.is_number()) {
65 return value.dump();
66 }
67 if (value.is_null()) {
68 return "null";
69 }
70 return value.dump();
71}
72
73std::set<std::string> CollectObjectKeys(const nlohmann::json& array) {
74 std::set<std::string> keys;
75 for (const auto& item : array) {
76 if (!item.is_object()) {
77 continue;
78 }
79 for (const auto& [key, _] : item.items()) {
80 keys.insert(key);
81 }
82 }
83 return keys;
84}
85
// Converts a parsed JSON payload into tabular form for the chat UI.
//
// - Object:             two-column "Key"/"Value" table, one row per entry.
// - Array of objects:   one column per key in the union of all objects'
//                       keys (sorted, via CollectObjectKeys); absent keys
//                       render as "-".
// - Other arrays:       single "Value" column, one row per element.
// - Scalars / null:     std::nullopt (no table is rendered).
std::optional<ChatMessage::TableData> BuildTableData(
    const nlohmann::json& data) {
  using TableData = ChatMessage::TableData;

  if (data.is_object()) {
    TableData table;
    table.headers = {"Key", "Value"};
    table.rows.reserve(data.size());
    for (const auto& [key, value] : data.items()) {
      table.rows.push_back({key, JsonValueToString(value)});
    }
    return table;
  }

  if (data.is_array()) {
    TableData table;
    // An empty array still produces a (rowless) single-column table.
    if (data.empty()) {
      table.headers = {"Value"};
      return table;
    }

    const bool all_objects = std::all_of(
        data.begin(), data.end(),
        [](const nlohmann::json& item) { return item.is_object(); });

    if (all_objects) {
      auto keys = CollectObjectKeys(data);
      if (keys.empty()) {
        // Array of empty objects: nothing to use as headers, so fall back
        // to a one-column dump of each element.
        table.headers = {"Value"};
        for (const auto& item : data) {
          table.rows.push_back({JsonValueToString(item)});
        }
        return table;
      }

      table.headers.assign(keys.begin(), keys.end());
      table.rows.reserve(data.size());
      for (const auto& item : data) {
        std::vector<std::string> row;
        row.reserve(table.headers.size());
        for (const auto& key : table.headers) {
          if (item.contains(key)) {
            row.push_back(JsonValueToString(item.at(key)));
          } else {
            // Placeholder for keys this particular object lacks.
            row.emplace_back("-");
          }
        }
        table.rows.push_back(std::move(row));
      }
      return table;
    }

    // Heterogeneous / scalar array: one row per element, single column.
    table.headers = {"Value"};
    table.rows.reserve(data.size());
    for (const auto& item : data) {
      table.rows.push_back({JsonValueToString(item)});
    }
    return table;
  }

  // Not an object or array: caller falls back to plain-text rendering.
  return std::nullopt;
}
148
149bool IsExecutableCommand(absl::string_view command) {
150 return !command.empty() && command.front() != '#';
151}
152
153int CountExecutableCommands(const std::vector<std::string>& commands) {
154 int count = 0;
155 for (const auto& command : commands) {
156 if (IsExecutableCommand(command)) {
157 ++count;
158 }
159 }
160 return count;
161}
162
164 const std::string& content) {
165 ChatMessage message;
166 message.sender = sender;
167 message.message = content;
168 message.timestamp = absl::Now();
169
170 if (sender == ChatMessage::Sender::kAgent) {
171 const std::string trimmed = TrimWhitespace(content);
172 if (!trimmed.empty() &&
173 (trimmed.front() == '{' || trimmed.front() == '[')) {
174 try {
175 nlohmann::json parsed = nlohmann::json::parse(trimmed);
176 message.table_data = BuildTableData(parsed);
177 message.json_pretty = parsed.dump(2);
178 } catch (const nlohmann::json::parse_error&) {
179 // Ignore parse errors, fall back to raw text.
180 }
181 }
182 }
183
184 return message;
185}
186
187} // namespace
188
190 // Default to a lightweight mock provider to avoid slow network checks during
191 // startup (especially on mac-ai builds). The real provider is created when
192 // ConfigureProvider is called from the UI.
193 provider_config_.provider = "mock";
194 ai_service_ = std::make_unique<MockAIService>();
196
197#ifdef Z3ED_AI
198 // Initialize advanced features
199 auto learn_status = learned_knowledge_.Initialize();
200 if (!learn_status.ok() && config_.verbose) {
201 std::cerr << "Warning: Failed to initialize learned knowledge: "
202 << learn_status.message() << std::endl;
203 }
204
205 auto todo_status = todo_manager_.Initialize();
206 if (!todo_status.ok() && config_.verbose) {
207 std::cerr << "Warning: Failed to initialize TODO manager: "
208 << todo_status.message() << std::endl;
209 }
210#endif
211}
212
214 const AgentConfig& config)
215 : config_(config) {
216 // Avoid auto-detecting providers (which can block on network) until the UI
217 // applies an explicit configuration.
218 provider_config_.provider = "mock";
219 ai_service_ = std::make_unique<MockAIService>();
221
222#ifdef Z3ED_AI
223 // Initialize advanced features
224 auto learn_status = learned_knowledge_.Initialize();
225 if (!learn_status.ok() && config_.verbose) {
226 std::cerr << "Warning: Failed to initialize learned knowledge: "
227 << learn_status.message() << std::endl;
228 }
229
230 auto todo_status = todo_manager_.Initialize();
231 if (!todo_status.ok() && config_.verbose) {
232 std::cerr << "Warning: Failed to initialize TODO manager: "
233 << todo_status.message() << std::endl;
234 }
235#endif
236}
237
246
251
254 return;
255 }
256
257 while (history_.size() > config_.max_history_messages) {
258 history_.erase(history_.begin());
259 }
260}
261
280
284
290
292 const AgentResponse& agent_response) {
293 // Process the response similar to the internal loop
294 // 1. Check for tool calls
295 // 2. Execute tools
296 // 3. Create proposal if needed
297 // 4. Append Agent message to history
298 // 5. If tools executed, call external driver again (loop)
299
300 bool executed_tool = false;
301 std::vector<std::string> executed_tools;
302
303 if (!agent_response.tool_calls.empty()) {
304 for (const auto& tool_call : agent_response.tool_calls) {
305 // Format tool arguments for display
306 std::vector<std::string> arg_parts;
307 for (const auto& [key, value] : tool_call.args) {
308 arg_parts.push_back(absl::StrCat(key, "=", value));
309 }
310 std::string args_str = absl::StrJoin(arg_parts, ", ");
311
312 util::PrintToolCall(tool_call.tool_name, args_str);
313
314 auto tool_result_or = tool_dispatcher_.Dispatch(tool_call);
315 std::string tool_output;
316 if (!tool_result_or.ok()) {
317 tool_output =
318 absl::StrCat("Error: ", tool_result_or.status().message());
319 util::PrintError(tool_output);
320 } else {
321 tool_output = tool_result_or.value();
322 util::PrintSuccess("Tool executed successfully");
323 }
324
325 if (!tool_output.empty()) {
327 // Add tool result as internal message
328 std::string marked_output = absl::StrCat(
329 "[TOOL RESULT for ", tool_call.tool_name, "]\n",
330 "The tool returned the following data:\n", tool_output, "\n\n",
331 "Please provide a text_response field in your JSON to summarize "
332 "this information for the user.");
333 auto tool_result_msg =
334 CreateMessage(ChatMessage::Sender::kUser, marked_output);
335 tool_result_msg.is_internal = true;
336 history_.push_back(tool_result_msg);
337 }
338 executed_tool = true;
339 executed_tools.push_back(tool_call.tool_name);
340 }
341 }
342
343 // If tools were executed, we need to loop back to the AI
344 if (executed_tool && has_external_driver_) {
346 return; // Wait for next response
347 }
348
349 // Final text response processing
350 std::optional<ProposalCreationResult> proposal_result;
351 absl::Status proposal_status = absl::OkStatus();
352 bool attempted_proposal = false;
353
354 if (!agent_response.commands.empty()) {
355 attempted_proposal = true;
358 // Use last user message as prompt context if available
359 if (!history_.empty()) {
360 for (auto it = history_.rbegin(); it != history_.rend(); ++it) {
361 if (it->sender == ChatMessage::Sender::kUser && !it->is_internal) {
362 request.prompt = it->message;
363 break;
364 }
365 }
366 }
367 request.response = &agent_response;
368 request.rom = rom_context_;
369 request.sandbox_label = "agent-chat";
370 request.ai_provider = "external";
371
372 auto creation_or = CreateProposalFromAgentResponse(request);
373 if (!creation_or.ok()) {
374 proposal_status = creation_or.status();
375 util::PrintError(absl::StrCat("Failed to create proposal: ",
376 proposal_status.message()));
377 } else {
378 proposal_result = std::move(creation_or.value());
379 }
380 }
381 }
382
383 // Construct text response
384 std::string response_text = agent_response.text_response;
385 if (!agent_response.reasoning.empty()) {
386 if (!response_text.empty())
387 response_text.append("\n\n");
388 response_text.append("Reasoning: ").append(agent_response.reasoning);
389 }
390
391 if (!agent_response.commands.empty()) {
392 if (!response_text.empty())
393 response_text.append("\n\n");
394 response_text.append("Commands:\n")
395 .append(absl::StrJoin(agent_response.commands, "\n"));
396 }
398 CountExecutableCommands(agent_response.commands);
399
400 if (proposal_result.has_value()) {
401 const auto& metadata = proposal_result->metadata;
402 if (!response_text.empty())
403 response_text.append("\n\n");
404 response_text.append(
405 absl::StrFormat("✅ Proposal %s ready with %d change%s (%d command%s).",
406 metadata.id, proposal_result->change_count,
407 proposal_result->change_count == 1 ? "" : "s",
408 proposal_result->executed_commands,
409 proposal_result->executed_commands == 1 ? "" : "s"));
411 } else if (attempted_proposal && !proposal_status.ok()) {
412 if (!response_text.empty())
413 response_text.append("\n\n");
414 response_text.append(absl::StrCat("⚠️ Failed to prepare proposal: ",
415 proposal_status.message()));
416 }
417
418 // Remove the "Thinking..." placeholder if present
419 if (!history_.empty() &&
420 history_.back().sender == ChatMessage::Sender::kAgent &&
421 history_.back().message == "Thinking...") {
422 history_.pop_back();
423 }
424
425 // Add final message
426 ChatMessage chat_response =
427 CreateMessage(ChatMessage::Sender::kAgent, response_text);
428 if (proposal_result.has_value()) {
430 summary.id = proposal_result->metadata.id;
431 summary.change_count = proposal_result->change_count;
432 summary.executed_commands = proposal_result->executed_commands;
433 chat_response.proposal = summary;
434 }
435
436 // Metadata
438 meta.provider = "external";
439 meta.model = "gemini"; // Could get this from JS
440 meta.tool_names = executed_tools;
441 chat_response.model_metadata = meta;
442
443 history_.push_back(chat_response);
445
448}
449
450absl::StatusOr<ChatMessage> ConversationalAgentService::SendMessage(
451 const std::string& message) {
452 if (message.empty() && history_.empty()) {
453 return absl::InvalidArgumentError(
454 "Conversation must start with a non-empty message.");
455 }
456
457 if (!message.empty()) {
458 history_.push_back(CreateMessage(ChatMessage::Sender::kUser, message));
461 }
462
463 // External Driver Path (WASM/Sidecar)
466 // Return a placeholder that indicates waiting
467 // The UI should handle this update gracefully via callbacks
468 return CreateMessage(ChatMessage::Sender::kAgent, "Thinking...");
469 }
470
471 const int max_iterations = config_.max_tool_iterations;
472 bool waiting_for_text_response = false;
473 absl::Time turn_start = absl::Now();
474 std::vector<std::string> executed_tools;
475
476 if (config_.verbose) {
477 util::PrintInfo(absl::StrCat("Starting agent loop (max ", max_iterations,
478 " iterations)"));
480 absl::StrCat("History size: ", history_.size(), " messages"));
481 }
482
483 for (int iteration = 0; iteration < max_iterations; ++iteration) {
484 if (config_.verbose) {
486 std::cout << util::colors::kCyan << "Iteration " << (iteration + 1) << "/"
487 << max_iterations << util::colors::kReset << std::endl;
488 }
489
490 // Show loading indicator while waiting for AI response
492 waiting_for_text_response ? "Generating final response..."
493 : "Thinking...",
494 !config_.verbose); // Hide spinner in verbose mode
495 loader.Start();
496
497 auto response_or = ai_service_->GenerateResponse(history_);
498 loader.Stop();
499
500 if (!response_or.ok()) {
501 util::PrintError(absl::StrCat("Failed to get AI response: ",
502 response_or.status().message()));
503 return absl::InternalError(absl::StrCat("Failed to get AI response: ",
504 response_or.status().message()));
505 }
506
507 const auto& agent_response = response_or.value();
508
509 if (config_.verbose) {
510 util::PrintInfo("Received agent response:");
511 std::cout << util::colors::kDim
512 << " - Tool calls: " << agent_response.tool_calls.size()
513 << util::colors::kReset << std::endl;
514 std::cout << util::colors::kDim
515 << " - Commands: " << agent_response.commands.size()
516 << util::colors::kReset << std::endl;
517 std::cout << util::colors::kDim << " - Text response: "
518 << (agent_response.text_response.empty() ? "empty" : "present")
519 << util::colors::kReset << std::endl;
520 if (!agent_response.reasoning.empty() && config_.show_reasoning) {
521 std::cout << util::colors::kYellow
522 << " 💭 Reasoning: " << util::colors::kDim
523 << agent_response.reasoning << util::colors::kReset
524 << std::endl;
525 }
526 }
527
528 if (!agent_response.tool_calls.empty()) {
529 // Check if we were waiting for a text response but got more tool calls
530 // instead
531 if (waiting_for_text_response) {
533 absl::StrCat("LLM called tools again instead of providing final "
534 "response (Iteration: ",
535 iteration + 1, "/", max_iterations, ")"));
536 }
537
538 bool executed_tool = false;
539 for (const auto& tool_call : agent_response.tool_calls) {
540 // Format tool arguments for display
541 std::vector<std::string> arg_parts;
542 for (const auto& [key, value] : tool_call.args) {
543 arg_parts.push_back(absl::StrCat(key, "=", value));
544 }
545 std::string args_str = absl::StrJoin(arg_parts, ", ");
546
547 util::PrintToolCall(tool_call.tool_name, args_str);
548
549 auto tool_result_or = tool_dispatcher_.Dispatch(tool_call);
550 if (!tool_result_or.ok()) {
551 util::PrintError(absl::StrCat("Tool execution failed: ",
552 tool_result_or.status().message()));
553 return absl::InternalError(absl::StrCat(
554 "Tool execution failed: ", tool_result_or.status().message()));
555 }
556
557 const std::string& tool_output = tool_result_or.value();
558 if (!tool_output.empty()) {
559 util::PrintSuccess("Tool executed successfully");
561
562 if (config_.verbose) {
563 std::cout << util::colors::kDim
564 << "Tool output (truncated):" << util::colors::kReset
565 << std::endl;
566 std::string preview = tool_output.substr(
567 0, std::min(size_t(200), tool_output.size()));
568 if (tool_output.size() > 200)
569 preview += "...";
570 std::cout << util::colors::kDim << preview << util::colors::kReset
571 << std::endl;
572 }
573
574 // Add tool result with a clear marker for the LLM
575 // Format as plain text to avoid confusing the LLM with nested JSON
576 std::string marked_output = absl::StrCat(
577 "[TOOL RESULT for ", tool_call.tool_name, "]\n",
578 "The tool returned the following data:\n", tool_output, "\n\n",
579 "Please provide a text_response field in your JSON to summarize "
580 "this information for the user.");
581 auto tool_result_msg =
582 CreateMessage(ChatMessage::Sender::kUser, marked_output);
583 tool_result_msg.is_internal =
584 true; // Don't show this to the human user
585 history_.push_back(tool_result_msg);
586 }
587 executed_tool = true;
588 executed_tools.push_back(tool_call.tool_name);
589 }
590
591 if (executed_tool) {
592 // Now we're waiting for the LLM to provide a text response
593 waiting_for_text_response = true;
594 // Re-query the AI with updated context.
595 continue;
596 }
597 }
598
599 // Check if we received a text response after tool execution
600 if (waiting_for_text_response && agent_response.text_response.empty() &&
601 agent_response.commands.empty()) {
603 absl::StrCat("LLM did not provide text_response after receiving tool "
604 "results (Iteration: ",
605 iteration + 1, "/", max_iterations, ")"));
606 // Continue to give it another chance
607 continue;
608 }
609
610 std::optional<ProposalCreationResult> proposal_result;
611 absl::Status proposal_status = absl::OkStatus();
612 bool attempted_proposal = false;
613
614 if (!agent_response.commands.empty()) {
615 attempted_proposal = true;
616
617 if (rom_context_ == nullptr) {
618 proposal_status = absl::FailedPreconditionError(
619 "No ROM context available for proposal creation");
621 "Cannot create proposal because no ROM context is active.");
622 } else if (!rom_context_->is_loaded()) {
623 proposal_status =
624 absl::FailedPreconditionError("ROM context is not loaded");
626 "Cannot create proposal because the ROM context is not loaded.");
627 } else {
629 request.prompt = message;
630 request.response = &agent_response;
631 request.rom = rom_context_;
632 request.sandbox_label = "agent-chat";
633 request.ai_provider = absl::GetFlag(FLAGS_ai_provider);
634
635 auto creation_or = CreateProposalFromAgentResponse(request);
636 if (!creation_or.ok()) {
637 proposal_status = creation_or.status();
638 util::PrintError(absl::StrCat("Failed to create proposal: ",
639 proposal_status.message()));
640 } else {
641 proposal_result = std::move(creation_or.value());
642 if (config_.verbose) {
643 util::PrintSuccess(absl::StrCat(
644 "Created proposal ", proposal_result->metadata.id, " with ",
645 proposal_result->change_count, " change(s)."));
646 }
647 }
648 }
649 }
650
651 std::string response_text = agent_response.text_response;
652 if (!agent_response.reasoning.empty()) {
653 if (!response_text.empty()) {
654 response_text.append("\n\n");
655 }
656 response_text.append("Reasoning: ");
657 response_text.append(agent_response.reasoning);
658 }
659 const int executable_commands =
660 CountExecutableCommands(agent_response.commands);
661 if (!agent_response.commands.empty()) {
662 if (!response_text.empty()) {
663 response_text.append("\n\n");
664 }
665 response_text.append("Commands:\n");
666 response_text.append(absl::StrJoin(agent_response.commands, "\n"));
667 }
668 metrics_.commands_generated += executable_commands;
669
670 if (proposal_result.has_value()) {
671 const auto& metadata = proposal_result->metadata;
672 if (!response_text.empty()) {
673 response_text.append("\n\n");
674 }
675 response_text.append(absl::StrFormat(
676 "✅ Proposal %s ready with %d change%s (%d command%s).\n"
677 "Review it in the Proposal drawer or run `z3ed agent diff "
678 "--proposal-id %s`.\n"
679 "Sandbox ROM: %s\nProposal JSON: %s",
680 metadata.id, proposal_result->change_count,
681 proposal_result->change_count == 1 ? "" : "s",
682 proposal_result->executed_commands,
683 proposal_result->executed_commands == 1 ? "" : "s", metadata.id,
684 metadata.sandbox_rom_path.string(),
685 proposal_result->proposal_json_path.string()));
687 } else if (attempted_proposal && !proposal_status.ok()) {
688 if (!response_text.empty()) {
689 response_text.append("\n\n");
690 }
691 response_text.append(
692 absl::StrCat("⚠️ Failed to prepare a proposal automatically: ",
693 proposal_status.message()));
694 }
695 ChatMessage chat_response =
696 CreateMessage(ChatMessage::Sender::kAgent, response_text);
697 if (proposal_result.has_value()) {
699 summary.id = proposal_result->metadata.id;
700 summary.change_count = proposal_result->change_count;
701 summary.executed_commands = proposal_result->executed_commands;
702 summary.sandbox_rom_path = proposal_result->metadata.sandbox_rom_path;
703 summary.proposal_json_path = proposal_result->proposal_json_path;
704 chat_response.proposal = summary;
705 }
708 metrics_.total_latency += absl::Now() - turn_start;
709 chat_response.metrics = BuildMetricsSnapshot();
710 if (!agent_response.warnings.empty()) {
711 chat_response.warnings = agent_response.warnings;
712 }
714 meta.provider = !agent_response.provider.empty()
715 ? agent_response.provider
717 meta.model = !agent_response.model.empty() ? agent_response.model
719 meta.latency_seconds =
720 agent_response.latency_seconds > 0.0
721 ? agent_response.latency_seconds
722 : absl::ToDoubleSeconds(absl::Now() - turn_start);
724 meta.tool_names = executed_tools;
725 meta.parameters = agent_response.parameters;
726 chat_response.model_metadata = meta;
727 history_.push_back(chat_response);
729 return chat_response;
730 }
731
732 return absl::InternalError(
733 "Agent did not produce a response after executing tools.");
734}
735
737 const AIServiceConfig& config) {
738 auto service_or = CreateAIServiceStrict(config);
739 if (!service_or.ok()) {
740 // Keep the existing service running and fall back to mock so the UI stays
741 // responsive.
742 std::cerr << "Provider configuration failed: " << service_or.status()
743 << " — falling back to mock" << std::endl;
744 ai_service_ = std::make_unique<MockAIService>();
745 provider_config_.provider = "mock";
746 if (rom_context_) {
747 ai_service_->SetRomContext(rom_context_);
748 }
749 return service_or.status();
750 }
751
752 ai_service_ = std::move(service_or.value());
753 provider_config_ = config;
754 if (rom_context_) {
755 ai_service_->SetRomContext(rom_context_);
756 }
757 return absl::OkStatus();
758}
759
765
// Returns a read-only reference to the accumulated chat transcript.
// NOTE(review): this appears to include internal tool-result messages
// (entries flagged is_internal elsewhere in this file) — callers that
// display the history should filter those; confirm against the UI code.
const std::vector<ChatMessage>& ConversationalAgentService::GetHistory() const {
  return history_;
}
769
771 std::vector<ChatMessage> history) {
772 history_ = std::move(history);
775}
776
779
781 bool has_snapshot = false;
782
783 for (const auto& message : history_) {
784 if (message.sender == ChatMessage::Sender::kUser) {
786 } else if (message.sender == ChatMessage::Sender::kAgent) {
789 }
790
791 if (message.proposal.has_value()) {
793 }
794
795 if (message.metrics.has_value()) {
796 snapshot = *message.metrics;
797 has_snapshot = true;
798 }
799 }
800
801 if (has_snapshot) {
802 metrics_.user_messages = snapshot.total_user_messages;
803 metrics_.agent_messages = snapshot.total_agent_messages;
804 metrics_.tool_calls = snapshot.total_tool_calls;
805 metrics_.commands_generated = snapshot.total_commands;
806 metrics_.proposals_created = snapshot.total_proposals;
807 metrics_.turns_completed = snapshot.turn_index;
808 metrics_.total_latency = absl::Seconds(snapshot.total_elapsed_seconds);
809 }
810}
811
812#ifdef Z3ED_AI
813// === Advanced Feature Integration ===
814
815std::string ConversationalAgentService::BuildEnhancedPrompt(
816 const std::string& user_message) {
817 std::ostringstream enhanced;
818
819 // Inject pretraining on first message
820 if (inject_pretraining_ && !pretraining_injected_ && rom_context_) {
821 enhanced << InjectPretraining() << "\n\n";
822 pretraining_injected_ = true;
823 }
824
825 // Inject learned context
826 if (inject_learned_context_) {
827 enhanced << InjectLearnedContext(user_message) << "\n";
828 }
829
830 enhanced << user_message;
831 return enhanced.str();
832}
833
834std::string ConversationalAgentService::InjectLearnedContext(
835 const std::string& message) {
836 std::ostringstream context;
837
838 // Add relevant preferences
839 auto prefs = learned_knowledge_.GetAllPreferences();
840 if (!prefs.empty() && prefs.size() <= 5) { // Don't overwhelm with too many
841 context << "[User Preferences: ";
842 std::vector<std::string> pref_strings;
843 for (const auto& [key, value] : prefs) {
844 pref_strings.push_back(absl::StrCat(key, "=", value));
845 }
846 context << absl::StrJoin(pref_strings, ", ") << "]\n";
847 }
848
849 // Add ROM-specific patterns
851 // TODO: Get ROM hash
852 // auto patterns = learned_knowledge_.QueryPatterns("", rom_hash);
853 }
854
855 // Add recent relevant memories
856 std::vector<std::string> keywords;
857 // Extract keywords from message (simple word splitting)
858 for (const auto& word : absl::StrSplit(message, ' ')) {
859 if (word.length() > 4) { // Only meaningful words
860 keywords.push_back(std::string(word));
861 }
862 }
863
864 if (!keywords.empty()) {
865 auto memories = learned_knowledge_.SearchMemories(keywords[0]);
866 if (!memories.empty() && memories.size() <= 3) {
867 context << "[Relevant Past Context:\n";
868 for (const auto& mem : memories) {
869 context << "- " << mem.topic << ": " << mem.summary << "\n";
870 }
871 context << "]\n";
872 }
873 }
874
875 return context.str();
876}
877
878std::string ConversationalAgentService::InjectPretraining() {
879 if (!rom_context_) {
880 return "";
881 }
882
883 std::ostringstream pretraining;
884 pretraining << "[SYSTEM KNOWLEDGE INJECTION - Read this first]\n\n";
886 pretraining << "\n[END KNOWLEDGE INJECTION]\n";
887
888 return pretraining.str();
889}
890
// Post-processing hook for agent responses before they reach the user.
// @param response      The message produced by the agent loop.
// @param user_message  The originating user prompt (currently unused).
// @return Currently returns `response` unchanged; this is a stub pending
//         AdvancedRouter integration (see TODO below).
ChatMessage ConversationalAgentService::EnhanceResponse(
    const ChatMessage& response, const std::string& user_message) {
  // Use AdvancedRouter to enhance tool-based responses
  // This would synthesize multi-tool results into coherent insights

  // For now, return response as-is
  // TODO: Integrate AdvancedRouter here
  return response;
}
900#endif // Z3ED_AI
901
902} // namespace agent
903} // namespace cli
904} // namespace yaze
The Rom class is used to load, save, and modify Rom data. This is a generic SNES ROM container and do...
Definition rom.h:24
bool is_loaded() const
Definition rom.h:128
static std::string GeneratePretrainingPrompt(Rom *rom)
Generate pre-training prompt for agent.
absl::StatusOr< ChatMessage > SendMessage(const std::string &message)
absl::Status ConfigureProvider(const AIServiceConfig &config)
void SetToolPreferences(const ToolDispatcher::ToolPreferences &prefs)
const std::vector< ChatMessage > & GetHistory() const
void HandleExternalResponse(const AgentResponse &response)
std::function< void(const std::vector< ChatMessage > &history)> ExternalDriverCallback
void ReplaceHistory(std::vector< ChatMessage > history)
void SetToolPreferences(const ToolPreferences &prefs)
absl::StatusOr< std::string > Dispatch(const ::yaze::cli::ToolCall &tool_call)
ABSL_DECLARE_FLAG(std::string, ai_provider)
ChatMessage CreateMessage(ChatMessage::Sender sender, const std::string &content)
std::optional< ChatMessage::TableData > BuildTableData(const nlohmann::json &data)
std::string TrimWhitespace(absl::string_view value)
absl::StatusOr< ProposalCreationResult > CreateProposalFromAgentResponse(const ProposalCreationRequest &)
constexpr const char * kDim
constexpr const char * kYellow
constexpr const char * kReset
constexpr const char * kCyan
void PrintWarning(const std::string &message)
void PrintToolCall(const std::string &tool_name, const std::string &details="")
void PrintInfo(const std::string &message)
void PrintSuccess(const std::string &message)
void PrintError(const std::string &message)
absl::StatusOr< std::unique_ptr< AIService > > CreateAIServiceStrict(const AIServiceConfig &config)
std::vector< std::string > commands
Definition common.h:26
std::string reasoning
Definition common.h:29
std::vector< ToolCall > tool_calls
Definition common.h:23
std::string text_response
Definition common.h:20
std::optional< ModelMetadata > model_metadata
std::optional< std::string > json_pretty
std::optional< ProposalSummary > proposal
std::optional< SessionMetrics > metrics