yaze 0.3.2
Link to the Past ROM Editor
 
Loading...
Searching...
No Matches
conversational_agent_service.cc
Go to the documentation of this file.
2
3#include <algorithm>
4#include <cctype>
5#include <iostream>
6#include <optional>
7#include <set>
8#include <sstream>
9#include <string>
10#include <vector>
11
12#include "absl/flags/declare.h"
13#include "absl/flags/flag.h"
14#include "absl/status/status.h"
15#include "absl/status/statusor.h"
16#include "absl/strings/str_cat.h"
17#include "absl/strings/str_format.h"
18#include "absl/strings/str_join.h"
19#include "absl/strings/str_split.h"
20#include "absl/strings/string_view.h"
21#include "absl/time/clock.h"
22#include "absl/time/time.h"
23#include "rom/rom.h"
29#include "nlohmann/json.hpp"
30
31#ifdef SendMessage
32#undef SendMessage
33#endif
34
35ABSL_DECLARE_FLAG(std::string, ai_provider);
36
37namespace yaze {
38namespace cli {
39namespace agent {
40
41namespace {
42
// Returns `input` with leading and trailing ASCII whitespace removed.
// An empty or all-whitespace input yields the empty string.
std::string TrimWhitespace(const std::string& input) {
  const auto not_space = [](unsigned char c) { return !std::isspace(c); };
  auto first = std::find_if(input.begin(), input.end(), not_space);
  auto last = std::find_if(input.rbegin(), input.rend(), not_space).base();
  return (first < last) ? std::string(first, last) : std::string();
}
56
57std::string JsonValueToString(const nlohmann::json& value) {
58 if (value.is_string()) {
59 return value.get<std::string>();
60 }
61 if (value.is_boolean()) {
62 return value.get<bool>() ? "true" : "false";
63 }
64 if (value.is_number()) {
65 return value.dump();
66 }
67 if (value.is_null()) {
68 return "null";
69 }
70 return value.dump();
71}
72
73std::set<std::string> CollectObjectKeys(const nlohmann::json& array) {
74 std::set<std::string> keys;
75 for (const auto& item : array) {
76 if (!item.is_object()) {
77 continue;
78 }
79 for (const auto& [key, _] : item.items()) {
80 keys.insert(key);
81 }
82 }
83 return keys;
84}
85
86std::optional<ChatMessage::TableData> BuildTableData(
87 const nlohmann::json& data) {
88 using TableData = ChatMessage::TableData;
89
90 if (data.is_object()) {
91 TableData table;
92 table.headers = {"Key", "Value"};
93 table.rows.reserve(data.size());
94 for (const auto& [key, value] : data.items()) {
95 table.rows.push_back({key, JsonValueToString(value)});
96 }
97 return table;
98 }
99
100 if (data.is_array()) {
101 TableData table;
102 if (data.empty()) {
103 table.headers = {"Value"};
104 return table;
105 }
106
107 const bool all_objects = std::all_of(
108 data.begin(), data.end(),
109 [](const nlohmann::json& item) { return item.is_object(); });
110
111 if (all_objects) {
112 auto keys = CollectObjectKeys(data);
113 if (keys.empty()) {
114 table.headers = {"Value"};
115 for (const auto& item : data) {
116 table.rows.push_back({JsonValueToString(item)});
117 }
118 return table;
119 }
120
121 table.headers.assign(keys.begin(), keys.end());
122 table.rows.reserve(data.size());
123 for (const auto& item : data) {
124 std::vector<std::string> row;
125 row.reserve(table.headers.size());
126 for (const auto& key : table.headers) {
127 if (item.contains(key)) {
128 row.push_back(JsonValueToString(item.at(key)));
129 } else {
130 row.emplace_back("-");
131 }
132 }
133 table.rows.push_back(std::move(row));
134 }
135 return table;
136 }
137
138 table.headers = {"Value"};
139 table.rows.reserve(data.size());
140 for (const auto& item : data) {
141 table.rows.push_back({JsonValueToString(item)});
142 }
143 return table;
144 }
145
146 return std::nullopt;
147}
148
149bool IsExecutableCommand(absl::string_view command) {
150 return !command.empty() && command.front() != '#';
151}
152
// Counts the entries of `commands` that would actually run, i.e. the
// non-empty lines that are not '#' comments (the IsExecutableCommand
// predicate, inlined here).
int CountExecutableCommands(const std::vector<std::string>& commands) {
  return static_cast<int>(std::count_if(
      commands.begin(), commands.end(), [](const std::string& command) {
        return !command.empty() && command.front() != '#';
      }));
}
162
    const std::string& content) {
  // Builds a timestamped ChatMessage for `sender`.
  ChatMessage message;
  message.sender = sender;
  message.message = content;
  message.timestamp = absl::Now();

  // Agent replies that look like JSON ("{...}" or "[...]") are parsed so the
  // UI can offer a table view and a pretty-printed rendering. Parse failures
  // are deliberately swallowed: the raw text is displayed instead.
  if (sender == ChatMessage::Sender::kAgent) {
    const std::string trimmed = TrimWhitespace(content);
    if (!trimmed.empty() &&
        (trimmed.front() == '{' || trimmed.front() == '[')) {
      try {
        nlohmann::json parsed = nlohmann::json::parse(trimmed);
        message.table_data = BuildTableData(parsed);
        message.json_pretty = parsed.dump(2);
      } catch (const nlohmann::json::parse_error&) {
        // Ignore parse errors, fall back to raw text.
      }
    }
  }

  return message;
}
186
187} // namespace
188
  // Default to a lightweight mock provider to avoid slow network checks during
  // startup (especially on mac-ai builds). The real provider is created when
  // ConfigureProvider is called from the UI.
  provider_config_.provider = "mock";
  ai_service_ = std::make_unique<MockAIService>();

#ifdef Z3ED_AI
  // Initialize advanced features. Failures are non-fatal: the agent degrades
  // gracefully without the learned-knowledge store or TODO manager, so they
  // are only reported (to stderr, and only in verbose mode) rather than
  // propagated out of the constructor.
  auto learn_status = learned_knowledge_.Initialize();
  if (!learn_status.ok() && config_.verbose) {
    std::cerr << "Warning: Failed to initialize learned knowledge: "
              << learn_status.message() << std::endl;
  }

  auto todo_status = todo_manager_.Initialize();
  if (!todo_status.ok() && config_.verbose) {
    std::cerr << "Warning: Failed to initialize TODO manager: "
              << todo_status.message() << std::endl;
  }
#endif
}
212
    const AgentConfig& config)
    : config_(config) {
  // Avoid auto-detecting providers (which can block on network) until the UI
  // applies an explicit configuration.
  provider_config_.provider = "mock";
  ai_service_ = std::make_unique<MockAIService>();

#ifdef Z3ED_AI
  // Initialize advanced features. Same policy as the default constructor:
  // initialization failures are demoted to verbose-only warnings so a broken
  // knowledge store cannot take down the whole agent.
  auto learn_status = learned_knowledge_.Initialize();
  if (!learn_status.ok() && config_.verbose) {
    std::cerr << "Warning: Failed to initialize learned knowledge: "
              << learn_status.message() << std::endl;
  }

  auto todo_status = todo_manager_.Initialize();
  if (!todo_status.ok() && config_.verbose) {
    std::cerr << "Warning: Failed to initialize TODO manager: "
              << todo_status.message() << std::endl;
  }
#endif
}
237
246
251
254 return;
255 }
256
257 while (history_.size() > config_.max_history_messages) {
258 history_.erase(history_.begin());
259 }
260}
261
280
284
289
    const AgentResponse& agent_response) {
  // Process the response similar to the internal loop:
  // 1. Check for tool calls
  // 2. Execute tools
  // 3. Create proposal if needed
  // 4. Append Agent message to history
  // 5. If tools executed, call external driver again (loop)

  bool executed_tool = false;
  std::vector<std::string> executed_tools;

  if (!agent_response.tool_calls.empty()) {
    for (const auto& tool_call : agent_response.tool_calls) {
      // Format tool arguments for display
      std::vector<std::string> arg_parts;
      for (const auto& [key, value] : tool_call.args) {
        arg_parts.push_back(absl::StrCat(key, "=", value));
      }
      std::string args_str = absl::StrJoin(arg_parts, ", ");

      util::PrintToolCall(tool_call.tool_name, args_str);

      // Unlike the internal loop, a failed tool here is NOT fatal: the error
      // text is fed back to the model as the tool output.
      auto tool_result_or = tool_dispatcher_.Dispatch(tool_call);
      std::string tool_output;
      if (!tool_result_or.ok()) {
        tool_output = absl::StrCat("Error: ", tool_result_or.status().message());
        util::PrintError(tool_output);
      } else {
        tool_output = tool_result_or.value();
        util::PrintSuccess("Tool executed successfully");
      }

      if (!tool_output.empty()) {
        // Add tool result as internal message (hidden from the human user)
        // so the model can summarize it on the next round trip.
        std::string marked_output = absl::StrCat(
            "[TOOL RESULT for ", tool_call.tool_name, "]\n",
            "The tool returned the following data:\n", tool_output, "\n\n",
            "Please provide a text_response field in your JSON to summarize "
            "this information for the user.");
        auto tool_result_msg =
            CreateMessage(ChatMessage::Sender::kUser, marked_output);
        tool_result_msg.is_internal = true;
        history_.push_back(tool_result_msg);
      }
      executed_tool = true;
      executed_tools.push_back(tool_call.tool_name);
    }
  }

  // If tools were executed, we need to loop back to the AI.
  // NOTE(review): the re-query presumably happens via the external driver
  // callback with the updated history — confirm in the full source (lines
  // folded from this view).
  if (executed_tool && has_external_driver_) {
    return;  // Wait for next response
  }

  // Final text response processing
  std::optional<ProposalCreationResult> proposal_result;
  absl::Status proposal_status = absl::OkStatus();
  bool attempted_proposal = false;

  if (!agent_response.commands.empty()) {
    attempted_proposal = true;
      // Use last user message as prompt context if available (newest
      // non-internal user message wins).
      if (!history_.empty()) {
        for (auto it = history_.rbegin(); it != history_.rend(); ++it) {
          if (it->sender == ChatMessage::Sender::kUser && !it->is_internal) {
            request.prompt = it->message;
            break;
          }
        }
      }
      request.response = &agent_response;
      request.rom = rom_context_;
      request.sandbox_label = "agent-chat";
      request.ai_provider = "external";

      auto creation_or = CreateProposalFromAgentResponse(request);
      if (!creation_or.ok()) {
        proposal_status = creation_or.status();
        util::PrintError(absl::StrCat("Failed to create proposal: ",
                                      proposal_status.message()));
      } else {
        proposal_result = std::move(creation_or.value());
      }
    }
  }

  // Construct text response: base text, then optional reasoning, commands,
  // and proposal status, each separated by a blank line.
  std::string response_text = agent_response.text_response;
  if (!agent_response.reasoning.empty()) {
    if (!response_text.empty()) response_text.append("\n\n");
    response_text.append("Reasoning: ").append(agent_response.reasoning);
  }

  if (!agent_response.commands.empty()) {
    if (!response_text.empty()) response_text.append("\n\n");
    response_text.append("Commands:\n").append(absl::StrJoin(agent_response.commands, "\n"));
  }
  metrics_.commands_generated += CountExecutableCommands(agent_response.commands);

  if (proposal_result.has_value()) {
    const auto& metadata = proposal_result->metadata;
    if (!response_text.empty()) response_text.append("\n\n");
    response_text.append(absl::StrFormat(
        "✅ Proposal %s ready with %d change%s (%d command%s).",
        metadata.id, proposal_result->change_count,
        proposal_result->change_count == 1 ? "" : "s",
        proposal_result->executed_commands,
        proposal_result->executed_commands == 1 ? "" : "s"));
  } else if (attempted_proposal && !proposal_status.ok()) {
    if (!response_text.empty()) response_text.append("\n\n");
    response_text.append(absl::StrCat("⚠️ Failed to prepare proposal: ", proposal_status.message()));
  }

  // Remove the "Thinking..." placeholder if present (SendMessage pushes it
  // on the external-driver path).
  if (!history_.empty() && history_.back().sender == ChatMessage::Sender::kAgent &&
      history_.back().message == "Thinking...") {
    history_.pop_back();
  }

  // Add final message
  ChatMessage chat_response = CreateMessage(ChatMessage::Sender::kAgent, response_text);
  if (proposal_result.has_value()) {
    summary.id = proposal_result->metadata.id;
    summary.change_count = proposal_result->change_count;
    summary.executed_commands = proposal_result->executed_commands;
    chat_response.proposal = summary;
  }

  // Metadata — the external driver doesn't report these, so they are
  // hard-coded for now.
  meta.provider = "external";
  meta.model = "gemini";  // Could get this from JS
  meta.tool_names = executed_tools;
  chat_response.model_metadata = meta;

  history_.push_back(chat_response);
}
438
// Runs one conversational turn: records the user message, then drives the
// AI/tool loop until the model yields a final text response or the
// config_.max_tool_iterations budget is exhausted.
//
// Returns the final agent ChatMessage (also appended to history_), or an
// error if the AI backend or a tool call fails. On the external-driver path
// a "Thinking..." placeholder is returned instead and the real reply arrives
// via HandleExternalResponse.
absl::StatusOr<ChatMessage> ConversationalAgentService::SendMessage(
    const std::string& message) {
  // An empty message is only legal as a "continue" on an existing
  // conversation; the very first turn must carry user text.
  if (message.empty() && history_.empty()) {
    return absl::InvalidArgumentError(
        "Conversation must start with a non-empty message.");
  }

  if (!message.empty()) {
    history_.push_back(CreateMessage(ChatMessage::Sender::kUser, message));
  }

  // External Driver Path (WASM/Sidecar)
    // Return a placeholder that indicates waiting.
    // The UI should handle this update gracefully via callbacks.
    return CreateMessage(ChatMessage::Sender::kAgent, "Thinking...");
  }

  const int max_iterations = config_.max_tool_iterations;
  bool waiting_for_text_response = false;
  absl::Time turn_start = absl::Now();
  std::vector<std::string> executed_tools;

  if (config_.verbose) {
    util::PrintInfo(absl::StrCat("Starting agent loop (max ", max_iterations,
                                 " iterations)"));
        absl::StrCat("History size: ", history_.size(), " messages"));
  }

  // Tool loop: each pass asks the AI for a response; tool calls are executed
  // and their output fed back as internal messages until the model produces
  // a final text answer or the iteration budget runs out.
  for (int iteration = 0; iteration < max_iterations; ++iteration) {
    if (config_.verbose) {
      std::cout << util::colors::kCyan << "Iteration " << (iteration + 1) << "/"
                << max_iterations << util::colors::kReset << std::endl;
    }

    // Show loading indicator while waiting for AI response
        waiting_for_text_response ? "Generating final response..."
                                  : "Thinking...",
        !config_.verbose);  // Hide spinner in verbose mode
    loader.Start();

    auto response_or = ai_service_->GenerateResponse(history_);
    loader.Stop();

    if (!response_or.ok()) {
      util::PrintError(absl::StrCat("Failed to get AI response: ",
                                    response_or.status().message()));
      return absl::InternalError(absl::StrCat("Failed to get AI response: ",
                                              response_or.status().message()));
    }

    const auto& agent_response = response_or.value();

    if (config_.verbose) {
      util::PrintInfo("Received agent response:");
      std::cout << util::colors::kDim
                << "  - Tool calls: " << agent_response.tool_calls.size()
                << util::colors::kReset << std::endl;
      std::cout << util::colors::kDim
                << "  - Commands: " << agent_response.commands.size()
                << util::colors::kReset << std::endl;
      std::cout << util::colors::kDim << "  - Text response: "
                << (agent_response.text_response.empty() ? "empty" : "present")
                << util::colors::kReset << std::endl;
      if (!agent_response.reasoning.empty() && config_.show_reasoning) {
        std::cout << util::colors::kYellow
                  << "  💭 Reasoning: " << util::colors::kDim
                  << agent_response.reasoning << util::colors::kReset
                  << std::endl;
      }
    }

    if (!agent_response.tool_calls.empty()) {
      // Check if we were waiting for a text response but got more tool calls
      // instead
      if (waiting_for_text_response) {
            absl::StrCat("LLM called tools again instead of providing final "
                         "response (Iteration: ",
                         iteration + 1, "/", max_iterations, ")"));
      }

      bool executed_tool = false;
      for (const auto& tool_call : agent_response.tool_calls) {
        // Format tool arguments for display
        std::vector<std::string> arg_parts;
        for (const auto& [key, value] : tool_call.args) {
          arg_parts.push_back(absl::StrCat(key, "=", value));
        }
        std::string args_str = absl::StrJoin(arg_parts, ", ");

        util::PrintToolCall(tool_call.tool_name, args_str);

        // Unlike HandleExternalResponse, a failed tool aborts the whole turn
        // here.
        auto tool_result_or = tool_dispatcher_.Dispatch(tool_call);
        if (!tool_result_or.ok()) {
          util::PrintError(absl::StrCat("Tool execution failed: ",
                                        tool_result_or.status().message()));
          return absl::InternalError(absl::StrCat(
              "Tool execution failed: ", tool_result_or.status().message()));
        }

        const std::string& tool_output = tool_result_or.value();
        if (!tool_output.empty()) {
          util::PrintSuccess("Tool executed successfully");

          if (config_.verbose) {
            std::cout << util::colors::kDim
                      << "Tool output (truncated):" << util::colors::kReset
                      << std::endl;
            // Preview only the first 200 bytes to keep the console readable.
            std::string preview = tool_output.substr(
                0, std::min(size_t(200), tool_output.size()));
            if (tool_output.size() > 200)
              preview += "...";
            std::cout << util::colors::kDim << preview << util::colors::kReset
                      << std::endl;
          }

          // Add tool result with a clear marker for the LLM.
          // Format as plain text to avoid confusing the LLM with nested JSON.
          std::string marked_output = absl::StrCat(
              "[TOOL RESULT for ", tool_call.tool_name, "]\n",
              "The tool returned the following data:\n", tool_output, "\n\n",
              "Please provide a text_response field in your JSON to summarize "
              "this information for the user.");
          auto tool_result_msg =
              CreateMessage(ChatMessage::Sender::kUser, marked_output);
          tool_result_msg.is_internal =
              true;  // Don't show this to the human user
          history_.push_back(tool_result_msg);
        }
        executed_tool = true;
        executed_tools.push_back(tool_call.tool_name);
      }

      if (executed_tool) {
        // Now we're waiting for the LLM to provide a text response
        waiting_for_text_response = true;
        // Re-query the AI with updated context.
        continue;
      }
    }

    // Check if we received a text response after tool execution
    if (waiting_for_text_response && agent_response.text_response.empty() &&
        agent_response.commands.empty()) {
          absl::StrCat("LLM did not provide text_response after receiving tool "
                       "results (Iteration: ",
                       iteration + 1, "/", max_iterations, ")"));
      // Continue to give it another chance
      continue;
    }

    // The model produced commands and/or text: optionally turn the commands
    // into a sandboxed change proposal, then compose the final reply.
    std::optional<ProposalCreationResult> proposal_result;
    absl::Status proposal_status = absl::OkStatus();
    bool attempted_proposal = false;

    if (!agent_response.commands.empty()) {
      attempted_proposal = true;

      if (rom_context_ == nullptr) {
        proposal_status = absl::FailedPreconditionError(
            "No ROM context available for proposal creation");
            "Cannot create proposal because no ROM context is active.");
      } else if (!rom_context_->is_loaded()) {
        proposal_status =
            absl::FailedPreconditionError("ROM context is not loaded");
            "Cannot create proposal because the ROM context is not loaded.");
      } else {
        request.prompt = message;
        request.response = &agent_response;
        request.rom = rom_context_;
        request.sandbox_label = "agent-chat";
        request.ai_provider = absl::GetFlag(FLAGS_ai_provider);

        auto creation_or = CreateProposalFromAgentResponse(request);
        if (!creation_or.ok()) {
          proposal_status = creation_or.status();
          util::PrintError(absl::StrCat("Failed to create proposal: ",
                                        proposal_status.message()));
        } else {
          proposal_result = std::move(creation_or.value());
          if (config_.verbose) {
            util::PrintSuccess(absl::StrCat(
                "Created proposal ", proposal_result->metadata.id, " with ",
                proposal_result->change_count, " change(s)."));
          }
        }
      }
    }

    // Compose the reply: base text, then reasoning, commands, and proposal
    // status, each separated by a blank line.
    std::string response_text = agent_response.text_response;
    if (!agent_response.reasoning.empty()) {
      if (!response_text.empty()) {
        response_text.append("\n\n");
      }
      response_text.append("Reasoning: ");
      response_text.append(agent_response.reasoning);
    }
    const int executable_commands =
        CountExecutableCommands(agent_response.commands);
    if (!agent_response.commands.empty()) {
      if (!response_text.empty()) {
        response_text.append("\n\n");
      }
      response_text.append("Commands:\n");
      response_text.append(absl::StrJoin(agent_response.commands, "\n"));
    }
    metrics_.commands_generated += executable_commands;

    if (proposal_result.has_value()) {
      const auto& metadata = proposal_result->metadata;
      if (!response_text.empty()) {
        response_text.append("\n\n");
      }
      response_text.append(absl::StrFormat(
          "✅ Proposal %s ready with %d change%s (%d command%s).\n"
          "Review it in the Proposal drawer or run `z3ed agent diff "
          "--proposal-id %s`.\n"
          "Sandbox ROM: %s\nProposal JSON: %s",
          metadata.id, proposal_result->change_count,
          proposal_result->change_count == 1 ? "" : "s",
          proposal_result->executed_commands,
          proposal_result->executed_commands == 1 ? "" : "s", metadata.id,
          metadata.sandbox_rom_path.string(),
          proposal_result->proposal_json_path.string()));
    } else if (attempted_proposal && !proposal_status.ok()) {
      if (!response_text.empty()) {
        response_text.append("\n\n");
      }
      response_text.append(
          absl::StrCat("⚠️ Failed to prepare a proposal automatically: ",
                       proposal_status.message()));
    }
    ChatMessage chat_response =
        CreateMessage(ChatMessage::Sender::kAgent, response_text);
    if (proposal_result.has_value()) {
      summary.id = proposal_result->metadata.id;
      summary.change_count = proposal_result->change_count;
      summary.executed_commands = proposal_result->executed_commands;
      summary.sandbox_rom_path = proposal_result->metadata.sandbox_rom_path;
      summary.proposal_json_path = proposal_result->proposal_json_path;
      chat_response.proposal = summary;
    }
    // Account the whole turn's wall-clock time, then attach metrics/metadata.
    metrics_.total_latency += absl::Now() - turn_start;
    chat_response.metrics = BuildMetricsSnapshot();
    if (!agent_response.warnings.empty()) {
      chat_response.warnings = agent_response.warnings;
    }
    meta.provider = !agent_response.provider.empty()
                        ? agent_response.provider
    meta.model = !agent_response.model.empty() ? agent_response.model
    // Prefer the backend-reported latency; fall back to our own measurement.
    meta.latency_seconds =
        agent_response.latency_seconds > 0.0
            ? agent_response.latency_seconds
            : absl::ToDoubleSeconds(absl::Now() - turn_start);
    meta.tool_names = executed_tools;
    meta.parameters = agent_response.parameters;
    chat_response.model_metadata = meta;
    history_.push_back(chat_response);
    return chat_response;
  }

  // Iteration budget exhausted without a final answer.
  return absl::InternalError(
      "Agent did not produce a response after executing tools.");
}
724
    const AIServiceConfig& config) {
  // Swaps the AI backend for the one described by `config`.
  auto service_or = CreateAIServiceStrict(config);
  if (!service_or.ok()) {
    // Keep the existing service running and fall back to mock so the UI stays
    // responsive. The original error status is still returned so the caller
    // can surface it.
    std::cerr << "Provider configuration failed: " << service_or.status()
              << " — falling back to mock" << std::endl;
    ai_service_ = std::make_unique<MockAIService>();
    provider_config_.provider = "mock";
    if (rom_context_) {
      ai_service_->SetRomContext(rom_context_);
    }
    return service_or.status();
  }

  // Success: install the new service and re-attach the ROM context (the new
  // instance starts without one).
  ai_service_ = std::move(service_or.value());
  provider_config_ = config;
  if (rom_context_) {
    ai_service_->SetRomContext(rom_context_);
  }
  return absl::OkStatus();
}
748
754
// Read-only view of the full transcript, including internal tool-result
// messages. The reference stays valid only until history_ next mutates.
const std::vector<ChatMessage>& ConversationalAgentService::GetHistory() const {
  return history_;
}
758
    std::vector<ChatMessage> history) {
  // Replaces the transcript wholesale, e.g. when restoring a saved session.
  history_ = std::move(history);
}
765
768
  bool has_snapshot = false;

  // Walk the transcript and keep the most recent embedded metrics snapshot
  // (later messages overwrite earlier ones). NOTE(review): the per-sender
  // and per-proposal branches below have their bodies folded out of this
  // view — presumably they recount messages/proposals; confirm in the full
  // source.
  for (const auto& message : history_) {
    if (message.sender == ChatMessage::Sender::kUser) {
    } else if (message.sender == ChatMessage::Sender::kAgent) {
    }

    if (message.proposal.has_value()) {
    }

    if (message.metrics.has_value()) {
      snapshot = *message.metrics;
      has_snapshot = true;
    }
  }

  // Mirror the newest snapshot back into the running metrics_ accumulator so
  // subsequent turns continue counting from the restored totals.
  if (has_snapshot) {
    metrics_.user_messages = snapshot.total_user_messages;
    metrics_.agent_messages = snapshot.total_agent_messages;
    metrics_.tool_calls = snapshot.total_tool_calls;
    metrics_.commands_generated = snapshot.total_commands;
    metrics_.proposals_created = snapshot.total_proposals;
    metrics_.turns_completed = snapshot.turn_index;
    metrics_.total_latency = absl::Seconds(snapshot.total_elapsed_seconds);
  }
}
800
801#ifdef Z3ED_AI
802// === Advanced Feature Integration ===
803
804std::string ConversationalAgentService::BuildEnhancedPrompt(
805 const std::string& user_message) {
806 std::ostringstream enhanced;
807
808 // Inject pretraining on first message
809 if (inject_pretraining_ && !pretraining_injected_ && rom_context_) {
810 enhanced << InjectPretraining() << "\n\n";
811 pretraining_injected_ = true;
812 }
813
814 // Inject learned context
815 if (inject_learned_context_) {
816 enhanced << InjectLearnedContext(user_message) << "\n";
817 }
818
819 enhanced << user_message;
820 return enhanced.str();
821}
822
// Builds a bracketed context preamble from the learned-knowledge store:
// user preferences, (eventually) ROM-specific patterns, and memories that
// match the message's first long keyword.
std::string ConversationalAgentService::InjectLearnedContext(
    const std::string& message) {
  std::ostringstream context;

  // Add relevant preferences — but only when there are few enough (<= 5)
  // that they won't crowd out the user's actual message.
  auto prefs = learned_knowledge_.GetAllPreferences();
  if (!prefs.empty() && prefs.size() <= 5) {  // Don't overwhelm with too many
    context << "[User Preferences: ";
    std::vector<std::string> pref_strings;
    for (const auto& [key, value] : prefs) {
      pref_strings.push_back(absl::StrCat(key, "=", value));
    }
    context << absl::StrJoin(pref_strings, ", ") << "]\n";
  }

  // Add ROM-specific patterns
    // TODO: Get ROM hash
    // auto patterns = learned_knowledge_.QueryPatterns("", rom_hash);
  }

  // Add recent relevant memories.
  std::vector<std::string> keywords;
  // Extract keywords from message (simple word splitting); words of 5+
  // characters are treated as meaningful.
  for (const auto& word : absl::StrSplit(message, ' ')) {
    if (word.length() > 4) {  // Only meaningful words
      keywords.push_back(std::string(word));
    }
  }

  // Only the first keyword is searched, and only small hit lists (<= 3) are
  // injected — same "don't overwhelm" policy as the preferences above.
  if (!keywords.empty()) {
    auto memories = learned_knowledge_.SearchMemories(keywords[0]);
    if (!memories.empty() && memories.size() <= 3) {
      context << "[Relevant Past Context:\n";
      for (const auto& mem : memories) {
        context << "- " << mem.topic << ": " << mem.summary << "\n";
      }
      context << "]\n";
    }
  }

  return context.str();
}
866
// Produces the one-shot "[SYSTEM KNOWLEDGE INJECTION]" block prepended to
// the first prompt of a session; empty when no ROM is loaded.
std::string ConversationalAgentService::InjectPretraining() {
  if (!rom_context_) {
    return "";
  }

  std::ostringstream pretraining;
  pretraining << "[SYSTEM KNOWLEDGE INJECTION - Read this first]\n\n";
  // NOTE(review): the knowledge body between the markers is folded out of
  // this view — presumably GeneratePretrainingPrompt(rom_context_); confirm
  // in the full source.
  pretraining << "\n[END KNOWLEDGE INJECTION]\n";

  return pretraining.str();
}
879
// Post-processes an agent reply before it is shown to the user. Currently a
// pass-through: `user_message` is unused until the AdvancedRouter
// integration lands.
ChatMessage ConversationalAgentService::EnhanceResponse(
    const ChatMessage& response, const std::string& user_message) {
  // Use AdvancedRouter to enhance tool-based responses.
  // This would synthesize multi-tool results into coherent insights.

  // For now, return response as-is.
  // TODO: Integrate AdvancedRouter here
  return response;
}
889#endif // Z3ED_AI
890
891} // namespace agent
892} // namespace cli
893} // namespace yaze
The Rom class is used to load, save, and modify Rom data. This is a generic SNES ROM container and do...
Definition rom.h:24
bool is_loaded() const
Definition rom.h:128
static std::string GeneratePretrainingPrompt(Rom *rom)
Generate pre-training prompt for agent.
absl::StatusOr< ChatMessage > SendMessage(const std::string &message)
absl::Status ConfigureProvider(const AIServiceConfig &config)
void SetToolPreferences(const ToolDispatcher::ToolPreferences &prefs)
const std::vector< ChatMessage > & GetHistory() const
void HandleExternalResponse(const AgentResponse &response)
std::function< void(const std::vector< ChatMessage > &history)> ExternalDriverCallback
void ReplaceHistory(std::vector< ChatMessage > history)
void SetToolPreferences(const ToolPreferences &prefs)
absl::StatusOr< std::string > Dispatch(const ::yaze::cli::ToolCall &tool_call)
ABSL_DECLARE_FLAG(std::string, ai_provider)
ChatMessage CreateMessage(ChatMessage::Sender sender, const std::string &content)
std::optional< ChatMessage::TableData > BuildTableData(const nlohmann::json &data)
std::string TrimWhitespace(absl::string_view value)
absl::StatusOr< ProposalCreationResult > CreateProposalFromAgentResponse(const ProposalCreationRequest &)
constexpr const char * kDim
constexpr const char * kYellow
constexpr const char * kReset
constexpr const char * kCyan
void PrintWarning(const std::string &message)
void PrintToolCall(const std::string &tool_name, const std::string &details="")
void PrintInfo(const std::string &message)
void PrintSuccess(const std::string &message)
void PrintError(const std::string &message)
absl::StatusOr< std::unique_ptr< AIService > > CreateAIServiceStrict(const AIServiceConfig &config)
std::vector< std::string > commands
Definition common.h:26
std::string reasoning
Definition common.h:29
std::vector< ToolCall > tool_calls
Definition common.h:23
std::string text_response
Definition common.h:20
std::optional< ModelMetadata > model_metadata
std::optional< std::string > json_pretty
std::optional< ProposalSummary > proposal
std::optional< SessionMetrics > metrics