yaze 0.3.2
Link to the Past ROM Editor
 
conversational_agent_service.cc
2
3#include <algorithm>
4#include <cctype>
5#include <iostream>
6#include <optional>
7#include <set>
8#include <sstream>
9#include <string>
10#include <vector>
11
12#include "absl/flags/declare.h"
13#include "absl/flags/flag.h"
14#include "absl/status/status.h"
15#include "absl/strings/str_cat.h"
16#include "absl/strings/str_format.h"
17#include "absl/strings/str_join.h"
18#include "absl/strings/str_split.h"
19#include "absl/strings/string_view.h"
20#include "absl/time/clock.h"
21#include "absl/time/time.h"
22#include "app/rom.h"
28#include "nlohmann/json.hpp"
29
30#ifdef SendMessage
31#undef SendMessage
32#endif
33
34ABSL_DECLARE_FLAG(std::string, ai_provider);
35
36namespace yaze {
37namespace cli {
38namespace agent {
39
40namespace {
41
42std::string TrimWhitespace(const std::string& input) {
43 auto begin = std::find_if_not(input.begin(), input.end(),
44 [](unsigned char c) { return std::isspace(c); });
45 auto end = std::find_if_not(input.rbegin(), input.rend(),
46 [](unsigned char c) { return std::isspace(c); })
47 .base();
48 if (begin >= end) {
49 return "";
50 }
51 return std::string(begin, end);
52}
53
54std::string JsonValueToString(const nlohmann::json& value) {
55 if (value.is_string()) {
56 return value.get<std::string>();
57 }
58 if (value.is_boolean()) {
59 return value.get<bool>() ? "true" : "false";
60 }
61 if (value.is_number()) {
62 return value.dump();
63 }
64 if (value.is_null()) {
65 return "null";
66 }
67 return value.dump();
68}
69
70std::set<std::string> CollectObjectKeys(const nlohmann::json& array) {
71 std::set<std::string> keys;
72 for (const auto& item : array) {
73 if (!item.is_object()) {
74 continue;
75 }
76 for (const auto& [key, _] : item.items()) {
77 keys.insert(key);
78 }
79 }
80 return keys;
81}
82
83std::optional<ChatMessage::TableData> BuildTableData(const nlohmann::json& data) {
84 using TableData = ChatMessage::TableData;
85
86 if (data.is_object()) {
87 TableData table;
88 table.headers = {"Key", "Value"};
89 table.rows.reserve(data.size());
90 for (const auto& [key, value] : data.items()) {
91 table.rows.push_back({key, JsonValueToString(value)});
92 }
93 return table;
94 }
95
96 if (data.is_array()) {
97 TableData table;
98 if (data.empty()) {
99 table.headers = {"Value"};
100 return table;
101 }
102
103 const bool all_objects = std::all_of(data.begin(), data.end(), [](const nlohmann::json& item) {
104 return item.is_object();
105 });
106
107 if (all_objects) {
108 auto keys = CollectObjectKeys(data);
109 if (keys.empty()) {
110 table.headers = {"Value"};
111 for (const auto& item : data) {
112 table.rows.push_back({JsonValueToString(item)});
113 }
114 return table;
115 }
116
117 table.headers.assign(keys.begin(), keys.end());
118 table.rows.reserve(data.size());
119 for (const auto& item : data) {
120 std::vector<std::string> row;
121 row.reserve(table.headers.size());
122 for (const auto& key : table.headers) {
123 if (item.contains(key)) {
124 row.push_back(JsonValueToString(item.at(key)));
125 } else {
126 row.emplace_back("-");
127 }
128 }
129 table.rows.push_back(std::move(row));
130 }
131 return table;
132 }
133
134 table.headers = {"Value"};
135 table.rows.reserve(data.size());
136 for (const auto& item : data) {
137 table.rows.push_back({JsonValueToString(item)});
138 }
139 return table;
140 }
141
142 return std::nullopt;
143}
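// Illustrative conversions (values are made up):
//   {"id": 7, "name": "Hyrule"}                -> headers {Key, Value}; one row per field
//   [{"id": 1}, {"id": 2, "area": "west"}]     -> headers {area, id} (alphabetical union of
//                                                 keys); absent cells are filled with "-"
//   [1, 2, 3]                                  -> a single "Value" column, one row per element
//   "plain text" / 42 / true                   -> std::nullopt, so no table is attached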
144
145bool IsExecutableCommand(absl::string_view command) {
146 return !command.empty() && command.front() != '#';
147}
148
149int CountExecutableCommands(const std::vector<std::string>& commands) {
150 int count = 0;
151 for (const auto& command : commands) {
152 if (IsExecutableCommand(command)) {
153 ++count;
154 }
155 }
156 return count;
157}
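// A leading '#' marks a command entry as an annotation rather than something to
// run, so a list such as {"# explain the edit first", "<z3ed command>"} counts
// as one executable command.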
158
159ChatMessage CreateMessage(ChatMessage::Sender sender, const std::string& content) {
160 ChatMessage message;
161 message.sender = sender;
162 message.message = content;
163 message.timestamp = absl::Now();
164
165 if (sender == ChatMessage::Sender::kAgent) {
166 const std::string trimmed = TrimWhitespace(content);
167 if (!trimmed.empty() && (trimmed.front() == '{' || trimmed.front() == '[')) {
168 try {
169 nlohmann::json parsed = nlohmann::json::parse(trimmed);
170 message.table_data = BuildTableData(parsed);
171 message.json_pretty = parsed.dump(2);
172 } catch (const nlohmann::json::parse_error&) {
173 // Ignore parse errors and fall back to the raw text.
174 }
175 }
176 }
177
178 return message;
179}
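// Agent replies whose trimmed text starts with '{' or '[' are parsed once and
// cached in two display forms: json_pretty (a 2-space indented dump) and, when
// BuildTableData() can shape it, table_data. For example, an agent reply of
// {"bank": 3, "free_bytes": 412} (illustrative) becomes a two-row Key/Value
// table, while anything that fails to parse is left as plain message text.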
180
181} // namespace
182
185
186#ifdef Z3ED_AI
187 // Initialize advanced features
188 auto learn_status = learned_knowledge_.Initialize();
189 if (!learn_status.ok() && config_.verbose) {
190 std::cerr << "Warning: Failed to initialize learned knowledge: "
191 << learn_status.message() << std::endl;
192 }
193
194 auto todo_status = todo_manager_.Initialize();
195 if (!todo_status.ok() && config_.verbose) {
196 std::cerr << "Warning: Failed to initialize TODO manager: "
197 << todo_status.message() << std::endl;
198 }
199#endif
200}
201
203 : config_(config) {
205
206#ifdef Z3ED_AI
207 // Initialize advanced features
208 auto learn_status = learned_knowledge_.Initialize();
209 if (!learn_status.ok() && config_.verbose) {
210 std::cerr << "Warning: Failed to initialize learned knowledge: "
211 << learn_status.message() << std::endl;
212 }
213
214 auto todo_status = todo_manager_.Initialize();
215 if (!todo_status.ok() && config_.verbose) {
216 std::cerr << "Warning: Failed to initialize TODO manager: "
217 << todo_status.message() << std::endl;
218 }
219#endif
220}
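// Both construction paths treat the optional Z3ED_AI subsystems (learned
// knowledge store and TODO manager) as best-effort: initialization failures
// are only surfaced as warnings, and only when config_.verbose is set.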
221
230
235
238 return;
239 }
240
241 while (history_.size() > config_.max_history_messages) {
242 history_.erase(history_.begin());
243 }
244}
245
262
266
267absl::StatusOr<ChatMessage> ConversationalAgentService::SendMessage(
268 const std::string& message) {
269 if (message.empty() && history_.empty()) {
270 return absl::InvalidArgumentError(
271 "Conversation must start with a non-empty message.");
272 }
273
274 if (!message.empty()) {
275 history_.push_back(CreateMessage(ChatMessage::Sender::kUser, message));
278 }
279
280 const int max_iterations = config_.max_tool_iterations;
281 bool waiting_for_text_response = false;
282 absl::Time turn_start = absl::Now();
283
284 if (config_.verbose) {
285 util::PrintInfo(absl::StrCat("Starting agent loop (max ", max_iterations, " iterations)"));
286 util::PrintInfo(absl::StrCat("History size: ", history_.size(), " messages"));
287 }
288
289 for (int iteration = 0; iteration < max_iterations; ++iteration) {
290 if (config_.verbose) {
292 std::cout << util::colors::kCyan << "Iteration " << (iteration + 1)
293 << "/" << max_iterations << util::colors::kReset << std::endl;
294 }
295
296 // Show loading indicator while waiting for AI response
298 waiting_for_text_response
299 ? "Generating final response..."
300 : "Thinking...",
301 !config_.verbose); // Hide spinner in verbose mode
302 loader.Start();
303
304 auto response_or = ai_service_->GenerateResponse(history_);
305 loader.Stop();
306
307 if (!response_or.ok()) {
308 util::PrintError(absl::StrCat(
309 "Failed to get AI response: ", response_or.status().message()));
310 return absl::InternalError(absl::StrCat(
311 "Failed to get AI response: ", response_or.status().message()));
312 }
313
314 const auto& agent_response = response_or.value();
315
316 if (config_.verbose) {
317 util::PrintInfo("Received agent response:");
318 std::cout << util::colors::kDim << " - Tool calls: "
319 << agent_response.tool_calls.size() << util::colors::kReset << std::endl;
320 std::cout << util::colors::kDim << " - Commands: "
321 << agent_response.commands.size() << util::colors::kReset << std::endl;
322 std::cout << util::colors::kDim << " - Text response: "
323 << (agent_response.text_response.empty() ? "empty" : "present")
324 << util::colors::kReset << std::endl;
325 if (!agent_response.reasoning.empty() && config_.show_reasoning) {
326 std::cout << util::colors::kYellow << " 💭 Reasoning: "
327 << util::colors::kDim << agent_response.reasoning
328 << util::colors::kReset << std::endl;
329 }
330 }
331
332 if (!agent_response.tool_calls.empty()) {
333 // Check if we were waiting for a text response but got more tool calls instead
334 if (waiting_for_text_response) {
336 absl::StrCat("LLM called tools again instead of providing final response (Iteration: ",
337 iteration + 1, "/", max_iterations, ")"));
338 }
339
340 bool executed_tool = false;
341 for (const auto& tool_call : agent_response.tool_calls) {
342 // Format tool arguments for display
343 std::vector<std::string> arg_parts;
344 for (const auto& [key, value] : tool_call.args) {
345 arg_parts.push_back(absl::StrCat(key, "=", value));
346 }
347 std::string args_str = absl::StrJoin(arg_parts, ", ");
348
349 util::PrintToolCall(tool_call.tool_name, args_str);
350
351 auto tool_result_or = tool_dispatcher_.Dispatch(tool_call);
352 if (!tool_result_or.ok()) {
353 util::PrintError(absl::StrCat(
354 "Tool execution failed: ", tool_result_or.status().message()));
355 return absl::InternalError(absl::StrCat(
356 "Tool execution failed: ", tool_result_or.status().message()));
357 }
358
359 const std::string& tool_output = tool_result_or.value();
360 if (!tool_output.empty()) {
361 util::PrintSuccess("Tool executed successfully");
363
364 if (config_.verbose) {
365 std::cout << util::colors::kDim << "Tool output (truncated):"
366 << util::colors::kReset << std::endl;
367 std::string preview = tool_output.substr(0, std::min(size_t(200), tool_output.size()));
368 if (tool_output.size() > 200) preview += "...";
369 std::cout << util::colors::kDim << preview << util::colors::kReset << std::endl;
370 }
371
372 // Add tool result with a clear marker for the LLM
373 // Format as plain text to avoid confusing the LLM with nested JSON
374 std::string marked_output = absl::StrCat(
375 "[TOOL RESULT for ", tool_call.tool_name, "]\n",
376 "The tool returned the following data:\n",
377 tool_output, "\n\n",
378 "Please provide a text_response field in your JSON to summarize this information for the user.");
379 auto tool_result_msg = CreateMessage(ChatMessage::Sender::kUser, marked_output);
380 tool_result_msg.is_internal = true; // Don't show this to the human user
381 history_.push_back(tool_result_msg);
382 }
383 executed_tool = true;
384 }
385
386 if (executed_tool) {
387 // Now we're waiting for the LLM to provide a text response
388 waiting_for_text_response = true;
389 // Re-query the AI with updated context.
390 continue;
391 }
392 }
393
394 // Check if we received a text response after tool execution
395 if (waiting_for_text_response && agent_response.text_response.empty() &&
396 agent_response.commands.empty()) {
398 absl::StrCat("LLM did not provide text_response after receiving tool results (Iteration: ",
399 iteration + 1, "/", max_iterations, ")"));
400 // Continue to give it another chance
401 continue;
402 }
403
404 std::optional<ProposalCreationResult> proposal_result;
405 absl::Status proposal_status = absl::OkStatus();
406 bool attempted_proposal = false;
407
408 if (!agent_response.commands.empty()) {
409 attempted_proposal = true;
410
411 if (rom_context_ == nullptr) {
412 proposal_status = absl::FailedPreconditionError(
413 "No ROM context available for proposal creation");
415 "Cannot create proposal because no ROM context is active.");
416 } else if (!rom_context_->is_loaded()) {
417 proposal_status = absl::FailedPreconditionError(
418 "ROM context is not loaded");
420 "Cannot create proposal because the ROM context is not loaded.");
421 } else {
422 ProposalCreationRequest request;
423 request.prompt = message;
424 request.response = &agent_response;
425 request.rom = rom_context_;
426 request.sandbox_label = "agent-chat";
427 request.ai_provider = absl::GetFlag(FLAGS_ai_provider);
428
429 auto creation_or = CreateProposalFromAgentResponse(request);
430 if (!creation_or.ok()) {
431 proposal_status = creation_or.status();
432 util::PrintError(absl::StrCat(
433 "Failed to create proposal: ", proposal_status.message()));
434 } else {
435 proposal_result = std::move(creation_or.value());
436 if (config_.verbose) {
437 util::PrintSuccess(absl::StrCat(
438 "Created proposal ", proposal_result->metadata.id,
439 " with ", proposal_result->change_count, " change(s)."));
440 }
441 }
442 }
443 }
444
445 std::string response_text = agent_response.text_response;
446 if (!agent_response.reasoning.empty()) {
447 if (!response_text.empty()) {
448 response_text.append("\n\n");
449 }
450 response_text.append("Reasoning: ");
451 response_text.append(agent_response.reasoning);
452 }
453 const int executable_commands =
454 CountExecutableCommands(agent_response.commands);
455 if (!agent_response.commands.empty()) {
456 if (!response_text.empty()) {
457 response_text.append("\n\n");
458 }
459 response_text.append("Commands:\n");
460 response_text.append(absl::StrJoin(agent_response.commands, "\n"));
461 }
462 metrics_.commands_generated += executable_commands;
463
464 if (proposal_result.has_value()) {
465 const auto& metadata = proposal_result->metadata;
466 if (!response_text.empty()) {
467 response_text.append("\n\n");
468 }
469 response_text.append(absl::StrFormat(
470 "✅ Proposal %s ready with %d change%s (%d command%s).\n"
471 "Review it in the Proposal drawer or run `z3ed agent diff --proposal-id %s`.\n"
472 "Sandbox ROM: %s\nProposal JSON: %s",
473 metadata.id, proposal_result->change_count,
474 proposal_result->change_count == 1 ? "" : "s",
475 proposal_result->executed_commands,
476 proposal_result->executed_commands == 1 ? "" : "s",
477 metadata.id, metadata.sandbox_rom_path.string(),
478 proposal_result->proposal_json_path.string()));
480 } else if (attempted_proposal && !proposal_status.ok()) {
481 if (!response_text.empty()) {
482 response_text.append("\n\n");
483 }
484 response_text.append(absl::StrCat(
485 "⚠️ Failed to prepare a proposal automatically: ",
486 proposal_status.message()));
487 }
488 ChatMessage chat_response =
489 CreateMessage(ChatMessage::Sender::kAgent, response_text);
490 if (proposal_result.has_value()) {
492 summary.id = proposal_result->metadata.id;
493 summary.change_count = proposal_result->change_count;
494 summary.executed_commands = proposal_result->executed_commands;
495 summary.sandbox_rom_path = proposal_result->metadata.sandbox_rom_path;
496 summary.proposal_json_path = proposal_result->proposal_json_path;
497 chat_response.proposal = summary;
498 }
501 metrics_.total_latency += absl::Now() - turn_start;
502 chat_response.metrics = BuildMetricsSnapshot();
503 history_.push_back(chat_response);
505 return chat_response;
506 }
507
508 return absl::InternalError(
509 "Agent did not produce a response after executing tools.");
510}
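// Illustrative call pattern (a sketch; config and the surrounding CLI wiring
// are assumed, not shown in this file):
//
//   ConversationalAgentService service(config);
//   auto reply = service.SendMessage("Summarize the overworld palettes");
//   if (reply.ok()) std::cout << reply->message << std::endl;
//
// Each call runs at most config_.max_tool_iterations model turns, feeding tool
// output back into history_ as internal user messages until the model produces
// a plain text_response (optionally alongside commands, which are turned into a
// sandboxed proposal when a loaded ROM context is available).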
511
512const std::vector<ChatMessage>& ConversationalAgentService::GetHistory() const {
513 return history_;
514}
515
517 std::vector<ChatMessage> history) {
518 history_ = std::move(history);
521}
522
525
527 bool has_snapshot = false;
528
529 for (const auto& message : history_) {
530 if (message.sender == ChatMessage::Sender::kUser) {
532 } else if (message.sender == ChatMessage::Sender::kAgent) {
535 }
536
537 if (message.proposal.has_value()) {
539 }
540
541 if (message.metrics.has_value()) {
542 snapshot = *message.metrics;
543 has_snapshot = true;
544 }
545 }
546
547 if (has_snapshot) {
548 metrics_.user_messages = snapshot.total_user_messages;
549 metrics_.agent_messages = snapshot.total_agent_messages;
550 metrics_.tool_calls = snapshot.total_tool_calls;
551 metrics_.commands_generated = snapshot.total_commands;
552 metrics_.proposals_created = snapshot.total_proposals;
553 metrics_.turns_completed = snapshot.turn_index;
554 metrics_.total_latency = absl::Seconds(snapshot.total_elapsed_seconds);
555 }
556}
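// Scanning a restored history keeps the session counters consistent: the most
// recent metrics snapshot attached to a message wins, and its cumulative totals
// are copied back into metrics_.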
557
558#ifdef Z3ED_AI
559// === Advanced Feature Integration ===
560
561std::string ConversationalAgentService::BuildEnhancedPrompt(const std::string& user_message) {
562 std::ostringstream enhanced;
563
564 // Inject pretraining on first message
565 if (inject_pretraining_ && !pretraining_injected_ && rom_context_) {
566 enhanced << InjectPretraining() << "\n\n";
567 pretraining_injected_ = true;
568 }
569
570 // Inject learned context
571 if (inject_learned_context_) {
572 enhanced << InjectLearnedContext(user_message) << "\n";
573 }
574
575 enhanced << user_message;
576 return enhanced.str();
577}
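// Resulting prompt layout (illustrative; each bracketed block is present only
// when its injection flag is enabled, and pretraining is injected only for the
// first message of a session with a ROM context):
//
//   [SYSTEM KNOWLEDGE INJECTION - Read this first]
//   ...ROM pretraining content...
//   [END KNOWLEDGE INJECTION]
//
//   [User Preferences: ...]
//   [Relevant Past Context: ...]
//
//   <original user message>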
578
579std::string ConversationalAgentService::InjectLearnedContext(const std::string& message) {
580 std::ostringstream context;
581
582 // Add relevant preferences
583 auto prefs = learned_knowledge_.GetAllPreferences();
584 if (!prefs.empty() && prefs.size() <= 5) { // Avoid overwhelming the prompt with too many preferences
585 context << "[User Preferences: ";
586 std::vector<std::string> pref_strings;
587 for (const auto& [key, value] : prefs) {
588 pref_strings.push_back(absl::StrCat(key, "=", value));
589 }
590 context << absl::StrJoin(pref_strings, ", ") << "]\n";
591 }
592
593 // Add ROM-specific patterns
595 // TODO: Get ROM hash
596 // auto patterns = learned_knowledge_.QueryPatterns("", rom_hash);
597 }
598
599 // Add recent relevant memories
600 std::vector<std::string> keywords;
601 // Extract keywords from message (simple word splitting)
602 for (const auto& word : absl::StrSplit(message, ' ')) {
603 if (word.length() > 4) { // Only meaningful words
604 keywords.push_back(std::string(word));
605 }
606 }
607
608 if (!keywords.empty()) {
609 auto memories = learned_knowledge_.SearchMemories(keywords[0]);
610 if (!memories.empty() && memories.size() <= 3) {
611 context << "[Relevant Past Context:\n";
612 for (const auto& mem : memories) {
613 context << "- " << mem.topic << ": " << mem.summary << "\n";
614 }
615 context << "]\n";
616 }
617 }
618
619 return context.str();
620}
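// Example of an injected block (values are illustrative):
//
//   [User Preferences: palette_format=hex, confirm_writes=true]
//   [Relevant Past Context:
//   - overworld palettes: user prefers previewing changes before applying
//   ]
//
// Preferences are injected only when five or fewer are stored, memories are
// searched with just the first keyword longer than four characters, and at
// most three matches are inlined.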
621
622std::string ConversationalAgentService::InjectPretraining() {
623 if (!rom_context_) {
624 return "";
625 }
626
627 std::ostringstream pretraining;
628 pretraining << "[SYSTEM KNOWLEDGE INJECTION - Read this first]\n\n";
630 pretraining << "\n[END KNOWLEDGE INJECTION]\n";
631
632 return pretraining.str();
633}
634
635ChatMessage ConversationalAgentService::EnhanceResponse(
636 const ChatMessage& response,
637 const std::string& user_message) {
638 // Use AdvancedRouter to enhance tool-based responses
639 // This would synthesize multi-tool results into coherent insights
640
641 // For now, return response as-is
642 // TODO: Integrate AdvancedRouter here
643 return response;
644}
645#endif // Z3ED_AI
646
647} // namespace agent
648} // namespace cli
649} // namespace yaze