yaze 0.3.2
Link to the Past ROM Editor
 
Loading...
Searching...
No Matches
agent_chat_history_codec.cc
Go to the documentation of this file.
2
3#include <filesystem>
4#include <fstream>
5#include <optional>
6#include <string>
7#include <vector>
8
9#include "absl/strings/str_format.h"
10#include "absl/time/clock.h"
11#include "absl/time/time.h"
12
13#if defined(YAZE_WITH_JSON)
14#include "nlohmann/json.hpp"
15#endif
16
17namespace yaze {
18namespace editor {
19
20namespace {
21
22#if defined(YAZE_WITH_JSON)
23using Json = nlohmann::json;
24
25absl::Time ParseTimestamp(const Json& value) {
26 if (!value.is_string()) {
27 return absl::Now();
28 }
29 absl::Time parsed;
30 if (absl::ParseTime(absl::RFC3339_full, value.get<std::string>(), &parsed,
31 nullptr)) {
32 return parsed;
33 }
34 return absl::Now();
35}
36
37Json SerializeTableData(const cli::agent::ChatMessage::TableData& table) {
38 Json json;
39 json["headers"] = table.headers;
40 json["rows"] = table.rows;
41 return json;
42}
43
44std::optional<cli::agent::ChatMessage::TableData> ParseTableData(
45 const Json& json) {
46 if (!json.is_object()) {
47 return std::nullopt;
48 }
49
51 if (json.contains("headers") && json["headers"].is_array()) {
52 for (const auto& header : json["headers"]) {
53 if (header.is_string()) {
54 table.headers.push_back(header.get<std::string>());
55 }
56 }
57 }
58
59 if (json.contains("rows") && json["rows"].is_array()) {
60 for (const auto& row : json["rows"]) {
61 if (!row.is_array()) {
62 continue;
63 }
64 std::vector<std::string> row_values;
65 for (const auto& value : row) {
66 if (value.is_string()) {
67 row_values.push_back(value.get<std::string>());
68 } else {
69 row_values.push_back(value.dump());
70 }
71 }
72 table.rows.push_back(std::move(row_values));
73 }
74 }
75
76 if (table.headers.empty() && table.rows.empty()) {
77 return std::nullopt;
78 }
79
80 return table;
81}
82
83Json SerializeProposal(
85 Json json;
86 json["id"] = proposal.id;
87 json["change_count"] = proposal.change_count;
88 json["executed_commands"] = proposal.executed_commands;
89 json["sandbox_rom_path"] = proposal.sandbox_rom_path.string();
90 json["proposal_json_path"] = proposal.proposal_json_path.string();
91 return json;
92}
93
94std::optional<cli::agent::ChatMessage::ProposalSummary> ParseProposal(
95 const Json& json) {
96 if (!json.is_object()) {
97 return std::nullopt;
98 }
99
101 summary.id = json.value("id", "");
102 summary.change_count = json.value("change_count", 0);
103 summary.executed_commands = json.value("executed_commands", 0);
104 if (json.contains("sandbox_rom_path") &&
105 json["sandbox_rom_path"].is_string()) {
106 summary.sandbox_rom_path = json["sandbox_rom_path"].get<std::string>();
107 }
108 if (json.contains("proposal_json_path") &&
109 json["proposal_json_path"].is_string()) {
110 summary.proposal_json_path = json["proposal_json_path"].get<std::string>();
111 }
112 if (summary.id.empty()) {
113 return std::nullopt;
114 }
115 return summary;
116}
117
#endif  // YAZE_WITH_JSON
119
120} // namespace
121
123#if defined(YAZE_WITH_JSON)
124 return true;
125#else
126 return false;
127#endif
128}
129
130absl::StatusOr<AgentChatHistoryCodec::Snapshot> AgentChatHistoryCodec::Load(
131 const std::filesystem::path& path) {
132#if defined(YAZE_WITH_JSON)
133 Snapshot snapshot;
134
135 std::ifstream file(path);
136 if (!file.good()) {
137 return snapshot; // Treat missing file as empty history.
138 }
139
140 Json json;
141 try {
142 file >> json;
143 } catch (const std::exception& e) {
144 return absl::InternalError(
145 absl::StrFormat("Failed to parse chat history: %s", e.what()));
146 }
147
148 if (!json.contains("messages") || !json["messages"].is_array()) {
149 return snapshot;
150 }
151
152 for (const auto& item : json["messages"]) {
153 if (!item.is_object()) {
154 continue;
155 }
156
158 std::string sender = item.value("sender", "agent");
159 message.sender = sender == "user" ? cli::agent::ChatMessage::Sender::kUser
161 message.message = item.value("message", "");
162 message.timestamp = ParseTimestamp(item["timestamp"]);
163 message.is_internal = item.value("is_internal", false);
164
165 if (item.contains("json_pretty") && item["json_pretty"].is_string()) {
166 message.json_pretty = item["json_pretty"].get<std::string>();
167 }
168 if (item.contains("table_data")) {
169 message.table_data = ParseTableData(item["table_data"]);
170 }
171 if (item.contains("metrics") && item["metrics"].is_object()) {
173 const auto& metrics_json = item["metrics"];
174 metrics.turn_index = metrics_json.value("turn_index", 0);
175 metrics.total_user_messages =
176 metrics_json.value("total_user_messages", 0);
177 metrics.total_agent_messages =
178 metrics_json.value("total_agent_messages", 0);
179 metrics.total_tool_calls = metrics_json.value("total_tool_calls", 0);
180 metrics.total_commands = metrics_json.value("total_commands", 0);
181 metrics.total_proposals = metrics_json.value("total_proposals", 0);
182 metrics.total_elapsed_seconds =
183 metrics_json.value("total_elapsed_seconds", 0.0);
185 metrics_json.value("average_latency_seconds", 0.0);
186 message.metrics = metrics;
187 }
188 if (item.contains("proposal")) {
189 message.proposal = ParseProposal(item["proposal"]);
190 }
191 if (item.contains("warnings") && item["warnings"].is_array()) {
192 for (const auto& warning : item["warnings"]) {
193 if (warning.is_string()) {
194 message.warnings.push_back(warning.get<std::string>());
195 }
196 }
197 }
198 if (item.contains("model_metadata") && item["model_metadata"].is_object()) {
199 const auto& meta_json = item["model_metadata"];
201 meta.provider = meta_json.value("provider", "");
202 meta.model = meta_json.value("model", "");
203 meta.latency_seconds = meta_json.value("latency_seconds", 0.0);
204 meta.tool_iterations = meta_json.value("tool_iterations", 0);
205 if (meta_json.contains("tool_names") &&
206 meta_json["tool_names"].is_array()) {
207 for (const auto& name : meta_json["tool_names"]) {
208 if (name.is_string()) {
209 meta.tool_names.push_back(name.get<std::string>());
210 }
211 }
212 }
213 if (meta_json.contains("parameters") &&
214 meta_json["parameters"].is_object()) {
215 for (const auto& [key, value] : meta_json["parameters"].items()) {
216 if (value.is_string()) {
217 meta.parameters[key] = value.get<std::string>();
218 }
219 }
220 }
221 message.model_metadata = meta;
222 }
223
224 snapshot.history.push_back(std::move(message));
225 }
226
227 if (json.contains("collaboration") && json["collaboration"].is_object()) {
228 const auto& collab_json = json["collaboration"];
229 snapshot.collaboration.active = collab_json.value("active", false);
230 snapshot.collaboration.session_id = collab_json.value("session_id", "");
231 snapshot.collaboration.session_name = collab_json.value("session_name", "");
232 snapshot.collaboration.participants.clear();
233 if (collab_json.contains("participants") &&
234 collab_json["participants"].is_array()) {
235 for (const auto& participant : collab_json["participants"]) {
236 if (participant.is_string()) {
237 snapshot.collaboration.participants.push_back(
238 participant.get<std::string>());
239 }
240 }
241 }
242 if (collab_json.contains("last_synced")) {
243 snapshot.collaboration.last_synced =
244 ParseTimestamp(collab_json["last_synced"]);
245 }
246 if (snapshot.collaboration.session_name.empty() &&
247 !snapshot.collaboration.session_id.empty()) {
249 }
250 }
251
252 if (json.contains("multimodal") && json["multimodal"].is_object()) {
253 const auto& multimodal_json = json["multimodal"];
254 if (multimodal_json.contains("last_capture_path") &&
255 multimodal_json["last_capture_path"].is_string()) {
256 std::string path_value =
257 multimodal_json["last_capture_path"].get<std::string>();
258 if (!path_value.empty()) {
260 std::filesystem::path(path_value);
261 }
262 }
263 snapshot.multimodal.status_message =
264 multimodal_json.value("status_message", "");
265 if (multimodal_json.contains("last_updated")) {
266 snapshot.multimodal.last_updated =
267 ParseTimestamp(multimodal_json["last_updated"]);
268 }
269 }
270
271 if (json.contains("agent_config") && json["agent_config"].is_object()) {
272 const auto& config_json = json["agent_config"];
273 AgentConfigSnapshot config;
274 config.provider = config_json.value("provider", "");
275 config.model = config_json.value("model", "");
276 config.ollama_host =
277 config_json.value("ollama_host", "http://localhost:11434");
278 config.gemini_api_key = config_json.value("gemini_api_key", "");
279 config.anthropic_api_key = config_json.value("anthropic_api_key", "");
280 config.openai_api_key = config_json.value("openai_api_key", "");
281 config.openai_base_url =
282 config_json.value("openai_base_url", "https://api.openai.com");
283 config.host_id = config_json.value("host_id", "");
284 config.verbose = config_json.value("verbose", false);
285 config.show_reasoning = config_json.value("show_reasoning", true);
286 config.max_tool_iterations = config_json.value("max_tool_iterations", 4);
287 config.max_retry_attempts = config_json.value("max_retry_attempts", 3);
288 config.temperature = config_json.value("temperature", 0.25f);
289 config.top_p = config_json.value("top_p", 0.95f);
290 config.max_output_tokens = config_json.value("max_output_tokens", 2048);
291 config.stream_responses = config_json.value("stream_responses", false);
292 config.chain_mode = config_json.value("chain_mode", 0);
293 if (config_json.contains("favorite_models") &&
294 config_json["favorite_models"].is_array()) {
295 for (const auto& fav : config_json["favorite_models"]) {
296 if (fav.is_string()) {
297 config.favorite_models.push_back(fav.get<std::string>());
298 }
299 }
300 }
301 if (config_json.contains("model_chain") &&
302 config_json["model_chain"].is_array()) {
303 for (const auto& chain : config_json["model_chain"]) {
304 if (chain.is_string()) {
305 config.model_chain.push_back(chain.get<std::string>());
306 }
307 }
308 }
309 if (config_json.contains("goals") && config_json["goals"].is_array()) {
310 for (const auto& goal : config_json["goals"]) {
311 if (goal.is_string()) {
312 config.goals.push_back(goal.get<std::string>());
313 }
314 }
315 }
316 if (config_json.contains("model_presets") &&
317 config_json["model_presets"].is_array()) {
318 for (const auto& preset_json : config_json["model_presets"]) {
319 if (!preset_json.is_object())
320 continue;
322 preset.name = preset_json.value("name", "");
323 preset.model = preset_json.value("model", "");
324 preset.provider = preset_json.value("provider", "");
325 preset.host = preset_json.value("host", "");
326 preset.pinned = preset_json.value("pinned", false);
327 if (preset_json.contains("tags") && preset_json["tags"].is_array()) {
328 for (const auto& tag : preset_json["tags"]) {
329 if (tag.is_string()) {
330 preset.tags.push_back(tag.get<std::string>());
331 }
332 }
333 }
334 config.model_presets.push_back(std::move(preset));
335 }
336 }
337 if (config_json.contains("tools") && config_json["tools"].is_object()) {
338 const auto& tools_json = config_json["tools"];
339 config.tools.resources = tools_json.value("resources", true);
340 config.tools.dungeon = tools_json.value("dungeon", true);
341 config.tools.overworld = tools_json.value("overworld", true);
342 config.tools.dialogue = tools_json.value("dialogue", true);
343 config.tools.messages = tools_json.value("messages", true);
344 config.tools.gui = tools_json.value("gui", true);
345 config.tools.music = tools_json.value("music", true);
346 config.tools.sprite = tools_json.value("sprite", true);
347 config.tools.emulator = tools_json.value("emulator", true);
348 config.tools.memory_inspector = tools_json.value("memory_inspector", true);
349 }
350 config.persona_notes = config_json.value("persona_notes", "");
351 snapshot.agent_config = config;
352 }
353
354 return snapshot;
355#else
356 (void)path;
357 return absl::UnimplementedError(
358 "Chat history persistence requires YAZE_WITH_GRPC=ON");
359#endif
360}
361
// Saves `snapshot` to `path` as pretty-printed JSON (schema "version" 4).
// Creates the parent directory if needed. Returns InternalError when the
// directory cannot be created or the file cannot be opened, OkStatus on
// success. Load() above is the inverse of this serialization.
//
// NOTE(review): API keys (gemini/anthropic/openai) are written to disk in
// plaintext here — confirm this is intentional for this config file.
absl::Status AgentChatHistoryCodec::Save(const std::filesystem::path& path,
                                         const Snapshot& snapshot) {
#if defined(YAZE_WITH_JSON)
  Json json;
  json["version"] = 4;
  json["messages"] = Json::array();

  // One JSON object per chat message; optional fields are only emitted
  // when present so old entries stay compact.
  for (const auto& message : snapshot.history) {
    Json entry;
    entry["sender"] = message.sender == cli::agent::ChatMessage::Sender::kUser
                          ? "user"
                          : "agent";
    entry["message"] = message.message;
    // Timestamps are stored as RFC3339 UTC strings (see ParseTimestamp).
    entry["timestamp"] = absl::FormatTime(absl::RFC3339_full, message.timestamp,
                                          absl::UTCTimeZone());
    entry["is_internal"] = message.is_internal;

    if (message.json_pretty.has_value()) {
      entry["json_pretty"] = *message.json_pretty;
    }
    if (message.table_data.has_value()) {
      entry["table_data"] = SerializeTableData(*message.table_data);
    }
    if (message.metrics.has_value()) {
      const auto& metrics = *message.metrics;
      Json metrics_json;
      metrics_json["turn_index"] = metrics.turn_index;
      metrics_json["total_user_messages"] = metrics.total_user_messages;
      metrics_json["total_agent_messages"] = metrics.total_agent_messages;
      metrics_json["total_tool_calls"] = metrics.total_tool_calls;
      metrics_json["total_commands"] = metrics.total_commands;
      metrics_json["total_proposals"] = metrics.total_proposals;
      metrics_json["total_elapsed_seconds"] = metrics.total_elapsed_seconds;
      metrics_json["average_latency_seconds"] = metrics.average_latency_seconds;
      entry["metrics"] = metrics_json;
    }
    if (message.proposal.has_value()) {
      entry["proposal"] = SerializeProposal(*message.proposal);
    }
    if (!message.warnings.empty()) {
      entry["warnings"] = message.warnings;
    }
    if (message.model_metadata.has_value()) {
      const auto& meta = *message.model_metadata;
      Json meta_json;
      meta_json["provider"] = meta.provider;
      meta_json["model"] = meta.model;
      meta_json["latency_seconds"] = meta.latency_seconds;
      meta_json["tool_iterations"] = meta.tool_iterations;
      meta_json["tool_names"] = meta.tool_names;
      Json params_json;
      for (const auto& [key, value] : meta.parameters) {
        params_json[key] = value;
      }
      meta_json["parameters"] = std::move(params_json);
      entry["model_metadata"] = std::move(meta_json);
    }

    snapshot.history always serializes; // (comment intentionally absent)
    json["messages"].push_back(std::move(entry));
  }

  // Collaboration state; last_synced is only written once a sync happened
  // (InfinitePast appears to be the "never synced" sentinel — TODO confirm).
  Json collab_json;
  collab_json["active"] = snapshot.collaboration.active;
  collab_json["session_id"] = snapshot.collaboration.session_id;
  collab_json["session_name"] = snapshot.collaboration.session_name;
  collab_json["participants"] = snapshot.collaboration.participants;
  if (snapshot.collaboration.last_synced != absl::InfinitePast()) {
    collab_json["last_synced"] =
        absl::FormatTime(absl::RFC3339_full, snapshot.collaboration.last_synced,
                         absl::UTCTimeZone());
  }
  json["collaboration"] = std::move(collab_json);

  // Multimodal state; an unset capture path is written as "" (Load treats
  // the empty string as "no path").
  Json multimodal_json;
  if (snapshot.multimodal.last_capture_path.has_value()) {
    multimodal_json["last_capture_path"] =
        snapshot.multimodal.last_capture_path->string();
  } else {
    multimodal_json["last_capture_path"] = "";
  }
  multimodal_json["status_message"] = snapshot.multimodal.status_message;
  if (snapshot.multimodal.last_updated != absl::InfinitePast()) {
    multimodal_json["last_updated"] =
        absl::FormatTime(absl::RFC3339_full, snapshot.multimodal.last_updated,
                         absl::UTCTimeZone());
  }
  json["multimodal"] = std::move(multimodal_json);

  // Agent configuration is optional and only written when present.
  if (snapshot.agent_config.has_value()) {
    const auto& config = *snapshot.agent_config;
    Json config_json;
    config_json["provider"] = config.provider;
    config_json["model"] = config.model;
    config_json["ollama_host"] = config.ollama_host;
    config_json["gemini_api_key"] = config.gemini_api_key;
    config_json["anthropic_api_key"] = config.anthropic_api_key;
    config_json["openai_api_key"] = config.openai_api_key;
    config_json["openai_base_url"] = config.openai_base_url;
    config_json["host_id"] = config.host_id;
    config_json["verbose"] = config.verbose;
    config_json["show_reasoning"] = config.show_reasoning;
    config_json["max_tool_iterations"] = config.max_tool_iterations;
    config_json["max_retry_attempts"] = config.max_retry_attempts;
    config_json["temperature"] = config.temperature;
    config_json["top_p"] = config.top_p;
    config_json["max_output_tokens"] = config.max_output_tokens;
    config_json["stream_responses"] = config.stream_responses;
    config_json["chain_mode"] = config.chain_mode;
    config_json["favorite_models"] = config.favorite_models;
    config_json["model_chain"] = config.model_chain;
    config_json["persona_notes"] = config.persona_notes;
    config_json["goals"] = config.goals;

    // Per-tool enable flags (all default true on Load).
    Json tools_json;
    tools_json["resources"] = config.tools.resources;
    tools_json["dungeon"] = config.tools.dungeon;
    tools_json["overworld"] = config.tools.overworld;
    tools_json["dialogue"] = config.tools.dialogue;
    tools_json["messages"] = config.tools.messages;
    tools_json["gui"] = config.tools.gui;
    tools_json["music"] = config.tools.music;
    tools_json["sprite"] = config.tools.sprite;
    tools_json["emulator"] = config.tools.emulator;
    tools_json["memory_inspector"] = config.tools.memory_inspector;
    config_json["tools"] = std::move(tools_json);

    Json presets_json = Json::array();
    for (const auto& preset : config.model_presets) {
      Json preset_json;
      preset_json["name"] = preset.name;
      preset_json["model"] = preset.model;
      preset_json["provider"] = preset.provider;
      preset_json["host"] = preset.host;
      preset_json["tags"] = preset.tags;
      preset_json["pinned"] = preset.pinned;
      presets_json.push_back(std::move(preset_json));
    }
    config_json["model_presets"] = std::move(presets_json);

    json["agent_config"] = std::move(config_json);
  }

  // Ensure the destination directory exists before opening the file.
  std::error_code ec;
  auto directory = path.parent_path();
  if (!directory.empty()) {
    std::filesystem::create_directories(directory, ec);
    if (ec) {
      return absl::InternalError(absl::StrFormat(
          "Unable to create chat history directory: %s", ec.message()));
    }
  }

  std::ofstream file(path);
  if (!file.is_open()) {
    return absl::InternalError("Cannot write chat history file");
  }

  // NOTE(review): the stream state is not checked after this write, so a
  // disk-full/short-write failure is silently reported as OK — consider
  // checking file.good() before returning.
  file << json.dump(2);
  return absl::OkStatus();
#else
  (void)path;
  (void)snapshot;
  // NOTE(review): message names YAZE_WITH_GRPC but the guard above is
  // YAZE_WITH_JSON — confirm which build flag actually enables this path.
  return absl::UnimplementedError(
      "Chat history persistence requires YAZE_WITH_GRPC=ON");
#endif
}
528
529} // namespace editor
530} // namespace yaze
bool is_string() const
Definition json.h:59
static Json array()
Definition json.h:35
T get() const
Definition json.h:49
std::string dump(int=-1, char=' ', bool=false, int=0) const
Definition json.h:91
static absl::Status Save(const std::filesystem::path &path, const Snapshot &snapshot)
static absl::StatusOr< Snapshot > Load(const std::filesystem::path &path)
std::vector< std::vector< std::string > > rows
std::optional< ModelMetadata > model_metadata
std::optional< std::string > json_pretty
std::optional< ProposalSummary > proposal
std::optional< SessionMetrics > metrics
std::optional< std::filesystem::path > last_capture_path
std::vector< cli::agent::ChatMessage > history
std::optional< AgentConfigSnapshot > agent_config