#if defined(YAZE_WITH_JSON)
using Json = nlohmann::json;

absl::Time ParseTimestamp(const Json& value) {
  // ...
  if (absl::ParseTime(absl::RFC3339_full, value.get<std::string>(), &parsed,
      // ...
// ... (table serialization)
  json["headers"] = table.headers;
  json["rows"] = table.rows;
std::optional<cli::agent::ChatMessage::TableData> ParseTableData(const Json& json) {
  if (!json.is_object()) {
    return std::nullopt;
  }
  cli::agent::ChatMessage::TableData table;
  if (json.contains("headers") && json["headers"].is_array()) {
    for (const auto& header : json["headers"]) {
      if (header.is_string()) {
        table.headers.push_back(header.get<std::string>());
      }
    }
  }
  if (json.contains("rows") && json["rows"].is_array()) {
    for (const auto& row : json["rows"]) {
      if (!row.is_array()) {
        continue;
      }
      std::vector<std::string> row_values;
      for (const auto& value : row) {
        if (value.is_string()) {
          row_values.push_back(value.get<std::string>());
        } else {
          row_values.push_back(value.dump());
        }
      }
      table.rows.push_back(std::move(row_values));
    }
  }
  return table;
}
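// Illustrative only -- the "table_data" shape ParseTableData accepts, with
// made-up values: "headers" is an array of strings, "rows" an array of arrays;
// non-string cells survive the round trip as their JSON dump() text.
//
//   {"headers": ["Room", "Chests"],
//    "rows": [["0x00", "2"], ["0x01", 3]]}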
Json SerializeProposal(const cli::agent::ChatMessage::ProposalSummary& proposal) {
  Json json;
  json["id"] = proposal.id;
  // ...
std::optional<cli::agent::ChatMessage::ProposalSummary> ParseProposal(const Json& json) {
  if (!json.is_object()) {
    return std::nullopt;
  }
  cli::agent::ChatMessage::ProposalSummary summary;
  summary.id = json.value("id", "");
  if (json.contains("sandbox_rom_path") &&
      json["sandbox_rom_path"].is_string()) {
    // ...
  }
  if (json.contains("proposal_json_path") &&
      json["proposal_json_path"].is_string()) {
    // ...
  }
  if (summary.id.empty()) {
    return std::nullopt;
  }
  return summary;
}
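// Illustrative only -- the "proposal" object shape implied by the checks above:
// an "id" string plus optional "sandbox_rom_path" / "proposal_json_path" strings;
// an empty "id" is rejected (see the id.empty() check above).
//
//   {"id": "prop-0001",
//    "sandbox_rom_path": "/tmp/yaze/sandbox.sfc",
//    "proposal_json_path": "/tmp/yaze/prop-0001.json"}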
// Load: reads a chat-history snapshot back from a JSON file.
    const std::filesystem::path& path) {
#if defined(YAZE_WITH_JSON)
  std::ifstream file(path);
  // ... (the file is parsed into `json` inside a try block)
  } catch (const std::exception& e) {
    return absl::InternalError(
        absl::StrFormat("Failed to parse chat history: %s", e.what()));
  }
  if (!json.contains("messages") || !json["messages"].is_array()) {
    // ...
  }
  for (const auto& item : json["messages"]) {
    if (!item.is_object()) {
      continue;
    }
    std::string sender = item.value("sender", "agent");
    cli::agent::ChatMessage message;
    // ... (sender string mapped onto the message; elided)
    message.message = item.value("message", "");
    message.timestamp = ParseTimestamp(item["timestamp"]);
    message.is_internal = item.value("is_internal", false);
    if (item.contains("json_pretty") && item["json_pretty"].is_string()) {
      message.json_pretty = item["json_pretty"].get<std::string>();
    }
    if (item.contains("table_data")) {
      message.table_data = ParseTableData(item["table_data"]);
    }
    if (item.contains("metrics") && item["metrics"].is_object()) {
      const auto& metrics_json = item["metrics"];
      // ... (metrics struct declared; elided)
      metrics.turn_index = metrics_json.value("turn_index", 0);
      metrics.total_user_messages =
          metrics_json.value("total_user_messages", 0);
      metrics.total_agent_messages =
          metrics_json.value("total_agent_messages", 0);
      // ...
      metrics.total_elapsed_seconds =
          metrics_json.value("total_elapsed_seconds", 0.0);
      metrics.average_latency_seconds =
          metrics_json.value("average_latency_seconds", 0.0);
      // ... (metrics stored on the message; elided)
    }
    if (item.contains("proposal")) {
      message.proposal = ParseProposal(item["proposal"]);
    }
    if (item.contains("warnings") && item["warnings"].is_array()) {
      for (const auto& warning : item["warnings"]) {
        if (warning.is_string()) {
          message.warnings.push_back(warning.get<std::string>());
        }
      }
    }
    if (item.contains("model_metadata") && item["model_metadata"].is_object()) {
      const auto& meta_json = item["model_metadata"];
      // ... (metadata struct declared; elided)
      meta.provider = meta_json.value("provider", "");
      meta.model = meta_json.value("model", "");
      // ...
      if (meta_json.contains("tool_names") &&
          meta_json["tool_names"].is_array()) {
        for (const auto& name : meta_json["tool_names"]) {
          if (name.is_string()) {
            meta.tool_names.push_back(name.get<std::string>());
          }
        }
      }
      if (meta_json.contains("parameters") &&
          meta_json["parameters"].is_object()) {
        for (const auto& [key, value] : meta_json["parameters"].items()) {
          // non-string parameter values are dropped on load
          if (value.is_string()) {
            meta.parameters[key] = value.get<std::string>();
          }
        }
      }
      // ... (metadata stored on the message; elided)
    }
    snapshot.history.push_back(std::move(message));
  }
  if (json.contains("collaboration") && json["collaboration"].is_object()) {
    const auto& collab_json = json["collaboration"];
    // ...
    if (collab_json.contains("participants") &&
        collab_json["participants"].is_array()) {
      for (const auto& participant : collab_json["participants"]) {
        if (participant.is_string()) {
          // ... (appended to the participant list)
              participant.get<std::string>());
        }
      }
    }
    if (collab_json.contains("last_synced")) {
      // ... (assigned to the collaboration timestamp)
          ParseTimestamp(collab_json["last_synced"]);
    }
  }
  if (json.contains("multimodal") && json["multimodal"].is_object()) {
    const auto& multimodal_json = json["multimodal"];
    if (multimodal_json.contains("last_capture_path") &&
        multimodal_json["last_capture_path"].is_string()) {
      std::string path_value =
          multimodal_json["last_capture_path"].get<std::string>();
      if (!path_value.empty()) {
        // ... (stored as)
            std::filesystem::path(path_value);
      }
    }
    // ... (status message read via)
        multimodal_json.value("status_message", "");
    if (multimodal_json.contains("last_updated")) {
      // ... (assigned to the multimodal timestamp)
          ParseTimestamp(multimodal_json["last_updated"]);
    }
  }
  if (json.contains("agent_config") && json["agent_config"].is_object()) {
    const auto& config_json = json["agent_config"];
    // ...
    config.provider = config_json.value("provider", "");
    config.model = config_json.value("model", "");
    config.ollama_host =
        config_json.value("ollama_host", "http://localhost:11434");
    // ...
    config.verbose = config_json.value("verbose", false);
    config.show_reasoning = config_json.value("show_reasoning", true);
    // ...
    config.temperature = config_json.value("temperature", 0.25f);
    config.top_p = config_json.value("top_p", 0.95f);
    // ...
    config.chain_mode = config_json.value("chain_mode", 0);
    if (config_json.contains("favorite_models") &&
        config_json["favorite_models"].is_array()) {
      for (const auto& fav : config_json["favorite_models"]) {
        if (fav.is_string()) {
          config.favorite_models.push_back(fav.get<std::string>());
        }
      }
    }
    if (config_json.contains("model_chain") &&
        config_json["model_chain"].is_array()) {
      for (const auto& chain : config_json["model_chain"]) {
        if (chain.is_string()) {
          config.model_chain.push_back(chain.get<std::string>());
        }
      }
    }
    if (config_json.contains("goals") && config_json["goals"].is_array()) {
      for (const auto& goal : config_json["goals"]) {
        if (goal.is_string()) {
          config.goals.push_back(goal.get<std::string>());
        }
      }
    }
    if (config_json.contains("model_presets") &&
        config_json["model_presets"].is_array()) {
      for (const auto& preset_json : config_json["model_presets"]) {
        if (!preset_json.is_object()) continue;
        // ... (preset struct declared; elided)
        preset.name = preset_json.value("name", "");
        preset.model = preset_json.value("model", "");
        preset.provider = preset_json.value("provider", "");
        preset.host = preset_json.value("host", "");
        preset.pinned = preset_json.value("pinned", false);
        if (preset_json.contains("tags") && preset_json["tags"].is_array()) {
          for (const auto& tag : preset_json["tags"]) {
            if (tag.is_string()) {
              preset.tags.push_back(tag.get<std::string>());
            }
          }
        }
        // ... (preset appended to config.model_presets; elided)
      }
    }
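// Illustrative only -- one "model_presets" entry in the shape read above
// (keys from the code; values invented for the example):
//
//   {"name": "local-fast", "model": "llama3.2", "provider": "ollama",
//    "host": "http://localhost:11434", "pinned": true, "tags": ["local", "draft"]}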
    if (config_json.contains("tools") && config_json["tools"].is_object()) {
      const auto& tools_json = config_json["tools"];
      // ...
      config.tools.dungeon = tools_json.value("dungeon", true);
      // ...
      config.tools.gui = tools_json.value("gui", true);
      config.tools.music = tools_json.value("music", true);
      config.tools.sprite = tools_json.value("sprite", true);
      // ... (remaining tool toggles elided)
    }
    config.persona_notes = config_json.value("persona_notes", "");
    // ... (config stored on the snapshot; elided)
  }
  return snapshot;
#else
  return absl::UnimplementedError(
      "Chat history persistence requires YAZE_WITH_GRPC=ON");
#endif
}
// Save: serializes the snapshot back out to JSON.
#if defined(YAZE_WITH_JSON)
  Json json;
  // ...
  for (const auto& message : snapshot.history) {
    Json entry;
    // ... (sender serialized; elided)
    entry["message"] = message.message;
    entry["timestamp"] = absl::FormatTime(absl::RFC3339_full, message.timestamp,
                                          absl::UTCTimeZone());
    entry["is_internal"] = message.is_internal;
    if (message.json_pretty.has_value()) {
      entry["json_pretty"] = *message.json_pretty;
    }
    if (message.table_data.has_value()) {
      entry["table_data"] = SerializeTableData(*message.table_data);
    }
    if (message.metrics.has_value()) {
      const auto& metrics = *message.metrics;
      Json metrics_json;
      metrics_json["turn_index"] = metrics.turn_index;
      metrics_json["total_user_messages"] = metrics.total_user_messages;
      metrics_json["total_agent_messages"] = metrics.total_agent_messages;
      metrics_json["total_tool_calls"] = metrics.total_tool_calls;
      metrics_json["total_commands"] = metrics.total_commands;
      metrics_json["total_proposals"] = metrics.total_proposals;
      metrics_json["total_elapsed_seconds"] = metrics.total_elapsed_seconds;
      metrics_json["average_latency_seconds"] = metrics.average_latency_seconds;
      entry["metrics"] = metrics_json;
    }
    if (message.proposal.has_value()) {
      entry["proposal"] = SerializeProposal(*message.proposal);
    }
    if (!message.warnings.empty()) {
      entry["warnings"] = message.warnings;
    }
    if (message.model_metadata.has_value()) {
      const auto& meta = *message.model_metadata;
      Json meta_json;
      meta_json["provider"] = meta.provider;
      meta_json["model"] = meta.model;
      meta_json["latency_seconds"] = meta.latency_seconds;
      meta_json["tool_iterations"] = meta.tool_iterations;
      meta_json["tool_names"] = meta.tool_names;
      Json params_json;
      for (const auto& [key, value] : meta.parameters) {
        params_json[key] = value;
      }
      meta_json["parameters"] = std::move(params_json);
      entry["model_metadata"] = std::move(meta_json);
    }
    json["messages"].push_back(std::move(entry));
  }
  Json collab_json;
  // ... (participants serialized; elided)
  collab_json["last_synced"] =
      absl::FormatTime(absl::RFC3339_full, /* ... */, absl::UTCTimeZone());
  json["collaboration"] = std::move(collab_json);

  Json multimodal_json;
  if (/* a capture path is present */) {
    multimodal_json["last_capture_path"] = /* ... */;
  } else {
    multimodal_json["last_capture_path"] = "";
  }
  // ...
  multimodal_json["last_updated"] =
      absl::FormatTime(absl::RFC3339_full, /* ... */, absl::UTCTimeZone());
  json["multimodal"] = std::move(multimodal_json);
  config_json["provider"] = config.provider;
  config_json["model"] = config.model;
  config_json["ollama_host"] = config.ollama_host;
  config_json["gemini_api_key"] = config.gemini_api_key;
  config_json["verbose"] = config.verbose;
  config_json["show_reasoning"] = config.show_reasoning;
  config_json["max_tool_iterations"] = config.max_tool_iterations;
  config_json["max_retry_attempts"] = config.max_retry_attempts;
  config_json["temperature"] = config.temperature;
  config_json["top_p"] = config.top_p;
  config_json["max_output_tokens"] = config.max_output_tokens;
  config_json["stream_responses"] = config.stream_responses;
  config_json["chain_mode"] = config.chain_mode;
  config_json["favorite_models"] = config.favorite_models;
  config_json["model_chain"] = config.model_chain;
  config_json["persona_notes"] = config.persona_notes;
  config_json["goals"] = config.goals;
  tools_json["resources"] = config.tools.resources;
  tools_json["dungeon"] = config.tools.dungeon;
  tools_json["overworld"] = config.tools.overworld;
  tools_json["dialogue"] = config.tools.dialogue;
  tools_json["messages"] = config.tools.messages;
  tools_json["gui"] = config.tools.gui;
  tools_json["music"] = config.tools.music;
  tools_json["sprite"] = config.tools.sprite;
  tools_json["emulator"] = config.tools.emulator;
  config_json["tools"] = std::move(tools_json);
  for (const auto& preset : config.model_presets) {
    preset_json["name"] = preset.name;
    preset_json["model"] = preset.model;
    preset_json["provider"] = preset.provider;
    preset_json["host"] = preset.host;
    preset_json["tags"] = preset.tags;
    preset_json["pinned"] = preset.pinned;
    presets_json.push_back(std::move(preset_json));
  }
  config_json["model_presets"] = std::move(presets_json);
  json["agent_config"] = std::move(config_json);
  auto directory = path.parent_path();
  if (!directory.empty()) {
    std::error_code ec;
    std::filesystem::create_directories(directory, ec);
    if (ec) {
      return absl::InternalError(absl::StrFormat(
          "Unable to create chat history directory: %s", ec.message()));
    }
  }

  std::ofstream file(path);
  if (!file.is_open()) {
    return absl::InternalError("Cannot write chat history file");
  }
  file << json.dump(2);
  return absl::OkStatus();
#else
  return absl::UnimplementedError(
      "Chat history persistence requires YAZE_WITH_GRPC=ON");
#endif
}
static absl::Status Save(const std::filesystem::path& path, const Snapshot& snapshot);
static absl::StatusOr<Snapshot> Load(const std::filesystem::path& path);
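// Usage sketch (illustrative, not from the source). Save/Load are static
// members of the history store class; its name is not visible in this excerpt,
// so "ChatHistoryStore" below is a placeholder and the path is made up. Both
// calls report failures through absl::Status instead of throwing.
//
//   const std::filesystem::path history_path{".yaze/chat_history.json"};
//   auto loaded = ChatHistoryStore::Load(history_path);
//   ChatHistoryStore::Snapshot snapshot =
//       loaded.ok() ? *std::move(loaded) : ChatHistoryStore::Snapshot{};
//   snapshot.history.push_back(/* new cli::agent::ChatMessage */);
//   if (absl::Status status = ChatHistoryStore::Save(history_path, snapshot);
//       !status.ok()) {
//     std::cerr << "chat history not saved: " << status.message() << '\n';
//   }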