yaze 0.3.2
Link to the Past ROM Editor
 
Loading...
Searching...
No Matches
agent_editor.cc
Go to the documentation of this file.
2
3#include <algorithm>
4#include <cstdlib>
5#include <cstring>
6#include <filesystem>
7#include <fstream>
8#include <memory>
9#include <optional>
10#include <unordered_set>
11
13// Centralized UI theme
14#include "app/gui/style/theme.h"
15
18
19#include "absl/strings/ascii.h"
20#include "absl/strings/match.h"
21#include "absl/strings/str_format.h"
22#include "absl/time/clock.h"
23#include "absl/time/time.h"
36#include "app/gui/core/icons.h"
45#ifndef __EMSCRIPTEN__
46#include "httplib.h"
47#endif
48#include "imgui/misc/cpp/imgui_stdlib.h"
49#include "implot.h"
50#include "rom/rom.h"
51#include "util/file_util.h"
52#include "util/platform_paths.h"
53
54#if defined(__APPLE__)
55#include <TargetConditionals.h>
56#include <CoreFoundation/CoreFoundation.h>
57#include <Security/Security.h>
58#endif
59
60#ifdef YAZE_WITH_GRPC
63#endif
64
65#if defined(YAZE_WITH_JSON)
66#include "nlohmann/json.hpp"
67#endif
68
69namespace yaze {
70namespace editor {
71
72namespace {
73
74std::optional<std::string> LoadKeychainValue(const std::string& key);
75
// Truncating copy of `src` into a fixed-size char array: at most N-1
// characters are copied and the buffer is always NUL-terminated.
template <size_t N>
void CopyStringToBuffer(const std::string& src, char (&dest)[N]) {
  const size_t limit = N - 1;
  std::strncpy(dest, src.c_str(), limit);
  dest[limit] = '\0';
}
81
82std::filesystem::path ExpandUserPath(const std::string& input) {
83 if (input.empty()) {
84 return {};
85 }
86 if (input.front() != '~') {
87 return std::filesystem::path(input);
88 }
89 const auto home_dir = util::PlatformPaths::GetHomeDirectory();
90 if (home_dir.empty() || home_dir == ".") {
91 return std::filesystem::path(input);
92 }
93 if (input.size() == 1) {
94 return home_dir;
95 }
96 if (input[1] == '/' || input[1] == '\\') {
97 return home_dir / input.substr(2);
98 }
99 return home_dir / input.substr(1);
100}
101
102bool HasModelExtension(const std::filesystem::path& path) {
103 const std::string ext = absl::AsciiStrToLower(path.extension().string());
104 return ext == ".gguf" || ext == ".ggml" || ext == ".bin" ||
105 ext == ".safetensors";
106}
107
// Appends `name` to `output` if it has not been collected before (tracked
// via `seen`). Null pointers and empty names are ignored, and the output is
// capped so a runaway directory scan cannot grow the list without bound.
void AddUniqueModelName(const std::string& name,
                        std::vector<std::string>* output,
                        std::unordered_set<std::string>* seen) {
  // Hard cap on collected model names; callers stop scanning at the same
  // limit.
  constexpr size_t kMaxModelNames = 512;
  if (!output || !seen || name.empty()) {
    return;
  }
  if (output->size() >= kMaxModelNames) {
    return;
  }
  if (seen->insert(name).second) {
    output->push_back(name);
  }
}
121
// True when `path` points at an Ollama model store, i.e. a directory named
// "models" whose parent directory is ".ollama".
bool IsOllamaModelsPath(const std::filesystem::path& path) {
  return path.filename() == "models" &&
         path.parent_path().filename() == ".ollama";
}
128
// Scans an Ollama model store's manifest tree
// (`<models_root>/manifests/registry.ollama.ai/library`) and appends the
// discovered model names to `output` in Ollama's "model:tag" form
// (e.g. "llama3:latest"). Duplicates are filtered via `seen`, and scanning
// stops once `output` holds 512 entries.
void CollectOllamaManifestModels(const std::filesystem::path& models_root,
                                 std::vector<std::string>* output,
                                 std::unordered_set<std::string>* seen) {
  if (!output || !seen) {
    return;
  }
  std::error_code ec;
  const auto library_path =
      models_root / "manifests" / "registry.ollama.ai" / "library";
  if (!std::filesystem::exists(library_path, ec)) {
    return;
  }
  std::filesystem::directory_options options =
      std::filesystem::directory_options::skip_permission_denied;
  // Iterate with error codes throughout so a bad entry (broken symlink,
  // permission failure) skips that entry instead of throwing.
  for (std::filesystem::recursive_directory_iterator
           it(library_path, options, ec),
       end;
       it != end; it.increment(ec)) {
    if (ec) {
      ec.clear();
      continue;
    }
    if (!it->is_regular_file(ec)) {
      continue;
    }
    // Manifest layout is library/<model>/<tag components...>; recover the
    // name from the path relative to the library root.
    const auto rel = it->path().lexically_relative(library_path);
    if (rel.empty()) {
      continue;
    }
    std::vector<std::string> parts;
    for (const auto& part : rel) {
      if (!part.empty()) {
        parts.push_back(part.string());
      }
    }
    if (parts.empty()) {
      continue;
    }
    // First component is the model name; any remaining components form the
    // tag, joined with '/' when the tag path is nested.
    std::string model = parts.front();
    std::string tag;
    for (size_t i = 1; i < parts.size(); ++i) {
      if (!tag.empty()) {
        tag += "/";
      }
      tag += parts[i];
    }
    const std::string name = tag.empty() ? model : model + ":" + tag;
    AddUniqueModelName(name, output, seen);
    if (output->size() >= 512) {
      return;  // same cap enforced inside AddUniqueModelName
    }
  }
}
182
// Recursively scans `base_path` (limited to 4 directory levels) for files
// with recognized model extensions and appends their base-path-relative
// names (extension stripped, '/' separators) to `output`. Duplicates are
// filtered via `seen`, and scanning stops once `output` holds 512 entries.
void CollectModelFiles(const std::filesystem::path& base_path,
                       std::vector<std::string>* output,
                       std::unordered_set<std::string>* seen) {
  if (!output || !seen) {
    return;
  }
  std::error_code ec;
  if (!std::filesystem::exists(base_path, ec)) {
    return;
  }
  std::filesystem::directory_options options =
      std::filesystem::directory_options::skip_permission_denied;
  constexpr int kMaxDepth = 4;  // guards against huge or cyclic trees
  for (std::filesystem::recursive_directory_iterator it(base_path, options, ec),
       end;
       it != end; it.increment(ec)) {
    if (ec) {
      // Skip entries that failed to stat or advance rather than aborting.
      ec.clear();
      continue;
    }
    if (it->is_directory(ec)) {
      if (it.depth() >= kMaxDepth) {
        it.disable_recursion_pending();  // do not descend further
      }
      continue;
    }
    if (!it->is_regular_file(ec)) {
      continue;
    }
    if (!HasModelExtension(it->path())) {
      continue;
    }
    std::filesystem::path rel = it->path().lexically_relative(base_path);
    if (rel.empty()) {
      rel = it->path().filename();
    }
    rel.replace_extension();  // model name without the file extension
    std::string name = rel.generic_string();
    AddUniqueModelName(name, output, seen);
    if (output->size() >= 512) {
      return;  // same cap enforced inside AddUniqueModelName
    }
  }
}
227
228std::vector<std::string> CollectLocalModelNames(
229 const UserSettings::Preferences* prefs) {
230 std::vector<std::string> results;
231 if (!prefs) {
232 return results;
233 }
234 std::unordered_set<std::string> seen;
235 for (const auto& raw_path : prefs->ai_model_paths) {
236 auto expanded = ExpandUserPath(raw_path);
237 if (expanded.empty()) {
238 continue;
239 }
240 std::error_code ec;
241 if (!std::filesystem::exists(expanded, ec)) {
242 continue;
243 }
244 if (std::filesystem::is_regular_file(expanded, ec)) {
245 std::filesystem::path rel = expanded.filename();
246 rel.replace_extension();
247 AddUniqueModelName(rel.string(), &results, &seen);
248 continue;
249 }
250 if (!std::filesystem::is_directory(expanded, ec)) {
251 continue;
252 }
253 if (IsOllamaModelsPath(expanded)) {
254 CollectOllamaManifestModels(expanded, &results, &seen);
255 continue;
256 }
257 CollectModelFiles(expanded, &results, &seen);
258 }
259 std::sort(results.begin(), results.end());
260 return results;
261}
262
265 if (!host.api_key.empty()) {
266 return host.api_key;
267 }
268 if (!host.credential_id.empty()) {
269 if (auto key = LoadKeychainValue(host.credential_id)) {
270 return *key;
271 }
272 }
273 if (!prefs) {
274 return {};
275 }
276 std::string api_type = host.api_type.empty() ? "openai" : host.api_type;
277 if (api_type == "lmstudio") {
278 api_type = "openai";
279 }
280 if (api_type == "openai") {
281 return prefs->openai_api_key;
282 }
283 if (api_type == "gemini") {
284 return prefs->gemini_api_key;
285 }
286 if (api_type == "anthropic") {
287 return prefs->anthropic_api_key;
288 }
289 return {};
290}
291
294 const UserSettings::Preferences* prefs) {
295 if (!profile) {
296 return;
297 }
298 std::string api_key = ResolveHostApiKey(prefs, host);
299 profile->host_id = host.id;
300 std::string api_type = host.api_type;
301 if (api_type == "lmstudio") {
302 api_type = "openai";
303 }
304 if (api_type == "openai" || api_type == "ollama" || api_type == "gemini" ||
305 api_type == "anthropic") {
306 profile->provider = api_type;
307 }
308 if (profile->provider == "openai") {
309 if (!host.base_url.empty()) {
311 }
312 if (!api_key.empty()) {
313 profile->openai_api_key = api_key;
314 }
315 } else if (profile->provider == "ollama") {
316 if (!host.base_url.empty()) {
317 profile->ollama_host = host.base_url;
318 }
319 } else if (profile->provider == "gemini") {
320 if (!api_key.empty()) {
321 profile->gemini_api_key = api_key;
322 }
323 } else if (profile->provider == "anthropic") {
324 if (!api_key.empty()) {
325 profile->anthropic_api_key = api_key;
326 }
327 }
328}
329
// Joins `tags` into a single comma-separated string,
// e.g. {"a", "b"} -> "a, b".
std::string BuildTagsString(const std::vector<std::string>& tags) {
  std::string joined;
  bool first = true;
  for (const auto& tag : tags) {
    if (!first) {
      joined.append(", ");
    }
    joined.append(tag);
    first = false;
  }
  return joined;
}
340
// True when `needle` occurs anywhere inside `haystack` (case-sensitive).
bool ContainsText(const std::string& haystack, const std::string& needle) {
  return std::string::npos != haystack.find(needle);
}
344
// True when `text` begins with `prefix` (case-sensitive).
bool StartsWithText(const std::string& text, const std::string& prefix) {
  return text.size() >= prefix.size() &&
         text.compare(0, prefix.size(), prefix) == 0;
}
348
349bool IsLocalOpenAiBaseUrl(const std::string& base_url) {
350 if (base_url.empty()) {
351 return false;
352 }
353 std::string lower = absl::AsciiStrToLower(base_url);
354 return ContainsText(lower, "localhost") || ContainsText(lower, "127.0.0.1") ||
355 ContainsText(lower, "0.0.0.0");
356}
357
358bool IsTailscaleEndpoint(const std::string& base_url) {
359 if (base_url.empty()) {
360 return false;
361 }
362 std::string lower = absl::AsciiStrToLower(base_url);
363 return ContainsText(lower, ".ts.net") || ContainsText(lower, "tailscale");
364}
365
366bool IsLocalOrTrustedEndpoint(const std::string& base_url,
367 bool allow_insecure) {
368 if (allow_insecure) {
369 return true;
370 }
371 if (IsTailscaleEndpoint(base_url)) {
372 return true;
373 }
374 if (base_url.empty()) {
375 return false;
376 }
377 std::string lower = absl::AsciiStrToLower(base_url);
378 return ContainsText(lower, "localhost") || ContainsText(lower, "127.0.0.1") ||
379 ContainsText(lower, "0.0.0.0") || ContainsText(lower, "::1") ||
380 ContainsText(lower, "192.168.") || StartsWithText(lower, "10.") ||
381 ContainsText(lower, "100.64.");
382}
383
#ifndef __EMSCRIPTEN__
// Issues a short-timeout GET to `base_url` + `path` and reports whether any
// server answered below the 5xx range. Timeout setters take (seconds,
// microseconds) pairs, i.e. roughly 200-250ms, so UI-driven probing stays
// responsive even when the host is down.
bool ProbeHttpEndpoint(const std::string& base_url, const char* path) {
  if (base_url.empty()) {
    return false;
  }
  httplib::Client client(base_url);
  client.set_connection_timeout(0, 200000);
  client.set_read_timeout(0, 250000);
  client.set_write_timeout(0, 250000);
  client.set_follow_location(true);
  auto response = client.Get(path);
  if (!response) {
    return false;
  }
  // Any status below 500 (including 401/404) still proves a server is
  // listening at this endpoint.
  return response->status > 0 && response->status < 500;
}

// True when an Ollama server responds at `base_url`.
bool ProbeOllamaHost(const std::string& base_url) {
  return ProbeHttpEndpoint(base_url, "/api/version");
}

// True when an OpenAI-compatible server responds at `base_url`.
bool ProbeOpenAICompatible(const std::string& base_url) {
  return ProbeHttpEndpoint(base_url, "/v1/models");
}
#else
// The web (Emscripten) build cannot open raw sockets; report all endpoints
// as unreachable.
bool ProbeOllamaHost(const std::string&) {
  return false;
}
bool ProbeOpenAICompatible(const std::string&) {
  return false;
}
#endif
416
// Reads a generic-password item from the macOS Keychain whose account name
// equals `key`. Returns std::nullopt when the item is missing, the lookup
// fails, or the build has no keychain support (non-Apple platforms).
std::optional<std::string> LoadKeychainValue(const std::string& key) {
#if defined(__APPLE__)
  if (key.empty()) {
    return std::nullopt;
  }
  CFStringRef key_ref = CFStringCreateWithCString(
      kCFAllocatorDefault, key.c_str(), kCFStringEncodingUTF8);
  // Query: a single kSecClassGenericPassword item for this account,
  // returning its data payload.
  const void* keys[] = {kSecClass, kSecAttrAccount, kSecReturnData,
                        kSecMatchLimit};
  const void* values[] = {kSecClassGenericPassword, key_ref, kCFBooleanTrue,
                          kSecMatchLimitOne};
  CFDictionaryRef query = CFDictionaryCreate(
      kCFAllocatorDefault, keys, values,
      static_cast<CFIndex>(sizeof(keys) / sizeof(keys[0])),
      &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
  CFTypeRef item = nullptr;
  OSStatus status = SecItemCopyMatching(query, &item);
  // CoreFoundation objects are manually reference counted; release
  // everything this function created regardless of the lookup outcome.
  if (query) {
    CFRelease(query);
  }
  if (key_ref) {
    CFRelease(key_ref);
  }
  if (status == errSecItemNotFound) {
    return std::nullopt;
  }
  if (status != errSecSuccess || !item) {
    if (item) {
      CFRelease(item);
    }
    return std::nullopt;
  }
  // Copy the password bytes into an owned string before releasing the
  // CFData returned by the keychain.
  CFDataRef data_ref = static_cast<CFDataRef>(item);
  const UInt8* data_ptr = CFDataGetBytePtr(data_ref);
  CFIndex data_len = CFDataGetLength(data_ref);
  std::string value(reinterpret_cast<const char*>(data_ptr),
                    static_cast<size_t>(data_len));
  CFRelease(item);
  return value;
#else
  (void)key;
  return std::nullopt;
#endif
}
461
462} // namespace
463
466 agent_chat_ = std::make_unique<AgentChat>();
467 local_coordinator_ = std::make_unique<AgentCollaborationCoordinator>();
468 config_panel_ = std::make_unique<AgentConfigPanel>();
469 feature_flag_panel_ = std::make_unique<FeatureFlagEditorPanel>();
470 manifest_panel_ = std::make_unique<ManifestPanel>();
471 mesen_debug_panel_ = std::make_unique<MesenDebugPanel>();
472 mesen_screenshot_panel_ = std::make_unique<MesenScreenshotPanel>();
473 oracle_state_panel_ = std::make_unique<OracleStateLibraryPanel>();
474 sram_viewer_panel_ = std::make_unique<SramViewerPanel>();
475 prompt_editor_ = std::make_unique<TextEditor>();
476 common_tiles_editor_ = std::make_unique<TextEditor>();
477
478 // Initialize default configuration (legacy)
479 current_config_.provider = "mock";
482
483 // Initialize default bot profile
484 current_profile_.name = "Default Z3ED Bot";
485 current_profile_.description = "Default bot for Zelda 3 ROM editing";
486 current_profile_.provider = "mock";
490 current_profile_.tags = {"default", "z3ed"};
491
492 // Setup text editors
493 prompt_editor_->SetLanguageDefinition(
495 prompt_editor_->SetReadOnly(false);
496 prompt_editor_->SetShowWhitespaces(false);
497
498 common_tiles_editor_->SetLanguageDefinition(
500 common_tiles_editor_->SetReadOnly(false);
501 common_tiles_editor_->SetShowWhitespaces(false);
502
503 // Ensure profiles directory exists
505
507 {"Persona", "Define persona and goals", false},
508 {"Tool Stack", "Select the agent's tools", false},
509 {"Automation", "Configure automation hooks", false},
510 {"Validation", "Describe E2E validation", false},
511 {"E2E Checklist", "Track readiness for end-to-end runs", false}};
513 "Describe the persona, tone, and constraints for this agent.";
514}
515
516AgentEditor::~AgentEditor() = default;
517
519 // Base initialization
521
522 // Register cards with the card registry
524
525 // Register EditorPanel instances with PanelManager
527 auto* panel_manager = dependencies_.panel_manager;
528
529 // Register all agent EditorPanels with callbacks
530 panel_manager->RegisterEditorPanel(
531 std::make_unique<AgentConfigurationPanel>(
532 [this]() { DrawConfigurationPanel(); }));
533 panel_manager->RegisterEditorPanel(
534 std::make_unique<AgentStatusPanel>([this]() { DrawStatusPanel(); }));
535 panel_manager->RegisterEditorPanel(std::make_unique<AgentPromptEditorPanel>(
536 [this]() { DrawPromptEditorPanel(); }));
537 panel_manager->RegisterEditorPanel(std::make_unique<AgentBotProfilesPanel>(
538 [this]() { DrawBotProfilesPanel(); }));
539 panel_manager->RegisterEditorPanel(std::make_unique<AgentBuilderPanel>(
540 [this]() { DrawAgentBuilderPanel(); }));
541 panel_manager->RegisterEditorPanel(
542 std::make_unique<AgentChatPanel>(agent_chat_.get()));
543
544 // Knowledge Base panel (callback set by AgentUiController)
545 panel_manager->RegisterEditorPanel(
546 std::make_unique<AgentKnowledgeBasePanel>([this]() {
549 } else {
550 ImGui::TextDisabled("Knowledge service not available");
551 ImGui::TextWrapped(
552 "Build with Z3ED_AI=ON to enable the knowledge service.");
553 }
554 }));
555
556 panel_manager->RegisterEditorPanel(std::make_unique<AgentMesenDebugPanel>(
557 [this]() { DrawMesenDebugPanel(); }));
558
559 panel_manager->RegisterEditorPanel(
560 std::make_unique<MesenScreenshotEditorPanel>(
561 [this]() { DrawMesenScreenshotPanel(); }));
562
563 panel_manager->RegisterEditorPanel(
564 std::make_unique<OracleStateLibraryEditorPanel>(
565 [this]() { DrawOracleStatePanel(); }));
566
567 panel_manager->RegisterEditorPanel(
568 std::make_unique<FeatureFlagEditorEditorPanel>(
569 [this]() { DrawFeatureFlagPanel(); }));
570
571 panel_manager->RegisterEditorPanel(
572 std::make_unique<ManifestEditorPanel>(
573 [this]() { DrawManifestPanel(); }));
574
575 panel_manager->RegisterEditorPanel(
576 std::make_unique<SramViewerEditorPanel>(
577 [this]() { DrawSramViewerPanel(); }));
578
579 if (agent_chat_) {
580 agent_chat_->SetPanelOpener([panel_manager](const std::string& panel_id) {
581 if (!panel_id.empty()) {
582 panel_manager->ShowPanel(panel_id);
583 }
584 });
585 }
586 }
587
589}
590
592 auto* settings = dependencies_.user_settings;
593 if (!settings) {
594 return;
595 }
596 const auto& prefs = settings->prefs();
597 if (prefs.ai_hosts.empty() && prefs.ai_profiles.empty()) {
598 return;
599 }
600 bool applied = false;
601 if (!force) {
602 if (!current_profile_.host_id.empty()) {
603 return;
604 }
605 if (current_profile_.provider != "mock") {
606 return;
607 }
608 }
609 if (!prefs.ai_hosts.empty()) {
610 const std::string& active_id = prefs.active_ai_host_id.empty()
611 ? prefs.ai_hosts.front().id
612 : prefs.active_ai_host_id;
613 if (!active_id.empty()) {
614 for (const auto& host : prefs.ai_hosts) {
615 if (host.id == active_id) {
616 ApplyHostPresetToProfile(&current_profile_, host, &prefs);
617 applied = true;
618 break;
619 }
620 }
621 }
622 }
623 if (!prefs.ai_profiles.empty()) {
624 const UserSettings::Preferences::AiModelProfile* active_profile = nullptr;
625 if (!prefs.active_ai_profile.empty()) {
626 for (const auto& profile : prefs.ai_profiles) {
627 if (profile.name == prefs.active_ai_profile) {
628 active_profile = &profile;
629 break;
630 }
631 }
632 }
633 if (!active_profile) {
634 active_profile = &prefs.ai_profiles.front();
635 }
636 if (active_profile && (force || current_profile_.model.empty())) {
637 if (!active_profile->model.empty()) {
638 current_profile_.model = active_profile->model;
639 current_profile_.temperature = active_profile->temperature;
640 current_profile_.top_p = active_profile->top_p;
642 applied = true;
643 }
644 }
645 }
646 if (current_profile_.openai_api_key.empty() &&
647 !prefs.openai_api_key.empty()) {
648 current_profile_.openai_api_key = prefs.openai_api_key;
649 applied = true;
650 }
651 if (current_profile_.gemini_api_key.empty() &&
652 !prefs.gemini_api_key.empty()) {
653 current_profile_.gemini_api_key = prefs.gemini_api_key;
654 applied = true;
655 }
657 !prefs.anthropic_api_key.empty()) {
658 current_profile_.anthropic_api_key = prefs.anthropic_api_key;
659 applied = true;
660 }
661 if (applied) {
664 }
665}
666
670
673 return;
674 }
675 auto& ui = profile_ui_state_;
676 CopyStringToBuffer(current_profile_.model, ui.model_buf);
677 CopyStringToBuffer(current_profile_.ollama_host.empty()
678 ? "http://localhost:11434"
680 ui.ollama_host_buf);
681 CopyStringToBuffer(current_profile_.gemini_api_key, ui.gemini_key_buf);
682 CopyStringToBuffer(current_profile_.anthropic_api_key, ui.anthropic_key_buf);
683 CopyStringToBuffer(current_profile_.openai_api_key, ui.openai_key_buf);
684 CopyStringToBuffer(current_profile_.openai_base_url.empty()
685 ? "https://api.openai.com"
687 ui.openai_base_buf);
688 CopyStringToBuffer(current_profile_.name, ui.name_buf);
689 CopyStringToBuffer(current_profile_.description, ui.desc_buf);
690 CopyStringToBuffer(BuildTagsString(current_profile_.tags), ui.tags_buf);
691 ui.dirty = false;
692}
693
695 // Panel descriptors are now auto-created by RegisterEditorPanel() calls
696 // in Initialize(). No need for duplicate RegisterPanel() calls here.
697}
698
699absl::Status AgentEditor::Load() {
700 // Load agent configuration from project/settings
701 // Try to load all bot profiles
702 loaded_profiles_.clear();
703 auto profiles_dir = GetProfilesDirectory();
704 if (std::filesystem::exists(profiles_dir)) {
705 for (const auto& entry :
706 std::filesystem::directory_iterator(profiles_dir)) {
707 if (entry.path().extension() == ".json") {
708 std::ifstream file(entry.path());
709 if (file.is_open()) {
710 std::string json_content((std::istreambuf_iterator<char>(file)),
711 std::istreambuf_iterator<char>());
712 auto profile_or = JsonToProfile(json_content);
713 if (profile_or.ok()) {
714 loaded_profiles_.push_back(profile_or.value());
715 }
716 }
717 }
718 }
719 }
720 return absl::OkStatus();
721}
722
723absl::Status AgentEditor::Save() {
724 // Save current profile
725 current_profile_.modified_at = absl::Now();
727}
728
729absl::Status AgentEditor::Update() {
730 if (!active_)
731 return absl::OkStatus();
732
733 // Draw configuration dashboard
735
736 // Chat widget is drawn separately (not here)
737
738 return absl::OkStatus();
739}
740
742 ProposalDrawer* proposal_drawer,
743 Rom* rom) {
744 toast_manager_ = toast_manager;
745 proposal_drawer_ = proposal_drawer;
746 rom_ = rom;
747
748 // Auto-load API keys from environment
749 bool profile_updated = false;
750 auto env_value = [](const char* key) -> std::string {
751 const char* value = std::getenv(key);
752 return value ? std::string(value) : std::string();
753 };
754
755 std::string env_openai_base = env_value("OPENAI_BASE_URL");
756 if (env_openai_base.empty()) {
757 env_openai_base = env_value("OPENAI_API_BASE");
758 }
759 std::string env_openai_model = env_value("OPENAI_MODEL");
760 std::string env_ollama_host = env_value("OLLAMA_HOST");
761 std::string env_ollama_model = env_value("OLLAMA_MODEL");
762 std::string env_gemini_model = env_value("GEMINI_MODEL");
763 std::string env_anthropic_model = env_value("ANTHROPIC_MODEL");
764
765 if (!env_ollama_host.empty() &&
766 current_profile_.ollama_host != env_ollama_host) {
767 current_profile_.ollama_host = env_ollama_host;
768 current_config_.ollama_host = env_ollama_host;
769 profile_updated = true;
770 }
771 if (!env_openai_base.empty()) {
772 std::string normalized_base = cli::NormalizeOpenAiBaseUrl(env_openai_base);
773 if (current_profile_.openai_base_url.empty() ||
775 "https://api.openai.com") {
776 current_profile_.openai_base_url = normalized_base;
777 current_config_.openai_base_url = normalized_base;
778 profile_updated = true;
779 }
780 }
781
782 if (const char* gemini_key = std::getenv("GEMINI_API_KEY")) {
783 current_profile_.gemini_api_key = gemini_key;
784 current_config_.gemini_api_key = gemini_key;
785 profile_updated = true;
786 }
787
788 if (const char* anthropic_key = std::getenv("ANTHROPIC_API_KEY")) {
789 current_profile_.anthropic_api_key = anthropic_key;
790 current_config_.anthropic_api_key = anthropic_key;
791 profile_updated = true;
792 }
793
794 if (const char* openai_key = std::getenv("OPENAI_API_KEY")) {
795 current_profile_.openai_api_key = openai_key;
796 current_config_.openai_api_key = openai_key;
797 profile_updated = true;
798 }
799
800 bool provider_is_default =
802
803 if (provider_is_default) {
804 if (!current_profile_.gemini_api_key.empty()) {
805 current_profile_.provider = "gemini";
806 current_config_.provider = "gemini";
807 if (current_profile_.model.empty()) {
809 env_gemini_model.empty() ? "gemini-2.5-flash" : env_gemini_model;
811 }
812 profile_updated = true;
813 } else if (!current_profile_.anthropic_api_key.empty()) {
814 current_profile_.provider = "anthropic";
815 current_config_.provider = "anthropic";
816 if (current_profile_.model.empty()) {
817 current_profile_.model = env_anthropic_model.empty()
818 ? "claude-3-5-sonnet-20241022"
819 : env_anthropic_model;
821 }
822 profile_updated = true;
823 } else if (!current_profile_.openai_api_key.empty() ||
824 !env_openai_base.empty()) {
825 current_profile_.provider = "openai";
826 current_config_.provider = "openai";
827 if (current_profile_.model.empty()) {
828 if (!env_openai_model.empty()) {
829 current_profile_.model = env_openai_model;
830 } else if (!current_profile_.openai_api_key.empty()) {
831 current_profile_.model = "gpt-4o-mini";
832 }
834 }
835 profile_updated = true;
836 } else if (!env_ollama_host.empty() || !env_ollama_model.empty()) {
837 current_profile_.provider = "ollama";
838 current_config_.provider = "ollama";
839 if (current_profile_.model.empty() && !env_ollama_model.empty()) {
840 current_profile_.model = env_ollama_model;
842 }
843 profile_updated = true;
844 }
845 }
846
847 if (current_profile_.provider == "ollama" && current_profile_.model.empty() &&
848 !env_ollama_model.empty()) {
849 current_profile_.model = env_ollama_model;
850 current_config_.model = env_ollama_model;
851 profile_updated = true;
852 }
853 if (current_profile_.provider == "openai" && current_profile_.model.empty() &&
854 !env_openai_model.empty()) {
855 current_profile_.model = env_openai_model;
856 current_config_.model = env_openai_model;
857 profile_updated = true;
858 }
859 if (current_profile_.provider == "anthropic" &&
860 current_profile_.model.empty() && !env_anthropic_model.empty()) {
861 current_profile_.model = env_anthropic_model;
862 current_config_.model = env_anthropic_model;
863 profile_updated = true;
864 }
865 if (current_profile_.provider == "gemini" && current_profile_.model.empty() &&
866 !env_gemini_model.empty()) {
867 current_profile_.model = env_gemini_model;
868 current_config_.model = env_gemini_model;
869 profile_updated = true;
870 }
871 if (profile_updated) {
873 }
874
876 profile_updated = true;
877 }
878
879 if (agent_chat_) {
880 agent_chat_->Initialize(toast_manager, proposal_drawer);
881 if (rom) {
882 agent_chat_->SetRomContext(rom);
883 }
884 }
885
888
889#ifdef YAZE_WITH_GRPC
890 if (agent_chat_) {
891 harness_telemetry_bridge_.SetAgentChat(agent_chat_.get());
892 test::TestManager::Get().SetHarnessListener(&harness_telemetry_bridge_);
893 }
894#endif
895
896 // Push initial configuration to the agent service
898}
899
901 rom_ = rom;
902 if (agent_chat_) {
903 agent_chat_->SetRomContext(rom);
904 }
905}
906
909 if (agent_chat_) {
910 agent_chat_->SetContext(context_);
911 }
913}
914
933
935 if (!context_) {
936 return;
937 }
938
939 auto& ctx_config = context_->agent_config();
940 ctx_config.ai_provider =
942 ctx_config.ai_model = current_profile_.model;
943 ctx_config.ollama_host = current_profile_.ollama_host.empty()
944 ? "http://localhost:11434"
946 ctx_config.gemini_api_key = current_profile_.gemini_api_key;
947 ctx_config.anthropic_api_key = current_profile_.anthropic_api_key;
948 ctx_config.openai_api_key = current_profile_.openai_api_key;
949 ctx_config.openai_base_url =
951 current_profile_.openai_base_url = ctx_config.openai_base_url;
952 ctx_config.host_id = current_profile_.host_id;
953 ctx_config.verbose = current_profile_.verbose;
954 ctx_config.show_reasoning = current_profile_.show_reasoning;
955 ctx_config.max_tool_iterations = current_profile_.max_tool_iterations;
956 ctx_config.max_retry_attempts = current_profile_.max_retry_attempts;
957 ctx_config.temperature = current_profile_.temperature;
958 ctx_config.top_p = current_profile_.top_p;
959 ctx_config.max_output_tokens = current_profile_.max_output_tokens;
960 ctx_config.stream_responses = current_profile_.stream_responses;
961
962 CopyStringToBuffer(ctx_config.ai_provider, ctx_config.provider_buffer);
963 CopyStringToBuffer(ctx_config.ai_model, ctx_config.model_buffer);
964 CopyStringToBuffer(ctx_config.ollama_host, ctx_config.ollama_host_buffer);
965 CopyStringToBuffer(ctx_config.gemini_api_key, ctx_config.gemini_key_buffer);
966 CopyStringToBuffer(ctx_config.anthropic_api_key,
967 ctx_config.anthropic_key_buffer);
968 CopyStringToBuffer(ctx_config.openai_api_key, ctx_config.openai_key_buffer);
969 CopyStringToBuffer(ctx_config.openai_base_url,
970 ctx_config.openai_base_url_buffer);
971
973
975}
976
978 if (!context_) {
979 return;
980 }
981
982 auto& ctx_config = context_->agent_config();
983 const std::string prev_provider = ctx_config.ai_provider;
984 const std::string prev_openai_base = ctx_config.openai_base_url;
985 const std::string prev_ollama_host = ctx_config.ollama_host;
986 ctx_config.ai_provider =
987 config.ai_provider.empty() ? "mock" : config.ai_provider;
988 ctx_config.ai_model = config.ai_model;
989 ctx_config.ollama_host = config.ollama_host.empty() ? "http://localhost:11434"
990 : config.ollama_host;
991 ctx_config.gemini_api_key = config.gemini_api_key;
992 ctx_config.anthropic_api_key = config.anthropic_api_key;
993 ctx_config.openai_api_key = config.openai_api_key;
994 ctx_config.openai_base_url =
996 ctx_config.host_id = config.host_id;
997 ctx_config.verbose = config.verbose;
998 ctx_config.show_reasoning = config.show_reasoning;
999 ctx_config.max_tool_iterations = config.max_tool_iterations;
1000 ctx_config.max_retry_attempts = config.max_retry_attempts;
1001 ctx_config.temperature = config.temperature;
1002 ctx_config.top_p = config.top_p;
1003 ctx_config.max_output_tokens = config.max_output_tokens;
1004 ctx_config.stream_responses = config.stream_responses;
1005 ctx_config.favorite_models = config.favorite_models;
1006 ctx_config.model_chain = config.model_chain;
1007 ctx_config.model_presets = config.model_presets;
1008 ctx_config.chain_mode = config.chain_mode;
1009 ctx_config.tool_config = config.tool_config;
1010
1011 if (prev_provider != ctx_config.ai_provider ||
1012 prev_openai_base != ctx_config.openai_base_url ||
1013 prev_ollama_host != ctx_config.ollama_host) {
1014 auto& model_cache = context_->model_cache();
1015 model_cache.available_models.clear();
1016 model_cache.model_names.clear();
1017 model_cache.last_refresh = absl::InfinitePast();
1018 model_cache.auto_refresh_requested = false;
1019 model_cache.last_provider = ctx_config.ai_provider;
1020 model_cache.last_openai_base = ctx_config.openai_base_url;
1021 model_cache.last_ollama_host = ctx_config.ollama_host;
1022 }
1023
1024 CopyStringToBuffer(ctx_config.ai_provider, ctx_config.provider_buffer);
1025 CopyStringToBuffer(ctx_config.ai_model, ctx_config.model_buffer);
1026 CopyStringToBuffer(ctx_config.ollama_host, ctx_config.ollama_host_buffer);
1027 CopyStringToBuffer(ctx_config.gemini_api_key, ctx_config.gemini_key_buffer);
1028 CopyStringToBuffer(ctx_config.anthropic_api_key,
1029 ctx_config.anthropic_key_buffer);
1030 CopyStringToBuffer(ctx_config.openai_api_key, ctx_config.openai_key_buffer);
1031 CopyStringToBuffer(ctx_config.openai_base_url,
1032 ctx_config.openai_base_url_buffer);
1033
1034 current_profile_.provider = ctx_config.ai_provider;
1035 current_profile_.model = ctx_config.ai_model;
1036 current_profile_.ollama_host = ctx_config.ollama_host;
1037 current_profile_.gemini_api_key = ctx_config.gemini_api_key;
1038 current_profile_.anthropic_api_key = ctx_config.anthropic_api_key;
1039 current_profile_.openai_api_key = ctx_config.openai_api_key;
1040 current_profile_.openai_base_url = ctx_config.openai_base_url;
1041 current_profile_.host_id = ctx_config.host_id;
1042 current_profile_.verbose = ctx_config.verbose;
1043 current_profile_.show_reasoning = ctx_config.show_reasoning;
1044 current_profile_.max_tool_iterations = ctx_config.max_tool_iterations;
1045 current_profile_.max_retry_attempts = ctx_config.max_retry_attempts;
1046 current_profile_.temperature = ctx_config.temperature;
1047 current_profile_.top_p = ctx_config.top_p;
1048 current_profile_.max_output_tokens = ctx_config.max_output_tokens;
1049 current_profile_.stream_responses = ctx_config.stream_responses;
1050 current_profile_.modified_at = absl::Now();
1051
1053
1054 current_config_.provider = ctx_config.ai_provider;
1055 current_config_.model = ctx_config.ai_model;
1056 current_config_.ollama_host = ctx_config.ollama_host;
1057 current_config_.gemini_api_key = ctx_config.gemini_api_key;
1058 current_config_.openai_api_key = ctx_config.openai_api_key;
1059 current_config_.openai_base_url = ctx_config.openai_base_url;
1060 current_config_.verbose = ctx_config.verbose;
1061 current_config_.show_reasoning = ctx_config.show_reasoning;
1062 current_config_.max_tool_iterations = ctx_config.max_tool_iterations;
1063 current_config_.max_retry_attempts = ctx_config.max_retry_attempts;
1064 current_config_.temperature = ctx_config.temperature;
1065 current_config_.top_p = ctx_config.top_p;
1066 current_config_.max_output_tokens = ctx_config.max_output_tokens;
1067 current_config_.stream_responses = ctx_config.stream_responses;
1068
1071}
1072
1074 if (!context_ || !agent_chat_) {
1075 return;
1076 }
1077 auto* service = agent_chat_->GetAgentService();
1078 if (!service) {
1079 return;
1080 }
1081
1082 const auto& tool_config = context_->agent_config().tool_config;
1084 prefs.resources = tool_config.resources;
1085 prefs.dungeon = tool_config.dungeon;
1086 prefs.overworld = tool_config.overworld;
1087 prefs.messages = tool_config.messages;
1088 prefs.dialogue = tool_config.dialogue;
1089 prefs.gui = tool_config.gui;
1090 prefs.music = tool_config.music;
1091 prefs.sprite = tool_config.sprite;
1092#ifdef YAZE_WITH_GRPC
1093 prefs.emulator = tool_config.emulator;
1094#else
1095 prefs.emulator = false;
1096#endif
1097 prefs.memory_inspector = tool_config.memory_inspector;
1098 service->SetToolPreferences(prefs);
1099}
1100
1102 if (!context_) {
1103 return;
1104 }
1105
1106 auto& model_cache = context_->model_cache();
1107 if (model_cache.loading) {
1108 return;
1109 }
1110 if (!force && model_cache.last_refresh != absl::InfinitePast()) {
1111 absl::Duration since_refresh = absl::Now() - model_cache.last_refresh;
1112 if (since_refresh < absl::Seconds(15)) {
1113 return;
1114 }
1115 }
1116
1117 model_cache.loading = true;
1118 model_cache.auto_refresh_requested = true;
1119 model_cache.available_models.clear();
1120 model_cache.model_names.clear();
1122 const auto& prefs = dependencies_.user_settings->prefs();
1123 bool needs_local_refresh = force;
1124 if (!needs_local_refresh) {
1125 if (prefs.ai_model_paths != last_local_model_paths_) {
1126 needs_local_refresh = true;
1127 } else if (last_local_model_scan_ == absl::InfinitePast() ||
1128 (absl::Now() - last_local_model_scan_) > absl::Seconds(30)) {
1129 needs_local_refresh = true;
1130 }
1131 }
1132 if (needs_local_refresh) {
1133 model_cache.local_model_names = CollectLocalModelNames(&prefs);
1134 last_local_model_paths_ = prefs.ai_model_paths;
1135 last_local_model_scan_ = absl::Now();
1136 }
1137 } else {
1138 model_cache.local_model_names.clear();
1139 }
1140
1141 const auto& config = context_->agent_config();
1142 ModelServiceKey next_key;
1143 next_key.provider = config.ai_provider.empty() ? "mock" : config.ai_provider;
1144 next_key.model = config.ai_model;
1145 next_key.ollama_host = config.ollama_host;
1146 next_key.gemini_api_key = config.gemini_api_key;
1147 next_key.anthropic_api_key = config.anthropic_api_key;
1148 next_key.openai_api_key = config.openai_api_key;
1149 next_key.openai_base_url =
1150 cli::NormalizeOpenAiBaseUrl(config.openai_base_url);
1151 next_key.verbose = config.verbose;
1152
1153 auto same_key = [](const ModelServiceKey& a, const ModelServiceKey& b) {
1154 return a.provider == b.provider && a.model == b.model &&
1155 a.ollama_host == b.ollama_host &&
1156 a.gemini_api_key == b.gemini_api_key &&
1157 a.anthropic_api_key == b.anthropic_api_key &&
1158 a.openai_api_key == b.openai_api_key &&
1159 a.openai_base_url == b.openai_base_url && a.verbose == b.verbose;
1160 };
1161
1162 if (next_key.provider == "mock") {
1163 model_cache.loading = false;
1164 model_cache.model_names = model_cache.local_model_names;
1165 model_cache.last_refresh = absl::Now();
1166 return;
1167 }
1168
1169 if (!model_service_ || !same_key(next_key, last_model_service_key_)) {
1170 cli::AIServiceConfig service_config;
1171 service_config.provider = next_key.provider;
1172 service_config.model = next_key.model;
1173 service_config.ollama_host = next_key.ollama_host;
1174 service_config.gemini_api_key = next_key.gemini_api_key;
1175 service_config.anthropic_api_key = next_key.anthropic_api_key;
1176 service_config.openai_api_key = next_key.openai_api_key;
1177 service_config.openai_base_url = next_key.openai_base_url;
1178 service_config.verbose = next_key.verbose;
1179
1180 auto service_or = cli::CreateAIServiceStrict(service_config);
1181 if (!service_or.ok()) {
1182 model_service_.reset();
1183 model_cache.loading = false;
1184 model_cache.model_names = model_cache.local_model_names;
1185 model_cache.last_refresh = absl::Now();
1186 if (toast_manager_) {
1187 toast_manager_->Show(std::string(service_or.status().message()),
1188 ToastType::kWarning, 2.0f);
1189 }
1190 return;
1191 }
1192 model_service_ = std::move(service_or.value());
1193 last_model_service_key_ = next_key;
1194 }
1195
1196 auto models_or = model_service_->ListAvailableModels();
1197 if (!models_or.ok()) {
1198 model_cache.loading = false;
1199 model_cache.model_names = model_cache.local_model_names;
1200 model_cache.last_refresh = absl::Now();
1201 if (toast_manager_) {
1202 toast_manager_->Show(std::string(models_or.status().message()),
1203 ToastType::kWarning, 2.0f);
1204 }
1205 return;
1206 }
1207
1208 model_cache.available_models = models_or.value();
1209 std::unordered_set<std::string> seen;
1210 for (const auto& info : model_cache.available_models) {
1211 if (!info.name.empty()) {
1212 AddUniqueModelName(info.name, &model_cache.model_names, &seen);
1213 }
1214 }
1215 std::sort(model_cache.model_names.begin(), model_cache.model_names.end());
1216 if (context_->agent_config().ai_model.empty()) {
1217 auto& ctx_config = context_->agent_config();
1218 std::string selected;
1219 for (const auto& info : model_cache.available_models) {
1220 if (ctx_config.ai_provider.empty() ||
1221 info.provider == ctx_config.ai_provider) {
1222 selected = info.name;
1223 break;
1224 }
1225 }
1226 if (selected.empty() && !model_cache.model_names.empty()) {
1227 selected = model_cache.model_names.front();
1228 }
1229 if (!selected.empty()) {
1230 ctx_config.ai_model = selected;
1231 CopyStringToBuffer(ctx_config.ai_model, ctx_config.model_buffer);
1232 }
1233 }
1234 model_cache.last_refresh = absl::Now();
1235 model_cache.loading = false;
1236}
1237
1239 if (!context_) {
1240 return;
1241 }
1242
1243 auto& config = context_->agent_config();
1244 if (!preset.provider.empty()) {
1245 config.ai_provider = preset.provider;
1246 }
1247 if (!preset.model.empty()) {
1248 config.ai_model = preset.model;
1249 }
1250 if (!preset.host.empty()) {
1251 if (config.ai_provider == "ollama") {
1252 config.ollama_host = preset.host;
1253 } else if (config.ai_provider == "openai") {
1254 config.openai_base_url = cli::NormalizeOpenAiBaseUrl(preset.host);
1255 }
1256 }
1257
1258 for (auto& entry : config.model_presets) {
1259 if (entry.name == preset.name) {
1260 entry.last_used = absl::Now();
1261 break;
1262 }
1263 }
1264
1265 CopyStringToBuffer(config.ai_provider, config.provider_buffer);
1266 CopyStringToBuffer(config.ai_model, config.model_buffer);
1267 CopyStringToBuffer(config.ollama_host, config.ollama_host_buffer);
1268 CopyStringToBuffer(config.openai_base_url, config.openai_base_url_buffer);
1269
1270 ApplyConfigFromContext(config);
1271}
1272
1274 if (auto_probe_done_) {
1275 return false;
1276 }
1277 auto_probe_done_ = true;
1278
1279 auto* settings = dependencies_.user_settings;
1280 if (!settings) {
1281 return false;
1282 }
1283 const auto& prefs = settings->prefs();
1284 if (prefs.ai_hosts.empty()) {
1285 return false;
1286 }
1287 if (!current_profile_.host_id.empty() ||
1288 current_profile_.provider != "mock") {
1289 return false;
1290 }
1291
1292 auto build_host = [&](const UserSettings::Preferences::AiHost& host) {
1293 auto resolved = host;
1294 if (resolved.api_key.empty()) {
1295 resolved.api_key = ResolveHostApiKey(&prefs, host);
1296 }
1297 return resolved;
1298 };
1299
1300 auto select_host = [&](const UserSettings::Preferences::AiHost& host) {
1301 ApplyHostPresetToProfile(&current_profile_, host, &prefs);
1305 if (context_) {
1307 }
1308 return true;
1309 };
1310
1311 auto try_host = [&](const UserSettings::Preferences::AiHost& host,
1312 bool probe_only_local) {
1313 std::string api_type = host.api_type.empty() ? "openai" : host.api_type;
1314 if (api_type == "lmstudio") {
1315 api_type = "openai";
1316 }
1317 auto resolved = build_host(host);
1318 bool has_key = !resolved.api_key.empty();
1319
1320 if (api_type == "ollama") {
1321 if (resolved.base_url.empty()) {
1322 return false;
1323 }
1324 if (probe_only_local && !IsLocalOrTrustedEndpoint(
1325 resolved.base_url, resolved.allow_insecure)) {
1326 return false;
1327 }
1328 if (!ProbeOllamaHost(resolved.base_url)) {
1329 return false;
1330 }
1331 return select_host(resolved);
1332 }
1333
1334 if (api_type == "openai") {
1335 if (resolved.base_url.empty()) {
1336 return false;
1337 }
1338 bool trusted =
1339 IsLocalOrTrustedEndpoint(resolved.base_url, resolved.allow_insecure);
1340 if (probe_only_local && !trusted) {
1341 return false;
1342 }
1343 if (trusted && ProbeOpenAICompatible(resolved.base_url)) {
1344 return select_host(resolved);
1345 }
1346 if (!probe_only_local && has_key) {
1347 return select_host(resolved);
1348 }
1349 return false;
1350 }
1351
1352 if (api_type == "gemini") {
1353 if (!has_key) {
1354 return false;
1355 }
1356 return select_host(resolved);
1357 }
1358
1359 if (api_type == "anthropic") {
1360 if (!has_key) {
1361 return false;
1362 }
1363 return select_host(resolved);
1364 }
1365
1366 return false;
1367 };
1368
1369 std::vector<const UserSettings::Preferences::AiHost*> candidates;
1370 if (!prefs.active_ai_host_id.empty()) {
1371 for (const auto& host : prefs.ai_hosts) {
1372 if (host.id == prefs.active_ai_host_id) {
1373 candidates.push_back(&host);
1374 break;
1375 }
1376 }
1377 }
1378 for (const auto& host : prefs.ai_hosts) {
1379 if (!candidates.empty() && candidates.front()->id == host.id) {
1380 continue;
1381 }
1382 candidates.push_back(&host);
1383 }
1384
1385 for (const auto* host : candidates) {
1386 if (try_host(*host, true)) {
1387 return true;
1388 }
1389 }
1390 for (const auto* host : candidates) {
1391 if (try_host(*host, false)) {
1392 return true;
1393 }
1394 }
1395
1396 return false;
1397}
1398
1400 if (!active_) {
1401 return;
1402 }
1403
1404 // Animate retro effects
1405 ImGuiIO& imgui_io = ImGui::GetIO();
1406 pulse_animation_ += imgui_io.DeltaTime * 2.0f;
1407 scanline_offset_ += imgui_io.DeltaTime * 0.4f;
1408 if (scanline_offset_ > 1.0f) {
1409 scanline_offset_ -= 1.0f;
1410 }
1411 glitch_timer_ += imgui_io.DeltaTime * 5.0f;
1412 blink_counter_ = static_cast<int>(pulse_animation_ * 2.0f) % 2;
1413}
1414
1416 if (!context_) {
1417 ImGui::TextDisabled("Agent configuration unavailable.");
1418 ImGui::TextWrapped("Initialize the Agent UI to edit provider settings.");
1419 return;
1420 }
1421
1422 const auto& theme = AgentUI::GetTheme();
1423
1425 const auto& prefs = dependencies_.user_settings->prefs();
1426 if (!prefs.ai_hosts.empty()) {
1428 theme.accent_color);
1429 const auto& hosts = prefs.ai_hosts;
1430 std::string active_id = current_profile_.host_id.empty()
1431 ? prefs.active_ai_host_id
1433 int active_index = -1;
1434 for (size_t i = 0; i < hosts.size(); ++i) {
1435 if (!active_id.empty() && hosts[i].id == active_id) {
1436 active_index = static_cast<int>(i);
1437 break;
1438 }
1439 }
1440 const char* preview = (active_index >= 0)
1441 ? hosts[active_index].label.c_str()
1442 : "Select host";
1443 if (ImGui::BeginCombo("##ai_host_preset", preview)) {
1444 for (size_t i = 0; i < hosts.size(); ++i) {
1445 const bool selected = (static_cast<int>(i) == active_index);
1446 if (ImGui::Selectable(hosts[i].label.c_str(), selected)) {
1447 ApplyHostPresetToProfile(&current_profile_, hosts[i], &prefs);
1450 }
1451 if (selected) {
1452 ImGui::SetItemDefaultFocus();
1453 }
1454 }
1455 ImGui::EndCombo();
1456 }
1457 if (active_index >= 0) {
1458 const auto& host = hosts[active_index];
1459 ImGui::TextDisabled("Active host: %s", host.label.c_str());
1460 ImGui::TextDisabled("Endpoint: %s", host.base_url.c_str());
1461 ImGui::TextDisabled("API type: %s", host.api_type.empty()
1462 ? "openai"
1463 : host.api_type.c_str());
1464 } else {
1465 ImGui::TextDisabled(
1466 "Host presets come from settings.json (Documents/Yaze).");
1467 }
1468 ImGui::Spacing();
1469 ImGui::Separator();
1470 ImGui::Spacing();
1471 }
1472 }
1473
1475 callbacks.update_config = [this](const AgentConfigState& config) {
1476 ApplyConfigFromContext(config);
1477 };
1478 callbacks.refresh_models = [this](bool force) {
1479 RefreshModelCache(force);
1480 };
1481 callbacks.apply_preset = [this](const ModelPreset& preset) {
1482 ApplyModelPreset(preset);
1483 };
1484 callbacks.apply_tool_preferences = [this]() {
1486 };
1487
1488 if (config_panel_) {
1489 config_panel_->Draw(context_, callbacks, toast_manager_);
1490 }
1491}
1492
1494 const auto& theme = AgentUI::GetTheme();
1495
1497 if (ImGui::BeginChild("AgentStatusCard", ImVec2(0, 150), true)) {
1499 theme.accent_color);
1500
1501 bool chat_active = agent_chat_ && *agent_chat_->active();
1502 if (ImGui::BeginTable("AgentStatusTable", 2,
1503 ImGuiTableFlags_SizingStretchProp)) {
1504 ImGui::TableNextRow();
1505 ImGui::TableSetColumnIndex(0);
1506 ImGui::TextDisabled("Chat");
1507 ImGui::TableSetColumnIndex(1);
1508 AgentUI::RenderStatusIndicator(chat_active ? "Active" : "Inactive",
1509 chat_active);
1510 if (!chat_active) {
1511 ImGui::SameLine();
1512 if (ImGui::SmallButton("Open")) {
1514 }
1515 }
1516
1517 ImGui::TableNextRow();
1518 ImGui::TableSetColumnIndex(0);
1519 ImGui::TextDisabled("Provider");
1520 ImGui::TableSetColumnIndex(1);
1522 if (!current_profile_.model.empty()) {
1523 ImGui::SameLine();
1524 ImGui::TextDisabled("%s", current_profile_.model.c_str());
1525 }
1526
1527 ImGui::TableNextRow();
1528 ImGui::TableSetColumnIndex(0);
1529 ImGui::TextDisabled("ROM");
1530 ImGui::TableSetColumnIndex(1);
1531 if (rom_ && rom_->is_loaded()) {
1532 ImGui::TextColored(theme.status_success,
1533 ICON_MD_CHECK_CIRCLE " Loaded");
1534 ImGui::SameLine();
1535 ImGui::TextDisabled("Tools ready");
1536 } else {
1537 ImGui::TextColored(theme.status_warning, ICON_MD_WARNING " Not Loaded");
1538 }
1539 ImGui::EndTable();
1540 }
1541 }
1542 ImGui::EndChild();
1543
1544 ImGui::Spacing();
1545
1546 if (ImGui::BeginChild("AgentMetricsCard", ImVec2(0, 170), true)) {
1548 theme.accent_color);
1549 if (agent_chat_) {
1550 auto metrics = agent_chat_->GetAgentService()->GetMetrics();
1551 ImGui::TextDisabled("Messages: %d user / %d agent",
1552 metrics.total_user_messages,
1553 metrics.total_agent_messages);
1554 ImGui::TextDisabled("Tool calls: %d Proposals: %d Commands: %d",
1555 metrics.total_tool_calls, metrics.total_proposals,
1556 metrics.total_commands);
1557 ImGui::TextDisabled("Avg latency: %.2fs Elapsed: %.2fs",
1558 metrics.average_latency_seconds,
1559 metrics.total_elapsed_seconds);
1560
1561 std::vector<double> latencies;
1562 for (const auto& msg : agent_chat_->GetAgentService()->GetHistory()) {
1563 if (msg.sender == cli::agent::ChatMessage::Sender::kAgent &&
1564 msg.model_metadata.has_value() &&
1565 msg.model_metadata->latency_seconds > 0.0) {
1566 latencies.push_back(msg.model_metadata->latency_seconds);
1567 }
1568 }
1569 if (latencies.size() > 30) {
1570 latencies.erase(latencies.begin(),
1571 latencies.end() - static_cast<long>(30));
1572 }
1573 if (!latencies.empty()) {
1574 std::vector<double> xs(latencies.size());
1575 for (size_t i = 0; i < xs.size(); ++i) {
1576 xs[i] = static_cast<double>(i);
1577 }
1578 ImPlotFlags plot_flags = ImPlotFlags_NoLegend | ImPlotFlags_NoMenus |
1579 ImPlotFlags_NoBoxSelect;
1580 if (ImPlot::BeginPlot("##LatencyPlot", ImVec2(-1, 90), plot_flags)) {
1581 ImPlot::SetupAxes(nullptr, nullptr, ImPlotAxisFlags_NoDecorations,
1582 ImPlotAxisFlags_NoDecorations);
1583 ImPlot::SetupAxisLimits(ImAxis_X1, 0, xs.back(), ImGuiCond_Always);
1584 double max_latency =
1585 *std::max_element(latencies.begin(), latencies.end());
1586 ImPlot::SetupAxisLimits(ImAxis_Y1, 0, max_latency * 1.2,
1587 ImGuiCond_Always);
1588 ImPlot::PlotLine("Latency", xs.data(), latencies.data(),
1589 static_cast<int>(latencies.size()));
1590 ImPlot::EndPlot();
1591 }
1592 }
1593 } else {
1594 ImGui::TextDisabled("Initialize the chat system to see metrics.");
1595 }
1596 }
1597 ImGui::EndChild();
1598
1600}
1601
1603 if (ImGui::CollapsingHeader(ICON_MD_ANALYTICS " Quick Metrics")) {
1604 ImGui::TextDisabled("View detailed metrics in the Metrics tab");
1605 }
1606}
1607
1609 const auto& theme = AgentUI::GetTheme();
1611 theme.accent_color);
1612
1613 ImGui::Text("File:");
1614 ImGui::SetNextItemWidth(-45);
1615 if (ImGui::BeginCombo("##prompt_file", active_prompt_file_.c_str())) {
1616 const char* options[] = {"system_prompt.txt", "system_prompt_v2.txt",
1617 "system_prompt_v3.txt"};
1618 for (const char* option : options) {
1619 bool selected = active_prompt_file_ == option;
1620 if (ImGui::Selectable(option, selected)) {
1621 active_prompt_file_ = option;
1623 }
1624 }
1625 ImGui::EndCombo();
1626 }
1627
1628 ImGui::SameLine();
1629 if (ImGui::SmallButton(ICON_MD_REFRESH)) {
1631 }
1632 if (ImGui::IsItemHovered()) {
1633 ImGui::SetTooltip("Reload from disk");
1634 }
1635
1637 std::string asset_path = "agent/" + active_prompt_file_;
1638 auto content_result = AssetLoader::LoadTextFile(asset_path);
1639 if (content_result.ok()) {
1640 prompt_editor_->SetText(*content_result);
1641 current_profile_.system_prompt = *content_result;
1643 if (toast_manager_) {
1644 toast_manager_->Show(absl::StrFormat(ICON_MD_CHECK_CIRCLE " Loaded %s",
1646 ToastType::kSuccess, 2.0f);
1647 }
1648 } else {
1649 std::string placeholder = absl::StrFormat(
1650 "# System prompt file not found: %s\n"
1651 "# Error: %s\n\n"
1652 "# Ensure the file exists in assets/agent/%s\n",
1653 active_prompt_file_, content_result.status().message(),
1655 prompt_editor_->SetText(placeholder);
1657 }
1658 }
1659
1660 ImGui::Spacing();
1661 if (prompt_editor_) {
1662 ImVec2 editor_size(ImGui::GetContentRegionAvail().x,
1663 ImGui::GetContentRegionAvail().y - 60);
1664 prompt_editor_->Render("##prompt_editor", editor_size, true);
1665
1666 ImGui::Spacing();
1667 if (ImGui::Button(ICON_MD_SAVE " Save Prompt to Profile", ImVec2(-1, 0))) {
1669 if (toast_manager_) {
1670 toast_manager_->Show("System prompt saved to profile",
1672 }
1673 }
1674 }
1675
1676 ImGui::Spacing();
1677 ImGui::TextWrapped(
1678 "Edit the system prompt that guides the agent's behavior. Changes are "
1679 "stored on the active bot profile.");
1680}
1681
1683 const auto& theme = AgentUI::GetTheme();
1684 AgentUI::RenderSectionHeader(ICON_MD_FOLDER, "Bot Profile Manager",
1685 theme.accent_color);
1686 ImGui::Spacing();
1687
1688 ImGui::BeginChild("CurrentProfile", ImVec2(0, 150), true);
1689 AgentUI::RenderSectionHeader(ICON_MD_STAR, "Current Profile",
1690 theme.accent_color);
1691 ImGui::Text("Name: %s", current_profile_.name.c_str());
1692 ImGui::Text("Provider: %s", current_profile_.provider.c_str());
1693 if (!current_profile_.model.empty()) {
1694 ImGui::Text("Model: %s", current_profile_.model.c_str());
1695 }
1696 ImGui::TextWrapped("Description: %s",
1698 ? "No description"
1699 : current_profile_.description.c_str());
1700 ImGui::EndChild();
1701
1702 ImGui::Spacing();
1703
1704 if (ImGui::Button(ICON_MD_ADD " Create New Profile", ImVec2(-1, 0))) {
1705 BotProfile new_profile = current_profile_;
1706 new_profile.name = "New Profile";
1707 new_profile.created_at = absl::Now();
1708 new_profile.modified_at = absl::Now();
1709 current_profile_ = new_profile;
1711 if (toast_manager_) {
1712 toast_manager_->Show("New profile created. Configure and save it.",
1714 }
1715 }
1716
1717 ImGui::Spacing();
1719 theme.accent_color);
1720
1721 ImGui::BeginChild("ProfilesList", ImVec2(0, 0), true);
1722 if (loaded_profiles_.empty()) {
1723 ImGui::TextDisabled(
1724 "No saved profiles. Create and save a profile to see it here.");
1725 } else {
1726 for (size_t i = 0; i < loaded_profiles_.size(); ++i) {
1727 const auto& profile = loaded_profiles_[i];
1728 ImGui::PushID(static_cast<int>(i));
1729
1730 bool is_current = (profile.name == current_profile_.name);
1731 ImVec2 button_size(ImGui::GetContentRegionAvail().x - 80, 0);
1732 ImVec4 button_color =
1733 is_current ? theme.accent_color : theme.panel_bg_darker;
1734 if (AgentUI::StyledButton(profile.name.c_str(), button_color,
1735 button_size)) {
1736 if (auto status = LoadBotProfile(profile.name); status.ok()) {
1737 if (toast_manager_) {
1739 absl::StrFormat("Loaded profile: %s", profile.name),
1741 }
1742 } else if (toast_manager_) {
1743 toast_manager_->Show(std::string(status.message()),
1745 }
1746 }
1747
1748 ImGui::SameLine();
1749 {
1750 gui::StyleColorGuard del_guard(ImGuiCol_Button, theme.status_warning);
1751 if (ImGui::SmallButton(ICON_MD_DELETE)) {
1752 if (auto status = DeleteBotProfile(profile.name); status.ok()) {
1753 if (toast_manager_) {
1755 absl::StrFormat("Deleted profile: %s", profile.name),
1757 }
1758 } else if (toast_manager_) {
1759 toast_manager_->Show(std::string(status.message()),
1761 }
1762 }
1763 }
1764
1765 ImGui::TextDisabled(" %s | %s", profile.provider.c_str(),
1766 profile.description.empty()
1767 ? "No description"
1768 : profile.description.c_str());
1769 ImGui::Spacing();
1770 ImGui::PopID();
1771 }
1772 }
1773 ImGui::EndChild();
1774}
1775
1777 const auto& theme = AgentUI::GetTheme();
1778 AgentUI::RenderSectionHeader(ICON_MD_HISTORY, "Chat History Viewer",
1779 theme.accent_color);
1780
1781 if (ImGui::Button(ICON_MD_REFRESH " Refresh History")) {
1783 }
1784 ImGui::SameLine();
1785 if (ImGui::Button(ICON_MD_DELETE_FOREVER " Clear History")) {
1786 if (agent_chat_) {
1787 agent_chat_->ClearHistory();
1788 cached_history_.clear();
1789 }
1790 }
1791
1793 cached_history_ = agent_chat_->GetAgentService()->GetHistory();
1794 history_needs_refresh_ = false;
1795 }
1796
1797 ImGui::Spacing();
1798 ImGui::Separator();
1799
1800 ImGui::BeginChild("HistoryList", ImVec2(0, 0), true);
1801 if (cached_history_.empty()) {
1802 ImGui::TextDisabled(
1803 "No chat history. Start a conversation in the chat window.");
1804 } else {
1805 for (const auto& msg : cached_history_) {
1806 bool from_user = (msg.sender == cli::agent::ChatMessage::Sender::kUser);
1807 ImVec4 color =
1808 from_user ? theme.user_message_color : theme.agent_message_color;
1809
1810 gui::ColoredTextF(color, "%s:", from_user ? "User" : "Agent");
1811
1812 ImGui::SameLine();
1813 ImGui::TextDisabled("%s", absl::FormatTime("%H:%M:%S", msg.timestamp,
1814 absl::LocalTimeZone())
1815 .c_str());
1816
1817 ImGui::TextWrapped("%s", msg.message.c_str());
1818 ImGui::Spacing();
1819 ImGui::Separator();
1820 }
1821 }
1822 ImGui::EndChild();
1823}
1824
1826 const auto& theme = AgentUI::GetTheme();
1828 theme.accent_color);
1829 ImGui::Spacing();
1830
1831 if (agent_chat_) {
1832 auto metrics = agent_chat_->GetAgentService()->GetMetrics();
1833 if (ImGui::BeginTable("MetricsTable", 2,
1834 ImGuiTableFlags_Borders | ImGuiTableFlags_RowBg)) {
1835 ImGui::TableSetupColumn("Metric", ImGuiTableColumnFlags_WidthFixed,
1836 200.0f);
1837 ImGui::TableSetupColumn("Value", ImGuiTableColumnFlags_WidthStretch);
1838 ImGui::TableHeadersRow();
1839
1840 auto Row = [](const char* label, const std::string& value) {
1841 ImGui::TableNextRow();
1842 ImGui::TableSetColumnIndex(0);
1843 ImGui::Text("%s", label);
1844 ImGui::TableSetColumnIndex(1);
1845 ImGui::TextDisabled("%s", value.c_str());
1846 };
1847
1848 Row("Total Messages",
1849 absl::StrFormat("%d user / %d agent", metrics.total_user_messages,
1850 metrics.total_agent_messages));
1851 Row("Tool Calls", absl::StrFormat("%d", metrics.total_tool_calls));
1852 Row("Commands", absl::StrFormat("%d", metrics.total_commands));
1853 Row("Proposals", absl::StrFormat("%d", metrics.total_proposals));
1854 Row("Average Latency (s)",
1855 absl::StrFormat("%.2f", metrics.average_latency_seconds));
1856 Row("Elapsed (s)",
1857 absl::StrFormat("%.2f", metrics.total_elapsed_seconds));
1858
1859 ImGui::EndTable();
1860 }
1861 } else {
1862 ImGui::TextDisabled("Initialize the chat system to see metrics.");
1863 }
1864}
1865
1867 const auto& theme = AgentUI::GetTheme();
1868 AgentUI::RenderSectionHeader(ICON_MD_GRID_ON, "Common Tiles Reference",
1869 theme.accent_color);
1870 ImGui::Spacing();
1871
1872 ImGui::TextWrapped(
1873 "Customize the tile reference file that AI uses for tile placement. "
1874 "Organize tiles by category and provide hex IDs with descriptions.");
1875
1876 ImGui::Spacing();
1877
1878 if (ImGui::Button(ICON_MD_FOLDER_OPEN " Load", ImVec2(100, 0))) {
1879 auto content = AssetLoader::LoadTextFile("agent/common_tiles.txt");
1880 if (content.ok()) {
1881 common_tiles_editor_->SetText(*content);
1883 if (toast_manager_) {
1884 toast_manager_->Show(ICON_MD_CHECK_CIRCLE " Common tiles loaded",
1885 ToastType::kSuccess, 2.0f);
1886 }
1887 }
1888 }
1889
1890 ImGui::SameLine();
1891 if (ImGui::Button(ICON_MD_SAVE " Save", ImVec2(100, 0))) {
1892 if (toast_manager_) {
1894 " Save to project directory (coming soon)",
1895 ToastType::kInfo, 2.0f);
1896 }
1897 }
1898
1899 ImGui::SameLine();
1900 if (ImGui::SmallButton(ICON_MD_REFRESH)) {
1902 }
1903 if (ImGui::IsItemHovered()) {
1904 ImGui::SetTooltip("Reload from disk");
1905 }
1906
1908 auto content = AssetLoader::LoadTextFile("agent/common_tiles.txt");
1909 if (content.ok()) {
1910 common_tiles_editor_->SetText(*content);
1911 } else {
1912 std::string default_tiles =
1913 "# Common Tile16 Reference\n"
1914 "# Format: 0xHEX = Description\n\n"
1915 "[grass_tiles]\n"
1916 "0x020 = Grass (standard)\n\n"
1917 "[nature_tiles]\n"
1918 "0x02E = Tree (oak)\n"
1919 "0x003 = Bush\n\n"
1920 "[water_tiles]\n"
1921 "0x14C = Water (top edge)\n"
1922 "0x14D = Water (middle)\n";
1923 common_tiles_editor_->SetText(default_tiles);
1924 }
1926 }
1927
1928 ImGui::Separator();
1929 ImGui::Spacing();
1930
1932 ImVec2 editor_size(ImGui::GetContentRegionAvail().x,
1933 ImGui::GetContentRegionAvail().y);
1934 common_tiles_editor_->Render("##tiles_editor", editor_size, true);
1935 }
1936}
1937
1939 const auto& theme = AgentUI::GetTheme();
1940 AgentUI::RenderSectionHeader(ICON_MD_ADD, "Create New System Prompt",
1941 theme.accent_color);
1942 ImGui::Spacing();
1943
1944 ImGui::TextWrapped(
1945 "Create a custom system prompt from scratch or start from a template.");
1946 ImGui::Separator();
1947
1948 ImGui::Text("Prompt Name:");
1949 ImGui::SetNextItemWidth(-1);
1950 ImGui::InputTextWithHint("##new_prompt_name", "e.g., custom_prompt.txt",
1952
1953 ImGui::Spacing();
1954 ImGui::Text("Start from template:");
1955
1956 auto LoadTemplate = [&](const char* path, const char* label) {
1957 if (ImGui::Button(label, ImVec2(-1, 0))) {
1958 auto content = AssetLoader::LoadTextFile(path);
1959 if (content.ok() && prompt_editor_) {
1960 prompt_editor_->SetText(*content);
1961 if (toast_manager_) {
1962 toast_manager_->Show("Template loaded", ToastType::kSuccess, 1.5f);
1963 }
1964 }
1965 }
1966 };
1967
1968 LoadTemplate("agent/system_prompt.txt", ICON_MD_FILE_COPY " v1 (Basic)");
1969 LoadTemplate("agent/system_prompt_v2.txt",
1970 ICON_MD_FILE_COPY " v2 (Enhanced)");
1971 LoadTemplate("agent/system_prompt_v3.txt",
1972 ICON_MD_FILE_COPY " v3 (Proactive)");
1973
1974 if (ImGui::Button(ICON_MD_NOTE_ADD " Blank Template", ImVec2(-1, 0))) {
1975 if (prompt_editor_) {
1976 std::string blank_template =
1977 "# Custom System Prompt\n\n"
1978 "You are an AI assistant for ROM hacking.\n\n"
1979 "## Your Role\n"
1980 "- Help users understand ROM data\n"
1981 "- Provide accurate information\n"
1982 "- Use tools when needed\n\n"
1983 "## Guidelines\n"
1984 "1. Always provide text_response after tool calls\n"
1985 "2. Be helpful and accurate\n"
1986 "3. Explain your reasoning\n";
1987 prompt_editor_->SetText(blank_template);
1988 if (toast_manager_) {
1989 toast_manager_->Show("Blank template created", ToastType::kSuccess,
1990 1.5f);
1991 }
1992 }
1993 }
1994
1995 ImGui::Spacing();
1996 ImGui::Separator();
1997
1998 {
1999 gui::StyleColorGuard save_guard(ImGuiCol_Button, theme.status_success);
2000 if (ImGui::Button(ICON_MD_SAVE " Save New Prompt", ImVec2(-1, 40))) {
2001 if (std::strlen(new_prompt_name_) > 0 && prompt_editor_) {
2002 std::string filename = new_prompt_name_;
2003 if (!absl::EndsWith(filename, ".txt")) {
2004 filename += ".txt";
2005 }
2006 if (toast_manager_) {
2008 absl::StrFormat(ICON_MD_SAVE " Prompt saved as %s", filename),
2009 ToastType::kSuccess, 3.0f);
2010 }
2011 std::memset(new_prompt_name_, 0, sizeof(new_prompt_name_));
2012 } else if (toast_manager_) {
2013 toast_manager_->Show(ICON_MD_WARNING " Enter a name for the prompt",
2014 ToastType::kWarning, 2.0f);
2015 }
2016 }
2017 }
2018
2019 ImGui::Spacing();
2020 ImGui::TextWrapped(
2021 "Note: New prompts are saved to your project. Use the Prompt Editor to "
2022 "edit existing prompts.");
2023}
2024
2026 const auto& theme = AgentUI::GetTheme();
2028 theme.accent_color);
2029
2030 if (!agent_chat_) {
2031 ImGui::TextDisabled("Chat system not initialized.");
2032 return;
2033 }
2034
2035 ImGui::BeginChild("AgentBuilderPanel", ImVec2(0, 0), false);
2036
2037 int stage_index =
2038 std::clamp(builder_state_.active_stage, 0,
2039 static_cast<int>(builder_state_.stages.size()) - 1);
2040 int completed_stages = 0;
2041 for (const auto& stage : builder_state_.stages) {
2042 if (stage.completed) {
2043 ++completed_stages;
2044 }
2045 }
2046 float completion_ratio =
2047 builder_state_.stages.empty()
2048 ? 0.0f
2049 : static_cast<float>(completed_stages) /
2050 static_cast<float>(builder_state_.stages.size());
2051 auto truncate_summary = [](const std::string& text) {
2052 constexpr size_t kMaxLen = 64;
2053 if (text.size() <= kMaxLen) {
2054 return text;
2055 }
2056 return text.substr(0, kMaxLen - 3) + "...";
2057 };
2058
2059 const float left_width =
2060 std::min(260.0f, ImGui::GetContentRegionAvail().x * 0.32f);
2061
2062 ImGui::BeginChild("BuilderStages", ImVec2(left_width, 0), true);
2063 AgentUI::RenderSectionHeader(ICON_MD_LIST, "Stages", theme.accent_color);
2064 ImGui::TextDisabled("%d/%zu complete", completed_stages,
2065 builder_state_.stages.size());
2066 ImGui::ProgressBar(completion_ratio, ImVec2(-1, 0));
2067 ImGui::Spacing();
2068
2069 for (size_t i = 0; i < builder_state_.stages.size(); ++i) {
2070 auto& stage = builder_state_.stages[i];
2071 ImGui::PushID(static_cast<int>(i));
2072 bool selected = builder_state_.active_stage == static_cast<int>(i);
2073 if (ImGui::Selectable(stage.name.c_str(), selected)) {
2074 builder_state_.active_stage = static_cast<int>(i);
2075 stage_index = static_cast<int>(i);
2076 }
2077 ImGui::SameLine(ImGui::GetContentRegionAvail().x - 24.0f);
2078 ImGui::Checkbox("##stage_done", &stage.completed);
2079 ImGui::TextDisabled("%s", truncate_summary(stage.summary).c_str());
2080 ImGui::Separator();
2081 ImGui::PopID();
2082 }
2083 ImGui::EndChild();
2084
2085 ImGui::SameLine();
2086
2087 ImGui::BeginChild("BuilderDetails", ImVec2(0, 0), false);
2089 theme.accent_color);
2090 if (stage_index >= 0 &&
2091 stage_index < static_cast<int>(builder_state_.stages.size())) {
2092 ImGui::TextColored(theme.text_secondary_color, "%s",
2093 builder_state_.stages[stage_index].summary.c_str());
2094 }
2095 ImGui::Spacing();
2096
2097 switch (stage_index) {
2098 case 0: {
2099 static std::string new_goal;
2100 ImGui::Text("Persona + Goals");
2101 ImGui::TextWrapped(
2102 "Define the agent's voice, boundaries, and success criteria. Keep "
2103 "goals short and action-focused.");
2104 ImGui::InputTextMultiline("##persona_notes",
2105 &builder_state_.persona_notes, ImVec2(-1, 140));
2106 ImGui::Spacing();
2107 ImGui::TextDisabled("Add Goal");
2108 ImGui::InputTextWithHint("##goal_input",
2109 "e.g. Review collision edge cases", &new_goal);
2110 ImGui::SameLine();
2111 if (ImGui::Button(ICON_MD_ADD) && !new_goal.empty()) {
2112 builder_state_.goals.push_back(new_goal);
2113 new_goal.clear();
2114 }
2115 for (size_t i = 0; i < builder_state_.goals.size(); ++i) {
2116 ImGui::BulletText("%s", builder_state_.goals[i].c_str());
2117 ImGui::SameLine();
2118 ImGui::PushID(static_cast<int>(i));
2119 if (ImGui::SmallButton(ICON_MD_CLOSE)) {
2120 builder_state_.goals.erase(builder_state_.goals.begin() + i);
2121 ImGui::PopID();
2122 break;
2123 }
2124 ImGui::PopID();
2125 }
2126 break;
2127 }
2128 case 1: {
2129 ImGui::Text("Tool Stack");
2130 ImGui::TextWrapped(
2131 "Enable only what the plan needs. Fewer tools = clearer responses.");
2132 auto tool_checkbox = [&](const char* label, bool* value,
2133 const char* hint) {
2134 ImGui::Checkbox(label, value);
2135 ImGui::SameLine();
2136 ImGui::TextDisabled("%s", hint);
2137 };
2138 tool_checkbox("Resources", &builder_state_.tools.resources,
2139 "Project files, docs, refs");
2140 tool_checkbox("Dungeon", &builder_state_.tools.dungeon,
2141 "Rooms, objects, entrances");
2142 tool_checkbox("Overworld", &builder_state_.tools.overworld,
2143 "Maps, tile16, entities");
2144 tool_checkbox("Dialogue", &builder_state_.tools.dialogue,
2145 "NPC text + scripts");
2146 tool_checkbox("GUI Automation", &builder_state_.tools.gui,
2147 "Test harness + screenshots");
2148 tool_checkbox("Music", &builder_state_.tools.music, "Trackers + SPC");
2149 tool_checkbox("Sprite", &builder_state_.tools.sprite,
2150 "Sprites + palettes");
2151 tool_checkbox("Emulator", &builder_state_.tools.emulator,
2152 "Runtime probes");
2153 tool_checkbox("Memory Inspector", &builder_state_.tools.memory_inspector,
2154 "RAM/SRAM watch + inspection");
2155 break;
2156 }
2157 case 2: {
2158 ImGui::Text("Automation");
2159 ImGui::TextWrapped(
2160 "Use automation to validate fixes quickly. Pair with gRPC harness "
2161 "for repeatable checks.");
2162 ImGui::Checkbox("Auto-run harness plan", &builder_state_.auto_run_tests);
2163 ImGui::Checkbox("Auto-sync ROM context", &builder_state_.auto_sync_rom);
2164 ImGui::Checkbox("Auto-focus proposal drawer",
2166 break;
2167 }
2168 case 3: {
2169 ImGui::Text("Validation Criteria");
2170 ImGui::TextWrapped(
2171 "Capture the acceptance criteria and what a passing run looks like.");
2172 ImGui::InputTextMultiline("##validation_notes",
2173 &builder_state_.stages[stage_index].summary,
2174 ImVec2(-1, 140));
2175 break;
2176 }
2177 case 4: {
2178 ImGui::Text("E2E Checklist");
2179 ImGui::ProgressBar(completion_ratio, ImVec2(-1, 0),
2180 absl::StrFormat("%d/%zu complete", completed_stages,
2181 builder_state_.stages.size())
2182 .c_str());
2183 ImGui::Checkbox("Ready for automation handoff",
2185 ImGui::TextDisabled("Auto-sync ROM: %s",
2186 builder_state_.auto_sync_rom ? "ON" : "OFF");
2187 ImGui::TextDisabled("Auto-focus proposals: %s",
2188 builder_state_.auto_focus_proposals ? "ON" : "OFF");
2189 break;
2190 }
2191 }
2192
2193 ImGui::Spacing();
2194 ImGui::Separator();
2195 ImGui::TextDisabled("Builder Output");
2196 ImGui::BulletText("Persona notes sync to the chat summary");
2197 ImGui::BulletText("Tool stack applies to the agent tool preferences");
2198 ImGui::BulletText("E2E readiness gates automation handoff");
2199
2200 ImGui::Spacing();
2201 gui::StyleColorGuard apply_guard(ImGuiCol_Button, theme.accent_color);
2202 if (ImGui::Button(ICON_MD_LINK " Apply to Chat", ImVec2(-1, 0))) {
2203 auto* service = agent_chat_->GetAgentService();
2204 if (service) {
2210 prefs.gui = builder_state_.tools.gui;
2213#ifdef YAZE_WITH_GRPC
2215#endif
2217 service->SetToolPreferences(prefs);
2218
2219 auto agent_cfg = service->GetConfig();
2220 agent_cfg.max_tool_iterations = current_profile_.max_tool_iterations;
2221 agent_cfg.max_retry_attempts = current_profile_.max_retry_attempts;
2222 agent_cfg.verbose = current_profile_.verbose;
2223 agent_cfg.show_reasoning = current_profile_.show_reasoning;
2224 service->SetConfig(agent_cfg);
2225 }
2226
2227 agent_chat_->SetLastPlanSummary(builder_state_.persona_notes);
2228
2229 if (toast_manager_) {
2230 toast_manager_->Show("Builder tool plan synced to chat",
2231 ToastType::kSuccess, 2.0f);
2232 }
2233 }
2234
2235 ImGui::Spacing();
2236 ImGui::InputTextWithHint("##blueprint_path",
2237 "Path to blueprint (optional)...",
2239 std::filesystem::path blueprint_path =
2241 ? (std::filesystem::temp_directory_path() / "agent_builder.json")
2242 : std::filesystem::path(builder_state_.blueprint_path);
2243
2244 if (ImGui::Button(ICON_MD_SAVE " Save Blueprint")) {
2245 auto status = SaveBuilderBlueprint(blueprint_path);
2246 if (toast_manager_) {
2247 if (status.ok()) {
2248 toast_manager_->Show("Builder blueprint saved", ToastType::kSuccess,
2249 2.0f);
2250 } else {
2251 toast_manager_->Show(std::string(status.message()), ToastType::kError,
2252 3.5f);
2253 }
2254 }
2255 }
2256 ImGui::SameLine();
2257 if (ImGui::Button(ICON_MD_FOLDER_OPEN " Load Blueprint")) {
2258 auto status = LoadBuilderBlueprint(blueprint_path);
2259 if (toast_manager_) {
2260 if (status.ok()) {
2261 toast_manager_->Show("Builder blueprint loaded", ToastType::kSuccess,
2262 2.0f);
2263 } else {
2264 toast_manager_->Show(std::string(status.message()), ToastType::kError,
2265 3.5f);
2266 }
2267 }
2268 }
2269
2270 ImGui::EndChild();
2271 ImGui::EndChild();
2272}
2273
2275 if (mesen_debug_panel_) {
2276 mesen_debug_panel_->Draw();
2277 } else {
2278 ImGui::TextDisabled("Mesen2 debug panel unavailable.");
2279 }
2280}
2281
2285 } else {
2286 ImGui::TextDisabled("Mesen2 screenshot panel unavailable.");
2287 }
2288}
2289
2291 if (oracle_state_panel_) {
2292 // Share the Mesen client if available
2293 if (mesen_debug_panel_ && mesen_debug_panel_->IsConnected()) {
2294 // The panels can share the client from the registry
2295 }
2296 oracle_state_panel_->Draw();
2297 } else {
2298 ImGui::TextDisabled("Oracle state panel unavailable.");
2299 }
2300}
2301
2303 if (feature_flag_panel_) {
2304 // Wire up the project pointer so the panel can access the manifest
2306 feature_flag_panel_->Draw();
2307 } else {
2308 ImGui::TextDisabled("Feature flag panel unavailable.");
2309 }
2310}
2311
2313 if (manifest_panel_) {
2315 manifest_panel_->Draw();
2316 } else {
2317 ImGui::TextDisabled("Manifest panel unavailable.");
2318 }
2319}
2320
2322 if (sram_viewer_panel_) {
2324 sram_viewer_panel_->Draw();
2325 } else {
2326 ImGui::TextDisabled("SRAM viewer panel unavailable.");
2327 }
2328}
2329
2331 const std::filesystem::path& path) {
2332#if defined(YAZE_WITH_JSON)
2333 nlohmann::json json;
2334 json["persona_notes"] = builder_state_.persona_notes;
2335 json["goals"] = builder_state_.goals;
2336 json["auto_run_tests"] = builder_state_.auto_run_tests;
2337 json["auto_sync_rom"] = builder_state_.auto_sync_rom;
2338 json["auto_focus_proposals"] = builder_state_.auto_focus_proposals;
2339 json["ready_for_e2e"] = builder_state_.ready_for_e2e;
2340 json["tools"] = {
2341 {"resources", builder_state_.tools.resources},
2342 {"dungeon", builder_state_.tools.dungeon},
2343 {"overworld", builder_state_.tools.overworld},
2344 {"dialogue", builder_state_.tools.dialogue},
2345 {"gui", builder_state_.tools.gui},
2346 {"music", builder_state_.tools.music},
2347 {"sprite", builder_state_.tools.sprite},
2348 {"emulator", builder_state_.tools.emulator},
2349 {"memory_inspector", builder_state_.tools.memory_inspector},
2350 };
2351 json["stages"] = nlohmann::json::array();
2352 for (const auto& stage : builder_state_.stages) {
2353 json["stages"].push_back({{"name", stage.name},
2354 {"summary", stage.summary},
2355 {"completed", stage.completed}});
2356 }
2357
2358 std::error_code ec;
2359 std::filesystem::create_directories(path.parent_path(), ec);
2360 std::ofstream file(path);
2361 if (!file.is_open()) {
2362 return absl::InternalError(
2363 absl::StrFormat("Failed to open blueprint: %s", path.string()));
2364 }
2365 file << json.dump(2);
2366 builder_state_.blueprint_path = path.string();
2367 return absl::OkStatus();
2368#else
2369 (void)path;
2370 return absl::UnimplementedError("Blueprint export requires JSON support");
2371#endif
2372}
2373
2375 const std::filesystem::path& path) {
2376#if defined(YAZE_WITH_JSON)
2377 std::ifstream file(path);
2378 if (!file.is_open()) {
2379 return absl::NotFoundError(
2380 absl::StrFormat("Blueprint not found: %s", path.string()));
2381 }
2382
2383 nlohmann::json json;
2384 file >> json;
2385
2386 builder_state_.persona_notes = json.value("persona_notes", "");
2387 builder_state_.goals.clear();
2388 if (json.contains("goals") && json["goals"].is_array()) {
2389 for (const auto& goal : json["goals"]) {
2390 if (goal.is_string()) {
2391 builder_state_.goals.push_back(goal.get<std::string>());
2392 }
2393 }
2394 }
2395 if (json.contains("tools") && json["tools"].is_object()) {
2396 auto tools = json["tools"];
2397 builder_state_.tools.resources = tools.value("resources", true);
2398 builder_state_.tools.dungeon = tools.value("dungeon", true);
2399 builder_state_.tools.overworld = tools.value("overworld", true);
2400 builder_state_.tools.dialogue = tools.value("dialogue", true);
2401 builder_state_.tools.gui = tools.value("gui", false);
2402 builder_state_.tools.music = tools.value("music", false);
2403 builder_state_.tools.sprite = tools.value("sprite", false);
2404 builder_state_.tools.emulator = tools.value("emulator", false);
2406 tools.value("memory_inspector", false);
2407 }
2408 builder_state_.auto_run_tests = json.value("auto_run_tests", false);
2409 builder_state_.auto_sync_rom = json.value("auto_sync_rom", true);
2411 json.value("auto_focus_proposals", true);
2412 builder_state_.ready_for_e2e = json.value("ready_for_e2e", false);
2413 if (json.contains("stages") && json["stages"].is_array()) {
2414 builder_state_.stages.clear();
2415 for (const auto& stage : json["stages"]) {
2416 AgentBuilderState::Stage builder_stage;
2417 builder_stage.name = stage.value("name", std::string{});
2418 builder_stage.summary = stage.value("summary", std::string{});
2419 builder_stage.completed = stage.value("completed", false);
2420 builder_state_.stages.push_back(builder_stage);
2421 }
2422 }
2423 builder_state_.blueprint_path = path.string();
2424 return absl::OkStatus();
2425#else
2426 (void)path;
2427 return absl::UnimplementedError("Blueprint import requires JSON support");
2428#endif
2429}
2430
2431absl::Status AgentEditor::SaveBotProfile(const BotProfile& profile) {
2432#if defined(YAZE_WITH_JSON)
2433 auto dir_status = EnsureProfilesDirectory();
2434 if (!dir_status.ok())
2435 return dir_status;
2436
2437 std::filesystem::path profile_path =
2438 GetProfilesDirectory() / (profile.name + ".json");
2439 std::ofstream file(profile_path);
2440 if (!file.is_open()) {
2441 return absl::InternalError("Failed to open profile file for writing");
2442 }
2443
2444 file << ProfileToJson(profile);
2445 file.close();
2446 return Load();
2447#else
2448 return absl::UnimplementedError(
2449 "JSON support required for profile management");
2450#endif
2451}
2452
2453absl::Status AgentEditor::LoadBotProfile(const std::string& name) {
2454#if defined(YAZE_WITH_JSON)
2455 std::filesystem::path profile_path =
2456 GetProfilesDirectory() / (name + ".json");
2457 if (!std::filesystem::exists(profile_path)) {
2458 return absl::NotFoundError(absl::StrFormat("Profile '%s' not found", name));
2459 }
2460
2461 std::ifstream file(profile_path);
2462 if (!file.is_open()) {
2463 return absl::InternalError("Failed to open profile file");
2464 }
2465
2466 std::string json_content((std::istreambuf_iterator<char>(file)),
2467 std::istreambuf_iterator<char>());
2468
2469 auto profile_or = JsonToProfile(json_content);
2470 if (!profile_or.ok()) {
2471 return profile_or.status();
2472 }
2473 current_profile_ = *profile_or;
2475
2490
2493 return absl::OkStatus();
2494#else
2495 return absl::UnimplementedError(
2496 "JSON support required for profile management");
2497#endif
2498}
2499
2500absl::Status AgentEditor::DeleteBotProfile(const std::string& name) {
2501 std::filesystem::path profile_path =
2502 GetProfilesDirectory() / (name + ".json");
2503 if (!std::filesystem::exists(profile_path)) {
2504 return absl::NotFoundError(absl::StrFormat("Profile '%s' not found", name));
2505 }
2506
2507 std::filesystem::remove(profile_path);
2508 return Load();
2509}
2510
2511std::vector<AgentEditor::BotProfile> AgentEditor::GetAllProfiles() const {
2512 return loaded_profiles_;
2513}
2514
2536
2537absl::Status AgentEditor::ExportProfile(const BotProfile& profile,
2538 const std::filesystem::path& path) {
2539#if defined(YAZE_WITH_JSON)
2540 auto status = SaveBotProfile(profile);
2541 if (!status.ok())
2542 return status;
2543
2544 std::ofstream file(path);
2545 if (!file.is_open()) {
2546 return absl::InternalError("Failed to open file for export");
2547 }
2548 file << ProfileToJson(profile);
2549 return absl::OkStatus();
2550#else
2551 (void)profile;
2552 (void)path;
2553 return absl::UnimplementedError("JSON support required");
2554#endif
2555}
2556
2557absl::Status AgentEditor::ImportProfile(const std::filesystem::path& path) {
2558#if defined(YAZE_WITH_JSON)
2559 if (!std::filesystem::exists(path)) {
2560 return absl::NotFoundError("Import file not found");
2561 }
2562
2563 std::ifstream file(path);
2564 if (!file.is_open()) {
2565 return absl::InternalError("Failed to open import file");
2566 }
2567
2568 std::string json_content((std::istreambuf_iterator<char>(file)),
2569 std::istreambuf_iterator<char>());
2570
2571 auto profile_or = JsonToProfile(json_content);
2572 if (!profile_or.ok()) {
2573 return profile_or.status();
2574 }
2575
2576 return SaveBotProfile(*profile_or);
2577#else
2578 (void)path;
2579 return absl::UnimplementedError("JSON support required");
2580#endif
2581}
2582
2583std::filesystem::path AgentEditor::GetProfilesDirectory() const {
2584 auto agent_dir = yaze::util::PlatformPaths::GetAppDataSubdirectory("agent");
2585 if (agent_dir.ok()) {
2586 return *agent_dir / "profiles";
2587 }
2589 if (temp_dir.ok()) {
2590 return *temp_dir / "agent" / "profiles";
2591 }
2592 return std::filesystem::current_path() / "agent" / "profiles";
2593}
2594
2596 auto dir = GetProfilesDirectory();
2597 std::error_code ec;
2598 std::filesystem::create_directories(dir, ec);
2599 if (ec) {
2600 return absl::InternalError(absl::StrFormat(
2601 "Failed to create profiles directory: %s", ec.message()));
2602 }
2603 return absl::OkStatus();
2604}
2605
2606std::string AgentEditor::ProfileToJson(const BotProfile& profile) const {
2607#if defined(YAZE_WITH_JSON)
2608 nlohmann::json json;
2609 json["name"] = profile.name;
2610 json["description"] = profile.description;
2611 json["provider"] = profile.provider;
2612 json["host_id"] = profile.host_id;
2613 json["model"] = profile.model;
2614 json["ollama_host"] = profile.ollama_host;
2615 json["gemini_api_key"] = profile.gemini_api_key;
2616 json["anthropic_api_key"] = profile.anthropic_api_key;
2617 json["openai_api_key"] = profile.openai_api_key;
2618 json["openai_base_url"] = profile.openai_base_url;
2619 json["system_prompt"] = profile.system_prompt;
2620 json["verbose"] = profile.verbose;
2621 json["show_reasoning"] = profile.show_reasoning;
2622 json["max_tool_iterations"] = profile.max_tool_iterations;
2623 json["max_retry_attempts"] = profile.max_retry_attempts;
2624 json["temperature"] = profile.temperature;
2625 json["top_p"] = profile.top_p;
2626 json["max_output_tokens"] = profile.max_output_tokens;
2627 json["stream_responses"] = profile.stream_responses;
2628 json["tags"] = profile.tags;
2629 json["created_at"] = absl::FormatTime(absl::RFC3339_full, profile.created_at,
2630 absl::UTCTimeZone());
2631 json["modified_at"] = absl::FormatTime(
2632 absl::RFC3339_full, profile.modified_at, absl::UTCTimeZone());
2633
2634 return json.dump(2);
2635#else
2636 return "{}";
2637#endif
2638}
2639
2640absl::StatusOr<AgentEditor::BotProfile> AgentEditor::JsonToProfile(
2641 const std::string& json_str) const {
2642#if defined(YAZE_WITH_JSON)
2643 try {
2644 nlohmann::json json = nlohmann::json::parse(json_str);
2645
2646 BotProfile profile;
2647 profile.name = json.value("name", "Unnamed Profile");
2648 profile.description = json.value("description", "");
2649 profile.provider = json.value("provider", "mock");
2650 profile.host_id = json.value("host_id", "");
2651 profile.model = json.value("model", "");
2652 profile.ollama_host = json.value("ollama_host", "http://localhost:11434");
2653 profile.gemini_api_key = json.value("gemini_api_key", "");
2654 profile.anthropic_api_key = json.value("anthropic_api_key", "");
2655 profile.openai_api_key = json.value("openai_api_key", "");
2656 profile.openai_base_url =
2657 json.value("openai_base_url", "https://api.openai.com");
2658 profile.system_prompt = json.value("system_prompt", "");
2659 profile.verbose = json.value("verbose", false);
2660 profile.show_reasoning = json.value("show_reasoning", true);
2661 profile.max_tool_iterations = json.value("max_tool_iterations", 4);
2662 profile.max_retry_attempts = json.value("max_retry_attempts", 3);
2663 profile.temperature = json.value("temperature", 0.25f);
2664 profile.top_p = json.value("top_p", 0.95f);
2665 profile.max_output_tokens = json.value("max_output_tokens", 2048);
2666 profile.stream_responses = json.value("stream_responses", false);
2667
2668 if (json.contains("tags") && json["tags"].is_array()) {
2669 for (const auto& tag : json["tags"]) {
2670 profile.tags.push_back(tag.get<std::string>());
2671 }
2672 }
2673
2674 if (json.contains("created_at")) {
2675 absl::Time created;
2676 if (absl::ParseTime(absl::RFC3339_full,
2677 json["created_at"].get<std::string>(), &created,
2678 nullptr)) {
2679 profile.created_at = created;
2680 }
2681 }
2682
2683 if (json.contains("modified_at")) {
2684 absl::Time modified;
2685 if (absl::ParseTime(absl::RFC3339_full,
2686 json["modified_at"].get<std::string>(), &modified,
2687 nullptr)) {
2688 profile.modified_at = modified;
2689 }
2690 }
2691
2692 return profile;
2693 } catch (const std::exception& e) {
2694 return absl::InternalError(
2695 absl::StrFormat("Failed to parse profile JSON: %s", e.what()));
2696 }
2697#else
2698 return absl::UnimplementedError("JSON support required");
2699#endif
2700}
2701
2705
2707 current_config_ = config;
2708
2709 if (agent_chat_) {
2710 auto* service = agent_chat_->GetAgentService();
2711 if (service) {
2712 cli::AIServiceConfig provider_config;
2713 provider_config.provider =
2714 config.provider.empty() ? "auto" : config.provider;
2715 provider_config.model = config.model;
2716 provider_config.ollama_host = config.ollama_host;
2717 provider_config.gemini_api_key = config.gemini_api_key;
2718 provider_config.anthropic_api_key = config.anthropic_api_key;
2719 provider_config.openai_api_key = config.openai_api_key;
2720 provider_config.openai_base_url =
2722 provider_config.verbose = config.verbose;
2723
2724 auto status = service->ConfigureProvider(provider_config);
2725 if (!status.ok() && toast_manager_) {
2726 toast_manager_->Show(std::string(status.message()), ToastType::kError);
2727 }
2728
2729 auto agent_cfg = service->GetConfig();
2730 agent_cfg.max_tool_iterations = config.max_tool_iterations;
2731 agent_cfg.max_retry_attempts = config.max_retry_attempts;
2732 agent_cfg.verbose = config.verbose;
2733 agent_cfg.show_reasoning = config.show_reasoning;
2734 service->SetConfig(agent_cfg);
2735 }
2736 }
2737}
2738
2740 return agent_chat_ && *agent_chat_->active();
2741}
2742
2744 if (agent_chat_) {
2745 agent_chat_->set_active(active);
2746 }
2747}
2748
2752
2754 if (agent_chat_) {
2755 agent_chat_->set_active(true);
2756 }
2757}
2758
2759absl::StatusOr<AgentEditor::SessionInfo> AgentEditor::HostSession(
2760 const std::string& session_name, CollaborationMode mode) {
2761 current_mode_ = mode;
2762
2763 if (mode == CollaborationMode::kLocal) {
2764 auto session_or = local_coordinator_->HostSession(session_name);
2765 if (!session_or.ok())
2766 return session_or.status();
2767
2768 SessionInfo info;
2769 info.session_id = session_or->session_id;
2770 info.session_name = session_or->session_name;
2771 info.participants = session_or->participants;
2772
2773 in_session_ = true;
2777
2778 if (toast_manager_) {
2780 absl::StrFormat("Hosting local session: %s", session_name),
2781 ToastType::kSuccess, 3.0f);
2782 }
2783 return info;
2784 }
2785
2786#ifdef YAZE_WITH_GRPC
2787 if (mode == CollaborationMode::kNetwork) {
2788 if (!network_coordinator_) {
2789 return absl::FailedPreconditionError(
2790 "Network coordinator not initialized. Connect to a server first.");
2791 }
2792
2793 const char* username = std::getenv("USER");
2794 if (!username) {
2795 username = std::getenv("USERNAME");
2796 }
2797 if (!username) {
2798 username = "unknown";
2799 }
2800
2801 auto session_or = network_coordinator_->HostSession(session_name, username);
2802 if (!session_or.ok())
2803 return session_or.status();
2804
2805 SessionInfo info;
2806 info.session_id = session_or->session_id;
2807 info.session_name = session_or->session_name;
2808 info.participants = session_or->participants;
2809
2810 in_session_ = true;
2814
2815 if (toast_manager_) {
2817 absl::StrFormat("Hosting network session: %s", session_name),
2818 ToastType::kSuccess, 3.0f);
2819 }
2820
2821 return info;
2822 }
2823#endif
2824
2825 return absl::InvalidArgumentError("Unsupported collaboration mode");
2826}
2827
2828absl::StatusOr<AgentEditor::SessionInfo> AgentEditor::JoinSession(
2829 const std::string& session_code, CollaborationMode mode) {
2830 current_mode_ = mode;
2831
2832 if (mode == CollaborationMode::kLocal) {
2833 auto session_or = local_coordinator_->JoinSession(session_code);
2834 if (!session_or.ok())
2835 return session_or.status();
2836
2837 SessionInfo info;
2838 info.session_id = session_or->session_id;
2839 info.session_name = session_or->session_name;
2840 info.participants = session_or->participants;
2841
2842 in_session_ = true;
2846
2847 if (toast_manager_) {
2849 absl::StrFormat("Joined local session: %s", session_code),
2850 ToastType::kSuccess, 3.0f);
2851 }
2852
2853 return info;
2854 }
2855
2856#ifdef YAZE_WITH_GRPC
2857 if (mode == CollaborationMode::kNetwork) {
2858 if (!network_coordinator_) {
2859 return absl::FailedPreconditionError(
2860 "Network coordinator not initialized. Connect to a server first.");
2861 }
2862
2863 const char* username = std::getenv("USER");
2864 if (!username) {
2865 username = std::getenv("USERNAME");
2866 }
2867 if (!username) {
2868 username = "unknown";
2869 }
2870
2871 auto session_or = network_coordinator_->JoinSession(session_code, username);
2872 if (!session_or.ok())
2873 return session_or.status();
2874
2875 SessionInfo info;
2876 info.session_id = session_or->session_id;
2877 info.session_name = session_or->session_name;
2878 info.participants = session_or->participants;
2879
2880 in_session_ = true;
2884
2885 if (toast_manager_) {
2887 absl::StrFormat("Joined network session: %s", session_code),
2888 ToastType::kSuccess, 3.0f);
2889 }
2890
2891 return info;
2892 }
2893#endif
2894
2895 return absl::InvalidArgumentError("Unsupported collaboration mode");
2896}
2897
2899 if (!in_session_) {
2900 return absl::FailedPreconditionError("Not in a session");
2901 }
2902
2904 auto status = local_coordinator_->LeaveSession();
2905 if (!status.ok())
2906 return status;
2907 }
2908#ifdef YAZE_WITH_GRPC
2910 if (network_coordinator_) {
2911 auto status = network_coordinator_->LeaveSession();
2912 if (!status.ok())
2913 return status;
2914 }
2915 }
2916#endif
2917
2918 in_session_ = false;
2919 current_session_id_.clear();
2920 current_session_name_.clear();
2921 current_participants_.clear();
2922
2923 if (toast_manager_) {
2924 toast_manager_->Show("Left collaboration session", ToastType::kInfo, 3.0f);
2925 }
2926
2927 return absl::OkStatus();
2928}
2929
2930absl::StatusOr<AgentEditor::SessionInfo> AgentEditor::RefreshSession() {
2931 if (!in_session_) {
2932 return absl::FailedPreconditionError("Not in a session");
2933 }
2934
2936 auto session_or = local_coordinator_->RefreshSession();
2937 if (!session_or.ok())
2938 return session_or.status();
2939
2940 SessionInfo info;
2941 info.session_id = session_or->session_id;
2942 info.session_name = session_or->session_name;
2943 info.participants = session_or->participants;
2945 return info;
2946 }
2947
2948 SessionInfo info;
2952 return info;
2953}
2954
2955absl::Status AgentEditor::CaptureSnapshot(std::filesystem::path* output_path,
2956 const CaptureConfig& config) {
2957#ifdef YAZE_WITH_GRPC
2958 using yaze::test::CaptureActiveWindow;
2959 using yaze::test::CaptureHarnessScreenshot;
2960 using yaze::test::CaptureWindowByName;
2961
2962 absl::StatusOr<yaze::test::ScreenshotArtifact> result;
2963 switch (config.mode) {
2965 result = CaptureHarnessScreenshot("");
2966 break;
2968 result = CaptureActiveWindow("");
2969 if (!result.ok()) {
2970 result = CaptureHarnessScreenshot("");
2971 }
2972 break;
2974 if (!config.specific_window_name.empty()) {
2975 result = CaptureWindowByName(config.specific_window_name, "");
2976 } else {
2977 result = CaptureActiveWindow("");
2978 }
2979 if (!result.ok()) {
2980 result = CaptureHarnessScreenshot("");
2981 }
2982 break;
2983 }
2984 }
2985
2986 if (!result.ok()) {
2987 return result.status();
2988 }
2989 *output_path = result->file_path;
2990 return absl::OkStatus();
2991#else
2992 (void)output_path;
2993 (void)config;
2994 return absl::UnimplementedError("Screenshot capture requires YAZE_WITH_GRPC");
2995#endif
2996}
2997
2998absl::Status AgentEditor::SendToGemini(const std::filesystem::path& image_path,
2999 const std::string& prompt) {
3000#ifdef YAZE_WITH_GRPC
3001 const char* api_key = current_profile_.gemini_api_key.empty()
3002 ? std::getenv("GEMINI_API_KEY")
3004 if (!api_key || std::strlen(api_key) == 0) {
3005 return absl::FailedPreconditionError(
3006 "Gemini API key not configured (set GEMINI_API_KEY)");
3007 }
3008
3009 cli::GeminiConfig config;
3010 config.api_key = api_key;
3011 config.model = current_profile_.model.empty() ? "gemini-2.5-flash"
3014
3015 cli::GeminiAIService gemini_service(config);
3016 auto response =
3017 gemini_service.GenerateMultimodalResponse(image_path.string(), prompt);
3018 if (!response.ok()) {
3019 return response.status();
3020 }
3021
3022 if (agent_chat_) {
3023 auto* service = agent_chat_->GetAgentService();
3024 if (service) {
3025 auto history = service->GetHistory();
3026 cli::agent::ChatMessage agent_msg;
3028 agent_msg.message = response->text_response;
3029 agent_msg.timestamp = absl::Now();
3030 history.push_back(agent_msg);
3031 service->ReplaceHistory(history);
3032 }
3033 }
3034
3035 if (toast_manager_) {
3036 toast_manager_->Show("Gemini vision response added to chat",
3037 ToastType::kSuccess, 2.5f);
3038 }
3039 return absl::OkStatus();
3040#else
3041 (void)image_path;
3042 (void)prompt;
3043 return absl::UnimplementedError("Gemini integration requires YAZE_WITH_GRPC");
3044#endif
3045}
3046
3047#ifdef YAZE_WITH_GRPC
3048absl::Status AgentEditor::ConnectToServer(const std::string& server_url) {
3049 try {
3050 network_coordinator_ =
3051 std::make_unique<NetworkCollaborationCoordinator>(server_url);
3052
3053 if (toast_manager_) {
3055 absl::StrFormat("Connected to server: %s", server_url),
3056 ToastType::kSuccess, 3.0f);
3057 }
3058
3059 return absl::OkStatus();
3060 } catch (const std::exception& e) {
3061 return absl::InternalError(
3062 absl::StrFormat("Failed to connect to server: %s", e.what()));
3063 }
3064}
3065
3066void AgentEditor::DisconnectFromServer() {
3068 LeaveSession();
3069 }
3070 network_coordinator_.reset();
3071
3072 if (toast_manager_) {
3073 toast_manager_->Show("Disconnected from server", ToastType::kInfo, 2.5f);
3074 }
3075}
3076
3077bool AgentEditor::IsConnectedToServer() const {
3078 return network_coordinator_ && network_coordinator_->IsConnected();
3079}
3080#endif
3081
3083 return in_session_;
3084}
3085
3089
3090std::optional<AgentEditor::SessionInfo> AgentEditor::GetCurrentSession() const {
3091 if (!in_session_)
3092 return std::nullopt;
3095}
3096
3098
3100
3101} // namespace editor
3102} // namespace yaze
static absl::StatusOr< std::string > LoadTextFile(const std::string &relative_path)
The Rom class is used to load, save, and modify Rom data. This is a generic SNES ROM container and do...
Definition rom.h:28
bool is_loaded() const
Definition rom.h:132
absl::StatusOr< AgentResponse > GenerateMultimodalResponse(const std::string &, const std::string &)
void RefreshModelCache(bool force)
std::unique_ptr< cli::AIService > model_service_
void SetCurrentProfile(const BotProfile &profile)
void ApplyConfig(const AgentConfig &config)
void InitializeWithDependencies(ToastManager *toast_manager, ProposalDrawer *proposal_drawer, Rom *rom)
absl::Status SaveBuilderBlueprint(const std::filesystem::path &path)
absl::StatusOr< SessionInfo > JoinSession(const std::string &session_code, CollaborationMode mode=CollaborationMode::kLocal)
void SetChatActive(bool active)
std::unique_ptr< MesenScreenshotPanel > mesen_screenshot_panel_
KnowledgePanelCallback knowledge_panel_callback_
absl::StatusOr< BotProfile > JsonToProfile(const std::string &json) const
std::unique_ptr< OracleStateLibraryPanel > oracle_state_panel_
absl::StatusOr< SessionInfo > RefreshSession()
absl::Status ImportProfile(const std::filesystem::path &path)
void ApplyModelPreset(const ModelPreset &preset)
ProposalDrawer * proposal_drawer_
std::vector< cli::agent::ChatMessage > cached_history_
AgentBuilderState builder_state_
ModelServiceKey last_model_service_key_
absl::Status Save() override
std::string current_session_name_
CollaborationMode GetCurrentMode() const
std::unique_ptr< FeatureFlagEditorPanel > feature_flag_panel_
absl::Status SendToGemini(const std::filesystem::path &image_path, const std::string &prompt)
std::unique_ptr< AgentConfigPanel > config_panel_
absl::Status EnsureProfilesDirectory()
void SetContext(AgentUIContext *context)
std::unique_ptr< MesenDebugPanel > mesen_debug_panel_
AgentUIContext * context_
absl::Status LoadBuilderBlueprint(const std::filesystem::path &path)
ProfileUiState profile_ui_state_
absl::Status ExportProfile(const BotProfile &profile, const std::filesystem::path &path)
CollaborationMode current_mode_
absl::Status SaveBotProfile(const BotProfile &profile)
absl::Status DeleteBotProfile(const std::string &name)
std::filesystem::path GetProfilesDirectory() const
ToastManager * toast_manager_
AgentConfig GetCurrentConfig() const
std::unique_ptr< TextEditor > common_tiles_editor_
absl::StatusOr< SessionInfo > HostSession(const std::string &session_name, CollaborationMode mode=CollaborationMode::kLocal)
std::string ProfileToJson(const BotProfile &profile) const
std::unique_ptr< TextEditor > prompt_editor_
std::unique_ptr< AgentCollaborationCoordinator > local_coordinator_
std::unique_ptr< SramViewerPanel > sram_viewer_panel_
absl::Status CaptureSnapshot(std::filesystem::path *output_path, const CaptureConfig &config)
absl::Status Load() override
absl::Status LoadBotProfile(const std::string &name)
std::vector< BotProfile > GetAllProfiles() const
std::vector< std::string > current_participants_
std::vector< std::string > last_local_model_paths_
void ApplyConfigFromContext(const AgentConfigState &config)
std::unique_ptr< AgentChat > agent_chat_
absl::Status Update() override
void ApplyUserSettingsDefaults(bool force=false)
std::unique_ptr< ManifestPanel > manifest_panel_
std::vector< BotProfile > loaded_profiles_
std::optional< SessionInfo > GetCurrentSession() const
Unified context for agent UI components.
AgentConfigState & agent_config()
EditorContext context() const
Definition editor.h:300
Rom * rom() const
Definition editor.h:296
EditorDependencies dependencies_
Definition editor.h:306
EditorType type_
Definition editor.h:305
void RegisterEditorPanel(std::unique_ptr< EditorPanel > panel)
Register an EditorPanel instance for central drawing.
ImGui drawer for displaying and managing agent proposals.
void Show(const std::string &message, ToastType type=ToastType::kInfo, float ttl_seconds=3.0f)
RAII guard for ImGui style colors.
Definition style_guard.h:27
static TestManager & Get()
static absl::StatusOr< std::filesystem::path > GetTempDirectory()
Get a temporary directory for the application.
static absl::StatusOr< std::filesystem::path > GetAppDataSubdirectory(const std::string &subdir)
Get a subdirectory within the app data folder.
static std::filesystem::path GetHomeDirectory()
Get the user's home directory in a cross-platform way.
#define ICON_MD_FOLDER_OPEN
Definition icons.h:813
#define ICON_MD_LINK
Definition icons.h:1090
#define ICON_MD_INFO
Definition icons.h:993
#define ICON_MD_CHAT
Definition icons.h:394
#define ICON_MD_STORAGE
Definition icons.h:1865
#define ICON_MD_WARNING
Definition icons.h:2123
#define ICON_MD_NOTE_ADD
Definition icons.h:1330
#define ICON_MD_STAR
Definition icons.h:1848
#define ICON_MD_REFRESH
Definition icons.h:1572
#define ICON_MD_EDIT
Definition icons.h:645
#define ICON_MD_AUTO_FIX_HIGH
Definition icons.h:218
#define ICON_MD_GRID_ON
Definition icons.h:896
#define ICON_MD_LIST
Definition icons.h:1094
#define ICON_MD_ADD
Definition icons.h:86
#define ICON_MD_FILE_COPY
Definition icons.h:743
#define ICON_MD_CHECK_CIRCLE
Definition icons.h:400
#define ICON_MD_SAVE
Definition icons.h:1644
#define ICON_MD_DELETE
Definition icons.h:530
#define ICON_MD_FOLDER
Definition icons.h:809
#define ICON_MD_DELETE_FOREVER
Definition icons.h:531
#define ICON_MD_CLOSE
Definition icons.h:418
#define ICON_MD_ANALYTICS
Definition icons.h:154
#define ICON_MD_HISTORY
Definition icons.h:946
absl::StatusOr< std::unique_ptr< AIService > > CreateAIServiceStrict(const AIServiceConfig &config)
std::string NormalizeOpenAiBaseUrl(std::string base)
void RenderStatusIndicator(const char *label, bool active)
bool StyledButton(const char *label, const ImVec4 &color, const ImVec2 &size)
const AgentUITheme & GetTheme()
void RenderSectionHeader(const char *icon, const char *label, const ImVec4 &color)
void RenderProviderBadge(const char *provider)
bool IsLocalOrTrustedEndpoint(const std::string &base_url, bool allow_insecure)
void ApplyHostPresetToProfile(AgentEditor::BotProfile *profile, const UserSettings::Preferences::AiHost &host, const UserSettings::Preferences *prefs)
void CollectOllamaManifestModels(const std::filesystem::path &models_root, std::vector< std::string > *output, std::unordered_set< std::string > *seen)
void CopyStringToBuffer(const std::string &src, char(&dest)[N])
void CollectModelFiles(const std::filesystem::path &base_path, std::vector< std::string > *output, std::unordered_set< std::string > *seen)
bool IsLocalOpenAiBaseUrl(const std::string &base_url)
std::string BuildTagsString(const std::vector< std::string > &tags)
std::string ResolveHostApiKey(const UserSettings::Preferences *prefs, const UserSettings::Preferences::AiHost &host)
std::optional< std::string > LoadKeychainValue(const std::string &key)
bool ProbeOllamaHost(const std::string &base_url)
bool IsTailscaleEndpoint(const std::string &base_url)
bool HasModelExtension(const std::filesystem::path &path)
bool ContainsText(const std::string &haystack, const std::string &needle)
bool IsOllamaModelsPath(const std::filesystem::path &path)
void AddUniqueModelName(const std::string &name, std::vector< std::string > *output, std::unordered_set< std::string > *seen)
bool StartsWithText(const std::string &text, const std::string &prefix)
bool ProbeOpenAICompatible(const std::string &base_url)
bool ProbeHttpEndpoint(const std::string &base_url, const char *path)
std::vector< std::string > CollectLocalModelNames(const UserSettings::Preferences *prefs)
void ColoredTextF(const ImVec4 &color, const char *fmt,...)
static const LanguageDefinition & CPlusPlus()
std::function< void(const AgentConfigState &) update_config)
std::function< void(bool force)> refresh_models
std::function< void(const ModelPreset &) apply_preset)
Agent configuration state.
std::vector< std::string > model_chain
std::vector< ModelPreset > model_presets
std::vector< std::string > favorite_models
struct yaze::editor::AgentEditor::AgentBuilderState::ToolPlan tools
std::vector< std::string > tags
std::vector< std::string > participants
project::YazeProject * project
Definition editor.h:167
std::vector< cli::ModelInfo > available_models
Model preset for quick switching.
std::vector< std::string > ai_model_paths