yaze 0.3.2
Link to the Past ROM Editor
 
Loading...
Searching...
No Matches
openai_ai_service.cc
Go to the documentation of this file.
2
#include <algorithm>
#include <atomic>
#include <cstdlib>
#include <iostream>
#include <map>
#include <mutex>
#include <sstream>
#include <string>
#include <vector>

#include "absl/strings/match.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "absl/strings/str_split.h"
#include "absl/strings/strip.h"
#include "absl/time/clock.h"
#include "absl/time/time.h"

#include "util/platform_paths.h"
20
21#if defined(__APPLE__)
22#include <TargetConditionals.h>
23#endif
24
25#if defined(__APPLE__) && (TARGET_OS_IPHONE == 1 || TARGET_IPHONE_SIMULATOR == 1)
27#define YAZE_AI_IOS_URLSESSION 1
28#endif
29
30#ifdef YAZE_WITH_JSON
31#include <filesystem>
32#include <fstream>
33
34#include "httplib.h"
35#include "nlohmann/json.hpp"
36
37// OpenSSL initialization for HTTPS support
38#ifdef CPPHTTPLIB_OPENSSL_SUPPORT
39#include <openssl/crypto.h>
40#include <openssl/err.h>
41#include <openssl/ssl.h>
42
43// OpenSSL initialization guards (local to this TU)
44static std::atomic<bool> g_openssl_initialized{false};
45static std::mutex g_openssl_init_mutex;
46
47static void EnsureOpenSSLInitialized() {
48 std::lock_guard<std::mutex> lock(g_openssl_init_mutex);
49 if (!g_openssl_initialized.exchange(true)) {
50 OPENSSL_init_ssl(
51 OPENSSL_INIT_LOAD_SSL_STRINGS | OPENSSL_INIT_LOAD_CRYPTO_STRINGS,
52 nullptr);
53 std::cerr << "✓ OpenSSL initialized for HTTPS support" << std::endl;
54 }
55}
56#endif
57#endif
58
59namespace yaze {
60namespace cli {
61
62#ifdef YAZE_AI_RUNTIME_AVAILABLE
63
// Constructs the OpenAI-compatible chat service from `config`.
//
// Startup sequence (order matters):
//   1. Optionally initialize OpenSSL so HTTPS requests work.
//   2. Load the command catalogue that backs the prompt builder.
//   3. Resolve the system instruction: an explicit value in `config` wins;
//      otherwise a versioned prompt file from the asset directory is used;
//      otherwise an instruction is generated programmatically.
OpenAIAIService::OpenAIAIService(const OpenAIConfig& config)
    : function_calling_enabled_(config.use_function_calling), config_(config) {
  if (config_.verbose) {
    std::cerr << "[DEBUG] Initializing OpenAI service..." << std::endl;
    std::cerr << "[DEBUG] Model: " << config_.model << std::endl;
    std::cerr << "[DEBUG] Function calling: "
              << (function_calling_enabled_ ? "enabled" : "disabled")
              << std::endl;
  }

#ifdef CPPHTTPLIB_OPENSSL_SUPPORT
  EnsureOpenSSLInitialized();
  if (config_.verbose) {
    std::cerr << "[DEBUG] OpenSSL initialized for HTTPS" << std::endl;
  }
#endif

  // Load command documentation into prompt builder. The catalogue file is
  // versioned alongside the system prompt ("v2" selects the newer layout).
  std::string catalogue_path = config_.prompt_version == "v2"
                                   ? "assets/agent/prompt_catalogue_v2.yaml"
                                   : "assets/agent/prompt_catalogue.yaml";
  if (auto status = prompt_builder_.LoadResourceCatalogue(catalogue_path);
      !status.ok()) {
    // Non-fatal: the service still works, but prompts lack command docs.
    std::cerr << "⚠️ Failed to load agent prompt catalogue: "
              << status.message() << std::endl;
  }

  if (config_.system_instruction.empty()) {
    // Load system prompt file matching the configured prompt version.
    std::string prompt_file;
    if (config_.prompt_version == "v3") {
      prompt_file = "agent/system_prompt_v3.txt";
    } else if (config_.prompt_version == "v2") {
      prompt_file = "agent/system_prompt_v2.txt";
    } else {
      prompt_file = "agent/system_prompt.txt";
    }

    auto prompt_path = util::PlatformPaths::FindAsset(prompt_file);
    if (prompt_path.ok()) {
      std::ifstream file(prompt_path->string());
      if (file.good()) {
        // Slurp the whole prompt file into the instruction string.
        std::stringstream buffer;
        buffer << file.rdbuf();
        config_.system_instruction = buffer.str();
        if (config_.verbose) {
          std::cerr << "[DEBUG] Loaded prompt: " << prompt_path->string()
                    << std::endl;
        }
      }
    }

    // Final fallback: generate the instruction from the prompt builder.
    if (config_.system_instruction.empty()) {
      config_.system_instruction = BuildSystemInstruction();
    }
  }

  if (config_.verbose) {
    std::cerr << "[DEBUG] OpenAI service initialized" << std::endl;
  }
}
125
// Toggles OpenAI-native function calling; tool schemas are attached to
// requests only while enabled.
void OpenAIAIService::EnableFunctionCalling(bool enable) {
  function_calling_enabled_ = enable;
}
129
130std::vector<std::string> OpenAIAIService::GetAvailableTools() const {
131 return {"resource-list", "resource-search",
132 "dungeon-list-sprites", "dungeon-describe-room",
133 "overworld-find-tile", "overworld-describe-map",
134 "overworld-list-warps"};
135}
136
// Returns a JSON array (serialized as a string) of function-call schemas for
// the tools exposed to the model.
//
// Resolution order:
//   1. Schemas generated by the prompt builder, when it produces any.
//   2. The bundled "agent/function_schemas.json" asset.
//   3. "[]" when neither source is available or parsing fails.
std::string OpenAIAIService::BuildFunctionCallSchemas() {
#ifndef YAZE_WITH_JSON
  return "[]";
#else
  std::string schemas = prompt_builder_.BuildFunctionCallSchemas();
  if (!schemas.empty() && schemas != "[]") {
    return schemas;
  }

  auto schema_path_or =
      util::PlatformPaths::FindAsset("agent/function_schemas.json");

  if (!schema_path_or.ok()) {
    return "[]";
  }

  std::ifstream file(schema_path_or->string());
  if (!file.is_open()) {
    return "[]";
  }

  // Round-trip through nlohmann::json so a malformed asset is rejected here
  // rather than passed through to the API request.
  try {
    nlohmann::json schemas_json;
    file >> schemas_json;
    return schemas_json.dump();
  } catch (const nlohmann::json::exception& e) {
    std::cerr << "⚠️ Failed to parse function schemas JSON: " << e.what()
              << std::endl;
    return "[]";
  }
#endif
}
169
// Generates a system instruction from the prompt builder's resource
// catalogue; used as the last-resort fallback when neither the config nor an
// on-disk prompt file supplies one.
std::string OpenAIAIService::BuildSystemInstruction() {
  return prompt_builder_.BuildSystemInstruction();
}
173
// Forwards the ROM pointer to the prompt builder so prompts can reference
// ROM-specific data. Non-owning; the caller manages the ROM's lifetime.
void OpenAIAIService::SetRomContext(Rom* rom) {
  prompt_builder_.SetRom(rom);
}
177
178absl::StatusOr<std::vector<ModelInfo>> OpenAIAIService::ListAvailableModels() {
179#ifndef YAZE_WITH_JSON
180 return absl::UnimplementedError("OpenAI AI service requires JSON support");
181#else
182 const bool is_openai_cloud =
183 absl::StrContains(config_.base_url, "api.openai.com");
184 if (config_.api_key.empty() && is_openai_cloud) {
185 // Return default known models if API key is missing
186 std::vector<ModelInfo> defaults = {
187 {.name = "gpt-4o",
188 .display_name = "GPT-4o",
189 .provider = "openai",
190 .description = "Most capable GPT-4 model"},
191 {.name = "gpt-4o-mini",
192 .display_name = "GPT-4o Mini",
193 .provider = "openai",
194 .description = "Fast and cost-effective"},
195 {.name = "gpt-4-turbo",
196 .display_name = "GPT-4 Turbo",
197 .provider = "openai",
198 .description = "GPT-4 with larger context"},
199 {.name = "gpt-3.5-turbo",
200 .display_name = "GPT-3.5 Turbo",
201 .provider = "openai",
202 .description = "Fast and efficient"}};
203 return defaults;
204 }
205
206 try {
207 if (config_.verbose) {
208 std::cerr << "[DEBUG] Listing OpenAI models..." << std::endl;
209 }
210
211 std::string response_str;
212#if defined(YAZE_AI_IOS_URLSESSION)
213 std::map<std::string, std::string> headers;
214 if (!config_.api_key.empty()) {
215 headers.emplace("Authorization", "Bearer " + config_.api_key);
216 }
217 auto resp_or = ios::UrlSessionHttpRequest(
218 "GET", config_.base_url + "/v1/models", headers, "", 8000);
219 if (!resp_or.ok()) {
220 if (config_.verbose) {
221 std::cerr << "[DEBUG] OpenAI /v1/models failed: "
222 << resp_or.status().message() << std::endl;
223 }
224 // Return defaults on failure so the UI remains usable.
225 std::vector<ModelInfo> defaults = {
226 {.name = "gpt-4o-mini",
227 .display_name = "GPT-4o Mini",
228 .provider = "openai"},
229 {.name = "gpt-4o", .display_name = "GPT-4o", .provider = "openai"},
230 {.name = "gpt-3.5-turbo",
231 .display_name = "GPT-3.5 Turbo",
232 .provider = "openai"}};
233 return defaults;
234 }
235 if (resp_or->status_code != 200) {
236 if (config_.verbose) {
237 std::cerr << "[DEBUG] OpenAI /v1/models HTTP " << resp_or->status_code
238 << std::endl;
239 }
240 std::vector<ModelInfo> defaults = {
241 {.name = "gpt-4o-mini",
242 .display_name = "GPT-4o Mini",
243 .provider = "openai"},
244 {.name = "gpt-4o", .display_name = "GPT-4o", .provider = "openai"},
245 {.name = "gpt-3.5-turbo",
246 .display_name = "GPT-3.5 Turbo",
247 .provider = "openai"}};
248 return defaults;
249 }
250 response_str = resp_or->body;
251#else
252 // Use curl to list models from the API
253 std::string auth_header = config_.api_key.empty()
254 ? ""
255 : "-H 'Authorization: Bearer " + config_.api_key + "' ";
256 std::string curl_cmd =
257 "curl -s -X GET '" + config_.base_url + "/v1/models' " +
258 auth_header + "2>&1";
259
260#ifdef _WIN32
261 FILE* pipe = _popen(curl_cmd.c_str(), "r");
262#else
263 FILE* pipe = popen(curl_cmd.c_str(), "r");
264#endif
265 if (!pipe) {
266 return absl::InternalError("Failed to execute curl command");
267 }
268
269 char buffer[4096];
270 while (fgets(buffer, sizeof(buffer), pipe) != nullptr) {
271 response_str += buffer;
272 }
273
274#ifdef _WIN32
275 _pclose(pipe);
276#else
277 pclose(pipe);
278#endif
279#endif // YAZE_AI_IOS_URLSESSION
280
281 auto models_json = nlohmann::json::parse(response_str, nullptr, false);
282 if (models_json.is_discarded()) {
283 return absl::InternalError("Failed to parse OpenAI models JSON");
284 }
285
286 if (!models_json.contains("data")) {
287 // Return defaults on error
288 std::vector<ModelInfo> defaults = {
289 {.name = "gpt-4o-mini",
290 .display_name = "GPT-4o Mini",
291 .provider = "openai"},
292 {.name = "gpt-4o", .display_name = "GPT-4o", .provider = "openai"},
293 {.name = "gpt-3.5-turbo",
294 .display_name = "GPT-3.5 Turbo",
295 .provider = "openai"}};
296 return defaults;
297 }
298
299 std::vector<ModelInfo> models;
300 for (const auto& m : models_json["data"]) {
301 std::string id = m.value("id", "");
302
303 // Filter for chat models (gpt-4*, gpt-3.5-turbo*, o1*, chatgpt*)
304 // For local servers (LM Studio), we accept all models.
305 bool is_local = !absl::StrContains(config_.base_url, "api.openai.com");
306
307 if (is_local || absl::StartsWith(id, "gpt-4") || absl::StartsWith(id, "gpt-3.5") ||
308 absl::StartsWith(id, "o1") || absl::StartsWith(id, "chatgpt")) {
309 ModelInfo info;
310 info.name = id;
311 info.display_name = id;
312 info.provider = "openai";
313 info.family = is_local ? "local" : "gpt";
314 info.is_local = is_local;
315
316 // Set display name based on model
317 if (id == "gpt-4o")
318 info.display_name = "GPT-4o";
319 else if (id == "gpt-4o-mini")
320 info.display_name = "GPT-4o Mini";
321 else if (id == "gpt-4-turbo")
322 info.display_name = "GPT-4 Turbo";
323 else if (id == "gpt-3.5-turbo")
324 info.display_name = "GPT-3.5 Turbo";
325 else if (id == "o1-preview")
326 info.display_name = "o1 Preview";
327 else if (id == "o1-mini")
328 info.display_name = "o1 Mini";
329
330 models.push_back(std::move(info));
331 }
332 }
333 return models;
334
335 } catch (const std::exception& e) {
336 return absl::InternalError(
337 absl::StrCat("Failed to list models: ", e.what()));
338 }
339#endif
340}
341
342absl::Status OpenAIAIService::CheckAvailability() {
343#ifndef YAZE_WITH_JSON
344 return absl::UnimplementedError(
345 "OpenAI AI service requires JSON support. Build with "
346 "-DYAZE_WITH_JSON=ON");
347#else
348 try {
349 // LMStudio and other local servers don't require API keys
350 bool is_local_server = config_.base_url != "https://api.openai.com";
351 if (config_.api_key.empty() && !is_local_server) {
352 return absl::FailedPreconditionError(
353 "❌ OpenAI API key not configured\n"
354 " Set OPENAI_API_KEY environment variable\n"
355 " Get your API key at: https://platform.openai.com/api-keys\n"
356 " For LMStudio, use --openai_base_url=http://localhost:1234");
357 }
358
359 // Test API connectivity with a simple request
360#if defined(YAZE_AI_IOS_URLSESSION)
361 std::map<std::string, std::string> headers;
362 if (!config_.api_key.empty()) {
363 headers.emplace("Authorization", "Bearer " + config_.api_key);
364 }
365 auto resp_or = ios::UrlSessionHttpRequest(
366 "GET", config_.base_url + "/v1/models", headers, "", 8000);
367 if (!resp_or.ok()) {
368 return absl::UnavailableError(
369 absl::StrCat("❌ Cannot reach OpenAI API\n ",
370 resp_or.status().message()));
371 }
372 if (resp_or->status_code == 401) {
373 return absl::PermissionDeniedError(
374 "❌ Invalid OpenAI API key\n"
375 " Verify your key at: https://platform.openai.com/api-keys");
376 }
377 if (resp_or->status_code != 200) {
378 return absl::InternalError(absl::StrCat(
379 "❌ OpenAI API error: ", resp_or->status_code, "\n ",
380 resp_or->body));
381 }
382#else
383 httplib::Client cli(config_.base_url);
384 cli.set_connection_timeout(5, 0);
385
386 httplib::Headers headers = {};
387 if (!config_.api_key.empty()) {
388 headers.emplace("Authorization", "Bearer " + config_.api_key);
389 }
390
391 auto res = cli.Get("/v1/models", headers);
392
393 if (!res) {
394 return absl::UnavailableError(
395 "❌ Cannot reach OpenAI API\n"
396 " Check your internet connection");
397 }
398
399 if (res->status == 401) {
400 return absl::PermissionDeniedError(
401 "❌ Invalid OpenAI API key\n"
402 " Verify your key at: https://platform.openai.com/api-keys");
403 }
404
405 if (res->status != 200) {
406 return absl::InternalError(absl::StrCat(
407 "❌ OpenAI API error: ", res->status, "\n ", res->body));
408 }
409#endif
410
411 return absl::OkStatus();
412 } catch (const std::exception& e) {
413 return absl::InternalError(
414 absl::StrCat("Exception during availability check: ", e.what()));
415 }
416#endif
417}
418
419absl::StatusOr<AgentResponse> OpenAIAIService::GenerateResponse(
420 const std::string& prompt) {
421 return GenerateResponse(
422 {{{agent::ChatMessage::Sender::kUser, prompt, absl::Now()}}});
423}
424
// Sends the chat `history` to the configured OpenAI-compatible endpoint and
// parses the reply into an AgentResponse.
//
// Only the last 10 messages are forwarded (context-window control), always
// preceded by the system instruction. When function calling is enabled, the
// tool schemas are attached in OpenAI's "tools" format. On Apple mobile
// targets the request goes through NSURLSession; elsewhere it is shelled out
// to curl with the request body staged in a temp file.
absl::StatusOr<AgentResponse> OpenAIAIService::GenerateResponse(
    const std::vector<agent::ChatMessage>& history) {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError(
      "OpenAI AI service requires JSON support. Build with "
      "-DYAZE_WITH_JSON=ON");
#else
  if (history.empty()) {
    return absl::InvalidArgumentError("History cannot be empty.");
  }

  // An API key is mandatory only for the OpenAI cloud; local servers
  // (e.g. LM Studio) accept unauthenticated requests.
  const bool is_openai_cloud =
      absl::StrContains(config_.base_url, "api.openai.com");
  if (config_.api_key.empty() && is_openai_cloud) {
    return absl::FailedPreconditionError("OpenAI API key not configured");
  }

  // Wall-clock start; reported back as latency_seconds.
  absl::Time request_start = absl::Now();

  try {
    if (config_.verbose) {
      std::cerr << "[DEBUG] Using curl for OpenAI HTTPS request" << std::endl;
      std::cerr << "[DEBUG] Processing " << history.size()
                << " messages in history" << std::endl;
    }

    // Build messages array for OpenAI format.
    nlohmann::json messages = nlohmann::json::array();

    // Add system message first, as the API expects.
    messages.push_back(
        {{"role", "system"}, {"content", config_.system_instruction}});

    // Add conversation history (up to last 10 messages for context window).
    // start_idx is clamped to >= 0, so the signed->unsigned loop init below
    // is safe.
    int start_idx = std::max(0, static_cast<int>(history.size()) - 10);
    for (size_t i = start_idx; i < history.size(); ++i) {
      const auto& msg = history[i];
      // Any non-user sender is mapped to "assistant".
      std::string role = (msg.sender == agent::ChatMessage::Sender::kUser)
                             ? "user"
                             : "assistant";

      messages.push_back({{"role", role}, {"content", msg.message}});
    }

    // Build request body.
    nlohmann::json request_body = {{"model", config_.model},
                                   {"messages", messages},
                                   {"temperature", config_.temperature},
                                   {"max_tokens", config_.max_output_tokens}};

    // Add function calling tools if enabled. Schema errors are non-fatal:
    // the request simply goes out without tools.
    if (function_calling_enabled_) {
      try {
        std::string schemas_str = BuildFunctionCallSchemas();
        if (config_.verbose) {
          std::cerr << "[DEBUG] Function calling schemas: "
                    << schemas_str.substr(0, 200) << "..." << std::endl;
        }

        nlohmann::json schemas = nlohmann::json::parse(schemas_str);

        if (schemas.is_array() && !schemas.empty()) {
          // Convert to OpenAI tools format: wrap each schema in
          // {"type":"function","function":<schema>}.
          nlohmann::json tools = nlohmann::json::array();
          for (const auto& schema : schemas) {
            tools.push_back({{"type", "function"}, {"function", schema}});
          }
          request_body["tools"] = tools;
        }
      } catch (const nlohmann::json::exception& e) {
        std::cerr << "⚠️ Failed to parse function schemas: " << e.what()
                  << std::endl;
      }
    }

    if (config_.verbose) {
      std::cerr << "[DEBUG] Sending " << messages.size()
                << " messages to OpenAI" << std::endl;
    }

    std::string response_str;
#if defined(YAZE_AI_IOS_URLSESSION)
    std::map<std::string, std::string> headers;
    headers.emplace("Content-Type", "application/json");
    if (!config_.api_key.empty()) {
      headers.emplace("Authorization", "Bearer " + config_.api_key);
    }
    // 60s timeout: completions can take much longer than the models probe.
    auto resp_or = ios::UrlSessionHttpRequest(
        "POST", config_.base_url + "/v1/chat/completions", headers,
        request_body.dump(), 60000);
    if (!resp_or.ok()) {
      return resp_or.status();
    }
    if (resp_or->status_code == 401) {
      return absl::PermissionDeniedError(
          "❌ Invalid OpenAI API key\n"
          " Verify your key at: https://platform.openai.com/api-keys");
    }
    if (resp_or->status_code != 200) {
      return absl::InternalError(absl::StrCat(
          "❌ OpenAI API error: ", resp_or->status_code, "\n ",
          resp_or->body));
    }
    response_str = resp_or->body;
#else
    // Write request body to temp file so curl can read it via -d @file.
    // NOTE(review): a fixed path in /tmp is POSIX-only and racy when two
    // processes run concurrently — consider a unique per-process temp file.
    // The _WIN32 popen branch below would not find /tmp on Windows.
    std::string temp_file = "/tmp/openai_request.json";
    std::ofstream out(temp_file);
    out << request_body.dump();
    out.close();

    // Use curl to make the request.
    // NOTE(review): api_key/base_url are interpolated into a shell command;
    // values containing quotes would break the command or inject shell
    // syntax.
    std::string auth_header = config_.api_key.empty()
        ? ""
        : "-H 'Authorization: Bearer " + config_.api_key + "' ";
    std::string curl_cmd =
        "curl -s -X POST '" + config_.base_url + "/v1/chat/completions' "
        "-H 'Content-Type: application/json' " +
        auth_header +
        "-d @" +
        temp_file + " 2>&1";

    if (config_.verbose) {
      std::cerr << "[DEBUG] Executing OpenAI API request..." << std::endl;
    }

#ifdef _WIN32
    FILE* pipe = _popen(curl_cmd.c_str(), "r");
#else
    FILE* pipe = popen(curl_cmd.c_str(), "r");
#endif
    if (!pipe) {
      return absl::InternalError("Failed to execute curl command");
    }

    char buffer[4096];
    while (fgets(buffer, sizeof(buffer), pipe) != nullptr) {
      response_str += buffer;
    }

#ifdef _WIN32
    int status = _pclose(pipe);
#else
    int status = pclose(pipe);
#endif
    // Clean up the staged request body regardless of curl's outcome.
    std::remove(temp_file.c_str());

    if (status != 0) {
      return absl::InternalError(
          absl::StrCat("Curl failed with status ", status));
    }
#endif  // YAZE_AI_IOS_URLSESSION

    if (response_str.empty()) {
      return absl::InternalError("Empty response from OpenAI API");
    }

    // Verbose diagnostics: dump a truncated raw response in ANSI colors.
    if (config_.verbose) {
      std::cout << "\n"
                << "\033[35m"
                << "🔍 Raw OpenAI API Response:"
                << "\033[0m"
                << "\n"
                << "\033[2m" << response_str.substr(0, 500) << "\033[0m"
                << "\n\n";
    }

    // NOTE(review): duplicated verbose guard — could be merged with the
    // block above.
    if (config_.verbose) {
      std::cerr << "[DEBUG] Parsing response..." << std::endl;
    }

    auto parsed_or = ParseOpenAIResponse(response_str);
    if (!parsed_or.ok()) {
      return parsed_or.status();
    }

    // Annotate the parsed response with provider/model metadata and the
    // request parameters used, for logging and reproducibility.
    AgentResponse agent_response = std::move(parsed_or.value());
    agent_response.provider = "openai";
    agent_response.model = config_.model;
    agent_response.latency_seconds =
        absl::ToDoubleSeconds(absl::Now() - request_start);
    agent_response.parameters["prompt_version"] = config_.prompt_version;
    agent_response.parameters["temperature"] =
        absl::StrFormat("%.2f", config_.temperature);
    agent_response.parameters["max_output_tokens"] =
        absl::StrFormat("%d", config_.max_output_tokens);
    agent_response.parameters["function_calling"] =
        function_calling_enabled_ ? "true" : "false";

    return agent_response;

  } catch (const std::exception& e) {
    if (config_.verbose) {
      std::cerr << "[ERROR] Exception: " << e.what() << std::endl;
    }
    return absl::InternalError(
        absl::StrCat("Exception during generation: ", e.what()));
  }
#endif
}
625
626absl::StatusOr<AgentResponse> OpenAIAIService::ParseOpenAIResponse(
627 const std::string& response_body) {
628#ifndef YAZE_WITH_JSON
629 return absl::UnimplementedError("JSON support required");
630#else
631 AgentResponse agent_response;
632
633 auto response_json = nlohmann::json::parse(response_body, nullptr, false);
634 if (response_json.is_discarded()) {
635 return absl::InternalError("❌ Failed to parse OpenAI response JSON");
636 }
637
638 // Check for errors
639 if (response_json.contains("error")) {
640 std::string error_msg =
641 response_json["error"].value("message", "Unknown error");
642 return absl::InternalError(absl::StrCat("❌ OpenAI API error: ", error_msg));
643 }
644
645 // Navigate OpenAI's response structure
646 if (!response_json.contains("choices") || response_json["choices"].empty()) {
647 return absl::InternalError("❌ No choices in OpenAI response");
648 }
649
650 const auto& choice = response_json["choices"][0];
651 if (!choice.contains("message")) {
652 return absl::InternalError("❌ No message in OpenAI response");
653 }
654
655 const auto& message = choice["message"];
656
657 // Extract text content
658 if (message.contains("content") && !message["content"].is_null()) {
659 std::string text_content = message["content"].get<std::string>();
660
661 if (config_.verbose) {
662 std::cout << "\n"
663 << "\033[35m"
664 << "🔍 Raw LLM Response:"
665 << "\033[0m"
666 << "\n"
667 << "\033[2m" << text_content << "\033[0m"
668 << "\n\n";
669 }
670
671 // Strip markdown code blocks if present
672 text_content = std::string(absl::StripAsciiWhitespace(text_content));
673 if (absl::StartsWith(text_content, "```json")) {
674 text_content = text_content.substr(7);
675 } else if (absl::StartsWith(text_content, "```")) {
676 text_content = text_content.substr(3);
677 }
678 if (absl::EndsWith(text_content, "```")) {
679 text_content = text_content.substr(0, text_content.length() - 3);
680 }
681 text_content = std::string(absl::StripAsciiWhitespace(text_content));
682
683 // Try to parse as JSON object
684 auto parsed_text = nlohmann::json::parse(text_content, nullptr, false);
685 if (!parsed_text.is_discarded()) {
686 // Extract text_response
687 if (parsed_text.contains("text_response") &&
688 parsed_text["text_response"].is_string()) {
689 agent_response.text_response =
690 parsed_text["text_response"].get<std::string>();
691 }
692
693 // Extract reasoning
694 if (parsed_text.contains("reasoning") &&
695 parsed_text["reasoning"].is_string()) {
696 agent_response.reasoning = parsed_text["reasoning"].get<std::string>();
697 }
698
699 // Extract commands
700 if (parsed_text.contains("commands") &&
701 parsed_text["commands"].is_array()) {
702 for (const auto& cmd : parsed_text["commands"]) {
703 if (cmd.is_string()) {
704 std::string command = cmd.get<std::string>();
705 if (absl::StartsWith(command, "z3ed ")) {
706 command = command.substr(5);
707 }
708 agent_response.commands.push_back(command);
709 }
710 }
711 }
712
713 // Extract tool_calls from parsed JSON
714 if (parsed_text.contains("tool_calls") &&
715 parsed_text["tool_calls"].is_array()) {
716 for (const auto& call : parsed_text["tool_calls"]) {
717 if (call.contains("tool_name") && call["tool_name"].is_string()) {
718 ToolCall tool_call;
719 tool_call.tool_name = call["tool_name"].get<std::string>();
720 if (call.contains("args") && call["args"].is_object()) {
721 for (auto& [key, value] : call["args"].items()) {
722 if (value.is_string()) {
723 tool_call.args[key] = value.get<std::string>();
724 } else if (value.is_number()) {
725 tool_call.args[key] = std::to_string(value.get<double>());
726 } else if (value.is_boolean()) {
727 tool_call.args[key] = value.get<bool>() ? "true" : "false";
728 }
729 }
730 }
731 agent_response.tool_calls.push_back(tool_call);
732 }
733 }
734 }
735 } else {
736 // Use raw text as response
737 agent_response.text_response = text_content;
738 }
739 }
740
741 // Handle native OpenAI tool calls
742 if (message.contains("tool_calls") && message["tool_calls"].is_array()) {
743 for (const auto& call : message["tool_calls"]) {
744 if (call.contains("function")) {
745 const auto& func = call["function"];
746 ToolCall tool_call;
747 tool_call.tool_name = func.value("name", "");
748
749 if (func.contains("arguments") && func["arguments"].is_string()) {
750 auto args_json = nlohmann::json::parse(
751 func["arguments"].get<std::string>(), nullptr, false);
752 if (!args_json.is_discarded() && args_json.is_object()) {
753 for (auto& [key, value] : args_json.items()) {
754 if (value.is_string()) {
755 tool_call.args[key] = value.get<std::string>();
756 } else if (value.is_number()) {
757 tool_call.args[key] = std::to_string(value.get<double>());
758 }
759 }
760 }
761 }
762 agent_response.tool_calls.push_back(tool_call);
763 }
764 }
765 }
766
767 if (agent_response.text_response.empty() && agent_response.commands.empty() &&
768 agent_response.tool_calls.empty()) {
769 return absl::InternalError(
770 "❌ No valid response extracted from OpenAI\n"
771 " Expected at least one of: text_response, commands, or tool_calls");
772 }
773
774 return agent_response;
775#endif
776}
777
778#endif // YAZE_AI_RUNTIME_AVAILABLE
779
780} // namespace cli
781} // namespace yaze
OpenAIAIService(const OpenAIConfig &)