10#include "absl/strings/str_cat.h"
11#include "absl/strings/str_split.h"
12#include "absl/strings/strip.h"
13#include "absl/time/clock.h"
14#include "absl/time/time.h"
23#include "nlohmann/json.hpp"
#ifdef CPPHTTPLIB_OPENSSL_SUPPORT
#include <openssl/crypto.h>
#include <openssl/err.h>
#include <openssl/ssl.h>
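
// One-time process-wide OpenSSL initialization, guarded by a mutex so
// concurrent service constructions cannot race.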
static std::atomic<bool> g_openssl_initialized{false};
static std::mutex g_openssl_init_mutex;

static void EnsureOpenSSLInitialized() {
  std::lock_guard<std::mutex> lock(g_openssl_init_mutex);
  if (!g_openssl_initialized.exchange(true)) {
    OPENSSL_init_ssl(
        OPENSSL_INIT_LOAD_SSL_STRINGS | OPENSSL_INIT_LOAD_CRYPTO_STRINGS,
        nullptr);
    std::cerr << "✓ OpenSSL initialized for HTTPS support" << std::endl;
  }
}
#endif  // CPPHTTPLIB_OPENSSL_SUPPORT
#ifdef YAZE_AI_RUNTIME_AVAILABLE
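
// Caches the config, initializes TLS support, loads the prompt catalogue,
// and resolves the system instruction from versioned prompt assets, falling
// back to prompt_builder_ when no prompt file is found.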
OpenAIAIService::OpenAIAIService(const OpenAIConfig& config)
    : function_calling_enabled_(config.use_function_calling), config_(config) {
  if (config_.verbose) {
    std::cerr << "[DEBUG] Initializing OpenAI service..." << std::endl;
    std::cerr << "[DEBUG] Model: " << config_.model << std::endl;
    std::cerr << "[DEBUG] Function calling: "
              << (function_calling_enabled_ ? "enabled" : "disabled")
              << std::endl;
  }

#ifdef CPPHTTPLIB_OPENSSL_SUPPORT
  EnsureOpenSSLInitialized();
  if (config_.verbose) {
    std::cerr << "[DEBUG] OpenSSL initialized for HTTPS" << std::endl;
  }
#endif

  // Load the resource catalogue that backs prompt construction.
  std::string catalogue_path = config_.prompt_version == "v2"
                                   ? "assets/agent/prompt_catalogue_v2.yaml"
                                   : "assets/agent/prompt_catalogue.yaml";
  if (auto status = prompt_builder_.LoadResourceCatalogue(catalogue_path);
      !status.ok()) {
    std::cerr << "⚠️ Failed to load agent prompt catalogue: "
              << status.message() << std::endl;
  }

  // Prefer a versioned system prompt from the asset directory; fall back to
  // the programmatically built instruction if no file can be read.
  if (config_.system_instruction.empty()) {
    std::string prompt_file;
    if (config_.prompt_version == "v3") {
      prompt_file = "agent/system_prompt_v3.txt";
    } else if (config_.prompt_version == "v2") {
      prompt_file = "agent/system_prompt_v2.txt";
    } else {
      prompt_file = "agent/system_prompt.txt";
    }
    auto prompt_path = util::PlatformPaths::FindAsset(prompt_file);
    if (prompt_path.ok()) {
      std::ifstream file(prompt_path->string());
      if (file.is_open()) {
        std::stringstream buffer;
        buffer << file.rdbuf();
        config_.system_instruction = buffer.str();
        if (config_.verbose) {
          std::cerr << "[DEBUG] Loaded prompt: " << prompt_path->string()
                    << std::endl;
        }
      }
    }
    if (config_.system_instruction.empty()) {
      config_.system_instruction = BuildSystemInstruction();
    }
  }

  if (config_.verbose) {
    std::cerr << "[DEBUG] OpenAI service initialized" << std::endl;
  }
}
void OpenAIAIService::EnableFunctionCalling(bool enable) {
  function_calling_enabled_ = enable;
}
std::vector<std::string> OpenAIAIService::GetAvailableTools() const {
  return {"resource-list",        "resource-search",
          "dungeon-list-sprites", "dungeon-describe-room",
          "overworld-find-tile",  "overworld-describe-map",
          "overworld-list-warps"};
}
std::string OpenAIAIService::BuildFunctionCallSchemas() {
#ifndef YAZE_WITH_JSON
  return "[]";
#else
  std::string schemas = prompt_builder_.BuildFunctionCallSchemas();
  if (!schemas.empty() && schemas != "[]") {
    return schemas;
  }

  auto schema_path_or =
      util::PlatformPaths::FindAsset("agent/function_schemas.json");
  if (!schema_path_or.ok()) {
    return "[]";
  }

  std::ifstream file(schema_path_or->string());
  if (!file.is_open()) {
    return "[]";
  }

  try {
    nlohmann::json schemas_json;
    file >> schemas_json;
    return schemas_json.dump();
  } catch (const nlohmann::json::exception& e) {
    std::cerr << "⚠️ Failed to parse function schemas JSON: " << e.what()
              << std::endl;
    return "[]";
  }
#endif
}
std::string OpenAIAIService::BuildSystemInstruction() {
  return prompt_builder_.BuildSystemInstruction();
}

void OpenAIAIService::SetRomContext(Rom* rom) {
  prompt_builder_.SetRom(rom);
}
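
// Queries /v1/models when an API key is configured; otherwise (or when the
// response has no "data" array) returns a static list of known chat models.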
absl::StatusOr<std::vector<ModelInfo>> OpenAIAIService::ListAvailableModels() {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError("OpenAI AI service requires JSON support");
#else
  if (config_.api_key.empty()) {
    // No key to query the API with; return the static list of known models.
    std::vector<ModelInfo> defaults = {
        {.name = "gpt-4o",
         .display_name = "GPT-4o",
         .provider = "openai",
         .description = "Most capable GPT-4 model"},
        {.name = "gpt-4o-mini",
         .display_name = "GPT-4o Mini",
         .provider = "openai",
         .description = "Fast and cost-effective"},
        {.name = "gpt-4-turbo",
         .display_name = "GPT-4 Turbo",
         .provider = "openai",
         .description = "GPT-4 with larger context"},
        {.name = "gpt-3.5-turbo",
         .display_name = "GPT-3.5 Turbo",
         .provider = "openai",
         .description = "Fast and efficient"}};
    return defaults;
  }

  try {
    std::string curl_cmd =
        "curl -s -X GET 'https://api.openai.com/v1/models' "
        "-H 'Authorization: Bearer " +
        config_.api_key + "' 2>&1";

    if (config_.verbose) {
      std::cerr << "[DEBUG] Listing OpenAI models..." << std::endl;
    }

#ifdef _WIN32
    FILE* pipe = _popen(curl_cmd.c_str(), "r");
#else
    FILE* pipe = popen(curl_cmd.c_str(), "r");
#endif
    if (!pipe) {
      return absl::InternalError("Failed to execute curl command");
    }

    std::string response_str;
    char buffer[4096];
    while (fgets(buffer, sizeof(buffer), pipe) != nullptr) {
      response_str += buffer;
    }
#ifdef _WIN32
    _pclose(pipe);
#else
    pclose(pipe);
#endif
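
    // parse(..., /*cb=*/nullptr, /*allow_exceptions=*/false) never throws;
    // it returns a value for which is_discarded() is true on malformed input.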
    auto models_json = nlohmann::json::parse(response_str, nullptr, false);
    if (models_json.is_discarded()) {
      return absl::InternalError("Failed to parse OpenAI models JSON");
    }

    if (!models_json.contains("data")) {
      // Unexpected payload shape; return the static fallback list.
      std::vector<ModelInfo> defaults = {
          {.name = "gpt-4o-mini",
           .display_name = "GPT-4o Mini",
           .provider = "openai"},
          {.name = "gpt-4o",
           .display_name = "GPT-4o",
           .provider = "openai"},
          {.name = "gpt-3.5-turbo",
           .display_name = "GPT-3.5 Turbo",
           .provider = "openai"}};
      return defaults;
    }

    std::vector<ModelInfo> models;
    for (const auto& m : models_json["data"]) {
      std::string id = m.value("id", "");
      // Keep only chat-capable model families.
      if (absl::StartsWith(id, "gpt-4") || absl::StartsWith(id, "gpt-3.5") ||
          absl::StartsWith(id, "o1") || absl::StartsWith(id, "chatgpt")) {
        ModelInfo info;
        info.name = id;
        info.display_name = id;
        info.provider = "openai";
        info.is_local = false;

        // Friendlier display names for well-known models.
        if (id == "gpt-4o") info.display_name = "GPT-4o";
        else if (id == "gpt-4o-mini") info.display_name = "GPT-4o Mini";
        else if (id == "gpt-4-turbo") info.display_name = "GPT-4 Turbo";
        else if (id == "gpt-3.5-turbo") info.display_name = "GPT-3.5 Turbo";
        else if (id == "o1-preview") info.display_name = "o1 Preview";
        else if (id == "o1-mini") info.display_name = "o1 Mini";

        models.push_back(std::move(info));
      }
    }
    return models;
  } catch (const std::exception& e) {
    return absl::InternalError(
        absl::StrCat("Failed to list models: ", e.what()));
  }
#endif
}
absl::Status OpenAIAIService::CheckAvailability() {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError(
      "OpenAI AI service requires JSON support. Build with "
      "-DYAZE_WITH_JSON=ON");
#else
  if (config_.api_key.empty()) {
    return absl::FailedPreconditionError(
        "❌ OpenAI API key not configured\n"
        "   Set OPENAI_API_KEY environment variable\n"
        "   Get your API key at: https://platform.openai.com/api-keys");
  }

  try {
    httplib::Client cli("https://api.openai.com");
    cli.set_connection_timeout(5, 0);

    httplib::Headers headers = {
        {"Authorization", "Bearer " + config_.api_key},
    };

    auto res = cli.Get("/v1/models", headers);
    if (!res) {
      return absl::UnavailableError(
          "❌ Cannot reach OpenAI API\n"
          "   Check your internet connection");
    }

    if (res->status == 401) {
      return absl::PermissionDeniedError(
          "❌ Invalid OpenAI API key\n"
          "   Verify your key at: https://platform.openai.com/api-keys");
    }

    if (res->status != 200) {
      return absl::InternalError(absl::StrCat(
          "❌ OpenAI API error: ", res->status, "\n   ", res->body));
    }

    return absl::OkStatus();
  } catch (const std::exception& e) {
    return absl::InternalError(
        absl::StrCat("Exception during availability check: ", e.what()));
  }
#endif
}
absl::StatusOr<AgentResponse> OpenAIAIService::GenerateResponse(
    const std::string& prompt) {
  return GenerateResponse(
      {{{agent::ChatMessage::Sender::kUser, prompt, absl::Now()}}});
}
absl::StatusOr<AgentResponse> OpenAIAIService::GenerateResponse(
    const std::vector<agent::ChatMessage>& history) {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError(
      "OpenAI AI service requires JSON support. Build with "
      "-DYAZE_WITH_JSON=ON");
#else
  if (history.empty()) {
    return absl::InvalidArgumentError("History cannot be empty.");
  }

  if (config_.api_key.empty()) {
    return absl::FailedPreconditionError("OpenAI API key not configured");
  }
  absl::Time request_start = absl::Now();

  try {
    if (config_.verbose) {
      std::cerr << "[DEBUG] Using curl for OpenAI HTTPS request" << std::endl;
      std::cerr << "[DEBUG] Processing " << history.size()
                << " messages in history" << std::endl;
    }

    nlohmann::json messages = nlohmann::json::array();
    messages.push_back({
        {"role", "system"},
        {"content", config_.system_instruction},
    });

    // Cap the context at the 10 most recent messages.
    int start_idx = std::max(0, static_cast<int>(history.size()) - 10);
    for (size_t i = start_idx; i < history.size(); ++i) {
      const auto& msg = history[i];
      std::string role =
          (msg.sender == agent::ChatMessage::Sender::kUser) ? "user"
                                                            : "assistant";
      messages.push_back({
          {"role", role},
          {"content", msg.message},
      });
    }

    nlohmann::json request_body = {
        {"model", config_.model},
        {"messages", messages},
        {"temperature", config_.temperature},
        {"max_tokens", config_.max_output_tokens},
    };

    if (function_calling_enabled_) {
      std::string schemas_str = BuildFunctionCallSchemas();
      if (config_.verbose) {
        std::cerr << "[DEBUG] Function calling schemas: "
                  << schemas_str.substr(0, 200) << "..." << std::endl;
      }
      try {
        nlohmann::json schemas = nlohmann::json::parse(schemas_str);
        if (schemas.is_array() && !schemas.empty()) {
          // Wrap each schema in the {"type": "function", "function": ...}
          // envelope expected by the Chat Completions tools parameter.
          nlohmann::json tools = nlohmann::json::array();
          for (const auto& schema : schemas) {
            tools.push_back({
                {"type", "function"},
                {"function", schema},
            });
          }
          request_body["tools"] = tools;
        }
      } catch (const nlohmann::json::exception& e) {
        std::cerr << "⚠️ Failed to parse function schemas: " << e.what()
                  << std::endl;
      }
    }

    if (config_.verbose) {
      std::cerr << "[DEBUG] Sending " << messages.size()
                << " messages to OpenAI" << std::endl;
    }

    // Write the body to a temp file and hand it to curl, which avoids
    // quoting issues with large JSON payloads on the command line.
    std::string temp_file = "/tmp/openai_request.json";
    std::ofstream out(temp_file);
    out << request_body.dump();
    out.close();

    std::string curl_cmd =
        "curl -s -X POST 'https://api.openai.com/v1/chat/completions' "
        "-H 'Content-Type: application/json' "
        "-H 'Authorization: Bearer " +
        config_.api_key +
        "' -d @" + temp_file + " 2>&1";

    if (config_.verbose) {
      std::cerr << "[DEBUG] Executing OpenAI API request..." << std::endl;
    }

#ifdef _WIN32
    FILE* pipe = _popen(curl_cmd.c_str(), "r");
#else
    FILE* pipe = popen(curl_cmd.c_str(), "r");
#endif
    if (!pipe) {
      return absl::InternalError("Failed to execute curl command");
    }

    std::string response_str;
    char buffer[4096];
    while (fgets(buffer, sizeof(buffer), pipe) != nullptr) {
      response_str += buffer;
    }
#ifdef _WIN32
    int status = _pclose(pipe);
#else
    int status = pclose(pipe);
#endif
    std::remove(temp_file.c_str());

    if (status != 0) {
      return absl::InternalError(
          absl::StrCat("Curl failed with status ", status));
    }

    if (response_str.empty()) {
      return absl::InternalError("Empty response from OpenAI API");
    }

    if (config_.verbose) {
      std::cerr << "\033[35m" << "🔍 Raw OpenAI API Response:" << "\033[0m"
                << "\n"
                << "\033[2m" << response_str.substr(0, 500) << "\033[0m"
                << std::endl;
    }

    if (config_.verbose) {
      std::cerr << "[DEBUG] Parsing response..." << std::endl;
    }

    auto parsed_or = ParseOpenAIResponse(response_str);
    if (!parsed_or.ok()) {
      return parsed_or.status();
    }

    // Annotate the parsed response with provenance and request metadata.
    AgentResponse agent_response = std::move(parsed_or.value());
    agent_response.provider = "openai";
    agent_response.model = config_.model;
    agent_response.latency_seconds =
        absl::ToDoubleSeconds(absl::Now() - request_start);
    agent_response.parameters["prompt_version"] = config_.prompt_version;
    agent_response.parameters["temperature"] =
        absl::StrFormat("%.2f", config_.temperature);
    agent_response.parameters["max_output_tokens"] =
        absl::StrFormat("%d", config_.max_output_tokens);
    agent_response.parameters["function_calling"] =
        function_calling_enabled_ ? "true" : "false";

    return agent_response;
  } catch (const std::exception& e) {
    if (config_.verbose) {
      std::cerr << "[ERROR] Exception: " << e.what() << std::endl;
    }
    return absl::InternalError(
        absl::StrCat("Exception during generation: ", e.what()));
  }
#endif
}
absl::StatusOr<AgentResponse> OpenAIAIService::ParseOpenAIResponse(
    const std::string& response_body) {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError("JSON support required");
#else
  AgentResponse agent_response;

  auto response_json = nlohmann::json::parse(response_body, nullptr, false);
  if (response_json.is_discarded()) {
    return absl::InternalError("❌ Failed to parse OpenAI response JSON");
  }

  if (response_json.contains("error")) {
    std::string error_msg =
        response_json["error"].value("message", "Unknown error");
    return absl::InternalError(
        absl::StrCat("❌ OpenAI API error: ", error_msg));
  }

  if (!response_json.contains("choices") || response_json["choices"].empty()) {
    return absl::InternalError("❌ No choices in OpenAI response");
  }

  const auto& choice = response_json["choices"][0];
  if (!choice.contains("message")) {
    return absl::InternalError("❌ No message in OpenAI response");
  }

  const auto& message = choice["message"];

  if (message.contains("content") && !message["content"].is_null()) {
    std::string text_content = message["content"].get<std::string>();

    if (config_.verbose) {
      std::cerr << "\033[35m" << "🔍 Raw LLM Response:" << "\033[0m" << "\n"
                << "\033[2m" << text_content << "\033[0m" << "\n\n";
    }

    // Strip optional markdown code fences so the body can be parsed as JSON.
    text_content = std::string(absl::StripAsciiWhitespace(text_content));
    if (absl::StartsWith(text_content, "```json")) {
      text_content = text_content.substr(7);
    } else if (absl::StartsWith(text_content, "```")) {
      text_content = text_content.substr(3);
    }
    if (absl::EndsWith(text_content, "```")) {
      text_content = text_content.substr(0, text_content.length() - 3);
    }
    text_content = std::string(absl::StripAsciiWhitespace(text_content));

    auto parsed_text = nlohmann::json::parse(text_content, nullptr, false);
    if (!parsed_text.is_discarded()) {
      if (parsed_text.contains("text_response") &&
          parsed_text["text_response"].is_string()) {
        agent_response.text_response =
            parsed_text["text_response"].get<std::string>();
      }

      if (parsed_text.contains("reasoning") &&
          parsed_text["reasoning"].is_string()) {
        agent_response.reasoning = parsed_text["reasoning"].get<std::string>();
      }

      if (parsed_text.contains("commands") &&
          parsed_text["commands"].is_array()) {
        for (const auto& cmd : parsed_text["commands"]) {
          if (cmd.is_string()) {
            std::string command = cmd.get<std::string>();
            // Normalize commands by stripping the CLI binary name prefix.
            if (absl::StartsWith(command, "z3ed ")) {
              command = command.substr(5);
            }
            agent_response.commands.push_back(command);
          }
        }
      }

      if (parsed_text.contains("tool_calls") &&
          parsed_text["tool_calls"].is_array()) {
        for (const auto& call : parsed_text["tool_calls"]) {
          if (call.contains("tool_name") && call["tool_name"].is_string()) {
            ToolCall tool_call;  // assumed element type of tool_calls
            tool_call.tool_name = call["tool_name"].get<std::string>();
            if (call.contains("args") && call["args"].is_object()) {
              // Flatten JSON arg values to strings.
              for (auto& [key, value] : call["args"].items()) {
                if (value.is_string()) {
                  tool_call.args[key] = value.get<std::string>();
                } else if (value.is_number()) {
                  tool_call.args[key] = std::to_string(value.get<double>());
                } else if (value.is_boolean()) {
                  tool_call.args[key] = value.get<bool>() ? "true" : "false";
                }
              }
            }
            agent_response.tool_calls.push_back(tool_call);
          }
        }
      }
    } else {
      // Not structured JSON; treat the whole body as plain text.
      agent_response.text_response = text_content;
    }
  }

  // Native OpenAI function calling: tool calls arrive on message.tool_calls
  // with JSON-encoded argument strings.
  if (message.contains("tool_calls") && message["tool_calls"].is_array()) {
    for (const auto& call : message["tool_calls"]) {
      if (call.contains("function")) {
        const auto& func = call["function"];
        ToolCall tool_call;  // assumed element type of tool_calls
        tool_call.tool_name = func.value("name", "");
        if (func.contains("arguments") && func["arguments"].is_string()) {
          auto args_json = nlohmann::json::parse(
              func["arguments"].get<std::string>(), nullptr, false);
          if (!args_json.is_discarded() && args_json.is_object()) {
            for (auto& [key, value] : args_json.items()) {
              if (value.is_string()) {
                tool_call.args[key] = value.get<std::string>();
              } else if (value.is_number()) {
                tool_call.args[key] = std::to_string(value.get<double>());
              }
            }
          }
        }
        agent_response.tool_calls.push_back(tool_call);
      }
    }
  }

  if (agent_response.text_response.empty() && agent_response.commands.empty() &&
      agent_response.tool_calls.empty()) {
    return absl::InternalError(
        "❌ No valid response extracted from OpenAI\n"
        "   Expected at least one of: text_response, commands, or tool_calls");
  }

  return agent_response;
#endif
}

#endif  // YAZE_AI_RUNTIME_AVAILABLE