#include "absl/strings/ascii.h"
#include "absl/strings/match.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "absl/strings/str_split.h"
#include "absl/strings/strip.h"
#include "absl/time/clock.h"
#include "absl/time/time.h"

#include <algorithm>
#include <atomic>
#include <cstdio>
#include <fstream>
#include <iostream>
#include <mutex>
#include <sstream>

#include "nlohmann/json.hpp"
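
// cpp-httplib only speaks HTTPS when built with CPPHTTPLIB_OPENSSL_SUPPORT.
// The guard below initializes OpenSSL exactly once per process: the mutex
// serializes first-time callers and the atomic exchange makes the init
// idempotent.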
#ifdef CPPHTTPLIB_OPENSSL_SUPPORT
#include <openssl/crypto.h>
#include <openssl/err.h>
#include <openssl/ssl.h>

static std::atomic<bool> g_openssl_initialized{false};
static std::mutex g_openssl_init_mutex;

static void EnsureOpenSSLInitialized() {
  std::lock_guard<std::mutex> lock(g_openssl_init_mutex);
  if (!g_openssl_initialized.exchange(true)) {
    OPENSSL_init_ssl(
        OPENSSL_INIT_LOAD_SSL_STRINGS | OPENSSL_INIT_LOAD_CRYPTO_STRINGS,
        nullptr);
    std::cerr << "✓ OpenSSL initialized for HTTPS support" << std::endl;
  }
}
#endif  // CPPHTTPLIB_OPENSSL_SUPPORT
#ifdef YAZE_AI_RUNTIME_AVAILABLE

OpenAIAIService::OpenAIAIService(const OpenAIConfig& config)
    : function_calling_enabled_(config.use_function_calling), config_(config) {
  if (config_.verbose) {
    std::cerr << "[DEBUG] Initializing OpenAI service..." << std::endl;
    std::cerr << "[DEBUG] Model: " << config_.model << std::endl;
    std::cerr << "[DEBUG] Function calling: "
              << (function_calling_enabled_ ? "enabled" : "disabled")
              << std::endl;
  }

#ifdef CPPHTTPLIB_OPENSSL_SUPPORT
  EnsureOpenSSLInitialized();
  if (config_.verbose) {
    std::cerr << "[DEBUG] OpenSSL initialized for HTTPS" << std::endl;
  }
#endif
  // Load the prompt catalogue that matches the configured prompt version.
  std::string catalogue_path = config_.prompt_version == "v2"
                                   ? "assets/agent/prompt_catalogue_v2.yaml"
                                   : "assets/agent/prompt_catalogue.yaml";
  if (auto status = prompt_builder_.LoadResourceCatalogue(catalogue_path);
      !status.ok()) {
    std::cerr << "⚠️ Failed to load agent prompt catalogue: "
              << status.message() << std::endl;
  }
  // Prefer an on-disk system prompt matching the prompt version; fall back
  // to the prompt builder if no asset is found.
  if (config_.system_instruction.empty()) {
    std::string prompt_file;
    if (config_.prompt_version == "v3") {
      prompt_file = "agent/system_prompt_v3.txt";
    } else if (config_.prompt_version == "v2") {
      prompt_file = "agent/system_prompt_v2.txt";
    } else {
      prompt_file = "agent/system_prompt.txt";
    }

    auto prompt_path = util::PlatformPaths::FindAsset(prompt_file);
    if (prompt_path.ok()) {
      std::ifstream file(prompt_path->string());
      if (file.is_open()) {
        std::stringstream buffer;
        buffer << file.rdbuf();
        config_.system_instruction = buffer.str();
        if (config_.verbose) {
          std::cerr << "[DEBUG] Loaded prompt: " << prompt_path->string()
                    << std::endl;
        }
      }
    }

    if (config_.system_instruction.empty()) {
      config_.system_instruction = BuildSystemInstruction();
    }
  }

  if (config_.verbose) {
    std::cerr << "[DEBUG] OpenAI service initialized" << std::endl;
  }
}
void OpenAIAIService::EnableFunctionCalling(bool enable) {
  function_calling_enabled_ = enable;
}

std::vector<std::string> OpenAIAIService::GetAvailableTools() const {
  return {"resource-list",        "resource-search",
          "dungeon-list-sprites", "dungeon-describe-room",
          "overworld-find-tile",  "overworld-describe-map",
          "overworld-list-warps"};
}
std::string OpenAIAIService::BuildFunctionCallSchemas() {
#ifndef YAZE_WITH_JSON
  return "[]";
#else
  std::string schemas = prompt_builder_.BuildFunctionCallSchemas();
  if (!schemas.empty() && schemas != "[]") {
    return schemas;
  }

  auto schema_path_or =
      util::PlatformPaths::FindAsset("agent/function_schemas.json");
  if (!schema_path_or.ok()) {
    return "[]";
  }

  std::ifstream file(schema_path_or->string());
  if (!file.is_open()) {
    return "[]";
  }

  try {
    nlohmann::json schemas_json;
    file >> schemas_json;
    return schemas_json.dump();
  } catch (const nlohmann::json::exception& e) {
    std::cerr << "⚠️ Failed to parse function schemas JSON: " << e.what()
              << std::endl;
    return "[]";
  }
#endif
}
std::string OpenAIAIService::BuildSystemInstruction() {
  return prompt_builder_.BuildSystemInstruction();
}

void OpenAIAIService::SetRomContext(Rom* rom) {
  prompt_builder_.SetRom(rom);
}
absl::StatusOr<std::vector<ModelInfo>> OpenAIAIService::ListAvailableModels() {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError("OpenAI AI service requires JSON support");
#else
  if (config_.api_key.empty()) {
    // Without an API key, report a static default set.
    std::vector<ModelInfo> defaults = {
        {.name = "gpt-4o",
         .display_name = "GPT-4o",
         .provider = "openai",
         .description = "Most capable GPT-4 model"},
        {.name = "gpt-4o-mini",
         .display_name = "GPT-4o Mini",
         .provider = "openai",
         .description = "Fast and cost-effective"},
        {.name = "gpt-4-turbo",
         .display_name = "GPT-4 Turbo",
         .provider = "openai",
         .description = "GPT-4 with larger context"},
        {.name = "gpt-3.5-turbo",
         .display_name = "GPT-3.5 Turbo",
         .provider = "openai",
         .description = "Fast and efficient"}};
    return defaults;
  }
  std::string curl_cmd =
      "curl -s -X GET 'https://api.openai.com/v1/models' "
      "-H 'Authorization: Bearer " +
      config_.api_key + "' 2>&1";

  if (config_.verbose) {
    std::cerr << "[DEBUG] Listing OpenAI models..." << std::endl;
  }

#ifdef _WIN32
  FILE* pipe = _popen(curl_cmd.c_str(), "r");
#else
  FILE* pipe = popen(curl_cmd.c_str(), "r");
#endif
  if (!pipe) {
    return absl::InternalError("Failed to execute curl command");
  }

  std::string response_str;
  char buffer[4096];  // fgets chunk size (original size not preserved)
  while (fgets(buffer, sizeof(buffer), pipe) != nullptr) {
    response_str += buffer;
  }
#ifdef _WIN32
  _pclose(pipe);
#else
  pclose(pipe);
#endif
  try {
    auto models_json = nlohmann::json::parse(response_str, nullptr, false);
    if (models_json.is_discarded()) {
      return absl::InternalError("Failed to parse OpenAI models JSON");
    }

    if (!models_json.contains("data")) {
      // Unexpected payload shape: fall back to a minimal default set.
      std::vector<ModelInfo> defaults = {
          {.name = "gpt-4o-mini",
           .display_name = "GPT-4o Mini",
           .provider = "openai"},
          {.name = "gpt-4o", .display_name = "GPT-4o", .provider = "openai"},
          {.name = "gpt-3.5-turbo",
           .display_name = "GPT-3.5 Turbo",
           .provider = "openai"}};
      return defaults;
    }
    std::vector<ModelInfo> models;
    for (const auto& m : models_json["data"]) {
      std::string id = m.value("id", "");

      // Only surface chat-capable model families.
      if (absl::StartsWith(id, "gpt-4") || absl::StartsWith(id, "gpt-3.5") ||
          absl::StartsWith(id, "o1") || absl::StartsWith(id, "chatgpt")) {
        ModelInfo info;
        info.name = id;
        info.display_name = id;
        info.provider = "openai";
        info.is_local = false;

        // Friendlier display names for the common models.
        if (id == "gpt-4o")
          info.display_name = "GPT-4o";
        else if (id == "gpt-4o-mini")
          info.display_name = "GPT-4o Mini";
        else if (id == "gpt-4-turbo")
          info.display_name = "GPT-4 Turbo";
        else if (id == "gpt-3.5-turbo")
          info.display_name = "GPT-3.5 Turbo";
        else if (id == "o1-preview")
          info.display_name = "o1 Preview";
        else if (id == "o1-mini")
          info.display_name = "o1 Mini";

        models.push_back(std::move(info));
      }
    }
    return models;
  } catch (const std::exception& e) {
    return absl::InternalError(
        absl::StrCat("Failed to list models: ", e.what()));
  }
#endif
}
absl::Status OpenAIAIService::CheckAvailability() {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError(
      "OpenAI AI service requires JSON support. Build with "
      "-DYAZE_WITH_JSON=ON");
#else
  if (config_.api_key.empty()) {
    return absl::FailedPreconditionError(
        "❌ OpenAI API key not configured\n"
        "   Set OPENAI_API_KEY environment variable\n"
        "   Get your API key at: https://platform.openai.com/api-keys");
  }

  try {
    httplib::Client cli("https://api.openai.com");
    cli.set_connection_timeout(5, 0);

    httplib::Headers headers = {
        {"Authorization", "Bearer " + config_.api_key},
    };

    auto res = cli.Get("/v1/models", headers);
    if (!res) {
      return absl::UnavailableError(
          "❌ Cannot reach OpenAI API\n"
          "   Check your internet connection");
    }

    if (res->status == 401) {
      return absl::PermissionDeniedError(
          "❌ Invalid OpenAI API key\n"
          "   Verify your key at: https://platform.openai.com/api-keys");
    }

    if (res->status != 200) {
      return absl::InternalError(absl::StrCat(
          "❌ OpenAI API error: ", res->status, "\n   ", res->body));
    }

    return absl::OkStatus();
  } catch (const std::exception& e) {
    return absl::InternalError(
        absl::StrCat("Exception during availability check: ", e.what()));
  }
#endif
}
absl::StatusOr<AgentResponse> OpenAIAIService::GenerateResponse(
    const std::string& prompt) {
  return GenerateResponse(
      {{agent::ChatMessage::Sender::kUser, prompt, absl::Now()}});
}

absl::StatusOr<AgentResponse> OpenAIAIService::GenerateResponse(
    const std::vector<agent::ChatMessage>& history) {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError(
      "OpenAI AI service requires JSON support. Build with "
      "-DYAZE_WITH_JSON=ON");
#else
  if (history.empty()) {
    return absl::InvalidArgumentError("History cannot be empty.");
  }
  if (config_.api_key.empty()) {
    return absl::FailedPreconditionError("OpenAI API key not configured");
  }

  absl::Time request_start = absl::Now();

  try {
    if (config_.verbose) {
      std::cerr << "[DEBUG] Using curl for OpenAI HTTPS request" << std::endl;
      std::cerr << "[DEBUG] Processing " << history.size()
                << " messages in history" << std::endl;
    }
    nlohmann::json messages = nlohmann::json::array();
    messages.push_back(
        {{"role", "system"}, {"content", config_.system_instruction}});

    int start_idx = std::max(0, static_cast<int>(history.size()) - 10);
    for (size_t i = start_idx; i < history.size(); ++i) {
      const auto& msg = history[i];
      std::string role = (msg.sender == agent::ChatMessage::Sender::kUser)
                             ? "user"
                             : "assistant";
      messages.push_back({{"role", role}, {"content", msg.message}});
    }

    nlohmann::json request_body = {{"model", config_.model},
                                   {"messages", messages},
                                   {"temperature", config_.temperature},
                                   {"max_tokens", config_.max_output_tokens}};
    if (function_calling_enabled_) {
      std::string schemas_str = BuildFunctionCallSchemas();
      if (config_.verbose) {
        std::cerr << "[DEBUG] Function calling schemas: "
                  << schemas_str.substr(0, 200) << "..." << std::endl;
      }

      try {
        nlohmann::json schemas = nlohmann::json::parse(schemas_str);
        if (schemas.is_array() && !schemas.empty()) {
          nlohmann::json tools = nlohmann::json::array();
          for (const auto& schema : schemas) {
            tools.push_back({{"type", "function"}, {"function", schema}});
          }
          request_body["tools"] = tools;
        }
      } catch (const nlohmann::json::exception& e) {
        std::cerr << "⚠️ Failed to parse function schemas: " << e.what()
                  << std::endl;
      }
    }
    if (config_.verbose) {
      std::cerr << "[DEBUG] Sending " << messages.size()
                << " messages to OpenAI" << std::endl;
    }

    std::string temp_file = "/tmp/openai_request.json";
    std::ofstream out(temp_file);
    out << request_body.dump();
    out.close();
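
    // Hand the staged body to curl; -d @file avoids shell-escaping the JSON
    // payload on the command line.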
    std::string curl_cmd =
        "curl -s -X POST 'https://api.openai.com/v1/chat/completions' "
        "-H 'Content-Type: application/json' "
        "-H 'Authorization: Bearer " +
        config_.api_key + "' -d @" + temp_file + " 2>&1";

    if (config_.verbose) {
      std::cerr << "[DEBUG] Executing OpenAI API request..." << std::endl;
    }

#ifdef _WIN32
    FILE* pipe = _popen(curl_cmd.c_str(), "r");
#else
    FILE* pipe = popen(curl_cmd.c_str(), "r");
#endif
    if (!pipe) {
      return absl::InternalError("Failed to execute curl command");
    }

    std::string response_str;
    char buffer[4096];  // fgets chunk size (original size not preserved)
    while (fgets(buffer, sizeof(buffer), pipe) != nullptr) {
      response_str += buffer;
    }

#ifdef _WIN32
    int status = _pclose(pipe);
#else
    int status = pclose(pipe);
#endif
    std::remove(temp_file.c_str());

    if (status != 0) {
      return absl::InternalError(
          absl::StrCat("Curl failed with status ", status));
    }

    if (response_str.empty()) {
      return absl::InternalError("Empty response from OpenAI API");
    }

    if (config_.verbose) {
      std::cerr << "\n🔍 Raw OpenAI API Response:\n"
                << "\033[2m" << response_str.substr(0, 500) << "\033[0m"
                << std::endl;
    }
    if (config_.verbose) {
      std::cerr << "[DEBUG] Parsing response..." << std::endl;
    }

    auto parsed_or = ParseOpenAIResponse(response_str);
    if (!parsed_or.ok()) {
      return parsed_or.status();
    }

    // Attach provenance and request parameters for logging.
    AgentResponse agent_response = std::move(parsed_or.value());
    agent_response.provider = "openai";
    agent_response.model = config_.model;
    agent_response.latency_seconds =
        absl::ToDoubleSeconds(absl::Now() - request_start);
    agent_response.parameters["prompt_version"] = config_.prompt_version;
    agent_response.parameters["temperature"] =
        absl::StrFormat("%.2f", config_.temperature);
    agent_response.parameters["max_output_tokens"] =
        absl::StrFormat("%d", config_.max_output_tokens);
    agent_response.parameters["function_calling"] =
        function_calling_enabled_ ? "true" : "false";

    return agent_response;
  } catch (const std::exception& e) {
    if (config_.verbose) {
      std::cerr << "[ERROR] Exception: " << e.what() << std::endl;
    }
    return absl::InternalError(
        absl::StrCat("Exception during generation: ", e.what()));
  }
#endif
}
absl::StatusOr<AgentResponse> OpenAIAIService::ParseOpenAIResponse(
    const std::string& response_body) {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError("JSON support required");
#else
  AgentResponse agent_response;

  auto response_json = nlohmann::json::parse(response_body, nullptr, false);
  if (response_json.is_discarded()) {
    return absl::InternalError("❌ Failed to parse OpenAI response JSON");
  }

  if (response_json.contains("error")) {
    std::string error_msg =
        response_json["error"].value("message", "Unknown error");
    return absl::InternalError(absl::StrCat("❌ OpenAI API error: ", error_msg));
  }

  if (!response_json.contains("choices") || response_json["choices"].empty()) {
    return absl::InternalError("❌ No choices in OpenAI response");
  }

  const auto& choice = response_json["choices"][0];
  if (!choice.contains("message")) {
    return absl::InternalError("❌ No message in OpenAI response");
  }

  const auto& message = choice["message"];
  // Path 1: the assistant replied with text content.
  if (message.contains("content") && !message["content"].is_null()) {
    std::string text_content = message["content"].get<std::string>();

    if (config_.verbose) {
      std::cerr << "\n🔍 Raw LLM Response:\n"
                << "\033[2m" << text_content << "\033[0m" << std::endl;
    }

    // Models often wrap JSON replies in markdown code fences; strip them
    // before parsing.
    text_content = std::string(absl::StripAsciiWhitespace(text_content));
    if (absl::StartsWith(text_content, "```json")) {
      text_content = text_content.substr(7);
    } else if (absl::StartsWith(text_content, "```")) {
      text_content = text_content.substr(3);
    }
    if (absl::EndsWith(text_content, "```")) {
      text_content = text_content.substr(0, text_content.length() - 3);
    }
    text_content = std::string(absl::StripAsciiWhitespace(text_content));

    auto parsed_text = nlohmann::json::parse(text_content, nullptr, false);
    if (!parsed_text.is_discarded()) {
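      // The reply parsed as JSON: lift each recognized agent field out of it.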
      if (parsed_text.contains("text_response") &&
          parsed_text["text_response"].is_string()) {
        agent_response.text_response =
            parsed_text["text_response"].get<std::string>();
      }

      if (parsed_text.contains("reasoning") &&
          parsed_text["reasoning"].is_string()) {
        agent_response.reasoning = parsed_text["reasoning"].get<std::string>();
      }

      if (parsed_text.contains("commands") &&
          parsed_text["commands"].is_array()) {
        for (const auto& cmd : parsed_text["commands"]) {
          if (cmd.is_string()) {
            std::string command = cmd.get<std::string>();
            // Store commands without the leading "z3ed " prefix.
            if (absl::StartsWith(command, "z3ed ")) {
              command = command.substr(5);
            }
            agent_response.commands.push_back(command);
          }
        }
      }

      if (parsed_text.contains("tool_calls") &&
          parsed_text["tool_calls"].is_array()) {
        for (const auto& call : parsed_text["tool_calls"]) {
          if (call.contains("tool_name") && call["tool_name"].is_string()) {
            agent_response.tool_calls.emplace_back();
            auto& tool_call = agent_response.tool_calls.back();
            tool_call.tool_name = call["tool_name"].get<std::string>();
            if (call.contains("args") && call["args"].is_object()) {
              // Normalize tool arguments to strings.
              for (auto& [key, value] : call["args"].items()) {
                if (value.is_string()) {
                  tool_call.args[key] = value.get<std::string>();
                } else if (value.is_number()) {
                  tool_call.args[key] = std::to_string(value.get<double>());
                } else if (value.is_boolean()) {
                  tool_call.args[key] = value.get<bool>() ? "true" : "false";
                }
              }
            }
          }
        }
      }
    } else {
      // Not JSON: treat the raw text as the response.
      agent_response.text_response = text_content;
    }
  }
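
  // Path 2: native OpenAI function calling. Each call carries its arguments
  // as a JSON-encoded string under function.arguments.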
  if (message.contains("tool_calls") && message["tool_calls"].is_array()) {
    for (const auto& call : message["tool_calls"]) {
      if (call.contains("function")) {
        const auto& func = call["function"];
        agent_response.tool_calls.emplace_back();
        auto& tool_call = agent_response.tool_calls.back();
        tool_call.tool_name = func.value("name", "");

        if (func.contains("arguments") && func["arguments"].is_string()) {
          auto args_json = nlohmann::json::parse(
              func["arguments"].get<std::string>(), nullptr, false);
          if (!args_json.is_discarded() && args_json.is_object()) {
            for (auto& [key, value] : args_json.items()) {
              if (value.is_string()) {
                tool_call.args[key] = value.get<std::string>();
              } else if (value.is_number()) {
                tool_call.args[key] = std::to_string(value.get<double>());
              }
            }
          }
        }
      }
    }
  }
  if (agent_response.text_response.empty() && agent_response.commands.empty() &&
      agent_response.tool_calls.empty()) {
    return absl::InternalError(
        "❌ No valid response extracted from OpenAI\n"
        "   Expected at least one of: text_response, commands, or tool_calls");
  }

  return agent_response;
#endif
}

#endif  // YAZE_AI_RUNTIME_AVAILABLE