11#include "absl/strings/str_cat.h"
12#include "absl/strings/str_format.h"
13#include "absl/strings/str_split.h"
14#include "absl/strings/strip.h"
15#include "absl/time/clock.h"
16#include "absl/time/time.h"
21#include <TargetConditionals.h>
24#if defined(__APPLE__) && (TARGET_OS_IPHONE == 1 || TARGET_IPHONE_SIMULATOR == 1)
26#define YAZE_AI_IOS_URLSESSION 1
34#include "nlohmann/json.hpp"
40#ifdef YAZE_AI_RUNTIME_AVAILABLE
// Constructor (NOTE(review): the signature line and several statements are
// elided from this extraction — trailing artifact suggests
// AnthropicAIService(const AnthropicConfig&)). Copies the config, loads the
// version-specific prompt catalogue, and — if no system instruction was
// supplied — loads a prompt file from assets, falling back to
// BuildSystemInstruction().
43 : function_calling_enabled_(config.use_function_calling), config_(config) {
44 if (config_.verbose) {
45 std::cerr <<
"[DEBUG] Initializing Anthropic service..." << std::endl;
46 std::cerr <<
"[DEBUG] Model: " << config_.model << std::endl;
// Pick the resource catalogue by configured prompt version ("v2" vs default).
50 std::string catalogue_path = config_.prompt_version ==
"v2"
51 ?
"assets/agent/prompt_catalogue_v2.yaml"
52 :
"assets/agent/prompt_catalogue.yaml";
// Catalogue load failure is non-fatal: warn on stderr and continue.
53 if (
auto status = prompt_builder_.LoadResourceCatalogue(catalogue_path);
55 std::cerr <<
"⚠️ Failed to load agent prompt catalogue: "
56 << status.message() << std::endl;
// Only load a prompt file when the caller did not provide an instruction.
59 if (config_.system_instruction.empty()) {
61 std::string prompt_file;
// v3 → v2 → default prompt file selection.
62 if (config_.prompt_version ==
"v3") {
63 prompt_file =
"agent/system_prompt_v3.txt";
64 }
else if (config_.prompt_version ==
"v2") {
65 prompt_file =
"agent/system_prompt_v2.txt";
67 prompt_file =
"agent/system_prompt.txt";
// Resolve the asset path and slurp the whole file into the instruction.
70 auto prompt_path = util::PlatformPaths::FindAsset(prompt_file);
71 if (prompt_path.ok()) {
72 std::ifstream file(prompt_path->string());
74 std::stringstream buffer;
75 buffer << file.rdbuf();
76 config_.system_instruction = buffer.str();
77 if (config_.verbose) {
78 std::cerr <<
"[DEBUG] Loaded prompt: " << prompt_path->string()
// Last resort: build the instruction programmatically via the prompt builder.
84 if (config_.system_instruction.empty()) {
85 config_.system_instruction = BuildSystemInstruction();
89 if (config_.verbose) {
90 std::cerr <<
"[DEBUG] Anthropic service initialized" << std::endl;
94void AnthropicAIService::EnableFunctionCalling(
bool enable) {
95 function_calling_enabled_ = enable;
98std::vector<std::string> AnthropicAIService::GetAvailableTools()
const {
99 return {
"resource-list",
"resource-search",
100 "dungeon-list-sprites",
"dungeon-describe-room",
101 "overworld-find-tile",
"overworld-describe-map",
102 "overworld-list-warps"};
// Produces the JSON array of tool/function schemas to send to Anthropic.
// Prefers schemas generated by the prompt builder; falls back to the bundled
// agent/function_schemas.json asset. NOTE(review): several return statements,
// the try block opener, and the #else/#endif lines are elided from this
// extraction.
105std::string AnthropicAIService::BuildFunctionCallSchemas() {
106#ifndef YAZE_WITH_JSON
// Primary source: schemas built from the in-memory prompt catalogue.
109 std::string schemas = prompt_builder_.BuildFunctionCallSchemas();
// A non-empty, non-"[]" result wins; otherwise fall back to the asset file.
110 if (!schemas.empty() && schemas !=
"[]") {
114 auto schema_path_or =
115 util::PlatformPaths::FindAsset(
"agent/function_schemas.json");
117 if (!schema_path_or.ok()) {
121 std::ifstream file(schema_path_or->string());
122 if (!file.is_open()) {
// Parse and re-serialize so callers always receive canonical JSON text.
127 nlohmann::json schemas_json;
128 file >> schemas_json;
129 return schemas_json.dump();
130 }
catch (
const nlohmann::json::exception& e) {
// Malformed asset file: warn on stderr (elided lines presumably return "[]").
131 std::cerr <<
"⚠️ Failed to parse function schemas JSON: " << e.what()
138std::string AnthropicAIService::BuildSystemInstruction() {
139 return prompt_builder_.BuildSystemInstruction();
142void AnthropicAIService::SetRomContext(Rom* rom) {
143 prompt_builder_.SetRom(rom);
146absl::StatusOr<std::vector<ModelInfo>>
147AnthropicAIService::ListAvailableModels() {
150 std::vector<ModelInfo> defaults = {
151 {.name =
"claude-3-5-sonnet-20241022",
152 .display_name =
"Claude 3.5 Sonnet",
153 .provider =
"anthropic",
154 .description =
"Most intelligent model"},
155 {.name =
"claude-3-5-haiku-20241022",
156 .display_name =
"Claude 3.5 Haiku",
157 .provider =
"anthropic",
158 .description =
"Fastest and most cost-effective"},
159 {.name =
"claude-3-opus-20240229",
160 .display_name =
"Claude 3 Opus",
161 .provider =
"anthropic",
162 .description =
"Strong reasoning model"}};
// Reports whether the service is usable: requires JSON support compiled in
// (YAZE_WITH_JSON) and a non-empty API key at runtime. NOTE(review): the
// matching #else/#endif lines and closing braces are elided from this
// extraction.
166absl::Status AnthropicAIService::CheckAvailability() {
167#ifndef YAZE_WITH_JSON
168 return absl::UnimplementedError(
169 "Anthropic AI service requires JSON support. Build with "
170 "-DYAZE_WITH_JSON=ON");
// Runtime requirement: the key from config (ANTHROPIC_API_KEY per message).
172 if (config_.api_key.empty()) {
173 return absl::FailedPreconditionError(
174 "❌ Anthropic API key not configured\n"
175 " Set ANTHROPIC_API_KEY environment variable\n"
176 " Get your API key at: https://console.anthropic.com/");
178 return absl::OkStatus();
182absl::StatusOr<AgentResponse> AnthropicAIService::GenerateResponse(
183 const std::string& prompt) {
184 return GenerateResponse(
185 {{{agent::ChatMessage::Sender::kUser, prompt, absl::Now()}}});
// Sends the chat history to the Anthropic Messages API and returns the parsed
// AgentResponse. Transport is URLSession on iOS builds, otherwise a shelled-out
// curl command reading the request body from a temp file. NOTE(review): this
// extraction elides many lines (curl -H/-d arguments, `try {`, the read buffer
// declaration, closing braces); code below is kept byte-identical.
188absl::StatusOr<AgentResponse> AnthropicAIService::GenerateResponse(
189 const std::vector<agent::ChatMessage>& history) {
190#ifndef YAZE_WITH_JSON
191 return absl::UnimplementedError(
192 "Anthropic AI service requires JSON support. Build with "
193 "-DYAZE_WITH_JSON=ON");
// Preconditions: non-empty history and a configured API key.
195 if (history.empty()) {
196 return absl::InvalidArgumentError(
"History cannot be empty.");
199 if (config_.api_key.empty()) {
200 return absl::FailedPreconditionError(
"Anthropic API key not configured");
// Start the latency clock before any network work.
203 absl::Time request_start = absl::Now();
206 if (config_.verbose) {
207 std::cerr <<
"[DEBUG] Using curl for Anthropic HTTPS request"
// Build the messages array from at most the last 10 history entries.
212 nlohmann::json messages = nlohmann::json::array();
215 int start_idx = std::max(0,
static_cast<int>(history.size()) - 10);
216 for (
size_t i = start_idx; i < history.size(); ++i) {
217 const auto& msg = history[i];
218 std::string role = (msg.sender == agent::ChatMessage::Sender::kUser)
222 messages.push_back({{
"role", role}, {
"content", msg.message}});
// Anthropic request body: model, token budget, system prompt, messages.
226 nlohmann::json request_body = {{
"model", config_.model},
227 {
"max_tokens", config_.max_output_tokens},
228 {
"system", config_.system_instruction},
229 {
"messages", messages}};
// Optionally attach tool definitions converted from OpenAI-style schemas
// (unwrapping a "function" wrapper object when present).
232 if (function_calling_enabled_) {
234 std::string schemas_str = BuildFunctionCallSchemas();
235 if (config_.verbose) {
236 std::cerr <<
"[DEBUG] Function calling schemas: "
237 << schemas_str.substr(0, 200) <<
"..." << std::endl;
240 nlohmann::json schemas = nlohmann::json::parse(schemas_str);
242 if (schemas.is_array() && !schemas.empty()) {
244 nlohmann::json tools = nlohmann::json::array();
245 for (
const auto& schema : schemas) {
247 nlohmann::json tool_def;
250 nlohmann::json func_schema = schema;
251 if (schema.contains(
"function")) {
252 func_schema = schema[
"function"];
256 {
"name", func_schema.value(
"name",
"")},
257 {
"description", func_schema.value(
"description",
"")},
259 func_schema.value(
"parameters", nlohmann::json::object())}};
261 tools.push_back(tool_def);
263 request_body[
"tools"] = tools;
265 }
catch (
// Schema parse failure is non-fatal: request proceeds without tools.
const nlohmann::json::exception& e) {
266 std::cerr <<
"⚠️ Failed to parse function schemas: " << e.what()
271 if (config_.verbose) {
272 std::cerr <<
"[DEBUG] Sending " << messages.size()
273 <<
" messages to Anthropic" << std::endl;
// Transport 1: native URLSession on iOS (YAZE_AI_IOS_URLSESSION).
276 std::string response_str;
277#if defined(YAZE_AI_IOS_URLSESSION)
278 std::map<std::string, std::string> headers;
279 headers.emplace(
"x-api-key", config_.api_key);
280 headers.emplace(
"anthropic-version",
"2023-06-01");
281 headers.emplace(
"content-type",
"application/json");
282 auto resp_or = ios::UrlSessionHttpRequest(
283 "POST",
"https://api.anthropic.com/v1/messages", headers,
// 60-second timeout, in milliseconds.
284 request_body.dump(), 60000);
286 return resp_or.status();
288 if (resp_or->status_code != 200) {
289 return absl::InternalError(
290 absl::StrCat(
"Anthropic API error: ", resp_or->status_code,
"\n",
293 response_str = resp_or->body;
// Transport 2 (desktop): write body to a temp file and POST it via curl.
296 std::string temp_file =
"/tmp/anthropic_request.json";
297 std::ofstream out(temp_file);
298 out << request_body.dump();
302 std::string curl_cmd =
303 "curl -s -X POST 'https://api.anthropic.com/v1/messages' "
307 "-H 'anthropic-version: 2023-06-01' "
308 "-H 'content-type: application/json' "
312 if (config_.verbose) {
313 std::cerr <<
"[DEBUG] Executing Anthropic API request..." << std::endl;
// _popen/_pclose on Windows, popen/pclose elsewhere (elided #ifdef lines).
317 FILE* pipe = _popen(curl_cmd.c_str(),
"r");
319 FILE* pipe = popen(curl_cmd.c_str(),
"r");
322 return absl::InternalError(
"Failed to execute curl command");
// Accumulate curl stdout into response_str.
326 while (fgets(buffer,
sizeof(buffer), pipe) !=
nullptr) {
327 response_str += buffer;
331 int status = _pclose(pipe);
333 int status = pclose(pipe);
// Always clean up the temp request file before checking curl's exit status.
335 std::remove(temp_file.c_str());
338 return absl::InternalError(
339 absl::StrCat(
"Curl failed with status ", status));
343 if (response_str.empty()) {
344 return absl::InternalError(
"Empty response from Anthropic API");
// Verbose: dump the first 500 bytes of the raw response (dimmed via ANSI).
347 if (config_.verbose) {
350 <<
"🔍 Raw Anthropic API Response:"
353 <<
"\033[2m" << response_str.substr(0, 500) <<
"\033[0m"
357 if (config_.verbose) {
358 std::cerr <<
"[DEBUG] Parsing response..." << std::endl;
// Parse, then annotate the response with provider/model/latency/parameters.
361 auto parsed_or = ParseAnthropicResponse(response_str);
362 if (!parsed_or.ok()) {
363 return parsed_or.status();
366 AgentResponse agent_response = std::move(parsed_or.value());
367 agent_response.provider =
"anthropic";
368 agent_response.model = config_.model;
369 agent_response.latency_seconds =
370 absl::ToDoubleSeconds(absl::Now() - request_start);
371 agent_response.parameters[
"prompt_version"] = config_.prompt_version;
372 agent_response.parameters[
"temperature"] =
373 absl::StrFormat(
"%.2f", config_.temperature);
374 agent_response.parameters[
"max_output_tokens"] =
375 absl::StrFormat(
"%d", config_.max_output_tokens);
376 agent_response.parameters[
"function_calling"] =
377 function_calling_enabled_ ?
"true" :
"false";
379 return agent_response;
381 }
catch (
// Any exception during the whole flow is converted to InternalError.
const std::exception& e) {
382 if (config_.verbose) {
383 std::cerr <<
"[ERROR] Exception: " << e.what() << std::endl;
385 return absl::InternalError(
386 absl::StrCat(
"Exception during generation: ", e.what()));
// Parses a raw Anthropic Messages API response body into an AgentResponse:
// surfaces API errors, extracts "text" content blocks (including an embedded
// JSON protocol with "text_response"/"commands"), and converts "tool_use"
// blocks into tool calls. NOTE(review): this extraction elides some lines
// (e.g. the tool_call declaration near orig line 478 and closing braces);
// code below is kept byte-identical.
391absl::StatusOr<AgentResponse> AnthropicAIService::ParseAnthropicResponse(
392 const std::string& response_body) {
393#ifndef YAZE_WITH_JSON
394 return absl::UnimplementedError(
"JSON support required");
396 AgentResponse agent_response;
// Non-throwing parse (allow_exceptions=false); failure yields is_discarded().
398 auto response_json = nlohmann::json::parse(response_body,
nullptr,
false);
399 if (response_json.is_discarded()) {
400 return absl::InternalError(
"❌ Failed to parse Anthropic response JSON");
// API-level error object takes precedence over any content.
404 if (response_json.contains(
"error")) {
405 std::string error_msg =
406 response_json[
"error"].value(
"message",
"Unknown error");
407 return absl::InternalError(
408 absl::StrCat(
"❌ Anthropic API error: ", error_msg));
412 if (!response_json.contains(
"content") ||
413 !response_json[
"content"].is_array()) {
414 return absl::InternalError(
"❌ No content in Anthropic response");
// Walk each content block; dispatch on its "type".
417 for (
const auto& block : response_json[
"content"]) {
418 std::string type = block.value(
"type",
"");
420 if (type ==
"text") {
421 std::string text_content = block.value(
"text",
"");
423 if (config_.verbose) {
// Verbose: echo the raw text block, dimmed via ANSI escapes.
429 <<
"\033[2m" << text_content <<
"\033[0m"
// Strip surrounding whitespace and any ```json / ``` code fences so the
// text can be re-parsed as structured JSON.
437 std::string clean_text =
438 std::string(absl::StripAsciiWhitespace(text_content));
439 if (absl::StartsWith(clean_text,
"```json")) {
440 clean_text = clean_text.substr(7);
441 }
else if (absl::StartsWith(clean_text,
"```")) {
442 clean_text = clean_text.substr(3);
444 if (absl::EndsWith(clean_text,
"```")) {
445 clean_text = clean_text.substr(0, clean_text.length() - 3);
447 clean_text = std::string(absl::StripAsciiWhitespace(clean_text));
// If the text is itself JSON, pull out the structured protocol fields.
450 auto parsed_text = nlohmann::json::parse(clean_text,
nullptr,
false);
451 if (!parsed_text.is_discarded()) {
452 if (parsed_text.contains(
"text_response") &&
453 parsed_text[
"text_response"].is_string()) {
454 agent_response.text_response =
455 parsed_text[
"text_response"].get<std::string>();
457 if (parsed_text.contains(
"commands") &&
458 parsed_text[
"commands"].is_array()) {
459 for (
const auto& cmd : parsed_text[
"commands"]) {
460 if (cmd.is_string()) {
461 std::string command = cmd.get<std::string>();
// Drop a leading "z3ed " prefix so commands are stored bare.
462 if (absl::StartsWith(command,
"z3ed ")) {
463 command = command.substr(5);
465 agent_response.commands.push_back(command);
// Non-JSON (or JSON without text_response): keep the raw text, appending
// with a blank line if some text was already collected.
471 if (agent_response.text_response.empty()) {
472 agent_response.text_response = text_content;
474 agent_response.text_response +=
"\n\n" + text_content;
477 }
else if (type ==
// tool_use block: capture the tool name and stringify its input arguments.
"tool_use") {
479 tool_call.tool_name = block.value(
"name",
"");
481 if (block.contains(
"input") && block[
"input"].is_object()) {
482 for (
auto& [key, value] : block[
"input"].items()) {
483 if (value.is_string()) {
484 tool_call.args[
key] = value.get<std::string>();
485 }
else if (value.is_number()) {
// Numbers are coerced through double before stringifying.
486 tool_call.args[
key] = std::to_string(value.get<
double>());
487 }
else if (value.is_boolean()) {
488 tool_call.args[
key] = value.get<
bool>() ?
"true" :
"false";
492 agent_response.tool_calls.push_back(tool_call);
// A response with no text, no commands, and no tool calls is an error.
496 if (agent_response.text_response.empty() && agent_response.commands.empty() &&
497 agent_response.tool_calls.empty()) {
498 return absl::InternalError(
499 "❌ No valid response extracted from Anthropic\n"
500 " Expected text or tool use");
503 return agent_response;
AnthropicAIService(const AnthropicConfig &)