10#include "absl/strings/str_cat.h"
11#include "absl/strings/str_format.h"
12#include "absl/strings/str_split.h"
13#include "absl/strings/strip.h"
14#include "absl/time/clock.h"
15#include "absl/time/time.h"
24#include "nlohmann/json.hpp"
// NOTE(review): This chunk's formatting was mangled during extraction — the
// integers fused onto line starts are original source line numbers, statements
// are split mid-expression, and several original lines (including the
// constructor's signature and some closing braces) are missing from view.
// Comments below annotate intent only; the code bytes are left untouched.
30#ifdef YAZE_AI_RUNTIME_AVAILABLE
// Constructor initializer list: caches the service config and seeds the
// function-calling flag from config.use_function_calling.
33 : function_calling_enabled_(config.use_function_calling), config_(config) {
// Optional startup diagnostics gated on the verbose flag.
34 if (config_.verbose) {
35 std::cerr <<
"[DEBUG] Initializing Anthropic service..." << std::endl;
36 std::cerr <<
"[DEBUG] Model: " << config_.model << std::endl;
// Pick the resource catalogue matching the configured prompt version
// ("v2" gets the v2 YAML, anything else the default catalogue).
40 std::string catalogue_path = config_.prompt_version ==
"v2"
41 ?
"assets/agent/prompt_catalogue_v2.yaml"
42 :
"assets/agent/prompt_catalogue.yaml";
// Catalogue load failure is non-fatal: only a warning is logged.
43 if (
auto status = prompt_builder_.LoadResourceCatalogue(catalogue_path);
45 std::cerr <<
"⚠️ Failed to load agent prompt catalogue: "
46 << status.message() << std::endl;
// If the caller did not supply a system instruction, load one from the
// version-matched prompt asset ("v3" / "v2" / default).
49 if (config_.system_instruction.empty()) {
51 std::string prompt_file;
52 if (config_.prompt_version ==
"v3") {
53 prompt_file =
"agent/system_prompt_v3.txt";
54 }
else if (config_.prompt_version ==
"v2") {
55 prompt_file =
"agent/system_prompt_v2.txt";
// NOTE(review): the else-branch header for the default case is on a line
// missing from this chunk (original line 56) — presumed plain `} else {`.
57 prompt_file =
"agent/system_prompt.txt";
// Resolve the asset path and slurp the whole prompt file into the config.
60 auto prompt_path = util::PlatformPaths::FindAsset(prompt_file);
61 if (prompt_path.ok()) {
62 std::ifstream file(prompt_path->string());
64 std::stringstream buffer;
65 buffer << file.rdbuf();
66 config_.system_instruction = buffer.str();
67 if (config_.verbose) {
68 std::cerr <<
"[DEBUG] Loaded prompt: " << prompt_path->string()
// Final fallback: build the system instruction programmatically when no
// prompt file could be loaded.
74 if (config_.system_instruction.empty()) {
75 config_.system_instruction = BuildSystemInstruction();
79 if (config_.verbose) {
80 std::cerr <<
"[DEBUG] Anthropic service initialized" << std::endl;
84void AnthropicAIService::EnableFunctionCalling(
bool enable) {
85 function_calling_enabled_ = enable;
88std::vector<std::string> AnthropicAIService::GetAvailableTools()
const {
89 return {
"resource-list",
"resource-search",
90 "dungeon-list-sprites",
"dungeon-describe-room",
91 "overworld-find-tile",
"overworld-describe-map",
92 "overworld-list-warps"};
// Builds the JSON tool-schema string sent to the API. Prefers schemas from
// the prompt builder; otherwise loads agent/function_schemas.json from assets.
// NOTE(review): several original lines (96-98, 101-103, 106, 108-116, 122-127)
// are missing from this chunk — the early-return and failure-fallback bodies
// are not visible here, so only the visible statements are annotated.
95std::string AnthropicAIService::BuildFunctionCallSchemas() {
// First choice: schemas produced by the prompt builder.
99 std::string schemas = prompt_builder_.BuildFunctionCallSchemas();
// "[]" is treated the same as empty — i.e. no usable schemas.
100 if (!schemas.empty() && schemas !=
"[]") {
// Fallback: locate the bundled schema asset on disk.
104 auto schema_path_or =
105 util::PlatformPaths::FindAsset(
"agent/function_schemas.json");
107 if (!schema_path_or.ok()) {
111 std::ifstream file(schema_path_or->string());
112 if (!file.is_open()) {
// Parse the asset file and re-serialize it to a compact JSON string.
117 nlohmann::json schemas_json;
118 file >> schemas_json;
119 return schemas_json.dump();
120 }
// Malformed asset JSON is logged as a warning (return on a missing line).
catch (
const nlohmann::json::exception& e) {
121 std::cerr <<
"⚠️ Failed to parse function schemas JSON: " << e.what()
128std::string AnthropicAIService::BuildSystemInstruction() {
129 return prompt_builder_.BuildSystemInstruction();
132void AnthropicAIService::SetRomContext(Rom* rom) {
133 prompt_builder_.SetRom(rom);
136absl::StatusOr<std::vector<ModelInfo>>
137AnthropicAIService::ListAvailableModels() {
140 std::vector<ModelInfo> defaults = {
141 {.name =
"claude-3-5-sonnet-20241022",
142 .display_name =
"Claude 3.5 Sonnet",
143 .provider =
"anthropic",
144 .description =
"Most intelligent model"},
145 {.name =
"claude-3-5-haiku-20241022",
146 .display_name =
"Claude 3.5 Haiku",
147 .provider =
"anthropic",
148 .description =
"Fastest and most cost-effective"},
149 {.name =
"claude-3-opus-20240229",
150 .display_name =
"Claude 3 Opus",
151 .provider =
"anthropic",
152 .description =
"Strong reasoning model"}};
156absl::Status AnthropicAIService::CheckAvailability() {
157#ifndef YAZE_WITH_JSON
158 return absl::UnimplementedError(
159 "Anthropic AI service requires JSON support. Build with "
160 "-DYAZE_WITH_JSON=ON");
162 if (config_.api_key.empty()) {
163 return absl::FailedPreconditionError(
164 "❌ Anthropic API key not configured\n"
165 " Set ANTHROPIC_API_KEY environment variable\n"
166 " Get your API key at: https://console.anthropic.com/");
168 return absl::OkStatus();
172absl::StatusOr<AgentResponse> AnthropicAIService::GenerateResponse(
173 const std::string& prompt) {
174 return GenerateResponse(
175 {{{agent::ChatMessage::Sender::kUser, prompt, absl::Now()}}});
// Sends the (truncated) chat history to the Anthropic Messages API via a
// shelled-out curl command and parses the reply into an AgentResponse.
// NOTE(review): many original lines are missing from this chunk (e.g. the
// curl header lines 275-277/280-282 that presumably carry the x-api-key and
// the request-body @file argument, the read-buffer declaration at 297, and
// assorted braces). Annotations below cover only what is visible.
178absl::StatusOr<AgentResponse> AnthropicAIService::GenerateResponse(
179 const std::vector<agent::ChatMessage>& history) {
// Compile-time guard: without JSON support this endpoint is unimplemented.
180#ifndef YAZE_WITH_JSON
181 return absl::UnimplementedError(
182 "Anthropic AI service requires JSON support. Build with "
183 "-DYAZE_WITH_JSON=ON");
// Input validation: non-empty history and a configured API key are required.
185 if (history.empty()) {
186 return absl::InvalidArgumentError(
"History cannot be empty.");
189 if (config_.api_key.empty()) {
190 return absl::FailedPreconditionError(
"Anthropic API key not configured");
// Start the latency clock for the whole request/parse round trip.
193 absl::Time request_start = absl::Now();
196 if (config_.verbose) {
197 std::cerr <<
"[DEBUG] Using curl for Anthropic HTTPS request"
// Build the messages array from at most the last 10 history entries.
202 nlohmann::json messages = nlohmann::json::array();
205 int start_idx = std::max(0,
static_cast<int>(history.size()) - 10);
// NOTE(review): loop index is size_t initialized from int start_idx —
// signed/unsigned mix, harmless here since start_idx >= 0.
206 for (
size_t i = start_idx; i < history.size(); ++i) {
207 const auto& msg = history[i];
// Role mapping (the "assistant" branch is on a line missing from view).
208 std::string role = (msg.sender == agent::ChatMessage::Sender::kUser)
212 messages.push_back({{
"role", role}, {
"content", msg.message}});
// Assemble the Messages API request body.
216 nlohmann::json request_body = {{
"model", config_.model},
217 {
"max_tokens", config_.max_output_tokens},
218 {
"system", config_.system_instruction},
219 {
"messages", messages}};
// Optionally attach tool definitions when function calling is enabled.
222 if (function_calling_enabled_) {
224 std::string schemas_str = BuildFunctionCallSchemas();
225 if (config_.verbose) {
226 std::cerr <<
"[DEBUG] Function calling schemas: "
227 << schemas_str.substr(0, 200) <<
"..." << std::endl;
230 nlohmann::json schemas = nlohmann::json::parse(schemas_str);
232 if (schemas.is_array() && !schemas.empty()) {
234 nlohmann::json tools = nlohmann::json::array();
235 for (
const auto& schema : schemas) {
237 nlohmann::json tool_def;
// Accept both bare schemas and OpenAI-style {"function": {...}} wrappers.
240 nlohmann::json func_schema = schema;
241 if (schema.contains(
"function")) {
242 func_schema = schema[
"function"];
// Anthropic tool definition: name, description, and an input schema
// (defaulting each field when absent).
246 {
"name", func_schema.value(
"name",
"")},
247 {
"description", func_schema.value(
"description",
"")},
249 func_schema.value(
"parameters", nlohmann::json::object())}};
251 tools.push_back(tool_def);
253 request_body[
"tools"] = tools;
255 }
// Schema parse failure is non-fatal: log and continue without tools.
catch (
const nlohmann::json::exception& e) {
256 std::cerr <<
"⚠️ Failed to parse function schemas: " << e.what()
261 if (config_.verbose) {
262 std::cerr <<
"[DEBUG] Sending " << messages.size()
263 <<
" messages to Anthropic" << std::endl;
// Write the request body to a temp file that curl will read.
// NOTE(review): fixed, predictable path — a symlink/race hazard on multi-user
// systems, and /tmp does not exist on the Windows (_popen) build. Prefer a
// unique temp file from the platform temp directory.
267 std::string temp_file =
"/tmp/anthropic_request.json";
268 std::ofstream out(temp_file);
269 out << request_body.dump();
// Assemble the curl command. The missing header lines presumably include the
// x-api-key header — if so, the key is exposed on the process command line;
// consider curl's --header @file or a config file instead. TODO confirm.
273 std::string curl_cmd =
274 "curl -s -X POST 'https://api.anthropic.com/v1/messages' "
278 "-H 'anthropic-version: 2023-06-01' "
279 "-H 'content-type: application/json' "
283 if (config_.verbose) {
284 std::cerr <<
"[DEBUG] Executing Anthropic API request..." << std::endl;
// Platform-specific popen spelling (Windows vs POSIX).
288 FILE* pipe = _popen(curl_cmd.c_str(),
"r");
290 FILE* pipe = popen(curl_cmd.c_str(),
"r");
293 return absl::InternalError(
"Failed to execute curl command");
// Drain curl's stdout into response_str (buffer declared on a missing line).
296 std::string response_str;
298 while (fgets(buffer,
sizeof(buffer), pipe) !=
nullptr) {
299 response_str += buffer;
// Close the pipe (platform-specific) and delete the request temp file.
303 int status = _pclose(pipe);
305 int status = pclose(pipe);
307 std::remove(temp_file.c_str());
310 return absl::InternalError(
311 absl::StrCat(
"Curl failed with status ", status));
314 if (response_str.empty()) {
315 return absl::InternalError(
"Empty response from Anthropic API");
// Verbose mode: dump (dimmed via ANSI escapes) the first 500 bytes of the
// raw API response.
318 if (config_.verbose) {
321 <<
"🔍 Raw Anthropic API Response:"
324 <<
"\033[2m" << response_str.substr(0, 500) <<
"\033[0m"
328 if (config_.verbose) {
329 std::cerr <<
"[DEBUG] Parsing response..." << std::endl;
// Delegate response parsing, then decorate the result with metadata:
// provider/model, measured latency, and the request parameters used.
332 auto parsed_or = ParseAnthropicResponse(response_str);
333 if (!parsed_or.ok()) {
334 return parsed_or.status();
337 AgentResponse agent_response = std::move(parsed_or.value());
338 agent_response.provider =
"anthropic";
339 agent_response.model = config_.model;
340 agent_response.latency_seconds =
341 absl::ToDoubleSeconds(absl::Now() - request_start);
342 agent_response.parameters[
"prompt_version"] = config_.prompt_version;
343 agent_response.parameters[
"temperature"] =
344 absl::StrFormat(
"%.2f", config_.temperature);
345 agent_response.parameters[
"max_output_tokens"] =
346 absl::StrFormat(
"%d", config_.max_output_tokens);
347 agent_response.parameters[
"function_calling"] =
348 function_calling_enabled_ ?
"true" :
"false";
350 return agent_response;
352 }
// Catch-all: surface any exception as an InternalError.
catch (
const std::exception& e) {
353 if (config_.verbose) {
354 std::cerr <<
"[ERROR] Exception: " << e.what() << std::endl;
356 return absl::InternalError(
357 absl::StrCat(
"Exception during generation: ", e.what()));
// Parses a raw Anthropic Messages API response body into an AgentResponse:
// extracts text blocks (optionally unwrapping an embedded JSON payload with
// "text_response"/"commands") and tool_use blocks (as ToolCall entries).
// NOTE(review): lines are missing from this chunk (e.g. the tool_call
// declaration around original line 449 and various braces); annotations cover
// only the visible statements.
362absl::StatusOr<AgentResponse> AnthropicAIService::ParseAnthropicResponse(
363 const std::string& response_body) {
364#ifndef YAZE_WITH_JSON
365 return absl::UnimplementedError(
"JSON support required");
367 AgentResponse agent_response;
// Non-throwing parse (callback=nullptr, allow_exceptions=false): failures
// yield a discarded value instead of an exception.
369 auto response_json = nlohmann::json::parse(response_body,
nullptr,
false);
370 if (response_json.is_discarded()) {
371 return absl::InternalError(
"❌ Failed to parse Anthropic response JSON");
// API-level errors arrive as an "error" object with a "message" field.
375 if (response_json.contains(
"error")) {
376 std::string error_msg =
377 response_json[
"error"].value(
"message",
"Unknown error");
378 return absl::InternalError(
379 absl::StrCat(
"❌ Anthropic API error: ", error_msg));
// A successful reply must carry a "content" array of typed blocks.
383 if (!response_json.contains(
"content") ||
384 !response_json[
"content"].is_array()) {
385 return absl::InternalError(
"❌ No content in Anthropic response");
// Walk every content block, dispatching on its "type".
388 for (
const auto& block : response_json[
"content"]) {
389 std::string type = block.value(
"type",
"");
391 if (type ==
"text") {
392 std::string text_content = block.value(
"text",
"");
// Verbose mode: echo the raw text block (dimmed via ANSI escapes).
394 if (config_.verbose) {
400 <<
"\033[2m" << text_content <<
"\033[0m"
// Strip surrounding whitespace and any markdown code fence so an embedded
// JSON payload can be parsed ("```json" prefix = 7 chars, "```" = 3).
408 std::string clean_text =
409 std::string(absl::StripAsciiWhitespace(text_content));
410 if (absl::StartsWith(clean_text,
"```json")) {
411 clean_text = clean_text.substr(7);
412 }
else if (absl::StartsWith(clean_text,
"```")) {
413 clean_text = clean_text.substr(3);
415 if (absl::EndsWith(clean_text,
"```")) {
416 clean_text = clean_text.substr(0, clean_text.length() - 3);
418 clean_text = std::string(absl::StripAsciiWhitespace(clean_text));
// Try to interpret the cleaned text as a structured JSON payload.
421 auto parsed_text = nlohmann::json::parse(clean_text,
nullptr,
false);
422 if (!parsed_text.is_discarded()) {
423 if (parsed_text.contains(
"text_response") &&
424 parsed_text[
"text_response"].is_string()) {
425 agent_response.text_response =
426 parsed_text[
"text_response"].get<std::string>();
// Collect string commands, dropping a leading "z3ed " prefix (5 chars)
// so commands are stored in bare form.
428 if (parsed_text.contains(
"commands") &&
429 parsed_text[
"commands"].is_array()) {
430 for (
const auto& cmd : parsed_text[
"commands"]) {
431 if (cmd.is_string()) {
432 std::string command = cmd.get<std::string>();
433 if (absl::StartsWith(command,
"z3ed ")) {
434 command = command.substr(5);
436 agent_response.commands.push_back(command);
// Fallback: no structured payload — use (or append) the raw block text.
442 if (agent_response.text_response.empty()) {
443 agent_response.text_response = text_content;
445 agent_response.text_response +=
"\n\n" + text_content;
448 }
// tool_use blocks: record the tool name and stringify its input arguments.
// (tool_call's declaration is on a line missing from this chunk.)
else if (type ==
"tool_use") {
450 tool_call.tool_name = block.value(
"name",
"");
452 if (block.contains(
"input") && block[
"input"].is_object()) {
453 for (
auto& [key, value] : block[
"input"].items()) {
// Arguments are normalized to strings: numbers via to_string(double),
// booleans as "true"/"false".
454 if (value.is_string()) {
455 tool_call.args[
key] = value.get<std::string>();
456 }
else if (value.is_number()) {
457 tool_call.args[
key] = std::to_string(value.get<
double>());
458 }
else if (value.is_boolean()) {
459 tool_call.args[
key] = value.get<
bool>() ?
"true" :
"false";
463 agent_response.tool_calls.push_back(tool_call);
// Final guard: something usable (text, commands, or tool calls) must have
// been extracted.
467 if (agent_response.text_response.empty() && agent_response.commands.empty() &&
468 agent_response.tool_calls.empty()) {
469 return absl::InternalError(
470 "❌ No valid response extracted from Anthropic\n"
471 " Expected text or tool use");
474 return agent_response;
AnthropicAIService(const AnthropicConfig &)