yaze 0.3.2
Link to the Past ROM Editor
 
service_factory.cc
#include <cstdlib>
#include <cstring>
#include <iostream>

#include "absl/flags/declare.h"
#include "absl/flags/flag.h"
#include "absl/strings/ascii.h"
#include "absl/strings/str_format.h"

#ifdef YAZE_WITH_JSON
#endif

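// These flags are defined elsewhere in the yaze CLI; ABSL_DECLARE_FLAG only
// declares them so this translation unit can read their values via
// absl::GetFlag().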
ABSL_DECLARE_FLAG(std::string, ai_provider);
ABSL_DECLARE_FLAG(std::string, ai_model);
ABSL_DECLARE_FLAG(std::string, gemini_api_key);
ABSL_DECLARE_FLAG(std::string, anthropic_api_key);
ABSL_DECLARE_FLAG(std::string, ollama_host);
ABSL_DECLARE_FLAG(std::string, prompt_version);
ABSL_DECLARE_FLAG(bool, use_function_calling);

namespace yaze {
namespace cli {

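// Flag-driven overload: reads provider, model, and credentials from the flags
// declared above, falls back to environment variables for anything left
// unset, then defers to the config-based overload below.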
std::unique_ptr<AIService> CreateAIService() {
  // Read configuration from flags
  AIServiceConfig config;
  config.provider = absl::AsciiStrToLower(absl::GetFlag(FLAGS_ai_provider));
  config.model = absl::GetFlag(FLAGS_ai_model);
  config.gemini_api_key = absl::GetFlag(FLAGS_gemini_api_key);
  config.anthropic_api_key = absl::GetFlag(FLAGS_anthropic_api_key);
  config.ollama_host = absl::GetFlag(FLAGS_ollama_host);

  // Fall back to environment variables if flags not set
  if (config.gemini_api_key.empty()) {
    const char* env_key = std::getenv("GEMINI_API_KEY");
    if (env_key)
      config.gemini_api_key = env_key;
  }
  if (config.anthropic_api_key.empty()) {
    const char* env_key = std::getenv("ANTHROPIC_API_KEY");
    if (env_key)
      config.anthropic_api_key = env_key;
  }
  if (config.openai_api_key.empty()) {
    const char* openai_key = std::getenv("OPENAI_API_KEY");
    if (openai_key) {
      config.openai_api_key = openai_key;
    }
  }
  if (config.model.empty()) {
    const char* env_model = std::getenv("OLLAMA_MODEL");
    if (env_model)
      config.model = env_model;
  }

  return CreateAIService(config);
}

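// Lenient factory: resolves an empty or "auto" provider by probing for API
// keys (Gemini, then Anthropic, then OpenAI when built with YAZE_WITH_JSON),
// and falls back to MockAIService when nothing is configured or construction
// fails.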
std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config) {
  AIServiceConfig effective_config = config;
  if (effective_config.provider.empty()) {
    effective_config.provider = "auto";
  }

  if (effective_config.provider == "auto") {
#ifdef YAZE_WITH_JSON
    if (!effective_config.gemini_api_key.empty()) {
      std::cout << "🤖 Auto-detecting AI provider...\n";
      std::cout << " Found Gemini API key, using Gemini\n";
      effective_config.provider = "gemini";
    } else if (!effective_config.anthropic_api_key.empty()) {
      std::cout << "🤖 Auto-detecting AI provider...\n";
      std::cout << " Found Anthropic API key, using Anthropic\n";
      effective_config.provider = "anthropic";
    } else if (!effective_config.openai_api_key.empty()) {
      std::cout << "🤖 Auto-detecting AI provider...\n";
      std::cout << " Found OpenAI API key, using OpenAI\n";
      effective_config.provider = "openai";
      if (effective_config.model.empty()) {
        effective_config.model = "gpt-4o-mini";
      }
    } else
#endif
    {
      std::cout << "🤖 No AI provider configured, using MockAIService\n";
      std::cout << " Tip: Set GEMINI_API_KEY, ANTHROPIC_API_KEY, or "
                   "OPENAI_API_KEY\n";
      effective_config.provider = "mock";
    }
  }

  if (effective_config.provider != "mock") {
    std::cout << "🤖 AI Provider: " << effective_config.provider << "\n";
  }

  auto service_or = CreateAIServiceStrict(effective_config);
  if (service_or.ok()) {
    return std::move(service_or.value());
  }

  std::cerr << "⚠️ " << service_or.status().message() << std::endl;
  std::cerr << " Falling back to MockAIService" << std::endl;
  return std::make_unique<MockAIService>();
}

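// Strict factory: requires an explicit provider name and returns an error
// status instead of silently falling back to MockAIService.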
absl::StatusOr<std::unique_ptr<AIService>> CreateAIServiceStrict(
    const AIServiceConfig& config) {
  std::string provider = absl::AsciiStrToLower(config.provider);
  if (provider.empty() || provider == "auto") {
    return absl::InvalidArgumentError(
        "CreateAIServiceStrict requires an explicit provider (not 'auto')");
  }

  if (provider == "mock") {
    return std::make_unique<MockAIService>();
  }

  if (provider == "ollama") {
    OllamaConfig ollama_config;
    ollama_config.base_url = config.ollama_host;
    if (!config.model.empty()) {
      ollama_config.model = config.model;
    } else if (const char* env_model = std::getenv("OLLAMA_MODEL")) {
      ollama_config.model = env_model;
    }

    return std::make_unique<OllamaAIService>(ollama_config);
  }

  if (provider == "gemini-cli" || provider == "local-gemini") {
    return std::make_unique<LocalGeminiCliService>(
        config.model.empty() ? "gemini-2.5-flash" : config.model);
  }

#ifdef YAZE_WITH_JSON
  if (provider == "gemini") {
    if (config.gemini_api_key.empty()) {
      return absl::FailedPreconditionError(
          "Gemini API key not provided. Set --gemini_api_key or "
          "GEMINI_API_KEY.");
    }
    GeminiConfig gemini_config(config.gemini_api_key);
    if (!config.model.empty()) {
      gemini_config.model = config.model;
    }
    gemini_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
    gemini_config.use_function_calling =
        absl::GetFlag(FLAGS_use_function_calling);
    gemini_config.verbose = config.verbose;
    return std::make_unique<GeminiAIService>(gemini_config);
  }
  if (provider == "anthropic") {
    if (config.anthropic_api_key.empty()) {
      return absl::FailedPreconditionError(
          "Anthropic API key not provided. Set --anthropic_api_key or "
          "ANTHROPIC_API_KEY.");
    }
    AnthropicConfig anthropic_config(config.anthropic_api_key);
    if (!config.model.empty()) {
      anthropic_config.model = config.model;
    }
    anthropic_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
    anthropic_config.use_function_calling =
        absl::GetFlag(FLAGS_use_function_calling);
    anthropic_config.verbose = config.verbose;
    return std::make_unique<AnthropicAIService>(anthropic_config);
  }
  if (provider == "openai") {
    if (config.openai_api_key.empty()) {
      return absl::FailedPreconditionError(
          "OpenAI API key not provided. Set OPENAI_API_KEY.");
    }
    OpenAIConfig openai_config(config.openai_api_key);
    if (!config.model.empty()) {
      openai_config.model = config.model;
    }
    openai_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
    openai_config.use_function_calling =
        absl::GetFlag(FLAGS_use_function_calling);
    openai_config.verbose = config.verbose;
    return std::make_unique<OpenAIAIService>(openai_config);
  }
#else
  if (provider == "gemini" || provider == "anthropic" ||
      provider == "openai") {
    return absl::FailedPreconditionError(
        "AI support not available: rebuild with YAZE_WITH_JSON=ON");
  }
#endif

  return absl::InvalidArgumentError(
      absl::StrFormat("Unknown AI provider: %s", config.provider));
}

}  // namespace cli
}  // namespace yaze
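
Below is a minimal usage sketch of the two factory entry points, assuming a hypothetical caller that links against the yaze CLI targets defining the flags and service types above. The header paths and namespace qualification are assumptions (they are not shown in this listing), and the Ollama host value is purely illustrative.

// Hypothetical caller -- illustration only, not part of service_factory.cc.
#include <iostream>
#include <utility>

#include "absl/flags/parse.h"
// In a real build, the yaze CLI service headers are included here; their
// paths are not shown in this listing.

int main(int argc, char** argv) {
  absl::ParseCommandLine(argc, argv);  // populates --ai_provider, --ai_model, ...

  // Lenient path: resolves "auto" and falls back to MockAIService on failure.
  auto ai = yaze::cli::CreateAIService();

  // Strict path: the caller names the provider and handles the error itself.
  yaze::cli::AIServiceConfig config;
  config.provider = "ollama";
  config.ollama_host = "http://localhost:11434";  // illustrative host value
  auto strict = yaze::cli::CreateAIServiceStrict(config);
  if (!strict.ok()) {
    std::cerr << strict.status().message() << "\n";
    return 1;
  }
  auto ollama = std::move(strict.value());
  return 0;
}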