yaze 0.3.2
Link to the Past ROM Editor
 
Loading...
Searching...
No Matches
service_factory.cc
Go to the documentation of this file.
2
3#include <cstdlib>
4#include <cstring>
5#include <iostream>
6
7#include "absl/flags/declare.h"
8#include "absl/flags/flag.h"
9#include "absl/strings/ascii.h"
10#include "absl/strings/str_format.h"
15
16#ifdef YAZE_WITH_JSON
20#endif
21
22namespace {
23
24constexpr char kDefaultOpenAiBaseUrl[] = "https://api.openai.com";
25constexpr char kDefaultOllamaHost[] = "http://localhost:11434";
26
27std::string NormalizeProviderAlias(std::string provider) {
28 provider = absl::AsciiStrToLower(provider);
29 if (provider == "claude" || provider == "anthropic-claude" ||
30 provider == "sonnet" || provider == "opus") {
31 return "anthropic";
32 }
33 if (provider == "chatgpt" || provider == "gpt" || provider == "lmstudio" ||
34 provider == "lm-studio" || provider == "custom-openai" ||
35 provider == "openai-compatible") {
36 return "openai";
37 }
38 if (provider == "google" || provider == "google-gemini") {
39 return "gemini";
40 }
41 return provider;
42}
43
45 if (config.ollama_host != kDefaultOllamaHost) {
46 return true;
47 }
48 const char* env_ollama_host = std::getenv("OLLAMA_HOST");
49 if (env_ollama_host && *env_ollama_host) {
50 return true;
51 }
52 const char* env_ollama_model = std::getenv("OLLAMA_MODEL");
53 return env_ollama_model && *env_ollama_model;
54}
55
56} // namespace
57
58ABSL_DECLARE_FLAG(std::string, ai_provider);
59ABSL_DECLARE_FLAG(std::string, ai_model);
60ABSL_DECLARE_FLAG(std::string, gemini_api_key);
61ABSL_DECLARE_FLAG(std::string, anthropic_api_key);
62ABSL_DECLARE_FLAG(std::string, ollama_host);
63ABSL_DECLARE_FLAG(std::string, openai_base_url);
64ABSL_DECLARE_FLAG(std::string, prompt_version);
65ABSL_DECLARE_FLAG(bool, use_function_calling);
66
67namespace yaze {
68namespace cli {
69
70std::unique_ptr<AIService> CreateAIService() {
71 // Read configuration from flags
72 AIServiceConfig config;
73 config.provider = NormalizeProviderAlias(absl::GetFlag(FLAGS_ai_provider));
74 config.model = absl::GetFlag(FLAGS_ai_model);
75 config.gemini_api_key = absl::GetFlag(FLAGS_gemini_api_key);
76 config.anthropic_api_key = absl::GetFlag(FLAGS_anthropic_api_key);
77 config.ollama_host = absl::GetFlag(FLAGS_ollama_host);
78 config.openai_base_url = absl::GetFlag(FLAGS_openai_base_url);
79
80 // Fall back to environment variables if flags not set
81 if (config.gemini_api_key.empty()) {
82 const char* env_key = std::getenv("GEMINI_API_KEY");
83 if (env_key)
84 config.gemini_api_key = env_key;
85 }
86 if (config.anthropic_api_key.empty()) {
87 const char* env_key = std::getenv("ANTHROPIC_API_KEY");
88 if (env_key)
89 config.anthropic_api_key = env_key;
90 }
91 if (config.openai_api_key.empty()) {
92 const char* openai_key = std::getenv("OPENAI_API_KEY");
93 if (openai_key) {
94 config.openai_api_key = openai_key;
95 }
96 }
97 if (config.openai_base_url.empty() ||
98 config.openai_base_url == kDefaultOpenAiBaseUrl) {
99 const char* env_openai_base = std::getenv("OPENAI_BASE_URL");
100 if (!env_openai_base || !*env_openai_base) {
101 env_openai_base = std::getenv("OPENAI_API_BASE");
102 }
103 if (env_openai_base && *env_openai_base) {
104 config.openai_base_url = env_openai_base;
105 }
106 }
107 if (config.ollama_host.empty() || config.ollama_host == kDefaultOllamaHost) {
108 const char* env_ollama_host = std::getenv("OLLAMA_HOST");
109 if (env_ollama_host && *env_ollama_host) {
110 config.ollama_host = env_ollama_host;
111 }
112 }
113 if (config.model.empty()) {
114 const char* env_model = std::getenv("OLLAMA_MODEL");
115 if (env_model)
116 config.model = env_model;
117 }
119
120 return CreateAIService(config);
121}
122
// Builds an AIService from `config`, resolving provider "auto" by probing
// the available credentials. This overload never fails: if strict
// construction errors out, the error is logged and a MockAIService is
// returned instead.
std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config) {
  // Work on a copy so alias/URL normalization never mutates the caller's
  // config.
  AIServiceConfig effective_config = config;
  effective_config.provider = NormalizeProviderAlias(effective_config.provider);
  effective_config.openai_base_url =
      NormalizeOpenAiBaseUrl(effective_config.openai_base_url);
  if (effective_config.provider.empty()) {
    effective_config.provider = "auto";
  }

  if (effective_config.provider == "auto") {
#ifdef YAZE_WITH_JSON
    // Detection priority: Gemini key > Anthropic key > OpenAI key >
    // non-default OpenAI-compatible base URL (e.g. a local LMStudio server).
    if (!effective_config.gemini_api_key.empty()) {
      std::cout << "🤖 Auto-detecting AI provider...\n";
      std::cout << " Found Gemini API key, using Gemini\n";
      effective_config.provider = "gemini";
    } else if (!effective_config.anthropic_api_key.empty()) {
      std::cout << "🤖 Auto-detecting AI provider...\n";
      std::cout << " Found Anthropic API key, using Anthropic\n";
      effective_config.provider = "anthropic";
    } else if (!effective_config.openai_api_key.empty()) {
      std::cout << "🤖 Auto-detecting AI provider...\n";
      std::cout << " Found OpenAI API key, using OpenAI\n";
      effective_config.provider = "openai";
      if (effective_config.model.empty()) {
        effective_config.model = "gpt-4o-mini";
      }
    } else if (effective_config.openai_base_url != kDefaultOpenAiBaseUrl) {
      std::cout << "🤖 Auto-detecting AI provider...\n";
      std::cout << " Found OpenAI-compatible base URL, using OpenAI\n";
      if (effective_config.model.empty()) {
        std::cout << " Tip: Set --ai_model for local servers\n";
      }
      effective_config.provider = "openai";
    } else
#endif
    // Ollama detection works even without JSON support; when YAZE_WITH_JSON
    // is defined this `if` is the tail of the else-if chain above.
    if (HasOllamaHint(effective_config)) {
      std::cout << "🤖 Auto-detecting AI provider...\n";
      std::cout << " Found Ollama configuration, using Ollama\n";
      effective_config.provider = "ollama";
    } else {
      std::cout << "🤖 No AI provider configured, using MockAIService\n";
      std::cout
          << " Tip: Set GEMINI_API_KEY, ANTHROPIC_API_KEY, OPENAI_API_KEY,"
             " OPENAI_BASE_URL, or OLLAMA_HOST/OLLAMA_MODEL\n";
      effective_config.provider = "mock";
    }
  }

  if (effective_config.provider != "mock") {
    std::cout << "🤖 AI Provider: " << effective_config.provider << "\n";
  }

  // Delegate to the strict factory; on failure, degrade to the mock
  // implementation so callers always receive a usable service.
  auto service_or = CreateAIServiceStrict(effective_config);
  if (service_or.ok()) {
    return std::move(service_or.value());
  }

  std::cerr << "⚠️ " << service_or.status().message() << std::endl;
  std::cerr << " Falling back to MockAIService" << std::endl;
  return std::make_unique<MockAIService>();
}
184
185absl::StatusOr<std::unique_ptr<AIService>> CreateAIServiceStrict(
186 const AIServiceConfig& config) {
187 std::string provider = NormalizeProviderAlias(config.provider);
188 if (provider.empty() || provider == "auto") {
189 return absl::InvalidArgumentError(
190 "CreateAIServiceStrict requires an explicit provider (not 'auto')");
191 }
192
193 if (provider == "mock") {
194 return std::make_unique<MockAIService>();
195 }
196
197 if (provider == "ollama") {
198 OllamaConfig ollama_config;
199 ollama_config.base_url = config.ollama_host;
200 if (!config.model.empty()) {
201 ollama_config.model = config.model;
202 } else if (const char* env_model = std::getenv("OLLAMA_MODEL")) {
203 ollama_config.model = env_model;
204 }
205
206 return std::make_unique<OllamaAIService>(ollama_config);
207 }
208
209 if (provider == "gemini-cli" || provider == "local-gemini") {
210 return std::make_unique<LocalGeminiCliService>(
211 config.model.empty() ? "gemini-2.5-flash" : config.model);
212 }
213
214#ifdef YAZE_WITH_JSON
215 if (provider == "gemini") {
216 if (config.gemini_api_key.empty()) {
217 return absl::FailedPreconditionError(
218 "Gemini API key not provided. Set --gemini_api_key or "
219 "GEMINI_API_KEY.");
220 }
221 GeminiConfig gemini_config(config.gemini_api_key);
222 if (!config.model.empty()) {
223 gemini_config.model = config.model;
224 }
225 gemini_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
226 gemini_config.use_function_calling =
227 absl::GetFlag(FLAGS_use_function_calling);
228 gemini_config.verbose = config.verbose;
229 return std::make_unique<GeminiAIService>(gemini_config);
230 }
231 if (provider == "anthropic") {
232 if (config.anthropic_api_key.empty()) {
233 return absl::FailedPreconditionError(
234 "Anthropic API key not provided. Set --anthropic_api_key or "
235 "ANTHROPIC_API_KEY.");
236 }
237 AnthropicConfig anthropic_config(config.anthropic_api_key);
238 if (!config.model.empty()) {
239 anthropic_config.model = config.model;
240 }
241 anthropic_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
242 anthropic_config.use_function_calling =
243 absl::GetFlag(FLAGS_use_function_calling);
244 anthropic_config.verbose = config.verbose;
245 return std::make_unique<AnthropicAIService>(anthropic_config);
246 }
247 if (provider == "openai") {
248 // LMStudio doesn't require an API key - allow empty key for local servers
249 bool is_local_server = config.openai_base_url != kDefaultOpenAiBaseUrl;
250 if (config.openai_api_key.empty() && !is_local_server) {
251 return absl::FailedPreconditionError(
252 "OpenAI API key not provided. Set OPENAI_API_KEY.\n"
253 "For LMStudio, use --openai_base_url=http://localhost:1234");
254 }
255 OpenAIConfig openai_config(config.openai_api_key);
256 openai_config.base_url = config.openai_base_url;
257 if (!config.model.empty()) {
258 openai_config.model = config.model;
259 }
260 openai_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
261 openai_config.use_function_calling =
262 absl::GetFlag(FLAGS_use_function_calling);
263 openai_config.verbose = config.verbose;
264 return std::make_unique<OpenAIAIService>(openai_config);
265 }
266#else
267 if (provider == "gemini" || provider == "anthropic") {
268 return absl::FailedPreconditionError(
269 "AI support not available: rebuild with YAZE_WITH_JSON=ON");
270 }
271#endif
272
273 return absl::InvalidArgumentError(
274 absl::StrFormat("Unknown AI provider: %s", config.provider));
275}
276
277} // namespace cli
278} // namespace yaze
bool HasOllamaHint(const yaze::cli::AIServiceConfig &config)
std::string NormalizeProviderAlias(std::string provider)
std::unique_ptr< AIService > CreateAIService()
absl::StatusOr< std::unique_ptr< AIService > > CreateAIServiceStrict(const AIServiceConfig &config)
std::string NormalizeOpenAiBaseUrl(std::string base)
ABSL_DECLARE_FLAG(std::string, ai_provider)