// NOTE(review): this excerpt is a whitespace-mangled paste. Statements are
// split mid-expression, and the fused numeric prefixes (73, 74, 77, ...) are
// the original file's line numbers. Gaps in that numbering mean interior
// lines are missing from this view; the comments below annotate only what is
// visible and must not be read as a complete picture of the function.

// Seed the AI config from command-line flags. The provider string is
// canonicalized via NormalizeProviderAlias before any comparisons.
73 config.
provider = NormalizeProviderAlias(absl::GetFlag(FLAGS_ai_provider));
74 config.
model = absl::GetFlag(FLAGS_ai_model);
77 config.
ollama_host = absl::GetFlag(FLAGS_ollama_host);
// Per-provider API keys are read from the environment. The enclosing
// if/else scopes are among the missing lines — presumably each getenv only
// fills in a key that was not already supplied via flags; confirm against
// the full source.
82 const char* env_key = std::getenv(
"GEMINI_API_KEY");
87 const char* env_key = std::getenv(
"ANTHROPIC_API_KEY");
92 const char* openai_key = std::getenv(
"OPENAI_API_KEY");
// OpenAI-compatible base URL: prefer OPENAI_BASE_URL, falling back to the
// legacy OPENAI_API_BASE spelling; only a non-empty value is applied.
99 const char* env_openai_base = std::getenv(
"OPENAI_BASE_URL");
100 if (!env_openai_base || !*env_openai_base) {
101 env_openai_base = std::getenv(
"OPENAI_API_BASE");
103 if (env_openai_base && *env_openai_base) {
// Ollama host from the environment (applied only when non-empty);
// OLLAMA_MODEL is consulted only when no model was chosen above.
108 const char* env_ollama_host = std::getenv(
"OLLAMA_HOST");
109 if (env_ollama_host && *env_ollama_host) {
113 if (config.
model.empty()) {
114 const char* env_model = std::getenv(
"OLLAMA_MODEL");
116 config.
model = env_model;
// NOTE(review): fragmented excerpt — interior original lines are missing
// (see the jumps in the fused line-number prefixes). Comments describe only
// the visible fragments.

// Re-normalize the provider alias on the effective (merged) config before
// dispatching on it.
125 effective_config.
provider = NormalizeProviderAlias(effective_config.
provider);
// Empty provider is handled first (branch body not visible here).
128 if (effective_config.
provider.empty()) {
// "auto": probe for credentials in priority order. Each branch announces
// the detection on stdout and rewrites effective_config.provider. The
// conditions guarding each branch (key presence checks) are among the
// missing lines — presumably Gemini, then Anthropic, then OpenAI keys;
// confirm against the full source.
132 if (effective_config.
provider ==
"auto") {
135 std::cout <<
"🤖 Auto-detecting AI provider...\n";
136 std::cout <<
" Found Gemini API key, using Gemini\n";
137 effective_config.
provider =
"gemini";
139 std::cout <<
"🤖 Auto-detecting AI provider...\n";
140 std::cout <<
" Found Anthropic API key, using Anthropic\n";
141 effective_config.
provider =
"anthropic";
143 std::cout <<
"🤖 Auto-detecting AI provider...\n";
144 std::cout <<
" Found OpenAI API key, using OpenAI\n";
145 effective_config.
provider =
"openai";
// OpenAI gets a default model when none was configured.
146 if (effective_config.
model.empty()) {
147 effective_config.
model =
"gpt-4o-mini";
149 }
// A non-default OpenAI base URL (e.g. a local LMStudio/vLLM server) also
// selects the OpenAI provider, even without an API key.
else if (effective_config.
openai_base_url != kDefaultOpenAiBaseUrl) {
150 std::cout <<
"🤖 Auto-detecting AI provider...\n";
151 std::cout <<
" Found OpenAI-compatible base URL, using OpenAI\n";
152 if (effective_config.
model.empty()) {
153 std::cout <<
" Tip: Set --ai_model for local servers\n";
155 effective_config.
provider =
"openai";
// Last detection attempt: any Ollama hint (host/model configuration).
158 if (HasOllamaHint(effective_config)) {
159 std::cout <<
"🤖 Auto-detecting AI provider...\n";
160 std::cout <<
" Found Ollama configuration, using Ollama\n";
161 effective_config.
provider =
"ollama";
// Nothing detected: fall back to the mock service and tell the user how to
// configure a real provider.
163 std::cout <<
"🤖 No AI provider configured, using MockAIService\n";
165 <<
" Tip: Set GEMINI_API_KEY, ANTHROPIC_API_KEY, OPENAI_API_KEY,"
166 " OPENAI_BASE_URL, or OLLAMA_HOST/OLLAMA_MODEL\n";
// Announce the resolved provider unless it is the mock.
171 if (effective_config.
provider !=
"mock") {
172 std::cout <<
"🤖 AI Provider: " << effective_config.
provider <<
"\n";
// service_or is an absl::StatusOr-style result (construction not visible
// here). On success, transfer ownership of the service to the caller...
176 if (service_or.ok()) {
177 return std::move(service_or.value());
// ...otherwise warn and degrade gracefully to the mock implementation
// instead of failing.
180 std::cerr <<
"⚠️ " << service_or.status().message() << std::endl;
181 std::cerr <<
" Falling back to MockAIService" << std::endl;
182 return std::make_unique<MockAIService>();
// NOTE(review): fragmented excerpt of CreateAIServiceStrict (name taken from
// its own error string). Interior original lines — config-struct setup, API
// key checks, closing braces, and probable #ifdef YAZE_WITH_JSON guards —
// are missing from this view; comments annotate only the visible fragments.

// Strict factory: unlike the auto-detecting path, this requires an explicit
// provider and returns an error Status rather than falling back to the mock.
187 std::string provider = NormalizeProviderAlias(config.
provider);
188 if (provider.empty() || provider ==
"auto") {
189 return absl::InvalidArgumentError(
190 "CreateAIServiceStrict requires an explicit provider (not 'auto')");
// "mock" is the only provider with no external requirements.
193 if (provider ==
"mock") {
194 return std::make_unique<MockAIService>();
// Ollama: model comes from config when set, else the OLLAMA_MODEL env var.
197 if (provider ==
"ollama") {
200 if (!config.
model.empty()) {
202 }
else if (
const char* env_model = std::getenv(
"OLLAMA_MODEL")) {
203 ollama_config.
model = env_model;
206 return std::make_unique<OllamaAIService>(ollama_config);
// Local Gemini CLI wrapper; defaults to "gemini-2.5-flash" when no model
// was configured.
209 if (provider ==
"gemini-cli" || provider ==
"local-gemini") {
210 return std::make_unique<LocalGeminiCliService>(
211 config.
model.empty() ?
"gemini-2.5-flash" : config.
model);
// Gemini API: an API key is mandatory (the check guarding this error is
// among the missing lines).
215 if (provider ==
"gemini") {
217 return absl::FailedPreconditionError(
218 "Gemini API key not provided. Set --gemini_api_key or "
222 if (!config.
model.empty()) {
// Prompt version and function-calling behavior come from flags.
225 gemini_config.
prompt_version = absl::GetFlag(FLAGS_prompt_version);
227 absl::GetFlag(FLAGS_use_function_calling);
229 return std::make_unique<GeminiAIService>(gemini_config);
// Anthropic: same shape as the Gemini branch — key required, model and
// prompt flags copied into the provider config.
231 if (provider ==
"anthropic") {
233 return absl::FailedPreconditionError(
234 "Anthropic API key not provided. Set --anthropic_api_key or "
235 "ANTHROPIC_API_KEY.");
238 if (!config.
model.empty()) {
241 anthropic_config.
prompt_version = absl::GetFlag(FLAGS_prompt_version);
243 absl::GetFlag(FLAGS_use_function_calling);
245 return std::make_unique<AnthropicAIService>(anthropic_config);
// OpenAI: a non-default base URL marks a local server (LMStudio etc.),
// which presumably relaxes the API-key requirement — confirm against the
// missing guard line.
247 if (provider ==
"openai") {
249 bool is_local_server = config.
openai_base_url != kDefaultOpenAiBaseUrl;
251 return absl::FailedPreconditionError(
252 "OpenAI API key not provided. Set OPENAI_API_KEY.\n"
253 "For LMStudio, use --openai_base_url=http://localhost:1234");
257 if (!config.
model.empty()) {
260 openai_config.
prompt_version = absl::GetFlag(FLAGS_prompt_version);
262 absl::GetFlag(FLAGS_use_function_calling);
264 return std::make_unique<OpenAIAIService>(openai_config);
// This second gemini/anthropic check is reachable only when the branches
// above were compiled out — presumably a #if !YAZE_WITH_JSON build; the
// error message supports that reading.
267 if (provider ==
"gemini" || provider ==
"anthropic") {
268 return absl::FailedPreconditionError(
269 "AI support not available: rebuild with YAZE_WITH_JSON=ON");
// Anything else is an unknown provider name.
273 return absl::InvalidArgumentError(
274 absl::StrFormat(
"Unknown AI provider: %s", config.
provider));