yaze 0.3.2
Link to the Past ROM Editor
 
service_factory.cc
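Implementation of the AI service factory for the yaze CLI: it builds an AIServiceConfig from command-line flags and environment variables (GEMINI_API_KEY, OPENAI_API_KEY, OLLAMA_MODEL), auto-detects a provider when none is given, and constructs a Gemini, OpenAI, Ollama, local Gemini CLI, or mock backend. The non-strict factory falls back to MockAIService when strict construction fails.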
#include <cstdlib>
#include <cstring>
#include <iostream>

#include "absl/flags/declare.h"
#include "absl/flags/flag.h"
#include "absl/strings/ascii.h"
#include "absl/strings/str_format.h"

// (project includes elided in the generated listing)

#ifdef YAZE_WITH_JSON
// (JSON-gated includes elided in the generated listing)
#endif

ABSL_DECLARE_FLAG(std::string, ai_provider);
ABSL_DECLARE_FLAG(std::string, ai_model);
ABSL_DECLARE_FLAG(std::string, gemini_api_key);
ABSL_DECLARE_FLAG(std::string, ollama_host);
ABSL_DECLARE_FLAG(std::string, prompt_version);
ABSL_DECLARE_FLAG(bool, use_function_calling);

namespace yaze {
namespace cli {

std::unique_ptr<AIService> CreateAIService() {
  // Read configuration from flags
  AIServiceConfig config;
  config.provider = absl::AsciiStrToLower(absl::GetFlag(FLAGS_ai_provider));
  config.model = absl::GetFlag(FLAGS_ai_model);
  config.gemini_api_key = absl::GetFlag(FLAGS_gemini_api_key);
  config.ollama_host = absl::GetFlag(FLAGS_ollama_host);

  // Fall back to environment variables if flags not set
  if (config.gemini_api_key.empty()) {
    const char* env_key = std::getenv("GEMINI_API_KEY");
    if (env_key)
      config.gemini_api_key = env_key;
  }
  if (config.openai_api_key.empty()) {
    const char* openai_key = std::getenv("OPENAI_API_KEY");
    if (openai_key) {
      config.openai_api_key = openai_key;
    }
  }
  if (config.model.empty()) {
    const char* env_model = std::getenv("OLLAMA_MODEL");
    if (env_model)
      config.model = env_model;
  }

  return CreateAIService(config);
}

std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config) {
  AIServiceConfig effective_config = config;
  if (effective_config.provider.empty()) {
    effective_config.provider = "auto";
  }

  if (effective_config.provider == "auto") {
#ifdef YAZE_WITH_JSON
    if (!effective_config.gemini_api_key.empty()) {
      std::cout << "🤖 Auto-detecting AI provider...\n";
      std::cout << "   Found Gemini API key, using Gemini\n";
      effective_config.provider = "gemini";
    } else if (!effective_config.openai_api_key.empty()) {
      std::cout << "🤖 Auto-detecting AI provider...\n";
      std::cout << "   Found OpenAI API key, using OpenAI\n";
      effective_config.provider = "openai";
      if (effective_config.model.empty()) {
        effective_config.model = "gpt-4o-mini";
      }
    } else
#endif
    {
      std::cout << "🤖 No AI provider configured, using MockAIService\n";
      std::cout << "   Tip: Set GEMINI_API_KEY/OPENAI_API_KEY or start Ollama for real AI\n";
      effective_config.provider = "mock";
    }
  }

  if (effective_config.provider != "mock") {
    std::cout << "🤖 AI Provider: " << effective_config.provider << "\n";
  }

  auto service_or = CreateAIServiceStrict(effective_config);
  if (service_or.ok()) {
    return std::move(service_or.value());
  }

  std::cerr << "⚠️ " << service_or.status().message() << std::endl;
  std::cerr << "   Falling back to MockAIService" << std::endl;
  return std::make_unique<MockAIService>();
}

absl::StatusOr<std::unique_ptr<AIService>> CreateAIServiceStrict(
    const AIServiceConfig& config) {
  std::string provider = absl::AsciiStrToLower(config.provider);
  if (provider.empty() || provider == "auto") {
    return absl::InvalidArgumentError(
        "CreateAIServiceStrict requires an explicit provider (not 'auto')");
  }

  if (provider == "mock") {
    return std::make_unique<MockAIService>();
  }

  if (provider == "ollama") {
    OllamaConfig ollama_config;
    ollama_config.base_url = config.ollama_host;
    if (!config.model.empty()) {
      ollama_config.model = config.model;
    } else if (const char* env_model = std::getenv("OLLAMA_MODEL")) {
      ollama_config.model = env_model;
    }

    return std::make_unique<OllamaAIService>(ollama_config);
  }

  if (provider == "gemini-cli" || provider == "local-gemini") {
    return std::make_unique<LocalGeminiCliService>(
        config.model.empty() ? "gemini-2.5-flash" : config.model);
  }

#ifdef YAZE_WITH_JSON
  if (provider == "gemini") {
    if (config.gemini_api_key.empty()) {
      return absl::FailedPreconditionError(
          "Gemini API key not provided. Set --gemini_api_key or "
          "GEMINI_API_KEY.");
    }
    GeminiConfig gemini_config(config.gemini_api_key);
    if (!config.model.empty()) {
      gemini_config.model = config.model;
    }
    gemini_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
    gemini_config.use_function_calling =
        absl::GetFlag(FLAGS_use_function_calling);
    gemini_config.verbose = config.verbose;
    return std::make_unique<GeminiAIService>(gemini_config);
  }
  if (provider == "openai") {
    if (config.openai_api_key.empty()) {
      return absl::FailedPreconditionError(
          "OpenAI API key not provided. Set OPENAI_API_KEY.");
    }
    OpenAIConfig openai_config(config.openai_api_key);
    if (!config.model.empty()) {
      openai_config.model = config.model;
    }
    openai_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
    openai_config.use_function_calling =
        absl::GetFlag(FLAGS_use_function_calling);
    openai_config.verbose = config.verbose;
    return std::make_unique<OpenAIAIService>(openai_config);
  }
#else
  if (provider == "gemini") {
    return absl::FailedPreconditionError(
        "Gemini support not available: rebuild with YAZE_WITH_JSON=ON");
  }
#endif

  return absl::InvalidArgumentError(
      absl::StrFormat("Unknown AI provider: %s", config.provider));
}

}  // namespace cli
}  // namespace yaze
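For orientation, a minimal usage sketch of the two entry points above. It is not taken from the yaze sources: the include path and the concrete provider/model/host values are assumptions, and only the types and signatures shown in this file are relied on.

#include <iostream>
#include <memory>
#include <utility>

// Assumed header path; the real include for this factory may differ.
#include "cli/service/ai/service_factory.h"

void ConfigureAiBackends() {
  // Flag/environment-driven path: the provider is auto-detected and the call
  // falls back to MockAIService (with a warning on stderr) if strict
  // construction fails.
  std::unique_ptr<yaze::cli::AIService> service = yaze::cli::CreateAIService();

  // Strict path: callers that must not fall back build the config themselves
  // and handle the error status.
  yaze::cli::AIServiceConfig config;
  config.provider = "ollama";                     // explicit provider required
  config.ollama_host = "http://localhost:11434";  // assumed local Ollama host
  config.model = "llama3";                        // hypothetical model name
  auto strict_or = yaze::cli::CreateAIServiceStrict(config);
  if (!strict_or.ok()) {
    std::cerr << strict_or.status().message() << "\n";
    return;
  }
  std::unique_ptr<yaze::cli::AIService> ollama_service =
      std::move(strict_or.value());
}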