yaze 0.3.2
Link to the Past ROM Editor
 
Loading...
Searching...
No Matches
service_factory.cc
Go to the documentation of this file.
2
#include <cstdlib>  // std::getenv — previously relied on a transitive include
#include <cstring>
#include <iostream>
#include <memory>
#include <string>

#include "absl/flags/declare.h"
#include "absl/flags/flag.h"
#include "absl/strings/ascii.h"
11
12#ifdef YAZE_WITH_JSON
14#endif
15
16ABSL_DECLARE_FLAG(std::string, ai_provider);
17ABSL_DECLARE_FLAG(std::string, ai_model);
18ABSL_DECLARE_FLAG(std::string, gemini_api_key);
19ABSL_DECLARE_FLAG(std::string, ollama_host);
20ABSL_DECLARE_FLAG(std::string, prompt_version);
21ABSL_DECLARE_FLAG(bool, use_function_calling);
22
23namespace yaze {
24namespace cli {
25
26std::unique_ptr<AIService> CreateAIService() {
27 // Read configuration from flags
28 AIServiceConfig config;
29 config.provider = absl::AsciiStrToLower(absl::GetFlag(FLAGS_ai_provider));
30 config.model = absl::GetFlag(FLAGS_ai_model);
31 config.gemini_api_key = absl::GetFlag(FLAGS_gemini_api_key);
32 config.ollama_host = absl::GetFlag(FLAGS_ollama_host);
33
34 // Fall back to environment variables if flags not set
35 if (config.gemini_api_key.empty()) {
36 const char* env_key = std::getenv("GEMINI_API_KEY");
37 if (env_key) config.gemini_api_key = env_key;
38 }
39 if (config.model.empty()) {
40 const char* env_model = std::getenv("OLLAMA_MODEL");
41 if (env_model) config.model = env_model;
42 }
43
44 return CreateAIService(config);
45}
46
47std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config) {
48 std::string provider = config.provider;
49
50 // Auto-detection: try gemini → ollama → mock
51 if (provider == "auto") {
52 // Try Gemini first if API key is available
53#ifdef YAZE_WITH_JSON
54 if (!config.gemini_api_key.empty()) {
55 std::cout << "🤖 Auto-detecting AI provider...\n";
56 std::cout << " Found Gemini API key, using Gemini\n";
57 provider = "gemini";
58 } else
59#endif
60 {
61 // Try Ollama next
62 OllamaConfig test_config;
63 test_config.base_url = config.ollama_host;
64 auto test_service = std::make_unique<OllamaAIService>(test_config);
65 if (test_service->CheckAvailability().ok()) {
66 std::cout << "🤖 Auto-detecting AI provider...\n";
67 std::cout << " Ollama available, using Ollama\n";
68 provider = "ollama";
69 } else {
70 std::cout << "🤖 No AI provider configured, using MockAIService\n";
71 std::cout << " Tip: Set GEMINI_API_KEY or start Ollama for real AI\n";
72 provider = "mock";
73 }
74 }
75 }
76
77 if (provider != "mock") {
78 std::cout << "🤖 AI Provider: " << provider << "\n";
79 }
80
81 // Ollama provider
82 if (provider == "ollama") {
83 OllamaConfig ollama_config;
84 ollama_config.base_url = config.ollama_host;
85 if (!config.model.empty()) {
86 ollama_config.model = config.model;
87 }
88
89 auto service = std::make_unique<OllamaAIService>(ollama_config);
90
91 // Health check
92 if (auto status = service->CheckAvailability(); !status.ok()) {
93 std::cerr << "⚠️ Ollama unavailable: " << status.message() << std::endl;
94 std::cerr << " Falling back to MockAIService" << std::endl;
95 return std::make_unique<MockAIService>();
96 }
97
98 std::cout << " Using model: " << ollama_config.model << std::endl;
99 return std::unique_ptr<AIService>(std::move(service));
100 }
101
102 // Gemini provider
103#ifdef YAZE_WITH_JSON
104 if (provider == "gemini") {
105 if (config.gemini_api_key.empty()) {
106 std::cerr << "⚠️ Gemini API key not provided" << std::endl;
107 std::cerr << " Use --gemini_api_key=<key> or GEMINI_API_KEY environment variable" << std::endl;
108 std::cerr << " Falling back to MockAIService" << std::endl;
109 return std::make_unique<MockAIService>();
110 }
111
112 GeminiConfig gemini_config(config.gemini_api_key);
113 if (!config.model.empty()) {
114 gemini_config.model = config.model;
115 }
116 gemini_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
117 gemini_config.use_function_calling = absl::GetFlag(FLAGS_use_function_calling);
118 gemini_config.verbose = config.verbose;
119
120 std::cout << " Model: " << gemini_config.model << std::endl;
121 if (config.verbose) {
122 std::cerr << " Prompt: " << gemini_config.prompt_version << std::endl;
123 }
124
125 auto service = std::make_unique<GeminiAIService>(gemini_config);
126 // Health check - DISABLED due to SSL issues
127 // if (auto status = service->CheckAvailability(); !status.ok()) {
128 // std::cerr << "⚠️ Gemini unavailable: " << status.message() << std::endl;
129 // std::cerr << " Falling back to MockAIService" << std::endl;
130 // return std::make_unique<MockAIService>();
131 // }
132
133 if (config.verbose) {
134 std::cerr << "[DEBUG] Gemini service ready" << std::endl;
135 }
136 return service;
137 }
138#else
139 if (provider == "gemini") {
140 std::cerr << "⚠️ Gemini support not available: rebuild with YAZE_WITH_JSON=ON" << std::endl;
141 std::cerr << " Falling back to MockAIService" << std::endl;
142 }
143#endif
144
145 // Default: Mock service
146 if (provider == "mock") {
147 std::cout << " Using MockAIService (no real AI)\n";
148 }
149 return std::make_unique<MockAIService>();
150}
151
152} // namespace cli
153} // namespace yaze
std::unique_ptr< AIService > CreateAIService()
Main namespace for the application.
ABSL_DECLARE_FLAG(std::string, ai_provider)