#include <ollama_ai_service.h>
Public Attributes
| std::string | base_url = "http://localhost:11434" |
| std::string | model |
| float | temperature = 0.1 |
| int | max_tokens = 2048 |
| std::string | system_prompt |
| bool | use_enhanced_prompting = true |
| float | top_p = 0.92f |
| int | top_k = 40 |
| int | num_ctx = 4096 |
| bool | stream = false |
| bool | use_chat_completions = true |
| std::vector< std::string > | favorite_models |
Definition at line 21 of file ollama_ai_service.h.
| std::string yaze::cli::OllamaConfig::base_url = "http://localhost:11434" |
Definition at line 22 of file ollama_ai_service.h.
Referenced by yaze::cli::CreateAIServiceStrict().
| std::string yaze::cli::OllamaConfig::model |
Definition at line 23 of file ollama_ai_service.h.
Referenced by yaze::cli::CreateAIServiceStrict().
| float yaze::cli::OllamaConfig::temperature = 0.1 |
Definition at line 25 of file ollama_ai_service.h.
| int yaze::cli::OllamaConfig::max_tokens = 2048 |
Definition at line 26 of file ollama_ai_service.h.
| std::string yaze::cli::OllamaConfig::system_prompt |
Definition at line 27 of file ollama_ai_service.h.
| bool yaze::cli::OllamaConfig::use_enhanced_prompting = true |
Definition at line 28 of file ollama_ai_service.h.
| float yaze::cli::OllamaConfig::top_p = 0.92f |
Definition at line 29 of file ollama_ai_service.h.
| int yaze::cli::OllamaConfig::top_k = 40 |
Definition at line 30 of file ollama_ai_service.h.
| int yaze::cli::OllamaConfig::num_ctx = 4096 |
Definition at line 31 of file ollama_ai_service.h.
| bool yaze::cli::OllamaConfig::stream = false |
Definition at line 32 of file ollama_ai_service.h.
| bool yaze::cli::OllamaConfig::use_chat_completions = true |
Definition at line 33 of file ollama_ai_service.h.
| std::vector<std::string> yaze::cli::OllamaConfig::favorite_models |
Definition at line 34 of file ollama_ai_service.h.