1use serde::{Deserialize, Serialize};
6use std::env;
7
/// Errors that can occur while loading or validating configuration.
///
/// NOTE(review): no variant is constructed anywhere in this file —
/// presumably used by callers elsewhere in the crate; confirm before
/// removing or renaming variants.
#[derive(Debug, thiserror::Error)]
pub enum ConfigError {
    /// A required environment variable was not set.
    #[error("Missing environment variable: {0}")]
    MissingEnvVar(String),
    /// Configuration was present but semantically invalid.
    #[error("Invalid configuration: {0}")]
    Invalid(String),
}
16
/// Settings for the supported LLM providers.
///
/// A key-based provider is usable only when its API key is present
/// (`is_configured`); Ollama and the mock provider need no key.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmConfig {
    // Per-provider API keys; `None` means that provider is not configured.
    pub deepseek_api_key: Option<String>,
    pub mistral_api_key: Option<String>,
    pub openai_api_key: Option<String>,
    pub anthropic_api_key: Option<String>,
    // Base URL of the Ollama server (keyless; defaults to localhost).
    pub ollama_url: String,
    // Provider and model used when the caller does not specify one.
    pub default_provider: String,
    pub default_model: String,
}
35
36impl Default for LlmConfig {
37 fn default() -> Self {
38 Self {
39 deepseek_api_key: None,
40 mistral_api_key: None,
41 openai_api_key: None,
42 anthropic_api_key: None,
43 ollama_url: "http://localhost:11434".to_string(),
44 default_provider: "deepseek".to_string(),
45 default_model: "deepseek-chat".to_string(),
46 }
47 }
48}
49
50impl LlmConfig {
51 pub fn from_env() -> Self {
53 Self {
54 deepseek_api_key: env::var("DEEPSEEK_API_KEY").ok(),
55 mistral_api_key: env::var("MISTRAL_API_KEY").ok(),
56 openai_api_key: env::var("OPENAI_API_KEY").ok(),
57 anthropic_api_key: env::var("ANTHROPIC_API_KEY").ok(),
58 ollama_url: env::var("OLLAMA_URL")
59 .unwrap_or_else(|_| "http://localhost:11434".to_string()),
60 default_provider: env::var("VEX_DEFAULT_PROVIDER")
61 .unwrap_or_else(|_| "deepseek".to_string()),
62 default_model: env::var("VEX_DEFAULT_MODEL")
63 .unwrap_or_else(|_| "deepseek-chat".to_string()),
64 }
65 }
66
67 pub fn api_key(&self, provider: &str) -> Option<&str> {
69 match provider.to_lowercase().as_str() {
70 "deepseek" => self.deepseek_api_key.as_deref(),
71 "mistral" => self.mistral_api_key.as_deref(),
72 "openai" => self.openai_api_key.as_deref(),
73 "anthropic" => self.anthropic_api_key.as_deref(),
74 _ => None,
75 }
76 }
77
78 pub fn is_configured(&self, provider: &str) -> bool {
80 match provider.to_lowercase().as_str() {
81 "deepseek" => self.deepseek_api_key.is_some(),
82 "mistral" => self.mistral_api_key.is_some(),
83 "openai" => self.openai_api_key.is_some(),
84 "anthropic" => self.anthropic_api_key.is_some(),
85 "ollama" | "mock" => true, _ => false,
87 }
88 }
89
90 pub fn available_providers(&self) -> Vec<&str> {
92 let mut providers = vec!["mock", "ollama"];
93 if self.deepseek_api_key.is_some() {
94 providers.push("deepseek");
95 }
96 if self.mistral_api_key.is_some() {
97 providers.push("mistral");
98 }
99 if self.openai_api_key.is_some() {
100 providers.push("openai");
101 }
102 if self.anthropic_api_key.is_some() {
103 providers.push("anthropic");
104 }
105 providers
106 }
107}
108
/// Top-level runtime configuration, aggregating LLM settings and
/// feature toggles. Populated from the environment via `from_env`.
#[derive(Debug, Clone, Default)]
pub struct VexConfig {
    // LLM provider settings (API keys, default provider/model).
    pub llm: LlmConfig,
    // Debug output toggle; read from VEX_DEBUG.
    pub debug: bool,
    // Maximum agent depth; read from VEX_MAX_DEPTH (defaults to 3).
    // NOTE(review): exact semantics of "depth" inferred from the name —
    // confirm against the consuming agent code.
    pub max_agent_depth: u8,
    // Adversarial mode toggle; read from VEX_ADVERSARIAL (on by default).
    pub adversarial_enabled: bool,
}
121
122impl VexConfig {
123 pub fn from_env() -> Self {
125 Self {
126 llm: LlmConfig::from_env(),
127 debug: env::var("VEX_DEBUG")
128 .map(|v| v == "1" || v == "true")
129 .unwrap_or(false),
130 max_agent_depth: env::var("VEX_MAX_DEPTH")
131 .ok()
132 .and_then(|v| v.parse().ok())
133 .unwrap_or(3),
134 adversarial_enabled: env::var("VEX_ADVERSARIAL")
135 .map(|v| v != "0" && v != "false")
136 .unwrap_or(true),
137 }
138 }
139}
140
#[cfg(test)]
mod tests {
    use super::*;

    // Defaults: DeepSeek is the preferred provider, and the keyless
    // providers (mock, ollama) are always considered configured.
    #[test]
    fn test_default_config() {
        let config = LlmConfig::default();
        assert_eq!(config.default_provider, "deepseek");
        assert!(config.is_configured("mock"));
        assert!(config.is_configured("ollama"));
    }

    // With no API keys set, the keyless providers are still listed.
    #[test]
    fn test_available_providers() {
        let config = LlmConfig::default();
        let providers = config.available_providers();
        assert!(providers.contains(&"mock"));
        assert!(providers.contains(&"ollama"));
    }
}