vex_llm/
config.rs

1//! Configuration management for VEX
2//!
3//! Handles API keys, provider settings, and runtime configuration.
4
5use serde::{Deserialize, Serialize};
6use std::env;
7
/// Error types for configuration
///
/// Display strings are provided via `thiserror`; both variants carry a
/// human-readable payload.
#[derive(Debug, thiserror::Error)]
pub enum ConfigError {
    /// A required environment variable was not set; payload is the variable name.
    #[error("Missing environment variable: {0}")]
    MissingEnvVar(String),
    /// Configuration was present but unusable; payload describes the problem.
    #[error("Invalid configuration: {0}")]
    Invalid(String),
}
16
/// LLM provider configuration
///
/// Holds optional API keys for the hosted providers plus the local Ollama
/// endpoint and the default provider/model selection. Built either via
/// `Default` (no keys) or `from_env` (reads the env vars noted per field).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmConfig {
    /// DeepSeek API key (env: DEEPSEEK_API_KEY); `None` when not configured
    pub deepseek_api_key: Option<String>,
    /// Mistral API key (env: MISTRAL_API_KEY); `None` when not configured
    pub mistral_api_key: Option<String>,
    /// OpenAI API key (env: OPENAI_API_KEY); `None` when not configured
    pub openai_api_key: Option<String>,
    /// Anthropic API key (env: ANTHROPIC_API_KEY); `None` when not configured
    pub anthropic_api_key: Option<String>,
    /// Ollama base URL (env: OLLAMA_URL; default: http://localhost:11434)
    pub ollama_url: String,
    /// Default provider name, lowercase (default: "deepseek")
    pub default_provider: String,
    /// Default model identifier (default: "deepseek-chat")
    pub default_model: String,
}
35
36impl Default for LlmConfig {
37    fn default() -> Self {
38        Self {
39            deepseek_api_key: None,
40            mistral_api_key: None,
41            openai_api_key: None,
42            anthropic_api_key: None,
43            ollama_url: "http://localhost:11434".to_string(),
44            default_provider: "deepseek".to_string(),
45            default_model: "deepseek-chat".to_string(),
46        }
47    }
48}
49
50impl LlmConfig {
51    /// Load configuration from environment variables
52    pub fn from_env() -> Self {
53        Self {
54            deepseek_api_key: env::var("DEEPSEEK_API_KEY").ok(),
55            mistral_api_key: env::var("MISTRAL_API_KEY").ok(),
56            openai_api_key: env::var("OPENAI_API_KEY").ok(),
57            anthropic_api_key: env::var("ANTHROPIC_API_KEY").ok(),
58            ollama_url: env::var("OLLAMA_URL")
59                .unwrap_or_else(|_| "http://localhost:11434".to_string()),
60            default_provider: env::var("VEX_DEFAULT_PROVIDER")
61                .unwrap_or_else(|_| "deepseek".to_string()),
62            default_model: env::var("VEX_DEFAULT_MODEL")
63                .unwrap_or_else(|_| "deepseek-chat".to_string()),
64        }
65    }
66
67    /// Get API key for a provider
68    pub fn api_key(&self, provider: &str) -> Option<&str> {
69        match provider.to_lowercase().as_str() {
70            "deepseek" => self.deepseek_api_key.as_deref(),
71            "mistral" => self.mistral_api_key.as_deref(),
72            "openai" => self.openai_api_key.as_deref(),
73            "anthropic" => self.anthropic_api_key.as_deref(),
74            _ => None,
75        }
76    }
77
78    /// Check if a provider is configured
79    pub fn is_configured(&self, provider: &str) -> bool {
80        match provider.to_lowercase().as_str() {
81            "deepseek" => self.deepseek_api_key.is_some(),
82            "mistral" => self.mistral_api_key.is_some(),
83            "openai" => self.openai_api_key.is_some(),
84            "anthropic" => self.anthropic_api_key.is_some(),
85            "ollama" | "mock" => true, // Always available
86            _ => false,
87        }
88    }
89
90    /// List available providers
91    pub fn available_providers(&self) -> Vec<&str> {
92        let mut providers = vec!["mock", "ollama"];
93        if self.deepseek_api_key.is_some() {
94            providers.push("deepseek");
95        }
96        if self.mistral_api_key.is_some() {
97            providers.push("mistral");
98        }
99        if self.openai_api_key.is_some() {
100            providers.push("openai");
101        }
102        if self.anthropic_api_key.is_some() {
103            providers.push("anthropic");
104        }
105        providers
106    }
107}
108
/// Full VEX configuration
///
/// Aggregates LLM provider settings with runtime flags. Normally built via
/// `from_env`.
///
/// NOTE(review): the derived `Default` yields `max_agent_depth = 0` and
/// `adversarial_enabled = false`, which differ from the `from_env` fallbacks
/// (3 and true) — confirm that is intended.
#[derive(Debug, Clone, Default)]
pub struct VexConfig {
    /// LLM provider settings
    pub llm: LlmConfig,
    /// Enable debug logging (env: VEX_DEBUG set to "1" or "true")
    pub debug: bool,
    /// Maximum agent depth (env: VEX_MAX_DEPTH; from_env falls back to 3)
    pub max_agent_depth: u8,
    /// Enable adversarial verification (env: VEX_ADVERSARIAL; from_env
    /// treats anything except "0"/"false" — including unset — as enabled)
    pub adversarial_enabled: bool,
}
121
122impl VexConfig {
123    /// Load from environment
124    pub fn from_env() -> Self {
125        Self {
126            llm: LlmConfig::from_env(),
127            debug: env::var("VEX_DEBUG")
128                .map(|v| v == "1" || v == "true")
129                .unwrap_or(false),
130            max_agent_depth: env::var("VEX_MAX_DEPTH")
131                .ok()
132                .and_then(|v| v.parse().ok())
133                .unwrap_or(3),
134            adversarial_enabled: env::var("VEX_ADVERSARIAL")
135                .map(|v| v != "0" && v != "false")
136                .unwrap_or(true),
137        }
138    }
139}
140
#[cfg(test)]
mod tests {
    use super::*;

    /// Defaults and provider availability: covers model/URL defaults,
    /// keyless providers, case-insensitive names, and unknown providers.
    #[test]
    fn test_default_config() {
        let config = LlmConfig::default();
        assert_eq!(config.default_provider, "deepseek");
        assert_eq!(config.default_model, "deepseek-chat");
        assert_eq!(config.ollama_url, "http://localhost:11434");
        // Keyless providers are always usable.
        assert!(config.is_configured("mock"));
        assert!(config.is_configured("ollama"));
        // Provider names match case-insensitively.
        assert!(config.is_configured("OLLAMA"));
        // Keyed providers need a key; unknown names are never configured.
        assert!(!config.is_configured("deepseek"));
        assert!(!config.is_configured("unknown"));
    }

    /// api_key returns the stored key for known providers (any case) and
    /// None for unset or unknown providers.
    #[test]
    fn test_api_key_lookup() {
        let config = LlmConfig {
            deepseek_api_key: Some("sk-test".to_string()),
            ..LlmConfig::default()
        };
        assert_eq!(config.api_key("deepseek"), Some("sk-test"));
        assert_eq!(config.api_key("DeepSeek"), Some("sk-test"));
        assert_eq!(config.api_key("mistral"), None);
        assert_eq!(config.api_key("nope"), None);
    }

    /// Without any keys, only the keyless providers are listed.
    #[test]
    fn test_available_providers() {
        let config = LlmConfig::default();
        let providers = config.available_providers();
        assert!(providers.contains(&"mock"));
        assert!(providers.contains(&"ollama"));
        assert!(!providers.contains(&"deepseek"));
    }
}