diff --git a/.ai-assistents/README.md b/.ai-assistents/README.md index f6b14f4..ae7dd1d 100644 --- a/.ai-assistents/README.md +++ b/.ai-assistents/README.md @@ -20,6 +20,9 @@ Other files you should read and in git system also: - [TODO](TODO.md) +And, use standard docstring format for docs.rs in source code. You should check and fix for this at any time. If no need, do not add other comments. +> Docstring must be written in English. + ## Upstream APIs/SDKs 上游API/SDK > Only APIs/SDKs worth remembering are listed here. diff --git a/.ai-assistents/TODO.md b/.ai-assistents/TODO.md index c4b25f9..d4c3fde 100644 --- a/.ai-assistents/TODO.md +++ b/.ai-assistents/TODO.md @@ -1,5 +1,22 @@ # To-dos -Empty at present. Should it be generated now? +- [x] Initialize project structure (`main.rs`, `chat.rs`, `mcp.rs`, `knowledge.rs`, `config.rs`) +> 初始化项目结构 -Translations follow "Description" section in [README](README.md). \ No newline at end of file +- [ ] Define configuration schema in `config.rs` (load from YAML/ENV) [In Progress] +> 定义配置结构 + +- [ ] Implement MCP tool registration and execution in `mcp.rs` using `mcp-sdk-rs` +> 使用 `mcp-sdk-rs` 在 `mcp.rs` 中实现 MCP 工具注册和执行 + +- [ ] Set up basic dialog management in `chat.rs` using `rig` +> 使用 `rig` 在 `chat.rs` 中设置基本对话管理 + +- [ ] Implement knowledge base interface in `knowledge.rs` +> 在 `knowledge.rs` 中实现知识库接口 + +- [ ] Integrate all components in `main.rs` to create a working CLI chat bot +> 在 `main.rs` 中集成所有组件以创建可工作的 CLI 聊天机器人 + +- [ ] Add unit tests for each module +> 为每个模块添加单元测试 \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml index a89d45b..660915a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,3 +6,6 @@ edition = "2024" [dependencies] mcp-sdk-rs = "0.3.4" rig-core = "0.31.0" +serde = { version = "1.0", features = ["derive"] } +serde_yaml = "0.9" +tokio = { version = "1.0", features = ["full"] } diff --git a/config.yaml b/config.yaml new file mode 100644 index 0000000..362a031 --- /dev/null +++ b/config.yaml @@ 
-0,0 +1,7 @@ +llm: + provider: "openai" + model: "gpt-4o" + api_key: "your-api-key-here" +mcp: + servers: + - "http://localhost:3000" diff --git a/src/chat.rs b/src/chat.rs new file mode 100644 index 0000000..b831753 --- /dev/null +++ b/src/chat.rs @@ -0,0 +1,11 @@ +//! Dialog management for the LLM chat. + +pub struct ChatManager { + // TODO: Add chat manager fields +} + +impl ChatManager { + pub fn new() -> Self { + ChatManager {} + } +} diff --git a/src/config.rs b/src/config.rs new file mode 100644 index 0000000..91b8e76 --- /dev/null +++ b/src/config.rs @@ -0,0 +1,42 @@ +use serde::{Deserialize, Serialize}; +use std::fs; +use std::path::Path; + +#[derive(Debug, Serialize, Deserialize)] +pub struct Config { + pub llm: LlmConfig, + pub mcp: Option<McpConfig>, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LlmConfig { + pub provider: String, + pub model: String, + pub api_key: Option<String>, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct McpConfig { + pub servers: Vec<String>, +} + +impl Config { + pub fn load() -> Result<Self, Box<dyn std::error::Error>> { + let config_path = "config.yaml"; + if Path::new(config_path).exists() { + let content = fs::read_to_string(config_path)?; + let config: Config = serde_yaml::from_str(&content)?; + Ok(config) + } else { + // Provide a default or return error + Ok(Config { + llm: LlmConfig { + provider: "openai".to_string(), + model: "gpt-4".to_string(), + api_key: None, + }, + mcp: None, + }) + } + } +} diff --git a/src/knowledge.rs b/src/knowledge.rs new file mode 100644 index 0000000..9dab12d --- /dev/null +++ b/src/knowledge.rs @@ -0,0 +1,11 @@ +//! Knowledge services management. 
+ +pub struct KnowledgeManager { + // TODO: Add knowledge manager fields +} + +impl KnowledgeManager { + pub fn new() -> Self { + KnowledgeManager {} + } +} diff --git a/src/main.rs b/src/main.rs index e7a11a9..eeadeae 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,27 @@ -fn main() { - println!("Hello, world!"); +mod chat; +mod config; +mod knowledge; +mod mcp; + +use chat::ChatManager; +use config::Config; +use knowledge::KnowledgeManager; +use mcp::McpManager; + +#[tokio::main] +async fn main() -> Result<(), Box<dyn std::error::Error>> { + println!("Initializing llm-chat-core..."); + + // Initialize configuration + let _config = Config::load()?; + println!("Configuration loaded: {:?}", _config); + + // Initialize managers + let _mcp_manager = McpManager::new(); + let _knowledge_manager = KnowledgeManager::new(); + let _chat_manager = ChatManager::new(); + + println!("Core initialized successfully."); + + Ok(()) } diff --git a/src/mcp.rs b/src/mcp.rs new file mode 100644 index 0000000..1726ff9 --- /dev/null +++ b/src/mcp.rs @@ -0,0 +1,11 @@ +//! MCP (Model Context Protocol) tool services management. + +pub struct McpManager { + // TODO: Add MCP manager fields +} + +impl McpManager { + pub fn new() -> Self { + McpManager {} + } +}