feat: init source codes

This commit is contained in:
2026-02-26 22:17:30 +08:00
parent ef49511846
commit c2d3da5664
9 changed files with 133 additions and 4 deletions

View File

@@ -20,6 +20,9 @@ Other files you should read and in git system also:
- [TODO](TODO.md)
Use the standard rustdoc docstring format (as rendered on docs.rs) in source code, and check and fix this whenever you edit. Do not add comments beyond docstrings unless they are genuinely needed.
> Docstring must be written in English.
## Upstream APIs/SDKs 上游API/SDK
> Only APIs/SDKs worth remembering are listed here.

View File

@@ -1,5 +1,22 @@
# To-dos
The task list is maintained below.
- [x] Initialize project structure (`main.rs`, `chat.rs`, `mcp.rs`, `knowledge.rs`, `config.rs`)
> 初始化项目结构
Translations follow "Description" section in [README](README.md).
- [ ] Define configuration schema in `config.rs` (load from YAML/ENV) [In Progress]
> 定义配置结构
- [ ] Implement MCP tool registration and execution in `mcp.rs` using `mcp-sdk-rs`
> 使用 `mcp-sdk-rs` 在 `mcp.rs` 中实现 MCP 工具注册和执行
- [ ] Set up basic dialog management in `chat.rs` using `rig`
> 使用 `rig` 在 `chat.rs` 中设置基本对话管理
- [ ] Implement knowledge base interface in `knowledge.rs`
> 在 `knowledge.rs` 中实现知识库接口
- [ ] Integrate all components in `main.rs` to create a working CLI chat bot
> 在 `main.rs` 中集成所有组件以创建可工作的 CLI 聊天机器人
- [ ] Add unit tests for each module
> 为每个模块添加单元测试

View File

@@ -6,3 +6,6 @@ edition = "2024"
[dependencies]
# MCP (Model Context Protocol) SDK, used for tool registration/execution in `mcp.rs`.
mcp-sdk-rs = "0.3.4"
# LLM agent framework, used for dialog management in `chat.rs`.
rig-core = "0.31.0"
# Serialization framework; "derive" enables #[derive(Serialize, Deserialize)].
serde = { version = "1.0", features = ["derive"] }
# YAML parsing for `config.yaml` (see `config.rs`).
serde_yaml = "0.9"
# Async runtime; "full" enables all tokio features.
tokio = { version = "1.0", features = ["full"] }

7
config.yaml Normal file
View File

@@ -0,0 +1,7 @@
# LLM backend settings (deserialized into `LlmConfig`).
llm:
  provider: "openai"
  model: "gpt-4o"
  # NOTE(review): placeholder — supply a real key, and avoid committing secrets.
  api_key: "your-api-key-here"
# MCP settings (deserialized into `McpConfig`).
mcp:
  # MCP server endpoint URLs.
  servers:
    - "http://localhost:3000"

11
src/chat.rs Normal file
View File

@@ -0,0 +1,11 @@
//! Dialog management for the LLM chat.

/// Manages dialog state for LLM chat sessions.
///
/// Currently a placeholder; fields will be added as `chat.rs` is implemented.
#[derive(Debug, Default)]
pub struct ChatManager {
    // TODO: Add chat manager fields
}

impl ChatManager {
    /// Creates an empty `ChatManager`.
    pub fn new() -> Self {
        Self::default()
    }
}

42
src/config.rs Normal file
View File

@@ -0,0 +1,42 @@
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;
/// Top-level application configuration.
#[derive(Debug, Serialize, Deserialize)]
pub struct Config {
    /// LLM backend settings.
    pub llm: LlmConfig,
    /// Optional MCP server settings.
    pub mcp: Option<McpConfig>,
}

/// Configuration for the LLM backend.
#[derive(Debug, Serialize, Deserialize)]
pub struct LlmConfig {
    /// Provider name, e.g. `"openai"`.
    pub provider: String,
    /// Model identifier, e.g. `"gpt-4"`.
    pub model: String,
    /// Optional API key.
    pub api_key: Option<String>,
}

/// Configuration for MCP (Model Context Protocol) servers.
#[derive(Debug, Serialize, Deserialize)]
pub struct McpConfig {
    /// MCP server endpoint URLs.
    pub servers: Vec<String>,
}

impl Config {
    /// Loads configuration from `config.yaml` in the current directory,
    /// falling back to built-in defaults when the file does not exist.
    ///
    /// # Errors
    ///
    /// Returns an error if the file exists but cannot be read, or if its
    /// contents are not valid YAML for [`Config`].
    pub fn load() -> Result<Self, Box<dyn std::error::Error>> {
        // Read directly instead of `Path::exists()` + read: avoids the
        // check-then-use race and a second filesystem access.
        match fs::read_to_string("config.yaml") {
            Ok(content) => Ok(serde_yaml::from_str(&content)?),
            // A missing file is not an error: fall back to defaults.
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(Config {
                llm: LlmConfig {
                    provider: "openai".to_string(),
                    model: "gpt-4".to_string(),
                    api_key: None,
                },
                mcp: None,
            }),
            Err(e) => Err(e.into()),
        }
    }
}

11
src/knowledge.rs Normal file
View File

@@ -0,0 +1,11 @@
//! Knowledge services management.

/// Manages access to the knowledge base.
///
/// Currently a placeholder; fields will be added as `knowledge.rs` is implemented.
#[derive(Debug, Default)]
pub struct KnowledgeManager {
    // TODO: Add knowledge manager fields
}

impl KnowledgeManager {
    /// Creates an empty `KnowledgeManager`.
    pub fn new() -> Self {
        Self::default()
    }
}

View File

@@ -1,3 +1,27 @@
fn main() {
println!("Hello, world!");
mod chat;
mod config;
mod knowledge;
mod mcp;
use chat::ChatManager;
use config::Config;
use knowledge::KnowledgeManager;
use mcp::McpManager;
/// Entry point: loads configuration and initializes the core managers.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("Initializing llm-chat-core...");

    // Load configuration (falls back to defaults when config.yaml is absent).
    // No underscore prefix: the value IS used in the println! below.
    let config = Config::load()?;
    println!("Configuration loaded: {:?}", config);

    // Initialize managers; placeholders until each module is implemented.
    let _mcp_manager = McpManager::new();
    let _knowledge_manager = KnowledgeManager::new();
    let _chat_manager = ChatManager::new();

    println!("Core initialized successfully.");
    Ok(())
}

11
src/mcp.rs Normal file
View File

@@ -0,0 +1,11 @@
//! MCP (Model Context Protocol) tool services management.
pub struct McpManager {
// TODO: Add MCP manager fields
}
impl McpManager {
pub fn new() -> Self {
McpManager {}
}
}