fix(security): Gemini API key header + Mutex safety + Agent validation
Some checks failed
CI / Build Frontend (push) Has been cancelled
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

M1-01: Move Gemini API key from URL query param to x-goog-api-key header,
     preventing key leakage in logs, proxies, and telemetry (matches the Anthropic/OpenAI pattern)

M1-03/M1-04: Replace Mutex .unwrap() with .unwrap_or_else(|e| e.into_inner())
     in MemoryMiddleware and LoopGuardMiddleware — recovers from a poisoned lock
     instead of panicking the async runtime

M2-08: Add input validation to agent_create — reject empty names,
     out-of-range temperature (0-2), and zero max_tokens

M11-06: Replace Date.now() message ID with crypto.randomUUID()
     to prevent collisions in classroom chat
This commit is contained in:
iven
2026-04-04 19:15:50 +08:00
parent 985644dd9a
commit 619bad30cb
5 changed files with 19 additions and 7 deletions

View File

@@ -68,10 +68,9 @@ impl LlmDriver for GeminiDriver {
async fn complete(&self, request: CompletionRequest) -> Result<CompletionResponse> {
let api_request = self.build_api_request(&request);
let url = format!(
"{}/models/{}:generateContent?key={}",
"{}/models/{}:generateContent",
self.base_url,
request.model,
self.api_key.expose_secret()
);
tracing::debug!(target: "gemini_driver", "Sending request to: {}", url);
@@ -79,6 +78,7 @@ impl LlmDriver for GeminiDriver {
let response = self.client
.post(&url)
.header("content-type", "application/json")
.header("x-goog-api-key", self.api_key.expose_secret())
.json(&api_request)
.send()
.await
@@ -105,10 +105,9 @@ impl LlmDriver for GeminiDriver {
) -> Pin<Box<dyn Stream<Item = Result<StreamChunk>> + Send + '_>> {
let api_request = self.build_api_request(&request);
let url = format!(
"{}/models/{}:streamGenerateContent?alt=sse&key={}",
"{}/models/{}:streamGenerateContent?alt=sse",
self.base_url,
request.model,
self.api_key.expose_secret()
);
tracing::debug!(target: "gemini_driver", "Starting stream request to: {}", url);
@@ -117,6 +116,7 @@ impl LlmDriver for GeminiDriver {
let response = match self.client
.post(&url)
.header("content-type", "application/json")
.header("x-goog-api-key", self.api_key.expose_secret())
.timeout(std::time::Duration::from_secs(120))
.json(&api_request)
.send()

View File

@@ -37,7 +37,7 @@ impl AgentMiddleware for LoopGuardMiddleware {
tool_name: &str,
tool_input: &Value,
) -> Result<ToolCallDecision> {
let result = self.guard.lock().unwrap().check(tool_name, tool_input);
let result = self.guard.lock().unwrap_or_else(|e| e.into_inner()).check(tool_name, tool_input);
match result {
LoopGuardResult::CircuitBreaker => {
tracing::warn!("[LoopGuardMiddleware] Circuit breaker triggered by tool '{}'", tool_name);

View File

@@ -43,7 +43,7 @@ impl MemoryMiddleware {
/// Check if enough time has passed since the last extraction for this agent.
fn should_extract(&self, agent_id: &str) -> bool {
let now = std::time::Instant::now();
let mut map = self.last_extraction.lock().unwrap();
let mut map = self.last_extraction.lock().unwrap_or_else(|e| e.into_inner());
if let Some(last) = map.get(agent_id) {
if now.duration_since(*last).as_secs() < self.debounce_secs {
return false;

View File

@@ -73,6 +73,18 @@ pub async fn agent_create(
state: State<'_, KernelState>,
request: CreateAgentRequest,
) -> Result<CreateAgentResponse, String> {
// Input validation
let name_trimmed = request.name.trim();
if name_trimmed.is_empty() {
return Err("Agent name cannot be empty".to_string());
}
if request.temperature < 0.0 || request.temperature > 2.0 {
return Err(format!("Temperature must be between 0 and 2, got {}", request.temperature));
}
if request.max_tokens == 0 {
return Err("max_tokens must be greater than 0".to_string());
}
let kernel_lock = state.lock().await;
let kernel = kernel_lock.as_ref()

View File

@@ -173,7 +173,7 @@ export const useClassroomStore = create<ClassroomStore>()((set, get) => ({
// Create a local user message for display
const userMsg: ClassroomChatMessage = {
id: `user-${Date.now()}`,
id: `user-${crypto.randomUUID()}`,
agentId: 'user',
agentName: '你',
agentAvatar: '👤',