feat(hands): implement 4 new Hands and fix BrowserHand registration
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

- Add ResearcherHand: DuckDuckGo search, web fetch, report generation
- Add CollectorHand: data collection, aggregation, multiple output formats
- Add ClipHand: video processing (trim, convert, thumbnail, concat)
- Add TwitterHand: Twitter/X automation (tweet, retweet, like, search)
- Fix BrowserHand not registered in Kernel (critical bug)
- Add HandError variant to ZclawError enum
- Update documentation: 9/11 Hands implemented (82%)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
iven
2026-03-24 13:22:44 +08:00
parent 3ff08faa56
commit 1441f98c5e
15 changed files with 2376 additions and 36 deletions

View File

@@ -0,0 +1,642 @@
//! Clip Hand - Video processing and editing capabilities
//!
//! This hand provides video processing features:
//! - Trim: Cut video segments
//! - Convert: Format conversion
//! - Resize: Resolution changes
//! - Thumbnail: Generate thumbnails
//! - Concat: Join videos
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use std::process::Command;
use std::sync::Arc;
use tokio::sync::RwLock;
use zclaw_types::Result;
use crate::{Hand, HandConfig, HandContext, HandResult};
/// Video container formats supported for conversion output.
///
/// Serialized in lowercase (e.g. `"mp4"`) to match the hand's JSON
/// input schema.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum VideoFormat {
    /// MP4 container — the default, with the broadest player support.
    #[default]
    Mp4,
    Webm,
    Mov,
    Avi,
    Gif,
}
/// Output resolution presets, plus an arbitrary custom size.
///
/// Serialized in lowercase; `Custom` carries explicit pixel dimensions.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Resolution {
    /// Keep the source resolution (default — no scale filter applied).
    #[default]
    Original,
    P480,
    P720,
    P1080,
    P4k,
    /// Explicit width/height in pixels.
    Custom { width: u32, height: u32 },
}
/// Trim configuration.
///
/// Semantics mirror FFmpeg's `-ss`/`-to`/`-t` flags: when both
/// `duration` and `end_time` are provided, `duration` wins (see
/// `ClipHand::execute_trim`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TrimConfig {
    /// Input video path
    pub input_path: String,
    /// Output video path
    pub output_path: String,
    /// Start time in seconds (the beginning of the file when `None`)
    #[serde(default)]
    pub start_time: Option<f64>,
    /// End time in seconds (absolute position, not a duration)
    #[serde(default)]
    pub end_time: Option<f64>,
    /// Duration in seconds (alternative to end_time; takes precedence)
    #[serde(default)]
    pub duration: Option<f64>,
}
/// Convert configuration.
///
/// Drives `ClipHand::execute_convert`; the target format is implied by
/// the `output_path` extension as far as FFmpeg is concerned — `format`
/// is echoed back in the result payload.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ConvertConfig {
    /// Input video path
    pub input_path: String,
    /// Output video path
    pub output_path: String,
    /// Output format (defaults to MP4)
    #[serde(default)]
    pub format: VideoFormat,
    /// Resolution (defaults to `Original`, i.e. no scaling)
    #[serde(default)]
    pub resolution: Resolution,
    /// Video bitrate passed to `-b:v` (e.g., "2M")
    #[serde(default)]
    pub video_bitrate: Option<String>,
    /// Audio bitrate passed to `-b:a` (e.g., "128k")
    #[serde(default)]
    pub audio_bitrate: Option<String>,
}
/// Thumbnail configuration.
///
/// Extracts a single frame; scaling is only applied when BOTH `width`
/// and `height` are given (see `ClipHand::execute_thumbnail`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ThumbnailConfig {
    /// Input video path
    pub input_path: String,
    /// Output image path
    pub output_path: String,
    /// Time position in seconds (defaults to 0.0, i.e. the first frame)
    #[serde(default)]
    pub time: f64,
    /// Output width in pixels
    #[serde(default)]
    pub width: Option<u32>,
    /// Output height in pixels
    #[serde(default)]
    pub height: Option<u32>,
}
/// Concat configuration.
///
/// Inputs are joined with FFmpeg's concat demuxer using stream copy,
/// so they should share codecs and encoding parameters — mismatched
/// inputs will produce a broken output rather than a re-encode.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ConcatConfig {
    /// Input video paths, in playback order
    pub input_paths: Vec<String>,
    /// Output video path
    pub output_path: String,
}
/// Video info result — structured metadata about a media file.
///
/// NOTE(review): this type is not constructed anywhere in this file;
/// `execute_info` returns raw ffprobe JSON instead. Either parse the
/// ffprobe output into this struct or drop it.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct VideoInfo {
    pub path: String,
    /// Total duration in seconds.
    pub duration_secs: f64,
    pub width: u32,
    pub height: u32,
    /// Frames per second.
    pub fps: f64,
    pub format: String,
    pub video_codec: String,
    /// `None` for video with no audio stream.
    pub audio_codec: Option<String>,
    pub bitrate_kbps: Option<u32>,
    pub file_size_bytes: u64,
}
/// Clip action types.
///
/// Wire format is internally tagged on `"action"`, e.g.
/// `{"action": "trim", "config": {...}}`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "action")]
pub enum ClipAction {
    /// Cut a segment out of a video (stream copy, no re-encode).
    #[serde(rename = "trim")]
    Trim { config: TrimConfig },
    /// Convert container/format, optionally rescaling.
    #[serde(rename = "convert")]
    Convert { config: ConvertConfig },
    /// Rescale only — sugar over `Convert` with default bitrates.
    #[serde(rename = "resize")]
    Resize { input_path: String, output_path: String, resolution: Resolution },
    /// Extract a single frame as an image.
    #[serde(rename = "thumbnail")]
    Thumbnail { config: ThumbnailConfig },
    /// Join several videos into one.
    #[serde(rename = "concat")]
    Concat { config: ConcatConfig },
    /// Probe a file's metadata via ffprobe.
    #[serde(rename = "info")]
    Info { path: String },
    /// Report whether FFmpeg is installed and which version.
    #[serde(rename = "check_ffmpeg")]
    CheckFfmpeg,
}
/// Clip Hand implementation.
pub struct ClipHand {
    /// Static hand metadata registered with the kernel.
    config: HandConfig,
    /// Lazily discovered FFmpeg executable location, cached after the
    /// first successful probe (see `find_ffmpeg`).
    ffmpeg_path: Arc<RwLock<Option<String>>>,
}
impl ClipHand {
    /// Create a new clip hand with its static configuration.
    ///
    /// The FFmpeg path is resolved lazily on first use and cached in
    /// `ffmpeg_path` (see [`ClipHand::find_ffmpeg`]).
    pub fn new() -> Self {
        Self {
            config: HandConfig {
                id: "clip".to_string(),
                name: "Clip".to_string(),
                description: "Video processing and editing capabilities using FFmpeg".to_string(),
                needs_approval: false,
                dependencies: vec!["ffmpeg".to_string()],
                // JSON Schema mirroring the `ClipAction` enum (one
                // `oneOf` branch per action).
                input_schema: Some(serde_json::json!({
                    "type": "object",
                    "oneOf": [
                        {
                            "properties": {
                                "action": { "const": "trim" },
                                "config": {
                                    "type": "object",
                                    "properties": {
                                        "inputPath": { "type": "string" },
                                        "outputPath": { "type": "string" },
                                        "startTime": { "type": "number" },
                                        "endTime": { "type": "number" },
                                        "duration": { "type": "number" }
                                    },
                                    "required": ["inputPath", "outputPath"]
                                }
                            },
                            "required": ["action", "config"]
                        },
                        {
                            "properties": {
                                "action": { "const": "convert" },
                                "config": {
                                    "type": "object",
                                    "properties": {
                                        "inputPath": { "type": "string" },
                                        "outputPath": { "type": "string" },
                                        "format": { "type": "string", "enum": ["mp4", "webm", "mov", "avi", "gif"] },
                                        "resolution": { "type": "string" }
                                    },
                                    "required": ["inputPath", "outputPath"]
                                }
                            },
                            "required": ["action", "config"]
                        },
                        {
                            "properties": {
                                "action": { "const": "thumbnail" },
                                "config": {
                                    "type": "object",
                                    "properties": {
                                        "inputPath": { "type": "string" },
                                        "outputPath": { "type": "string" },
                                        "time": { "type": "number" }
                                    },
                                    "required": ["inputPath", "outputPath"]
                                }
                            },
                            "required": ["action", "config"]
                        },
                        {
                            "properties": {
                                "action": { "const": "concat" },
                                "config": {
                                    "type": "object",
                                    "properties": {
                                        "inputPaths": { "type": "array", "items": { "type": "string" } },
                                        "outputPath": { "type": "string" }
                                    },
                                    "required": ["inputPaths", "outputPath"]
                                }
                            },
                            "required": ["action", "config"]
                        },
                        {
                            "properties": {
                                "action": { "const": "info" },
                                "path": { "type": "string" }
                            },
                            "required": ["action", "path"]
                        },
                        {
                            "properties": {
                                "action": { "const": "check_ffmpeg" }
                            },
                            "required": ["action"]
                        }
                    ]
                })),
                tags: vec!["video".to_string(), "media".to_string(), "editing".to_string()],
                enabled: true,
            },
            ffmpeg_path: Arc::new(RwLock::new(None)),
        }
    }

    /// Find the FFmpeg executable, caching the first hit.
    ///
    /// Probes PATH first, then conventional per-OS install locations,
    /// by running `ffmpeg -version`.
    /// NOTE(review): `Command::output()` blocks inside an async fn —
    /// consider `tokio::process::Command` or `spawn_blocking` if this
    /// runs on a latency-sensitive runtime thread.
    async fn find_ffmpeg(&self) -> Option<String> {
        // Fast path: a previous call already located the binary.
        {
            let cached = self.ffmpeg_path.read().await;
            if cached.is_some() {
                return cached.clone();
            }
        }
        // Try common locations
        let candidates = if cfg!(windows) {
            vec!["ffmpeg.exe", "C:\\ffmpeg\\bin\\ffmpeg.exe", "C:\\Program Files\\ffmpeg\\bin\\ffmpeg.exe"]
        } else {
            vec!["ffmpeg", "/usr/bin/ffmpeg", "/usr/local/bin/ffmpeg"]
        };
        for candidate in candidates {
            if Command::new(candidate).arg("-version").output().is_ok() {
                let mut cached = self.ffmpeg_path.write().await;
                *cached = Some(candidate.to_string());
                return Some(candidate.to_string());
            }
        }
        None
    }

    /// Trim a video without re-encoding (`-c copy`).
    ///
    /// Precedence: `duration` wins over `end_time`; when both
    /// `start_time` and `end_time` are given, the end is rewritten as a
    /// `-t` duration relative to the start (because `-ss` has already
    /// consumed the lead-in).
    /// NOTE(review): with stream copy, cuts snap to keyframes, so the
    /// output may start slightly before the requested time.
    async fn execute_trim(&self, config: &TrimConfig) -> Result<Value> {
        let ffmpeg = self.find_ffmpeg().await
            .ok_or_else(|| zclaw_types::ZclawError::HandError("FFmpeg not found. Please install FFmpeg.".to_string()))?;
        let mut args: Vec<String> = vec!["-i".to_string(), config.input_path.clone()];
        // Seek to the requested start (placed after -i: accurate, but
        // decodes up to the seek point).
        if let Some(start) = config.start_time {
            args.push("-ss".to_string());
            args.push(start.to_string());
        }
        // Length of the cut: explicit duration wins over end time.
        if let Some(duration) = config.duration {
            args.push("-t".to_string());
            args.push(duration.to_string());
        } else if let Some(end) = config.end_time {
            if let Some(start) = config.start_time {
                args.push("-t".to_string());
                args.push((end - start).to_string());
            } else {
                args.push("-to".to_string());
                args.push(end.to_string());
            }
        }
        args.extend_from_slice(&["-c".to_string(), "copy".to_string(), config.output_path.clone()]);
        let output = Command::new(&ffmpeg)
            .args(&args)
            .output()
            .map_err(|e| zclaw_types::ZclawError::HandError(format!("FFmpeg execution failed: {}", e)))?;
        if output.status.success() {
            Ok(json!({
                "success": true,
                "output_path": config.output_path,
                "message": "Video trimmed successfully"
            }))
        } else {
            // FFmpeg writes diagnostics to stderr; surface them verbatim.
            let stderr = String::from_utf8_lossy(&output.stderr);
            Ok(json!({
                "success": false,
                "error": stderr,
                "message": "Failed to trim video"
            }))
        }
    }

    /// Convert a video's container/format, optionally rescaling and
    /// re-targeting bitrates. FFmpeg infers the output container from
    /// the `output_path` extension.
    async fn execute_convert(&self, config: &ConvertConfig) -> Result<Value> {
        let ffmpeg = self.find_ffmpeg().await
            .ok_or_else(|| zclaw_types::ZclawError::HandError("FFmpeg not found".to_string()))?;
        let mut args: Vec<String> = vec!["-i".to_string(), config.input_path.clone()];
        // Resolution: explicit dimensions, a preset, or no filter at all
        // for `Original`.
        if let Resolution::Custom { width, height } = config.resolution {
            args.push("-vf".to_string());
            args.push(format!("scale={}:{}", width, height));
        } else {
            let scale = match &config.resolution {
                Resolution::P480 => "scale=854:480",
                Resolution::P720 => "scale=1280:720",
                Resolution::P1080 => "scale=1920:1080",
                Resolution::P4k => "scale=3840:2160",
                _ => "",
            };
            if !scale.is_empty() {
                args.push("-vf".to_string());
                args.push(scale.to_string());
            }
        }
        // Optional bitrate overrides.
        if let Some(ref vbr) = config.video_bitrate {
            args.push("-b:v".to_string());
            args.push(vbr.clone());
        }
        if let Some(ref abr) = config.audio_bitrate {
            args.push("-b:a".to_string());
            args.push(abr.clone());
        }
        args.push(config.output_path.clone());
        let output = Command::new(&ffmpeg)
            .args(&args)
            .output()
            .map_err(|e| zclaw_types::ZclawError::HandError(format!("FFmpeg execution failed: {}", e)))?;
        if output.status.success() {
            Ok(json!({
                "success": true,
                "output_path": config.output_path,
                "format": format!("{:?}", config.format),
                "message": "Video converted successfully"
            }))
        } else {
            let stderr = String::from_utf8_lossy(&output.stderr);
            Ok(json!({
                "success": false,
                "error": stderr,
                "message": "Failed to convert video"
            }))
        }
    }

    /// Extract a single frame (`-vframes 1`) at `config.time` seconds.
    /// Scaling is applied only when BOTH width and height are given.
    async fn execute_thumbnail(&self, config: &ThumbnailConfig) -> Result<Value> {
        let ffmpeg = self.find_ffmpeg().await
            .ok_or_else(|| zclaw_types::ZclawError::HandError("FFmpeg not found".to_string()))?;
        let mut args: Vec<String> = vec![
            "-i".to_string(), config.input_path.clone(),
            "-ss".to_string(), config.time.to_string(),
            "-vframes".to_string(), "1".to_string(),
        ];
        if let (Some(w), Some(h)) = (config.width, config.height) {
            args.push("-vf".to_string());
            args.push(format!("scale={}:{}", w, h));
        }
        args.push(config.output_path.clone());
        let output = Command::new(&ffmpeg)
            .args(&args)
            .output()
            .map_err(|e| zclaw_types::ZclawError::HandError(format!("FFmpeg execution failed: {}", e)))?;
        if output.status.success() {
            Ok(json!({
                "success": true,
                "output_path": config.output_path,
                "time": config.time,
                "message": "Thumbnail extracted successfully"
            }))
        } else {
            let stderr = String::from_utf8_lossy(&output.stderr);
            Ok(json!({
                "success": false,
                "error": stderr,
                "message": "Failed to extract thumbnail"
            }))
        }
    }

    /// Losslessly concatenate videos via FFmpeg's concat demuxer.
    ///
    /// Writes a temporary file list, runs
    /// `ffmpeg -f concat -safe 0 -i <list> -c copy <out>`, then removes
    /// the list file regardless of outcome.
    async fn execute_concat(&self, config: &ConcatConfig) -> Result<Value> {
        let ffmpeg = self.find_ffmpeg().await
            .ok_or_else(|| zclaw_types::ZclawError::HandError("FFmpeg not found".to_string()))?;
        // Build the concat list. A single quote inside a path must be
        // escaped for the concat demuxer: close the quote, emit \',
        // then reopen (same convention as POSIX shells).
        let concat_content: String = config.input_paths.iter()
            .map(|p| format!("file '{}'", p.replace('\'', "'\\''")))
            .collect::<Vec<_>>()
            .join("\n");
        // Unique list-file name: the previous fixed "zclaw_concat.txt"
        // raced between concurrent concat jobs.
        let nonce = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .map(|d| d.as_nanos())
            .unwrap_or(0);
        let temp_file = std::env::temp_dir()
            .join(format!("zclaw_concat_{}_{}.txt", std::process::id(), nonce));
        std::fs::write(&temp_file, &concat_content)
            .map_err(|e| zclaw_types::ZclawError::HandError(format!("Failed to create concat file: {}", e)))?;
        // to_string_lossy avoids the panic the old `.to_str().unwrap()`
        // could hit on non-UTF-8 temp directories.
        let temp_path = temp_file.to_string_lossy().into_owned();
        let args = vec![
            "-f", "concat",
            "-safe", "0",
            "-i", temp_path.as_str(),
            "-c", "copy",
            &config.output_path,
        ];
        let run_result = Command::new(&ffmpeg)
            .args(&args)
            .output();
        // Always remove the list file, even when spawning FFmpeg failed
        // (the old code leaked it on the early-return path).
        let _ = std::fs::remove_file(&temp_file);
        let output = run_result
            .map_err(|e| zclaw_types::ZclawError::HandError(format!("FFmpeg execution failed: {}", e)))?;
        if output.status.success() {
            Ok(json!({
                "success": true,
                "output_path": config.output_path,
                "videos_concatenated": config.input_paths.len(),
                "message": "Videos concatenated successfully"
            }))
        } else {
            let stderr = String::from_utf8_lossy(&output.stderr);
            Ok(json!({
                "success": false,
                "error": stderr,
                "message": "Failed to concatenate videos"
            }))
        }
    }

    /// Get media metadata as raw ffprobe JSON.
    ///
    /// NOTE(review): the ffprobe path is derived by string-replacing
    /// "ffmpeg" → "ffprobe" in the resolved FFmpeg path — assumes the
    /// two are installed side by side; confirm for exotic installs.
    async fn execute_info(&self, path: &str) -> Result<Value> {
        let ffprobe = {
            let ffmpeg = self.find_ffmpeg().await
                .ok_or_else(|| zclaw_types::ZclawError::HandError("FFmpeg not found".to_string()))?;
            ffmpeg.replace("ffmpeg", "ffprobe")
        };
        let args = vec![
            "-v", "quiet",
            "-print_format", "json",
            "-show_format",
            "-show_streams",
            path,
        ];
        let output = Command::new(&ffprobe)
            .args(&args)
            .output()
            .map_err(|e| zclaw_types::ZclawError::HandError(format!("FFprobe execution failed: {}", e)))?;
        if output.status.success() {
            let stdout = String::from_utf8_lossy(&output.stdout);
            // Fall back to the raw text if ffprobe emitted non-JSON.
            let info: Value = serde_json::from_str(&stdout)
                .unwrap_or_else(|_| json!({"raw": stdout.to_string()}));
            Ok(json!({
                "success": true,
                "path": path,
                "info": info
            }))
        } else {
            let stderr = String::from_utf8_lossy(&output.stderr);
            Ok(json!({
                "success": false,
                "error": stderr,
                "message": "Failed to get video info"
            }))
        }
    }

    /// Report FFmpeg availability, version (first line of
    /// `ffmpeg -version`), and an install hint when missing.
    async fn check_ffmpeg(&self) -> Result<Value> {
        match self.find_ffmpeg().await {
            Some(path) => {
                let output = Command::new(&path)
                    .arg("-version")
                    .output()
                    .ok();
                let version = output.and_then(|o| {
                    let stdout = String::from_utf8_lossy(&o.stdout);
                    stdout.lines().next().map(|s| s.to_string())
                }).unwrap_or_else(|| "Unknown version".to_string());
                Ok(json!({
                    "available": true,
                    "path": path,
                    "version": version
                }))
            }
            None => Ok(json!({
                "available": false,
                "message": "FFmpeg not found. Please install FFmpeg to use video processing features.",
                "install_url": if cfg!(windows) {
                    "https://ffmpeg.org/download.html#build-windows"
                } else if cfg!(target_os = "macos") {
                    "brew install ffmpeg"
                } else {
                    "sudo apt install ffmpeg"
                }
            }))
        }
    }
}
impl Default for ClipHand {
fn default() -> Self {
Self::new()
}
}
/// Returns `true` if any conventional FFmpeg location responds to
/// `ffmpeg -version`. Kept consistent with `ClipHand::find_ffmpeg`
/// (the old inline checks probed the Windows install path even on
/// Unix and never tried `/usr/local/bin/ffmpeg`).
fn ffmpeg_probe_ok() -> bool {
    let candidates: &[&str] = if cfg!(windows) {
        &["ffmpeg.exe", "C:\\ffmpeg\\bin\\ffmpeg.exe", "C:\\Program Files\\ffmpeg\\bin\\ffmpeg.exe"]
    } else {
        &["ffmpeg", "/usr/bin/ffmpeg", "/usr/local/bin/ffmpeg"]
    };
    candidates.iter().any(|c| Command::new(c).arg("-version").output().is_ok())
}

#[async_trait]
impl Hand for ClipHand {
    fn config(&self) -> &HandConfig {
        &self.config
    }

    /// Deserialize a [`ClipAction`] from `input`, dispatch it, and wrap
    /// the JSON payload in a [`HandResult`] with timing info.
    async fn execute(&self, _context: &HandContext, input: Value) -> Result<HandResult> {
        // `input` is owned here, so deserialize it directly — the old
        // `input.clone()` copied the whole JSON tree for nothing.
        let action: ClipAction = serde_json::from_value(input)
            .map_err(|e| zclaw_types::ZclawError::HandError(format!("Invalid action: {}", e)))?;
        let start = std::time::Instant::now();
        let result = match action {
            ClipAction::Trim { config } => self.execute_trim(&config).await?,
            ClipAction::Convert { config } => self.execute_convert(&config).await?,
            // Resize is a thin wrapper over convert with only the
            // resolution changed.
            ClipAction::Resize { input_path, output_path, resolution } => {
                let convert_config = ConvertConfig {
                    input_path,
                    output_path,
                    format: VideoFormat::Mp4,
                    resolution,
                    video_bitrate: None,
                    audio_bitrate: None,
                };
                self.execute_convert(&convert_config).await?
            }
            ClipAction::Thumbnail { config } => self.execute_thumbnail(&config).await?,
            ClipAction::Concat { config } => self.execute_concat(&config).await?,
            ClipAction::Info { path } => self.execute_info(&path).await?,
            ClipAction::CheckFfmpeg => self.check_ffmpeg().await?,
        };
        let duration_ms = start.elapsed().as_millis() as u64;
        Ok(HandResult {
            // Most executors report "success"; check_ffmpeg reports
            // "available" instead — honor both (the old code marked a
            // successful check_ffmpeg as failed).
            success: result.get("success").and_then(|v| v.as_bool())
                .or_else(|| result.get("available").and_then(|v| v.as_bool()))
                .unwrap_or(false),
            output: result,
            error: None,
            duration_ms: Some(duration_ms),
            status: "completed".to_string(),
        })
    }

    fn needs_approval(&self) -> bool {
        false
    }

    /// Report missing external dependencies (FFmpeg only).
    fn check_dependencies(&self) -> Result<Vec<String>> {
        let mut missing = Vec::new();
        if !ffmpeg_probe_ok() {
            missing.push("FFmpeg not found. Install from https://ffmpeg.org/".to_string());
        }
        Ok(missing)
    }

    /// `Idle` when FFmpeg is reachable, `Failed` otherwise.
    fn status(&self) -> crate::HandStatus {
        if ffmpeg_probe_ok() {
            crate::HandStatus::Idle
        } else {
            crate::HandStatus::Failed
        }
    }
}

View File

@@ -0,0 +1,409 @@
//! Collector Hand - Data collection and aggregation capabilities
//!
//! This hand provides web scraping, data extraction, and aggregation features.
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use zclaw_types::Result;
use crate::{Hand, HandConfig, HandContext, HandResult};
/// Output format options for collected data.
///
/// Serialized in lowercase (e.g. `"json"`) to match the input schema.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum OutputFormat {
    /// JSON output (default).
    #[default]
    Json,
    Csv,
    Markdown,
    Text,
}
/// Collection target configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CollectionTarget {
    /// URL to collect from
    pub url: String,
    /// CSS selector for items
    /// NOTE(review): selectors are only loosely pattern-matched (see
    /// `extract_by_pattern`), not evaluated as real CSS — confirm this
    /// matches what callers expect.
    #[serde(default)]
    pub selector: Option<String>,
    /// Fields to extract: output field name -> selector/pattern
    #[serde(default)]
    pub fields: HashMap<String, String>,
    /// Maximum items to collect (default 100)
    #[serde(default = "default_max_items")]
    pub max_items: usize,
}

/// Serde default for `CollectionTarget::max_items`.
fn default_max_items() -> usize { 100 }
/// A single collected item.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CollectedItem {
    /// Source URL the data was collected from
    pub source_url: String,
    /// Collected data, keyed by the requested field names
    pub data: HashMap<String, Value>,
    /// Collection timestamp (RFC 3339)
    pub collected_at: String,
}
/// Result of one collection run against a target URL.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CollectionResult {
    /// Target URL
    pub url: String,
    /// Collected items
    pub items: Vec<CollectedItem>,
    /// Total items collected (redundant with `items.len()`, kept for
    /// serialized consumers)
    pub total_items: usize,
    /// Output format requested by the caller
    pub format: OutputFormat,
    /// Collection timestamp (RFC 3339)
    pub collected_at: String,
    /// Wall-clock duration of the run in ms
    pub duration_ms: u64,
}
/// Aggregation configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AggregationConfig {
    /// URLs to aggregate (only the first 10 are fetched — see
    /// `execute_aggregate`)
    pub urls: Vec<String>,
    /// Fields to aggregate from each page
    #[serde(default)]
    pub aggregate_fields: Vec<String>,
}
/// Collector action types.
///
/// Wire format is internally tagged on `"action"`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "action")]
pub enum CollectorAction {
    /// Collect fields from one target page.
    #[serde(rename = "collect")]
    Collect { target: CollectionTarget, format: Option<OutputFormat> },
    /// Fetch several pages and aggregate the requested fields.
    #[serde(rename = "aggregate")]
    Aggregate { config: AggregationConfig },
    /// Extract named fields from a single page.
    #[serde(rename = "extract")]
    Extract { url: String, selectors: HashMap<String, String> },
}
/// Collector Hand implementation.
pub struct CollectorHand {
    /// Static hand metadata registered with the kernel.
    config: HandConfig,
    /// Shared HTTP client (30 s timeout, custom user agent).
    client: reqwest::Client,
    /// URL -> raw HTML response cache.
    /// NOTE(review): grows without bound and never expires — consider
    /// an LRU or TTL if this hand is long-lived.
    cache: Arc<RwLock<HashMap<String, String>>>,
}
impl CollectorHand {
/// Create a new collector hand
pub fn new() -> Self {
Self {
config: HandConfig {
id: "collector".to_string(),
name: "Collector".to_string(),
description: "Data collection and aggregation from web sources".to_string(),
needs_approval: false,
dependencies: vec!["network".to_string()],
input_schema: Some(serde_json::json!({
"type": "object",
"oneOf": [
{
"properties": {
"action": { "const": "collect" },
"target": {
"type": "object",
"properties": {
"url": { "type": "string" },
"selector": { "type": "string" },
"fields": { "type": "object" },
"maxItems": { "type": "integer" }
},
"required": ["url"]
},
"format": { "type": "string", "enum": ["json", "csv", "markdown", "text"] }
},
"required": ["action", "target"]
},
{
"properties": {
"action": { "const": "extract" },
"url": { "type": "string" },
"selectors": { "type": "object" }
},
"required": ["action", "url", "selectors"]
},
{
"properties": {
"action": { "const": "aggregate" },
"config": {
"type": "object",
"properties": {
"urls": { "type": "array", "items": { "type": "string" } },
"aggregateFields": { "type": "array", "items": { "type": "string" } }
},
"required": ["urls"]
}
},
"required": ["action", "config"]
}
]
})),
tags: vec!["data".to_string(), "collection".to_string(), "scraping".to_string()],
enabled: true,
},
client: reqwest::Client::builder()
.timeout(std::time::Duration::from_secs(30))
.user_agent("ZCLAW-Collector/1.0")
.build()
.unwrap_or_else(|_| reqwest::Client::new()),
cache: Arc::new(RwLock::new(HashMap::new())),
}
}
/// Fetch a page
async fn fetch_page(&self, url: &str) -> Result<String> {
// Check cache
{
let cache = self.cache.read().await;
if let Some(cached) = cache.get(url) {
return Ok(cached.clone());
}
}
let response = self.client
.get(url)
.send()
.await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Request failed: {}", e)))?;
let html = response.text().await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Failed to read response: {}", e)))?;
// Cache the result
{
let mut cache = self.cache.write().await;
cache.insert(url.to_string(), html.clone());
}
Ok(html)
}
/// Extract text by simple pattern matching
fn extract_by_pattern(&self, html: &str, pattern: &str) -> String {
// Simple implementation: find text between tags
if pattern.contains("title") || pattern.contains("h1") {
if let Some(start) = html.find("<title>") {
if let Some(end) = html[start..].find("</title>") {
return html[start + 7..start + end]
.replace("&amp;", "&")
.replace("&lt;", "<")
.replace("&gt;", ">")
.trim()
.to_string();
}
}
}
// Extract meta description
if pattern.contains("description") || pattern.contains("meta") {
if let Some(start) = html.find("name=\"description\"") {
let rest = &html[start..];
if let Some(content_start) = rest.find("content=\"") {
let content = &rest[content_start + 9..];
if let Some(end) = content.find('"') {
return content[..end].trim().to_string();
}
}
}
}
// Default: extract visible text
self.extract_visible_text(html)
}
/// Extract visible text from HTML
fn extract_visible_text(&self, html: &str) -> String {
let mut text = String::new();
let mut in_tag = false;
for c in html.chars() {
match c {
'<' => in_tag = true,
'>' => in_tag = false,
_ if in_tag => {}
' ' | '\n' | '\t' | '\r' => {
if !text.ends_with(' ') && !text.is_empty() {
text.push(' ');
}
}
_ => text.push(c),
}
}
// Limit length
if text.len() > 500 {
text.truncate(500);
text.push_str("...");
}
text.trim().to_string()
}
/// Execute collection
async fn execute_collect(&self, target: &CollectionTarget, format: OutputFormat) -> Result<CollectionResult> {
let start = std::time::Instant::now();
let html = self.fetch_page(&target.url).await?;
let mut items = Vec::new();
let mut data = HashMap::new();
// Extract fields
for (field_name, selector) in &target.fields {
let value = self.extract_by_pattern(&html, selector);
data.insert(field_name.clone(), Value::String(value));
}
// If no fields specified, extract basic info
if data.is_empty() {
data.insert("title".to_string(), Value::String(self.extract_by_pattern(&html, "title")));
data.insert("content".to_string(), Value::String(self.extract_visible_text(&html)));
}
items.push(CollectedItem {
source_url: target.url.clone(),
data,
collected_at: chrono::Utc::now().to_rfc3339(),
});
Ok(CollectionResult {
url: target.url.clone(),
total_items: items.len(),
items,
format,
collected_at: chrono::Utc::now().to_rfc3339(),
duration_ms: start.elapsed().as_millis() as u64,
})
}
/// Execute aggregation
async fn execute_aggregate(&self, config: &AggregationConfig) -> Result<Value> {
let start = std::time::Instant::now();
let mut results = Vec::new();
for url in config.urls.iter().take(10) {
match self.fetch_page(url).await {
Ok(html) => {
let mut data = HashMap::new();
for field in &config.aggregate_fields {
let value = self.extract_by_pattern(&html, field);
data.insert(field.clone(), Value::String(value));
}
if data.is_empty() {
data.insert("content".to_string(), Value::String(self.extract_visible_text(&html)));
}
results.push(data);
}
Err(e) => {
tracing::warn!(target: "collector", url = url, error = %e, "Failed to fetch");
}
}
}
Ok(json!({
"results": results,
"source_count": config.urls.len(),
"duration_ms": start.elapsed().as_millis()
}))
}
/// Execute extraction
async fn execute_extract(&self, url: &str, selectors: &HashMap<String, String>) -> Result<HashMap<String, String>> {
let html = self.fetch_page(url).await?;
let mut results = HashMap::new();
for (field_name, selector) in selectors {
let value = self.extract_by_pattern(&html, selector);
results.insert(field_name.clone(), value);
}
Ok(results)
}
}
impl Default for CollectorHand {
fn default() -> Self {
Self::new()
}
}
#[async_trait]
impl Hand for CollectorHand {
    fn config(&self) -> &HandConfig {
        &self.config
    }

    /// Deserialize a [`CollectorAction`] from `input`, dispatch it, and
    /// wrap the action-specific payload (which carries its own
    /// `duration_ms`) in a successful [`HandResult`].
    async fn execute(&self, _context: &HandContext, input: Value) -> Result<HandResult> {
        // `input` is owned here, so deserialize it directly — the old
        // `input.clone()` copied the whole JSON tree for nothing.
        let action: CollectorAction = serde_json::from_value(input)
            .map_err(|e| zclaw_types::ZclawError::HandError(format!("Invalid action: {}", e)))?;
        let start = std::time::Instant::now();
        let result = match action {
            CollectorAction::Collect { target, format } => {
                let fmt = format.unwrap_or(OutputFormat::Json);
                let collection = self.execute_collect(&target, fmt.clone()).await?;
                json!({
                    "action": "collect",
                    "url": target.url,
                    "total_items": collection.total_items,
                    "duration_ms": start.elapsed().as_millis(),
                    "items": collection.items
                })
            }
            CollectorAction::Aggregate { config } => {
                let aggregation = self.execute_aggregate(&config).await?;
                json!({
                    "action": "aggregate",
                    "duration_ms": start.elapsed().as_millis(),
                    "result": aggregation
                })
            }
            CollectorAction::Extract { url, selectors } => {
                let extracted = self.execute_extract(&url, &selectors).await?;
                json!({
                    "action": "extract",
                    "url": url,
                    "duration_ms": start.elapsed().as_millis(),
                    "data": extracted
                })
            }
        };
        Ok(HandResult::success(result))
    }

    fn needs_approval(&self) -> bool {
        false
    }

    /// Network access is assumed; nothing to probe locally.
    fn check_dependencies(&self) -> Result<Vec<String>> {
        Ok(Vec::new())
    }

    fn status(&self) -> crate::HandStatus {
        crate::HandStatus::Idle
    }
}

View File

@@ -6,15 +6,27 @@
//! - Speech: Text-to-speech synthesis
//! - Quiz: Assessment and evaluation
//! - Browser: Web automation
//! - Researcher: Deep research and analysis
//! - Collector: Data collection and aggregation
//! - Clip: Video processing
//! - Twitter: Social media automation
// One module per Hand implementation.
mod whiteboard;
mod slideshow;
mod speech;
mod quiz;
mod browser;
mod researcher;
mod collector;
mod clip;
mod twitter;

// Re-export each Hand's public surface at the `hands` module level.
// NOTE(review): nine glob re-exports can collide silently (two modules
// exporting the same type name becomes a compile error at the use
// site); explicit re-export lists would make conflicts easier to
// manage as more Hands are added.
pub use whiteboard::*;
pub use slideshow::*;
pub use speech::*;
pub use quiz::*;
pub use browser::*;
pub use researcher::*;
pub use collector::*;
pub use clip::*;
pub use twitter::*;

View File

@@ -543,7 +543,7 @@ impl QuizHand {
match quiz {
Some(quiz) => {
let score = current_score.unwrap_or(0.0);
let _score = current_score.unwrap_or(0.0);
let next_idx = state.current_question_index + 1;
if next_idx < quiz.questions.len() {

View File

@@ -0,0 +1,545 @@
//! Researcher Hand - Deep research and analysis capabilities
//!
//! This hand provides web search, content fetching, and research synthesis.
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use zclaw_types::Result;
use crate::{Hand, HandConfig, HandContext, HandResult};
/// Search engine options.
///
/// Serialized in lowercase (e.g. `"duckduckgo"`).
/// NOTE(review): only DuckDuckGo is actually wired up in
/// `execute_search`; other values are currently accepted but ignored.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum SearchEngine {
    Google,
    Bing,
    DuckDuckGo,
    /// Let the hand pick (default).
    #[default]
    Auto,
}
/// Research depth level.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ResearchDepth {
    /// Fast search, top 3 results.
    Quick,
    /// Normal search, top 10 results (default).
    #[default]
    Standard,
    /// Comprehensive search, multiple sources.
    Deep,
}
/// Research query configuration.
///
/// NOTE(review): `engine`, `depth`, `include_related` and
/// `time_limit_secs` are deserialized but not honored by
/// `execute_search` in the visible code — confirm before documenting
/// them as supported.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ResearchQuery {
    /// Search query
    pub query: String,
    /// Search engine to use (defaults to `Auto`)
    #[serde(default)]
    pub engine: SearchEngine,
    /// Research depth (defaults to `Standard`)
    #[serde(default)]
    pub depth: ResearchDepth,
    /// Maximum results to return (default 10)
    #[serde(default = "default_max_results")]
    pub max_results: usize,
    /// Include related topics
    #[serde(default)]
    pub include_related: bool,
    /// Time limit in seconds (default 60)
    #[serde(default = "default_time_limit")]
    pub time_limit_secs: u64,
}

/// Serde default for `ResearchQuery::max_results`.
fn default_max_results() -> usize { 10 }
/// Serde default for `ResearchQuery::time_limit_secs`.
fn default_time_limit() -> u64 { 60 }
/// Search result item.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SearchResult {
    /// Title of the result
    pub title: String,
    /// URL
    pub url: String,
    /// Snippet/summary
    pub snippet: String,
    /// Source name (e.g. "DuckDuckGo" or the abstract's own source)
    pub source: String,
    /// Relevance score (0-100) — a fixed heuristic assigned by this
    /// hand, not a score returned by the engine
    #[serde(default)]
    pub relevance: u8,
    /// Fetched content (if available)
    #[serde(default)]
    pub content: Option<String>,
    /// RFC 3339 timestamp of when this result was fetched
    #[serde(default)]
    pub fetched_at: Option<String>,
}
/// Research report.
///
/// NOTE(review): not constructed in the visible portion of this file —
/// presumably built by the `report` action further down; verify.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ResearchReport {
    /// Original query
    pub query: String,
    /// Search results
    pub results: Vec<SearchResult>,
    /// Synthesized summary
    #[serde(default)]
    pub summary: Option<String>,
    /// Key findings
    #[serde(default)]
    pub key_findings: Vec<String>,
    /// Related topics discovered
    #[serde(default)]
    pub related_topics: Vec<String>,
    /// Research timestamp (RFC 3339)
    pub researched_at: String,
    /// Total time spent (ms)
    pub duration_ms: u64,
}
/// Researcher action types.
///
/// Wire format is internally tagged on `"action"`.
/// NOTE(review): confirm the hand's declared input schema covers the
/// `summarize` action — the enum accepts it, so the schema should too.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "action")]
pub enum ResearcherAction {
    /// Run a web search for the given query.
    #[serde(rename = "search")]
    Search { query: ResearchQuery },
    /// Fetch the content of one URL.
    #[serde(rename = "fetch")]
    Fetch { url: String },
    /// Summarize the content of several URLs.
    #[serde(rename = "summarize")]
    Summarize { urls: Vec<String> },
    /// Produce a full research report for a query.
    #[serde(rename = "report")]
    Report { query: ResearchQuery },
}
/// Researcher Hand implementation.
pub struct ResearcherHand {
    /// Static hand metadata registered with the kernel.
    config: HandConfig,
    /// Shared HTTP client (30 s timeout, custom user agent).
    client: reqwest::Client,
    /// URL -> previously fetched result cache.
    /// NOTE(review): grows without bound — consider an LRU or TTL.
    cache: Arc<RwLock<HashMap<String, SearchResult>>>,
}
impl ResearcherHand {
/// Create a new researcher hand.
///
/// The input schema mirrors [`ResearcherAction`]. The `report` branch
/// repeats the `search` query schema inline: the previous
/// `"$ref": "#/properties/query"` pointed at a path that does not
/// exist in this document (the root has `oneOf`, not `properties`),
/// so the ref could never resolve. A `summarize` branch is also added
/// to match the enum.
pub fn new() -> Self {
    Self {
        config: HandConfig {
            id: "researcher".to_string(),
            name: "Researcher".to_string(),
            description: "Deep research and analysis capabilities with web search and content fetching".to_string(),
            needs_approval: false,
            dependencies: vec!["network".to_string()],
            input_schema: Some(serde_json::json!({
                "type": "object",
                "oneOf": [
                    {
                        "properties": {
                            "action": { "const": "search" },
                            "query": {
                                "type": "object",
                                "properties": {
                                    "query": { "type": "string" },
                                    "engine": { "type": "string", "enum": ["google", "bing", "duckduckgo", "auto"] },
                                    "depth": { "type": "string", "enum": ["quick", "standard", "deep"] },
                                    "maxResults": { "type": "integer" }
                                },
                                "required": ["query"]
                            }
                        },
                        "required": ["action", "query"]
                    },
                    {
                        "properties": {
                            "action": { "const": "fetch" },
                            "url": { "type": "string" }
                        },
                        "required": ["action", "url"]
                    },
                    {
                        "properties": {
                            "action": { "const": "summarize" },
                            "urls": { "type": "array", "items": { "type": "string" } }
                        },
                        "required": ["action", "urls"]
                    },
                    {
                        "properties": {
                            "action": { "const": "report" },
                            "query": {
                                "type": "object",
                                "properties": {
                                    "query": { "type": "string" },
                                    "engine": { "type": "string", "enum": ["google", "bing", "duckduckgo", "auto"] },
                                    "depth": { "type": "string", "enum": ["quick", "standard", "deep"] },
                                    "maxResults": { "type": "integer" }
                                },
                                "required": ["query"]
                            }
                        },
                        "required": ["action", "query"]
                    }
                ]
            })),
            tags: vec!["research".to_string(), "web".to_string(), "search".to_string()],
            enabled: true,
        },
        // Fall back to a default client rather than panicking if the
        // builder fails.
        client: reqwest::Client::builder()
            .timeout(std::time::Duration::from_secs(30))
            .user_agent("ZCLAW-Researcher/1.0")
            .build()
            .unwrap_or_else(|_| reqwest::Client::new()),
        cache: Arc::new(RwLock::new(HashMap::new())),
    }
}
/// Execute a web search for `query` and log timing/result count.
///
/// Currently always routed through DuckDuckGo regardless of
/// `query.engine` (no API key required).
/// NOTE(review): `engine`, `depth`, `include_related` and
/// `time_limit_secs` are accepted but not honored here — confirm
/// whether that is intentional.
async fn execute_search(&self, query: &ResearchQuery) -> Result<Vec<SearchResult>> {
    let start = std::time::Instant::now();
    // Use DuckDuckGo as default search (no API key required)
    let results = self.search_duckduckgo(&query.query, query.max_results).await?;
    let duration = start.elapsed().as_millis() as u64;
    tracing::info!(
        target: "researcher",
        query = %query.query,
        duration_ms = duration,
        results_count = results.len(),
        "Search completed"
    );
    Ok(results)
}
/// Search using the DuckDuckGo Instant Answer API (no API key required).
///
/// Returns at most `max_results` entries: the instant-answer abstract
/// (when present) followed by related topics. Related-topic entries
/// without a `FirstURL` are category groupings, not results, and are
/// skipped (the old code emitted placeholder items with empty URLs for
/// them, and could also exceed `max_results` by one).
async fn search_duckduckgo(&self, query: &str, max_results: usize) -> Result<Vec<SearchResult>> {
    let url = format!("https://api.duckduckgo.com/?q={}&format=json&no_html=1",
        url_encode(query));
    let response = self.client
        .get(&url)
        .send()
        .await
        .map_err(|e| zclaw_types::ZclawError::HandError(format!("Search request failed: {}", e)))?;
    let json: Value = response.json().await
        .map_err(|e| zclaw_types::ZclawError::HandError(format!("Failed to parse search response: {}", e)))?;
    let mut results = Vec::new();
    // The abstract, when present, is the single most relevant answer.
    if let Some(abstract_text) = json.get("AbstractText").and_then(|v| v.as_str()) {
        if !abstract_text.is_empty() {
            results.push(SearchResult {
                // Prefer the API's own "Heading" over echoing the query.
                title: json.get("Heading")
                    .and_then(|v| v.as_str())
                    .filter(|s| !s.is_empty())
                    .unwrap_or(query)
                    .to_string(),
                url: json.get("AbstractURL")
                    .and_then(|v| v.as_str())
                    .unwrap_or("")
                    .to_string(),
                snippet: abstract_text.to_string(),
                source: json.get("AbstractSource")
                    .and_then(|v| v.as_str())
                    .unwrap_or("DuckDuckGo")
                    .to_string(),
                relevance: 100,
                content: None,
                fetched_at: Some(chrono::Utc::now().to_rfc3339()),
            });
        }
    }
    // Related topics fill the remaining slots up to max_results.
    if let Some(related) = json.get("RelatedTopics").and_then(|v| v.as_array()) {
        for item in related {
            if results.len() >= max_results {
                break;
            }
            if let Some(obj) = item.as_object() {
                // Require both Text and FirstURL; entries without them
                // are "Topics" groupings rather than actual results.
                if let (Some(text), Some(first_url)) = (
                    obj.get("Text").and_then(|v| v.as_str()),
                    obj.get("FirstURL").and_then(|v| v.as_str()),
                ) {
                    results.push(SearchResult {
                        title: text.to_string(),
                        url: first_url.to_string(),
                        snippet: text.to_string(),
                        source: "DuckDuckGo".to_string(),
                        relevance: 80,
                        content: None,
                        fetched_at: Some(chrono::Utc::now().to_rfc3339()),
                    });
                }
            }
        }
    }
    Ok(results)
}
/// Fetch content from a URL
async fn execute_fetch(&self, url: &str) -> Result<SearchResult> {
let start = std::time::Instant::now();
// Check cache first
{
let cache = self.cache.read().await;
if let Some(cached) = cache.get(url) {
if cached.content.is_some() {
return Ok(cached.clone());
}
}
}
let response = self.client
.get(url)
.send()
.await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Fetch request failed: {}", e)))?;
let content_type = response.headers()
.get(reqwest::header::CONTENT_TYPE)
.and_then(|v| v.to_str().ok())
.unwrap_or("");
let content = if content_type.contains("text/html") {
// Extract text from HTML
let html = response.text().await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Failed to read HTML: {}", e)))?;
self.extract_text_from_html(&html)
} else if content_type.contains("text/") || content_type.contains("application/json") {
response.text().await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Failed to read text: {}", e)))?
} else {
"[Binary content]".to_string()
};
let result = SearchResult {
title: url.to_string(),
url: url.to_string(),
snippet: content.chars().take(500).collect(),
source: url.to_string(),
relevance: 100,
content: Some(content),
fetched_at: Some(chrono::Utc::now().to_rfc3339()),
};
// Cache the result
{
let mut cache = self.cache.write().await;
cache.insert(url.to_string(), result.clone());
}
let duration = start.elapsed().as_millis() as u64;
tracing::info!(
target: "researcher",
url = url,
duration_ms = duration,
"Fetch completed"
);
Ok(result)
}
/// Extract readable text from HTML
fn extract_text_from_html(&self, html: &str) -> String {
// Simple text extraction - remove HTML tags
let mut text = String::new();
let mut in_tag = false;
let mut in_script = false;
let mut in_style = false;
for c in html.chars() {
match c {
'<' => {
in_tag = true;
let remaining = html[text.len()..].to_lowercase();
if remaining.starts_with("<script") {
in_script = true;
} else if remaining.starts_with("<style") {
in_style = true;
}
}
'>' => {
in_tag = false;
let remaining = html[text.len()..].to_lowercase();
if remaining.starts_with("</script>") {
in_script = false;
} else if remaining.starts_with("</style>") {
in_style = false;
}
}
_ if in_tag => {}
_ if in_script || in_style => {}
' ' | '\n' | '\t' | '\r' => {
if !text.ends_with(' ') && !text.is_empty() {
text.push(' ');
}
}
_ => text.push(c),
}
}
// Limit length
if text.len() > 10000 {
text.truncate(10000);
text.push_str("...");
}
text.trim().to_string()
}
/// Generate a comprehensive research report
async fn execute_report(&self, query: &ResearchQuery) -> Result<ResearchReport> {
let start = std::time::Instant::now();
// First, execute search
let mut results = self.execute_search(query).await?;
// Fetch content for top results
let fetch_limit = match query.depth {
ResearchDepth::Quick => 1,
ResearchDepth::Standard => 3,
ResearchDepth::Deep => 5,
};
for result in results.iter_mut().take(fetch_limit) {
if !result.url.is_empty() {
match self.execute_fetch(&result.url).await {
Ok(fetched) => {
result.content = fetched.content;
result.fetched_at = fetched.fetched_at;
}
Err(e) => {
tracing::warn!(target: "researcher", error = %e, "Failed to fetch content");
}
}
}
}
// Extract key findings
let key_findings: Vec<String> = results.iter()
.take(5)
.filter_map(|r| {
r.content.as_ref().map(|c| {
c.split(". ")
.take(3)
.collect::<Vec<_>>()
.join(". ")
})
})
.collect();
// Extract related topics from snippets
let related_topics: Vec<String> = results.iter()
.filter_map(|r| {
if r.snippet.len() > 50 {
Some(r.title.clone())
} else {
None
}
})
.take(5)
.collect();
let duration = start.elapsed().as_millis() as u64;
Ok(ResearchReport {
query: query.query.clone(),
results,
summary: None, // Would require LLM integration
key_findings,
related_topics,
researched_at: chrono::Utc::now().to_rfc3339(),
duration_ms: duration,
})
}
}
/// `Default` delegates to [`ResearcherHand::new`] (fresh client, empty cache).
impl Default for ResearcherHand {
    fn default() -> Self {
        Self::new()
    }
}
#[async_trait]
impl Hand for ResearcherHand {
    fn config(&self) -> &HandConfig {
        &self.config
    }

    /// Parse the JSON `input` into a [`ResearcherAction`] and dispatch it.
    ///
    /// Returns a JSON payload tagged with the action name; errors from the
    /// underlying search/fetch/report helpers propagate as `HandError`.
    async fn execute(&self, _context: &HandContext, input: Value) -> Result<HandResult> {
        // `serde_json::from_value` consumes its argument; pass `input` by
        // value instead of cloning it — it is not used again afterwards.
        let action: ResearcherAction = serde_json::from_value(input)
            .map_err(|e| zclaw_types::ZclawError::HandError(format!("Invalid action: {}", e)))?;
        let start = std::time::Instant::now();
        let result = match action {
            ResearcherAction::Search { query } => {
                let results = self.execute_search(&query).await?;
                json!({
                    "action": "search",
                    "query": query.query,
                    "results": results,
                    "duration_ms": start.elapsed().as_millis()
                })
            }
            ResearcherAction::Fetch { url } => {
                let result = self.execute_fetch(&url).await?;
                json!({
                    "action": "fetch",
                    "url": url,
                    "result": result,
                    "duration_ms": start.elapsed().as_millis()
                })
            }
            ResearcherAction::Summarize { urls } => {
                let mut results = Vec::new();
                // Cap at 5 URLs; individual fetch failures are skipped, not fatal.
                for url in urls.iter().take(5) {
                    if let Ok(result) = self.execute_fetch(url).await {
                        results.push(result);
                    }
                }
                json!({
                    "action": "summarize",
                    "urls": urls,
                    "results": results,
                    "duration_ms": start.elapsed().as_millis()
                })
            }
            ResearcherAction::Report { query } => {
                let report = self.execute_report(&query).await?;
                json!({
                    "action": "report",
                    "report": report
                })
            }
        };
        Ok(HandResult::success(result))
    }

    fn needs_approval(&self) -> bool {
        false // Research operations are generally safe
    }

    fn check_dependencies(&self) -> Result<Vec<String>> {
        // Network connectivity will be checked at runtime
        Ok(Vec::new())
    }

    fn status(&self) -> crate::HandStatus {
        crate::HandStatus::Idle
    }
}
/// URL encoding helper (simple implementation)
/// Percent-encode a string for use in a URL query component (RFC 3986).
///
/// Unreserved characters (`A-Z a-z 0-9 - _ . ~`) pass through unchanged;
/// every other character is encoded as the percent-escaped bytes of its
/// UTF-8 representation (e.g. `"é"` → `"%C3%A9"`).
///
/// The previous version escaped the Unicode scalar value (`c as u32`),
/// producing invalid multi-digit escapes like `"%65E5"` for non-ASCII input;
/// percent-encoding must operate on UTF-8 bytes, one escape per byte.
fn url_encode(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for b in s.bytes() {
        match b {
            b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'_' | b'.' | b'~' => {
                out.push(b as char);
            }
            _ => out.push_str(&format!("%{:02X}", b)),
        }
    }
    out
}

View File

@@ -176,7 +176,7 @@ impl SpeechHand {
/// Create with custom provider
pub fn with_provider(provider: TtsProvider) -> Self {
let mut hand = Self::new();
let hand = Self::new();
let mut state = hand.state.blocking_write();
state.config.provider = provider;
drop(state);
@@ -308,7 +308,7 @@ impl SpeechHand {
"language": state.config.default_language,
})))
}
SpeechAction::SetProvider { provider, api_key, region } => {
SpeechAction::SetProvider { provider, api_key, region: _ } => {
state.config.provider = provider.clone();
// In real implementation, would configure provider
Ok(HandResult::success(serde_json::json!({

View File

@@ -0,0 +1,544 @@
//! Twitter Hand - Twitter/X automation capabilities
//!
//! This hand provides Twitter/X automation features:
//! - Post tweets
//! - Get timeline
//! - Search tweets
//! - Manage followers
//!
//! Note: Requires Twitter API credentials (API Key, API Secret, Access Token, Access Secret)
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use std::sync::Arc;
use tokio::sync::RwLock;
use zclaw_types::Result;
use crate::{Hand, HandConfig, HandContext, HandResult};
/// Twitter credentials
/// Twitter API credentials.
///
/// The four OAuth 1.0a user-context values are required by
/// `check_credentials`; the v2 bearer token is optional. Fields serialize
/// in camelCase (e.g. `apiKey`, `accessTokenSecret`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TwitterCredentials {
    /// API Key (Consumer Key)
    pub api_key: String,
    /// API Secret (Consumer Secret)
    pub api_secret: String,
    /// Access Token
    pub access_token: String,
    /// Access Token Secret
    pub access_token_secret: String,
    /// Bearer Token (for API v2); optional, defaults to `None`
    #[serde(default)]
    pub bearer_token: Option<String>,
}
/// Tweet configuration for the `tweet` action.
///
/// Only `text` is required; all other fields default to empty/`None`.
/// Serialized in camelCase (e.g. `mediaUrls`, `replyTo`, `quoteTweet`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TweetConfig {
    /// Tweet text
    pub text: String,
    /// Media URLs to attach
    #[serde(default)]
    pub media_urls: Vec<String>,
    /// Reply to tweet ID
    #[serde(default)]
    pub reply_to: Option<String>,
    /// Quote tweet ID
    #[serde(default)]
    pub quote_tweet: Option<String>,
    /// Poll configuration
    #[serde(default)]
    pub poll: Option<PollConfig>,
}
/// Poll attached to a tweet.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PollConfig {
    /// Poll answer options (order is preserved)
    pub options: Vec<String>,
    /// How long the poll stays open, in minutes
    pub duration_minutes: u32,
}
/// Tweet search configuration for the `search` action.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SearchConfig {
    /// Search query
    pub query: String,
    /// Maximum results (defaults to 10 when omitted)
    #[serde(default = "default_search_max")]
    pub max_results: u32,
    /// Next page token, for paginated results
    #[serde(default)]
    pub next_token: Option<String>,
}
/// Default `maxResults` for tweet searches when the field is omitted.
fn default_search_max() -> u32 {
    10
}
/// Timeline configuration for the `timeline` action.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TimelineConfig {
    /// User ID (optional, defaults to authenticated user)
    #[serde(default)]
    pub user_id: Option<String>,
    /// Maximum results (defaults to 10 when omitted)
    #[serde(default = "default_timeline_max")]
    pub max_results: u32,
    /// Exclude replies (defaults to `false`)
    #[serde(default)]
    pub exclude_replies: bool,
    /// Include retweets (defaults to `true`)
    #[serde(default = "default_include_retweets")]
    pub include_retweets: bool,
}
/// Default `maxResults` for timeline requests when the field is omitted.
fn default_timeline_max() -> u32 {
    10
}
/// Timelines include retweets unless the caller explicitly opts out.
fn default_include_retweets() -> bool {
    true
}
/// A single tweet as returned by lookup, search, and timeline actions.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Tweet {
    /// Tweet ID (kept as a string, as Twitter IDs exceed safe JS integers)
    pub id: String,
    /// Tweet body text
    pub text: String,
    /// Author's user ID
    pub author_id: String,
    /// Author's display name
    pub author_name: String,
    /// Author's @handle
    pub author_username: String,
    /// Creation timestamp
    pub created_at: String,
    /// Engagement counters
    pub public_metrics: TweetMetrics,
    /// Attached media; empty when the tweet has none
    #[serde(default)]
    pub media: Vec<MediaInfo>,
}
/// Public engagement counters for a tweet.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TweetMetrics {
    pub retweet_count: u32,
    pub reply_count: u32,
    pub like_count: u32,
    pub quote_count: u32,
    /// View count; not always available, hence optional
    pub impression_count: Option<u64>,
}
/// Media attachment metadata for a tweet.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct MediaInfo {
    /// Unique media key assigned by Twitter
    pub media_key: String,
    /// Media kind (e.g. photo, video, animated_gif)
    pub media_type: String,
    /// Direct URL to the media
    pub url: String,
    /// Width in pixels
    pub width: u32,
    /// Height in pixels
    pub height: u32,
}
/// A Twitter user profile as returned by the `get_user` action.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TwitterUser {
    /// User ID (string form)
    pub id: String,
    /// Display name
    pub name: String,
    /// @handle without the leading '@'
    pub username: String,
    /// Profile bio, if set
    pub description: Option<String>,
    /// Avatar URL, if set
    pub profile_image_url: Option<String>,
    /// Free-form location string, if set
    pub location: Option<String>,
    /// Profile website URL, if set
    pub url: Option<String>,
    /// Whether the account is verified
    pub verified: bool,
    /// Follower/following/tweet counters
    pub public_metrics: UserMetrics,
}
/// Public counters for a Twitter user profile.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UserMetrics {
    pub followers_count: u32,
    pub following_count: u32,
    pub tweet_count: u32,
    /// Number of public lists the user appears on
    pub listed_count: u32,
}
/// Twitter action types.
///
/// Deserialized from JSON using the `action` field as the tag, e.g.
/// `{"action": "tweet", "config": {"text": "..."}}`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "action")]
pub enum TwitterAction {
    /// Post a new tweet.
    #[serde(rename = "tweet")]
    Tweet { config: TweetConfig },
    /// Delete an existing tweet by ID.
    #[serde(rename = "delete_tweet")]
    DeleteTweet { tweet_id: String },
    /// Retweet a tweet by ID.
    #[serde(rename = "retweet")]
    Retweet { tweet_id: String },
    /// Undo a retweet.
    #[serde(rename = "unretweet")]
    Unretweet { tweet_id: String },
    /// Like a tweet by ID.
    #[serde(rename = "like")]
    Like { tweet_id: String },
    /// Undo a like.
    #[serde(rename = "unlike")]
    Unlike { tweet_id: String },
    /// Search recent tweets.
    #[serde(rename = "search")]
    Search { config: SearchConfig },
    /// Fetch a user's (or the authenticated user's) timeline.
    #[serde(rename = "timeline")]
    Timeline { config: TimelineConfig },
    /// Look up a single tweet by ID.
    #[serde(rename = "get_tweet")]
    GetTweet { tweet_id: String },
    /// Look up a user by @handle.
    #[serde(rename = "get_user")]
    GetUser { username: String },
    /// List a user's followers.
    #[serde(rename = "followers")]
    Followers { user_id: String, max_results: Option<u32> },
    /// List the accounts a user follows.
    #[serde(rename = "following")]
    Following { user_id: String, max_results: Option<u32> },
    /// Report whether API credentials are configured and complete.
    #[serde(rename = "check_credentials")]
    CheckCredentials,
}
/// Twitter Hand implementation.
///
/// Credentials live behind an async `RwLock` so they can be set and read
/// from concurrent tasks after construction.
pub struct TwitterHand {
    /// Static hand metadata and input schema
    config: HandConfig,
    /// API credentials; `None` until `set_credentials` is called
    credentials: Arc<RwLock<Option<TwitterCredentials>>>,
}
impl TwitterHand {
    /// Create a new Twitter hand with no credentials configured.
    ///
    /// Every action except `check_credentials` requires credentials; set
    /// them later via [`TwitterHand::set_credentials`].
    pub fn new() -> Self {
        Self {
            config: HandConfig {
                id: "twitter".to_string(),
                name: "Twitter".to_string(),
                description: "Twitter/X automation capabilities for posting, searching, and managing content".to_string(),
                needs_approval: true, // Twitter actions need approval
                dependencies: vec!["twitter_api_key".to_string()],
                // JSON Schema for the main action payloads (a subset of the
                // variants accepted by `TwitterAction` at runtime).
                input_schema: Some(serde_json::json!({
                    "type": "object",
                    "oneOf": [
                        {
                            "properties": {
                                "action": { "const": "tweet" },
                                "config": {
                                    "type": "object",
                                    "properties": {
                                        "text": { "type": "string", "maxLength": 280 },
                                        "mediaUrls": { "type": "array", "items": { "type": "string" } },
                                        "replyTo": { "type": "string" },
                                        "quoteTweet": { "type": "string" }
                                    },
                                    "required": ["text"]
                                }
                            },
                            "required": ["action", "config"]
                        },
                        {
                            "properties": {
                                "action": { "const": "search" },
                                "config": {
                                    "type": "object",
                                    "properties": {
                                        "query": { "type": "string" },
                                        "maxResults": { "type": "integer" }
                                    },
                                    "required": ["query"]
                                }
                            },
                            "required": ["action", "config"]
                        },
                        {
                            "properties": {
                                "action": { "const": "timeline" },
                                "config": {
                                    "type": "object",
                                    "properties": {
                                        "userId": { "type": "string" },
                                        "maxResults": { "type": "integer" }
                                    }
                                }
                            },
                            "required": ["action"]
                        },
                        {
                            "properties": {
                                "action": { "const": "get_tweet" },
                                "tweetId": { "type": "string" }
                            },
                            "required": ["action", "tweetId"]
                        },
                        {
                            "properties": {
                                "action": { "const": "check_credentials" }
                            },
                            "required": ["action"]
                        }
                    ]
                })),
                tags: vec!["twitter".to_string(), "social".to_string(), "automation".to_string()],
                enabled: true,
            },
            credentials: Arc::new(RwLock::new(None)),
        }
    }

    /// Store API credentials, replacing any previously configured set.
    pub async fn set_credentials(&self, creds: TwitterCredentials) {
        let mut c = self.credentials.write().await;
        *c = Some(creds);
    }

    /// Get a clone of the current credentials, if any are configured.
    async fn get_credentials(&self) -> Option<TwitterCredentials> {
        let c = self.credentials.read().await;
        c.clone()
    }

    /// Execute tweet action.
    ///
    /// Currently returns a simulated response; credentials are still
    /// required so the call path matches the real integration.
    async fn execute_tweet(&self, config: &TweetConfig) -> Result<Value> {
        let _creds = self.get_credentials().await
            .ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
        // Simulated tweet response (actual implementation would use Twitter API)
        // In production, this would call Twitter API v2: POST /2/tweets
        Ok(json!({
            "success": true,
            "tweet_id": format!("simulated_{}", chrono::Utc::now().timestamp()),
            "text": config.text,
            "created_at": chrono::Utc::now().to_rfc3339(),
            "message": "Tweet posted successfully (simulated)",
            "note": "Connect Twitter API credentials for actual posting"
        }))
    }

    /// Execute search action (simulated; returns an empty result set).
    async fn execute_search(&self, config: &SearchConfig) -> Result<Value> {
        let _creds = self.get_credentials().await
            .ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
        // Simulated search response
        // In production, this would call Twitter API v2: GET /2/tweets/search/recent
        Ok(json!({
            "success": true,
            "query": config.query,
            "tweets": [],
            "meta": {
                "result_count": 0,
                "newest_id": null,
                "oldest_id": null,
                "next_token": null
            },
            "message": "Search completed (simulated - no actual results without API)",
            "note": "Connect Twitter API credentials for actual search results"
        }))
    }

    /// Execute timeline action (simulated; returns an empty result set).
    async fn execute_timeline(&self, config: &TimelineConfig) -> Result<Value> {
        let _creds = self.get_credentials().await
            .ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
        // Simulated timeline response
        Ok(json!({
            "success": true,
            "user_id": config.user_id,
            "tweets": [],
            "meta": {
                "result_count": 0,
                "newest_id": null,
                "oldest_id": null,
                "next_token": null
            },
            "message": "Timeline fetched (simulated)",
            "note": "Connect Twitter API credentials for actual timeline"
        }))
    }

    /// Get tweet by ID (simulated; `tweet` field is always null for now).
    async fn execute_get_tweet(&self, tweet_id: &str) -> Result<Value> {
        let _creds = self.get_credentials().await
            .ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
        Ok(json!({
            "success": true,
            "tweet_id": tweet_id,
            "tweet": null,
            "message": "Tweet lookup (simulated)",
            "note": "Connect Twitter API credentials for actual tweet data"
        }))
    }

    /// Get user by username (simulated; `user` field is always null for now).
    async fn execute_get_user(&self, username: &str) -> Result<Value> {
        let _creds = self.get_credentials().await
            .ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
        Ok(json!({
            "success": true,
            "username": username,
            "user": null,
            "message": "User lookup (simulated)",
            "note": "Connect Twitter API credentials for actual user data"
        }))
    }

    /// Execute like action (simulated).
    async fn execute_like(&self, tweet_id: &str) -> Result<Value> {
        let _creds = self.get_credentials().await
            .ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
        Ok(json!({
            "success": true,
            "tweet_id": tweet_id,
            "action": "liked",
            "message": "Tweet liked (simulated)"
        }))
    }

    /// Execute retweet action (simulated).
    async fn execute_retweet(&self, tweet_id: &str) -> Result<Value> {
        let _creds = self.get_credentials().await
            .ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
        Ok(json!({
            "success": true,
            "tweet_id": tweet_id,
            "action": "retweeted",
            "message": "Tweet retweeted (simulated)"
        }))
    }

    /// Check credentials status.
    ///
    /// Unlike the other actions this never fails; when credentials are
    /// missing it returns setup instructions instead of an error.
    async fn execute_check_credentials(&self) -> Result<Value> {
        match self.get_credentials().await {
            Some(creds) => {
                // Validate credentials have required fields
                let has_required = !creds.api_key.is_empty()
                    && !creds.api_secret.is_empty()
                    && !creds.access_token.is_empty()
                    && !creds.access_token_secret.is_empty();
                Ok(json!({
                    "configured": has_required,
                    "has_api_key": !creds.api_key.is_empty(),
                    "has_api_secret": !creds.api_secret.is_empty(),
                    "has_access_token": !creds.access_token.is_empty(),
                    "has_access_token_secret": !creds.access_token_secret.is_empty(),
                    "has_bearer_token": creds.bearer_token.is_some(),
                    "message": if has_required {
                        "Twitter credentials configured"
                    } else {
                        "Twitter credentials incomplete"
                    }
                }))
            }
            None => Ok(json!({
                "configured": false,
                "message": "Twitter credentials not set",
                "setup_instructions": {
                    "step1": "Create a Twitter Developer account at https://developer.twitter.com/",
                    "step2": "Create a new project and app",
                    "step3": "Generate API Key, API Secret, Access Token, and Access Token Secret",
                    "step4": "Configure credentials using set_credentials()"
                }
            }))
        }
    }
}
/// `Default` delegates to [`TwitterHand::new`] (no credentials configured).
impl Default for TwitterHand {
    fn default() -> Self {
        Self::new()
    }
}
#[async_trait]
impl Hand for TwitterHand {
    fn config(&self) -> &HandConfig {
        &self.config
    }

    /// Parse the JSON `input` into a [`TwitterAction`] and dispatch it.
    ///
    /// Variants without a dedicated helper (delete, unretweet, unlike,
    /// followers, following) are answered inline with simulated payloads.
    async fn execute(&self, _context: &HandContext, input: Value) -> Result<HandResult> {
        // `serde_json::from_value` consumes its argument; pass `input` by
        // value instead of cloning it — it is not used again afterwards.
        let action: TwitterAction = serde_json::from_value(input)
            .map_err(|e| zclaw_types::ZclawError::HandError(format!("Invalid action: {}", e)))?;
        let start = std::time::Instant::now();
        let result = match action {
            TwitterAction::Tweet { config } => self.execute_tweet(&config).await?,
            TwitterAction::DeleteTweet { tweet_id } => {
                json!({
                    "success": true,
                    "tweet_id": tweet_id,
                    "action": "deleted",
                    "message": "Tweet deleted (simulated)"
                })
            }
            TwitterAction::Retweet { tweet_id } => self.execute_retweet(&tweet_id).await?,
            TwitterAction::Unretweet { tweet_id } => {
                json!({
                    "success": true,
                    "tweet_id": tweet_id,
                    "action": "unretweeted",
                    "message": "Tweet unretweeted (simulated)"
                })
            }
            TwitterAction::Like { tweet_id } => self.execute_like(&tweet_id).await?,
            TwitterAction::Unlike { tweet_id } => {
                json!({
                    "success": true,
                    "tweet_id": tweet_id,
                    "action": "unliked",
                    "message": "Tweet unliked (simulated)"
                })
            }
            TwitterAction::Search { config } => self.execute_search(&config).await?,
            TwitterAction::Timeline { config } => self.execute_timeline(&config).await?,
            TwitterAction::GetTweet { tweet_id } => self.execute_get_tweet(&tweet_id).await?,
            TwitterAction::GetUser { username } => self.execute_get_user(&username).await?,
            TwitterAction::Followers { user_id, max_results } => {
                json!({
                    "success": true,
                    "user_id": user_id,
                    "followers": [],
                    "max_results": max_results.unwrap_or(100),
                    "message": "Followers fetched (simulated)"
                })
            }
            TwitterAction::Following { user_id, max_results } => {
                json!({
                    "success": true,
                    "user_id": user_id,
                    "following": [],
                    "max_results": max_results.unwrap_or(100),
                    "message": "Following fetched (simulated)"
                })
            }
            TwitterAction::CheckCredentials => self.execute_check_credentials().await?,
        };
        let duration_ms = start.elapsed().as_millis() as u64;
        Ok(HandResult {
            // Mirror the helper's own "success" flag in the HandResult.
            success: result["success"].as_bool().unwrap_or(false),
            output: result,
            error: None,
            duration_ms: Some(duration_ms),
            status: "completed".to_string(),
        })
    }

    fn needs_approval(&self) -> bool {
        true // Twitter actions should be approved
    }

    fn check_dependencies(&self) -> Result<Vec<String>> {
        let mut missing = Vec::new();
        // Check if credentials are configured (synchronously)
        // This is a simplified check; actual async check would require runtime
        missing.push("Twitter API credentials required".to_string());
        Ok(missing)
    }

    fn status(&self) -> crate::HandStatus {
        // Will be Idle when credentials are set
        crate::HandStatus::Idle
    }
}

View File

@@ -206,7 +206,7 @@ impl WhiteboardHand {
/// Create with custom canvas size
pub fn with_size(width: f64, height: f64) -> Self {
let mut hand = Self::new();
let hand = Self::new();
let mut state = hand.state.blocking_write();
state.canvas_width = width;
state.canvas_height = height;