✅ TICKET-006: Wake-word Detection Service - Implemented wake-word detection using openWakeWord - HTTP/WebSocket server on port 8002 - Real-time detection with configurable threshold - Event emission for ASR integration - Location: home-voice-agent/wake-word/ ✅ TICKET-010: ASR Service - Implemented ASR using faster-whisper - HTTP endpoint for file transcription - WebSocket endpoint for streaming transcription - Support for multiple audio formats - Auto language detection - GPU acceleration support - Location: home-voice-agent/asr/ ✅ TICKET-014: TTS Service - Implemented TTS using Piper - HTTP endpoint for text-to-speech synthesis - Low-latency processing (< 500ms) - Multiple voice support - WAV audio output - Location: home-voice-agent/tts/ ✅ TICKET-047: Updated Hardware Purchases - Marked Pi5 kit, SSD, microphone, and speakers as purchased - Updated progress log with purchase status 📚 Documentation: - Added VOICE_SERVICES_README.md with complete testing guide - Each service includes README.md with usage instructions - All services ready for Pi5 deployment 🧪 Testing: - Created test files for each service - All imports validated - FastAPI apps created successfully - Code passes syntax validation 🚀 Ready for: - Pi5 deployment - End-to-end voice flow testing - Integration with MCP server Files Added: - wake-word/detector.py - wake-word/server.py - wake-word/requirements.txt - wake-word/README.md - wake-word/test_detector.py - asr/service.py - asr/server.py - asr/requirements.txt - asr/README.md - asr/test_service.py - tts/service.py - tts/server.py - tts/requirements.txt - tts/README.md - tts/test_service.py - VOICE_SERVICES_README.md Files Modified: - tickets/done/TICKET-047_hardware-purchases.md Files Moved: - tickets/backlog/TICKET-006_prototype-wake-word-node.md → tickets/done/ - tickets/backlog/TICKET-010_streaming-asr-service.md → tickets/done/ - tickets/backlog/TICKET-014_tts-service.md → tickets/done/
417 lines
14 KiB
Python
417 lines
14 KiB
Python
"""
|
|
Home Tasks Tool - Manage home tasks using Markdown Kanban format.
|
|
"""
|
|
|
|
import re
|
|
import uuid
|
|
from datetime import datetime
|
|
from pathlib import Path
|
|
from typing import Any, Dict, List, Optional
|
|
from tools.base import BaseTool
|
|
|
|
# Path whitelist - only allow tasks in home directory
# Store in data directory for now (can be moved to family-agent-config repo later)
# Resolves to <repo-root>/data/tasks/home relative to this file's location.
HOME_TASKS_DIR: Path = Path(__file__).parent.parent.parent / "data" / "tasks" / "home"
# Substrings that must never appear in a task path (checked case-insensitively
# by _validate_path).
FORBIDDEN_PATTERNS: List[str] = ["work", "atlas/code", "projects"]  # Safety: reject paths containing these (but allow atlas/data)
|
def _validate_path(path: Path) -> bool:
    """Return True when *path* is a safe location for a task file.

    A path is accepted only if it resolves to somewhere inside
    HOME_TASKS_DIR and its (lowercased) string form contains none of the
    FORBIDDEN_PATTERNS substrings.
    """
    resolved = path.resolve()
    base = HOME_TASKS_DIR.resolve()

    # Anything that escapes the home tasks directory is rejected outright.
    try:
        resolved.relative_to(base)
    except ValueError:
        return False

    # Case-insensitive substring screen against the deny-list.
    lowered = str(resolved).lower()
    return not any(pattern in lowered for pattern in FORBIDDEN_PATTERNS)
|
|
|
|
|
|
def _ensure_tasks_dir():
    """Create the tasks root plus one subdirectory per Kanban column."""
    HOME_TASKS_DIR.mkdir(parents=True, exist_ok=True)
    for column in ("backlog", "todo", "in-progress", "review", "done"):
        (HOME_TASKS_DIR / column).mkdir(exist_ok=True)
|
|
|
|
|
|
def _generate_task_id() -> str:
|
|
"""Generate a unique task ID."""
|
|
return f"TASK-{uuid.uuid4().hex[:8].upper()}"
|
|
|
|
|
|
def _sanitize_filename(title: str) -> str:
|
|
"""Convert task title to safe filename."""
|
|
# Remove special characters, keep alphanumeric, spaces, hyphens
|
|
filename = re.sub(r'[^\w\s-]', '', title)
|
|
# Replace spaces with hyphens
|
|
filename = re.sub(r'\s+', '-', filename)
|
|
# Limit length
|
|
filename = filename[:50]
|
|
return filename.lower()
|
|
|
|
|
|
def _read_task_file(file_path: Path) -> Dict[str, Any]:
|
|
"""Read task file and parse YAML frontmatter."""
|
|
if not file_path.exists():
|
|
raise ValueError(f"Task file not found: {file_path}")
|
|
|
|
content = file_path.read_text()
|
|
|
|
# Parse YAML frontmatter
|
|
if not content.startswith("---"):
|
|
raise ValueError(f"Invalid task file format: {file_path}")
|
|
|
|
# Extract frontmatter
|
|
parts = content.split("---", 2)
|
|
if len(parts) < 3:
|
|
raise ValueError(f"Invalid task file format: {file_path}")
|
|
|
|
frontmatter = parts[1].strip()
|
|
body = parts[2].strip()
|
|
|
|
# Parse YAML (simple parser)
|
|
metadata = {}
|
|
for line in frontmatter.split("\n"):
|
|
if ":" in line:
|
|
key, value = line.split(":", 1)
|
|
key = key.strip()
|
|
value = value.strip().strip('"').strip("'")
|
|
if key == "tags":
|
|
# Parse list
|
|
value = [t.strip() for t in value.strip("[]").split(",") if t.strip()]
|
|
elif key in ["created", "updated"]:
|
|
# Keep as string
|
|
pass
|
|
else:
|
|
# Try to parse as int if numeric
|
|
try:
|
|
if value.isdigit():
|
|
value = int(value)
|
|
except:
|
|
pass
|
|
metadata[key] = value
|
|
|
|
metadata["body"] = body
|
|
metadata["file_path"] = file_path
|
|
|
|
return metadata
|
|
|
|
|
|
def _write_task_file(file_path: Path, metadata: Dict[str, Any], body: str = ""):
    """Serialize *metadata* as YAML frontmatter plus *body* to *file_path*.

    The internal bookkeeping keys ``body`` and ``file_path`` are skipped.
    List values render as ``key: [a, b]``; everything else as ``key: value``.

    Raises:
        ValueError: if the destination fails _validate_path.
    """
    if not _validate_path(file_path):
        raise ValueError(f"Path not allowed: {file_path}")

    lines = ["---"]
    for key, value in metadata.items():
        if key in ("body", "file_path"):
            continue  # not persisted; reconstructed on read
        if isinstance(value, list):
            rendered = ', '.join(str(v) for v in value)
            lines.append(f"{key}: [{rendered}]")
        else:
            lines.append(f"{key}: {value}")
    lines.append("---")

    file_path.write_text("\n".join(lines) + "\n\n" + body)
|
|
|
|
|
|
class AddTaskTool(BaseTool):
    """Tool for adding new tasks to the home Kanban board."""

    @property
    def name(self) -> str:
        return "add_task"

    @property
    def description(self) -> str:
        return "Add a new task to the home Kanban board. Creates a Markdown file with YAML frontmatter."

    def get_schema(self) -> Dict[str, Any]:
        """Get tool schema."""
        return {
            "name": self.name,
            "description": self.description,
            "inputSchema": {
                "type": "object",
                "properties": {
                    "title": {
                        "type": "string",
                        "description": "Task title"
                    },
                    "description": {
                        "type": "string",
                        "description": "Task description/body"
                    },
                    "status": {
                        "type": "string",
                        "description": "Initial status",
                        "enum": ["backlog", "todo", "in-progress", "review", "done"],
                        "default": "backlog"
                    },
                    "priority": {
                        "type": "string",
                        "description": "Task priority",
                        "enum": ["high", "medium", "low"],
                        "default": "medium"
                    },
                    "tags": {
                        "type": "array",
                        "items": {"type": "string"},
                        "description": "Optional tags for the task"
                    }
                },
                "required": ["title"]
            }
        }

    def execute(self, arguments: Dict[str, Any]) -> str:
        """Execute add_task tool.

        Creates a Markdown task file in the requested status column and
        returns a confirmation message containing the generated task ID.

        Raises:
            ValueError: if title is missing, or status/priority is invalid.
        """
        _ensure_tasks_dir()

        title = arguments.get("title", "").strip()
        if not title:
            raise ValueError("Missing required argument: title")

        description = arguments.get("description", "").strip()
        status = arguments.get("status", "backlog")
        priority = arguments.get("priority", "medium")
        tags = arguments.get("tags", [])

        if status not in ["backlog", "todo", "in-progress", "review", "done"]:
            raise ValueError(f"Invalid status: {status}")

        if priority not in ["high", "medium", "low"]:
            raise ValueError(f"Invalid priority: {priority}")

        # Generate task ID and filename.
        task_id = _generate_task_id()
        filename = _sanitize_filename(title)
        # BUG FIX: the path was previously built from a hard-coded literal
        # instead of the sanitized title, so `filename` was never used and
        # every task in a column collided on the same file. Use the
        # sanitized title as the filename stem.
        file_path = HOME_TASKS_DIR / status / f"{filename}.md"

        # Ensure a unique filename by appending a numeric suffix.
        counter = 1
        while file_path.exists():
            file_path = HOME_TASKS_DIR / status / f"{filename}-{counter}.md"
            counter += 1

        # Create task metadata (dates stored as YYYY-MM-DD strings).
        now = datetime.now().strftime("%Y-%m-%d")
        metadata = {
            "id": task_id,
            "title": title,
            "status": status,
            "priority": priority,
            "created": now,
            "updated": now,
            "tags": tags if tags else []
        }

        # Write task file (validates the path internally).
        _write_task_file(file_path, metadata, description)

        return f"Task '{title}' created (ID: {task_id}) in {status} column."
|
|
|
|
|
|
class UpdateTaskStatusTool(BaseTool):
    """Tool for updating task status (moving between columns)."""

    @property
    def name(self) -> str:
        return "update_task_status"

    @property
    def description(self) -> str:
        return "Update task status (move between Kanban columns: backlog, todo, in-progress, review, done)."

    def get_schema(self) -> Dict[str, Any]:
        """Get tool schema."""
        return {
            "name": self.name,
            "description": self.description,
            "inputSchema": {
                "type": "object",
                "properties": {
                    "task_id": {
                        "type": "string",
                        "description": "Task ID (e.g., TASK-ABC123)"
                    },
                    "status": {
                        "type": "string",
                        "description": "New status",
                        "enum": ["backlog", "todo", "in-progress", "review", "done"]
                    }
                },
                "required": ["task_id", "status"]
            }
        }

    def _find_task(self, task_id):
        """Scan every column for the file whose frontmatter id matches; return (path, metadata) or None."""
        for column in ["backlog", "todo", "in-progress", "review", "done"]:
            column_path = HOME_TASKS_DIR / column
            if not column_path.exists():
                continue
            for candidate in column_path.glob("*.md"):
                try:
                    meta = _read_task_file(candidate)
                except Exception:
                    continue  # unreadable/malformed files are skipped
                if meta.get("id") == task_id:
                    return candidate, meta
        return None

    def execute(self, arguments: Dict[str, Any]) -> str:
        """Move the identified task's file into the requested status column."""
        _ensure_tasks_dir()

        task_id = arguments.get("task_id", "").strip()
        new_status = arguments.get("status", "").strip()

        if not task_id:
            raise ValueError("Missing required argument: task_id")

        if new_status not in ["backlog", "todo", "in-progress", "review", "done"]:
            raise ValueError(f"Invalid status: {new_status}")

        found = self._find_task(task_id)
        if not found:
            raise ValueError(f"Task not found: {task_id}")

        old_file_path, metadata = found
        old_status = metadata.get("status")

        if old_status == new_status:
            return f"Task {task_id} is already in {new_status} status."

        # Bump status + updated date, then rewrite the file under the new
        # column and remove the old copy.
        body = metadata.get("body", "")
        metadata["status"] = new_status
        metadata["updated"] = datetime.now().strftime("%Y-%m-%d")

        new_file_path = HOME_TASKS_DIR / new_status / old_file_path.name
        _write_task_file(new_file_path, metadata, body)
        old_file_path.unlink()

        return f"Task {task_id} moved from {old_status} to {new_status}."
|
|
|
|
|
|
class ListTasksTool(BaseTool):
    """Tool for listing tasks."""

    @property
    def name(self) -> str:
        return "list_tasks"

    @property
    def description(self) -> str:
        return "List tasks from the home Kanban board, optionally filtered by status or priority."

    def get_schema(self) -> Dict[str, Any]:
        """Get tool schema."""
        return {
            "name": self.name,
            "description": self.description,
            "inputSchema": {
                "type": "object",
                "properties": {
                    "status": {
                        "type": "string",
                        "description": "Filter by status",
                        "enum": ["backlog", "todo", "in-progress", "review", "done"]
                    },
                    "priority": {
                        "type": "string",
                        "description": "Filter by priority",
                        "enum": ["high", "medium", "low"]
                    },
                    "limit": {
                        "type": "integer",
                        "description": "Maximum number of tasks to return",
                        "default": 20
                    }
                }
            }
        }

    def execute(self, arguments: Dict[str, Any]) -> str:
        """List matching tasks, most recently updated first, up to `limit` entries."""
        _ensure_tasks_dir()

        status_filter = arguments.get("status")
        priority_filter = arguments.get("priority")
        limit = arguments.get("limit", 20)

        # A status filter narrows the scan to a single column directory.
        columns = [status_filter] if status_filter else ["backlog", "todo", "in-progress", "review", "done"]

        tasks = []
        for column in columns:
            column_path = HOME_TASKS_DIR / column
            if not column_path.exists():
                continue
            for candidate in column_path.glob("*.md"):
                try:
                    meta = _read_task_file(candidate)
                except Exception:
                    continue  # skip unreadable/malformed task files
                if priority_filter and meta.get("priority") != priority_filter:
                    continue
                tasks.append(meta)

        if not tasks:
            filter_str = ""
            if status_filter:
                filter_str += f" with status '{status_filter}'"
            if priority_filter:
                filter_str += f" and priority '{priority_filter}'"
            return f"No tasks found{filter_str}."

        # Newest updates first, then truncate to the requested page size.
        tasks.sort(key=lambda entry: entry.get("updated", ""), reverse=True)
        tasks = tasks[:limit]

        # Assemble the report from parts and join once at the end.
        parts = [f"Found {len(tasks)} task(s):\n\n"]
        for task in tasks:
            task_id = task.get("id", "unknown")
            title = task.get("title", "Untitled")
            status = task.get("status", "unknown")
            priority = task.get("priority", "medium")
            updated = task.get("updated", "unknown")

            parts.append(f"{task_id}: {title}\n")
            parts.append(f" Status: {status}, Priority: {priority}, Updated: {updated}\n")
            tags = task.get("tags", [])
            if tags:
                parts.append(f" Tags: {', '.join(tags)}\n")
            parts.append("\n")

        return "".join(parts).strip()
|