#!/bin/bash
# Nanobot Setup Script
# Automates installation and configuration of nanobot with Ollama/AirLLM
set -e # Exit on error
# NOTE: 'set -u' and 'pipefail' are deliberately NOT enabled: the script
# probes possibly-unset variables (e.g. $VIRTUAL_ENV) and uses pipelines
# like 'ollama list | grep -q' whose left side may be closed early.

# ANSI color escape sequences for terminal output (readonly constants).
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m' # No Color

# Configuration: install locations and the AirLLM model identifier.
readonly VENV_DIR="venv"
readonly CONFIG_DIR="$HOME/.nanobot"
readonly CONFIG_FILE="$CONFIG_DIR/config.json"
readonly MODEL_DIR="$HOME/.local/models/llama3.2-3b-instruct"
readonly MODEL_NAME="meta-llama/Llama-3.2-3B-Instruct"
# --- Output helpers ---
# Print a section title framed above and below by a blue '=' rule.
print_header() {
    local rule="${BLUE}========================================${NC}"
    echo -e "\n${rule}"
    echo -e "${BLUE}$1${NC}"
    echo -e "${rule}\n"
}
# Print a success message in green.
print_success() {
    printf '%b\n' "${GREEN}$1${NC}"
}
# Print a warning message in yellow.
print_warning() {
    printf '%b\n' "${YELLOW}$1${NC}"
}
# Print an error message in red (to stdout, matching the other helpers).
print_error() {
    printf '%b\n' "${RED}$1${NC}"
}
# Print an informational message in blue (note the leading space).
print_info() {
    printf '%b\n' "${BLUE} $1${NC}"
}
# Return 0 if the named command is available on PATH, non-zero otherwise.
command_exists() {
    command -v "$1" > /dev/null 2>&1
}
# Check prerequisites
# Verify required tooling before doing anything else:
#   - python3 >= 3.10 (hard requirement)
#   - pip (hard requirement; either pip3 or 'python3 -m pip')
#   - git (optional; warning only)
# Exits with status 1 if any hard requirement is missing.
check_prerequisites() {
    print_header "Checking Prerequisites"
    local missing=0
    if ! command_exists python3; then
        print_error "Python 3 is not installed"
        missing=1
    else
        PYTHON_VERSION=$(python3 --version 2>&1 | awk '{print $2}')
        print_success "Python $PYTHON_VERSION found"
        # Check Python version (need 3.10+). Parse "X.Y.Z" with a single
        # 'read' instead of two echo|cut pipelines, with quoted expansion
        # and no subshell grouping in the comparison.
        IFS=. read -r PYTHON_MAJOR PYTHON_MINOR _ <<< "$PYTHON_VERSION"
        if (( PYTHON_MAJOR < 3 || (PYTHON_MAJOR == 3 && PYTHON_MINOR < 10) )); then
            print_error "Python 3.10+ required, found $PYTHON_VERSION"
            missing=1
        fi
    fi
    if ! command_exists git; then
        print_warning "Git is not installed (optional, but recommended)"
    else
        print_success "Git found"
    fi
    if ! command_exists pip3 && ! python3 -m pip --version >/dev/null 2>&1; then
        print_error "pip is not installed"
        missing=1
    else
        print_success "pip found"
    fi
    if [ "$missing" -eq 1 ]; then
        print_error "Missing required prerequisites. Please install them first."
        exit 1
    fi
    print_success "All prerequisites met"
}
# Create virtual environment
# Create the project virtualenv (optionally recreating an existing one),
# activate it in the current shell, and upgrade pip inside it.
setup_venv() {
    print_header "Setting Up Virtual Environment"
    if [ -d "$VENV_DIR" ]; then
        print_warning "Virtual environment already exists at $VENV_DIR"
        read -p "Recreate it? (y/n): " -n 1 -r
        echo
        # Guard clause: keep the existing venv unless the user opts in.
        if [[ ! $REPLY =~ ^[Yy]$ ]]; then
            print_info "Using existing virtual environment"
            return
        fi
        rm -rf "$VENV_DIR"
        print_info "Removed existing virtual environment"
    fi
    print_info "Creating virtual environment..."
    python3 -m venv "$VENV_DIR"
    print_success "Virtual environment created"
    print_info "Activating virtual environment..."
    source "$VENV_DIR/bin/activate"
    print_success "Virtual environment activated"
    print_info "Upgrading pip..."
    pip install --upgrade pip --quiet
    print_success "pip upgraded"
}
# Install dependencies
# Install nanobot (editable mode) into the venv and optionally the AirLLM
# extras. Sets the global USE_AIRLLM to true/false for later steps.
install_dependencies() {
    print_header "Installing Dependencies"
    # Make sure pip runs inside the project venv.
    [ -n "$VIRTUAL_ENV" ] || source "$VENV_DIR/bin/activate"
    print_info "Installing nanobot and dependencies..."
    pip install -e . --quiet
    print_success "Nanobot installed"
    # AirLLM is optional; a failed install is non-fatal by design.
    read -p "Do you want to use AirLLM? (y/n): " -n 1 -r
    echo
    USE_AIRLLM=false
    if [[ $REPLY =~ ^[Yy]$ ]]; then
        print_info "Installing AirLLM..."
        pip install airllm bitsandbytes --quiet || {
            print_warning "AirLLM installation had issues, but continuing..."
            print_info "You can install it later with: pip install airllm bitsandbytes"
        }
        print_success "AirLLM installed (or attempted)"
        USE_AIRLLM=true
    fi
}
# Check for Ollama
# Return 0 when Ollama is both installed and responding ('ollama list'
# succeeds); return 1 (with a warning) otherwise.
check_ollama() {
    if ! command_exists ollama; then
        print_warning "Ollama is not installed"
        return 1
    fi
    print_success "Ollama is installed"
    if ! ollama list >/dev/null 2>&1; then
        print_warning "Ollama is installed but not running"
        return 1
    fi
    print_success "Ollama is running"
    return 0
}
# Setup Ollama configuration
# Install Ollama if needed, make sure the llama3.2 model is pulled, and
# write an Ollama-backed nanobot config to $CONFIG_FILE.
# Returns 0 on success, 1 if the user declines or a step fails.
setup_ollama() {
print_header "Setting Up Ollama"
if ! check_ollama; then
print_info "Ollama is not installed or not running"
read -p "Do you want to install Ollama? (y/n): " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
print_info "Installing Ollama..."
# NOTE(review): pipes a remote script straight into sh — convenient for a
# setup helper, but worth flagging for security review.
curl -fsSL https://ollama.ai/install.sh | sh || {
print_error "Failed to install Ollama automatically"
print_info "Please install manually from: https://ollama.ai"
return 1
}
print_success "Ollama installed"
else
return 1
fi
fi
# Check if llama3.2 is available
# (substring match, so any llama3.2 tag already pulled counts)
if ollama list | grep -q "llama3.2"; then
print_success "llama3.2 model found"
else
print_info "Downloading llama3.2 model (this may take a while)..."
ollama pull llama3.2:latest || {
print_error "Failed to pull llama3.2 model"
return 1
}
print_success "llama3.2 model downloaded"
fi
# Create config
# Ollama's OpenAI-compatible endpoint ignores the API key, but nanobot
# presumably requires one to be present — hence the "dummy" value.
mkdir -p "$CONFIG_DIR"
cat > "$CONFIG_FILE" << EOF
{
"providers": {
"ollama": {
"apiKey": "dummy",
"apiBase": "http://localhost:11434/v1"
}
},
"agents": {
"defaults": {
"model": "llama3.2:latest"
}
}
}
EOF
# Config may eventually hold credentials; restrict to owner read/write.
chmod 600 "$CONFIG_FILE"
print_success "Ollama configuration created at $CONFIG_FILE"
return 0
}
# Setup AirLLM configuration
# Download the Llama 3.2 3B Instruct weights from Hugging Face (requires a
# user-supplied token for the gated repo) and write an AirLLM-backed nanobot
# config to $CONFIG_FILE. Returns 0 on success, 1 if skipped or failed.
setup_airllm() {
print_header "Setting Up AirLLM"
# Check if model already exists
# (config.json inside the model dir is used as the "download complete" marker)
if [ -d "$MODEL_DIR" ] && [ -f "$MODEL_DIR/config.json" ]; then
print_success "Model already exists at $MODEL_DIR"
else
print_info "Model needs to be downloaded"
print_info "You'll need a Hugging Face token to download gated models"
echo
print_info "Steps:"
echo " 1. Get token: https://huggingface.co/settings/tokens"
echo " 2. Accept license: https://huggingface.co/$MODEL_NAME"
echo
read -p "Do you have a Hugging Face token? (y/n): " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
print_warning "Skipping model download. You can download it later."
print_info "To download later, run:"
echo " huggingface-cli download $MODEL_NAME --local-dir $MODEL_DIR --token YOUR_TOKEN"
return 1
fi
# -s keeps the token out of the terminal echo; note it still appears in
# this process's argv below when passed via --token.
read -p "Enter your Hugging Face token: " -s HF_TOKEN
echo
if [ -z "$HF_TOKEN" ]; then
print_error "Token is required"
return 1
fi
# Install huggingface_hub if needed
if [ -z "$VIRTUAL_ENV" ]; then
source "$VENV_DIR/bin/activate"
fi
pip install huggingface_hub --quiet
print_info "Downloading model (this may take a while, ~2GB)..."
mkdir -p "$MODEL_DIR"
# NOTE(review): --local-dir-use-symlinks is deprecated/ignored in recent
# huggingface_hub releases — confirm against the installed CLI version.
huggingface-cli download "$MODEL_NAME" \
--local-dir "$MODEL_DIR" \
--token "$HF_TOKEN" \
--local-dir-use-symlinks False || {
print_error "Failed to download model"
print_info "Make sure you've accepted the license at: https://huggingface.co/$MODEL_NAME"
return 1
}
print_success "Model downloaded to $MODEL_DIR"
fi
# Create config
# NOTE(review): the local model *path* is stored in the "apiKey" field —
# presumably how nanobot's airllm provider expects it; verify against the
# provider implementation.
mkdir -p "$CONFIG_DIR"
cat > "$CONFIG_FILE" << EOF
{
"providers": {
"airllm": {
"apiKey": "$MODEL_DIR",
"apiBase": null,
"extraHeaders": {}
}
},
"agents": {
"defaults": {
"model": "$MODEL_DIR"
}
}
}
EOF
# Restrict the config to owner read/write.
chmod 600 "$CONFIG_FILE"
print_success "AirLLM configuration created at $CONFIG_FILE"
return 0
}
# Test installation
# Smoke-test the install: 'nanobot --help' must succeed (hard failure),
# then attempt a real query — a query failure is only a warning, since the
# model may still be loading.
test_installation() {
    print_header "Testing Installation"
    [ -n "$VIRTUAL_ENV" ] || source "$VENV_DIR/bin/activate"
    print_info "Testing nanobot installation..."
    if ! nanobot --help >/dev/null 2>&1; then
        print_error "Nanobot test failed"
        return 1
    fi
    print_success "Nanobot is installed and working"
    print_info "Testing with a simple query..."
    if nanobot agent -m "Hello, what is 2+5?" >/dev/null 2>&1; then
        print_success "Test query successful!"
    else
        print_warning "Test query had issues (this might be normal if model is still loading)"
        print_info "Try running manually: nanobot agent -m 'Hello'"
    fi
}
# Main setup flow
# Orchestrates the full setup: prerequisites -> venv -> dependencies ->
# provider configuration (Ollama and/or AirLLM) -> smoke test -> summary.
main() {
print_header "Nanobot Setup Script"
print_info "This script will set up nanobot with Ollama or AirLLM"
echo
# Check prerequisites
check_prerequisites
# Setup virtual environment
setup_venv
# Install dependencies
install_dependencies
# Choose provider
echo
print_header "Choose Provider"
echo "1. Ollama (easiest, no tokens needed)"
echo "2. AirLLM (direct local inference, no HTTP server)"
echo "3. Both (configure both, use either)"
echo
# Single-keypress menu; $REPLY is populated by 'read -r' above.
read -p "Choose option (1-3): " -n 1 -r
echo
PROVIDER_SETUP=false
case $REPLY in
1)
if setup_ollama; then
PROVIDER_SETUP=true
fi
;;
2)
if setup_airllm; then
PROVIDER_SETUP=true
fi
;;
3)
# Short-circuit: setup_airllm only runs if setup_ollama fails, so
# "Both" configures whichever succeeds first, not necessarily both.
if setup_ollama || setup_airllm; then
PROVIDER_SETUP=true
fi
;;
*)
print_warning "Invalid choice, skipping provider setup"
;;
esac
if [ "$PROVIDER_SETUP" = false ]; then
print_warning "Provider setup incomplete. You can configure manually later."
print_info "Config file location: $CONFIG_FILE"
fi
# Test installation
test_installation
# Final instructions
echo
print_header "Setup Complete!"
echo
print_success "Nanobot is ready to use!"
echo
print_info "To activate the virtual environment:"
echo " source $VENV_DIR/bin/activate"
echo
print_info "To use nanobot:"
echo " nanobot agent -m 'Your message here'"
echo
print_info "Configuration file: $CONFIG_FILE"
echo
print_info "For more information, see SETUP.md"
echo
}
# Entry point: forward any script arguments (currently unused by main).
main "$@"