nanobot/download_llama3.2_local.sh
Tanya e6b5ead3fd Merge origin/main into feature branch
- Merged latest 166 commits from origin/main
- Resolved conflicts in .gitignore, commands.py, schema.py, providers/__init__.py, and registry.py
- Kept both local providers (Ollama, AirLLM) and new providers from main
- Preserved transformers 4.39.3 compatibility fixes
- Combined error handling improvements with new features
2026-02-18 13:03:19 -05:00

67 lines
2.0 KiB
Bash

#!/bin/bash
# Download llama3.2 in Hugging Face format to a local directory so that,
# after a single token-authenticated download, no Hugging Face token is
# ever needed again (the model is served from $MODEL_DIR).
#
# Usage: download_llama3.2_local.sh   (interactive: prompts for a token)

# -u: abort on unset variables (typo safety). Deliberately NOT using -e:
# the interactive `read` later in this script returns non-zero on EOF and
# the empty-token "skip download" path must still run instead of aborting.
set -u

# Hugging Face repo to fetch and the local directory it is stored in.
readonly MODEL_NAME="meta-llama/Llama-3.2-3B-Instruct"
readonly MODEL_DIR="$HOME/.local/models/llama3.2-3b-instruct"

echo "======================================================================"
echo "DOWNLOAD LLAMA3.2 FOR AIRLLM (ONE-TIME TOKEN NEEDED)"
echo "======================================================================"
echo ""
echo "This will download $MODEL_NAME to:"
echo " $MODEL_DIR"
echo ""
echo "After download, no tokens will be needed!"
echo ""
# Skip everything if a complete copy of the model is already on disk.
# config.json is used as the marker that a previous download finished.
if [ -d "$MODEL_DIR" ] && [ -f "$MODEL_DIR/config.json" ]; then
  echo "✓ Model already exists at: $MODEL_DIR"
  echo " You're all set! No download needed."
  exit 0
fi

# Make sure the huggingface-cli tool is available; install it on demand.
if ! command -v huggingface-cli &> /dev/null; then
  echo "⚠ huggingface-cli not found. Installing..."
  # Don't let a silent install failure surface later as a confusing
  # "download failed" — check pip's status explicitly.
  if ! pip install -q huggingface_hub; then
    echo "✗ Failed to install huggingface_hub. Install it manually with:" >&2
    echo "    pip install huggingface_hub" >&2
    exit 1
  fi
  # pip may install into a bin directory that is not on PATH — verify.
  if ! command -v huggingface-cli &> /dev/null; then
    echo "✗ huggingface-cli still not on PATH after install." >&2
    echo "  Check that your pip user bin directory is on PATH." >&2
    exit 1
  fi
fi
echo "You'll need a Hugging Face token (one-time only):"
echo " 1. Get token: https://huggingface.co/settings/tokens"
echo " 2. Accept license: https://huggingface.co/$MODEL_NAME"
echo ""
# -r keeps backslashes in the token literal (SC2162); `|| true` makes a
# closed stdin (non-interactive EOF) fall through to the skip path below
# with an empty HF_TOKEN instead of looking like an error.
read -r -p "Enter your Hugging Face token (or press Enter to skip): " HF_TOKEN || true
if [ -z "$HF_TOKEN" ]; then
  echo ""
  echo "Skipping download. To download later, run:"
  echo " huggingface-cli download $MODEL_NAME --local-dir $MODEL_DIR"
  exit 0
fi
echo ""
echo "Downloading model (this may take a while)..."
mkdir -p "$MODEL_DIR" || { echo "✗ Cannot create $MODEL_DIR" >&2; exit 1; }

# Test the command's exit status directly instead of via `$?` (SC2181).
# --local-dir-use-symlinks False copies real files into $MODEL_DIR so the
# model survives HF-cache pruning (newer huggingface_hub versions ignore
# the flag and always copy, so it is harmless there).
if huggingface-cli download "$MODEL_NAME" \
    --local-dir "$MODEL_DIR" \
    --token "$HF_TOKEN" \
    --local-dir-use-symlinks False; then
  echo ""
  echo "✓ Model downloaded successfully!"
  echo " Location: $MODEL_DIR"
  echo ""
  echo "🎉 No tokens needed anymore - using local model!"
  echo ""
  echo "Test it with:"
  echo " nanobot agent -m 'Hello, what is 2+5?'"
else
  # Diagnostics belong on stderr so scripted callers can detect them.
  echo "" >&2
  echo "⚠ Download failed. You can try again with:" >&2
  echo " huggingface-cli download $MODEL_NAME --local-dir $MODEL_DIR --token YOUR_TOKEN" >&2
fi