nanobot/test_airllm.py
Tanya e6b5ead3fd Merge origin/main into feature branch
- Merged latest 166 commits from origin/main
- Resolved conflicts in .gitignore, commands.py, schema.py, providers/__init__.py, and registry.py
- Kept both local providers (Ollama, AirLLM) and new providers from main
- Preserved transformers 4.39.3 compatibility fixes
- Combined error handling improvements with new features
2026-02-18 13:03:19 -05:00

27 lines
848 B
Python

#!/usr/bin/env python3
"""Smoke test for the AirLLM provider wrapper used by Nanobot.

Imports the Ollama-compatible client factory, builds a client against a
local model path, and runs one short generation.  Progress messages go
to stderr; the model's output is printed to stdout.  Exits with status 1
on any failure, printing the error and a full traceback to stderr.
"""
import sys
import traceback

# Hard-coded local model path -- adjust for your environment.
MODEL_PATH = '/home/ladmin/.local/models/llama3.2-3b-instruct'

# Announce start on both streams so the message is visible whether the
# caller captures stdout, stderr, or both.
print("Starting test...", file=sys.stderr)
print("Starting test...", file=sys.stdout)

try:
    # NOTE(review): project-local import; deliberately inside the try so
    # an import failure is reported the same way as a runtime failure.
    from nanobot.providers.airllm_wrapper import create_ollama_client
    print("✓ Imported create_ollama_client", file=sys.stderr)

    print("Creating client with model path...", file=sys.stderr)
    client = create_ollama_client(MODEL_PATH)
    print("✓ Client created", file=sys.stderr)

    print("Testing generate...", file=sys.stderr)
    result = client.generate('Hello, what is 2+5?', max_tokens=20)
    print(f"✓ Result: {result}", file=sys.stderr)
    # The bare result on stdout is the script's only "payload" output.
    print(result)
except Exception as e:
    # Broad catch is intentional for a smoke test: any failure (import,
    # model load, generation) should be reported, not propagated.
    print(f"✗ ERROR: {e}", file=sys.stderr)
    traceback.print_exc(file=sys.stderr)
    sys.exit(1)