#!/bin/bash
# Check if Ollama models still exist on GPU VM.
# Run this ON THE GPU VM.
#
# The script is diagnostic-only: every probe is best-effort, so we use
# 'set -u' (catch unset vars) but deliberately NOT '-e'/'pipefail' —
# missing tools or an unreachable Ollama API must not abort the report.
set -u

#######################################
# Report size and first few entries of a candidate model directory.
# Arguments: $1 - directory path to inspect
# Outputs:   human-readable summary to stdout
#######################################
check_model_dir() {
  local dir=$1
  echo "   $dir:"
  if [ -d "$dir" ]; then
    du -sh "$dir"
    ls -lh "$dir" | head -5
  else
    echo "   ✗ Not found"
  fi
  echo ""
}

echo "=== Checking Ollama Models ==="
echo ""

# Ask the local Ollama API which models it knows about.
# curl -s stays silent if the daemon is down; grep pulls the name fields.
echo "1. Checking Ollama API for models:"
curl -s http://localhost:11434/api/tags | python3 -m json.tool 2>/dev/null | grep -E '"name"|"model"' | head -10
echo ""

# Probe the common on-disk storage locations (deduplicated via helper).
echo "2. Checking common model storage locations:"
check_model_dir "$HOME/.ollama/models"
check_model_dir /usr/share/ollama/models
check_model_dir /var/lib/ollama/models

echo "3. Listing models known to the Ollama CLI:"
if command -v ollama > /dev/null; then
  # BUG FIX: 'ollama show' requires a model-name argument and errors out
  # when called bare; 'ollama list' is the command that lists all models.
  ollama list 2>&1 | head -5
fi
echo ""

# OLLAMA_MODELS, if set on the service, overrides the default storage path.
echo "4. Checking systemd service for OLLAMA_MODELS path:"
systemctl show ollama | grep -i model || echo "   No OLLAMA_MODELS env var set"
echo ""

echo "=== What we did ==="
echo "We only created: /etc/systemd/system/ollama.service.d/override.conf"
echo "This file only sets: OLLAMA_HOST=0.0.0.0:11434"
echo "It does NOT delete models."
echo ""
echo "If models are missing, they might be:"
echo "  1. In a different location (check above)"
echo "  2. Ollama needs to be restarted to see them"
echo "  3. Models were deleted separately (not by our script)"