#!/bin/bash
# Exit on any error
set -e
echo "Starting Ollama and FastAPI application..."
# Create necessary directories
mkdir -p /tmp/ollama/models
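# Point Ollama at the writable model and home directories created above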
export OLLAMA_MODELS=/tmp/ollama/models
export OLLAMA_HOME=/tmp/ollama
# Start Ollama server in the background
echo "Starting Ollama server..."
ollama serve &
OLLAMA_PID=$!
# Wait for Ollama to be ready
echo "Waiting for Ollama server to start..."
for i in {1..30}; do
    if curl -s http://localhost:11434/api/tags > /dev/null 2>&1; then
        echo "Ollama server is ready!"
        break
    fi
    if [ $i -eq 30 ]; then
        echo "Timeout waiting for Ollama server to start"
        exit 1
    fi
    sleep 2
done
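# Show the models currently available to Ollama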
ollama list
# Pull the model used by the application (change the tag to use a different model)
echo "Pulling tinyllama:1.1b model..."
ollama pull tinyllama:1.1b
# Handle cleanup of background processes on shutdown; register the trap
# before launching the application so signals are caught while it runs
cleanup() {
    echo "Shutting down..."
    kill $OLLAMA_PID 2>/dev/null || true
    exit 0
}
trap cleanup SIGTERM SIGINT

# Start FastAPI application in the background so the trap can fire on SIGTERM/SIGINT
echo "Starting FastAPI application..."
python app.py &
APP_PID=$!

# Wait for the application to exit, then stop the Ollama server
wait $APP_PID
cleanup