#!/bin/bash

# Exit on any error
set -e

echo "Starting Ollama and FastAPI application..."

# Create necessary directories
mkdir -p /tmp/ollama/models
export OLLAMA_MODELS=/tmp/ollama/models
export OLLAMA_HOME=/tmp/ollama
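# NOTE: /tmp is assumed to be writable in this environment (e.g. a container
# whose home directory is read-only); adjust these paths if models need to
# persist across restarts.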

# Start Ollama server in the background
echo "Starting Ollama server..."
ollama serve &
OLLAMA_PID=$!
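# The PID is recorded so the cleanup handler below can stop the server.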

# Wait for Ollama to be ready
echo "Waiting for Ollama server to start..."
for i in {1..30}; do
    if curl -s http://localhost:11434/api/tags > /dev/null 2>&1; then
        echo "Ollama server is ready!"
        break
    fi
    if [ $i -eq 30 ]; then
        echo "Timeout waiting for Ollama server to start"
        exit 1
    fi
    sleep 2
done

# List locally available models (useful for debugging startup issues)
ollama list

# Pull the model the application expects
echo "Pulling tinyllama:1.1b model..."
ollama pull tinyllama:1.1b

# Clean up the background Ollama server on shutdown
cleanup() {
    echo "Shutting down..."
    kill $OLLAMA_PID 2>/dev/null || true
    exit 0
}

# Register the handler before launching the app so signals are caught
# while the FastAPI process runs in the foreground
trap cleanup SIGTERM SIGINT

# Start FastAPI application (blocks until the app exits)
echo "Starting FastAPI application..."
python app.py

# If the app exits on its own, shut Ollama down as well
cleanup