Install Ollama & Open WebUI with persistence on an Ubuntu 24.04 virtual machine on Proxmox

#!/bin/bash
#
# Automated setup of Ollama + Open WebUI on Ubuntu 24.04.
# Must run as root: it writes to /var/log and /etc/systemd/system.

# Fail fast on errors, unset variables, and failed pipeline stages.
set -euo pipefail

# Log everything (stdout and stderr) for debugging.
exec > /var/log/setup_open_webui_ollama.log 2>&1

echo "Starting the automated setup script..."

# Update and upgrade the system. Non-interactive frontend prevents apt
# from blocking this unattended run on a config-file prompt.
echo "Updating and upgrading the system..."
export DEBIAN_FRONTEND=noninteractive
apt-get update && apt-get upgrade -y

# Install basic build and runtime dependencies.
echo "Installing required dependencies..."
apt-get install -y net-tools curl wget git unzip build-essential python3 python3-pip python3-venv

# Install Node.js.
# NOTE(review): Node.js is NOT required by Ollama (a self-contained
# binary) nor by the pip-installed Open WebUI — the original comment was
# wrong. Kept in case other tooling on the VM expects it; Node 18 is
# past end-of-life, so consider a current LTS line.
echo "Installing Node.js..."
# sudo dropped: the script already runs as root (it writes /var/log and
# /etc/systemd/system), so mixing sudo and non-sudo was inconsistent.
curl -fsSL https://deb.nodesource.com/setup_18.x | bash -
apt-get install -y nodejs

# Install Ollama via the official installer. NOTE(review): on systemd
# hosts this installer typically sets up and starts its own
# ollama.service — see the unit overwrite further below.
echo "Installing Ollama..."
curl -fsSL https://ollama.com/install.sh | sh

# Pull the llama3.2 model now so it is available immediately on first use.
echo "Downloading the llama3.2 model..."
ollama pull llama3.2

# Set up an isolated virtual environment for Open WebUI.
# (python3 -m venv creates /opt/open-webui and the venv subdirectory.)
echo "Setting up a virtual environment for Open WebUI..."
python3 -m venv /opt/open-webui/venv

# shellcheck disable=SC1091 — the venv is created just above
source /opt/open-webui/venv/bin/activate

# Upgrade pip first so wheel resolution for the large packages below
# (torch, transformers) does not fail on an outdated resolver.
pip install --upgrade pip

# Install Open WebUI.
echo "Installing Open WebUI..."
pip install open-webui

# Install PyTorch CPU wheels; swap the index URL for a CUDA index
# (e.g. .../whl/cu121) on GPU hosts.
echo "Installing PyTorch for CPU..."
pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu

# Extra Python tooling used alongside the WebUI.
echo "Installing Flask, Gradio, and Transformers..."
pip install flask gradio transformers

deactivate

# Create a systemd service for Open WebUI

cat <<EOL | sudo tee /etc/systemd/system/open-webui.service

[Unit]

Description=Open WebUI Server

After=network.target

[Service]

Type=simple

User=root

WorkingDirectory=/opt/open-webui

ExecStart=/opt/open-webui/venv/bin/open-webui serve

Restart=always

[Install]

WantedBy=multi-user.target

EOL

# Reload systemd and enable the Open WebUI service

echo "Enabling Open WebUI systemd service..."

systemctl daemon-reload

systemctl enable open-webui.service

systemctl start open-webui.service

# Create a systemd service for Ollama

cat <<EOL | sudo tee /etc/systemd/system/ollama.service

[Unit]

Description=Ollama Service

After=network.target

[Service]

Type=simple

User=root

ExecStart=/usr/local/bin/ollama serve

Restart=always

[Install]

WantedBy=multi-user.target

EOL

# Reload systemd and enable the Ollama service

echo "Enabling Ollama systemd service..."

systemctl daemon-reload

systemctl enable ollama.service

systemctl start ollama.service

echo "Setup completed! Both Open WebUI and Ollama are now persistent after reboot."