Compare commits


No commits in common. "32cd74e49f68868308e8549eeee3f14e4ea3749a" and "77b06ee3f9b1e22f7aae98400a34c8485e748759" have entirely different histories.

3 changed files with 1 addition and 37 deletions
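Because the two commits share no merge base, every difference between them is listed below as if the files had been rewritten. A quick way to confirm the unrelated histories locally (illustrative; assumes a clone that already has both commits fetched):

```sh
# git merge-base prints nothing and exits non-zero when the commits have no common ancestor.
git merge-base 32cd74e49f68868308e8549eeee3f14e4ea3749a 77b06ee3f9b1e22f7aae98400a34c8485e748759 \
  || echo "no common ancestor"
```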

View File

@@ -13,13 +13,6 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
 > [!NOTE]
 All LXC instances created using this repository come pre-installed with Midnight Commander, which is a command-line tool (`mc`) that offers a user-friendly file and directory management interface for the terminal environment.
-## 2024-10-27
-### Changed
-- **Open WebUI LXC** [(Commit)](https://github.com/tteck/Proxmox/commit/8a21f6e7f025a911865395d4c0fa9a001bd0d512)
-  - Refactor Script to add an option to add Ollama.
 ## 2024-10-26
 ### Changed
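The changelog note above mentions Midnight Commander; as a minimal illustration (the paths are arbitrary examples), it can be started inside any of these containers with two directory panels:

```sh
# Open mc with /etc in the left panel and /var/log in the right panel.
mc /etc /var/log
```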

View File

@@ -67,7 +67,6 @@ then
 fi
 systemctl stop open-webui.service
 npm install &>/dev/null
-export NODE_OPTIONS="--max-old-space-size=3584"
 npm run build &>/dev/null
 cd ./backend
 pip install -r requirements.txt -U &>/dev/null
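For context on the `export NODE_OPTIONS` line removed above: `--max-old-space-size` caps V8's old-generation heap (in MiB) for the Node process that runs `npm run build`. A small, illustrative check of the limit a given value produces (not part of the update script):

```sh
export NODE_OPTIONS="--max-old-space-size=3584"
# Print the V8 heap size limit (in MiB) that a node process started with this environment gets.
node -e 'const v8 = require("v8"); console.log(Math.round(v8.getHeapStatistics().heap_size_limit / 1048576), "MiB")'
```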

View File

@@ -49,40 +49,12 @@ cp .env.example .env
 cat <<EOF >/opt/open-webui/.env
 ENV=prod
 ENABLE_OLLAMA_API=false
-OLLAMA_BASE_URL=http://0.0.0.0:11434
 EOF
 $STD npm install
-export NODE_OPTIONS="--max-old-space-size=3584"
+export NODE_OPTIONS="--max-old-space-size=4096"
 $STD npm run build
 msg_ok "Installed Open WebUI"
-read -r -p "Would you like to add Ollama? <y/N> " prompt
-if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
-  msg_info "Installing Ollama"
-  curl -fsSLO https://ollama.com/download/ollama-linux-amd64.tgz
-  tar -C /usr -xzf ollama-linux-amd64.tgz
-  rm -rf ollama-linux-amd64.tgz
-  cat <<EOF >/etc/systemd/system/ollama.service
-[Unit]
-Description=Ollama Service
-After=network-online.target
-[Service]
-Type=exec
-ExecStart=/usr/bin/ollama serve
-Environment=HOME=$HOME
-Environment=OLLAMA_HOST=0.0.0.0
-Restart=always
-RestartSec=3
-[Install]
-WantedBy=multi-user.target
-EOF
-  systemctl enable -q --now ollama.service
-  sed -i 's/ENABLE_OLLAMA_API=false/ENABLE_OLLAMA_API=true/g' /opt/open-webui/.env
-  msg_ok "Installed Ollama"
-fi
 msg_info "Creating Service"
 cat <<EOF >/etc/systemd/system/open-webui.service
 [Unit]