Compare commits

..

4 Commits

Author SHA1 Message Date
tteckster 32cd74e49f
Update CHANGELOG.md 2024-10-27 19:05:32 -04:00
Håvard Gjøby Thom 28896707bc
Add max-old-space-size in openwebui.sh (#3993) 2024-10-27 18:36:01 -04:00
Håvard Gjøby Thom 0340aec142
Adjust max-old-space-size in openwebui-install.sh (#3992) 2024-10-27 18:24:22 -04:00
Håvard Gjøby Thom 8a21f6e7f0
Add option to install Ollama in Open WebUI LXC (#3991) 2024-10-27 18:14:57 -04:00
3 changed files with 37 additions and 1 deletions

View File

@@ -13,6 +13,13 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
> [!NOTE]
All LXC instances created using this repository come pre-installed with Midnight Commander, which is a command-line tool (`mc`) that offers a user-friendly file and directory management interface for the terminal environment.
## 2024-10-27
### Changed
- **Open WebUI LXC** [(Commit)](https://github.com/tteck/Proxmox/commit/8a21f6e7f025a911865395d4c0fa9a001bd0d512)
- Refactor Script to add an option to add Ollama.
## 2024-10-26
### Changed

View File

@@ -67,6 +67,7 @@ then
fi
systemctl stop open-webui.service
npm install &>/dev/null
export NODE_OPTIONS="--max-old-space-size=3584"
npm run build &>/dev/null
cd ./backend
pip install -r requirements.txt -U &>/dev/null

View File

@@ -49,12 +49,40 @@ cp .env.example .env
cat <<EOF >/opt/open-webui/.env
ENV=prod
ENABLE_OLLAMA_API=false
OLLAMA_BASE_URL=http://0.0.0.0:11434
EOF
$STD npm install
export NODE_OPTIONS="--max-old-space-size=3584"
$STD npm run build
msg_ok "Installed Open WebUI"
# Optionally install Ollama alongside Open WebUI; default answer is No.
read -r -p "Would you like to add Ollama? <y/N> " prompt
# ${prompt,,} lowercases the reply (bash 4+); accept "y" or "yes" only.
if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
msg_info "Installing Ollama"
# Fetch the prebuilt amd64 tarball and unpack it under /usr
# (provides /usr/bin/ollama, referenced by the unit below).
curl -fsSLO https://ollama.com/download/ollama-linux-amd64.tgz
tar -C /usr -xzf ollama-linux-amd64.tgz
rm -rf ollama-linux-amd64.tgz
# Write the systemd unit. The heredoc delimiter is unquoted, so $HOME
# expands at install time and the current home dir is baked into the unit.
cat <<EOF >/etc/systemd/system/ollama.service
[Unit]
Description=Ollama Service
After=network-online.target
[Service]
Type=exec
ExecStart=/usr/bin/ollama serve
Environment=HOME=$HOME
Environment=OLLAMA_HOST=0.0.0.0
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now ollama.service
# Flip the flag written earlier into /opt/open-webui/.env so
# Open WebUI uses the freshly installed local Ollama API.
sed -i 's/ENABLE_OLLAMA_API=false/ENABLE_OLLAMA_API=true/g' /opt/open-webui/.env
msg_ok "Installed Ollama"
fi
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/open-webui.service
[Unit]