mirror of https://github.com/tteck/Proxmox.git
Compare commits
4 Commits
77b06ee3f9...32cd74e49f

| Author | SHA1 | Date |
|---|---|---|
| tteckster | 32cd74e49f | |
| Håvard Gjøby Thom | 28896707bc | |
| Håvard Gjøby Thom | 0340aec142 | |
| Håvard Gjøby Thom | 8a21f6e7f0 | |
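For reference, the same range can be inspected locally with plain git. This is only an illustrative sketch, assuming the mirror still contains both commits of the compare:

```bash
# Clone the mirrored repository (read-only is fine for inspection)
git clone https://github.com/tteck/Proxmox.git
cd Proxmox

# List the commits between the base (77b06ee3f9) and head (32cd74e49f) of this compare
git log --oneline 77b06ee3f9..32cd74e49f

# Show the combined diff for the whole range
git diff 77b06ee3f9..32cd74e49f
```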
@@ -13,6 +13,13 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
 > [!NOTE]
 All LXC instances created using this repository come pre-installed with Midnight Commander, which is a command-line tool (`mc`) that offers a user-friendly file and directory management interface for the terminal environment.
 
+## 2024-10-27
+
+### Changed
+
+- **Open WebUI LXC** [(Commit)](https://github.com/tteck/Proxmox/commit/8a21f6e7f025a911865395d4c0fa9a001bd0d512)
+  - Refactor Script to add an option to add Ollama.
+
 ## 2024-10-26
 
 ### Changed
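As a side note on the Midnight Commander mention above: a minimal way to try `mc` on one of these containers from the Proxmox host, where the CTID 100 is only a hypothetical example:

```bash
# Open a root shell inside an example container (CTID 100 is hypothetical)
pct enter 100

# Launch Midnight Commander's two-pane file manager
mc
```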
@@ -67,6 +67,7 @@ then
 fi
 systemctl stop open-webui.service
 npm install &>/dev/null
+export NODE_OPTIONS="--max-old-space-size=3584"
 npm run build &>/dev/null
 cd ./backend
 pip install -r requirements.txt -U &>/dev/null
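The one added line in the hunk above caps V8's old-generation heap while `npm run build` runs, presumably so the build stays within the container's memory allotment. A quick sketch for checking the limit actually in effect; this check is not part of the repository's scripts, and the printed value only needs to land near the 3584 MB cap:

```bash
export NODE_OPTIONS="--max-old-space-size=3584"

# Ask V8 for its heap size limit and print it in MB
node -e 'console.log(Math.round(require("v8").getHeapStatistics().heap_size_limit / 1024 / 1024))'
```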
@@ -49,12 +49,40 @@ cp .env.example .env
 cat <<EOF >/opt/open-webui/.env
 ENV=prod
 ENABLE_OLLAMA_API=false
+OLLAMA_BASE_URL=http://0.0.0.0:11434
 EOF
 $STD npm install
-export NODE_OPTIONS="--max-old-space-size=4096"
+export NODE_OPTIONS="--max-old-space-size=3584"
 $STD npm run build
 msg_ok "Installed Open WebUI"
 
+read -r -p "Would you like to add Ollama? <y/N> " prompt
+if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
+msg_info "Installing Ollama"
+curl -fsSLO https://ollama.com/download/ollama-linux-amd64.tgz
+tar -C /usr -xzf ollama-linux-amd64.tgz
+rm -rf ollama-linux-amd64.tgz
+cat <<EOF >/etc/systemd/system/ollama.service
+[Unit]
+Description=Ollama Service
+After=network-online.target
+
+[Service]
+Type=exec
+ExecStart=/usr/bin/ollama serve
+Environment=HOME=$HOME
+Environment=OLLAMA_HOST=0.0.0.0
+Restart=always
+RestartSec=3
+
+[Install]
+WantedBy=multi-user.target
+EOF
+systemctl enable -q --now ollama.service
+sed -i 's/ENABLE_OLLAMA_API=false/ENABLE_OLLAMA_API=true/g' /opt/open-webui/.env
+msg_ok "Installed Ollama"
+fi
+
 msg_info "Creating Service"
 cat <<EOF >/etc/systemd/system/open-webui.service
 [Unit]