author    Ben Sima <ben@bsima.me>  2025-02-04 21:17:29 -0500
committer Ben Sima <ben@bsima.me>  2025-02-04 21:17:29 -0500
commit    d97ddbe2d844e5fae8bfa4fd4f1a047df81e36b5 (patch)
tree      13611b5ee72ec08cfe3f4a58ec5d40e53be043f2 /Omni
parent    9fd4b5da05b7ff5c248e3e3f96f13e7c98ec72f6 (diff)
Deploy open-webui
This is a very nice web UI frontend, similar to ChatGPT, that can use both OpenAI and Ollama as backends at the same time. Currently I'm just using it locally, but eventually I think I'll expose it over the internet and use it as my go-to LLM interface.
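If it does get exposed over the internet, a minimal sketch of what that might look like, assuming an nginx reverse proxy with ACME; the hostname and the whole virtualHost are hypothetical, none of this is in the commit:

# Hypothetical follow-up, not part of this commit: exposing open-webui
# behind nginx with TLS. Hostname, ACME setup, and the import path are
# assumptions.
{...}: let
  ports = import ./Omni/Cloud/Ports.nix;
in {
  services.nginx.enable = true;
  services.nginx.virtualHosts."chat.simatime.com" = {
    enableACME = true;
    forceSSL = true;
    locations."/" = {
      proxyPass = "http://127.0.0.1:${toString ports.open-webui}";
      proxyWebsockets = true; # Open WebUI streams responses over websockets
    };
  };
}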
Diffstat (limited to 'Omni')
-rw-r--r--  Omni/Cloud/Ports.nix                   2
-rwxr-xr-x  Omni/Dev/Beryllium.nix                 1
-rw-r--r--  Omni/Dev/Beryllium/Configuration.nix   1
-rw-r--r--  Omni/Dev/Beryllium/Ollama.nix          3
-rw-r--r--  Omni/Dev/Beryllium/OpenWebui.nix      14
5 files changed, 20 insertions(+), 1 deletion(-)
diff --git a/Omni/Cloud/Ports.nix b/Omni/Cloud/Ports.nix
index 5b8446c..9d5bc4c 100644
--- a/Omni/Cloud/Ports.nix
+++ b/Omni/Cloud/Ports.nix
@@ -31,6 +31,8 @@
mpd-stream = 8097;
murmur = 64738;
nostr-relay = 8084;
+ ollama = 11434;
+ open-webui = 8088;
radicale = 5232;
sabten = 8081;
ssh = 22;
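Ports.nix evaluates to a plain attribute set, so any module can import it and reference ports by name, keeping all port assignments in one place. A sketch of a consumer (not in this diff) that opens the two new ports in the NixOS firewall:

# Sketch of a hypothetical consumer module: import the shared port map
# and open the new ports.
{...}: let
  ports = import ./Ports.nix;
in {
  networking.firewall.allowedTCPPorts = [ports.ollama ports.open-webui];
}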
diff --git a/Omni/Dev/Beryllium.nix b/Omni/Dev/Beryllium.nix
index 0327822..9a72353 100755
--- a/Omni/Dev/Beryllium.nix
+++ b/Omni/Dev/Beryllium.nix
@@ -10,6 +10,7 @@ bild.os {
./Beryllium/Ollama.nix
./Docker.nix
./Vpn.nix
+ ./Beryllium/OpenWebui.nix
];
networking.hostName = "beryllium";
networking.domain = "beryl.simatime.com";
diff --git a/Omni/Dev/Beryllium/Configuration.nix b/Omni/Dev/Beryllium/Configuration.nix
index a371649..4a792ef 100644
--- a/Omni/Dev/Beryllium/Configuration.nix
+++ b/Omni/Dev/Beryllium/Configuration.nix
@@ -83,6 +83,7 @@ in {
hardware.nvidia.powerManagement.finegrained = false;
hardware.nvidia.open = true;
hardware.nvidia.nvidiaSettings = true;
+ hardware.nvidia-container-toolkit.enable = true;
hardware.keyboard.zsa.enable = true;
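Enabling nvidia-container-toolkit makes NixOS generate CDI specs so Docker containers can request the GPU. A hedged sketch of how a container would ask for it with Docker 25+; whether open-webui itself needs the GPU here is an assumption, since inference runs in the host ollama service:

# Hedged sketch, not in this commit: request the GPU via CDI.
virtualisation.oci-containers.containers.open-webui.extraOptions = [
  "--network=host"
  "--device=nvidia.com/gpu=all"
];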
diff --git a/Omni/Dev/Beryllium/Ollama.nix b/Omni/Dev/Beryllium/Ollama.nix
index 0018f49..3f2398e 100644
--- a/Omni/Dev/Beryllium/Ollama.nix
+++ b/Omni/Dev/Beryllium/Ollama.nix
@@ -10,6 +10,7 @@ If you want to spend time on it, spend time over there.
*/
let
pkg = pkgs.unstable.ollama;
+ ports = import ../../Cloud/Ports.nix;
in {
systemd.services.ollama = {
description = "ollama";
@@ -17,7 +18,7 @@ in {
wantedBy = ["multi-user.target"];
environment = {
- OLLAMA_HOST = "localhost:11434";
+ OLLAMA_HOST = "0.0.0.0:${toString ports.ollama}";
# Where to store LLM model files.
HOME = "%S/ollama";
OLLAMA_MODELS = "%S/ollama/models";
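Since open-webui runs with --network=host, it could already reach ollama on 127.0.0.1; binding 0.0.0.0 additionally exposes the API on the LAN/VPN. A hedged sketch, not in this commit, of keeping it off untrusted interfaces with a per-interface firewall rule; "tailscale0" is a hypothetical VPN interface name:

# Hedged sketch: only allow the ollama port on a trusted interface.
{...}: let
  ports = import ../../Cloud/Ports.nix;
in {
  networking.firewall.interfaces."tailscale0".allowedTCPPorts = [ports.ollama];
}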
diff --git a/Omni/Dev/Beryllium/OpenWebui.nix b/Omni/Dev/Beryllium/OpenWebui.nix
new file mode 100644
index 0000000..7b95331
--- /dev/null
+++ b/Omni/Dev/Beryllium/OpenWebui.nix
@@ -0,0 +1,14 @@
+{config, ...}: let
+ ports = import ../../Cloud/Ports.nix;
+in {
+ config.virtualisation.oci-containers.backend = "docker";
+ config.virtualisation.oci-containers.containers.open-webui = {
+ image = "ghcr.io/open-webui/open-webui:main";
+ volumes = ["/var/lib/open-webui:/app/backend/data"];
+ environment = {
+ OLLAMA_BASE_URL = "http://127.0.0.1:${toString ports.ollama}";
+ # environment values must be strings, so coerce the port number.
+ PORT = toString ports.open-webui;
+ };
+ extraOptions = ["--network=host"];
+ };
+}
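The commit message mentions the OpenAI backend as well. A hedged sketch of wiring it up: OPENAI_API_KEY is a documented Open WebUI variable, and loading it from an environment file keeps the secret out of the repo; the /run/secrets path is an assumption:

# Hedged sketch, not in this commit: pass the OpenAI key via an env file.
config.virtualisation.oci-containers.containers.open-webui = {
  environmentFiles = ["/run/secrets/open-webui.env"]; # OPENAI_API_KEY=sk-...
};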