summaryrefslogtreecommitdiff
path: root/Omni/Dev/Beryllium/OpenWebui.nix
diff options
context:
space:
mode:
authorBen Sima <ben@bsima.me>2025-02-04 21:17:29 -0500
committerBen Sima <ben@bsima.me>2025-02-04 21:17:29 -0500
commitd97ddbe2d844e5fae8bfa4fd4f1a047df81e36b5 (patch)
tree13611b5ee72ec08cfe3f4a58ec5d40e53be043f2 /Omni/Dev/Beryllium/OpenWebui.nix
parent9fd4b5da05b7ff5c248e3e3f96f13e7c98ec72f6 (diff)
Deploy open-webui
This is a very nice web ui frontend similar to ChatGPT that can use both OpenAI and Ollama as backends at the same time. Currently I'm just using it locally but eventually I think I'll expose it over the internet and use it as my go-to LLM interface.
Diffstat (limited to 'Omni/Dev/Beryllium/OpenWebui.nix')
-rw-r--r-- | Omni/Dev/Beryllium/OpenWebui.nix | 14
1 file changed, 14 insertions(+), 0 deletions(-)
diff --git a/Omni/Dev/Beryllium/OpenWebui.nix b/Omni/Dev/Beryllium/OpenWebui.nix
new file mode 100644
index 0000000..7b95331
--- /dev/null
+++ b/Omni/Dev/Beryllium/OpenWebui.nix
@@ -0,0 +1,14 @@
+{config, ...}: let
+  # Central port registry shared across the deployment.
+  ports = import ../../Cloud/Ports.nix;
+in {
+  config.virtualisation.oci-containers.backend = "docker";
+  config.virtualisation.oci-containers.containers.open-webui = {
+    image = "ghcr.io/open-webui/open-webui:main";
+    volumes = ["/var/lib/open-webui:/app/backend/data"]; # persist app data on host
+    environment = {
+      OLLAMA_BASE_URL = "http://127.0.0.1:${toString ports.ollama}";
+      PORT = toString ports.open-webui; # environment is attrsOf str: coerce port
+    };
+    extraOptions = ["--network=host"]; # host net so 127.0.0.1 reaches local Ollama
+  };
+}