From d97ddbe2d844e5fae8bfa4fd4f1a047df81e36b5 Mon Sep 17 00:00:00 2001
From: Ben Sima
Date: Tue, 4 Feb 2025 21:17:29 -0500
Subject: Deploy open-webui

This is a very nice web UI frontend, similar to ChatGPT, that can use both
OpenAI and Ollama as backends at the same time. Currently I'm just using it
locally, but eventually I think I'll expose it over the internet and use it
as my go-to LLM interface.
---
 Omni/Dev/Beryllium/OpenWebui.nix | 14 ++++++++++++++
 1 file changed, 14 insertions(+)
 create mode 100644 Omni/Dev/Beryllium/OpenWebui.nix

diff --git a/Omni/Dev/Beryllium/OpenWebui.nix b/Omni/Dev/Beryllium/OpenWebui.nix
new file mode 100644
index 0000000..7b95331
--- /dev/null
+++ b/Omni/Dev/Beryllium/OpenWebui.nix
@@ -0,0 +1,14 @@
+{config, ...}: let
+  ports = import ../../Cloud/Ports.nix;
+in {
+  config.virtualisation.oci-containers.backend = "docker";
+  config.virtualisation.oci-containers.containers.open-webui = {
+    image = "ghcr.io/open-webui/open-webui:main";
+    volumes = ["/var/lib/open-webui:/app/backend/data"];
+    environment = {
+      OLLAMA_BASE_URL = "http://127.0.0.1:${toString ports.ollama}";
+      PORT = toString ports.open-webui;
+    };
+    extraOptions = ["--network=host"];
+  };
+}
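
As a follow-up sketch (not part of the commit): the commit message mentions using
OpenAI alongside Ollama, and open-webui reads its OpenAI settings from environment
variables (OPENAI_API_BASE_URL, OPENAI_API_KEY). One way the container definition
above could be extended, reusing the `ports` binding from the same file and
assuming a hypothetical secrets file at /run/secrets/open-webui.env that holds
the API key:

  config.virtualisation.oci-containers.containers.open-webui = {
    image = "ghcr.io/open-webui/open-webui:main";
    volumes = ["/var/lib/open-webui:/app/backend/data"];
    environment = {
      # Ollama backend, same as in the patch above.
      OLLAMA_BASE_URL = "http://127.0.0.1:${toString ports.ollama}";
      # Point the OpenAI-compatible backend at the public API endpoint.
      OPENAI_API_BASE_URL = "https://api.openai.com/v1";
      PORT = toString ports.open-webui;
    };
    # Keep OPENAI_API_KEY out of the world-readable Nix store by loading it
    # from an environment file on the host (hypothetical path).
    environmentFiles = ["/run/secrets/open-webui.env"];
    extraOptions = ["--network=host"];
  };

With --network=host the container shares the host's network namespace, which is
why 127.0.0.1 reaches an Ollama service listening on the host.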