diff options
Diffstat (limited to 'Omni/Dev')
-rwxr-xr-x  Omni/Dev/Beryllium.nix               |  1 +
-rw-r--r--  Omni/Dev/Beryllium/Configuration.nix |  1 +
-rw-r--r--  Omni/Dev/Beryllium/Ollama.nix        |  3 ++-
-rw-r--r--  Omni/Dev/Beryllium/OpenWebui.nix     | 14 ++++++++++++++
4 files changed, 18 insertions(+), 1 deletion(-)
diff --git a/Omni/Dev/Beryllium.nix b/Omni/Dev/Beryllium.nix index 0327822..9a72353 100755 --- a/Omni/Dev/Beryllium.nix +++ b/Omni/Dev/Beryllium.nix @@ -10,6 +10,7 @@ bild.os { ./Beryllium/Ollama.nix ./Docker.nix ./Vpn.nix + ./Beryllium/OpenWebui.nix ]; networking.hostName = "beryllium"; networking.domain = "beryl.simatime.com"; diff --git a/Omni/Dev/Beryllium/Configuration.nix b/Omni/Dev/Beryllium/Configuration.nix index a371649..4a792ef 100644 --- a/Omni/Dev/Beryllium/Configuration.nix +++ b/Omni/Dev/Beryllium/Configuration.nix @@ -83,6 +83,7 @@ in { hardware.nvidia.powerManagement.finegrained = false; hardware.nvidia.open = true; hardware.nvidia.nvidiaSettings = true; + hardware.nvidia-container-toolkit.enable = true; hardware.keyboard.zsa.enable = true; diff --git a/Omni/Dev/Beryllium/Ollama.nix b/Omni/Dev/Beryllium/Ollama.nix index 0018f49..3f2398e 100644 --- a/Omni/Dev/Beryllium/Ollama.nix +++ b/Omni/Dev/Beryllium/Ollama.nix @@ -10,6 +10,7 @@ If you want to spend time on it, spend time over there. */ let pkg = pkgs.unstable.ollama; + ports = import ../../Cloud/Ports.nix; in { systemd.services.ollama = { description = "ollama"; @@ -17,7 +18,7 @@ in { wantedBy = ["multi-user.target"]; environment = { - OLLAMA_HOST = "localhost:11434"; + OLLAMA_HOST = "0.0.0.0:${toString ports.ollama}"; # Where to store LLM model files. 
HOME = "%S/ollama"; OLLAMA_MODELS = "%S/ollama/models"; diff --git a/Omni/Dev/Beryllium/OpenWebui.nix b/Omni/Dev/Beryllium/OpenWebui.nix new file mode 100644 index 0000000..7b95331 --- /dev/null +++ b/Omni/Dev/Beryllium/OpenWebui.nix @@ -0,0 +1,14 @@ +{config, ...}: let + ports = import ../../Cloud/Ports.nix; +in { + config.virtualisation.oci-containers.backend = "docker"; + config.virtualisation.oci-containers.containers.open-webui = { + image = "ghcr.io/open-webui/open-webui:main"; + volumes = ["/var/lib/open-webui:/app/backend/data"]; + environment = { + OLLAMA_BASE_URL = "http://127.0.0.1:${toString ports.ollama}"; + PORT = toString ports.open-webui; + }; + extraOptions = ["--network=host"]; + }; +} |