add bambu!
This commit is contained in:
32
services/ollama.nix
Normal file
32
services/ollama.nix
Normal file
@@ -0,0 +1,32 @@
|
||||
{ config, pkgs, ... }:

{
  # Custom systemd unit running the Ollama server with CUDA/GPU tuning.
  systemd.services.my-ollama = {
    description = "Custom Ollama-like Service";

    # Ensure the service starts after the network is up,
    # and is pulled in at normal multi-user boot.
    after = [ "network.target" ];
    wantedBy = [ "multi-user.target" ];

    # Environment variables read by the ollama daemon.
    environment = {
      # Quantized (q8_0) KV cache to reduce VRAM usage.
      OLLAMA_KV_CACHE_TYPE = "q8_0";
      OLLAMA_NUM_GPU = "1";
      GGML_CUDA_ENABLE_UNIFIED_MEMORY = "1";
      # -1 keeps loaded models resident indefinitely.
      OLLAMA_KEEP_ALIVE = "-1";
      # Model store on the large SSD mount.
      OLLAMA_MODELS = "/mnt/ssd1/ollama";
      OLLAMA_NUM_PARALLEL = "4";
      # Adding CUDA paths for NixOS
      LD_LIBRARY_PATH = "/run/opengl-driver/lib:/run/cudatoolkit/lib";
    };

    serviceConfig = {
      # Use the package reference so Nix finds the correct path:
      # /usr/bin does not exist on NixOS, so a literal
      # "/usr/bin/ollama" would make the unit fail to start.
      ExecStart = "${pkgs.ollama}/bin/ollama serve";

      # Recommended security/reliability settings
      Restart = "always";
      User = "root"; # Or a specific user if permissions allow
    };
  };
}
|
||||
@@ -1,6 +1,7 @@
|
||||
# container and vm config
|
||||
|
||||
{ ... }: {
|
||||
{ ... }:
|
||||
{
|
||||
virtualisation = {
|
||||
containers.enable = true;
|
||||
podman = {
|
||||
@@ -11,6 +12,9 @@
|
||||
};
|
||||
libvirtd.enable = true;
|
||||
spiceUSBRedirection.enable = true;
|
||||
|
||||
incus.enable = true;
|
||||
lxc.enable = true;
|
||||
};
|
||||
|
||||
programs.virt-manager.enable = true;
|
||||
|
||||
@@ -5,7 +5,12 @@
|
||||
services.xserver.enable = true;
|
||||
services.xserver.wacom.enable = true;
|
||||
services.xserver.videoDrivers = [ "nvidia" ];
|
||||
hardware.nvidia.open = false;
|
||||
|
||||
hardware.nvidia = {
|
||||
open = false;
|
||||
modesetting.enable = true;
|
||||
powerManagement.enable = false;
|
||||
};
|
||||
|
||||
xdg.portal = {
|
||||
enable = true;
|
||||
|
||||
Reference in New Issue
Block a user