add bambu!
This commit is contained in:
32
services/ollama.nix
Normal file
32
services/ollama.nix
Normal file
@@ -0,0 +1,32 @@
# NixOS module: runs `ollama serve` as a custom systemd service.
# `pkgs` is required for the store-path reference in ExecStart below.
{ config, pkgs, ... }:

{
  systemd.services.my-ollama = {
    description = "Custom Ollama-like Service";

    # Ensure the service starts after the network is up
    after = [ "network.target" ];
    wantedBy = [ "multi-user.target" ];

    # Environment for the ollama process. Values are strings, as systemd
    # environment variables always are.
    environment = {
      # Quantize the KV cache to 8-bit to reduce VRAM use.
      OLLAMA_KV_CACHE_TYPE = "q8_0";
      OLLAMA_NUM_GPU = "1";
      GGML_CUDA_ENABLE_UNIFIED_MEMORY = "1";
      # -1 = keep loaded models resident indefinitely.
      OLLAMA_KEEP_ALIVE = "-1";
      # Model store on the large SSD; the service user must be able to
      # read and write this path.
      OLLAMA_MODELS = "/mnt/ssd1/ollama";
      OLLAMA_NUM_PARALLEL = "4";
      # CUDA/GL driver library paths as exposed on NixOS.
      LD_LIBRARY_PATH = "/run/opengl-driver/lib:/run/cudatoolkit/lib";
    };

    serviceConfig = {
      # Use the package reference so Nix finds the correct path.
      # NixOS does not provide /usr/bin, so a hard-coded
      # "/usr/bin/ollama" would fail at unit start with
      # "No such file or directory"; interpolate the store path instead.
      ExecStart = "${pkgs.ollama}/bin/ollama serve";

      # Recommended security/reliability settings
      Restart = "always";
      User = "root"; # Or a specific user if permissions allow
    };
  };
}
Reference in New Issue
Block a user