# NixOS module: local AI tooling — Ollama (ROCm-accelerated) as a system
# service, plus Docker and desktop AI applications for the primary user.
{ pkgs, sys, ... }: {

  # Ollama LLM server with AMD ROCm acceleration.
  services.ollama = {
    enable = true;
    port = 11434;                       # default Ollama API port
    host = "127.0.0.1";                 # bind to loopback only — not exposed on the LAN
    home = "/var/lib/ollama";
    group = "users";
    models = "/var/lib/ollama/models";
    # Models pulled automatically when the service starts.
    loadModels = [ "llama3.2:3b" "llama3.1:8b" "codellama:13b" ];
    package = pkgs.ollama-rocm;
    # Overrides the reported GPU ISA for ROCm (HSA_OVERRIDE_GFX_VERSION).
    # "11.0.0" targets gfx1100/RDNA3 — NOTE(review): confirm against the
    # installed GPU before relying on acceleration.
    rocmOverrideGfx = "11.0.0";
    acceleration = "rocm";
  };

  # Per-user packages for the primary account.
  home-manager.users."${sys.username}" = {
    home.packages = with pkgs; [
      lmstudio
      docker
      docker-compose
      # Stub "ai" helper command. writeShellApplication supplies its own
      # shebang and `set -euo pipefail` prelude and runs shellcheck, so no
      # explicit "#!/bin/bash" belongs in `text`. runtimeInputs are placed
      # on PATH for the script.
      (writeShellApplication {
        name = "ai";
        runtimeInputs = [
          libnotify
          jq
          git
        ];
        text = ''
          # TODO: implement the "ai" helper; libnotify/jq/git are on PATH.
        '';
      })
    ];
  };

  virtualisation.docker = {
    enable = true;
    # Keep image/container data on the dedicated /docker filesystem.
    daemon.settings.data-root = "/docker";
    rootless = {
      enable = true;
      # Export DOCKER_HOST so user sessions talk to the rootless socket.
      setSocketVariable = true;
    };
  };

  # NOTE(review): "docker" group membership grants root-equivalent access to
  # the system daemon; with rootless enabled above this may be unnecessary.
  users.users.${sys.username}.extraGroups = [ "docker" ];
}