[Unit]
Description=llama.cpp (full-vulkan) inference server
After=network-online.target
Wants=network-online.target

[Container]
Image=localhost/lamaswap:latest
#AutoRemove=yes
#PublishPort=8080:8080
Network=none
Volume=%h/models:/models:ro,Z
Volume=%h/config.yaml:/config.yaml:ro,Z
# Expose the host GPU render nodes to the container for Vulkan inference
AddDevice=/dev/dri
Exec=

#[Service]
#Restart=always

[Install]
WantedBy=default.target
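
# Usage sketch (the filename below is an assumption, not given in the unit itself):
# save this file as %h/.config/containers/systemd/llamaswap.container, then run
#   systemctl --user daemon-reload
#   systemctl --user start llamaswap.service
# Quadlet derives the generated service name from the .container filename.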