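# NixOS VM test for the ollama service: one machine serves on the
# default port and another on an alternate port.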
{ lib, ... }:
let
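  # 11434 is ollama's default listen port.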
  mainPort = 11434;
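  # An arbitrary alternate port for the second machine.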
  altPort = 11435;
in
{
  name = "ollama";
  meta.maintainers = with lib.maintainers; [ abysssol ];
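
  # Two machines: "cpu" runs ollama on the default port, "altAddress" on altPort.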
  nodes = {
    cpu =
      { ... }:
      {
        services.ollama.enable = true;
      };
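
    # The same service, listening on a non-default port.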
    altAddress =
      { ... }:
      {
        services.ollama.enable = true;
        services.ollama.port = altPort;
      };
  };
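
  # Each machine should answer a generation request on its configured port.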
  testScript = ''
    import json
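
    # Build a curl command that POSTs a JSON generation request to the ollama API.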
    def curl_request_ollama(prompt, port):
        json_prompt = json.dumps(prompt)
        return f"""curl http://127.0.0.1:{port}/api/generate -d '{json_prompt}'"""
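
    # A fixed seed and zero temperature keep the model's output deterministic.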
    prompt = {
        "model": "tinydolphin",
        "prompt": "lorem ipsum",
        "options": {
            "seed": 69,
            "temperature": 0,
        },
    }
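
    # Pair each machine with the port its ollama instance listens on.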
    vms = [
        (cpu, ${toString mainPort}),
        (altAddress, ${toString altPort}),
    ]
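
    # Boot all machines, then verify each instance answers a request.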
    start_all()
    for vm, port in vms:
        vm.wait_for_unit("multi-user.target")
        vm.wait_for_open_port(port)
        stdout = vm.succeed(curl_request_ollama(prompt, port), timeout=100)
  '';
}