feat: Prepare for llama-server when ROCm is fixed upstream

This commit is contained in:
matthew.binning 2026-02-11 06:30:34 -08:00
parent b717ea973a
commit cfd3aeecaf
4 changed files with 61 additions and 6 deletions

7
flake.lock generated
View file

@@ -140,15 +140,16 @@
"llama-cpp": {
"flake": false,
"locked": {
"lastModified": 1755068833,
"narHash": "sha256-U2bNRei5Q+fpMmk0Oc2HVSIY6KSBhgcNNkNhGykpG2c=",
"lastModified": 1770704370,
"narHash": "sha256-atYUuXBZFbJxmswd694YwHfAWj1NClZ6mXiQbP1ABG8=",
"owner": "ggerganov",
"repo": "llama.cpp",
"rev": "bc5182272c373267352bc689e5fca276934bea2d",
"rev": "f0bfe54f552f4783588f333b90d73920a57c5096",
"type": "github"
},
"original": {
"owner": "ggerganov",
"ref": "b7984",
"repo": "llama.cpp",
"type": "github"
}