Tweak llamafile port

This commit is contained in:
doylet 2025-08-13 20:28:44 +10:00
parent 579ef3c118
commit cb4c40af43

View File

@@ -118,7 +118,7 @@ lua <<EOF
llamafile = function()
return require("codecompanion.adapters").extend("openai_compatible", {
env = {
-url = "http://127.0.0.1:8080", -- optional: default value is ollama url http://127.0.0.1:11434
+url = "http://127.0.0.1:7950", -- optional: default value is ollama url http://127.0.0.1:11434
chat_url = "/v1/chat/completions", -- optional: default value, override if different
-- api_key = "OpenAI_API_KEY", -- optional: if your endpoint is authenticated
-- models_endpoint = "/v1/models", -- optional: attaches to the end of the URL to form the endpoint to retrieve models