Change LLM port

This commit is contained in:
doylet 2025-08-13 22:04:29 +10:00
parent 778ed542a6
commit c4e98d0f18

View File

@@ -118,7 +118,7 @@ lua <<EOF
llamafile = function()
return require("codecompanion.adapters").extend("openai_compatible", {
env = {
url = "http://127.0.0.1:7950", -- optional: default value is ollama url http://127.0.0.1:11434
url = "http://127.0.0.1:7483", -- optional: default value is ollama url http://127.0.0.1:11434
chat_url = "/v1/chat/completions", -- optional: default value, override if different
-- api_key = "OpenAI_API_KEY", -- optional: if your endpoint is authenticated
-- models_endpoint = "/v1/models", -- optional: attaches to the end of the URL to form the endpoint to retrieve models