feat: update LOCALAGI_LLM_API_URL to Ollama-compatible endpoint and enhance documentation

This commit is contained in:
John Doe
2026-03-04 00:42:39 -05:00
parent d49a8d04cd
commit 7ec67de15c
3 changed files with 12 additions and 2 deletions

View File

@@ -273,6 +273,11 @@ function update_script() {
exit
fi
if grep -q '^LOCALAGI_LLM_API_URL=http://127.0.0.1:8081$' /opt/localagi/.env; then
set_env_var /opt/localagi/.env "LOCALAGI_LLM_API_URL" "http://127.0.0.1:11434/v1"
msg_warn "Migrated LOCALAGI_LLM_API_URL from 127.0.0.1:8081 to 127.0.0.1:11434/v1"
fi
# Provision ROCm runtime only when AMD backend is selected.
if [[ "${BACKEND}" == "rocm7.2" ]]; then
install_rocm_runtime_debian || msg_warn "ROCm runtime package installation failed"

View File

@@ -41,7 +41,11 @@
"type": "info"
},
{
"text": "By default, LocalAGI is configured to call an external OpenAI-compatible backend at `http://127.0.0.1:8081` via `LOCALAGI_LLM_API_URL`.",
"text": "By default, LocalAGI is configured to call an OpenAI-compatible backend at `http://127.0.0.1:11434/v1` (Ollama's OpenAI-compatible API endpoint) via `LOCALAGI_LLM_API_URL`.",
"type": "info"
},
{
"text": "To use an external Ollama host, edit `/opt/localagi/.env` and set `LOCALAGI_LLM_API_URL=http://<ollama-host>:11434/v1`, then restart LocalAGI with `systemctl restart localagi`.",
"type": "info"
}
]

View File

@@ -241,12 +241,13 @@ fi
# Generate runtime configuration file used by systemd service.
# Note: `LOCALAGI_LLM_API_URL` points to an OpenAI-compatible backend endpoint.
# Defaulting to Ollama's OpenAI-compatible API avoids a dead 127.0.0.1:8081 endpoint.
msg_info "Configuring LocalAGI"
cat <<EOF >/opt/localagi/.env
LOCALAGI_MODEL=gemma-3-4b-it-qat
LOCALAGI_MULTIMODAL_MODEL=moondream2-20250414
LOCALAGI_IMAGE_MODEL=sd-1.5-ggml
LOCALAGI_LLM_API_URL=http://127.0.0.1:8081
LOCALAGI_LLM_API_URL=http://127.0.0.1:11434/v1
LOCALAGI_STATE_DIR=/opt/localagi/pool
LOCALAGI_TIMEOUT=5m
LOCALAGI_ENABLE_CONVERSATIONS_LOGGING=false