fixes
This commit is contained in:
parent
2b23fd043b
commit
9021c2beba
@@ -11,7 +11,7 @@ var_cpu="${var_cpu:-4}"
 var_ram="${var_ram:-4096}"
 var_disk="${var_disk:-24}"
 var_os="${var_os:-ubuntu}"
-var_version="${var_version:-22.04}"
+var_version="${var_version:-24.04}"
 
 header_info "$APP"
 variables
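The version bump above only changes the fallback value; a caller can still override it. A minimal standalone sketch of the ${var:-default} expansion used here, assuming plain bash outside the Proxmox helper framework:

#!/usr/bin/env bash
# Hypothetical example of the default-value expansion used above.
var_version="${var_version:-24.04}"   # 24.04 applies only when var_version is unset or empty
echo "Ubuntu ${var_version}"
# Run with nothing set:            prints "Ubuntu 24.04"
# Run as var_version=22.04 ./x.sh: prints "Ubuntu 22.04"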
@@ -40,4 +40,4 @@ description
 msg_ok "Completed Successfully!\n"
 echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
 echo -e "${INFO}${YW} Access it using the following URL:${CL}"
-echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:14434${CL}"
+echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:11434${CL}"
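The corrected URL matches Ollama's default API port, 11434. A quick post-install reachability check, assuming curl is available and the container address is known (the address below is a placeholder):

# Hypothetical check; replace IP with the real container address.
IP="192.168.1.50"
curl -fsS "http://${IP}:11434/" || echo "Ollama is not reachable on port 11434"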
@@ -12,7 +12,7 @@
   "documentation": "https://github.com/librespeed/speedtest/blob/master/doc.md",
   "config_path": "",
   "website": "https://librespeed.org",
-  "logo": "https://github.com/librespeed/speedtest/blob/master/.logo/icon_huge.png",
+  "logo": "https://raw.githubusercontent.com/selfhst/icons/refs/heads/main/svg/librespeed.svg",
   "description": "No Flash, No Java, No Websocket, No Bullshit. This is a very lightweight speed test implemented in Javascript, using XMLHttpRequest and Web Workers.",
   "install_methods": [
     {
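Only the logo URL changes in this hunk. A quick check that the new asset resolves, assuming curl is available:

# Hypothetical one-liner; -f fails on HTTP errors, -I sends a HEAD request.
curl -fsI "https://raw.githubusercontent.com/selfhst/icons/refs/heads/main/svg/librespeed.svg" > /dev/null \
  && echo "logo URL OK" || echo "logo URL returned an error"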
frontend/public/json/ollama.json (new file, 35 lines)
@@ -0,0 +1,35 @@
+{
+  "name": "Ollama",
+  "slug": "ollama",
+  "categories": [
+    20
+  ],
+  "date_created": "2024-10-26",
+  "type": "ct",
+  "updateable": true,
+  "privileged": false,
+  "interface_port": 11434,
+  "documentation": "https://github.com/ollama/ollama/tree/main/docs",
+  "config_path": "/usr/local/lib/ollama",
+  "website": "https://ollama.com/",
+  "logo": "https://raw.githubusercontent.com/selfhst/icons/refs/heads/main/svg/ollama.svg",
"description": "Ollama is a tool that allows you to run large language models locally on your own computer. This means you can experiment with and use these AI models without needing an internet connection or relying on cloud-based services. It simplifies the process of managing and running these models, offering a way to keep your data private and potentially work faster. 1 You can use Ollama to create local chatbots, conduct AI research, develop privacy-focused AI applications, and integrate AI into existing systems.",
|
||||
"install_methods": [
|
||||
{
|
||||
"type": "default",
|
||||
"script": "ct/ollama.sh",
|
||||
"resources": {
|
||||
"cpu": 4,
|
||||
"ram": 4096,
|
||||
"hdd": 24,
|
||||
"os": "Ubuntu",
|
||||
"version": "24.04"
|
||||
}
|
||||
}
|
||||
],
|
||||
"default_credentials": {
|
||||
"username": null,
|
||||
"password": null
|
||||
},
|
||||
"notes": []
|
||||
}
|
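The new metadata file can be sanity-checked locally before committing; a minimal sketch, assuming python3 is on PATH and the command runs from the repository root:

# Hypothetical validation step; json.tool only confirms the file parses as JSON.
python3 -m json.tool frontend/public/json/ollama.json > /dev/null \
  && echo "ollama.json: valid JSON" \
  || echo "ollama.json: parse error"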