localai
docker-compose
Pull the code
wget https://github.com/mudler/LocalAI/archive/refs/tags/v2.9.0.tar.gz
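The configuration step below assumes the downloaded archive has already been unpacked, for example:

# unpack the release tarball; this produces the LocalAI-2.9.0/ directory used below
tar -zxvf v2.9.0.tar.gz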
Configuration
ash-4.4# cd LocalAI-2.9.0/
ash-4.4# cat docker-compose.yaml
version: '3.6'

services:
  api:
    image: quay.io/go-skynet/local-ai:latest
    build:
      context: .
      dockerfile: Dockerfile
    ports:
      - 8080:8080
    env_file:
      - .env
    volumes:
      - ./models:/models:cached
      # - ./images/:/tmp/generated/images/
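The compose file reads runtime settings from a .env file in the same directory. A minimal sketch is shown below; the variable names follow the .env.example bundled with the LocalAI repository and may differ between releases, so check the file shipped with your version:

# .env - illustrative values only
THREADS=4            # CPU threads used for inference
MODELS_PATH=/models  # path inside the container, matches the volume mount above
DEBUG=true           # verbose logging while testing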
Configure the model
wget https://gpt4all.io/models/ggml-gpt4all-j.bin -O models/ggml-gpt4all-j
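The models directory is bind-mounted to /models inside the container (see the compose file above), so a quick check that the file landed there is enough:

# confirm the model file is in place before starting the service
ls -lh models/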
Start the service
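The original notes do not show the start command; with the compose file and model in place, bringing the stack up is the standard Compose workflow:

# start LocalAI in the background (use docker-compose with the older v1 CLI)
docker compose up -d
# follow the logs of the "api" service until the model is loaded
docker compose logs -f api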
View the models
ash-4.4# curl "http://ip:8080/v1/models"
{
  "object": "list",
  "data": [
    {
      "id": "ggml-gpt4all-j",
      "object": "model"
    }
  ]
}
Verify the model
ash-4.4# curl http://ip:8080/v1/chat/completions -H "Content-Type: application/json" -d '{"model": "ggml-gpt4all-j","messages": [{"role": "user", "content": "How are you?"}],"temperature": 0.9 }'
{"created":1709363451,"object":"chat.completion","id":"0bc14f90-8533-47cd-978c-b65fba7a7eeb","model":"ggml-gpt4all-j","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","content":"I'm sorry, but as an AI language model, I am programmed to remain neutral and not engage in personal conversations. Is there anything else I can assist you with?"}}],"usage":{"prompt_tokens":0,"completion_tokens":0,"total_tokens":0}}
k8sgpt
Configure auth
k8sgpt auth add --backend localai --model ggml-gpt4all-j --baseurl http://ip:8080/v1
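To confirm the backend was registered, the configured providers can be listed afterwards:

# "localai" should appear among the configured AI backends
k8sgpt auth list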
Analyze
k8sgpt analyze --explain -b localai --filter Service
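The --filter value can be any resource kind k8sgpt knows about; two illustrative variations (output-format support may vary slightly between CLI versions):

# list the analyzers/filters available in this CLI build
k8sgpt filters list
# analyze Pods instead of Services and emit machine-readable output
k8sgpt analyze --explain -b localai --filter Pod --output json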
For detailed usage instructions, see the project's GitHub repository.
k8sgpt-operator
Configure and install
helm repo add k8sgpt https://charts.k8sgpt.ai/
helm repo update
helm pull k8sgpt/k8sgpt-operator --untar
# adjust the chart values as needed, then install from the unpacked chart
helm install k8sgpt ./k8sgpt-operator -n k8sgpt-operator-system --create-namespace
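After the install, the operator pod should come up in the target namespace before any K8sGPT objects are created:

# the k8sgpt-operator controller pod should reach Running state
kubectl get pods -n k8sgpt-operator-system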
Create the K8sGPT object
[root@master-1 ~]# cat k8sgpt.yaml
---
apiVersion: core.k8sgpt.ai/v1alpha1
kind: K8sGPT
metadata:
  name: k8sgpt-local-ai
  namespace: default
spec:
  ai:
    enabled: true
    model: ggml-gpt4all-j
    backend: localai
    baseUrl: http://ip:8080/v1
  noCache: false
  repository: ghcr.io/k8sgpt-ai/k8sgpt
  version: v0.3.8
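The custom resource still has to be applied; the operator should then spin up a k8sgpt workload in the same namespace that runs the analysis:

# create the K8sGPT object defined above
kubectl apply -f k8sgpt.yaml
# a k8sgpt deployment should appear in the namespace of the custom resource
kubectl get pods -n default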
View the analysis results
kubectl get result -A
NAMESPACE   NAME                                   KIND      BACKEND
default     ingressapisixapisixadmin               Service   localai
default     ingressapisixapisixdashboard           Service   localai
default     ingressapisixapisixgateway             Service   localai
default     ingressapisixapisixingresscontroller   Service   localai
default     ingressapisixapisixmetrics             Service   localai
default     ingressapisixetcd                      Service   localai
default     ingressapisixetcdheadless              Service   localai
default     ingressapisixingressapisixgateway      Service   localai
default     kubesystemkubecontrollermanager        Service   localai
default     kubesystemkubeproxy                    Service   localai
default     kubesystemkubescheduler                Service   localai
default     monitoringalertmanagermain             Service   localai
default     monitoringalertmanageroperated         Service   localai
default     monitoringgrafana                      Service   localai
default     monitoringkubestatemetrics             Service   localai
default     monitoringnodeexporter                 Service   localai
default     monitoringprometheusadapter            Service   localai
default     monitoringprometheusk8s                Service   localai
default     monitoringprometheusoperated           Service   localai
default     monitoringprometheusoperator           Service   localai

kubectl describe result monitoringgrafana
References
k8sgpt (GitHub): https://github.com/k8sgpt-ai/k8sgpt
k8sgpt-operator (GitHub): https://github.com/k8sgpt-ai/k8sgpt-operator
LocalAI (GitHub): https://github.com/mudler/LocalAI
LocalAI: https://localai.io/