diff --git a/README.md b/README.md
index ef1a8d50..f6ae9eea 100644
--- a/README.md
+++ b/README.md
@@ -47,7 +47,9 @@ spec:
 # anonymized: false
 # language: english
   noCache: false
-  version: v0.3.17
+  image:
+    repository: ghcr.io/k8sgpt-ai/k8gpt
+    tag: v0.3.8
 #integrations:
 # trivy:
 #  enabled: true
@@ -101,14 +103,17 @@ metadata:
   name: k8sgpt-sample
   namespace: k8sgpt-operator-system
 spec:
-  model: gpt-3.5-turbo
-  backend: openai
+  ai:
+    model: gpt-3.5-turbo
+    backend: openai
+    enabled: true
+    secret:
+      name: k8sgpt-sample-secret
+      key: openai-api-key
   noCache: false
-  version: v0.3.0
-  enableAI: true
-  secret:
-    name: k8sgpt-sample-secret
-    key: openai-api-key
+  image:
+    repository: ghcr.io/k8sgpt-ai/k8gpt
+    tag: v0.3.8
   remoteCache:
     credentials:
       name: k8sgpt-sample-cache-secret
@@ -143,14 +148,17 @@ metadata:
   name: k8sgpt-sample
   namespace: k8sgpt-operator-system
 spec:
-  model: gpt-3.5-turbo
-  backend: openai
+  ai:
+    model: gpt-3.5-turbo
+    backend: openai
+    enabled: true
+    secret:
+      name: k8sgpt-sample-secret
+      key: openai-api-key
   noCache: false
-  version: v0.3.0
-  enableAI: true
-  secret:
-    name: k8sgpt-sample-secret
-    key: openai-api-key
+  image:
+    repository: ghcr.io/k8sgpt-ai/k8gpt
+    tag: v0.3.8
   remoteCache:
     credentials:
       name: k8sgpt-sample-cache-secret
@@ -194,7 +202,9 @@ spec:
   baseUrl: https://k8sgpt.openai.azure.com/
   engine: llm
   noCache: false
-  version: v0.3.8
+  image:
+    repository: ghcr.io/k8sgpt-ai/k8gpt
+    tag: v0.3.8
 EOF
 ```
 
@@ -224,7 +234,9 @@ spec:
   backend: localai
   baseUrl: http://local-ai.local-ai.svc.cluster.local:8080/v1
   noCache: false
-  version: v0.3.8
+  image:
+    repository: ghcr.io/k8sgpt-ai/k8gpt
+    tag: v0.3.8
 EOF
 ```
 Note: ensure that the value of `baseUrl` is a properly constructed [DNS name](https://kubernetes.io/docs/concepts/services-networking/dns-pod-service/#services) for the LocalAI Service. It should take the form: `http://local-ai.<namespace>.svc.cluster.local:8080/v1`.
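
As a quick illustration of that note (a sketch only, assuming LocalAI was installed into a namespace named `local-ai` with a Service also named `local-ai`, as in the example manifest above):

```sh
# Confirm the LocalAI Service name and namespace (both assumed to be "local-ai" here).
kubectl get svc local-ai -n local-ai

# The in-cluster DNS name follows <service>.<namespace>.svc.cluster.local,
# so the resulting baseUrl for the K8sGPT resource would be:
#   http://local-ai.local-ai.svc.cluster.local:8080/v1
```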