diff --git a/ollama.environment.sh b/ollama.environment.sh index acf16f9..1bb5f05 100644 --- a/ollama.environment.sh +++ b/ollama.environment.sh @@ -2,7 +2,7 @@ yes|/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"&&eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)" # Install Terraform, Flux, and htop using Homebrew -yes|brew install kubectx opentofu fluxcd/tap/flux htop kind derailed/k9s/k9s +yes|brew install kubectx opentofu fluxcd/tap/flux htop kind derailed/k9s/k9s kubeseal # Initialize kind cluster cat <> kind-config.yaml @@ -59,11 +59,24 @@ flux create hr ollama -n default --interval=10m --source=HelmRepository/ollama-c # check the resources flux stats +# Install the sealed-secrets controller into the cluster +kubectl apply -f https://github.com/bitnami-labs/sealed-secrets/releases/download/v0.27.0/controller.yaml + +# Install the kubeseal CLI (NOTE(review): kubeseal was already added to the brew install line above — this is redundant) +brew install kubeseal + +kubectl create secret generic openwebui-secret-values --from-file=values.yaml=./values.yaml --dry-run=client -o yaml > openwebui-secret.yaml +kubeseal < openwebui-secret.yaml > mysealedsecret.yaml +kubectl apply -f mysealedsecret.yaml + # openwebui # copy values.yaml https://github.com/open-webui/helm-charts/blob/main/charts/open-webui/values.yaml -# create secret with values.yaml -k create secret generic openwebui-secret-values --from-file=values.yaml=../values.yaml -k describe secrets openwebui-secret-values +# create a SealedSecret from values.yaml (NOTE(review): this repeats the secret/kubeseal block added earlier in this hunk — keep only one copy) + +kubectl create secret generic openwebui-secret-values --from-file=values.yaml=./values.yaml --dry-run=client -o yaml > openwebui-secret.yaml
+kubeseal < openwebui-secret.yaml > mysealedsecret.yaml +kubectl apply -f mysealedsecret.yaml + # create git source flux create source git -n default openwebui --url=https://github.com/open-webui/helm-charts --branch=main # create helm release diff --git a/openwebui.yaml b/openwebui.yaml deleted file mode 100644 index 9a53ba0..0000000 --- a/openwebui.yaml +++ /dev/null @@ -1,99 +0,0 @@ 
-nameOverride: "" - -ollama: - # -- Automatically install Ollama Helm chart from https://otwld.github.io/ollama-helm/. Use [Helm Values](https://github.com/otwld/ollama-helm/#helm-values) to configure - enabled: true - # -- If enabling embedded Ollama, update fullnameOverride to your desired Ollama name value, or else it will use the default ollama.name value from the Ollama chart - fullnameOverride: "open-webui-ollama" - # -- Example Ollama configuration with nvidia GPU enabled, automatically downloading a model, and deploying a PVC for model persistence - # ollama: - # gpu: - # enabled: true - # type: 'nvidia' - # number: 1 - # models: - # - llama3 - # runtimeClassName: nvidia - # persistentVolume: - # enabled: true - -pipelines: - # -- Automatically install Pipelines chart to extend Open WebUI functionality using Pipelines: https://github.com/open-webui/pipelines - enabled: true - # -- This section can be used to pass required environment variables to your pipelines (e.g. Langfuse hostname) - extraEnvVars: [] - -# -- A list of Ollama API endpoints. These can be added in lieu of automatically installing the Ollama Helm chart, or in addition to it. 
-ollamaUrls: [] - -# -- Value of cluster domain -clusterDomain: cluster.local - -annotations: {} -podAnnotations: {} -replicaCount: 1 -# -- Open WebUI image tags can be found here: https://github.com/open-webui/open-webui/pkgs/container/open-webui -image: - repository: ghcr.io/open-webui/open-webui - tag: "latest" - pullPolicy: Always -resources: {} -ingress: - enabled: false - class: "" - # -- Use appropriate annotations for your Ingress controller, e.g., for NGINX: - # nginx.ingress.kubernetes.io/rewrite-target: / - annotations: {} - host: "" - tls: false - existingSecret: "" -persistence: - enabled: true - size: 2Gi - # -- Use existingClaim if you want to re-use an existing Open WebUI PVC instead of creating a new one - existingClaim: "" - # -- If using multiple replicas, you must update accessModes to ReadWriteMany - accessModes: - - ReadWriteOnce - storageClass: "" - selector: {} - annotations: {} - -# -- Node labels for pod assignment. -nodeSelector: {} - -# -- Tolerations for pod assignment -tolerations: [] - -# -- Affinity for pod assignment -affinity: {} - -# -- Service values to expose Open WebUI pods to cluster -service: - type: ClusterIP - annotations: {} - port: 80 - containerPort: 8080 - nodePort: "" - labels: {} - loadBalancerClass: "" - -# -- OpenAI base API URL to use. Defaults to the Pipelines service endpoint when Pipelines are enabled, and "https://api.openai.com/v1" if Pipelines are not enabled and this value is blank -openaiBaseApiUrl: "" - -# -- Additional environments variables on the output Deployment definition. Most up-to-date environment variables can be found here: https://docs.openwebui.com/getting-started/env-configuration/ -extraEnvVars: - # -- Default API key value for Pipelines. Should be updated in a production deployment, or be changed to the required API key if not using Pipelines - - name: OPENAI_API_KEY - value: "0p3n-w3bu!" 
- # valueFrom: - # secretKeyRef: - # name: pipelines-api-key - # key: api-key - # - name: OPENAI_API_KEY - # valueFrom: - # secretKeyRef: - # name: openai-api-key - # key: api-key - # - name: OLLAMA_DEBUG - # value: "1" \ No newline at end of file