 <title>&owui; override file with &ollama; included</title>
 <para>
  The following override file installs &ollama; during the &owui;
  installation.
 </para>
<screen>global:
  imagePullSecrets:
[...]
ollama:
[...]
      number: 1
  persistentVolume:<co xml:id="co-ollama-persistent1"/>
    enabled: true
    storageClass: local-path
pipelines:
  enabled: true
  persistence:
    storageClass: local-path
  extraEnvVars: <co xml:id="co-pipelines-extraenvvars"/>
  - name: PIPELINES_URLS <co xml:id="co-pipelines-extraenvvars-pipelines-urls"/>
    value: "https://raw.githubusercontent.com/SUSE/suse-ai-observability-extension/refs/heads/main/integrations/oi-filter/suse_ai_filter.py"
  - name: OTEL_SERVICE_NAME <co xml:id="co-pipelines-extraenvvars-otel-service-name"/>
    value: "Open WebUI"
  - name: OTEL_EXPORTER_HTTP_OTLP_ENDPOINT <co xml:id="co-pipelines-extraenvvars-otel-exporter-http-otlp-endpoint"/>
    value: "http://opentelemetry-collector.suse-observability.svc.cluster.local:4318"
  - name: PRICING_JSON <co xml:id="co-pipelines-extraenvvars-pricing-json"/>
    value: "https://raw.githubusercontent.com/SUSE/suse-ai-observability-extension/refs/heads/main/integrations/oi-filter/pricing.json"
ingress:
  enabled: true
  class: ""
  annotations:
    nginx.ingress.kubernetes.io/ssl-redirect: "true"
    nginx.ingress.kubernetes.io/proxy-body-size: "1024m"
  host: suse-ollama-webui<co xml:id="co-ollama-webui"/>
  tls: true
extraEnvVars:
[...]
- name: MILVUS_URI
  value: http://milvus.<replaceable>SUSE_AI_NAMESPACE</replaceable>.svc.cluster.local:19530
- name: INSTALL_NLTK_DATASETS<co xml:id="co-ollama-extravars-nltk"/>
  value: "true"
- name: OMP_NUM_THREADS
  value: "1"
- name: OPENAI_API_KEY <co xml:id="co-extraenvvars-openai-api-key"/>
  value: "0p3n-w3bu!"</screen>
 <calloutlist>
  <callout arearefs="co-ollama-localpath1">
   <para>
    Use <option>local-path</option> storage only for testing purposes. For
    production use, we recommend a storage solution better suited to
    persistent storage. To use &sstorage;, specify
    <literal>longhorn</literal>.
   </para>
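   <para>
    For example, to use &sstorage; for the &ollama; persistent volume, the
    relevant part of the override file changes as follows:
   </para>
<screen>  persistentVolume:
    enabled: true
    storageClass: longhorn</screen>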
  </callout>
  <callout arearefs="co-ollama-models">
   <para>
    Specifies that two large language models (LLM) will be loaded in
[...]
    when the container is restarted.
   </para>
  </callout>
  <callout arearefs="co-pipelines-extraenvvars">
   <para>
    The environment variables that are made available to the pipelines
    runtime container.
   </para>
  </callout>
  <callout arearefs="co-pipelines-extraenvvars-pipelines-urls">
   <para>
    A list of pipeline URLs to be downloaded and installed by default.
    Individual URLs are separated by a semicolon (<literal>;</literal>).
   </para>
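   <para>
    For example, to install both filters mentioned in this document with a
    single variable, join their URLs with a semicolon:
   </para>
<screen>- name: PIPELINES_URLS
  value: "https://raw.githubusercontent.com/SUSE/suse-ai-observability-extension/refs/heads/main/integrations/oi-filter/suse_ai_filter.py;https://raw.githubusercontent.com/SUSE/suse-ai-observability-extension/refs/heads/main/integrations/oi-filter/conversation_turn_limit_filter.py"</screen>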
   <para condition="deployment_airgap">
    For air-gapped deployments, you need to provide the pipelines at URLs
    that are accessible from the local host, such as an internal GitLab
    instance.
   </para>
  </callout>
  <callout arearefs="co-pipelines-extraenvvars-otel-service-name">
   <para>
    The service name that appears in traces and topological
    representations in &sobservability;.
   </para>
  </callout>
  <callout arearefs="co-pipelines-extraenvvars-otel-exporter-http-otlp-endpoint">
   <para>
    The endpoint of the &otelemetry; collector. Make sure to use the
    collector's HTTP (OTLP/HTTP) port, not its gRPC port.
   </para>
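   <para>
    For reference, assuming a default collector configuration, the two
    endpoint variants are shown below; only the HTTP form is valid for
    this variable:
   </para>
<screen># OTLP over HTTP, default port 4318 (use this port)
http://opentelemetry-collector.suse-observability.svc.cluster.local:4318
# OTLP over gRPC, default port 4317 (not valid here)
http://opentelemetry-collector.suse-observability.svc.cluster.local:4317</screen>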
  </callout>
  <callout arearefs="co-pipelines-extraenvvars-pricing-json">
   <para>
    A file that defines model cost multipliers for cost estimation. You
    can customize it experimentally to match your actual infrastructure.
   </para>
   <para condition="deployment_airgap">
    For air-gapped deployments, you need to provide the pricing file at a
    URL that is accessible from the local host, such as an internal GitLab
    instance.
   </para>
  </callout>
  <callout arearefs="co-ollama-extravars">
[...]
    licensing information.
   </para>
  </callout>
  <callout arearefs="co-extraenvvars-openai-api-key">
   <para>
    The API key used for communication between &owui; and &owui; Pipelines.
    The default value is <quote>0p3n-w3bu!</quote>.
   </para>
  </callout>
 </calloutlist>
</example>
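<para>
 After you save the override file, for example, as
 <filename>owui_custom_overrides.yaml</filename>, pass it to the Helm
 installation command. The following command is a sketch only; the chart
 location and the release name are assumptions that may differ in your
 environment:
</para>
<screen>helm upgrade --install open-webui \
 oci://dp.apps.rancher.io/charts/open-webui \
 -n <replaceable>SUSE_AI_NAMESPACE</replaceable> \
 -f owui_custom_overrides.yaml</screen>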
<example xml:id="owui-ollama-deploy-separate">
 <title>&owui; override file with &ollama; installed separately</title>
 <para>
  The following override file installs &ollama; separately from the &owui;
  installation.
 </para>
<screen>global:
  imagePullSecrets:
[...]
extraEnvVars:
[...]
- name: ENABLE_OTEL<co xml:id="co-owui-otel1"/>
  value: "true"
- name: OTEL_EXPORTER_OTLP_ENDPOINT<co xml:id="co-owui-otel2"/>
  value: http://opentelemetry-collector.observability.svc.cluster.local:4317<co xml:id="co-llama-otel"/>
- name: OMP_NUM_THREADS
  value: "1"</screen>
 <calloutlist>
  <callout arearefs="co-ollama-localpath4 co-ollama-localpath5">
   <para>
[...]
   </para>
  </callout>
 </calloutlist>
</example>
<example xml:id="owui-ollama-deploy-pipelines">
 <title>&owui; override file with pipelines enabled</title>
 <para>
  The following override file installs &ollama; separately and enables
  &owui; pipelines. The installed pipeline is a simple filter that limits
  the number of question and answer turns during an LLM chat.
 </para>
 <tip>
  <para>
   Pipelines normally require additional configuration, provided either via
   environment variables or via the &owui; Web UI.
  </para>
 </tip>
<screen>global:
  imagePullSecrets:
  - application-collection
  <phrase condition="deployment_airgap">imageRegistry: <replaceable>LOCAL_DOCKER_REGISTRY_URL</replaceable>:5043</phrase>
ollamaUrls:
- http://ollama.<replaceable>SUSE_AI_NAMESPACE</replaceable>.svc.cluster.local:11434
persistence:
  enabled: true
  storageClass: local-path
ollama:
  enabled: false
pipelines:
  enabled: true
  persistence:
    storageClass: local-path
  extraEnvVars:
  - name: PIPELINES_URLS <co xml:id="co-extraenvvars-pipelines-urls"/>
    value: "https://raw.githubusercontent.com/SUSE/suse-ai-observability-extension/refs/heads/main/integrations/oi-filter/conversation_turn_limit_filter.py"
ingress:
  enabled: true
  class: ""
  annotations:
    nginx.ingress.kubernetes.io/ssl-redirect: "true"
  host: suse-ollama-webui
  tls: true
[...]</screen>
 <calloutlist>
  <callout arearefs="co-extraenvvars-pipelines-urls">
   <para>
    A list of pipeline URLs to be downloaded and installed by default.
    Individual URLs are separated by a semicolon (<literal>;</literal>).
   </para>
   <para condition="deployment_airgap">
    For air-gapped deployments, you need to provide the pipelines at URLs
    that are accessible from the local host, such as an internal GitLab
    instance.
   </para>
  </callout>
 </calloutlist>
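 <para>
  To verify that the filter was downloaded and loaded, you can inspect the
  log of the pipelines container after the deployment. The label selector
  below is an assumption and may need adjusting to your deployment:
 </para>
<screen>kubectl logs -n <replaceable>SUSE_AI_NAMESPACE</replaceable> \
 -l app.kubernetes.io/component=pipelines</screen>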
</example>
<example xml:id="owui-ollama-deploy-vllm" condition="deployment_standard">
 <title>&owui; override file with a connection to &vllm;</title>
 <para>