Commit 0fffcbf

Support for ollama
Parent: 5d10eb3

6 files changed (+66 −16 lines)

PolyAIExample/PolyAIExample.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved

Lines changed: 2 additions & 2 deletions
(Generated file; diff not rendered.)

PolyAIExample/PolyAIExample/ApiKeysIntroView.swift

Lines changed: 10 additions & 3 deletions
@@ -13,8 +13,10 @@ struct ApiKeyIntroView: View {
    @State private var anthropicAPIKey = ""
    @State private var openAIAPIKey = ""
    @State private var geminiAPIKey = ""
+   @State private var ollamaLocalHostURL = ""
    @State private var anthropicConfigAdded: Bool = false
    @State private var openAIConfigAdded: Bool = false
+   @State private var ollamaConfigAdded: Bool = false
    @State private var geminiConfigAdded: Bool = false
 
    @State private var configurations: [LLMConfiguration] = []
@@ -46,6 +48,12 @@ struct ApiKeyIntroView: View {
                apiKey: $geminiAPIKey) {
                   configurations.append(.gemini(apiKey: geminiAPIKey))
                }
+               LLMConfigurationView(
+                  provider: "Ollama",
+                  configurationAdded: $ollamaConfigAdded,
+                  apiKey: $ollamaLocalHostURL) {
+                     configurations.append(.ollama(url: ollamaLocalHostURL))
+                  }
             }
             .buttonStyle(.bordered)
             .padding()
@@ -63,7 +71,7 @@ struct ApiKeyIntroView: View {
             Spacer()
          }
          .padding()
-         .navigationTitle("Enter API Keys")
+         .navigationTitle("Enter API Keys or URLs")
       }
    }
 }
@@ -75,11 +83,10 @@ struct LLMConfigurationView: View {
    @Binding var apiKey: String
    let addConfig: () -> Void
 
-
    var body: some View {
       VStack(alignment: .leading) {
          HStack {
-            TextField("Enter \(provider) API Key", text: $apiKey)
+            TextField("Enter \(provider) API Key or URL", text: $apiKey)
             Button {
                addConfig()
                configurationAdded = true
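
For context, a minimal sketch of how the example app can hand the accumulated configurations to the library once the user has added them. The factory entry point PolyAIServiceFactory.serviceWith(_:) is not shown in this diff and is assumed here:

import PolyAI

// Hypothetical wiring (not part of this commit): the configurations
// array collected by ApiKeyIntroView, now carrying an Ollama entry,
// is used to build a single PolyAI service.
let configurations: [LLMConfiguration] = [
   .anthropic(apiKey: "YOUR_ANTHROPIC_KEY"),
   .ollama(url: "http://localhost:11434"), // Ollama's default local address
]
let service: PolyAIService = PolyAIServiceFactory.serviceWith(configurations) // assumed entry point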

PolyAIExample/PolyAIExample/MessageDemoView.swift

Lines changed: 14 additions & 4 deletions
@@ -20,10 +20,13 @@ struct MessageDemoView: View {
    @State private var selectedImagesEncoded: [String] = []
    @State private var selectedSegment: LLM = .anthropic
 
-   enum LLM {
+   enum LLM: String, Identifiable, CaseIterable {
       case openAI
       case anthropic
       case gemini
+      case llama3
+
+      var id: String { rawValue }
    }
 
    var body: some View {
@@ -80,6 +83,13 @@ struct MessageDemoView: View {
               model: "gemini-1.5-pro-latest", messages: [
                  .init(role: .user, content: prompt)
               ], maxTokens: 2000)
+         case .llama3:
+            parameters = .ollama(
+               model: "llama3",
+               messages: [
+                  .init(role: .user, content: prompt)
+               ],
+               maxTokens: 1000)
          }
          try await observable.streamMessage(parameters: parameters)
       }
@@ -93,9 +103,9 @@ struct MessageDemoView: View {
 
    var picker: some View {
       Picker("Options", selection: $selectedSegment) {
-         Text("Anthropic").tag(LLM.anthropic)
-         Text("OpenAI").tag(LLM.openAI)
-         Text("Gemini").tag(LLM.gemini)
+         ForEach(LLM.allCases) { llm in
+            Text(llm.rawValue).tag(llm)
+         }
       }
       .pickerStyle(SegmentedPickerStyle())
       .padding()
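
The picker refactor above relies on a standard SwiftUI pattern: making the enum CaseIterable and Identifiable lets ForEach enumerate allCases, so a new provider such as llama3 appears in the segmented control without editing the picker itself. A minimal, self-contained reduction of that pattern:

import SwiftUI

// Adding a case to LLM automatically adds a segment, because the
// picker iterates allCases instead of listing each provider by hand.
enum LLM: String, Identifiable, CaseIterable {
   case openAI, anthropic, gemini, llama3
   var id: String { rawValue }
}

struct LLMPicker: View {
   @State private var selected: LLM = .anthropic

   var body: some View {
      Picker("Options", selection: $selected) {
         ForEach(LLM.allCases) { llm in
            Text(llm.rawValue).tag(llm)
         }
      }
      .pickerStyle(.segmented)
   }
}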

Sources/PolyAI/Interfaces/Parameters/LLMParameter.swift

Lines changed: 8 additions & 0 deletions
@@ -33,12 +33,20 @@ public enum LLMParameter {
    ///   - maxTokens: The maximum number of tokens to generate.
    case gemini(model: String, messages: [LLMMessage], maxTokens: Int)
 
+   /// Represents a configuration for interacting with Ollama's models.
+   /// - Parameters:
+   ///   - model: The specific model, e.g., "llama3".
+   ///   - messages: An array of messages to send to the model.
+   ///   - maxTokens: The maximum number of tokens to generate.
+   case ollama(model: String, messages: [LLMMessage], maxTokens: Int)
+
    /// A computed property that returns the name of the LLM service based on the case.
    var llmService: String {
       switch self {
       case .openAI: return "OpenAI"
       case .anthropic: return "Anthropic"
       case .gemini: return "Gemini"
+      case .ollama(let model, _, _): return model
       }
    }
 }
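
Constructing the new case mirrors the other providers. One wrinkle worth noting: for .ollama, llmService returns the model name rather than a fixed provider string, so error messages will say "llama3" where the other branches would say "OpenAI" or "Anthropic". A short sketch, reusing the LLMMessage initializer shape seen in MessageDemoView above:

// Building an Ollama request with the case added above.
let parameters: LLMParameter = .ollama(
   model: "llama3",
   messages: [.init(role: .user, content: "Hello!")],
   maxTokens: 1000)

// llmService is internal to the package; shown here only to illustrate
// that for .ollama it yields the model name, i.e. "llama3".
let serviceName = parameters.llmService // "llama3"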

Sources/PolyAI/Service/DefaultPolyAIService.swift

Lines changed: 27 additions & 7 deletions
@@ -27,6 +27,7 @@ extension GenerativeModel {
 struct DefaultPolyAIService: PolyAIService {
 
    private var openAIService: OpenAIService?
+   private var ollamaOpenAIServiceCompatible: OpenAIService?
    private var anthropicService: AnthropicService?
    private var gemini: GenerativeModel.Configuration?
 
@@ -51,6 +52,9 @@ struct DefaultPolyAIService: PolyAIService {
 
       case .gemini(let apiKey):
          gemini = .init(apiKey: apiKey)
+
+      case .ollama(let url):
+         ollamaOpenAIServiceCompatible = OpenAIServiceFactory.ollama(baseURL: url)
       }
    }
 }
@@ -76,13 +80,13 @@ struct DefaultPolyAIService: PolyAIService {
       }
       // Remove all system messages as Anthropic uses the system message as a parameter and not as part of the messages array.
       let messageParams: [SwiftAnthropic.MessageParameter.Message] = messages.compactMap { message in
-        guard message.role != "system" else {
-           return nil // Skip "system" roles
-        }
-        return MessageParameter.Message(
-           role: SwiftAnthropic.MessageParameter.Message.Role(rawValue: message.role) ?? .user,
-           content: .text(message.content)
-        )
+         guard message.role != "system" else {
+            return nil // Skip "system" roles
+         }
+         return MessageParameter.Message(
+            role: SwiftAnthropic.MessageParameter.Message.Role(rawValue: message.role) ?? .user,
+            content: .text(message.content)
+         )
       }
       let systemMessage = messages.first { $0.role == "system" }
       let messageParameter = MessageParameter(model: model, messages: messageParams, maxTokens: maxTokens, system: systemMessage?.content, stream: false)
@@ -105,6 +109,13 @@ struct DefaultPolyAIService: PolyAIService {
            message.role == "user"
         }?.content ?? ""
         return try await generativeModel.generateContent(userMessage)
+      case .ollama(let model, let messages, let maxTokens):
+         guard let ollamaOpenAIServiceCompatible else {
+            throw PolyAIError.missingLLMConfiguration("You must provide a valid configuration for the \(parameter.llmService) API")
+         }
+         let messageParams: [SwiftOpenAI.ChatCompletionParameters.Message] = messages.map { .init(role: .init(rawValue: $0.role) ?? .user, content: .text($0.content)) }
+         let messageParameter = ChatCompletionParameters(messages: messageParams, model: .custom(model), maxTokens: maxTokens)
+         return try await ollamaOpenAIServiceCompatible.startChat(parameters: messageParameter)
      }
   }
 
@@ -161,6 +172,15 @@ struct DefaultPolyAIService: PolyAIService {
         }?.content ?? ""
         let stream = generativeModel.generateContentStream(userMessage)
         return try mapToLLMMessageStreamResponse(stream: stream)
+      case .ollama(let model, let messages, let maxTokens):
+         guard let ollamaOpenAIServiceCompatible else {
+            throw PolyAIError.missingLLMConfiguration("You must provide a valid configuration for the \(parameter.llmService) API")
+         }
+         let messageParams: [SwiftOpenAI.ChatCompletionParameters.Message] = messages.map { .init(role: .init(rawValue: $0.role) ?? .user, content: .text($0.content)) }
+         let messageParameter = ChatCompletionParameters(messages: messageParams, model: .custom(model), maxTokens: maxTokens)
+
+         let stream = try await ollamaOpenAIServiceCompatible.startStreamedChat(parameters: messageParameter)
+         return try mapToLLMMessageStreamResponse(stream: stream)
      }
   }
 
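
Both new branches reuse the OpenAI-compatible OpenAIService, so any server that speaks the OpenAI chat API at the configured base URL should behave the same way, not only Ollama. A rough call-site sketch for the streaming path; the element type returned by streamMessage is not visible in this diff, so treating each chunk's delta text as a content property is an assumption:

// Hypothetical call site (names outside this diff are assumptions).
let parameters: LLMParameter = .ollama(
   model: "llama3",
   messages: [.init(role: .user, content: "Why is the sky blue?")],
   maxTokens: 1000)

let stream = try await service.streamMessage(parameters: parameters)
for try await chunk in stream {
   // chunk.content as the per-chunk delta is assumed, not confirmed here.
   print(chunk.content ?? "", terminator: "")
}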

Sources/PolyAI/Service/PolyAIService.swift

Lines changed: 5 additions & 0 deletions
@@ -46,6 +46,11 @@ public enum LLMConfiguration {
    /// - Parameters:
    ///   - apiKey: The API key for authenticating requests to Gemini.
    case gemini(apiKey: String)
+
+   /// Configuration for accessing Ollama models through Ollama's OpenAI-compatible endpoints.
+   /// - Parameters:
+   ///   - url: The local host URL, e.g., "http://localhost:11434".
+   case ollama(url: String)
 }
 
 /// Defines the interface for a service that interacts with Large Language Models (LLMs).
