Skip to content

Commit 9ee9d00

Browse files
committed
Update package
1 parent 65304d1 commit 9ee9d00

5 files changed

+41
-24
lines changed

Sources/LargeLanguageModels/Intramodular/LLMs/AbstractLLM.ChatOrTextCompletion.swift

+3-5
Original file line numberDiff line numberDiff line change
@@ -5,12 +5,10 @@
55
import Foundation
66
import Swallow
77

8-
public protocol __AbstractLLM_Completion: Codable, CustomDebugStringConvertible, Hashable, Sendable {
9-
static var _completionType: AbstractLLM.CompletionType? { get }
10-
}
11-
128
extension AbstractLLM {
13-
public typealias Completion = __AbstractLLM_Completion
9+
public protocol Completion: Codable, CustomDebugStringConvertible, Hashable, Sendable {
10+
static var _completionType: AbstractLLM.CompletionType? { get }
11+
}
1412
}
1513

1614
extension AbstractLLM {

Sources/LargeLanguageModels/Intramodular/LLMs/AbstractLLM.ChatOrTextPrompt.swift

+11-13
Original file line numberDiff line numberDiff line change
@@ -6,33 +6,31 @@ import Compute
66
import Foundation
77
import Swallow
88

9-
public protocol __AbstractLLM_Prompt: Hashable, Sendable {
10-
associatedtype CompletionParameters: __AbstractLLM_CompletionParameters
11-
associatedtype Completion: Partializable
12-
13-
static var completionType: AbstractLLM.CompletionType? { get }
14-
15-
var context: PromptContextValues { get set }
16-
}
17-
189
extension AbstractLLM {
1910
public enum CompletionType: CaseIterable, Hashable, Sendable {
2011
case text
2112
case chat
2213
}
2314

24-
public typealias Prompt = __AbstractLLM_Prompt
15+
public protocol Prompt: Hashable, Sendable {
16+
associatedtype CompletionParameters: AbstractLLM.CompletionParameters
17+
associatedtype Completion: Partializable
18+
19+
static var completionType: AbstractLLM.CompletionType? { get }
20+
21+
var context: PromptContextValues { get set }
22+
}
2523
}
2624

2725
extension AbstractLLM {
2826
public enum ChatOrTextPrompt: Prompt {
2927
public typealias CompletionParameters = AbstractLLM.ChatOrTextCompletionParameters
3028
public typealias Completion = AbstractLLM.ChatOrTextCompletion
31-
29+
3230
public static var completionType: AbstractLLM.CompletionType? {
3331
nil
3432
}
35-
33+
3634
public var context: PromptContextValues {
3735
get {
3836
switch self {
@@ -54,7 +52,7 @@ extension AbstractLLM {
5452
}
5553
}
5654
}
57-
55+
5856
case text(TextPrompt)
5957
case chat(ChatPrompt)
6058
}

Sources/LargeLanguageModels/Intramodular/LLMs/AbstractLLM.CompletionParameters.swift

+5-6
Original file line numberDiff line numberDiff line change
@@ -6,13 +6,11 @@ import CorePersistence
66
import Foundation
77
import Swallow
88

9-
public protocol __AbstractLLM_CompletionParameters: Hashable, Sendable {
10-
11-
}
12-
139
extension AbstractLLM {
14-
public typealias CompletionParameters = __AbstractLLM_CompletionParameters
15-
10+
public protocol CompletionParameters: Hashable, Sendable {
11+
12+
}
13+
1614
public enum TokenLimit: Hashable, Sendable {
1715
case max
1816
case fixed(Int)
@@ -26,6 +24,7 @@ extension AbstractLLM {
2624
}
2725
}
2826

27+
/// Either temperature or top-p may be specified, but not both at the same time.
2928
public enum TemperatureOrTopP: Hashable, Sendable {
3029
case temperature(Double)
3130
case topProbabilityMass(Double)

Sources/LargeLanguageModels/Intramodular/LLMs/Chat/AbstractLLM.ChatCompletionParameters.swift

+1
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@ extension AbstractLLM {
3131
public let tokenLimit: TokenLimit?
3232
public let temperatureOrTopP: TemperatureOrTopP?
3333
public let stops: [String]?
34+
/// Only available for ChatGPT, Anthropic, and other models that support function calling.
3435
public let functions: IdentifierIndexingArrayOf<ChatFunctionDefinition>?
3536

3637
public init(

Sources/LargeLanguageModels/Intramodular/LLMs/LLMRequestHandling+Chat.swift

+21
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,26 @@ extension LLMRequestHandling {
1616
)
1717
}
1818

19+
public func complete(
20+
prompt: AbstractLLM.ChatPrompt,
21+
parameters: AbstractLLM.ChatCompletionParameters,
22+
model: some _MLModelIdentifierConvertible
23+
) async throws -> AbstractLLM.ChatCompletion {
24+
var prompt = prompt
25+
26+
prompt.context = try withMutableScope(prompt.context) { context in
27+
context.completionType = .chat
28+
context.modelIdentifier = try .one(model.__conversion())
29+
}
30+
31+
let completion = try await complete(
32+
prompt: prompt,
33+
parameters: parameters
34+
)
35+
36+
return completion
37+
}
38+
1939
public func complete(
2040
_ messages: [AbstractLLM.ChatMessage],
2141
parameters: AbstractLLM.ChatCompletionParameters,
@@ -51,6 +71,7 @@ extension LLMRequestHandling {
5171
return try await completion(for: prompt)
5272
}
5373

74+
5475
/// Stream a completion for a given chat prompt.
5576
public func completion(
5677
for messages: [AbstractLLM.ChatMessage],

0 commit comments

Comments
 (0)