Skip to content

Commit

Permalink
Update package
Browse files Browse the repository at this point in the history
  • Loading branch information
vmanot committed Apr 22, 2024
1 parent ec7202f commit ef417ad
Showing 1 changed file with 50 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,45 @@ extension LLMRequestHandling {
_ messages: [AbstractLLM.ChatMessage],
parameters: AbstractLLM.ChatCompletionParameters
) async throws -> AbstractLLM.ChatCompletion {
try await complete(prompt: AbstractLLM.ChatPrompt(messages: messages), parameters: parameters)
try await complete(
prompt: AbstractLLM.ChatPrompt(messages: messages),
parameters: parameters
)
}

/// Complete a chat transcript with an explicitly requested model.
///
/// Wraps `messages` in an `AbstractLLM.ChatPrompt`, scopes the prompt's
/// context to chat completion with the given model identifier, and then
/// forwards to `complete(prompt:parameters:)`.
public func complete(
    _ messages: [AbstractLLM.ChatMessage],
    parameters: AbstractLLM.ChatCompletionParameters,
    model: some _MLModelIdentifierConvertible
) async throws -> AbstractLLM.ChatCompletion {
    var chatPrompt = AbstractLLM.ChatPrompt(messages: messages)

    // Pin the prompt's context to chat completion against the requested model.
    chatPrompt.context = try withMutableScope(chatPrompt.context) { context in
        context.completionType = .chat
        context.modelIdentifier = try .one(model.__conversion())
    }

    return try await complete(
        prompt: chatPrompt,
        parameters: parameters
    )
}

/// Stream a completion for a given chat prompt, using an explicitly
/// requested model.
///
/// Copies the prompt, scopes its context to chat completion with the given
/// model identifier, and forwards to `completion(for:)`.
public func completion(
    for prompt: AbstractLLM.ChatPrompt,
    model: some _MLModelIdentifierConvertible
) async throws -> AbstractLLM.ChatCompletionStream {
    var scopedPrompt = prompt

    // Pin the prompt's context to chat completion against the requested model.
    scopedPrompt.context = try withMutableScope(scopedPrompt.context) { context in
        context.completionType = .chat
        context.modelIdentifier = try .one(model.__conversion())
    }

    return try await completion(for: scopedPrompt)
}

/// Stream a completion for a given chat prompt.
Expand All @@ -28,14 +66,24 @@ extension LLMRequestHandling {

return try await completion(for: prompt)
}

}

extension LLMRequestHandling {
/// Stream a completion for a given chat prompt.
/// Stream a completion for a given chat prompt.
///
/// Convenience spelling that forwards directly to `completion(for:)`.
public func stream(
    _ prompt: AbstractLLM.ChatPrompt
) async throws -> AbstractLLM.ChatCompletionStream {
    return try await completion(for: prompt)
}

/// Stream a completion for a given chat prompt and a desired model.
///
/// Convenience spelling that forwards directly to `completion(for:model:)`.
public func stream(
    _ prompt: AbstractLLM.ChatPrompt,
    model: some _MLModelIdentifierConvertible
) async throws -> AbstractLLM.ChatCompletionStream {
    return try await completion(for: prompt, model: model)
}

/// Stream a completion for a given chat prompt and a desired model.
public func stream(
_ messages: [AbstractLLM.ChatMessage],
Expand Down

0 comments on commit ef417ad

Please sign in to comment.