Skip to content

Commit

Permalink
Introduce LLMOpenAIRequestType type alias
Browse files Browse the repository at this point in the history
  • Loading branch information
paulhdk committed Oct 28, 2024
1 parent f389fd8 commit bdd7127
Show file tree
Hide file tree
Showing 5 changed files with 21 additions and 23 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ import OpenAPIRuntime
/// Represents the model-specific parameters of OpenAI's LLMs.
public struct LLMOpenAIModelParameters: Sendable {
/// The format for model responses.
let responseFormat: Components.Schemas.CreateChatCompletionRequest.response_formatPayload?
let responseFormat: LLMOpenAIRequestType.response_formatPayload?
/// The sampling temperature (0 to 2). Higher values increase randomness, lower values enhance focus.
let temperature: Double?
/// Nucleus sampling threshold. Considers tokens with top_p probability mass. Alternative to temperature sampling.
Expand All @@ -30,7 +30,7 @@ public struct LLMOpenAIModelParameters: Sendable {
/// Controls repetition (-2.0 to 2.0). Higher values reduce the likelihood of repeating content.
let frequencyPenalty: Double?
/// Alters specific tokens' likelihood in the completion.
let logitBias: Components.Schemas.CreateChatCompletionRequest.logit_biasPayload
let logitBias: LLMOpenAIRequestType.logit_biasPayload
/// Unique identifier for the end-user, aiding in abuse monitoring.
let user: String?

Expand All @@ -50,7 +50,7 @@ public struct LLMOpenAIModelParameters: Sendable {
/// - logitBias: Alters specific tokens' likelihood in the completion.
/// - user: Unique identifier for the end-user, aiding in abuse monitoring.
public init(
responseFormat: Components.Schemas.CreateChatCompletionRequest.response_formatPayload? = nil,
responseFormat: LLMOpenAIRequestType.response_formatPayload? = nil,
temperature: Double? = nil,
topP: Double? = nil,
completionsPerOutput: Int? = nil,
Expand All @@ -71,8 +71,7 @@ public struct LLMOpenAIModelParameters: Sendable {
self.seed = seed
self.presencePenalty = presencePenalty
self.frequencyPenalty = frequencyPenalty
self.logitBias = Components.Schemas.CreateChatCompletionRequest
.logit_biasPayload(additionalProperties: logitBias)
self.logitBias = LLMOpenAIRequestType.logit_biasPayload(additionalProperties: logitBias)
self.user = user
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ public struct LLMOpenAIParameters: Sendable {


/// The to-be-used OpenAI model.
let modelType: LLMOpenAIModelType
let modelType: LLMOpenAIRequestType.modelPayload
/// The to-be-used system prompt(s) of the LLM.
let systemPrompts: [String]
/// Indicates if a model access test should be made during LLM setup.
Expand All @@ -37,7 +37,7 @@ public struct LLMOpenAIParameters: Sendable {
/// - modelAccessTest: Indicates if access to the configured OpenAI model via the specified token should be made upon LLM setup.
/// - overwritingToken: Separate OpenAI token that overrides the one defined within the ``LLMOpenAIPlatform``.
public init(
modelType: LLMOpenAIModelType,
modelType: LLMOpenAIRequestType.modelPayload,
systemPrompt: String? = Defaults.defaultOpenAISystemPrompt,
modelAccessTest: Bool = false,
overwritingToken: String? = nil
Expand All @@ -59,7 +59,7 @@ public struct LLMOpenAIParameters: Sendable {
/// - overwritingToken: Separate OpenAI token that overrides the one defined within the ``LLMOpenAIPlatform``.
@_disfavoredOverload
public init(
modelType: LLMOpenAIModelType,
modelType: LLMOpenAIRequestType.modelPayload,
systemPrompts: [String] = [Defaults.defaultOpenAISystemPrompt],
modelAccessTest: Bool = false,
overwritingToken: String? = nil
Expand Down
8 changes: 4 additions & 4 deletions Sources/SpeziLLMOpenAI/Helpers/OpenAI+Export.swift
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@

import OpenAPIRuntime

/// Convenience export of the `OpenAI/Model` type.
/// Convenience export of the generated chat completion request type.
///
/// The ``LLMOpenAIModelType`` exports the `OpenAI/Model` describing the type of the to-be-used OpenAI Model.
/// This enables convenience access to the `OpenAI/Model` without naming conflicts resulting from the `OpenAI/Model` name.
public typealias LLMOpenAIModelType = Components.Schemas.CreateChatCompletionRequest.modelPayload
/// The ``LLMOpenAIRequestType`` exports the generated chat completion request type.
/// This enables shorthand access to further request-related generated types.
public typealias LLMOpenAIRequestType = Components.Schemas.CreateChatCompletionRequest
5 changes: 2 additions & 3 deletions Sources/SpeziLLMOpenAI/LLMOpenAISession+Configuration.swift
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ extension LLMOpenAISession {
}

return await Operations.createChatCompletion
.Input(body: .json(Components.Schemas.CreateChatCompletionRequest(
.Input(body: .json(LLMOpenAIRequestType(
messages: openAIContext,
model: schema.parameters.modelType,
frequency_penalty: schema.modelParameters.frequencyPenalty,
Expand All @@ -47,8 +47,7 @@ extension LLMOpenAISession {
presence_penalty: schema.modelParameters.presencePenalty,
response_format: schema.modelParameters.responseFormat,
seed: schema.modelParameters.seed,
stop: Components.Schemas.CreateChatCompletionRequest.stopPayload
.case2(schema.modelParameters.stopSequence),
stop: LLMOpenAIRequestType.stopPayload.case2(schema.modelParameters.stopSequence),
stream: true,
temperature: schema.modelParameters.temperature,
top_p: schema.modelParameters.topP,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ import SwiftUI
/// View to display an onboarding step for the user to change the OpenAI model.
public struct LLMOpenAIModelOnboardingStep: View {
public enum Default {
public static let models: [LLMOpenAIModelType] = [
public static let models: [LLMOpenAIRequestType.modelPayload] = [
.init(
value1: "GPT 3.5 Turbo",
value2: .gpt_hyphen_3_period_5_hyphen_turbo
Expand All @@ -31,10 +31,10 @@ public struct LLMOpenAIModelOnboardingStep: View {
}


@State private var modelSelection: LLMOpenAIModelType
@State private var modelSelection: LLMOpenAIRequestType.modelPayload
private let actionText: String
private let action: (LLMOpenAIModelType) -> Void
private let models: [LLMOpenAIModelType]
private let action: (LLMOpenAIRequestType.modelPayload) -> Void
private let models: [LLMOpenAIRequestType.modelPayload]


public var body: some View {
Expand Down Expand Up @@ -81,8 +81,8 @@ public struct LLMOpenAIModelOnboardingStep: View {
/// - action: Action that should be performed after the OpenAI model selection has been made; the selection is passed as a closure argument.
public init(
actionText: LocalizedStringResource? = nil,
models: [LLMOpenAIModelType] = Default.models,
_ action: @escaping (LLMOpenAIModelType) -> Void
models: [LLMOpenAIRequestType.modelPayload] = Default.models,
_ action: @escaping (LLMOpenAIRequestType.modelPayload) -> Void
) {
self.init(
actionText: actionText?.localizedString() ?? String(localized: "OPENAI_MODEL_SELECTION_SAVE_BUTTON", bundle: .module),
Expand All @@ -98,8 +98,8 @@ public struct LLMOpenAIModelOnboardingStep: View {
@_disfavoredOverload
public init<ActionText: StringProtocol>(
actionText: ActionText,
models: [LLMOpenAIModelType] = Default.models,
_ action: @escaping (LLMOpenAIModelType) -> Void
models: [LLMOpenAIRequestType.modelPayload] = Default.models,
_ action: @escaping (LLMOpenAIRequestType.modelPayload) -> Void
) {
self.actionText = String(actionText)
self.models = models
Expand Down

0 comments on commit bdd7127

Please sign in to comment.