SpeziLLMOpenAI: Move global logger into local scope
This reverts commit 1a6cc12.
paulhdk committed Dec 21, 2024
1 parent 55897f8 commit 10fcb63
Showing 12 changed files with 64 additions and 69 deletions.
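The file diffs below all revolve around one change: the module-global `logger` is removed, and every call site instead uses a logger with local scope, either constructed inline where the log statement lives or stored on the type that does the logging. The sketch below is only an illustration of those two forms, not code from the SpeziLLM sources; the type names SchemaBuilder and ParameterWrapper are made up, while the subsystem and category strings are the ones used throughout the diff.

import OSLog

// Hypothetical sketch of the two locally scoped logger forms this commit adopts.
struct SchemaBuilder {
    func build() {
        // Inline construction right at the call site, with no stored logger available.
        Logger(subsystem: "edu.stanford.spezi", category: "SpeziLLMOpenAI")
            .error("Error creating OpenAPIObjectContainer.")
    }
}

final class ParameterWrapper {
    /// Instance-scoped logger replacing the previous module-global one.
    let logger = Logger(subsystem: "edu.stanford.spezi", category: "SpeziLLMOpenAI")

    func validate() {
        logger.error("SpeziLLMOpenAI - initialization error")
    }
}

Because the subsystem and category strings stay the same, filtering in Console.app behaves as before; only where the Logger value lives changes.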
@@ -8,6 +8,7 @@

import Foundation
import OpenAPIRuntime
+ import OSLog


/// Defines the `LLMFunctionParameterSchemaCollector/schema` requirement to collect the function calling parameter schema's from the ``LLMFunction/Parameter``s.
@@ -44,7 +45,7 @@ extension LLMFunction {
"required": requiredPropertyNames
])
} catch {
logger.error("Error creating OpenAPIObjectContainer.")
Logger(subsystem: "edu.stanford.spezi", category: "SpeziLLMOpenAI").error("Error creating OpenAPIObjectContainer.")
}
return functionParameterSchema
}
@@ -48,8 +48,8 @@ extension _LLMFunctionParameterWrapper where T: AnyArray, T.Element: BinaryInteg
"uniqueItems": uniqueItems as Any?
].compactMapValues { $0 }))
} catch {
logger.error("LLMFunctionParameterWrapper+ArrayTypes")
self.init(description: "")
logger.error("LLMFunctionParameterWrapper+ArrayTypes")
}
}
}
@@ -89,8 +89,8 @@ extension _LLMFunctionParameterWrapper where T: AnyArray, T.Element: BinaryFloat
"uniqueItems": uniqueItems as Any?
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LMMFunctionParameter+ArrayTypes")
self.init(description: "")
logger.error("SpeziLLMOpenAI - initialization error - LMMFunctionParameter+ArrayTypes")
}
}
}
@@ -124,8 +124,8 @@ extension _LLMFunctionParameterWrapper where T: AnyArray, T.Element == Bool {
"uniqueItems": uniqueItems as Any?
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+ArrayTypes")
self.init(description: "")
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+ArrayTypes")
}
}
}
@@ -165,8 +165,8 @@ extension _LLMFunctionParameterWrapper where T: AnyArray, T.Element: StringProto
"uniqueItems": uniqueItems as Any?
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+ArrayTypes")
self.init(description: "")
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+ArrayTypes")
}
}
}
@@ -44,8 +44,8 @@ extension _LLMFunctionParameterWrapper where T: AnyArray, T.Element: LLMFunction
"uniqueItems": uniqueItems as Any?
].compactMapValues { $0 }))
} catch {
logger.error("Couldn't create FunctionParameterWrapper+CustomType \(error)")
self.init(description: "")
logger.error("Couldn't create FunctionParameterWrapper+CustomType \(error)")
}
}
}
@@ -85,8 +85,8 @@ extension _LLMFunctionParameterWrapper where T: AnyOptional, T.Wrapped: AnyArray
"uniqueItems": uniqueItems as Any?
].compactMapValues { $0 }))
} catch {
logger.error("Couldn't create LLMFunctionParameterWrapper+CustomTypes")
self.init(description: "")
logger.error("Couldn't create LLMFunctionParameterWrapper+CustomTypes")
}
}
}
@@ -29,11 +29,8 @@ extension _LLMFunctionParameterWrapper where T: LLMFunctionParameterEnum, T.RawV
"enum": T.allCases.map { String($0.rawValue) }
].compactMapValues { $0 }))
} catch {
- logger
- .error(
- "SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+Enum \(error.localizedDescription)"
- )
self.init(description: "")
+ logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+Enum \(error.localizedDescription)")
}
}
}
@@ -58,11 +55,8 @@ extension _LLMFunctionParameterWrapper where T: AnyOptional, T.Wrapped: LLMFunct
"enum": T.Wrapped.allCases.map { String($0.rawValue) }
].compactMapValues { $0 }))
} catch {
- logger
- .error(
- "SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+Enum \(error.localizedDescription)"
- )
self.init(description: "")
+ logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+Enum \(error.localizedDescription)")
}
}
}
@@ -99,11 +93,8 @@ extension _LLMFunctionParameterWrapper where T: AnyArray, T.Element: LLMFunction
"uniqueItems": uniqueItems
].compactMapValues { $0 }))
} catch {
- logger
- .error(
- "SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+Enum \(error.localizedDescription)"
- )
self.init(description: "")
+ logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+Enum \(error.localizedDescription)")
}
}
}
@@ -142,11 +133,8 @@ extension _LLMFunctionParameterWrapper where T: AnyOptional,
"uniqueItems": uniqueItems as Any?
].compactMapValues { $0 }))
} catch {
- logger
- .error(
- "SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+Enum \(error.localizedDescription)"
- )
self.init(description: "")
+ logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+Enum \(error.localizedDescription)")
}
}
}
@@ -36,8 +36,8 @@ extension _LLMFunctionParameterWrapper where T: AnyOptional, T.Wrapped: BinaryIn
"maximum": maximum.map { Double($0) } as Any?
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionparaemter+OptionalType")
self.init(description: "")
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionparaemter+OptionalType")
}
}
}
@@ -65,8 +65,8 @@ extension _LLMFunctionParameterWrapper where T: AnyOptional, T.Wrapped: BinaryFl
"maximum": maximum.map { Double($0) } as Any?
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+OptionalType")
self.init(description: "")
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+OptionalType")
}
}
}
@@ -88,8 +88,8 @@ extension _LLMFunctionParameterWrapper where T: AnyOptional, T.Wrapped == Bool {
"const": const.map { String($0) } as Any?
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionalParameterWrapper+OptionalTypes")
self.init(description: "")
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionalParameterWrapper+OptionalTypes")
}
}
}
@@ -120,8 +120,8 @@ extension _LLMFunctionParameterWrapper where T: AnyOptional, T.Wrapped: StringPr
"enum": `enum`.map { $0.map { String($0) as Any? } }
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+OptionalTypes")
self.init(description: "")
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+OptionalTypes")
}
}
}
@@ -165,8 +165,8 @@ extension _LLMFunctionParameterWrapper where T: AnyOptional, T.Wrapped: AnyArray
"uniqueItems": uniqueItems as Any?
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionPropertyWrapper+OptionalType")
self.init(description: "")
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionPropertyWrapper+OptionalType")
}
}
}
@@ -207,8 +207,8 @@ extension _LLMFunctionParameterWrapper where T: AnyOptional, T.Wrapped: AnyArray
"uniqueItems": uniqueItems as Any?
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+OptionalTypes")
self.init(description: "")
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+OptionalTypes")
}
}
}
@@ -242,8 +242,8 @@ extension _LLMFunctionParameterWrapper where T: AnyOptional, T.Wrapped: AnyArray
"uniqueItems": uniqueItems as Any?
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+OptionalTypes.swift")
self.init(description: "")
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+OptionalTypes.swift")
}
}
}
@@ -284,8 +284,8 @@ extension _LLMFunctionParameterWrapper where T: AnyOptional, T.Wrapped: AnyArray
"uniqueItems": uniqueItems as Any?
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+OptionalType")
self.init(description: "")
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+OptionalType")
}
}
}
@@ -34,8 +34,8 @@ extension _LLMFunctionParameterWrapper where T: BinaryInteger {
"maximum": maximum.map { Double($0) } as Any?
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameter+PrimitveTypes")
self.init(description: "")
self.logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameter+PrimitveTypes")
}
}
}
@@ -64,8 +64,8 @@ extension _LLMFunctionParameterWrapper where T: BinaryFloatingPoint {
"maximum": maximum.map { Double($0) } as Any?
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+PrimitveTypes")
self.init(description: "")
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+PrimitveTypes")
}
}
}
@@ -88,8 +88,8 @@ extension _LLMFunctionParameterWrapper where T == Bool {
"const": const.map { String($0) } as Any?
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+PrimiteveTypes")
self.init(description: "")
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+PrimiteveTypes")
}
}
}
@@ -122,8 +122,8 @@ extension _LLMFunctionParameterWrapper where T: StringProtocol {
"enum": `enum`.map { $0.map { String($0) } } as Any?
].compactMapValues { $0 }))
} catch {
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+PrimitiveTypes")
self.init(description: "")
logger.error("SpeziLLMOpenAI - initialization error - LLMFunctionParameterWrapper+PrimitiveTypes")
}
}
}
@@ -7,6 +7,7 @@
//

import OpenAPIRuntime
+ import OSLog


// NOTE: OpenAPIRuntime.OpenAPIObjectContainer is the underlying type for Components.Schemas.FunctionParameters.additionalProperties
@@ -20,6 +21,9 @@ public typealias LLMFunctionParameterItemSchema = OpenAPIRuntime.OpenAPIObjectCo
/// Refer to the documentation of ``LLMFunction/Parameter`` for information on how to use the `@Parameter` property wrapper.
@propertyWrapper
public class _LLMFunctionParameterWrapper<T: Decodable>: LLMFunctionParameterSchemaCollector { // swiftlint:disable:this type_name
+ /// A Swift Logger that logs important information and errors.
+ var logger = Logger(subsystem: "edu.stanford.spezi", category: "SpeziLLMOpenAI")
+
private var injectedValue: T?


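The hunk above adds an instance-level `logger` to `_LLMFunctionParameterWrapper`, which is also why every convenience initializer in this commit moves its `logger.error` call below `self.init(description: "")`: Swift does not allow touching `self`, and therefore an instance property, before the delegating `self.init` call has run. A minimal sketch of that ordering constraint follows; the `Wrapper` type and its JSON check are hypothetical, not the SpeziLLM class.

import Foundation
import OSLog

final class Wrapper {
    /// Instance-scoped logger, mirroring the property added in the hunk above.
    let logger = Logger(subsystem: "edu.stanford.spezi", category: "SpeziLLMOpenAI")
    let description: String

    init(description: String) {
        self.description = description
    }

    convenience init(schema json: String) {
        do {
            _ = try JSONSerialization.jsonObject(with: Data(json.utf8))
            self.init(description: json)
        } catch {
            self.init(description: "")
            // `self`, and with it `logger`, only becomes usable after the self.init call above.
            logger.error("Schema decoding failed: \(error.localizedDescription)")
        }
    }
}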
12 changes: 6 additions & 6 deletions Sources/SpeziLLMOpenAI/LLMOpenAIError.swift
@@ -141,22 +141,22 @@ extension LLMOpenAISession {
func handleErrorCode(_ statusCode: Int) -> LLMOpenAIError {
switch statusCode {
case 401:
logger.error("SpeziLLMOpenAI: Invalid OpenAI API token")
LLMOpenAISession.logger.error("SpeziLLMOpenAI: Invalid OpenAI API token")
return LLMOpenAIError.invalidAPIToken
case 403:
logger.error("SpeziLLMOpenAI: Model access check - Country, region, or territory not supported")
LLMOpenAISession.logger.error("SpeziLLMOpenAI: Model access check - Country, region, or territory not supported")
return LLMOpenAIError.invalidAPIToken
case 429:
logger.error("SpeziLLMOpenAI: Rate limit reached for requests")
LLMOpenAISession.logger.error("SpeziLLMOpenAI: Rate limit reached for requests")
return LLMOpenAIError.insufficientQuota
case 500:
logger.error("SpeziLLMOpenAI: The server had an error while processing your request")
LLMOpenAISession.logger.error("SpeziLLMOpenAI: The server had an error while processing your request")
return LLMOpenAIError.generationError
case 503:
logger.error("SpeziLLMOpenAI: The engine is currently overloaded, please try again later")
LLMOpenAISession.logger.error("SpeziLLMOpenAI: The engine is currently overloaded, please try again later")
return LLMOpenAIError.generationError
default:
logger.error("SpeziLLMOpenAI: Received unknown return code from OpenAI")
LLMOpenAISession.logger.error("SpeziLLMOpenAI: Received unknown return code from OpenAI")
return LLMOpenAIError.generationError
}
}
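In LLMOpenAIError.swift the same idea shows up as a type-level logger: the error-handling calls now go through `LLMOpenAISession.logger` (and `Self.logger` in the following hunk) instead of a free-standing global. The sketch below is a rough, hypothetical reduction of that shape, covering only a few of the status codes handled above; the `Session` and `SessionError` names are made up.

import OSLog

enum SessionError: Error {
    case invalidAPIToken, insufficientQuota, generationError
}

final class Session {
    /// One logger stored on the type and referenced explicitly, rather than a global.
    static let logger = Logger(subsystem: "edu.stanford.spezi", category: "SpeziLLMOpenAI")

    func handleErrorCode(_ statusCode: Int) -> SessionError {
        switch statusCode {
        case 401:
            Self.logger.error("Invalid API token")
            return .invalidAPIToken
        case 429:
            Self.logger.error("Rate limit reached for requests")
            return .insufficientQuota
        default:
            Self.logger.error("Received an unknown return code")
            return .generationError
        }
    }
}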
@@ -90,7 +90,7 @@ extension LLMOpenAISession {
guard let role = Components.Schemas.ChatCompletionRequestSystemMessage
.rolePayload(rawValue: contextEntity.role.openAIRepresentation.rawValue)
else {
logger.error("Could not create ChatCompletionRequestSystemMessage payload")
Self.logger.error("Could not create ChatCompletionRequestSystemMessage payload")
return nil
}
return Components.Schemas.ChatCompletionRequestMessage.ChatCompletionRequestSystemMessage(
