
Commit 8e8911f

fix: gemini to google
1 parent: 6657fb7 · commit: 8e8911f

3 files changed: +7 −7 lines


Sources/CompilerSwiftAI/Model Calling/ModelMetadata.swift

Lines changed: 2 additions & 2 deletions
@@ -48,8 +48,8 @@ struct ModelMetadata: Codable, Sendable, Equatable {
         ModelMetadata(provider: .deepseek, model: model.rawValue, temperature: temperature, maxTokens: maxTokens)
     }
 
-    static func gemini(_ model: GeminiModel, temperature: Float? = nil, maxTokens: Int? = nil) -> ModelMetadata {
-        ModelMetadata(provider: .gemini, model: model.rawValue, temperature: temperature, maxTokens: maxTokens)
+    static func google(_ model: GeminiModel, temperature: Float? = nil, maxTokens: Int? = nil) -> ModelMetadata {
+        ModelMetadata(provider: .google, model: model.rawValue, temperature: temperature, maxTokens: maxTokens)
     }
 
     static func == (lhs: ModelMetadata, rhs: ModelMetadata) -> Bool {
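
Note: call sites that previously used ModelMetadata.gemini(...) now need ModelMetadata.google(...). A minimal sketch of the updated call, assuming a hypothetical GeminiModel case named .flash (the enum's cases are not shown in this diff):

    // Before: let metadata = ModelMetadata.gemini(.flash, temperature: 0.7)
    // After this commit (.flash is a hypothetical GeminiModel case):
    let metadata = ModelMetadata.google(.flash, temperature: 0.7, maxTokens: 1024)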

Sources/CompilerSwiftAI/Model Calling/Models.swift

Lines changed: 1 addition & 1 deletion
@@ -6,7 +6,7 @@ public enum ModelProvider: String, Codable, Sendable, Equatable {
     case anthropic
     case perplexity
     case deepseek
-    case gemini
+    case google
 }
 
 public enum OpenAIModel: String, Codable {
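
Note: ModelProvider gets its Codable conformance from its String raw values, so the encoded value changes from "gemini" to "google"; anything that still carries the old "gemini" string will no longer decode into this enum. Exhaustive switches over ModelProvider also need the case renamed. A minimal sketch, limited to the cases visible in this hunk:

    // Maps the renamed provider case to a display name; providers not shown
    // in this hunk fall through to their raw value.
    func displayName(for provider: ModelProvider) -> String {
        switch provider {
        case .anthropic:  return "Anthropic"
        case .perplexity: return "Perplexity"
        case .deepseek:   return "DeepSeek"
        case .google:     return "Google"   // formerly .gemini
        default:          return provider.rawValue
        }
    }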

Sources/CompilerSwiftAI/Model Calling/StreamConfiguration.swift

Lines changed: 4 additions & 4 deletions
@@ -103,21 +103,21 @@ public extension StreamConfiguration {
         ))
     }
 
-    /// Create a Gemini streaming configuration
+    /// Create a Google streaming configuration
     /// - Parameters:
     ///   - model: The Gemini model to use
     ///   - temperature: Optional temperature parameter (0.0 - 1.0)
     ///   - maxTokens: Optional maximum tokens to generate
-    static func gemini(
+    static func google(
         _ model: GeminiModel,
         temperature: Float? = nil,
         maxTokens: Int? = nil
     ) -> StreamConfiguration {
         .init(metadata: ModelMetadata(
-            provider: .gemini,
+            provider: .google,
             model: model.rawValue,
             temperature: temperature,
             maxTokens: maxTokens
         ))
     }
-}
+}
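
Note: the streaming entry point is renamed the same way. A minimal sketch of the updated call site, again using a hypothetical .flash case of GeminiModel:

    // Before: let config = StreamConfiguration.gemini(.flash)
    // After this commit:
    let config = StreamConfiguration.google(.flash, temperature: 0.5, maxTokens: 2048)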
