Skip to content

Commit 8ea9b6f

Browse files
committed
Make temperature a nullable value so that it can be set to 0
Fixes sashabaranov#9
1 parent 2a0ff5a commit 8ea9b6f

File tree

5 files changed

+34
-8
lines changed

5 files changed

+34
-8
lines changed

audio.go

+2-2
Original file line number · Diff line number · Diff line change
@@ -45,8 +45,8 @@ type AudioRequest struct {
4545
Reader io.Reader
4646

4747
Prompt string
48-
Temperature float32
49-
Language string // Only for transcription.
48+
Temperature float32 // defaults to 0, so fine to not be a pointer
49+
Language string // Only for transcription.
5050
Format AudioResponseFormat
5151
TimestampGranularities []TranscriptionTimestampGranularity // Only for transcription.
5252
}

chat.go

+1-1
Original file line number · Diff line number · Diff line change
@@ -222,7 +222,7 @@ type ChatCompletionRequest struct {
222222
// MaxCompletionTokens An upper bound for the number of tokens that can be generated for a completion,
223223
// including visible output tokens and reasoning tokens https://platform.openai.com/docs/guides/reasoning
224224
MaxCompletionTokens int `json:"max_completion_tokens,omitempty"`
225-
Temperature float32 `json:"temperature,omitempty"`
225+
Temperature *float32 `json:"temperature,omitempty"`
226226
TopP float32 `json:"top_p,omitempty"`
227227
N int `json:"n,omitempty"`
228228
Stream bool `json:"stream,omitempty"`

chat_test.go

+19-3
Original file line number · Diff line number · Diff line change
@@ -153,7 +153,7 @@ func TestO1ModelsChatCompletionsBetaLimitations(t *testing.T) {
153153
Role: openai.ChatMessageRoleAssistant,
154154
},
155155
},
156-
Temperature: float32(2),
156+
Temperature: openai.NewFloat(2),
157157
},
158158
expectedError: openai.ErrO1BetaLimitationsOther,
159159
},
@@ -170,7 +170,7 @@ func TestO1ModelsChatCompletionsBetaLimitations(t *testing.T) {
170170
Role: openai.ChatMessageRoleAssistant,
171171
},
172172
},
173-
Temperature: float32(1),
173+
Temperature: openai.NewFloat(1),
174174
TopP: float32(0.1),
175175
},
176176
expectedError: openai.ErrO1BetaLimitationsOther,
@@ -188,7 +188,7 @@ func TestO1ModelsChatCompletionsBetaLimitations(t *testing.T) {
188188
Role: openai.ChatMessageRoleAssistant,
189189
},
190190
},
191-
Temperature: float32(1),
191+
Temperature: openai.NewFloat(1),
192192
TopP: float32(1),
193193
N: 2,
194194
},
@@ -259,6 +259,22 @@ func TestChatRequestOmitEmpty(t *testing.T) {
259259
}
260260
}
261261

262+
func TestChatRequestOmitEmptyWithZeroTemp(t *testing.T) {
263+
data, err := json.Marshal(openai.ChatCompletionRequest{
264+
// We set model b/c it's required, so omitempty doesn't make sense
265+
Model: "gpt-4",
266+
Temperature: openai.NewFloat(0),
267+
})
268+
checks.NoError(t, err)
269+
270+
// messages is also required so isn't omitted
271+
// but the zero-value for temp is not excluded, b/c that's a valid value to set the temp to!
272+
const expected = `{"model":"gpt-4","messages":null,"temperature":0}`
273+
if string(data) != expected {
274+
t.Errorf("expected JSON with all empty fields to be %v but was %v", expected, string(data))
275+
}
276+
}
277+
262278
func TestChatCompletionsWithStream(t *testing.T) {
263279
config := openai.DefaultConfig("whatever")
264280
config.BaseURL = "http://localhost/v1"

common.go

+10
Original file line number · Diff line number · Diff line change
@@ -22,3 +22,13 @@ type PromptTokensDetails struct {
2222
AudioTokens int `json:"audio_tokens"`
2323
CachedTokens int `json:"cached_tokens"`
2424
}
25+
26+
// NewFloat returns a pointer to a float, useful for setting the temperature on some APIs
27+
func NewFloat(v float32) *float32 {
28+
return &v
29+
}
30+
31+
// NewInt returns a pointer to an int, useful for setting the seed and other nullable parameters
32+
func NewInt(v int) *int {
33+
return &v
34+
}

completion.go

+2-2
Original file line number · Diff line number · Diff line change
@@ -220,7 +220,7 @@ func validateRequestForO1Models(request ChatCompletionRequest) error {
220220
}
221221

222222
// Other: temperature, top_p and n are fixed at 1, while presence_penalty and frequency_penalty are fixed at 0.
223-
if request.Temperature > 0 && request.Temperature != 1 {
223+
if request.Temperature != nil && *request.Temperature != 1 {
224224
return ErrO1BetaLimitationsOther
225225
}
226226
if request.TopP > 0 && request.TopP != 1 {
@@ -263,7 +263,7 @@ type CompletionRequest struct {
263263
Stop []string `json:"stop,omitempty"`
264264
Stream bool `json:"stream,omitempty"`
265265
Suffix string `json:"suffix,omitempty"`
266-
Temperature float32 `json:"temperature,omitempty"`
266+
Temperature *float32 `json:"temperature,omitempty"`
267267
TopP float32 `json:"top_p,omitempty"`
268268
User string `json:"user,omitempty"`
269269
}

0 commit comments

Comments (0)