@@ -959,6 +959,56 @@ func TestCreateChatCompletionStreamReasoningValidatorFails(t *testing.T) {
}
}

+func TestCreateChatCompletionStreamO3ReasoningValidatorFails(t *testing.T) {
+	client, _, _ := setupOpenAITestServer()
+
+	stream, err := client.CreateChatCompletionStream(context.Background(), openai.ChatCompletionRequest{
+		MaxTokens: 100, // This will trigger the validator to fail
+		Model:     openai.O3,
+		Messages: []openai.ChatCompletionMessage{
+			{
+				Role:    openai.ChatMessageRoleUser,
+				Content: "Hello!",
+			},
+		},
+		Stream: true,
+	})
+
+	if stream != nil {
+		t.Error("Expected nil stream when validation fails")
+		stream.Close()
+	}
+
+	if !errors.Is(err, openai.ErrReasoningModelMaxTokensDeprecated) {
+		t.Errorf("Expected ErrReasoningModelMaxTokensDeprecated for O3, got: %v", err)
+	}
+}
+
+func TestCreateChatCompletionStreamO4MiniReasoningValidatorFails(t *testing.T) {
+	client, _, _ := setupOpenAITestServer()
+
+	stream, err := client.CreateChatCompletionStream(context.Background(), openai.ChatCompletionRequest{
+		MaxTokens: 100, // This will trigger the validator to fail
+		Model:     openai.O4Mini,
+		Messages: []openai.ChatCompletionMessage{
+			{
+				Role:    openai.ChatMessageRoleUser,
+				Content: "Hello!",
+			},
+		},
+		Stream: true,
+	})
+
+	if stream != nil {
+		t.Error("Expected nil stream when validation fails")
+		stream.Close()
+	}
+
+	if !errors.Is(err, openai.ErrReasoningModelMaxTokensDeprecated) {
+		t.Errorf("Expected ErrReasoningModelMaxTokensDeprecated for O4Mini, got: %v", err)
+	}
+}
+
func compareChatStreamResponseChoices(c1, c2 openai.ChatCompletionStreamChoice) bool {
	if c1.Index != c2.Index {
		return false