@@ -26,7 +26,7 @@ func TestAPI(t *testing.T) {
 	_, err = c.ListEngines(ctx)
 	checks.NoError(t, err, "ListEngines error")
 
-	_, err = c.GetEngine(ctx, "davinci")
+	_, err = c.GetEngine(ctx, openai.GPT3Davinci002)
 	checks.NoError(t, err, "GetEngine error")
 
 	fileRes, err := c.ListFiles(ctx)
@@ -42,7 +42,7 @@ func TestAPI(t *testing.T) {
 			"The food was delicious and the waiter",
 			"Other examples of embedding request",
 		},
-		Model: openai.AdaSearchQuery,
+		Model: openai.AdaEmbeddingV2,
 	}
 	_, err = c.CreateEmbeddings(ctx, embeddingReq)
 	checks.NoError(t, err, "Embedding error")
@@ -77,31 +77,6 @@ func TestAPI(t *testing.T) {
 	)
 	checks.NoError(t, err, "CreateChatCompletion (with name) returned error")
 
-	stream, err := c.CreateCompletionStream(ctx, openai.CompletionRequest{
-		Prompt:    "Ex falso quodlibet",
-		Model:     openai.GPT3Ada,
-		MaxTokens: 5,
-		Stream:    true,
-	})
-	checks.NoError(t, err, "CreateCompletionStream returned error")
-	defer stream.Close()
-
-	counter := 0
-	for {
-		_, err = stream.Recv()
-		if err != nil {
-			if errors.Is(err, io.EOF) {
-				break
-			}
-			t.Errorf("Stream error: %v", err)
-		} else {
-			counter++
-		}
-	}
-	if counter == 0 {
-		t.Error("Stream did not return any responses")
-	}
-
 	_, err = c.CreateChatCompletion(
 		context.Background(),
 		openai.ChatCompletionRequest{
@@ -134,6 +109,41 @@ func TestAPI(t *testing.T) {
 	checks.NoError(t, err, "CreateChatCompletion (with functions) returned error")
 }
 
+func TestCompletionStream(t *testing.T) {
+	apiToken := os.Getenv("OPENAI_TOKEN")
+	if apiToken == "" {
+		t.Skip("Skipping testing against production OpenAI API. Set OPENAI_TOKEN environment variable to enable it.")
+	}
+
+	c := openai.NewClient(apiToken)
+	ctx := context.Background()
+
+	stream, err := c.CreateCompletionStream(ctx, openai.CompletionRequest{
+		Prompt:    "Ex falso quodlibet",
+		Model:     openai.GPT3Babbage002,
+		MaxTokens: 5,
+		Stream:    true,
+	})
+	checks.NoError(t, err, "CreateCompletionStream returned error")
+	defer stream.Close()
+
+	counter := 0
+	for {
+		_, err = stream.Recv()
+		if err != nil {
+			if errors.Is(err, io.EOF) {
+				break
+			}
+			t.Errorf("Stream error: %v", err)
+		} else {
+			counter++
+		}
+	}
+	if counter == 0 {
+		t.Error("Stream did not return any responses")
+	}
+}
+
 func TestAPIError(t *testing.T) {
 	apiToken := os.Getenv("OPENAI_TOKEN")
 	if apiToken == "" {