@@ -85,6 +85,8 @@ class GenerationConfigDict(TypedDict, total=False):
     temperature: float
     response_mime_type: str
     response_schema: protos.Schema | Mapping[str, Any]  # fmt: off
+    presence_penalty: float
+    frequency_penalty: float


 @dataclasses.dataclass
@@ -144,8 +146,6 @@ class GenerationConfig:
             Note: The default value varies by model, see the
             `Model.top_k` attribute of the `Model` returned the
             `genai.get_model` function.
-        seed:
-            Optional. Seed used in decoding. If not set, the request uses a randomly generated seed.
         response_mime_type:
             Optional. Output response mimetype of the generated candidate text.

@@ -161,10 +161,6 @@ class GenerationConfig:
             Optional.
         frequency_penalty:
             Optional.
-        response_logprobs:
-            Optional. If true, export the `logprobs` results in response.
-        logprobs:
-            Optional. Number of candidates of log probabilities to return at each step of decoding.
     """

     candidate_count: int | None = None
@@ -173,13 +169,10 @@ class GenerationConfig:
     temperature: float | None = None
     top_p: float | None = None
     top_k: int | None = None
-    seed: int | None = None
     response_mime_type: str | None = None
     response_schema: protos.Schema | Mapping[str, Any] | type | None = None
     presence_penalty: float | None = None
     frequency_penalty: float | None = None
-    response_logprobs: bool | None = None
-    logprobs: int | None = None


 GenerationConfigType = Union[protos.GenerationConfig, GenerationConfigDict, GenerationConfig]
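For reference, `GenerationConfigType` is a union over the proto, the TypedDict, and the dataclass, so callers can pass the `presence_penalty`/`frequency_penalty` fields in either the dict or the dataclass form. A minimal usage sketch under stated assumptions: the API key, model name, and penalty values are illustrative only, and whether the penalties are honoured depends on the backend model and library version.

```python
import google.generativeai as genai

genai.configure(api_key="YOUR_API_KEY")  # placeholder key

# Dataclass form, mirroring the fields declared on GenerationConfig above.
config = genai.GenerationConfig(
    temperature=0.7,
    presence_penalty=0.3,   # field added to GenerationConfigDict in this change
    frequency_penalty=0.3,  # field added to GenerationConfigDict in this change
)

# Equivalent dict form, typed by GenerationConfigDict.
config_dict = {"temperature": 0.7, "presence_penalty": 0.3, "frequency_penalty": 0.3}

# Model name is an illustrative assumption, not part of this change.
model = genai.GenerativeModel("gemini-1.5-flash")
response = model.generate_content("Say hello.", generation_config=config)
print(response.text)
```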