@@ -79,6 +79,7 @@
     ),
 )

+MODEL_ARG = click.argument("model", type=str, default=None, required=False)
 MODEL_OPTION = click.option(
     "--model_path",
     type=str,
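The new MODEL_ARG is an optional positional argument (required=False, default=None), so existing invocations that only pass --model_path keep working. A minimal standalone sketch of the same click pattern, using hypothetical names and assuming --model_path keeps the "default" sentinel implied by the check in a later hunk:

```python
import click

# Toy command illustrating the optional-positional pattern used above:
# "model" may be given positionally, "--model_path" stays an option.
@click.command()
@click.argument("model", type=str, default=None, required=False)
@click.option("--model_path", type=str, default="default")
def serve(model: str, model_path: str):
    click.echo(f"model={model!r} model_path={model_path!r}")

if __name__ == "__main__":
    serve()
    # $ serve zoo:some/stub        -> model='zoo:some/stub', model_path='default'
    # $ serve --model_path m.onnx  -> model=None, model_path='m.onnx'
```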
@@ -152,6 +153,7 @@
 @PORT_OPTION
 @LOG_LEVEL_OPTION
 @HOT_RELOAD_OPTION
+@MODEL_ARG
 @MODEL_OPTION
 @BATCH_OPTION
 @CORES_OPTION
@@ -167,6 +169,7 @@ def main(
     log_level: str,
     hot_reload_config: bool,
     model_path: str,
+    model: str,
     batch_size: int,
     num_cores: int,
     num_workers: int,
@@ -216,6 +219,17 @@ def main(
     ...
     ```
     """
+    # the server cli can take a model argument or --model_path option
+    # if the --model_path option is provided, use that
+    # otherwise if the argument is given and --model_path is not used, use the
+    # argument instead
+    if model and model_path == "default":
+        model_path = model
+
+    if integration == INTEGRATION_OPENAI:
+        if task is None or task != "text_generation":
+            task = "text_generation"
+
     if ctx.invoked_subcommand is not None:
         return

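The added block gives --model_path precedence: the positional model only fills in while model_path still holds its "default" sentinel, and the OpenAI integration additionally pins task to "text_generation". A self-contained sketch of the precedence rule (resolve_model_path is a hypothetical helper, not part of the PR):

```python
from typing import Optional

# Hypothetical helper mirroring the precedence logic added above:
# an explicit --model_path wins; the positional argument is only used
# while model_path is still the "default" sentinel.
def resolve_model_path(model: Optional[str], model_path: str = "default") -> str:
    if model and model_path == "default":
        return model
    return model_path

assert resolve_model_path("zoo:some/stub") == "zoo:some/stub"
assert resolve_model_path("zoo:some/stub", "local/model.onnx") == "local/model.onnx"
assert resolve_model_path(None) == "default"
```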
@@ -254,24 +268,6 @@ def main(
     server.start_server(host, port, log_level, hot_reload_config=hot_reload_config)


-@main.command(
-    context_settings=dict(
-        token_normalize_func=lambda x: x.replace("-", "_"), show_default=True
-    ),
-)
-@click.argument("config-file", type=str)
-@HOST_OPTION
-@PORT_OPTION
-@LOG_LEVEL_OPTION
-@HOT_RELOAD_OPTION
-def openai(
-    config_file: str, host: str, port: int, log_level: str, hot_reload_config: bool
-):
-
-    server = OpenAIServer(server_config=config_file)
-    server.start_server(host, port, log_level, hot_reload_config=hot_reload_config)
-
-
 @main.command(
     context_settings=dict(
         token_normalize_func=lambda x: x.replace("-", "_"), show_default=True
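The dedicated openai subcommand (which took a positional config file) is deleted; together with the INTEGRATION_OPENAI branch added to main above, OpenAI-style serving now routes through the top-level command. A hypothetical before/after invocation, assuming the integration parameter seen in main comes from an existing --integration option (the entry-point name is a placeholder):

```python
# Before this PR: a dedicated subcommand, configured via a positional file.
#   $ <server-cli> openai config.yaml
# After this PR (assumed wiring): the top-level command handles it, and the
# model can now be passed positionally thanks to MODEL_ARG.
#   $ <server-cli> --integration openai <model-stub-or-path>
```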