diff --git a/openai/api_requestor.py b/openai/api_requestor.py
index 3b5e0c1c92..186bdd284a 100644
--- a/openai/api_requestor.py
+++ b/openai/api_requestor.py
@@ -45,11 +45,11 @@ def _api_encode(data):
         elif isinstance(value, list) or isinstance(value, tuple):
             for i, sv in enumerate(value):
                 if isinstance(sv, dict):
-                    subdict = _encode_nested_dict("%s[]" % (key,), sv)
+                    subdict = _encode_nested_dict("%s[%d]" % (key, i), sv)
                     for k, v in _api_encode(subdict):
                         yield (k, v)
                 else:
-                    yield ("%s[]" % (key,), util.utf8(sv))
+                    yield ("%s[%d]" % (key, i), util.utf8(sv))
         elif isinstance(value, dict):
             subdict = _encode_nested_dict(key, value)
             for subkey, subvalue in _api_encode(subdict):
diff --git a/openai/api_resources/__init__.py b/openai/api_resources/__init__.py
index 2a77f47097..2cce27ae9b 100644
--- a/openai/api_resources/__init__.py
+++ b/openai/api_resources/__init__.py
@@ -4,7 +4,8 @@
 from openai.api_resources.error_object import ErrorObject
 from openai.api_resources.event import Event
 from openai.api_resources.file import File
-from openai.api_resources.higherlevel import HigherLevel
+from openai.api_resources.answer import Answer
+from openai.api_resources.classification import Classification
 from openai.api_resources.plan import Plan
 from openai.api_resources.run import Run
 from openai.api_resources.snapshot import Snapshot
diff --git a/openai/api_resources/answer.py b/openai/api_resources/answer.py
new file mode 100644
index 0000000000..8dd3e84d23
--- /dev/null
+++ b/openai/api_resources/answer.py
@@ -0,0 +1,14 @@
+from openai.openai_object import OpenAIObject
+
+
+class Answer(OpenAIObject):
+    api_prefix = "v1"
+
+    @classmethod
+    def get_url(self, base):
+        return "/%s/%s" % (self.api_prefix, base)
+
+    @classmethod
+    def create(cls, **params):
+        instance = cls()
+        return instance.request("post", cls.get_url("answers"), params)
diff --git a/openai/api_resources/classification.py b/openai/api_resources/classification.py
new file mode 100644
index 0000000000..b659164e5a
--- /dev/null
+++ b/openai/api_resources/classification.py
@@ -0,0 +1,14 @@
+from openai.openai_object import OpenAIObject
+
+
+class Classification(OpenAIObject):
+    api_prefix = "v1"
+
+    @classmethod
+    def get_url(self, base):
+        return "/%s/%s" % (self.api_prefix, base)
+
+    @classmethod
+    def create(cls, **params):
+        instance = cls()
+        return instance.request("post", cls.get_url("classifications"), params)
diff --git a/openai/api_resources/file.py b/openai/api_resources/file.py
index 3411a4a3bb..dcdb9b94df 100644
--- a/openai/api_resources/file.py
+++ b/openai/api_resources/file.py
@@ -7,8 +7,6 @@
 import openai
 from openai import api_requestor, util
 from openai.api_resources.abstract import (
-    APIResource,
-    CreateableAPIResource,
     DeletableAPIResource,
     ListableAPIResource,
     UpdateableAPIResource,
@@ -16,7 +14,7 @@
 from openai.util import log_info
 
 
-class File(ListableAPIResource):
+class File(ListableAPIResource, DeletableAPIResource):
     OBJECT_NAME = "file"
 
     @classmethod
diff --git a/openai/api_resources/higherlevel.py b/openai/api_resources/higherlevel.py
deleted file mode 100644
index da14918670..0000000000
--- a/openai/api_resources/higherlevel.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from openai.api_resources.abstract.engine_api_resource import EngineAPIResource
-
-
-class HigherLevel(EngineAPIResource):
-    api_prefix = "higherlevel"
-
-    def get_url(self, base):
-        return "/%s/%s" % (self.api_prefix, base)
-
-    def classification(self, **params):
-        return self.request("post", self.get_url("classifications"), params)
-
-    def answer(self, **params):
-        return self.request("post", self.get_url("answers"), params)
-
-    def retriever_file_set_search(self, **params):
-        return self.request("post", self.get_url("retriever_file_set_search"), params)
diff --git a/openai/cli.py b/openai/cli.py
index ba29fe6bc1..2c2429dc62 100644
--- a/openai/cli.py
+++ b/openai/cli.py
@@ -76,7 +76,7 @@ def generate(cls, args):
             top_p=args.top_p,
             logprobs=args.logprobs,
             stop=args.stop,
-            **kwargs
+            **kwargs,
         )
         if not args.stream:
             resp = [resp]
@@ -94,17 +94,34 @@ def generate(cls, args):
     @classmethod
     def search(cls, args):
         # Will soon be deprecated and replaced by a Search.create
-        resp = openai.Engine(id=args.id).search(
-            documents=args.documents, query=args.query
-        )
+        params = {
+            "query": args.query,
+            "max_rerank": args.max_rerank,
+            "return_metadata": args.return_metadata,
+        }
+        if args.documents:
+            params["documents"] = args.documents
+        if args.file:
+            params["file"] = args.file
+
+        resp = openai.Engine(id=args.id).search(**params)
         scores = [
             (search_result["score"], search_result["document"])
             for search_result in resp["data"]
         ]
         scores.sort(reverse=True)
+        dataset = (
+            args.documents if args.documents else [x["text"] for x in resp["data"]]
+        )
         for score, document_idx in scores:
             print("=== score {:.3f} ===".format(score))
-            print(args.documents[document_idx])
+            print(dataset[document_idx])
+            if (
+                args.return_metadata
+                and args.file
+                and "metadata" in resp["data"][document_idx]
+            ):
+                print(f"METADATA: {resp['data'][document_idx]['metadata']}")
 
     @classmethod
     def list(cls, args):
@@ -195,6 +212,31 @@ def list(cls, args):
         print(tags)
 
 
+class File:
+    @classmethod
+    def create(cls, args):
+        resp = openai.File.create(
+            file=open(args.file),
+            purpose=args.purpose,
+        )
+        print(resp)
+
+    @classmethod
+    def get(cls, args):
+        resp = openai.File.retrieve(id=args.id)
+        print(resp)
+
+    @classmethod
+    def delete(cls, args):
+        file = openai.File(id=args.id).delete()
+        print(file)
+
+    @classmethod
+    def list(cls, args):
+        file = openai.File.list()
+        print(file)
+
+
 class FineTuneCLI:
     @classmethod
     def list(cls, args):
@@ -311,8 +353,26 @@ def help(args):
         "-d",
         "--documents",
         action="append",
-        help="List of documents to search over",
-        required=True,
+        help="List of documents to search over. Only one of `documents` or `file` may be supplied.",
+        required=False,
+    )
+    sub.add_argument(
+        "-f",
+        "--file",
+        help="A file id to search over. Only one of `documents` or `file` may be supplied.",
+        required=False,
+    )
+    sub.add_argument(
+        "--max_rerank",
+        help="The maximum number of documents to be re-ranked and returned by search. This flag only takes effect when `file` is set.",
+        type=int,
+        default=200,
+    )
+    sub.add_argument(
+        "--return_metadata",
+        help="A special boolean flag for showing metadata. If set `true`, each document entry in the returned json will contain a 'metadata' field. Default to be `false`. This flag only takes effect when `file` is set.",
+        type=bool,
+        default=False,
     )
     sub.add_argument("-q", "--query", required=True, help="Search query")
     sub.set_defaults(func=Engine.search)
@@ -424,7 +484,35 @@ def help(args):
     sub = subparsers.add_parser("tags.list")
     sub.set_defaults(func=Tag.list)
 
-    # /fine-tunes API
+    # Files
+    sub = subparsers.add_parser("files.create")
+
+    sub.add_argument(
+        "-f",
+        "--file",
+        required=True,
+        help="File to upload",
+    )
+    sub.add_argument(
+        "-p",
+        "--purpose",
+        help="Why are you uploading this file? (see https://beta.openai.com/docs/api-reference/ for purposes)",
+        required=True,
+    )
+    sub.set_defaults(func=File.create)
+
+    sub = subparsers.add_parser("files.get")
+    sub.add_argument("-i", "--id", required=True, help="The files ID")
+    sub.set_defaults(func=File.get)
+
+    sub = subparsers.add_parser("files.delete")
+    sub.add_argument("-i", "--id", required=True, help="The files ID")
+    sub.set_defaults(func=File.delete)
+
+    sub = subparsers.add_parser("files.list")
+    sub.set_defaults(func=File.list)
+
+    # Finetune
     sub = subparsers.add_parser("fine_tunes.list")
     sub.set_defaults(func=FineTuneCLI.list)
 
diff --git a/openai/multipart_data_generator.py b/openai/multipart_data_generator.py
index 8b29a2b551..93a683ee7b 100644
--- a/openai/multipart_data_generator.py
+++ b/openai/multipart_data_generator.py
@@ -4,6 +4,7 @@
 import io
 
 import openai
+import re
 
 
 class MultipartDataGenerator(object):
@@ -13,11 +14,19 @@ def __init__(self, chunk_size=1028):
         self.boundary = self._initialize_boundary()
         self.chunk_size = chunk_size
 
+    def _remove_array_element(self, input_string):
+        match = re.match(r"^(.*)\[.*\]$", input_string)
+        return match[1] if match else input_string
+
     def add_params(self, params):
         # Flatten parameters first
         params = dict(openai.api_requestor._api_encode(params))
 
         for key, value in openai.six.iteritems(params):
+
+            # strip array elements if present from key
+            key = self._remove_array_element(key)
+
             if value is None:
                 continue
 
diff --git a/openai/tests/test_endpoints.py b/openai/tests/test_endpoints.py
new file mode 100644
index 0000000000..25cc1cf764
--- /dev/null
+++ b/openai/tests/test_endpoints.py
@@ -0,0 +1,26 @@
+import openai
+import io
+import json
+import uuid
+
+### FILE TESTS
+def test_file_upload():
+    result = openai.File.create(
+        file=io.StringIO(json.dumps({"text": "test file data"})),
+        purpose="search",
+    )
+    assert result.purpose == "search"
+    assert "id" in result
+
+
+### COMPLETION TESTS
+def test_completions():
+    result = openai.Completion.create(prompt="This was a test", n=5, engine="davinci")
+    assert len(result.choices) == 5
+
+
+def test_completions_multiple_prompts():
+    result = openai.Completion.create(
+        prompt=["This was a test", "This was another test"], n=5, engine="davinci"
+    )
+    assert len(result.choices) == 10
diff --git a/openai/version.py b/openai/version.py
index 6cc293ea98..5a2867da19 100644
--- a/openai/version.py
+++ b/openai/version.py
@@ -1 +1 @@
-VERSION = "0.4.0"
+VERSION = "0.6.0"
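For reference, a minimal sketch of how the code touched by this diff could be exercised from Python. The File.create, Engine.search, and Answer.create calls themselves come straight from the patch (the new test, the updated CLI search command, and the new Answer resource); the API key value and the specific keyword arguments passed to Answer.create (model, question, examples, etc.) are illustrative assumptions that this patch does not define.

    import io
    import json

    import openai

    openai.api_key = "sk-..."  # assumption: supply your own API key

    # Upload a small search file; `file` and `purpose` mirror the new test and CLI code.
    uploaded = openai.File.create(
        file=io.StringIO(json.dumps({"text": "puppy A is happy"})),
        purpose="search",
    )

    # Search against the uploaded file; query/file/max_rerank/return_metadata
    # mirror the parameters the updated `api engines.search` CLI command passes through.
    results = openai.Engine(id="davinci").search(
        query="happy",
        file=uploaded["id"],
        max_rerank=5,
        return_metadata=True,
    )

    # Answer.create simply POSTs its params to /v1/answers (Classification.create is
    # analogous for /v1/classifications); the keyword arguments below are assumptions.
    answer = openai.Answer.create(
        model="curie",
        question="Which puppy is happy?",
        file=uploaded["id"],
        examples=[["What is 2+2?", "4"]],
        examples_context="2+2=4",
        max_tokens=5,
    )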