Skip to content

Prompt support for Inference SDK #37917

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 24 commits into from
Nov 5, 2024
Merged
Show file tree
Hide file tree
Changes from 10 commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
d7f8a46
Prompty support within Azure AI Inference SDK
YusakuNo1 Oct 16, 2024
1e25075
Fix unit test
YusakuNo1 Oct 16, 2024
ffeaab8
Address PR feedback with copyright, merge PromptConfig to PromptTemplate
YusakuNo1 Oct 18, 2024
44d2f2c
Add comment and set model_name as optional
YusakuNo1 Oct 18, 2024
2d1d132
Bug fixes
YusakuNo1 Oct 22, 2024
9f7b679
Updated parameter names from PM feedbacks
YusakuNo1 Oct 22, 2024
b4f2d5b
Merge branch 'main' into users/daviwu/prompty
YusakuNo1 Oct 22, 2024
b7657e5
Merge branch 'main' into users/daviwu/prompty
YusakuNo1 Oct 28, 2024
38eb258
Improve sample code and unit tests
YusakuNo1 Oct 28, 2024
aa28df4
Update readme and comments
YusakuNo1 Oct 28, 2024
9a1eb79
Rename files
YusakuNo1 Oct 28, 2024
1252b3a
Address PR comment
YusakuNo1 Oct 29, 2024
b3e8616
add Pydantic as dependency
YusakuNo1 Oct 29, 2024
c43f88e
Fix type errors
YusakuNo1 Oct 29, 2024
e9cab12
Fix spelling issues
YusakuNo1 Oct 29, 2024
24c3ced
Address PR comments and fix linter issues
YusakuNo1 Oct 29, 2024
19316b8
Fix type import for "Self"
YusakuNo1 Oct 30, 2024
ed718cb
Change to keyword-only constructor and fix linter issues
YusakuNo1 Oct 30, 2024
ebfa1f8
Rename function `from_message` to `from_str`; `render` to `create_mes…
YusakuNo1 Nov 1, 2024
25a0365
Change from `from_str` to `from_string`
YusakuNo1 Nov 1, 2024
6b8ad60
Merge branch 'main' into users/daviwu/prompty
YusakuNo1 Nov 3, 2024
a7a0bf2
Merge latest code from `microsoft/prompty` and resolve linter issues
YusakuNo1 Nov 3, 2024
4b43b46
Fix PR comment
YusakuNo1 Nov 4, 2024
633c84f
Fix PR comments
YusakuNo1 Nov 5, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from .core import InvokerFactory
from .core import Prompty

from .renderers import MustacheRenderer
from .parsers import PromptyChatParser
from .utils import load
from ._patch import patch_sdk as _patch_sdk, PromptTemplate

# Register the Mustache renderer and parser
# NOTE(review): these register on freshly constructed, immediately discarded
# InvokerFactory instances. This only takes effect if InvokerFactory keeps a
# shared (class-level or singleton) registry — confirm against .core.
InvokerFactory().register_renderer("mustache", MustacheRenderer)
InvokerFactory().register_parser("prompty.chat", PromptyChatParser)

# Public surface of the `prompts` subpackage.
__all__ = [
    "load",
    "Prompty",
    "PromptTemplate",
]

# Apply handwritten customizations on top of the generated code (see _patch.py).
_patch_sdk()
85 changes: 85 additions & 0 deletions sdk/ai/azure-ai-inference/azure/ai/inference/prompts/_patch.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
"""Customize generated code here.

Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""

from typing import Any

import azure.ai.inference.prompts as prompts

from .core import Prompty
from .mustache import render
from .utils import prepare


class PromptTemplate:
    """Helper class that accepts a prompt in a variant of input formats
    (a Prompty file or a raw template string) and renders it into a list of
    chat messages.

    :param prompty: Prompty object which contains both model config and prompt template.
    :type prompty: Prompty
    :param prompt_template: The prompt template string.
    :type prompt_template: str
    :param api: The API type, e.g. "chat" or "completion".
    :type api: str
    :param model_name: The model name, e.g. "gpt-4o-mini".
    :type model_name: str
    """

    @staticmethod
    def from_prompty(file_path: str):
        """Load a PromptTemplate from a ``.prompty`` file on disk.

        :param file_path: Path to the prompty file.
        :raises ValueError: If ``file_path`` is empty or not provided.
        """
        if not file_path:
            raise ValueError("Please provide file_path")
        prompty = prompts.load(file_path)
        return PromptTemplate(prompty=prompty)

    @staticmethod
    def from_message(
        prompt_template: str,
        api: str = "chat",
        model_name: str | None = None,
    ):
        """Create a PromptTemplate from an inline template string.

        :param prompt_template: The prompt template string (Mustache syntax).
        :param api: The API type, e.g. "chat" or "completion". Defaults to "chat".
        :param model_name: Optional model name, e.g. "gpt-4o-mini".
        """
        return PromptTemplate(api=api, prompt_template=prompt_template, model_name=model_name, prompty=None)

    def __init__(
        self,
        prompty: Prompty | None = None,
        api: str | None = None,
        prompt_template: str | None = None,
        model_name: str | None = None,
    ) -> None:
        self.prompty = prompty
        if self.prompty is not None:
            # Prefer the Azure deployment name from the prompty model config
            # as the model identifier when one is present.
            self.model_name = (
                prompty.model.configuration["azure_deployment"]
                if "azure_deployment" in prompty.model.configuration
                else None
            )
            self.parameters = prompty.model.parameters
            self._parameters = {}
        elif prompt_template is not None:
            self.model_name = model_name
            self.parameters = {}
            # _parameters is a dict to hold the internal configuration
            self._parameters = {
                "api": api if api is not None else "chat",
                "prompt_template": prompt_template,
            }
        else:
            # BUGFIX: previous message was garbled and named the old
            # "PromptConfig" class.
            raise ValueError(
                "Please provide valid arguments for PromptTemplate: either prompty or prompt_template is required"
            )

    def render(self, data: dict[str, Any] | None = None, **kwargs):
        """Render the template with the given data into a list of messages.

        :param data: Values to substitute into the template. If omitted,
            keyword arguments are used instead.
        :return: A list of message dicts (e.g. ``{"role": ..., "content": ...}``).
        """
        if data is None:
            data = kwargs

        if self.prompty is not None:
            parsed = prepare(self.prompty, data)
            return parsed
        elif "prompt_template" in self._parameters:
            # Constructor guarantees this branch is taken when no prompty
            # object was supplied.
            system_prompt = render(self._parameters["prompt_template"], data)
            return [{"role": "system", "content": system_prompt}]


def patch_sdk():
    """Do not remove from this file.

    `patch_sdk` is a last resort escape hatch that allows you to do customizations
    you can't accomplish using the techniques described in
    https://aka.ms/azsdk/python/dpcodegen/python/customize
    """
    # Intentionally a no-op: customizations in this file (PromptTemplate) are
    # exported directly; this hook exists for the code-generation workflow.
Loading