from pathlib import Path
from typing import Optional

from promptflow.core import OpenAIModelConfiguration, Prompty
# Resolve paths relative to this file so the flow works regardless of the
# current working directory.
BASE_DIR = Path(__file__).absolute().parent


class ChatFlow:
    def __init__(self, model_config: OpenAIModelConfiguration, max_total_token=4096):
        self.model_config = model_config
        # Token budget for the conversation (prompt + completion).
        self.max_total_token = max_total_token

    def __call__(
        self,
        question: str = "What's Azure Machine Learning?",
        chat_history: Optional[list] = None,
    ) -> str:
        """Flow entry function."""
        # Avoid the mutable-default-argument pitfall; treat a missing history as empty.
        chat_history = chat_history or []

        # Load the prompt template that sits next to this file and bind the
        # model configuration supplied at construction time.
        prompty = Prompty.load(
            source=BASE_DIR / "chat.prompty",
            model={"configuration": self.model_config},
        )

        # Render the template with the inputs and return the model's answer.
        output = prompty(question=question, chat_history=chat_history)
        return output
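

# --- Usage sketch (illustrative only) ---------------------------------------
# A minimal way to run the flow locally. It assumes a Prompt flow connection
# named "open_ai_connection" and a model named "gpt-3.5-turbo" are available;
# both names are placeholders, substitute your own.
if __name__ == "__main__":
    config = OpenAIModelConfiguration(
        connection="open_ai_connection",  # assumed connection name
        model="gpt-3.5-turbo",            # assumed model name
    )
    flow = ChatFlow(model_config=config)
    print(flow(question="What's Azure Machine Learning?", chat_history=[]))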