I have created a sample project using Teams toolkit to create an assistant in Teams that can avail Azure open AI service (with gpt-4o model). The default code is working fine that is I can run the emulator and send and receive messages via bot framework emulator and it is processing all messages using gpt-4o. However, I want to update the flow and I want to include functionality that for some particular messages the Azure Open AI gpt-4o is not called and a direct response is sent back to user. For any other message, Azure open AI gpt-4o should process the message and send reply back to user.
There are two python files, app.py and bot.py. I tried handling what happens based on input messages in app.py. However, I am not being able to skip Azure open AI in certain scenario without disturbing whole flow.
app.py
"""
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the MIT License.
"""
from http import HTTPStatus
from aiohttp import web
from botbuilder.core.integration import aiohttp_error_middleware
from botbuilder.schema import ChannelAccount
from bot import bot_app
import pandas as pd
from io import BytesIO
import json
from aiohttp import web, ClientRequest
import spacy
from sklearn.metrics.pairwise import cosine_similarity
import numpy as np
from rapidfuzz import fuzz, process
import re
from botbuilder.schema import Activity
import logging
routes = web.RouteTableDef()
@routes.post("/api/messages")
async def on_messages(req: web.Request) -> web.Response:
print("on_messages ************* ")
res = await bot_app.process(req)
if res is not None:
# Return the response from the bot app processing
return res
return web.Response(status=HTTPStatus.OK)
# Load the spaCy model with GloVe vectors
nlp = spacy.load("en_core_web_md")
#added below function to handle when a particular message comes, change the prompt to be sent to Azure Open AI for processing - this is working fine
async def modify_request_middleware(app, handler):
async def middleware_handler(req):
print(f"Next handler: {handler.__name__} of type {type(handler)}")
print("Middleware triggered")
if req.path == "/api/messages" and req.method == "POST":
print("Processing POST request to /api/messages")
data = await req.json()
user_input = data.get('text', "").lower()
print("User Input:", user_input)
if "hey" in user_input:
data['text'] = "Hey, give me recipe of cake which is extremely moist without eggs"
# Create a new payload with the modified data
new_payload = json.dumps(data).encode('utf-8')
req._read_bytes = new_payload
return await handler(req)
return middleware_handler
app = web.Application(middlewares=[modify_request_middleware])
app.add_routes(routes)
from config import Config
if __name__ == "__main__":
web.run_app(app, host="localhost", port=Config.PORT)
Below is code for bot.py
import os
import sys
import traceback
from botbuilder.core import MemoryStorage, TurnContext
from teams import Application, ApplicationOptions, TeamsAdapter
from teams.ai import AIOptions
from teams.ai.models import AzureOpenAIModelOptions, OpenAIModel, OpenAIModelOptions
from teams.ai.planners import ActionPlanner, ActionPlannerOptions
from teams.ai.prompts import PromptManager, PromptManagerOptions
from teams.state import TurnState
from botbuilder.core.teams import TeamsInfo
import aiohttp
from config import Config
import pandas as pd
from botbuilder.schema import Activity, ActivityTypes
config = Config()
# Create AI components
model: OpenAIModel
model = OpenAIModel(
AzureOpenAIModelOptions(
api_key=config.AZURE_OPENAI_API_KEY,
default_model=config.AZURE_OPENAI_MODEL_DEPLOYMENT_NAME,
endpoint=config.AZURE_OPENAI_ENDPOINT,
)
)
prompts = PromptManager(PromptManagerOptions(prompts_folder=f"{os.getcwd()}/prompts"))
planner = ActionPlanner(
ActionPlannerOptions(model=model, prompts=prompts, default_prompt="chat")
)
# Define storage and application
storage = MemoryStorage()
bot_app = Application[TurnState](
ApplicationOptions(
bot_app_id=config.APP_ID,
storage=storage,
adapter=TeamsAdapter(config),
ai=AIOptions(planner=planner),
)
)
@bot_app.conversation_update("membersAdded")
async def on_members_added(context: TurnContext, state: TurnState):
print("***************in on_members_added**************************")
print(f"Activity type: {context.activity.type}")
try:
await context.send_activity(f"Hello! How can I assist you today? ")
except Exception as e:
await context.send_activity("Hello! How can I assist you today? There is some error here")
print(f"Error fetching user info: {e}")
@bot_app.error
async def on_error(context: TurnContext, error: Exception):
# This check writes out errors to console log .vs. app insights.
# NOTE: In production environment, you should consider logging this to Azure
# application insights.
print(f"\n [on_turn_error] unhandled error: {error}", file=sys.stderr)
traceback.print_exc()
# Send a message to the user
await context.send_activity("The bot encountered an error or bug.")
I tried adding following function to bot.py but it didn't work. Its checking the input text correctly and then responds back to user on emulator but it doesn't go back to normal process of listening and waiting for next input. Also, it doesn't call Azure Open AI service in else part.
# Sample test for bot_app.on_turn
@bot_app.before_turn
async def test_on_turn(context: TurnContext, state: TurnState):
print("in test_on_turn", context.activity.type)
if context.activity.type == ActivityTypes.message:
user_message = context.activity.text.strip().lower()
# Simulate different responses based on the user's message
if "hey" in user_message:
await context.send_activity(f"Hey receiveddd {context.activity.type}")
else:
# how to call the handler for normal process?
else:
# Handle other types of activities
await context.send_activity(f"Hello! How can I assist you today?)