Update app.py
app.py
CHANGED
@@ -457,7 +457,8 @@ async def chatbot_response(request: Request, background_tasks: BackgroundTasks):
     if selected_dish:
         state = ConversationState()
         state.flow = "order"
-
+        # **** FIX: Set step to 2 since the dish is already selected ****
+        state.step = 2
         state.data["dish"] = selected_dish
         state.update_last_active()
         user_state[user_id] = state
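Note: the fix advances the conversation to step 2 so the order flow does not re-ask for a dish the user has already picked. As a rough illustration only (the actual step handler is not part of this diff, and the step numbering and handler name below are assumptions), a step-driven order flow typically branches like this:

# Hypothetical sketch -- not code from app.py; step meanings are assumed.
def handle_order_flow(state, user_message: str) -> str:
    if state.step == 1:
        # Step 1 would normally ask for the dish, then advance.
        state.data["dish"] = user_message
        state.step = 2
        return "How many portions would you like?"
    if state.step == 2:
        # Dish is already known (asked above or pre-selected), so collect the quantity.
        state.data["quantity"] = user_message
        state.step = 3
        return f"Confirm your order of {state.data['quantity']} x {state.data['dish']}?"
    return "Let's start your order. What would you like to eat?"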
@@ -496,7 +497,6 @@ async def chatbot_response(request: Request, background_tasks: BackgroundTasks):
         return JSONResponse(content={"response": sentiment_modifier + response_text})
 
     # --- Fallback: LLM Response Streaming with Conversation Context ---
-    # Build recent conversation history (e.g., last 5 messages)
     recent_context = conversation_context.get(user_id, [])[-5:]
     context_str = "\n".join([f"{entry['role'].capitalize()}: {entry['message']}" for entry in recent_context])
     prompt = f"Conversation context:\n{context_str}\nUser query: {user_message}\nGenerate a helpful, personalized response for a restaurant chatbot."
@@ -505,15 +505,13 @@ async def chatbot_response(request: Request, background_tasks: BackgroundTasks):
             yield chunk
         fallback_log = f"LLM fallback response for prompt: {prompt}"
         background_tasks.add_task(log_chat_to_db, user_id, "outbound", fallback_log)
-        # (Optionally, you might want to capture and add the streamed response to conversation_context)
     return StreamingResponse(stream_response(), media_type="text/plain")
 
-
+# --- Other Endpoints (Chat History, Order Details, User Profile, Analytics, Voice, Payment Callback) ---
+# ... (Implement other endpoints as needed) ...
+
 @app.get("/chat_history/{user_id}")
 async def get_chat_history(user_id: str):
-    """
-    Retrieve chat history for a user.
-    """
     async with async_session() as session:
         result = await session.execute(
             ChatHistory.__table__.select().where(ChatHistory.user_id == user_id)
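Note: the comment removed at old line 508 suggested capturing the streamed fallback and adding it to conversation_context. One way to do that is to accumulate chunks inside the generator and record the joined reply once streaming finishes. This is a sketch only; generate_llm_stream is a placeholder for whatever async chunk source app.py actually uses, and it reuses prompt, user_id, conversation_context, background_tasks, and log_chat_to_db from the surrounding handler:

async def stream_response():
    collected = []
    async for chunk in generate_llm_stream(prompt):  # placeholder generator name
        collected.append(chunk)
        yield chunk
    full_reply = "".join(collected)
    # Record the full assistant reply so later turns see it as context.
    conversation_context.setdefault(user_id, []).append(
        {"role": "assistant", "message": full_reply}
    )
    background_tasks.add_task(log_chat_to_db, user_id, "outbound", full_reply)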
@@ -521,12 +519,8 @@ async def get_chat_history(user_id: str):
         history = result.fetchall()
     return [dict(row) for row in history]
 
-# --- Order Details Endpoint ---
 @app.get("/order/{order_id}")
 async def get_order(order_id: str):
-    """
-    Retrieve details for a specific order.
-    """
     async with async_session() as session:
         result = await session.execute(
             Order.__table__.select().where(Order.order_id == order_id)
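Note: return [dict(row) for row in history] relies on the older row-proxy behaviour; on SQLAlchemy 1.4+/2.0 a Row behaves like a named tuple and the documented way to get a mapping is row._mapping. A minimal sketch, assuming SQLAlchemy 1.4+ and reusing async_session and ChatHistory from app.py:

# Sketch assuming SQLAlchemy 1.4+: Row exposes its columns via ._mapping.
async def fetch_chat_history(user_id: str):
    async with async_session() as session:
        result = await session.execute(
            ChatHistory.__table__.select().where(ChatHistory.user_id == user_id)
        )
        return [dict(row._mapping) for row in result.fetchall()]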
@@ -537,12 +531,8 @@ async def get_order(order_id: str):
     else:
         raise HTTPException(status_code=404, detail="Order not found.")
 
-# --- User Profile Endpoint ---
 @app.get("/user_profile/{user_id}")
 async def get_user_profile(user_id: str):
-    """
-    Retrieve the user profile.
-    """
     profile = await get_or_create_user_profile(user_id)
     return {
         "user_id": profile.user_id,
@@ -553,54 +543,30 @@ async def get_user_profile(user_id: str):
         "last_interaction": profile.last_interaction.isoformat()
     }
 
-# --- Analytics Endpoint ---
 @app.get("/analytics")
 async def get_analytics():
-    """
-    Simple analytics dashboard endpoint.
-    Returns counts of messages, orders, and average sentiment.
-    """
     async with async_session() as session:
-        # Total messages count
         msg_result = await session.execute(ChatHistory.__table__.count())
         total_messages = msg_result.scalar() or 0
-
-        # Total orders count
         order_result = await session.execute(Order.__table__.count())
         total_orders = order_result.scalar() or 0
-
-        # Average sentiment score
         sentiment_result = await session.execute("SELECT AVG(sentiment_score) FROM sentiment_logs")
         avg_sentiment = sentiment_result.scalar() or 0
-
     return {
         "total_messages": total_messages,
         "total_orders": total_orders,
         "average_sentiment": avg_sentiment
     }
 
-# --- Voice Integration Endpoint ---
 @app.post("/voice")
 async def process_voice(file: UploadFile = File(...)):
-    """
-    Accept a voice file upload, perform speech-to-text (simulated), and process the resulting text.
-    In production, integrate with a real STT service.
-    """
     contents = await file.read()
-    # Simulated Speech-to-Text: In real implementation, send `contents` to an STT service.
    simulated_text = "Simulated speech-to-text conversion result."
     return {"transcription": simulated_text}
 
-# --- Payment Callback Endpoint (Stub) ---
 @app.post("/payment_callback")
 async def payment_callback(request: Request):
-    """
-    Endpoint to handle payment callbacks from Paystack.
-    Update order status based on callback data.
-    """
     data = await request.json()
-    # Extract order reference and update order status accordingly.
-    # In production, verify callback signature and extract data.
     order_id = data.get("reference")
     new_status = data.get("status", "Paid")
     async with async_session() as session:
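Note: the analytics queries use ChatHistory.__table__.count(), a legacy Table.count() helper that newer SQLAlchemy releases (1.4+) no longer provide, and session.execute also expects raw SQL to be wrapped in text() there. A hedged sketch of the same counts with current constructs, reusing async_session, ChatHistory, Order, and the sentiment_logs table named in the original query:

# Sketch assuming SQLAlchemy 1.4+.
from sqlalchemy import func, select, text

async def get_analytics_counts():
    async with async_session() as session:
        total_messages = (
            await session.execute(select(func.count()).select_from(ChatHistory.__table__))
        ).scalar() or 0
        total_orders = (
            await session.execute(select(func.count()).select_from(Order.__table__))
        ).scalar() or 0
        avg_sentiment = (
            await session.execute(text("SELECT AVG(sentiment_score) FROM sentiment_logs"))
        ).scalar() or 0
    return total_messages, total_orders, avg_sentiment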
@@ -617,4 +583,4 @@ async def payment_callback(request: Request):
 
 if __name__ == "__main__":
     import uvicorn
-    uvicorn.run(app, host="0.0.0.0", port=8000)
+    uvicorn.run(app, host="0.0.0.0", port=8000)
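Note: the comment removed from payment_callback said to verify the callback signature in production. Paystack signs webhook payloads with an HMAC-SHA512 of the raw request body using the account secret key and sends it in the x-paystack-signature header. A minimal sketch of that check; the PAYSTACK_SECRET_KEY environment variable and the helper name are assumptions, not code from app.py:

import hashlib
import hmac
import os

from fastapi import HTTPException, Request

PAYSTACK_SECRET_KEY = os.environ.get("PAYSTACK_SECRET_KEY", "")  # assumed config source

async def verify_paystack_signature(request: Request) -> bytes:
    # Compare the HMAC-SHA512 of the raw body against the x-paystack-signature header.
    body = await request.body()
    expected = hmac.new(PAYSTACK_SECRET_KEY.encode(), body, hashlib.sha512).hexdigest()
    received = request.headers.get("x-paystack-signature", "")
    if not hmac.compare_digest(expected, received):
        raise HTTPException(status_code=401, detail="Invalid Paystack signature.")
    return body

payment_callback could call this helper before reading request.json(); the body is cached by Starlette, so reading it twice is safe.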