Adinarayana02 committed
Commit afcaca0 · verified · 1 Parent(s): 5f0d39d

Update app.py

Files changed (1)
  1. app.py +20 -77
app.py CHANGED
@@ -1,48 +1,9 @@
 import streamlit as st
 import cv2
-import openai
 import numpy as np
-import mediapipe as mp
-import tempfile
-import os
-
-# Set your OpenAI API key (or get it from user input)
-openai.api_key = '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'
-
-# Initialize MediaPipe Hands
-mp_hands = mp.solutions.hands
-mp_drawing = mp.solutions.drawing_utils
-
-def detect_gesture(hand_landmarks):
-    # Example gesture recognition logic
-    thumb_tip = hand_landmarks.landmark[mp_hands.HandLandmark.THUMB_TIP].y
-    index_finger_tip = hand_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_TIP].y
-    middle_finger_tip = hand_landmarks.landmark[mp_hands.HandLandmark.MIDDLE_FINGER_TIP].y
-    ring_finger_tip = hand_landmarks.landmark[mp_hands.HandLandmark.RING_FINGER_TIP].y
-    pinky_tip = hand_landmarks.landmark[mp_hands.HandLandmark.PINKY_TIP].y
-
-    # Conditions for different gestures
-    if index_finger_tip < thumb_tip and middle_finger_tip > index_finger_tip:
-        return "draw"
-    elif middle_finger_tip < index_finger_tip and ring_finger_tip > middle_finger_tip:
-        return "navigate"
-    elif thumb_tip < index_finger_tip and pinky_tip > ring_finger_tip:
-        return "reset"
-    elif pinky_tip < ring_finger_tip:
-        return "submit"
-    return None
-
-def solve_math_problem_with_openai(problem):
-    response = openai.Completion.create(
-        engine="gpt-3.5-turbo",  # Or another model of your choice
-        prompt=f"Solve this math problem: {problem}",
-        max_tokens=150
-    )
-    solution = response.choices[0].text.strip()
-    return solution
 
 def main():
-    st.title("Visual Math Solver with OpenAI")
+    st.title("Camera Access with Streamlit")
 
     # Create a placeholder for the video feed
     stframe = st.empty()
@@ -58,43 +19,25 @@ def main():
         st.error("Failed to open the camera. Please check your camera settings and permissions.")
         return
 
-    # Initialize MediaPipe hands model
-    with mp_hands.Hands(
-        model_complexity=1,
-        min_detection_confidence=0.5,
-        min_tracking_confidence=0.5) as hands:
-
-        # Create a loop to keep capturing video
-        while True:
-            ret, frame = cap.read()
-            if not ret:
-                st.error("Failed to grab frame from the camera.")
-                break
-
-            # Flip the frame horizontally for natural interaction
-            frame = cv2.flip(frame, 1)
-
-            # Convert the frame to RGB
-            frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
-
-            # Process the frame with MediaPipe hands
-            results = hands.process(frame_rgb)
-
-            # Draw hand landmarks on the frame
-            if results.multi_hand_landmarks:
-                for hand_landmarks in results.multi_hand_landmarks:
-                    mp_drawing.draw_landmarks(frame, hand_landmarks, mp_hands.HAND_CONNECTIONS)
-                    gesture = detect_gesture(hand_landmarks)
-                    if gesture:
-                        st.write(f"Detected Gesture: {gesture}")
-
-            # Display the frame in the Streamlit app
-            stframe.image(frame, channels="BGR")
-
-            # Break the loop if the stop button is pressed
-            if st.button('Stop'):
-                st.write("Stopping the camera...")
-                break
+    while True:
+        ret, frame = cap.read()
+        if not ret:
+            st.error("Failed to grab frame from the camera.")
+            break
+
+        # Flip the frame horizontally for natural interaction
+        frame = cv2.flip(frame, 1)
+
+        # Convert the frame to RGB
+        frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
+
+        # Display the frame in the Streamlit app
+        stframe.image(frame_rgb, channels="RGB")
+
+        # Break the loop if the stop button is pressed
+        if st.button('Stop'):
+            st.write("Stopping the camera...")
+            break
 
     # Release the camera resource
     cap.release()
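
Note on the new capture loop: st.button('Stop') is created inside the while loop, so a second identical button is registered on the next iteration of the same script run, which Streamlit normally rejects with a duplicate-widget error; clicking any button also triggers a full rerun rather than flipping the value inside the running loop. A commonly used alternative is to gate the loop on a checkbox created once before it, so that unchecking the box causes the next rerun to skip the loop entirely. The sketch below follows that pattern; it is a minimal standalone variant of the committed app.py, not the committed code itself, and it assumes the default camera at index 0.

import cv2
import streamlit as st

def main():
    st.title("Camera Access with Streamlit")

    # Widgets are created once per script run, outside the capture loop
    run = st.checkbox("Run camera")
    stframe = st.empty()

    cap = cv2.VideoCapture(0)  # assumes the default camera at index 0
    if not cap.isOpened():
        st.error("Failed to open the camera. Please check your camera settings and permissions.")
        return

    # While the checkbox is ticked, keep grabbing frames; unticking it
    # triggers a rerun in which this loop is never entered.
    while run:
        ret, frame = cap.read()
        if not ret:
            st.error("Failed to grab frame from the camera.")
            break

        # Mirror the image and convert BGR to RGB for display
        frame = cv2.flip(frame, 1)
        frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        stframe.image(frame_rgb, channels="RGB")

    # Release the camera resource when the loop exits
    cap.release()

if __name__ == "__main__":
    main()

As with the committed version, this is launched with: streamlit run app.py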