@@ -6,9 +6,14 @@ import numpy as np
 from tensorflow.keras.models import load_model
 from tkinter import Tk, Canvas, Button, Label, LEFT, RIGHT, NW
 from PIL import Image, ImageTk
+import os
+
+# Set this environment variable to disable oneDNN custom operations
+os.environ['TF_ENABLE_ONEDNN_OPTS'] = '0'
 
 warnings.filterwarnings("ignore", category=UserWarning, message='SymbolDatabase.GetPrototype() is deprecated')
 
+# Initialize global variables
 hands = None
 mp_draw = mp.solutions.drawing_utils
 cap = None
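A note on the new TF_ENABLE_ONEDNN_OPTS lines: TensorFlow reads this variable when the library is first imported, so setting it after the tensorflow.keras import above has already run will not suppress the oneDNN notice. A minimal sketch of the ordering that does take effect (the surrounding module layout is assumed, not part of this patch):

import os
os.environ['TF_ENABLE_ONEDNN_OPTS'] = '0'   # must be set before TensorFlow is imported

from tensorflow.keras.models import load_model   # the oneDNN setting is read here, at import time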
@@ -16,13 +21,13 @@ keep_running = False
 paused = False
 popup_open = False  # Marks whether a popup window is currently open
 
+# Model path and loading
 model_path = 'D:/hand/hand_gesture_model.h5'
 model = load_model(model_path)
 
 gesture_classes = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09']
 
-def start_recognition(callback=None):
+def start_recognition(root, callback=None):
     global keep_running, cap, hands
     if cap is None or not cap.isOpened():
         cap = cv2.VideoCapture(0)
@@ -31,10 +36,9 @@ def start_recognition(callback=None):
                                      model_complexity=1, min_detection_confidence=0.5,
                                      min_tracking_confidence=0.5)
     keep_running = True
-    threading.Thread(target=run_recognition, args=(callback,)).start()
+    threading.Thread(target=run_recognition, args=(root, callback)).start()
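The hunk above shows only the trailing arguments of the MediaPipe Hands constructor. For orientation, here is a minimal sketch of the full call these lines presumably complete; static_image_mode and max_num_hands are assumptions (the ten-finger totals later in the patch suggest tracking up to two hands), not lines taken from this diff:

hands = mp.solutions.hands.Hands(static_image_mode=False,   # assumed: continuous video, not still images
                                 max_num_hands=2,            # assumed: allows two-hand finger totals
                                 model_complexity=1, min_detection_confidence=0.5,
                                 min_tracking_confidence=0.5)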
-def run_recognition(callback=None):
+def run_recognition(root, callback=None):
     global keep_running, paused
     while keep_running and cap.isOpened():
@@ -51,22 +55,18 @@ def run_recognition(callback=None):
         if results.multi_hand_landmarks:
             for handLms in results.multi_hand_landmarks:
-                mp_draw.draw_landmarks(img, handLms, mp.solutions.hands.HAND_CONNECTIONS)
+                mp_draw.draw_landmarks(img_rgb, handLms, mp.solutions.hands.HAND_CONNECTIONS)
                 gesture, raised_fingers = detect_gesture_and_fingers(handLms)
                 total_raised_fingers += raised_fingers
-                if gesture == "OK":
-                    handle_ok_gesture()
         if total_raised_fingers > 0:
             handle_finger_detection(total_raised_fingers)
-        cv2.putText(img, f'Total Raised Fingers: {total_raised_fingers}', (10, 30),
-                    cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2, cv2.LINE_AA, )
+        cv2.putText(img_rgb, f'Total Raised Fingers: {total_raised_fingers}', (10, 30),
+                    cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2, cv2.LINE_AA)
         if callback:
-            callback(img)
+            root.after(0, callback, img_rgb)
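Why callback(img) becomes root.after(0, callback, img_rgb): Tkinter widgets should only be touched from the thread that runs mainloop(), and run_recognition executes on a worker thread, so each processed frame is scheduled back onto the Tk main loop rather than drawn directly. A self-contained sketch of the same pattern; the label and worker below are illustrative, not part of this patch:

import threading
import tkinter as tk

def worker(root, label):
    # Heavy work happens off the Tk thread; only the after() call touches the UI.
    total = sum(range(5_000_000))
    root.after(0, lambda: label.config(text=f"done: {total}"))

root = tk.Tk()
label = tk.Label(root, text="working...")
label.pack()
threading.Thread(target=worker, args=(root, label), daemon=True).start()
root.mainloop()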
 def stop_recognition():
     global keep_running, cap
@@ -76,31 +76,24 @@ def stop_recognition():
     cap = None
     cv2.destroyAllWindows()
 
 def release_camera():
     global cap
     if cap is not None and cap.isOpened():
         cap.release()
         cap = None
 
 def detect_gesture_and_fingers(hand_landmarks):
     gesture_image = get_hand_image(hand_landmarks)
     gesture = predict_gesture(gesture_image)
     raised_fingers = count_raised_fingers(hand_landmarks)
-    if is_ok_gesture(hand_landmarks):
-        gesture = "OK"
     return gesture, raised_fingers
 def get_hand_image(hand_landmarks):
     img = np.zeros((150, 150, 3), dtype=np.uint8)
     return img
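As written, get_hand_image returns an all-black 150x150 canvas, so predict_gesture always classifies the same blank input. A hypothetical completion that rasterizes the detected landmarks onto that canvas before classification is sketched below; none of it is in the patch, and the coordinate scaling is an assumption:

def get_hand_image(hand_landmarks):
    # Hypothetical: draw the hand skeleton into the 150x150 model input.
    img = np.zeros((150, 150, 3), dtype=np.uint8)
    points = [(int(lm.x * 149), int(lm.y * 149)) for lm in hand_landmarks.landmark]
    for start, end in mp.solutions.hands.HAND_CONNECTIONS:
        cv2.line(img, points[start], points[end], (255, 255, 255), 2)
    for x, y in points:
        cv2.circle(img, (x, y), 3, (0, 255, 0), -1)
    return img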
 def predict_gesture(img):
     img = cv2.resize(img, (150, 150))
     img_array = np.expand_dims(img, axis=0) / 255.0
@@ -108,7 +101,6 @@ def predict_gesture(img):
     predicted_class = gesture_classes[np.argmax(predictions)]
     return predicted_class
 def count_raised_fingers(hand_landmarks):
     fingers_status = [0, 0, 0, 0, 0]
@@ -135,7 +127,6 @@ def count_raised_fingers(hand_landmarks):
     return sum(fingers_status)
 
 def calculate_angle(point1, point2, point3):
     angle = np.arctan2(point3.y - point2.y, point3.x - point2.x) - np.arctan2(point1.y - point2.y, point1.x - point2.x)
     angle = np.abs(angle)
@@ -143,100 +134,17 @@ def calculate_angle(point1, point2, point3):
         angle = 2 * np.pi - angle
     return angle * 180 / np.pi
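The middle of count_raised_fingers falls outside these hunks; only the status list and the final sum are visible. For orientation, a common MediaPipe heuristic for the missing part is sketched below (a finger counts as raised when its tip landmark lies above its PIP joint, with a horizontal test for the thumb); this is an assumed reconstruction, not the code in the file:

def count_raised_fingers(hand_landmarks):
    # Assumed heuristic: tip above the PIP joint means the finger is raised (image y grows downward).
    lm = hand_landmarks.landmark
    H = mp.solutions.hands.HandLandmark
    fingers_status = [0, 0, 0, 0, 0]
    fingers_status[0] = int(lm[H.THUMB_TIP].x < lm[H.THUMB_IP].x)   # thumb: horizontal test, right hand assumed
    fingers_status[1] = int(lm[H.INDEX_FINGER_TIP].y < lm[H.INDEX_FINGER_PIP].y)
    fingers_status[2] = int(lm[H.MIDDLE_FINGER_TIP].y < lm[H.MIDDLE_FINGER_PIP].y)
    fingers_status[3] = int(lm[H.RING_FINGER_TIP].y < lm[H.RING_FINGER_PIP].y)
    fingers_status[4] = int(lm[H.PINKY_TIP].y < lm[H.PINKY_PIP].y)
    return sum(fingers_status)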
-def is_ok_gesture(hand_landmarks):
-    thumb_tip = hand_landmarks.landmark[mp.solutions.hands.HandLandmark.THUMB_TIP]
-    index_tip = hand_landmarks.landmark[mp.solutions.hands.HandLandmark.INDEX_FINGER_TIP]
-    distance = np.linalg.norm(np.array([thumb_tip.x, thumb_tip.y]) - np.array([index_tip.x, index_tip.y]))
-
-    # Check whether the other fingers are bent
-    middle_tip = hand_landmarks.landmark[mp.solutions.hands.HandLandmark.MIDDLE_FINGER_TIP]
-    ring_tip = hand_landmarks.landmark[mp.solutions.hands.HandLandmark.RING_FINGER_TIP]
-    pinky_tip = hand_landmarks.landmark[mp.solutions.hands.HandLandmark.PINKY_TIP]
-    middle_pip = hand_landmarks.landmark[mp.solutions.hands.HandLandmark.MIDDLE_FINGER_PIP]
-    ring_pip = hand_landmarks.landmark[mp.solutions.hands.HandLandmark.RING_FINGER_PIP]
-    pinky_pip = hand_landmarks.landmark[mp.solutions.hands.HandLandmark.PINKY_FINGER_PIP]
-
-    middle_finger_bent = middle_tip.y > middle_pip.y
-    ring_finger_bent = ring_tip.y > ring_pip.y
-    pinky_finger_bent = pinky_tip.y > pinky_pip.y
-
-    return distance < 0.05 and middle_finger_bent and ring_finger_bent and pinky_finger_bent  # Adjust this threshold to suit your setup
-def handle_ok_gesture():
-    global paused, popup_open
-    if not popup_open:
-        paused = True
-        popup_open = True
-        show_ok_window()
-
-def show_ok_window():
-    def on_continue():
-        global paused, popup_open
-        paused = False
-        popup_open = False  # Reset the flag once the popup is closed
-        ok_window.destroy()
-        start_recognition(show_frame)
-
-    ok_window = Tk()
-    ok_window.title("手势检测")
-    label = Label(ok_window, text="检测到OK手势", font=('Helvetica', 24, 'bold'))
-    label.pack(pady=20)
-    continue_button = Button(ok_window, text="继续识别", command=on_continue)
-    continue_button.pack(pady=10)
-    ok_window.protocol("WM_DELETE_WINDOW", on_continue)
-    ok_window.mainloop()
 def handle_finger_detection(finger_count):
     global paused, popup_open
-    if not popup_open:  # Only handle finger detection and show a popup when no popup window is already open
-        if finger_count == 1:
-            paused = True
-            popup_open = True
-            show_finger_window("您竖起了一根手指")
-        elif finger_count == 2:
-            paused = True
-            popup_open = True
-            show_finger_window("您竖起了两根手指")
-        elif finger_count == 3:
-            paused = True
-            popup_open = True
-            show_finger_window("您竖起了三根手指")
-        elif finger_count == 4:
-            paused = True
-            popup_open = True
-            show_finger_window("您竖起了四根手指")
-        elif finger_count == 5:
+    if not popup_open:
+        if finger_count == 5:
             paused = True
             popup_open = True
             show_stop_recognition_window()
-        elif finger_count == 6:
-            paused = True
-            popup_open = True
-            show_finger_window("您竖起了六根手指")
-        elif finger_count == 7:
-            paused = True
-            popup_open = True
-            show_finger_window("您竖起了七根手指")
-        elif finger_count == 8:
-            paused = True
-            popup_open = True
-            show_finger_window("您竖起了八根手指")
-        elif finger_count == 9:
-            paused = True
-            popup_open = True
-            show_finger_window("您竖起了九根手指")
-        elif finger_count == 10:
-            paused = True
-            popup_open = True
-            show_finger_window("您竖起了十根手指")
+        # if finger_count == 1:
+        #     paused = True
+        #     popup_open = True
+        #     show_stop_recognition_window()
 def show_finger_window(message):
     def on_continue():
@@ -258,7 +166,6 @@ def show_finger_window(message):
     finger_window.protocol("WM_DELETE_WINDOW", on_continue)
     finger_window.mainloop()
 
 def show_stop_recognition_window():
     def on_continue():
         global paused, popup_open
@@ -288,13 +195,11 @@ def show_stop_recognition_window():
     stop_window.protocol("WM_DELETE_WINDOW", on_continue)
     stop_window.mainloop()
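Both popup builders are only partially visible in these hunks. By analogy with the removed show_ok_window above and the protocol/mainloop lines kept here, show_finger_window presumably builds a small Tk window along these lines; this is an assumed reconstruction, not code from the file:

def show_finger_window(message):
    def on_continue():
        global paused, popup_open
        paused = False
        popup_open = False
        finger_window.destroy()

    finger_window = Tk()
    finger_window.title("手势检测")
    label = Label(finger_window, text=message, font=('Helvetica', 24, 'bold'))
    label.pack(pady=20)
    continue_button = Button(finger_window, text="继续识别", command=on_continue)
    continue_button.pack(pady=10)
    finger_window.protocol("WM_DELETE_WINDOW", on_continue)
    finger_window.mainloop()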
 def show_frame(img=None):
-    global paused
+    global paused, canvas
     if keep_running and cap.isOpened():
         if img is not None:
-            frame = img
-            frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
+            frame_rgb = img  # frames handed over via root.after are already RGB
         else:
             ret, frame = cap.read()
             if not ret:
@@ -314,7 +219,6 @@ def show_frame(img=None):
         root.update_idletasks()
         root.update()
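The lines that actually paint frame_rgb onto the Tk canvas also fall outside these hunks, but the imports (Image, ImageTk, Canvas, NW) and the new global canvas point to the usual pattern: wrap the RGB array in a PhotoImage and keep a reference so it is not garbage-collected. A minimal, self-contained sketch under those assumptions:

import numpy as np
from tkinter import Tk, Canvas, NW
from PIL import Image, ImageTk

root = Tk()
canvas = Canvas(root, width=640, height=480)
canvas.pack()

frame_rgb = np.zeros((480, 640, 3), dtype=np.uint8)      # stand-in for a processed camera frame
photo = ImageTk.PhotoImage(image=Image.fromarray(frame_rgb))
canvas.create_image(0, 0, image=photo, anchor=NW)        # paint at the top-left corner
canvas.image = photo                                     # keep a reference so Tk does not drop it
root.mainloop()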
 if __name__ == "__main__":
     root = Tk()
     root.title("手势识别")