-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathgesture_control.py
More file actions
139 lines (104 loc) · 3.44 KB
/
gesture_control.py
File metadata and controls
139 lines (104 loc) · 3.44 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
# Reads webcam frames through OpenCV
# Detects hand landmarks using MediaPipe
# Converts gestures into keyboard actions using PyAutoGUI
# -----------------------------------------------------------------------------------------------------------------
# Controls:-
# pinky up -> move right
# index up -> move left
# all fingers up -> jump
# pinky + ring -> jump + right
# index + middle -> jump + left
# ------------------------------------------------------------------------------------------------------------------
import cv2
import pyautogui
import mediapipe as mp
# Disable PyAutoGUI's default inter-call pause so key events fire on every frame.
pyautogui.PAUSE = 0

# capturing the camera (device index 0 = default webcam)
cap = cv2.VideoCapture(0)
# Request a tiny frame to reduce per-frame processing; the driver clamps to the
# nearest supported resolution. NOTE(review): 100x80 is unusually small — confirm
# this is intentional (presumably for speed) and that MediaPipe still detects well.
cap.set(cv2.CAP_PROP_FRAME_WIDTH, 100)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 80)
# Keep at most one buffered frame so gestures act on the freshest image.
cap.set(cv2.CAP_PROP_BUFFERSIZE, 1)

# Mediapipe initialisation: one-hand tracker in video (streaming) mode.
mp_hands = mp.solutions.hands
mp_draw = mp.solutions.drawing_utils
hands = mp_hands.Hands(
    static_image_mode=False,      # treat frames as a video stream, not stills
    max_num_hands=1,              # only one hand drives the controls
    min_detection_confidence=0.7,
    min_tracking_confidence=0.7
)

# jump state: True while the space bar is currently held down
jumping = False
# ------------------ KEY ACTION FUNCTIONS ------------------
def move_left():
    """Hold the left arrow and release the right one, so the two
    horizontal directions are never pressed at the same time."""
    press, release = pyautogui.keyDown, pyautogui.keyUp
    press('left')
    release('right')
def move_right():
    """Hold the right arrow and release the left one, so the two
    horizontal directions are never pressed at the same time."""
    press, release = pyautogui.keyDown, pyautogui.keyUp
    press('right')
    release('left')
def stop_move():
    """Release both horizontal movement keys (stop all sideways motion)."""
    for key in ('left', 'right'):
        pyautogui.keyUp(key)
def start_jump():
    """Begin holding the space bar (start of a jump)."""
    pyautogui.keyDown('space')
def stop_jump():
    """Release the space bar (end of a jump)."""
    pyautogui.keyUp('space')
# ------------------ MAIN LOOP ------------------
# Main loop: read a frame, classify the finger pose, translate it into
# keyDown/keyUp events every frame. The exact call order matters — movement
# functions are called each frame (idempotent key holds), while the `jumping`
# flag ensures space is pressed/released only on state transitions.
while True:
    # reads webcam frame; `success` is False when the camera yields nothing
    success, frame = cap.read()
    if not success:
        break
    # OpenCV gives BGR, MediaPipe needs RGB
    rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
    results = hands.process(rgb)
    if results.multi_hand_landmarks:
        # max_num_hands=1, so only the first detected hand is used
        hand = results.multi_hand_landmarks[0]
        mp_draw.draw_landmarks(frame, hand, mp_hands.HAND_CONNECTIONS)
        lm = hand.landmark
        # Finger states (tip above pip = finger up). Image y grows downward,
        # so a smaller y means higher on screen. Landmark indices follow the
        # MediaPipe hand model (8/12/16/20 = tips, 6/10/14/18 = pip joints).
        # NOTE(review): assumes an upright hand facing the camera; the thumb
        # is deliberately ignored by every gesture.
        index_up = lm[8].y < lm[6].y
        middle_up = lm[12].y < lm[10].y
        ring_up = lm[16].y < lm[14].y
        pinky_up = lm[20].y < lm[18].y
        all_fingers_up = index_up and middle_up and ring_up and pinky_up
        # ---------------- COMBO GESTURES (HIGHEST PRIORITY) ----------------
        if pinky_up and ring_up and not index_up and not middle_up:
            # pinky + ring -> jump + right
            move_right()
            if not jumping:          # press space only on the rising edge
                start_jump()
                jumping = True
        elif index_up and middle_up and not ring_up and not pinky_up:
            # index + middle -> jump + left
            move_left()
            if not jumping:          # press space only on the rising edge
                start_jump()
                jumping = True
        else:
            # ---------------- JUMP ONLY ----------------
            # Four fingers up = jump; otherwise release space if it was held.
            if all_fingers_up:
                if not jumping:
                    start_jump()
                    jumping = True
            else:
                if jumping:
                    stop_jump()
                    jumping = False
            # ---------------- MOVEMENT ONLY ----------------
            # pinky alone -> right, index alone -> left, anything else stops.
            # (all_fingers_up falls through to stop_move(), so a pure jump
            # halts horizontal motion.)
            if pinky_up and not index_up:
                move_right()
            elif index_up and not pinky_up:
                move_left()
            else:
                stop_move()
    else:
        # No hand visible: release every held key so the game doesn't run away.
        stop_move()
        if jumping:
            stop_jump()
            jumping = False
    cv2.imshow("Mario Hand Control", frame)
    # ESC (key code 27) exits; waitKey also services the imshow window.
    if cv2.waitKey(1) & 0xFF == 27:
        break
# Release the camera and close the preview window on exit.
cap.release()
cv2.destroyAllWindows()