-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.py
More file actions
113 lines (85 loc) · 3.71 KB
/
main.py
File metadata and controls
113 lines (85 loc) · 3.71 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
import cv2
import mediapipe as mp
import pyautogui
import time
# MediaPipe hand-tracking setup.
mp_hands = mp.solutions.hands
mp_drawing = mp.solutions.drawing_utils
# Detection/tracking thresholds of 0.7 trade some recall for steadier landmarks.
hands = mp_hands.Hands(min_detection_confidence=0.7, min_tracking_confidence=0.7)
# Normalized landmark coordinates are mapped onto the full primary-screen resolution.
SCREEN_WIDTH, SCREEN_HEIGHT = pyautogui.size()
SMOOTHING_FACTOR = 0.2  # Fraction of remaining distance moved per frame (exponential smoothing).
DEBOUNCE_TIME = 0.2 # Time to wait to prevent multiple clicks
# Mutable module state: last smoothed cursor position and timestamp of the last
# debounced gesture action (scroll/click).
previous_x, previous_y = 0, 0
last_action_time = 0
def smooth_movement(current_x, current_y):
    """Exponentially smooth the cursor toward (current_x, current_y).

    The running position is kept in the module globals previous_x/previous_y;
    each call moves SMOOTHING_FACTOR of the remaining distance and returns the
    new integer (x, y) pair.
    """
    global previous_x, previous_y
    step_x = SMOOTHING_FACTOR * (current_x - previous_x)
    step_y = SMOOTHING_FACTOR * (current_y - previous_y)
    previous_x = int(previous_x + step_x)
    previous_y = int(previous_y + step_y)
    return previous_x, previous_y
# Function for finger detection
def are_fingers_up(landmarks):
    """Classify which fingers are raised from MediaPipe hand landmarks.

    Args:
        landmarks: Sequence of 21 hand landmarks with .x/.y attributes in
            normalized image coordinates (y grows downward).

    Returns:
        list[int]: [thumb, index, middle, ring, pinky]; 1 = up, 0 = down.
    """
    fingers = []
    # Thumb: tip (landmark 4) left of its IP joint (landmark 3) counts as up.
    # NOTE(review): this x-comparison assumes a single hand orientation in the
    # mirrored frame — verify it holds for both left and right hands.
    fingers.append(1 if landmarks[4].x < landmarks[3].x else 0)
    # Index/middle/ring/pinky tips are landmarks 8, 12, 16, 20; a finger is up
    # when its tip is above (smaller y than) the joint two landmarks below it.
    # Loop variable renamed from `id`, which shadowed the builtin.
    for tip in range(8, 21, 4):
        fingers.append(1 if landmarks[tip].y < landmarks[tip - 2].y else 0)
    return fingers
def main():
    """Run the webcam loop and map hand gestures to mouse actions.

    Gestures ([thumb, index, middle, ring, pinky], 1 = finger up):
        [0,1,0,0,0] index only   -> move cursor
        [0,1,1,0,0] index+middle -> scroll up
        [0,0,0,0,0] fist         -> scroll down
        [1,1,1,1,1] open palm    -> click
        [1,0,0,0,1] "hang loose" -> exit

    Also exits when 'q' is pressed in the preview window.
    """
    global last_action_time
    cap = cv2.VideoCapture(0)  # Opens the webcam for capturing video.
    # BUG FIX: the original `break` on the exit gesture only left the inner
    # for-loop over detected hands, so the outer while-loop kept running and
    # the exit gesture never terminated the program. A flag ends both loops.
    exit_requested = False
    try:
        while cap.isOpened() and not exit_requested:
            ret, frame = cap.read()
            if not ret:
                break
            frame = cv2.flip(frame, 1)  # Mirror the frame so motion feels natural.
            rgb_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)  # MediaPipe expects RGB.
            results = hands.process(rgb_frame)
            if results.multi_hand_landmarks:
                for hand_landmarks in results.multi_hand_landmarks:
                    mp_drawing.draw_landmarks(frame, hand_landmarks, mp_hands.HAND_CONNECTIONS)
                    fingers = are_fingers_up(hand_landmarks.landmark)
                    # 'Hang Loose' (thumb + pinky) ends the session.
                    if fingers == [1, 0, 0, 0, 1]:
                        print("Exiting... 'Hang Loose' gesture detected.")
                        exit_requested = True
                        break
                    # Map the index-finger tip from normalized [0,1] landmark
                    # coordinates to screen pixels.
                    index_finger_tip = hand_landmarks.landmark[8]
                    index_x = int(index_finger_tip.x * SCREEN_WIDTH)
                    index_y = int(index_finger_tip.y * SCREEN_HEIGHT)
                    # Smooth cursor movement
                    smoothed_x, smoothed_y = smooth_movement(index_x, index_y)
                    # Debounce so a held gesture does not fire on every frame.
                    current_time = time.time()
                    action_ready = (current_time - last_action_time) > DEBOUNCE_TIME
                    # Navigation: index finger only.
                    if fingers == [0, 1, 0, 0, 0]:
                        pyautogui.moveTo(smoothed_x, smoothed_y)
                    # Scroll Up: index + middle.
                    elif fingers == [0, 1, 1, 0, 0] and action_ready:
                        pyautogui.scroll(100)
                        last_action_time = current_time
                    # Scroll Down: fist.
                    elif fingers == [0, 0, 0, 0, 0] and action_ready:
                        pyautogui.scroll(-100)
                        last_action_time = current_time
                    # Click: open palm, with on-screen feedback.
                    elif fingers == [1, 1, 1, 1, 1] and action_ready:
                        pyautogui.click()
                        last_action_time = current_time
                        cv2.putText(frame, 'Clicked!', (smoothed_x - 50, smoothed_y - 50),
                                    cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2, cv2.LINE_AA)
            cv2.imshow('Virtual Mouse', frame)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
    finally:
        # Always release the camera and close windows, even on an exception.
        cap.release()
        cv2.destroyAllWindows()
# Start the gesture-controlled virtual mouse only when run as a script.
if __name__ == "__main__":
    main()