+ import itertools
+ import cv2
+ import mediapipe as mp
+ import time
+ import pyautogui
+
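+ # Landmark drawing colour (BGR) and MediaPipe hand-tracking helpers.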
+ COLOR = (0, 0, 255)
+ mp_drawing = mp.solutions.drawing_utils
+ mp_drawing_styles = mp.solutions.drawing_styles
+ mp_hands = mp.solutions.hands
+ drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1)
+
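+ # Open the default webcam and request a capture size matching the laptop screen.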
+ cap = cv2.VideoCapture(0)
+ cap.set(cv2.CAP_PROP_FRAME_WIDTH, 1600)   # my laptop width size
+ cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 900)   # my laptop height size
+
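+ # Lightweight hand model (complexity 0) with fairly strict confidence thresholds.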
+ hands = mp_hands.Hands(model_complexity=0, min_detection_confidence=0.8, min_tracking_confidence=0.8)
+
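+ # Main loop: read a frame, mirror it, and run hand-landmark detection on it.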
+ while cap.isOpened():
+     success, image = cap.read()
+     if not success:
+         continue
+     image = cv2.flip(image, 1)
+     # MediaPipe expects RGB; mark the frame read-only while it is processed.
+     image.flags.writeable = False
+     image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
+     result = hands.process(image)
+     image.flags.writeable = True
+     image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
+
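+     # When a hand is detected, use the wrist / index-MCP midpoint as the cursor anchor.
+     # The 2.5 and 1.875 factors appear to scale a 640x480 camera frame up to the 1600x900 screen.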
+     if result.multi_hand_landmarks:
+         height, width, _ = image.shape  # 480 640
+         # print(height, width)
+         annotated_image = image.copy()
+         for hands_landmarks in result.multi_hand_landmarks:
+             wrist_x = int(hands_landmarks.landmark[mp_hands.HandLandmark.WRIST].x * width * 2.5)
+             wrist_y = int(hands_landmarks.landmark[mp_hands.HandLandmark.WRIST].y * height * 1.875)
+             index_finger_mcp_x = int(hands_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_MCP].x * width * 2.5)
+             index_finger_mcp_y = int(hands_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_MCP].y * height * 1.875)
+             center_x = (wrist_x + index_finger_mcp_x) // 2
+             center_y = (wrist_y + index_finger_mcp_y) // 2
+             cv2.circle(image, (int(center_x / 2.5), int(center_y / 1.875)), 1, COLOR, 7)
+             wrist_coor = (int(center_x / 2.5), int(center_y / 1.875))
+             center_y = center_y - 400
+             center_x = center_x - 400
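+             # Index and middle fingertip / DIP joints, drawn on the frame and used
+             # below to detect "finger folded" click gestures.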
+             index_finger_top_x = int(hands_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_TIP].x * width)
+             index_finger_top_y = int(hands_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_TIP].y * height)
+             index_finger_dip_x = int(hands_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_DIP].x * width)
+             index_finger_dip_y = int(hands_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_DIP].y * height)
+             cv2.line(image, (index_finger_top_x, index_finger_top_y), (index_finger_dip_x, index_finger_dip_y), COLOR, 1)
+             cv2.circle(image, (index_finger_dip_x, index_finger_dip_y), 1, COLOR, 3)
+             cv2.circle(image, (index_finger_top_x, index_finger_top_y), 1, COLOR, 3)
+             middle_finger_top_x = int(hands_landmarks.landmark[mp_hands.HandLandmark.MIDDLE_FINGER_TIP].x * width)
+             middle_finger_top_y = int(hands_landmarks.landmark[mp_hands.HandLandmark.MIDDLE_FINGER_TIP].y * height)
+             cv2.circle(image, (middle_finger_top_x, middle_finger_top_y), 1, COLOR, 3)
+             middle_finger_dip_x = int(hands_landmarks.landmark[mp_hands.HandLandmark.MIDDLE_FINGER_DIP].x * width)
+             middle_finger_dip_y = int(hands_landmarks.landmark[mp_hands.HandLandmark.MIDDLE_FINGER_DIP].y * height)
+             cv2.circle(image, (middle_finger_dip_x, middle_finger_dip_y), 1, COLOR, 3)
+             cv2.line(image, (middle_finger_top_x, middle_finger_top_y), (middle_finger_dip_x, middle_finger_dip_y), COLOR, 1)
+             cv2.line(image, (middle_finger_top_x, middle_finger_top_y), wrist_coor, COLOR, 1)
+             cv2.line(image, (index_finger_top_x, index_finger_top_y), wrist_coor, COLOR, 1)
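+             # Move the OS cursor to the offset palm point, then interpret a fingertip
+             # that has dropped below its DIP joint as a left or right click.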
+             if 0 <= center_x <= 1600 and 0 <= center_y <= 900:
+                 pyautogui.moveTo(center_x, center_y)
+             if index_finger_top_y > index_finger_dip_y:
+                 COLOR = (255, 0, 0)
+                 cv2.putText(image, "Left Click", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, COLOR, 8)
+                 print('left click')
+                 pyautogui.dragTo(button='left')
+                 # pyautogui.click(center_x, center_y)
+             elif middle_finger_top_y > middle_finger_dip_y:
+                 print("right click")
+                 COLOR = (255, 0, 0)
+                 pyautogui.dragTo(button='right')
+                 cv2.putText(image, "Right Click", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, COLOR, 8)
+                 # pyautogui.click(center_x, center_y, button='right')
+             else:
+                 COLOR = (0, 0, 255)
+
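+     # Show the annotated frame; press Esc (key code 27) to quit.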
+     # image = cv2.resize(image, (1600, 900))
+     cv2.imshow("Hands Experiment:", image)
+     if cv2.waitKey(5) & 0xFF == 27:
+         break
+
+ cap.release()
+ cv2.destroyAllWindows()