GestureControl.py
# Imports
import numpy as np
import cv2
import math
from key import *  # provides PressKey, ReleaseKey and the W, A, S, D scan-code constants

def forward():
    PressKey(W)
    ReleaseKey(A)
    ReleaseKey(S)
    ReleaseKey(D)

def backward():
    PressKey(S)
    ReleaseKey(A)
    ReleaseKey(W)
    ReleaseKey(D)

def left():
    PressKey(A)
    ReleaseKey(W)
    ReleaseKey(S)
    ReleaseKey(D)

def right():
    PressKey(D)
    ReleaseKey(A)
    ReleaseKey(S)
    ReleaseKey(W)

def stop():
    ReleaseKey(W)
    ReleaseKey(A)
    ReleaseKey(S)
    ReleaseKey(D)

# Open the camera
capture = cv2.VideoCapture(0)

while capture.isOpened():
    # Capture a frame from the camera
    ret, frame = capture.read()
    if not ret:
        break
    # Get hand data from the rectangular sub-window
    cv2.rectangle(frame, (100, 100), (300, 300), (0, 255, 0), 0)
    crop_image = frame[100:300, 100:300]
    # Apply Gaussian blur
    blur = cv2.GaussianBlur(crop_image, (3, 3), 0)
    # Change color space from BGR to HSV
    hsv = cv2.cvtColor(blur, cv2.COLOR_BGR2HSV)
    # Create a binary mask where skin-colored pixels are white and everything else is black
    mask2 = cv2.inRange(hsv, np.array([2, 0, 0]), np.array([20, 255, 255]))
    # Kernel for the morphological transformations
    kernel = np.ones((5, 5))
    # Apply morphological transformations to filter out background noise
    dilation = cv2.dilate(mask2, kernel, iterations=1)
    erosion = cv2.erode(dilation, kernel, iterations=1)
    # Apply Gaussian blur and threshold
    filtered = cv2.GaussianBlur(erosion, (3, 3), 0)
    ret, thresh = cv2.threshold(filtered, 127, 255, 0)
    # Show the thresholded image
    cv2.imshow("Thresholded", thresh)
    # Find contours
    contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    try:
        # Find the contour with the maximum area
        contour = max(contours, key=lambda x: cv2.contourArea(x))
        # Create a bounding rectangle around the contour
        x, y, w, h = cv2.boundingRect(contour)
        cv2.rectangle(crop_image, (x, y), (x + w, y + h), (0, 0, 255), 0)
        # Find the convex hull
        hull = cv2.convexHull(contour)
        # Draw the contour and hull
        drawing = np.zeros(crop_image.shape, np.uint8)
        cv2.drawContours(drawing, [contour], -1, (0, 255, 0), 0)
        cv2.drawContours(drawing, [hull], -1, (0, 0, 255), 0)
        # Find convexity defects
        hull = cv2.convexHull(contour, returnPoints=False)
        defects = cv2.convexityDefects(contour, hull)
        # Use the cosine rule to find the angle at the far point between the start and end
        # points (the convex points, i.e. the fingertips) for each defect
        count_defects = 0
        for i in range(defects.shape[0]):
            s, e, f, d = defects[i, 0]
            start = tuple(contour[s][0])
            end = tuple(contour[e][0])
            far = tuple(contour[f][0])
            a = math.sqrt((end[0] - start[0]) ** 2 + (end[1] - start[1]) ** 2)
            b = math.sqrt((far[0] - start[0]) ** 2 + (far[1] - start[1]) ** 2)
            c = math.sqrt((end[0] - far[0]) ** 2 + (end[1] - far[1]) ** 2)
            angle = math.degrees(math.acos((b ** 2 + c ** 2 - a ** 2) / (2 * b * c)))
            # If the angle is at most 90 degrees, treat the defect as the gap between two
            # fingers and mark the far point
            if angle <= 90:
                count_defects += 1
                cv2.circle(crop_image, far, 1, [0, 0, 255], -1)
            cv2.line(crop_image, start, end, [0, 255, 0], 2)
        # Map the number of defects to a gesture and send the corresponding key press
        if count_defects == 0:
            cv2.putText(frame, "Forward", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 2)
            forward()
        elif count_defects == 1:
            cv2.putText(frame, "Backward", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 2)
            backward()
        elif count_defects == 2:
            cv2.putText(frame, "Left", (5, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 2)
            left()
        elif count_defects == 3:
            cv2.putText(frame, "Right", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 2)
            right()
        elif count_defects == 4:
            cv2.putText(frame, "Stop", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 2)
            stop()
        else:
            pass
    except Exception:
        # Skip frames where no contour or convexity defects were found
        pass
    # Show the required images
    cv2.imshow("Gesture", frame)
    # all_image = np.hstack((drawing, crop_image))
    # cv2.imshow('Contours', all_image)
    # Close the camera if 'q' is pressed
    if cv2.waitKey(1) == ord('q'):
        break

capture.release()
cv2.destroyAllWindows()
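
Note: the `from key import *` line above pulls PressKey, ReleaseKey and the scan-code constants W, A, S, D from a local key module that is not shown in this file. The repository's actual module may differ; the following is only a minimal sketch of what such a module could look like on Windows, using ctypes and the SendInput API with DirectInput scan codes. The constant values, structure layouts, and flag values below follow the common SendInput recipe but are assumptions here, not the repository's code.

# key.py (sketch) -- assumed implementation, not the repository's actual module
import ctypes

# DirectInput scan codes for the movement keys (assumed values)
W = 0x11
A = 0x1E
S = 0x1F
D = 0x20

PUL = ctypes.POINTER(ctypes.c_ulong)

class KeyBdInput(ctypes.Structure):
    _fields_ = [("wVk", ctypes.c_ushort),
                ("wScan", ctypes.c_ushort),
                ("dwFlags", ctypes.c_ulong),
                ("time", ctypes.c_ulong),
                ("dwExtraInfo", PUL)]

class HardwareInput(ctypes.Structure):
    _fields_ = [("uMsg", ctypes.c_ulong),
                ("wParamL", ctypes.c_short),
                ("wParamH", ctypes.c_ushort)]

class MouseInput(ctypes.Structure):
    _fields_ = [("dx", ctypes.c_long),
                ("dy", ctypes.c_long),
                ("mouseData", ctypes.c_ulong),
                ("dwFlags", ctypes.c_ulong),
                ("time", ctypes.c_ulong),
                ("dwExtraInfo", PUL)]

class Input_I(ctypes.Union):
    _fields_ = [("ki", KeyBdInput),
                ("mi", MouseInput),
                ("hi", HardwareInput)]

class Input(ctypes.Structure):
    _fields_ = [("type", ctypes.c_ulong),
                ("ii", Input_I)]

def PressKey(hexKeyCode):
    # Key-down event sent as a hardware scan code (KEYEVENTF_SCANCODE = 0x0008)
    extra = ctypes.c_ulong(0)
    ii_ = Input_I()
    ii_.ki = KeyBdInput(0, hexKeyCode, 0x0008, 0, ctypes.pointer(extra))
    x = Input(ctypes.c_ulong(1), ii_)
    ctypes.windll.user32.SendInput(1, ctypes.pointer(x), ctypes.sizeof(x))

def ReleaseKey(hexKeyCode):
    # Key-up event (KEYEVENTF_SCANCODE | KEYEVENTF_KEYUP = 0x0008 | 0x0002)
    extra = ctypes.c_ulong(0)
    ii_ = Input_I()
    ii_.ki = KeyBdInput(0, hexKeyCode, 0x0008 | 0x0002, 0, ctypes.pointer(extra))
    x = Input(ctypes.c_ulong(1), ii_)
    ctypes.windll.user32.SendInput(1, ctypes.pointer(x), ctypes.sizeof(x))

With a module like this in place, GestureControl.py can be run directly (python GestureControl.py) and the detected gesture drives whichever window currently has keyboard focus, since SendInput injects system-wide key events.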