forked from vipul-sharma20/gesture-opencv
gesture.py
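"""
Basic hand gesture recognizer using OpenCV (summary of the script below).

Reads frames from the default webcam, thresholds a fixed region of interest
(Otsu binarization on a blurred grayscale crop), finds the largest contour
and its convex hull, and counts convexity defects whose angle is at most
90 degrees (the dips between extended fingers). A message chosen from the
defect count is overlaid on the frame. Press Esc to quit.
"""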
import cv2
import numpy as np
import math
cap = cv2.VideoCapture(0)
while cap.isOpened():
    # read a frame from the webcam
    ret, img = cap.read()
    if not ret:
        break
    # get hand data from the rectangular sub-window on the screen
    cv2.rectangle(img, (300, 300), (100, 100), (0, 255, 0), 0)
    crop_img = img[100:300, 100:300]

    # convert to grayscale
    grey = cv2.cvtColor(crop_img, cv2.COLOR_BGR2GRAY)

    # apply Gaussian blur
    value = (35, 35)
    blurred = cv2.GaussianBlur(grey, value, 0)

    # thresholding: Otsu's binarization method
    _, thresh1 = cv2.threshold(blurred, 127, 255,
                               cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)

    # show thresholded image
    cv2.imshow('Thresholded', thresh1)
    # check the OpenCV version to avoid an unpacking error:
    # findContours returns 3 values in OpenCV 3.x but 2 values in 2.x and 4.x
    major = cv2.__version__.split('.')[0]
    if major == '3':
        _, contours, hierarchy = cv2.findContours(thresh1.copy(),
                                                  cv2.RETR_TREE,
                                                  cv2.CHAIN_APPROX_NONE)
    else:
        contours, hierarchy = cv2.findContours(thresh1.copy(), cv2.RETR_TREE,
                                               cv2.CHAIN_APPROX_NONE)
    # find the contour with max area
    cnt = max(contours, key=lambda x: cv2.contourArea(x))

    # create a bounding rectangle around the contour (the next two lines can be skipped)
    x, y, w, h = cv2.boundingRect(cnt)
    cv2.rectangle(crop_img, (x, y), (x + w, y + h), (0, 0, 255), 0)

    # find the convex hull
    hull = cv2.convexHull(cnt)

    # draw the contour and its hull
    drawing = np.zeros(crop_img.shape, np.uint8)
    cv2.drawContours(drawing, [cnt], 0, (0, 255, 0), 0)
    cv2.drawContours(drawing, [hull], 0, (0, 0, 255), 0)

    # find the convex hull again as indices, as required by convexityDefects
    hull = cv2.convexHull(cnt, returnPoints=False)

    # find convexity defects (None when the contour has no defects)
    defects = cv2.convexityDefects(cnt, hull)
    count_defects = 0
    cv2.drawContours(thresh1, contours, -1, (0, 255, 0), 3)
    # apply the cosine rule to find the angle at each defect point (the dip
    # between two fingers): count defects with an angle <= 90 degrees and
    # ignore the rest (a worked numeric example is at the end of the file)
    if defects is not None:
        for i in range(defects.shape[0]):
            s, e, f, d = defects[i, 0]
            start = tuple(cnt[s][0])
            end = tuple(cnt[e][0])
            far = tuple(cnt[f][0])

            # find the lengths of all sides of the triangle
            # (start/end are finger tips, far is the defect point between them)
            a = math.sqrt((end[0] - start[0])**2 + (end[1] - start[1])**2)
            b = math.sqrt((far[0] - start[0])**2 + (far[1] - start[1])**2)
            c = math.sqrt((end[0] - far[0])**2 + (end[1] - far[1])**2)

            # cosine rule: angle at the defect point, converted to degrees
            angle = math.degrees(math.acos((b**2 + c**2 - a**2) / (2*b*c)))

            # ignore angles > 90 degrees and highlight the rest with red dots
            if angle <= 90:
                count_defects += 1
                cv2.circle(crop_img, far, 1, [0, 0, 255], -1)
            # dist = cv2.pointPolygonTest(cnt, far, True)

            # draw a line from start to end, i.e. the convex points (finger tips)
            # (this part can be skipped)
            cv2.line(crop_img, start, end, [0, 255, 0], 2)
            # cv2.circle(crop_img, far, 5, [0, 0, 255], -1)
    # define the required actions based on the number of defects
    if count_defects == 1:
        cv2.putText(img, "I am Vipul", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2)
    elif count_defects == 2:
        text = "This is a basic hand gesture recognizer"
        cv2.putText(img, text, (5, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, 2)
    elif count_defects == 3:
        cv2.putText(img, "This is 4 :P", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2)
    elif count_defects == 4:
        cv2.putText(img, "Hi!!!", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2)
    else:
        cv2.putText(img, "Hello World!!!", (50, 50),
                    cv2.FONT_HERSHEY_SIMPLEX, 2, 2)
    # show the appropriate images in windows
    cv2.imshow('Gesture', img)
    all_img = np.hstack((drawing, crop_img))
    cv2.imshow('Contours', all_img)

    # exit on Esc
    k = cv2.waitKey(10)
    if k == 27:
        break

cap.release()
cv2.destroyAllWindows()
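
# A quick worked example of the defect-angle test above, with hypothetical
# distances: for a fingertip-to-fingertip distance a = 40 px and defect-to-
# fingertip distances b = c = 60 px, the cosine rule gives
# angle = degrees(acos((60**2 + 60**2 - 40**2) / (2*60*60))) ~= 38.9 degrees,
# which is <= 90, so that defect would be counted as a gap between two fingers.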