Add 'car/' from commit 'eee0e8dc445691e600680f4abc77f2814b20b054'
git-subtree-dir: car
git-subtree-mainline: 1d29a5526c
git-subtree-split: eee0e8dc44
car/GestureRecognition/HandRecHSV.py (new file, 121 lines)

@@ -0,0 +1,121 @@
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 22 10:51:21 2018

@author: pivatom
"""

import numpy as np
import cv2

# Raw strings so the backslashes in the Windows path are not treated as escape sequences.
img = cv2.imread(r'H:\car\GestureRecognition\IMG_0825.jpg', 1)
# img = cv2.imread(r'H:\car\GestureRecognition\IMG_0818.png', 1)

# Downscale the image
img = cv2.resize(img, None, fx=0.1, fy=0.1, interpolation=cv2.INTER_AREA)

e1 = cv2.getTickCount()

# Hand Localization... possibly with YOLOv3? v2 is faster though...

img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)

# Red/skin hues wrap around 0 in OpenCV's 0-179 hue range, so shift the hue channel
# by 30 to turn them into one contiguous low-hue band before thresholding.
img_hsv[:,:,0] = img_hsv[:,:,0] + 30
img_hsv[:,:,0] = np.where(img_hsv[:,:,0] > 179, img_hsv[:,:,0] - 179, img_hsv[:,:,0])

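# A single modular wrap would do the same shift in one step (a sketch, commented out
# since it would replace the two lines above; it differs from the subtraction by one
# hue step because the full hue range is 180 values, 0-179):
# img_hsv[:,:,0] = (img_hsv[:,:,0] + 30) % 180
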
img_hsv = cv2.GaussianBlur(img_hsv, (5,5), 0)

lower_skin = (0, 0, 153)
upper_skin = (45, 153, 255)

# Only need the mask, as we can just use this to do the hand segmentation.
mask = cv2.inRange(img_hsv, lower_skin, upper_skin)

# This takes a whole millisecond (approx), and does not seem very worth the cost.
blur = cv2.GaussianBlur(mask, (5,5), 0)
ret, img_thresh = cv2.threshold(blur, 50, 255, cv2.THRESH_BINARY)
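
# A sketch of an alternative cleanup (not used below): a morphological opening removes
# small speckles from the mask without the blur-and-rethreshold round trip.
kernel_open = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5, 5))
mask_open = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel_open)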

# Uncomment if not using blur and threshold.
# img_thresh = mask

# Number of white (255) pixels in the mask.
k = np.sum(img_thresh) / 255

# Index vectors for the columns (x) and rows (y), and full coordinate grids.
x_ind = np.arange(0, img_thresh.shape[1])
y_ind = np.arange(0, img_thresh.shape[0])
coords_x = np.zeros((img_thresh.shape[0], img_thresh.shape[1]))
coords_y = np.zeros((img_thresh.shape[0], img_thresh.shape[1]))
coords_x[:,:] = x_ind

# Even this is extremely quick as it goes through rows in the numpy array, which in
# python is much faster than going through columns.
for element in y_ind:
    coords_y[element,:] = element

# Now get the average x value and y value for the centre of gravity.
xb = int(np.sum(coords_x[img_thresh == 255]) / k)
yb = int(np.sum(coords_y[img_thresh == 255]) / k)

centre = (xb, yb)
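
# A sketch of the same centre of gravity without the coordinate grids (not used below):
# np.nonzero returns the row (y) and column (x) indices of every white pixel directly.
ys_nz, xs_nz = np.nonzero(img_thresh)
centre_alt = (int(xs_nz.mean()), int(ys_nz.mean()))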

# Calculate radius of circle:
# May need to calculate diameter as well.
# Just take min/max x values and y values.
x_min = np.min(coords_x[img_thresh == 255])
x_max = np.max(coords_x[img_thresh == 255])
y_min = np.min(coords_y[img_thresh == 255])
y_max = np.max(coords_y[img_thresh == 255])

candidate_pts = [(x_min, y_min), (x_min, y_max), (x_max, y_min), (x_max, y_max)]
radius = 0

# Check each corner of the bounding box to see which is furthest from the centre.
for pt in candidate_pts:
    # Calculate Euclidean distance.
    new_distance = ((pt[0] - centre[0])**2 + (pt[1] - centre[1])**2)**(1/2)
    if new_distance > radius:
        radius = new_distance

radius = int(radius * 0.52)

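# A sketch of an OpenCV alternative (not used below): the minimum enclosing circle of
# all white pixels gives a centre and radius in a single call.
pts_mec = np.column_stack(np.nonzero(img_thresh)[::-1]).astype(np.float32)
(cx_mec, cy_mec), r_mec = cv2.minEnclosingCircle(pts_mec)
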
# 140 needs to be replaced with a predicted value, i.e. not be a magic number.
# cv2.circle(img_thresh, centre, radius, (120,0,0), 3)

def calc_pos_y(x):
    # y-coordinate of one intersection of the circle with the vertical line at x.
    return int((radius**2 - (x - centre[0])**2)**(1/2) + centre[1])

# Now go around the circle and count the number of times the mask goes 0->255 or vice-versa.
# First just do it the naive way with loops.
# Equation of the circle: y = sqrt(r^2 - (x - cx)^2) + cy
# Will just increment x to check, no need to loop over y as well.
# This is extremely slow, need to speed it up by removing the for loop.
# Brings speed down to 20 fps.
# This is actually fast, it was just the print debug statements that made it slow; takes just 6ms...
# Could try a kernel method?
prev_x = centre[0] - radius
prev_y = [calc_pos_y(centre[0] - radius), calc_pos_y(centre[0] - radius)]
num_change = 0
for x in range(centre[0] - radius + 1, centre[0] + radius):
    ypos = calc_pos_y(x)
    # The two intersections for this x: ypos and its reflection about centre[1].
    y = [ypos, centre[1] - (ypos - centre[1])]
    if img_thresh[y[0], x] != img_thresh[prev_y[0], prev_x]:
        num_change += 1
    if img_thresh[y[1], x] != img_thresh[prev_y[1], prev_x] and y[0] != y[1]:
        num_change += 1
    prev_x = x
    prev_y = y

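# A vectorized sketch of roughly the same transition count (not used below): sample the
# two arcs for every x at once and count value changes along each arc. It makes the same
# in-bounds assumption as the loop above and skips the y[0] != y[1] check.
xs_arc = np.arange(centre[0] - radius, centre[0] + radius)
y_off = np.sqrt(radius**2 - (xs_arc - centre[0])**2).astype(int)
vals_lo = img_thresh[centre[1] + y_off, xs_arc].astype(np.int16)
vals_hi = img_thresh[centre[1] - y_off, xs_arc].astype(np.int16)
num_change_alt = int(np.count_nonzero(np.diff(vals_lo)) + np.count_nonzero(np.diff(vals_hi)))
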
# Each finger that crosses the circle produces two transitions along the sampled arcs;
# the minus one presumably discounts the arc that passes through the wrist/palm.
fingers = num_change // 2 - 1

print("Num Fingers: " + str(fingers))

e2 = cv2.getTickCount()
t = (e2 - e1) / cv2.getTickFrequency()
print(t)
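# The same timing expressed as an approximate frame rate (commented out, for reference):
# print("Approx FPS: " + str(1 / t))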

cv2.imshow("Threshold", img_thresh)
cv2.waitKey(0)
cv2.destroyAllWindows()