Update to working greyscale recognition
GestureRecognition/HandRecGray.py (new file, 62 lines)
@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 22 14:16:46 2018

@author: pivatom
"""

import numpy as np
import cv2

# Raw string so the backslashes in the Windows path are not treated as escapes.
img = cv2.imread(r'H:\car\GestureRecognition\IMG_0818.png', 1)

# Downscale the image.
img = cv2.resize(img, None, fx=0.1, fy=0.1, interpolation=cv2.INTER_AREA)

img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

img_gray[img_gray[:, :] > 90] = 255
img_gray[img_gray[:, :] < 90] = 0

# Threshold to binary.
ret, img_thresh = cv2.threshold(img_gray, 127, 255, cv2.THRESH_BINARY)

# Doesn't take too long: count of white pixels.
k = np.sum(img_thresh) / 255

# np.indices needs the full shape tuple; these grids were meant for the
# vectorised centroid attempt that is commented out below.
x_ind = np.indices(img_thresh.shape)
coords = np.zeros(img_thresh.shape)

# Generate individual coordinates for x, then transpose the matrix for y.
#
# First sum x coordinates.
#xb = int(img_ind[img_thresh == 255].sum(axis=1).sum()/k)
#print(xb)
# Then sum y coordinates.
#yb = int(img_ind[img_thresh == 255].sum(axis=0).sum()/k)
#print(yb)

x, y, k, xb, yb = 0, 0, 0, 0, 0

# This is inherently slow... like very, very slow: it visits every pixel in Python.
for pix in img_thresh:        # pix is one row of the binary image
    for j in pix:             # j is one pixel value in that row
        if j == 255:
            k += 1
            xb += x           # x is the column index
            yb += y           # y is the row index
        x += 1
    y += 1
    x = 0

centre = (int(xb / k), int(yb / k))

# img_thresh is single-channel, so only the first value of the colour tuple is used.
cv2.rectangle(img_thresh, centre, (centre[0] + 20, centre[1] + 20), (0, 0, 255), 3)
cv2.circle(img_thresh, centre, 140, (0, 0, 0), 3)

# Now need to trace around the circle to figure out where the fingers are.

cv2.imshow("Binary-cut-out", img_thresh)
cv2.waitKey(0)
cv2.destroyAllWindows()
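The nested loop above computes the centroid of the white pixels one pixel at a time, which is why it is flagged as slow; the commented-out np.indices lines hint at a vectorised version. A minimal sketch of that vectorised centroid, assuming img_thresh is the 0/255 binary image produced above (blob_centre is a hypothetical helper name, not part of the script):

import numpy as np

def blob_centre(img_thresh):
    # Row and column indices of every white pixel.
    ys, xs = np.nonzero(img_thresh == 255)
    if xs.size == 0:
        return None  # no white pixels, so no centre
    # OpenCV points are (x, y), i.e. (column, row).
    return (int(xs.mean()), int(ys.mean()))

# e.g. centre = blob_centre(img_thresh)

This returns the same (x, y) centre as the loop but leaves the per-pixel work to NumPy.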
GestureRecognition/HandRecHSV.py (new file, 61 lines)
@@ -0,0 +1,61 @@
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 22 10:51:21 2018

@author: pivatom
"""

import numpy as np
import cv2

# Raw string so the backslashes in the Windows path are not treated as escapes.
img = cv2.imread(r'H:\car\GestureRecognition\IMG_0818.png', 1)

# Downscale the image.
img = cv2.resize(img, None, fx=0.1, fy=0.1, interpolation=cv2.INTER_AREA)

img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

img_gray[img_gray[:, :] > 90] = 255
img_gray[img_gray[:, :] < 90] = 0

# Threshold to binary.
ret, img_thresh = cv2.threshold(img_gray, 127, 255, cv2.THRESH_BINARY)

x, y, k, xb, yb = 0, 0, 0, 0, 0

# This is inherently slow: it visits every pixel in Python.
for pix in img_thresh:        # pix is one row of the binary image
    for j in pix:             # j is one pixel value in that row
        if j == 255:
            k += 1
            xb += x           # x is the column index
            yb += y           # y is the row index
        x += 1
    y += 1
    x = 0

centre = (int(xb / k), int(yb / k))
print(centre)

# img_thresh is single-channel, so only the first value of the colour tuple is used.
cv2.rectangle(img_thresh, centre, (centre[0] + 20, centre[1] + 20), (0, 0, 255), 3)
cv2.circle(img_thresh, centre, 140, (0, 0, 0), 3)

# Now need to trace around the circle to figure out where the fingers are.

cv2.imshow("Binary-cut-out", img_thresh)
cv2.waitKey(0)
cv2.destroyAllWindows()


# Earlier HSV-based skin segmentation attempt, kept for reference.
#img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
#
#lower_skin = np.array([2, 102, 153])
#upper_skin = np.array([7.5, 153, 255])
#
## Only need the mask, as we can just use this to calculate the centre.
#mask = cv2.inRange(img_hsv, lower_skin, upper_skin)
#
#cv2.imshow("Mask", mask)
#cv2.waitKey(0)
#cv2.destroyAllWindows()
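The commented-out block at the end of this file sketches the HSV route: build a skin mask with cv2.inRange and reuse it in place of img_thresh. A minimal sketch of that path, assuming the same input image as above; the bounds are the rough values from the commented-out block (with the 7.5 hue rounded to 8, since integer bounds are a safer fit for a uint8 image) and would need tuning per lighting and skin tone:

import cv2
import numpy as np

img = cv2.imread(r'H:\car\GestureRecognition\IMG_0818.png', 1)
img = cv2.resize(img, None, fx=0.1, fy=0.1, interpolation=cv2.INTER_AREA)

img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
lower_skin = np.array([2, 102, 153])
upper_skin = np.array([8, 153, 255])

# inRange returns a 0/255 mask, so it can stand in for img_thresh above.
mask = cv2.inRange(img_hsv, lower_skin, upper_skin)

cv2.imshow("Mask", mask)
cv2.waitKey(0)
cv2.destroyAllWindows()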
GestureRecognition/HandRecV2.py (new file, 49 lines)
@@ -0,0 +1,49 @@
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 22 09:21:04 2018

@author: pivatom
"""

import numpy as np
import cv2

min_seg_threshold = 1.05
max_seg_threshold = 4


def calcSkinSample(event, x, y, flags, param):
    # Sample a 10x10 patch under the click and use its darkest and brightest
    # row averages as the new segmentation thresholds.
    global min_seg_threshold, max_seg_threshold
    if event == cv2.EVENT_LBUTTONDOWN:
        # NumPy indexing is [row, col], i.e. [y, x].
        sample = img[y:y + 10, x:x + 10]
        low = 255
        high = 0
        for line in sample:
            avg = np.sum(line) / 10
            if avg < low:
                low = avg
            if avg > high:
                high = avg
        min_seg_threshold = low
        max_seg_threshold = high


def draw_rect(event, x, y, flags, param):
    if event == cv2.EVENT_LBUTTONDOWN:
        print("LbuttonClick")
        cv2.rectangle(img, (x, y), (x + 10, y + 10), (0, 0, 255), 3)


# Raw string so the backslashes in the Windows path are not treated as escapes.
img = cv2.imread(r'H:\car\GestureRecognition\IMG_0818.png', 1)

# Downscale the image.
img = cv2.resize(img, None, fx=0.1, fy=0.1, interpolation=cv2.INTER_AREA)

cv2.namedWindow("Hand")
cv2.setMouseCallback("Hand", draw_rect)

# Prevent divide by zero, by just forcing the pixel to be ignored.
#np.where(img[:,:,1] == 0, 0, img[:,:,1])
#img[(img[:,:,2]/img[:,:,1] > min_seg_threshold) & (img[:,:,2]/img[:,:,1] < max_seg_threshold)] = [255,255,255]

while True:
    cv2.imshow("Hand", img)
    # Redraw every 20 ms so rectangles from the mouse callback appear; Esc quits.
    if cv2.waitKey(20) & 0xFF == 27:
        break

cv2.destroyAllWindows()
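The commented-out segmentation lines above threshold on the red/green ratio but hit a divide-by-zero problem when the green channel is 0. A minimal sketch of the same idea done in float, with zero-green pixels forced outside the band so they are ignored; it assumes the same image path and the thresholds defined at the top of the file:

import cv2
import numpy as np

min_seg_threshold = 1.05
max_seg_threshold = 4.0

img = cv2.imread(r'H:\car\GestureRecognition\IMG_0818.png', 1)
img = cv2.resize(img, None, fx=0.1, fy=0.1, interpolation=cv2.INTER_AREA)

g = img[:, :, 1].astype(np.float32)
r = img[:, :, 2].astype(np.float32)
# Divide by max(g, 1) so there is never a zero divisor, then force the
# zero-green pixels to ratio 0 so they can never match the band.
ratio = np.where(g == 0, 0.0, r / np.maximum(g, 1.0))
mask = (ratio > min_seg_threshold) & (ratio < max_seg_threshold)
img[mask] = (255, 255, 255)

cv2.imshow("Hand", img)
cv2.waitKey(0)
cv2.destroyAllWindows()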
@@ -1,15 +1,34 @@
-from PIL import Image
-from PIL import ImageDraw
-
-img = Image.open('/Users/piv/Desktop/IMG_0818.png')
-
-# Create a new image of the cutout.
-blkimg = Image.new('1', (img.width, img.height)
-blkdraw = ImageDraw.Draw(blkimg)
-
-for i in range(1, img.width):
-    for j in range(1, img.height):
-        # getpixel returns tuple (r,g,b,a)
-        pixel = img.getpixel((i, j))
-
-        if (pixel[0]/pixel[1]) > 1.05 and (pixel[0]/pixel[1]) < 4:
+import numpy as np
+import cv2
+
+img = cv2.imread(r'H:\car\GestureRecognition\IMG_0818.png', 1)
+
+# Downscale the image.
+img = cv2.resize(img, None, fx=0.1, fy=0.1, interpolation=cv2.INTER_AREA)
+
+min_seg_threshold = 1.2
+max_seg_threshold = 1.8
+
+# Prevent divide by zero, by just forcing the pixel to be ignored.
+img[:,:,1] = np.where(img[:,:,1] == 0, 255, img[:,:,1])
+img[(img[:,:,2]/img[:,:,1] > min_seg_threshold) & (img[:,:,2]/img[:,:,1] < max_seg_threshold)] = [255,255,255]
+
+# Try removing image noise.
+#img = cv2.fastNlMeansDenoising(img)
+
+cv2.imshow('image', img)
+cv2.waitKey(0)
+cv2.destroyAllWindows()
+
+# Remove non-hand parts
+
+# Find centre of the hand
+# Hand parts are white pixels.
+# Find sum of each col/row to find the left/rightmost and top/bottommost white pixels.
+# Have used a for loop but obviously that is going to be slow.
+
+# Draw appropriate circle
+
+# Calculate number of different peaks.
+# Article just traced around the circle and counted number of times switched from
+# zero to one.
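The closing comments describe counting fingers by tracing around the drawn circle and counting switches from zero to one. A minimal sketch of that trace, assuming the img_thresh and centre produced by the scripts above and the same radius of 140; count_fingers and the sample count of 360 are hypothetical choices, not part of the commit:

import numpy as np

def count_fingers(img_thresh, centre, radius=140, samples=360):
    h, w = img_thresh.shape[:2]
    angles = np.linspace(0.0, 2.0 * np.pi, samples, endpoint=False)
    xs = (centre[0] + radius * np.cos(angles)).astype(int)
    ys = (centre[1] + radius * np.sin(angles)).astype(int)
    # Clamp sample points to the image so the ring never indexes out of bounds.
    xs = np.clip(xs, 0, w - 1)
    ys = np.clip(ys, 0, h - 1)
    on = img_thresh[ys, xs] == 255
    # Count 0 -> 255 transitions around the ring; each white run the circle
    # crosses (a finger, or the wrist) contributes one.
    return int(np.count_nonzero(on & ~np.roll(on, 1)))

# e.g. print(count_fingers(img_thresh, centre))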