#!/usr/bin/env python3
import cv2
import numpy as np
# import matplotlib.pyplot as plt
import serial
from time import sleep
import struct
# import os
# import imutils
# initialize the serial port
ser = serial.Serial("/dev/ttyS2", 115200) # open the UART port at 115200 baud
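# /dev/ttyS2 is assumed to be the UART wired to the motion/gripper controller (the "red board"
# referenced below); every command in this script goes out through this handle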
cv2.destroyAllWindows()
# plt.close('all')
########################### Define Variables ###############################
bordersize = 10
samples = 10
num_fruit = 6 # banana, lemon, apple, orange, pear, home
size_up = 1.6
banana_size = 45
lemon_size = 45
apple_size = 48
orange_size = 48
pear_size = 52
home_size = 30
fruit_size = np.array([banana_size,lemon_size,apple_size,orange_size,pear_size,home_size])
# Size of each fruit when in gripping range
current_fruit_size = np.zeros([num_fruit])
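# fruit_size holds the expected blob keypoint diameter (in pixels at the capture resolution) for each
# object at gripping range; the "relative size" sent over serial is the detected size divided by this reference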
########################## Begin Code ######################################
def empty(a):
pass
# cap = cv2.VideoCapture(cv2.CAP_DSHOW)
width = 160
height = 120
cap = cv2.VideoCapture(1)
cap.set(cv2.CAP_PROP_FRAME_WIDTH, width)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, height)
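# 160x120 keeps the per-frame HSV and blob-detection work light; note cap.set() is only a request
# and some cameras ignore it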
# dirname = os.path.dirname(__file__)
# filename = os.path.join(dirname, 'Course_Images/banana_close1.bmp')
# frame = cv2.imread(filename)
# cv2.imshow("Import",frame)
###################### Banana Blob Detector #################################
banana_params = cv2.SimpleBlobDetector_Params()
# Change thresholds
# banana_params.minThreshold = 70
# banana_params.maxThreshold = 200
# Filter by Area.
banana_params.filterByArea = True
banana_params.minArea = 150
banana_params.maxArea = 900
# Filter by Circularity
banana_params.filterByCircularity = False
banana_params.minCircularity = 0.9
# Filter by Convexity
banana_params.filterByConvexity = True
banana_params.minConvexity = 0.8
# Filter by Inertia
banana_params.filterByInertia = True
banana_params.minInertiaRatio = 0.01
banana_params.maxInertiaRatio = 0.3
banana_detector = cv2.SimpleBlobDetector_create(banana_params)
###################### Lemon Blob Detector #########################
lemon_params = cv2.SimpleBlobDetector_Params()
# Change thresholds
# params.minThreshold = 10
# params.maxThreshold = 200
# Filter by Area.
lemon_params.filterByArea = True
# circular_params.minArea = 5e3
lemon_params.minArea = 150
lemon_params.maxArea = 900
# Filter by Circularity
lemon_params.filterByCircularity = False
lemon_params.minCircularity = 0.6
lemon_params.maxCircularity = 0.99
# Filter by Convexity
lemon_params.filterByConvexity = True
lemon_params.minConvexity = 0.7
# Filter by Inertia
lemon_params.filterByInertia = True
lemon_params.minInertiaRatio = 0.3
lemon_params.maxInertiaRatio = 0.99
lemon_detector = cv2.SimpleBlobDetector_create(lemon_params)
###################### Circular Fruit Blob Detector #########################
circular_params = cv2.SimpleBlobDetector_Params()
# Change thresholds
# params.minThreshold = 10
# params.maxThreshold = 200
# Filter by Area.
circular_params.filterByArea = True
# circular_params.minArea = 5e3
circular_params.minArea = 100
circular_params.maxArea = 1e4
# Filter by Circularity
circular_params.filterByCircularity = False
circular_params.minCircularity = 0.6
circular_params.maxCircularity = 0.99
# Filter by Convexity
circular_params.filterByConvexity = True
circular_params.minConvexity = 0.7
# Filter by Inertia
circular_params.filterByInertia = True
circular_params.minInertiaRatio = 0.3
circular_params.maxInertiaRatio = 0.99
circular_detector = cv2.SimpleBlobDetector_create(circular_params)
###################### Home Blob Detector #########################
home_params = cv2.SimpleBlobDetector_Params()
# Change thresholds
# params.minThreshold = 10
# params.maxThreshold = 200
# Filter by Area.
home_params.filterByArea = True
home_params.minArea = 100
home_params.maxArea = 3000
# Filter by Circularity
home_params.filterByCircularity = True
home_params.minCircularity = 0.2
# Filter by Convexity
home_params.filterByConvexity = True
home_params.minConvexity = 0.5
# Filter by Inertia
home_params.filterByInertia = False
home_detector = cv2.SimpleBlobDetector_create(home_params)
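# all detectors above run on binary masks; SimpleBlobDetector's default blobColor is 0 (dark blobs),
# which is why each mask is inverted with cv2.bitwise_not before detection in the main loop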
##################### Filter and Determine Keypoints ########################
# Filters for a lemon
lower_lemon = np.array([int(28*180/255),70,125])
upper_lemon = np.array([int(49*180/255),255,255])
# filters for a banana
lower_banana = np.array([int(18*180/255),75,90])
upper_banana = np.array([int(56*180/255),255,255])
# filters for an apple
lower_red = np.array([int(234*180/255),91,62])
upper_red = np.array([int(253*180/255),255,255])
# filters for a pear
lower_green = np.array([int(46*180/255),70,32])
upper_green = np.array([int(85*180/255),255,255])
# filters for an orange
lower_orange = np.array([int(3*180/255),70,175])
upper_orange = np.array([int(22*180/255),255,255])
lower_home = np.array([0,0,0])
upper_home = np.array([255,0,0])
lower_blue = np.array([int(130*180/255),40,40])
upper_blue = np.array([int(185*180/255),255,255])
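# the hue bounds above are written as int(x*180/255) because they were presumably picked on a 0-255
# hue wheel, while OpenCV's HSV hue channel spans 0-179 (e.g. 28 -> int(28*180/255) = 19)
# the home range [0,0,0]..[255,0,0] only passes pixels with zero saturation and value, i.e. pure black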
# pre-define some variables
old_data = np.zeros([samples,num_fruit,3])
old_data[:] = np.NaN
old_blue = np.zeros([int(samples/2),3])
old_blue[:] = np.NaN
trust_data = np.zeros([num_fruit,2])
state = 0
recent_fruit_choice = 0
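# state machine overview (as used by trust_evaluator and blue_trust_evaluator below):
# 0: searching for fruit              1: tracking a fruit ('*','*', x, size)
# 2: fruit lost ('*','%'), back to 0  3: fruit in gripping range, grab ('*','=')
# 4: searching for the blue home marker   5: tracking home ('*','^', x, size)
# 6: at home ('*','0'), back to 0     7: home base color seen by the fruit detectors ('*','&'), back to 0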
################# Blob Detector Functions #################################
def color_detect(detector_color,mask_color,set_name,frame):
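# run the given blob detector on a binary mask, keep the largest keypoint, annotate the frame,
# and return ([x, y, size], frame); all three values are NaN when nothing is detected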
key_color = detector_color.detect(mask_color)
x = np.NaN
y = np.NaN
size = np.NaN
size_temp = 0
# choose largest object for color
for point in key_color:
if point.size > size_temp:
x = point.pt[0]
y = point.pt[1]
size = point.size
size_temp = size
# if there are keypoints
if not np.isnan(x):
frame = cv2.drawKeypoints(frame, key_color, np.array([]), (0,255,0),
cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
frame = cv2.putText(frame, set_name, (int(x) - 20,int(y) - 20),cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)
# cv2.imwrite("result.png",yellow_with_keypoints)
######## Show Masks ##########
'''
color_with_keypoints = cv2.drawKeypoints(mask_color, key_color, np.array([]), (0,255,0),
cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
cv2.imshow(set_name,color_with_keypoints)
cv2.waitKey(1)'''
##############
#print(set_name,size)
return np.array([x,y,size]),frame
#################### Evaluate Trust in Data ##############################
def trust_evaluator(num_fruit,old_data,frame,state,recent_fruit_choice):
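# look back over the last 'samples' frames, accept a fruit only if it was sighted in >= 80% of them
# with limited area variation, pick the largest fruit relative to its reference size, and drive
# states 0-3 and 7 over serial accordingly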
sight_vec = np.zeros([num_fruit])
average_fruit_data = np.zeros([num_fruit,3])
for i in range(num_fruit):
# calculate number of times that color has been sighted in sample history
sight_count = np.count_nonzero(~np.isnan(old_data[:,i,0]))
sight_vec[i] = sight_count
# if sighted over 80% of the time, assume it is a real object
if sight_count >= samples*0.8:
area_var = np.nanmax(old_data[:,i,2]) / np.nanmin(old_data[:,i,2]) # ignore NaN samples when comparing areas
# if the area is relatively constant, assume that we are not switching between different objects
if area_var <= 2:
# Save data of existing values
average_fruit_data[i,0] = np.nanmean(old_data[:,i,0])
average_fruit_data[i,1] = np.nanmean(old_data[:,i,1])
average_fruit_data[i,2] = np.nanmean(old_data[:,i,2])
## Then determine the largest (if any) and position
max_size = np.NaN
x = np.NaN
y = np.NaN
# if any of the fruits meet our requirements for being seen (80% and limited area variation)
if np.any(average_fruit_data[:,2]):
# max_ratio = np.max(average_fruit_data[:,2] / fruit_size)
fruit_choice = np.argmax(average_fruit_data[:,2] / fruit_size)
recent_fruit_choice = fruit_choice
# final fruit is the home color
if fruit_choice == 5:
print("Found Home")
state = 7
else:
# treat the x and y coordinates as the last seen x and y
output_index = np.min(np.argwhere(~np.isnan(old_data[:,fruit_choice,2])))
x = old_data[output_index,fruit_choice,0]
y = old_data[output_index,fruit_choice,1]
# we output relative fruit size, aka blob size divided by fruit size
max_size = old_data[output_index,fruit_choice,2] / fruit_size[fruit_choice]
# print("X:",x," Y:",y, "Size Ratio:",max_ratio)
# print("Size:",average_fruit_data[fruit_choice,2])
frame = cv2.putText(frame, "Target Lock Confirm",
(int(x) - 50,int(y) - 50),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)
# if the fruit is close by
if np.abs(max_size-1) < 0.05:
state = 3
# if our size isn't perfect but we still see it
else:
state = 1
# if we have seen a fruit in our last set of samples, save off the sample where this occurred
if np.count_nonzero(~np.isnan(old_data[:,recent_fruit_choice,0])):
recent_index = np.min(np.argwhere(~np.isnan(old_data[:,recent_fruit_choice,2])))
recent_x = old_data[recent_index,recent_fruit_choice,0]
max_recent_size = old_data[recent_index,recent_fruit_choice,2] / fruit_size[recent_fruit_choice]
print("Size:",max_size,"Size Vec:",sight_vec)
# if the fruit is no longer seen
if np.max(sight_vec) <= 5 and state == 1:
# if we were relatively close
print("Most recent not-NaN:",old_data[recent_index,recent_fruit_choice,2])
if max_recent_size > 0.6:
state = 3
# if we weren't that close
else:
state = 2
# State Machine
# state 1 is we are tracking a fruit
if state == 1:
# print("Size:",max_size,"Size Vec:",sight_vec)
ser.write(str.encode('*'))
ser.write(str.encode('*'))
ser.write(struct.pack('ff',recent_x,max_recent_size))
print("sending x:",recent_x,",size:",max_recent_size)
# ser.write('*').encode('utf-8')
# ser.write('*').encode()
# state 2 is we saw the fruit but no longer do, return to state 0
elif state == 2:
print("In state 2","Size Vec:",sight_vec)
ser.write(str.encode('*'))
ser.write(str.encode('%'))
# ser.write('*').encode()
# ser.write('%').encode()
state = 0
# state 3 is we should grab the fruit
elif state == 3:
print("In state 3")
ser.write(str.encode('*'))
ser.write(str.encode('*'))
ser.write(struct.pack('ff',old_data[recent_index,recent_fruit_choice,0],max_recent_size))
ser.write(str.encode('*'))
ser.write(str.encode('='))
state = 4
# state 7 tells the red board that we have found home (not used for final project)
elif state == 7:
ser.write(str.encode('*'))
ser.write(str.encode('&'))
print("Made it to state 7")
state = 0
#ser.write(struct.pack('ff',320,0))
# else:
# print("Did not confirm a target")
print("state:",state)
return frame, state, recent_fruit_choice
######################## Detect Home #######################################
def blue_trust_evaluator(blue_detector,blue_mask,frame,state,blue_data):
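# detect the blue home marker, keep a short history (samples/2 frames), and drive homing states 4-6;
# blue_size = 120 is taken to be the reference keypoint diameter when the robot is at the home position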
blue_size = 120
key_blue = blue_detector.detect(blue_mask)
x = np.NaN
y = np.NaN
size = np.NaN
size_temp = 0
# choose largest object for color
for point in key_blue:
if point.size > size_temp:
x = point.pt[0]
y = point.pt[1]
size = point.size
size_temp = size
# move old data to make room for new sample
for i in range(blue_data.shape[0] - 1,0,-1):
blue_data[i,:] = blue_data[i-1,:]
blue_data[0,:] = np.array([x,y,size])
sight_count = np.count_nonzero(~np.isnan(blue_data[:,0]))
print(sight_count)
# if we have seen blue at least 3 of the recent times, track it
if sight_count >= 3:
avg_size = np.nanmean(blue_data[:,2])
relative_size = avg_size / blue_size
# state 4 is looking for home
# state 5 is tracking home
# state 6 is at home
if relative_size - 1 > 0 and state == 5:
# state 6 is that we are home
state = 6
ser.write(str.encode('*'))
ser.write(str.encode('0'))
print("sent final command")
# sleep(0.2)
# ser.write(str.encode('*'))
# ser.write(str.encode('-'))
# print("sent final command")
state = 0
blue_data[:] = np.NaN
# we add a sleep so that the fruit can be removed and the Pi does not see it.
# during this time, the robot will be reorienting itself for wall following
sleep(10)
else:
# state 5 is that we see blue and track it
state = 5
x_out = np.nanmean(blue_data[:,0])
y_out = np.nanmean(blue_data[:,1])
size_out = np.nanmean(blue_data[:,2])/blue_size
frame = cv2.drawKeypoints(frame, key_blue, np.array([]), (0,255,0),
cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
frame = cv2.putText(frame, "ET Phone Home", (int(x_out) - 5,int(y_out) - 5),cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)
ser.write(str.encode('*'))
ser.write(str.encode('^'))
ser.write(struct.pack('ff',x_out,size_out))
print("sending x:",x_out,",size:",size_out)
# cv2.imwrite("result.png",yellow_with_keypoints)
# if we were tracking home and no longer see it
elif state == 5 and sight_count < 3:
ser.write(str.encode('*'))
ser.write(str.encode('+'))
state = 4
# state 4 is we see nothing
else:
state = 4
######## Show Masks ##########
'''
color_with_keypoints = cv2.drawKeypoints(blue_mask, key_blue, np.array([]), (0,255,0),
cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
cv2.imshow("Blue",color_with_keypoints)
cv2.waitKey(1)'''
return blue_data,frame,state
######################## While Loop #########################################
while (1):
ret, frame = cap.read()
imgHSV = cv2.cvtColor(frame,cv2.COLOR_BGR2HSV)
############### Create image for each color ############################
mask_banana = cv2.inRange(imgHSV,lower_banana,upper_banana)
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
gray_and = cv2.bitwise_and(mask_banana,gray)
th, gray_thresh = cv2.threshold(gray_and,115,255,cv2.THRESH_BINARY)
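# AND-ing the banana hue mask with a grayscale threshold keeps only banana-range pixels that are
# also bright (gray > 115), presumably to reject darker yellow-brown clutter; the result is then
# inverted so the blob detector sees the banana as a dark blob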
mask_banana = cv2.bitwise_not(gray_thresh)
mask_lemon = cv2.inRange(imgHSV,lower_lemon,upper_lemon)
mask_lemon = cv2.bitwise_not(mask_lemon)
mask_red = cv2.inRange(imgHSV,lower_red,upper_red)
mask_red = cv2.bitwise_not(mask_red)
mask_green = cv2.inRange(imgHSV,lower_green,upper_green)
mask_green = cv2.bitwise_not(mask_green)
mask_orange = cv2.inRange(imgHSV,lower_orange,upper_orange)
mask_orange = cv2.bitwise_not(mask_orange)
mask_home = cv2.inRange(imgHSV,lower_home,upper_home)
mask_home = cv2.bitwise_not(mask_home)
###################### Move Old Data ####################################
for i in range(samples - 1,0,-1):
old_data[i,:,:] = old_data[i-1,:,:]
##################### Blob Detector for Each Color ######################
# Detect blobs.
old_data[0,0,:], frame = color_detect(banana_detector,mask_banana,"Banana", frame)
old_data[0,1,:], frame = color_detect(lemon_detector,mask_lemon,"Lemon", frame)
old_data[0,2,:], frame = color_detect(circular_detector,mask_red,"Apple", frame)
old_data[0,3,:], frame = color_detect(circular_detector,mask_orange,"Orange", frame)
old_data[0,4,:], frame = color_detect(circular_detector,mask_green,"Pear", frame)
old_data[0,5,:], frame = color_detect(home_detector,mask_home,"Home", frame)
################### Evaluate Trust in Data #############################
if state == 4 or state == 5 or state == 6:
blue_mask = cv2.inRange(imgHSV,lower_blue,upper_blue)
blue_mask = cv2.bitwise_not(blue_mask)
# add border so that if our home is on the edge, we still see it
blue_mask = cv2.copyMakeBorder(
blue_mask,
top=bordersize,
bottom=bordersize,
left=bordersize,
right=bordersize,
borderType=cv2.BORDER_CONSTANT,
value=[255, 255, 255]
)
old_blue,frame,state = blue_trust_evaluator(home_detector,blue_mask,frame,state,old_blue)
else:
frame,state,recent_fruit_choice = trust_evaluator(num_fruit,old_data,frame,state,recent_fruit_choice)
##################### Plot overlayed image ##############################
cv2.imshow("Image with Keypoints",frame)
cv2.waitKey(1)