-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathball_tracking.py
359 lines (286 loc) · 10.5 KB
/
ball_tracking.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
# import the necessary packages
from collections import deque
from imutils.video import VideoStream
import argparse
import cv2
import imutils
import time
from multiprocessing.pool import ThreadPool
from multiprocessing import Process
import multiprocessing as multip
# Create an instance of the GoPiGo3 class. GPG will be the GoPiGo3 object.
#gpg = gopigo3.GoPiGo3()
from gopigo import * #Has the basic functions for controlling the GoPiGo Robot
# Shared movement state, mutated by the motion helpers below so we don't
# re-issue motor commands every frame while already moving/turning.
robot_is_moving = False
robot_is_turning = False
# Base PWM duty for both motors (0-255 scale per gopigo motor1/motor2 usage below).
robot_speed = 127
#possibility to use the easygopigo3 lib
# importing the EasyGoPiGo3 class
#from easygopigo3 import EasyGoPiGo3
# instantiating a EasyGoPiGo3 object
#gpg = EasyGoPiGo3()
# construct the argument parse and parse the arguments
# -v/--video: optional path to a video file; when absent, the webcam is used.
# -b/--buffer: max length of the deque of tracked points (see `pts` below).
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--video",
    help="path to the (optional) video file")
ap.add_argument("-b", "--buffer", type=int, default=64,
    help="max buffer size")
args = vars(ap.parse_args())
# if a video path was not supplied, grab the reference
# to the webcam
if not args.get("video", False):
    # imutils VideoStream: threaded camera capture, started immediately
    video_stream = VideoStream(src=0).start()
# otherwise, grab a reference to the video file
else:
    # plain OpenCV capture; .read() then returns a (grabbed, frame) tuple,
    # which the main loop unpacks with frame[1]
    video_stream = cv2.VideoCapture(args["video"])
#time.sleep(2)
#width and height of the screen
#width = video_stream.get(cv2.CAP_PROP_FRAME_WIDTH)
#height = video_stream.get(cv2.CAP_PROP_FRAME_HEIGHT)
#move the robot forward
def __move_forward__():
    """Drive both motors forward at the configured base speed.

    Uses the module-level ``robot_speed`` constant instead of the
    hard-coded 127 the other code paths duplicate, so changing the
    robot's speed in one place affects forward motion too.
    """
    #gpg.forward()
    #fwd()
    motor1(1, robot_speed)
    motor2(1, robot_speed)
    print("Moving forward")
#move the robot backward
def __move_backward__():
    """Reverse the robot via the gopigo bwd() helper and log the action."""
    #gpg.backward
    bwd()
    print("Reversing")
#move the robot left
def __turn_left__():
    """Rotate the robot left via the gopigo left() helper and log the action."""
    #gpg.left()
    left()
    print("Left turn")
#move the robot right
def __turn_right__():
    """Rotate the robot right via the gopigo right() helper and log the action."""
    #gpg.right()
    right()
    print("Right turn")
#stop both motors
def __stop_motors():
    """Cut power to both drive motors (PWM duty 0 on each)."""
    for motor in (motor1, motor2):
        motor(1, 0)
#check if the object is somewhat in the center of the view
def check_object_centered(objectCenter, w):
    """Return True if the object's x-coordinate lies strictly within
    +/-20 px of the frame's horizontal midpoint (w/2), else False.

    Parameters:
        objectCenter: x-coordinate of the detected object's centroid.
        w: frame width in pixels.
    """
    # chained comparison replaces the verbose `if cond: return True else: return False`
    half = w / 2
    return (half - 20) < objectCenter < (half + 20)
#turn the robot left or right
def __turn_robot__(objectCenter, w):
    """Steer toward the object: turn right when it is right of the
    20 px center band, left when it is left of it; do nothing when
    it is already inside the band.

    Fixes: typo ("lrft"), dead triple-quoted commented-out else branch
    removed, and the two mutually exclusive ifs made an if/elif chain.
    """
    #the object is on the right of the screen
    if objectCenter > (w / 2) + 20:
        __turn_right__()
    #the object is on the left of the screen
    elif objectCenter < (w / 2) - 20:
        __turn_left__()
#move the robot forward/backward
def __move_robot__(radius, center, w, pipe_in):
    """State-machine driver: move toward the ball while it is far (radius < 150)
    and centered; turn toward it while it is off-center; stop when close and
    centered.

    NOTE(review): this function is not called from the __main__ loop below
    (which uses control_robot instead) — presumably an earlier iteration kept
    for reference; confirm before deleting.

    Parameters:
        radius: enclosing-circle radius of the detected object (px) — proxy
            for distance; bigger means closer.
        center: x-coordinate of the object's centroid.
        w: frame width in pixels.
        pipe_in: multiprocessing pipe end; currently unused (sends are
            commented out).
    Side effects: mutates the module globals robot_is_moving/robot_is_turning
    and issues motor commands; the order (clear flag, stop(), then re-check
    both flags before starting a new motion) is deliberate.
    """
    #print("Moving robot... Radius", radius)
    global robot_is_moving, robot_is_turning
    print('moving', robot_is_moving)
    print('turning', robot_is_turning)
    # start moving toward the object if it is far & the robot isn't moving
    #if the object is far
    if(radius < 150):
        #and the object is centered
        if check_object_centered(center, w):
            print("Should move...")
            # cancel any in-progress turn before driving forward
            if robot_is_turning:
                robot_is_turning = False
                stop()
            #move toward it if not already doing so
            if not robot_is_moving and not robot_is_turning:
                #pipe_in.send("forward")
                robot_is_moving = True
                __move_forward__()
        #the object is not centered
        else:
            # cancel forward motion before turning
            if robot_is_moving:
                robot_is_moving = False
                stop()
            #turn toward it if not already moving
            if not robot_is_moving and not robot_is_turning:
                #pipe_in.send("forward")
                robot_is_turning = True
                __turn_robot__(center, w)
    # object is close (radius >= 150)
    else:
        # close and centered: goal reached, halt everything
        if check_object_centered(center, w):
            robot_is_moving = False
            robot_is_turning = False
            stop()
        #the object is not centered
        else:
            if robot_is_moving:
                robot_is_moving = False
                stop()
            #turn toward it if not already moving
            if not robot_is_moving and not robot_is_turning:
                #pipe_in.send("forward")
                robot_is_turning = True
                __turn_robot__(center, w)
# set the robot's movements
#radius, center, w
def control_robot(radius, object_center, w):
    """Proportional differential steering toward the tracked object.

    While the object is far (radius < 225): the motor on the side away
    from the object gets extra speed proportional to how far the object
    is outside the +/-20 px center band; when centered, both motors get
    a large boost (220) to drive straight. Speeds are clamped to 255.
    When the object is close (radius >= 225) the robot stops.

    Parameters:
        radius: enclosing-circle radius of the object (px), distance proxy.
        object_center: x-coordinate of the object's centroid.
        w: frame width in pixels.
    """
    #print("Moving robot... Radius", radius)
    print('radius', radius)
    center_left_anchor = (w / 2) - 20
    center_right_anchor = (w / 2) + 20
    #object is on the left
    if radius < 225:
        if object_center < center_left_anchor:
            # object left of band: speed up one track proportionally to the error
            motor2_speed = center_left_anchor - object_center + robot_speed
            motor1_speed = 1 + robot_speed
        elif object_center > center_right_anchor:
            # object right of band: mirror of the case above
            motor1_speed = object_center - center_right_anchor + robot_speed
            motor2_speed = 1 + robot_speed
        else:
            # centered: full-speed straight ahead
            motor1_speed = 220 + robot_speed
            motor2_speed = 220 + robot_speed
        print('object center', object_center)
        # clamp to the 8-bit PWM maximum
        motor1_speed = min(motor1_speed, 255)
        motor2_speed = min(motor2_speed, 255)
        print('mot1 {}, mot2 {}'.format(motor1_speed, motor2_speed))
        # NOTE(review): motor1_speed is sent to motor2 and vice versa —
        # possibly intentional (motor wiring/orientation); confirm on hardware
        # before "fixing". Preserved as-is.
        motor2(1, int(motor1_speed))
        motor1(1, int(motor2_speed))
    else:
        # object close enough: stop
        stop()
# start moving toward the object if it is far & the robot isn't moving
#if the object is far
# define the lower and upper boundaries of the possible colors of the
# ball in the HSV color space, then initialize the list of tracked points
# hsv color picking example: https://stackoverflow.com/a/48367205
# NOTE(review): only redLower/redUpper are used by the main loop's inRange
# call; the other bounds (and the low*/high* scalars) appear unused here.
greenLower = (29, 86, 6)
greenUpper = (64, 255, 255)
blueLower = (110, 140, 50)
blueUpper = (125, 255, 255)
orangeLower = (10, 100, 20)
orangeUpper = (25, 255, 255)
redLower = (160, 120, 20)
redUpper = (180, 255, 255)
lowHue = 160
highHue = 180
lowSat = 120
highSat = 255
lowVal = 20
highVal = 255
# bounded trail of tracked ball centroids; length set by -b/--buffer
pts = deque(maxlen=args["buffer"])
# allow the camera or video file to warm up
time.sleep(2.0)
if __name__ == '__main__':
    #global robot_is_moving, robot_is_turning
    # create the pipes used for the communication between 2 processes
    # (the worker process itself is commented out below; pipes are only closed)
    parent_p, child_p = multip.Pipe()
    # start processing
    #robot_process = init_multiprocessing(child_p, set_movement)
    #robot_process.start()
    w = 640  # assumed frame width in px — TODO confirm against actual capture size
    shape = 'none'
    #fourcc = cv2.VideoWriter_fourcc(*'MJPG')
    #video_record = cv2.VideoWriter('video.avi', fourcc, 20, (640, 480))
    # keep looping
    while True:
        # grab the current frame
        frame = video_stream.read()
        # handle the frame from VideoCapture or VideoStream
        frame = frame[1] if args.get("video", False) else frame
        # if we are viewing a video and we did not grab a frame,
        # then we have reached the end of the video
        if frame is None:
            break
        #flip the image (the gopigo camera is physically flipped, so we
        #have to flip it in the program to have a normal image)
        frame = cv2.flip(frame, -1)
        # convert to HSV for color thresholding
        hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
        # construct a mask for the tracked color (red bounds), then perform
        # a series of erosions and dilations to remove any small
        # blobs left in the mask
        mask = cv2.inRange(hsv, redLower, redUpper)
        mask = cv2.erode(mask, None, iterations=2)
        mask = cv2.dilate(mask, None, iterations=2)
        # find contours in the mask and initialize the current
        # (x, y) center of the ball
        # BUG FIX: the original kept two copies of the contour list
        # (`cnt` from grab_contours AND a manual tuple unpack into `cnts`);
        # grab_contours already handles both OpenCV return shapes.
        cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
            cv2.CHAIN_APPROX_SIMPLE)
        cnts = imutils.grab_contours(cnts)
        center = None
        # only proceed if at least one contour was found
        if len(cnts) > 0:
            # find the largest contour in the mask, then use
            # it to compute the minimum enclosing circle and
            # its center
            c = max(cnts, key=cv2.contourArea)
            ((x, y), radius) = cv2.minEnclosingCircle(c)
            M = cv2.moments(c)
            # BUG FIX: perimeter / polygon approximation now use the largest
            # contour `c`; the original used cnts[0], an arbitrary contour.
            peri = cv2.arcLength(c, True)
            approx = cv2.approxPolyDP(c, 0.04 * peri, True)
            #print('poly', approx)
            print('size-length', len(approx))
            # centroid of the contour; guard the degenerate zero-area case
            # (fall back to the enclosing-circle center)
            if M["m00"] != 0:
                center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))
            else:
                center = (int(x), int(y))
            # only proceed if the shape is round-ish (>4 polygon vertices)
            if len(approx) > 4:
                print('in approx')
                # and the radius meets a minimum size
                if radius > 10:
                    # draw the circle on the frame
                    cv2.circle(frame, (int(x), int(y)), int(radius),
                        (0, 255, 255), 2)
                    #cv2.circle(frame, center, 5, (0, 0, 255), -1)
                    # BUG FIX: bare `except:` narrowed; the error is now logged
                    try:
                        control_robot(radius, center[0], w)
                    except Exception as err:
                        print('exception error', err)
                else:
                    pass
                    #stop()
        #if no object is detected stop the robot
        else:
            robot_is_moving = False
            robot_is_turning = False
            stop()
            #left()
        # display the frame on the screen
        cv2.imshow("Frame", frame)
        key = cv2.waitKey(1) & 0xFF
        # if the 'q' key is pressed, stop the loop
        if key == ord("q"):
            break
    # close process & pipes
    parent_p.close()
    child_p.close()
    stop()
    # if we are not using a video file, stop the camera video stream
    if not args.get("video", False):
        video_stream.stop()
    # otherwise, release the camera
    else:
        video_stream.release()
    #video_record.release()
    # close all windows
    cv2.destroyAllWindows()