renamed scripts folder to pykick
0
pykick/__init__.py
Normal file
112
pykick/ball_approach.py
Normal file
@@ -0,0 +1,112 @@
from __future__ import print_function
from __future__ import division

from math import tan, pi

from .utils import read_config
from .imagereaders import NaoImageReader
from .finders import BallFinder
from .movements import NaoMover


class BallFollower(object):
    """Track the ball with the head and rotate/walk the body after it."""

    def __init__(self, nao_ip, nao_port, res, hsv_lower, hsv_upper,
                 min_radius, run_after):
        self.mover = NaoMover(nao_ip=nao_ip, nao_port=nao_port)
        self.mover.stand_up()
        self.video_top = NaoImageReader(nao_ip, port=nao_port, res=res,
                                        fps=30, cam_id=0)
        self.video_bot = NaoImageReader(nao_ip, port=nao_port, res=res,
                                        fps=30, cam_id=1)  # bottom camera
        self.finder = BallFinder(hsv_lower, hsv_upper, min_radius, None)
        self.lock_counter = 0
        self.loss_counter = 0
        self.run_after = run_after

    def ball_scan(self):
        # Sweep the head sideways; once it is near its limit, rotate the
        # whole body in the same direction instead.
        yaw = self.mover.get_head_angles()[0]
        mag = abs(yaw)
        sign = 1 if yaw >= 0 else -1
        if mag > 2:
            self.mover.move_to(0, 0, sign * pi / 12)
        else:
            self.mover.change_head_angles(sign * pi / 4, 0, 0.5)

    def update(self):
        # Look for the ball with the top camera first, then with the bottom
        # one; a TypeError from the finder means no ball was found.
        try:
            (x, y), radius = self.finder.find_colored_ball(
                self.video_top.get_frame()
            )
            self.loss_counter = 0
            x, y = self.video_top.to_relative(x, y)
            x, y = self.video_top.to_angles(x, y)
            # print("y (in radians) angle is:" + str(angles[1]))
            # y_angle = angles[1]
            # y_angle = pi/2 - y_angle - 15*pi/180
            # distance = 0.5 * tan(y_angle)
            # print("Distance=" + str(distance))
        except TypeError:
            try:
                (x, y), radius = self.finder.find_colored_ball(
                    self.video_bot.get_frame()
                )
                x, y = self.video_bot.to_relative(x, y)
                self.loss_counter = 0
            except TypeError:
                print('No ball in sight')
                self.loss_counter += 1
                if self.loss_counter > 5:
                    self.ball_scan()
                return
        self.process_coordinates(x, y)

    def process_coordinates(self, x, y):
        # Proportional control: x is the horizontal angle of the ball, so
        # turn the head towards it and, once the head is turned far enough,
        # rotate the whole body under it.
        d_yaw, d_pitch = x, 0
        print(d_yaw)

        self.mover.change_head_angles(d_yaw * 0.7, d_pitch,
                                      abs(d_yaw) / 2)
        yaw = self.mover.get_head_angles()[0]
        if abs(yaw) > 0.4:
            print('Going to rotate')
            self.mover.move_to(0, 0, yaw)
            self.mover.wait()
        if self.run_after:
            self.mover.move_to(0.3, 0, 0)

    def close(self):
        self.mover.rest()
        self.video_top.close()
        self.video_bot.close()


if __name__ == '__main__':

    cfg = read_config()
    follower = BallFollower(
        nao_ip=cfg['ip'],
        nao_port=cfg['port'],
        res=cfg['res'],
        hsv_lower=tuple(map(cfg.get, ('low_h', 'low_s', 'low_v'))),
        hsv_upper=tuple(map(cfg.get, ('high_h', 'high_s', 'high_v'))),
        min_radius=cfg['min_radius'],
        run_after=False
    )
    try:
        while True:
            follower.update()
    finally:
        follower.close()
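Because ball_approach.py uses package-relative imports, it is meant to be started as a module from the repository root (e.g. python -m pykick.ball_approach). The process_coordinates step above is a simple proportional controller: the ball's horizontal angle steers the head, and once the head yaw passes 0.4 rad the body rotates under it. A minimal sketch of that step in isolation, reusing the NaoMover methods defined later in this commit (the 0.7 gain and 0.4 rad threshold come from the code above; the function name is illustrative):

def tracking_step(mover, x, gain=0.7, yaw_limit=0.4):
    # turn the head toward the ball; speed scales with the error
    mover.change_head_angles(gain * x, 0, abs(x) / 2)
    yaw = mover.get_head_angles()[0]
    if abs(yaw) > yaw_limit:
        # head is turned far enough: rotate the whole body under it
        mover.move_to(0, 0, yaw)
        mover.wait()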
177
pykick/colorpicker.py
Normal file
@@ -0,0 +1,177 @@
from __future__ import print_function
from __future__ import division

import json
import argparse
import cv2

from .imagereaders import VideoReader, NaoImageReader, PictureReader
from .utils import read_config


class Colorpicker(object):

    WINDOW_CAPTURE_NAME = 'Video Capture'
    WINDOW_DETECTION_NAME = 'Object Detection'

    def __init__(self):
        parameters = ['low_h', 'low_s', 'low_v', 'high_h', 'high_s', 'high_v']
        maxes = [180, 255, 255, 180, 255, 255]
        # Keep every lower bound strictly below its upper bound and vice versa.
        checkers = [
            lambda x: min(x, self.settings['high_h'] - 1),  # LOW H
            lambda x: min(x, self.settings['high_s'] - 1),  # LOW S
            lambda x: min(x, self.settings['high_v'] - 1),  # LOW V
            lambda x: max(x, self.settings['low_h'] + 1),   # HIGH H
            lambda x: max(x, self.settings['low_s'] + 1),   # HIGH S
            lambda x: max(x, self.settings['low_v'] + 1),   # HIGH V
        ]
        self.settings = {
            'low_h': 0,
            'low_s': 0,
            'low_v': 0,
            'high_h': 180,
            'high_s': 255,
            'high_v': 255
        }
        cv2.namedWindow(self.WINDOW_CAPTURE_NAME)
        cv2.namedWindow(self.WINDOW_DETECTION_NAME)
        self.trackers = [
            cv2.createTrackbar(
                name, self.WINDOW_DETECTION_NAME, self.settings[name], max_v,
                lambda val, name=name, checker=checker: self._on_trackbar(
                    val, name, checker
                )
            )
            for name, max_v, checker in zip(parameters, maxes, checkers)
        ]

    def do_print(self):
        print(self.settings)

    def _on_trackbar(self, val, name, checker):
        self.settings[name] = checker(val)
        cv2.setTrackbarPos(name, self.WINDOW_DETECTION_NAME,
                           self.settings[name])

    def show_frame(self, frame, width=None):
        frame_HSV = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
        frame_threshold = cv2.inRange(
            frame_HSV,
            tuple(map(self.settings.get, ('low_h', 'low_s', 'low_v'))),
            tuple(map(self.settings.get, ('high_h', 'high_s', 'high_v')))
        )
        if width:
            sf = width / frame.shape[1]
            frame = cv2.resize(frame, (0, 0), fx=sf, fy=sf)
            frame_threshold = cv2.resize(frame_threshold, (0, 0), fx=sf, fy=sf)
        cv2.imshow(self.WINDOW_CAPTURE_NAME, frame)
        cv2.imshow(self.WINDOW_DETECTION_NAME, frame_threshold)
        return cv2.waitKey(1)

    def save(self, filename):
        try:
            with open(filename) as f:
                conf = json.load(f)
        except FileNotFoundError:
            conf = {}
        conf.update(self.settings)
        with open(filename, 'w') as f:
            json.dump(conf, f, indent=4)

    def load(self, filename):
        with open(filename) as f:
            conf = json.load(f)
        # Only pick up the HSV keys; the defaults file also stores ip/port etc.
        for name in self.settings:
            if name in conf:
                self.settings[name] = conf[name]
                cv2.setTrackbarPos(name, self.WINDOW_DETECTION_NAME,
                                   self.settings[name])


if __name__ == '__main__':

    defaults = read_config()
    parser = argparse.ArgumentParser(
        epilog='When called without arguments specifying the video source, ' +
               'the script will try to use the webcam'
    )
    parser.add_argument(
        '-o', '--output-config',
        help='file to which the settings will be saved (if given)'
    )
    parser.add_argument(
        '-i', '--input-config',
        help='file from which to read the initial values'
    )
    imsource = parser.add_mutually_exclusive_group()
    imsource.add_argument(
        '--video-file',
        help='video file to use'
    )
    imsource.add_argument(
        '--image-file',
        help='image to use'
    )
    imsource.add_argument(
        '--nao-ip',
        help='IP address of the NAO robot from which to capture',
        default=False,
        const=defaults['ip'],
        nargs='?'
    )
    parser.add_argument(
        '--still',
        help='only take one image from the video stream',
        action='store_true'
    )
    parser.add_argument(
        '--nao-cam',
        choices=[0, 1],
        type=int,
        help='0 for top camera, 1 for bottom camera',
        default=defaults['cam']
    )
    parser.add_argument(
        '--nao-res',
        choices=[1, 2, 3],
        help='NAO camera resolution mode',
        type=int,
        default=defaults['res']
    )
    parser.add_argument(
        '--width',
        help='width of the displayed image',
        type=int,
        default=640
    )
    args = parser.parse_args()

    cp = Colorpicker()
    if args.input_config:
        cp.load(args.input_config)

    if args.video_file:
        rdr = VideoReader(args.video_file, loop=True)
    elif args.image_file:
        rdr = PictureReader(args.image_file)
    elif args.nao_ip:
        rdr = NaoImageReader(
            args.nao_ip,
            cam_id=args.nao_cam,
            res=args.nao_res
        )
    else:
        rdr = VideoReader(0)

    try:
        if args.still:
            frame = rdr.get_frame()
            rdr.close()
        while True:
            if not args.still:
                frame = rdr.get_frame()
            key = cp.show_frame(frame, args.width)
            if key == ord('q') or key == 27:
                break
    finally:
        cp.do_print()
        if args.output_config:
            cp.save(args.output_config)
        if not args.still:
            rdr.close()
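A typical tuning session with the Colorpicker above: grab a still from the robot (see photo_capture.py further down in this commit), drag the six trackbars until only the ball survives in the detection window, then save. A sketch under those assumptions; the image filename is hypothetical, and save() merges into the existing JSON, so the ip/port entries of nao_defaults.json are preserved:

from pykick.colorpicker import Colorpicker
from pykick.imagereaders import PictureReader

cp = Colorpicker()
rdr = PictureReader('top20180101120000.jpg')  # hypothetical photo_capture output
frame = rdr.get_frame()
# tune until 'q' or Esc is pressed
while cp.show_frame(frame, width=640) not in (ord('q'), 27):
    frame = rdr.get_frame()
cp.save('nao_defaults.json')  # merge the tuned HSV bounds into the defaults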
17
pykick/copyfiles.sh
Executable file
@@ -0,0 +1,17 @@
#!/bin/bash

# Copy the on-robot scripts to the NAO over scp.
# Usage: ./copyfiles.sh [nao_ip]   (defaults to 192.168.0.11)

# Exit immediately if something fails
set -e

if [ -z "$1" ]
then
    nao_ip=192.168.0.11
else
    nao_ip="$1"
fi

destination="$nao_ip:/home/nao/kicker_scripts/"

# copy the files with scp
sshpass -p 'nao' scp ball_approach.py utils.py finders.py \
    movements.py imagereaders.py "$destination"
21
pykick/detection_demo.py
Normal file
@@ -0,0 +1,21 @@
from __future__ import print_function
from __future__ import division

from .utils import read_config
from .imagereaders import NaoImageReader, VideoReader
from .finders import BallFinder


if __name__ == '__main__':
    video = VideoReader(0, loop=True)
    hsv = read_config()
    hsv_lower = tuple(map(hsv.get, ('low_h', 'low_s', 'low_v')))
    hsv_upper = tuple(map(hsv.get, ('high_h', 'high_s', 'high_v')))
    # viz must be truthy, otherwise finder.visualize() refuses to draw
    finder = BallFinder(hsv_lower, hsv_upper, hsv['min_radius'], viz=True)
    try:
        while True:
            frame = video.get_frame()
            finder.find_colored_ball(frame)
            finder.visualize(frame)
    finally:
        video.close()
86
pykick/finders.py
Normal file
@@ -0,0 +1,86 @@
import json
from collections import deque

import cv2


class BallFinder(object):

    def __init__(self, hsv_lower, hsv_upper, min_radius, viz=False):
        self.hsv_lower = hsv_lower
        self.hsv_upper = hsv_upper
        self.min_radius = min_radius
        self.history = deque(maxlen=64)
        self.viz = viz

        if self.viz:
            cv2.namedWindow('ball_mask')
            cv2.namedWindow('Frame')

    def find_colored_ball(self, frame):
        hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)

        # construct a mask for the color, then perform a series of
        # dilations and erosions to remove any small blobs left in the mask
        mask = cv2.inRange(hsv, self.hsv_lower, self.hsv_upper)
        mask = cv2.erode(mask, None, iterations=2)
        mask = cv2.dilate(mask, None, iterations=2)
        if self.viz:
            cv2.imshow('ball_mask', mask)

        # find contours in the mask and initialize the current
        # (x, y) center of the ball
        cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                cv2.CHAIN_APPROX_SIMPLE)[-2]

        # only proceed if at least one contour was found
        if len(cnts) == 0:
            self.history.appendleft(None)
            return None

        # find the largest contour in the mask, then use it to compute
        # the minimum enclosing circle and centroid
        c = max(cnts, key=cv2.contourArea)
        ((x, y), radius) = cv2.minEnclosingCircle(c)

        if radius < self.min_radius:
            self.history.appendleft(None)
            return None

        # centroid from the contour moments: (m10/m00, m01/m00)
        M = cv2.moments(c)
        center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))
        self.history.appendleft((center, int(radius)))
        return center, int(radius)

    def visualize(self, frame):
        if not self.viz:
            raise ValueError(
                'Visualization needs to be enabled when initializing'
            )

        frame = frame.copy()
        if self.history[0] is not None:
            center, radius = self.history[0]
            cv2.circle(frame, center, radius, (255, 255, 0), 1)
            cv2.circle(frame, center, 5, (0, 255, 0), -1)

        # loop over the set of tracked points
        for i in range(1, len(self.history)):
            # if either of the tracked points are None, ignore them
            if self.history[i - 1] is None or self.history[i] is None:
                continue
            # otherwise, compute the thickness of the line and
            # draw the connecting line between consecutive points
            center_prev = self.history[i][0]
            center_next = self.history[i - 1][0]
            thickness = int((64 / (i + 1)) ** 0.5 * 2.5)
            cv2.line(frame, center_next, center_prev, (0, 255, 0), thickness)
        # show the frame on screen
        cv2.imshow("Frame", frame)
        return cv2.waitKey(1)

    def load_hsv_config(self, filename):
        with open(filename) as f:
            hsv = json.load(f)
        self.hsv_lower = tuple(map(hsv.get, ('low_h', 'low_s', 'low_v')))
        self.hsv_upper = tuple(map(hsv.get, ('high_h', 'high_s', 'high_v')))
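BallFinder boils down to: HSV threshold, erode/dilate, take the largest contour, fit a minimum enclosing circle, and report the contour centroid (m10/m00, m01/m00) plus the radius, or None when nothing passes min_radius. A one-shot usage sketch on a single image; the HSV bounds and min_radius are the red-ball defaults from nao_defaults.json, and the image path is hypothetical:

import cv2
from pykick.finders import BallFinder

finder = BallFinder(hsv_lower=(0, 175, 100), hsv_upper=(6, 255, 255),
                    min_radius=5)
frame = cv2.imread('ball.jpg')  # hypothetical test image
hit = finder.find_colored_ball(frame)
if hit is not None:
    (cx, cy), radius = hit
    print('ball at (%d, %d), radius %d px' % (cx, cy, radius))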
173
pykick/goal_video_detection.py
Normal file
@@ -0,0 +1,173 @@
# This script recognizes the goal posts in a given video file
# python goal_video_detection.py --video test3.avi

# import the necessary packages
from collections import deque
import numpy as np
import argparse
import imutils
import cv2
from time import sleep

# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--video",
                help="path to the (optional) video file")
ap.add_argument("-b", "--buffer", type=int, default=64,
                help="max buffer size")
args = vars(ap.parse_args())

# define the lower and upper boundaries of the ball color in the HSV
# color space, then initialize the list of tracked points
#greenLower = (0, 17, 225)
#greenUpper = (42, 255, 255)

greenLower = (0, 184, 170)
greenUpper = (2, 255, 255)

#greenLower = (29, 86, 6)
#greenUpper = (64, 255, 255)
pts = deque(maxlen=args["buffer"])

# if a video path was not supplied, grab the reference to the webcam
if not args.get("video", False):
    camera = cv2.VideoCapture(0)

# otherwise, grab a reference to the video file
else:
    camera = cv2.VideoCapture(args["video"])

# keep looping
while True:
    # grab the current frame
    (grabbed, frame) = camera.read()

    # if we are viewing a video and we did not grab a frame,
    # then we have reached the end of the video
    if args.get("video") and not grabbed:
        break

    # resize the frame, blur it, and convert it to the HSV color space
    # frame = imutils.resize(frame, width=600)
    # blurred = cv2.GaussianBlur(frame, (11, 11), 0)
    # hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)

    # construct a mask for the color, then perform a series of dilations
    # and erosions to remove any small blobs left in the mask
    '''
    mask = cv2.inRange(hsv, greenLower, greenUpper)
    mask = cv2.erode(mask, None, iterations=2)
    mask = cv2.dilate(mask, None, iterations=2)
    '''

    # convert to HSV and threshold for the bright goal posts
    frame_HSV = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    # frame_test = cv2.cvtColor(frame_HSV, cv2.COLOR_HSV2BGR)
    # frame_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    # frame_threshold = cv2.inRange(frame_HSV, (0, 9, 139), (180, 81, 255))
    frame_threshold = cv2.inRange(frame_HSV, (0, 0, 182), (180, 60, 255))

    frame_threshold = cv2.GaussianBlur(frame_threshold, (9, 9), 3, 3)

    erode_element = cv2.getStructuringElement(cv2.MORPH_RECT, (10, 10))
    #dilate_element = cv2.getStructuringElement(cv2.MORPH_RECT, (20, 20))
    eroded_mask = cv2.erode(frame_threshold, erode_element)
    #dilated_mask = cv2.dilate(eroded_mask, dilate_element)
    #frame_threshold = eroded_mask

    # preparation for edge detection
    res = cv2.bitwise_and(frame, frame, mask=eroded_mask)
    res = cv2.cvtColor(res, cv2.COLOR_BGR2GRAY)

    # use Canny edge detection
    frame_edge = cv2.Canny(res, 90, 494, apertureSize=3)
    #frame_edge = cv2.Canny(res, 0, 123, apertureSize=3)

    # use Hough lines; guard against frames where no line is found
    lines = cv2.HoughLines(frame_edge, 1, 1 * np.pi / 180, 80, 0, 0)

    if lines is not None:
        for rho, theta in lines[0]:
            # print(rho, theta)
            a = np.cos(theta)
            b = np.sin(theta)
            x0 = a * rho
            y0 = b * rho
            x1 = int(x0 + 200 * (-b))
            y1 = int(y0 + 200 * (a))
            x2 = int(x0 - 200 * (-b))
            y2 = int(y0 - 200 * (a))
            # keep only near-vertical lines (goal post candidates)
            if (theta > np.pi / 180 * 170 or theta < np.pi / 180 * 10):
                #if (theta > np.pi / 180 * 80 and theta < np.pi / 180 * 100):
                cv2.line(frame, (x1, y1), (x2, y2), (0, 0, 255), 2)

    '''
    # find contours in the mask and initialize the current
    # (x, y) center of the ball
    cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                            cv2.CHAIN_APPROX_SIMPLE)[-2]
    center = None

    # only proceed if at least one contour was found
    if len(cnts) > 0:
        # find the largest contour in the mask, then use
        # it to compute the minimum enclosing circle and
        # centroid
        c = max(cnts, key=cv2.contourArea)
        ((x, y), radius) = cv2.minEnclosingCircle(c)
        M = cv2.moments(c)
        center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))

        # only proceed if the radius meets a minimum size
        if radius > 10:
            # draw the circle and centroid on the frame,
            # then update the list of tracked points
            cv2.circle(frame, (int(x), int(y)), int(radius),
                       (0, 255, 255), 2)
            cv2.circle(frame, center, 5, (0, 0, 255), -1)

    # update the points queue
    pts.appendleft(center)

    # loop over the set of tracked points
    for i in xrange(1, len(pts)):
        # if either of the tracked points are None, ignore them
        if pts[i - 1] is None or pts[i] is None:
            continue

        # otherwise, compute the thickness of the line and
        # draw the connecting lines
        thickness = int(np.sqrt(args["buffer"] / float(i + 1)) * 1.25)
        cv2.line(frame, pts[i - 1], pts[i], (0, 255, 0), thickness)
    '''
    # show the frames on screen
    #cv2.imshow("Frame", frame)
    cv2.imshow("Frame_threshold", frame_threshold)
    cv2.imshow("eroded_mask", eroded_mask)
    cv2.imshow("frame edge", frame_edge)
    cv2.imshow("result eroded_mask applied", res)
    cv2.imshow("frame with lines", frame)
    key = cv2.waitKey(1) & 0xFF

    sleep(0.05)
    # if the 'q' key is pressed, stop the loop
    if key == ord("q"):
        break

# cleanup the camera and close any open windows
camera.release()
cv2.destroyAllWindows()
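The theta test in the Hough loop above keeps lines whose normal angle is within 10 degrees of 0 or 180 degrees, i.e. lines that run nearly vertically in the image, which is what goal posts look like from the robot's viewpoint. The same test expressed in degrees, as a small helper (the function name and tolerance parameter are illustrative):

import numpy as np

def is_post_candidate(theta, tol_deg=10.0):
    # theta is the angle of the line's normal, as returned by cv2.HoughLines
    deg = np.degrees(theta)
    return deg < tol_deg or deg > 180.0 - tol_deg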
81
pykick/imagereaders.py
Normal file
@@ -0,0 +1,81 @@
from __future__ import division

import numpy as np
import cv2
from naoqi import ALProxy


class NaoImageReader(object):

    # resolution mode -> (height, width)
    RESOLUTIONS = {
        1: (240, 320),
        2: (480, 640),
        3: (960, 1280)
    }

    def __init__(self, ip, port=9559, res=1, fps=30, cam_id=0):
        ip = bytes(ip)
        self.res = self.RESOLUTIONS[res]
        self.cam_id = cam_id
        self.vd = ALProxy('ALVideoDevice', ip, port)
        self.sub = self.vd.subscribeCamera(
            "video_streamer", cam_id, res, 13, fps  # 13 = BGR color space
        )

    def to_angles(self, x, y):
        return self.vd.getAngularPositionFromImagePosition(
            self.cam_id, [x, y]
        )

    def to_relative(self, x, y):
        return x / self.res[1], y / self.res[0]

    def get_frame(self):
        result = self.vd.getImageRemote(self.sub)
        self.vd.releaseImage(self.sub)
        if result is None:
            raise RuntimeError('cannot capture')
        elif result[6] is None:
            raise ValueError('no image data string')
        else:
            height, width = self.res
            return np.frombuffer(result[6], dtype=np.uint8).reshape(
                height, width, 3
            )

    def close(self):
        self.vd.unsubscribe(self.sub)


class VideoReader(object):

    def __init__(self, filename=0, loop=False):
        self.cap = cv2.VideoCapture(filename)
        self.loop = loop if filename else False
        self.ctr = 0

    def get_frame(self):
        succ, frame = self.cap.read()
        if not succ:
            raise ValueError('Error while reading video')
        self.ctr += 1
        if self.ctr == self.cap.get(cv2.CAP_PROP_FRAME_COUNT) and self.loop:
            self.ctr = 0
            self.cap.set(cv2.CAP_PROP_POS_FRAMES, 0)
        return frame

    def close(self):
        self.cap.release()


class PictureReader(object):
    """Dummy reader that always returns a copy of the same still image."""

    def __init__(self, filename):
        self.frame = cv2.imread(filename)

    def get_frame(self):
        return self.frame.copy()

    def close(self):
        pass
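getImageRemote returns a container whose seventh element (index 6) is the raw image byte string, which NaoImageReader reshapes into a (height, width, 3) BGR array; to_relative and to_angles then turn a pixel position into normalized image coordinates and camera-frame angles. A grab-one-frame sketch, assuming the NAO at the IP from nao_defaults.json and probing the image centre at res=1:

from pykick.imagereaders import NaoImageReader

cam = NaoImageReader('192.168.0.11', res=1, fps=30, cam_id=0)
try:
    frame = cam.get_frame()                   # (240, 320, 3) BGR array
    x_rel, y_rel = cam.to_relative(160, 120)  # pixel -> [0, 1] coordinates
    yaw, pitch = cam.to_angles(x_rel, y_rel)  # -> angles in the camera frame
    print('yaw=%.3f pitch=%.3f' % (yaw, pitch))
finally:
    cam.close()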
16
pykick/motion_setter.py
Normal file
@@ -0,0 +1,16 @@
import argparse

from .movements import NaoMover
from .utils import read_config


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('state', choices=('stand', 'rest'))
    args = parser.parse_args()
    cfg = read_config()
    mover = NaoMover(cfg['ip'], cfg['port'])
    if args.state == 'stand':
        mover.stand_up()
    elif args.state == 'rest':
        mover.rest()
19
pykick/move_robot.py
Normal file
@@ -0,0 +1,19 @@
from .movements import NaoMover
from .utils import read_config


if __name__ == "__main__":
    cfg = read_config()
    mover = NaoMover(cfg['ip'], cfg['port'])
    mover.stand_up()
    while True:
        axis = int(raw_input('Axis: '))
        amount = float(raw_input('How much: '))
        if axis == 0:
            mover.move_to(amount, 0, 0)
        elif axis == 1:
            mover.move_to(0, amount, 0)
        elif axis == 2:
            mover.move_to(0, 0, amount)
        else:
            print('Axis out of range (0, 1, 2)')
87
pykick/movements.py
Normal file
@@ -0,0 +1,87 @@
from naoqi import ALProxy


class NaoMover(object):

    def __init__(self, nao_ip, nao_port=9559):
        nao_ip = bytes(nao_ip)
        self.mp = ALProxy('ALMotion', nao_ip, nao_port)
        self.pp = ALProxy('ALRobotPosture', nao_ip, nao_port)
        ap = ALProxy("ALAutonomousLife", nao_ip, nao_port)
        if ap.getState() != "disabled":
            ap.setState('disabled')
        self.set_head_stiffness()
        self.set_hand_stiffness()
        self.set_arm_stiffness()
        self.set_hip_stiffness()
        self.set_knee_stiffness()
        self.set_ankle_stiffness()
        self.ready_to_move = False

    def stand_up(self):
        self.set_arm_stiffness(0.9)
        self.set_hand_stiffness(0.9)
        self.pp.goToPosture('StandInit', 1.0)
        self.set_hand_stiffness()
        self.set_arm_stiffness()

    def rest(self):
        self.mp.rest()
        self.ready_to_move = False

    def set_head_stiffness(self, stiffness=0.5):
        self.mp.setStiffnesses("Head", stiffness)

    def set_hand_stiffness(self, stiffness=0.0):
        self.mp.setStiffnesses("RHand", stiffness)
        self.mp.setStiffnesses("LHand", stiffness)
        self.mp.setStiffnesses("LWristYaw", stiffness)
        self.mp.setStiffnesses("RWristYaw", stiffness)

    def set_arm_stiffness(self, stiffness=0.3):
        self.mp.setStiffnesses("LShoulderPitch", stiffness)
        self.mp.setStiffnesses("LShoulderRoll", stiffness)
        self.mp.setStiffnesses("LElbowRoll", stiffness)
        self.mp.setStiffnesses("LElbowYaw", stiffness)
        self.mp.setStiffnesses("RShoulderPitch", stiffness)
        self.mp.setStiffnesses("RShoulderRoll", stiffness)
        self.mp.setStiffnesses("RElbowRoll", stiffness)
        self.mp.setStiffnesses("RElbowYaw", stiffness)

    def set_hip_stiffness(self, stiffness=0.6):
        self.mp.setStiffnesses("LHipYawPitch", stiffness)
        self.mp.setStiffnesses("LHipPitch", stiffness)
        self.mp.setStiffnesses("RHipYawPitch", stiffness)
        self.mp.setStiffnesses("RHipPitch", stiffness)
        self.mp.setStiffnesses("LHipRoll", stiffness)
        self.mp.setStiffnesses("RHipRoll", stiffness)

    def set_ankle_stiffness(self, stiffness=0.6):
        self.mp.setStiffnesses("LAnklePitch", stiffness)
        self.mp.setStiffnesses("LAnkleRoll", stiffness)
        self.mp.setStiffnesses("RAnklePitch", stiffness)
        self.mp.setStiffnesses("RAnkleRoll", stiffness)

    def set_knee_stiffness(self, stiffness=0.6):
        self.mp.setStiffnesses("LKneePitch", stiffness)
        self.mp.setStiffnesses("RKneePitch", stiffness)

    def get_head_angles(self):
        return self.mp.getAngles(('HeadYaw', 'HeadPitch'), False)

    def change_head_angles(self, d_yaw, d_pitch, speed):
        self.mp.changeAngles(('HeadYaw', 'HeadPitch'),
                             (d_yaw, d_pitch), speed)

    def set_head_angles(self, yaw, pitch, speed):
        self.mp.setAngles(('HeadYaw', 'HeadPitch'),
                          (yaw, pitch), speed)

    def move_to(self, front, side, rotation, wait=False):
        # moveTo is posted asynchronously; call wait() to block until done
        if not self.ready_to_move:
            self.mp.moveInit()
            self.ready_to_move = True
        self.mp.post.moveTo(front, side, rotation)

    def wait(self):
        self.mp.waitUntilMoveIsFinished()
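NaoMover wraps ALMotion and ALRobotPosture, disables autonomous life, and pre-sets joint stiffnesses; since move_to posts moveTo asynchronously, wait() has to be called before issuing a dependent command or resting. A minimal walking sketch under those assumptions (IP taken from nao_defaults.json):

from math import pi
from pykick.movements import NaoMover

mover = NaoMover('192.168.0.11')  # default port 9559
mover.stand_up()
mover.move_to(0.5, 0, 0)          # half a metre forward
mover.wait()                      # block until the asynchronous walk is done
mover.move_to(0, 0, pi / 2)       # turn 90 degrees to the left
mover.wait()
mover.rest()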
14
pykick/nao_defaults.json
Normal file
@@ -0,0 +1,14 @@
{
    "ip": "192.168.0.11",
    "port": 9559,
    "cam": 0,
    "res": 1,
    "fps": 30,
    "low_h": 0,
    "low_s": 175,
    "low_v": 100,
    "high_h": 6,
    "high_s": 255,
    "high_v": 255,
    "min_radius": 5
}
25
pykick/photo_capture.py
Normal file
@@ -0,0 +1,25 @@
import argparse
import cv2
from datetime import datetime

from .utils import read_config
from .imagereaders import NaoImageReader


if __name__ == '__main__':
    cfg = read_config()
    parser = argparse.ArgumentParser()
    parser.add_argument('--res', type=int, choices=(1, 2, 3),
                        default=3)
    parser.add_argument('--cam-id', type=int, choices=(0, 1),
                        default=0)
    args = parser.parse_args()

    video = NaoImageReader(cfg['ip'], res=args.res, cam_id=args.cam_id,
                           fps=1)
    frame = video.get_frame()
    video.close()
    now = datetime.now().strftime('%Y%m%d%H%M%S')

    prefix = 'bottom' if args.cam_id else 'top'
    cv2.imwrite(prefix + now + '.jpg', frame)
129
pykick/reacting_to_events.py
Normal file
@@ -0,0 +1,129 @@
from naoqi import ALProxy
from naoqi import ALBroker
from naoqi import ALModule
import time

NAO_IP = "192.168.0.11"

# Global variables to store the BallSearcher module instance and state
BallSearcher = None
memory = None
ball_proxy = None
ball_found = False


class BallSearcherModule(ALModule):
    """A simple module able to react to red ball detection events."""

    def __init__(self, name):
        ALModule.__init__(self, name)
        # No need for IP and port here because
        # we have our Python broker connected to the NAOqi broker

        # Create a proxy to ALTextToSpeech for later use
        self.tts = ALProxy("ALTextToSpeech")
        self.tts.setParameter('speed', 100)
        self.mp = ALProxy('ALMotion')
        self.mp.setStiffnesses("Head", 1.0)

        # Subscribe to the redBallDetected event:
        global memory
        global ball_proxy
        global ball_found
        ball_proxy = ALProxy('ALRedBallDetection')
        ball_proxy.subscribe('detector')
        memory = ALProxy("ALMemory")
        memory.subscribeToEvent("redBallDetected",
                                "BallSearcher",
                                "onBallDetected")

    def searchForBall(self):
        names = ["HeadYaw", "HeadPitch"]
        sleep_period = 0.8
        fractionMaxSpeed = 0.5

        # sweep the head to the left
        i = 0
        while i < 2:
            time.sleep(sleep_period)
            if ball_found:
                return
            print i
            angles = [i, 0]
            self.mp.setAngles(names, angles, fractionMaxSpeed)
            i = float(i) + 3.14 / 4

        # go back to the middle position
        time.sleep(sleep_period)
        if ball_found:
            return
        print 'go back'
        angles = [0, 0]
        self.mp.setAngles(names, angles, fractionMaxSpeed)

        # sweep the head to the right
        i = 0
        while i > -2:
            time.sleep(sleep_period)
            if ball_found:
                return
            print i
            angles = [i, 0]
            self.mp.setAngles(names, angles, fractionMaxSpeed)
            i = i - 3.14 / 4

        # go back to the middle position
        time.sleep(sleep_period)
        if ball_found:
            return
        print "get back"
        angles = [0, 0]
        self.mp.setAngles(names, angles, fractionMaxSpeed)

    def onBallDetected(self, *_args):
        """This will be called each time a ball is detected."""

        # Unsubscribe from the event while talking, to avoid repetitions
        memory.unsubscribeToEvent("redBallDetected", "BallSearcher")
        global ball_found
        ball_found = True
        print 'gotcha'
        self.tts.say("Hello, ball")

        # Subscribe again to the event
        # memory.subscribeToEvent("redBallDetected",
        #                         "BallSearcher",
        #                         "onBallDetected")


def main():
    """Main entry point."""

    # We need this broker to be able to construct
    # NAOqi modules and subscribe to other modules.
    # The broker must stay alive until the program exits.
    myBroker = ALBroker(
        "myBroker",
        "0.0.0.0",
        0,
        NAO_IP,
        9559
    )

    # Warning: BallSearcher must be a global variable, and the name given
    # to the constructor must be the name of that variable.
    global BallSearcher
    BallSearcher = BallSearcherModule("BallSearcher")
    BallSearcher.searchForBall()

    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        print "Interrupted by user, shutting down"
        myBroker.shutdown()


if __name__ == "__main__":
    main()
60
pykick/scan_for_ball.py
Normal file
@@ -0,0 +1,60 @@
# -*- encoding: UTF-8 -*-
# syntax
# python scan_for_ball.py

import sys
from naoqi import ALProxy
import time


def main(robotIP):
    PORT = 9559

    try:
        motionProxy = ALProxy("ALMotion", robotIP, PORT)
    except Exception as e:
        print "Could not create proxy to ALMotion"
        print "Error was: ", e
        sys.exit(1)

    # activate the head joints (set stiffness)
    motionProxy.setStiffnesses("Head", 1.0)

    # Example showing how to set angles, using a fraction of max speed
    names = ["HeadYaw", "HeadPitch"]

    # go into the left direction
    i = 0
    while i < 2:
        angles = [i, 0]
        fractionMaxSpeed = 0.5
        motionProxy.setAngles(names, angles, fractionMaxSpeed)
        i = float(i) + 3.14 / 4
        time.sleep(0.3)

    # go back to the middle position
    angles = [0, 0]
    fractionMaxSpeed = 0.5
    motionProxy.setAngles(names, angles, fractionMaxSpeed)
    print "head set back"
    time.sleep(0.3)

    # go into the right direction
    i = 0
    while i > -2:
        angles = [i, 0]
        fractionMaxSpeed = 0.5
        motionProxy.setAngles(names, angles, fractionMaxSpeed)
        i = i - 3.14 / 4
        time.sleep(0.3)

    # go back to the middle position
    angles = [0, 0]
    fractionMaxSpeed = 0.5
    motionProxy.setAngles(names, angles, fractionMaxSpeed)
    print "head set back"
    time.sleep(1)


if __name__ == "__main__":
    robotIp = "192.168.0.11"

    main(robotIp)
17
pykick/setangles.py
Normal file
@@ -0,0 +1,17 @@
import argparse

from .movements import NaoMover
from .utils import read_config


if __name__ == "__main__":

    cfg = read_config()
    parser = argparse.ArgumentParser()
    parser.add_argument('yaw', type=float)
    parser.add_argument('pitch', type=float)
    parser.add_argument('speed', type=float, nargs='?', default=0.1)
    args = parser.parse_args()

    mover = NaoMover(cfg['ip'], cfg['port'])
    mover.set_head_angles(args.yaw, args.pitch, args.speed)
7
pykick/utils.py
Normal file
@@ -0,0 +1,7 @@
import json


def read_config():
    # Note: the path is resolved relative to the current working directory.
    with open('nao_defaults.json') as f:
        cfg = json.load(f)
    return cfg
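read_config opens nao_defaults.json relative to the current working directory, so all of the entry points above have to be launched from the directory holding that file. A path-independent variant would resolve the defaults next to the package instead; a sketch (this changes behaviour and is not what the commit does):

import json
import os


def read_config():
    # resolve nao_defaults.json next to this module instead of the CWD
    here = os.path.dirname(os.path.abspath(__file__))
    with open(os.path.join(here, 'nao_defaults.json')) as f:
        return json.load(f)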