more documentation

2019-02-28 15:00:40 +01:00
parent ef4dafdaf4
commit c2d25dfb4b
9 changed files with 84 additions and 13 deletions

View File

@@ -17,6 +17,10 @@ imitate your arm motions.
* [nao_meshes](http://wiki.ros.org/nao_meshes)
* [usb_cam](http://wiki.ros.org/usb_cam)

We installed the dependencies by cloning them into our workspace and running
`catkin_make`, but `sudo apt install ros-indigo-<package-name>` should work
even better.

## Usage
Hang an ArUco #0 marker on the chest, take the #1 in the left hand and #2 in
@@ -41,9 +45,13 @@ $ roslaunch nao_bringup nao_full.launch
$ roslaunch teleoperation with_gui.launch
```
If any problems arise with the GUI, such as the NAO not being displayed
properly, try pointing rviz to the correct NAO `.urdf`, which should be
somewhere in the `nao_description` package.
This diagram depicts what you should expect from the teleoperation routine.

![State Machine](docs/figures/teleoperation_overview.png)

Since this package relies on the NAO voice recognition, you must be in the same
room with NAO, and the room must be quiet.

View File

@@ -1,5 +1,9 @@
#! /usr/bin/env python
"""The script for calibrating the arm lengths and 'joystick' extents.
You will normally not call this script directly but rather with
`roslaunch teleoperation calibration.launch`
"""
import os
import json
@@ -14,7 +18,23 @@ from teleoperation.msg import RequestSpeechAction, RequestSpeechGoal
def calib_field(sentences, src_frames, targ_frames):
    """Retrieve a relative transform of some marker.

    Parameters
    ----------
    sentences : list of str
        What to say.
    src_frames : list of str
        The names of tf source frames (from where to transform)
    targ_frames : list of str
        The names of tf target frames (to where to transform)

    Returns
    -------
    list of transforms

    """
    ok = False
    t_and_q = []
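
For orientation, the transform retrieval underneath this function boils down to a plain `tf` lookup per (source, target) frame pair. The sketch below is illustrative only and is not the body of `calib_field`; it assumes an already-running `tf.TransformListener` and takes the frame names as arguments, matching the entries of `src_frames` and `targ_frames`.

```
import rospy
import tf


def lookup_transform(listener, src_frame, targ_frame, timeout=5.0):
    """Return (translation, quaternion) of src_frame expressed in targ_frame.

    `listener` is a tf.TransformListener; src_frame/targ_frame correspond to
    single entries of the `src_frames`/`targ_frames` lists above.
    """
    listener.waitForTransform(targ_frame, src_frame, rospy.Time(0),
                              rospy.Duration(timeout))
    return listener.lookupTransform(targ_frame, src_frame, rospy.Time(0))
```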

View File

@@ -1,5 +1,5 @@
#! /usr/bin/env python
"""Library for executing control for a given effector"""
from __future__ import division
import os
@@ -29,6 +29,7 @@ NAO_ARM = 0.22
def xyz(joint):
    """Get 3D coordinate of a joint in torso frame."""
    return np.array(mp.getPosition(joint, FRAME_TORSO, False),
                    dtype=np.float64)[:3]
@@ -88,6 +89,7 @@ def jacobian(side):
    ax_order = (y_axis, z_axis, x_axis, z_axis)
    # Calculate the Jacobian after the formula from the report
    axes = np.concatenate(
        [get_transform(side + j).dot(ax) for j, ax in zip(JOINTS, ax_order)],
        axis=1
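
The comment above points to a formula in the report, which this diff does not include. As a rough reference for what a geometric Jacobian of a revolute chain looks like (each column is the joint axis crossed with the lever arm to the end effector), here is a generic, self-contained sketch; it is not the body of `jacobian(side)`, which works from NAO transforms obtained via `get_transform`.

```
import numpy as np


def geometric_jacobian(axes_world, joint_positions, effector_position):
    """Positional Jacobian: J[:, i] = axis_i x (p_effector - p_joint_i).

    axes_world: list of unit rotation axes, shape (3,), in a common frame.
    joint_positions: list of joint origins, shape (3,), in the same frame.
    effector_position: end-effector position, shape (3,).
    All inputs here are illustrative placeholders.
    """
    cols = [np.cross(a, effector_position - p)
            for a, p in zip(axes_world, joint_positions)]
    return np.column_stack(cols)  # shape (3, n_joints)
```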

View File

@@ -1,5 +1,10 @@
#! /usr/bin/env python
"""A module for graceful fall handling.
ROS node: `fall_detector`.
Uses `inform_controller` service.
"""
import os
import rospy

View File

@@ -1,5 +1,9 @@
#! /usr/bin/env python
"""A small module for hand controls service.
ROS Node: `hand_ler`
Provides `hands` service.
"""
import os
import rospy

View File

@@ -1,5 +1,9 @@
#! /usr/bin/env python
"""The node performing the imitation.
ROS Node: `imitator`
Uses `inform_masterloop` service.
"""
from argparse import ArgumentParser
from math import radians
@@ -79,9 +83,9 @@ if __name__ == '__main__':
        except Exception as e:
            rospy.logwarn(e)
            continue
        # Calculate the position of the hand in my chest frame
        my_arm_xyz = np.array(arm) - np.array(a0)
        # rospy.loginfo('{}'.format(my_arm_xyz))
        # rospy.loginfo('{}'.format(dumb(my_arm_xyz, side)))
        imitation_cycle(my_arm_xyz, side)  # Do the actuation
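
The added comment explains that `my_arm_xyz` is the operator's hand position expressed relative to the chest marker. What `imitation_cycle` then does with it lies outside this hunk; as a purely hypothetical illustration of the scaling idea (rescaling the operator's reach to NAO's arm length, `NAO_ARM = 0.22` m in the controller module), one could write:

```
import numpy as np

NAO_ARM = 0.22     # NAO arm length in metres (constant from the controller module)
HUMAN_ARM = 0.60   # assumed operator arm length; in practice this comes from calibration


def scale_to_nao(my_arm_xyz, human_arm=HUMAN_ARM, nao_arm=NAO_ARM):
    """Hypothetical helper: rescale the chest-to-hand vector to NAO proportions.

    The real pipeline feeds `my_arm_xyz` into `imitation_cycle`, whose
    internals are not shown in this diff.
    """
    v = np.asarray(my_arm_xyz, dtype=np.float64)
    return v * (nao_arm / human_arm)
```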

View File

@@ -107,7 +107,10 @@ def inform_masterloop_factory(who):
def handle_request(r):
    """Handle a node's request to seize/release control.

    Update the state if needed.
    """
    global state
    module = r.module
    message = r.message
@@ -176,6 +179,8 @@ if __name__ == '__main__':
    ic = rospy.Service('inform_masterloop', InformMasterloop, handle_request)
    # Necessary initializations all have been performed by now

    def _shutdown():
        if speech_in_progress:
            speech.cancel_goal()
@@ -183,7 +188,7 @@ if __name__ == '__main__':
    rospy.on_shutdown(_shutdown)

    while not rospy.is_shutdown():  # Run the speech detection loop
        if state in ('idle', 'imitate', 'dead'):
            if not speech_in_progress:
                speech_in_progress = True
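
On the client side, nodes such as `walker` and `imitator` obtain a closure from `inform_masterloop_factory('<node name>')` and call it with a short message ('move', 'stop', ...), treating the return value as permission to act. A minimal sketch of that calling pattern follows; it assumes the service definition lives in `teleoperation.srv` and that the response exposes a boolean `permission` field, neither of which is visible in this diff, and it is not the package's own `inform_masterloop_factory`.

```
import rospy
from teleoperation.srv import InformMasterloop  # assumed location of the .srv definition


def make_masterloop_client(who):
    """Return a callable that reports `who`'s intent to the masterloop (sketch only)."""
    rospy.wait_for_service('inform_masterloop')
    proxy = rospy.ServiceProxy('inform_masterloop', InformMasterloop)

    def _inform(message):
        # e.g. _inform('move') to ask for control, _inform('stop') to release it
        return proxy(module=who, message=message).permission  # field name assumed
    return _inform
```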

View File

@@ -1,4 +1,10 @@
#! /usr/bin/env python
"""A module for speech detection.
ROS Node: `speech_server`
Provides `request_speech` action
"""
import os
import rospy
@@ -13,6 +19,7 @@ almem = None
def request_speech(goal):
    """Handle the request for speech detection."""
    rospy.loginfo('SPEECH SERVER: NEW GOAL: {}'.format(goal))
    if not speech_detector.start_speech(goal.vocabulary):
        sas.set_succeeded(RequestSpeechResult(word=''))
@@ -33,6 +40,7 @@ def request_speech(goal):
class SpeechDetectorModule(ALModule):
    """ALModule for accessing the NAO's speech recognition."""

    def __init__(self, name):
        ALModule.__init__(self, name)
@@ -52,6 +60,7 @@ class SpeechDetectorModule(ALModule):
        return almem.getData('ALSpeechRecognition/Status')

    def start_speech(self, voc, resume=False):
        """Start recognizing the given vocabulary."""
        if self._busy != resume:
            return False
        if not resume:
@@ -63,14 +72,17 @@ class SpeechDetectorModule(ALModule):
        return True

    def have_word(self):
        """Check if something was recognized."""
        return self.recognized is not None

    def get_recognized_and_erase(self):
        """Retrieve the recognized word and erase it from the variable."""
        result = self.recognized
        self.recognized = None
        return result

    def stop_speech(self, pause=False):
        """Stop detecting or just pause detection."""
        if not self._busy:
            return
        self.asr.pause(True)
@@ -79,6 +91,7 @@ class SpeechDetectorModule(ALModule):
        self._busy = False

    def on_word_recognized(self, *_args):
        """Callback for word recognized event."""
        word, conf = almem.getData('WordRecognized')
        if conf > 0.4:
            self.stop_speech(pause=True)
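
On the client side, the calibration script above imports `RequestSpeechAction` and `RequestSpeechGoal` from `teleoperation.msg`. A minimal sketch of a client for this action, assuming the server is registered under the action name `request_speech` (as the module docstring states) and using the `vocabulary`/`word` fields visible in this file:

```
import rospy
import actionlib
from teleoperation.msg import RequestSpeechAction, RequestSpeechGoal


def ask_yes_no(timeout=20.0):
    """Ask the speech server to recognize 'yes' or 'no' and return the word ('' on timeout)."""
    client = actionlib.SimpleActionClient('request_speech', RequestSpeechAction)
    client.wait_for_server()
    client.send_goal(RequestSpeechGoal(vocabulary=['yes', 'no']))
    if not client.wait_for_result(rospy.Duration(timeout)):
        client.cancel_goal()
        return ''
    return client.get_result().word
```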

View File

@@ -1,4 +1,10 @@
#! /usr/bin/env python
"""Module for walking in the environment (Human Joystick).
ROS Node: `walker`.
Uses `inform_masterloop` service.
"""
from __future__ import division
import os
@@ -23,6 +29,7 @@ VMAX = 1.0
def n_way(a, b, n=3):
    """A point which is (b - a)/n away from a."""
    return a + (b - a) / n
@@ -45,6 +52,7 @@ _inform_masterloop = inform_masterloop_factory('walker')
def _speed(pos, interval):
    """Calculate speed based on the operator's position."""
    int_dir = 1 if interval[1] > interval[0] else -1
    if int_dir * (pos - interval[0]) < 0:
        return 0.0
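
Only the dead-zone check of `_speed` is visible in this hunk. As a rough illustration of a human-joystick mapping of this shape (a dead zone followed by a linear ramp clipped at `VMAX = 1.0`), one might write something like the following; details of the real `_speed` beyond the lines shown above are not in the diff.

```
VMAX = 1.0  # maximum normalized walking speed (module constant)


def ramp_speed(pos, interval, vmax=VMAX):
    """Illustrative mapping of a position inside `interval` to a speed in [0, vmax].

    `interval` is (start, end); positions before `start` give 0, positions at
    or beyond `end` give `vmax`, with a linear ramp in between.
    """
    int_dir = 1 if interval[1] > interval[0] else -1
    if int_dir * (pos - interval[0]) < 0:
        return 0.0  # inside the dead zone
    frac = (pos - interval[0]) / (interval[1] - interval[0])
    return vmax * min(max(frac, 0.0), 1.0)
```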
@@ -81,6 +89,7 @@ if __name__ == '__main__':
        ref_vec = trans[:2] + rot[2:]
        #-1 1 -1 1 -1 1
        for i, dr in enumerate((BK, FW, RT, LT, RR, LR)):
            # figure out in which direction to move
            idx = i // 2
            sign = 1 if (i % 2) else -1
            speed = _speed(ref_vec[idx], dr)
@@ -94,16 +103,17 @@ if __name__ == '__main__':
        if not any(movement):
            rospy.logdebug('WALKER: STOP')
            _inform_masterloop('stop')  # release the control
            # mp.move(0, 0, 0)
            mp.stopMove()
            continue

        permission = _inform_masterloop('move')  # try to seize the control
        if not permission:
            mp.stopMove()
        else:
            # set the head so that the scene is clearly visible on cameras
            mp.setAngles(('HeadYaw', 'HeadPitch'), (0.0, 0.35), 0.3)
            mp.moveToward(*movement)  # DON'T DELETE THIS ONE
        pass