From c2d25dfb4bcc10e511bbb94fd8c52201ed65dee2 Mon Sep 17 00:00:00 2001
From: Pavel Lutskov
Date: Thu, 28 Feb 2019 15:00:40 +0100
Subject: [PATCH] more documentation

---
 README.md               | 10 +++++++++-
 script/calibrator.py    | 24 ++++++++++++++++++++++--
 script/controller.py    |  4 +++-
 script/fall_detector.py |  7 ++++++-
 script/hand_ler.py      |  6 +++++-
 script/imitator.py      | 10 +++++++---
 script/masterloop.py    |  9 +++++++--
 script/speech_server.py | 13 +++++++++++++
 script/walker.py        | 14 ++++++++++++--
 9 files changed, 84 insertions(+), 13 deletions(-)

diff --git a/README.md b/README.md
index e4d6cbf..bcda130 100644
--- a/README.md
+++ b/README.md
@@ -17,6 +17,10 @@ imitate your arm motions.
 * [nao_meshes](http://wiki.ros.org/nao_meshes)
 * [usb_cam](http://wiki.ros.org/usb_cam)
 
+We installed the dependencies by cloning them into our workspace and running
+`catkin_make`, but `sudo apt install ros-indigo-<package-name>` should work
+even better.
+
 ## Usage
 
 Hang an ArUco #0 marker on the chest, take the #1 in the left hand and #2 in
@@ -41,9 +45,13 @@ $ roslaunch nao_bringup nao_full.launch
 $ roslaunch teleoperation with_gui.launch
 ```
 
+If any problems arise with the GUI, such as the NAO model not being
+displayed properly, try pointing rviz to the correct NAO `.urdf`, which
+should be somewhere in the `nao_description` package.
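+
+For example, you could locate the model and load it into the
+`robot_description` parameter that rviz reads (exact file names may differ
+between `nao_description` versions):
+
+```
+$ find "$(rospack find nao_description)" -name '*.urdf'
+$ rosparam set /robot_description "$(cat <path/to/the/urdf/you/found>)"
+```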
+
 This diagram depicts what you should expect from the teleoperation routine.
 
-![State Machine](docs/teleoperation_overview.png)
+![State Machine](docs/figures/teleoperation_overview.png)
 
 Since this package relies on the NAO voice recognition, you must be in the
 same room with NAO, and the room must be quiet.
diff --git a/script/calibrator.py b/script/calibrator.py
index b46183d..d4c78bb 100755
--- a/script/calibrator.py
+++ b/script/calibrator.py
@@ -1,5 +1,9 @@
 #! /usr/bin/env python
-"""The script for calibrating the arm lengths and 'joystick' extents."""
+"""The script for calibrating the arm lengths and 'joystick' extents.
+
+You will normally not call this script directly, but rather through
+`roslaunch teleoperation calibration.launch`.
+"""
 
 import os
 import json
@@ -14,7 +18,23 @@ from teleoperation.msg import RequestSpeechAction, RequestSpeechGoal
 
 
 def calib_field(sentences, src_frames, targ_frames):
-    """Retrieve a relative transform of some marker."""
+    """Retrieve a relative transform of some marker.
+
+    Parameters
+    ----------
+
+    sentences : list of str
+        What to say.
+    src_frames : list of str
+        The names of the tf source frames (the frames to transform from).
+    targ_frames : list of str
+        The names of the tf target frames (the frames to transform to).
+
+    Returns
+    -------
+    list of transforms
+
+    """
     ok = False
     t_and_q = []
 
diff --git a/script/controller.py b/script/controller.py
index 5baf314..dabb64f 100755
--- a/script/controller.py
+++ b/script/controller.py
@@ -1,5 +1,5 @@
 #! /usr/bin/env python
-"""Module for executing control for a given effector"""
+"""Library for executing control of a given effector."""
 
 from __future__ import division
 import os
@@ -29,6 +29,7 @@ NAO_ARM = 0.22
 
 
 def xyz(joint):
+    """Get the 3D coordinates of a joint in the torso frame."""
     return np.array(mp.getPosition(joint, FRAME_TORSO, False),
                     dtype=np.float64)[:3]
 
@@ -88,6 +89,7 @@ def jacobian(side):
 
     ax_order = (y_axis, z_axis, x_axis, z_axis)
 
+    # Calculate the Jacobian using the formula from the report
     axes = np.concatenate(
         [get_transform(side + j).dot(ax) for j, ax in zip(JOINTS, ax_order)],
         axis=1
diff --git a/script/fall_detector.py b/script/fall_detector.py
index ddb4b4d..86ec023 100755
--- a/script/fall_detector.py
+++ b/script/fall_detector.py
@@ -1,5 +1,10 @@
 #! /usr/bin/env python
-"""A module for graceful fall handling."""
+"""A module for graceful fall handling.
+
+ROS Node: `fall_detector`.
+Uses the `inform_controller` service.
+
+"""
 
 import os
 import rospy
diff --git a/script/hand_ler.py b/script/hand_ler.py
index 505297b..5d2d383 100755
--- a/script/hand_ler.py
+++ b/script/hand_ler.py
@@ -1,5 +1,9 @@
 #! /usr/bin/env python
-"""A small module for hand conrols service."""
+"""A small module for the hand control service.
+
+ROS Node: `hand_ler`.
+Provides the `hands` service.
+"""
 
 import os
 import rospy
diff --git a/script/imitator.py b/script/imitator.py
index 038bca7..24e5194 100755
--- a/script/imitator.py
+++ b/script/imitator.py
@@ -1,5 +1,9 @@
 #! /usr/bin/env python
-"""The node performing the imitation."""
+"""The node performing the imitation.
+
+ROS Node: `imitator`.
+Uses the `inform_masterloop` service.
+"""
 
 from argparse import ArgumentParser
 from math import radians
@@ -79,9 +83,9 @@
             except Exception as e:
                 rospy.logwarn(e)
                 continue
+
+            # Calculate the position of the hand in the operator's chest frame
             my_arm_xyz = np.array(arm) - np.array(a0)
-            # rospy.loginfo('{}'.format(my_arm_xyz))
-            # rospy.loginfo('{}'.format(dumb(my_arm_xyz, side)))
             imitation_cycle(my_arm_xyz, side)  # Do the actuation
 
diff --git a/script/masterloop.py b/script/masterloop.py
index 4e140b1..3b19dc1 100755
--- a/script/masterloop.py
+++ b/script/masterloop.py
@@ -107,7 +107,10 @@ def inform_masterloop_factory(who):
 
 
 def handle_request(r):
-    """Handle a node's request to seize/release control."""
+    """Handle a node's request to seize/release control.
+
+    Update the state if needed.
+    """
     global state
     module = r.module
     message = r.message
@@ -176,6 +179,8 @@ if __name__ == '__main__':
 
     ic = rospy.Service('inform_masterloop', InformMasterloop, handle_request)
 
+    # All necessary initializations have been performed by now
+
    def _shutdown():
        if speech_in_progress:
            speech.cancel_goal()
@@ -183,7 +188,7 @@
 
     rospy.on_shutdown(_shutdown)
 
-    while not rospy.is_shutdown():
+    while not rospy.is_shutdown():  # Run the speech detection loop
         if state in ('idle', 'imitate', 'dead'):
             if not speech_in_progress:
                 speech_in_progress = True
diff --git a/script/speech_server.py b/script/speech_server.py
index 1d695ee..55fc3f0 100755
--- a/script/speech_server.py
+++ b/script/speech_server.py
@@ -1,4 +1,10 @@
 #! /usr/bin/env python
+"""A module for speech detection.
+
+ROS Node: `speech_server`.
+Provides the `request_speech` action.
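+
+A minimal client sketch (it assumes the action is registered under the name
+`request_speech`, as stated above, and that `vocabulary` is a plain list of
+words to listen for)::
+
+    import rospy
+    import actionlib
+    from teleoperation.msg import RequestSpeechAction, RequestSpeechGoal
+
+    rospy.init_node('speech_client')  # any throwaway node name works
+    client = actionlib.SimpleActionClient('request_speech',
+                                          RequestSpeechAction)
+    client.wait_for_server()
+    client.send_goal(RequestSpeechGoal(vocabulary=['yes', 'no']))
+    client.wait_for_result()
+    print(client.get_result().word)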
+
+"""
 
 import os
 import rospy
@@ -13,6 +19,7 @@ almem = None
 
 
 def request_speech(goal):
+    """Handle the request for speech detection."""
     rospy.loginfo('SPEECH SERVER: NEW GOAL: {}'.format(goal))
     if not speech_detector.start_speech(goal.vocabulary):
         sas.set_succeeded(RequestSpeechResult(word=''))
@@ -33,6 +40,7 @@
 
 
 class SpeechDetectorModule(ALModule):
+    """ALModule for accessing the NAO's speech recognition."""
 
     def __init__(self, name):
         ALModule.__init__(self, name)
@@ -52,6 +60,7 @@
         return almem.getData('ALSpeechRecognition/Status')
 
     def start_speech(self, voc, resume=False):
+        """Start recognizing the given vocabulary."""
         if self._busy != resume:
             return False
         if not resume:
@@ -63,14 +72,17 @@
         return True
 
     def have_word(self):
+        """Check if something was recognized."""
         return self.recognized is not None
 
     def get_recognized_and_erase(self):
+        """Retrieve the recognized word and erase it from the variable."""
         result = self.recognized
         self.recognized = None
         return result
 
     def stop_speech(self, pause=False):
+        """Stop detecting or just pause detection."""
         if not self._busy:
             return
         self.asr.pause(True)
@@ -79,6 +91,7 @@
         self._busy = False
 
     def on_word_recognized(self, *_args):
+        """Callback for the word-recognized event."""
         word, conf = almem.getData('WordRecognized')
         if conf > 0.4:
             self.stop_speech(pause=True)
diff --git a/script/walker.py b/script/walker.py
index df21d54..1f2895a 100755
--- a/script/walker.py
+++ b/script/walker.py
@@ -1,4 +1,10 @@
 #! /usr/bin/env python
+"""Module for walking in the environment (Human Joystick).
+
+ROS Node: `walker`.
+Uses the `inform_masterloop` service.
+
+"""
 
 from __future__ import division
 import os
@@ -23,6 +29,7 @@ VMAX = 1.0
 
 
 def n_way(a, b, n=3):
+    """Return the point that is (b - a)/n away from a."""
     return a + (b - a) / n
 
 
@@ -45,6 +52,7 @@ _inform_masterloop = inform_masterloop_factory('walker')
 
 
 def _speed(pos, interval):
+    """Calculate the speed based on the operator's position."""
     int_dir = 1 if interval[1] > interval[0] else -1
     if int_dir * (pos - interval[0]) < 0:
         return 0.0
@@ -81,6 +89,7 @@
         ref_vec = trans[:2] + rot[2:]
         # -1  1  -1  1  -1  1
         for i, dr in enumerate((BK, FW, RT, LT, RR, LR)):
+            # Figure out in which direction to move
             idx = i // 2
             sign = 1 if (i % 2) else -1
             speed = _speed(ref_vec[idx], dr)
@@ -94,16 +103,17 @@
 
         if not any(movement):
             rospy.logdebug('WALKER: STOP')
-            _inform_masterloop('stop')
+            _inform_masterloop('stop')  # release the control
             # mp.move(0, 0, 0)
             mp.stopMove()
             continue
 
-        permission = _inform_masterloop('move')
+        permission = _inform_masterloop('move')  # try to seize the control
 
         if not permission:
             mp.stopMove()
         else:
+            # Set the head so that the scene is clearly visible on the cameras
             mp.setAngles(('HeadYaw', 'HeadPitch'), (0.0, 0.35), 0.3)
             mp.moveToward(*movement)  # DON'T DELETE THIS ONE
         pass