about to implement hand controls

Pavel Lutskov
2019-02-02 16:53:54 +01:00
parent 4335d39aaf
commit 3e990eb9ce


@@ -1,6 +1,5 @@
 #! /usr/bin/env python
 from argparse import ArgumentParser
-from sys import argv
 import rospy
 import actionlib
@@ -14,13 +13,25 @@ IMITATE = 'arms'
 KILL = 'kill'
 REVIVE = 'go'
 STOP = 'stop'
+OPEN = 'open'
+CLOSE = 'close'
 VOC_STATE = None
 SPEECH_TRANSITIONS = None
+VOC_HAND = {
+    'closed': [OPEN],
+    'opened': [CLOSE]
+}
+SPEECH_HAND = {
+    OPEN: (('closed',), 'opened'),
+    CLOSE: (('opened',), 'closed')
+}
 AI = False # autoimitate
 # Globals
 state = 'dead' # Also walk, imitate, fallen, idle
+hand_state = 'closed'
 speech_in_progress = False
@@ -43,7 +54,7 @@ def init_voc_state_speech():
 def speech_done_cb(_, result):
-    global speech_in_progress, state
+    global speech_in_progress, state, hand_state
     _state_old = state
     rospy.loginfo('SPEECH: {}'.format(result))
@@ -54,6 +65,10 @@ def speech_done_cb(_, result):
         allowed, target = SPEECH_TRANSITIONS[result.word]
         if state in allowed:
             state = target
+    elif result.word in SPEECH_HAND:
+        allowed, target = SPEECH_HAND[result.word]
+        if hand_state in allowed:
+            hand_state = target
     if _state_old != state:
         rospy.loginfo('{} -> {}'.format(_state_old, state))
@@ -115,7 +130,7 @@ def handle_request(r):
 if __name__ == '__main__':
-    print(argv)
+    rospy.init_node('controller')
     ap = ArgumentParser()
     ap.add_argument('-i', '--autoimitate',
                     help='Switch between moving and imitating automatically',
@@ -125,7 +140,6 @@ if __name__ == '__main__':
     AI = args.autoimitate
     init_voc_state_speech()
-    rospy.init_node('controller', log_level=rospy.INFO)
    ic = rospy.Service('inform_controller', InformController, handle_request)
    client = actionlib.SimpleActionClient('speech_server',
@@ -139,8 +153,9 @@ if __name__ == '__main__':
         if state in ('idle', 'imitate', 'dead'):
             if not speech_in_progress:
                 speech_in_progress = True
-                client.send_goal(RequestSpeechGoal(VOC_STATE[state]),
-                                 speech_done_cb)
+                client.send_goal(RequestSpeechGoal(
+                    VOC_STATE[state] + VOC_HAND[hand_state]
+                ), speech_done_cb)
         else:
             if speech_in_progress:
                 client.cancel_goal()
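
The commit reuses the table-driven transition pattern already used for the body state: SPEECH_HAND maps a recognized word to the hand states it is allowed from and the state it leads to, and VOC_HAND supplies the words to listen for given the current hand state. A minimal, ROS-free sketch of that pattern (constant names taken from the diff; the loop and printout below are illustrative only, not part of the controller):

OPEN = 'open'
CLOSE = 'close'
VOC_HAND = {'closed': [OPEN], 'opened': [CLOSE]}
SPEECH_HAND = {OPEN: (('closed',), 'opened'), CLOSE: (('opened',), 'closed')}

hand_state = 'closed'

def apply_word(word):
    # Mirrors the new elif branch in speech_done_cb: transition only when the
    # current hand state is listed as an allowed source for this word.
    global hand_state
    if word in SPEECH_HAND:
        allowed, target = SPEECH_HAND[word]
        if hand_state in allowed:
            hand_state = target

for word in (OPEN, OPEN, CLOSE):  # second 'open' is ignored: hand is already opened
    apply_word(word)
    print(word, '->', hand_state, '| now listening for:', VOC_HAND[hand_state])

In the main loop the same table drives the speech goal: VOC_STATE[state] + VOC_HAND[hand_state] asks the speech server to listen for both the body-state words and whichever hand command is currently valid.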