did work on speech recognition
This commit is contained in:
177
src/speech_2.cpp
Normal file
@@ -0,0 +1,177 @@
#include <iostream>
#include <fstream>
#include <iomanip>
#include <cmath>
#include <stdlib.h>
#include <string>   // std::string, std::to_string
#include <vector>   // std::vector
#include <ros/ros.h>
#include <sensor_msgs/image_encodings.h>
#include "sensor_msgs/JointState.h"
#include "sensor_msgs/Imu.h"
#include "message_filters/subscriber.h"
#include <string.h>
#include <naoqi_bridge_msgs/JointAnglesWithSpeed.h>
#include <naoqi_bridge_msgs/Bumper.h>
#include <naoqi_bridge_msgs/HeadTouch.h>
#include <naoqi_bridge_msgs/HandTouch.h>
#include <naoqi_bridge_msgs/JointAnglesWithSpeedAction.h>
#include <std_srvs/Empty.h>
#include <boost/algorithm/string.hpp>
#include <boost/date_time.hpp>
#include <naoqi_bridge_msgs/SpeechWithFeedbackActionGoal.h>
#include <actionlib_msgs/GoalStatusArray.h>
#include <naoqi_bridge_msgs/BlinkActionGoal.h>
#include <naoqi_bridge_msgs/SetSpeechVocabularyActionGoal.h>
#include <std_msgs/ColorRGBA.h>
#include <naoqi_bridge_msgs/WordRecognized.h>
#include <geometry_msgs/PoseStamped.h>
#include <geometry_msgs/Pose2D.h>
#include <std_msgs/Bool.h>
#include "actionlib/client/simple_action_client.h"

using namespace std;

class Nao_control {

protected:
    // ROS node handle
    ros::NodeHandle nh_;

    // Subscriber to head tactile states
    ros::Subscriber tactile_sub;

    // Publisher for NAO speech
    ros::Publisher speech_pub;

    // Publisher for NAO vocabulary parameters
    ros::Publisher voc_params_pub;

    // Client for starting speech recognition
    ros::ServiceClient recog_start_srv;

    // Client for stopping speech recognition
    ros::ServiceClient recog_stop_srv;

    // Subscriber to speech recognition results
    ros::Subscriber recog_sub;

    // Counter used to generate unique goal IDs
    int speech_id_ctr = 0;
    // Words accepted so far (above the confidence threshold)
    std::vector<std::string> recognized_words;

public:
    Nao_control() {

        // Subscribe to topic bumper and specify that
        // all data will be processed by function bumperCallback
        //bumper_sub = nh_.subscribe("/bumper", 1, &Nao_control::bumperCallback,
        //                           this);

        // Subscribe to topic tactile_touch and specify
        // that all data will be processed by function tactileCallback
        //tactile_sub = nh_.subscribe("/tactile_touch", 1,
        //                            &Nao_control::tactileCallback, this);

        speech_pub = nh_.advertise<naoqi_bridge_msgs::SpeechWithFeedbackActionGoal>(
            "/speech_action/goal", 1);

        voc_params_pub = nh_.advertise<naoqi_bridge_msgs::SetSpeechVocabularyActionGoal>(
            "/speech_vocabulary_action/goal", 1);

        recog_start_srv = nh_.serviceClient<std_srvs::Empty>("/start_recognition");

        recog_stop_srv = nh_.serviceClient<std_srvs::Empty>("/stop_recognition");

        recog_sub = nh_.subscribe("/word_recognized", 1,
                                  &Nao_control::speechRecognitionCallback, this);

        ROS_INFO("Constructor");
        // Trigger the callback once by hand while the tactile subscription
        // above is commented out.
        tactileCallback();
    }
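
    // Hedged design note (not from the original code): since
    // actionlib/client/simple_action_client.h is already included, an
    // alternative to publishing raw *ActionGoal messages on
    // "/speech_action/goal" would be an actionlib client, which fills in the
    // goal id and tracks status automatically. Sketch, assuming the
    // naoqi_bridge_msgs::SpeechWithFeedbackAction definition and requiring
    // #include <naoqi_bridge_msgs/SpeechWithFeedbackAction.h>:
    //
    //   actionlib::SimpleActionClient<naoqi_bridge_msgs::SpeechWithFeedbackAction>
    //       speech_ac("/speech_action", true);
    //   naoqi_bridge_msgs::SpeechWithFeedbackGoal goal;
    //   goal.say = "hello world";
    //   speech_ac.waitForServer();
    //   speech_ac.sendGoal(goal);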

    ~Nao_control() {
        ROS_INFO("Destructor");
    }

    void speechRecognitionCallback(const naoqi_bridge_msgs::WordRecognized::ConstPtr& msg)
    {
        // Keep every recognized word whose confidence is above the threshold.
        for (size_t i = 0; i < msg->words.size(); i++) {
            ROS_INFO("RECOGNIZED %s WITH CONFIDENCE %f",
                     msg->words[i].c_str(), msg->confidence_values[i]);
            if (msg->confidence_values[i] > 0.42) {
                this->recognized_words.push_back(msg->words[i]);
            }
        }
    }
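
    // For reference (assumption, based on the naoqi_bridge_msgs/WordRecognized
    // message definition):
    //   string[]  words
    //   float32[] confidence_values
    // The two arrays are parallel, which is why they are indexed together above.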

    //void tactileCallback(const naoqi_bridge_msgs::HeadTouch::ConstPtr& tactileState)
    void tactileCallback()
    {
        // Always taken at the moment (the tactile check is disabled; see the
        // commented-out prototype above).
        if (1) {
            ROS_INFO("FRONT BUTTON RELEASED");
            std::vector<std::string> vocabulary;
            vocabulary.push_back("hello");
            vocabulary.push_back("world");
            vocabulary.push_back("humans");
            vocabulary.push_back("robots");
            vocabulary.push_back("kill");

            naoqi_bridge_msgs::SetSpeechVocabularyActionGoal msg;
            msg.goal.words = vocabulary;
            msg.goal_id.id = std::to_string(speech_id_ctr);
            std::cout << msg << std::endl;
            speech_id_ctr += 1;
            voc_params_pub.publish(msg);
            // somehow the message does not get published here
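            // Hedged note (assumption, not from the original code): a common
            // cause for this in ROS is that publish() runs before the newly
            // advertised publisher has finished connecting to its subscribers,
            // so the first message is dropped. One possible workaround would
            // be to wait for a connection before the publish() call above,
            // e.g.:
            //
            //   while (voc_params_pub.getNumSubscribers() == 0 && ros::ok()) {
            //       ros::Duration(0.1).sleep();
            //   }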

            std_srvs::Empty srv;

            if (recog_start_srv.call(srv)) {
                ROS_INFO("SUCCESSFULLY STARTED RECOGNITION");
            }
            else {
                ROS_ERROR("COULDN'T START RECOGNITION");
            }
        }
        else if (this->recognized_words.size() > 1) {
            ROS_INFO("MIDDLE BUTTON RELEASED");
            std_srvs::Empty srv;
            if (recog_stop_srv.call(srv)) {
                ROS_INFO("SUCCESSFULLY STOPPED RECOGNITION");
                // Concatenate all accepted words and have the robot say them.
                std::string say;
                for (size_t i = 0; i < this->recognized_words.size(); i++) {
                    say += this->recognized_words[i] + " ";
                }
                naoqi_bridge_msgs::SpeechWithFeedbackActionGoal msg;
                msg.goal_id.id = std::to_string(this->speech_id_ctr);
                this->speech_id_ctr += 1;
                msg.goal.say = say;
                this->speech_pub.publish(msg);
                this->recognized_words.clear();
            }
            else {
                ROS_ERROR("COULDN'T STOP RECOGNITION");
            }
        }
        else {
            ROS_INFO("NO BUTTON PRESSED");
        }
    }
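
    // Hedged sketch (assumption, not from the original code): once the
    // /tactile_touch subscription in the constructor is re-enabled, the
    // callback would presumably take the message again, roughly like the
    // commented-out prototype above. Field and constant names assumed from
    // naoqi_bridge_msgs/HeadTouch:
    //
    //   void tactileCallback(const naoqi_bridge_msgs::HeadTouch::ConstPtr& tactileState)
    //   {
    //       if (tactileState->button == naoqi_bridge_msgs::HeadTouch::buttonFront &&
    //           tactileState->state == naoqi_bridge_msgs::HeadTouch::stateReleased) {
    //           // ... start recognition as above ...
    //       }
    //   }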

};

int main(int argc, char** argv) {
    ros::init(argc, argv, "speech");

    Nao_control TermiNAOtor;
    ros::spin();
    return 0;
}
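
// Hedged note (assumption, not from the original code): the topics and
// services used above (/speech_action/goal, /speech_vocabulary_action/goal,
// /start_recognition, /stop_recognition, /word_recognized) are expected to be
// provided by the NAOqi speech node from the ROS NAO stack (e.g. nao_apps)
// running against the robot; this node only talks to that interface.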