icub-client
ears.cpp
1 #include <yarp/os/all.h>
2 #include "ears.h"
5 #include "icubclient/functions.h"
7 
8 using namespace std;
9 using namespace icubclient;
10 using namespace yarp::os;
11 
12 bool ears::configure(yarp::os::ResourceFinder &rf)
13 {
14  string moduleName = rf.check("name", Value("ears")).asString().c_str();
15  setName(moduleName.c_str());
16 
17  yInfo() << moduleName << " : finding configuration files...";
18  period = rf.check("period", Value(0.1)).asDouble();
19 
20  //Create an iCub Client and check that all dependencies are here before starting
21  bool isRFVerbose = false;
22  iCub = new ICubClient(moduleName, "ears", "client.ini", isRFVerbose);
23  iCub->opc->isVerbose = false;
24  if (!iCub->connect())
25  {
26  yInfo() << " iCubClient : Some dependencies are not running...";
27  Time::delay(1.0);
28  }
29 
30  portToSpeechRecognizer.open("/" + moduleName + "/speech:o");
31 
32  MainGrammar = rf.findFileByName(rf.check("MainGrammar", Value("MainGrammar.xml")).toString());
33  bShouldListen = true;
34 
35  portToBehavior.open("/" + moduleName + "/behavior:o");
36 
37  rpc.open(("/" + moduleName + "/rpc").c_str());
38  attach(rpc);
39 
40  yInfo() << "\n \n" << "----------------------------------------------" << "\n \n" << moduleName << " ready ! \n \n ";
41 
42  return true;
43 }
44 
45 bool ears::interruptModule() {
46  // speechRecognizer is in a long loop, which prohibits closure of ears
47  // so interrupt the speechRecognizer
48  yDebug() << "interrupt ears";
49  bShouldListen = false;
50  if(Network::connect("/" + getName() + "/speech:o", "/speechRecognizer/rpc")) {
51  Bottle bMessenger, bReply;
52  bMessenger.addString("interrupt");
53  // send the message
54  portToSpeechRecognizer.write(bMessenger, bReply);
55  if(bReply.get(1).asString() != "OK") {
56  yError() << "speechRecognizer was not interrupted";
57  yDebug() << "Reply from speechRecognizer:" << bReply.toString();
58  }
59  }
60 
61  yDebug() << "interrupted speech recognizer";
62  portToSpeechRecognizer.interrupt();
63  portToBehavior.interrupt();
64  rpc.interrupt();
65 
66  yDebug() << "interrupt done";
67 
68  return true;
69 }
70 
71 
72 bool ears::close() {
73  yDebug() << "close ears";
74 
75  if(iCub) {
76  iCub->close();
77  delete iCub;
78  }
79  yDebug() << "closed icub";
80 
81  portToSpeechRecognizer.interrupt();
82  portToSpeechRecognizer.close();
83 
84  portToBehavior.interrupt();
85  portToBehavior.close();
86 
87  yDebug() << "closing rpc port";
88  rpc.interrupt();
89  rpc.close();
90 
91  yDebug() << "end of close. bye!";
92  return true;
93 }
94 
95 
96 bool ears::respond(const Bottle& command, Bottle& reply) {
97  string helpMessage = string(getName().c_str()) +
98  " commands are: \n" +
99  "quit \n";
100 
101  reply.clear();
102 
103  if (command.get(0).asString() == "quit") {
104  reply.addString("quitting");
105  return false;
106  }
107  else if (command.get(0).asString() == "listen")
108  {
109  if (command.size() == 2)
110  {
111  if (command.get(1).asString() == "on")
112  {
113  yDebug() << "should listen on";
114  bShouldListen = true;
115  reply.addString("ack");
116  }
117  else if (command.get(1).asString() == "off")
118  {
119  yDebug() << "should listen off";
120  bShouldListen = false;
121  reply.addString("ack");
122  }
123  else if (command.get(1).asString() == "offShouldWait")
124  {
125  yDebug() << "should listen offShouldWait";
126  bShouldListen = false;
127  LockGuard lg(m);
128  reply.addString("ack");
129  }
130  else {
131  reply.addString("nack");
132  reply.addString("Send either listen on or listen off");
133  }
134  }
135  }
136  else {
137  yInfo() << helpMessage;
138  reply.addString("wrong command");
139  }
140 
141  return true;
142 }
143 
144 /* Called periodically every getPeriod() seconds */
145 bool ears::updateModule() {
146  if (bShouldListen)
147  {
148  LockGuard lg(m);
149  yDebug() << "bListen";
150  Bottle bRecognized, //received FROM speech recog with transfer information (1/0 (bAnswer))
151  bAnswer, //response from speech recog without transfer information, including raw sentence
152  bSemantic; // semantic information of the content of the recognition
153  bRecognized = iCub->getRecogClient()->recogFromGrammarLoop(grammarToString(MainGrammar), 1, true, true);
154 
155  if (bRecognized.get(0).asInt() == 0)
156  {
157  yDebug() << "ears::updateModule -> speechRecognizer did not recognize anything";
158  return true;
159  }
160 
161  bAnswer = *bRecognized.get(1).asList();
162 
163  if (bAnswer.get(0).asString() == "stop")
164  {
165  yInfo() << " in ears::updateModule | stop called";
166  return true;
167  }
 168  // bAnswer is the result of the recognition system (first element is the raw sentence, second is the list of semantic elements)
169 
170  if(bAnswer.get(1).asList()->get(1).isList()) {
171  bSemantic = *(*bAnswer.get(1).asList()).get(1).asList();
172  }
173  string sObject, sAction;
174  string sQuestionKind = bAnswer.get(1).asList()->get(0).toString();
175 
176  // forward command appropriately to behaviorManager
177  string sObjectType, sCommand;
178  if(sQuestionKind == "SENTENCEOBJECT") {
179  sAction = bSemantic.check("predicateObject", Value("none")).asString();
180  if (sAction == "please take") {
181  sAction = "back";
182  sCommand = "moveObject";
183  }
184  else if (sAction == "give me") {
185  sAction = "front";
186  sCommand = "moveObject";
187  }
188  else if (sAction == "point") {
189  sAction = "pointing";
190  sCommand = "pointing";
191  }
192  sObjectType = "object";
193  sObject = bSemantic.check("object", Value("none")).asString();
194  } else if (sQuestionKind == "SENTENCERECOGNISE") {
195  sCommand = "recognitionOrder";
196  sAction = "recognitionOrder";
197  sObjectType = "";
198  sObject = "";
199  } else {
200  yError() << "[ears] Unknown predicate: " << sQuestionKind;
201  return true;
202  }
203 
204  Bottle bAction,bArgs;
205  // object might not be known yet, tag it first
206  if (sObject!="") {
207  bAction.addString("tagging");
208  bArgs.addString(sObject);
209  bArgs.addString(sAction);
210  bArgs.addString(sObjectType);
211  bAction.addList()=bArgs;
212  portToBehavior.write(bAction);
213  yDebug() << "Sending " + bAction.toString();
214  }
215 
216  // now execute actual behavior
217  bAction.clear();
218  bAction.addString(sCommand);
219  bArgs.addString(sObject);
220  bArgs.addString(sAction);
221  bArgs.addString(sObjectType);
222  bAction.addList()=bArgs;
223  portToBehavior.write(bAction);
224  yDebug() << "Sending " + bAction.toString();
225  } else {
226  yDebug() << "Not bShouldListen";
227  yarp::os::Time::delay(0.5);
228  }
229 
230  return true;
231 }
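For context, the respond() handler above accepts "listen on", "listen off", "listen offShouldWait" and "quit" over the module's RPC port ("/<name>/rpc", i.e. "/ears/rpc" with the default module name). The following is a minimal sketch of driving that port from another YARP program; the client port name "/earsTester/rpc:o" is a hypothetical example, not part of ears.cpp.

    #include <yarp/os/all.h>

    int main()
    {
        yarp::os::Network yarp;                         // initialise YARP networking
        yarp::os::RpcClient client;
        client.open("/earsTester/rpc:o");               // hypothetical client port name
        if (!yarp::os::Network::connect("/earsTester/rpc:o", "/ears/rpc"))
            return 1;                                   // ears module not reachable

        yarp::os::Bottle cmd, reply;
        cmd.addString("listen");
        cmd.addString("off");                           // handled in ears::respond(); expect "ack"
        client.write(cmd, reply);
        return reply.get(0).asString() == "ack" ? 0 : 1;
    }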