#include <yarp/os/all.h>
#include "ears.h"   // module header (assumed name): declares the ears RFModule, its ports and members

using namespace yarp::os;
using namespace std;

bool ears::configure(yarp::os::ResourceFinder &rf)
{
    string moduleName = rf.check("name", Value("ears")).asString();
    setName(moduleName.c_str());

    yInfo() << moduleName << " : finding configuration files...";
    period = rf.check("period", Value(0.1)).asDouble();

    // create an iCubClient and check that all dependencies are running
    bool isRFVerbose = false;
    iCub = new ICubClient(moduleName, "ears", "client.ini", isRFVerbose);
    iCub->opc->isVerbose = false;
    if (!iCub->connect()) {   // guard reconstructed from the log message below
        yInfo() << " iCubClient : Some dependencies are not running...";
    }

    portToSpeechRecognizer.open("/" + moduleName + "/speech:o");

    MainGrammar = rf.findFileByName(rf.check("MainGrammar", Value("MainGrammar.xml")).toString());

    portToBehavior.open("/" + moduleName + "/behavior:o");

    rpc.open(("/" + moduleName + "/rpc").c_str());
    attach(rpc);   // assumed: route rpc commands to respond(), the standard RFModule pattern

    yInfo() << "\n \n"
            << "----------------------------------------------"
            << "\n \n" << moduleName << " ready ! \n \n ";

    return true;
}
bool ears::interruptModule() {
    yDebug() << "interrupt ears";
    bShouldListen = false;

    // ask the speech recognizer itself to abort any ongoing recognition
    if (Network::connect("/" + getName() + "/speech:o", "/speechRecognizer/rpc")) {
        Bottle bMessenger, bReply;
        bMessenger.addString("interrupt");
        portToSpeechRecognizer.write(bMessenger, bReply);
        if (bReply.get(1).asString() != "OK") {
            yError() << "speechRecognizer was not interrupted";
            yDebug() << "Reply from speechRecognizer:" << bReply.toString();
        }
    }
    yDebug() << "interrupted speech recognizer";
    portToSpeechRecognizer.interrupt();
    portToBehavior.interrupt();
    rpc.interrupt();   // assumed from the gap in the listing; standard port teardown

    yDebug() << "interrupt done";
    return true;
}
bool ears::close() {
    yDebug() << "close ears";
    if (iCub) {   // shutdown of the iCubClient, reconstructed from the log message below
        iCub->close();
        delete iCub;
    }
    yDebug() << "closed icub";

    portToSpeechRecognizer.interrupt();
    portToSpeechRecognizer.close();

    portToBehavior.interrupt();
    portToBehavior.close();

    yDebug() << "closing rpc port";
    rpc.interrupt();
    rpc.close();

    yDebug() << "end of close. bye!";
    return true;
}
bool ears::respond(const yarp::os::Bottle &command, yarp::os::Bottle &reply) {
    string helpMessage = string(getName().c_str()) +
        " commands are: listen (on|off|offShouldWait), quit";   // help text completed from the commands handled below
    reply.clear();

    if (command.get(0).asString() == "quit") {
        reply.addString("quitting");
        return false;   // returning false stops the module
    }
    else if (command.get(0).asString() == "listen")
    {
        if (command.size() == 2)
        {
            if (command.get(1).asString() == "on")
            {
                yDebug() << "should listen on";
                bShouldListen = true;
                reply.addString("ack");
            }
            else if (command.get(1).asString() == "off")
            {
                yDebug() << "should listen off";
                bShouldListen = false;
                reply.addString("ack");
            }
            else if (command.get(1).asString() == "offShouldWait")
            {
                yDebug() << "should listen offShouldWait";
                bShouldListen = false;
                // as the name suggests, this variant is meant to wait for an
                // ongoing recognition to finish before acknowledging
                reply.addString("ack");
            }
            else
            {
                reply.addString("nack");
                reply.addString("Send either listen on or listen off");
            }
        }
    }
    else {
        yInfo() << helpMessage;
        reply.addString("wrong command");
    }

    return true;
}
bool ears::updateModule() {
    if (bShouldListen)
    {
        yDebug() << "bListen";
        Bottle bRecognized, bAnswer, bSemantic;
        bRecognized = iCub->getRecogClient()->recogFromGrammarLoop(grammarToString(MainGrammar), 1, true, true);

        if (bRecognized.get(0).asInt() == 0)
        {
            yDebug() << "ears::updateModule -> speechRecognizer did not recognize anything";
            return true;
        }

        // bAnswer: first element is the raw sentence, second the semantic parse
        bAnswer = *bRecognized.get(1).asList();

        if (bAnswer.get(0).asString() == "stop")
        {
            yInfo() << " in ears::updateModule | stop called";
            return true;
        }

        if (bAnswer.get(1).asList()->get(1).isList()) {
            bSemantic = *(*bAnswer.get(1).asList()).get(1).asList();

            string sObject, sAction;
            string sQuestionKind = bAnswer.get(1).asList()->get(0).toString();

            string sObjectType, sCommand;
            if (sQuestionKind == "SENTENCEOBJECT") {
                sAction = bSemantic.check("predicateObject", Value("none")).asString();
                if (sAction == "please take") {
                    sCommand = "moveObject";
                }
                else if (sAction == "give me") {
                    sCommand = "moveObject";
                }
                else if (sAction == "point") {
                    sAction = "pointing";
                    sCommand = "pointing";
                }
                sObjectType = "object";
                sObject = bSemantic.check("object", Value("none")).asString();
            }
            else if (sQuestionKind == "SENTENCERECOGNISE") {
                sCommand = "recognitionOrder";
                sAction  = "recognitionOrder";
            }
            else {
                yError() << "[ears] Unknown predicate: " << sQuestionKind;
                return true;   // assumed: nothing sensible to forward in this case
            }

            // first send a "tagging" action so the object gets identified...
            Bottle bAction, bArgs;
            bAction.addString("tagging");
            bArgs.addString(sObject);
            bArgs.addString(sAction);
            bArgs.addString(sObjectType);
            bAction.addList() = bArgs;
            portToBehavior.write(bAction);
            yDebug() << "Sending " + bAction.toString();

            // ...then forward the actual command with the same arguments
            bAction.clear();   // reconstructed: the bottles must be reset before reuse
            bArgs.clear();
            bAction.addString(sCommand);
            bArgs.addString(sObject);
            bArgs.addString(sAction);
            bArgs.addString(sObjectType);
            bAction.addList() = bArgs;
            portToBehavior.write(bAction);
            yDebug() << "Sending " + bAction.toString();
        }
    }
    else {
        yDebug() << "Not bShouldListen";
        yarp::os::Time::delay(0.5);
    }
    return true;
}
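// ---------------------------------------------------------------------------
// Sketch of a consumer for the bottles written to portToBehavior above, to
// make the message shape concrete. Not part of the listing; the reader port
// name is hypothetical, and "/ears/behavior:o" assumes the default module name.
#include <yarp/os/all.h>

int main() {
    yarp::os::Network yarp;
    yarp::os::BufferedPort<yarp::os::Bottle> in;
    in.open("/behaviorReader");
    yarp::os::Network::connect("/ears/behavior:o", "/behaviorReader");

    while (true) {
        yarp::os::Bottle *b = in.read();      // blocking read
        if (!b) break;
        // expected shape: command (object action objectType),
        // e.g. "pointing" ("bottle" "pointing" "object")
        yInfo() << "received:" << b->toString();
    }
    return 0;
}
// ---------------------------------------------------------------------------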
// Referenced helpers (briefs preserved from the original listing):
//   ICubClient - grants access to high level motor commands (grasp, touch,
//                look, goto, etc) of the robot as well as it...
//   std::string grammarToString(const std::string &sPath)
//              - get the context path of a .grxml grammar, and return it as a string
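// ---------------------------------------------------------------------------
// A minimal sketch of what grammarToString could look like, assuming it
// simply reads the .grxml file at sPath into a string; the real helper
// (declared elsewhere) may handle errors differently.
#include <fstream>
#include <sstream>
#include <string>

std::string grammarToString(const std::string &sPath)
{
    std::ifstream isGrammar(sPath.c_str());
    if (!isGrammar) {
        return "";                            // grammar file not found
    }
    std::ostringstream buffer;
    buffer << isGrammar.rdbuf();              // slurp the whole grammar file
    return buffer.str();
}
// ---------------------------------------------------------------------------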