iCub-main
Loading...
Searching...
No Matches
neuralNetworks.cpp
Go to the documentation of this file.
1/*
2 * Copyright (C) 2006-2018 Istituto Italiano di Tecnologia (IIT)
3 * Copyright (C) 2006-2010 RobotCub Consortium
4 * All rights reserved.
5 *
6 * This software may be modified and distributed under the terms
7 * of the BSD-3-Clause license. See the accompanying LICENSE file for
8 * details.
9*/
10
11#include <sstream>
12#include <iomanip>
13#include <cmath>
14
15#include <yarp/math/Math.h>
17
18using namespace std;
19using namespace yarp::os;
20using namespace yarp::sig;
21using namespace yarp::math;
22using namespace iCub::ctrl;
23
24
25/***************************************************************************/
27{
28 configured=false;
29}
30
31
32/***************************************************************************/
33ff2LayNN::ff2LayNN(const Property &options)
34{
    // Build and immediately configure the network from the given options;
    // call isValid() afterwards to check whether configuration succeeded.
35    configure(options);
36}
37
38
39/***************************************************************************/
41{
42 return configured;
43}
44
45
46/***************************************************************************/
47void ff2LayNN::setItem(Property &options, const string &tag, const Vector &item) const
48{
    // Serialize the vector as a flat list of doubles stored under the tag.
49    Bottle b;
    Bottle &lst=b.addList();
50    for (size_t k=0; k<item.length(); k++)
51        lst.addFloat64(item[k]);
52
53    options.put(tag,b.get(0));
54}
55
56
57/***************************************************************************/
58bool ff2LayNN::getItem(const Property &options, const string &tag, Vector &item) const
59{
    // Deserialize a list of doubles stored under the tag into the vector;
    // fail if the tag is absent or does not hold a list.
60    Bottle *lst=options.find(tag).asList();
61    if (lst==nullptr)
62        return false;
63
64    item.resize(lst->size());
65    for (size_t k=0; k<item.length(); k++)
66        item[k]=lst->get(k).asFloat64();
67
68    return true;
69}
70
71
72
73/***************************************************************************/
75{
76 inMinX.resize(inMinMaxX.size());
77 inMinY.resize(inMinMaxX.size());
78 inRatio.resize(inMinMaxX.size());
79
80 for (size_t i=0; i<inMinX.length(); i++)
81 {
82 inMinX[i]=inMinMaxX[i].min;
83 inMinY[i]=inMinMaxY[i].min;
84 inRatio[i]=(inMinMaxY[i].max-inMinMaxY[i].min)/(inMinMaxX[i].max-inMinMaxX[i].min);
85 }
86
87 outMinX.resize(outMinMaxX.size());
88 outMinY.resize(outMinMaxX.size());
89 outRatio.resize(outMinMaxX.size());
90
91 for (size_t i=0; i<outMinX.length(); i++)
92 {
93 outMinX[i]=outMinMaxX[i].min;
94 outMinY[i]=outMinMaxY[i].min;
95 outRatio[i]=(outMinMaxX[i].max-outMinMaxX[i].min)/(outMinMaxY[i].max-outMinMaxY[i].min);
96 }
97}
98
99
100/***************************************************************************/
101bool ff2LayNN::configure(const Property &options)
102{
    // Reset any previous configuration before parsing the new one.
103    IW.clear();
104    LW.clear();
105
106    inMinMaxX.clear();
107    inMinMaxY.clear();
108
109    outMinMaxX.clear();
110    outMinMaxY.clear();
111
112    configured=false;
113
114    // all three layer sizes are mandatory
115    if (!options.check("numInputNodes") || !options.check("numHiddenNodes") ||
116        !options.check("numOutputNodes"))
117        return false;
118
    // hidden-layer weights: one row "IW_<i>" per hidden node
119    int numHiddenNodes=options.find("numHiddenNodes").asInt32();
120    for (int i=0; i<numHiddenNodes; i++)
121    {
122        ostringstream tag;
123        Vector item;
124
125        tag<<"IW_"<<i;
126        if (!getItem(options,tag.str(),item))
127            return false;
128
129        IW.push_back(item);
130    }
131
    // hidden-layer bias
132    if (!getItem(options,"b1",b1))
133        return false;
134
    // output-layer weights: one row "LW_<i>" per output node
135    int numOutputNodes=options.find("numOutputNodes").asInt32();
136    for (int i=0; i<numOutputNodes; i++)
137    {
138        ostringstream tag;
139        Vector item;
140
141        tag<<"LW_"<<i;
142        if (!getItem(options,tag.str(),item))
143            return false;
144
145        LW.push_back(item);
146    }
147
    // output-layer bias
148    if (!getItem(options,"b2",b2))
149        return false;
150
    // input scaling ranges: [min max] pairs, X in the data domain,
    // Y in the network's normalized domain
151    int numInputNodes=options.find("numInputNodes").asInt32();
152    for (int i=0; i<numInputNodes; i++)
153    {
154        ostringstream tagX, tagY;
155        Vector itemX, itemY;
156
157        tagX<<"inMinMaxX_"<<i;
158        tagY<<"inMinMaxY_"<<i;
159        if (!getItem(options,tagX.str(),itemX) || !getItem(options,tagY.str(),itemY))
160            return false;
161
        // guard against malformed pairs: both min and max must be present,
        // otherwise itemX[1]/itemY[1] below would read out of bounds
        if ((itemX.length()<2) || (itemY.length()<2))
            return false;
162
163        minmax X, Y;
164        X.min=itemX[0];
165        X.max=itemX[1];
166        Y.min=itemY[0];
167        Y.max=itemY[1];
168
169        inMinMaxX.push_back(X);
170        inMinMaxY.push_back(Y);
171    }
172
    // output scaling ranges, same layout as the input ones
174    for (int i=0; i<numOutputNodes; i++)
175    {
176        ostringstream tagX, tagY;
177        Vector itemX, itemY;
178
179        tagX<<"outMinMaxX_"<<i;
180        tagY<<"outMinMaxY_"<<i;
181        if (!getItem(options,tagX.str(),itemX) || !getItem(options,tagY.str(),itemY))
182            return false;
183
        // same malformed-pair guard as for the input ranges
        if ((itemX.length()<2) || (itemY.length()<2))
            return false;
184
185        minmax X, Y;
186        X.min=itemX[0];
187        X.max=itemX[1];
188        Y.min=itemY[0];
189        Y.max=itemY[1];
190
191        outMinMaxX.push_back(X);
192        outMinMaxY.push_back(Y);
193    }
194
195    // precompute the scaling offsets/ratios used by predict()
197    prepare();
198
199    return configured=true;
200}
201
202
203/***************************************************************************/
204Vector ff2LayNN::scaleInputToNetFormat(const Vector &x) const
205{
    // elementwise: y = inRatio .* (x - inMinX) + inMinY
206    Vector shifted=x-inMinX;
    return inRatio*shifted+inMinY;
207}
208
209
210/***************************************************************************/
211Vector ff2LayNN::scaleInputFromNetFormat(const Vector &x) const
212{
    // inverse of scaleInputToNetFormat: y = (x - inMinY) ./ inRatio + inMinX
213    Vector delta=x-inMinY;
    return delta/inRatio+inMinX;
214}
215
216
217/***************************************************************************/
218Vector ff2LayNN::scaleOutputToNetFormat(const Vector &x) const
219{
    // elementwise: y = (x - outMinX) ./ outRatio + outMinY
220    Vector delta=x-outMinX;
    return delta/outRatio+outMinY;
221}
222
223
224/***************************************************************************/
225Vector ff2LayNN::scaleOutputFromNetFormat(const Vector &x) const
226{
    // inverse of scaleOutputToNetFormat: y = outRatio .* (x - outMinY) + outMinX
227    Vector delta=x-outMinY;
    return outRatio*delta+outMinX;
228}
229
230
231/***************************************************************************/
232Vector ff2LayNN::predict(const Vector &x) const
233{
    // An unconfigured network cannot predict: return a 1-element vector.
234    if (!configured)
235        return Vector(1);
236
    // map the raw input into the network's normalized domain
237    Vector netIn=scaleInputToNetFormat(x);
238
    // hidden layer: n1 = IW*netIn + b1, squashed by hiddenLayerFcn
239    Vector n1(IW.size());
240    for (size_t k=0; k<n1.length(); k++)
241        n1[k]=yarp::math::dot(IW[k],netIn)+b1[k];
242    Vector a1=hiddenLayerFcn(n1);
243
    // output layer: n2 = LW*a1 + b2, passed through outputLayerFcn
244    Vector n2(LW.size());
245    for (size_t k=0; k<n2.length(); k++)
246        n2[k]=yarp::math::dot(LW[k],a1)+b2[k];
247    Vector a2=outputLayerFcn(n2);
248
    // map the normalized output back to the caller's domain
249    return scaleOutputFromNetFormat(a2);
250}
257
258
259/***************************************************************************/
260bool ff2LayNN::getStructure(Property &options) const
261{
    // Serialize the whole network (sizes, weights, biases, scaling ranges)
    // into a Property in the same format accepted by configure().
262    options.clear();
263
264    options.put("numHiddenNodes",(int)IW.size());
265    options.put("numOutputNodes",(int)LW.size());
266    options.put("numInputNodes",(int)inMinMaxX.size());
267
    // store a deque of weight rows under "<base>_<i>" tags
    auto putRows=[this,&options](const string &base, const deque<Vector> &rows)
    {
        for (size_t k=0; k<rows.size(); k++)
        {
            ostringstream tag;
            tag<<base<<"_"<<k;
            setItem(options,tag.str(),rows[k]);
        }
    };

    // store a deque of min/max pairs as two-element vectors under "<base>_<i>" tags
    auto putRanges=[this,&options](const string &base, const deque<minmax> &ranges)
    {
        for (size_t k=0; k<ranges.size(); k++)
        {
            ostringstream tag;
            tag<<base<<"_"<<k;

            Vector v(2);
            v[0]=ranges[k].min;
            v[1]=ranges[k].max;
            setItem(options,tag.str(),v);
        }
    };

    putRows("IW",IW);
    setItem(options,"b1",b1);

    putRows("LW",LW);
    setItem(options,"b2",b2);

    putRanges("inMinMaxX",inMinMaxX);
    putRanges("inMinMaxY",inMinMaxY);
    putRanges("outMinMaxX",outMinMaxX);
    putRanges("outMinMaxY",outMinMaxY);

324    return true;
325}
326
327
328/***************************************************************************/
329bool ff2LayNN::printStructure(ostream &stream) const
330{
    // Human-readable dump of the network: ranges, weights and biases.
    // Weight rows are printed one per line with 16 significant digits.
    auto dumpWeights=[&stream](const string &base, const deque<Vector> &rows)
    {
        for (size_t k=0; k<rows.size(); k++)
            stream<<base<<"_"<<k<<": ["<<rows[k].toString(16,1)<<"]"<<endl;
    };

331    stream<<"***** Input Layer Range *****"<<endl;
332    for (size_t k=0; k<inMinMaxX.size(); k++)
333        stream<<k<<": X ["<<inMinMaxX[k].min<<" "<<inMinMaxX[k].max
334              <<"]; Y ["<<inMinMaxY[k].min<<" "<<inMinMaxY[k].max<<"]"<<endl;
335
336    stream<<"***** Hidden Layer Weights *****"<<endl;
    dumpWeights("IW",IW);
339
340    stream<<"***** Hidden Layer Bias *****"<<endl;
341    stream<<"b1: ["<<b1.toString(16,1)<<"]"<<endl;
342
343    stream<<"***** Output Layer Weights *****"<<endl;
    dumpWeights("LW",LW);
346
347    stream<<"***** Output Layer Bias *****"<<endl;
348    stream<<"b2: ["<<b2.toString(16,1)<<"]"<<endl;
349
350    stream<<"***** Output Layer Range *****"<<endl;
351    for (size_t k=0; k<outMinMaxX.size(); k++)
352        stream<<k<<": Y ["<<outMinMaxY[k].min<<" "<<outMinMaxY[k].max
353              <<"]; X ["<<outMinMaxX[k].min<<" "<<outMinMaxX[k].max<<"]"<<endl;
354
355    return stream.good();
356}
357
358
359/***************************************************************************/
364
365
366/***************************************************************************/
368 ff2LayNN(options)
369{
370}
371
372
373/***************************************************************************/
375{
376 Vector y(x.length());
377 for (size_t i=0; i<x.length(); i++)
378 y[i]=2.0/(1.0+exp(-2.0*x[i]))-1.0;
379
380 return y;
381}
382
383
384/***************************************************************************/
386{
387 return x;
388}
389
390
391/***************************************************************************/
393{
394 Vector y(x.length());
395 for (size_t i=0; i<x.length(); i++)
396 {
397 double tmp1=exp(-2.0*x[i]);
398 double tmp2=1.0+tmp1;
399 y[i]=(4.0*tmp1)/(tmp2*tmp2);
400 }
401
402 return y;
403}
404
405
406/***************************************************************************/
408{
409 return Vector(x.length(),1.0);
410}
411
412
virtual yarp::sig::Vector hiddenLayerGrad(const yarp::sig::Vector &x) const
Gradient of the Hidden Layer Function.
ff2LayNN_tansig_purelin()
Create an empty network.
virtual yarp::sig::Vector outputLayerGrad(const yarp::sig::Vector &x) const
Gradient of the Output Layer Function.
virtual yarp::sig::Vector outputLayerFcn(const yarp::sig::Vector &x) const
Output Layer Function.
virtual yarp::sig::Vector hiddenLayerFcn(const yarp::sig::Vector &x) const
Hidden Layer Function.
Feed-Forward 2 layers Neural Network.
virtual yarp::sig::Vector hiddenLayerFcn(const yarp::sig::Vector &x) const =0
Hidden Layer Function.
yarp::sig::Vector outRatio
virtual yarp::sig::Vector scaleOutputToNetFormat(const yarp::sig::Vector &x) const
Scale output to be used with the network.
yarp::sig::Vector inRatio
yarp::sig::Vector b2
yarp::sig::Vector b1
void setItem(yarp::os::Property &options, const std::string &tag, const yarp::sig::Vector &item) const
bool getItem(const yarp::os::Property &options, const std::string &tag, yarp::sig::Vector &item) const
virtual bool isValid() const
Return the internal status after a configuration.
std::deque< minmax > inMinMaxX
virtual bool printStructure(std::ostream &stream=std::cout) const
Dump tidily the network structure on the stream.
yarp::sig::Vector outMinY
std::deque< minmax > inMinMaxY
yarp::sig::Vector inMinY
ff2LayNN()
Create an empty network.
std::deque< yarp::sig::Vector > IW
virtual yarp::sig::Vector scaleOutputFromNetFormat(const yarp::sig::Vector &x) const
Scale back output from the network's format.
virtual yarp::sig::Vector scaleInputToNetFormat(const yarp::sig::Vector &x) const
Scale input to be used with the network.
yarp::sig::Vector inMinX
std::deque< minmax > outMinMaxX
virtual bool configure(const yarp::os::Property &options)
Configure/reconfigure the network.
virtual yarp::sig::Vector predict(const yarp::sig::Vector &x) const
Predict the output given a certain input to the network.
std::deque< minmax > outMinMaxY
yarp::sig::Vector outMinX
virtual yarp::sig::Vector outputLayerFcn(const yarp::sig::Vector &x) const =0
Output Layer Function.
std::deque< yarp::sig::Vector > LW
virtual bool getStructure(yarp::os::Property &options) const
Retrieve the network structure as a Property object.
virtual yarp::sig::Vector scaleInputFromNetFormat(const yarp::sig::Vector &x) const
Scale back input from the network's format.
exp(-x3 *T)]