icub-client
SAMDriver_ARWin.py
Go to the documentation of this file.
1 # """"""""""""""""""""""""""""""""""""""""""""""
2 # The University of Sheffield
3 # WYSIWYD Project
4 #
5 # A driver that implements Action Recognition
6 #
7 # Created on 10 April 2016
8 #
9 # @author: Daniel Camilleri, Andreas Damianou
10 #
11 # """"""""""""""""""""""""""""""""""""""""""""""
12 from os import listdir
13 from os.path import isfile, join
14 import copy
15 import numpy
16 import numpy as np
17 from SAM.SAM_Core import SAMDriver
18 from SAM.SAM_Core import SAMTesting
19 from SAM.SAM_Core import SAM_utils as utils
20 import logging
# Print arrays in full. numpy >= 1.14 rejects threshold=numpy.nan
# (ValueError: threshold must be non-NAN); np.inf achieves the original
# intent (never summarise) and is accepted by old and new numpy alike.
np.set_printoptions(threshold=np.inf)
22 
23 
24 
26  """
27  Class developed for the implementation of windowed real time action recognition.
28  """
    def __init__(self):
        """
        Initialise class using SAMDriver.__init__ and augment with custom parameters.

        additionalParameterList is a list of extra parameters to preserve between training and interaction.
        """
        SAMDriver.__init__(self)
        # Containers populated later by readData()/convertToDict().
        self.data = dict()
        self.numJoints = 9  # number of skeleton joints expected in each frame
        self.dataLogList = []  # full paths of the data<N>.log files actually used
        self.labelsLogList = []  # full paths of the matching labels<N>.log files
        self.itemsPerJoint = None  # columns occupied by one joint in the feature vector
        self.featureSequence = None  # ordered names of parts making up the feature vector
        self.handsCombined = None  # True when left/right hands were merged during training
        self.data2Labels = None  # compressed text labels, set by readData()
        self.dataVec = None
        self.allDataDict = None  # full {'Y','L'} copy kept for whole-set testing
        # NOTE(review): the extracted listing skips original line 46 here
        # (likely one more attribute initialisation, e.g. listOfVectorsToClassify)
        # - verify against the repository source.
        self.seqTestConf = None
        self.seqTestPerc = None
        # Attribute names persisted into / restored from the model parameters.
        self.additionalParametersList = ['listOfVectorsToClassify', 'handsCombined', 'featureSequence', 'itemsPerJoint',
                                        'segTrainConf', 'segTrainPerc', 'segTestConf', 'segTestPerc', 'seqTestConf',
                                        'seqTestPerc']
52 
53  def loadParameters(self, parser, trainName):
54  """
55  Function to load parameters from the model config.ini file.
56 
57  Method to load parameters from file loaded in parser from within section trainName and store these parameters in self.paramsDict.
58 
59  Args:
60  parser: SafeConfigParser with pre-read config file.
61  trainName: Section from which parameters are to be read.
62 
63  Returns:
64  None
65  """
66  if parser.has_option(trainName, 'includeParts'):
67  self.paramsDict['includeParts'] = parser.get(trainName, 'includeParts').split(',')
68  self.paramsDict['includeParts'] = [j for j in self.paramsDict['includeParts'] if j != '']
69  else:
70  self.paramsDict['includeParts'] = ['object']
71 
72  if parser.has_option(trainName, 'actionsAllowedList'):
73  self.paramsDict['actionsAllowedList'] = parser.get(trainName, 'actionsAllowedList').split(',')
74  else:
75  self.paramsDict['actionsAllowedList'] = ['lift_object', 'pull_object', 'push_object', 'drop_object',
76  'carry_object']
77 
78  if parser.has_option(trainName, 'windowSize'):
79  self.paramsDict['windowSize'] = int(parser.get(trainName, 'windowSize'))
80  else:
81  self.paramsDict['windowSize'] = 5
82 
83  if parser.has_option(trainName, 'windowOffset'):
84  self.paramsDict['windowOffset'] = int(parser.get(trainName, 'windowOffset'))
85  else:
86  self.paramsDict['windowOffset'] = 2
87 
88  if parser.has_option(trainName, 'moveThresh'):
89  self.paramsDict['moveThresh'] = float(parser.get(trainName, 'moveThresh'))
90  else:
91  self.paramsDict['moveThresh'] = 0.01
92 
93  if parser.has_option(trainName, 'binWidth'):
94  self.paramsDict['binWidth'] = float(parser.get(trainName, 'binWidth'))
95  else:
96  self.paramsDict['binWidth'] = 0.001
97 
98  if parser.has_option(trainName, 'method'):
99  self.paramsDict['method'] = parser.get(trainName, 'method')
100  else:
101  self.paramsDict['method'] = 'sumProb'
102 
103  if parser.has_option(trainName, 'combineHands'):
104  self.paramsDict['combineHands'] = parser.get(trainName, 'combineHands') == 'True'
105  else:
106  self.paramsDict['combineHands'] = False
107 
108  if parser.has_option(trainName, 'thresholdMovement'):
109  self.paramsDict['thresholdMovement'] = parser.get(trainName, 'thresholdMovement') == 'True'
110  else:
111  self.paramsDict['thresholdMovement'] = False
112 
113  if parser.has_option(trainName, 'sepRL'):
114  self.paramsDict['sepRL'] = parser.get(trainName, 'sepRL') == 'True'
115  else:
116  self.paramsDict['sepRL'] = False
117 
118  if parser.has_option(trainName, 'filterData'):
119  self.paramsDict['filterData'] = parser.get(trainName, 'filterData') == 'True'
120  else:
121  self.paramsDict['filterData'] = False
122 
123  if parser.has_option(trainName, 'filterWindow'):
124  self.paramsDict['filterWindow'] = int(parser.get(trainName, 'filterWindow'))
125  else:
126  self.paramsDict['filterWindow'] = 5
127 
128  if parser.has_option(trainName, 'components'):
129  self.paramsDict['components'] = parser.get(trainName, 'components').split(',')
130  else:
131  self.paramsDict['components'] = ['pos']
132 
133  if parser.has_option(trainName, 'reduce'):
134  self.paramsDict['reduce'] = parser.get(trainName, 'reduce') == 'True'
135  else:
136  self.paramsDict['reduce'] = False
137 
138  if parser.has_option(trainName, 'flip'):
139  self.paramsDict['flip'] = parser.get(trainName, 'flip') == 'True'
140  else:
141  self.paramsDict['flip'] = False
142 
143  if parser.has_option(trainName, 'normaliseWindow'):
144  self.paramsDict['normaliseWindow'] = parser.get(trainName, 'normaliseWindow') == 'True'
145  else:
146  self.paramsDict['normaliseWindow'] = False
147 
148  def saveParameters(self):
149  """
150  Override SAMDriver.saveParameters.
151 
152  This function adds items of additionalParametersList to paramsDict to be saved.
153  """
154  for j in self.additionalParametersList:
155  commandString = 'self.paramsDict[\'' + j + '\'] = self.' + j
156  try:
157  logging.info(str(commandString))
158  exec commandString
159  except:
160  pass
161 
162  def testPerformance(self, testModel, Yall, Lall, YtestAll, LtestAll, verbose):
163  """
164  Custom testPerformance method. This augments the standard testPerformance method by including testing of known and unknown together with testing on known training points and known testing points.
165 
166  Args:
167  testModel : SAMObject Model to be tested.
168  Yall : Numpy array with training data vectors to be tested.
169  Lall : List with corresponding training data labels.
170  YtestAll : Numpy array with testing data vectors to be tested.
171  LtestAll : List with corresponding testing data labels.
172  verbose : Boolean turning logging to stdout on or off.
173 
174  Returns:
175  Square numpy array confusion matrix.
176  """
177 
178  yTrainingData = SAMTesting.formatDataFunc(Yall)
179  [self.segTrainConf, self.segTrainPerc, labelsSegTrain, labelComparisonDict] = \
180  SAMTesting.testSegments(testModel, yTrainingData, Lall, verbose, 'Training')
181 
182  yTrainingData = SAMTesting.formatDataFunc(YtestAll)
183  [self.segTestConf, self.segTestPerc, labelsSegTest, labelComparisonDict] = \
184  SAMTesting.testSegments(testModel, yTrainingData, LtestAll, verbose, 'Testing')
185 
186  # yTrainingData = SAMTesting.formatDataFunc(self.allDataDict['Y'])
187  # [self.seqTestConf, self.seqTestPerc, labelsSeqTest, _] = SAMTesting.testSegments(testModel, yTrainingData,
188  # self.allDataDict['L'], verbose, 'All')
189 
190  return self.segTestConf, labelsSegTest, labelComparisonDict
191 
192  def diskDataToLiveData(self, root_data_dir):
193  """
194  This method reads in time series data from disk for SAMDriver_ARWin.
195 
196  Returns:
197  list of strings with raw data and list of strings with ground truth classifications.
198  """
199  onlyfiles = [f for f in listdir(root_data_dir) if isfile(join(root_data_dir, f))]
200  dataLogList = [f for f in onlyfiles if 'data' in f]
201  dataLogList.sort()
202  labelsLogList = [f for f in onlyfiles if 'label' in f]
203  labelsLogList.sort()
204  rawLabelList = []
205  rawDataList = []
206 
207  logging.info('loading data from files')
208  self.rawTextData = []
209  for k in range(len(dataLogList)):
210  logging.info('data file: ' + str(join(root_data_dir, dataLogList[k])))
211  logging.info('model file: ' + str(join(root_data_dir, labelsLogList[k])))
212  logging.info('')
213  dataFile = open(join(root_data_dir, dataLogList[k]), 'r')
214  self.dataLogList.append(str(join(root_data_dir, dataLogList[k])))
215  labelFile = open(join(root_data_dir, labelsLogList[k]), 'r')
216  self.labelsLogList.append(join(root_data_dir, labelsLogList[k]))
217 
218  # number of lines in dataFile
219  for i, l in enumerate(dataFile):
220  pass
221  lenDataFile = i + 1
222 
223  # number of lines in labelFile
224  for i, l in enumerate(labelFile):
225  pass
226  lenLabelFile = i + 1
227  dataFile.close()
228  labelFile.close()
229 
230  if lenLabelFile != lenDataFile:
231  logging.warning(str(dataLogList[k]) + ' will not be used because its length differs from ' +
232  str(labelsLogList[k]))
233  else:
234  dataFile = open(join(root_data_dir, dataLogList[k]), 'r')
235  labelFile = open(join(root_data_dir, labelsLogList[k]), 'r')
236  windows = lenDataFile // self.paramsDict['windowSize']
237 
238  for curr in range(windows*self.paramsDict['windowSize']):
239  line = dataFile.readline()
240  labelLine = labelFile.readline()
241  self.rawTextData.append(line)
242  # check data line
243  t, goAhead = self.messageChecker(line, 'testing')
244 
245  # check label line
246  v = labelLine.split(' ')[2].replace('\n', '').replace('(', '').replace(')', '')
247  if v == '':
248  v = 'unknown'
249 
250  if goAhead:
251  rawDataList.append(t)
252  rawLabelList.append(v)
253  else:
254  logging.error('messageChecker returned Fail')
255 
256  dataFile.close()
257  labelFile.close()
258 
259  return rawDataList, rawLabelList
260 
261  def convertToDict(self, rawData, mode, verbose):
262  """
263  Convert list of strings time series to dictionary with joints and objects as different items of the dictionary and windows of positions for each joint.
264 
265  Args:
266  rawData: List of strings read in from files.
267  mode: `'testing'` or `'live'`. `'testing'` will format strings read from file. `'live'` will format strings received via Yarp during interaction.
268  verbose: Switch logging to stdout on or off.
269 
270  Returns:
271  Dictionary with the windowed data, list of joints present in dictionary items, list of objects present in dictionary items.
272  """
273  data = dict()
274  firstPass = True
275  jointsList = []
276  objectsList = []
277 
278  # logging.info('*******************')
279  # for j in self.paramsDict:
280  # logging.info(j, self.paramsDict[j]
281  # logging.info('*******************')
282 
283  for t in rawData:
284  # parse skeleton data which has 9 sections by (x,y,z)
285  for i in range(self.numJoints):
286  a = i * 4
287  # if t[a] == 'shoulderCenter':
288  # t[a] = 'chest'
289 
290  if firstPass:
291  data[t[a]] = [None]
292  data[t[a]] = (np.array([float(t[a + 1]), float(t[a + 2]), float(t[a + 3])]))
293  jointsList.append(t[a])
294  else:
295  arr = np.array([float(t[a + 1]), float(t[a + 2]), float(t[a + 3])])
296  if data[t[a]] is not None:
297  data[t[a]] = np.vstack((data[t[a]], arr))
298  else:
299  data[t[a]] = arr
300 
301  currIdx = (self.numJoints * 4 - 1)
302  numObjs = (len(t) - currIdx) / 5
303 
304  for i in range(numObjs):
305  a = currIdx + 1 + (i * 5)
306  if t[a] in data:
307  arr = np.array([float(t[a + 1]), float(t[a + 2]), float(t[a + 3])])
308  if data[t[a]] is not None:
309  data[t[a]] = np.vstack((data[t[a]], arr))
310  else:
311  data[t[a]] = arr
312  else:
313  data[t[a]] = [None]
314  data[t[a]] = np.array([float(t[a + 1]), float(t[a + 2]), float(t[a + 3])])
315  if mode == 'testing' or (mode != 'testing' and t[a+4] == '1'):
316  objectsList.append(t[a])
317 
318  firstPass = False
319  if verbose:
320  logging.info('data has length = ' + str(len(data)) + ' joints')
321  logging.info('each joint has an array of shape ' + str(data['head'].shape))
322 
323  if self.paramsDict['filterData'] or 'vel' in self.paramsDict['components'] or \
324  'acc' in self.paramsDict['components']:
325  if verbose:
326  logging.info('Filtering data with hamming window of size ' + str(self.paramsDict['filterWindow']))
327  for j in data.keys():
328  t1 = utils.smooth1D(data[j][:, 0], self.paramsDict['filterWindow'])
329  t2 = utils.smooth1D(data[j][:, 1], self.paramsDict['filterWindow'])
330  t3 = utils.smooth1D(data[j][:, 2], self.paramsDict['filterWindow'])
331  data[j] = np.hstack([t1[:, None], t2[:, None], t3[:, None]])
332 
333  if verbose:
334  logging.info('data has length = ' + str(len(data)) + ' joints')
335  logging.info('each joint has an array of shape ' + str(data['head'].shape))
336  # convert data and number labels into windows.
337  # data is still in the form of a dictionary with the joints/objects as keys of the dict
338  # Text labels contained in labels
339  if verbose:
340  logging.info('')
341  noY = mode != 'testing'
342  if mode == 'testing':
343  offset = self.paramsDict['windowOffset']
344  else:
345  offset = 1
346 
347  data2 = dict()
348  printExplanation = True
349  for num, key in enumerate(data):
350  data2[key] = None
351  xx, yy = utils.transformTimeSeriesToSeq(data[key], timeWindow=self.paramsDict['windowSize'],
352  offset=offset,
353  normalised=self.paramsDict['normaliseWindow'],
354  reduced=self.paramsDict['reduce'], noY=noY)
355 
356  if self.paramsDict['thresholdMovement'] or 'vel' in self.paramsDict['components'] or 'acc' in \
357  self.paramsDict['components']:
358  winSize = xx.shape[1] / 3
359  g = xx.size / winSize
360  xxshape1 = xx.shape[0]
361  xxshape2 = xx.shape[1]
362 
363  flatxx = xx.flatten()
364  f = flatxx.reshape([g, winSize])
365  xx = f.reshape([xxshape1, xxshape2])
366 
367  if self.paramsDict['thresholdMovement']:
368  if printExplanation and verbose:
369  logging.info('thresholding movement <' + str(self.paramsDict['moveThresh']))
370  ranges = np.ptp(f, axis=1)
371  a = ranges < self.paramsDict['moveThresh']
372  b = ranges > -self.paramsDict['moveThresh']
373  res = list(np.where(np.logical_and(a, b))[0])
374  if self.paramsDict['normaliseWindow']:
375  f[res] = 0
376  else:
377  for ll in res:
378  f[ll] = f[ll][0]
379 
380  if 'vel' in self.paramsDict['components']:
381  if printExplanation and verbose:
382  logging.info('Adding velocity to the feature vector')
383  xxvel = np.diff(f)
384  xxvel = xxvel.reshape([xxshape1, xxshape2 - 3])
385  xx = np.hstack([xx, xxvel])
386 
387  if 'acc' in self.paramsDict['components']:
388  if printExplanation and verbose:
389  logging.info('Adding acceleration to the feature vector')
390  xxacc = np.diff(f, n=2)
391  xxacc = xxacc.reshape([xxshape1, xxshape2 - 6])
392  xx = np.hstack([xx, xxacc])
393 
394  data2[key] = xx
395  printExplanation = False
396 
397  if verbose:
398  logging.info('data has length = ' + str(len(data2)) + ' joints')
399  logging.info('each joint has an array of shape ' + str(data2['head'].shape))
400 
401  return data2, jointsList, objectsList
402 
403 
404 
    def readData(self, root_data_dir, participant_index, *args, **kw):
        """
        Read action data from disk and assemble self.Y / self.L.

        Reads data<N>.log and labels<N>.log pairs present in root_data_dir,
        windows the time series, concatenates the chosen joints/objects into
        feature vectors (one per window) and stores them in self.Y, with the
        matching text labels in self.L.

        Args:
            root_data_dir: Data directory.
            participant_index: List of subfolders to consider. Can be left as
                an empty list (not used in this driver).

        Returns:
            None
        """
        self.rawData, labelsList = self.diskDataToLiveData(root_data_dir)
        data2, jointsList, objectsList = self.convertToDict(self.rawData, 'testing', verbose=self.verbose)
        logging.info('unique labels' + str(set(labelsList)))
        # extract the sorted set of distinct text labels
        labels = list(set(labelsList))
        labels.sort()

        logging.info('')
        # convert text labels into row indices of `labels`
        labelNumsList = None
        for n, k in enumerate(labelsList):
            res = [m for m, l in enumerate(labels) if l == k]
            if n == 0:
                labelNumsList = np.array(res)
            else:
                labelNumsList = np.vstack([labelNumsList, res])
        logging.info('shape of number labels:' + str(labelNumsList.shape))

        # window the numeric labels exactly like the data windows
        uu, tmp = utils.transformTimeSeriesToSeq(labelNumsList, self.paramsDict['windowSize'],
                                                 self.paramsDict['windowOffset'], False, False)
        data2NumLabels = uu
        logging.info('windowed number labels shape:' + str(data2NumLabels.shape))

        # Re-assign a text label per window: windows spanning a single label
        # keep it, mixed windows are labelled 'transition'.
        data2Labels = []
        for j in data2NumLabels:
            numItems = list(set(j))
            if len(numItems) == 1:
                l = labels[int(numItems[0])]
                data2Labels.append(l)
            else:
                # Transition-block extent currently depends on windowSize; a
                # 75%-majority rule would shrink it.
                data2Labels.append('transition')
        logging.info('after transition unique set ' + str(set(data2Labels)))
        logging.info('windowed data labels compressed: ' + str(len(data2Labels)))

        logging.info('')
        # Build the list of joints/objects to use, remembering where objects
        # and hands sit inside the concatenated feature vector.
        jointsToUse = []
        objectDict = dict()
        handDict = dict()
        for j in self.paramsDict['includeParts']:
            if j == 'object':
                for k in objectsList:
                    if k != 'partner':
                        objectDict[k] = (len(jointsToUse))
                        jointsToUse.append(k)
            elif 'hand' in j:
                handDict[j] = (len(jointsToUse))
                jointsToUse.append(j)
            else:
                jointsToUse.append(j)

        combineObjects = len(objectDict) > 1

        combineHands = len(handDict) > 1

        logging.info(jointsToUse)
        logging.info(objectDict)
        logging.info(handDict)

        # concatenate data for all joints into a single vector
        logging.info('')
        dataVecAll = None
        for j in jointsToUse:
            if dataVecAll is None:
                dataVecAll = data2[j]
            else:
                dataVecAll = np.hstack([dataVecAll, data2[j]])
        # NOTE(review): Python 2 integer division; under Python 3 this yields
        # a float and should become // - verify before porting.
        itemsPerJoint = dataVecAll.shape[1] / len(jointsToUse)
        logging.info(dataVecAll.shape)
        logging.info(itemsPerJoint)
        self.itemsPerJoint = itemsPerJoint
        logging.info('')
        # ------------------------------------------------------------------
        # combine objects if multiple exist: slice each object's columns out
        # of the concatenated vector
        logging.info('')
        self.featureSequence = []
        combinedObjs = dict()
        if combineObjects and 'object' in self.paramsDict['includeParts']:
            self.featureSequence.append('object')
            logging.info('Combining Objects')
            for n in objectDict:
                idxBase = objectDict[n] * itemsPerJoint
                combinedObjs[n] = dataVecAll[:, idxBase:idxBase + itemsPerJoint]

                logging.info(combinedObjs[n].shape)

        logging.info(dataVecAll.shape)

        logging.info('')
        # likewise combine hands if requested and both hands are present
        combinedHands = dict()
        if combineHands and self.paramsDict['combineHands'] and \
                len([s for s in self.paramsDict['includeParts'] if 'hand' in s]) > 0:
            logging.info('Combining hands')
            self.handsCombined = True
            self.featureSequence.append('hand')
            for n in handDict:
                idxBase = handDict[n] * itemsPerJoint
                combinedHands[n] = dataVecAll[:, idxBase:idxBase + itemsPerJoint]

                logging.info(combinedHands[n].shape)
            logging.info(dataVecAll.shape)
        else:
            self.handsCombined = False

        logging.info(jointsToUse)
        # everything that is neither a combined hand nor an object keeps its
        # own fixed slice of columns
        otherJoints = None
        for j, item in enumerate(jointsToUse):
            if self.handsCombined:
                if item not in handDict and item not in objectDict:
                    self.featureSequence.append(item)
                    idxBase = j * itemsPerJoint

                    if otherJoints is None:
                        otherJoints = dataVecAll[:, idxBase:idxBase + itemsPerJoint]
                    else:
                        otherJoints = np.hstack([otherJoints, dataVecAll[:, idxBase:idxBase + itemsPerJoint]])
            else:
                if item not in objectDict:
                    self.featureSequence.append(item)
                    idxBase = j * itemsPerJoint

                    if otherJoints is None:
                        otherJoints = dataVecAll[:, idxBase:idxBase + itemsPerJoint]
                    else:
                        otherJoints = np.hstack([otherJoints, dataVecAll[:, idxBase:idxBase + itemsPerJoint]])
        if otherJoints is not None:
            logging.info(otherJoints.shape)

        # enumerate every object/hand combination that will be classified
        self.listOfVectorsToClassify = []
        for j in self.featureSequence:
            if j == 'object':
                for k in objectsList:
                    if k != 'partner':
                        self.listOfVectorsToClassify.append([k])

            elif 'hand' in j:
                if self.handsCombined:
                    # duplicate the current list into a left-hand variant and
                    # a right-hand variant
                    a = copy.deepcopy(self.listOfVectorsToClassify)
                    b = copy.deepcopy(self.listOfVectorsToClassify)
                    if len(self.listOfVectorsToClassify) > 0:
                        for l, m in enumerate(self.listOfVectorsToClassify):
                            a[l].append('handLeft')
                            b[l].append('handRight')
                        self.listOfVectorsToClassify = a + b
                    else:
                        self.listOfVectorsToClassify.append(['handLeft'])
                        self.listOfVectorsToClassify.append(['handRight'])

                else:
                    for l, m in enumerate(self.listOfVectorsToClassify):
                        self.listOfVectorsToClassify[l].append(j)

            else:
                for l, m in enumerate(self.listOfVectorsToClassify):
                    self.listOfVectorsToClassify[l].append(j)
        logging.info('Vectors to Classify:')
        for j in self.listOfVectorsToClassify:
            logging.info("\t" + str(j))

        # Stack the column slices vertically so each vector-to-classify
        # becomes its own group of rows in dataVecReq.
        dataVecReq = None
        objSection = None
        if combinedObjs:
            objSection = None
            for j in self.listOfVectorsToClassify:
                logging.info(str(j[0]))
                if objSection is None:
                    objSection = combinedObjs[j[0]]
                else:
                    objSection = np.vstack([objSection, combinedObjs[j[0]]])
            dataVecReq = objSection
            logging.info(str(objSection.shape))

        handsSection = None
        if combinedHands:
            for j in self.listOfVectorsToClassify:
                for l in j:
                    if 'hand' in l:
                        if handsSection is None:
                            handsSection = combinedHands[l]
                        else:
                            handsSection = np.vstack([handsSection, combinedHands[l]])
            if dataVecReq is None:
                dataVecReq = handsSection
            else:
                dataVecReq = np.hstack([dataVecReq, handsSection])
            logging.info(str(handsSection.shape))

        othersSection = None
        if otherJoints is not None:
            for j in self.listOfVectorsToClassify:
                logging.info(str(j[:]))
                if othersSection is None:
                    othersSection = otherJoints
                else:
                    othersSection = np.vstack([othersSection, otherJoints])

            if dataVecReq is None:
                dataVecReq = othersSection
            else:
                dataVecReq = np.hstack([dataVecReq, othersSection])

        logging.info(str(dataVecReq.shape))
        del handsSection, othersSection, objSection, combinedHands, combinedObjs, otherJoints

        # Augment the labels list to match the row stacking above: each
        # vector-to-classify gets its own copy of the label sequence.
        data2LabelsAugment = []
        for j in self.listOfVectorsToClassify:
            data2LabelsAugment.append([])

        for j in data2Labels:
            splitLabel = j.split('_')
            action = '_'.join(splitLabel[:2])

            if len(splitLabel) > 2:
                # full labels look like <verb>_object_<objName>_hand_<side>...
                obj = splitLabel[2]
                hand = splitLabel[4]

                # find the classification vector this label belongs to
                if combineHands:
                    handSubList = [k for k in self.listOfVectorsToClassify if 'hand' + hand.capitalize() in k]
                    if combineObjects:
                        vec = [f for f in handSubList if obj in f][0]
                    else:
                        vec = handSubList[0]
                else:
                    vec = [f for f in self.listOfVectorsToClassify if obj in f][0]

                for n, k in enumerate(self.listOfVectorsToClassify):
                    if vec == k:
                        data2LabelsAugment[n].append(action)
                    # else:
                    # NOTE(review): the `else:` above is commented out in the
                    # original source, which makes the following 'idle' append
                    # unconditional (the matching vector receives both the
                    # action and 'idle'). This looks unintended - verify the
                    # intended indentation against the repository before
                    # relying on the augmented label lengths.
                    data2LabelsAugment[n].append('idle')
            else:
                # label with no object/hand qualifier applies to every vector
                obj = ''
                hand = ''
                printStr = ''
                for n, k in enumerate(self.listOfVectorsToClassify):
                    data2LabelsAugment[n].append(action)

        data2Labels = []
        for j in data2LabelsAugment:
            data2Labels += j
        logging.info('labels ' + str(len(data2Labels)))
        logging.info('data ' + str(dataVecReq.shape))
        # keep a full copy of the augmented data/labels for later whole-set testing
        self.allDataDict = dict()
        self.allDataDict['Y'] = copy.deepcopy(dataVecReq)
        self.allDataDict['L'] = copy.deepcopy(data2Labels)

        # ---------------------------------------------------------------------------------

        # Compress labels to <verb>_object form, optionally mirroring
        # left-hand push/pull (flip) and optionally keeping the R/L suffix (sepRL).
        data2ShortLabels = []
        for j in data2Labels:
            splitLabel = j.split('_')
            slabel = ('_'.join(splitLabel[:2]))

            if splitLabel[0] == 'push' or splitLabel[0] == 'pull':
                # the hand side is the last token, unless the label ends in 'no'
                if splitLabel[-1] == 'no':
                    add = splitLabel[-2]
                else:
                    add = splitLabel[-1]

                if add == 'left' and self.paramsDict['flip']:
                    # mirror the action performed by the left hand
                    if splitLabel[0] == 'push':
                        splitLabel[0] = 'pull'
                    else:
                        splitLabel[0] = 'push'
                    slabel = ('_'.join(splitLabel[:2]))

                if self.paramsDict['sepRL']:
                    slabel += '_' + add

            data2ShortLabels.append(slabel)

        self.data2Labels = copy.deepcopy(data2ShortLabels)
        logging.info('shortLabels len ' + str(set(self.data2Labels)))

        if self.paramsDict['sepRL']:
            # NOTE(review): these `.index(...) == '...'` comparisons are
            # no-ops; they were presumably meant to REPLACE the entry, e.g.
            # lst[lst.index('pull_object')] = 'pull_object_right'. As written
            # only the *_left variants are appended.
            if 'pull_object' in self.paramsDict['actionsAllowedList']:
                self.paramsDict['actionsAllowedList'].index('pull_object') == 'pull_object_right'
                self.paramsDict['actionsAllowedList'].append('pull_object_left')

            if 'push_object' in self.paramsDict['actionsAllowedList']:
                self.paramsDict['actionsAllowedList'].index('push_object') == 'push_object_right'
                self.paramsDict['actionsAllowedList'].append('push_object_left')

        # remove windows whose labels are not trained (not in the allowed
        # list, or containing 'no')
        logging.info('actions allowed: ' + str(self.paramsDict['actionsAllowedList']))
        listToDelete = []
        for n in reversed(range(len(data2Labels))):
            if len([j for j in self.paramsDict['actionsAllowedList'] if j in data2Labels[n]]) == 0 or \
                    'no' in data2Labels[n]:
                listToDelete.append(n)

        dataVecReq = np.delete(dataVecReq, listToDelete, axis=0)
        npdata2ShortLabels = np.asarray(data2ShortLabels)
        npdata2ShortLabels = np.delete(npdata2ShortLabels, listToDelete, axis=0)
        data2ShortLabels = np.ndarray.tolist(npdata2ShortLabels)

        self.Y = dataVecReq
        self.L = data2ShortLabels
        logging.info(self.Y.shape)
        logging.info(len(self.L))
744 
745  def listOfClassificationVectors(self, featureSequence, objectsList, partnerName='partner'):
746  """
747  Constructs a list of classifications to carry out.
748 
749  Consider an example where featureSequence consists of `object', 'hand', 'head'. Assuming there are 2 objects in the scene, this method constructs 4 possible vectors to classify. \n
750 
751  1) `object1, rightHand, head` \n
752  2) `object1, leftHand, head` \n
753  3) `object2, rightHand, head` \n
754  4) `object2, leftHand, head` \n
755 
756  Args:
757  featureSequence: Organisation of features within the feature vector.
758  objectsList: List of objects currently in the scene.
759  partnerName: Object name to ignore. This ignores the motion of the interacting agent.
760 
761  Returns:
762  List of vectors to classify.
763  """
764  listOfVectorsToClassify = []
765  for j in featureSequence:
766  if j == 'object':
767  for k in objectsList:
768  if k != partnerName and k != 'partner':
769  listOfVectorsToClassify.append([k])
770 
771  elif 'hand' in j:
772  if self.handsCombined:
773  a = copy.deepcopy(listOfVectorsToClassify)
774  b = copy.deepcopy(listOfVectorsToClassify)
775  for l, m in enumerate(listOfVectorsToClassify):
776  a[l].append('handLeft')
777  b[l].append('handRight')
778  listOfVectorsToClassify = a + b
779 
780  else:
781  for l, m in enumerate(listOfVectorsToClassify):
782  listOfVectorsToClassify[l].append(j)
783 
784  else:
785  for l, m in enumerate(listOfVectorsToClassify):
786  listOfVectorsToClassify[l].append(j)
787 
788  return listOfVectorsToClassify
789 
790  def messageChecker(self, dataMessage, mode):
791  """
792  Method to check validity of incoming messages and split into components if valid.
793 
794  Args:
795  dataMessage: String message to validate.
796  mode: Mode of validation. Either `'test'` or `'live'`.
797 
798  Returns:
799  List of strings with split sections and a boolean indicating validity of received string.
800  """
801  goAhead = True
802  try:
803  dataMessage = dataMessage.replace('"' + dataMessage.partition('"')[-1].rpartition('"')[0] + '"', 'partner')
804  if mode == 'testing':
805  t = dataMessage.replace('(', '').replace(')', '').split(' ')[4:-1]
806  elif mode == 'live':
807  t = dataMessage.replace('(', '').replace(')', '').replace('"', '').split(' ')[2:-1]
808  else:
809  logging.error('Non-existing mode. Choose either live or read')
810  t = []
811 
812  if len(t) > 45:
813  for i in range(self.numJoints):
814  a = i*4
815  goAhead = goAhead and type(t[a]) == str
816  goAhead = goAhead and float(t[a+1]) is not None
817  goAhead = goAhead and float(t[a+2]) is not None
818  goAhead = goAhead and float(t[a+3]) is not None
819 
820  currIdx = (self.numJoints*4 - 1)
821  numObjs = (len(t) - currIdx)/5
822 
823  for i in range(numObjs):
824  a = currIdx + 1 + (i*5)
825  goAhead = goAhead and type(t[a]) == str
826  goAhead = goAhead and float(t[a+1]) is not None
827  goAhead = goAhead and float(t[a+2]) is not None
828  goAhead = goAhead and float(t[a+3]) is not None
829  else:
830  goAhead = False
831  except:
832  goAhead = False
833  t = []
834 
835  return [t, goAhead]
836 
837  def processLiveData(self, dataList, thisModel, verbose=False, returnUnknown=False, printClass=True, additionalData=dict()):
838  """
839  Method which receives a list of data frames and outputs a classification if available or 'no_classification' if it is not.
840 
841  Args:
842  dataList: List of dataFrames collected. Length of list is variable.
843  thisModel: List of models required for testing.
844  verbose : Boolean turning logging to stdout on or off.
845  returnUnknown : Boolean to turn on or off the return a sentence if the classification is unknown.
846  printClass : Boolean to turn on or off logging of classification to stdout.
847  additionalData : Dictionary containing additional data required for classification to occur.
848  Returns:
849  String with result of classification, likelihood of the classification, and list of frames with the latest x number of frames popped where x is the window length of the model. Classification result can be string `'None'` if the classification is unknown or message is invalid or `None` if a different error occurs.
850 
851  """
852  mode = 'live'
853  sentence = []
854  classifs = []
855  sentenceProb = []
856  vecList = []
857  errorFlag = False
858 
859  if len(dataList) == self.paramsDict['windowSize']:
860  logging.debug('dataList is of good size')
861  dataStrings = []
862  for j in range(len(dataList)):
863  logging.debug('check message' + str(j))
864  [t, goAhead] = self.messageChecker(dataList[j].toString(), mode)
865  logging.debug('message' + str(j) + 'checked')
866  if goAhead:
867  logging.debug('append')
868  dataStrings.append(t)
869  logging.debug('checked all strings')
870  if len(dataStrings) == self.paramsDict['windowSize']:
871  try:
872  logging.debug('converting to dict')
873  data, jointsList, objectsList = self.convertToDict(dataStrings, mode=mode, verbose=False)
874  logging.debug('converted to dictionary')
875  except:
876  logging.error('Some incorrect messages received')
877  return 'None', None
878 
879  if 'partnerName' in additionalData.keys():
880  listOfVectorsToClassify = self.listOfClassificationVectors(self.featureSequence, objectsList,
881  partnerName=additionalData['partnerName'])
882  else:
883  listOfVectorsToClassify = self.listOfClassificationVectors(self.featureSequence, objectsList,
884  partnerName='partner')
885  logging.debug('finished list of vectors to classify')
886  for j in listOfVectorsToClassify:
887  v = []
888  for k in j:
889  v.append(data[k])
890  vec = np.hstack(v)
891  vecList.append(vec)
892  logging.debug('testing segment')
893  [label, val] = SAMTesting.testSegment(thisModel, vec, verbose, visualiseInfo=None,
894  optimise=thisModel[0].optimiseRecall)
895  logging.debug('tested segment')
896  classification = label.split('_')[0]
897  classifs.append(classification)
898  if self.paramsDict['flip'] and 'handLeft' in j:
899  if classification == 'push':
900  classification = 'pull'
901  elif classification == 'pull':
902  classification = 'push'
903  logging.debug('making sentence')
904  if classification == 'unknown':
905  sentence.append("You did an " + classification + " action on the " + str(j[0]))
906  sentenceProb.append(val)
907  else:
908  sentence.append("You did a " + classification + " action on the " + str(j[0]))
909  sentenceProb.append(val)
910  logging.debug('sentence made')
911 
912  # if len(j) > 1:
913  # sentence[-1] += " with your " + j[1].replace('hand', '') + ' hand'
914  if classification == 'unknown' and not returnUnknown:
915  sentence.pop(-1)
916  sentenceProb.pop(-1)
917  elif printClass:
918  logging.info(sentence[-1])
919  if printClass:
920  logging.info('------------------------------------------------------')
921  logging.debug('modifying datalist')
922  del dataList[:self.paramsDict['windowOffset']]
923  if len(sentence) > 0:
924  # return [str(sentence), data, classifs, vecList]
925  logging.debug('ret success')
926  return sentence, sentenceProb, dataList
927  else:
928  # return ['None', data, classifs, vecList]
929  logging.debug('ret None')
930  return 'None', 0, dataList
931  else:
932  logging.error('Some incorrect messages received')
933  return 'None', 0, None
934  else:
935  logging.error('Not enough data points')
936  return None, 0, None
def processLiveData(self, dataList, thisModel, verbose=False, returnUnknown=False, printClass=True, additionalData=dict())
Method which receives a list of data frames and outputs a classification if available or 'no_classifi...
Class developed for the implementation of windowed real time action recognition.
SAM Driver parent class that defines the methods by which models are initialised, trained and saved...
Definition: SAMDriver.py:35
def convertToDict(self, rawData, mode, verbose)
Convert list of strings time series to dictionary with joints and objects as different items of the d...
def testPerformance(self, testModel, Yall, Lall, YtestAll, LtestAll, verbose)
Custom testPerformance method.
def diskDataToLiveData(self, root_data_dir)
This method reads in time series data from disk for SAMDriver_ARWin.
def listOfClassificationVectors(self, featureSequence, objectsList, partnerName='partner')
Constructs a list of classifications to carry out.
def saveParameters(self)
Override SAMDriver.saveParameters.
def __init__(self)
Initialise class using SAMDriver.__init__ and augment with custom parameters.
def loadParameters(self, parser, trainName)
Function to load parameters from the model config.ini file.
def messageChecker(self, dataMessage, mode)
Method to check validity of incoming messages and split into components if valid. ...
def readData(self, root_data_dir, participant_index, *args, **kw)
Method which accepts a data directory, reads all the data in and outputs self.Y which is a numpy arra...