12 from os
import listdir
13 from os.path
import isfile, join
# Disable numpy's array summarization so full arrays are logged/printed.
# Fix: the original mixed the `np` alias with the bare name `numpy`
# (NameError when only `import numpy as np` is in scope), and
# `threshold=nan` raises ValueError on modern NumPy; `np.inf` is the
# documented sentinel for "print everything".
np.set_printoptions(threshold=np.inf)
27 Class developed for the implementation of windowed real time action recognition. 31 Initialise class using SAMDriver.__init__ and augment with custom parameters. 33 additionalParameterList is a list of extra parameters to preserve between training and interaction. 35 SAMDriver.__init__(self)
50 'segTrainConf',
'segTrainPerc',
'segTestConf',
'segTestPerc',
'seqTestConf',
55 Function to load parameters from the model config.ini file. 57 Method to load parameters from file loaded in parser from within section trainName and store these parameters in self.paramsDict. 60 parser: SafeConfigParser with pre-read config file. 61 trainName: Section from which parameters are to be read. 66 if parser.has_option(trainName,
'includeParts'):
67 self.
paramsDict[
'includeParts'] = parser.get(trainName,
'includeParts').split(
',')
72 if parser.has_option(trainName,
'actionsAllowedList'):
73 self.
paramsDict[
'actionsAllowedList'] = parser.get(trainName,
'actionsAllowedList').split(
',')
75 self.
paramsDict[
'actionsAllowedList'] = [
'lift_object',
'pull_object',
'push_object',
'drop_object',
78 if parser.has_option(trainName,
'windowSize'):
79 self.
paramsDict[
'windowSize'] = int(parser.get(trainName,
'windowSize'))
83 if parser.has_option(trainName,
'windowOffset'):
84 self.
paramsDict[
'windowOffset'] = int(parser.get(trainName,
'windowOffset'))
88 if parser.has_option(trainName,
'moveThresh'):
89 self.
paramsDict[
'moveThresh'] = float(parser.get(trainName,
'moveThresh'))
93 if parser.has_option(trainName,
'binWidth'):
94 self.
paramsDict[
'binWidth'] = float(parser.get(trainName,
'binWidth'))
98 if parser.has_option(trainName,
'method'):
99 self.
paramsDict[
'method'] = parser.get(trainName,
'method')
103 if parser.has_option(trainName,
'combineHands'):
104 self.
paramsDict[
'combineHands'] = parser.get(trainName,
'combineHands') ==
'True' 108 if parser.has_option(trainName,
'thresholdMovement'):
109 self.
paramsDict[
'thresholdMovement'] = parser.get(trainName,
'thresholdMovement') ==
'True' 113 if parser.has_option(trainName,
'sepRL'):
114 self.
paramsDict[
'sepRL'] = parser.get(trainName,
'sepRL') ==
'True' 118 if parser.has_option(trainName,
'filterData'):
119 self.
paramsDict[
'filterData'] = parser.get(trainName,
'filterData') ==
'True' 123 if parser.has_option(trainName,
'filterWindow'):
124 self.
paramsDict[
'filterWindow'] = int(parser.get(trainName,
'filterWindow'))
128 if parser.has_option(trainName,
'components'):
129 self.
paramsDict[
'components'] = parser.get(trainName,
'components').split(
',')
133 if parser.has_option(trainName,
'reduce'):
134 self.
paramsDict[
'reduce'] = parser.get(trainName,
'reduce') ==
'True' 138 if parser.has_option(trainName,
'flip'):
139 self.
paramsDict[
'flip'] = parser.get(trainName,
'flip') ==
'True' 143 if parser.has_option(trainName,
'normaliseWindow'):
144 self.
paramsDict[
'normaliseWindow'] = parser.get(trainName,
'normaliseWindow') ==
'True' 150 Override SAMDriver.saveParameters. 152 This function adds items of additionalParametersList to paramsDict to be saved. 155 commandString =
'self.paramsDict[\'' + j +
'\'] = self.' + j
157 logging.info(str(commandString))
162 def testPerformance(self, testModel, Yall, Lall, YtestAll, LtestAll, verbose):
164 Custom testPerformance method. This augments the standard testPerformance method by including testing of known and unknown together with testing on known training points and known testing points. 167 testModel : SAMObject Model to be tested. 168 Yall : Numpy array with training data vectors to be tested. 169 Lall : List with corresponding training data labels. 170 YtestAll : Numpy array with testing data vectors to be tested. 171 LtestAll : List with corresponding testing data labels. 172 verbose : Boolean turning logging to stdout on or off. 175 Square numpy array confusion matrix. 178 yTrainingData = SAMTesting.formatDataFunc(Yall)
180 SAMTesting.testSegments(testModel, yTrainingData, Lall, verbose,
'Training')
182 yTrainingData = SAMTesting.formatDataFunc(YtestAll)
184 SAMTesting.testSegments(testModel, yTrainingData, LtestAll, verbose,
'Testing')
190 return self.
segTestConf, labelsSegTest, labelComparisonDict
194 This method reads in time series data from disk for SAMDriver_ARWin. 197 list of strings with raw data and list of strings with ground truth classifications. 199 onlyfiles = [f
for f
in listdir(root_data_dir)
if isfile(join(root_data_dir, f))]
200 dataLogList = [f
for f
in onlyfiles
if 'data' in f]
202 labelsLogList = [f
for f
in onlyfiles
if 'label' in f]
207 logging.info(
'loading data from files')
209 for k
in range(len(dataLogList)):
210 logging.info(
'data file: ' + str(join(root_data_dir, dataLogList[k])))
211 logging.info(
'model file: ' + str(join(root_data_dir, labelsLogList[k])))
213 dataFile = open(join(root_data_dir, dataLogList[k]),
'r') 214 self.dataLogList.append(str(join(root_data_dir, dataLogList[k]))) 215 labelFile = open(join(root_data_dir, labelsLogList[k]), 'r') 216 self.labelsLogList.append(join(root_data_dir, labelsLogList[k])) 219 for i, l
in enumerate(dataFile):
224 for i, l
in enumerate(labelFile):
230 if lenLabelFile != lenDataFile:
231 logging.warning(str(dataLogList[k]) +
' will not be used because its length differs from ' +
232 str(labelsLogList[k]))
234 dataFile = open(join(root_data_dir, dataLogList[k]),
'r') 235 labelFile = open(join(root_data_dir, labelsLogList[k]), 'r') 236 windows = lenDataFile // self.paramsDict['windowSize']
238 for curr
in range(windows*self.
paramsDict[
'windowSize']):
239 line = dataFile.readline()
240 labelLine = labelFile.readline()
246 v = labelLine.split(
' ')[2].replace(
'\n',
'').replace(
'(',
'').replace(
')',
'')
251 rawDataList.append(t)
252 rawLabelList.append(v)
254 logging.error(
'messageChecker returned Fail')
259 return rawDataList, rawLabelList
263 Convert list of strings time series to dictionary with joints and objects as different items of the dictionary and windows of positions for each joint. 266 rawData: List of strings read in from files. 267 mode: `'testing'` or `'live'`. `'testing'` will format strings read from file. `'live'` will format strings received via Yarp during interaction. 268 verbose: Switch logging to stdout on or off. 271 Dictionary with the windowed data, list of joints present in dictionary items, list of objects present in dictionary items. 292 data[t[a]] = (np.array([float(t[a + 1]), float(t[a + 2]), float(t[a + 3])]))
293 jointsList.append(t[a])
295 arr = np.array([float(t[a + 1]), float(t[a + 2]), float(t[a + 3])])
296 if data[t[a]]
is not None:
297 data[t[a]] = np.vstack((data[t[a]], arr))
302 numObjs = (len(t) - currIdx) / 5
304 for i
in range(numObjs):
305 a = currIdx + 1 + (i * 5)
307 arr = np.array([float(t[a + 1]), float(t[a + 2]), float(t[a + 3])])
308 if data[t[a]]
is not None:
309 data[t[a]] = np.vstack((data[t[a]], arr))
314 data[t[a]] = np.array([float(t[a + 1]), float(t[a + 2]), float(t[a + 3])])
315 if mode ==
'testing' or (mode !=
'testing' and t[a+4] ==
'1'):
316 objectsList.append(t[a])
320 logging.info(
'data has length = ' + str(len(data)) +
' joints')
321 logging.info(
'each joint has an array of shape ' + str(data[
'head'].shape))
326 logging.info(
'Filtering data with hamming window of size ' + str(self.
paramsDict[
'filterWindow']))
327 for j
in data.keys():
328 t1 = utils.smooth1D(data[j][:, 0], self.
paramsDict[
'filterWindow'])
329 t2 = utils.smooth1D(data[j][:, 1], self.
paramsDict[
'filterWindow'])
330 t3 = utils.smooth1D(data[j][:, 2], self.
paramsDict[
'filterWindow'])
331 data[j] = np.hstack([t1[:,
None], t2[:,
None], t3[:,
None]])
334 logging.info(
'data has length = ' + str(len(data)) +
' joints')
335 logging.info(
'each joint has an array of shape ' + str(data[
'head'].shape))
341 noY = mode !=
'testing' 342 if mode ==
'testing':
348 printExplanation =
True 349 for num, key
in enumerate(data):
351 xx, yy = utils.transformTimeSeriesToSeq(data[key], timeWindow=self.
paramsDict[
'windowSize'],
353 normalised=self.
paramsDict[
'normaliseWindow'],
358 winSize = xx.shape[1] / 3
359 g = xx.size / winSize
360 xxshape1 = xx.shape[0]
361 xxshape2 = xx.shape[1]
363 flatxx = xx.flatten()
364 f = flatxx.reshape([g, winSize])
365 xx = f.reshape([xxshape1, xxshape2])
368 if printExplanation
and verbose:
369 logging.info(
'thresholding movement <' + str(self.
paramsDict[
'moveThresh']))
370 ranges = np.ptp(f, axis=1)
373 res = list(np.where(np.logical_and(a, b))[0])
381 if printExplanation
and verbose:
382 logging.info(
'Adding velocity to the feature vector')
384 xxvel = xxvel.reshape([xxshape1, xxshape2 - 3])
385 xx = np.hstack([xx, xxvel])
388 if printExplanation
and verbose:
389 logging.info(
'Adding acceleration to the feature vector')
390 xxacc = np.diff(f, n=2)
391 xxacc = xxacc.reshape([xxshape1, xxshape2 - 6])
392 xx = np.hstack([xx, xxacc])
395 printExplanation =
False 398 logging.info(
'data has length = ' + str(len(data2)) +
' joints')
399 logging.info(
'each joint has an array of shape ' + str(data2[
'head'].shape))
401 return data2, jointsList, objectsList
405 def readData(self, root_data_dir, participant_index, *args, **kw):
407 Method which accepts a data directory, reads all the data in and outputs self.Y which is a numpy array with n instances of m length feature vectors and self.L which is a list of text Labels of length n. 409 This method reads data<number>.log files and the corresponding labels<number>.log files present in the root_data_dir and formats the time series data into normalised and smoothed windows. Subsequently depending on the list of joints chosen by the user and other parameters set in the config file present in the root_data_dir, the information of the different joints and objects are appended into a single feature vector. Number of feature vectors is equal to the number of windows extracted from the time series data. 412 root_data_dir: Data directory. 413 participant_index: List of subfolders to consider. Can be left as an empty list. 420 logging.info(
'unique labels' + str(set(labelsList)))
422 labels = list(set(labelsList))
428 for n, k
in enumerate(labelsList):
429 res = [m
for m, l
in enumerate(labels)
if l == k]
431 labelNumsList = np.array(res)
433 labelNumsList = np.vstack([labelNumsList, res])
434 logging.info(
'shape of number labels:' +str(labelNumsList.shape))
436 uu, tmp = utils.transformTimeSeriesToSeq(labelNumsList, self.
paramsDict[
'windowSize'],
437 self.
paramsDict[
'windowOffset'],
False,
False)
439 logging.info(
'windowed number labels shape:' + str(data2NumLabels.shape))
446 for j
in data2NumLabels:
447 numItems = list(set(j))
448 if len(numItems) == 1:
449 l = labels[int(numItems[0])]
450 data2Labels.append(l)
455 data2Labels.append(
'transition')
456 logging.info(
'after transition unique set ' + str(set(data2Labels)))
457 logging.info(
'windowed data labels compressed: ' + str(len(data2Labels)))
467 for k
in objectsList:
469 objectDict[k] = (len(jointsToUse))
470 jointsToUse.append(k)
472 handDict[j] = (len(jointsToUse))
473 jointsToUse.append(j)
475 jointsToUse.append(j)
477 combineObjects = len(objectDict) > 1
479 combineHands = len(handDict) > 1
481 logging.info(jointsToUse)
482 logging.info(objectDict)
483 logging.info(handDict)
488 for j
in jointsToUse:
489 if dataVecAll
is None:
490 dataVecAll = data2[j]
492 dataVecAll = np.hstack([dataVecAll, data2[j]])
493 itemsPerJoint = dataVecAll.shape[1] / len(jointsToUse)
494 logging.info(dataVecAll.shape)
495 logging.info(itemsPerJoint)
504 combinedObjs = dict()
505 if combineObjects
and 'object' in self.
paramsDict[
'includeParts']:
507 logging.info(
'Combining Objects')
509 idxBase = objectDict[n] * itemsPerJoint
510 combinedObjs[n] = dataVecAll[:, idxBase:idxBase + itemsPerJoint]
512 logging.info(combinedObjs[n].shape)
514 logging.info(dataVecAll.shape)
518 combinedHands = dict()
519 if combineHands
and self.
paramsDict[
'combineHands']
and \
520 len([s
for s
in self.
paramsDict[
'includeParts']
if 'hand' in s]) > 0:
521 logging.info(
'Combining hands')
525 idxBase = handDict[n] * itemsPerJoint
526 combinedHands[n] = dataVecAll[:, idxBase:idxBase + itemsPerJoint]
528 logging.info(combinedHands[n].shape)
529 logging.info(dataVecAll.shape)
533 logging.info(jointsToUse)
535 for j, item
in enumerate(jointsToUse):
537 if item
not in handDict
and item
not in objectDict:
539 idxBase = j * itemsPerJoint
541 if otherJoints
is None:
542 otherJoints = dataVecAll[:, idxBase:idxBase + itemsPerJoint]
544 otherJoints = np.hstack([otherJoints, dataVecAll[:, idxBase:idxBase + itemsPerJoint]])
546 if item
not in objectDict:
548 idxBase = j * itemsPerJoint
550 if otherJoints
is None:
551 otherJoints = dataVecAll[:, idxBase:idxBase + itemsPerJoint]
553 otherJoints = np.hstack([otherJoints, dataVecAll[:, idxBase:idxBase + itemsPerJoint]])
554 if otherJoints
is not None:
555 logging.info(otherJoints.shape)
560 for k
in objectsList:
570 a[l].append(
'handLeft')
571 b[l].append(
'handRight')
584 logging.info(
'Vectors to Classify:')
586 logging.info(
"\t" + str(j))
593 logging.info(str(j[0]))
594 if objSection
is None:
595 objSection = combinedObjs[j[0]]
597 objSection = np.vstack([objSection, combinedObjs[j[0]]])
598 dataVecReq = objSection
599 logging.info(str(objSection.shape))
606 if handsSection
is None:
607 handsSection = combinedHands[l]
609 handsSection = np.vstack([handsSection, combinedHands[l]])
610 if dataVecReq
is None:
611 dataVecReq = handsSection
613 dataVecReq = np.hstack([dataVecReq, handsSection])
614 logging.info(str(handsSection.shape))
617 if otherJoints
is not None:
619 logging.info(str(j[:]))
620 if othersSection
is None:
621 othersSection = otherJoints
623 othersSection = np.vstack([othersSection, otherJoints])
625 if dataVecReq
is None:
626 dataVecReq = othersSection
628 dataVecReq = np.hstack([dataVecReq, othersSection])
630 logging.info(str(dataVecReq.shape))
631 del handsSection, othersSection, objSection, combinedHands, combinedObjs, otherJoints
634 data2LabelsAugment = []
636 data2LabelsAugment.append([])
638 for j
in data2Labels:
639 splitLabel = j.split(
'_')
640 action =
'_'.join(splitLabel[:2])
642 if len(splitLabel) > 2:
649 vec = [f
for f
in handSubList
if obj
in f][0]
659 data2LabelsAugment[n].append(action)
662 data2LabelsAugment[n].append(
'idle')
672 data2LabelsAugment[n].append(action)
679 for j
in data2LabelsAugment:
682 logging.info(
'labels ' + str(len(data2Labels)))
683 logging.info(
'data ' + str(dataVecReq.shape))
690 data2ShortLabels = []
691 for j
in data2Labels:
692 splitLabel = j.split(
'_')
693 slabel = (
'_'.join(splitLabel[:2]))
695 if splitLabel[0] ==
'push' or splitLabel[0] ==
'pull':
696 if splitLabel[-1] ==
'no':
702 if splitLabel[0] ==
'push':
703 splitLabel[0] =
'pull' 705 splitLabel[0] =
'push' 706 slabel = (
'_'.join(splitLabel[:2]))
711 data2ShortLabels.append(slabel)
714 logging.info(
'shortLabels len ' + str(set(self.
data2Labels)))
717 if 'pull_object' in self.
paramsDict[
'actionsAllowedList']:
718 self.
paramsDict[
'actionsAllowedList'].index(
'pull_object') ==
'pull_object_right' 719 self.
paramsDict[
'actionsAllowedList'].append(
'pull_object_left')
721 if 'push_object' in self.
paramsDict[
'actionsAllowedList']:
722 self.
paramsDict[
'actionsAllowedList'].index(
'push_object') ==
'push_object_right' 723 self.
paramsDict[
'actionsAllowedList'].append(
'push_object_left')
726 logging.info(
'actions allowed: ' + str(self.
paramsDict[
'actionsAllowedList']))
728 for n
in reversed(range(len(data2Labels))):
729 if len([j
for j
in self.
paramsDict[
'actionsAllowedList']
if j
in data2Labels[n]]) == 0
or \
730 'no' in data2Labels[n]:
731 listToDelete.append(n)
733 dataVecReq = np.delete(dataVecReq, listToDelete, axis=0)
734 npdata2ShortLabels = np.asarray(data2ShortLabels)
735 npdata2ShortLabels = np.delete(npdata2ShortLabels, listToDelete, axis=0)
737 data2ShortLabels = np.ndarray.tolist(npdata2ShortLabels)
740 self.
L = data2ShortLabels
742 logging.info(self.
Y.shape)
743 logging.info(len(self.
L))
747 Constructs a list of classifications to carry out. 749 Consider an example where featureSequence consists of `object', 'hand', 'head'. Assuming there are 2 objects in the scene, this method constructs 4 possible vectors to classify. \n 751 1) `object1, rightHand, head` \n 752 2) `object1, leftHand, head` \n 753 3) `object2, rightHand, head` \n 754 4) `object2, leftHand, head` \n 757 featureSequence: Organisation of features within the feature vector. 758 objectsList: List of objects currently in the scene. 759 partnerName: Object name to ignore. This ignores the motion of the interacting agent. 762 List of vectors to classify. 764 listOfVectorsToClassify = []
765 for j
in featureSequence:
767 for k
in objectsList:
768 if k != partnerName
and k !=
'partner':
769 listOfVectorsToClassify.append([k])
773 a = copy.deepcopy(listOfVectorsToClassify)
774 b = copy.deepcopy(listOfVectorsToClassify)
775 for l, m
in enumerate(listOfVectorsToClassify):
776 a[l].append(
'handLeft')
777 b[l].append(
'handRight')
778 listOfVectorsToClassify = a + b
781 for l, m
in enumerate(listOfVectorsToClassify):
782 listOfVectorsToClassify[l].append(j)
785 for l, m
in enumerate(listOfVectorsToClassify):
786 listOfVectorsToClassify[l].append(j)
788 return listOfVectorsToClassify
792 Method to check validity of incoming messages and split into components if valid. 795 dataMessage: String message to validate. 796 mode: Mode of validation. Either `'test'` or `'live'`. 799 List of strings with split sections and a boolean indicating validity of received string. 803 dataMessage = dataMessage.replace(
'"' + dataMessage.partition(
'"')[-1].rpartition(
'"')[0] +
'"',
'partner')
804 if mode ==
'testing':
805 t = dataMessage.replace(
'(',
'').replace(
')',
'').split(
' ')[4:-1]
807 t = dataMessage.replace(
'(',
'').replace(
')',
'').replace(
'"',
'').split(
' ')[2:-1]
809 logging.error(
'Non-existing mode. Choose either live or read')
815 goAhead = goAhead
and type(t[a]) == str
816 goAhead = goAhead
and float(t[a+1])
is not None 817 goAhead = goAhead
and float(t[a+2])
is not None 818 goAhead = goAhead
and float(t[a+3])
is not None 821 numObjs = (len(t) - currIdx)/5
823 for i
in range(numObjs):
824 a = currIdx + 1 + (i*5)
825 goAhead = goAhead
and type(t[a]) == str
826 goAhead = goAhead
and float(t[a+1])
is not None 827 goAhead = goAhead
and float(t[a+2])
is not None 828 goAhead = goAhead
and float(t[a+3])
is not None 837 def processLiveData(self, dataList, thisModel, verbose=False, returnUnknown=False, printClass=True, additionalData=dict()):
839 Method which receives a list of data frames and outputs a classification if available or 'no_classification' if it is not. 842 dataList: List of dataFrames collected. Length of list is variable. 843 thisModel: List of models required for testing. 844 verbose : Boolean turning logging to stdout on or off. 845 returnUnknown : Boolean to turn on or off the return a sentence if the classification is unknown. 846 printClass : Boolean to turn on or off logging of classification to stdout. 847 additionalData : Dictionary containing additional data required for classification to occur. 849 String with result of classification, likelihood of the classification, and list of frames with the latest x number of frames popped where x is the window length of the model. Classification result can be string `'None'` if the classification is unknown or message is invalid or `None` if a different error occurs. 859 if len(dataList) == self.
paramsDict[
'windowSize']:
860 logging.debug(
'dataList is of good size')
862 for j
in range(len(dataList)):
863 logging.debug(
'check message' + str(j))
865 logging.debug(
'message' + str(j) +
'checked')
867 logging.debug(
'append')
868 dataStrings.append(t)
869 logging.debug(
'checked all strings')
870 if len(dataStrings) == self.
paramsDict[
'windowSize']:
872 logging.debug(
'converting to dict')
873 data, jointsList, objectsList = self.
convertToDict(dataStrings, mode=mode, verbose=
False)
874 logging.debug(
'converted to dictionary')
876 logging.error(
'Some incorrect messages received')
879 if 'partnerName' in additionalData.keys():
881 partnerName=additionalData[
'partnerName'])
884 partnerName=
'partner')
885 logging.debug(
'finished list of vectors to classify')
886 for j
in listOfVectorsToClassify:
892 logging.debug(
'testing segment')
893 [label, val] = SAMTesting.testSegment(thisModel, vec, verbose, visualiseInfo=
None,
894 optimise=thisModel[0].optimiseRecall)
895 logging.debug(
'tested segment')
896 classification = label.split(
'_')[0]
897 classifs.append(classification)
898 if self.
paramsDict[
'flip']
and 'handLeft' in j:
899 if classification ==
'push':
900 classification =
'pull' 901 elif classification ==
'pull':
902 classification =
'push' 903 logging.debug(
'making sentence')
904 if classification ==
'unknown':
905 sentence.append(
"You did an " + classification +
" action on the " + str(j[0]))
906 sentenceProb.append(val)
908 sentence.append(
"You did a " + classification +
" action on the " + str(j[0]))
909 sentenceProb.append(val)
910 logging.debug(
'sentence made')
914 if classification ==
'unknown' and not returnUnknown:
918 logging.info(sentence[-1])
920 logging.info(
'------------------------------------------------------')
921 logging.debug(
'modifying datalist')
922 del dataList[:self.
paramsDict[
'windowOffset']]
923 if len(sentence) > 0:
925 logging.debug(
'ret success')
926 return sentence, sentenceProb, dataList
929 logging.debug(
'ret None')
930 return 'None', 0, dataList
932 logging.error(
'Some incorrect messages received')
933 return 'None', 0,
None 935 logging.error(
'Not enough data points')
def processLiveData(self, dataList, thisModel, verbose=False, returnUnknown=False, printClass=True, additionalData=dict())
Method which receives a list of data frames and outputs a classification if available or 'no_classifi...
Class developed for the implementation of windowed real time action recognition.
SAM Driver parent class that defines the methods by which models are initialised, trained and saved...
def convertToDict(self, rawData, mode, verbose)
Convert list of strings time series to dictionary with joints and objects as different items of the d...
def testPerformance(self, testModel, Yall, Lall, YtestAll, LtestAll, verbose)
Custom testPerformance method.
def diskDataToLiveData(self, root_data_dir)
This method reads in time series data from disk for SAMDriver_ARWin.
def listOfClassificationVectors(self, featureSequence, objectsList, partnerName='partner')
Constructs a list of classifications to carry out.
def saveParameters(self)
Override SAMDriver.saveParameters.
def __init__(self)
Initialise class using SAMDriver.__init__ and augment with custom parameters.
def loadParameters(self, parser, trainName)
Function to load parameters from the model config.ini file.
def messageChecker(self, dataMessage, mode)
Method to check validity of incoming messages and split into components if valid. ...
def readData(self, root_data_dir, participant_index, args, kw)
Method which accepts a data directory, reads all the data in and outputs self.Y which is a numpy arra...