-
Notifications
You must be signed in to change notification settings - Fork 1
/
EmotivSMILE.asv
executable file
·564 lines (498 loc) · 27.4 KB
/
EmotivSMILE.asv
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
classdef EmotivSMILE < handle
    %EmotivSMILE  Interface to the Emotiv EEG headset via the EDK library,
    %plus the SMILE record / analyze / classify workflow.
    %
    % Usage:
    %   a = EmotivSMILE;           % connect (loads edk.dll if not loaded)
    %   a.Run                      % timer keeps the data matrix updated
    %   a.Plot                     % timer keeps re-plotting a.data
    %   a.Stop                     % stop the acquisition timer
    %   a.StopPlot                 % stop the plot timer
    %   a.Record(10)               % blocking 10 s recording, saved to .mat
    %   a.LoadRecordedData(f)      % load a recording into a.data
    %                              %  (a running timer would overwrite it)
    %   a.runSMILE('learn')        % record/analyze/classify loop
    %   delete(a)                  % disconnect and unload the library

    properties (Access = public)
        sampFreq = 128;          % sampling frequency (Hz)
        recordSeconds = 1;       % default duration used by Record()
        acqisitionSeconds = 1;   % EDK internal buffer size (seconds)
        timerPeriod = 0.35;      % acquisition timer period (s); MUST BE LESS THAN acqisitionSeconds. Also used when polling during Record.
        plotTimerPeriod = 0.5;   % plot refresh period (s)
        fig_h;                   % figure handle used by UpdatePlot
    end

    properties (Constant)
        unitIdentifier = 'Emotiv Systems-5' % headset name passed to EE_EngineConnect
    end

    properties (SetAccess = protected)
        runAsyncTimer   % timer driving UpdateData
        plotTimer       % timer driving UpdatePlot
        channelCount    % number of data channels (columns of data)
        data = [];      % latest acquired samples; newest rows at the top
        eEvent          % EDK event handle
        dataRowCount    % rows kept in data (acqisitionSeconds * sampFreq)
        hData           % handle (pointer) to the data stored in library
        DataChannels    % copy of EE_DataChannels_enum used by EE_DataGet
        EE_DataChannels_enum = struct('ED_COUNTER',0,'ED_INTERPOLATED',1,'ED_RAW_CQ',2,'ED_AF3',3,'ED_F7',4,'ED_F3',5,'ED_FC5',6,'ED_T7',7,'ED_P7',8,'ED_O1',9,'ED_O2',10,'ED_P8',11,'ED_T8',12,'ED_FC6',13,'ED_F4',14,'ED_F8',15,'ED_AF4',16,'ED_GYROX',17,'ED_GYROY',18,'ED_TIMESTAMP',19,'ED_ES_TIMESTAMP',20,'ED_FUNC_ID',21,'ED_FUNC_VALUE',22,'ED_MARKER',23,'ED_SYNC_SIGNAL',24);
        EE_CognitivTrainingControl_enum = struct('COG_NONE',0,'COG_START',1,'COG_ACCEPT',2,'COG_REJECT',3,'COG_ERASE',4,'COG_RESET',5);
        EE_ExpressivAlgo_enum = struct('EXP_NEUTRAL',1,'EXP_BLINK',2,'EXP_WINK_LEFT',4,'EXP_WINK_RIGHT',8,'EXP_HORIEYE',16,'EXP_EYEBROW',32,'EXP_FURROW',64,'EXP_SMILE',128,'EXP_CLENCH',256,'EXP_LAUGH',512,'EXP_SMIRK_LEFT',1024,'EXP_SMIRK_RIGHT',2048);
        EE_ExpressivTrainingControl_enum = struct('EXP_NONE',0,'EXP_START',1,'EXP_ACCEPT',2,'EXP_REJECT',3,'EXP_ERASE',4,'EXP_RESET',5);
        EE_ExpressivThreshold_enum = struct('EXP_SENSITIVITY',0);
        EE_CognitivEvent_enum = struct('EE_CognitivNoEvent',0,'EE_CognitivTrainingStarted',1,'EE_CognitivTrainingSucceeded',2,'EE_CognitivTrainingFailed',3,'EE_CognitivTrainingCompleted',4,'EE_CognitivTrainingDataErased',5,'EE_CognitivTrainingRejected',6,'EE_CognitivTrainingReset',7,'EE_CognitivAutoSamplingNeutralCompleted',8,'EE_CognitivSignatureUpdated',9);
        EE_EmotivSuite_enum=struct('EE_EXPRESSIV',0,'EE_AFFECTIV',1,'EE_COGNITIV',2);
        EE_ExpressivEvent_enum=struct('EE_ExpressivNoEvent',0,'EE_ExpressivTrainingStarted',1,'EE_ExpressivTrainingSucceeded',2,'EE_ExpressivTrainingFailed',3,'EE_ExpressivTrainingCompleted',4,'EE_ExpressivTrainingDataErased',5,'EE_ExpressivTrainingRejected',6,'EE_ExpressivTrainingReset',7);
        EE_CognitivAction_enum=struct('COG_NEUTRAL',1,'COG_PUSH',2,'COG_PULL',4,'COG_LIFT',8,'COG_DROP',16,'COG_LEFT',32,'COG_RIGHT',64,'COG_ROTATE_LEFT',128,'COG_ROTATE_RIGHT',256,'COG_ROTATE_CLOCKWISE',512,'COG_ROTATE_COUNTER_CLOCKWISE',1024,'COG_ROTATE_FORWARDS',2048,'COG_ROTATE_REVERSE',4096,'COG_DISAPPEAR',8192);
        EE_InputChannels_enum=struct('EE_CHAN_CMS',0,'EE_CHAN_DRL',1,'EE_CHAN_FP1',2,'EE_CHAN_AF3',3,'EE_CHAN_F7',4,'EE_CHAN_F3',5,'EE_CHAN_FC5',6,'EE_CHAN_T7',7,'EE_CHAN_P7',8,'EE_CHAN_O1',9,'EE_CHAN_O2',10,'EE_CHAN_P8',11,'EE_CHAN_T8',12,'EE_CHAN_FC6',13,'EE_CHAN_F4',14,'EE_CHAN_F8',15,'EE_CHAN_AF4',16,'EE_CHAN_FP2',17);
        EE_ExpressivSignature_enum=struct('EXP_SIG_UNIVERSAL',0,'EXP_SIG_TRAINED',1);
        EE_Event_enum=struct('EE_UnknownEvent',0,'EE_EmulatorError',1,'EE_ReservedEvent',2,'EE_UserAdded',16,'EE_UserRemoved',32,'EE_EmoStateUpdated',64,'EE_ProfileEvent',128,'EE_CognitivEvent',256,'EE_ExpressivEvent',512,'EE_InternalStateChanged',1024,'EE_AllEvent',2032);
        DataChannelsNames = {'ED_COUNTER','ED_INTERPOLATED','ED_RAW_CQ','ED_AF3','ED_F7','ED_F3','ED_FC5','ED_T7','ED_P7','ED_O1','ED_O2','ED_P8','ED_T8','ED_FC6','ED_F4','ED_F8','ED_AF4','ED_GYROX','ED_GYROY','ED_TIMESTAMP','ED_ES_TIMESTAMP','ED_FUNC_ID','ED_FUNC_VALUE','ED_MARKER','ED_SYNC_SIGNAL'};
    end

    properties (SetAccess = private)
        tempData;       % holds the data while it is being read from the library
        showDebug;      % makes more show up when taking data
        ctree;          % Classification tree
        demoResults = java.util.ArrayList();    % List of predictions during the demo mode
        responses = java.util.ArrayList();      % List of responses from machine learning
        classifyData = java.util.HashMap();     % Data for machine learning mapped to the frequency range taken at
        bandSize = 1;   % width (Hz) of each frequency band used for classification
        audio;          % struct of audioplayer objects, one per voice prompt
        % Based off of analysis.m with setup1 (ross)
        emotionFreqs = [167, 193, 157, 208, 244, 293, 294, 335, 388, 415,...
            418, 135, 151, 159, 161, 163, 181, 183, 189, 150, 170, 199,...
            226, 234, 239, 245, 255, 260, 262, 268, 270, 273, 280, 281,...
            286, 289, 291, 293, 299, 304, 318, 323, 325, 331, 336, 348,...
            349, 352, 359, 362, 370, 379, 381, 405, 408];
    end

    methods
        %% Constructor
        function self = EmotivSMILE()
            %EmotivSMILE  Load the EDK library, connect to the headset, and
            %allocate the data buffers and voice prompts.
            if ~libisloaded('edk')
                [notfound,warnings] = loadlibrary('edk.dll','edk.h'); %#ok<NASGU,ASGLU>
                disp('EDK library loaded');
            else
                disp('EDK library already loaded');
            end
            % EE_EngineConnect returns 0 on success.
            if calllib('edk','EE_EngineConnect',int8([self.unitIdentifier 0]))
                error(['An error occured when using EE_EngineConnect to connect to ',self.unitIdentifier])
            else
                disp(['Successfully connected to ',self.unitIdentifier])
            end
            % Create a data pointer, set the buffer size, create the events engine.
            self.hData = calllib('edk','EE_DataCreate');
            calllib('edk','EE_DataSetBufferSizeInSec',self.acqisitionSeconds);
            self.eEvent = calllib('edk','EE_EmoEngineEventCreate');
            % FIX: channelCount must be assigned BEFORE it is used to size
            % the data matrix (the original sized data with an empty count).
            self.channelCount = length(self.DataChannelsNames);
            self.dataRowCount = self.acqisitionSeconds * self.sampFreq;
            self.data = zeros(self.dataRowCount,self.channelCount);
            % tempData is required to work on the data while still allowing
            % external access to the last complete data matrix.
            self.tempData = self.data;
            self.showDebug = false;
            self.DataChannels = self.EE_DataChannels_enum;
            % Import the voice-prompt audio files.
            self.audio = struct();
            filenames = {'Detecting Happiness.wav', 'Detecting Indifference.wav', 'Detecting Sadness.wav',...
                'Happy Sad or Neither.wav', 'Would you agree.wav'};
            fields = {'positive', 'indifferent', 'negative', 'question', 'verify'};
            for i = 1:numel(fields)
                % 'audioData' (not 'sound') so the builtin sound() is not shadowed.
                [audioData, samplingRate] = audioread(filenames{i});
                self.audio.(fields{i}) = audioplayer(audioData, samplingRate);
            end
        end

        %% delete
        function delete(self)
            %delete  Stop the timers, disconnect from the headset, and
            %unload the EDK library.
            self.Stop()
            self.StopPlot()
            if calllib('edk','EE_EngineDisconnect')
                warning(['Some error disconnecting from ',self.unitIdentifier]) %#ok<WNTAG>
            else
                disp(['Successfully disconnected from ',self.unitIdentifier])
            end
            pause(0.1);
            unloadlibrary('edk');
        end

        %% Run
        function Run(self)
            %Run  Start a timer that refreshes the data matrix every
            %timerPeriod seconds (runs until Stop is called).
            self.runAsyncTimer = timer('TimerFcn', @(src,event)UpdateData(self), 'name', 'RunAsync','Period', self.timerPeriod,'BusyMode','drop','ExecutionMode','fixedDelay');
            start(self.runAsyncTimer);
            pause(1);
        end

        function Stop(self)
            %Stop  Stop the acquisition timer if it is running.
            if ~isempty(self.runAsyncTimer) && strcmp(self.runAsyncTimer.Running,'on')
                stop(self.runAsyncTimer);
            end
        end

        %% Plot
        function Plot(self)
            %Plot  Start a timer that keeps re-plotting the data matrix.
            if isempty(self.runAsyncTimer) || strcmp(self.runAsyncTimer.Running,'off')
                warning('The update data timer is not currently started. Please start if you want to see new data.') %#ok<WNTAG>
            end
            self.plotTimer = timer('TimerFcn', @(src,event)UpdatePlot(self), 'name', 'UpdatePlotTimer','Period', self.plotTimerPeriod,'BusyMode','drop','ExecutionMode','fixedDelay');
            start(self.plotTimer);
        end

        function StopPlot(self)
            %StopPlot  Stop the plot timer if it is running.
            if ~isempty(self.plotTimer) && strcmp(self.plotTimer.Running,'on')
                stop(self.plotTimer);
            end
        end

        function UpdatePlot(self)
            %UpdatePlot  Re-draw the latest data; change this to show
            %whatever view is needed.
            if isempty(self.fig_h)
                self.fig_h = figure;
            end
            set(0,'CurrentFigure',self.fig_h)
            surf(self.data);
        end

        %% Record
        function recordFilename = Record(self,recordSeconds_local)
            %Record  Block until recordSeconds_local worth of new samples
            %have been collected, then save them to a timestamped .mat file.
            % Returns the name of the saved file.
            if ~isempty(self.runAsyncTimer) && strcmp(self.runAsyncTimer.Running,'on')
                warning('You should not run the timer and record simultaneously. Stopping the timer') %#ok<WNTAG>
                self.Stop()
            end
            if nargin < 2
                recordSeconds_local = self.recordSeconds;
            end
            starttime = clock;
            originalDesiredSampleCount = recordSeconds_local * self.sampFreq;
            remainingDesiredSampleCount = originalDesiredSampleCount;
            recordData = zeros(recordSeconds_local * self.sampFreq, self.channelCount);
            currentIndex = 1;
            while remainingDesiredSampleCount > 0
                % Bail out after 2x the requested time even if the full
                % sample count never arrives.
                if etime(clock,starttime) > 2 * recordSeconds_local
                    break;
                end
                nSamplesTaken = UpdateData(self);
                if nSamplesTaken == 0
                    continue;
                end
                % The final read will usually overshoot the desired count;
                % only keep as many samples as are still needed.
                overShoot = currentIndex + nSamplesTaken - originalDesiredSampleCount;
                if overShoot <= 0
                    recordData(currentIndex:currentIndex+nSamplesTaken-1,:) = self.data(1:nSamplesTaken,:);
                    remainingDesiredSampleCount = remainingDesiredSampleCount - nSamplesTaken;
                else
                    recordData(currentIndex:originalDesiredSampleCount,:) = self.data(overShoot:nSamplesTaken,:);
                    remainingDesiredSampleCount = 0;
                end
                currentIndex = currentIndex + nSamplesTaken;
                disp(['Recording... ',num2str(etime(clock,starttime)),' s']);
                pause(self.timerPeriod);
            end
            recordFilename = ['EEGlog',datestr(now,30),'.mat'];
            save(recordFilename,'recordData');
            disp(['Recording complete. Saved to ',recordFilename]);
        end

        %% LoadRecordedData
        function LoadRecordedData(self,filename)
            %LoadRecordedData  Load a previously saved recording into the
            %data property (a running timer would overwrite it).
            if ~isempty(self.runAsyncTimer) && strcmp(self.runAsyncTimer.Running,'on')
                warning('You should not run the timer and load record data simultaneously (else it will overwrite it). I''m stopping the timer') %#ok<WNTAG>
                self.Stop()
            end
            temp = load(filename);
            self.data = temp.recordData;
        end

        %% UpdateData
        function nSamplesTaken = UpdateData(self)
            %UpdateData  Pull the newest samples from the library and push
            %them onto the front of the data matrix (oldest rows fall off
            %the end). Returns the number of samples read this call.
            state = calllib('edk','EE_EngineGetNextEvent',self.eEvent);
            if state~=0 % state = 0 if everything's OK
                warning('Everything is NOT OK: Is the headset connected, turned on, and on someone''s head?'); %#ok<WNTAG>
            end
            eventType = calllib('edk','EE_EmoEngineEventGetType',self.eEvent);
            userID = libpointer('uint32Ptr',0);
            calllib('edk','EE_EmoEngineEventGetUserId',self.eEvent, userID);
            % Enable acquisition the first time this user appears.
            if strcmp(eventType,'EE_UserAdded')
                userID_value = get(userID,'value');
                calllib('edk','EE_DataAcquisitionEnable',userID_value,true);
            end
            calllib('edk','EE_DataUpdateHandle', 0, self.hData);
            nSamples = libpointer('uint32Ptr',0);
            calllib('edk','EE_DataGetNumberOfSample', self.hData, nSamples);
            nSamplesTaken = get(nSamples,'value');
            if (nSamplesTaken ~= 0)
                if self.showDebug
                    disp(['nSamplesTaken = ',num2str(nSamplesTaken)])
                end
                % Pre-sized pointer: EE_DataGet needs to know how big it is.
                channelDataPtr = libpointer('doublePtr',zeros(1,nSamplesTaken));
                % Push old values toward the end; leave room for the new
                % samples at the front.
                self.tempData = [zeros(nSamplesTaken,self.channelCount) ...
                    ;self.data(1:self.dataRowCount-nSamplesTaken,:)];
                % Fetch every channel's samples for this read.
                for i = 1:length(fieldnames(self.EE_DataChannels_enum))
                    calllib('edk','EE_DataGet',self.hData, self.DataChannels.([self.DataChannelsNames{i}]), channelDataPtr, uint32(nSamplesTaken));
                    channelData = get(channelDataPtr,'value');
                    self.tempData(1:nSamplesTaken,i) = channelData;
                    if (nSamplesTaken~=length(channelData))
                        warning('The nSamplesTaken should == length(channelData), Ask Gavin'); %#ok<WNTAG>
                    end
                end
                % Publish only once the whole read has been merged, so the
                % data property is always externally consistent.
                self.data = self.tempData;
            end
        end

        %% runSMILE
        function [] = runSMILE(self, mode)
            %runSMILE  Main loop: repeatedly record 10 s of EEG, analyze it,
            %and (depending on mode) learn from or predict the user's emotion.
            % mode: 'learn' | 'test' | 'run' | 'demo'
            mode = lower(mode);
            validatestring(mode, {'learn', 'test', 'run', 'demo'}, mfilename, 'mode', 1)
            % Optionally import previously saved learning data.
            icanhaz = self.askChoice('Would you like to import previous learning data? (y/n): ', ...
                {'y', 'n'}, 'Please answer y or n.');
            if icanhaz == 'y'
                [filename, pathname] = uigetfile('*.xlsx', 'Pick a previous data file');
                [oldData, oldResponses] = xlsread(fullfile(pathname, filename));
                % The bands always span 8-30 Hz (22 Hz), so the band width
                % can be recovered from the spreadsheet's column count.
                self.bandSize = 22 / size(oldData, 2);
                self.ctree = ClassificationTree.fit(oldData, oldResponses);
                self.resetClassifyData();
                % Re-populate the per-band lists from the imported sheet.
                for i = 1:numel(oldResponses)
                    index = 0;
                    self.responses.add(oldResponses{i});
                    for j = 8 : self.bandSize : 30 - self.bandSize
                        index = index + 1;
                        fRange = sprintf('%.1f - %.1f Hz', j, j + self.bandSize);
                        self.classifyData.get(fRange).add(oldData(i, index));
                    end
                end
            else
                self.resetClassifyData();
            end
            % ----- execution loop -----
            running = true;
            figure(1), hold on
            while running
                lastFilename = self.Record(10);
                running = self.analyzeData(lastFilename, mode);
            end
            hold off
            % FIX: the original used strcmp(mode, {...}) directly in the if,
            % which is always false for a scalar mode; any() is the intent.
            if any(strcmp(mode, {'learn', 'test'}))
                % Persist the collected data to an excel file and rebuild
                % the classification tree from it.
                bandStartsVec = 8 : self.bandSize : 30 - self.bandSize;
                measures = self.responses.size();
                respCell = cell(measures, 1);
                dataCell = cell(measures, numel(bandStartsVec));
                for i = 1:measures
                    respCell{i} = self.responses.get(i - 1); % java lists are 0-based
                    for index = 1:numel(bandStartsVec)
                        j = bandStartsVec(index);
                        fRange = sprintf('%.1f - %.1f Hz', j, j + self.bandSize);
                        dataCell{i, index} = self.classifyData.get(fRange).get(i - 1);
                    end
                end
                fullCell = [respCell, dataCell];
                [filename, pathname] = uiputfile('*.xlsx', 'Save decision tree data to an excel file');
                filepath = fullfile(pathname, filename);
                if exist(filepath,'file')
                    delete(filepath);
                end
                xlswrite(filepath, fullCell);
                % Create the new classification tree. FIX: predictors are
                % indexed by a counter ({i-7} failed for non-integer bandSize).
                numData = cell2mat(dataCell);
                predictors = cell(numel(bandStartsVec), 1);
                for index = 1:numel(bandStartsVec)
                    predictors{index} = sprintf('%d Hz', bandStartsVec(index));
                end
                self.ctree = ClassificationTree.fit(numData, respCell, 'PredictorNames', predictors);
                self.viewTree();
            end
        end

        %% analyzeData
        function [running] = analyzeData(self, lastFilename, mode)
            %analyzeData  FFT the last recording, compute left/right relative
            %quantities in the alpha (8-12 Hz) and beta (12-30 Hz) bands, and
            %either learn from user feedback or predict the user's emotion.
            % Returns false when the user asks to quit, true otherwise.
            %
            % FIX: default assigned up front — the original only set
            % 'running' in the learn/test branch, so other modes errored
            % with an unassigned output argument.
            running = true;
            prevData = load(lastFilename);
            % Channel column indexes:
            % F7 = 5 | F3 = 6 | O1 = 10 | F4 = 14 | AF4 = 17
            left = 6;
            right = 5;
            center = 10;
            % Time axis for the 10 s recording.
            samples = size(prevData.recordData, 1);
            t = 10/samples : 10/samples : 10;
            chanL = prevData.recordData(:, left);
            chanR = prevData.recordData(:, right);
            chanC = prevData.recordData(:, center);
            % Frequency axis covering 0 .. sampFreq/2.
            len = size(chanL, 1);            % length of signal
            next2 = 2^nextpow2(len);         % next power of 2 from length
            f = self.sampFreq / 2 * linspace(0, 1, next2 / 2 + 1)';
            % NOTE(review): these exact == comparisons assume the band edges
            % fall exactly on FFT bins (holds for power-of-two next2 with
            % sampFreq = 128) — confirm if either changes.
            a = find(f == 8);
            ab = find(f == 12);
            b = find(f == 30);
            % Magnitude spectra (dB) for each channel of interest.
            fftL = fft(chanL, next2) / len;
            fftR = fft(chanR, next2) / len;
            fftC = fft(chanC, next2) / len;
            magL = 10*log10(abs(fftL(1 : next2 / 2 + 1)));
            magR = 10*log10(abs(fftR(1 : next2 / 2 + 1)));
            magC = 10*log10(abs(fftC(1 : next2 / 2 + 1)));
            % Relative quantities; L and R are referenced to C first.
            alphaRQ = (magL(a:ab) - magC(a:ab)) ./ (magR(a:ab) - magC(a:ab));
            betaRQ = (magL(ab:b) - magC(ab:b)) ./ (magR(ab:b) - magC(ab:b));
            RQ = [alphaRQ; betaRQ];
            % Raw data on the left, relative magnitude spectrum on the right.
            subplot(1, 2, 1), plot(t, chanL, t, chanR), legend('Left', 'Right')
            subplot(1, 2, 2), plot(f(a:ab), alphaRQ, f(ab:b), betaRQ), legend('Alpha', 'Beta')
            % Coarse valence/arousal estimate from beta/alpha power ratios.
            alphaSumL = sum(magL(a:ab));
            alphaSumR = sum(magR(a:ab));
            betaSumL = sum(magL(ab:b));
            betaSumR = sum(magR(ab:b));
            arNew = betaSumL / alphaSumL;
            valNew = betaSumR / alphaSumR;
            if (valNew >= 4.5)
                if (arNew >= 4.5)
                    emotion = 'excited/happy';
                else
                    emotion = 'angry/afraid';
                end
            else
                if (arNew >= 4.5)
                    emotion = 'calm/content';
                else
                    emotion = 'sad/depressed';
                end
            end
            fprintf('Valence and arousal analysis: %s\n', emotion)
            % % Plot emotional state
            % img = imread('emotion.png');
            % subplot(1,3,3), hold on
            % imagesc([0 10],[0 10], flipdim(img,1));
            % plot(arNew, valNew, 'bo', 'MarkerSize', 12);
            % set(gca, 'ydir', 'normal');
            % axis([0 10 0 10]);
            % hline = refline([0 5]);
            % set(hline,'Color','r')
            % Machine learning with a classification-based decision tree.
            % In learn mode the user is probed for their emotional state;
            % otherwise the program predicts it. User input accumulates in
            % the data fields and is written out when runSMILE ends.
            feelPrompt = 'Was your emotion more positive, negative, or indifferent? (p/n/i/q=quit): ';
            if strcmp(mode, 'learn') % Learning mode: collect data
                self.audio.question.play()
                feels = self.askChoice(feelPrompt, {'p', 'n', 'i', 'q'}, 'Please answer p, n, i, or q.');
            else % Make a prediction from the per-band averages
                % FIX: preallocate the full band count (was one short).
                bandAvg = zeros(1, 22 / self.bandSize);
                index = 0;
                for i = 8 : self.bandSize : 30 - self.bandSize
                    low = find(f == i) - a + 1;
                    high = find(f == i + self.bandSize) - a; % band upper edge
                    index = index + 1;
                    bandAvg(index) = mean(RQ(low : high));
                end
                prediction = predict(self.ctree, bandAvg);
            end
            if any(strcmp(mode, {'test', 'run'})) % Don't display prediction during demo
                self.audio.(prediction{:}).play()
                fprintf('Prediction: you are feeling more %s right now\n', prediction{:});
            end
            if strcmp(mode, 'demo')
                self.demoResults.add(prediction);
            end
            if strcmp(mode, 'test') % Get user feedback in testing mode
                % Note: feedback does not affect predictions immediately;
                % runSMILE must be rerun for it to take effect.
                self.audio.verify.play();
                vfeels = self.askChoice('Is this prediction correct? (y/n/q=quit): ', ...
                    {'y', 'n', 'q'}, 'Please answer y, n, or q.');
                if vfeels == 'y'
                    % Confirmed: record the predicted class as the response.
                    if strcmp(prediction, 'positive')
                        feels = 'p';
                    elseif strcmp(prediction, 'negative')
                        feels = 'n';
                    else
                        feels = 'i';
                    end
                elseif vfeels == 'n'
                    % FIX: the original line was truncated ('self.audio.quest').
                    self.audio.question.play();
                    feels = self.askChoice(feelPrompt, {'p', 'n', 'i', 'q'}, 'Please answer p, n, i, or q.');
                else
                    feels = 'q';
                end
            end
            if any(strcmp(mode, {'learn', 'test'})) % Classify data
                % Averages each bandSize-wide frequency band in the alpha
                % and beta ranges; finer resolution is possible.
                if feels == 'q'
                    % FIX: skip the band loop on quit so no orphan data row
                    % is added without a matching response.
                    running = false;
                else
                    if feels == 'p'
                        self.responses.add('positive');
                    elseif feels == 'n'
                        self.responses.add('negative');
                    else
                        self.responses.add('indifferent');
                    end
                    for i = 8 : self.bandSize : 30 - self.bandSize
                        low = find(f == i) - a + 1;
                        high = find(f == i + self.bandSize) - a;
                        bandAvg = mean(RQ(low : high));
                        fRange = sprintf('%.1f - %.1f Hz', i, i + self.bandSize);
                        self.classifyData.get(fRange).add(bandAvg);
                    end
                end
            end
        end

        %% viewTree
        function [] = viewTree(self)
            %viewTree  Simple graphical view of the current classification tree.
            view(self.ctree, 'mode', 'graph')
        end
    end

    methods (Access = private)
        function answer = askChoice(self, prompt, options, reminder) %#ok<INUSL>
            %askChoice  Prompt on the command line until the lower-cased
            %answer matches one of the allowed option strings.
            answer = lower(input(prompt, 's'));
            while ~any(strcmp(answer, options))
                disp(reminder);
                answer = lower(input(prompt, 's'));
            end
        end

        function resetClassifyData(self)
            %resetClassifyData  Clear stored responses and re-create one
            %empty list per frequency band (8-30 Hz in bandSize steps).
            self.responses.clear();
            self.classifyData.clear();
            for lo = 8 : self.bandSize : 30 - self.bandSize
                fRange = sprintf('%.1f - %.1f Hz', lo, lo + self.bandSize);
                self.classifyData.put(fRange, java.util.ArrayList);
            end
        end
    end
end