forked from treder/MVPA-Light
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathexample1_train_and_test.m
More file actions
82 lines (67 loc) · 2.76 KB
/
example1_train_and_test.m
File metadata and controls
82 lines (67 loc) · 2.76 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
%%% Manually train and test a classifier, i.e. without using the
%%% cross-validation and classification-across-time functions that
%%% MVPA-Light provides.
% Load the example dataset (located in the /examples folder)
[dat, clabel] = load_example_data('epoched3');
%% Inspect the data first: compute and plot the ERP for attended and unattended deviants
% Average across trials within each class to obtain the per-condition ERP
erp_attended   = squeeze(mean(dat.trial(clabel == 1,:,:)));
erp_unattended = squeeze(mean(dat.trial(clabel == 2,:,:)));
% Plot the ERPs: attended deviants in red, unattended deviants in blue.
% Every line corresponds to one EEG channel.
close
h1 = plot(dat.time, erp_attended, 'r');
hold on
h2 = plot(dat.time, erp_unattended, 'b');
grid on
xlabel('Time [s]')
ylabel('EEG amplitude')
title('ERP')
legend([h1(1), h2(1)], {'Attended', 'Unattended'})
%% Train and test classifier
% The ERP suggests the classes are well-separated in the 0.6-0.8 s
% interval, so we classify on that window: locate the samples that fall
% into the interval, average the activity across time within it, and use
% the averaged activity as features for classification.
ival_idx = find((dat.time >= 0.6) & (dat.time <= 0.8));
% Features: mean activity in the interval (one value per channel)
X = squeeze(mean(dat.trial(:,:,ival_idx), 3));
% Fetch the default hyperparameters for the LDA classifier
param = mv_get_classifier_param('lda');
% Also request class probabilities (prob output) for every sample.
% Leave this off when probabilities are not explicitly needed, since it
% slows down the computation slightly.
param.prob = 1;
% Fit the LDA classifier
cf = train_lda(param, X, clabel);
% Apply the classifier to the very same data. Outputs: the predicted
% labels (predlabel), the decision values (dval) representing the
% distance to the hyperplane, and the probability that a sample belongs
% to class 1 (prob).
[predlabel, dval, prob] = test_lda(cf, X);
% Classification accuracy = fraction of predicted labels that match the
% true labels
fprintf('Classification accuracy: %2.2f\n', mean(predlabel==clabel))
% Compute the area under the ROC curve from the decision values
auc = mv_calculate_performance('auc', 'dval', dval, clabel);
% Examine how the decision values are distributed: dvals should be
% positive for clabel 1 (attended deviant) and negative for clabel 2
% (unattended deviant); dval = 0 marks the decision boundary.
figure(2)
clf
boxplot(dval, clabel)
hold on
plot(xlim, [0 0], 'k--')
ylabel('Decision values')
xlabel('Class')
title('Distribution of decision values')
% Examine how the probabilities are distributed: prob should be larger
% for clabel 1 (attended deviant) than for clabel 2 (unattended
% deviant).
figure(3)
clf
boxplot(prob, clabel)
hold on
plot(xlim, [0.5 0.5], 'k--')
ylabel('Probability for class 1')
xlabel('Class')
title('Distribution of class probabilities')