File: VREyetrackingTest.m

function VREyetrackingTest(stereoscopic, render3D, testEye, modulateBackgroundBrightness, text)
% VREyetrackingTest([stereoscopic=0][, render3D=0][, testEye=0][, modulateBackgroundBrightness=0][, text])
%
% A test for eye gaze tracking in VR HMDs via methods supported by PsychVRHMD.
% Tested with the HTC Vive Pro Eye binocular eyetracker.
%
% Parameters:
%
% 'stereoscopic' if set to 1, configures the HMD as a stereoscopic display.
% A default setting of 0 configures it as a monoscopic display with both eyes
% seeing the same stimulus.
%
% 'render3D' if set to 1, uses OpenGL 3D rendering of the target with
% perspective projection, using the driver-provided MODELVIEW and
% PROJECTION matrices.
%
% 'testEye' selects which eye gaze source to test and plot in more detail.
% Defaults to 0, ie. no specific eye is singled out for detailed testing.
% Choose 1 for the left eye in a binocular setup, or the mono eye in a
% monocular setup. Choose 2 for the right eye, or 3 for the cyclops eye,
% in a binocular setup.
%
% 'modulateBackgroundBrightness' if set to a value greater than zero, changes
% the brightness of the background over time, to see if that affects gaze
% tracking, and, for suitable gaze trackers, if it correlates with the
% measured/reported pupil diameter.
%
% 'text' An optional text string to display on the screen.
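%
% A typical invocation, testing the left eye gaze in a stereoscopic 2D setup,
% could look like this (a usage sketch, assuming a supported eyetracking
% capable HMD is connected and set up):
%
%   VREyetrackingTest(1, 0, 1);
%
% Or, for a 3D rendered target and the cyclops eye gaze sample:
%
%   VREyetrackingTest([], 1, 3);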

% History:
% 31-May-2023  mk Written, derived from VRHMDDemo.

global gazeX; %#ok<*GVMIS>
global gazeY;
global tx;
global ty;

% Setup unified keymapping and unit color range:
PsychDefaultSetup(2);

close all;

if nargin < 1 || isempty(stereoscopic)
    stereoscopic = 0;
end

if nargin < 2 || isempty(render3D)
    render3D = 0;
end

if nargin < 3 || isempty(testEye)
    testEye = 0;
end

if nargin < 4 || isempty(modulateBackgroundBrightness)
    modulateBackgroundBrightness = 0;
end

if nargin < 5
    text = '';
end

% Needed for 3D rendering, if any:
InitializeMatlabOpenGL;

% Select screen with highest id as debug output display:
screenid = max(Screen('Screens'));

% Prepare configuration for opening our fullscreen onscreen window:
PsychImaging('PrepareConfiguration');

% Build final task requirements:
basicReqs = 'Use2DViewsWhen3DStopped NoTimingSupport TimestampingSupport Eyetracking';

if render3D
    hmd = PsychVRHMD('AutoSetupHMD', 'Tracked3DVR', basicReqs);
    stereoscopic = 1;
else
    if ~stereoscopic
        % Setup the HMD to act as a regular "monoscopic" display monitor
        % by displaying the same image to both eyes:
        hmd = PsychVRHMD('AutoSetupHMD', 'Monoscopic', basicReqs);
    else
        % Setup for stereoscopic presentation:
        hmd = PsychVRHMD('AutoSetupHMD', 'Stereoscopic', basicReqs);
    end
end

if isempty(hmd)
    fprintf('No support for VR, so can not run this test. Bye.\n');
    return;
end

% Background color is 50% gray:
bgColor = 0.5;

[win, rect] = PsychImaging('OpenWindow', screenid, bgColor);
hmdinfo = PsychVRHMD('GetInfo', hmd);

% Abort the test if the setup does not support eye gaze tracking:
if ~hmdinfo.eyeTrackingSupported
    fprintf('No support for eyetracking in XR device, so can not run this test. Bye.\n');
    sca;
    return;
end

Screen('TextStyle', win, 1);
Screen('TextSize', win, 100);

tOnset = [];
tSample = [];
tNow = [];
gazeX = [];
gazeY = [];
gazeOpen = [];
gazePupil = [];
gazeSensor = [];
gazeDir = [];
tx = [];
ty = [];
cnt = 1;

[cx, cy] = RectCenter(rect);
radius = 200;

ipd = 0.065; % Interpupillary distance in meters, used by the optional View2DParameters calls below.
%PsychVRHMD('View2DParameters', hmd, 0, [-ipd/2, 0.040, -1]);
%PsychVRHMD('View2DParameters', hmd, 1, [+ipd/2, 0.040, -1]);

% Retrieve per-eye camera projection matrices for optimal rendering on the HMD:
[projMatrix{1}, projMatrix{2}] = PsychVRHMD('GetStaticRenderParameters', hmd);

while ~KbCheck
    % Get current eyetracking state and possibly head tracking (in render3D case):
    state = PsychVRHMD('PrepareRender', hmd, [], render3D + 4);
    tNow(cnt) = GetSecs; %#ok<*AGROW>

    % Query and store latest gaze samples:
    for i = 1:length(state.gazeStatus)
        % fprintf('Eye %i: status %i pos = %i %i\n', i, state.gazeStatus(i), state.gazePos{i}(1), state.gazePos{i}(2));
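        % Only keep gaze samples with gazeStatus >= 3, ie. samples reported as
        % both tracked and valid. Otherwise store NaN positions below, so
        % invalid samples are excluded from the plots: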
        if state.gazeStatus(i) >= 3
            [gazeX(i, cnt), gazeY(i, cnt), gazeOpen(i, cnt), gazePupil(i, cnt)] = deal(state.gazePos{i}(1), state.gazePos{i}(2), state.gazeEyeOpening(i), state.gazeEyePupilDiameter(i));
            gazeDir(i, cnt) = state.gazeRayLocal{i}.gazeD(1);
        else
            [gazeX(i, cnt), gazeY(i, cnt), gazeOpen(i, cnt), gazePupil(i, cnt)] = deal(NaN, NaN, state.gazeEyeOpening(i), state.gazeEyePupilDiameter(i));
            gazeDir(i, cnt) = NaN;
        end
    end

    if isfield(state.gazeRaw(1), 'sensor2D')
        gazeSensor(1, cnt) = state.gazeRaw(1).sensor2D(1);
        gazeSensor(2, cnt) = state.gazeRaw(1).sensor2D(2);
        gazeSensor(3, cnt) = state.gazeRaw(2).sensor2D(1);
        gazeSensor(4, cnt) = state.gazeRaw(2).sensor2D(2);
    end

    % If the gaze sample time is in the past, ie. it is a previously measured
    % gaze position, then take a screenshot of the fixation ROI of the formerly
    % presented stimulus image here, before rendering a new future
    % to-be-presented stimulus image:
    if testEye > 0 && ~isnan(gazeX(testEye, cnt)) && state.gazeTime(1) <= tNow(cnt)
        if testEye ~= 2
            Screen('SelectStereoDrawBuffer', win, 0);
        else
            Screen('SelectStereoDrawBuffer', win, 1);
        end

        roi = ClipRect(CenterRectOnPointd([0 0 150 150], gazeX(testEye, cnt), gazeY(testEye, cnt)), rect);
        if ~IsEmptyRect(roi)
            gazeImage{cnt} = Screen('GetImage', win, roi, 'drawBuffer');
        end
    end

    if render3D
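        % In the 3D rendering case, let the target circle around the x/y origin
        % with a radius of 1 unit at a fixed depth (zplane = -3 below), instead
        % of around the display center in pixel coordinates: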
        cx = 0;
        cy = 0;
        radius = 1;
    end

    % Move the target along a circle, completing one revolution every 10 seconds:
    angle = mod(GetSecs, 10) * 36;
    tx(cnt) = cx + radius * cos(angle / 360 * 2 * pi);
    ty(cnt) = cy + radius * sin(angle / 360 * 2 * pi);

    % Render one view for each eye in stereoscopic mode, one common view in mono:
    for eye = 0:stereoscopic
        Screen('SelectStereoDrawBuffer', win, eye);

        if modulateBackgroundBrightness
            Screen('FillRect', win, mod(GetSecs / 10, 1) * modulateBackgroundBrightness);
        else
            Screen('FillRect', win, bgColor);
        end

        if render3D
            Screen('BeginOpenGL', win, 1);

            glMatrixMode(GL.PROJECTION);
            glLoadMatrixd(projMatrix{eye+1});

            % Setup camera position and orientation for this eye's view:
            glMatrixMode(GL.MODELVIEW);
            modelView = state.modelView{eye + 1};
            %modelView = diag([1 1 1 1]);
            glLoadMatrixd(modelView);
            zplane = -3;
            glPointSize(10);
            glBegin(GL.POINTS);
            glColor3dv([mod(GetSecs, 1) 0 0]);
            glVertex3d(cx, cy, zplane);
            glColor3dv([0 1 1]);
            glVertex3d(tx(cnt), ty(cnt), zplane);
            glEnd;

            Screen('EndOpenGL', win);
        else
            Screen('FillOval', win, [mod(GetSecs, 1) 0 0], CenterRect([0 0 3 3], rect));
            Screen('DrawDots', win, [tx(cnt) ; ty(cnt)], 10, [0,1,1], [], 1);
        end

        if ~isempty(text)
            DrawFormattedText(win, text, 'center', 'center', [1, 0, 0]);
        end
    end

    % If the gaze sample time is in the future, ie. it is a predicted gaze
    % position, then take the screenshot of the fixation ROI here, after
    % rendering the future to-be-presented stimulus image:
    if testEye > 0 && ~isnan(gazeX(testEye, cnt)) && state.gazeTime(1) > tNow(cnt)
        if testEye ~= 2
            Screen('SelectStereoDrawBuffer', win, 0);
        else
            Screen('SelectStereoDrawBuffer', win, 1);
        end

        roi = ClipRect(CenterRectOnPointd([0 0 150 150], gazeX(testEye, cnt), gazeY(testEye, cnt)), rect);
        if ~IsEmptyRect(roi)
            gazeImage{cnt} = Screen('GetImage', win, roi, 'drawBuffer');
        end
    end

    % Display new image asap, but don't clear draw buffer after flip, so we
    % can screenshot it in the next iteration:
    tOnset(cnt) = Screen('Flip', win, [], 1);
    tSample(cnt) = state.gazeTime(1);
    cnt = cnt + 1;
end

% Close XR display device:
sca;

% Report the average achieved framerate, computed from the flip onset timestamps:
fps = 1 / mean(diff(tOnset)) %#ok<NOPRT>

% Data analysis and visualization:
if 1
    figure;
    plot(1:length(tOnset), [tOnset ; tNow; tSample ; tOnset - tSample ; tNow - tSample]);
    legend('StimOnset', 'NowTime', 'GazeTime', 'StimOnset vs GazeTime', 'Now vs GazeTime');
    title('Absolute timestamps and relative timestamp delta for stim onset vs. gaze sample time');

    figure;
    hold on;
    for i = 1:size(gazeX, 1)
        plot(1:length(tOnset), [gazeOpen(i, :); gazePupil(i, :)]);
    end
    hold off;
    title('Top: Pupil size (mm), Bottom: Eye opening (normalized)');
end

figure
hold on;
for i = 1:size(gazeX, 1)
    scatter(gazeX(i,:), gazeY(i,:));
    idxs = find(~isnan(gazeX(i,:)));
    avgpos{i} = round(mean([gazeX(i,idxs); gazeY(i,idxs)]')); %#ok<UDIM>
end
scatter(tx, ty);
scatter(cx, cy);

if size(gazeX, 1) == 3
    legend('Lefteye', 'Righteye', 'Cyclopseye', 'Target', 'Fixation');
elseif size(gazeX, 1) == 2
    legend('Lefteye', 'Righteye', 'Target', 'Fixation');
else
    legend('Cyclopseye', 'Target', 'Fixation');
end
hold off;
title('2D target/fix position vs. left/right/cyclops tracked gaze');

if 0
    figure; %#ok<UNRCH>
    KbReleaseWait;
    for i=1:2:size(gazeX, 2)
        hold on;
        scatter(gazeX(testEye,1:i), gazeY(testEye,1:i));
        scatter(tx(1:i), ty(1:i));
        hold off;
        drawnow;
        if KbCheck
            break;
        end
    end
end

if testEye > 0 && exist('gazeImage', 'var')
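    % Replay the captured gaze-ROI screenshots in a small debug window, to allow
    % a visual check of how well the tracked gaze followed the moving target: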
    oldskip = Screen('Preference', 'SkipSyncTests', 2);
    [wfig, figrect] = Screen('OpenWindow', 0, [], [0 0 155, 155]);
    Screen('Preference', 'SkipSyncTests', oldskip);
    KbReleaseWait;
    vbl = Screen('Flip', wfig);
    for i=1:length(gazeImage)
        if numel(gazeImage{i}) > 0
            tex = Screen('MakeTexture', wfig, gazeImage{i});
            Screen('DrawTexture', wfig, tex);
            Screen('FillOval', wfig, 0, CenterRect([0 0 5 5], figrect));
            Screen('Close', tex);
            vbl = Screen('Flip', wfig, vbl + 1 / fps);
        end
        if KbCheck
            break;
        end
    end

    if ~render3D
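        % Plot gaze position error vs. the moving target. Only done for the 2D
        % rendering path, where target and gaze positions share the same pixel
        % coordinate system: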
        figure;
        idxs = find(~isnan(gazeX(testEye,:)));
        idxs = idxs(2:end);
        dx = gazeX(testEye, idxs) - tx(idxs-1);
        dy = gazeY(testEye, idxs) - ty(idxs-1);
        dd = sqrt(dx.^2 + dy.^2);
        plot(idxs, [dx ; dy ; dd]);
        legend('x-error', 'y-error', 'abs error');
        title('Error gazepos vs. moving target in pixels');
    end
end

if ~isempty(gazeSensor)
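    % Plot the raw normalized 2D sensor coordinates of the pupil for the left
    % and right eye, if the gaze tracker provides them: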
    figure;
    plot(1:length(gazeSensor), [gazeSensor(1,:); gazeSensor(2,:); gazeSensor(3,:); gazeSensor(4,:)]);
    legend('x-SensorL', 'y-SensorL','x-SensorR', 'y-SensorR');
    title('Normalized sensor coordinates of pupil:')
end

if ~isempty(gazeDir)
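    % Plot the first component of the per-eye local gaze direction vectors
    % (gazeRayLocal{i}.gazeD), as stored in gazeDir above: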
    figure;
    if size(gazeDir, 1) == 3
        plot(1:length(gazeDir), [gazeDir(1,:); gazeDir(2,:); gazeDir(3,:)]);
        legend('L', 'R','C');
    else
        plot(gazeDir(1,:));
        legend('Cyclops/Mono');
    end
    title('gazeRayLocal component:');
end

sca;

% Display per-eye mean gaze positions:
disp(avgpos);