
Initial commit

Ioannis Agtzidis 5 years ago
commit
1ad872fc5a

+ 122 - 0
DataReprojDetection.m

@@ -0,0 +1,122 @@
+% DataReprojDetection.m
+%
+% This function detects eye movements by reprojecting 360-degree
+% equirectangular data and calling another eye movement detection function. It
+% works by splitting the equirectangular input data into intervals where the
+% vertical spread is no more than 45 degrees. It then reprojects them around
+% the equatorial line of the sphere and creates a new ARFF that is equivalent
+% to a monitor-recorded experiment. Finally, we can call the provided
+% monitor-designed algorithm with the new ARFF object as input. The input data
+% should have the relation "gaze_360" to mark that they were recorded in
+% 360-degree equirectangular format.
+%
+% The eye movement detection function is provided as a string in the input
+% arguments. This function should have at least 3 input variables, namely
+% data, metadata, and attributes as loaded from the LoadArff function. If the
+% provided detection function requires more input than the 3 default arguments,
+% these can be provided as extra arguments in the argument list of the
+% current function. The extra arguments are placed in the provided order after
+% the 3 default arguments in the detection function. The output of the
+% detection function should be a vector with a unique integer value for each
+% detected eye movement. These should correspond to the provided attValues
+% input argument as in the case of an enumeration.
+%
+% input:
+%   arffFile    - file to process
+%   outFile     - file to store results
+%   outputAtt   - name of the attribute in the output ARFF
+%   attValues   - nominal values of the added attribute. They are a string in the
+%                 form '{unassigned, fixation, saccade, sp, noise}'
+%   detFuncName - detection function name
+%   varargin    - required extra arguments for calling the detection function.
+%                 The data, metadata, attributes are passed to the detection
+%                 function by default in this order followed by the varargin
+%                 arguments
+
+function DataReprojDetection(arffFile, outFile, outputAtt, attValues, detFuncName, varargin)
+    DetectionFunction = str2func(detFuncName);
+    c_maxVertDiff = 45 * pi / 180;
+    [data, metadata, attributes, relation, comments] = LoadArff(arffFile);
+    xInd = GetAttPositionArff(attributes, 'x');
+    yInd = GetAttPositionArff(attributes, 'y');
+
+    assert(strcmp(relation, 'gaze_360'), 'Input data should be from 360-degree recordings');
+
+    % create metadata representing a monitor experiment
+    metaMonitor = metadata;
+    % ppd for 360 experiment
+    ppdx = metadata.width_px / 360;
+    ppdy = metadata.height_px / 180;
+    metaMonitor.distance_mm = 800; % this stays fixed
+    metaMonitor.width_mm = ppd2distance(ppdx, metaMonitor.width_px, metaMonitor.distance_mm);
+    metaMonitor.height_mm = ppd2distance(ppdy, metaMonitor.height_px, metaMonitor.distance_mm);
+
+    iniData = data;
+    RemoveChanges();
+
+    [~, eyeHeadVec] = GetCartVectors(data, metadata, attributes);
+
+    labelledAtt = zeros(size(data,1),1);
+
+    ints = GetIntervals(eyeHeadVec);
+    for ind=1:size(ints,1)
+        intData = data(ints(ind,1):ints(ind,2),:);
+        coords = Project3dVectors(eyeHeadVec(ints(ind,1):ints(ind,2),:), metadata);
+        % change x,y to the projected data
+        intData(:,xInd) = coords(:,1);
+        intData(:,yInd) = coords(:,2);
+
+        if (isempty(varargin))
+            labelledAtt(ints(ind,1):ints(ind,2)) = DetectionFunction(intData, metaMonitor, attributes);
+        else
+            labelledAtt(ints(ind,1):ints(ind,2)) = DetectionFunction(intData, metaMonitor, attributes, varargin{:});
+        end
+    end
+
+    [newData, newAttributes] = AddAttArff(iniData, attributes, labelledAtt, outputAtt, attValues);
+    SaveArff(outFile, newData, metadata, newAttributes, relation, comments);
+
+    function RemoveChanges()
+        confInd = GetAttPositionArff(attributes, 'confidence');
+        c_minConf = 0.75;
+        for i=2:size(data,1)
+            if (data(i,confInd) < c_minConf)
+                data(i, xInd) = data(i-1, xInd);
+                data(i, yInd) = data(i-1, yInd);
+            end
+        end
+    end
+
+
+    % This function returns intervals, which have all the samples within the maximum range
+    % specified by c_maxVertDiff
+    function [l_ints] = GetIntervals(vectors)
+        l_ints = zeros(0,2);
+
+        startInd = 1;
+        [~, minVert] = CartToSpherical(vectors(startInd,:));
+        maxVert = minVert;
+        for i=1:size(vectors,1)
+            [hor, vert] = CartToSpherical(vectors(i,:));
+
+            if (vert < minVert)
+                minVert = vert;
+            end
+            if (vert > maxVert)
+                maxVert = vert;
+            end
+
+            if (maxVert - minVert > c_maxVertDiff)
+                l_ints = [l_ints; startInd i];
+                startInd = i+1;
+                if (startInd <= size(vectors,1))
+                    [~, minVert] = CartToSpherical(vectors(startInd,:));
+                    maxVert = minVert;
+                end
+            end
+        end
+        if (startInd <= size(vectors,1))
+            l_ints = [l_ints; startInd size(vectors,1)];
+        end
+    end
+end
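A minimal usage sketch for `DataReprojDetection` (not part of the commit): it assumes a hypothetical 360-degree recording `recording.arff` and a hypothetical monitor-based detector `DetectSaccadesMonitor(data, metadata, attributes, velThreshold)` that returns 0/1 per sample; substitute the detector you actually want to run.

```matlab
% Hypothetical example; 'recording.arff' and DetectSaccadesMonitor are placeholders.
inFile       = 'recording.arff';            % 360-degree recording with relation gaze_360
outFile      = 'recording_labelled.arff';   % output with the new attribute appended
velThreshold = 100;                         % extra argument forwarded through varargin

DataReprojDetection(inFile, outFile, 'saccade_reproj', '{unassigned,saccade}', ...
                    'DetectSaccadesMonitor', velThreshold);
```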

+ 47 - 0
DataReprojFovDetection.m

@@ -0,0 +1,47 @@
+% DataReprojFovDetection.m
+%
+% This function detects eye movements by reprojecting the 360-degree
+% equirectangular data into the field-of-view (FOV) coordinates of the headset.
+% By doing this we disentangle the head from the eye motion. On the converted data
+% we can then call another eye movement detection function. The input data should
+% have the relation "gaze_360" to mark that they were recorded in 360-degree
+% equirectangular format.
+%
+% The eye movement detection function is provided as a string in the input
+% arguments. This function should have at least 3 input variables, namely
+% data, metadata, and attributes as loaded from the LoadArff function. If the
+% provided detection function requires more input than the 3 default arguments,
+% these can be provided as extra arguments in the argument list of the
+% current function. The extra arguments are placed in the provided order after
+% the 3 default arguments in the detection function. The output of the
+% detection function should be a vector with a unique integer value for each
+% detected eye movement. These should correspond to the provided attValues
+% input argument as in the case of an enumeration.
+%
+% input:
+%   arffFile    - file to process
+%   outFile     - file to store results
+%   outputAtt   - name of the attribute in the output ARFF
+%   attValues   - nominal values of the added attribute. They are a string in the
+%                 form '{unassigned, fixation, saccade, sp, noise}'
+%   detFuncName - detection function name
+%   varargin    - required extra arguments for calling the detection function.
+%                 The data, metadata, attributes are used by default in this
+%                 order followed by the varargin arguments
+
+function DataReprojFovDetection(arffFile, outFile, outputAtt, attValues, detFuncName, varargin)
+    DetectionFunction = str2func(detFuncName);
+    
+    [data, metadata, attributes, relation, comments] = LoadArff(arffFile);
+
+    [fovData, fovMetadata, fovAttributes, fovRelation] = ProjectEquirect2Fov(data, metadata, attributes, relation);
+
+    if (isempty(varargin))
+        labelledAtt = DetectionFunction(fovData, fovMetadata, fovAttributes);
+    else
+        labelledAtt = DetectionFunction(fovData, fovMetadata, fovAttributes, varargin{:});
+    end
+
+    [newData, newAttributes] = AddAttArff(data, attributes, labelledAtt, outputAtt, attValues);
+    SaveArff(outFile, newData, metadata, newAttributes, relation, comments);
+end
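A corresponding sketch for the FOV variant (again not part of the commit, using the same hypothetical detector and placeholder file names as above):

```matlab
% Hypothetical example; the detector receives the FOV-projected data produced by
% ProjectEquirect2Fov instead of the equator-reprojected data.
DataReprojFovDetection('recording.arff', 'recording_fov_labelled.arff', ...
                       'saccade_fov_reproj', '{unassigned,saccade}', ...
                       'DetectSaccadesMonitor', 100);
```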

+ 97 - 0
DetectBlinks360.m

@@ -0,0 +1,97 @@
+% DetectBlinks360.m
+%
+% This function detects blinks by using intervals of noise in the ARFF data as
+% well as saccade detection. For every noise interval it searches in both
+% directions (forward and backward in time) and, if it finds a saccade within a
+% given time distance, it labels the noise and saccade interval as a blink.
+%
+% input:
+%   data        - data from the ARFF file
+%   metadata    - metadata from the ARFF file
+%   attributes  - attributes from the ARFF file
+%   typeOfMotion- 1 -> eye FOV, 2 -> eye+head
+%   params      - parameters to use for saccade detection
+%
+% output:
+%   result      - logical vector with same length as data and true for every sample that is part of a blink
+
+function result = DetectBlinks360(data, metadata, attributes, typeOfMotion, params)
+	% initialize search interval on both sides of the blink in us
+	c_searchRange = 40000;
+    c_minConf = 0.5;
+
+    timeInd = GetAttPositionArff(attributes, 'time');
+    confInd = GetAttPositionArff(attributes, 'confidence');
+    
+    noise = false(size(data,1),1);
+    noise(data(:,confInd) < c_minConf) = 1;
+
+    saccades = DetectSaccades360(data, metadata, attributes, typeOfMotion, params);
+
+
+    % initially, the result contains only the noise samples
+    result = noise;
+
+    % search for noise indices
+    isNoiseActive = 0;
+    startIndex = -1;
+    endIndex = -1;
+    for noiseIndex=1:size(noise,1)
+        if (isNoiseActive == 0 && noise(noiseIndex) == 1)
+            isNoiseActive = 1;
+            startIndex = noiseIndex;
+        end
+
+        if (isNoiseActive == 1 && noise(noiseIndex) == 0)
+            isNoiseActive = 0;
+            endIndex = noiseIndex-1;
+            UpdateResult();
+        end
+    end
+
+    % function UpdateResult:
+    % It searches on both sides of the noise intervals for blinks.
+
+    function UpdateResult()
+        % search backwards
+        searchIndex = startIndex;
+        saccadeFound = false;
+        while (searchIndex > 0)
+            if (data(startIndex,timeInd)-data(searchIndex,timeInd) > c_searchRange && saccadeFound==false)
+                break;
+            end
+
+            if (saccades(searchIndex) && saccadeFound==false)
+                saccadeFound = true;
+            end
+
+            if (~saccades(searchIndex) && saccadeFound==true)
+                result(searchIndex+1:startIndex) = 1;
+                break;
+            end
+        
+            searchIndex = searchIndex-1;
+        end
+
+        % search forward
+        searchIndex = endIndex;
+        saccadeFound = false;
+        while (searchIndex <= size(data,1))
+            if (data(searchIndex,timeInd)-data(endIndex,timeInd) > c_searchRange && saccadeFound==false)
+                break;
+            end
+
+            if (saccades(searchIndex) && saccadeFound==false)
+                saccadeFound = true;
+            end
+
+            if (~saccades(searchIndex) && saccadeFound==true)
+                result(endIndex+1:searchIndex) = 1;
+                break;
+            end
+        
+            searchIndex = searchIndex+1;
+        end
+
+    end
+end
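A hedged example of calling `DetectBlinks360` on preloaded data; the file name is a placeholder and the parameter values mirror the defaults of `DetectSaccades360File` further down in this commit.

```matlab
% Hypothetical example on a preloaded gaze_360 recording.
[data, metadata, attributes] = LoadArff('recording.arff');   % placeholder file

% Saccade detection parameters forwarded internally to DetectSaccades360.
params.tolerance            = 0.1;
params.thresholdOnsetFast   = 137.5;
params.thresholdOnsetSlow   = 17.1875;
params.thresholdOffset      = 17.1875;
params.maxSpeed             = 1031.25;
params.minDuration          = 15000;
params.maxDuration          = 160000;
params.velIntegrationInterv = 4000;
params.minConfidence        = 0.25;

isBlink = DetectBlinks360(data, metadata, attributes, 2, params);   % 2 -> eye+head
```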

+ 50 - 0
DetectBlinks360File.m

@@ -0,0 +1,50 @@
+% function DetectBlinks360File:
+%
+% This function detects blinks from the input file and stores them in the
+% provided attribute of the output file. The values of the attribute in the
+% output file are '{unassigned, blink}'
+%
+%
+% input:
+%   inputfile   - ARFF file containing gaze coordinates
+%   outputfile  - ARFF file to store detected blinks
+%   outputAtt   - attribute that holds detected blinks in the output ARFF file
+%   typeOfMotion- 1 -> eye FOV, 2 -> eye+head
+%   paramfile   - (optional) txt file containing parameters for saccade detection (explanation below)
+%
+% paramfile format:
+% The file is independent of parameter ordering and letter case. Each parameter is followed by
+% an equal sign and then the value. The available parameters are listed below
+%   tolerance=
+%   thresholdOnsetFast=
+%   thresholdOnsetSlow=
+%   thresholdOffset=
+%   maxSpeed=
+%   minDuration=
+%   maxDuration=
+%   velIntegrationInterv=
+%   minConfidence=
+
+function DetectBlinks360File(inputfile, outputFile, outputAtt, typeOfMotion, paramfile)
+    % load gaze coordinates from arff file
+    [data, metadata, attributes, relation, comments] = LoadArff(inputfile);
+    
+    if (nargin < 5)
+        params.tolerance = 0.1;
+        params.thresholdOnsetFast = 137.5;
+        params.thresholdOnsetSlow = 17.1875;
+        params.thresholdOffset = 17.1875;
+        params.maxSpeed = 1031.25;
+        params.minDuration = 15000;
+        params.maxDuration = 160000;
+        params.velIntegrationInterv = 4000;
+        params.minConfidence = 0.25;
+    else
+        params = LoadParams(paramfile);
+    end
+
+    res = DetectBlinks360(data, metadata, attributes, typeOfMotion, params);
+    [data, attributes] = AddAttArff(data, attributes, res, outputAtt, '{unassigned,blink}');
+
+    SaveArff(outputFile, data, metadata, attributes, relation, comments);
+end
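A hedged call of the wrapper above; the ARFF file names are placeholders and `params_saccades_equirect.txt` is the parameter file listed in the README.

```matlab
% Hypothetical example: blink detection on eye+head motion with the shipped parameters.
DetectBlinks360File('recording.arff', 'recording_blinks.arff', 'blinks', 2, ...
                    'params_saccades_equirect.txt');
```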

+ 203 - 0
DetectFixations360.m

@@ -0,0 +1,203 @@
+% DetectFixations360.m
+%
+% This function detects fixations from the provided data. It is based on the
+% fixation detector described in Dorr, Michael, et al. "Variability of eye
+% movements when viewing dynamic natural scenes." Journal of vision 10.10
+% (2010): 28-28.
+%
+% NOTE: It requires that saccades have already been detected.
+%
+% input:
+%   data        - data from the ARFF file
+%   metadata    - metadata from the ARFF file
+%   attributes  - attributes from the ARFF file
+%   saccAtt     - saccade attribute name
+%   saccValue   - integer value representing saccades
+%	typeOfMotion- 1 -> eye FOV, 2 -> eye+head, 3 -> head
+%   params      - parameters to use for fixation detection
+%
+% output:
+%   result      - logical vector with the same length as data in inputfile and true where a fixation is detected
+%
+% params format:
+% params is a data structure with the following fields
+%
+% params.minFixationDur;
+% params.maxDistanceDeg;
+% params.velThresholdDegSec;
+% params.intersaccadicDist;
+% params.intersaccadicLength;
+% params.minConfidence;
+
+function m_result = DetectFixations360(data, metadata, attributes, saccAtt, saccValue, typeOfMotion, params)
+
+    c_minFixationDurUs = params.minFixationDur;
+    c_maxDistanceDeg = params.maxDistanceDeg;
+    c_velThresholdDegSec = params.velThresholdDegSec;
+    c_intersaccadicDist = params.intersaccadicDist;
+    c_intersaccadicLength = params.intersaccadicLength;
+    c_minConf = params.minConfidence;
+
+	timeInd = GetAttPositionArff(attributes, 'time');
+    xInd = GetAttPositionArff(attributes, 'x');
+    yInd = GetAttPositionArff(attributes, 'y');
+    confInd = GetAttPositionArff(attributes, 'confidence');
+
+    % initialize return result
+    m_result = false(size(data,1),1);
+
+    if (size(data,1)<10)
+        return;
+    end
+
+	[eyeFovVec, eyeHeadVec, headVec] = GetCartVectors(data, metadata, attributes);
+    if (typeOfMotion == 1)
+        vecList = eyeFovVec;
+    elseif (typeOfMotion == 2)
+        vecList = eyeHeadVec;
+    elseif (typeOfMotion == 3)
+        vecList = headVec;
+    else
+        error('Unknown motion');
+    end
+
+    % get inter-saccadic intervals and start processing them
+    intersaccInts = GetIntersaccadicIntervals();
+
+    % member  variables
+    m_left = 0; % used in AnnotateFixation and the other local functions
+    m_right = 0;
+
+    % process each interval
+    for intersaccIndex=1:size(intersaccInts,1)
+        AnnotateFixation(intersaccInts(intersaccIndex,1), intersaccInts(intersaccIndex,2));
+    end
+
+    % remove noise from fixations
+    m_result(data(:,confInd) < c_minConf) = 0;
+
+
+    %-----------------------------------------------------------------------------------
+    % local functions
+    %-----------------------------------------------------------------------------------
+
+    % function GetIntersaccadicIntervals:
+    % Get the inter-saccadic intervals for the saccadic attribute of the ARFF file.
+    function [l_intersaccInts] = GetIntersaccadicIntervals()
+        [l_saccIndex] = GetAttPositionArff(attributes, saccAtt);
+
+        l_intersaccInts = zeros(0,2);
+        l_startOfInt = 1;
+        l_isSaccActive = false;
+
+        for l_i=1:size(data,1);
+            % end of fixation interval found
+            % end of the inter-saccadic interval found (a saccade starts here)
+                l_isSaccActive = true;
+                l_intersaccInts = [l_intersaccInts; l_startOfInt l_i-1];
+            end
+
+            if (data(l_i,l_saccIndex) ~= saccValue && l_isSaccActive == true)
+                l_isSaccActive = false;
+                l_startOfInt = l_i;
+            end
+        end
+        
+        % check for last interval
+        if (data(end,l_saccIndex) ~= saccValue)
+            l_intersaccInts = [l_intersaccInts; l_startOfInt size(data,1)];
+        end
+    end
+
+    % function AnnotateFixation:
+    % Processes the samples between the start and end indices.
+
+    function AnnotateFixation(startIndex, endIndex)
+        % if the interval is too long, check its displacement
+        if (endIndex-startIndex > 1 && data(endIndex,timeInd)-data(startIndex,timeInd) > c_intersaccadicLength)
+			l_maxDisp = GetMaxDispersion(vecList(startIndex:endIndex,:));
+
+            % do not process if distance is too big
+            if (l_maxDisp > c_intersaccadicDist)
+                return;
+            end
+        end
+
+        % continue processing
+        m_left = startIndex;
+        m_right = startIndex;
+
+        while (DetermineSearchWindow(startIndex, endIndex))
+            if (IsFixation())
+                ExtendFixationWindow(startIndex, endIndex);
+
+                m_left = m_right;
+            else
+                m_left = m_left+1;
+            end
+        end
+
+
+    end
+
+    % function DetermineSearchWindow:
+    % Move m_right to accommodate the minimum fixation duration. If it can't, return false.
+    function l_result = DetermineSearchWindow(startIndex, endIndex)
+        l_result = true;
+
+        if (m_left > endIndex)
+            l_result = false;
+            return;
+        end
+
+        while (m_right <= endIndex && (data(m_right,timeInd)-data(m_left,timeInd)) < c_minFixationDurUs)
+            m_right = m_right+1;
+        end
+
+        if (m_right > endIndex)
+            l_result = false;
+            return;
+        end
+    end
+
+    % function ExtendFixationWindow:
+    % Extend m_right until we reach the end of the interval or it stops being a fixation.
+    function ExtendFixationWindow(startIndex, endIndex)
+        while (true)
+            m_right = m_right+1;
+            if (m_right > endIndex || IsFixation()==false)
+                break;
+            end
+        end
+
+        % result of main function
+        m_result(m_left:m_right-1) = 1;
+    end
+
+    % function IsFixation:
+    % Determines from m_left, m_right if the interval is a valid fixation.
+    function l_result = IsFixation()
+        l_result = true;
+		% get distance between first and last vector in list
+        l_distance = GetDispersion(vecList(m_left,:), vecList(m_right,:));
+        l_speed = 1000000*l_distance/(data(m_right,timeInd)-data(m_left,timeInd));
+
+        if (l_speed > c_velThresholdDegSec)
+            l_result = false;
+            return;
+        end
+
+		meanVec = sum(vecList(m_left:m_right,:),1) / (m_right - m_left + 1);
+
+        l_duration = data(m_right,timeInd) - data(m_left,timeInd);
+        l_duration = (l_duration - c_minFixationDurUs)/1000 + 1;
+
+        % increase threshold depending on fixation duration
+        l_dispThres = c_maxDistanceDeg*(1+0.05*log2(l_duration));
+
+		l_disp = GetDispersion(meanVec, vecList(m_left:m_right,:));
+		if (l_disp > l_dispThres)
+			l_result = false;
+		end
+    end
+end
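Since `DetectFixations360` requires already-labelled saccades, a hedged two-step sketch follows (file names are placeholders; the fixation parameter values mirror the defaults of `DetectFixations360File` below).

```matlab
% Hypothetical example: label saccades first, then detect fixations in eye+head motion.
[data, metadata, attributes] = LoadArff('recording.arff');     % placeholder file
typeOfMotion = 2;                                              % eye+head

saccParams = LoadParams('params_saccades_equirect.txt');       % shipped parameter file
isSacc = DetectSaccades360(data, metadata, attributes, typeOfMotion, saccParams);
[data, attributes] = AddAttArff(data, attributes, isSacc, 'saccades', '{unassigned,saccade}');

fixParams.minFixationDur      = 100000;
fixParams.maxDistanceDeg      = 0.35;
fixParams.velThresholdDegSec  = 5;
fixParams.intersaccadicDist   = 10.0;
fixParams.intersaccadicLength = 500000;
fixParams.minConfidence       = 0.25;

% saccValue is 1 because AddAttArff stored saccades as '{unassigned,saccade}' (0/1).
isFix = DetectFixations360(data, metadata, attributes, 'saccades', 1, typeOfMotion, fixParams);
```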

+ 47 - 0
DetectFixations360File.m

@@ -0,0 +1,47 @@
+% DetectFixations360File.m
+%
+% This function detects fixations in the input ARFF file.
+% The result is stored as a new attribute in the output file with values of
+% '{unassigned, fixation}'
+%
+% NOTE: It requires that the ARFF file has saccades already detected.
+%
+% input:
+%   inputfile   - ARFF file containing gaze coordinates
+%   outputfile  - ARFF file to store detected fixations
+%   outputAtt   - attribute that holds detected fixations in the output ARFF file 
+%   saccAtt     - saccade attribute name
+%   saccValue   - integer value representing saccades
+%	typeOfMotion- 1 -> eye FOV, 2 -> eye+head, 3 -> head
+%   paramfile   - (optional) txt file containing parameters for fixation detection (explanation below)
+%
+% paramfile format:
+% The file is independent of parameter ordering and letter case. Each parameter is followed by
+% an equal sign and then the value. The available parameters are listed below
+%	minfixationdurus=
+%	maxdistancedeg=
+%	velthresholddegsec=
+%	intersaccadicdist=
+%	intersaccadiclength=
+%   minConfidence=
+
+function DetectFixations360File(inputfile, outputFile, outputAtt, saccAtt, saccValue, typeOfMotion, paramfile)
+	% load gaze coordinates from ARFF file
+    [data, metadata, attributes, relation, comments] = LoadArff(inputfile);
+
+    if (nargin < 7)
+        params.minFixationDur = 100000;
+        params.maxDistanceDeg = 0.35;
+        params.velThresholdDegSec = 5;
+        params.intersaccadicDist = 10.0;
+        params.intersaccadicLength = 500000;
+        params.minConfidence = 0.25;
+    else
+        params = LoadParams(paramfile);
+    end
+
+    res = DetectFixations360(data, metadata, attributes, saccAtt, saccValue, typeOfMotion, params);
+    [data, attributes] = AddAttArff(data, attributes, res, outputAtt, '{unassigned,fixation}');
+
+    SaveArff(outputFile, data, metadata, attributes, relation, comments);
+end
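A hedged call of the wrapper above; the input file name is a placeholder and must already contain a 'saccades' attribute in which the value 1 marks saccade samples.

```matlab
% Hypothetical example with the parameter file listed in the README.
DetectFixations360File('recording_sacc.arff', 'recording_fix.arff', 'fixations', ...
                       'saccades', 1, 2, 'params_fixation.txt');
```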

+ 61 - 0
DetectFixations360IDT.m

@@ -0,0 +1,61 @@
+% DetectFixations360IDT.m
+%
+% This function detects fixations based on the I-DT algorithm of Salvucci,
+% Dario D., and Joseph H. Goldberg. "Identifying fixations and saccades in
+% eye-tracking protocols." Proceedings of the 2000 symposium on Eye
+% tracking research & applications. ACM, 2000.
+%
+% input:
+%   data            - data from the ARFF file
+%   metadata        - metadata from the ARFF file
+%   attributes      - attributes from the ARFF file
+%   typeOfMotion    - 1 -> eye FOV, 2 -> eye+head
+%   dispThres       - dispersion threshold in degrees
+%   windowDur       - window duration in us
+%
+% output:
+%   result          - logical vector with same length as input and true where a 
+%                     fixation was detected
+
+function result = DetectFixations360IDT(data, metadata, attributes, typeOfMotion, dispThres, windowDur)
+    [eyeFovVec, eyeHeadVec] = GetCartVectors(data, metadata, attributes);
+    if (typeOfMotion == 1)
+        vecList = eyeFovVec;
+    elseif (typeOfMotion == 2)
+        vecList = eyeHeadVec;
+    else
+        error('Unknown motion');
+    end
+
+    timeInd = GetAttPositionArff(attributes, 'time');
+    startInd = 1;
+    result = false(size(data,1),1);
+    endInd = FindEndInd();
+    while (endInd > 0)
+        dispersion = GetMaxDispersion(vecList(startInd:endInd,:));
+        if (dispersion > dispThres)
+            startInd = startInd + 1;
+            endInd = FindEndInd();
+            continue;
+        end
+
+        while(dispersion < dispThres && endInd <= size(data,1))
+            newDisp = GetDispersion(vecList(endInd,:), vecList(startInd:endInd,:));
+            dispersion = max([newDisp dispersion]);
+            endInd = endInd + 1;
+        end
+        result(startInd:endInd-1) = 1;
+        startInd = endInd;
+        endInd = FindEndInd();
+    end
+
+    function l_endInd = FindEndInd()
+        l_endInd = -1;
+        for l_ind=startInd:size(data,1)
+            if (data(l_ind, timeInd) - data(startInd,timeInd) > windowDur)
+                break;
+            end
+            l_endInd = l_ind;
+        end
+    end
+end
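A hedged example of the I-DT variant on preloaded data; the thresholds are the defaults of `DetectFixations360IDTFile` and the file name is a placeholder.

```matlab
% Hypothetical example: 1.5 deg dispersion threshold, 100 ms window, FOV motion.
[data, metadata, attributes] = LoadArff('recording.arff');
isFix = DetectFixations360IDT(data, metadata, attributes, 1, 1.5, 100000);
```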

+ 29 - 0
DetectFixations360IDTFile.m

@@ -0,0 +1,29 @@
+% DetectFixations360IDTFile.m
+%
+% This function detects fixations based on the I-DT algorithm. 
+% The result is stored as a new attribute in the output file with values of 
+% '{unassigned, fixation}'
+%
+% input:
+%   arffFile        - file to label
+%   outputfile      - ARFF file to store detected fixations
+%   outputAtt       - attribute that holds detected fixations in the output ARFF file
+%   typeOfMotion    - 1 -> eye FOV, 2 -> eye+head
+%   dispThres       - dispersion threshold in degrees
+%   windowDur       - window duration in us
+
+function DetectFixations360IDTFile(arffFile, outputFile, outputAtt, typeOfMotion, dispThres, windowDur)
+    if (nargin < 6)
+        windowDur = 100000;
+    end
+    if (nargin < 5)
+        dispThres = 1.5;
+    end
+
+	[data, metadata, attributes, relation, comments] = LoadArff(arffFile);
+
+    res = DetectFixations360IDT(data, metadata, attributes, typeOfMotion, dispThres, windowDur);
+    [data, attributes] = AddAttArff(data, attributes, res, outputAtt, '{unassigned,fixation}');
+
+    SaveArff(outputFile, data, metadata, attributes, relation, comments);
+end
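A hedged call of the file wrapper relying on its built-in defaults (file names are placeholders).

```matlab
% Hypothetical example; dispThres and windowDur fall back to 1.5 deg and 100000 us.
DetectFixations360IDTFile('recording.arff', 'recording_idt.arff', 'fixations_idt', 1);
```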

+ 136 - 0
DetectSaccades360.m

@@ -0,0 +1,136 @@
+% function DetectSaccades360.m
+%
+% This function detects saccades from the provided data. It is based on the
+% saccade detector described in Dorr, Michael, et al. "Variability of eye
+% movements when viewing dynamic natural scenes." Journal of vision 10.10
+% (2010): 28-28.
+%
+% input:
+%   data        - data from the ARFF file
+%   metadata    - metadata from the ARFF file
+%   attributes  - attributes from the ARFF file
+%   typeOfMotion- 1 -> eye FOV, 2 -> eye+head
+%   params      - parameters to use for saccade detection
+%
+% output:
+%   result      - logical vector with the same length as data and true where a saccade is detected
+%
+% params format:
+% params is a data structure with the following fields
+% 
+% params.tolerance;
+% params.thresholdOnsetFast;
+% params.thresholdOnsetSlow;
+% params.thresholdOffset;
+% params.maxSpeed;
+% params.minDuration;
+% params.maxDuration;
+% params.velIntegrationInterv;
+% params.minConfidence
+
+function result = DetectSaccades360(data, metadata, attributes, typeOfMotion, params)
+
+    c_tolerance = params.tolerance;
+    c_thresholdOnsetFast = params.thresholdOnsetFast;
+    c_thresholdOnsetSlow = params.thresholdOnsetSlow;
+    c_thresholdOffset = params.thresholdOffset;
+    c_maxSpeed = params.maxSpeed;
+    c_minDuration = params.minDuration;
+    c_maxDuration = params.maxDuration;
+    c_velIntegrationInterv = params.velIntegrationInterv;
+    c_minConf = params.minConfidence;
+
+    timeInd = GetAttPositionArff(attributes, 'time');
+    xInd = GetAttPositionArff(attributes, 'x');
+    yInd = GetAttPositionArff(attributes, 'y');
+    confInd = GetAttPositionArff(attributes, 'confidence');
+
+    % initialize result
+    result = false(size(data,1),1);
+
+    if (size(data,1) < 10)
+        return;
+    end
+
+    [eyeFovVec, eyeHeadVec, headVec] = GetCartVectors(data, metadata, attributes);
+    if (typeOfMotion == 1)
+        vecList = eyeFovVec;
+    elseif (typeOfMotion == 2)
+        vecList = eyeHeadVec;
+    elseif (typeOfMotion == 3)
+        vecList = headVec;
+    else
+        error('Unknown motion');
+    end
+
+    speed = GetSpeed(vecList, data(:,timeInd));
+
+    % create glitch array
+    glitch = zeros(size(data,1),1);
+    glitch(data(:,xInd) > (1+c_tolerance)*metadata.width_px) = 1;
+    glitch(data(:,xInd) < -c_tolerance*metadata.width_px) = 1;
+    glitch(data(:,yInd) > (1+c_tolerance)*metadata.height_px) = 1;
+    glitch(data(:,yInd) < -c_tolerance*metadata.height_px) = 1;
+    glitch(data(:,confInd) < c_minConf) = 0.75;
+
+    isSaccActive = 0;
+    onsetSlowIndex = 1;
+
+    for i=1:size(data,1)
+        % not in glitch
+        if (glitch(i) == 0)
+            if (isSaccActive == 0)
+                % if speed less than onset slow move index
+                if (speed(i) < c_thresholdOnsetSlow)
+                    onsetSlowIndex = i+1;
+                end
+
+                % saccade above fast threshold but below physically impossible
+                if (speed(i) > c_thresholdOnsetFast && speed(i) < c_maxSpeed)
+                    isSaccActive = 1;
+                    
+                    % allocate all samples from onset slow as saccade
+                    result(onsetSlowIndex:i) = 1;
+                end
+            end
+
+            % if within saccade check for termination cases otherwise make sample part of saccade
+            if (isSaccActive == 1)
+                % saccade termination cases
+                if (speed(i) < c_thresholdOffset)
+                    isSaccActive = 0;
+
+                    % check for minDuration
+                    if (data(i,timeInd)-data(onsetSlowIndex,timeInd) < c_minDuration)
+                        result(onsetSlowIndex:i-1) = 0;
+                    end
+                    continue; % skip rest of the loop
+                end
+
+                if (data(i,timeInd)-data(onsetSlowIndex,timeInd) > c_maxDuration)
+                    isSaccActive = 0;
+                    continue;
+                end
+
+                % check if onset and current point are the same
+                if (i-onsetSlowIndex < 1)
+                    continue
+                end
+                meanVel = mean(speed(onsetSlowIndex:i-1));
+
+                if (meanVel < c_thresholdOnsetSlow)
+                    isSaccActive = 0;
+
+                    % check for minDuration
+                    if (data(i,timeInd)-data(onsetSlowIndex,timeInd) < c_minDuration)
+                        result(onsetSlowIndex:i-1) = 0;
+                    end
+                    continue;
+                end
+                result(i) = 1;
+            end
+        else
+            onsetSlowIndex = i+1;
+        end
+    end
+end
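A hedged example of `DetectSaccades360` on preloaded data, using the shipped equirectangular parameter file (the recording name is a placeholder).

```matlab
% Hypothetical example on eye+head (E+H) motion.
[data, metadata, attributes] = LoadArff('recording.arff');
params = LoadParams('params_saccades_equirect.txt');
isSacc = DetectSaccades360(data, metadata, attributes, 2, params);
fprintf('%.1f%% of samples labelled as saccade\n', 100 * mean(isSacc));
```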

+ 52 - 0
DetectSaccades360File.m

@@ -0,0 +1,52 @@
+% function DetectSaccades360File:
+%
+% This function detects saccades from the input file and stores them in the provided
+% attribute of the output file. The values of the attribute in the output
+% file are '{unassigned, saccade}'.
+%
+% It tries to replicate the SaccadeDetector of dsf for use in Matlab, so for a
+% comparison of the logic refer to that documentation.
+%
+% input:
+%   inputfile   - ARFF file containing gaze coordinates
+%   outputfile  - ARFF file to store detected saccades
+%   outputAtt   - attribute that holds detected saccades in the output ARFF file
+%   typeOfMotion- 1 -> eye FOV, 2 -> eye+head
+%   paramfile   - (optional) txt file containing parameters for saccade detection (explanation below)
+%
+% paramfile format:
+% The file is independent of parameter ordering and letter case. Each parameter is followed by
+% an equal sign and then the value. The available parameters are listed below
+%   tolerance=
+%   thresholdOnsetFast=
+%   thresholdOnsetSlow=
+%   thresholdOffset=
+%   maxSpeed=
+%   minDuration=
+%   maxDuration=
+%   velIntegrationInterv=
+%   minConfidence=
+
+function DetectSaccades360File(inputfile, outputFile, outputAtt, typeOfMotion, paramfile)
+    % load gaze coordinates from arff file
+    [data, metadata, attributes, relation, comments] = LoadArff(inputfile);
+    
+    if (nargin < 5)
+        params.tolerance = 0.1;
+        params.thresholdOnsetFast = 137.5;
+        params.thresholdOnsetSlow = 17.1875;
+        params.thresholdOffset = 17.1875;
+        params.maxSpeed = 1031.25;
+        params.minDuration = 15000;
+        params.maxDuration = 160000;
+        params.velIntegrationInterv = 4000;
+        params.minConfidence = 0.25;
+    else
+        params = LoadParams(paramfile);
+    end
+
+    res = DetectSaccades360(data, metadata, attributes, typeOfMotion, params);
+    [data, attributes] = AddAttArff(data, attributes, res, outputAtt, '{unassigned,saccade}');
+
+    SaveArff(outputFile, data, metadata, attributes, relation, comments);
+end
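A hedged call of the wrapper above (placeholder file names; the parameter file is the one listed in the README).

```matlab
% Hypothetical example: saccade detection on eye+head motion with the shipped parameters.
DetectSaccades360File('recording.arff', 'recording_sacc.arff', 'saccades', 2, ...
                      'params_saccades_equirect.txt');
```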

+ 28 - 0
DetectSaccades360FileRegex.m

@@ -0,0 +1,28 @@
+% DetectSaccades360FileRegex.m
+%
+% This function calls the DetectSaccades360File function for all the files matched
+% by the regular expression
+%
+% input:
+%   regex       - regular expression to ARFF files
+%   outDir      - output directory
+%   outputAtt   - attribute that holds detected saccades in the output ARFF files
+%   typeOfMotion - 1 -> eye FOV, 2 -> eye+head
+%   paramfile   - file containing saccade detection parameters
+%
+% ex. DetectSaccades360FileRegex('/mnt/syno8/data/VideoGaze360/gaze/labelled_ioannis/*.arff', '/mnt/scratch/VideoGaze360_buffer/labelled_files_algorithm/', 'saccades', 1, 'params_saccades_fov.txt')
+                                                                                
+
+function DetectSaccades360FileRegex(regex, outDir, outputAtt, typeOfMotion, paramfile)
+    filelist = glob(regex);
+
+    for i=1:size(filelist,1)
+        filename = filelist{i,1};
+        disp(filename);
+        [path, name, ext] = fileparts(filename);
+
+        outFile = fullfile(outDir, [name ext]);
+
+        DetectSaccades360File(filename, outFile, outputAtt, typeOfMotion, paramfile);
+    end
+end

+ 32 - 0
DetectSaccades360IVT.m

@@ -0,0 +1,32 @@
+% DetectSaccades360IVT.m
+%
+% This function uses a simple speed threshold to detect saccades as in the I-VT
+% algorithm of Salvucci, Dario D., and Joseph H. Goldberg. "Identifying
+% fixations and saccades in eye-tracking protocols." Proceedings of the 2000
+% symposium on Eye tracking research & applications. ACM, 2000.
+%
+% input:
+%   data         - data from the ARFF file
+%   metadata     - metadata from the ARFF file
+%   attributes   - attributes from the ARFF file
+%   typeOfMotion - 1 -> eye FOV, 2 -> eye+head
+%   velThreshold - velocity threshold for I-VT
+%
+% output:
+%   result       - logical vector with same length as input data and true where a saccade was detected
+
+function result = DetectSaccades360IVT(data, metadata, attributes, typeOfMotion, velThreshold)
+    [eyeFovVec, eyeHeadVec, headVec] = GetCartVectors(data, metadata, attributes);
+	if (typeOfMotion == 1)
+        vecList = eyeFovVec;
+    elseif (typeOfMotion == 2)
+        vecList = eyeHeadVec;
+    else
+        error('Unknown motion');
+    end
+
+    timeInd = GetAttPositionArff(attributes, 'time');
+    speed = GetSpeed(vecList, data(:,timeInd));
+
+    result = speed > velThreshold;
+end
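A hedged example of the I-VT variant on preloaded data; 100 deg/s is the default threshold of the file wrapper and the recording name is a placeholder.

```matlab
% Hypothetical example on FOV (eye within head) motion.
[data, metadata, attributes] = LoadArff('recording.arff');
isSacc = DetectSaccades360IVT(data, metadata, attributes, 1, 100);
```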

+ 25 - 0
DetectSaccades360IVTFile.m

@@ -0,0 +1,25 @@
+% DetectSaccades360IVTFile.m
+%
+% This function uses a simple speed threshold as in I-VT to detect saccades. 
+% The result is stored as a new attribute in the output file with values of
+% '{unassigned, saccade}'
+%
+% input:
+%   arffFile     - file to label
+%   outputfile   - ARFF file to store detected saccades
+%   outputAtt    - attribute that holds detected saccades in the output ARFF file
+%   typeOfMotion - 1 -> eye FOV, 2 -> eye+head
+%   velThreshold - velocity threshold for I-VT
+
+function DetectSaccades360IVTFile(arffFile, outputFile, outputAtt, typeOfMotion, velThreshold)
+    if (nargin < 5)
+        velThreshold = 100;
+    end
+
+    [data, metadata, attributes, relation, comments] = LoadArff(arffFile);
+
+    res = DetectSaccades360IVT(data, metadata, attributes, typeOfMotion, velThreshold);
+    [data, attributes] = AddAttArff(data, attributes, res, outputAtt, '{unassigned,saccade}');
+
+    SaveArff(outputFile, data, metadata, attributes, relation, comments);
+end

+ 186 - 0
README.md

@@ -0,0 +1,186 @@
+Here we provide the source code for 5 popular eye movement classification algorithms. These
+algorithms have been converted in order to work with **360-degree equirectangular** gaze recordings.
+
+Moreover, we provide a function that reprojects the **360-degree equirectangular data**
+around the equator of the sphere (the area with the lowest distortions) and then applies
+the original algorithms that were developed for monitor-based experiments.
+
+## 1. CONTENT
+
+Before you start using the algorithms provided in this repository you should
+first clone (or download) the repository that offers utilities that allow us to
+handle ARFF files from [here](https://github.com/IoAg/matlab_utils). We also
+need to clone the repository that offers utilities for handling 360-degree
+data from [here](https://github.com/IoAg/matlab_360_utils). Then add the
+previous folders to the search path of Matlab with the *pathtool* or *addpath*
+commands.
+
+### 1.1 Converted Algorithms
+
+The algorithms that have the word *File* at the end take a file as input and store the
+result as a new attribute in the output file. The algorithms with the same basename
+work on preloaded data. For algorithms that require only a few parameters, these are provided
+directly as input arguments. For the more complex algorithms the parameters are stored
+in a file and are loaded from it. For all the algorithms we provide the default parameters
+of the original authors, except for one parameter in the Larsson
+et al. (2015) algorithm which is explained in our paper.
+
+Another distinct functionality of the eye movement classification algorithms
+provided here is their ability to distinguish between eye+head gaze motion
+(E+H) and eye-within-head gaze motion (field-of-view, FOV). This can be set
+with the *typeOfMotion* parameter.
+
+The 360-degree aware implementation of the Larsson et al. (2015) algorithm is provided in its own
+directory and the algorithm names follow the same convention.
+
+The list of available algorithms is given below
+
+| File | Use |
+| --------- | -------- |
+| DetectSaccades360.m |  runs the saccade detector from Dorr et al. (2010) on loaded data and returns a logical vector with true where a saccade was detected |
+| DetectSaccades360IVT.m |  runs the I-VT saccade detector from Salvucci and Goldberg (2000) on loaded data and returns a logical vector with true where a saccade was detected |
+| DetectFixations360.m |  runs the fixation detector from Dorr et al. (2010) on loaded data and returns a logical vector with true where a fixation was detected |
+| DetectFixations360IDT.m |  runs the I-DT fixation detector from Salvucci and Goldberg (2000) on loaded data and returns a logical vector with true where a fixation was detected |
+| DetectBlinks360.m |  detects blinks based on noise intervals and the distance of saccade intervals from them and returns a logical vector with true where a blink was detected |
+| larsson_360/DetectLarsson360.m | runs the Larsson et al. (2015) detection on loaded data and returns a column vector with the detected eye movements |
+| DetectSaccades360File.m |  runs the saccade detector from Dorr et al. (2010) for one file and stores the result in another one |
+| DetectSaccades360IVTFile.m |  runs the I-VT saccade detector from Salvucci and Goldberg (2000) for one file and stores the result in another one |
+| DetectFixations360File.m |  runs the fixation detector from Dorr et al. (2010) for one file and stores the result in another one |
+| DetectFixations360IDTFile.m |  runs the I-DT fixation detector from Salvucci and Goldberg (2000) for one file and stores the result in another one |
+| DetectBlinks360File.m |  runs the above blink detector for one file and stores the result in another one |
+| larsson_360/DetectLarsson360File.m | runs the Larsson et al. (2015) detection for one file and stores the result in another one |
+| larsson_360/DetectLarsson360FileRegex.m | runs the Larsson detection for all files matched by the wildcard-regex (ex. '../GazeCom/gaze_arff/*/*.arff') |
+| params_saccades_equirect.txt | file containing the parameters that are used for saccade and blink detection per Dorr et al. (2010). The speed thresholds were optimized for the eye+head representation |
+| params_saccades_fov.txt | file containing the parameters that are used for saccade and blink detection per Dorr et al. (2010). The speed thresholds were optimized for the eye-within-head representation |
+| params_fixation.txt | file containing the parameters that are used for fixation detection per Dorr et al. (2010) |
+| larsson_360/params_larsson.txt | file containing the parameters that are used for SP and fixation detection per Larsson et al. (2015) |
+
+### 1.2 Data Reprojection
+
+If the conversion of an algorithm is not easy, because it is either very complex
+or will only be applied to a small amount of data, we can reproject the
+equirectangular data to areas with low distortions and apply the original
+algorithms directly. Here we also offer the possibility of distinguishing
+between E+H and FOV gaze motion, as in the case of the converted algorithms, with
+the *DataReprojDetection.m* and *DataReprojFovDetection.m* functions.
+
+The main idea behind reprojection is to provide the eye movement detection
+function name as input to the reprojection detection functions
+and then call it with the converted data as input. The detection functions should
+take as input at least *data, metadata, and attributes* as returned from *LoadArff.m*.
+All the extra arguments can be provided through the *varargin* input argument. Their output
+is a column vector with an integer value for each detected eye movement.
+
+A more detailed explanation of the input arguments is given below
+
+| Input arguments | Use | 
+| --------- | -------- |
+| arffFile | ARFF file to process |
+| outFile | file name to store result |
+| outputAtt | name of the attribute in the output ARFF that holds detected eye movements |
+| attValues | nominal values of the added attribute as returned from the eye movement detection algorithm. They are a string in the form '{unassigned, fixation}' if the detection algorithm returns 0 for unassigned and 1 for fixations |
+| detFuncName | detection function name as string. Ex. 'DetectSaccadesIVVT' |
+| varargin | required extra arguments for calling the detection function. The data, metadata, attributes are passed to the detection function by default in this order followed by the varargin arguments | 
+
+The list of used files for data reprojection is given below
+
+| File | Use |
+| --------- | -------- |
+| DataReprojDetection.m | (main function) calls the provided eye movement detection function on E+H (eye and head) motion data |
+| DataReprojFovDetection.m | (main function) calls the provided eye movement detection function on FOV (eye within head) motion data |
+| ProjectEquirect2Fov.m | projects data to the field-of-view |
+| ProjectEquirect2FovFile.m | projects data to the field-of-view and stores it to a file |
+
+## 2. DATA FORMAT
+
+All the functions use the ARFF data format for input and output to the disk. The initial
+ARFF format was extended as described in Agtzidis et al. (2016) and was further expanded 
+for 360-degree gaze data.
+
+Here the "@relation" is set to gaze_360 to distinguish the recordings from
+plain gaze recordings. We also make use of the "%@METADATA" special comments
+which describe the field of view of the used headset. Apart from the default
+metadata *width_px, height_px, distance_mm, width_mm, height_mm* we also use
+the extra metadata *fov_width_px, fov_width_deg, fov_height_px, fov_height_deg* 
+that describe the headset properties. 
+
+### 2.1. ARFF example
+
+```
+@RELATION gaze_360
+
+%@METADATA distance_mm 0.00
+%@METADATA height_mm 0.00
+%@METADATA height_px 1080
+%@METADATA width_mm 0.00
+%@METADATA width_px 1920
+
+%@METADATA fov_height_deg 100.00
+%@METADATA fov_height_px 1440
+%@METADATA fov_width_deg 100.00
+%@METADATA fov_width_px 1280
+
+@ATTRIBUTE time INTEGER
+@ATTRIBUTE x NUMERIC
+@ATTRIBUTE y NUMERIC
+@ATTRIBUTE confidence NUMERIC
+@ATTRIBUTE x_head NUMERIC
+@ATTRIBUTE y_head NUMERIC
+@ATTRIBUTE angle_deg_head NUMERIC
+@ATTRIBUTE labeller_1 {unassigned,fixation,saccade,SP,noise,VOR,OKN}
+
+
+@DATA
+0,960.00,540.00,1.00,960.00,540.00,1.22,fixation
+5000,959.00,539.00,1.00,959.00,539.00,1.23,fixation
+13000,959.00,539.00,1.00,959.00,539.00,1.23,fixation
+18000,959.00,539.00,1.00,959.00,539.00,1.23,fixation
+29000,959.00,539.00,1.00,959.00,539.00,1.24,fixation
+34000,959.00,539.00,1.00,959.00,539.00,1.24,fixation
+45000,959.00,539.00,1.00,959.00,539.00,1.24,fixation
+49000,959.00,539.00,1.00,959.00,539.00,1.24,fixation
+61000,959.00,539.00,1.00,959.00,539.00,1.24,fixation
+66000,959.00,539.00,1.00,959.00,539.00,1.24,fixation
+77000,959.00,539.00,1.00,959.00,540.00,1.24,fixation
+82000,959.00,539.00,1.00,959.00,540.00,1.24,fixation
+94000,959.00,539.00,1.00,960.00,540.00,1.24,fixation
+99000,959.00,539.00,1.00,960.00,540.00,1.24,fixation
+110000,959.00,539.00,1.00,960.00,540.00,1.25,fixation
+114000,959.00,539.00,1.00,960.00,540.00,1.25,fixation
+125000,958.00,538.00,1.00,960.00,540.00,1.26,saccade
+129000,956.00,537.00,1.00,960.00,540.00,1.27,saccade
+141000,948.00,530.00,1.00,960.00,540.00,1.28,saccade
+```
+
+## 3. GENERAL INFORMATION
+
+Author: Ioannis Agtzidis <br/>
+Contact: ioannis.agtzidis@tum.de
+
+If you use any of these algorithm implementations for 360-degree equirectangular stimuli, please cite:
+
+
+> \@inproceedings{agtzidis2019conversion, <br/>
+>    title={Getting (More) Real: Bringing Eye Movement Classification to HMD Experiments with Equirectangular Stimuli}, <br/>
+>    author={Agtzidis, Ioannis and Dorr, Michael}, <br/>
+>    booktitle={Proceedings of the 2019 ACM Symposium on Eye Tracking Research \& Applications}, <br/>
+>    pages={303--306}, <br/>
+>    year={2019}, <br/>
+>    organization={ACM} <br/>
+> }
+
+## 4. REFERENCES
+
+> Ioannis Agtzidis, Mikhail Startsev, and Michael Dorr. 2016. In the pursuit of (ground)
+> truth: A hand-labelling tool for eye movements recorded during dynamic scene
+> viewing. In 2016 IEEE Second Workshop on Eye Tracking and Visualization (ETVIS).
+> 65–68. https://doi.org/10.1109/ETVIS.2016.7851169
+> 
+> Michael Dorr, Thomas Martinetz, Karl R Gegenfurtner, and Erhardt Barth. 2010.
+> Variability of eye movements when viewing dynamic natural scenes. Journal of
+> Vision 10, 10 (2010), 28–28
+> 
+> Linnéa Larsson, Marcus Nyström, Richard Andersson, and Martin Stridh. 2015.
+> Detection of fixations and smooth pursuit movements in high-speed eye-tracking
+> data.  Biomedical signal processing and control 18 (2015), 145–152.
+> <http://dx.doi.org/10.  1016/j.bspc.2014.12.008> 

+ 75 - 0
larsson_360/DetectLarsson360.m

@@ -0,0 +1,75 @@
+% DetectLarsson360.m
+%
+% This function detects fixations, saccades, smooth pursuit and blinks. Saccades
+% and blinks are detected with the DetectSaccades360 and DetectBlinks360 functions.
+% The fixations and smooth pursuit are detected per the Larsson et al. 2015 paper
+% "Detection of fixations and smooth pursuit movements in high-speed eye-tracking data"
+%
+% input:
+%   data            - data from the ARFF file
+%   metadata        - metadata from the ARFF file
+%   attributes      - attributes from the ARFF file
+%   typeOfMotion    - 1 -> eye FOV, 2 -> eye+head
+%   paramSacc       - parameters to use for saccade detection. See function DetectSaccades360
+%                     for details
+%   paramLarsson    - parameters for fixation and smooth pursuit detection
+%
+% output:
+%   result          - vector with the same length as data and values (0,1,2,3,4) representing
+%                     {unassigned, fixation, saccade, sp, noise}
+%
+% paramLarsson format:
+% paramLarsson is a data structure with the following fields. For an explanation 
+% of each field refer to the original paper
+%
+% paramLarsson.preprocessVelThres;
+% paramLarsson.t_window;
+% paramLarsson.t_overlap;
+% paramLarsson.eta_p;
+% paramLarsson.t_min;
+% paramLarsson.eta_d;
+% paramLarsson.eta_cd;
+% paramLarsson.eta_pd;
+% paramLarsson.eta_maxFix;
+% paramLarsson.eta_minSmp;
+% paramLarsson.phi;
+% paramLarsson.minConfidence;
+
+function result = DetectLarsson360(data, metadata, attributes, typeOfMotion, paramSacc, paramLarsson)
+    c_fix = 1;
+    c_sacc = 2;
+    c_sp = 3;
+    c_noise = 4;
+	attType = '{unassigned,fixation,saccade,sp,noise}';
+
+    result = zeros(size(data,1),1);
+    % detect saccades (parameters for 360 degrees)
+    saccades = DetectSaccades360(data, metadata, attributes, typeOfMotion, paramSacc);
+    result(saccades) = c_sacc;
+
+    % detect blinks (parameters for 360 degrees) and label them as noise
+    blinks = DetectBlinks360(data, metadata, attributes, typeOfMotion, paramSacc);
+    result(blinks) = c_noise;
+
+    % when confidence below threshold assign noise label
+    confInd = GetAttPositionArff(attributes, 'confidence');
+    result(data(:,confInd) < paramLarsson.minConfidence) = c_noise;
+
+    thd = paramLarsson.preprocessVelThres;
+    result = preprocessing(data, metadata, attributes, result, thd, typeOfMotion);
+
+    t_window = paramLarsson.t_window;
+    t_overlap = paramLarsson.t_overlap;
+    eta_p = paramLarsson.eta_p;
+    prelSeg = preliminary_segment(data, metadata, attributes, result, typeOfMotion, t_window, t_overlap, eta_p);
+
+    t_min = paramLarsson.t_min;
+    eta_d = paramLarsson.eta_d;
+    eta_cd = paramLarsson.eta_cd;
+    eta_pd = paramLarsson.eta_pd;
+    eta_maxFix = paramLarsson.eta_maxFix;
+    eta_minSmp = paramLarsson.eta_minSmp;
+    phi = paramLarsson.phi;
+    result = extract_sp(data, metadata, attributes, result, typeOfMotion, prelSeg, t_min, eta_d, eta_cd, eta_pd, eta_maxFix, eta_minSmp, phi);
+
+end
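A hedged example of running the full Larsson et al. (2015) pipeline on preloaded data, using the parameter files shipped in this repository (the recording name is a placeholder).

```matlab
% Hypothetical example on FOV motion; labels follow the attType enumeration above.
[data, metadata, attributes] = LoadArff('recording.arff');
paramSacc    = LoadParams('params_saccades_fov.txt');
paramLarsson = LoadParams('params_larsson.txt');
labels = DetectLarsson360(data, metadata, attributes, 1, paramSacc, paramLarsson);
% labels: 0 unassigned, 1 fixation, 2 saccade, 3 smooth pursuit, 4 noise
```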

+ 53 - 0
larsson_360/DetectLarsson360File.m

@@ -0,0 +1,53 @@
+% DetectLarsson360File.m
+%
+% This function detects fixations, saccades, smooth pursuit and blinks and
+% stores them to the output file. Saccades and blinks are detected with the
+% DetectSaccades360 and DetectBlinks360 functions.  The fixations and smooth
+% pursuit are detected per the Larsson et al. 2015 paper "Detection of
+% fixations and smooth pursuit movements in high-speed eye-tracking data"
+%
+% input:
+%   inputfile       - ARFF file containing gaze coordinates
+%   outputfile      - ARFF file to store detected fixations
+%   outputAtt       - attribute that holds detected fixations in the output ARFF file
+%   typeOfMotion    - 1 -> eye FOV, 2 -> eye+head
+%   paramSaccFile   - file containing parameters to use for saccade detection.
+%                     See function DetectSaccades360File for details
+%   paramLarssonFile- file containing parameters for fixation and smooth pursuit detection
+%
+% paramLarssonFile format:
+% paramLarsson is a data structure with the following fields. For an
+% explanation of each field refer to the original paper. The file is
+% independent of parameter ordering and letter case. Each parameter is followed
+% by an equal sign and then the value. The available values are given below
+%
+% preprocessVelThres=100.0
+% t_window=44000 
+% t_overlap=12000
+% eta_p=0.01
+% t_min=32000
+% eta_d=0.45
+% eta_cd=0.5
+% eta_pd=0.2
+% eta_maxFix=1.9
+% eta_minSmp=1.1
+% phi=45
+% minConfidence=0.75
+
+function DetectLarsson360File(inputfile, outputfile, outputAtt, typeOfMotion, paramSaccFile, paramLarssonFile)
+    c_fix = 1;
+    c_sacc = 2;
+    c_sp = 3;
+    c_noise = 4;
+	attType = '{unassigned,fixation,saccade,sp,noise}';
+
+    [data, metadata, attributes, relation, comments] = LoadArff(inputfile);
+
+    paramSacc = LoadParams(paramSaccFile);
+    paramLarsson = LoadParams(paramLarssonFile);
+
+    res = DetectLarsson360(data, metadata, attributes, typeOfMotion, paramSacc, paramLarsson);
+    [data, attributes] = AddAttArff(data, attributes, res, outputAtt, attType);
+
+    SaveArff(outputfile, data, metadata, attributes, relation, comments);
+end

+ 24 - 0
larsson_360/DetectLarsson360FileRegex.m

@@ -0,0 +1,24 @@
+% DetectLarsson360FileRegex.m
+%
+% This function calls DetectLarsson360File for all the files matched by the
+% regular expression
+%
+% input:
+%   regex           - regular expression to ARFF files
+%   outDir          - output directory to store files after detection
+%   outputAtt       - attribute that holds detected fixations in the output ARFF file
+%   typeOfMotion    - 1 -> eye FOV, 2 -> eye+head
+%   paramSaccFile   - file containing parameters to use for saccade detection.
+%                     See function DetectSaccades360File for details
+%   paramLarssonFile- file containing parameters for fixation and smooth pursuit detection
+
+function DetectLarsson360FileRegex(regex, outDir, outputAtt, typeOfMotion, paramSaccFile, paramLarssonFile)
+    filelist = glob(regex);
+    for i=1:size(filelist,1)
+        filename = filelist{i,1};
+        [dir, name, ext] = fileparts(filename);
+        outputfile = fullfile(outDir, [name ext]);
+
+        DetectLarsson360File(filename, outputfile, outputAtt, typeOfMotion, paramSaccFile, paramLarssonFile);
+    end
+end

+ 82 - 0
larsson_360/README.md

@@ -0,0 +1,82 @@
+An implementation of the Larsson smooth pursuit and fixation detector from [1]  for 
+**360-degree** data.
+
+This implementation is an extension to the original implementation of the authors in [2] 
+which can be found in <https://www.michaeldorr.de/smoothpursuit/larsson_reimplementation.zip>
+
+## 1. GENERAL INFORMATION
+
+Author: Ioannis Agtzidis <br/>
+Contact: ioannis.agtzidis@tum.de
+
+If you use this Larsson algorithm re-implementation for 360-degree equirectangular stimuli, please cite:
+
+
+> \@inproceedings{agtzidis2019conversion, <br/>
+>    title={Getting (More) Real: Bringing Eye Movement Classification to HMD Experiments with Equirectangular Stimuli}, <br/>
+>    author={Agtzidis, Ioannis and Dorr, Michael}, <br/>
+>    booktitle={Proceedings of the 2019 ACM Symposium on Eye Tracking Research \& Applications}, <br/>
+>    pages={303--306}, <br/>
+>    year={2019}, <br/>
+>    organization={ACM} <br/>
+> }
+
+## 2. CONTENTS
+
+The main 'interface' functions are listed below:
+
+| File | Use |
+| --------- | -------- |
+| DetectLarsson360.m | runs the Larsson et al. (2015) detection on loaded data and returns a column vector with the detected eye movements |
+| DetectLarsson360File.m | runs the Larsson et al (2015) detection for one file |
+| DetectLarsson360FileRegex.m | runs the Larsson detection for all files matched by the wildcard-regex (ex. '../GazeCom/gaze_arff/*/*.arff') |
+| params_saccades.txt | file containing the parameters that are used for saccade and blink detection |
+| params_larsson.txt | file containing the parameters that are used for SP and fixation detection |
+| parameters.txt | file provided **only** as a reference, as well as to note the changes in default parameters made to adjust the algorithm for 250Hz data instead of 500Hz (in the original article [1]). |
+
+In order to be able to run the above functions, use the *pathtool* or *addpath* functions in Matlab to add the parent directory (../) to its search path.
+
+Some examples are given below
+
+```
+DetectLarsson360File('test.arff', 'test_larsson.arff', 'larsson_fov', 1, 'params_saccades_fov.txt', 'params_larsson.txt')
+
+DetectLarsson360File('test.arff', 'test_larsson.arff', 'larsson_eye_head', 2, 'params_saccades.txt', 'params_larsson.txt')
+
+DetectLarsson360FileRegex('~/dataset/*.arff', '~/dataset_with_em', 'larsson_fov', 1, 'params_saccades_fov.txt', 'params_larsson.txt')
+```
+
+## 3. LICENSE
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program. If not, see <http://www.gnu.org/licenses/>.
+license - GPL.
+
+
+> [1] \@article{larsson2015detection, <br/>
+>        title={Detection of fixations and smooth pursuit movements in high-speed eye-tracking data}, <br/>
+>        author={Larsson, Linn{\'e}a and Nystr{\"o}m, Marcus and Andersson, Richard and Stridh, Martin}, <br/>
+>        journal={Biomedical Signal Processing and Control}, <br/>
+>        volume={18}, <br/>
+>        pages={145--152}, <br/>
+>        year={2015}, <br/>
+>        publisher={Elsevier} <br/>
+>    } <br/>
+> <br/>
+> [2] \@inproceedings{agtzidis2016smooth, <br/>
+>        title={Smooth pursuit detection based on multiple observers}, <br/>
+>        author={Agtzidis, Ioannis and Startsev, Mikhail and Dorr, Michael}, <br/>
+>        booktitle={Proceedings of the Ninth Biennial ACM Symposium on Eye Tracking Research \& Applications}, <br/>
+>        pages={303--306}, <br/>
+>        year={2016}, <br/>
+>        organization={ACM} <br/>
+>    } <br/>

+ 207 - 0
larsson_360/extract_sp.m

@@ -0,0 +1,207 @@
+function [ result ] = extract_sp(data, metadata, attributes, labelVec, typeOfMotion, prelInt, t_min, eta_D, eta_CD, eta_PD, eta_maxFix, eta_minSmp, phi)
+%Classifies intervals as fixations or SP
+%   @data of gaze recordings
+%   @metadata of the ARFF data
+%   @attributes describing data
+%   @labelVec label vector to consider for intervals with value 0 (unassigned)
+%   @typeOfMotion gets the values 1 -> eye FOV, 2 -> eye+head
+%   @prelInt preliminary intervals for labelVec
+%
+%   @t_min is the threshold for minimal fixation duration in us, default is 40000us
+%   @eta_D is the threshold for p_D (dispersion), default is 0.45
+%   @eta_CD is the threshold for consistency of direction, default is 0.5
+%   @eta_PD is the threshold for position displacement, default is 0.2
+%   @eta_maxFix is the threshold for spatial range, default is 1.9 deg 
+%   @eta_minSmp is the threshold for merged segments' spatial range, default is
+%   1.7 deg
+%   @phi is the threshold for mean direction difference, default is 45 degrees
+%
+%   @result is the same length as labelVec with fixations and sp labelled
+
+    if nargin < 13
+        phi = 180 / 4;
+    end
+    if nargin < 12
+        eta_minSmp = 1.7;
+    end
+    if nargin < 11
+        eta_maxFix = 1.9;
+    end
+    if nargin < 10
+        eta_PD = 0.2;
+    end
+    if nargin < 9
+        eta_CD = 0.5;
+    end
+    if nargin < 8
+        eta_D = 0.45;
+    end
+    if nargin < 7
+        t_min = 40000;
+    end
+
+	[eyeFovVec, eyeHeadVec, headVec] = GetCartVectors(data, metadata, attributes);
+	if (typeOfMotion == 1)
+		vecList = eyeFovVec;
+	elseif (typeOfMotion == 2)
+		vecList = eyeHeadVec;
+	elseif (typeOfMotion == 3)
+		vecList = headVec;
+	else
+		error('Unknown motion');
+	end
+
+    c_fix = 1;
+    c_sp = 3;
+
+    % eta_minSmp and eta_maxFix are used directly in degrees,
+    % so no conversion to pixels is needed for the 360-degree data
+
+    % find position of attributes
+    timeIndex = GetAttPositionArff(attributes, 'time');
+   
+    moveId = 0; % unassigned
+    index = GetIntervalsIndex(labelVec, moveId);
+    segment_class = zeros(size(prelInt,1),1); % 0 is unsure, 1 is SP, -1 is fixation
+    segment_parameters = zeros(size(prelInt, 1), 7); % 4 criteria and mean direction (x,y,z)
+    segment_intersacc_index = zeros(size(prelInt,1),1);
+   
+    segm_i = 1;
+    for i = 1:size(index, 1)
+    	start_ind = index(i,1);
+   		end_ind = index(i,2);
+		
+		while segm_i < size(prelInt, 1)    	
+	    	segm_begin =  prelInt(segm_i,1);
+	    	segm_end = prelInt(segm_i,2);
+
+	    	if segm_begin < start_ind || segm_end > end_ind
+	    		break;
+            end
+            
+            if segm_begin == segm_end
+                segment_class(segm_i) = -1; % segment of 0 length, let's label it as a fixation
+                segm_i = segm_i + 1;
+                continue
+            end
+            if (data(segm_end, timeIndex) - data(segm_begin, timeIndex)) < t_min 
+                segment_class(segm_i) = -1; % too short segment, let it be a fixation
+                segm_i = segm_i + 1;
+                continue
+            elseif (segm_end-segm_begin < 3) %% added by ioannis for jumps in time
+                segment_class(segm_i) = -1; % too short segment, let it be a fixation
+                segm_i = segm_i + 1;
+                continue
+            end
+            
+	    	segment_intersacc_index(segm_i) = i;
+
+            part = vecList(segm_begin:segm_end,:);
+
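+            % the third output of pca holds the variances along the principal
+            % components; the ratio of the 2nd to the 1st gives the dispersion p_D below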
+	    	[coeff, transformed, d_pc] = pca(part);
+
+            maxDisp = GetMaxDispersion(part);
+            
+	        d_ed = GetDispersion(part(end, :), part(1, :));
+
+            shifts = zeros(size(part,1)-1,1);
+            for shiftInd=1:size(shifts,1)
+                shifts(shiftInd) = GetDispersion(part(shiftInd,:), part(shiftInd+1,:));
+            end
+	    	traj_len = sum(shifts);
+
+	    	sp_range = GetMaxDispersion(part);
+	    
+            dirs = diff(part); % vectors to the direction of gaze
+            dirsTmp = dirs;
+            % normalize all direction vectors in order to have same contribution to mean vector
+            for dirInd=1:size(dirs,1)
+                if (sum(dirs(dirInd,:)) == 0)
+                    if (dirInd > 1)
+                        dirs(dirInd,:) = dirs(dirInd-1,:);
+                    else
+                        dirs(dirInd,:) = [1 0 0];
+                    end
+                end
+                dirs(dirInd,:) = dirs(dirInd,:) / norm(dirs(dirInd,:));
+            end
+            mean_dir = sum(dirs,1) / size(dirs,1);
+
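+            % Larsson criteria: p_D dispersion, p_CD consistency of direction,
+            % p_PD position displacement, p_R spatial range of the segment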
+	    	p_D = d_pc(2) / d_pc(1);
+	    	p_CD = d_ed / maxDisp;
+	    	p_PD = d_ed / traj_len;
+	    	p_R = sp_range;
+
+            duration = data(segm_end,timeIndex) - data(segm_begin,timeIndex);
+            l_eta_maxFix = eta_maxFix * (1 + 0.05 * log2(duration)); % increase/decrease spread based on duration of segment
+
+	    	%criteria = [-p_D, p_CD, p_PD, p_R] > [-eta_D, eta_CD, eta_PD, eta_maxFix];
+	    	criteria = [-p_D, p_CD, p_PD, p_R] > [-eta_D, eta_CD, eta_PD, l_eta_maxFix];
+	    	
+	    	segment_parameters(segm_i, 1:4) = criteria;
+	    	segment_parameters(segm_i, 5:7) = mean_dir;
+
+    		if sum(criteria) == 4
+    			segment_class(segm_i) = 1; 
+    		elseif sum(criteria) == 0
+    			segment_class(segm_i) = -1;
+    		else
+    			% uncertain segment
+    			% do nothing yet
+    		end
+
+	    	segm_i = segm_i + 1;
+	    end
+	end
+
+    % initialize result to the labelled input vector
+    result = labelVec;
+
+    for segm_i = 1:size(prelInt, 1)
+    	if segment_class(segm_i) == 0
+    		% uncertain segment
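+    		% second stage: an uncertain segment is resolved either from its own
+    		% positional criteria or by finding an already confirmed SP segment with
+    		% a similar mean direction in the same intersaccadic interval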
+    		if segment_parameters(segm_i, 3) == 0 %similar to fixation
+    			if segment_parameters(segm_i, 4) == 1
+    				segment_class(segm_i) = 2; % SP, as determined on 2nd stage
+    			else
+    				segment_class(segm_i) = -1; % fixation
+    			end
+    		else % similar to SP
+    			% FIXME subject to change if a typo in a paper is found around Table 1
+                % Removed assertion because of adaptive eta_maxFix
+    			%assert(eta_maxFix >= eta_minSmp)
+    			% then if we find any SP segments within the same intersaccadic interval, it's a SP
+    			target_ISI = segment_intersacc_index(segm_i);
+    			for search_i = 1:size(prelInt, 1)
+    				if segment_intersacc_index(search_i) < target_ISI
+    					continue
+    				elseif  segment_intersacc_index(search_i) > target_ISI
+    					break
+    				end
+    				if segment_class(search_i) ~= 1
+    					continue
+    				end
+    				% angle comparison
+                    dir1 = segment_parameters(search_i, 5:7);
+                    dir2 = segment_parameters(segm_i, 5:7);
+                    rel_angle = GetDispersion(dir1, dir2);
+                    if (rel_angle <= phi)
+    					segment_class(segm_i) = 2; % SP, as determined on 2nd stage
+    				end
+    			end
+    			if segment_class(segm_i) == 0
+    				segment_class(segm_i) = -1; % if no similar SP were found, it's a fixation
+    			end
+    		end
+    	end
+
+    	if segment_class(segm_i) > 0 %any of SP
+		    result(prelInt(segm_i,1):prelInt(segm_i,2)) = c_sp; % assign sp
+    	end
+
+        if segment_class(segm_i) < 0 % -1 fixation
+		    result(prelInt(segm_i,1):prelInt(segm_i,2)) = c_fix; % assign fixation
+        end
+    end
+end

+ 44 - 0
larsson_360/parameters.txt

@@ -0,0 +1,44 @@
+Used parameters:
+    Sampling frequency: 250Hz
+    preprocessing:
+        thd = 100.0 deg/s
+    preliminary_segment:
+        t_wind = 44000 us
+        t_overlap = 12000 us
+        eta_p = 0.01 (p-value threshold)
+    extract_sp:
+        t_min = 32000
+        eta_D = 0.45
+        eta_CD = 0.5
+        eta_PD = 0.2
+        eta_maxFix = 1.9
+        eta_minSmp = 1.1
+        phi = 45
+
+Default parameters (from [1]):
+    Sampling frequency: 500Hz
+    preprocessing:
+        thd = 100.0 deg/s
+    preliminary_segment:
+        t_wind = 22000 us
+        t_overlap = 6000 us
+        eta_p = 0.01 (p-value threshold)
+    extract_sp:
+        t_min = 40000 us (40 ms)
+        eta_D = 0.45
+        eta_CD = 0.5
+        eta_PD = 0.2
+        eta_maxFix = 1.9
+        eta_minSmp = 1.7
+        phi = pi/4 rad (45 deg)
+
+
+[1] @article{larsson2015detection,
+        title={Detection of fixations and smooth pursuit movements in high-speed eye-tracking data},
+        author={Larsson, Linn{\'e}a and Nystr{\"o}m, Marcus and Andersson, Richard and Stridh, Martin},
+        journal={Biomedical Signal Processing and Control},
+        volume={18},
+        pages={145--152},
+        year={2015},
+        publisher={Elsevier}
+    }

+ 12 - 0
larsson_360/params_larsson.txt

@@ -0,0 +1,12 @@
+preprocessVelThres=100.0
+t_window=44000 
+t_overlap=12000
+eta_p=0.01
+t_min=32000
+eta_d=0.45
+eta_cd=0.5
+eta_pd=0.2
+eta_maxFix=1.9
+eta_minSmp=1.1
+phi=45
+minConfidence=0.75

+ 148 - 0
larsson_360/preliminary_segment.m

@@ -0,0 +1,148 @@
+function [ prelIntArray ] = preliminary_segment(data, metadata, attributes, labelVec, typeOfMotion, t_wind, t_overlap, eta_p)
+%This function performs Larsson's preliminary segmentation
+%   @data to use
+%   @metadata of the ARFF data
+%   @attributes describing the data
+%   @labelVec label vector to consider for intervals with value 0 (unassigned)
+%   @typeOfMotion gets the values 1 -> eye FOV, 2 -> eye+head, 3 -> head
+%
+%   @t_wind is window size in us, default is 22000
+%   @t_overlap is the overlap size in us, default is 6000
+%   @eta_p - threshold for the average p-value of the Rayleigh test for each sample,
+%   default value (from the paper) is 0.01
+%   
+%   @prelIntArray preliminary segmentation of labelVec intervals
+
+if nargin < 5
+    error('At least 5 arguments are needed');
+end
+if nargin < 6
+    t_wind = 22000; % us
+end
+if nargin < 7
+    t_overlap = 6000; % us
+end 
+if nargin < 8
+    eta_p = 0.01;
+end
+
+% get position of attributes in data
+timeIndex = GetAttPositionArff(attributes, 'time');
+
+[eyeFovVec, eyeHeadVec, headVec] = GetCartVectors(data, metadata, attributes);
+if (typeOfMotion == 1)
+    vecList = eyeFovVec;
+elseif (typeOfMotion == 2)
+    vecList = eyeHeadVec;
+elseif (typeOfMotion == 3)
+    vecList = headVec;
+else
+    error('Unknown motion');
+end
+
+moveId = 0; % unassigned
+intArray = GetIntervalsIndex(labelVec, moveId);
+
+% calculate direction of motion as difference between vector positions in 3D
+dirList = zeros(size(vecList));
+for i=1:size(intArray,1)
+    startIndex = intArray(i,1);
+    endIndex = intArray(i,2);
+
+    for j=startIndex:endIndex-1
+        dirList(j,:) = vecList(j+1,:) - vecList(j,:);
+        if (sum(dirList(j,:)) == 0)
+            if (j > startIndex)
+                dirList(j,:) = dirList(j-1,:);
+            end
+        else
+            % normalize directions to unit vectors
+            dirList(j,:) = dirList(j,:)/norm(dirList(j,:));
+        end
+    end
+    % assign the same value to the last entry as to the penultimate one
+    if (endIndex-startIndex > 1)
+        dirList(endIndex,:) = dirList(endIndex-1,:);
+    end
+end
+
+% calculate P mean
+Pmean = zeros(1, size(data,1));
+N = zeros(1, size(data,1));
+
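+% slide overlapping windows of length t_wind over every intersaccadic interval
+% (a new window starts t_overlap us after the previous one); each sample
+% accumulates the Rayleigh-test p-values of all windows that cover it and is
+% later averaged over the number of covering windows N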
+for i=1:size(intArray,1)
+    startIndex = intArray(i,1);
+    endIndex = intArray(i,2);
+
+    j=startIndex;
+    startInterval = startIndex;
+    newStartInterval = -1;
+    while (j<=endIndex)
+        if (data(j,timeIndex) > data(startInterval,timeIndex) + t_overlap && newStartInterval < 0)
+            newStartInterval = j;
+        end
+
+        if (data(j,timeIndex) < data(startInterval,timeIndex) + t_wind)
+            j = j + 1; % just move to next
+        else
+            % get r for all values
+            p = Rtest(dirList(startInterval:j-1,:));
+            Pmean(startInterval:j-1) = Pmean(startInterval:j-1) + p;
+            N(startInterval:j-1) = N(startInterval:j-1) + 1;
+            startInterval = newStartInterval;
+            j = newStartInterval;
+            newStartInterval = -1;
+        end
+    end
+
+    % get last entries of intersaccadic interval
+    p = Rtest(dirList(startInterval:endIndex,:));
+    Pmean(startInterval:endIndex) = Pmean(startInterval:endIndex) + p;
+    N(startInterval:endIndex) = N(startInterval:endIndex) + 1;
+
+end
+
+Pmean = Pmean./N;
+
+% convert Pmean to 0 1 array
+Pmean(Pmean(:) < eta_p) = 0;
+Pmean(Pmean(:) ~= 0) = 1;
+
+prelIntArray = zeros(0,2);
+
+for i=1:size(intArray,1)
+    startIndex = intArray(i,1);
+    endIndex = intArray(i,2);
+
+    intStart = startIndex;
+    for j=startIndex+1:endIndex
+        if (Pmean(j) ~= Pmean(j-1))
+            prelIntArray = [prelIntArray; intStart j-1];
+            intStart = j;
+        end
+    end
+
+    % add last part of interval
+    if (intStart ~= endIndex)
+        prelIntArray = [prelIntArray; intStart endIndex];
+    end
+end
+
+% Function that returns the mean Rayleigh R
+%
+%   @l_vecList normalized vector list for 3D points in 360 deg videos
+function R = meanR(l_vecList)
+    R = sum(l_vecList,1);
+    R = norm(R) / size(l_vecList,1);
+end
+
+% This function returns the p-value calculated from the Rayleigh test
+function pval = Rtest(l_vecList)
+    r = meanR(l_vecList);
+    n = size(l_vecList,1);
+
+    R = n*r;
+
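+    % closed-form approximation of the Rayleigh test p-value for n unit vectors
+    % with resultant length R; small values indicate a consistent direction of motion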
+    pval = exp(sqrt(1 + 4*n + 4*(n^2 - R^2)) - (1 + 2*n));
+end
+end

+ 78 - 0
larsson_360/preprocessing.m

@@ -0,0 +1,78 @@
+function [ result ] = preprocessing(data, metadata, attributes, labelVec, thd, typeOfMotion)
+%This function processes the intersaccadic intervals (for 1 observer)
+%
+%   @data from ARFF
+%   @metadata of the ARFF file
+%   @attributes describing data
+%   @labelVec label vector to consider for intervals with value 0 (unassigned)
+%   @thd velocity threshold in deg/s; samples faster than this at the beginning
+%   and end of each intersaccadic interval are labelled as noise. Default value
+%   is 100 deg/s.
+%   @typeOfMotion gets the values 1 -> eye FOV, 2 -> eye+head, 3 -> head
+%
+%   @result cleared labelVec after high velocity changes removal
+
+
+%intArray = [index size(data,1)+1];
+moveId = 0; % unassigned part
+intArray = GetIntervalsIndex(labelVec, moveId);
+% get position for time, x, y
+timeInd = GetAttPositionArff(attributes, 'time');
+
+c_speedStep = 2; % allow for a bit of filtering during speed calculation
+
+[eyeFovVec, eyeHeadVec, headVec] = GetCartVectors(data, metadata, attributes);
+if (typeOfMotion == 1)
+    vecList = eyeFovVec;
+elseif (typeOfMotion == 2)
+    vecList = eyeHeadVec;
+elseif (typeOfMotion == 3)
+    vecList = headVec;
+else
+    error('Unknown motion');
+end
+
+speed = GetSpeed(vecList, data(:,timeInd), c_speedStep);
+
+exceedVel = zeros(1, size(data,1)); % denotes if a pair exceeds velocity threshold
+
+% fill exceedVel array
+for i=1:size(intArray,1)
+    startIndex = intArray(i,1);
+    endIndex = intArray(i,2);
+
+    exceedVel(startIndex:endIndex-1) = speed(startIndex:endIndex-1) > thd;
+end
+
+% keep the high-velocity assignment only at the start and end of each intersaccadic interval
+for i=1:size(intArray,1)
+    startIndex = intArray(i,1);
+    endIndex = intArray(i,2);
+    PSOstart = endIndex; % initialise so that the final check below is a no-op if every sample exceeds the threshold
+    PSOend = startIndex;
+
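+    % PSOstart/PSOend bracket the first and last samples that are below the
+    % threshold; everything between them is cleared below, so only high-velocity
+    % samples at the very edges of the interval keep the noise label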
+    for j=startIndex:endIndex
+        if (exceedVel(j) == 0)
+            PSOstart = j;
+            break;
+        end
+    end
+
+    for j=endIndex:-1:startIndex
+        if (exceedVel(j) == 0)
+            PSOend = j;
+            break;
+        end
+    end
+    
+    if (PSOstart < PSOend)
+        exceedVel(PSOstart:PSOend) = 0;
+    end
+end
+
+% assign initial labels to result 
+result = labelVec;
+
+% where velocity exceeds threshold assign value of 4 (noise)
+result(exceedVel(:)==1) = 4;
+end

+ 6 - 0
params_fixations.txt

@@ -0,0 +1,6 @@
+minFixationDur=100000
+maxDistanceDeg=0.55
+velThresholdDegSec=5
+intersaccadicDist=20.0
+intersaccadicLength=500000
+minConfidence=0.25

+ 9 - 0
params_saccades_equirect.txt

@@ -0,0 +1,9 @@
+tolerance=0.1
+thresholdOnsetFast=150
+thresholdOnsetSlow=35
+thresholdOffset=35
+maxSpeed=1031.25
+minDuration=15000
+maxDuration=200000
+velIntegrationInterv=4000
+minConfidence=0.25

+ 9 - 0
params_saccades_fov.txt

@@ -0,0 +1,9 @@
+tolerance=0.1
+thresholdOnsetFast=160
+thresholdOnsetSlow=70
+thresholdOffset=60
+maxSpeed=1031.25
+minDuration=15000
+maxDuration=200000
+velIntegrationInterv=4000
+minConfidence=0.25