THE AUDITORY MODELING TOOLBOX

Applies to version: 1.0.0

EXP_BAUMGARTNER2021 - Simulations of Baumgartner and Majdak (2021)

Program code:

function data = exp_baumgartner2021(varargin)
%EXP_BAUMGARTNER2021 - Simulations of Baumgartner and Majdak (2021)
%   Usage: data = exp_baumgartner2021(flag) 
%
%   EXP_BAUMGARTNER2021(flag) reproduces figures of the study from 
%   Baumgartner and Majdak (2021).
%
%   The following flags can be specified
%
%     fig2 shows cue-specific predictions, sensitivities, prediction errors,  
%         and the optimal relative weights that best explain the actual 
%         externalization ratings. Oracle denotes the prediction errors 
%         obtained with the oracle model.
%
%     fig3 displays the prediction errors for all decision strategies, 
%         the optimal cue weights, and the results for paired cue combinations.
%
%     hartmann1996 models experiments from Hartmann & Wittenberg (1996; Fig.7-8)
%         1st panel: Synthesis of zero-ILD signals. Only the harmonics 
%         from 1 to nprime had zero interaural level difference; 
%         harmonics above nprime retained the amplitudes of the baseline  
%         synthesis. Externalization scores as a function of the boundary  
%         harmonic number nprime. Fundamental frequency of 125 Hz.
%         2nd panel: Synthesis of signals to test the ISLD hypothesis. 
%         Harmonics at and below the boundary retained only the interaural 
%         spectral level differences of the baseline synthesis. Higher 
%         harmonics retained left and right baseline harmonic levels. 
%         Externalization scores as a function of the boundary
%         frequency.
%
%     hassager2016 models experiments from Hassager et al. (2016; Fig.6). 
%         The mean perceived sound source location of the seven listeners 
%         (black) as a function of the bandwidth factor, together with the 
%         corresponding model predictions (colored). 
%         The model predictions have been shifted slightly to the right 
%         for a better visual interpretation. The error bars are one 
%         standard error of the mean.
%
%     baumgartner2017 models experiments from Baumgartner et al. (2017). 
%         Effect of HRTF spectral contrast manipulations on sound externalization. 
%         Externalization scores were derived from paired comparisons via 
%         Bradley-Terry-Luce modeling.
%
%     boyd2012 models experiments from Boyd et al. (2012; Fig.1, top).
%         Average externalization ratings of 1 talker for NH participants 
%         against mix point as a function of microphone position (ITE/BTE) 
%         and frequency response (BB/LP). The reference condition (ref) is 
%         the same as ITE/BB. Error bars show SEM. 
%
%   Requirements: 
%   -------------
%
%   1) SOFA API v0.4.3 or higher from http://sourceforge.net/projects/sofacoustics for Matlab (in e.g. thirdparty/SOFA)
% 
%   2) Data in hrtf/baumgartner2017
%
%   3) Statistics Toolbox for Matlab (for some of the figures)
%
%   Examples:
%   ---------
%
%   To display individual predictions for all experiments use:
%
%     exp_baumgartner2021('fig2');
%
%   To display results for different decision strategies use:
%
%     exp_baumgartner2021('fig3');
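%
%   To model an individual experiment, e.g., Hartmann and Wittenberg (1996), use:
%
%     exp_baumgartner2021('hartmann1996');
%
%   For a quick test run, the quickCheck flag restricts some simulations to 
%   the first five listeners and skips caching of the results, e.g.:
%
%     exp_baumgartner2021('hassager2016','quickCheck');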
%
%   References:
%     A. W. Boyd, W. M. Whitmer, J. J. Soraghan, and M. A. Akeroyd. Auditory
%     externalization in hearing-impaired listeners: The effect of pinna cues
%     and number of talkers. J. Acoust. Soc. Am., 131(3):EL268--EL274, 2012.
%     http://dx.doi.org/10.1121/1.3687015
%     
%     W. M. Hartmann and A. Wittenberg. On the externalization of sound
%     images. J. Acoust. Soc. Am., 99(6):3678--88, June 1996.
%     
%     H. G. Hassager, F. Gran, and T. Dau. The role of spectral detail in the
%     binaural transfer function on perceived externalization in a
%     reverberant environment. J. Acoust. Soc. Am., 139(5):2992--3000, 2016.
%     http://dx.doi.org/10.1121/1.4950847
%     
%   See also: baumgartner2017 baumgartner2021
%
%   Url: http://amtoolbox.sourceforge.net/amt-0.10.0/doc/experiments/exp_baumgartner2021.php

% Copyright (C) 2009-2020 Piotr Majdak and the AMT team.
% This file is part of Auditory Modeling Toolbox (AMT) version 1.0.0
%
% This program is free software: you can redistribute it and/or modify
% it under the terms of the GNU General Public License as published by
% the Free Software Foundation, either version 3 of the License, or
% (at your option) any later version.
%
% This program is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
% GNU General Public License for more details.
%
% You should have received a copy of the GNU General Public License
% along with this program.  If not, see <http://www.gnu.org/licenses/>.
   
% AUTHOR: Robert Baumgartner, Acoustics Research Institute, Vienna, Austria

definput.import={'amt_cache','baumgartner2021'};
definput.flags.type = {'all','fig2','fig3','hartmann1996','hassager2016','baumgartner2017','boyd2012','pairs','figStrat','tab2','tab3'};
definput.flags.gradients={'positive','negative','both'};
definput.flags.quickCheck = {'','quickCheck'};
definput.flags.disp = {'no_debug','debug'};
definput.keyvals.Sintra = 2;
definput.keyvals.Sinter = 2;
definput.keyvals.numCue = 2;
definput.keyvals.ignore = {'ITSD'};

[flags,kv]  = ltfatarghelper({},definput,varargin);

Ncues = 8;

% Normalized externalization rating scale
Erange = 1;
Eoffset = 0;

% For cue combination analysis
% Sfix = [.16,.40,nan,.45,.70,.37,nan,nan]; % obtained from tab2
cueSelLbl = {{'MSS','ISS','MSSD','ISSD','ITIT','IC','MI'};...
             {'MSS','ISS','MSSD','ISSD','ITIT','IC'};...
             {'MSS','ISS','ISSD','ITIT','IC'};...
             {'MSS','ISS','ISSD','ITIT'};...
             {'MSS','ISSD','ITIT'};...
             {'MSS','ITIT'};...
             {'MSS'};...
            };
cueSelLblTxt = {'MSS, ISS, MSSD, ISSD, ITIT, IC, MI';...
                'MSS, ISS, MSSD, ISSD, ITIT, IC';...
                'MSS, ISS, ISSD, ITIT, IC';...
                'MSS, ISS, ISSD, ITIT';...
                'MSS, ISSD, ITIT';...
                'MSS, ITIT';...
                'MSS'};
              
strategies = {'Oracle','WSM','LTA','MTA','WTA'};
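% Oracle = optimal weighting; WSM = weighted-sum model (fixed weighting);
% LTA/MTA/WTA = loser-/median-/winner-takes-all selection (see mapAndDecide)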

% Symbol order for plotting
symb = {'-o','-s','-h','-d','-<','->','-^','-x','-p',':*','--+',':v'};
colors = 0.8*[zeros(1,3);jet(Ncues);lines(3)];

%% Hartmann & Wittenberg (1996)
if flags.do_hartmann1996
  
  cond = {'ILD','ISLD'};
%   condPlotLbl = {'ILD to 0','ILD const.'};
  nprime = [0,1,8,14,19,22,25,38];
  f0 = 125; %Hz
  
  fncache = flags.type;%['hartmann1996'];
  if not(flags.do_positive)
    fncache = [fncache,'_',flags.gradients,'Grad'];
  end
  [Pext,cues,cueLbl] = amt_cache('get',fncache,flags.cachemode);
  if isempty(Pext)
    azi = -37;
    data = data_baumgartner2017;
    if flags.do_quickCheck
      data = data(1:5);
    end
    Pext = repmat({nan(length(data),length(nprime))},2,2);
    cues = repmat({nan(length(data),length(nprime),Ncues)},2,1);
    for isub = 1:length(data)
      Obj = data(isub).Obj;
      template = sig_hartmann1996(0,'Obj',Obj,'dur',0.1);
      for ee = 1:length(cond)
        for nn = 1:length(nprime)
          target = sig_hartmann1996(nprime(nn),'Obj',Obj,'dur',0.1,cond{ee});
          [~,cues{ee}(isub,nn,:),cueLbl] = baumgartner2021(target,template,'flow',100,'fhigh',5000,'lat',azi,flags.gradients);
        end
      end
      amt_disp([num2str(isub),' of ',num2str(length(data)),' subjects completed.']);
    end
    if not(flags.do_quickCheck)
      amt_cache('set',fncache,Pext,cues,cueLbl);
    end
  end
  
  %% Cue weighting optimization procedure
  
%   Erange = 1; % original: 3
%   Eoffset = 0;
  Pext = cell(Ncues-length(kv.ignore),length(cond));
  PextNumCue = cell(length(strategies),length(cond));
  figure
  hax{1} = tight_subplot(1,2,0,[.15,.1],[.1,.05]);
  figure;
  hax{2} = tight_subplot(1,2,0,[.15,.1],[.1,.05]);
  for ee = 1:length(cond)
    act = data_hartmann1996(cond{ee});
    actual = act.avg.Escore(:)/3; % normalized response scale
    
    % align actual and simulated data format
    tmp = permute(cues{ee},[2,1,3]);
    tmp = interp1(nprime,tmp,act.avg.nprime);
    predCue = squeeze(num2cell(tmp,1:2));
    
    fncache2 = [fncache,'_exp',num2str(ee)];
    amt_disp([flags.type,', Exp. ',num2str(ee)]);
    amt_cache('set',[fncache2,'_fitting'],predCue,actual);
    
    % cue analysis: first optimal then fixed sensitivities
    [Pext(:,ee),PextNumCue(:,ee),cueTab{ee},cueSelTab{ee}] = cueAnalysis(...
      cueLbl,cueSelLbl,cueSelLblTxt,predCue,actual,Erange,Eoffset,fncache2,kv);
  
  %% Plot
    dataLbl = [{'Actual'};cueTab{ee}.Properties.RowNames];
    Nsubj = size(Pext{1},2);
    act = data_hartmann1996(cond{ee});
    x = 1e-3*f0*act.avg.nprime;
    yall = {Pext,PextNumCue};
    for iy = 1:length(yall)
      axes(hax{iy}(ee)); clear h
      y = yall{iy};
      for cc = 1:size(y,1)
          dx = 0.025*(cc - size(y,1)/2);
          h(cc+1) = errorbar(x+dx,mean(y{cc,ee},2),std(y{cc,ee},0,2)/sqrt(Nsubj),...
            symb{cc+1},'Color',colors(cc+1,:));
          hold on
      end
      h(1) = plot(x,act.avg.Escore/3,symb{1},'Color',colors(1,:));
      set(h(2:end),'MarkerFaceColor','w')
      set(h(1),'MarkerFaceColor','k')
      if ee == 1
        xlabel('Highest frequency altered in kHz') %n^{\prime}
        ylabel('Externalization')
      else
%         xlabel('Highest frequency altered in kHz')
        set(gca,'YTickLabel',{})
      end
      axis([x(1)-.5,x(end)+.5,Eoffset-0.1*Erange,Eoffset+1.1*Erange])
      box off
      if ee == 1
        if iy == 1
          leg= [{'Actual'};cueTab{ee}.Properties.RowNames];
          legend(h,leg(1:end-1),'Location','southwest','box','off');
        else
          leg= [{'Actual'};strategies(:)];
          legend(h,leg,'Location','southwest','box','off');
        end
      end
    end
  end

end

%% Hassager et al. (2016)
if flags.do_hassager2016
  azi = [0,50];
  flp = 6000; % lowpass filtered noise stimuli
  
  Pext_A = data_hassager2016;
  B = Pext_A.B;
  
  fncache = flags.type;
  if not(flags.do_positive)
    fncache = [fncache,'_',flags.gradients,'Grad'];
  end
  [Pext,cues,cueLbl] = amt_cache('get',fncache,flags.cachemode);
  if isempty(cues)
    
    data = data_baumgartner2017;
    if flags.do_quickCheck
      data = data(1:5);
    end

    cues = nan(length(B),length(azi),length(data),Ncues);
    for isubj = 1:length(data)
      Obj = data(isubj).Obj;
      for iazi = 1:length(azi)
        idazi = Obj.SourcePosition(:,1) == azi(iazi) & Obj.SourcePosition(:,2) == 0;
        template = squeeze(shiftdim(Obj.Data.IR(idazi,:,:),2));
        for iB = 1:length(B)
          amt_disp(num2str(iB),'volatile');
          if isnan(B(iB))
            target = template;
          else
            Obj_tar = sig_hassager2016(Obj,B(iB));
            target = squeeze(shiftdim(Obj_tar.Data.IR(idazi,:,:),2));
          end
          [~,cues(iB,iazi,isubj,:),cueLbl] = baumgartner2021(target,template,'flow',100,'fhigh',flp,'lat',azi(iazi),flags.gradients);
        end
      end
      amt_disp([num2str(isubj),' of ',num2str(length(data)),' subjects completed.']);
    end
     
    if not(flags.do_quickCheck)
      amt_cache('set',fncache,Pext,cues,cueLbl);
    end
  end
  
  %% Cue weighting optimization procedure
  dimSubj = 3;
%   dimCues = 4;
  Nsubj = size(cues,dimSubj);

  predCue = squeeze(num2cell(reshape(cues,[],Nsubj,Ncues),1:2));
  % optimize sensitivities
  actual = Pext_A.rating(:);%repmat(Pext_A.rating(:),Nsubj,1); 
  actual = (actual-1)/4; % normalized response scale
%   Erange = 1; % original: 4
%   Eoffset = 0; % original: 1
  amt_disp(flags.type,'documentation');
  
  amt_cache('set',[fncache,'_fitting'],predCue,actual);
  
  % cue analysis
  [Pext,PextNumCue,cueTab,cueSelTab] = cueAnalysis(...
    cueLbl,cueSelLbl,cueSelLblTxt,predCue,actual,Erange,Eoffset,fncache,kv);

  
  %% Plot
  BplotTicks = logspace(log10(1),log10(64),3);
  BplotTicks = round(BplotTicks*100)/100;
  BplotStr = ['Ref.';[repmat(' ',3,1),num2str(BplotTicks(:)),repmat(' ',3,1)]];
  BplotTicks = [0.25/2,BplotTicks];
  B(1) = B(2)/2;
  yall = {Pext,PextNumCue};
  for iy = 1:length(yall)
    y = yall{iy};
    figure 
    hax = tight_subplot(1,2,0,[.15,.1],[.1,.05]);
    for iazi = 1:length(azi)
      axes(hax(iazi)); clear h
      h(1) = plot(B,(Pext_A.rating(:,iazi)-1)/4,symb{1},'Color',colors(1,:));
      hold on
      for m = 1:length(y)
        dx = 1+0.01*(m - (Ncues+1)/2);
        pred = reshape(y{m},length(B),[],Nsubj);
        h(m+1) = errorbar(dx*B,mean(pred(:,iazi,:),dimSubj),std(pred(:,iazi,:),0,dimSubj)/sqrt(Nsubj),...
          symb{m+1},'Color',colors(m+1,:));
      end
      set(h,'MarkerFaceColor','w')
      set(h(1),'MarkerFaceColor','k')
      set(gca,'XTick',BplotTicks,'XTickLabel',BplotStr,'XScale','log')
      axis([BplotTicks(1)/1.5,BplotTicks(end)*1.5,Eoffset-0.1*Erange,Eoffset+1.1*Erange])
      xlabel('Bandwidth Factor [ERB]')
      if iazi==1
        ylabel('Externalization')
      else
        set(gca,'YTickLabel',{})
      end
      text(0.125,Eoffset,[num2str(azi(iazi)),'\circ'])
    end
    if iy == 1
      leg=[{'Actual'};cueTab.Properties.RowNames];
      leg = legend(h,leg(1:end-1),'Location','southwest');
    else
      leg = legend(h,[{'Actual'};strategies(:)],'Location','southwest');
    end
    set(leg,'Box','off')
  end

  
end

%% Baumgartner et al. (2017) ----------------------------------------------
if flags.do_baumgartner2017
  
  % Behavioral results
  fncache = [flags.type,'_actual'];%'baumgartner2017_E_BTL';
  data = amt_cache('get',fncache,flags.cachemode);
  if isempty(data)
    exp2 = data_baumgartner2017looming('exp2');
    Nsubj = length(exp2.meta.subject);
    iISI = exp2.meta.ISI == 0.1;
    iresp = strcmp(exp2.meta.response,'approaching');
    data.C = 0:0.5:1;
    data.subj = [];%exp2.meta.subject;
    data.Escore = [];%nan(Nsubj,length(A));
    data.azi = [exp2.rawData.azimuth];
    M = zeros(3,3);
    iM = [7,4,8,3,2,6]; % indices to map judgements to preference matrix
    iss = 0;
    for ss = 1:Nsubj
      M(iM) = exp2.data(ss,1:6,iresp,iISI)+eps;
      Ns = 200; % max # hits  
      E = nansum(M,2)/Ns;
      E = E/E(3);
      if E(1)<=1 % exclude listeners for whom C = 0 was rated more externalized than the reference (C = 1): either the subject misunderstood the task or something was wrong with the reference
        iss = iss+1;
        data.subj{iss} = exp2.meta.subject{ss};
        data.Escore(iss,:) = E;
      end
    end 
    amt_cache('set',fncache,data)
  end
  Nsubj = length(data.subj);
  
  hrtf = data_baumgartner2017looming('exp2','hrtf');
  
  % Modeling
  fncache = flags.type;
  if not(flags.do_positive)
    fncache = [fncache,'_',flags.gradients,'Grad'];
  end
  [Pext,cues,cueLbl] = amt_cache('get',fncache,flags.cachemode);
  if isempty(cues)
    
    Pext = nan(length(data.C),Nsubj);
    cues = nan(length(data.C),Nsubj,Ncues);
    for isubj = 1:Nsubj
      template = hrtf(ismember({hrtf.id},data.subj(isubj))).Obj;
      for iC = 1:length(data.C)
        target = sig_baumgartner2017looming(template,data.C(iC));
        [Pext(iC,isubj),cues(iC,isubj,:),cueLbl] = baumgartner2021(target,template,...
                          'lat',data.azi(isubj),'flow',1000,'fhigh',18e3,flags.gradients);
        
      end
      amt_disp([num2str(isubj),' of ',num2str(Nsubj),' subjects completed.'],'progress');
    end
    amt_cache('set',fncache,Pext,cues,cueLbl);
  end
 
  %% Cue weighting optimization procedure
  dimSubj = 2;
  dimCues = 3;
%   Erange = 1;
%   Eoffset = 0;
  
  Nsubj = size(cues,dimSubj);
  actual = permute(data.Escore,[2,1]);
  predCue = squeeze(num2cell(cues,1:dimCues-1));
  amt_disp(flags.type,'documentation');
  amt_cache('set',[fncache,'_fitting'],predCue,actual);
  [Pext,PextNumCue,cueTab,cueSelTab] = cueAnalysis(...
    cueLbl,cueSelLbl,cueSelLblTxt,predCue,actual,Erange,Eoffset,fncache,kv);
  
  %% Figure
  flags.do_plot_individual = false;
  yall = {[{data.Escore'};Pext],[{data.Escore'};PextNumCue]};
  for iy = 1:length(yall)
    y = yall{iy};
    figure; clear h;
    if not(flags.do_plot_individual)
      for iE = 1:length(y)
        dx = .005*(iE - length(y)/2);
        h(iE) = errorbar(data.C+dx,mean(y{iE},2),std(y{iE},0,2)/sqrt(Nsubj),...
          symb{iE},'Color',colors(iE,:));
        set(h,'MarkerFaceColor','w')
        hold on
      end
      set(h(1),'MarkerFaceColor','k')
      set(gca,'XTick',data.C)
      axis([-0.2,1.2,Eoffset-0.1*Erange,Eoffset+1.1*Erange])
      ylabel('Externalization')
      xlabel('Spectral contrast, C')
    else % flags.do_plot_individual
      for isubj = 1:Nsubj
        subplot(3,ceil(Nsubj/3),isubj); clear h;
        for iE = 1:length(y)
          dx = .005*(iE - length(y)/2);
          h(iE) = plot(data.C+dx,y{iE}(:,isubj),...
            symb{iE},'Color',colors(iE,:));
          set(h,'MarkerFaceColor','w')
          hold on
        end
        set(h(1),'MarkerFaceColor','k')
        set(gca,'XTick',data.C)
        axis([-0.2,1.2,Eoffset-0.1*Erange,Eoffset+1.1*Erange])
        ylabel('Externalization')
        xlabel('Spectral contrast, C')
      end
    end
    if iy == 1
      leg=[{'Actual'};cueTab.Properties.RowNames];
      legend(h,leg(1:end-1),'Location','EastOutside','box','off')
    else
      leg=[{'Actual'};strategies(:)];
      legend(h,leg,'Location','EastOutside','box','off')
    end
  end
  
end

%% Boyd et al. (2012) -----------------------------------------------------
if flags.do_boyd2012
  
  flags.do_BRIR = true; % directly based on BRIRs
  flags.do_noise = false; % or based on BRIR-convoluted noise
  % otherwise: original speech samples
  
  flp = [nan,6500]; % Low-pass cut-off frequency
  azi = -30;
  
  subjects = data_boyd2012;
  if flags.do_noise || flags.do_BRIR
    subjects = subjects([3,6,7]); % only the ones with BRIRs
  else
    subjects = subjects([1,3,4,6,7]); % only the ones with stimuli
  end
  mix = subjects(1).Resp.mix/100;
  idn = not(isnan(mix));
  mix = mix(idn); % omit reference
  
  % determine cache name
  fncache = flags.type;%['boyd2012'];
  if not(flags.do_positive)
    fncache = [fncache,'_',flags.gradients,'Grad'];
  end
  if flags.do_noise
    fncache = [fncache,'_noise'];
  elseif flags.do_BRIR
    % standard
  else
    fncache = [fncache,'_speech'];
  end
    
  [E,cues,cueLbl] = amt_cache('get',fncache,flags.cachemode);
  if isempty(cues)
    
    if flags.do_noise || flags.do_BRIR
      sig = noise(50e3,1,'pink');
      fs = subjects(1).BRIR.fs;
      [b,a]=butter(10,2*flp(2)/fs,'low');
    end

    E.all = repmat({nan([length(mix),2,2,length(subjects)])},1,1);
    cues = nan(length(mix),2,2,length(subjects),Ncues);
    for isub = 1:length(subjects)
      E.all{1}(:,:,:,isub) = cat(3,...
        [subjects(isub).Resp.ITE_BB_1T(idn),subjects(isub).Resp.BTE_BB_1T(idn)],...
        [subjects(isub).Resp.ITE_LP_1T(idn),subjects(isub).Resp.BTE_LP_1T(idn)]);
      if flags.do_noise
        stim{1} = lconv(sig,subjects(isub).BRIR.ITE(:,:,1));
        stim{2} = lconv(sig,subjects(isub).BRIR.BTE(:,:,1));
        stim{3} = lconv(sig,subjects(isub).BRIR.noHead(:,:,1));
        template = stim{1};
        targetSet = cell(length(mix),2,2);
      elseif flags.do_BRIR
        template = subjects(isub).BRIR.ITE(:,:,1);
        stim{1} = subjects(isub).BRIR.ITE(:,:,1);
        stim{2} = subjects(isub).BRIR.BTE(:,:,1);
        stim{3} = subjects(isub).BRIR.noHead(:,:,1);
        targetSet = cell(length(mix),2,2);
      else % speech samples
        template = subjects(isub).Reference_1T;
        targetSet = cat(3,...
          [subjects(isub).Target.ITE_BB_1T(:),subjects(isub).Target.BTE_BB_1T(:)],...
          [subjects(isub).Target.ITE_LP_1T(:),subjects(isub).Target.BTE_LP_1T(:)]);
      end

      %% Model simulations
      flow = 100;
      fhigh = [16e3,16e3];  % set fhigh(2) to flp(2) for manual bandwidth adjustment
            
      for c = 1:2
        for lp = 1:2
          for m = 1:length(mix)
            % mixing
            if flags.do_noise || flags.do_BRIR
              targetSet{m,c,lp} = mix(m)*stim{c} + (1-mix(m))*stim{3};
              % low-pass filtering
              if lp == 2
                targetSet{m,c,lp} = filter(b,a,targetSet{m,c,lp});
              end
            end
            target = targetSet{m,c,lp};
            [~,cues(m,c,lp,isub,:),cueLbl] = baumgartner2021(target,template...
              ,'argimport',flags,kv,'flow',flow,'fhigh',fhigh(lp),'lat',azi,flags.gradients);%,'reflectionOnsetTime',5e-3);
             
          end
        end
      end
      amt_disp([num2str(isub),' of ',num2str(length(subjects)),' subjects completed.']);

    end
    
    if not(flags.do_quickCheck)
      amt_cache('set',fncache,E,cues,cueLbl);
    end
  end

  %% Cue weighting optimization procedure
  dimSubj = 4;
%   Erange = 1; % original: 100
%   Eoffset = 0;
  
  Nsubj = size(cues,dimSubj);
  if flags.do_BRIR % comparison on individual basis
    actual = E.all{1};
    actual = reshape(actual,[],Nsubj);
  else % average comparison
    actual = mean(E.all{1},4);
    actual = actual(:);
  end
  
  actual = actual/100; % normalized response range
  E.all{1} = E.all{1}/100;
  
  predCue = squeeze(num2cell(reshape(cues,[],Nsubj,Ncues),1:2));
  amt_disp(flags.type,'documentation');
  amt_cache('set',[fncache,'_fitting'],predCue,actual);
  [Pext,PextNumCue,cueTab,cueSelTab] = cueAnalysis(...
    cueLbl,cueSelLbl,cueSelLblTxt,predCue,actual,Erange,Eoffset,fncache,kv);
  
  dims = [length(mix),size(cues,2),size(cues,3),size(cues,4)];
  for ii = 1:length(Pext)
    E.all{ii+1} = reshape(Pext{ii},dims);
  end
  EnumCue = E.all(1);
  for ii = 1:length(PextNumCue)
    EnumCue{ii+1} = reshape(PextNumCue{ii},dims);
  end
  
  %% Plot
  flags.do_plot_individual = false;
  
  condLbl = {'ITE | BB','BTE | BB','ITE | LP','BTE | LP'};
  mix = mix*100;
  
  if not(flags.do_plot_individual)
    yall = {E.all,EnumCue};
    for iy = 1:length(yall)
      y = yall{iy};
      figure
      hax = tight_subplot(2,2,0,[.15,.1],[.1,.05]);
      for cc = 1:length(condLbl)
        axes(hax(cc))
        for ee = 1:length(y)
          dx = 0.5*(ee - length(y)/2);
          m = mean(y{ee},4);
          s = std(y{ee},0,4);
          h = errorbar(mix+dx,m(:,cc),s(:,cc),...
            symb{ee},'Color',colors(ee,:));
          if ee == 1
            set(h,'MarkerFaceColor','k')
          else
            set(h,'MarkerFaceColor','w')
          end
          hold on
        end
        set(gca,'XDir','reverse')

        if cc == 3 || cc == 4
          xlabel('Mix (%)')
        else
          set(gca,'XTickLabel',[])
        end

        if cc == 1 || cc == 3
          ylabel('Externalization')
        else
          set(gca,'YTickLabel',[])
        end
        text(110,0,condLbl{cc},'HorizontalAlignment','left')
        axis([-20,120,Eoffset-0.1*Erange,Eoffset+1.1*Erange])
      end
    end
  else % flags.do_plot_individual
    
    for isub = 1:size(E.all{1},4)
      figure
      hax = tight_subplot(1,4,0,[.15,.1],[.1,.05]);
      for cc = 1:length(condLbl)
        axes(hax(cc))
        for ee = 1:length(E.all)
          Eisub = E.all{ee}(:,:,:,isub);
          h = plot(mix,Eisub(2:end,cc),symb{ee},'Color',colors(ee,:));
          set(h,'MarkerFaceColor','w')
          hold on
        end
        set(gca,'XDir','reverse')
        xlabel('Mix (%)')
        if cc == 1
          ylabel('Externalization')
        else
          set(gca,'YTickLabel',[])
        end
        title(condLbl{cc})
        axis([-20,120,Eoffset-0.1*Erange,Eoffset+1.1*Erange])
        if cc == 4
          leg = legend([{'Actual'};cueTab.Properties.RowNames(1:end-1)]);
          set(leg,'Box','off','Location','north')
        end
      end
    end
    
  end

%% Output
Pext = E;

end

if flags.do_all
  exp = {'hartmann1996','hassager2016','baumgartner2017','boyd2012'};
  for ii = 1:length(exp)
    exp_baumgartner2021(exp{ii},flags.cachemode);
  end
  
elseif flags.do_fig3 || flags.do_pairs
  exp = {'hartmann1996_exp1','hartmann1996_exp2','hassager2016','baumgartner2017','boyd2012'};
  Nexp = length(exp);
  [~,~,cueLbl] = amt_cache('get','hassager2016');
  predCue = [];
  actual = [];
  for ii = 1:Nexp
    if flags.do_positive
      fn = [exp{ii},'_fitting'];
    else
      if ii <=2
        fn = [exp{ii}(1:12),'_',flags.gradients,'Grad',exp{ii}(13:end),'_fitting'];
      else
        fn = [exp{ii},'_',flags.gradients,'Grad','_fitting'];
      end
    end
    [p,a] = amt_cache('get',fn);
    if isempty(p)
      amt_disp('Execution stopped because cached data not found. Please run calculations for fig2 first.');
      return
    end
    p = mean(cat(3,p{:}),2);
    predCue = cat(1,predCue,p);
    actual = cat(1,actual,mean(a,2));
  end
  predCue = squeeze(num2cell(predCue,1:2));
  
  Pext = cell(length(cueSelLbl),1);
  Nstrat = length(strategies)-1; % no Oracle
  Wsel = nan(length(cueSelLbl),length(cueSelLbl{1}),Nstrat);
  RMSE = nan(length(cueSelLbl),Nstrat);
  RMSE_CI = nan(length(cueSelLbl),Nstrat,2);
  BIC = RMSE;
  for ics = 1:length(cueSelLbl)
    cueSelId = ismember(cueLbl(:,1),cueSelLbl{ics});
    [Pext{ics},S{ics},Wtmp,MSEtmp,BICtmp] = mapAndDecide(...
      predCue(cueSelId),actual,Erange,Eoffset);
    Wsel(ics , ismember(cueSelLbl{1},cueSelLbl{ics}),:) = Wtmp(:,2:end);
    RMSE(ics,:) = sqrt(MSEtmp.m(end-Nstrat+1:end));
    RMSE_CI(ics,:,:) = sqrt(MSEtmp.ci(end-Nstrat+1:end,:));
    BIC(ics,:) = BICtmp(end-Nstrat+1:end);
  end
  cueSelTab{1} = table(RMSE(:,1),RMSE(:,2),RMSE(:,3),RMSE(:,4),...
            BIC(:,1),BIC(:,2),BIC(:,3),BIC(:,4),...
      'RowNames',cueSelLblTxt,...
      'VariableNames',...
      {'RMSE_WSM','RMSE_LTA','RMSE_MTA','RMSE_WTA',...
       'BIC_WSM','BIC_LTA','BIC_MTA','BIC_WTA'});
  amt_disp('Prediction errors for tested decisions:','documentation');
  amt_disp(cueSelTab{1},'documentation');
  for ss = 1:Nstrat
    amt_disp([strategies{1+ss} ' weights/percentages'],'documentation');
    cueSelTab{1+ss} = array2table([squeeze(Wsel(:,:,ss)),RMSE(:,ss),BIC(:,ss)],...
      'VariableNames',[cueSelLbl{1},{'RMSE','BIC'}],...
      'RowNames',cueSelLblTxt);
     amt_disp(cueSelTab{1+ss},'documentation');
  end

  figure;
  colors = [0.8500    0.3250    0.0980;... % red
            0.9290    0.6940    0.1250;... % yellow
            0.4940    0.1840    0.5560;... % purple
            0.4660    0.6740    0.1880]; % green
  x = length(cueSelLbl{1}):-1:1;
  b = bar(x,RMSE);
  for i = 1:length(b)
    b(i).FaceColor = colors(i,:);
  end
  hold on
  ngroups = size(RMSE, 1);
  nbars = size(RMSE, 2);
  % Calculating the width for each bar group
  groupwidth = min(0.8, nbars/(nbars + 1.5));
  errlow = RMSE-RMSE_CI(:,:,1);
  errhigh = RMSE_CI(:,:,2)-RMSE;
  for i = 1:nbars
      x = (1:ngroups) - groupwidth/2 + (2*i-1) * groupwidth / (2*nbars);
      errorbar(fliplr(x), RMSE(:,i), errlow(:,i), errhigh(:,i), 'k.');
  end
  hold off
  leg = legend(strategies(2:end),'Location','northwest');
  leg.Box = 'off';
  xlabel('Number of cues')
  ylabel('RMS prediction error')

  %% only plot results for 7 cues
  figure
  for ii = 1:4
    b(ii) = bar(ii,RMSE(1,ii));
    b(ii).FaceColor = colors(ii,:);
    hold on
    errorbar(ii, RMSE(1,ii), errlow(1,ii), errhigh(1,ii), 'k.');
  end
  hold off
  set(gca,'XLim',[.4,4.6],'XTick',1:4,'XTickLabel',strategies(2:end))
  set(gca,'XTickLabelRotation',45)
  xlabel('Strategy')
  ylabel('RMS prediction error')
  box off
%     RB_print(gcf,[3.5,8],'exp_baumgartner2021_tab1_MSEallCues',10,2,'-r1000',1)
  %%
  figure
  x = 1:length(cueSelLbl{1});
  Wsel(:,:,1) = 100*Wsel(:,:,1); % also weights in percent
  b = barh(fliplr(x),squeeze(Wsel(1,:,:)));
  for i = 1:length(b)
    b(i).FaceColor = colors(i,:);
  end
  set(gca,'YTick',x,'YTickLabel',fliplr(cueSelLbl{1}))
%     xlabel('Cue')
  xlabel('Weight or selection rate in %')
  box off
%     RB_print(gcf,[5.5,8],'exp_baumgartner2021_tab1_weights',10,2,'-r1000',1)

  Ncues = length(cueSelLbl{1});
  combs = nchoosek(1:Ncues,2);
  Ncombs = size(combs,1);
  Pext = cell(Ncombs,1);
  Wsel = nan(Ncombs,Ncues);
  S = nan(Ncombs,Ncues);
  Nstrat = length(strategies)-1; % no Oracle
  RMSE = nan(Ncombs,Nstrat);
  RMSE_CI = nan(Ncombs,Nstrat,2);
  BIC = RMSE;
  for ics = 1:Ncombs
    cueSelId = false(length(predCue),1);
    cueSelId(combs(ics,:)) = true;
    [Pext{ics},Stmp,Wtmp,MSEtmp,BICtmp] = mapAndDecide(...
      predCue(cueSelId),actual,Erange,Eoffset);
    Wsel(ics , cueSelId(1:Ncues)) = Wtmp(:,2);
    S(ics , cueSelId(1:Ncues)) = Stmp;
    RMSE(ics,:) = sqrt(MSEtmp.m(end-Nstrat+1:end));
    RMSE_CI(ics,:,:) = sqrt(MSEtmp.ci(end-Nstrat+1:end,:));
    BIC(ics,:) = BICtmp(end-Nstrat+1:end);
  end
  Stab = array2table(S,'VariableNames',cueSelLbl{1});
  data = array2table(cat(2,Wsel,RMSE(:,1),squeeze(RMSE_CI(:,1,:))),...
          'VariableNames',cat(2,cueSelLbl{1},{'WSM_RMSE','CI_low','CI_high'}));
  [data,isort]=sortrows(data,8);
  Stab = Stab(isort,:);
  amt_disp('Contributions and Prediction errors:','documentation');
  amt_disp(data,'documentation');
  amt_disp('Sensitivities:','documentation');
  amt_disp(Stab,'documentation');
  %%
  figure
  ha = tight_subplot(2,1,0,[.12 .03],[.15 .22]);
  axes(ha(1))
  x = 1:length(isort);
  xlim = [x(1)-.5,x(end)+.5];
  y = RMSE(isort,1);
  try
    h = errorbar(x,y,y-RMSE_CI(isort,1,1),RMSE_CI(isort,1,2)-y,'k.','CapSize',0);
  catch
    h = errorbar(x,y,y-RMSE_CI(isort,1,1),RMSE_CI(isort,1,2)-y,'k.');
  end
  hold on
  plot(xlim,ones(2,1)*RMSE_CI(isort(1),1,2),'k--')
  ylabel('Error')
  axis([xlim,.01,.5])
  box off
  set(gca,'XTickLabel',{},'XTick',x)
  axes(ha(2))
  imagesc(Wsel(isort,:)')
  colormap(flipud(hot(100)));
  c = colorbar('eastoutside');
  c.Position(1) = c.Position(1)+.1;
  c.Label.String = 'Weight';
  c.Box = 'off';
  XTickLabel = num2cell(x);
  XTickLabel(2:2:end) = repmat({[]},floor(length(x)/2),1);
  set(gca,'YTick',1:length(cueSelLbl{1}),'YTickLabel',cueSelLbl{1},'XTick',x,'XTickLabel',XTickLabel)
  xlabel('Error rank')
  box off
%     RB_print(gcf,[8,8],'exp_baumgartner2021_pairs',10,10,'-r1000',1)
  
elseif flags.do_tab2 || flags.do_fig2

  exp = {'hartmann1996_exp1','hartmann1996_exp2','hassager2016','baumgartner2017','boyd2012'};
  Nexp = length(exp);
  for ii = 1:Nexp
    
    if flags.do_positive
      fn = [exp{ii},'_tab2'];
    else
      if ii <=2
        fn = [exp{ii}(1:12),'_',flags.gradients,'Grad',exp{ii}(13:end),'_tab2'];
      else
        fn = [exp{ii},'_',flags.gradients,'Grad','_tab2'];
      end
    end
    tab = amt_cache('get',fn,flags.cachemode);

    if isempty(tab)
      if ii == 1
        exp{ii} = 'hartmann1996';
      end
      if ii == 2
        tab = amt_cache('get',[exp{ii},'_tab2'],'localonly'); % has been (re)computed together with exp. 1
      else
        data = exp_baumgartner2021(exp{ii},flags.cachemode);
        if ii < 3
          tab = data{1}{2};
        else
          tab = data{1};
        end
      end
    end
    
    tabT = array2table(transpose(table2array(tab(:,1:3))));
    for rr = 1:3
      tabT.Properties.RowNames{rr} = ['Exp',num2str(ii),'_',tab.Properties.VariableNames{rr}];
    end
    tab.Properties.RowNames = strrep(tab.Properties.RowNames,'Oracle','Oracle');
    tabT.Properties.VariableNames = tab.Properties.RowNames;
    
    if ii == 1
      tab2 = tabT;
    else
      tab2 = [tab2;tabT];
    end
    
  end
  Error = tab2{2:3:end,:};
  Sensitivity = tab2{1:3:end,:};
  Weight = tab2{3:3:end,:};
  
  MeanError = mean(Error);
  WavgS_cue = sum(Sensitivity.*Error.^-2./repmat(sum(Error.^-2),5,1));
  MeanWeight = mean(Weight);
  
  sumTab = array2table([WavgS_cue;MeanError;MeanWeight]);
  sumTab.Properties.VariableNames = tab2.Properties.VariableNames;
  sumTab.Properties.RowNames = {'W. Avg. S_cue','Mean Error','Mean Weight'};
  tab2 = [tab2;sumTab];
%   disp('here we want to display Tab2');
  if ~flags.do_fig2, amt_disp(tab2,flags.disp); end
  % Output
  data = tab2;
  
  if flags.do_fig2
    exp_baumgartner2021('all')
    
    figure
    x = 1:5;
    imagesc(Error(:,1:7)')
    colormap(flipud(copper(100)));
    c = colorbar('eastoutside');
    c.Label.String = 'Error';
    c.Box = 'off';
    c.Limits = round(c.Limits*10)/10;
    c.Ticks = c.Limits(1):.1:c.Limits(2);
    XTickLabel = {'I','II','III','IV','V'};
    set(gca,'YTick',1:length(cueSelLbl{1}),'YTickLabel',cueSelLbl{1},'XTick',x,'XTickLabel',XTickLabel)
    xlabel('Experiment')
    box off
%     RB_print(gcf,[6,5],'exp_baumgartner2021_singleCueErr',10,10,'-r1000',1)
    
    figure
    imagesc(log10(1./Sensitivity(:,1:7)'))
    colormap(flipud(summer(100)));
    c = colorbar('eastoutside');
    c.Label.String = 'Log Sensitivity';
    c.Box = 'off';
%     set(gca,'ColorScale','log')
    XTickLabel = {'I','II','III','IV','V'};
    set(gca,'YTick',1:length(cueSelLbl{1}),'YTickLabel',cueSelLbl{1},'XTick',x,'XTickLabel',XTickLabel)
    xlabel('Experiment')
    box off
%     RB_print(gcf,[6,5],'exp_baumgartner2021_singleCueS',10,10,'-r1000',1)
  end
  
elseif flags.do_tab3 || flags.do_figStrat
    
  experiments = {'Exp.I','Exp.II','Exp.III','Exp.IV','Exp.V'};
  exp = {'hartmann1996_exp1','hartmann1996_exp2','hassager2016','baumgartner2017','boyd2012'};
  Nexp = length(exp);
  MSE = [];
  BIC = [];
  for ii = 1:Nexp
    
    if flags.do_positive
      fn = [exp{ii},'_tab3'];
    else
      if ii <=2
        fn = [exp{ii}(1:12),'_',flags.gradients,'Grad',exp{ii}(13:end),'_tab3'];
      else
        fn = [exp{ii},'_',flags.gradients,'Grad','_tab3'];
      end
    end
    tab = amt_cache('get',fn,flags.cachemode);
    
    if isempty(tab)
      if ii == 1
        exp{ii} = 'hartmann1996';
      end
      if ii == 2
        tab = amt_cache('get',[exp{ii},'_tab3']); % has been (re)computed together with exp. 1
      else
        data = exp_baumgartner2021(exp{ii},flags.cachemode);
        if ii < 3
          tab = data{2}{2};
        else
          tab = data{2};
        end
      end
    end
    
    if ii == 1
      meanTab = tab;
    else
      meanTab{:,:} = meanTab{:,:} + tab{:,:};
    end
    
    MSE(:,:,ii) = tab{:,7+(1:length(strategies))};
    BIC(:,:,ii) = tab{:,8+length(strategies):end};
    
  end
  meanTab{:,:} = 1/Nexp * meanTab{:,:};
  
  if flags.do_tab3
%     disp('Here we want to display Tab 3');
    amt_disp(meanTab,flags.disp);
    % Output
    data = meanTab;
    
  else % flags.do_figStrat
      dBIC = BIC-repmat(BIC(:,1,:),1,length(strategies),1);
      y = {MSE,dBIC};
      ylbl = {'Mean squared prediction error','BIC'};
      for yy = 1:length(y)
          figure
          Ncues = size(y{yy},1):-1:1;
          b = bar(Ncues,mean(y{yy},3));
    %       set(gca,'XDir','reverse')
          hold on
          symbols = {'o','s','<','p','h'};
          hleg = nan(Nexp,1);
          for ii = 1:Nexp
            for jj = 1:length(strategies)
                h = plot(Ncues+ .9*(jj/(1+length(strategies))-0.5),y{yy}(:,jj,ii),['k',symbols{ii}]);
                set(h,'MarkerFaceColor',b(jj).FaceColor)
            end
            hleg(ii) = h(1);
          end
          xlabel('Number of cues')
          ylabel(ylbl{yy})
          l = legend([b';hleg],[strategies,experiments]);
    %       l = legend([flipud(b');hleg],[fliplr(strategies),experiments]);
          set(l,'Box','off','Location','eastoutside')
          box off
      end
  end
      
  
else
  
  %% Output
  data = {cueTab,cueSelTab,Pext};
end

end


%%%%%%%%%%%%%%%%%%%% INTERNAL FUNCTIONS %%%%%%%%%%%%%%%%%%%%
function ha = tight_subplot(Nh, Nw, gap, marg_h, marg_w)

% tight_subplot creates "subplot" axes with adjustable gaps and margins
%
% ha = tight_subplot(Nh, Nw, gap, marg_h, marg_w)
%
%   in:  Nh      number of axes in height (vertical direction)
%        Nw      number of axes in width (horizontal direction)
%        gap     gaps between the axes in normalized units (0...1)
%                   or [gap_h gap_w] for different gaps in height and width 
%        marg_h  margins in height in normalized units (0...1)
%                   or [lower upper] for different lower and upper margins 
%        marg_w  margins in width in normalized units (0...1)
%                   or [left right] for different left and right margins 
%
%  out:  ha     array of handles of the axes objects
%                   starting from upper left corner, going row-wise as in subplot
%
%  Example: ha = tight_subplot(3,2,[.01 .03],[.1 .1],[.1 .1])
%           for ii = 1:6; axes(ha(ii)); plot(randn(10,ii)); end
%           set(ha(1:4),'XTickLabel',''); set(ha,'YTickLabel','')

% Pekka Kumpulainen 20.6.2010   @tut.fi
% Tampere University of Technology / Automation Science and Engineering


if nargin<3; gap = .02; end
if nargin<4 || isempty(marg_h); marg_h = .05; end
if nargin<5; marg_w = .05; end

if numel(gap)==1; 
    gap = [gap gap];
end
if numel(marg_w)==1; 
    marg_w = [marg_w marg_w];
end
if numel(marg_h)==1; 
    marg_h = [marg_h marg_h];
end

axh = (1-sum(marg_h)-(Nh-1)*gap(1))/Nh; 
axw = (1-sum(marg_w)-(Nw-1)*gap(2))/Nw;

py = 1-marg_h(2)-axh; 

ha = zeros(Nh*Nw,1);
ii = 0;
for ih = 1:Nh
    px = marg_w(1);
    
    for ix = 1:Nw
        ii = ii+1;
        ha(ii) = axes('Units','normalized', ...
            'Position',[px py axw axh], ...
            'XTickLabel','', ...
            'YTickLabel','');
        px = px+axw+gap(2);
    end
    py = py-axh-gap(1);
end
end

function [Pext,PextNumCue,cueTab,cueSelTab] = cueAnalysis(cueLbl,cueSelLbl,cueSelLblTxt,predCue,actual,Erange,Eoffset,fncache,kv)
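% cueAnalysis evaluates the single-cue predictions (with sensitivities 
% optimized against the actual ratings) as well as the fixed-sensitivity 
% cue-combination strategies for one experiment; results are cached as 
% [fncache,'_tab2'] (single cues) and [fncache,'_tab3'] (cue selections).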

% remove to be ignored cues
idkeep = not(ismember(cueLbl(:,1),kv.ignore));
cueLbl = cueLbl(idkeep,:);
predCue = predCue(idkeep);
    
[Pext,S,W,MSE] = mapAndDecide(predCue,actual,Erange,Eoffset);

PextLbl = [cueLbl(:,1);{'Oracle'}];
cueTab = table([S;nan],MSE.m(1:length(S)+1),[W(:,1);nan],...
  [cueLbl(:,2);...
  {'Weighted combination'}],...
  'RowNames',PextLbl,...
  'VariableNames',{'S_cue','Error','Weight','Description'});
flags=amt_flags;

amt_disp(['Showing modelling details for ' fncache]);
amt_disp(cueTab); 

amt_cache('set',[fncache,'_tab2'],cueTab);  

try
  tab2 = exp_baumgartner2021('tab2','cached');
catch
  amt_disp('Table 2 cannot be created. Re-run individual experiments.');
end 
if exist('tab2','var') && not(isempty(tab2))
  Sfix = tab2{16,:};
  MSE = nan(length(cueSelLbl),5);
  BIC = nan(length(cueSelLbl),5);
  Wsel = nan(length(cueSelLbl),length(cueSelLbl{1}));
  try 
    tab3 = exp_baumgartner2021('tab3','cached'); 
  catch
    tab3 = array2table(nan(length(cueSelLbl),length(cueSelLbl{1})));  
    amt_disp('Re-run to calculate WSM');
  end
  PextNumCue = cell(length(cueSelLbl),1);
  for ics = 1:length(cueSelLbl)
    Wfix = tab3{ics,ismember(cueSelLbl{1},cueSelLbl{ics})};
    cueSelId = find(ismember(cueLbl(:,1),cueSelLbl{ics}));
    [PextNumCue{ics},~,Wtmp,MSEtmp,BICtmp] = mapAndDecide(...
        predCue(cueSelId),actual,Erange,Eoffset,Sfix(cueSelId),Wfix);
    Wsel(ics , ismember(cueSelLbl{1},cueSelLbl{ics})) = Wtmp(:,1);
    MSE(ics,:) = MSEtmp.m(end-4:end);
    BIC(ics,:) = BICtmp(end-4:end);
  end
  cueSelTab = [array2table(Wsel,'VariableNames',cueSelLbl{1}),...
      table(MSE(:,1),MSE(:,2),MSE(:,3),MSE(:,4),MSE(:,5),...
            BIC(:,1),BIC(:,2),BIC(:,3),BIC(:,4),BIC(:,5),...
      'RowNames',cueSelLblTxt,...
      'VariableNames',...
      {'Error_Oracle','Error_WSM','Error_LTA','Error_MTA','Error_WTA',...
       'BIC_Oracle','BIC_WSM','BIC_LTA','BIC_MTA','BIC_WTA'})];
   amt_disp(['Showing cue contributions for ' fncache],'documentation');
   amt_disp(cueSelTab,'documentation');
  amt_cache('set',[fncache,'_tab3'],cueSelTab);
else
  cueSelTab = nan;
end

% only output to be plotted data
Pext = Pext(1:length(S)); % only single-cue predictions
if exist('PextNumCue','var')
  PextNumCue = PextNumCue{end-kv.numCue+1}; % only specific # cues
  PextNumCue = PextNumCue(kv.numCue+1:end); % only combination strategies
else
  PextNumCue = Pext(kv.numCue+1:end);
  amt_disp('Warning: Used *Pext* instead of *PextNumCue*.');
end
end

function [Pext,S,W,MSE,BIC] = mapAndDecide(predCue,actual,Erange,Eoffset,S,Wfix)
% [Pext,S,W,MSE,BIC] = mapAndDecide(predCue,actual,Erange,Eoffset,S,Wfix)
%
% Input parameters:
% predCue ... cue-specific deviation metrics; 
%             matrix dimensions: conditions x listeners
% actual ... listeners' externalization scores; matrix dimensions must
%            match *predCue*
% Erange ... range of externalization rating scale, typically 1
% Eoffset ... offset of externalization rating scale, typically 0
% S ... sensitivity parameters, one for each cue [optional, otherwise optimized]
% Wfix ... fixed weights for cues [optional, otherwise optimized]
% 
% Output parameters:
% Pext ... predicted externalization scores (first) for individual cues and 
%          (then) for cue combinations (see decision strategies)
% S ... sensitivity parameters, one for each cue
% W ... weights to combine selected cues and percentages of cue selection 
%       for optimally predicting *actual*
% MSE ... mean squared error
% BIC ... Bayesian information criterion
%
% Decision strategies:
% 1) optimal weighting ("oracle")
% 2) fixed weighting ("weighted sum model")
% 3) minimum externalization selection ("loser takes all")
% 4) median externalization selection ("median takes all")
% 5) maximum externalization selection ("winner takes all")
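%
% For example, if a condition yields single-cue scores of [0.9, 0.4, 0.6],
% a weighted sum with weights [0.5, 0.25, 0.25] predicts 0.7, whereas
% LTA predicts 0.4, MTA predicts 0.6, and WTA predicts 0.9.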

% Definitions of error functions
f_cue2E = @(cue,s) baumgartner2021_mapping(cue,s,Erange,Eoffset);
if size(actual,2) > 1 % Individual data
  f_predErr = @(p) nanmean((p(:)-actual(:)).^2);%nanrms(p(:)-actual(:)) / (Erange-Eoffset);
  f_predErrA = @(p,a) nanmean((p(:)-a(:)).^2); % used for bootstrapping
else % Average data
  f_predErr = @(p) nanmean((mean(p,2) - actual).^2);%nanrms(p(:)-actual(:)) / (Erange-Eoffset);
  f_predErrA = @(p,a) nanmean((mean(p,2) - a).^2);
end

Ncues = length(predCue);

% Optimize sensitivities
if not(exist('S','var'))
  S = ones(Ncues,1); % sensitivity to cue
  for cc = 1:Ncues
    S(cc) = fminsearch(@(s) f_predErr(f_cue2E(predCue{cc},s)),S(cc));
  end
end

% Compute externalization scores
predExtSingle = nan([size(predCue{1}),Ncues]);
for cc = 1:Ncues
  if all(isnan(predCue{cc}(:)))
    predExtSingle(:,:,cc) = zeros(size(predCue{cc}));
  else
    predExtSingle(:,:,cc) = f_cue2E(predCue{cc},S(cc));
  end
%   MSE.m(cc) = f_predErr(predExtSingle(:,:,cc)); % mean(abs( predE(:,cc) - actual )) / (Erange-Eoffset);
end

% Optimize weights
W = 1/Ncues*ones(size(S)); % weighting of cue
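% f_weight computes the weighted sum across the cue dimension:
% (conditions x listeners x cues) -> (conditions x listeners)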
f_weight = @(p,w) reshape( reshape(p,[],length(w))*w(:) ,size(p,1),[]);
f_opt = @(p,w) f_predErr( f_weight(p,abs(w)./sum(abs(w))) );
o = optimset('Display','off');
W = fminsearch(@(w) f_opt(predExtSingle,w) , W,o); 
W = abs( W );
W = W/sum(W);
W(W<.005) = 0; % threshold the weights
W = W/sum(W);

if not(exist('Wfix','var'))
  Wfix = W;
end
if sum(isnan(Wfix))>0, Wfix=W; end
% Decision strategies
  function [E,cueStat] = f_select(cue,s,type) 
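    % Map every cue to an externalization score, then select one score per
    % condition according to the strategy; cueStat reports how often (in %)
    % each cue determined the decision.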
    Eall = reshape(...
      f_cue2E(cue(:),repmat(s(:),length(cue(:))/length(s(:)),1)),...
      size(cue,1),size(cue,2),size(cue,3));
    switch type
      case 'LTA'
        [E,iCue] = nanmin(Eall,[],3);
      case 'MTA'
        E = median(Eall,3,'omitnan');
        [~,iCue] = nanmin((Eall-repmat(E,1,1,size(Eall,3))).^2,[],3);
      case 'WTA'
        [E,iCue] = nanmax(Eall,[],3);
      otherwise
        error('Undefined type of decision strategy')
    end
    cueStat = 100*histcounts(iCue,1:Ncues+1)/length(iCue);
  end

predExtComb(:,:,1) = f_weight(predExtSingle,W); % Oracle
predExtComb(:,:,2) = f_weight(predExtSingle,Wfix); % WSM

predCueMtx = cat(3,predCue{:});
cueStat = nan(Ncues,3);
o = optimset('MaxFunEvals',300*length(S),'Display','off');
S_LTA=fminsearch(@(s) f_predErr(f_select(predCueMtx,s,'LTA')),S,o);
[predExtComb(:,:,3),cueStat(:,1)] = f_select(predCueMtx,S_LTA,'LTA');
S_MTA=fminsearch(@(s) f_predErr(f_select(predCueMtx,s,'MTA')),S,o);
[predExtComb(:,:,4),cueStat(:,2)] = f_select(predCueMtx,S_MTA,'MTA');
S_WTA=fminsearch(@(s) f_predErr(f_select(predCueMtx,s,'WTA')),S,o);
[predExtComb(:,:,5),cueStat(:,3)] = f_select(predCueMtx,S_WTA,'WTA');


predExt = cat(3,predExtSingle,predExtComb);
Pext = squeeze(num2cell(predExt,1:2));

% Mean squared errors
Nstrat = 5;
MSE.m = nan(Ncues+Nstrat,1);
MSE.ci = nan(Ncues+Nstrat,2);
for ii = 1:Ncues+Nstrat
  if ii <= Ncues
    p = predExtSingle(:,:,ii);
  else
    p = predExtComb(:,:,ii-Ncues);
  end
  MSE.m(ii) = f_predErr(p);
  MSE.ci(ii,:) = bootci(1000,f_predErrA,p,actual);
end

% Bayesian Information Criteria
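% Under a Gaussian error assumption, BIC = n*log(RSS/n) + k*log(n); here
% MSE.m corresponds to RSS/n and mc to the number of free parameters k.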
n = size(actual,1); % # conditions (no individual parameters for # listeners)
mc = ones(size(MSE.m)); % model complexity = # parameters; 1 sensitivity parameter per cue
mc(Ncues+1:end) = Ncues; % combinations -> all sensitivity parameters
mc(Ncues+(1:2)) = 2*Ncues; % weighted combinations -> all sensitivity parameters + weights
BIC = n*log(MSE.m) + mc*log(n);

W = [W(:),Wfix(:),cueStat];

end