% This function gives the testing RMSE curve of EI-ELM.

% EI-ELM is equivalent to I-ELM when parameter k = 1.

% Input:

%     N: Number of training samples

%     n: Number of input attributes

%     X: Input of training data set, a N*n matrix

%     T: Target of training data set, a N*1 vector

%     tX: Input of testing data set

%     tT: Target of testing data set

%     maxnum: maximum number of network hidden nodes - 1

%     fun: Type of activation function: 0 for sigmoidal additive function and 1 for Gaussian radial basis function

%     k: Parameter of EI-ELM

% Output:

%     E: A (maxnum+1)*1 vector which contains the testing RMSE of each time when adding a hidden node

function E = ei_elm(N,n,X,T,tX,tT,maxnum,fun,k)
% EI_ELM  Testing RMSE curve of EI-ELM (equivalent to I-ELM when k = 1).
%
% At each step, k candidate hidden nodes are generated at random and the
% one whose output has the largest normalized projection onto the current
% training residual is kept; the residual is then reduced accordingly.
%
% Inputs:
%   N      - number of training samples (unused here; kept for interface
%            compatibility — presumably N == size(X,1))
%   n      - number of input attributes (n == size(X,2))
%   X      - N*n training inputs
%   T      - N*1 training targets
%   tX     - testing inputs
%   tT     - testing targets
%   maxnum - maximum number of hidden nodes minus 1 (maxnum+1 nodes added)
%   fun    - activation: 0 = sigmoidal additive, 1 = Gaussian RBF
%   k      - number of candidates tried per added node (EI-ELM parameter)
%
% Output:
%   E      - (maxnum+1)*1 vector of testing RMSE after each added node

% Sigmoidal additive feature map, defined locally so the fun == 0 path is
% self-contained (the original file's case-0 branches were empty, which
% left hi/O undefined and crashed for sigmoidal networks).
sig = @(Z,w,bvec) 1./(1 + exp(-(Z*w + bvec(ones(size(Z,1),1),:))));

e = T;                     % current training residual
E = zeros(maxnum+1,1);     % preallocated RMSE curve
W = [];                    % kept input weights/centers, one column per node
b = [];                    % kept biases/impact factors, one column per node
beta = [];                 % kept output weights

for count = 0:maxnum
    % Draw k random candidate nodes.
    wi = rand(n,k)*2 - 1;
    switch fun
        case 0
            bi = rand(1,k)*2 - 1;    % additive-node bias in [-1,1]
        case 1
            bi = rand(1,k)*0.5;      % RBF impact factor in [0,0.5]
        otherwise
            error('ei_elm:badFun','fun must be 0 (sigmoid) or 1 (RBF).');
    end

    % Pick the candidate whose output best matches the residual,
    % i.e. maximizes (e'*hi)^2 / (hi'*hi).
    bestInd = 0;
    h = [];
    bestCon = 0;
    for i = 1:k
        switch fun
            case 0
                hi = sig(X,wi(:,i),bi(:,i));
            case 1
                hi = activerbf(X,wi(:,i),bi(:,i));
        end
        contri = (e'*hi)^2/(hi'*hi);
        if contri > bestCon
            bestCon = contri;
            h = hi;
            bestInd = i;
        end
    end
    if bestInd == 0
        % Degenerate case (all contributions zero or NaN): keep the first
        % candidate instead of indexing wi(:,0), which would crash.
        bestInd = 1;
        switch fun
            case 0
                h = sig(X,wi(:,1),bi(:,1));
            case 1
                h = activerbf(X,wi(:,1),bi(:,1));
        end
    end

    % Output weight of the new node (least-squares on the residual),
    % then update the residual.
    c = e'*h/(h'*h);
    e = e - c*h;
    beta = [beta; c];
    W = [W wi(:,bestInd)];
    b = [b bi(:,bestInd)];

    % Testing RMSE of the current network.
    switch fun
        case 0
            O = sig(tX,W,b)*beta;
        case 1
            O = activerbf(tX,W,b)*beta;
    end
    % mean((tT-O).^2) avoids the toolbox-only mse(); rmse_t does not
    % shadow the built-in error().
    rmse_t = sqrt(mean((tT - O).^2));
    E(count+1) = rmse_t;
end

% calculate the feature matrix for sigmoidal additive networks
function value = activesig(x,wi,bi)
% ACTIVESIG  Feature matrix of sigmoidal additive hidden nodes.
%   x  - N*n input matrix
%   wi - n*m input weights, one column per hidden node
%   bi - 1*m biases, one per hidden node
% Returns the N*m matrix value(j,i) = 1/(1+exp(-(x(j,:)*wi(:,i)+bi(i)))).
%
% NOTE(review): the "function" line was missing in the original file,
% leaving this code as an unreachable/broken tail of ei_elm; the header
% is reconstructed by symmetry with activerbf in this file.
index = ones(1,size(x,1));   % row of indices used to replicate bi N times
x = x*wi + bi(index,:);      % additive node input: x*w + b
value = 1./(1+exp(-x));      % elementwise sigmoid

% calculate the feature matrix for Gaussian RBF networks
function value = activerbf(x,wi,bi)
% ACTIVERBF  Feature matrix of Gaussian RBF hidden nodes.
%   x  - N*n input matrix, one sample per row
%   wi - n*m centers, one column per hidden node
%   bi - 1*m impact factors, one per hidden node
% Returns the N*m matrix value(j,i) = exp(-bi(i)*||x(j,:)' - wi(:,i)||^2).
nSamples = size(x,1);
nNodes = size(wi,2);
value = zeros(nSamples,nNodes);
for node = 1:nNodes
    center = repmat(wi(:,node)', nSamples, 1);   % node center, one row per sample
    sqdist = sum((x - center).^2, 2);            % squared distance to the center
    value(:,node) = exp(-bi(node)*sqdist);       % Gaussian response
end