% This function gives the testing RMSE curve of EM-ELM.

% Input:

%     N: Number of training samples

%     n: Number of input attributes

%     X: Input of training data set, a N*n matrix

%     T: Target of training data set, a N*1 vector

%     tX: Input of testing data set

%     tT: Target of testing data set

%     maxnum: growth threshold: nodes are added while the hidden layer size is <= maxnum, so the final size may slightly exceed it

%     fun: Type of activation function: 0 for sigmoidal additive function and 1 for Gaussian radial basis function

%     k and lambda: The same parameters as for LCI-ELM

% Output:

%     E: A vector which contains the testing RMSE of each time when adding a chunk of hidden nodes

%     gap: A vector which contains the hidden layer size of each time when adding a chunk of hidden nodes

function [gap,E] = em_elm(N,n,X,T,tX,tT,maxnum,fun,k,lambda)
% EM_ELM  Testing-RMSE curve of an error-minimized ELM grown in chunks.
%
% Input:
%   N       : number of training samples
%   n       : number of input attributes
%   X, T    : training inputs (N*n) and targets (N*1)
%   tX, tT  : testing inputs and targets
%   maxnum  : growth continues while the hidden-layer size is <= maxnum
%   fun     : 0 = sigmoidal additive nodes, 1 = Gaussian RBF nodes
%   k,lambda: chunk-size schedule, len = floor(k*exp(-lambda*count))+1
%
% Output:
%   gap : hidden-layer size after each growth step
%   E   : testing RMSE after each growth step

% Select the feature map and bias sampler once, instead of repeating the
% same switch at every use site.
switch fun
    case 0
        act = @activeadd;
        newbias = @(m) rand(1,m)*2-1;   % additive biases drawn from [-1,1]
    case 1
        act = @activerbf;
        newbias = @(m) rand(1,m)*0.5;   % RBF impact factors drawn from [0,0.5]
    otherwise
        % Previously fell through silently, leaving H and b undefined and
        % crashing later with an unrelated message.
        error('em_elm:badFun','fun must be 0 (additive) or 1 (RBF).');
end

num = k+1;                              % initial hidden-layer size
W = rand(n,num)*2-1;                    % input weights drawn from [-1,1]
b = newbias(num);
H = act(X,W,b);

% Moore-Penrose pseudoinverse: pinv is numerically safer than the
% explicit normal-equations form inv(H'*H)*H' when H is ill-conditioned.
Hp = pinv(H);
beta = Hp*T;

O = act(tX,W,b)*beta;
% mean((..).^2) on the N*1 residual equals toolbox mse(); also avoids
% shadowing the builtin error() with a variable named "error".
rmse = sqrt(mean((tT - O).^2));
E = rmse;
gap = num;
count = 1;

I = eye(N);                             % hoisted; was diag(ones(1,N)) per pass

while num <= maxnum
    % Chunk size decays geometrically with the step counter (>= 1).
    len = floor(k*exp(-lambda*count))+1;
    wi = rand(n,len)*2-1;
    bi = newbias(len);
    dH = act(X,wi,bi);

    % EM-ELM incremental update of the pseudoinverse: only the new
    % columns' contribution is inverted, not the whole hidden matrix.
    temp = (I - H*Hp)*dH;
    D = pinv(temp);
    U = Hp*(I - dH*D);
    Hp = [U;D];

    W = [W wi];                         %#ok<AGROW> growth count is data-dependent
    b = [b bi];                         %#ok<AGROW>
    H = [H dH];                         %#ok<AGROW>
    beta = Hp*T;

    O = act(tX,W,b)*beta;
    rmse = sqrt(mean((tT - O).^2));
    E = [E rmse];                       %#ok<AGROW>
    num = num+len;
    gap = [gap num];                    %#ok<AGROW>
    count = count+1;
end

% calculate the feature matrix for sigmoidal additive networks

function value = activeadd(x,wi,bi)
% ACTIVEADD  Feature matrix of a sigmoidal additive hidden layer.
%   x  : N*n input samples (one per row)
%   wi : n*m input weights (one hidden node per column)
%   bi : 1*m biases
% Returns the N*m matrix sigmoid(x*wi + bi), with the bias row
% replicated across all N samples.
z = x*wi + repmat(bi, size(x,1), 1);
value = 1./(1+exp(-z));

% calculate the feature matrix for Gaussian RBF networks

function value = activerbf(x,wi,bi)
% ACTIVERBF  Feature matrix of a Gaussian RBF hidden layer.
%   x  : N*n input samples (one per row)
%   wi : n*m centres (one hidden node per column)
%   bi : 1*m impact factors
% Returns the N*m matrix with entry (j,i) equal to
%   exp(-bi(i) * ||x(j,:) - wi(:,i)'||^2).
nSamples = size(x,1);
nNodes = size(wi,2);
value = zeros(nSamples,nNodes);
for col = 1:nNodes
    centre = repmat(wi(:,col)', nSamples, 1);
    sqdist = sum((x - centre).^2, 2);
    value(:,col) = exp(-bi(col) * sqdist);
end