% This function gives the testing RMSE curve of CI-ELM.

% Input:

%     N: Number of training samples

%     n: Number of input attributes

%     X: Input of training data set, a N*n matrix

%     T: Target of training data set, a N*1 vector

%     tX: Input of testing data set

%     tT: Target of testing data set

%     maxnum: maximum number of network hidden nodes - 1

%     fun: Type of activation function: 0 for sigmoidal additive function and 1 for Gaussian radial basis function

% Output:

%     E: A (maxnum+1)*1 vector which contains the testing RMSE of each time when adding a hidden node

function E = ci_elm(N,n,X,T,tX,tT,maxnum,fun)
% CI_ELM  Testing RMSE curve of the Convex Incremental ELM.
%
% Hidden nodes are added one at a time with random parameters. After each
% addition, the network output is updated as a convex combination of the
% old output and the new node's output: existing output weights are scaled
% by (1-k) and the new node receives weight k.
%
% Input:
%   N      : number of training samples
%   n      : number of input attributes
%   X      : training input, N*n matrix
%   T      : training target, N*1 vector
%   tX     : testing input
%   tT     : testing target
%   maxnum : maximum number of hidden nodes - 1
%   fun    : activation type, 0 = sigmoid additive, 1 = Gaussian RBF
% Output:
%   E      : (maxnum+1)*1 vector with the testing RMSE after each node

e = T;                      % current training residual
g = zeros(N,1);             % current network output on the training set
E = zeros(maxnum+1,1);      % preallocate: one RMSE entry per added node
count = 0;
W = [];                     % accumulated input weights, one column per node
b = [];                     % accumulated biases, one entry per node
beta = [];                  % accumulated output weights

while count <= maxnum
    % Draw random parameters for the new hidden node.
    wi = rand(n,1)*2-1;
    switch fun
        case 0
            bi = rand(1,1)*2-1;
            h = activeadd(X,wi,bi);
        case 1
            bi = rand(1,1)*0.5;
            h = activerbf(X,wi,bi);
        otherwise
            % Fail fast instead of crashing later on an undefined h.
            error('ci_elm:badFun','fun must be 0 (sigmoid) or 1 (RBF).');
    end

    % Optimal convex-combination coefficient for the new node.
    hi = h-g;
    k = e'*hi/(hi'*hi);

    % Convexly combine the old output with the new node's output and
    % rescale all previous output weights accordingly.
    g = (1-k)*g + k*h;
    e = T-g;
    beta = [beta*(1-k);k];
    W = [W wi];
    b = [b bi];

    % Evaluate the current network on the testing set.
    switch fun
        case 0
            O = activeadd(tX,W,b)*beta;
        case 1
            O = activerbf(tX,W,b)*beta;
    end

    % RMSE in base MATLAB; the original used mse() from the Neural
    % Network Toolbox, which is an unnecessary toolbox dependency.
    E(count+1) = sqrt(mean((tT - O).^2));
    count = count+1;
end

% calculate the feature matrix for sigmoidal additive networks

function value = activeadd(x,wi,bi)
% Sigmoid feature matrix: value(i,j) = 1/(1 + exp(-(x(i,:)*wi(:,j) + bi(j)))).
%   x  : N*n input matrix
%   wi : n*m input-weight matrix (one column per hidden node)
%   bi : 1*m bias row vector
nSamples = size(x,1);
preact = x*wi + repmat(bi, nSamples, 1);   % N*m pre-activations
value = 1./(1 + exp(-preact));

% calculate the feature matrix for Gaussian RBF networks

function value = activerbf(x,wi,bi)
% Gaussian RBF feature matrix:
%   value(i,j) = exp(-bi(j) * ||x(i,:) - wi(:,j)'||^2)
%   x  : N*n input matrix
%   wi : n*m center matrix (one center per column)
%   bi : 1*m width row vector
nSamples = size(x,1);
nNodes = size(wi,2);
value = zeros(nSamples,nNodes);
for j = 1:nNodes
    center = repmat(wi(:,j)', nSamples, 1);      % replicate center per row
    sqdist = sum((x - center).^2, 2);            % squared Euclidean distance
    value(:,j) = exp(-bi(j) * sqdist);
end