% Winnow sample complexity experiment
%average_classifier = zeros(80,80);
m_patterns_error = zeros(80,1);

% n = number of dimensions (swept from 1 to 80),
% m_loop = maximum number of training patterns tried per dimension
m_loop = 1100;

for n = 1:80
    % m training patterns x1, ..., xm
    for m = 1:m_loop
        % Training data: m random binary row vectors of dimension n, generated with randi
        x_training = randi([0 1], m, n);
        % The label is the first coordinate of x
        y_training = x_training(:,1);

        % Least-squares alternative (unused):
        %weight_vector = pinv(x_training)*y_training;

        % Winnow: start with all weights equal to 1
        weight_vector = ones(1,n);
        for i = 1:size(x_training,1)
            % Predict 1 if w'x >= n, otherwise 0
            if x_training(i,:)*weight_vector' < n
                predict_y = 0;
            else
                predict_y = 1;
            end
            % On a mistake, update the weights multiplicatively
            if predict_y ~= y_training(i)
                weight_vector = weight_vector .* power(2, (y_training(i)-predict_y)*x_training(i,:));
            end
        end

        % Evaluate the classifier f_w(x) := 1{w'x >= n} on 80 test sets of 300 points each
        classifier = zeros(1,80);
        for i = 1:80
            X_test = randi([0 1], 300, n);
            % The label is the first coordinate of x
            y_test = X_test(:,1);
            estimate_y = double(X_test*weight_vector' >= n);
            classifier(1,i) = sum(estimate_y ~= y_test);
        end
        avg_error = mean(classifier*100/300);   % average test error in percent

        % Record the smallest m whose average test error is at most 10%
        if avg_error <= 10
            m_patterns_error(n) = m;
            break;
        end
    end
end

% Plot the Winnow sample complexity curve: m patterns needed versus dimension n
figure;
plot(m_patterns_error);
title('Winnow Sample Complexity')
xlabel('n');
ylabel('m');
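% For reference, the prediction and mistake-driven update performed inline in
% the training loop above can be factored into a small helper. This is only an
% illustrative sketch of the same rule; the function name winnow_step and its
% interface are assumptions, not part of the original script. (A local function
% like this must appear at the end of the script file, as it does here.)
function [w, mistake] = winnow_step(w, x, y)
    % One Winnow step on a single binary example: x is a 1-by-n row vector,
    % y is 0 or 1, w is the current 1-by-n weight vector.
    n = numel(w);
    y_hat = double(x*w' >= n);    % predict 1 iff w'x >= n
    mistake = (y_hat ~= y);
    if mistake
        % Multiplicative update: active features (x_j = 1) are doubled on a
        % missed positive and halved on a missed negative; inactive features
        % are left unchanged.
        w = w .* 2.^((y - y_hat) * x);
    end
end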