% Simple X and O detector by O. Krigolson, updated Apr 4, 2023
%
% Trains a single sigmoid output unit on two 7x7 binary pixel patterns
% (an X and an O). Active pixels are rewarded (pushed toward +1) when the
% X is shown and punished (pushed toward -1) when the O is shown, so the
% output activation rises for X and falls for O across trials. The script
% plots the stimuli, the weights before and after training, and the
% output activations per trial.

% get ready to go
clear; close all; clc;

% use initial weights of zero (1) or random (2)
initialWeightsAre = 1;

% how many trials training goes on for
nRuns = 100;

% define the two letters in a grid space
% the x
inputs{1} = [1 0 0 0 0 0 1; ...
             0 1 0 0 0 1 0; ...
             0 0 1 0 1 0 0; ...
             0 0 0 1 0 0 0; ...
             0 0 1 0 1 0 0; ...
             0 1 0 0 0 1 0; ...
             1 0 0 0 0 0 1];
% the o
inputs{2} = [1 1 1 1 1 1 1; ...
             1 0 0 0 0 0 1; ...
             1 0 0 0 0 0 1; ...
             1 0 0 0 0 0 1; ...
             1 0 0 0 0 0 1; ...
             1 0 0 0 0 0 1; ...
             1 1 1 1 1 1 1];

% plot the inputs - THIS IS THE INPUT LAYER
subplot(3,2,1);
imagesc(inputs{1});
title('X STIMULUS');
subplot(3,2,2);
imagesc(inputs{2});
title('O STIMULUS');

% determine the size of the (square) input grid
inputDimension = size(inputs{1},1);

% define the initial weights - THIS IS THE HIDDEN LAYER
if initialWeightsAre == 1
    % idiomatic preallocation to zero (replaces indexed assignment of 0)
    weights = zeros(inputDimension);
else
    % uniform random weights in [-1, 1]
    weights = rand(inputDimension)*2 - 1;
end

% plot the initial weights
subplot(3,2,3);
imagesc(weights);
title('INITIAL WEIGHTS');

% the learning rate of the model
learningRate = 0.1;

% preallocate activation storage (at most nRuns entries each); trimmed
% to the actual trial counts after training (avoids growing arrays in
% the loop)
xActivations = zeros(1,nRuns);
oActivations = zeros(1,nRuns);
nX = 0;
nO = 0;

% train the network
for trial = 1:nRuns

    % randomly pick an x or an o to show (1 = X, 2 = O)
    whichInput = randi(2);

    % compute the input to the output neuron, the input function:
    % the weighted sum over all pixels
    netInput = sum(sum(inputs{whichInput} .* weights));

    % compute the output based on the input, the sigmoid activation function
    netOutput = 1 / (1 + exp(-netInput));

    % adjust the weights of the active (== 1) pixels only, using logical
    % indexing; this is arithmetically identical to looping over every
    % pixel, and the min/max clamp keeps the weights from blowing up.
    % Note this is a fixed update rule toward the target, not a
    % prediction-error rule.
    active = inputs{whichInput} == 1;
    if whichInput == 1
        % reward it for an X: push active weights toward +1, capped at 1
        weights(active) = min(weights(active) + (1 - netOutput)*learningRate, 1);
    else
        % punish it for an O: push active weights toward -1, floored at -1
        weights(active) = max(weights(active) + (0 - netOutput)*learningRate, -1);
    end

    % store the activations - THIS IS THE OUTPUT LAYER
    if whichInput == 1
        nX = nX + 1;
        xActivations(nX) = netOutput;
    else
        nO = nO + 1;
        oActivations(nO) = netOutput;
    end

end

% drop the unused preallocated slots so the plotted vectors match the
% actual number of X and O trials
xActivations = xActivations(1:nX);
oActivations = oActivations(1:nO);

% show the final weights
subplot(3,2,4);
imagesc(weights);
title('FINAL WEIGHTS');

% plot the activations across training
subplot(3,2,[5 6]);
plot(xActivations);
hold on;
plot(oActivations);
title('X and O ACTIVATIONS ACROSS TRIALS');
legend('X trials','O trials');