% Language: MATLAB
function E = BlockEntropy01(Series, Window, Base)
%BLOCKENTROPY01 Calculates the BLOCK ENTROPY of Series.
%-----------------------------------------------------------
% Series: a vector of numbers
% Window: length of the "sliding" block
% Base:   2 or 10 - selects the logarithm of the calculation:
%         Base==10 -> natural log (entropy in nats)
%         Base==2  -> log2        (entropy in bits)
%         NOTE(review): despite the original header claiming log10,
%         the code and the reference results below use natural log
%         for Base==10 (e.g. Window=1 gives 1.599 ~= ln-entropy).
% E:      block entropy of Series
%-----------------------------------------------------------
% For the ENTROPY Calculations
% http://matlabdatamining.blogspot.gr/2006/....
% 11/introduction-to-entropy.html
% BlogSpot: Will Dwinnell
%-----------------------------------------------------------
% For the BLOCK ENTROPY
% http://codegolf.stackexchange.com/...
% questions/24316/calculate-the-block-entropy
%-----------------------------------------------------------
% Test (Base=10)
% Series=[2, 3, 4, 1, 2, 3, 0, 0, 3, 2, 3, 0, ...
% 2, 2, 4, 4, 4, 1, 1, 1, 0, 4, 1, 2, 2, 4, 0, ...
% 1, 2, 3, 0, 2, 3, 2, 3, 2, 0, 1, 3, 4, 4, 0, ...
% 2, 1, 4, 3, 0, 2, 4, 1, 0, 4, 0, 0, 2, 2, 0, ...
% 2, 3, 0, 0, 4, 4, 2, 3, 1, 3, 1, 1, 3, 1, 3, 1, ...
% 0, 0, 2, 2, 4, 0, 3, 2, 2, 3, 0, 3, 3, 0, 0, 4, ...
% 4, 1, 0, 2, 3, 0, 0, 1, 4, 4, 3]';
%
% Results
%
% Window=1: 1.599
% Window=2: 3.065
% Window=3: 4.067
% Window=4: 4.412
% Window=5: 4.535
% Window=6: 4.554
%-----------------------------------------------------------
n = length(Series);
% Build the matrix of sliding blocks: column k holds Series shifted
% up by (k-1), so row r is the block [Series(r), ..., Series(r+Window-1)].
D = zeros(n, Window);               % pre-allocate memory
for k = 1:Window
    D(:,k) = circshift(Series, 1-k);
end
D = D(1:end-Window+1, :);           % truncate wrapped-around rows at the end
%
% Replace each row (block) with a single scalar "symbol" by evaluating
% the row as polynomial coefficients at Base (base-Base digit encoding).
K = size(D, 1);                     % number of blocks (unused width dropped)
MyData = zeros(1, K);               % BUG FIX: pre-allocate (grew in a loop before)
for k = 1:K
    MyData(k) = polyval(D(k,:), Base);
end
clear D
%-----------------------------------------------------------
% ENTROPY CALCULATIONS on MyData
% following Will Dwinnell
%-----------------------------------------------------------
UniqueMyData = unique(MyData);
nUniqueMyData = length(UniqueMyData);
FreqMyData = zeros(nUniqueMyData, 1);  % initialization
for i = 1:nUniqueMyData
    FreqMyData(i) = sum(double(MyData == UniqueMyData(i)));
end
% Calculate sample class probabilities
P = FreqMyData / sum(FreqMyData);
% Note: floating point underflow is never an issue since we are
% dealing only with the observed alphabet (every P(i) > 0).
if Base == 10
    E = -sum(P .* log(P));          % natural log (nats) - matches test values
elseif Base == 2                    % BUG FIX: was "BASE" (undefined; case-sensitive)
    E = -sum(P .* log2(P));
else
    % BUG FIX: previously fell through silently, leaving E unassigned
    error('BlockEntropy01:badBase', 'Base must be 2 or 10, got %g.', Base);
end
end
% ---------------------- TEST SCRIPT ----------------------
%-----------------------------------------------------------
% Demo: block entropy of a fixed 101-symbol series over the
% alphabet {0,...,4}, for window lengths 1 through 6.
%-----------------------------------------------------------
Series = [2 3 4 1 2 3 0 0 3 2 3 0 ...
          2 2 4 4 4 1 1 1 0 4 1 2 2 4 0 ...
          1 2 3 0 2 3 2 3 2 0 1 3 4 4 0 ...
          2 1 4 3 0 2 4 1 0 4 0 0 2 2 0 ...
          2 3 0 0 4 4 2 3 1 3 1 1 3 1 3 1 ...
          0 0 2 2 4 0 3 2 2 3 0 3 3 0 0 4 ...
          4 1 0 2 3 0 0 1 4 4 3]';
Base = 10;
%-----------------------------------------------------------
for Window = 1:6
    % no trailing semicolon: echo E for each window length
    E = BlockEntropy01(Series, Window, Base)
end
% Pseudocode summary from the codegolf reference:
%   entropy(probabilities(blocks(A, m)))