/**
* \file
*
 * Calculate the available entropy. This is taken from timer_entropyd.
*
 * \author Georg Hopp
*/
#include <math.h>
#include <string.h>
#define min(x, y) ((x)<(y)?(x):(y))
/**
* This is taken from timer_entropyd and modified so
* that the constant 1/log(2.0) is not calculated but
* set directly.
*
 * As far as I can say this correlates to the Shannon
 * entropy algorithm with equal probabilities
 * for entropy, where the entropy units are bits.
 *
 * But actually I am no mathematician and my analysis capabilities
 * are limited. Additionally I have not analysed the Linux random
 * character device code, so I trusted the code in timer_entropyd.
*/
int
get_entropy_bits(const unsigned char * data, size_t ndata)
{
size_t byte_count[256];
size_t iterator;
static double log2inv = 1.442695; //!< 1 / log(2.0): the entropy unit size
double entropy = 0.0;
memset(byte_count, 0, sizeof(byte_count));
/**
* first get the amount each byte occurs in the array
*/
for (iterator = 0; iterator < ndata; iterator++) {
byte_count[data[iterator]]++;
}
/**
* calculate the entropy value
*/
for (iterator = 0; iterator < 256; iterator++) {
double probability = (double)byte_count[iterator] / (double)ndata;
if (0.0 < probability) {
entropy += probability * log2inv * (log(1.0 / probability));
}
}
/**
* prepare for use with linux kernel ioctl RNDADDENTROPY
*/
entropy *= (double)ndata;
entropy = (entropy < 0.0)? 0.0 : entropy;
entropy = min((double)(ndata * 8), entropy);
return entropy;
}
// vim: set ts=4 sw=4: