#!/usr/bin/python3
"""Measure the worst-case inaccuracy of a linearly interpolated logarithm table.

Interpolating this version of the logarithm table is apparently
accurate to within 0.35%, with rounding rather than interpolation
being the major source of error.  But it is small enough to easily
memorize.  I suspect you can do better with a quadratic spline of
similar size, but that might be harder to compute in your head.
"""
from __future__ import division  # no-op under the python3 shebang; kept for Python 2
import math

# Mantissa table: 10**(1 + n/10) for n = 0..10, rounded for easy memorization.
logs = list(map(float, '10 12.6 15.8 20 25 31.6 40 50 63 79 100'.split()))
# more precisely (though I think something is wrong here:)
# logs = [10,12.589,15.849,19.953,25.119,31.623,39.811,50.119,63.096,79.433,100]
# less precisely (1.25% error):
# logs = list(map(float, '10 13 16 20 25 32 40 50 63 79 100'.split()))


def logterp(x):
    "Compute a Briggsian logarithm with linear interpolation from a table."
    # The table covers one decade; find where it starts (min(logs) is 10,
    # so the characteristic i starts at 1).
    m = min(logs)
    i = 0
    while m > 1:
        i += 1
        m /= 10
    # Scale x into the table's range (min(logs), max(logs)], adjusting
    # the characteristic by one for each factor of 10.
    while x > max(logs):
        i += 1
        x /= 10
    while x <= min(logs):
        i -= 1
        x *= 10
    # Find the first table entry >= x.
    n = 0
    while logs[n] < x:
        n += 1
    assert logs[n] >= x
    assert logs[n] > min(logs)
    # Interpolate linearly between entries n-1 and n; each table step
    # spans a tenth of a decade.
    d = x - logs[n-1]
    f = d / (logs[n] - logs[n-1])
    return i + (n - 1 + f) / 10


def badness(x):
    "Relative error of the interpolated logarithm at x."
    true = math.log10(x)
    fake = logterp(x)
    return abs(true - fake) / true


def worst():
    "Find the integer in [2, 1000000) with the largest relative error."
    x = max(range(2, 1000000), key=badness)
    return x, badness(x), logterp(x), math.log10(x)


if __name__ == '__main__':
    print(worst())
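

# The docstring above speculates that a quadratic spline of similar size
# might beat linear interpolation.  Below is a rough sketch of that idea
# (my addition, not part of the original script): inverse quadratic
# (three-point Lagrange) interpolation over the same table.  The name
# logterp2 and the endpoint handling are assumptions, not the author's
# method; to measure it, swap logterp for logterp2 inside badness().

def logterp2(x):
    "Hypothetical variant: fit a parabola through three table entries."
    # Same decade bookkeeping as logterp.
    m = min(logs)
    i = 0
    while m > 1:
        i += 1
        m /= 10
    # Scale x into (min(logs), max(logs)].
    while x > max(logs):
        i += 1
        x /= 10
    while x <= min(logs):
        i -= 1
        x *= 10
    # First table entry >= x.
    n = 0
    while logs[n] < x:
        n += 1
    # Three consecutive entries bracketing x, shifted inward at the ends.
    lo = min(max(n - 1, 0), len(logs) - 3)
    x0, x1, x2 = logs[lo], logs[lo + 1], logs[lo + 2]
    # Lagrange form, interpolating the (fractional) table index at x.
    f = (lo * (x - x1) * (x - x2) / ((x0 - x1) * (x0 - x2))
         + (lo + 1) * (x - x0) * (x - x2) / ((x1 - x0) * (x1 - x2))
         + (lo + 2) * (x - x0) * (x - x1) / ((x2 - x0) * (x2 - x1)))
    return i + f / 10           # each table step is a tenth of a decade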