+ return lp(v1, v2, 3)
+
def linf(v1, v2=None):
    """Return the L-infinity (Chebyshev) norm of a frequency vector, or the
    L-infinity distance between two frequency vectors.

    With one argument, return the largest value in *v1*.  With two
    arguments, return the largest absolute difference between the two
    vectors, treating a key missing from either vector as zero.

    Raises ValueError if the resulting vector has no entries at all.
    """
    # `is not None`, not truthiness: an empty dict for v2 is still a request
    # for a distance (from v1 to the zero vector), not for a norm of v1.
    if v2 is not None:
        # .get(k, 0) lets plain dicts work too: a key present in only one
        # vector counts as zero in the other.  (Callers may pass
        # defaultdicts, but we no longer rely on that.)
        vec = {k: abs(v1.get(k, 0) - v2.get(k, 0))
               for k in (v1.keys() | v2.keys())}
    else:
        vec = v1
    return max(vec.values())
+
+
def scale(frequencies, norm=l2):
    """Divide every frequency by the *norm* of the whole vector.

    Returns a ``defaultdict(int)`` mapping each key of *frequencies* to its
    value divided by ``norm(frequencies)``, so the result has unit length
    under that norm.
    """
    total = norm(frequencies)
    scaled = {key: count / total for key, count in frequencies.items()}
    return collections.defaultdict(int, scaled)
+
def l2_scale(f):
    """Scale the frequencies *f* so they have unit Euclidean (L2) length.

    >>> sorted(l2_scale({1: 1, 2: 0}).items())
    [(1, 1.0), (2, 0.0)]
    >>> sorted(l2_scale({1: 1, 2: 1}).items()) # doctest: +ELLIPSIS
    [(1, 0.7071067...), (2, 0.7071067...)]
    >>> sorted(l2_scale({1: 1, 2: 1, 3: 1}).items()) # doctest: +ELLIPSIS
    [(1, 0.577350...), (2, 0.577350...), (3, 0.577350...)]
    >>> sorted(l2_scale({1: 1, 2: 2, 3: 1}).items()) # doctest: +ELLIPSIS
    [(1, 0.408248...), (2, 0.81649658...), (3, 0.408248...)]
    """
    return scale(f, norm=l2)
+
def l1_scale(f):
    """Scale the frequencies *f* so they sum to one (unit L1 length).

    >>> sorted(l1_scale({1: 1, 2: 0}).items())
    [(1, 1.0), (2, 0.0)]
    >>> sorted(l1_scale({1: 1, 2: 1}).items())
    [(1, 0.5), (2, 0.5)]
    >>> sorted(l1_scale({1: 1, 2: 1, 3: 1}).items()) # doctest: +ELLIPSIS
    [(1, 0.333...), (2, 0.333...), (3, 0.333...)]
    >>> sorted(l1_scale({1: 1, 2: 2, 3: 1}).items())
    [(1, 0.25), (2, 0.5), (3, 0.25)]
    """
    return scale(f, norm=l1)
+
# Backwards-compatible aliases: descriptive names for the norm-specific
# helpers defined above.
normalise = l1_scale
euclidean_distance = l2
euclidean_scale = l2_scale
+