X-Git-Url: https://git.njae.me.uk/?a=blobdiff_plain;f=language_models.py;h=59d858868dd5b67d5de9dd848fe26d6b5f1c6391;hb=9ab85aeac37f3ceada3d7d959a7f5ef8835638f5;hp=929746888d036fb54de3f1fbf228e296e0bcd027;hpb=3e8d2bd8cd7c623116fa3d2b77db954f51b191e4;p=cipher-training.git

diff --git a/language_models.py b/language_models.py
index 9297468..59d8588 100644
--- a/language_models.py
+++ b/language_models.py
@@ -120,14 +120,22 @@ def log_probability_of_unknown_word(key, N):
     return -log10(N * 10**((len(key) - 2) * 1.4))
 
 Pw = Pdist(datafile('count_1w.txt'), log_probability_of_unknown_word)
+Pw_wrong = Pdist(datafile('count_1w.txt'), lambda _k, N: log10(1/N))
 Pl = Pdist(datafile('count_1l.txt'), lambda _k, _N: 0)
 P2l = Pdist(datafile('count_2l.txt'), lambda _k, _N: 0)
+P3l = Pdist(datafile('count_3l.txt'), lambda _k, _N: 0)
 
 def Pwords(words):
     """The Naive Bayes log probability of a sequence of words.
     """
     return sum(Pw[w.lower()] for w in words)
 
+def Pwords_wrong(words):
+    """The Naive Bayes log probability of a sequence of words.
+    """
+    return sum(Pw_wrong[w.lower()] for w in words)
+
+
 def Pletters(letters):
     """The Naive Bayes log probability of a sequence of letters.
     """
@@ -139,15 +147,21 @@ def Pbigrams(letters):
     """
     return sum(P2l[p] for p in ngrams(letters, 2))
 
+def Ptrigrams(letters):
+    """The Naive Bayes log probability of the trigrams formed from a sequence
+    of letters.
+    """
+    return sum(P3l[p] for p in ngrams(letters, 3))
+
-def cosine_distance_score(text):
+def cosine_similarity_score(text):
     """Finds the dissimilarity of a text to English, using the cosine distance
     of the frequency distribution.
 
-    >>> cosine_distance_score('abcabc') # doctest: +ELLIPSIS
-    0.370847405...
+    >>> cosine_similarity_score('abcabc') # doctest: +ELLIPSIS
+    0.26228882...
     """
-    return norms.cosine_distance(english_counts,
+    return norms.cosine_similarity(english_counts,
                                   collections.Counter(sanitise(text)))
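
A minimal usage sketch (illustration only, not part of the patch), assuming the patched language_models.py and its count_1w.txt, count_1l.txt, count_2l.txt and count_3l.txt data files are importable from the working directory; the candidate strings below are made up for illustration:

# Usage sketch: compare an English-like candidate against a scrambled one
# using the helpers added or renamed in the patch above.
from language_models import (Pwords, Pwords_wrong, Ptrigrams,
                             cosine_similarity_score)

candidates = ['the cat sat on the mat', 'eht tac tas no eht tam']

for text in candidates:
    letters = text.replace(' ', '')
    print(text)
    print('  Pwords:                  ', Pwords(text.split()))
    print('  Pwords_wrong:            ', Pwords_wrong(text.split()))
    print('  Ptrigrams:               ', Ptrigrams(letters))
    print('  cosine_similarity_score: ', cosine_similarity_score(text))

Higher (less negative) log probabilities and a cosine similarity closer to 1 indicate more English-like text, which is how such scores are typically used to rank candidate decryptions.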