testing.py
from collections import Counter
from trialyzer import *
import timeit
import cProfile
import corpus
import layout  # layout.Layout is used directly below; assumed to be a sibling module like corpus/typingdata
import typingdata
import time
# ok what is the chain here
# note vscode debugger makes this roughly 10x slower than straight Python
# layout things
#   layout construction: 0.3 ms
#   calculate_counts: 1798 ms (includes all_nstrokes)
#     just a plain Counter instead (without calculating sums): 1878 ms
#   after making all the nstroke category functions @functools.cache:
#     calculate_counts: 1266 ms
#     Counter(): 1348 ms
# ngram to nstroke:
#   how long does it take to iterate all tristrokes
#   tuple(all_nstrokes()): 1416 ms
#     after caching to_nstroke(): 275 ms!!
#   tuple(itertools all trigrams): 3 ms
#   Very interesting, seems like to_nstroke is the big deal. Cache it?
#   (the caching pattern is sketched just below these notes)
#   After unwrapping layout.finger() and .coord() from their functions:
#     tuple(all_nstrokes()): 1359 ms
#   After making dedicated dictionaries to replace the functions:
#     tuple(all_nstrokes()): 1216 ms
# trialyzer things
#   stuff that only runs through what is saved:
#     load csv data: 3 ms
#     get medians: 125 ms
#     tristroke_category_data: 14 ms
#   runs through all tristrokes:
#     summary_tristroke_analysis: 1923 ms
#   load shai: 162 ms
#   summary tristroke rank 3 layouts: 3687 ms
#     after caching layout.to_nstroke(): 1069 ms!!
#   full tristroke rank 3 layouts: 4897 ms
#     after caching layout.to_nstroke(): 1206 ms!!
# nstroke things
#   tristroke_category: 180 ms to go through a precomputed list of qwerty nstrokes
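
# --- sketch: the caching pattern referenced in the notes above ---
# Hedged illustration, not trialyzer's actual implementation: the timings suggest
# that wrapping a pure, frequently repeated conversion (like to_nstroke) in
# functools.cache turns recomputation into a dict lookup. The names below
# (DEMO_FINGER_MAP, demo_key_info, DEMO_KEY_TABLE) are hypothetical.
import functools

DEMO_FINGER_MAP = {"a": "LP", "s": "LR", "d": "LM", "f": "LI"}  # made-up sample data

@functools.cache
def demo_key_info(key: str) -> tuple:
    # pure function of its argument, so caching is safe; repeated calls with the
    # same key hit the cache instead of recomputing
    return (DEMO_FINGER_MAP.get(key, "??"), len(key))

# the notes' "dedicated dictionaries to replace the functions" idea pushes the same
# trade further: precompute every result once, then index a plain dict afterwards.
DEMO_KEY_TABLE = {k: demo_key_info(k) for k in DEMO_FINGER_MAP}
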
qwerty = layout.Layout("qwerty", False)
# typingdata_ = typingdata.TypingData("tanamr")
# corpus_ = corpus.get_corpus("shai.txt", precision=5000)
# constraintmap_ = constraintmap.get_constraintmap("trad-dead-pinkies")
# n = 3
# keys = ("a", "b", "c")
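# stuff() bundles whichever experiment is currently being timed; everything
# except the shai corpus load is left commented out.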
def stuff():
    # csvdata = load_csv_data("default")
    # for layoutname in ("qwerty", "semimak", "boom"):
    #     lay = layout.get_layout(layoutname)
    #     medians = get_medians_for_layout(csvdata, lay)
    #     tricatdata = tristroke_category_data(medians)
    #     summary_tristroke_analysis(lay, tricatdata, medians)
    # set_1 = {nstroke for nstroke in qwerty.nstrokes_with_any_of(keys, n)} # 116
    # set_2 = {nstroke for nstroke in qwerty.by_brute_force(keys, n)} # 235
    # corpus.Corpus("tr_quotes.txt")
    # for lay, score, swap in steepest_ascent(qwerty, typingdata_,
    #         corpus_.trigram_counts, constraintmap_,
    #         pins=qwerty.get_board_keys()[0].values()):
    #     print(f"{swap} gives {score:.3f}")
    #     print(repr(lay))
    corpus_ = corpus.get_corpus("shai.txt", space_key="space_r")

# n_ = 1
# print(timeit.timeit("stuff()", globals=globals(), number=n_)/n_ * 1000)
cProfile.run("corpus.get_corpus('shai.txt', space_key='space_r')", sort="tottime")
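
# --- sketch: the per-call timing pattern used in the commented timeit line above ---
# hedged illustration; ms_per_call is a hypothetical helper, not part of trialyzer
def ms_per_call(stmt: str, number: int = 1) -> float:
    # timeit executes `stmt` `number` times (GC disabled by default); dividing by
    # `number` and scaling by 1000 gives average wall-clock ms per call
    return timeit.timeit(stmt, globals=globals(), number=number) / number * 1000

# example usage: print(f"stuff(): {ms_per_call('stuff()'):.1f} ms")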