# generated by rye
# use `rye lock` or `rye sync` to update this lockfile
#
# last locked with the following flags:
# pre: false
# features: []
# all-features: false
# with-sources: false
# generate-hashes: false
# universal: false
-e file:.
aiohappyeyeballs==2.3.7
# via aiohttp
aiohttp==3.10.4
# via llama-index-core
# via llama-index-legacy
aiosignal==1.3.1
# via aiohttp
annotated-types==0.7.0
# via pydantic
anyio==4.4.0
# via httpx
# via openai
asttokens==2.4.1
# via stack-data
attrs==24.2.0
# via aiohttp
beautifulsoup4==4.12.3
# via bs4
# via llama-index-readers-file
blis==0.7.11
# via thinc
bs4==0.0.2
# via semanticsilm
catalogue==2.0.10
# via spacy
# via srsly
# via thinc
certifi==2024.7.4
# via httpcore
# via httpx
# via requests
charset-normalizer==3.3.2
# via requests
click==8.1.7
# via nltk
# via typer
cloudpathlib==0.18.1
# via weasel
confection==0.1.5
# via thinc
# via weasel
contourpy==1.2.1
# via matplotlib
cycler==0.12.1
# via matplotlib
cymem==2.0.8
# via preshed
# via spacy
# via thinc
dataclasses-json==0.6.7
# via llama-index-core
# via llama-index-legacy
decorator==5.1.1
# via ipython
deprecated==1.2.14
# via llama-index-core
# via llama-index-legacy
dirtyjson==1.0.8
# via llama-index-core
# via llama-index-legacy
distro==1.9.0
# via openai
executing==2.0.1
# via stack-data
fonttools==4.53.1
# via matplotlib
frozenlist==1.4.1
# via aiohttp
# via aiosignal
fsspec==2024.6.1
# via llama-index-core
# via llama-index-legacy
greenlet==3.0.3
# via sqlalchemy
h11==0.14.0
# via httpcore
httpcore==1.0.5
# via httpx
httpx==0.27.0
# via llama-cloud
# via llama-index-core
# via llama-index-legacy
# via openai
idna==3.7
# via anyio
# via httpx
# via requests
# via yarl
ipython==8.26.0
# via pyvis
jedi==0.19.1
# via ipython
jinja2==3.1.4
# via pyvis
# via spacy
jiter==0.5.0
# via openai
joblib==1.4.2
# via nltk
jsonpickle==3.2.2
# via pyvis
kiwisolver==1.4.5
# via matplotlib
langcodes==3.4.0
# via spacy
language-data==1.2.0
# via langcodes
llama-cloud==0.0.13
# via llama-index-indices-managed-llama-cloud
llama-index==0.10.65
# via semanticsilm
llama-index-agent-openai==0.2.9
# via llama-index
# via llama-index-program-openai
llama-index-cli==0.1.13
# via llama-index
llama-index-core==0.10.66
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-embeddings-openai
# via llama-index-indices-managed-llama-cloud
# via llama-index-llms-openai
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
# via llama-index-readers-file
# via llama-index-readers-llama-parse
# via llama-parse
llama-index-embeddings-openai==0.1.11
# via llama-index
# via llama-index-cli
llama-index-indices-managed-llama-cloud==0.2.7
# via llama-index
llama-index-legacy==0.9.48.post2
# via llama-index
llama-index-llms-openai==0.1.29
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
llama-index-multi-modal-llms-openai==0.1.9
# via llama-index
llama-index-program-openai==0.1.7
# via llama-index
# via llama-index-question-gen-openai
llama-index-question-gen-openai==0.1.3
# via llama-index
llama-index-readers-file==0.1.33
# via llama-index
llama-index-readers-llama-parse==0.1.6
# via llama-index
llama-parse==0.4.9
# via llama-index-readers-llama-parse
marisa-trie==1.2.0
# via language-data
markdown-it-py==3.0.0
# via rich
markupsafe==2.1.5
# via jinja2
marshmallow==3.21.3
# via dataclasses-json
matplotlib==3.9.2
# via semanticsilm
matplotlib-inline==0.1.7
# via ipython
mdurl==0.1.2
# via markdown-it-py
multidict==6.0.5
# via aiohttp
# via yarl
murmurhash==1.0.10
# via preshed
# via spacy
# via thinc
mypy-extensions==1.0.0
# via typing-inspect
nest-asyncio==1.6.0
# via llama-index-core
# via llama-index-legacy
networkx==3.3
# via llama-index-core
# via llama-index-legacy
# via python-louvain
# via pyvis
# via semanticsilm
nltk==3.8.1
# via llama-index-core
# via llama-index-legacy
numpy==1.26.4
# via blis
# via contourpy
# via llama-index-core
# via llama-index-legacy
# via matplotlib
# via pandas
# via python-louvain
# via scipy
# via spacy
# via thinc
openai==1.41.0
# via llama-index-agent-openai
# via llama-index-core
# via llama-index-legacy
# via llama-index-llms-openai
packaging==24.1
# via marshmallow
# via matplotlib
# via plotly
# via spacy
# via thinc
# via weasel
pandas==2.2.2
# via llama-index-core
# via llama-index-legacy
parso==0.8.4
# via jedi
pexpect==4.9.0
# via ipython
pillow==10.4.0
# via llama-index-core
# via matplotlib
plotly==5.23.0
# via semanticsilm
preshed==3.0.9
# via spacy
# via thinc
prompt-toolkit==3.0.47
# via ipython
ptyprocess==0.7.0
# via pexpect
pure-eval==0.2.3
# via stack-data
pydantic==2.8.2
# via confection
# via llama-cloud
# via openai
# via spacy
# via thinc
# via weasel
pydantic-core==2.20.1
# via pydantic
pygments==2.18.0
# via ipython
# via rich
pyparsing==3.1.2
# via matplotlib
pypdf==4.3.1
# via llama-index-readers-file
python-dateutil==2.9.0.post0
# via matplotlib
# via pandas
python-louvain==0.16
# via semanticsilm
pytz==2024.1
# via pandas
pyvis==0.3.2
# via semanticsilm
pyyaml==6.0.2
# via llama-index-core
rapidfuzz==3.9.6
# via thefuzz
regex==2024.7.24
# via nltk
# via tiktoken
requests==2.32.3
# via llama-index-core
# via llama-index-legacy
# via spacy
# via tiktoken
# via weasel
rich==13.7.1
# via typer
scipy==1.14.0
# via semanticsilm
setuptools==72.2.0
# via marisa-trie
# via spacy
# via thinc
shellingham==1.5.4
# via typer
six==1.16.0
# via asttokens
# via python-dateutil
smart-open==7.0.4
# via weasel
sniffio==1.3.1
# via anyio
# via httpx
# via openai
soupsieve==2.6
# via beautifulsoup4
spacy==3.7.5
# via semanticsilm
spacy-legacy==3.0.12
# via spacy
spacy-loggers==1.0.5
# via spacy
sqlalchemy==2.0.32
# via llama-index-core
# via llama-index-legacy
srsly==2.4.8
# via confection
# via spacy
# via thinc
# via weasel
stack-data==0.6.3
# via ipython
striprtf==0.0.26
# via llama-index-readers-file
tenacity==8.5.0
# via llama-index-core
# via llama-index-legacy
# via plotly
thefuzz==0.22.1
# via semanticsilm
thinc==8.2.5
# via spacy
tiktoken==0.7.0
# via llama-index-core
# via llama-index-legacy
tqdm==4.66.5
# via llama-index-core
# via nltk
# via openai
# via semanticsilm
# via spacy
traitlets==5.14.3
# via ipython
# via matplotlib-inline
typer==0.12.4
# via spacy
# via weasel
typing-extensions==4.12.2
# via llama-index-core
# via llama-index-legacy
# via openai
# via pydantic
# via pydantic-core
# via sqlalchemy
# via typer
# via typing-inspect
typing-inspect==0.9.0
# via dataclasses-json
# via llama-index-core
# via llama-index-legacy
tzdata==2024.1
# via pandas
urllib3==2.2.2
# via requests
wasabi==1.1.3
# via spacy
# via thinc
# via weasel
wcwidth==0.2.13
# via prompt-toolkit
weasel==0.4.1
# via spacy
wrapt==1.16.0
# via deprecated
# via llama-index-core
# via smart-open
yarl==1.9.4
# via aiohttp