[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"

[packages]
# protobuf is needed for serializing data
protobuf = ">=3"
# PySpark has to match the version of Spark we use for testing
pyspark = "==3.5.1"
# pyarrow is needed for Pandas UDF and mapInPandas
# https://spark.apache.org/docs/latest/api/python/user_guide/sql/arrow_pandas.html#recommended-pandas-and-pyarrow-versions
# https://spark.apache.org/docs/latest/api/python/getting_started/install.html#dependencies
pyarrow = ">=17.0.0"
# delta-spark is needed for Delta Lake
delta-spark = "==3.2.0"
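# Note: delta-spark 3.2.x is built against Spark 3.5.x, so this pin should move in
# lockstep with the pyspark pin above.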
# Deprecated is needed for marking deprecated functions
Deprecated = ">=1.2.13"
# helix.fhir.client.sdk is needed for interacting with FHIR servers
"helix.fhir.client.sdk" = ">=3.0.5"
# helix-mockserver-client is needed for mocking servers
helix-mockserver-client = ">=2.0.1"
# sparkdataframecomparer is needed for comparing Spark DataFrames
sparkdataframecomparer = ">=2.0.12"

[dev-packages]
# setuptools is needed for building the package
setuptools = ">=74.1.2"
# wheel is needed for building the package
wheel = ">=0.44.0"
# twine is needed for uploading the package to PyPI
twine = ">=5.1.1"
# pre-commit is needed for running code quality checks
pre-commit = ">=3.7.1"
# autoflake is needed for removing unused imports
autoflake = ">=2.3.1"
# mypy is needed for type checking
mypy = ">=1.11.2"
# pytest is needed for running tests
pytest = ">=8.3.3"
# pytest-asyncio is needed for running async tests
pytest-asyncio = ">=0.23.8"
# black is needed for formatting code
black = ">=24.8.0"
# py4j is needed for connecting to the JVM from Spark
py4j = "==0.10.9.7" # https://spark.apache.org/docs/latest/api/python/getting_started/install.html#dependencies
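# (0.10.9.7 is the py4j version bundled with Spark 3.5.x.)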
# pyspark is needed for running Spark jobs
pyspark = "==3.5.1" # should match the version of Spark we use for testing
# pygments is needed for syntax highlighting
pygments = ">=2.18.0" # not directly required, pinned by Snyk to avoid a vulnerability
# types-Deprecated is needed for type hints for Deprecated
types-Deprecated = ">=0.1.2"
# sparkautomapper is needed for mapping data
sparkautomapper = "==3.0.1"
# sparkautomapper.fhir is needed for mapping FHIR data
"sparkautomapper.fhir" = "==3.0.1"
# sparkfhirschemas is needed for FHIR schemas
sparkfhirschemas = "==2.0.2"
# sparkpipelineframework is needed for building pipelines
sparkpipelineframework = "==3.0.12"
# requests is needed for making HTTP requests
requests = ">=2.31.0"
# dictdiffer is needed for diffing dictionaries
dictdiffer = ">=0.9.0"
# Sphinx is needed for generating documentation
Sphinx = "==7.4.7"
# sphinx-autoapi is needed for generating API documentation
sphinx-autoapi = "==3.2.1"
# sphinx-rtd-theme is needed for the Read the Docs theme
sphinx-rtd-theme = "==2.0.0"
# myst-parser is needed for parsing Markdown
myst-parser = "==3.0.1"
# recommonmark is needed for Markdown support in Sphinx
recommonmark = "==0.7.1"
# types-requests is needed for type hints for requests
types-requests = ">=2.31.0"
# types-Pygments is needed for type hints for pygments
types-Pygments = "*"

# These dependencies are required only for pipenv-setup. They conflict with the ones
# above, so we install this category only when running pipenv-setup.
[pipenvsetup]
# vistir provides filesystem and path utilities required by pipenv-setup
vistir = ">=0.6.1, <0.7.0" # https://github.com/Madoshakalaka/pipenv-setup/issues/138
# plette provides structured Pipfile parsing for pipenv-setup
plette = "<1.0.0" # https://github.com/Madoshakalaka/pipenv-setup/issues/138
# pipenv-setup is needed to keep setup.py in sync with these dependencies for anyone installing this package
pipenv-setup = ">=3.2.0"
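# A sketch of how this category might be used (assumes pipenv >= 2022.10, which
# added --categories; flags may differ on other versions):
#   pipenv install --categories=pipenvsetup
#   pipenv run pipenv-setup sync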
[requires]
python_version = "3.12"

[pipenv]
allow_prereleases = false
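# Common workflows (a sketch; assumes a standard pipenv setup):
#   pipenv install --dev                  # install [packages] plus [dev-packages]
#   pipenv run pytest                     # run the test suite
#   pipenv run mypy .                     # type-check
#   pipenv run pre-commit run --all-files # run code quality checks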