From dbf4bb462af3050879b4ac10b6bc1d4dcb3da07d Mon Sep 17 00:00:00 2001
From: mlflow-automation
Date: Tue, 22 Oct 2024 12:57:06 +0000
Subject: [PATCH] Run python3 dev/update_ml_package_versions.py

Signed-off-by: mlflow-automation
---
 mlflow/ml-package-versions.yml | 24 ++++++++++++------------
 mlflow/ml_package_versions.py  | 24 ++++++++++++------------
 2 files changed, 24 insertions(+), 24 deletions(-)

diff --git a/mlflow/ml-package-versions.yml b/mlflow/ml-package-versions.yml
index 81bc0b72aa513e..294dc3936cb7c1 100644
--- a/mlflow/ml-package-versions.yml
+++ b/mlflow/ml-package-versions.yml
@@ -38,7 +38,7 @@ pytorch:
 
   models:
     minimum: "1.9.0"
-    maximum: "2.4.1"
+    maximum: "2.5.0"
     requirements:
       ">= 0.0.0": ["torchvision", "scikit-learn"]
       ">= 1.8": ["transformers"]
@@ -49,7 +49,7 @@ pytorch:
 
   autologging:
     minimum: "1.9.0"
-    maximum: "2.4.1"
+    maximum: "2.5.0"
     requirements:
       ">= 0.0.0": ["tensorboard"]
     python:
@@ -246,7 +246,7 @@ fastai:
 
   models:
     minimum: "2.4.1"
-    maximum: "2.7.17"
+    maximum: "2.7.18"
     requirements:
       # TODO: fastai depends on spacy, but spacy>=3.8.2 requires python>=3.9
       # Once migrating fastai tests to python 3.9, we should remove spacy<3.8.2
@@ -259,7 +259,7 @@ fastai:
 
   autologging:
     minimum: "2.4.1"
-    maximum: "2.7.17"
+    maximum: "2.7.18"
     requirements:
       "> 0.0.0": ["spacy<3.8.2", "torch<1.13.0", "torchvision<0.14.0"]
     run: |
@@ -640,7 +640,7 @@ openai:
       pip install git+https://github.com/openai/openai-python
   models:
     minimum: "1.0.1"
-    maximum: "1.51.2"
+    maximum: "1.52.0"
     requirements:
       ">= 0.0.0": [
         "pyspark",
@@ -655,7 +655,7 @@
       pytest tests/openai --ignore tests/openai/test_openai_autolog.py
   autologging:
     minimum: "1.17.0"
-    maximum: "1.51.2"
+    maximum: "1.52.0"
     requirements:
       ">= 0.0.0": [
         "pyspark",
@@ -676,7 +676,7 @@ dspy:
       pip install git+https://github.com/stanfordnlp/dspy.git
   models:
     minimum: "2.5.6"
-    maximum: "2.5.6"
+    maximum: "2.5.15"
     python:
       ">= 2.5.6": "3.9"
     requirements:
@@ -696,7 +696,7 @@ langchain:
   models:
     # Where the large package update was made (langchain-core, community, ...)
     minimum: "0.0.354"
-    maximum: "0.3.3"
+    maximum: "0.3.4"
     python:
       "== dev": "3.9"
     requirements:
@@ -741,7 +741,7 @@
       fi
   autologging:
     minimum: "0.1.0"
-    maximum: "0.3.3"
+    maximum: "0.3.4"
     python:
       "== dev": "3.9"
     requirements:
@@ -789,7 +789,7 @@ llama_index:
   models:
     # New event/span framework is fully implemented in 0.10.44
     minimum: "0.10.44"
-    maximum: "0.11.17"
+    maximum: "0.11.19"
     python:
       "== dev": "3.9"
     requirements:
@@ -809,7 +809,7 @@
   run: pytest tests/llama_index --ignore tests/llama_index/test_llama_index_autolog.py --ignore tests/llama_index/test_llama_index_tracer.py
   autologging:
     minimum: "0.10.44"
-    maximum: "0.11.17"
+    maximum: "0.11.19"
     python:
       "== dev": "3.9"
     requirements:
@@ -850,7 +850,7 @@ sentence_transformers:
      pip install git+https://github.com/UKPLab/sentence-transformers#egg=sentence-transformers
   models:
     minimum: "2.2.2"
-    maximum: "3.2.0"
+    maximum: "3.2.1"
     requirements:
       ">= 0.0.0": [
         "pyspark",
diff --git a/mlflow/ml_package_versions.py b/mlflow/ml_package_versions.py
index 5159e9e1ef3795..5da0bdd14dd874 100644
--- a/mlflow/ml_package_versions.py
+++ b/mlflow/ml_package_versions.py
@@ -22,11 +22,11 @@
         },
         "models": {
             "minimum": "1.9.0",
-            "maximum": "2.4.1"
+            "maximum": "2.5.0"
         },
         "autologging": {
             "minimum": "1.9.0",
-            "maximum": "2.4.1"
+            "maximum": "2.5.0"
         }
     },
     "pytorch-lightning": {
@@ -118,11 +118,11 @@
         },
         "models": {
             "minimum": "2.4.1",
-            "maximum": "2.7.17"
+            "maximum": "2.7.18"
         },
         "autologging": {
             "minimum": "2.4.1",
-            "maximum": "2.7.17"
+            "maximum": "2.7.18"
         }
     },
     "onnx": {
@@ -256,11 +256,11 @@
         },
         "models": {
             "minimum": "1.0.1",
-            "maximum": "1.51.2"
+            "maximum": "1.52.0"
         },
         "autologging": {
             "minimum": "1.17.0",
-            "maximum": "1.51.2"
+            "maximum": "1.52.0"
         }
     },
     "dspy": {
@@ -269,7 +269,7 @@
         },
         "models": {
             "minimum": "2.5.6",
-            "maximum": "2.5.6"
+            "maximum": "2.5.15"
         }
     },
     "langchain": {
@@ -278,11 +278,11 @@
         },
         "models": {
             "minimum": "0.0.354",
-            "maximum": "0.3.3"
+            "maximum": "0.3.4"
         },
         "autologging": {
             "minimum": "0.1.0",
-            "maximum": "0.3.3"
+            "maximum": "0.3.4"
         }
     },
     "llama_index": {
@@ -292,11 +292,11 @@
         },
         "models": {
             "minimum": "0.10.44",
-            "maximum": "0.11.17"
+            "maximum": "0.11.19"
         },
         "autologging": {
             "minimum": "0.10.44",
-            "maximum": "0.11.17"
+            "maximum": "0.11.19"
         }
     },
     "autogen": {
@@ -314,7 +314,7 @@
         },
         "models": {
             "minimum": "2.2.2",
-            "maximum": "3.2.0"
+            "maximum": "3.2.1"
         }
     },
     "johnsnowlabs": {