diff --git a/dev/make-distribution.sh b/dev/make-distribution.sh index 16fbb71be429..8ba93aa56f00 100755 --- a/dev/make-distribution.sh +++ b/dev/make-distribution.sh @@ -233,7 +233,7 @@ if [ "$MAKE_PIP" == "true" ]; then pushd "$SPARK_HOME/python" > /dev/null # Delete the egg info file if it exists, this can cache older setup files. rm -rf pyspark.egg-info || echo "No existing egg info file, skipping deletion" - python setup.py sdist + python3 setup.py sdist popd > /dev/null else echo "Skipping building python distribution package" diff --git a/docs/building-spark.md b/docs/building-spark.md index 898f904461a6..891d6225bbdd 100644 --- a/docs/building-spark.md +++ b/docs/building-spark.md @@ -66,7 +66,7 @@ with Maven profile settings and so on like the direct Maven build. Example: ./dev/make-distribution.sh --name custom-spark --pip --r --tgz -Psparkr -Phive -Phive-thriftserver -Pmesos -Pyarn -Pkubernetes -This will build Spark distribution along with Python pip and R packages. For more information on usage, run `./dev/make-distribution.sh --help` +This will build Spark distribution along with Python pip and R packages. (Note that building with the Python pip package requires Python 3.6). For more information on usage, run `./dev/make-distribution.sh --help`