@@ -153,21 +153,15 @@ def _supports_symlinks():
 # will search for SPARK_HOME with Python.
 scripts.append("pyspark/find_spark_home.py")
 
-# Parse the README markdown file into rst for PyPI
-long_description = "!!!!! missing pandoc do not upload to PyPI !!!!"
-try:
-    import pypandoc
-    long_description = pypandoc.convert('README.md', 'rst')
-except ImportError:
-    print("Could not import pypandoc - required to package PySpark", file=sys.stderr)
-except OSError:
-    print("Could not convert - pandoc is not installed", file=sys.stderr)
+with open('README.md') as f:
+    long_description = f.read()
 
 setup(
     name='pyspark',
     version=VERSION,
     description='Apache Spark Python API',
     long_description=long_description,
+    long_description_content_type="text/markdown",
     author='Spark Developers',
     author_email='dev@spark.apache.org',
     url='https://github.com/apache/spark/tree/master/python',
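For reference, here is a minimal standalone sketch of the packaging pattern this hunk adopts: read the markdown README verbatim and declare its content type so PyPI renders it directly, with no pandoc/pypandoc conversion step. This assumes setuptools and a `README.md` next to `setup.py`; the package name and metadata are illustrative, not Spark's.

```python
# Minimal sketch of the "markdown long_description" pattern (hypothetical
# package metadata; only the two README-related lines mirror the diff).
from setuptools import setup

# Read the markdown README verbatim; an explicit encoding avoids
# locale-dependent decoding errors on some platforms.
with open("README.md", encoding="utf-8") as f:
    long_description = f.read()

setup(
    name="example-package",  # hypothetical name
    version="0.0.1",
    description="Example package",
    long_description=long_description,
    # Tells PyPI to render the description as markdown instead of rst,
    # removing the need for a pandoc conversion step at packaging time.
    long_description_content_type="text/markdown",
)
```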
@@ -213,7 +207,6 @@ def _supports_symlinks():
     scripts=scripts,
     license='http://www.apache.org/licenses/LICENSE-2.0',
     install_requires=['py4j==0.10.8.1'],
-    setup_requires=['pypandoc'],
     extras_require={
         'ml': ['numpy>=1.7'],
         'mllib': ['numpy>=1.7'],
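With `setup_requires=['pypandoc']` removed, a plain `pip install pyspark` no longer depends on pandoc being available at build time. The `extras_require` groups shown above stay opt-in as usual: for example, `pip install pyspark[ml]` additionally pulls in `numpy>=1.7`.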