From 5706d8c39571348973026ab0b0eb988f4d83a9fe Mon Sep 17 00:00:00 2001
From: Shuai Lin
Date: Sun, 4 Dec 2016 18:35:33 +0800
Subject: [PATCH] Address review comments.

---
 python/MANIFEST.in | 4 ++--
 python/setup.py    | 3 ---
 2 files changed, 2 insertions(+), 5 deletions(-)

diff --git a/python/MANIFEST.in b/python/MANIFEST.in
index 3b8c99ef9cc41..40f1fb2f1ee7e 100644
--- a/python/MANIFEST.in
+++ b/python/MANIFEST.in
@@ -17,8 +17,8 @@
 global-exclude *.py[cod] __pycache__ .DS_Store
 recursive-include deps/jars *.jar
 graft deps/bin
-graft deps/data
-graft deps/licenses
+recursive-include deps/data *.data *.txt
+recursive-include deps/licenses *.txt
 recursive-include deps/examples *.py
 recursive-include lib *.zip
 include README.md
diff --git a/python/setup.py b/python/setup.py
index e1bdb952698e5..317279a97e078 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -77,8 +77,6 @@
 LICENSES_PATH = os.path.join(SPARK_HOME, "licenses")
 LICENSES_TARGET = os.path.join(TEMP_PATH, "licenses")
 
-data_files = glob.glob(os.path.join(LICENSES_PATH, "*"))
-
 # Check and see if we are under the spark path in which case we need to build the symlink farm.
 # This is important because we only want to build the symlink farm while under Spark otherwise we
 # want to use the symlink farm. And if the symlink farm exists under while under Spark (e.g. a
@@ -189,7 +187,6 @@ def _supports_symlinks():
             'pyspark.data': ['*.txt', '*.data'],
             'pyspark.licenses': ['*.txt'],
             'pyspark.examples.src.main.python': ['*.py', '*/*.py']},
-        data_files=[('', data_files)],
         scripts=scripts,
         license='http://www.apache.org/licenses/LICENSE-2.0',
         install_requires=['py4j==0.10.4'],
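
Note (illustrative, not part of the patch): the MANIFEST.in change swaps the broad "graft" rules for "recursive-include" with explicit patterns, so only the intended *.data and *.txt files reach the sdist rather than everything under those directories. Dropping data_files=[('', data_files)] relies on the package_data entries already present, which ship the same files inside the pyspark package instead of installing them into the root of the install prefix. A minimal sketch of the selection difference, using made-up file names and a basename-level approximation of the MANIFEST.in pattern matching:

# Sketch: contrast "graft DIR" (keeps everything under DIR) with
# "recursive-include DIR PAT..." (keeps only files matching a pattern).
# File names below are hypothetical, for illustration only.
import fnmatch
import os

FILES = [
    "deps/data/graphx/followers.txt",
    "deps/data/mllib/sample_svm_data.data",
    "deps/data/mllib/images/notes.md",      # stray file; graft would keep it
    "deps/licenses/LICENSE-py4j.txt",
    "deps/licenses/LICENSE-py4j.txt.orig",  # stray backup; graft would keep it
]

def graft(directory):
    """Everything under the directory, like the old MANIFEST.in rules."""
    return [f for f in FILES if f.startswith(directory + "/")]

def recursive_include(directory, *patterns):
    """Roughly what the new rules keep: files under the directory whose
    basename matches one of the glob patterns."""
    return [f for f in FILES
            if f.startswith(directory + "/")
            and any(fnmatch.fnmatch(os.path.basename(f), p) for p in patterns)]

assert "deps/data/mllib/images/notes.md" in graft("deps/data")
assert "deps/data/mllib/images/notes.md" not in recursive_include(
    "deps/data", "*.data", "*.txt")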