diff --git a/bin/pyspark b/bin/pyspark
index ad4132fb59eb0..ed35e1f445450 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -50,7 +50,7 @@ export PYSPARK_DRIVER_PYTHON_OPTS
 
 # Add the PySpark classes to the Python path:
 export PYTHONPATH="${SPARK_HOME}/python/:$PYTHONPATH"
-export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.9-src.zip:$PYTHONPATH"
+export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.9.1-src.zip:$PYTHONPATH"
 
 # Load the PySpark shell.py script when ./pyspark is used interactively:
 export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
index dc34be1a41706..5741480fe5501 100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -30,7 +30,7 @@ if "x%PYSPARK_DRIVER_PYTHON%"=="x" (
 )
 
 set PYTHONPATH=%SPARK_HOME%\python;%PYTHONPATH%
-set PYTHONPATH=%SPARK_HOME%\python\lib\py4j-0.10.9-src.zip;%PYTHONPATH%
+set PYTHONPATH=%SPARK_HOME%\python\lib\py4j-0.10.9.1-src.zip;%PYTHONPATH%
 
 set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
 set PYTHONSTARTUP=%SPARK_HOME%\python\pyspark\shell.py
diff --git a/core/pom.xml b/core/pom.xml
index 561a35402c1c3..9cd8848bca2ac 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -418,7 +418,7 @@
     <dependency>
       <groupId>net.sf.py4j</groupId>
       <artifactId>py4j</artifactId>
-      <version>0.10.9</version>
+      <version>0.10.9.1</version>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
index 490b48719b6be..a6277747cb5fd 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
@@ -27,7 +27,7 @@ import org.apache.spark.SparkContext
 import org.apache.spark.api.java.{JavaRDD, JavaSparkContext}
 
 private[spark] object PythonUtils {
-  val PY4J_ZIP_NAME = "py4j-0.10.9-src.zip"
+  val PY4J_ZIP_NAME = "py4j-0.10.9.1-src.zip"
 
   /** Get the PYTHONPATH for PySpark, either from SPARK_HOME, if it is set, or from our JAR */
   def sparkPythonPath: String = {
diff --git a/dev/deps/spark-deps-hadoop-palantir b/dev/deps/spark-deps-hadoop-palantir
index 96785ce62437e..c9a28caed465f 100644
--- a/dev/deps/spark-deps-hadoop-palantir
+++ b/dev/deps/spark-deps-hadoop-palantir
@@ -145,7 +145,7 @@ parquet-format-structures/1.12.0-palantir.8//parquet-format-structures-1.12.0-palantir.8.jar
 parquet-hadoop/1.12.0-palantir.8//parquet-hadoop-1.12.0-palantir.8.jar
 parquet-jackson/1.12.0-palantir.8//parquet-jackson-1.12.0-palantir.8.jar
 protobuf-java/2.5.0//protobuf-java-2.5.0.jar
-py4j/0.10.9//py4j-0.10.9.jar
+py4j/0.10.9.1//py4j-0.10.9.1.jar
 pyrolite/4.30//pyrolite-4.30.jar
 safe-logging/1.5.1//safe-logging-1.5.1.jar
 scala-collection-compat_2.12/2.1.1//scala-collection-compat_2.12-2.1.1.jar
diff --git a/python/docs/Makefile b/python/docs/Makefile
index b11e3589065c3..17d6943558ad5 100644
--- a/python/docs/Makefile
+++ b/python/docs/Makefile
@@ -7,7 +7,7 @@ SPHINXBUILD   ?= sphinx-build
 PAPER         ?=
 BUILDDIR      ?= _build
 
-export PYTHONPATH=$(realpath ..):$(realpath ../lib/py4j-0.10.9-src.zip)
+export PYTHONPATH=$(realpath ..):$(realpath ../lib/py4j-0.10.9.1-src.zip)
 
 # User-friendly check for sphinx-build
 ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
diff --git a/python/docs/make2.bat b/python/docs/make2.bat
index 2d7d929755a04..19226d4728de1 100644
--- a/python/docs/make2.bat
+++ b/python/docs/make2.bat
@@ -2,7 +2,7 @@
 
 REM Command file for Sphinx documentation
 
-set PYTHONPATH=..;..\lib\py4j-0.10.9-src.zip
+set PYTHONPATH=..;..\lib\py4j-0.10.9.1-src.zip
 
 if "%SPHINXBUILD%" == "" (
 	set SPHINXBUILD=sphinx-build
diff --git a/python/lib/py4j-0.10.9-src.zip b/python/lib/py4j-0.10.9.1-src.zip
similarity index 94%
rename from python/lib/py4j-0.10.9-src.zip
rename to python/lib/py4j-0.10.9.1-src.zip
index 2c49836147030..11eb331b6f2c6 100644
Binary files a/python/lib/py4j-0.10.9-src.zip and b/python/lib/py4j-0.10.9.1-src.zip differ
diff --git a/python/setup.py b/python/setup.py
index a1cde31aa0ada..e61b7d65608db 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -207,7 +207,7 @@ def _supports_symlinks():
             'pyspark.examples.src.main.python': ['*.py', '*/*.py']},
         scripts=scripts,
         license='http://www.apache.org/licenses/LICENSE-2.0',
-        install_requires=['py4j==0.10.9'],
+        install_requires=['py4j==0.10.9.1'],
         extras_require={
             'ml': ['numpy>=1.7'],
             'mllib': ['numpy>=1.7'],
diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 77e1976c152f7..4d21e8dee7184 100644
--- a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -446,7 +446,7 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
     // needed locations.
     val sparkHome = sys.props("spark.test.home")
     val pythonPath = Seq(
-      s"$sparkHome/python/lib/py4j-0.10.9-src.zip",
+      s"$sparkHome/python/lib/py4j-0.10.9.1-src.zip",
       s"$sparkHome/python")
     val extraEnvVars = Map(
       "PYSPARK_ARCHIVES_PATH" -> pythonPath.map("local:" + _).mkString(File.pathSeparator),
diff --git a/sbin/spark-config.sh b/sbin/spark-config.sh
index b53442ec096a1..7389416bb3192 100755
--- a/sbin/spark-config.sh
+++ b/sbin/spark-config.sh
@@ -28,6 +28,6 @@ export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"${SPARK_HOME}/conf"}"
 # Add the PySpark classes to the PYTHONPATH:
 if [ -z "${PYSPARK_PYTHONPATH_SET}" ]; then
   export PYTHONPATH="${SPARK_HOME}/python:${PYTHONPATH}"
-  export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.9-src.zip:${PYTHONPATH}"
+  export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.9.1-src.zip:${PYTHONPATH}"
   export PYSPARK_PYTHONPATH_SET=1
 fi
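As a quick post-upgrade sanity check, the snippet below verifies that the renamed Py4J archive is in place and that the importable module matches the `py4j==0.10.9.1` pin from `python/setup.py`. This is a minimal sketch, not part of the diff, assuming `SPARK_HOME` points at a build that includes this change:

```python
import os

# Assumption: SPARK_HOME points at a Spark build containing this change.
spark_home = os.environ["SPARK_HOME"]
py4j_zip = os.path.join(spark_home, "python", "lib", "py4j-0.10.9.1-src.zip")

# bin/pyspark, sbin/spark-config.sh, and PythonUtils.PY4J_ZIP_NAME all point
# at the renamed archive, so it must exist under python/lib.
assert os.path.isfile(py4j_zip), f"missing {py4j_zip}"

# setup.py now pins install_requires=['py4j==0.10.9.1']; the importable
# module should report the matching version.
from py4j.version import __version__ as py4j_version
assert py4j_version == "0.10.9.1", py4j_version
```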