diff --git a/binder/ApacheSedonaCore.ipynb b/binder/ApacheSedonaCore.ipynb index e82d5b225e..01a5f5847d 100644 --- a/binder/ApacheSedonaCore.ipynb +++ b/binder/ApacheSedonaCore.ipynb @@ -24,7 +24,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -45,53 +45,14 @@ }, { "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - ":: loading settings :: url = jar:file:/Users/nileshgajwani/Desktop/spark/spark-3.4.0-bin-hadoop3/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Ivy Default Cache set to: /Users/nileshgajwani/.ivy2/cache\n", - "The jars for the packages stored in: /Users/nileshgajwani/.ivy2/jars\n", - "org.apache.sedona#sedona-spark-shaded-3.0_2.12 added as a dependency\n", - "org.datasyslab#geotools-wrapper added as a dependency\n", - ":: resolving dependencies :: org.apache.spark#spark-submit-parent-7c4309f6-195c-4473-b3ac-629607126e04;1.0\n", - "\tconfs: [default]\n", - "\tfound org.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 in central\n", - "\tfound org.datasyslab#geotools-wrapper;1.4.0-28.2 in central\n", - ":: resolution report :: resolve 121ms :: artifacts dl 2ms\n", - "\t:: modules in use:\n", - "\torg.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 from central in [default]\n", - "\torg.datasyslab#geotools-wrapper;1.4.0-28.2 from central in [default]\n", - "\t---------------------------------------------------------------------\n", - "\t| | modules || artifacts |\n", - "\t| conf | number| search|dwnlded|evicted|| number|dwnlded|\n", - "\t---------------------------------------------------------------------\n", - "\t| default | 2 | 0 | 0 | 0 || 2 | 0 |\n", - "\t---------------------------------------------------------------------\n", - ":: retrieving :: org.apache.spark#spark-submit-parent-7c4309f6-195c-4473-b3ac-629607126e04\n", - "\tconfs: [default]\n", - "\t0 artifacts copied, 2 already retrieved (0kB/3ms)\n", - "23/07/03 20:34:33 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n", - "Setting default log level to \"WARN\".\n", - "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n", - "23/07/03 20:34:33 WARN Utils: Service 'SparkUI' could not bind on port 4040. Attempting port 4041.\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", - " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.4.1,'\n", - " 'org.datasyslab:geotools-wrapper:1.4.0-28.2'). \\\n", + " 'org.apache.sedona:sedona-spark-shaded-3.4_2.12:1.5.0,'\n", + " 'org.datasyslab:geotools-wrapper:1.5.0-28.2'). 
\\\n", " getOrCreate()\n", "\n", "sedona = SedonaContext.create(config)" @@ -99,7 +60,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -137,29 +98,18 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "point_rdd = PointRDD(sc, \"data/arealm-small.csv\", 1, FileDataSplitter.CSV, True, 10, StorageLevel.MEMORY_ONLY, \"epsg:4326\", \"epsg:4326\")" + "point_rdd = PointRDD(sc, \"data/arealm-small.csv\", 1, FileDataSplitter.CSV, True, 10)" ] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "3000" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "## Getting approximate total count\n", "point_rdd.approximateTotalCount" @@ -167,37 +117,9 @@ }, { "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:27: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.minx = minx\n", - "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:28: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.maxx = maxx\n", - "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:29: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.miny = miny\n", - "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:30: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.maxy = maxy\n" - ] - }, - { - "data": { - "image/svg+xml": [ - "" - ], - "text/plain": [ - "Envelope(-173.120769, -84.965961, 30.244859, 71.355134)" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "# getting boundary for PointRDD or any other SpatialRDD, it returns Enelope object which inherits from\n", "# shapely.geometry.Polygon\n", @@ -206,20 +128,9 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# To run analyze please use function analyze\n", "point_rdd.analyze()" @@ -227,23 +138,9 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "image/svg+xml": [ - "" - ], - "text/plain": [ - "Envelope(-173.120769, -84.965961, 30.244859, 71.355134)" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Finding boundary envelope for PointRDD or any other SpatialRDD, it returns Enelope object which inherits from\n", "# shapely.geometry.Polygon\n", @@ 
-252,20 +149,9 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "2996" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Calculate number of records without duplicates\n", "point_rdd.countWithoutDuplicates()" @@ -273,20 +159,9 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'epsg:4326'" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Getting source epsg code\n", "point_rdd.getSourceEpsgCode()" @@ -294,20 +169,9 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'epsg:4326'" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Getting target epsg code\n", "point_rdd.getTargetEpsgCode()" @@ -315,20 +179,9 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Spatial partitioning data\n", "point_rdd.spatialPartitioning(GridType.KDBTREE)" @@ -354,30 +207,9 @@ }, { "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\r", - "[Stage 8:> (0 + 1) / 1]\r", - "\r", - " \r" - ] - }, - { - "data": { - "text/plain": [ - "[Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2]" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "# take firs element\n", "point_rdd.rawSpatialRDD.take(1)" @@ -385,24 +217,9 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n", - " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n", - " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n", - " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n", - " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2]" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# collect to Python list\n", "point_rdd.rawSpatialRDD.collect()[:5]" @@ -410,24 +227,9 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[111.08786851399313,\n", - " 110.92828303170774,\n", - " 111.1385974283527,\n", - " 110.97450594034112,\n", - " 110.97122518072091]" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# apply map functions, for example distance to Point(52 21)\n", "point_rdd.rawSpatialRDD.map(lambda x: x.geom.distance(Point(21, 52))).take(5)" @@ -444,7 +246,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Loaded data can be transfomred to GeoPandas DataFrame few ways" + 
"## Loaded data can be transformed to GeoPandas DataFrame in a few ways" ] }, { @@ -456,7 +258,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -465,7 +267,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -476,90 +278,9 @@ }, { "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
geomattr1attr2attr3
0POINT (-88.33149 32.32414)testattribute0testattribute1testattribute2
1POINT (-88.17593 32.36076)testattribute0testattribute1testattribute2
2POINT (-88.38895 32.35707)testattribute0testattribute1testattribute2
3POINT (-88.22110 32.35078)testattribute0testattribute1testattribute2
4POINT (-88.32399 32.95067)testattribute0testattribute1testattribute2
\n", - "
" - ], - "text/plain": [ - " geom attr1 attr2 attr3\n", - "0 POINT (-88.33149 32.32414) testattribute0 testattribute1 testattribute2\n", - "1 POINT (-88.17593 32.36076) testattribute0 testattribute1 testattribute2\n", - "2 POINT (-88.38895 32.35707) testattribute0 testattribute1 testattribute2\n", - "3 POINT (-88.22110 32.35078) testattribute0 testattribute1 testattribute2\n", - "4 POINT (-88.32399 32.95067) testattribute0 testattribute1 testattribute2" - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "point_gdf[:5]" ] @@ -573,16 +294,18 @@ }, { "cell_type": "code", - "execution_count": 21, - "metadata": {}, + "execution_count": null, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "# Adapter allow you to convert geospatial data types introduced with sedona to other ones" + "# Adapter allows you to convert geospatial data types introduced with sedona to other ones" ] }, { "cell_type": "code", - "execution_count": 22, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -595,117 +318,18 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------+--------------+--------------+----------------------------+\n", - "|attr1 |attr2 |attr3 |geom |\n", - "+--------------+--------------+--------------+----------------------------+\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.331492 32.324142)|\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.175933 32.360763)|\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.388954 32.357073)|\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.221102 32.35078) |\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.323995 32.950671)|\n", - "+--------------+--------------+--------------+----------------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "spatial_gdf.show(5, False)" ] }, { "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
attr1attr2attr3geom
0testattribute0testattribute1testattribute2POINT (-88.33149 32.32414)
1testattribute0testattribute1testattribute2POINT (-88.17593 32.36076)
2testattribute0testattribute1testattribute2POINT (-88.38895 32.35707)
3testattribute0testattribute1testattribute2POINT (-88.22110 32.35078)
4testattribute0testattribute1testattribute2POINT (-88.32399 32.95067)
\n", - "
" - ], - "text/plain": [ - " attr1 attr2 attr3 geom\n", - "0 testattribute0 testattribute1 testattribute2 POINT (-88.33149 32.32414)\n", - "1 testattribute0 testattribute1 testattribute2 POINT (-88.17593 32.36076)\n", - "2 testattribute0 testattribute1 testattribute2 POINT (-88.38895 32.35707)\n", - "3 testattribute0 testattribute1 testattribute2 POINT (-88.22110 32.35078)\n", - "4 testattribute0 testattribute1 testattribute2 POINT (-88.32399 32.95067)" - ] - }, - "execution_count": 24, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "gpd.GeoDataFrame(spatial_gdf.toPandas(), geometry=\"geom\")[:5]" ] @@ -719,7 +343,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -735,7 +359,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -744,90 +368,9 @@ }, { "cell_type": "code", - "execution_count": 27, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
geometryattr1attr2attr3
0POINT (-88.33149 32.32414)testattribute0testattribute1testattribute2
1POINT (-88.17593 32.36076)testattribute0testattribute1testattribute2
2POINT (-88.38895 32.35707)testattribute0testattribute1testattribute2
3POINT (-88.22110 32.35078)testattribute0testattribute1testattribute2
4POINT (-88.32399 32.95067)testattribute0testattribute1testattribute2
\n", - "
" - ], - "text/plain": [ - " geometry attr1 attr2 attr3\n", - "0 POINT (-88.33149 32.32414) testattribute0 testattribute1 testattribute2\n", - "1 POINT (-88.17593 32.36076) testattribute0 testattribute1 testattribute2\n", - "2 POINT (-88.38895 32.35707) testattribute0 testattribute1 testattribute2\n", - "3 POINT (-88.22110 32.35078) testattribute0 testattribute1 testattribute2\n", - "4 POINT (-88.32399 32.95067) testattribute0 testattribute1 testattribute2" - ] - }, - "execution_count": 27, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "gpd.GeoDataFrame(geo_df.toPandas(), geometry=\"geometry\")[:5]" ] @@ -853,32 +396,21 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ "rectangle_rdd = RectangleRDD(sc, \"data/zcta510-small.csv\", FileDataSplitter.CSV, True, 11)\n", "point_rdd = PointRDD(sc, \"data/arealm-small.csv\", 1, FileDataSplitter.CSV, False, 11)\n", "polygon_rdd = PolygonRDD(sc, \"data/primaryroads-polygon.csv\", FileDataSplitter.CSV, True, 11)\n", - "linestring_rdd = LineStringRDD(sc, \"data/primaryroads-linestring.csv\", FileDataSplitter.CSV, True, StorageLevel.MEMORY_ONLY)" + "linestring_rdd = LineStringRDD(sc, \"data/primaryroads-linestring.csv\", FileDataSplitter.CSV, True)" ] }, { "cell_type": "code", - "execution_count": 29, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 29, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "rectangle_rdd.analyze()\n", "point_rdd.analyze()\n", @@ -902,20 +434,9 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 30, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "point_rdd.spatialPartitioning(GridType.KDBTREE)" ] @@ -936,7 +457,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -984,7 +505,7 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1007,83 +528,36 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "MapPartitionsRDD[64] at map at FlatPairRddConverter.scala:30" - ] - }, - "execution_count": 33, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result" ] }, { "cell_type": "code", - "execution_count": 34, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[[Geometry: Polygon userData: , Geometry: Point userData: ],\n", - " [Geometry: Polygon userData: , Geometry: Point userData: ]]" - ] - }, - "execution_count": 34, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result.take(2)" ] }, { "cell_type": "code", - "execution_count": 35, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[[Geometry: Polygon userData: , Geometry: Point userData: ],\n", - " [Geometry: Polygon userData: , Geometry: Point userData: ],\n", - " [Geometry: Polygon userData: , Geometry: Point userData: ]]" - ] - }, - "execution_count": 35, - "metadata": {}, - "output_type": "execute_result" - } - 
], + "outputs": [], "source": [ "result.collect()[:3]" ] }, { "cell_type": "code", - "execution_count": 36, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[0.0, 0.0, 0.0, 0.0, 0.0]" - ] - }, - "execution_count": 36, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# getting distance using SpatialObjects\n", "result.map(lambda x: x[0].geom.distance(x[1].geom)).take(5)" @@ -1091,24 +565,9 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[0.026651558685001447,\n", - " 0.051572544132000575,\n", - " 0.051572544132000575,\n", - " 0.051572544132000575,\n", - " 0.05189354027999942]" - ] - }, - "execution_count": 37, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# getting area of polygon data\n", "result.map(lambda x: x[0].geom.area).take(5)" @@ -1116,7 +575,7 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1125,7 +584,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1139,27 +598,9 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+\n", - "| geom_left| geom_right|\n", - "+--------------------+--------------------+\n", - "|POLYGON ((-87.229...|POINT (-87.204299...|\n", - "|POLYGON ((-87.082...|POINT (-87.059583...|\n", - "|POLYGON ((-87.082...|POINT (-87.075409...|\n", - "|POLYGON ((-87.082...|POINT (-87.08084 ...|\n", - "|POLYGON ((-87.092...|POINT (-87.08084 ...|\n", - "+--------------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "# Set verifySchema to False\n", "spatial_join_result = result.map(lambda x: [x[0].geom, x[1].geom])\n", @@ -1168,7 +609,7 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1177,20 +618,9 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geom_left: geometry (nullable = false)\n", - " |-- geom_right: geometry (nullable = false)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "sedona.createDataFrame(spatial_join_result, schema, verifySchema=False).printSchema()" ] @@ -1204,27 +634,9 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+-----+--------------------+-----+\n", - "| geom_1|attr1| geom_2|attr2|\n", - "+--------------------+-----+--------------------+-----+\n", - "|POLYGON ((-87.229...| |POINT (-87.204299...| |\n", - "|POLYGON ((-87.082...| |POINT (-87.059583...| |\n", - "|POLYGON ((-87.082...| |POINT (-87.075409...| |\n", - "|POLYGON ((-87.082...| |POINT (-87.08084 ...| |\n", - "|POLYGON ((-87.092...| |POINT (-87.08084 ...| |\n", - "+--------------------+-----+--------------------+-----+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(result, [\"attr1\"], [\"attr2\"], sedona).show(5, True)" ] @@ -1238,22 +650,9 @@ }, { 
"cell_type": "code", - "execution_count": 44, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geom_1: geometry (nullable = true)\n", - " |-- attr1: string (nullable = true)\n", - " |-- geom_2: geometry (nullable = true)\n", - " |-- attr2: string (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(result, [\"attr1\"], [\"attr2\"], sedona).printSchema()" ] @@ -1275,7 +674,7 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1285,7 +684,7 @@ }, { "cell_type": "code", - "execution_count": 46, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1294,7 +693,7 @@ }, { "cell_type": "code", - "execution_count": 47, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1304,7 +703,7 @@ }, { "cell_type": "code", - "execution_count": 48, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1316,31 +715,9 @@ }, { "cell_type": "code", - "execution_count": 49, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+----------------+\n", - "| geometry|number_of_points|\n", - "+--------------------+----------------+\n", - "|POLYGON ((-86.749...| 4|\n", - "|POLYGON ((-87.229...| 7|\n", - "|POLYGON ((-87.114...| 15|\n", - "|POLYGON ((-87.082...| 12|\n", - "|POLYGON ((-86.697...| 1|\n", - "|POLYGON ((-86.816...| 6|\n", - "|POLYGON ((-87.285...| 26|\n", - "|POLYGON ((-87.105...| 15|\n", - "|POLYGON ((-86.860...| 12|\n", - "|POLYGON ((-87.092...| 5|\n", - "+--------------------+----------------+\n", - "\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "sedona.createDataFrame(number_of_points, schema, verifySchema=False).show()" ] @@ -1367,17 +744,15 @@ ] }, { - "cell_type": "code", - "execution_count": 50, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "# Finds 5 closest points from PointRDD to given Point" + "### Finds 5 closest points from PointRDD to given Point" ] }, { "cell_type": "code", - "execution_count": 51, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1386,24 +761,9 @@ }, { "cell_type": "code", - "execution_count": 52, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[Geometry: Point userData: ,\n", - " Geometry: Point userData: ,\n", - " Geometry: Point userData: ,\n", - " Geometry: Point userData: ,\n", - " Geometry: Point userData: ]" - ] - }, - "execution_count": 52, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result" ] @@ -1417,7 +777,7 @@ }, { "cell_type": "code", - "execution_count": 53, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1431,44 +791,18 @@ }, { "cell_type": "code", - "execution_count": 54, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[Geometry: Polygon userData: ,\n", - " Geometry: Polygon userData: ,\n", - " Geometry: Polygon userData: ,\n", - " Geometry: Polygon userData: ,\n", - " Geometry: Polygon userData: ]" - ] - }, - "execution_count": 54, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "polygons_nearby" ] }, { "cell_type": "code", - "execution_count": 55, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": 
[ - "'POLYGON ((-83.993559 34.087259, -83.993559 34.131247, -83.959903 34.131247, -83.959903 34.087259, -83.993559 34.087259))'" - ] - }, - "execution_count": 55, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "polygons_nearby[0].geom.wkt" ] @@ -1494,7 +828,7 @@ }, { "cell_type": "code", - "execution_count": 56, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1503,24 +837,9 @@ }, { "cell_type": "code", - "execution_count": 57, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:27: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.minx = minx\n", - "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:28: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.maxx = maxx\n", - "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:29: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.miny = miny\n", - "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:30: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.maxy = maxy\n" - ] - } - ], + "outputs": [], "source": [ "query_envelope = Envelope(-85.01, -60.01, 34.01, 50.01)\n", "\n", @@ -1529,59 +848,25 @@ }, { "cell_type": "code", - "execution_count": 58, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "MapPartitionsRDD[128] at map at GeometryRddConverter.scala:30" - ] - }, - "execution_count": 58, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result_range_query" ] }, { "cell_type": "code", - "execution_count": 59, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/07/03 20:34:42 WARN BlockManager: Task 405 already completed, not releasing lock for rdd_45_0\n" - ] - }, - { - "data": { - "text/plain": [ - "[Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ]" - ] - }, - "execution_count": 59, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "result_range_query.take(6)" ] }, { "cell_type": "code", - "execution_count": 60, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1590,7 +875,7 @@ }, { "cell_type": "code", - "execution_count": 61, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1599,27 +884,9 @@ }, { "cell_type": "code", - "execution_count": 62, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|LINESTRING (-72.1...|\n", - "|LINESTRING (-72.4...|\n", - 
"|LINESTRING (-72.4...|\n", - "|LINESTRING (-73.4...|\n", - "|LINESTRING (-73.6...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "sedona.createDataFrame(\n", " result_range_query.map(lambda x: [x.geom]),\n", @@ -1648,7 +915,7 @@ }, { "cell_type": "code", - "execution_count": 63, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1657,7 +924,7 @@ }, { "cell_type": "code", - "execution_count": 64, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1666,54 +933,25 @@ }, { "cell_type": "code", - "execution_count": 65, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 65, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "shape_rdd" ] }, { "cell_type": "code", - "execution_count": 66, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|MULTIPOLYGON (((1...|\n", - "|MULTIPOLYGON (((-...|\n", - "|MULTIPOLYGON (((1...|\n", - "|POLYGON ((118.362...|\n", - "|MULTIPOLYGON (((-...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(shape_rdd, sedona).show(5, True)" ] }, { "cell_type": "code", - "execution_count": 67, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1731,7 +969,7 @@ }, { "cell_type": "code", - "execution_count": 68, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1740,54 +978,25 @@ }, { "cell_type": "code", - "execution_count": 69, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 69, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "geo_json_rdd" ] }, { "cell_type": "code", - "execution_count": 70, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+-------+--------+-------+--------+--------------------+------------+----+----+--------+\n", - "| geometry|STATEFP|COUNTYFP|TRACTCE|BLKGRPCE| AFFGEOID| GEOID|NAME|LSAD| ALAND|\n", - "+--------------------+-------+--------+-------+--------+--------------------+------------+----+----+--------+\n", - "|POLYGON ((-87.621...| 01| 077| 011501| 5|1500000US01077011...|010770115015| 5| BG| 6844991|\n", - "|POLYGON ((-85.719...| 01| 045| 021102| 4|1500000US01045021...|010450211024| 4| BG|11360854|\n", - "|POLYGON ((-86.000...| 01| 055| 001300| 3|1500000US01055001...|010550013003| 3| BG| 1378742|\n", - "|POLYGON ((-86.574...| 01| 089| 001700| 2|1500000US01089001...|010890017002| 2| BG| 1040641|\n", - "|POLYGON ((-85.382...| 01| 069| 041400| 1|1500000US01069041...|010690414001| 1| BG| 8243574|\n", - "+--------------------+-------+--------+-------+--------+--------------------+------------+----+----+--------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(geo_json_rdd, sedona).drop(\"AWATER\").show(5, True)" ] }, { "cell_type": "code", - "execution_count": 71, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1796,7 +1005,7 @@ }, { "cell_type": "code", - "execution_count": 72, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1805,73 +1014,34 @@ 
}, { "cell_type": "code", - "execution_count": 73, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 73, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "wkt_rdd" ] }, { "cell_type": "code", - "execution_count": 74, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geometry: geometry (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(wkt_rdd, sedona).printSchema()" ] }, { "cell_type": "code", - "execution_count": 75, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|POLYGON ((-97.019...|\n", - "|POLYGON ((-123.43...|\n", - "|POLYGON ((-104.56...|\n", - "|POLYGON ((-96.910...|\n", - "|POLYGON ((-98.273...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(wkt_rdd, sedona).show(5, True)" ] }, { "cell_type": "code", - "execution_count": 76, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1880,7 +1050,7 @@ }, { "cell_type": "code", - "execution_count": 77, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1889,27 +1059,9 @@ }, { "cell_type": "code", - "execution_count": 78, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|POLYGON ((-97.019...|\n", - "|POLYGON ((-123.43...|\n", - "|POLYGON ((-104.56...|\n", - "|POLYGON ((-96.910...|\n", - "|POLYGON ((-98.273...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(wkb_rdd, sedona).show(5, True)" ] @@ -1923,7 +1075,7 @@ }, { "cell_type": "code", - "execution_count": 79, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1937,7 +1089,7 @@ }, { "cell_type": "code", - "execution_count": 80, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1947,67 +1099,27 @@ }, { "cell_type": "code", - "execution_count": 81, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- leftgeometry: geometry (nullable = true)\n", - " |-- rightgeometry: geometry (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "geometry_df.printSchema()" ] }, { "cell_type": "code", - "execution_count": 82, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+\n", - "| leftgeometry| rightgeometry|\n", - "+--------------------+--------------------+\n", - "|POLYGON ((-87.285...|POINT (-87.28468 ...|\n", - "|POLYGON ((-87.285...|POINT (-87.215491...|\n", - "|POLYGON ((-87.285...|POINT (-87.210001...|\n", - "|POLYGON ((-87.285...|POINT (-87.278485...|\n", - "|POLYGON ((-87.285...|POINT (-87.280556...|\n", - "+--------------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "geometry_df.show(5)" ] }, { "cell_type": "code", - "execution_count": 83, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - 
"text/plain": [ - "Row(leftgeometry=, rightgeometry=)" - ] - }, - "execution_count": 83, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "geometry_df.collect()[0]" ] @@ -2021,7 +1133,7 @@ }, { "cell_type": "code", - "execution_count": 84, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -2030,27 +1142,9 @@ }, { "cell_type": "code", - "execution_count": 85, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------+--------------------+---------------+\n", - "| leftgeometry|left_user_data| rightgeometry|right_user_data|\n", - "+--------------------+--------------+--------------------+---------------+\n", - "|POLYGON ((-87.285...| |POINT (-87.28468 ...| null|\n", - "|POLYGON ((-87.285...| |POINT (-87.215491...| null|\n", - "|POLYGON ((-87.285...| |POINT (-87.210001...| null|\n", - "|POLYGON ((-87.285...| |POINT (-87.278485...| null|\n", - "|POLYGON ((-87.285...| |POINT (-87.280556...| null|\n", - "+--------------------+--------------+--------------------+---------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "geometry_df.show(5)" ] @@ -2064,7 +1158,7 @@ }, { "cell_type": "code", - "execution_count": 86, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -2075,7 +1169,7 @@ }, { "cell_type": "code", - "execution_count": 87, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -2085,53 +1179,25 @@ }, { "cell_type": "code", - "execution_count": 88, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|LINESTRING (-72.1...|\n", - "|LINESTRING (-72.4...|\n", - "|LINESTRING (-72.4...|\n", - "|LINESTRING (-73.4...|\n", - "|LINESTRING (-73.6...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "gdf.show(5)" ] }, { "cell_type": "code", - "execution_count": 89, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geometry: geometry (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "gdf.printSchema()" ] }, { "cell_type": "code", - "execution_count": 90, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -2142,47 +1208,18 @@ }, { "cell_type": "code", - "execution_count": 91, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+---+\n", - "| geometry|_c1|\n", - "+--------------------+---+\n", - "|LINESTRING (-72.1...| |\n", - "|LINESTRING (-72.4...| |\n", - "|LINESTRING (-72.4...| |\n", - "|LINESTRING (-73.4...| |\n", - "|LINESTRING (-73.6...| |\n", - "+--------------------+---+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "gdf_with_columns.show(5)" ] }, { "cell_type": "code", - "execution_count": 92, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geometry: geometry (nullable = true)\n", - " |-- _c1: string (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "gdf_with_columns.printSchema()" ] diff --git a/binder/ApacheSedonaRaster.ipynb b/binder/ApacheSedonaRaster.ipynb 
index d3c3b8ae0b..a5cafa3d8b 100644 --- a/binder/ApacheSedonaRaster.ipynb +++ b/binder/ApacheSedonaRaster.ipynb @@ -2,6 +2,7 @@ "cells": [ { "cell_type": "markdown", + "id": "fbefc0bd-731b-43e4-b271-6cb4cba5c256", "metadata": {}, "source": [ "```\n", @@ -23,1105 +24,492 @@ ] }, { - "cell_type": "code", - "execution_count": 1, + "cell_type": "markdown", + "id": "b443d3d3-1667-4770-b57c-7f79a3ea5d42", "metadata": {}, + "source": [ + "## Import Sedona" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "328d0b74-1efd-468c-bc96-a469965df60b", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "from IPython.display import display, HTML\n", - "from pyspark.sql import SparkSession\n", - "from pyspark import StorageLevel\n", - "import pandas as pd\n", - "from pyspark.sql.types import StructType, StructField,StringType, LongType, IntegerType, DoubleType, ArrayType\n", - "from pyspark.sql.functions import regexp_replace\n", - "from pyspark.sql.functions import col, split, expr\n", - "from pyspark.sql.functions import udf, lit\n", "from sedona.spark import *\n", - "from pyspark.sql.functions import col, split, expr\n", - "from pyspark.sql.functions import udf, lit\n", - "import os\n" + "from IPython.display import display, HTML" ] }, { "cell_type": "markdown", + "id": "f28c8117-069c-431c-ac58-6ff258b1196d", "metadata": {}, "source": [ - "# Create Spark Session for application" + "## Create a Sedona Context object.\n", + "If you already have a spark instance available, simply use ```SedonaContext.create(spark)```." ] }, { "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - ":: loading settings :: url = jar:file:/Users/nileshgajwani/Desktop/spark/spark-3.4.0-bin-hadoop3/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Ivy Default Cache set to: /Users/nileshgajwani/.ivy2/cache\n", - "The jars for the packages stored in: /Users/nileshgajwani/.ivy2/jars\n", - "org.apache.sedona#sedona-spark-shaded-3.0_2.12 added as a dependency\n", - "org.datasyslab#geotools-wrapper added as a dependency\n", - ":: resolving dependencies :: org.apache.spark#spark-submit-parent-f6cc1c05-35e7-48b0-8060-745906834ca0;1.0\n", - "\tconfs: [default]\n", - "\tfound org.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 in central\n", - "\tfound org.datasyslab#geotools-wrapper;1.4.0-28.2 in central\n", - ":: resolution report :: resolve 79ms :: artifacts dl 2ms\n", - "\t:: modules in use:\n", - "\torg.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 from central in [default]\n", - "\torg.datasyslab#geotools-wrapper;1.4.0-28.2 from central in [default]\n", - "\t---------------------------------------------------------------------\n", - "\t| | modules || artifacts |\n", - "\t| conf | number| search|dwnlded|evicted|| number|dwnlded|\n", - "\t---------------------------------------------------------------------\n", - "\t| default | 2 | 0 | 0 | 0 || 2 | 0 |\n", - "\t---------------------------------------------------------------------\n", - ":: retrieving :: org.apache.spark#spark-submit-parent-f6cc1c05-35e7-48b0-8060-745906834ca0\n", - "\tconfs: [default]\n", - "\t0 artifacts copied, 2 already retrieved (0kB/3ms)\n", - "23/06/30 14:06:36 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... 
using builtin-java classes where applicable\n", - "Setting default log level to \"WARN\".\n", - "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n" - ] - } - ], + "execution_count": null, + "id": "e3495923-7fb4-4a6e-b62e-a4eeb9c2b306", + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", - " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.4.1,'\n", - " 'org.datasyslab:geotools-wrapper:1.4.0-28.2'). \\\n", + " 'org.apache.sedona:sedona-spark-shaded-3.4_2.12:1.5.0,'\n", + " 'org.datasyslab:geotools-wrapper:1.5.0-28.2'). \\\n", " getOrCreate()\n", "\n", "sedona = SedonaContext.create(config)\n", "\n", - "sc = sedona.sparkContext\n" + "sc = sedona.sparkContext" ] }, { "cell_type": "markdown", + "id": "91d4e6ae-eeb6-46ca-89fd-8f82e6056924", "metadata": {}, "source": [ - "# Geotiff Loader \n", - "\n", - "1. Loader takes as input a path to directory which contains geotiff files or a path to particular geotiff file\n", - "2. Loader will read geotiff image in a struct named image which contains multiple fields as shown in the schema below which can be extracted using spark SQL" + "## Read GeoTiff files" ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, + "id": "58c05200-27f7-46ce-b2c5-4c1dc058c96e", "metadata": {}, "outputs": [], "source": [ - "# Path to directory of geotiff images \n", - "DATA_DIR = \"./data/raster/\"" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- image: struct (nullable = true)\n", - " | |-- origin: string (nullable = true)\n", - " | |-- geometry: string (nullable = true)\n", - " | |-- height: integer (nullable = true)\n", - " | |-- width: integer (nullable = true)\n", - " | |-- nBands: integer (nullable = true)\n", - " | |-- data: array (nullable = true)\n", - " | | |-- element: double (containsNull = true)\n", - "\n" - ] - } - ], - "source": [ - "df = sedona.read.format(\"geotiff\").option(\"dropInvalid\",True).option(\"readToCRS\", \"EPSG:4326\").option(\"disableErrorInCRS\", False).load(DATA_DIR)\n", - "df.printSchema()" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\r", - "[Stage 3:> (0 + 1) / 1]\r" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+------+-----+--------------------+-----+\n", - "| origin| Geom|height|width| data|bands|\n", - "+--------------------+--------------------+------+-----+--------------------+-----+\n", - "|file:/Users/niles...|POLYGON ((-58.702...| 32| 32|[1081.0, 1068.0, ...| 4|\n", - "|file:/Users/niles...|POLYGON ((-58.286...| 32| 32|[1151.0, 1141.0, ...| 4|\n", - "+--------------------+--------------------+------+-----+--------------------+-----+\n", - "\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\r", - " \r" - ] - } - ], - "source": [ - "df = df.selectExpr(\"image.origin as origin\",\"ST_GeomFromWkt(image.geometry) as Geom\", \"image.height as height\", \"image.width as width\", \"image.data as data\", \"image.nBands as bands\")\n", - "df.show(5)" + "geotiff_df = sedona.read.format(\"binaryFile\").load(\"data/raster/test5.tiff\")\n", + "geotiff_df.show(2)\n", + "geotiff_df.createOrReplaceTempView(\"binary_raster\")" ] }, 
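[Editor's note: for readability, the new GeoTiff loading flow introduced by the hunks above, which is flattened in the raw diff, reassembles into the following Python. This is a sketch of the added cells only; it assumes the binder's bundled sample file data/raster/test5.tiff and the sedona context created earlier in the notebook.]

# Read the GeoTiff as Spark binary files, then decode the bytes into a
# raster column with RS_FromGeoTiff (Sedona 1.5.0).
geotiff_df = sedona.read.format("binaryFile").load("data/raster/test5.tiff")
geotiff_df.createOrReplaceTempView("binary_raster")
raster_df = sedona.sql("SELECT RS_FromGeoTiff(content) AS raster FROM binary_raster")
raster_df.createOrReplaceTempView("raster_table")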
{ "cell_type": "markdown", + "id": "db66242c-d0b3-4348-b2ef-4344d266cb4c", "metadata": {}, "source": [ - "# Extract a particular band from geotiff dataframe using RS_GetBand()\n" + "## Create raster columns from the read binary data" ] }, { "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+--------------------+--------------------+--------------------+\n", - "| Geom| Band1| Band2| Band3| Band4|\n", - "+--------------------+--------------------+--------------------+--------------------+--------------------+\n", - "|POLYGON ((-58.702...|[1081.0, 1068.0, ...|[909.0, 909.0, 82...|[677.0, 660.0, 66...|[654.0, 652.0, 66...|\n", - "|POLYGON ((-58.286...|[1151.0, 1141.0, ...|[894.0, 956.0, 10...|[751.0, 802.0, 87...|[0.0, 0.0, 0.0, 0...|\n", - "+--------------------+--------------------+--------------------+--------------------+--------------------+\n", - "\n" - ] - } - ], + "execution_count": null, + "id": "36eb9e36-cbcb-472a-96c6-79d49305cf66", + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ - "'''RS_GetBand() will fetch a particular band from given data array which is the concatenation of all the bands'''\n", - "\n", - "df = df.selectExpr(\"Geom\",\"RS_GetBand(data, 1,bands) as Band1\",\"RS_GetBand(data, 2,bands) as Band2\",\"RS_GetBand(data, 3,bands) as Band3\", \"RS_GetBand(data, 4,bands) as Band4\")\n", - "df.createOrReplaceTempView(\"allbands\")\n", - "df.show(5)" + "raster_df = sedona.sql(\"SELECT RS_FromGeoTiff(content) as raster from binary_raster\")\n", + "raster_df.show(2)\n", + "raster_df.createOrReplaceTempView(\"raster_table\")" ] }, { "cell_type": "markdown", + "id": "3932eb9e-aeb6-4abe-a986-f26a11eb1fe3", "metadata": {}, "source": [ - "# Map Algebra operations on band values" + "## Operate on rasters using Sedona\n", + "Once a raster column is created, you're now free to use the entire catalog of Sedona's [raster functions](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/). The following part of notebook contains a few examples." ] }, { - "cell_type": "code", - "execution_count": 8, + "cell_type": "markdown", + "id": "1b47699e-8ce4-4859-ace9-d12ea1f4d0b9", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| normDiff|\n", - "+--------------------+\n", - "|[-0.09, -0.08, -0...|\n", - "|[-0.13, -0.09, -0...|\n", - "+--------------------+\n", - "\n" - ] - } - ], "source": [ - "'''RS_NormalizedDifference can be used to calculate NDVI for a particular geotiff image since it uses same computational formula as ndvi'''\n", - "\n", - "NomalizedDifference = df.selectExpr(\"RS_NormalizedDifference(Band1, Band2) as normDiff\")\n", - "NomalizedDifference.show(5)" + "### Access raster metadata\n", + "[RS_MetaData](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/#rs_metadata) can be used to view the loaded raster's metadata (orientation and georeferencing attributes)." 
] }, { "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+-------+\n", - "| mean|\n", - "+-------+\n", - "|1153.85|\n", - "|1293.77|\n", - "+-------+\n", - "\n" - ] - } - ], + "execution_count": null, + "id": "6d635263-9e8b-4f74-9b91-d360d196b966", + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ - "'''RS_Mean() can used to calculate mean of piel values in a particular spatial band'''\n", - "meanDF = df.selectExpr(\"RS_Mean(Band1) as mean\")\n", - "meanDF.show(5)" + "raster_metadata = sedona.sql(\"SELECT RS_MetaData(raster) as metadata from raster_table\")\n", + "metadata = raster_metadata.first()[0]\n", + "raster_srid = metadata[8]\n", + "metadata" ] }, { - "cell_type": "code", - "execution_count": 10, + "cell_type": "markdown", + "id": "713bc8db-3143-4a79-abb5-08ad81f9393a", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+----------------+\n", - "| mode|\n", - "+----------------+\n", - "| [1011.0, 927.0]|\n", - "|[1176.0, 1230.0]|\n", - "+----------------+\n", - "\n" - ] - } - ], "source": [ - "\"\"\" RS_Mode() is used to calculate mode in an array of pixels and returns a array of double with size 1 in case of unique mode\"\"\"\n", - "modeDF = df.selectExpr(\"RS_Mode(Band1) as mode\")\n", - "modeDF.show(5)" + "### Visualize rasters\n", + "Sedona 1.5.0 provides [multiple ways to be able to visualize rasters](https://sedona.apache.org/1.5.0/api/sql/Raster-visualizer/). Throughout this notebook, [RS_AsImage](https://sedona.apache.org/1.5.0/api/sql/Raster-visualizer/#rs_asimage) will be used to visualize any changes to the rasters." ] }, { "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| greaterthan|\n", - "+--------------------+\n", - "|[1.0, 1.0, 1.0, 0...|\n", - "|[1.0, 1.0, 1.0, 1...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "execution_count": null, + "id": "d5f615f4-a3d6-407c-aea9-58891c1e55e3", + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ - "'''RS_GreaterThan() is used to mask all the values with 1 which are greater than a particular threshold'''\n", - "greaterthanDF = sedona.sql(\"Select RS_GreaterThan(Band1,1000.0) as greaterthan from allbands\")\n", - "greaterthanDF.show()" + "# Define a simple wrapper to display HTML in jupyter notebook environment\n", + "class SedonaUtils:\n", + " @classmethod\n", + " def display_image(cls, df):\n", + " display(HTML(df.toPandas().to_html(escape=False)))" ] }, { "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| greaterthanEqual|\n", - "+--------------------+\n", - "|[1.0, 1.0, 1.0, 1...|\n", - "|[1.0, 1.0, 1.0, 1...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "execution_count": null, + "id": "7fad137f-331c-4c2f-905d-dbc42cff11b6", + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ - "'''RS_GreaterThanEqual() is used to mask all the values with 1 which are greater than a particular threshold'''\n", - "\n", - "greaterthanEqualDF = sedona.sql(\"Select RS_GreaterThanEqual(Band1,360.0) as greaterthanEqual from allbands\")\n", - "greaterthanEqualDF.show()" + "SedonaUtils.display_image(raster_df.selectExpr(\"RS_AsImage(raster, 500)\"))" ] }, { - "cell_type": "code", - "execution_count": 14, + 
"cell_type": "markdown", + "id": "cef34e54-ac3c-48f3-836c-5a5385b79481", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| lessthan|\n", - "+--------------------+\n", - "|[0.0, 0.0, 0.0, 1...|\n", - "|[0.0, 0.0, 0.0, 0...|\n", - "+--------------------+\n", - "\n" - ] - } - ], "source": [ - "'''RS_LessThan() is used to mask all the values with 1 which are less than a particular threshold'''\n", - "lessthanDF = sedona.sql(\"Select RS_LessThan(Band1,1000.0) as lessthan from allbands\")\n", - "lessthanDF.show()" + "### Join based on raster predicates\n", + "Sedona 1.5.0 now supports join predicates between raster and geometry columns.\n", + "\n", + "Below is a simple example that carves a small rectangle from the existing raster and attempts to join it with the original raster" ] }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, + "id": "6442349c-be2e-4609-a16e-b856745ddf46", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| lessthanequal|\n", - "+--------------------+\n", - "|[1.0, 1.0, 1.0, 1...|\n", - "|[1.0, 1.0, 1.0, 1...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ - "'''RS_LessThanEqual() is used to mask all the values with 1 which are less than equal to a particular threshold'''\n", - "lessthanEqualDF = sedona.sql(\"Select RS_LessThanEqual(Band1,2890.0) as lessthanequal from allbands\")\n", - "lessthanEqualDF.show()" + "(width, height) = sedona.sql(\"SELECT RS_Width(raster) as width, RS_Height(raster) as height from raster_table\").first()\n", + "(p1X, p1Y) = sedona.sql(f\"SELECT RS_RasterToWorldCoordX(raster, {width / 2}, {height / 2}) \\\n", + " as pX, RS_RasterToWorldCoordY(raster, {width / 2}, {height / 2}) as pY from raster_table\").first()\n", + "(p2X, p2Y) = sedona.sql(f\"SELECT RS_RasterToWorldCoordX(raster, {(width / 2) + 2}, {height / 2}) \\\n", + " as pX, RS_RasterToWorldCoordY(raster, {(width / 2) + 2}, {height / 2}) as pY from raster_table\").first()\n", + "(p3X, p3Y) = sedona.sql(f\"SELECT RS_RasterToWorldCoordX(raster, {width / 2}, {(height / 2) + 2}) \\\n", + " as pX, RS_RasterToWorldCoordY(raster, {width / 2}, {(height / 2) + 2}) as pY from raster_table\").first()\n", + "(p4X, p4Y) = sedona.sql(f\"SELECT RS_RasterToWorldCoordX(raster, {(width / 2) + 2}, {(height / 2) + 2}) \\\n", + " as pX, RS_RasterToWorldCoordY(raster, {(width / 2) + 2}, {(height / 2) + 2}) as pY from raster_table\").first() " ] }, { "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| sumOfBand|\n", - "+--------------------+\n", - "|[1990.0, 1977.0, ...|\n", - "|[2045.0, 2097.0, ...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "execution_count": null, + "id": "ed399ee8-42b7-488b-8141-320c2bf6d9c3", + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ - "'''RS_Add() can add two spatial bands together'''\n", - "sumDF = df.selectExpr(\"RS_Add(Band1, Band2) as sumOfBand\")\n", - "sumDF.show(5)" + "geom_wkt = f\"SRID={int(raster_srid)};POLYGON (({p1X} {p1Y}, {p2X} {p2Y}, {p3X} {p3Y}, {p4X} {p4Y}, {p1X} {p1Y}))\"" ] }, { "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| diffOfBand|\n", - "+--------------------+\n", - 
"|[-172.0, -159.0, ...|\n", - "|[-257.0, -185.0, ...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "execution_count": null, + "id": "cb8aa25f-4706-4ee7-9994-3da474c3eb2c", + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ - "'''RS_Subtract() can subtract two spatial bands together'''\n", - "subtractDF = df.selectExpr(\"RS_Subtract(Band1, Band2) as diffOfBand\")\n", - "subtractDF.show(5)" + "geom_df = sedona.sql(f\"SELECT ST_GeomFromEWKT('{geom_wkt}') as geom\")\n", + "geom_df.createOrReplaceTempView(\"geom_table\")" ] }, { "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| productOfBand|\n", - "+--------------------+\n", - "|[982629.0, 970812...|\n", - "|[1028994.0, 10907...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "execution_count": null, + "id": "6461c14c-d479-4c64-8f8f-8c21903dedf5", + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ - "'''RS_Multiply() can multiple two bands together'''\n", - "multiplyDF = df.selectExpr(\"RS_Multiply(Band1, Band2) as productOfBand\")\n", - "multiplyDF.show(5)" + "joined_df = sedona.sql(\"SELECT g.geom from raster_table r, geom_table g where RS_Intersects(r.raster, g.geom)\")\n", + "joined_df.show()" ] }, { - "cell_type": "code", - "execution_count": 19, + "cell_type": "markdown", + "id": "9be5e7db-17e5-4bab-b7a3-8ee278374355", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| divisionOfBand|\n", - "+--------------------+\n", - "|[1.19, 1.17, 1.25...|\n", - "|[1.29, 1.19, 1.19...|\n", - "+--------------------+\n", - "\n" - ] - } - ], "source": [ - "'''RS_Divide() can divide two bands together'''\n", - "divideDF = df.selectExpr(\"RS_Divide(Band1, Band2) as divisionOfBand\")\n", - "divideDF.show(5)" + "### Interoperability between raster and vector data types\n", + "Sedona allows for conversions from raster to geometry and vice-versa. " ] }, { - "cell_type": "code", - "execution_count": 20, + "cell_type": "markdown", + "id": "cc0bfd1c-7117-444a-8189-881da19846c9", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| target|\n", - "+--------------------+\n", - "|[1818.0, 1818.0, ...|\n", - "|[1788.0, 1912.0, ...|\n", - "+--------------------+\n", - "\n" - ] - } - ], "source": [ - "'''RS_MultiplyFactor() will multiply a factor to a spatial band'''\n", - "mulfacDF = df.selectExpr(\"RS_MultiplyFactor(Band2, 2) as target\")\n", - "mulfacDF.show(5)" + "### Convert a raster to vector using convex hull\n", + "A convex hull geometry can be created out of a raster using [RS_ConvexHull](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/#rs_convexhull)\n", + "\n", + "Additionally, if a raster has noDataValue specified, and you wish to tighten the convexhull to exclude noDataValue boundaries, [RS_MinConvexHull](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/#rs_minconvexhull) can be used." 
] }, { "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| AND|\n", - "+--------------------+\n", - "|[9.0, 12.0, 2.0, ...|\n", - "|[126.0, 52.0, 102...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "execution_count": null, + "id": "22b9dd16-f720-4fa4-acb9-b80c34702a93", + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ - "'''RS_BitwiseAND() will return AND between two values of Bands'''\n", - "bitwiseAND = df.selectExpr(\"RS_BitwiseAND(Band1, Band2) as AND\")\n", - "bitwiseAND.show(5)" + "raster_convex_hull = sedona.sql(\"SELECT RS_ConvexHull(raster) as convex_hull from raster_table\")\n", + "raster_min_convex_hull = sedona.sql(\"SELECT RS_MinConvexHull(raster) as min_convex_hull from raster_table\")\n", + "raster_convex_hull.show(truncate=False)\n", + "raster_min_convex_hull.show(truncate=False)" ] }, { - "cell_type": "code", - "execution_count": 22, + "cell_type": "markdown", + "id": "8ca7e862-45c9-4559-a2e1-4e044d6b5c84", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| OR|\n", - "+--------------------+\n", - "|[1981.0, 1965.0, ...|\n", - "|[1919.0, 2045.0, ...|\n", - "+--------------------+\n", - "\n" - ] - } - ], "source": [ - "'''RS_BitwiseOR() will return OR between two values of Bands'''\n", - "bitwiseOR = df.selectExpr(\"RS_BitwiseOR(Band1, Band2) as OR\")\n", - "bitwiseOR.show(5)" + "### Convert a geometry to raster (Rasterize a geometry)\n", + "A geometry can be converted to a raster using [RS_AsRaster](https://sedona.apache.org/1.5.0/api/sql/Raster-writer/#rs_asraster)" ] }, { "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+-----+\n", - "|count|\n", - "+-----+\n", - "| 753|\n", - "| 1017|\n", - "+-----+\n", - "\n" - ] - } - ], + "execution_count": null, + "id": "8bc32fc6-d418-4e7c-8631-57e2c623f14c", + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ - "'''RS_Count() will calculate the total number of occurrence of a target value'''\n", - "countDF = df.selectExpr(\"RS_Count(RS_GreaterThan(Band1,1000.0), 1.0) as count\")\n", - "countDF.show(5)" + "rasterized_geom_df = sedona.sql(\"SELECT RS_AsRaster(ST_GeomFromWKT('POLYGON((150 150, 220 260, 190 300, 300 220, 150 150))'), r.raster, 'b', 230) as rasterized_geom from raster_table r\")\n", + "rasterized_geom_df.show()" ] }, { "cell_type": "code", - "execution_count": 24, + "execution_count": null, + "id": "a7eecae9-3763-405f-a22e-c7d77ff703b0", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| modulo|\n", - "+--------------------+\n", - "|[10.0, 18.0, 18.0...|\n", - "|[17.0, 7.0, 2.0, ...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ - "'''RS_Modulo() will calculate the modulus of band value with respect to a given number'''\n", - "moduloDF = df.selectExpr(\"RS_Modulo(Band1, 21.0) as modulo \")\n", - "moduloDF.show(5)" + "SedonaUtils.display_image(rasterized_geom_df.selectExpr(\"RS_AsImage(rasterized_geom, 250) as rasterized_geom\"))" ] }, { - "cell_type": "code", - "execution_count": 25, + "cell_type": "markdown", + "id": "df954a81-5004-40f7-b80e-795f8569757c", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", 
- "| root|\n", - "+--------------------+\n", - "|[32.88, 32.68, 32...|\n", - "|[33.93, 33.78, 35...|\n", - "+--------------------+\n", - "\n" - ] - } - ], "source": [ - "'''RS_SquareRoot() will calculate calculate square root of all the band values up to two decimal places'''\n", - "rootDF = df.selectExpr(\"RS_SquareRoot(Band1) as root\")\n", - "rootDF.show(5)\n" + "### Perform Map Algebra operations\n", + "Sedona provides two ways to perform [Map Algebra](https://sedona.apache.org/1.5.0/api/sql/Raster-map-algebra/) on rasters:\n", + "1. Using RS_MapAlgebra (preferred for simpler algebraic functions)\n", + "2. Using RS_BandAsArray and array based map algebra functions such as RS_Add, RS_Multiply (Useful for complex algebriac functions involving mutating each grid value differently.)\n", + "\n", + "The following example illustrates how RS_MapAlgebra can be used. \n", + "This example uses jiffle script to invert the colors of the above illustrated rasterized geometry." ] }, { "cell_type": "code", - "execution_count": 26, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| loggDifference|\n", - "+--------------------+\n", - "|[1081.0, 1068.0, ...|\n", - "|[1151.0, 1141.0, ...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "execution_count": null, + "id": "53abef31-b1aa-42ef-8eb0-f1d9227e3893", + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ - "'''RS_LogicalDifference() will return value from band1 if value at that particular location is not equal tp band1 else it will return 0'''\n", - "logDiff = df.selectExpr(\"RS_LogicalDifference(Band1, Band2) as loggDifference\")\n", - "logDiff.show(5)" + "raster_white_bg = rasterized_geom_df.selectExpr(\"RS_MapAlgebra(rasterized_geom, NULL, 'out[0] = rast[0] == 0 ? 230 : 0;') as raster\")" ] }, { "cell_type": "code", - "execution_count": 27, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| logicalOver|\n", - "+--------------------+\n", - "|[677.0, 660.0, 66...|\n", - "|[751.0, 802.0, 87...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "execution_count": null, + "id": "75f06a1b-1ab6-478b-a50e-b621a10d6d8b", + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ - "'''RS_LogicalOver() will iterate over two bands and return value of first band if it is not equal to 0 else it will return value from later band'''\n", - "logOver = df.selectExpr(\"RS_LogicalOver(Band3, Band2) as logicalOver\")\n", - "logOver.show(5)" + "SedonaUtils.display_image(raster_white_bg.selectExpr(\"RS_AsImage(raster, 250) as resampled_raster\"))" ] }, { "cell_type": "markdown", + "id": "fde725ec-2941-4b6e-9b52-5fd35cea6c01", "metadata": {}, "source": [ - "# Visualising Geotiff Images\n", + "### Resample a raster.\n", + "Sedona 1.5.0 supports resampling a raster to different height/width or scale. It also supports changing the pivot of the raster.\n", "\n", - "1. Normalize the bands in range [0-255] if values are greater than 255\n", - "2. Process image using RS_Base64() which converts in into a base64 string\n", - "3. Embed results of RS_Base64() in RS_HTML() to embed into IPython notebook\n", - "4. 
Process results of RS_HTML() as below:" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+--------------------+--------------------+--------------------+\n", - "| Geom| RedBand| BlueBand| GreenBand| CombinedBand|\n", - "+--------------------+--------------------+--------------------+--------------------+--------------------+\n", - "|POLYGON ((-58.702...|\n", - " \n", - " \n", - " \n", - " Geom\n", - " RedBand\n", - " BlueBand\n", - " GreenBand\n", - " CombinedBand\n", - " \n", - " \n", - " \n", - " \n", - " 0\n", - " POLYGON ((-58.70271939504447 -34.418775445554786, -58.702776058228636 -34.421569880680615, -58.6994039180242 -34.42161679331493, -58.69934736692278 -34.4188223533111, -58.70271939504447 -34.418775445554786))\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " 1\n", - " POLYGON ((-58.286636576261145 -34.758580906202866, -58.286679941749476 -34.76137571668496, -58.28329340123003 -34.76141146033393, -58.28325014980317 -34.75861664615162, -58.286636576261145 -34.758580906202866))\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "display(HTML(df_HTML.limit(2).toPandas().to_html(escape=False)))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, + "execution_count": null, + "id": "c8fdb8c7-52d5-49fa-83f2-44a9438bd509", + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ - "# Writing GeoTiff Images" + "resampled_raster_df = sedona.sql(\"SELECT RS_Resample(raster, 1000, 1000, false, 'NearestNeighbor') as resampled_raster from raster_table\")" ] }, { "cell_type": "code", - "execution_count": 31, - "metadata": {}, + "execution_count": null, + "id": "b14820dc-ed04-41cd-9220-73a5179f52df", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "'''Writing GeoTiff DataFrames as GeoTiff Images'''\n", - "\n", - "df = sedona.read.format(\"geotiff\").option(\"dropInvalid\",True).option(\"readToCRS\", \"EPSG:4326\").load(DATA_DIR)\n", - "df = df.selectExpr(\"image.origin as origin\",\"ST_GeomFromWkt(image.geometry) as Geom\", \"image.height as height\", \"image.width as width\", \"image.data as data\", \"image.nBands as bands\")\n", - "\n", - "SAVE_PATH = \"./data/raster-written/\"\n", - "df.write.mode(\"overwrite\").format(\"geotiff\").option(\"writeToCRS\", \"EPSG:4326\").option(\"fieldGeometry\", \"Geom\").option(\"fieldNBands\", \"bands\").save(SAVE_PATH)" + "SedonaUtils.display_image(resampled_raster_df.selectExpr(\"RS_AsImage(resampled_raster, 500) as resampled_raster\"))" ] }, { "cell_type": "code", - "execution_count": 32, - "metadata": {}, + "execution_count": null, + "id": "bee36339-d0c1-469d-9354-980a23f24401", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "'''Writing GeoTiff Images in a Single Partition'''\n", - "df.coalesce(1).write.mode(\"overwrite\").format(\"geotiff\").option(\"writeToCRS\", \"EPSG:4326\").option(\"fieldGeometry\", \"Geom\").option(\"fieldNBands\", \"bands\").save(SAVE_PATH)" + "resampled_raster_df.selectExpr(\"RS_MetaData(resampled_raster) as resampled_raster_metadata\").show(truncate=False)" ] }, { "cell_type": "code", - "execution_count": 33, - "metadata": {}, + "execution_count": null, + "id": "2b0aa64e-4a02-4c85-9ba5-6459d2002f8a", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "'''Find the Partition of the Written 
GeoTiff Images.\n", - " If you did not write with coalesce(1), change the below code to adjust the writtenPath'''\n", - "writtenPath = SAVE_PATH\n", - "dirList = os.listdir(writtenPath)\n", - "for item in dirList:\n", - " if os.path.isdir(writtenPath + \"/\" + item):\n", - " writtenPath += \"/\" + item\n", - " break" + "# Load another raster for some more examples\n", + "elevation_raster_df = sedona.read.format('binaryFile').load('data/raster/test1.tiff')\n", + "elevation_raster_df.createOrReplaceTempView(\"elevation_raster_binary\")" ] }, { "cell_type": "code", - "execution_count": 35, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
GeomRedBandBlueBandGreenBandCombinedBand
0POLYGON ((-58.702667236328125 -34.418819427490234, -58.702667236328125 -34.421573638916016, -58.69945526123047 -34.421573638916016, -58.69945526123047 -34.418819427490234, -58.702667236328125 -34.418819427490234))
1POLYGON ((-58.286582946777344 -34.75862503051758, -58.286582946777344 -34.76136779785156, -58.28334426879883 -34.76136779785156, -58.28334426879883 -34.75862503051758, -58.286582946777344 -34.75862503051758))
" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "execution_count": null, + "id": "623123ac-98bc-4d51-828d-9d874cc6f471", + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ - "'''Load and Visualize Written GeoTiff Image.'''\n", - "\n", - "df = sedona.read.format(\"geotiff\").option(\"dropInvalid\",True).option(\"readToCRS\", \"EPSG:4326\").load(writtenPath)\n", - "df = df.selectExpr(\"image.origin as origin\",\"ST_GeomFromWkt(image.geometry) as Geom\", \"image.height as height\", \"image.width as width\", \"image.data as data\", \"image.nBands as bands\")\n", - "\n", - "df = df.selectExpr(\"RS_GetBand(data,1,bands) as targetband\", \"height\", \"width\", \"bands\", \"Geom\")\n", - "df_base64 = df.selectExpr(\"Geom\", \"RS_Base64(height,width,RS_Normalize(targetBand), RS_Array(height*width,0.0), RS_Array(height*width, 0.0)) as red\",\"RS_Base64(height,width,RS_Array(height*width, 0.0), RS_Normalize(targetBand), RS_Array(height*width, 0.0)) as green\", \"RS_Base64(height,width,RS_Array(height*width, 0.0), RS_Array(height*width, 0.0), RS_Normalize(targetBand)) as blue\",\"RS_Base64(height,width,RS_Normalize(targetBand), RS_Normalize(targetBand),RS_Normalize(targetBand)) as RGB\" )\n", - "df_HTML = df_base64.selectExpr(\"Geom\",\"RS_HTML(red) as RedBand\",\"RS_HTML(blue) as BlueBand\",\"RS_HTML(green) as GreenBand\", \"RS_HTML(RGB) as CombinedBand\")\n", - "display(HTML(df_HTML.limit(2).toPandas().to_html(escape=False)))" + "elevation_raster_df = sedona.sql(\"SELECT RS_FromGeoTiff(content) as raster from elevation_raster_binary\")\n", + "elevation_raster_df.createOrReplaceTempView(\"elevation_raster\")" ] }, { "cell_type": "markdown", + "id": "2a6afdf3-e774-432f-96a3-96a4ca8249c7", "metadata": {}, "source": [ - "# Transformation of GeoTiff Images" + "### Access individual values from rasters\n", + "Sedona provides [RS_Value](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/#rs_value) and [RS_Values](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/#rs_values) that allow accessing raster values at given geometrical point(s).\n", + "\n", + "The following example extracts raster values at specific geographical points." 
] }, { "cell_type": "code", - "execution_count": 37, + "execution_count": null, + "id": "ffe589e1-50b7-431a-ba84-b2c297b77f65", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+------+-----+--------------------+-----+\n", - "| origin| geom|height|width| data|bands|\n", - "+--------------------+--------------------+------+-----+--------------------+-----+\n", - "|file:/Users/niles...|POLYGON ((-58.702...| 32| 32|[1081.0, 1068.0, ...| 4|\n", - "|file:/Users/niles...|POLYGON ((-58.286...| 32| 32|[1151.0, 1141.0, ...| 4|\n", - "+--------------------+--------------------+------+-----+--------------------+-----+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ - "'''First load GeoTiff Images'''\n", - "df = sedona.read.format(\"geotiff\").option(\"dropInvalid\",True).option(\"readToCRS\", \"EPSG:4326\").option(\"disableErrorInCRS\", False).load(DATA_DIR)\n", - "df = df.selectExpr(\"image.origin as origin\",\"ST_GeomFromWkt(image.geometry) as geom\", \"image.height as height\", \"image.width as width\", \"image.data as data\", \"image.nBands as bands\")\n", - "df.show(5)" + "point_wkt_1 = 'SRID=3857;POINT (-13095600.809482181 4021100.7487925636)'\n", + "point_wkt_2 = 'SRID=3857;POINT (-13095500.809482181 4021000.7487925636)'\n", + "point_df = sedona.sql(\"SELECT ST_GeomFromEWKT('{}') as point_1, ST_GeomFromEWKT('{}') as point_2\".format(point_wkt_1, point_wkt_2))\n", + "point_df.createOrReplaceTempView(\"point_table\")\n", + "test_df = sedona.sql(\"SELECT RS_Values(raster, Array(point_1, point_2)) as raster_values from elevation_raster, point_table\")\n", + "test_df.show()" ] }, { - "cell_type": "code", - "execution_count": 38, + "cell_type": "markdown", + "id": "8643ed69-9128-49a9-80e7-f9115694695f", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+-----+------+--------------------+-----+--------------------+\n", - "| origin| geom|width|height| data|bands|normalizedDifference|\n", - "+--------------------+--------------------+-----+------+--------------------+-----+--------------------+\n", - "|file:/Users/niles...|POLYGON ((-58.702...| 32| 32|[1081.0, 1068.0, ...| 4|[0.09, 0.08, 0.11...|\n", - "|file:/Users/niles...|POLYGON ((-58.286...| 32| 32|[1151.0, 1141.0, ...| 4|[0.13, 0.09, 0.09...|\n", - "+--------------------+--------------------+-----+------+--------------------+-----+--------------------+\n", - "\n" - ] - } - ], "source": [ - "# First extract the bands for which normalized difference index needs to be calculated\n", - "df = df.selectExpr(\"origin\", \"geom\", \"width\", \"height\", \"data\", \"bands\", \"RS_GetBand(data, 1, bands) as band1\", \"RS_GetBand(data, 2, bands) as band2\")\n", - "# Get the normalized difference index between the extracted bands\n", - "df = df.selectExpr(\"origin\", \"geom\", \"width\", \"height\", \"data\", \"bands\", \"RS_NormalizedDifference(band2, band1) as normalizedDifference\")\n", - "df.show(5)" + "### Extract individual bands from rasters\n", + "[RS_BandAsArray](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/#rs_bandasarray) can be used to extract entire band values from a given raster" ] }, { "cell_type": "code", - "execution_count": 39, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+--------------------+------+-----+------------+\n", - "| origin| geom| 
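
As noted in the map algebra section above, extracted band arrays can feed the array-based map algebra path. Below is a sketch under stated assumptions: `RS_MultiplyFactor` and `RS_AddBandFromArray` are documented Sedona 1.5.0 raster operators, but this particular composition is illustrative rather than taken from the original notebook.

```python
# Double every value of band 1 and write the mutated array back as band 1:
# RS_BandAsArray extracts the band, RS_MultiplyFactor scales the array, and
# RS_AddBandFromArray(raster, band, bandIndex) stores it in the raster again.
scaled_df = sedona.sql(
    "SELECT RS_AddBandFromArray(raster, "
    "RS_MultiplyFactor(RS_BandAsArray(raster, 1), 2), 1) AS scaled_raster "
    "FROM elevation_raster"
)
scaled_df.show(1)
```
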
data_edited|height|width|nBand_edited|\n", - "+--------------------+--------------------+--------------------+------+-----+------------+\n", - "|file:/Users/niles...|POLYGON ((-58.702...|[1081.0, 1068.0, ...| 32| 32| 5|\n", - "|file:/Users/niles...|POLYGON ((-58.286...|[1151.0, 1141.0, ...| 32| 32| 5|\n", - "+--------------------+--------------------+--------------------+------+-----+------------+\n", - "\n" - ] - } - ], - "source": [ - "'''RS_Append() takes the data array containing bands, a new band to be appended, and number of total bands in the data array.\n", - " It appends the new band to the end of the data array and returns the appended data'''\n", - "\n", - "df = df.selectExpr(\"origin\", \"geom\", \"RS_Append(data, normalizedDifference, bands) as data_edited\", \"height\", \"width\", \"bands\").drop(\"data\")\n", - "df = df.withColumn(\"nBand_edited\", col(\"bands\") + 1).drop(\"bands\")\n", - "df.show()" - ] - }, - { - "cell_type": "code", - "execution_count": 40, + "execution_count": null, + "id": "48f56157-ab07-456e-83fe-75d23f5bb28e", "metadata": {}, "outputs": [], "source": [ - "'''Writing GeoTiff DataFrames as GeoTiff Images'''\n", - "SAVE_PATH = \"./data/raster-written/\"\n", - "df.coalesce(1).write.mode(\"overwrite\").format(\"geotiff\").option(\"writeToCRS\", \"EPSG:4326\").option(\"fieldGeometry\", \"geom\").option(\"fieldNBands\", \"nBand_edited\").option(\"fieldData\", \"data_edited\").save(SAVE_PATH)" + "band = elevation_raster_df.selectExpr(\"RS_BandAsArray(raster, 1)\").first()[0]\n", + "print(band[500: 520],) #Print a part of a band as an array horizontally" ] }, { "cell_type": "markdown", + "id": "e586b0e5-935a-47fa-8ebf-b63ddd9a48a8", "metadata": {}, "source": [ - "# User can also create some UDF manually to manipulate Geotiff dataframes" - ] - }, - { - "cell_type": "code", - "execution_count": 42, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+------+\n", - "| sum|\n", - "+------+\n", - "| 753.0|\n", - "|1017.0|\n", - "+------+\n", - "\n" - ] - } - ], - "source": [ - "'''Sample UDF calculates sum of all the values in a band which are greater than 1000.0'''\n", - "\n", - "def SumOfValues(band):\n", - " total = 0.0\n", - " for num in band:\n", - " if num>1000.0:\n", - " total+=1\n", - " return total\n", - "\n", - "df = sedona.read.format(\"geotiff\").option(\"dropInvalid\",True).option(\"readToCRS\", \"EPSG:4326\").load(DATA_DIR)\n", - "df = df.selectExpr(\"image.origin as origin\",\"ST_GeomFromWkt(image.geometry) as Geom\", \"image.height as height\", \"image.width as width\", \"image.data as data\", \"image.nBands as bands\")\n", - "df = df.selectExpr(\"RS_GetBand(data,1,bands) as targetband\", \"height\", \"width\", \"bands\", \"Geom\")\n", - " \n", - "calculateSum = udf(SumOfValues, DoubleType())\n", - "sedona.udf.register(\"RS_Sum\", calculateSum)\n", - "\n", - "sumDF = df.selectExpr(\"RS_Sum(targetband) as sum\")\n", - "sumDF.show()" + "### Visualize Raster MBRs" ] }, { "cell_type": "code", - "execution_count": 43, + "execution_count": null, + "id": "2a2c7086-9588-48a7-a710-c10b8c5e4875", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
Geomselectedregion
0POLYGON ((-58.70271939504447 -34.418775445554786, -58.702776058228636 -34.421569880680615, -58.6994039180242 -34.42161679331493, -58.69934736692278 -34.4188223533111, -58.70271939504447 -34.418775445554786))
1POLYGON ((-58.286636576261145 -34.758580906202866, -58.286679941749476 -34.76137571668496, -58.28329340123003 -34.76141146033393, -58.28325014980317 -34.75861664615162, -58.286636576261145 -34.758580906202866))
" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ - "'''Sample UDF to visualize a particular region of a GeoTiff image'''\n", - "\n", - "def generatemask(band, width,height):\n", - " for (i,val) in enumerate(band):\n", - " if (i%width>=12 and i%width<26) and (i%height>=12 and i%height<26):\n", - " band[i] = 255.0\n", - " else:\n", - " band[i] = 0.0\n", - " return band\n", - "\n", - "maskValues = udf(generatemask, ArrayType(DoubleType()))\n", - "sedona.udf.register(\"RS_MaskValues\", maskValues)\n", - "\n", - "\n", - "df_base64 = df.selectExpr(\"Geom\", \"RS_Base64(height,width,RS_Normalize(targetband), RS_Array(height*width,0.0), RS_Array(height*width, 0.0), RS_MaskValues(targetband,width,height)) as region\" )\n", - "df_HTML = df_base64.selectExpr(\"Geom\",\"RS_HTML(region) as selectedregion\")\n", - "display(HTML(df_HTML.limit(2).toPandas().to_html(escape=False)))\n" + "# Convert raster to its convex hull and transform it to EPSG:4326 to be able to visualize\n", + "raster_mbr_df = elevation_raster_df.selectExpr(\"ST_Transform(RS_ConvexHull(raster), 'EPSG:3857', 'EPSG:4326') as raster_mbr\")" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "id": "6f39b3db-a0b1-4842-a5ca-b5a5850f3ea7", + "metadata": { + "tags": [] + }, "outputs": [], - "source": [] + "source": [ + "sedona_kepler_map_elevation = SedonaKepler.create_map(df=raster_mbr_df, name='RasterMBR')\n", + "sedona_kepler_map_elevation" + ] } ], "metadata": { @@ -1144,5 +532,5 @@ } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 5 } diff --git a/binder/ApacheSedonaSQL.ipynb b/binder/ApacheSedonaSQL.ipynb index a58c264eed..9873e84646 100644 --- a/binder/ApacheSedonaSQL.ipynb +++ b/binder/ApacheSedonaSQL.ipynb @@ -24,8 +24,10 @@ }, { "cell_type": "code", - "execution_count": 1, - "metadata": {}, + "execution_count": null, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "import os\n", @@ -38,55 +40,16 @@ }, { "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - ":: loading settings :: url = jar:file:/Users/nileshgajwani/Desktop/spark/spark-3.4.0-bin-hadoop3/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Ivy Default Cache set to: /Users/nileshgajwani/.ivy2/cache\n", - "The jars for the packages stored in: /Users/nileshgajwani/.ivy2/jars\n", - "org.apache.sedona#sedona-spark-shaded-3.0_2.12 added as a dependency\n", - "org.datasyslab#geotools-wrapper added as a dependency\n", - ":: resolving dependencies :: org.apache.spark#spark-submit-parent-ade932f0-a9e8-47af-b559-0d52a6a087e9;1.0\n", - "\tconfs: [default]\n", - "\tfound org.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 in central\n", - "\tfound org.datasyslab#geotools-wrapper;1.4.0-28.2 in central\n", - ":: resolution report :: resolve 81ms :: artifacts dl 2ms\n", - "\t:: modules in use:\n", - "\torg.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 from central in [default]\n", - "\torg.datasyslab#geotools-wrapper;1.4.0-28.2 from central in [default]\n", - "\t---------------------------------------------------------------------\n", - "\t| | modules || artifacts |\n", - "\t| conf | number| search|dwnlded|evicted|| number|dwnlded|\n", - "\t---------------------------------------------------------------------\n", - "\t| default | 2 | 0 | 0 | 0 || 2 | 0 |\n", - 
"\t---------------------------------------------------------------------\n", - ":: retrieving :: org.apache.spark#spark-submit-parent-ade932f0-a9e8-47af-b559-0d52a6a087e9\n", - "\tconfs: [default]\n", - "\t0 artifacts copied, 2 already retrieved (0kB/2ms)\n", - "23/07/03 21:13:44 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n", - "Setting default log level to \"WARN\".\n", - "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n", - "23/07/03 21:13:44 WARN Utils: Service 'SparkUI' could not bind on port 4040. Attempting port 4041.\n", - "23/07/03 21:13:44 WARN Utils: Service 'SparkUI' could not bind on port 4041. Attempting port 4042.\n", - " \r" - ] - } - ], + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", - " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.4.1,'\n", - " 'org.datasyslab:geotools-wrapper:1.4.0-28.2'). \\\n", + " 'org.apache.sedona:sedona-spark-shaded-3.4_2.12:1.5.0,'\n", + " 'org.datasyslab:geotools-wrapper:1.5.0-28.2'). \\\n", " getOrCreate()\n", "\n", "sedona = SedonaContext.create(config)\n" @@ -108,27 +71,9 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+-----------------+\n", - "| arealandmark|\n", - "+-----------------+\n", - "|POINT (1.1 101.1)|\n", - "|POINT (2.1 102.1)|\n", - "|POINT (3.1 103.1)|\n", - "|POINT (4.1 104.1)|\n", - "|POINT (5.1 105.1)|\n", - "+-----------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "point_csv_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \",\").\\\n", @@ -150,27 +95,9 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+----------------+--------------------+\n", - "| name| countyshape|\n", - "+----------------+--------------------+\n", - "| Cuming County|POLYGON ((-97.019...|\n", - "|Wahkiakum County|POLYGON ((-123.43...|\n", - "| De Baca County|POLYGON ((-104.56...|\n", - "|Lancaster County|POLYGON ((-96.910...|\n", - "| Nuckolls County|POLYGON ((-98.273...|\n", - "+----------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "polygon_wkt_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \"\\t\").\\\n", @@ -191,27 +118,9 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+----------------+--------------------+\n", - "| name| countyshape|\n", - "+----------------+--------------------+\n", - "| Cuming County|POLYGON ((-97.019...|\n", - "|Wahkiakum County|POLYGON ((-123.43...|\n", - "| De Baca County|POLYGON ((-104.56...|\n", - "|Lancaster County|POLYGON ((-96.910...|\n", - "| Nuckolls County|POLYGON ((-98.273...|\n", - "+----------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "polygon_wkb_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \"\\t\").\\\n", @@ -232,27 +141,9 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - 
"output_type": "stream", - "text": [ - "+--------------------+\n", - "| countyshape|\n", - "+--------------------+\n", - "|POLYGON ((-87.621...|\n", - "|POLYGON ((-85.719...|\n", - "|POLYGON ((-86.000...|\n", - "|POLYGON ((-86.574...|\n", - "|POLYGON ((-85.382...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "polygon_json_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \"\\t\").\\\n", @@ -280,36 +171,9 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "== Physical Plan ==\n", - "BroadcastIndexJoin pointshape2#253: geometry, LeftSide, LeftSide, Inner, INTERSECTS, ( **org.apache.spark.sql.sedona_sql.expressions.ST_Distance** < 2.0) ST_INTERSECTS(pointshape1#228, pointshape2#253)\n", - ":- SpatialIndex pointshape1#228: geometry, QUADTREE, false, 2.0\n", - ": +- Project [ **org.apache.spark.sql.sedona_sql.expressions.ST_Point** AS pointshape1#228, abc AS name1#229]\n", - ": +- FileScan csv [_c0#224,_c1#225] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/Users/nileshgajwani/Desktop/sedona/sedona/binder/data/testpoint...., PartitionFilters: [], PushedFilters: [], ReadSchema: struct<_c0:string,_c1:string>\n", - "+- Project [ **org.apache.spark.sql.sedona_sql.expressions.ST_Point** AS pointshape2#253, def AS name2#254]\n", - " +- FileScan csv [_c0#249,_c1#250] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/Users/nileshgajwani/Desktop/sedona/sedona/binder/data/testpoint...., PartitionFilters: [], PushedFilters: [], ReadSchema: struct<_c0:string,_c1:string>\n", - "\n", - "\n", - "+-----------------+-----+-----------------+-----+\n", - "| pointshape1|name1| pointshape2|name2|\n", - "+-----------------+-----+-----------------+-----+\n", - "|POINT (1.1 101.1)| abc|POINT (1.1 101.1)| def|\n", - "|POINT (2.1 102.1)| abc|POINT (1.1 101.1)| def|\n", - "|POINT (1.1 101.1)| abc|POINT (2.1 102.1)| def|\n", - "|POINT (2.1 102.1)| abc|POINT (2.1 102.1)| def|\n", - "|POINT (3.1 103.1)| abc|POINT (2.1 102.1)| def|\n", - "+-----------------+-----+-----------------+-----+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "point_csv_df_1 = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \",\").\\\n", @@ -351,7 +215,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -365,57 +229,25 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- osm_id: string (nullable = true)\n", - " |-- code: long (nullable = true)\n", - " |-- fclass: string (nullable = true)\n", - " |-- name: string (nullable = true)\n", - " |-- geometry: geometry (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "osm_points.printSchema()" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------+----+---------+--------------+--------------------+\n", - "| osm_id|code| fclass| name| geometry|\n", - "+--------+----+---------+--------------+--------------------+\n", - "|26860257|2422|camp_site| de Kroon|POINT (15.3393145...|\n", - "|26860294|2406| chalet|Leśne 
Ustronie|POINT (14.8709625...|\n", - "|29947493|2402| motel| |POINT (15.0946636...|\n", - "|29947498|2602| atm| |POINT (15.0732014...|\n", - "|29947499|2401| hotel| |POINT (15.0696777...|\n", - "+--------+----+---------+--------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "osm_points.show(5)" ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -424,7 +256,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -441,34 +273,16 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------+----+---------+--------------+--------------------+\n", - "| osm_id|code| fclass| name| geom|\n", - "+--------+----+---------+--------------+--------------------+\n", - "|26860257|2422|camp_site| de Kroon|POINT (-3288183.3...|\n", - "|26860294|2406| chalet|Leśne Ustronie|POINT (-3341183.9...|\n", - "|29947493|2402| motel| |POINT (-3320466.5...|\n", - "|29947498|2602| atm| |POINT (-3323205.7...|\n", - "|29947499|2401| hotel| |POINT (-3323655.1...|\n", - "+--------+----+---------+--------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "transformed_df.show(5)" ] }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -477,7 +291,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -492,49 +306,9 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/07/03 21:13:53 WARN JoinQuery: UseIndex is true, but no index exists. 
Will build index on the fly.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+----------+---------+--------------------+\n", - "| id_1| id_2| geom|\n", - "+----------+---------+--------------------+\n", - "| 197624402|197624402|POINT (-3383818.5...|\n", - "| 197663196|197663196|POINT (-3383367.1...|\n", - "| 197953474|197953474|POINT (-3383763.3...|\n", - "| 262310516|262310516|POINT (-3384257.6...|\n", - "|1074233123|262310516|POINT (-3384262.1...|\n", - "| 270281140|270281140|POINT (-3385421.2...|\n", - "|1074232906|270281140|POINT (-3385408.6...|\n", - "| 270306609|270306609|POINT (-3383982.8...|\n", - "| 270306746|270306746|POINT (-3383898.4...|\n", - "| 280402616|280402616|POINT (-3378817.6...|\n", - "| 839725400|280402616|POINT (-3378841.1...|\n", - "| 293896571|293896571|POINT (-3385029.0...|\n", - "|3256728465|293896571|POINT (-3385002.4...|\n", - "| 310838954|310838954|POINT (-3390510.5...|\n", - "| 311395303|311395303|POINT (-3389444.4...|\n", - "| 311395425|311395425|POINT (-3389867.6...|\n", - "|6339786017|311395425|POINT (-3389850.1...|\n", - "| 825853330|311395425|POINT (-3389877.4...|\n", - "| 945009922|311395425|POINT (-3389878.6...|\n", - "| 320100848|320100848|POINT (-3389610.6...|\n", - "+----------+---------+--------------------+\n", - "only showing top 20 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "neighbours_within_1000m.show()" ] @@ -548,24 +322,16 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/07/03 21:13:54 WARN JoinQuery: UseIndex is true, but no index exists. Will build index on the fly.\n" - ] - } - ], + "outputs": [], "source": [ "df = neighbours_within_1000m.toPandas()" ] }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -574,139 +340,12 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
id_1id_2geom
0197624402197624402POINT (-3383818.580 4179182.169)
1197663196197663196POINT (-3383367.151 4179427.096)
2197953474197953474POINT (-3383763.332 4179408.785)
3262310516262310516POINT (-3384257.682 4178033.053)
41074233123262310516POINT (-3384262.187 4178036.442)
............
4531467855483546785548354POINT (-3271487.870 4337964.529)
4531567855483566785548356POINT (-3273379.389 4338379.126)
4531667855483576785548357POINT (-3273745.222 4338528.241)
4531767855483586785548358POINT (-3273027.996 4338093.401)
4531868174167046817416704POINT (-3214549.268 4314872.904)
\n", - "

45319 rows × 3 columns

\n", - "
" - ], - "text/plain": [ - " id_1 id_2 geom\n", - "0 197624402 197624402 POINT (-3383818.580 4179182.169)\n", - "1 197663196 197663196 POINT (-3383367.151 4179427.096)\n", - "2 197953474 197953474 POINT (-3383763.332 4179408.785)\n", - "3 262310516 262310516 POINT (-3384257.682 4178033.053)\n", - "4 1074233123 262310516 POINT (-3384262.187 4178036.442)\n", - "... ... ... ...\n", - "45314 6785548354 6785548354 POINT (-3271487.870 4337964.529)\n", - "45315 6785548356 6785548356 POINT (-3273379.389 4338379.126)\n", - "45316 6785548357 6785548357 POINT (-3273745.222 4338528.241)\n", - "45317 6785548358 6785548358 POINT (-3273027.996 4338093.401)\n", - "45318 6817416704 6817416704 POINT (-3214549.268 4314872.904)\n", - "\n", - "[45319 rows x 3 columns]" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "gdf" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { @@ -729,5 +368,5 @@ } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb b/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb index 225e6ad8e2..e104ac791b 100644 --- a/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb +++ b/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb @@ -24,7 +24,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -32,14 +32,12 @@ "\n", "import geopandas as gpd\n", "from pyspark.sql import SparkSession\n", - "from pyspark.sql.functions import col, expr, when\n", + "from pyspark.sql.functions import col, expr, when, explode, hex\n", "\n", "\n", "\n", "from sedona.spark import *\n", - "from keplergl import KeplerGl\n", - "from utilities import getConfig\n", - "\n" + "from utilities import getConfig" ] }, { @@ -51,53 +49,14 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - ":: loading settings :: url = jar:file:/Users/nileshgajwani/Desktop/spark/spark-3.4.0-bin-hadoop3/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Ivy Default Cache set to: /Users/nileshgajwani/.ivy2/cache\n", - "The jars for the packages stored in: /Users/nileshgajwani/.ivy2/jars\n", - "org.apache.sedona#sedona-spark-shaded-3.0_2.12 added as a dependency\n", - "org.datasyslab#geotools-wrapper added as a dependency\n", - ":: resolving dependencies :: org.apache.spark#spark-submit-parent-2ebc22b4-bd08-4a3f-a2dc-bd50e2f0f728;1.0\n", - "\tconfs: [default]\n", - "\tfound org.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 in central\n", - "\tfound org.datasyslab#geotools-wrapper;1.4.0-28.2 in central\n", - ":: resolution report :: resolve 85ms :: artifacts dl 3ms\n", - "\t:: modules in use:\n", - "\torg.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 from central in [default]\n", - "\torg.datasyslab#geotools-wrapper;1.4.0-28.2 from central in [default]\n", - "\t---------------------------------------------------------------------\n", - "\t| | modules || artifacts |\n", - "\t| conf | number| search|dwnlded|evicted|| number|dwnlded|\n", - "\t---------------------------------------------------------------------\n", - "\t| default | 2 | 0 | 0 | 0 || 2 | 0 |\n", - 
"\t---------------------------------------------------------------------\n", - ":: retrieving :: org.apache.spark#spark-submit-parent-2ebc22b4-bd08-4a3f-a2dc-bd50e2f0f728\n", - "\tconfs: [default]\n", - "\t0 artifacts copied, 2 already retrieved (0kB/2ms)\n", - "23/07/12 14:17:39 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n", - "Setting default log level to \"WARN\".\n", - "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n", - "23/07/12 14:17:43 WARN SimpleFunctionRegistry: The function st_affine replaced a previously registered function.\n" - ] - } - ], + "outputs": [], "source": [ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", - " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.4.1,'\n", - " 'org.datasyslab:geotools-wrapper:1.4.0-28.2'). \\\n", + " 'org.apache.sedona:sedona-spark-shaded-3.4_2.12:1.5.0,'\n", + " 'org.datasyslab:geotools-wrapper:1.5.0-28.2'). \\\n", " getOrCreate()\n", "\n", "sedona = SedonaContext.create(config)\n", @@ -115,120 +74,9 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geometry: geometry (nullable = true)\n", - " |-- featurecla: string (nullable = true)\n", - " |-- scalerank: string (nullable = true)\n", - " |-- LABELRANK: string (nullable = true)\n", - " |-- SOVEREIGNT: string (nullable = true)\n", - " |-- SOV_A3: string (nullable = true)\n", - " |-- ADM0_DIF: string (nullable = true)\n", - " |-- LEVEL: string (nullable = true)\n", - " |-- TYPE: string (nullable = true)\n", - " |-- ADMIN: string (nullable = true)\n", - " |-- ADM0_A3: string (nullable = true)\n", - " |-- GEOU_DIF: string (nullable = true)\n", - " |-- GEOUNIT: string (nullable = true)\n", - " |-- GU_A3: string (nullable = true)\n", - " |-- SU_DIF: string (nullable = true)\n", - " |-- SUBUNIT: string (nullable = true)\n", - " |-- SU_A3: string (nullable = true)\n", - " |-- BRK_DIFF: string (nullable = true)\n", - " |-- NAME: string (nullable = true)\n", - " |-- NAME_LONG: string (nullable = true)\n", - " |-- BRK_A3: string (nullable = true)\n", - " |-- BRK_NAME: string (nullable = true)\n", - " |-- BRK_GROUP: string (nullable = true)\n", - " |-- ABBREV: string (nullable = true)\n", - " |-- POSTAL: string (nullable = true)\n", - " |-- FORMAL_EN: string (nullable = true)\n", - " |-- FORMAL_FR: string (nullable = true)\n", - " |-- NAME_CIAWF: string (nullable = true)\n", - " |-- NOTE_ADM0: string (nullable = true)\n", - " |-- NOTE_BRK: string (nullable = true)\n", - " |-- NAME_SORT: string (nullable = true)\n", - " |-- NAME_ALT: string (nullable = true)\n", - " |-- MAPCOLOR7: string (nullable = true)\n", - " |-- MAPCOLOR8: string (nullable = true)\n", - " |-- MAPCOLOR9: string (nullable = true)\n", - " |-- MAPCOLOR13: string (nullable = true)\n", - " |-- POP_EST: string (nullable = true)\n", - " |-- POP_RANK: string (nullable = true)\n", - " |-- GDP_MD_EST: string (nullable = true)\n", - " |-- POP_YEAR: string (nullable = true)\n", - " |-- LASTCENSUS: string (nullable = true)\n", - " |-- GDP_YEAR: string (nullable = true)\n", - " |-- ECONOMY: string (nullable = true)\n", - " |-- INCOME_GRP: string (nullable = true)\n", - " |-- WIKIPEDIA: string (nullable = true)\n", - " |-- FIPS_10_: string (nullable = true)\n", - " |-- ISO_A2: string (nullable = true)\n", - " |-- ISO_A3: string (nullable = 
true)\n", - " |-- ISO_A3_EH: string (nullable = true)\n", - " |-- ISO_N3: string (nullable = true)\n", - " |-- UN_A3: string (nullable = true)\n", - " |-- WB_A2: string (nullable = true)\n", - " |-- WB_A3: string (nullable = true)\n", - " |-- WOE_ID: string (nullable = true)\n", - " |-- WOE_ID_EH: string (nullable = true)\n", - " |-- WOE_NOTE: string (nullable = true)\n", - " |-- ADM0_A3_IS: string (nullable = true)\n", - " |-- ADM0_A3_US: string (nullable = true)\n", - " |-- ADM0_A3_UN: string (nullable = true)\n", - " |-- ADM0_A3_WB: string (nullable = true)\n", - " |-- CONTINENT: string (nullable = true)\n", - " |-- REGION_UN: string (nullable = true)\n", - " |-- SUBREGION: string (nullable = true)\n", - " |-- REGION_WB: string (nullable = true)\n", - " |-- NAME_LEN: string (nullable = true)\n", - " |-- LONG_LEN: string (nullable = true)\n", - " |-- ABBREV_LEN: string (nullable = true)\n", - " |-- TINY: string (nullable = true)\n", - " |-- HOMEPART: string (nullable = true)\n", - " |-- MIN_ZOOM: string (nullable = true)\n", - " |-- MIN_LABEL: string (nullable = true)\n", - " |-- MAX_LABEL: string (nullable = true)\n", - " |-- NE_ID: string (nullable = true)\n", - " |-- WIKIDATAID: string (nullable = true)\n", - " |-- NAME_AR: string (nullable = true)\n", - " |-- NAME_BN: string (nullable = true)\n", - " |-- NAME_DE: string (nullable = true)\n", - " |-- NAME_EN: string (nullable = true)\n", - " |-- NAME_ES: string (nullable = true)\n", - " |-- NAME_FR: string (nullable = true)\n", - " |-- NAME_EL: string (nullable = true)\n", - " |-- NAME_HI: string (nullable = true)\n", - " |-- NAME_HU: string (nullable = true)\n", - " |-- NAME_ID: string (nullable = true)\n", - " |-- NAME_IT: string (nullable = true)\n", - " |-- NAME_JA: string (nullable = true)\n", - " |-- NAME_KO: string (nullable = true)\n", - " |-- NAME_NL: string (nullable = true)\n", - " |-- NAME_PL: string (nullable = true)\n", - " |-- NAME_PT: string (nullable = true)\n", - " |-- NAME_RU: string (nullable = true)\n", - " |-- NAME_SV: string (nullable = true)\n", - " |-- NAME_TR: string (nullable = true)\n", - " |-- NAME_VI: string (nullable = true)\n", - " |-- NAME_ZH: string (nullable = true)\n", - "\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/07/12 14:17:43 WARN package: Truncated the string representation of a plan since it was too large. 
This behavior can be adjusted by setting 'spark.sql.debug.maxToStringFields'.\n" - ] - } - ], + "outputs": [], "source": [ "countries = ShapefileReader.readToGeometryRDD(sc, \"data/ne_50m_admin_0_countries_lakes/\")\n", "countries_df = Adapter.toDf(countries, sedona)\n", @@ -246,29 +94,9 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geometry: geometry (nullable = true)\n", - " |-- scalerank: string (nullable = true)\n", - " |-- featurecla: string (nullable = true)\n", - " |-- type: string (nullable = true)\n", - " |-- name: string (nullable = true)\n", - " |-- abbrev: string (nullable = true)\n", - " |-- location: string (nullable = true)\n", - " |-- gps_code: string (nullable = true)\n", - " |-- iata_code: string (nullable = true)\n", - " |-- wikipedia: string (nullable = true)\n", - " |-- natlscale: string (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "airports = ShapefileReader.readToGeometryRDD(sc, \"data/ne_50m_airports/\")\n", "airports_df = Adapter.toDf(airports, sedona)\n", @@ -280,12 +108,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ + "\n", + "\n", "## Run Spatial Join using SQL API" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -301,17 +131,9 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[('3.0', '2.12', '1.4.1')]\n" - ] - } - ], + "outputs": [], "source": [ "airports_rdd = Adapter.toSpatialRdd(airports_df, \"geometry\")\n", "# Drop the duplicate name column in countries_df\n", @@ -348,75 +170,9 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/07/12 14:17:44 WARN JoinQuery: UseIndex is true, but no index exists. 
Will build index on the fly.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+--------------------+--------------------+\n", - "| country_geom| NAME_EN| airport_geom| name|\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "|MULTIPOLYGON (((1...|Taiwan ...|POINT (121.231370...|Taoyuan ...|\n", - "|MULTIPOLYGON (((5...|Netherlands ...|POINT (4.76437693...|Schiphol ...|\n", - "|POLYGON ((103.969...|Singapore ...|POINT (103.986413...|Singapore Changi ...|\n", - "|MULTIPOLYGON (((-...|United Kingdom ...|POINT (-0.4531566...|London Heathrow ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-149.98172...|Anchorage Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-84.425397...|Hartsfield-Jackso...|\n", - "|MULTIPOLYGON (((1...|People's Republic...|POINT (116.588174...|Beijing Capital ...|\n", - "|MULTIPOLYGON (((-...|Colombia ...|POINT (-74.143371...|Eldorado Int'l ...|\n", - "|MULTIPOLYGON (((6...|India ...|POINT (72.8745639...|Chhatrapati Shiva...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-71.016406...|Gen E L Logan Int...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-76.668642...|Baltimore-Washing...|\n", - "|POLYGON ((36.8713...|Egypt ...|POINT (31.3997430...|Cairo Int'l ...|\n", - "|POLYGON ((-2.2196...|Morocco ...|POINT (-7.6632188...|Casablanca-Anfa ...|\n", - "|MULTIPOLYGON (((-...|Venezuela ...|POINT (-67.005748...|Simon Bolivar Int...|\n", - "|MULTIPOLYGON (((2...|South Africa ...|POINT (18.5976565...|Cape Town Int'l ...|\n", - "|MULTIPOLYGON (((1...|People's Republic...|POINT (103.956136...|Chengdushuang Liu...|\n", - "|MULTIPOLYGON (((6...|India ...|POINT (77.0878362...|Indira Gandhi Int...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-104.67379...|Denver Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-97.040371...|Dallas-Ft. Worth ...|\n", - "|MULTIPOLYGON (((1...|Thailand ...|POINT (100.602578...|Don Muang Int'l ...|\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "only showing top 20 rows\n", - "\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "| country_geom| NAME_EN| airport_geom| name|\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-80.145258...|Fort Lauderdale H...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-80.278971...|Miami Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-95.333704...|George Bush Inter...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-90.256693...|New Orleans Int'l...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-81.307371...|Orlando Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-82.534824...|Tampa Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-112.01363...|Sky Harbor Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-118.40246...|Los Angeles Int'l...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-116.97547...|General Abelardo ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-97.040371...|Dallas-Ft. 
Worth ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-84.425397...|Hartsfield-Jackso...|\n", - "|POLYGON ((-69.965...|Peru ...|POINT (-77.107565...|Jorge Chavez ...|\n", - "|MULTIPOLYGON (((-...|Panama ...|POINT (-79.387134...|Tocumen Int'l ...|\n", - "|POLYGON ((-83.157...|Nicaragua ...|POINT (-86.171284...|Augusto Cesar San...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-96.183570...|Gen. Heriberto Ja...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-106.27001...|General Rafael Bu...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-99.754508...|General Juan N Al...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-99.570649...|Jose Maria Morelo...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-98.375759...|Puebla ...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-99.082607...|Lic Benito Juarez...|\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "only showing top 20 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "# The result of SQL API\n", "result.show()\n", @@ -433,49 +189,17 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": { "scrolled": true }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+------------+\n", - "| NAME_EN| country_geom|AirportCount|\n", - "+--------------------+--------------------+------------+\n", - "|Cuba ...|MULTIPOLYGON (((-...| 1|\n", - "|Mexico ...|MULTIPOLYGON (((-...| 12|\n", - "|Panama ...|MULTIPOLYGON (((-...| 1|\n", - "|Nicaragua ...|POLYGON ((-83.157...| 1|\n", - "|Honduras ...|MULTIPOLYGON (((-...| 1|\n", - "|Colombia ...|MULTIPOLYGON (((-...| 4|\n", - "|United States of ...|MULTIPOLYGON (((-...| 35|\n", - "|Ecuador ...|MULTIPOLYGON (((-...| 1|\n", - "|The Bahamas ...|MULTIPOLYGON (((-...| 1|\n", - "|Peru ...|POLYGON ((-69.965...| 1|\n", - "|Guatemala ...|POLYGON ((-92.235...| 1|\n", - "|Canada ...|MULTIPOLYGON (((-...| 15|\n", - "|Venezuela ...|MULTIPOLYGON (((-...| 3|\n", - "|Argentina ...|MULTIPOLYGON (((-...| 3|\n", - "|Bolivia ...|MULTIPOLYGON (((-...| 2|\n", - "|Paraguay ...|POLYGON ((-58.159...| 1|\n", - "|Benin ...|POLYGON ((1.62265...| 1|\n", - "|Guinea ...|POLYGON ((-10.283...| 1|\n", - "|Chile ...|MULTIPOLYGON (((-...| 5|\n", - "|Nigeria ...|MULTIPOLYGON (((7...| 3|\n", - "+--------------------+--------------------+------------+\n", - "only showing top 20 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "# result.createOrReplaceTempView(\"result\")\n", "result2.createOrReplaceTempView(\"result\")\n", "groupedresult = sedona.sql(\"SELECT c.NAME_EN, c.country_geom, count(*) as AirportCount FROM result c GROUP BY c.NAME_EN, c.country_geom\")\n", - "groupedresult.show()" + "groupedresult.show()\n", + "groupedresult.createOrReplaceTempView(\"grouped_result\")" ] }, { @@ -485,39 +209,78 @@ "## Visualize the number of airports in each country" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Visualize using SedonaKepler" + ] + }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "User Guide: https://docs.kepler.gl/docs/keplergl-jupyter\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "0646646608754887811eee12e5516d16", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "KeplerGl(config={'version': 'v1', 'config': {'visState': {'filters': [], 
'layers': [{'id': 'ikzru0t', 'type': …"
-     ]
-    },
-    "metadata": {},
-    "output_type": "display_data"
-   }
-  ],
+   "outputs": [],
   "source": [
-    "df = groupedresult.toPandas()\n",
-    "gdf = gpd.GeoDataFrame(df, geometry=\"country_geom\").rename(columns={'country_geom':'geometry'})\n",
+    "sedona_kepler_map = SedonaKepler.create_map(df=groupedresult, name=\"AirportCount\", config=getConfig())\n",
+    "sedona_kepler_map"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Visualize using SedonaPyDeck\n",
+    "The above visualization is generated by a pre-set config informing SedonaKepler that the map to be rendered is a choropleth map shaded by the `AirportCount` column value.\n",
    "\n",
-    "map = KeplerGl(data={\"AirportCount\": gdf}, config=getConfig())\n",
-    "map"
+    "This can also be achieved using [SedonaPyDeck](https://sedona.apache.org/1.5.0/tutorial/sql/#sedonapydeck) and its `create_choropleth_map` API."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "sedona_pydeck_map = SedonaPyDeck.create_choropleth_map(df=groupedresult, plot_col='AirportCount')\n",
+    "sedona_pydeck_map"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Visualize Uber H3 cells using SedonaKepler\n",
+    "The following tutorial shows how Uber H3 cells can be generated using Sedona and visualized using SedonaKepler."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Generate H3 cell IDs\n",
+    "[ST_H3CellIDs](https://sedona.apache.org/1.5.0/api/flink/Function/#st_h3cellids) can be used to generate cell IDs for given geometries."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "h3_df = sedona.sql(\"SELECT g.NAME_EN, g.country_geom, ST_H3CellIDs(g.country_geom, 3, false) as h3_cellID from grouped_result g\")\n",
+    "h3_df.show(2)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Since each geometry can have multiple H3 cell IDs, let's explode the generated H3 cell ID array to get individual cells"
   ]
  },
 {
@@ -527,7 +290,52 @@
    "tags": []
   },
   "outputs": [],
-   "source": []
+   "source": [
+    "exploded_h3 = h3_df.select(h3_df.NAME_EN, h3_df.country_geom, explode(h3_df.h3_cellID).alias(\"h3\"))\n",
+    "exploded_h3.show(2)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Convert generated long H3 cell ID to a hex cell ID\n",
+    "SedonaKepler accepts each H3 cell ID as a hexadecimal string in order to visualize the cells automatically. Also, let us sample the data so that the sparse cells are easier to see on the map."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "exploded_h3 = exploded_h3.sample(0.3)\n",
+    "exploded_h3.createOrReplaceTempView(\"exploded_h3\")\n",
+    "hex_exploded_h3 = exploded_h3.select(exploded_h3.NAME_EN, hex(exploded_h3.h3).alias(\"ex_h3\"))\n",
+    "hex_exploded_h3.show(2)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Visualize using SedonaKepler\n",
+    "Now, simply provide the final df to SedonaKepler.create_map and you can automagically visualize the H3 cells on the map!"
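Unwound from the notebook JSON above, the new H3 cells form one small pipeline ending in the `create_map` call of the final cell below. This condensed sketch assumes the `sedona` context and the `grouped_result` view created earlier in the notebook, and that `explode` and `hex` come from `pyspark.sql.functions` (the notebook's import cell is not part of this hunk):

    from pyspark.sql.functions import explode, hex
    from sedona.maps.SedonaKepler import SedonaKepler  # import path as of Sedona 1.5.x

    # ST_H3CellIDs(geom, level, fullCover) returns an array of H3 cell IDs per geometry
    h3_df = sedona.sql(
        "SELECT g.NAME_EN, g.country_geom, "
        "ST_H3CellIDs(g.country_geom, 3, false) AS h3_cellID "
        "FROM grouped_result g")

    # One row per cell, thinned with sample(); hex() is needed because
    # SedonaKepler expects H3 cell IDs as hexadecimal strings, not longs
    cells = h3_df.select(h3_df.NAME_EN, explode(h3_df.h3_cellID).alias("h3")).sample(0.3)
    hex_cells = cells.select(cells.NAME_EN, hex(cells.h3).alias("ex_h3"))

    SedonaKepler.create_map(df=hex_cells, name="h3")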
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sedona_kepler_h3 = SedonaKepler.create_map(df=hex_exploded_h3, name=\"h3\")\n", + "sedona_kepler_h3" + ] } ], "metadata": { diff --git a/binder/Pipfile b/binder/Pipfile index 080c3087d6..69f7569520 100644 --- a/binder/Pipfile +++ b/binder/Pipfile @@ -14,14 +14,15 @@ pytest-cov = "*" pandas="1.3.5" shapely="==1.8.4" geopandas="==0.11.1" -pyspark="==3.3.2" +pyspark="==3.4.0" attrs="*" -apache-sedona="==1.4.1" +apache-sedona="==1.5.0" matplotlib = "*" descartes = "*" keplergl = "==0.3.2" +pydeck = "==0.8.0" ipywidgets = "*" -jupyterlab-widgets = "*" +jupyterlab-widgets = "==1.1.7" ipykernel = "*" jupyterlab = "==3.6.4" diff --git a/binder/Sedona_OvertureMaps_GeoParquet.ipynb b/binder/Sedona_OvertureMaps_GeoParquet.ipynb index ba2b6623fa..588cdf415f 100644 --- a/binder/Sedona_OvertureMaps_GeoParquet.ipynb +++ b/binder/Sedona_OvertureMaps_GeoParquet.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "id": "92984a1c", "metadata": {}, "outputs": [], @@ -14,7 +14,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "id": "e4392353", "metadata": {}, "outputs": [], @@ -24,17 +24,64 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "id": "08c71acb", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Warning: Ignoring non-Spark config property: fs.s3a.aws.credentials.provider\n", + "23/10/31 18:38:20 WARN Utils: Your hostname, Nileshs-MacBook-Pro.local resolves to a loopback address: 127.0.0.1; using 172.24.19.124 instead (on interface en0)\n", + "23/10/31 18:38:20 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address\n", + "Ivy Default Cache set to: /Users/nileshgajwani/.ivy2/cache\n", + "The jars for the packages stored in: /Users/nileshgajwani/.ivy2/jars\n", + "org.apache.sedona#sedona-spark-shaded-3.4_2.12 added as a dependency\n", + "org.datasyslab#geotools-wrapper added as a dependency\n", + ":: resolving dependencies :: org.apache.spark#spark-submit-parent-44adfadb-de72-4666-a002-5a5ad761da2e;1.0\n", + "\tconfs: [default]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + ":: loading settings :: url = jar:file:/Users/nileshgajwani/Downloads/spark-3.4.1-bin-hadoop3/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\tfound org.apache.sedona#sedona-spark-shaded-3.4_2.12;1.5.0 in central\n", + "\tfound org.datasyslab#geotools-wrapper;1.5.0-28.2 in central\n", + ":: resolution report :: resolve 70ms :: artifacts dl 6ms\n", + "\t:: modules in use:\n", + "\torg.apache.sedona#sedona-spark-shaded-3.4_2.12;1.5.0 from central in [default]\n", + "\torg.datasyslab#geotools-wrapper;1.5.0-28.2 from central in [default]\n", + "\t---------------------------------------------------------------------\n", + "\t| | modules || artifacts |\n", + "\t| conf | number| search|dwnlded|evicted|| number|dwnlded|\n", + "\t---------------------------------------------------------------------\n", + "\t| default | 2 | 0 | 0 | 0 || 2 | 0 |\n", + "\t---------------------------------------------------------------------\n", + ":: retrieving :: org.apache.spark#spark-submit-parent-44adfadb-de72-4666-a002-5a5ad761da2e\n", + "\tconfs: [default]\n", + "\t0 artifacts copied, 2 already retrieved 
(0kB/2ms)\n",
+    "23/10/31 18:38:21 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n",
+    "Setting default log level to \"WARN\".\n",
+    "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n"
+   ]
+  }
+ ],
   "source": [
    "config = SedonaContext.builder() .\\\n",
    "    config(\"spark.hadoop.fs.s3a.aws.credentials.provider\", \"org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider\"). \\\n",
    "    config(\"fs.s3a.aws.credentials.provider\", \"org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider\"). \\\n",
    "    config('spark.jars.packages',\n",
-    "           'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.4.1,'\n",
-    "           'org.datasyslab:geotools-wrapper:1.4.0-28.2'). \\\n",
+    "           'org.apache.sedona:sedona-spark-shaded-3.4_2.12:1.5.0,'\n",
+    "           'org.datasyslab:geotools-wrapper:1.5.0-28.2'). \\\n",
    "    getOrCreate()\n",
    "\n",
    "sedona = SedonaContext.create(config)"
@@ -45,7 +92,7 @@
   "id": "8f3340ee",
   "metadata": {},
   "source": [
    "# State Boundary"
   ]
  },
 {
@@ -61,7 +108,7 @@
  },
  {
   "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 4,
   "id": "045f24b3",
   "metadata": {},
   "outputs": [],
@@ -116,10 +163,27 @@
  },
  {
   "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 5,
   "id": "d0c55157",
   "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "23/10/31 18:38:24 WARN MetricsConfig: Cannot locate configuration: tried hadoop-metrics2-s3a-file-system.properties,hadoop-metrics2.properties\n",
+      " \r"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "CPU times: user 15.4 ms, sys: 5.42 ms, total: 20.8 ms\n",
+      "Wall time: 9.82 s\n"
+     ]
+    }
+   ],
   "source": [
    "%%time\n",
    "\n",
@@ -135,12 +199,26 @@
   "execution_count": null,
   "id": "bb62f16e",
   "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "User Guide: https://docs.kepler.gl/docs/keplergl-jupyter\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      " \r"
+     ]
+    }
+   ],
   "source": [
    "%%time\n",
    "\n",
    "map_building = SedonaKepler.create_map(df_building, 'Building')\n",
-    "\n",
    "map_building"
   ]
  },
@@ -154,10 +232,26 @@
  },
  {
   "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 6,
   "id": "a6443d1d",
   "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      " \r"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "CPU times: user 29.1 ms, sys: 9.95 ms, total: 39.1 ms\n",
+      "Wall time: 13.2 s\n"
+     ]
+    }
+   ],
   "source": [
    "%%time\n",
    "\n",
@@ -198,10 +292,26 @@
  },
  {
   "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 7,
   "id": "6b9d6296",
   "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      " \r"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "CPU times: user 6.31 ms, sys: 2.74 ms, total: 9.04 ms\n",
+      "Wall time: 3.86 s\n"
+     ]
+    }
+   ],
   "source": [
    "%%time\n",
    "\n",
@@ -234,10 +344,26 @@
  },
  {
   "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 8,
   "id": "8541ada7",
   "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      " \r"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "CPU times: user 12.3 ms, sys: 4.61 ms, total: 17 ms\n",
+      "Wall time: 5.03 s\n"
+     ]
+    }
+ ], "source": [ "%%time\n", "\n", @@ -278,10 +404,26 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "id": "224167dc", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 17.2 ms, sys: 5.56 ms, total: 22.8 ms\n", + "Wall time: 7.55 s\n" + ] + } + ], "source": [ "%%time\n", "\n", @@ -294,9 +436,7 @@ "cell_type": "code", "execution_count": null, "id": "e5f8de53", - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "%%time\n", @@ -316,10 +456,10565 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "id": "ba8eff7c", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 18.5 ms, sys: 6.13 ms, total: 24.6 ms\n", + "Wall time: 7.13 s\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "23/10/31 22:58:32 WARN HeartbeatReceiver: Removing executor driver with no recent heartbeats: 327091 ms exceeds timeout 120000 ms\n", + "23/10/31 22:58:32 WARN SparkContext: Killing executors is not supported by current scheduler.\n", + "23/10/31 22:58:40 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: 
spark://CoarseGrainedScheduler@172.24.19.124:55734\n",

[The remainder of this 10,565-line output hunk repeats the same pair of traces — "WARN Executor: Issue communicating with driver in heartbeater" and "ERROR Inbox: Ignoring error", both caused by org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734 — again at 23:22:37, 23:38:34, and 23:39:56 on 23/10/31. The duplicated stack traces are omitted here.]
scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 17 more\n", + "23/10/31 23:39:56 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat 
org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 17 more\n", + "23/11/01 00:14:30 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat 
org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat 
org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 00:14:30 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat 
scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + 
"\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 00:23:43 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat 
org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat 
scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 00:23:43 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 00:58:17 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n",
+ "\t... 8 more\n",
+ "[... repeated identical stack traces elided: the same WARN Executor (\"Issue communicating with driver in heartbeater\") and ERROR Inbox (\"Ignoring error\") traces recur at 23/11/01 00:58:17, 01:24:39, 01:42:00, 02:13:26, and 02:42:06, each rooted in the same RpcEndpointNotFoundException ...]\n",
+ "23/11/01 02:42:06 WARN Executor: Issue communicating with driver in heartbeater\n",
+ "org.apache.spark.SparkException: Exception thrown in awaitResult: \n",
+ "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n",
+ "\t... 3 more\n",
+ "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n",
+ "\t... 
8 more\n", + "23/11/01 02:42:06 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 03:03:56 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 03:03:56 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 03:04:06 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 03:04:06 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 03:26:39 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat 
[… hundreds of repeated Spark stack-trace lines elided from the committed notebook output. The same two traces recur verbatim at 23/11/01 03:26:39, 03:43:17, 04:17:23, 04:27:46, and 05:02:56: "WARN Executor: Issue communicating with driver in heartbeater" and "ERROR Inbox: Ignoring error", each an org.apache.spark.SparkException ("Exception thrown in awaitResult") ultimately caused by org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734. …]
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 05:28:38 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 05:28:38 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 05:45:38 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 05:45:38 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 06:18:12 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 06:18:12 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 06:47:37 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 
17 more\n", + "23/11/01 06:47:37 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 17 more\n", + "23/11/01 07:04:36 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat 
org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 07:04:36 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 07:36:29 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 07:36:29 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 07:52:54 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 07:52:54 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 08:26:19 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 17 more\n", + "23/11/01 08:26:19 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat 
org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 
17 more\n", + "23/11/01 08:44:00 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 
3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 08:44:00 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 09:17:34 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 09:17:34 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 09:44:58 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 09:44:58 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 10:17:21 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat 
scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 10:17:21 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat 
org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 10:51:25 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 
3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 17 more\n", + "23/11/01 10:51:25 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + 
"\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 
17 more\n", + "23/11/01 11:08:49 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 11:08:49 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 11:33:51 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 
3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 11:33:51 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 12:05:45 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 12:05:45 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 12:19:25 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 12:19:25 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 12:54:52 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 17 more\n", + "23/11/01 12:54:52 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat 
org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 
17 more\n", + "23/11/01 13:09:03 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 
3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:09:53 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
+ "[the identical WARN Executor / ERROR Inbox trace pair repeats at 13:10:03 and 13:10:13]\n",
+ "23/11/01 13:10:23 WARN Executor: Issue communicating with driver in heartbeater\n",
+ "[same trace as above]\n",
+ "23/11/01 13:10:23 ERROR Inbox: Ignoring error\n",
+ "org.apache.spark.SparkException: Exception thrown in awaitResult: \n",
+ "\t[frames as above]\n",
+ "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n",
+ "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n",
+ "\t[intermediate callback frames elided]\n", +
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:10:33 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:10:33 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:10:43 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:10:43 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:10:53 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:10:53 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:11:03 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:11:03 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:11:13 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:11:13 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:11:23 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:11:23 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:11:33 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:11:33 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:11:43 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:11:43 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:11:53 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:11:53 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:12:33 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:12:33 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:12:43 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat 
scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:12:43 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat 
org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:12:53 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 17 more\n", + "23/11/01 13:12:53 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat 
org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 17 more\n", + "23/11/01 13:13:03 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat 
org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat 
org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:13:03 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat 
scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + 
"\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:13:13 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat 
org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat 
scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:13:13 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:13:23 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:13:23 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
+ "23/11/01 13:13:23 ERROR Executor: Exit as unable to send heartbeats to driver more than 60 times\n"
+     ]
+    }
+   ],
    "source": [
     "%%time\n",
     "\n",
@@ -343,13 +11038,21 @@
     "\n",
     "map_segment"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "bf6ef3b0-d76a-4ae1-a96e-8077a7a64581",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
  }
 ],
 "metadata": {
  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
+   "display_name": "apache-sedona",
   "language": "python",
-   "name": "python3"
+   "name": "apache-sedona"
  },
  "language_info": {
   "codemirror_mode": {
@@ -361,7 +11064,7 @@
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
-   "version": "3.10.12"
+   "version": "3.9.6"
  }
 },
 "nbformat": 4,
diff --git a/binder/data/raster/test1.tiff b/binder/data/raster/test1.tiff
new file mode 100644
index 0000000000..bebd68232e
Binary files /dev/null and b/binder/data/raster/test1.tiff differ
diff --git a/binder/data/raster/test5.tiff b/binder/data/raster/test5.tiff
new file mode 100644
index 0000000000..6caabeadae
Binary files /dev/null and b/binder/data/raster/test5.tiff differ
diff --git a/binder/postBuild b/binder/postBuild
index 88f44ad81a..e7c4e95998 100644
--- a/binder/postBuild
+++ b/binder/postBuild
@@ -1,6 +1,12 @@
 #Download Apache Spark
-wget https://archive.apache.org/dist/spark/spark-3.3.2/spark-3.3.2-bin-hadoop3.tgz
-tar -xzf spark-3.3.2-bin-hadoop3.tgz
+wget https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz
+tar -xzf spark-3.4.0-bin-hadoop3.tgz
+# Get AWS jars
+hadoop_s3_version=3.3.4
+aws_sdk_version=1.12.402
+
+curl https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/${hadoop_s3_version}/hadoop-aws-${hadoop_s3_version}.jar -o $HOME/spark-3.4.0-bin-hadoop3/jars/hadoop-aws-${hadoop_s3_version}.jar
+curl https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/${aws_sdk_version}/aws-java-sdk-bundle-${aws_sdk_version}.jar -o $HOME/spark-3.4.0-bin-hadoop3/jars/aws-java-sdk-bundle-${aws_sdk_version}.jar
 #Tidy up
-rm spark-3.3.2-bin-hadoop3.tgz
\ No newline at end of file
+rm spark-3.4.0-bin-hadoop3.tgz
\ No newline at end of file
diff --git a/binder/start b/binder/start
index fe679bda2c..923b372b7c 100755
--- a/binder/start
+++ b/binder/start
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-SPARK_HOME=$HOME/spark-3.3.2-bin-hadoop3
+SPARK_HOME=$HOME/spark-3.4.0-bin-hadoop3
 export PATH=$SPARK_HOME/bin:$PATH
 export PYTHONPATH=$SPARK_HOME/python:$PYTHONPATH
 export PYSPARK_SUBMIT_ARGS="--master local[*] pyspark-shell"
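
Note on the committed stderr output in the notebook hunk above: the trace records the local executor losing its driver endpoint (RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734) and exiting after failing to send heartbeats more than 60 times, which means the driver died or was restarted under the session. If this recurs when re-running the notebook, one possible mitigation is to give the session more timeout headroom when building the context. A minimal sketch, reusing the package coordinates from the notebook's config cell; the two timeout settings and their values are illustrative additions, not part of this patch:

from sedona.spark import SedonaContext

config = (
    SedonaContext.builder()
    .config('spark.jars.packages',
            'org.apache.sedona:sedona-spark-shaded-3.4_2.12:1.5.0,'
            'org.datasyslab:geotools-wrapper:1.5.0-28.2')
    # Standard Spark settings; the defaults are 120s and 10s respectively.
    .config('spark.network.timeout', '600s')
    .config('spark.executor.heartbeatInterval', '60s')
    .getOrCreate()
)
sedona = SedonaContext.create(config)

The heartbeat interval must stay well below spark.network.timeout, otherwise executors are declared lost before they ever report in.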
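
The postBuild change stages hadoop-aws 3.3.4 and the matching aws-java-sdk-bundle into $SPARK_HOME/jars, which is what lets the Binder notebooks read s3a:// paths. A rough usage sketch; the bucket name is hypothetical and anonymous access to a public bucket is assumed:

# Configure the S3A credentials provider on the running session.
hadoop_conf = sedona.sparkContext._jsc.hadoopConfiguration()
hadoop_conf.set('fs.s3a.aws.credentials.provider',
                'org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider')

# Read a CSV from S3 exactly like the local data files.
df = sedona.read.csv('s3a://example-public-bucket/arealm-small.csv')
df.show(5)

The SDK bundle version (1.12.402) is pinned alongside hadoop-aws 3.3.4 because mixing mismatched versions of those two jars is a classic source of runtime linkage errors.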